gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.http.common;

import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.stream.Collectors;

import javax.servlet.AsyncContext;
import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.apache.camel.Exchange;
import org.apache.camel.ExchangePattern;
import org.apache.camel.RuntimeCamelException;
import org.apache.camel.util.ObjectHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * A servlet to use as a Camel route as entry.
 */
public class CamelServlet extends HttpServlet implements HttpRegistryProvider {

    /** Name of the servlet init-parameter that enables asynchronous request processing. */
    public static final String ASYNC_PARAM = "async";

    /** All HTTP methods this servlet knows how to match registered consumers against. */
    public static final List<String> METHODS
            = Arrays.asList("GET", "HEAD", "POST", "PUT", "DELETE", "TRACE", "OPTIONS", "CONNECT", "PATCH");

    private static final long serialVersionUID = -7061982839117697829L;

    protected final Logger log = LoggerFactory.getLogger(getClass());

    /**
     * We have to define this explicitly so the name can be set as we can not always be sure that it is already set via
     * the init method
     */
    private String servletName;
    private boolean async;

    private ServletResolveConsumerStrategy servletResolveConsumerStrategy = new HttpServletResolveConsumerStrategy();
    private final ConcurrentMap<String, HttpConsumer> consumers = new ConcurrentHashMap<>();

    @Override
    public void init(ServletConfig config) throws ServletException {
        super.init(config);
        this.servletName = config.getServletName();

        final String asyncParam = config.getInitParameter(ASYNC_PARAM);
        // ObjectHelper.toBoolean returns a (possibly null) Boolean; the previous ternary
        // auto-unboxed it and could throw an NPE for an unparsable init-parameter value
        this.async = asyncParam != null && Boolean.TRUE.equals(ObjectHelper.toBoolean(asyncParam));
        log.trace("servlet '{}' initialized with: async={}", servletName, async);
    }

    @Override
    protected void service(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
        if (isAsync()) {
            final AsyncContext context = req.startAsync();
            // run async
            context.start(() -> doServiceAsync(context));
        } else {
            doService(req, resp);
        }
    }

    /**
     * This is used to handle request asynchronously
     *
     * @param context the {@link AsyncContext}
     */
    protected void doServiceAsync(AsyncContext context) {
        final HttpServletRequest request = (HttpServletRequest) context.getRequest();
        final HttpServletResponse response = (HttpServletResponse) context.getResponse();
        try {
            doService(request, response);
        } catch (Exception e) {
            // An error shouldn't occur as we should handle most errors in doService
            log.error("Error processing request", e);
            try {
                response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
            } catch (Exception e1) {
                log.debug("Cannot send reply to client!", e1);
            }
            // Need to wrap it in RuntimeException as it occurs in a Runnable
            throw new RuntimeCamelException(e);
        } finally {
            context.complete();
        }
    }

    /**
     * This is the logical implementation to handle request with {@link CamelServlet} This is where most exceptions
     * should be handled
     *
     * @param  request          the {@link HttpServletRequest}
     * @param  response         the {@link HttpServletResponse}
     * @throws ServletException
     * @throws IOException
     */
    protected void doService(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        log.trace("Service: {}", request);

        // Is there a consumer registered for the request.
        HttpConsumer consumer = resolve(request);
        if (consumer == null) {
            // okay we cannot process this request so return either 404 or 405
            sendNotFoundOrMethodNotAllowed(request, response);
            return;
        }

        // are we suspended?
        if (consumer.isSuspended()) {
            log.debug("Consumer suspended, cannot service request {}", request);
            response.sendError(HttpServletResponse.SC_SERVICE_UNAVAILABLE);
            return;
        }

        // if its an OPTIONS request then return which methods are allowed
        if ("OPTIONS".equals(request.getMethod()) && !consumer.isOptionsEnabled()) {
            answerOptions(consumer, request, response);
            return;
        }

        if (consumer.getEndpoint().getHttpMethodRestrict() != null
                && !consumer.getEndpoint().getHttpMethodRestrict().contains(request.getMethod())) {
            response.sendError(HttpServletResponse.SC_METHOD_NOT_ALLOWED);
            return;
        }

        if ("TRACE".equals(request.getMethod()) && !consumer.isTraceEnabled()) {
            response.sendError(HttpServletResponse.SC_METHOD_NOT_ALLOWED);
            return;
        }

        // create exchange and set data on it
        Exchange exchange = consumer.getEndpoint().createExchange(ExchangePattern.InOut);

        if (consumer.getEndpoint().isBridgeEndpoint()) {
            exchange.setProperty(Exchange.SKIP_GZIP_ENCODING, Boolean.TRUE);
            exchange.setProperty(Exchange.SKIP_WWW_FORM_URLENCODED, Boolean.TRUE);
        }
        if (consumer.getEndpoint().isDisableStreamCache()) {
            exchange.setProperty(Exchange.DISABLE_HTTP_STREAM_CACHE, Boolean.TRUE);
        }

        // we override the classloader before building the HttpMessage just in case the binding
        // does some class resolution
        ClassLoader oldTccl = overrideTccl(exchange);
        HttpHelper.setCharsetFromContentType(request.getContentType(), exchange);
        exchange.setIn(new HttpMessage(exchange, consumer.getEndpoint(), request, response));

        // set context path as header
        String contextPath = consumer.getEndpoint().getPath();
        exchange.getIn().setHeader("CamelServletContextPath", contextPath);

        String httpPath = (String) exchange.getIn().getHeader(Exchange.HTTP_PATH);
        // here we just remove the CamelServletContextPath part from the HTTP_PATH;
        // guard against a missing HTTP_PATH header which would otherwise cause an NPE
        if (contextPath != null && httpPath != null && httpPath.startsWith(contextPath)) {
            exchange.getIn().setHeader(Exchange.HTTP_PATH, httpPath.substring(contextPath.length()));
        }

        // we want to handle the UoW
        try {
            consumer.createUoW(exchange);
        } catch (Exception e) {
            log.error("Error processing request", e);
            throw new ServletException(e);
        }

        try {
            if (log.isTraceEnabled()) {
                log.trace("Processing request for exchangeId: {}", exchange.getExchangeId());
            }
            // process the exchange
            consumer.getProcessor().process(exchange);
        } catch (Exception e) {
            // remember the failure on the exchange so the binding can write a proper error response
            exchange.setException(e);
        }

        try {
            // now lets output to the response
            if (log.isTraceEnabled()) {
                log.trace("Writing response for exchangeId: {}", exchange.getExchangeId());
            }
            Integer bs = consumer.getEndpoint().getResponseBufferSize();
            if (bs != null) {
                log.trace("Using response buffer size: {}", bs);
                response.setBufferSize(bs);
            }
            consumer.getBinding().writeResponse(exchange, response);
        } catch (IOException e) {
            log.error("Error processing request", e);
            throw e;
        } catch (Exception e) {
            log.error("Error processing request", e);
            throw new ServletException(e);
        } finally {
            consumer.doneUoW(exchange);
            restoreTccl(exchange, oldTccl);
        }
    }

    /**
     * No consumer matched the request: answer 405 when some other HTTP method would have matched the "same" resource,
     * otherwise 404.
     */
    private void sendNotFoundOrMethodNotAllowed(HttpServletRequest request, HttpServletResponse response)
            throws IOException {
        boolean hasAnyMethod = METHODS.stream()
                .anyMatch(m -> getServletResolveConsumerStrategy().isHttpMethodAllowed(request, m, getConsumers()));
        if (hasAnyMethod) {
            log.debug("No consumer to service request {} as method {} is not allowed", request, request.getMethod());
            response.sendError(HttpServletResponse.SC_METHOD_NOT_ALLOWED);
        } else {
            log.debug("No consumer to service request {} as resource is not found", request);
            response.sendError(HttpServletResponse.SC_NOT_FOUND);
        }
    }

    /**
     * Answers an OPTIONS request with an Allow header listing the methods the consumer accepts.
     */
    private void answerOptions(HttpConsumer consumer, HttpServletRequest request, HttpServletResponse response) {
        String allowedMethods = METHODS.stream()
                .filter(m -> getServletResolveConsumerStrategy().isHttpMethodAllowed(request, m, getConsumers()))
                .collect(Collectors.joining(","));
        // Collectors.joining never returns null, only an empty string; the previous null checks
        // here were dead code and an empty match set produced a malformed ",OPTIONS" header
        if (allowedMethods.isEmpty() && consumer.getEndpoint().getHttpMethodRestrict() != null) {
            allowedMethods = consumer.getEndpoint().getHttpMethodRestrict();
        }
        if (allowedMethods.isEmpty()) {
            // allow them all
            allowedMethods = "GET,HEAD,POST,PUT,DELETE,TRACE,OPTIONS,CONNECT,PATCH";
        }
        if (!allowedMethods.contains("OPTIONS")) {
            allowedMethods = allowedMethods + ",OPTIONS";
        }
        response.addHeader("Allow", allowedMethods);
        response.setStatus(HttpServletResponse.SC_OK);
    }

    /**
     * @deprecated use
     *             {@link ServletResolveConsumerStrategy#resolve(javax.servlet.http.HttpServletRequest, java.util.Map)}
     */
    @Deprecated
    protected HttpConsumer resolve(HttpServletRequest request) {
        return getServletResolveConsumerStrategy().resolve(request, getConsumers());
    }

    @Override
    public void connect(HttpConsumer consumer) {
        log.debug("Connecting consumer: {}", consumer);
        consumers.put(consumer.getEndpoint().getEndpointUri(), consumer);
    }

    @Override
    public void disconnect(HttpConsumer consumer) {
        log.debug("Disconnecting consumer: {}", consumer);
        consumers.remove(consumer.getEndpoint().getEndpointUri());
    }

    @Override
    public String getServletName() {
        return servletName;
    }

    public void setServletName(String servletName) {
        this.servletName = servletName;
    }

    public ServletResolveConsumerStrategy getServletResolveConsumerStrategy() {
        return servletResolveConsumerStrategy;
    }

    public void setServletResolveConsumerStrategy(ServletResolveConsumerStrategy servletResolveConsumerStrategy) {
        this.servletResolveConsumerStrategy = servletResolveConsumerStrategy;
    }

    public boolean isAsync() {
        return async;
    }

    public void setAsync(boolean async) {
        this.async = async;
    }

    public Map<String, HttpConsumer> getConsumers() {
        return Collections.unmodifiableMap(consumers);
    }

    /**
     * Override the Thread Context ClassLoader if need be.
     *
     * @return old classloader if overridden; otherwise returns null
     */
    protected ClassLoader overrideTccl(final Exchange exchange) {
        ClassLoader oldClassLoader = Thread.currentThread().getContextClassLoader();
        ClassLoader appCtxCl = exchange.getContext().getApplicationContextClassLoader();
        if (oldClassLoader == null || appCtxCl == null) {
            return null;
        }

        if (!oldClassLoader.equals(appCtxCl)) {
            Thread.currentThread().setContextClassLoader(appCtxCl);
            if (log.isTraceEnabled()) {
                log.trace("Overrode TCCL for exchangeId {} to {} on thread {}",
                        new Object[] { exchange.getExchangeId(), appCtxCl, Thread.currentThread().getName() });
            }
            return oldClassLoader;
        }
        return null;
    }

    /**
     * Restore the Thread Context ClassLoader if the old TCCL is not null.
     */
    protected void restoreTccl(final Exchange exchange, ClassLoader oldTccl) {
        if (oldTccl == null) {
            return;
        }
        Thread.currentThread().setContextClassLoader(oldTccl);
        if (log.isTraceEnabled()) {
            // use Object[] consistently with overrideTccl (no need to stringify the classloader)
            log.trace("Restored TCCL for exchangeId {} to {} on thread {}",
                    new Object[] { exchange.getExchangeId(), oldTccl, Thread.currentThread().getName() });
        }
    }
}
/*
 * SFTPPath.java
 * Copyright 2016 Rob Spoor
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.github.robtimus.filesystems.sftp;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;
import java.nio.channels.SeekableByteChannel;
import java.nio.file.AccessMode;
import java.nio.file.CopyOption;
import java.nio.file.DirectoryStream;
import java.nio.file.DirectoryStream.Filter;
import java.nio.file.FileStore;
import java.nio.file.LinkOption;
import java.nio.file.OpenOption;
import java.nio.file.Path;
import java.nio.file.WatchEvent.Kind;
import java.nio.file.WatchEvent.Modifier;
import java.nio.file.WatchKey;
import java.nio.file.WatchService;
import java.nio.file.attribute.FileAttribute;
import java.nio.file.attribute.FileTime;
import java.nio.file.attribute.GroupPrincipal;
import java.nio.file.attribute.PosixFileAttributes;
import java.nio.file.attribute.PosixFilePermission;
import java.nio.file.attribute.UserPrincipal;
import java.util.Map;
import java.util.Objects;
import java.util.Set;

import com.github.robtimus.filesystems.Messages;
import com.github.robtimus.filesystems.SimpleAbstractPath;

/**
 * A path for SFTP file systems.
 * <p>
 * This class is a thin facade: the path string manipulation lives in {@link SimpleAbstractPath},
 * while every actual file operation is delegated to the owning {@link SFTPFileSystem}.
 *
 * @author Rob Spoor
 */
class SFTPPath extends SimpleAbstractPath {

    // The file system that created this path; never null. All file operations delegate to it.
    private final SFTPFileSystem fs;

    SFTPPath(SFTPFileSystem fs, String path) {
        super(path);
        this.fs = Objects.requireNonNull(fs);
    }

    // Used by createPath: skips re-normalization when the path string is already normalized.
    private SFTPPath(SFTPFileSystem fs, String path, boolean normalized) {
        super(path, normalized);
        this.fs = Objects.requireNonNull(fs);
    }

    // Factory hook for SimpleAbstractPath, so derived paths stay SFTPPath instances
    // bound to the same file system.
    @Override
    protected SFTPPath createPath(String path) {
        return new SFTPPath(fs, path, true);
    }

    @Override
    public SFTPFileSystem getFileSystem() {
        return fs;
    }

    // Covariant overrides: every path returned by SimpleAbstractPath is produced through
    // createPath above, so these casts to SFTPPath are safe; the overrides merely narrow
    // the declared return type for callers.

    @Override
    public SFTPPath getRoot() {
        return (SFTPPath) super.getRoot();
    }

    @Override
    public SFTPPath getFileName() {
        return (SFTPPath) super.getFileName();
    }

    @Override
    public SFTPPath getParent() {
        return (SFTPPath) super.getParent();
    }

    @Override
    public SFTPPath getName(int index) {
        return (SFTPPath) super.getName(index);
    }

    @Override
    public SFTPPath subpath(int beginIndex, int endIndex) {
        return (SFTPPath) super.subpath(beginIndex, endIndex);
    }

    @Override
    public SFTPPath normalize() {
        return (SFTPPath) super.normalize();
    }

    @Override
    public SFTPPath resolve(Path other) {
        return (SFTPPath) super.resolve(other);
    }

    @Override
    public SFTPPath resolve(String other) {
        return (SFTPPath) super.resolve(other);
    }

    @Override
    public SFTPPath resolveSibling(Path other) {
        return (SFTPPath) super.resolveSibling(other);
    }

    @Override
    public SFTPPath resolveSibling(String other) {
        return (SFTPPath) super.resolveSibling(other);
    }

    @Override
    public SFTPPath relativize(Path other) {
        return (SFTPPath) super.relativize(other);
    }

    @Override
    public URI toUri() {
        return fs.toUri(this);
    }

    @Override
    public SFTPPath toAbsolutePath() {
        return fs.toAbsolutePath(this);
    }

    @Override
    public SFTPPath toRealPath(LinkOption... options) throws IOException {
        return fs.toRealPath(this, options);
    }

    // Watching is not supported for SFTP paths; always throws UnsupportedOperationException.
    @Override
    public WatchKey register(WatchService watcher, Kind<?>[] events, Modifier... modifiers) throws IOException {
        throw Messages.unsupportedOperation(Path.class, "register"); //$NON-NLS-1$
    }

    @Override
    public String toString() {
        return fs.toString(this);
    }

    // Package-private file operations below — presumably invoked by the file system provider;
    // verify against the provider class. Each one simply delegates to the owning file system.

    InputStream newInputStream(OpenOption... options) throws IOException {
        return fs.newInputStream(this, options);
    }

    OutputStream newOutputStream(OpenOption... options) throws IOException {
        return fs.newOutputStream(this, options);
    }

    SeekableByteChannel newByteChannel(Set<? extends OpenOption> options, FileAttribute<?>... attrs) throws IOException {
        return fs.newByteChannel(this, options, attrs);
    }

    DirectoryStream<Path> newDirectoryStream(Filter<? super Path> filter) throws IOException {
        return fs.newDirectoryStream(this, filter);
    }

    void createDirectory(FileAttribute<?>... attrs) throws IOException {
        fs.createDirectory(this, attrs);
    }

    void delete() throws IOException {
        fs.delete(this);
    }

    SFTPPath readSymbolicLink() throws IOException {
        return fs.readSymbolicLink(this);
    }

    void copy(SFTPPath target, CopyOption... options) throws IOException {
        fs.copy(this, target, options);
    }

    void move(SFTPPath target, CopyOption... options) throws IOException {
        fs.move(this, target, options);
    }

    @SuppressWarnings("resource")
    boolean isSameFile(Path other) throws IOException {
        // fast paths: identical/equal paths match; paths from a different file system never do
        if (this.equals(other)) {
            return true;
        }
        if (other == null || getFileSystem() != other.getFileSystem()) {
            return false;
        }
        // same file system, so the cast is safe
        return fs.isSameFile(this, (SFTPPath) other);
    }

    boolean isHidden() throws IOException {
        return fs.isHidden(this);
    }

    FileStore getFileStore() throws IOException {
        return fs.getFileStore(this);
    }

    void checkAccess(AccessMode... modes) throws IOException {
        fs.checkAccess(this, modes);
    }

    PosixFileAttributes readAttributes(LinkOption... options) throws IOException {
        return fs.readAttributes(this, options);
    }

    Map<String, Object> readAttributes(String attributes, LinkOption... options) throws IOException {
        return fs.readAttributes(this, attributes, options);
    }

    void setOwner(UserPrincipal owner) throws IOException {
        fs.setOwner(this, owner);
    }

    void setGroup(GroupPrincipal group) throws IOException {
        fs.setGroup(this, group);
    }

    void setPermissions(Set<PosixFilePermission> permissions) throws IOException {
        fs.setPermissions(this, permissions);
    }

    void setTimes(FileTime lastModifiedTime, FileTime lastAccessTime, FileTime createTime) throws IOException {
        fs.setTimes(this, lastModifiedTime, lastAccessTime, createTime);
    }

    void setAttribute(String attribute, Object value, LinkOption... options) throws IOException {
        fs.setAttribute(this, attribute, value, options);
    }

    long getTotalSpace() throws IOException {
        return fs.getTotalSpace(this);
    }

    long getUsableSpace() throws IOException {
        return fs.getUsableSpace(this);
    }

    long getUnallocatedSpace() throws IOException {
        return fs.getUnallocatedSpace(this);
    }
}
/**
 * <copyright>
 * </copyright>
 *
 * $Id$
 */
package net.opengis.citygml.impl;

import net.opengis.citygml.CitygmlPackage;
import net.opengis.citygml.ExternalObjectReferenceType;

import org.eclipse.emf.common.notify.Notification;

import org.eclipse.emf.ecore.EClass;

import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.eclipse.emf.ecore.impl.EObjectImpl;

/**
 * <!-- begin-user-doc -->
 * An implementation of the model object '<em><b>External Object Reference Type</b></em>'.
 * <!-- end-user-doc -->
 * <p>
 * The following features are implemented:
 * <ul>
 *   <li>{@link net.opengis.citygml.impl.ExternalObjectReferenceTypeImpl#getName <em>Name</em>}</li>
 *   <li>{@link net.opengis.citygml.impl.ExternalObjectReferenceTypeImpl#getUri <em>Uri</em>}</li>
 * </ul>
 * </p>
 *
 * @generated
 */
// NOTE(review): EMF-generated implementation class. Manual changes outside the
// begin-user-doc/end-user-doc regions will be lost when the model is regenerated,
// so the generated reflective eGet/eSet/eUnset/eIsSet patterns are left untouched.
public class ExternalObjectReferenceTypeImpl extends EObjectImpl implements ExternalObjectReferenceType {
    /**
     * The default value of the '{@link #getName() <em>Name</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getName()
     * @generated
     * @ordered
     */
    protected static final String NAME_EDEFAULT = null;

    /**
     * The cached value of the '{@link #getName() <em>Name</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getName()
     * @generated
     * @ordered
     */
    protected String name = NAME_EDEFAULT;

    /**
     * The default value of the '{@link #getUri() <em>Uri</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getUri()
     * @generated
     * @ordered
     */
    protected static final String URI_EDEFAULT = null;

    /**
     * The cached value of the '{@link #getUri() <em>Uri</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getUri()
     * @generated
     * @ordered
     */
    protected String uri = URI_EDEFAULT;

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    protected ExternalObjectReferenceTypeImpl() {
        super();
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    protected EClass eStaticClass() {
        return CitygmlPackage.Literals.EXTERNAL_OBJECT_REFERENCE_TYPE;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public String getName() {
        return name;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setName(String newName) {
        String oldName = name;
        name = newName;
        // fire a SET notification so EMF adapters/observers see the change
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, CitygmlPackage.EXTERNAL_OBJECT_REFERENCE_TYPE__NAME, oldName, name));
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public String getUri() {
        return uri;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setUri(String newUri) {
        String oldUri = uri;
        uri = newUri;
        // fire a SET notification so EMF adapters/observers see the change
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, CitygmlPackage.EXTERNAL_OBJECT_REFERENCE_TYPE__URI, oldUri, uri));
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Object eGet(int featureID, boolean resolve, boolean coreType) {
        switch (featureID) {
            case CitygmlPackage.EXTERNAL_OBJECT_REFERENCE_TYPE__NAME:
                return getName();
            case CitygmlPackage.EXTERNAL_OBJECT_REFERENCE_TYPE__URI:
                return getUri();
        }
        return super.eGet(featureID, resolve, coreType);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eSet(int featureID, Object newValue) {
        switch (featureID) {
            case CitygmlPackage.EXTERNAL_OBJECT_REFERENCE_TYPE__NAME:
                setName((String)newValue);
                return;
            case CitygmlPackage.EXTERNAL_OBJECT_REFERENCE_TYPE__URI:
                setUri((String)newValue);
                return;
        }
        super.eSet(featureID, newValue);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eUnset(int featureID) {
        switch (featureID) {
            case CitygmlPackage.EXTERNAL_OBJECT_REFERENCE_TYPE__NAME:
                setName(NAME_EDEFAULT);
                return;
            case CitygmlPackage.EXTERNAL_OBJECT_REFERENCE_TYPE__URI:
                setUri(URI_EDEFAULT);
                return;
        }
        super.eUnset(featureID);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public boolean eIsSet(int featureID) {
        switch (featureID) {
            case CitygmlPackage.EXTERNAL_OBJECT_REFERENCE_TYPE__NAME:
                // "set" means: differs from the default (null-safe comparison)
                return NAME_EDEFAULT == null ? name != null : !NAME_EDEFAULT.equals(name);
            case CitygmlPackage.EXTERNAL_OBJECT_REFERENCE_TYPE__URI:
                return URI_EDEFAULT == null ? uri != null : !URI_EDEFAULT.equals(uri);
        }
        return super.eIsSet(featureID);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public String toString() {
        if (eIsProxy()) return super.toString();

        StringBuffer result = new StringBuffer(super.toString());
        result.append(" (name: ");
        result.append(name);
        result.append(", uri: ");
        result.append(uri);
        result.append(')');
        return result.toString();
    }

} //ExternalObjectReferenceTypeImpl
/*
 * Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.cognitoidp.model;

import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;

/**
 * <p>
 * The type of code delivery details being returned from the server.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/CodeDeliveryDetailsType"
 *      target="_top">AWS API Documentation</a>
 */
// NOTE(review): generated by the AWS SDK code generator (see @Generated below);
// the unusual equals/hashCode/clone idioms are the generator's fixed patterns and
// are intentionally left as-is — hand edits would be overwritten on regeneration.
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class CodeDeliveryDetailsType implements Serializable, Cloneable, StructuredPojo {

    /**
     * <p>
     * The destination for the code delivery details.
     * </p>
     */
    private String destination;
    /**
     * <p>
     * The delivery medium (email message or phone number).
     * </p>
     */
    private String deliveryMedium;
    /**
     * <p>
     * The name of the attribute in the code delivery details type.
     * </p>
     */
    private String attributeName;

    /**
     * <p>
     * The destination for the code delivery details.
     * </p>
     *
     * @param destination
     *        The destination for the code delivery details.
     */
    public void setDestination(String destination) {
        this.destination = destination;
    }

    /**
     * <p>
     * The destination for the code delivery details.
     * </p>
     *
     * @return The destination for the code delivery details.
     */
    public String getDestination() {
        return this.destination;
    }

    /**
     * <p>
     * The destination for the code delivery details.
     * </p>
     *
     * @param destination
     *        The destination for the code delivery details.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CodeDeliveryDetailsType withDestination(String destination) {
        setDestination(destination);
        return this;
    }

    /**
     * <p>
     * The delivery medium (email message or phone number).
     * </p>
     *
     * @param deliveryMedium
     *        The delivery medium (email message or phone number).
     * @see DeliveryMediumType
     */
    public void setDeliveryMedium(String deliveryMedium) {
        this.deliveryMedium = deliveryMedium;
    }

    /**
     * <p>
     * The delivery medium (email message or phone number).
     * </p>
     *
     * @return The delivery medium (email message or phone number).
     * @see DeliveryMediumType
     */
    public String getDeliveryMedium() {
        return this.deliveryMedium;
    }

    /**
     * <p>
     * The delivery medium (email message or phone number).
     * </p>
     *
     * @param deliveryMedium
     *        The delivery medium (email message or phone number).
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see DeliveryMediumType
     */
    public CodeDeliveryDetailsType withDeliveryMedium(String deliveryMedium) {
        setDeliveryMedium(deliveryMedium);
        return this;
    }

    /**
     * <p>
     * The delivery medium (email message or phone number).
     * </p>
     *
     * @param deliveryMedium
     *        The delivery medium (email message or phone number).
     * @see DeliveryMediumType
     */
    public void setDeliveryMedium(DeliveryMediumType deliveryMedium) {
        // enum overload stores the enum's wire value as a plain string
        this.deliveryMedium = deliveryMedium.toString();
    }

    /**
     * <p>
     * The delivery medium (email message or phone number).
     * </p>
     *
     * @param deliveryMedium
     *        The delivery medium (email message or phone number).
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see DeliveryMediumType
     */
    public CodeDeliveryDetailsType withDeliveryMedium(DeliveryMediumType deliveryMedium) {
        setDeliveryMedium(deliveryMedium);
        return this;
    }

    /**
     * <p>
     * The name of the attribute in the code delivery details type.
     * </p>
     *
     * @param attributeName
     *        The name of the attribute in the code delivery details type.
     */
    public void setAttributeName(String attributeName) {
        this.attributeName = attributeName;
    }

    /**
     * <p>
     * The name of the attribute in the code delivery details type.
     * </p>
     *
     * @return The name of the attribute in the code delivery details type.
     */
    public String getAttributeName() {
        return this.attributeName;
    }

    /**
     * <p>
     * The name of the attribute in the code delivery details type.
     * </p>
     *
     * @param attributeName
     *        The name of the attribute in the code delivery details type.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CodeDeliveryDetailsType withAttributeName(String attributeName) {
        setAttributeName(attributeName);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getDestination() != null)
            sb.append("Destination: ").append(getDestination()).append(",");
        if (getDeliveryMedium() != null)
            sb.append("DeliveryMedium: ").append(getDeliveryMedium()).append(",");
        if (getAttributeName() != null)
            sb.append("AttributeName: ").append(getAttributeName());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;

        if (obj instanceof CodeDeliveryDetailsType == false)
            return false;
        CodeDeliveryDetailsType other = (CodeDeliveryDetailsType) obj;
        // generator's null-xor pattern: fields are equal when both are null or both equal
        if (other.getDestination() == null ^ this.getDestination() == null)
            return false;
        if (other.getDestination() != null && other.getDestination().equals(this.getDestination()) == false)
            return false;
        if (other.getDeliveryMedium() == null ^ this.getDeliveryMedium() == null)
            return false;
        if (other.getDeliveryMedium() != null && other.getDeliveryMedium().equals(this.getDeliveryMedium()) == false)
            return false;
        if (other.getAttributeName() == null ^ this.getAttributeName() == null)
            return false;
        if (other.getAttributeName() != null && other.getAttributeName().equals(this.getAttributeName()) == false)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;

        hashCode = prime * hashCode + ((getDestination() == null) ? 0 : getDestination().hashCode());
        hashCode = prime * hashCode + ((getDeliveryMedium() == null) ? 0 : getDeliveryMedium().hashCode());
        hashCode = prime * hashCode + ((getAttributeName() == null) ? 0 : getAttributeName().hashCode());
        return hashCode;
    }

    @Override
    public CodeDeliveryDetailsType clone() {
        try {
            return (CodeDeliveryDetailsType) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.cognitoidp.model.transform.CodeDeliveryDetailsTypeMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
/* * Copyright 2014 Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.openrtb.util; import static com.google.common.truth.Truth.assertThat; import com.google.common.collect.ImmutableList; import com.google.openrtb.OpenRtb.BidRequest; import com.google.openrtb.OpenRtb.BidRequest.Imp; import com.google.openrtb.OpenRtb.BidRequest.Imp.Banner; import com.google.openrtb.OpenRtb.BidRequest.Imp.Native; import com.google.openrtb.OpenRtb.BidRequest.Imp.Video; import com.google.openrtb.OpenRtb.BidResponse; import com.google.openrtb.OpenRtb.BidResponse.SeatBid; import com.google.openrtb.OpenRtb.BidResponse.SeatBid.Bid; import com.google.openrtb.OpenRtb.ContentCategory; import java.util.Iterator; import java.util.function.Function; import java.util.function.Predicate; import java.util.stream.Collectors; import org.junit.Test; /** * Tests for {@link OpenRtbUtils}. 
*/
public class OpenRtbUtilsTest {

  /** Round-trips category names between enum form ("IAB10_1") and JSON form ("IAB10-1"). */
  @Test
  public void testCatUtils() {
    assertThat(OpenRtbUtils.categoryFromName("IAB10_1")).isSameInstanceAs(ContentCategory.IAB10_1);
    assertThat(OpenRtbUtils.categoryFromName("IAB10-1")).isSameInstanceAs(ContentCategory.IAB10_1);
    assertThat(OpenRtbUtils.categoryToJsonName("IAB10-1")).isEqualTo("IAB10-1");
    assertThat(OpenRtbUtils.categoryToJsonName("IAB10_1")).isEqualTo("IAB10-1");
    assertThat(OpenRtbUtils.categoryToJsonName(ContentCategory.IAB10_1)).isEqualTo("IAB10-1");
  }

  /** Impression lookup/filtering on a request with zero, then one, impression. */
  @Test
  public void testRequest_imps() {
    BidRequest request = BidRequest.newBuilder().setId("1").build();
    // Empty request: no predicate can match anything.
    assertThat(OpenRtbUtils.impsWith(request, imp -> true)).isEmpty();
    request = request.toBuilder().addImp(Imp.newBuilder().setId("1")).build();
    assertThat(OpenRtbUtils.impsWith(request, imp -> true)).hasSize(1);
    assertThat(OpenRtbUtils.impsWith(request, imp -> "notfound".equals(imp.getId()))).isEmpty();
    assertThat(OpenRtbUtils.impsWith(request, imp -> "1".equals(imp.getId()))).hasSize(1);
    assertThat(OpenRtbUtils.impWithId(request, "1")).isNotNull();
    assertThat(OpenRtbUtils.impStreamWith(request, imp -> "1".equals(imp.getId())).count())
        .isEqualTo(1);
  }

  /**
   * Type-filtered impression queries via {@code addFilters}. The fixture holds
   * 4 banner imps (ids 1-4, all with banner id "0"), 2 video imps (5-6) and
   * 1 native imp (7); the boolean triple selects banner/video/native.
   */
  @Test
  public void testRequest_imps_oftype() {
    BidRequest request = BidRequest.newBuilder()
        .setId("1")
        .addImp(Imp.newBuilder().setId("1").setBanner(Banner.newBuilder().setId("0")))
        .addImp(Imp.newBuilder().setId("2").setBanner(Banner.newBuilder().setId("0")))
        .addImp(Imp.newBuilder().setId("3").setBanner(Banner.newBuilder().setId("0")))
        .addImp(Imp.newBuilder().setId("4").setBanner(Banner.newBuilder().setId("0")))
        .addImp(Imp.newBuilder().setId("5").setVideo(Video.newBuilder()))
        .addImp(Imp.newBuilder().setId("6").setVideo(Video.newBuilder()))
        .addImp(Imp.newBuilder().setId("7").setNative(Native.newBuilder()))
        .build();
    // Banner
    assertThat(OpenRtbUtils.impsWith(request, OpenRtbUtils.addFilters(
        imp -> true, true, false, false))).hasSize(4);
    assertThat(OpenRtbUtils.impsWith(request, OpenRtbUtils.addFilters(
        OpenRtbUtils.IMP_ALL, true, false, false))).hasSize(4);
    assertThat(OpenRtbUtils.impsWith(request, OpenRtbUtils.addFilters(
        imp -> false, true, false, false))).isEmpty();
    assertThat(OpenRtbUtils.impsWith(request, OpenRtbUtils.addFilters(
        OpenRtbUtils.IMP_NONE, true, false, false))).isEmpty();
    // Filter-all case
    assertThat(OpenRtbUtils.impsWith(request, OpenRtbUtils.addFilters(
        imp -> "0".equals(imp.getBanner().getId()), true, false, false))).hasSize(4);
    // Filter-none case
    assertThat(OpenRtbUtils.impsWith(request, OpenRtbUtils.addFilters(
        imp -> "1".equals(imp.getBanner().getId()), true, false, false))).isEmpty();
    // Filter-1 case
    assertThat(OpenRtbUtils.impsWith(request, OpenRtbUtils.addFilters(
        imp -> "1".equals(imp.getId()), true, false, false))).hasSize(1);
    // Filter-N case
    assertThat(OpenRtbUtils.impsWith(request, OpenRtbUtils.addFilters(
        imp -> imp.getId().compareTo("1") > 0, true, false, false))).hasSize(3);
    assertThat(OpenRtbUtils.bannerImpWithId(request, "notfound", "2")).isNull();
    assertThat(OpenRtbUtils.bannerImpWithId(request, "1", "notfound")).isNull();
    assertThat(OpenRtbUtils.bannerImpWithId(request, "1", "0")).isNotNull();
    // Video
    assertThat(OpenRtbUtils.impsWith(request, OpenRtbUtils.addFilters(
        imp -> true, false, true, false))).hasSize(2);
    // Native
    assertThat(OpenRtbUtils.impsWith(request, OpenRtbUtils.addFilters(
        imp -> true, false, false, true))).hasSize(1);
    // Mixed
    assertThat(OpenRtbUtils.impsWith(request, OpenRtbUtils.addFilters(
        imp -> true, true, true, false))).hasSize(6);
    assertThat(OpenRtbUtils.impsWith(request, OpenRtbUtils.addFilters(
        imp -> true, true, false, true))).hasSize(5);
    assertThat(OpenRtbUtils.impsWith(request, OpenRtbUtils.addFilters(
        imp -> true, false, true, true))).hasSize(3);
    assertThat(OpenRtbUtils.impsWith(request, OpenRtbUtils.addFilters(
        imp -> true, true, true, true))).hasSize(7);
  }

  /**
   * Identity special cases of {@code addFilters}: with no type flags set, or with
   * the IMP_NONE predicate, the input predicate is returned as the same instance.
   */
  @Test
  public void testAddFilter_specialCases() {
    Predicate<Imp> pred = imp -> true;
    assertThat(OpenRtbUtils.addFilters(pred, false, false, false)).isSameInstanceAs(pred);
    assertThat(OpenRtbUtils.addFilters(OpenRtbUtils.IMP_NONE, true, true, true))
        .isSameInstanceAs(OpenRtbUtils.IMP_NONE);
    assertThat(OpenRtbUtils.addFilters(OpenRtbUtils.IMP_ALL, true, true, true))
        .isNotSameInstanceAs(OpenRtbUtils.IMP_ALL); // Mostly for coverage
  }

  /**
   * {@code seatBid} returns a cached builder per seat (anonymous and named), and
   * bid queries ({@code bids}, {@code bidWithId}, {@code bidsWith}) see the right
   * seat's bids; a null seat selects the anonymous seat.
   */
  @Test
  public void testResponse_bids() {
    BidResponse.Builder response = BidResponse.newBuilder().setCur("USD");
    OpenRtbUtils.seatBid(response, "unused");
    OpenRtbUtils.seatBid(response); // no seat
    SeatBid.Builder seatbidAnon = OpenRtbUtils.seatBid(response);
    assertThat(OpenRtbUtils.seatBid(response)).isSameInstanceAs(seatbidAnon);
    SeatBid.Builder seatbidX = OpenRtbUtils.seatBid(response, "x");
    assertThat(OpenRtbUtils.seatBid(response, "x")).isSameInstanceAs(seatbidX);
    assertThat(OpenRtbUtils.seatBid(response)).isNotSameInstanceAs(seatbidX);
    assertThat(OpenRtbUtils.bids(response)).isEmpty();
    Bid bid1 = buildHtmlBid("1", 100).build();
    OpenRtbUtils.seatBid(response).addBid(bid1);
    Bid bid11 = buildHtmlBid("11", 100).build();
    OpenRtbUtils.seatBid(response).addBid(bid11);
    assertThat(OpenRtbUtils.bids(response)).hasSize(2);
    assertThat(OpenRtbUtils.bids(response, "none")).isEmpty();
    // Null seat name resolves to the anonymous seat's bids.
    assertThat(OpenRtbUtils.bids(response, null)).hasSize(2);
    assertThat(OpenRtbUtils.bidWithId(response, "1").build()).isEqualTo(bid1);
    Bid bid2 = buildHtmlBid("2", 100).build();
    Bid bidUnused = buildHtmlBid("unused", 100).build();
    OpenRtbUtils.seatBid(response, "x").addBid(bidUnused);
    OpenRtbUtils.seatBid(response, "x").addBid(bid2);
    Bid bid22 = buildHtmlBid("22", 100).build();
    OpenRtbUtils.seatBid(response, "x").addBid(bid22);
    assertThat(OpenRtbUtils.bidWithId(response, "x", "2").build()).isEqualTo(bid2);
    assertThat(OpenRtbUtils.bidWithId(response, "x", "1")).isNull();
    assertThat(OpenRtbUtils.bidWithId(response, "none")).isNull();
    assertThat(OpenRtbUtils.bidWithId(response, "none", "1")).isNull();
    assertThat(OpenRtbUtils.bidWithId(response, null, "1")).isNotNull();
    Function<Bid.Builder, Bid> build = bid -> bid.buildPartial();
    // SEAT_ANY spans all three seats (anonymous, "unused", "x").
    assertThat(ImmutableList.copyOf(OpenRtbUtils.bidsWith(response, OpenRtbUtils.SEAT_ANY, null))
        .stream().map(build).collect(Collectors.toList()))
        .containsExactly(bid1, bid11, bidUnused, bid2, bid22);
    assertThat(OpenRtbUtils.bids(response, null).stream().map(build).collect(Collectors.toList()))
        .containsExactly(bid1, bid11);
    assertThat(OpenRtbUtils.bids(response, "x").stream().map(build).collect(Collectors.toList()))
        .containsExactly(bidUnused, bid2, bid22);
    Predicate<Bid.Builder> filterGoodBids = bid -> !"unused".equals(bid.getId());
    assertThat(OpenRtbUtils.bidsWith(response, OpenRtbUtils.SEAT_ANY, filterGoodBids)).hasSize(4);
    assertThat(OpenRtbUtils.bidStreamWith(response, OpenRtbUtils.SEAT_ANY, filterGoodBids).count())
        .isEqualTo(4);
    assertThat(OpenRtbUtils.bidsWith(response, "none", filterGoodBids)).isEmpty();
    assertThat(OpenRtbUtils.bidsWith(response, "x", filterGoodBids)).hasSize(2);
    assertThat(OpenRtbUtils.bidsWith(response, null, filterGoodBids)).hasSize(2);
  }

  /** The iterator returned by {@code bids()} is read-only: {@code remove()} must throw. */
  @Test(expected = UnsupportedOperationException.class)
  public void testResponse_unsupportedRemove() {
    BidResponse.Builder response = BidResponse.newBuilder().addSeatbid(SeatBid.newBuilder()
        .addBid(buildHtmlBid("1", 100)));
    Iterator<Bid.Builder> bids = OpenRtbUtils.bids(response).iterator();
    bids.next();
    bids.remove();
  }

  /**
   * {@code removeBids} keeps only the bids accepted by the filter and returns
   * whether any bid was actually removed.
   */
  @Test
  public void testResponse_remove() {
    BidResponse.Builder response = BidResponse.newBuilder()
        .addSeatbid(SeatBid.newBuilder()
            .addBid(buildHtmlBid("1", 100))
            .addBid(buildHtmlBid("2", 100))
            .addBid(buildHtmlBid("3", 200)))
        .addSeatbid(SeatBid.newBuilder().setSeat("unused"));
    // Keep-all filter: nothing is removed.
    OpenRtbUtils.removeBids(response, bid -> true);
    assertThat(OpenRtbUtils.bids(response)).hasSize(3);
    assertThat(OpenRtbUtils.removeBids(response, bid -> !"1".equals(bid.getId()))).isTrue();
    assertThat(OpenRtbUtils.bids(response)).hasSize(2);
    OpenRtbUtils.seatBid(response, "x").addBid(buildHtmlBid("unused", 100));
    OpenRtbUtils.seatBid(response, "x").addBid(buildHtmlBid("4", 100));
    assertThat(OpenRtbUtils.removeBids(response, "x", bid -> !"4".equals(bid.getId()))).isTrue();
    assertThat(OpenRtbUtils.bids(response, "x")).hasSize(1);
    assertThat(OpenRtbUtils.removeBids(response, "none", bid -> false))
        .isFalse();
    assertThat(OpenRtbUtils.removeBids(response, null, bid -> false))
        .isTrue();
    assertThat(OpenRtbUtils.removeBids(response, "x", bid -> false))
        .isTrue();
    assertThat(OpenRtbUtils.bids(response, "x")).isEmpty();
    assertThat(OpenRtbUtils.removeBids(response, bid -> false)).isFalse();
    assertThat(OpenRtbUtils.bids(response)).isEmpty();
  }

  /**
   * {@code updateBids} applies an updater function to each bid builder and
   * returns whether any invocation reported a change.
   */
  @Test
  public void testResponse_updater() {
    BidResponse.Builder response = BidResponse.newBuilder().addSeatbid(SeatBid.newBuilder()
        .addBid(buildHtmlBid("1", 100))
        .addBid(buildHtmlBid("2", 200)));
    OpenRtbUtils.seatBid(response, "unused");
    // Doubles any price below 150 and reports a change; otherwise leaves the bid alone.
    Function<Bid.Builder, Boolean> inflation = bid -> {
      if (bid.getPrice() < 150) {
        bid.setPrice(bid.getPrice() * 2);
        return true;
      } else {
        return false;
      }
    };
    Function<Bid.Builder, Boolean> noUpdates = bid -> false;
    assertThat(OpenRtbUtils.updateBids(response, inflation)).isTrue();
    // Second pass: both prices are now >= 150, so nothing changes.
    assertThat(OpenRtbUtils.updateBids(response, noUpdates)).isFalse();
    assertThat(OpenRtbUtils.updateBids(response, noUpdates)).isFalse();
    OpenRtbUtils.seatBid(response, "x").addBid(buildHtmlBid("1", 100));
    OpenRtbUtils.seatBid(response, "x").addBid(buildHtmlBid("2", 200));
    assertThat(OpenRtbUtils.updateBids(response, "x", inflation)).isTrue();
    assertThat(OpenRtbUtils.updateBids(response, "x", noUpdates)).isFalse();
    assertThat(OpenRtbUtils.updateBids(response, "none", noUpdates)).isFalse();
    assertThat(OpenRtbUtils.updateBids(response, null, noUpdates)).isFalse();
  }

  /** Builds a minimal bid fixture whose adid/impid are derived from {@code id}. */
  private static Bid.Builder buildHtmlBid(String id, long bidMicros) {
    return Bid.newBuilder()
        .setId(id)
        .setAdid("ad" + id)
        .setImpid("imp" + id)
        .setPrice(bidMicros);
  }
}
/********************************************************************************** * $URL:$ * $Id:$ *********************************************************************************** * * Copyright (c) 2008 The Sakai Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ECL-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * **********************************************************************************/ package org.sakaiproject.calendar.impl; import org.apache.commons.codec.binary.Base64; import org.apache.commons.codec.binary.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.sakaiproject.authz.api.SecurityService; import org.sakaiproject.calendar.api.Calendar; import org.sakaiproject.calendar.api.*; import org.sakaiproject.calendar.api.CalendarEvent.EventAccess; import org.sakaiproject.component.api.ServerConfigurationService; import org.sakaiproject.entity.api.EntityManager; import org.sakaiproject.entity.api.Reference; import org.sakaiproject.entity.api.ResourceProperties; import org.sakaiproject.entity.api.ResourcePropertiesEdit; import org.sakaiproject.exception.*; import org.sakaiproject.id.api.IdManager; import org.sakaiproject.javax.Filter; import org.sakaiproject.memory.api.MemoryService; import org.sakaiproject.memory.api.SimpleConfiguration; import org.sakaiproject.site.api.Site; import org.sakaiproject.site.api.SiteService; import org.sakaiproject.site.api.ToolConfiguration; import org.sakaiproject.time.api.Time; import org.sakaiproject.time.api.TimeRange; import 
org.sakaiproject.time.api.TimeService; import org.sakaiproject.tool.api.Placement; import org.sakaiproject.tool.api.SessionManager; import org.sakaiproject.tool.api.ToolManager; import org.sakaiproject.util.BaseResourcePropertiesEdit; import org.sakaiproject.util.FormattedText; import org.w3c.dom.Document; import org.w3c.dom.Element; import java.io.BufferedInputStream; import java.io.IOException; import java.net.MalformedURLException; import java.net.URL; import java.net.URLConnection; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.util.*; public class BaseExternalCalendarSubscriptionService implements ExternalCalendarSubscriptionService { /** Logging */ private static Logger m_log = LoggerFactory.getLogger(BaseExternalCalendarSubscriptionService.class); /** Schedule tool ID */ private final static String SCHEDULE_TOOL_ID = "sakai.schedule"; /** Default context for institutional subscriptions */ private final static String INSTITUTIONAL_CONTEXT = "!worksite"; /** Default context for user-provided subscriptions */ private final static String USER_CONTEXT = "!user"; /** Default connect timeout when retrieving external subscriptions */ private final static int TIMEOUT = 30000; /** iCal external subscription enable flag */ private boolean enabled = false; /** merge iCal external subscriptions from other sites into My Workspace? */ private boolean mergeIntoMyworkspace = true; /** Column map for iCal processing */ private Map columnMap = null; /** Cache map of Institutional Calendars: <String url, Calendar cal> */ private SubscriptionCache institutionalSubscriptionCache = null; /** Cache map of user Calendars: <String url, Calendar cal> */ private SubscriptionCache usersSubscriptionCache = null; // ###################################################### // Spring services // ###################################################### /** Dependency: CalendarService. 
*/ // We depend on the BaseCalendarService so we can call methods outside the calendar service API. protected BaseCalendarService m_calendarService = null; /** Dependency: SecurityService */ protected SecurityService m_securityService = null; /** Dependency: SessionManager */ protected SessionManager m_sessionManager = null; /** Dependency: TimeService */ protected TimeService m_timeService = null; /** Dependency: ToolManager */ protected ToolManager m_toolManager = null; /** Dependency: IdManager. */ protected IdManager m_idManager; /** Dependency: CalendarImporterService. */ protected CalendarImporterService m_importerService = null; /** Dependency: ServerConfigurationService. */ protected ServerConfigurationService m_configurationService = null; /** Dependency: EntityManager. */ protected EntityManager m_entityManager = null; /** Dependency: SiteService. */ protected SiteService m_siteService = null; protected MemoryService m_memoryService = null; public void setMemoryService(MemoryService memoryService) { this.m_memoryService = memoryService; } public void setCalendarService(BaseCalendarService service) { this.m_calendarService = service; } public void setServerConfigurationService(ServerConfigurationService service) { this.m_configurationService = service; } public void setCalendarImporterService(CalendarImporterService service) { this.m_importerService = service; } public void setEntityManager(EntityManager service) { this.m_entityManager = service; } public void setSiteService(SiteService service) { this.m_siteService = service; } /** * Dependency: SecurityService. * * @param securityService * The SecurityService. */ public void setSecurityService(SecurityService securityService) { m_securityService = securityService; } /** * Dependency: SessionManager. * @param sessionManager * The SessionManager. */ public void setSessionManager(SessionManager sessionManager) { this.m_sessionManager = sessionManager; } /** * Dependency: TimeService. 
* @param timeService * The TimeService. */ public void setTimeService(TimeService timeService) { this.m_timeService = timeService; } /** * Dependency: ToolManager. * @param toolManager * The ToolManager. */ public void setToolManager(ToolManager toolManager) { this.m_toolManager = toolManager; } /** * Dependency: IdManager. * @param idManager * The IdManager. */ public void setIdManager(IdManager idManager) { this.m_idManager = idManager; } /** Dependency: Timer */ protected Timer m_timer = null; public void init() { // external calendar subscriptions: enable? enabled = m_configurationService.getBoolean(SAK_PROP_EXTSUBSCRIPTIONS_ENABLED, true); mergeIntoMyworkspace = m_configurationService.getBoolean(SAK_PROP_EXTSUBSCRIPTIONS_MERGEINTOMYWORKSPACE, true); m_log.info("init(): enabled: " + enabled + ", merge from other sites into My Workspace? "+mergeIntoMyworkspace); if (enabled) { // INIT the caches long cacheRefreshRate = 43200; // 12 hours SimpleConfiguration cacheConfig = new SimpleConfiguration(1000, cacheRefreshRate, 0); // 12 hours cacheConfig.setStatisticsEnabled(true); institutionalSubscriptionCache = new SubscriptionCache( m_memoryService.createCache("org.sakaiproject.calendar.impl.BaseExternalCacheSubscriptionService.institutionalCache", cacheConfig)); usersSubscriptionCache = new SubscriptionCache( m_memoryService.createCache("org.sakaiproject.calendar.impl.BaseExternalCacheSubscriptionService.userCache", cacheConfig)); // TODO replace this with a real solution for when the caches are distributed by disabling the timer and using jobscheduler if (institutionalSubscriptionCache.getCache().isDistributed()) { m_log.error(institutionalSubscriptionCache.getCache().getName()+" is distributed but calendar subscription caches have a local timer refresh which means they will cause cache replication storms once every "+cacheRefreshRate+" seconds, do NOT distribute this cache"); } if (usersSubscriptionCache.getCache().isDistributed()) { 
m_log.error(usersSubscriptionCache.getCache().getName()+" is distributed but calendar subscription caches have a local timer refresh which means they will cause cache replication storms once every "+cacheRefreshRate+" seconds, do NOT distribute this cache"); } m_timer = new Timer(); // init timer // iCal column map try { columnMap = m_importerService .getDefaultColumnMap(CalendarImporterService.ICALENDAR_IMPORT); } catch (ImportException e1) { m_log .error("Unable to get column map for ICal import. External subscriptions will be disabled."); enabled = false; return; } // load institutional calendar subscriptions as timer tasks, this is so that // we don't slow up the loading of sakai. for (final InsitutionalSubscription sub: getInstitutionalSubscriptions()) { m_timer.schedule(new TimerTask() { @Override public void run() { String reference = calendarSubscriptionReference(INSTITUTIONAL_CONTEXT, getIdFromSubscriptionUrl(sub.url)); getCalendarSubscription(reference); } }, 0, cacheRefreshRate); } } } public void destroy() { // Nothing to clean up for now. 
} public boolean isEnabled() { return enabled; } public void setEnabled(boolean enabled) { this.enabled = enabled; } // ###################################################### // PUBLIC methods // ###################################################### /* * (non-Javadoc) * * @see org.sakaiproject.calendar.api.ExternalCalendarSubscriptionService#calendarSubscriptionReference(java.lang.String, * java.lang.String) */ public String calendarSubscriptionReference(String context, String id) { return BaseExternalSubscription.calendarSubscriptionReference(context, id); } /* * (non-Javadoc) * * @see org.sakaiproject.calendar.impl.ExternalCalendarSubscriptionService#getCalendarSubscription(java.lang.String) */ public Calendar getCalendarSubscription(String reference) { if (!isEnabled() || reference == null) return null; // Get Reference and Subscription URL Reference _ref = m_entityManager.newReference(reference); String subscriptionUrl = getSubscriptionUrlFromId(_ref.getId()); if (subscriptionUrl == null || subscriptionUrl.equals("null")) return null; m_log.debug("ExternalCalendarSubscriptionService.getCalendarSubscription(" + reference + ")"); m_log.debug(" |-> subscriptionUrl: " + subscriptionUrl); ExternalSubscription subscription = getExternalSubscription(subscriptionUrl, _ref.getContext()); m_log.debug(" |-> Subscription is " + subscription); if (subscription != null) { m_log.debug(" |-> Calendar is " + subscription.getCalendar()); return subscription.getCalendar(); } else { m_log.debug(" |-> Calendar is NULL"); return null; } } private ExternalSubscription getExternalSubscription(String subscriptionUrl, String context) { // Decide which cache to use. SubscriptionCache cache = (getInstitutionalSubscription(subscriptionUrl) != null)? institutionalSubscriptionCache : usersSubscriptionCache; ExternalSubscription subscription = cache.get(subscriptionUrl); // Did we get it? 
if (subscription == null) { subscription = loadCalendarSubscriptionFromUrl(subscriptionUrl, context); cache.put(subscription); } return subscription; } public Set<String> getCalendarSubscriptionChannelsForChannels( String primaryCalendarReference, Collection<Object> channels) { Set<String> subscriptionChannels = new HashSet<String>(); Set<String> subscriptionUrlsAdded = new HashSet<String>(); if(isMyWorkspace(primaryCalendarReference) && (!mergeIntoMyworkspace || m_securityService.isSuperUser())) { channels = new ArrayList<Object>(); channels.add(primaryCalendarReference); } for (Object channel : channels) { Set<String> channelSubscriptions = getCalendarSubscriptionChannelsForChannel((String) channel); for (String channelSub : channelSubscriptions) { Reference ref = m_entityManager.newReference(channelSub); if (!subscriptionUrlsAdded.contains(ref.getId())) { subscriptionChannels.add(channelSub); subscriptionUrlsAdded.add(ref.getId()); } } } return subscriptionChannels; } /* * (non-Javadoc) * * @see org.sakaiproject.calendar.impl.ExternalCalendarSubscriptionService#getCalendarSubscriptionChannelsForSite() */ public Set<String> getCalendarSubscriptionChannelsForChannel(String reference) { Set<String> channels = new HashSet<String>(); if (!isEnabled() || reference == null) return channels; // get externally subscribed urls from tool config Reference ref = m_entityManager.newReference(reference); Site site = null; try { site = m_siteService.getSite(ref.getContext()); } catch (IdUnusedException e) { m_log .error("ExternalCalendarSubscriptionService.getCalendarSubscriptionChannelsForChannel(): IdUnusedException for context in reference: " + reference); return channels; } ToolConfiguration tc = site.getToolForCommonId(SCHEDULE_TOOL_ID); Properties config = tc == null? 
null : tc.getConfig(); if (tc != null && config != null) { String prop = config.getProperty(TC_PROP_SUBCRIPTIONS); if (prop != null) { String[] chsPair = prop.split(SUBS_REF_DELIMITER); for (int i = 0; i < chsPair.length; i++) { String[] pair = chsPair[i].split(SUBS_NAME_DELIMITER); channels.add(pair[0]); } } } return channels; } public Set<ExternalSubscription> getAvailableInstitutionalSubscriptionsForChannel( String reference) { Set<ExternalSubscription> subscriptions = new HashSet<ExternalSubscription>(); if (!isEnabled() || reference == null) return subscriptions; Reference ref = m_entityManager.newReference(reference); // If the cache has been flushed then we may need to reload it. for (InsitutionalSubscription sub : getInstitutionalSubscriptions()) { // Need to have way to load these. ExternalSubscription subscription = getExternalSubscription(sub.url, ref.getContext()); if (subscription != null) { subscription.setContext(ref.getContext()); subscriptions.add(subscription); subscription.setCalendar(null); } } return subscriptions; } public Set<ExternalSubscription> getSubscriptionsForChannel(String reference, boolean loadCalendar) { Set<ExternalSubscription> subscriptions = new HashSet<ExternalSubscription>(); if (!isEnabled() || reference == null) return subscriptions; // get externally subscribed urls from tool config Reference ref = m_entityManager.newReference(reference); Site site = null; try { site = m_siteService.getSite(ref.getContext()); } catch (IdUnusedException e) { m_log .error("ExternalCalendarSubscriptionService.getSubscriptionsForChannel(): IdUnusedException for context in reference: " + reference); return subscriptions; } ToolConfiguration tc = site.getToolForCommonId(SCHEDULE_TOOL_ID); Properties config = tc == null? 
null : tc.getConfig(); if (tc != null && config != null) { String prop = config.getProperty(TC_PROP_SUBCRIPTIONS); if (prop != null) { String[] chsPair = prop.split(SUBS_REF_DELIMITER); for (int i = 0; i < chsPair.length; i++) { String[] pair = chsPair[i].split(SUBS_NAME_DELIMITER); String r = pair[0]; Reference r1 = m_entityManager.newReference(r); String url = getSubscriptionUrlFromId(r1.getId()); String name = null; if (pair.length == 2) name = pair[1]; else { try { name = institutionalSubscriptionCache.get(url) .getSubscriptionName(); } catch (Exception e) { name = url; } } ExternalSubscription subscription = new BaseExternalSubscription( name, url, ref.getContext(), loadCalendar ? getCalendarSubscription(r) : null, isInstitutionalCalendar(r)); subscriptions.add(subscription); } } } return subscriptions; } /* * (non-Javadoc) * * @see org.sakaiproject.calendar.impl.ExternalCalendarSubscriptionService#setSubscriptionsForChannel(String, * Collection<ExternalSubscription>) */ public void setSubscriptionsForChannel(String reference, Collection<ExternalSubscription> subscriptions) { if (!isEnabled() || reference == null) return; // set externally subscriptions in tool config Reference ref = m_entityManager.newReference(reference); Site site = null; try { site = m_siteService.getSite(ref.getContext()); } catch (IdUnusedException e) { m_log .error("ExternalCalendarSubscriptionService.setSubscriptionsForChannel(): IdUnusedException for context in reference: " + reference); return; } ToolConfiguration tc = site.getToolForCommonId(SCHEDULE_TOOL_ID); if (tc != null) { boolean first = true; StringBuffer tmpStr = new StringBuffer(); for (ExternalSubscription subscription : subscriptions) { if (!first) tmpStr.append(SUBS_REF_DELIMITER); first = false; tmpStr.append(subscription.getReference()); if (!subscription.isInstitutional()) tmpStr.append(SUBS_NAME_DELIMITER + subscription.getSubscriptionName()); } Properties config = tc.getConfig(); 
config.setProperty(TC_PROP_SUBCRIPTIONS, tmpStr.toString()); tc.save(); } } public boolean isInstitutionalCalendar(String reference) { // Get Reference and Subscription URL Reference _ref = m_entityManager.newReference(reference); String subscriptionUrl = getSubscriptionUrlFromId(_ref.getId()); if (subscriptionUrl == null || subscriptionUrl.equals("null")) return false; // Is a institutional subscription? String[] subscriptionURLs = m_configurationService .getStrings(SAK_PROP_EXTSUBSCRIPTIONS_URL); if (subscriptionURLs != null) { for (String url: subscriptionURLs) { if (subscriptionUrl.equals(url)) { return true; } } } return false; } public String getIdFromSubscriptionUrl(String url) { return BaseExternalSubscription.getIdFromSubscriptionUrl(url); } public String getSubscriptionUrlFromId(String id) { return BaseExternalSubscription.getSubscriptionUrlFromId(id); } // ###################################################### // PRIVATE methods // ###################################################### /** * Get the event type for this institutional subscription. * @param url * @return The forced event type or <code>null</code> if it isn't defined. */ String getEventType(String url) { InsitutionalSubscription sub = getInstitutionalSubscription(url); return (sub != null)? sub.eventType: null; } /** * Insitutional subscriptions loaded from configuration. 
*/ class InsitutionalSubscription { String url; String name; String eventType; } InsitutionalSubscription getInstitutionalSubscription(String url) { for (InsitutionalSubscription sub: getInstitutionalSubscriptions()) { if(sub.url.equals(url)) { return sub; } } return null; } List<InsitutionalSubscription> getInstitutionalSubscriptions() { String[] subscriptionURLs = m_configurationService .getStrings(SAK_PROP_EXTSUBSCRIPTIONS_URL); String[] subscriptionNames = m_configurationService .getStrings(SAK_PROP_EXTSUBSCRIPTIONS_NAME); String[] subscriptionEventTypes = m_configurationService .getStrings(SAK_PROP_EXTSUBSCRIPTIONS_EVENTTYPE); ArrayList<InsitutionalSubscription> subs = new ArrayList<InsitutionalSubscription>(); if (subscriptionURLs != null) { for (int i = 0; i < subscriptionURLs.length; i++) { String name = subscriptionNames[i]; String eventType = subscriptionEventTypes[i]; if (name != null) { InsitutionalSubscription sub = new InsitutionalSubscription(); sub.url = subscriptionURLs[i]; sub.name = name; sub.eventType = eventType; subs.add(sub); } } } return subs; } ExternalSubscription loadCalendarSubscriptionFromUrl(String url, String context) { InsitutionalSubscription sub = getInstitutionalSubscription(url); String name = null; String forcedEventType = null; if (sub != null) { name = sub.name; forcedEventType = sub.eventType; } return loadCalendarSubscriptionFromUrl(url, context, name, forcedEventType); } ExternalSubscription loadCalendarSubscriptionFromUrl(String url, String context, String calendarName, String forcedEventType) { ExternalSubscription subscription = new BaseExternalSubscription(calendarName, url, context, null, INSTITUTIONAL_CONTEXT.equals(context)); ExternalCalendarSubscription calendar = null; List<CalendarEvent> events = null; BufferedInputStream stream = null; try { URL _url = new URL(url); if (calendarName == null) calendarName = _url.getFile(); // connect URLConnection conn = _url.openConnection(); // Must set user agent so we can 
detect loops. conn.addRequestProperty("User-Agent", m_calendarService.getUserAgent()); conn.setConnectTimeout(TIMEOUT); conn.setReadTimeout(TIMEOUT); // Now make the connection. conn.connect(); stream = new BufferedInputStream(conn.getInputStream()); // import events = m_importerService.doImport(CalendarImporterService.ICALENDAR_IMPORT, stream, columnMap, null); String subscriptionId = getIdFromSubscriptionUrl(url); String reference = calendarSubscriptionReference(context, subscriptionId); calendar = new ExternalCalendarSubscription(reference); for (CalendarEvent event : events) { String eventType = event.getType(); if (forcedEventType != null) eventType = forcedEventType; calendar.addEvent(event.getRange(), event.getDisplayName(), event .getDescription(), eventType, event.getLocation(), event .getRecurrenceRule(), null); } calendar.setName(calendarName); subscription.setCalendar(calendar); subscription.setInstitutional(getInstitutionalSubscription(url) != null); m_log.info("Loaded calendar subscription: " + subscription.toString()); } catch (ImportException e) { m_log.info("Error loading calendar subscription '" + calendarName + "' (will NOT retry again): " + url); String subscriptionId = getIdFromSubscriptionUrl(url); String reference = calendarSubscriptionReference(context, subscriptionId); calendar = new ExternalCalendarSubscription(reference); calendar.setName(calendarName); // By setting the calendar to be an empty one we make sure that we don't attempt to re-retrieve it // When 2 hours are up it will get refreshed through. 
subscription.setCalendar(calendar); } catch (PermissionException e) { // This will never be called (for now) e.printStackTrace(); } catch (MalformedURLException e) { m_log.info("Mal-formed URL in calendar subscription '" + calendarName + "': " + url); } catch (IOException e) { m_log.info("Unable to read calendar subscription '" + calendarName + "' from URL (I/O Error): " + url); } catch (Exception e) { m_log.info("Unknown error occurred while reading calendar subscription '" + calendarName + "' from URL: " + url); } finally { if (stream != null) { // Also closes the underlying InputStream try { stream.close(); } catch (IOException e) { // Ignore } } } return subscription; } /** * See if the current tab is the workspace tab (i.e. user site) * @param primaryCalendarReference The primary calendar reference. * @return true if we are currently on the "My Workspace" tab. */ private boolean isMyWorkspace(String primaryCalendarReference) { Reference ref = m_entityManager.newReference(primaryCalendarReference); String siteId = ref.getContext(); return m_siteService.isUserSite(siteId); } // ###################################################### // Support classes // ###################################################### public class ExternalCalendarSubscription implements Calendar { /** Memory storage */ protected Map<String, CalendarEvent> m_storage = new HashMap<String, CalendarEvent>(); /** The context in which this calendar exists. */ protected String m_context = null; /** Store the unique-in-context calendar id. */ protected String m_id = null; /** Store the calendar name. */ protected String m_name = null; /** The properties. 
*/ protected ResourcePropertiesEdit m_properties = null; protected String modifiedDateStr = null; public ExternalCalendarSubscription(String ref) { // set the ids Reference r = m_entityManager.newReference(ref); m_context = r.getContext(); m_id = r.getId(); // setup for properties m_properties = new BaseResourcePropertiesEdit(); } public CalendarEvent addEvent(TimeRange range, String displayName, String description, String type, String location, EventAccess access, Collection groups, List attachments) throws PermissionException { return addEvent(range, displayName, description, type, location, attachments); } public CalendarEvent addEvent(TimeRange range, String displayName, String description, String type, String location, List attachments) throws PermissionException { return addEvent(range, displayName, description, type, location, null, attachments); } public CalendarEvent addEvent(TimeRange range, String displayName, String description, String type, String location, RecurrenceRule rrule, List attachments) throws PermissionException { // allocate a new unique event id // String id = getUniqueId(); String id = getUniqueIdBasedOnFields(displayName, description, type, location, m_id); // create event ExternalCalendarEvent edit = new ExternalCalendarEvent(m_context, m_id, id); // set it up edit.setRange(range); edit.setDisplayName(displayName); edit.setDescription(description); edit.setType(type); edit.setLocation(location); edit.setCreator(); if (rrule != null) edit.setRecurrenceRule(rrule); // put in storage m_storage.put(id, edit); return edit; } public CalendarEventEdit addEvent() throws PermissionException { // allocate a new unique event id // String id = getUniqueId(); // create event // CalendarEventEdit event = new ExternalCalendarEvent(this, id); // put in storage // m_storage.put(id, event); return null; } public CalendarEvent addEvent(CalendarEvent event) { // allocate a new unique event id String id = event.getId(); // put in storage m_storage.put(id, 
event);
			return event;
		}

		/** @return all events currently held in memory for this calendar. */
		public Collection<CalendarEvent> getAllEvents()
		{
			return m_storage.values();
		}

		// Subscription calendars are read-only from the UI: only "get"
		// permissions are granted; add/edit/remove are always denied.
		public boolean allowAddCalendarEvent()
		{
			return false;
		}

		public boolean allowAddEvent()
		{
			return false;
		}

		public boolean allowEditEvent(String eventId)
		{
			return false;
		}

		public boolean allowGetEvent(String eventId)
		{
			return true;
		}

		public boolean allowGetEvents()
		{
			return true;
		}

		public boolean allowRemoveEvent(CalendarEvent event)
		{
			return false;
		}

		// Edit lifecycle operations are no-ops: there is no backing storage
		// to commit to or roll back.
		public void cancelEvent(CalendarEventEdit edit)
		{
		}

		public void commitEvent(CalendarEventEdit edit, int intention)
		{
		}

		public void commitEvent(CalendarEventEdit edit)
		{
		}

		public String getContext()
		{
			return m_context;
		}

		/** Editing is not supported: always returns null. */
		public CalendarEventEdit getEditEvent(String eventId, String editType) throws IdUnusedException, PermissionException,
				InUseException
		{
			return null;
		}

		/** @return the stored event, or null if the id is unknown. */
		public CalendarEvent getEvent(String eventId) throws IdUnusedException, PermissionException
		{
			return m_storage.get(eventId);
		}

		public String getEventFields()
		{
			return m_properties.getPropertyFormatted(ResourceProperties.PROP_CALENDAR_EVENT_FIELDS);
		}

		/** @return the stored events overlapping the range; the filter argument is ignored. */
		public List getEvents(TimeRange range, Filter filter) throws PermissionException
		{
			return filterEvents(new ArrayList<CalendarEvent>(m_storage.values()), range);
		}

		public boolean getExportEnabled()
		{
			return false;
		}

		// No group-scoped permissions for subscription calendars.
		public Collection getGroupsAllowAddEvent()
		{
			return new ArrayList();
		}

		public Collection getGroupsAllowGetEvent()
		{
			return new ArrayList();
		}

		public Collection getGroupsAllowRemoveEvent(boolean own)
		{
			return new ArrayList();
		}

		// NOTE(review): modifiedDateStr may be null if never set, in which
		// case newTimeGmt(null) could fail — confirm callers guarantee it.
		public Time getModified()
		{
			return m_timeService.newTimeGmt(modifiedDateStr);
		}

		public CalendarEventEdit mergeEvent(Element el) throws PermissionException, IdUsedException
		{
			// TODO Implement mergeEvent()
			return null;
		}

		// Removal is a no-op: the calendar is read-only.
		public void removeEvent(CalendarEventEdit edit, int intention) throws PermissionException
		{
		}

		public void removeEvent(CalendarEventEdit edit) throws PermissionException
		{
		}

		public void setExportEnabled(boolean enable)
		{
		}

		public void setModified()
		{
		}

		public
String getId()
		{
			return m_id;
		}

		public ResourceProperties getProperties()
		{
			return m_properties;
		}

		/** @return the subscription reference built from context and id. */
		public String getReference()
		{
			return m_calendarService.calendarSubscriptionReference(m_context, m_id);
		}

		/**
		 * Re-home this calendar (and all stored events) into a new context.
		 */
		protected void setContext(String context)
		{
			// set the ids
			m_context = context;
			// propagate the new context/id to every stored event so their
			// references stay consistent with this calendar
			for (CalendarEvent e : m_storage.values())
			{
				// ((ExternalCalendarEvent) e).setCalendar(this);
				((ExternalCalendarEvent) e).setCalendarContext(m_context);
				((ExternalCalendarEvent) e).setCalendarId(m_id);
			}
		}

		public String getReference(String rootProperty)
		{
			return rootProperty + getReference();
		}

		public String getUrl()
		{
			// TODO Auto-generated method stub
			return null;
		}

		public String getUrl(String rootProperty)
		{
			// TODO Auto-generated method stub
			return null;
		}

		public Element toXml(Document doc, Stack stack)
		{
			// TODO Auto-generated method stub
			return null;
		}

		public String getName()
		{
			return m_name;
		}

		public void setName(String calendarName)
		{
			this.m_name = calendarName;
		}

		/**
		 * Access the id generating service and return a unique id.
		 *
		 * @return a unique id.
*/ protected String getUniqueId() { return m_idManager.createUuid(); } protected String getUniqueIdBasedOnFields(String displayName, String description, String type, String location, String calendarId) { StringBuilder key = new StringBuilder(); key.append(displayName); key.append(description); key.append(type); key.append(location); key.append(calendarId); String id = null; int n = 0; boolean unique = false; while (!unique) { byte[] bytes = key.toString().getBytes(); try{ MessageDigest digest = MessageDigest.getInstance("SHA-1"); digest.update(bytes); bytes = digest.digest(); id = getHexStringFromBytes(bytes); }catch(NoSuchAlgorithmException e){ // fall back to Base64 byte[] encoded = Base64.encodeBase64(bytes); id = StringUtils.newStringUtf8(encoded); } if (!m_storage.containsKey(id)) unique = true; else key.append(n++); } return id; } protected String getHexStringFromBytes(byte[] raw) { final String HEXES = "0123456789ABCDEF"; if(raw == null) { return null; } final StringBuilder hex = new StringBuilder(2 * raw.length); for(final byte b : raw) { hex.append(HEXES.charAt((b & 0xF0) >> 4)).append(HEXES.charAt((b & 0x0F))); } return hex.toString(); } /** * Filter the events to only those in the time range. * * @param events * The full list of events. * @param range * The time range. * @return A list of events from the incoming list that overlap the * given time range. 
*/
		protected List<CalendarEvent> filterEvents(List<CalendarEvent> events, TimeRange range)
		{
			List<CalendarEvent> filtered = new ArrayList<CalendarEvent>();
			for (int i = 0; i < events.size(); i++)
			{
				CalendarEvent event = events.get(i);

				// resolve the event to the list of events in this range
				// TODO Support for recurring events
				List<CalendarEvent> resolved = ((ExternalCalendarEvent) event).resolve(range);
				filtered.addAll(resolved);
			}

			return filtered;
		}

		/**
		 * Checks if user has permission to modify any event (or fields) in this calendar
		 * @param function the permission function to check.
		 * @return true only for the "modify any calendar" function.
		 */
		@Override
		public boolean canModifyAnyEvent(String function)
		{
			return CalendarService.AUTH_MODIFY_CALENDAR_ANY.equals(function);
		}
	}

	/**
	 * In-memory CalendarEvent for an external subscription. The owning
	 * calendar is identified by (context, id) rather than an object
	 * reference; recurrence is resolved on demand via resolve().
	 */
	public class ExternalCalendarEvent implements CalendarEvent
	{
		// protected Calendar m_calendar = null;
		protected String m_calendar_context = null;

		protected String m_calendar_id = null;

		protected ResourcePropertiesEdit m_properties = null;

		protected String m_id = null;

		// NOTE(review): not read anywhere in this class — possibly dead.
		protected String calendarReference = null;

		/** The effective range of this event (or of this recurrence instance). */
		protected TimeRange m_range = null;

		/** The range of the originating event when this is a resolved instance. */
		protected TimeRange m_baseRange = null;

		protected RecurrenceRule m_singleRule = null;

		protected RecurrenceRule m_exclusionRule = null;

		public ExternalCalendarEvent(String calendarContext, String calendarId, String id)
		{
			this(calendarContext, calendarId, id, null);
		}

		/**
		 * Construct an event belonging to the given calendar, optionally
		 * pre-setting the event type property.
		 */
		public ExternalCalendarEvent(String calendarContext, String calendarId, String id, String eventType)
		{
			m_id = id;
			// m_calendar = calendar;
			m_calendar_context = calendarContext;
			m_calendar_id = calendarId;
			m_properties = new BaseResourcePropertiesEdit();
			if (eventType != null) m_properties.addProperty(ResourceProperties.PROP_CALENDAR_TYPE, eventType);
		}

		/**
		 * Construct a single resolved instance of a recurring event: same
		 * properties and rules as the original, but covering one instance's
		 * range, with an id that encodes the instance.
		 */
		public ExternalCalendarEvent(CalendarEvent other, RecurrenceInstance ri)
		{
			// m_calendar = ((ExternalCalendarEvent) other).m_calendar;
			m_calendar_context = ((ExternalCalendarEvent) other).m_calendar_context;
			m_calendar_id = ((ExternalCalendarEvent) other).m_calendar_id;

			// encode the instance and the other's id into my id
			m_id = '!' + ri.getRange().toString() + '!' + ri.getSequence() + '!' + ((ExternalCalendarEvent) other).m_id;

			// use the new range
			m_range = (TimeRange) ri.getRange().clone();
			m_baseRange = ((ExternalCalendarEvent) other).m_range;

			// point at the properties
			m_properties = ((ExternalCalendarEvent) other).m_properties;

			// point at the rules
			m_singleRule = ((ExternalCalendarEvent) other).m_singleRule;
			m_exclusionRule = ((ExternalCalendarEvent) other).m_exclusionRule;
		}

		/** Subscription events are always site-scoped. */
		public EventAccess getAccess()
		{
			return CalendarEvent.EventAccess.SITE;
		}

		public String getCalendarReference()
		{
			// return m_calendar.getReference();
			return m_calendarService.calendarSubscriptionReference(m_calendar_context, m_calendar_id);
		}

		// protected Calendar getCalendar(){
		// return m_calendar;
		// }

		// protected void setCalendar(Calendar calendar) {
		// m_calendar = calendar;
		// }

		protected void setCalendarContext(String calendarContext)
		{
			m_calendar_context = calendarContext;
		}

		protected void setCalendarId(String calendarId)
		{
			m_calendar_id = calendarId;
		}

		public String getCreator()
		{
			return m_properties.getProperty(ResourceProperties.PROP_CREATOR);
		}

		/** @return the plain-text form of the (possibly formatted) description. */
		public String getDescription()
		{
			return FormattedText.convertFormattedTextToPlaintext(getDescriptionFormatted());
		}

		public String getDescriptionFormatted()
		{
			// %%% JANDERSE the calendar event description can now be formatted
			// text
			// first try to use the formatted text description; if that isn't
			// found, use the plaintext description
			String desc = m_properties.getPropertyFormatted(ResourceProperties.PROP_DESCRIPTION + "-html");
			if (desc != null && desc.length() > 0) return desc;
			desc = m_properties.getPropertyFormatted(ResourceProperties.PROP_DESCRIPTION + "-formatted");
			desc = FormattedText.convertOldFormattedText(desc);
			if (desc != null && desc.length() > 0) return desc;
			desc = FormattedText.convertPlaintextToFormattedText(m_properties
					.getPropertyFormatted(ResourceProperties.PROP_DESCRIPTION));
			return desc;
		}

		public String getDisplayName()
{
			return m_properties.getPropertyFormatted(ResourceProperties.PROP_DISPLAY_NAME);
		}

		/** Look up a custom event field (fields live in a prefixed namespace). */
		public String getField(String name)
		{
			// names are prefixed to form a namespace
			name = ResourceProperties.PROP_CALENDAR_EVENT_FIELDS + "." + name;
			return m_properties.getPropertyFormatted(name);
		}

		// Subscription events have no group scoping: always empty.
		public Collection getGroupObjects()
		{
			return new ArrayList();
		}

		public String getGroupRangeForDisplay(Calendar calendar)
		{
			return "";
		}

		public Collection getGroups()
		{
			return new ArrayList();
		}

		public String getLocation()
		{
			return m_properties.getPropertyFormatted(ResourceProperties.PROP_CALENDAR_LOCATION);
		}

		public String getModifiedBy()
		{
			return m_properties.getPropertyFormatted(ResourceProperties.PROP_MODIFIED_BY);
		}

		public TimeRange getRange()
		{
			// range might be null in the creation process, before the fields
			// are set in an edit, but
			// after the storage has registered the event and it's id.
			if (m_range == null)
			{
				return m_timeService.newTimeRange(m_timeService.newTime(0));
			}

			// return (TimeRange) m_range.clone();
			return m_range;
		}

		public RecurrenceRule getRecurrenceRule()
		{
			return m_singleRule;
		}

		/** Lazily creates an empty exclusion rule on first access. */
		public RecurrenceRule getExclusionRule()
		{
			if (m_exclusionRule == null) m_exclusionRule = new ExclusionSeqRecurrenceRule();
			return m_exclusionRule;
		}

		/**
		 * Resolve this event against a time range: a non-recurring event is
		 * returned as-is if it overlaps; a recurring event is expanded into
		 * one clone per non-excluded instance in the range.
		 */
		protected List resolve(TimeRange range)
		{
			List rv = new Vector();

			// for no rules, use the event if it's in range
			if (m_singleRule == null)
			{
				// the actual event
				if (range.overlaps(getRange()))
				{
					rv.add(this);
				}
			}

			// for rules...
			else
			{
				List instances = m_singleRule.generateInstances(this.getRange(), range, m_timeService.getLocalTimeZone());

				// remove any excluded
				getExclusionRule().excludeInstances(instances);
				for (Iterator iRanges = instances.iterator(); iRanges.hasNext();)
				{
					RecurrenceInstance ri = (RecurrenceInstance) iRanges.next();

					// generate an event object that is exactly like me but with
					// this range and no rules
					CalendarEvent clone = new ExternalCalendarEvent(this, ri);

					rv.add(clone);
				}
			}

			return rv;
		}

		public void setRecurrenceRule(RecurrenceRule rule)
		{
			m_singleRule = rule;
		}

		public void setExclusionRule(RecurrenceRule rule)
		{
			m_exclusionRule = rule;
		}

		public String getType()
		{
			return m_properties.getPropertyFormatted(ResourceProperties.PROP_CALENDAR_TYPE);
		}

		public boolean isUserOwner()
		{
			return false;
		}

		public String getId()
		{
			return m_id;
		}

		protected void setId(String id)
		{
			m_id = id;
		}

		public ResourceProperties getProperties()
		{
			return m_properties;
		}

		public String getReference()
		{
			// return m_calendar.getReference() + Entity.SEPARATOR + m_id;
			return m_calendarService.eventSubscriptionReference(m_calendar_context, m_calendar_id, m_id);
		}

		public String getReference(String rootProperty)
		{
			return rootProperty + getReference();
		}

		public String getUrl()
		{
			return null;// m_calendar.getUrl() + getId();
		}

		public String getUrl(String rootProperty)
		{
			return rootProperty + getUrl();
		}

		public Element toXml(Document doc, Stack stack)
		{
			// TODO Auto-generated method stub
			return null;
		}

		/** Order events by the first time of their range. */
		public int compareTo(Object o)
		{
			if (!(o instanceof CalendarEvent)) throw new ClassCastException();
			Time mine = getRange().firstTime();
			Time other = ((CalendarEvent) o).getRange().firstTime();

			if (mine.before(other)) return -1;
			if (mine.after(other)) return +1;
			return 0;
		}

		public List getAttachments()
		{
			// TODO Auto-generated method stub
			return null;
		}

		/** Stamp the current session user and time as creator/creation date. */
		public void setCreator()
		{
			String currentUser = m_sessionManager.getCurrentSessionUserId();
			String now = m_timeService.newTime().toString();
			m_properties.addProperty(ResourceProperties.PROP_CREATOR, currentUser);
			m_properties.addProperty(ResourceProperties.PROP_CREATION_DATE, now);
		}

		public void setLocation(String location)
		{
			m_properties.addProperty(ResourceProperties.PROP_CALENDAR_LOCATION, location);
		}

		public void setType(String type)
		{
			m_properties.addProperty(ResourceProperties.PROP_CALENDAR_TYPE, type);
		}

		public void setDescription(String description)
		{
			setDescriptionFormatted(FormattedText.convertPlaintextToFormattedText(description));
		}

		public void setDescriptionFormatted(String description)
		{
			// %%% JANDERSE the calendar event description can now be formatted
			// text
			// save both a formatted and a plaintext version of the description
			m_properties.addProperty(ResourceProperties.PROP_DESCRIPTION + "-html", description);
			m_properties.addProperty(ResourceProperties.PROP_DESCRIPTION, FormattedText
					.convertFormattedTextToPlaintext(description));
		}

		public void setDisplayName(String displayName)
		{
			m_properties.addProperty(ResourceProperties.PROP_DISPLAY_NAME, displayName);
		}

		public void setRange(TimeRange range)
		{
			// defensive copy: callers may mutate their TimeRange afterwards
			m_range = (TimeRange) range.clone();
		}

		/**
		 * Gets a site name for this calendar event
		 */
		public String getSiteName()
		{
			String calendarName = "";
			if (m_calendar_context != null)
			{
				try
				{
					Site site = m_siteService.getSite(m_calendar_context);
					if (site != null) calendarName = site.getTitle();
				}
				catch (IdUnusedException e)
				{
					m_log.warn(".getSiteName(): " + e);
				}
			}
			return calendarName;
		}
	}
}
package com.tinify;

import com.google.gson.Gson;
import okhttp3.HttpUrl;
import okhttp3.mockwebserver.MockResponse;
import okhttp3.mockwebserver.MockWebServer;
import okhttp3.mockwebserver.RecordedRequest;
import mockit.*;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.io.IOException;
import java.net.URISyntaxException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
import java.util.logging.Logger;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import static org.hamcrest.Matchers.*;

/**
 * Tests for Source against a local MockWebServer. All non-localhost URLs
 * built by the client are transparently redirected to the mock server via a
 * JMockit MockUp of HttpUrl.parse, so no real network traffic occurs.
 */
public class SourceTest {
    MockWebServer server;

    @Before
    public void setup() throws IOException {
        // silence MockWebServer's per-request INFO logging
        Logger.getLogger(MockWebServer.class.getName()).setLevel(Level.WARNING);
        server = new MockWebServer();
        server.start();
        new MockUp<HttpUrl>() {
            @Mock
            @SuppressWarnings("unused")
            HttpUrl parse(Invocation inv, String url) {
                if (url.contains("localhost")) {
                    // already pointed at the mock server — keep as-is
                    return inv.proceed();
                } else {
                    // rewrite any external URL to the local mock /shrink endpoint
                    return new HttpUrl.Builder()
                            .scheme("http")
                            .host(server.getHostName())
                            .port(server.getPort())
                            .encodedPath("/shrink")
                            .build();
                }
            }
        };
    }

    @After
    public void tearDown() throws IOException {
        server.shutdown();
    }

    /** Compare two JSON documents structurally (key order independent). */
    public void assertJsonEquals(String expected, String actual) {
        Gson gson = new Gson();
        @SuppressWarnings("unchecked")
        Map<String, Object> expectedMap = gson.fromJson(expected, Map.class);
        @SuppressWarnings("unchecked")
        Map<String, Object> actualMap = gson.fromJson(actual, Map.class);
        assertEquals(expectedMap, actualMap);
    }

    @Test(expected = AccountException.class)
    public void withInvalidApiKeyFromFileShouldThrowAccountException() throws Exception, IOException, URISyntaxException {
        Tinify.setKey("invalid");
        server.enqueue(new MockResponse()
                .setResponseCode(401)
                .setBody("{'error':'Unauthorized','message':'Credentials are invalid'}"));
        String filePath =
Paths.get(getClass().getResource("/dummy.png").toURI()).toAbsolutePath().toString(); Source.fromFile(filePath); } @Test(expected = AccountException.class) public void withInvalidApiKeyFromBufferShouldThrowAccountException() throws Exception, IOException { Tinify.setKey("invalid"); server.enqueue(new MockResponse() .setResponseCode(401) .setBody("{'error':'Unauthorized','message':'Credentials are invalid'}")); Source.fromBuffer("png file".getBytes()); } @Test(expected = AccountException.class) public void withInvalidApiKeyFromUrlShouldThrowAccountException() throws Exception, IOException { Tinify.setKey("invalid"); server.enqueue(new MockResponse() .setResponseCode(401) .setBody("{'error':'Unauthorized','message':'Credentials are invalid'}")); Source.fromUrl("http://example.com/test.jpg"); } @Test public void withValidApiKeyFromFileShouldReturnSource() throws IOException, Exception, URISyntaxException { Tinify.setKey("valid"); server.enqueue(new MockResponse() .setResponseCode(201) .addHeader("Location", "https://api.tinify.com/some/location") .addHeader("Compression-Count", 12)); String filePath = Paths.get(getClass().getResource("/dummy.png").toURI()).toAbsolutePath().toString(); assertThat(Source.fromFile(filePath), isA(Source.class)); } @Test public void withValidApiKeyFromFileShouldReturnSourceWithData() throws IOException, Exception, URISyntaxException { Tinify.setKey("valid"); server.enqueue(new MockResponse() .setResponseCode(201) .addHeader("Location", "https://api.tinify.com/some/location")); server.enqueue(new MockResponse() .setResponseCode(200) .setBody("compressed file")); String filePath = Paths.get(getClass().getResource("/dummy.png").toURI()).toAbsolutePath().toString(); assertThat(Source.fromFile(filePath).toBuffer(), is(equalTo("compressed file".getBytes()))); } @Test public void withValidApiKeyFromBufferShouldReturnSource() throws IOException, Exception { Tinify.setKey("valid"); server.enqueue(new MockResponse() .setResponseCode(201) 
.addHeader("Location", "https://api.tinify.com/some/location") .addHeader("Compression-Count", 12)); assertThat(Source.fromBuffer("png file".getBytes()), isA(Source.class)); } @Test public void withValidApiKeyFromBufferShouldReturnSourceWithData() throws IOException, Exception { Tinify.setKey("valid"); server.enqueue(new MockResponse() .setResponseCode(201) .addHeader("Location", "https://api.tinify.com/some/location")); server.enqueue(new MockResponse() .setResponseCode(200) .setBody("compressed file")); assertThat(Source.fromBuffer("png file".getBytes()).toBuffer(), is(equalTo("compressed file".getBytes()))); } @Test public void withValidApiKeyFromUrlShouldReturnSource() throws IOException, Exception { Tinify.setKey("valid"); server.enqueue(new MockResponse() .setResponseCode(201) .addHeader("Location", "https://api.tinify.com/some/location") .addHeader("Compression-Count", 12)); assertThat(Source.fromUrl("http://example.com/test.jpg"), isA(Source.class)); } @Test public void withValidApiKeyFromUrlShouldReturnSourceWithData() throws IOException, Exception, InterruptedException { Tinify.setKey("valid"); server.enqueue(new MockResponse() .setResponseCode(201) .addHeader("Location", "https://api.tinify.com/some/location")); server.enqueue(new MockResponse() .setResponseCode(200) .setBody("compressed file")); assertThat(Source.fromUrl("http://example.com/test.jpg").toBuffer(), is(equalTo("compressed file".getBytes()))); RecordedRequest request1 = server.takeRequest(3, TimeUnit.SECONDS); assertJsonEquals("{\"source\":{\"url\":\"http://example.com/test.jpg\"}}", request1.getBody().readUtf8()); } @Test(expected = ClientException.class) public void withValidApiKeyFromUrlShouldThrowExceptionIfRequestIsNotOK() throws IOException, Exception { Tinify.setKey("valid"); server.enqueue(new MockResponse() .setResponseCode(400) .setBody("{'error':'Source not found','message':'Cannot parse URL'}")); Source.fromUrl("file://wrong"); } @Test public void 
withValidApiKeyResultShouldReturnResult() throws Exception, IOException { Tinify.setKey("valid"); server.enqueue(new MockResponse() .setResponseCode(201) .addHeader("Location", "https://api.tinify.com/some/location") .addHeader("Compression-Count", 12)); server.enqueue(new MockResponse() .setResponseCode(200) .setBody("compressed file")); assertThat(Source.fromBuffer("png file".getBytes()).result(), isA(Result.class)); } @Test public void withValidApiKeyPreserveShouldReturnSource() throws Exception, InterruptedException { Tinify.setKey("valid"); server.enqueue(new MockResponse() .setResponseCode(201) .addHeader("Location", "https://api.tinify.com/some/location")); server.enqueue(new MockResponse() .setResponseCode(200) .setBody("copyrighted file")); assertThat(Source.fromBuffer("png file".getBytes()).preserve("copyright", "location"), isA(Source.class)); RecordedRequest request1 = server.takeRequest(3, TimeUnit.SECONDS); assertEquals("png file", request1.getBody().readUtf8()); } @Test public void withValidApiKeyPreserveShouldReturnSourceWithData() throws Exception, IOException, InterruptedException { Tinify.setKey("valid"); server.enqueue(new MockResponse() .setResponseCode(201) .addHeader("Location", "https://api.tinify.com/some/location")); server.enqueue(new MockResponse() .setResponseCode(200) .setBody("copyrighted file")); assertThat(Source.fromBuffer("png file".getBytes()).preserve("copyright", "location").toBuffer(), is(equalTo("copyrighted file".getBytes()))); RecordedRequest request1 = server.takeRequest(3, TimeUnit.SECONDS); assertEquals("png file", request1.getBody().readUtf8()); RecordedRequest request2 = server.takeRequest(3, TimeUnit.SECONDS); assertJsonEquals("{\"preserve\":[\"copyright\",\"location\"]}", request2.getBody().readUtf8()); } @Test public void withValidApiKeyPreserveShouldReturnSourceWithDataForArray() throws Exception, IOException, InterruptedException { Tinify.setKey("valid"); server.enqueue(new MockResponse() .setResponseCode(201) 
.addHeader("Location", "https://api.tinify.com/some/location")); server.enqueue(new MockResponse() .setResponseCode(200) .setBody("copyrighted file")); String[] options = new String [] {"copyright", "location"}; assertThat(Source.fromBuffer("png file".getBytes()).preserve(options).toBuffer(), is(equalTo("copyrighted file".getBytes()))); RecordedRequest request1 = server.takeRequest(3, TimeUnit.SECONDS); assertEquals("png file", request1.getBody().readUtf8()); RecordedRequest request2 = server.takeRequest(3, TimeUnit.SECONDS); assertJsonEquals("{\"preserve\":[\"copyright\",\"location\"]}", request2.getBody().readUtf8()); } @Test public void withValidApiKeyPreserveShouldIncludeOtherOptionsIfSet() throws Exception, IOException, InterruptedException { Tinify.setKey("valid"); server.enqueue(new MockResponse() .setResponseCode(201) .addHeader("Location", "https://api.tinify.com/some/location")); server.enqueue(new MockResponse() .setResponseCode(200) .setBody("copyrighted resized file")); Options resizeOptions = new Options().with("width", 100).with("height", 60); String[] preserveOptions = new String [] {"copyright", "location"}; assertThat(Source.fromBuffer("png file".getBytes()).resize(resizeOptions).preserve(preserveOptions).toBuffer(), is(equalTo("copyrighted resized file".getBytes()))); RecordedRequest request1 = server.takeRequest(3, TimeUnit.SECONDS); assertEquals("png file", request1.getBody().readUtf8()); RecordedRequest request2 = server.takeRequest(3, TimeUnit.SECONDS); assertJsonEquals("{\"resize\":{\"width\":100,\"height\":60},\"preserve\":[\"copyright\",\"location\"]}", request2.getBody().readUtf8()); } @Test public void withValidApiKeyResizeShouldReturnSource() throws Exception, InterruptedException { Tinify.setKey("valid"); server.enqueue(new MockResponse() .setResponseCode(201) .addHeader("Location", "https://api.tinify.com/some/location")); server.enqueue(new MockResponse() .setResponseCode(200) .setBody("small file")); Options options = new 
Options().with("width", 100).with("height", 60); assertThat(Source.fromBuffer("png file".getBytes()).resize(options), isA(Source.class)); RecordedRequest request1 = server.takeRequest(3, TimeUnit.SECONDS); assertEquals("png file", request1.getBody().readUtf8()); } @Test public void withValidApiKeyResizeShouldReturnSourceWithData() throws Exception, IOException, InterruptedException { Tinify.setKey("valid"); server.enqueue(new MockResponse() .setResponseCode(201) .addHeader("Location", "https://api.tinify.com/some/location")); server.enqueue(new MockResponse() .setResponseCode(200) .setBody("small file")); Options options = new Options().with("width", 100).with("height", 60); assertThat(Source.fromBuffer("png file".getBytes()).resize(options).toBuffer(), is(equalTo("small file".getBytes()))); RecordedRequest request1 = server.takeRequest(3, TimeUnit.SECONDS); assertEquals("png file", request1.getBody().readUtf8()); RecordedRequest request2 = server.takeRequest(3, TimeUnit.SECONDS); assertJsonEquals("{\"resize\":{\"width\":100,\"height\":60}}", request2.getBody().readUtf8()); } @Test public void withValidApiKeyStoreShouldReturnResultMeta() throws Exception, InterruptedException { Tinify.setKey("valid"); server.enqueue(new MockResponse() .setResponseCode(201) .addHeader("Location", "https://api.tinify.com/some/location") .addHeader("Compression-Count", 12)); server.enqueue(new MockResponse() .setResponseCode(200) .addHeader("Location", "https://bucket.s3.amazonaws.com/example")); Options options = new Options().with("service", "s3"); assertThat(Source.fromBuffer("png file".getBytes()).store(options), isA(ResultMeta.class)); RecordedRequest request1 = server.takeRequest(3, TimeUnit.SECONDS); assertEquals("png file", request1.getBody().readUtf8()); RecordedRequest request2 = server.takeRequest(3, TimeUnit.SECONDS); assertJsonEquals("{\"store\":{\"service\":\"s3\"}}", request2.getBody().readUtf8()); } @Test public void 
withValidApiKeyStoreShouldReturnResultMetaWithLocation() throws Exception, InterruptedException { Tinify.setKey("valid"); server.enqueue(new MockResponse() .setResponseCode(201) .addHeader("Location", "https://api.tinify.com/some/location") .addHeader("Compression-Count", 12)); server.enqueue(new MockResponse() .setResponseCode(200) .addHeader("Location", "https://bucket.s3.amazonaws.com/example")); Options options = new Options().with("service", "s3"); assertEquals("https://bucket.s3.amazonaws.com/example", Source.fromBuffer("png file".getBytes()).store(options).location()); RecordedRequest request1 = server.takeRequest(3, TimeUnit.SECONDS); assertEquals("png file", request1.getBody().readUtf8()); RecordedRequest request2 = server.takeRequest(3, TimeUnit.SECONDS); assertJsonEquals("{\"store\":{\"service\":\"s3\"}}", request2.getBody().readUtf8()); } @Test public void withValidApiKeyStoreShouldIncludeOtherOptionsIfSet() throws Exception, IOException, InterruptedException { Tinify.setKey("valid"); server.enqueue(new MockResponse() .setResponseCode(201) .addHeader("Location", "https://api.tinify.com/some/location") .addHeader("Compression-Count", 12)); server.enqueue(new MockResponse() .setResponseCode(200) .setBody("compressed file")); server.enqueue(new MockResponse() .setResponseCode(200) .addHeader("Location", "https://bucket.s3.amazonaws.com/example")); Options resizeOptions = new Options().with("width", 100); Options storeOptions = new Options().with("service", "s3"); Source.fromBuffer("png file".getBytes()).resize(resizeOptions).store(storeOptions); RecordedRequest request1 = server.takeRequest(3, TimeUnit.SECONDS); assertEquals("png file", request1.getBody().readUtf8()); RecordedRequest request2 = server.takeRequest(3, TimeUnit.SECONDS); assertJsonEquals("{\"resize\":{\"width\":100},\"store\":{\"service\":\"s3\"}}", request2.getBody().readUtf8()); } @Test public void withValidApiKeyToBufferShouldReturnImageData() throws Exception, IOException { 
Tinify.setKey("valid"); server.enqueue(new MockResponse() .setResponseCode(201) .addHeader("Location", "https://api.tinify.com/some/location") .addHeader("Compression-Count", 12)); server.enqueue(new MockResponse() .setResponseCode(200) .setBody("compressed file")); assertThat(Source.fromBuffer("png file".getBytes()).toBuffer(), is(equalTo("compressed file".getBytes()))); } @Test public void withValidApiKeyToFileShouldStoreImageData() throws Exception, IOException { Tinify.setKey("valid"); server.enqueue(new MockResponse() .setResponseCode(201) .addHeader("Location", "https://api.tinify.com/some/location") .addHeader("Compression-Count", 12)); server.enqueue(new MockResponse() .setResponseCode(200) .setBody("compressed file")); Path tempFile = Files.createTempFile("tinify_", null); tempFile.toFile().deleteOnExit(); Source.fromBuffer("png file".getBytes()).toFile(tempFile.toString()); assertThat(Files.readAllBytes(tempFile), is(equalTo("compressed file".getBytes()))); } }
package org.xtuml.bp.debug.ui.launch; //==================================================================== // //File: $RCSfile: VerifiableElementComposite.java,v $ //Version: $Revision: 1.16 $ //Modified: $Date: 2013/01/10 23:17:50 $ // //(c) Copyright 2005-2014 by Mentor Graphics Corp. All rights reserved. // //==================================================================== // Licensed under the Apache License, Version 2.0 (the "License"); you may not // use this file except in compliance with the License. You may obtain a copy // of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, WITHOUT // WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the // License for the specific language governing permissions and limitations under // the License. //======================================================================== import java.util.ArrayList; import java.util.Enumeration; import java.util.HashMap; import java.util.Hashtable; import java.util.Iterator; import java.util.Map; import java.util.Set; import java.util.Vector; import org.eclipse.core.runtime.CoreException; import org.eclipse.debug.core.ILaunchConfiguration; import org.eclipse.debug.core.ILaunchConfigurationWorkingCopy; import org.eclipse.jface.dialogs.IMessageProvider; import org.eclipse.jface.preference.IPreferenceStore; import org.eclipse.jface.viewers.CellEditor; import org.eclipse.jface.viewers.CheckStateChangedEvent; import org.eclipse.jface.viewers.ICellModifier; import org.eclipse.jface.viewers.ICheckStateListener; import org.eclipse.jface.viewers.ILabelProviderListener; import org.eclipse.jface.viewers.ITableLabelProvider; import org.eclipse.swt.SWT; import org.eclipse.swt.events.ControlEvent; import org.eclipse.swt.events.ControlListener; import org.eclipse.swt.events.SelectionAdapter; import 
org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.events.VerifyEvent; import org.eclipse.swt.events.VerifyListener; import org.eclipse.swt.graphics.Image; import org.eclipse.swt.layout.GridData; import org.eclipse.swt.layout.GridLayout; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Event; import org.eclipse.swt.widgets.Label; import org.eclipse.swt.widgets.Listener; import org.eclipse.swt.widgets.Text; import org.eclipse.swt.widgets.TreeColumn; import org.eclipse.swt.widgets.TreeItem; import org.xtuml.bp.core.ComponentInComponent_c; import org.xtuml.bp.core.ComponentPackageInPackage_c; import org.xtuml.bp.core.ComponentPackage_c; import org.xtuml.bp.core.ComponentReference_c; import org.xtuml.bp.core.Component_c; import org.xtuml.bp.core.CorePlugin; import org.xtuml.bp.core.Domain_c; import org.xtuml.bp.core.Function_c; import org.xtuml.bp.core.ModelClass_c; import org.xtuml.bp.core.Ooaofooa; import org.xtuml.bp.core.Operation_c; import org.xtuml.bp.core.Package_c; import org.xtuml.bp.core.PackageableElement_c; import org.xtuml.bp.core.SystemModel_c; import org.xtuml.bp.core.common.BridgePointPreferencesStore; import org.xtuml.bp.core.common.NonRootModelElement; import org.xtuml.bp.core.ui.preferences.VerifierPreferences; import org.xtuml.bp.core.ui.tree.ModelCheckedTreeViewer; import org.xtuml.bp.core.ui.tree.SpinnerBoxCellEditor; import org.xtuml.bp.debug.ui.BPDebugUIPlugin; import org.xtuml.bp.ui.explorer.ModelLabelProvider; public class VerifiableElementComposite extends Composite implements Listener, IMessageProvider { // controls private ModelCheckedTreeViewer tableTreeViewer; private VerifiableElementInitializerDialog initializerEditor = null; private Button enableLogInfo; private Button enableSimulatedTime; private Button enableDeterministicExecution; private Text executionTimeoutValue; // data objects private static Hashtable<String, SystemModel_c> projectMap; // this map is used 
to map data from the launch config private Map<ILaunchConfiguration, Map<String, Vector<String>>> selectedModelsMap = new HashMap<ILaunchConfiguration, Map<String, Vector<String>>>(); // update private Vector updateListenerList = new Vector(); private String message; private int messageType; private ILaunchConfiguration configuration; enum ColumnNames { ModelExplorer(0), Multiplicity(1), Initializer(2); private int columnPosition; private String columnText; private ColumnNames(int position) { columnPosition = position; switch (position) { case 0: columnText = "Model Explorer"; break; case 1: columnText = "Multiplicity"; break; case 2: columnText = "Initializer"; break; } } protected int position() { return columnPosition; } protected String text() { return columnText; } protected static String[] getNames() { String[] names = {ColumnTextModelExplorer, ColumnTextMultiplicity, ColumnTextInitializer}; return names; } public static String ColumnTextModelExplorer = "Model Explorer"; public static String ColumnTextMultiplicity = "Multiplicity"; public static String ColumnTextInitializer = "Initializer"; } VerifiableElementComposite(Composite parent) { this(parent, 0); } VerifiableElementComposite(Composite parent, int style) { super(parent, style); createControl(); } public void dispose() { super.dispose(); } public Vector<String> getElementVector(String projectName) { if (configuration != null) { Vector<String> vector = null; Map<String, Vector<String>> map = selectedModelsMap .get(configuration); if (map != null) { vector = map.get(projectName); } if (vector == null) { vector = new Vector<String>(); // add every element to the vector Enumeration<SystemModel_c> systems = projectMap.elements(); while (systems.hasMoreElements()) { SystemModel_c system = systems.nextElement(); if (system.getName().equals(projectName)) { Object[] children = VerifierLaunchContentProvider .instance().getChildren(system); initializeChildren(vector, children); } } } return vector; } return null; 
} private void initializeChildren(Vector<String> vector, Object[] children) { if (children == null) return; for (int i = 0; i < children.length; i++) { if (isVerifiableElement(children[i])) { String entry = VerifierLaunchConfiguration .getComponentSelectionString( ((NonRootModelElement) children[i]) .Get_ooa_id().toString(), VerifierLaunchConfiguration.ConfigurationAttribute.DefaultMultiplicity, VerifierLaunchConfiguration.ConfigurationAttribute.DefaultInitializer, VerifierLaunchConfiguration.DISABLED_STATE); vector.add(entry); } initializeChildren(vector, VerifierLaunchContentProvider.instance() .getChildren(children[i])); } } private boolean isVerifiableElement(Object object) { if (object instanceof Component_c) { return true; } else if (object instanceof ComponentReference_c) { return true; } else if (object instanceof Domain_c) { return true; } else if (object instanceof Package_c) { Function_c anyFunc = Function_c .getOneS_SYNCOnR8001(PackageableElement_c .getManyPE_PEsOnR8000((Package_c) object)); Component_c anyComp = Component_c .getOneC_COnR8001(PackageableElement_c .getManyPE_PEsOnR8000((Package_c) object)); ComponentReference_c anyRef = ComponentReference_c .getOneCL_ICOnR8001(PackageableElement_c .getManyPE_PEsOnR8000((Package_c) object)); Operation_c anyOp = Operation_c.getOneO_TFROnR115(ModelClass_c .getManyO_OBJsOnR8001(PackageableElement_c .getManyPE_PEsOnR8000((Package_c) object))); if (anyFunc != null || anyComp != null || anyOp != null || anyRef != null) { return true; } else { Package_c[] childPkgs = Package_c .getManyEP_PKGsOnR8001(PackageableElement_c .getManyPE_PEsOnR8000((Package_c) object)); for (int i = 0; i < childPkgs.length; i++) { if (isVerifiableElement(childPkgs[i])) { return true; } } } } return false; } public ModelCheckedTreeViewer getTreeViewer() { return tableTreeViewer; } private void createControl() { Composite modelsComp = new Composite(this, SWT.NONE); GridLayout layout = new GridLayout(); layout.numColumns = 1; 
modelsComp.setLayout(layout); GridData data = new GridData(); data.grabExcessHorizontalSpace = true; data.horizontalAlignment = GridData.FILL; data.verticalAlignment = GridData.FILL; data.grabExcessVerticalSpace = true; modelsComp.setLayoutData(data); Label selectModel = new Label(modelsComp, SWT.NONE); selectModel.setText("Select Model(s) to Verify"); GridData d = new GridData(); d = new GridData(); d.grabExcessHorizontalSpace = true; d.horizontalAlignment = GridData.FILL; selectModel.setLayoutData(d); createTableTreeViewer(modelsComp); // Session file selection Composite loggingComp = new Composite(modelsComp, SWT.NONE); GridLayout lay2 = new GridLayout(2, false); loggingComp.setLayout(lay2); GridData sdata = new GridData(); sdata.horizontalAlignment = GridData.FILL; sdata.verticalAlignment = GridData.END; sdata.grabExcessHorizontalSpace = true; sdata.grabExcessVerticalSpace = false; loggingComp.setLayoutData(sdata); Label selectLogging = new Label(loggingComp, SWT.NONE); selectLogging.setText("Log model execution activity"); enableLogInfo = new Button(loggingComp, SWT.CHECK); enableLogInfo.setSelection(true); enableLogInfo.addListener(SWT.Selection, this); enableLogInfo.addListener(SWT.Modify, this); IPreferenceStore store = CorePlugin.getDefault().getPreferenceStore(); boolean defaultDeterministicSetting = store .getBoolean(BridgePointPreferencesStore.ENABLE_DETERMINISTIC_VERIFIER); Label selectDeterministicExecution = new Label(loggingComp, SWT.NONE); selectDeterministicExecution .setText(VerifierPreferences.deterministicExecutionBtnName); selectDeterministicExecution .setToolTipText(VerifierPreferences.deterministicExecutionBtnTip); enableDeterministicExecution = new Button(loggingComp, SWT.CHECK); enableDeterministicExecution.setSelection(defaultDeterministicSetting); enableDeterministicExecution.addListener(SWT.Selection, this); enableDeterministicExecution.addListener(SWT.Modify, this); enableDeterministicExecution.setEnabled(true); 
enableDeterministicExecution .setToolTipText(VerifierPreferences.deterministicExecutionBtnTip); Label selectSimulatedTime = new Label(loggingComp, SWT.NONE); selectSimulatedTime.setText("Enable simulated time"); enableSimulatedTime = new Button(loggingComp, SWT.CHECK); enableSimulatedTime.setSelection(true); enableSimulatedTime.addListener(SWT.Selection, this); enableSimulatedTime.addListener(SWT.Modify, this); Label executionTimeout = new Label(loggingComp, SWT.NONE); executionTimeout.setText("Execution timeout (seconds)"); executionTimeoutValue = new Text(loggingComp, SWT.SINGLE | SWT.BORDER | SWT.RIGHT); executionTimeoutValue.setEnabled(true); executionTimeoutValue.setEditable(true); executionTimeoutValue.setToolTipText("Execution will terminate after the specified number of seconds. A value of 0 means execution will not be terminated."); executionTimeoutValue.addListener(SWT.Selection, this); executionTimeoutValue.addListener(SWT.Modify, this); executionTimeoutValue.setTextLimit(9); executionTimeoutValue.setText("0"); executionTimeoutValue.addVerifyListener(new VerifyListener() { public void verifyText(VerifyEvent event) { // Assume we don't allow it event.doit = false; // Get the character typed char myChar = event.character; // Allow 0-9 if (Character.isDigit(myChar)) { event.doit = true; } // Allow backspace if (myChar == '\b') { event.doit = true; } // Allow initialization if (myChar==0) { event.doit = true; } } }); /** * If deterministic behavior is enabled by default disable the Simtime * control and enable SimTime. */ if (enableDeterministicExecution.getSelection()) { enableSimulatedTime.setSelection(true); enableSimulatedTime.setEnabled(false); } // If the user enables deterministic execution, then we must always // enable SimTime. 
When deterministic behavior is NOT selected the // user is free to select SimTime or Clocktime enableDeterministicExecution .addSelectionListener(new SelectionAdapter() { public void widgetSelected(SelectionEvent e) { if (enableDeterministicExecution.getSelection()) { enableSimulatedTime.setSelection(true); enableSimulatedTime.setEnabled(false); } else { enableSimulatedTime.setEnabled(true); } } }); initializeProjectList(); updateControls(); } private void createTableTreeViewer(Composite modelsComp) { tableTreeViewer = new ModelCheckedTreeViewer(modelsComp, SWT.FULL_SELECTION, false, false); tableTreeViewer.setLabelProvider(new ITableLabelProvider() { ModelLabelProvider modelLabelProvider = new ModelLabelProvider(); public Image getColumnImage(Object element, int columnIndex) { if (columnIndex == 0) { return modelLabelProvider.getImage(element); } else { return null; } } public String getColumnText(Object element, int columnIndex) { if (columnIndex == 0) { return modelLabelProvider.getText(element); } else if (columnIndex == ColumnNames.Multiplicity.position()) { if ((element instanceof Component_c) || (element instanceof ComponentReference_c)) { return getMultiplicityForElement(element); } } else if (columnIndex == ColumnNames.Initializer.position()) { if ((element instanceof Component_c) || (element instanceof ComponentReference_c)) { return initializerEditor .getInitializerForElement(element); } } return ""; } public void addListener(ILabelProviderListener listener) { } public void dispose() { } public boolean isLabelProperty(Object element, String property) { if (property.equals(ColumnNames.ColumnTextMultiplicity) || property.equals(ColumnNames.ColumnTextInitializer)) { return true; } return false; } public void removeListener(ILabelProviderListener listener) { } }); GridData treeData = new GridData(); treeData.horizontalAlignment = GridData.FILL; treeData.verticalAlignment = GridData.FILL; treeData.grabExcessHorizontalSpace = true; 
treeData.grabExcessVerticalSpace = true; tableTreeViewer.getControl().setLayoutData(treeData); tableTreeViewer.setContentProvider(VerifierLaunchContentProvider .instance()); createColumns(); tableTreeViewer.initialize(); tableTreeViewer.getTree().setLinesVisible(true); tableTreeViewer.getTree().setHeaderVisible(true); tableTreeViewer.addCheckStateListener(new VerifierTableTreeStateChangeListener(this)); } protected void enableExecutionOfElement(Vector<String> vector, NonRootModelElement element) { Iterator iterator = vector.iterator(); String match = ""; while (iterator.hasNext()) { String current = (String) iterator.next(); if (current.startsWith(element.Get_ooa_id().toString())) { match = current; break; } } if (!match.equals("")) { // found the element entry // first remove it from the vector vector.remove(match); // now replace the disabled string with the // enabled string String newString = VerifierLaunchConfiguration .updateAllComponentSelectionStrings( match, VerifierLaunchConfiguration.ConfigurationAttribute.State, VerifierLaunchConfiguration.ENABLED_STATE); vector.add(newString); } } protected void disableExecutionOfElement(Vector<String> vector, NonRootModelElement element) { Iterator iterator = vector.iterator(); String match = ""; while (iterator.hasNext()) { String current = (String) iterator.next(); if (current.startsWith(element.Get_ooa_id().toString())) { match = current; break; } } if (!match.equals("")) { // found the element entry // first remove it from the vector vector.remove(match); // Remove any initializers. We do this at this point so that // if this is re-enabled the initializer must be selected again. // This helps us assure we only have 1 initializer. 
String newString = VerifierLaunchConfiguration .updateAllComponentSelectionStrings( match, VerifierLaunchConfiguration.ConfigurationAttribute.Initializer, VerifierLaunchConfiguration.ConfigurationAttribute.DefaultInitializer); tableTreeViewer.refresh(newString); if (!newString.equals(match)) { updateControls(); } // now replace the enabled string with the // disabled string newString = VerifierLaunchConfiguration .updateAllComponentSelectionStrings( match, VerifierLaunchConfiguration.ConfigurationAttribute.State, VerifierLaunchConfiguration.DISABLED_STATE); vector.add(newString); } } protected String getMultiplicityForElement(Object element) { String multiplicity = "1"; if (element instanceof NonRootModelElement) { Vector<String> vector = getElementVector(BPDebugUtils .getElementsSystem(element).getName()); if (vector != null) { Iterator iterator = vector.iterator(); while (iterator.hasNext()) { String current = (String) iterator.next(); if (current.startsWith(((NonRootModelElement) element) .Get_ooa_id().toString())) { // Get the current value associated with this launch // config multiplicity = VerifierLaunchConfiguration .getInternalElement(current, VerifierLaunchConfiguration.ConfigurationAttribute.Multiplicity); int currentMult = Integer.valueOf(multiplicity); if (!allowMultipleInstances(element) && currentMult > 1) { // The model must have been changed by the user to // prevent multiple instances. Therefore, the // current // value in this launch config is not allowed. Reset // the value to 1. 
multiplicity = VerifierLaunchConfiguration.ConfigurationAttribute.DefaultMultiplicity; setMultiplicityForElement(element, Integer.getInteger(multiplicity)); } break; } } } } return multiplicity; } protected void setMultiplicityForElement(Object element, Integer newValue) { if (element instanceof NonRootModelElement) { String initializer = VerifierLaunchConfiguration.ConfigurationAttribute.DefaultInitializer; String enablement = VerifierLaunchConfiguration.DISABLED_STATE; // $NON-NLS-1$ Vector<String> vector = getElementVector(BPDebugUtils .getElementsSystem(element).getName()); // remove the old values String match = ""; Iterator iterator = vector.iterator(); while (iterator.hasNext()) { String current = (String) iterator.next(); if (current.startsWith(((NonRootModelElement) element) .Get_ooa_id().toString())) { match = current; break; } } if (!match.equals("")) { vector.remove(match); initializer = VerifierLaunchConfiguration .getInternalElement( match, VerifierLaunchConfiguration.ConfigurationAttribute.Initializer); enablement = VerifierLaunchConfiguration .getInternalElement( match, VerifierLaunchConfiguration.ConfigurationAttribute.State); } String newEntry = VerifierLaunchConfiguration .getComponentSelectionString(((NonRootModelElement) element) .Get_ooa_id().toString(), newValue.toString(), initializer, enablement); vector.add(newEntry); tableTreeViewer.refresh(element); if (!newEntry.equals(match)) { updateControls(); } } } class VerifiableElementTreeCellModifier implements ICellModifier { public void modify(Object element, String property, Object value) { if (property.equals(ColumnNames.ColumnTextMultiplicity)) { if (value instanceof String) { value = 0; } setMultiplicityForElement(((TreeItem) element).getData(), (Integer) value); } // do nothing for Initializer property } public Object getValue(Object element, String property) { if ((element instanceof Component_c) || (element instanceof ComponentReference_c)) { if 
(property.equals(ColumnNames.ColumnTextMultiplicity)) { return Integer .valueOf(getMultiplicityForElement(element)); } if (property.equals(ColumnNames.ColumnTextInitializer)) { return initializerEditor .getInitializerForElement(element); } } return ""; } public boolean canModify(Object element, String property) { boolean canModify = false; if (property == ColumnNames.ColumnTextMultiplicity) { // Prevent editing of instances when the model doesn't allow // it canModify = VerifiableElementComposite .allowMultipleInstances(element); } else { if (element instanceof Component_c) { canModify = true; } else if (element instanceof ComponentReference_c) { canModify = true; } } if (canModify && property == ColumnNames.ColumnTextInitializer) { // TODO: If we ever decide to enforce a rule about having a single // selection, this is where it would go. } return canModify; } } private void createColumns() { ControlListener controlListener = new ControlListener() { public void controlResized(ControlEvent e) { // have the last column use the rest of // the real estate TreeColumn[] columns = tableTreeViewer.getTree().getColumns(); int width = 0; for (int i = 0; i + 1 < columns.length; i++) { width = width + columns[i].getWidth(); } width = tableTreeViewer.getControl().getBounds().width - width; if (columns.length > 0) { columns[columns.length - 1].setWidth(width - 2); } } public void controlMoved(ControlEvent e) { // do nothing } }; createColumn(controlListener, ColumnNames.ModelExplorer); createColumn(controlListener, ColumnNames.Multiplicity); createColumn(controlListener, ColumnNames.Initializer); tableTreeViewer.getControl().addControlListener(controlListener); SpinnerBoxCellEditor multiplicityEditor = new SpinnerBoxCellEditor( tableTreeViewer.getTree(), 1, 2000); initializerEditor = new VerifiableElementInitializerDialog(this); tableTreeViewer.setCellEditors(new CellEditor[] { null, multiplicityEditor, initializerEditor }); tableTreeViewer.setCellModifier(new 
VerifiableElementTreeCellModifier()); tableTreeViewer.setColumnProperties(ColumnNames.getNames()); } private void createColumn(ControlListener controlListener, ColumnNames column) { TreeColumn treeColumn = new TreeColumn(tableTreeViewer.getTree(), SWT.NONE); treeColumn.setText(column.text()); if (column == ColumnNames.ModelExplorer) { tableTreeViewer.getTree().addControlListener( new ControlListener() { public void controlResized(ControlEvent e) { if (tableTreeViewer.getTree().getColumns().length > 0) { tableTreeViewer.getTree().getColumn(0) .setWidth(getBounds().width / 2); } removeControlListener(this); } public void controlMoved(ControlEvent e) { } }); } else { treeColumn.setAlignment(SWT.RIGHT); treeColumn.pack(); } if (column == ColumnNames.Initializer) { // we do not want the last column // resizable as it is dynamically // sized treeColumn.setResizable(false); } else { // Add control listener for all columns but the last treeColumn.addControlListener(controlListener); } } /** * Check the given element to see if we should allow multiple instances to * be launched. The C_C.Mult and C_IC.Mult values are set to 0 by default. * When it is set to 1 it means the user has changed the {C_C|C_IC}.Mult * attribute value to "Many" in the properties view. 
 *
 * @param element the element to check; only Component_c and
 *                ComponentReference_c can allow multiple instances
 * @return true when the element's Mult attribute permits multiple instances
 */
private static boolean allowMultipleInstances(Object element) {
    boolean allowInstances = false;
    Component_c comp = null;
    if (element instanceof Component_c) {
        comp = (Component_c) element;
        if (comp.getMult() > 0) {
            allowInstances = true;
        }
    } else if (element instanceof ComponentReference_c) {
        ComponentReference_c impComp = (ComponentReference_c) element;
        if (impComp.getMult() > 0) {
            allowInstances = true;
        }
    }
    return allowInstances;
}

// Rebuilds the name -> SystemModel_c map from every system model in the
// default model root.
private void initializeProjectList() {
    // called once on control creation
    if (projectMap == null) {
        projectMap = new Hashtable<String, SystemModel_c>();
    } else {
        projectMap.clear();
    }
    // list of projects
    SystemModel_c[] x = SystemModel_c.SystemModelInstances(Ooaofooa
            .getDefaultInstance());
    for (int i = 0; i < x.length; i++) {
        projectMap.put(x[i].getName(), x[i]);
    }
}

// SWT Listener callback: any widget event just revalidates the page.
public void handleEvent(Event event) {
    updateControls();
}

// Recomputes the page's error message and notifies registered listeners.
public void updateControls() {
    setMessage(null, IMessageProvider.NONE);
    TreeItem[] items = tableTreeViewer.getTree().getItems();
    if (items.length == 0) {
        setMessage(
                "Error! No projects (or opened projects) exist in the workspace.",
                IMessageProvider.ERROR);
    } else if (!isElementSelected()) {
        setMessage("Select at least one model to verify.",
                IMessageProvider.ERROR);
    }
    notifyUpdateListeners();
}

// Fans the update out to every registered ISWTCustomUpdate listener.
private void notifyUpdateListeners() {
    for (Iterator iter = updateListenerList.iterator(); iter.hasNext();) {
        ISWTCustomUpdate l = (ISWTCustomUpdate) iter.next();
        if (l != null) {
            l.performUpdate();
        }
    }
}

public void addUpdateListener(ISWTCustomUpdate listener) {
    updateListenerList.add(listener);
}

/**
 * Removes an update listener from this composite.
 */
protected void removeUpdateListener(ISWTCustomUpdate listener) {
    updateListenerList.remove(listener);
}

// True when at least one element in the tree is checked.
private boolean isElementSelected() {
    return tableTreeViewer.getCheckedElements().length != 0;
}

// IMessageProvider
public String getMessage() {
    return message;
}

public int getMessageType() {
    return messageType;
}

private void setMessage(String message, int type) {
    this.message = message;
    this.messageType = type;
}

// Populates the controls from the given launch configuration, building the
// per-project default selection vectors the first time a configuration is
// seen.
public void initializeFromConfiguration(ILaunchConfiguration pConfiguration) {
    // Prefer the original configuration behind a working copy so the
    // selectedModelsMap key stays stable across edit sessions.
    if (pConfiguration instanceof ILaunchConfigurationWorkingCopy) {
        configuration = ((ILaunchConfigurationWorkingCopy) pConfiguration)
                .getOriginal();
        if (configuration == null) {
            configuration = pConfiguration;
        }
    } else {
        configuration = pConfiguration;
    }
    // if the map has not been setup yet, initialize it here
    Map<String, Vector<String>> modelMap = selectedModelsMap
            .get(configuration);
    if (modelMap == null) {
        modelMap = new HashMap<String, Vector<String>>();
        Set<String> projectSet = projectMap.keySet();
        Iterator iterator = projectSet.iterator();
        while (iterator.hasNext()) {
            // for each project store a vector full of default values
            String projectName = (String) iterator.next();
            Vector<String> vector = getElementVector(projectName);
            modelMap.put(projectName, vector);
        }
        selectedModelsMap.put(configuration, modelMap);
    }
    // refresh the tree
    tableTreeViewer.refresh();
    if ((projectMap.size() == 0)) {
updateControls(); return; } if ((projectMap == null) || (projectMap.size() == 0)) { updateControls(); } try { Map storedModelMap = configuration .getAttribute(VerifierLaunchConfiguration.ATTR_SELECTEDMODELS, new Hashtable()); Set projectSet = storedModelMap.keySet(); // clear the selection, to allow setting // up the selection correctly tableTreeViewer.setCheckedElements(new Object[0]); Iterator projectIterator = projectSet.iterator(); while (projectIterator.hasNext()) { String projectName = (String) projectIterator.next(); if (projectMap.containsKey(projectName)) { // set model selections of all project updateSelectedModelList(storedModelMap, projectName); updateData(projectMap.get(projectName)); } else { // the previous project is either closed or deleted, // currently not available // use default settings // updateData(); } } boolean logState = configuration.getAttribute( VerifierLaunchConfiguration.ATTR_LOGACTIVITY, false); enableLogInfo.setSelection(logState); boolean enableSim = configuration.getAttribute( VerifierLaunchConfiguration.ATTR_ENABLESIMTIME, false); enableSimulatedTime.setSelection(enableSim); // Use the BridgePoint preference as the default setting for // deterministic behavior IPreferenceStore store = CorePlugin.getDefault() .getPreferenceStore(); boolean defaultDeterministicSetting = store .getBoolean(BridgePointPreferencesStore.ENABLE_DETERMINISTIC_VERIFIER); boolean enableDeterminism = configuration.getAttribute( VerifierLaunchConfiguration.ATTR_ENABLEDETERMINISM, defaultDeterministicSetting); enableDeterministicExecution.setSelection(enableDeterminism); // If deterministic behavior is selected SimTime is always used. 
// If not, the user is allowed to choose SimTime or clock time if (enableDeterminism) { enableSimulatedTime.setSelection(true); enableSimulatedTime.setEnabled(false); } else { enableSimulatedTime.setEnabled(true); } int executionTimeout = configuration.getAttribute( VerifierLaunchConfiguration.ATTR_EXECUTIONTIMEOUT, 0); executionTimeoutValue.setText(String.valueOf(executionTimeout)); } catch (CoreException e) { BPDebugUIPlugin.logError("Unable to get attribute value", e); } } public void updateSelectedModelList(Map newlist, String selPjtName) { // newList has format: // key (pjtName) --> model1_uuid + Multiplicity + Initializer + // Enablement, model2_uuid... if (newlist == null) { return; } Iterator ite = newlist.keySet().iterator(); while (ite.hasNext()) { Object key = ite.next(); String s = (String) newlist.get(key); Object o = projectMap.get(selPjtName); if ((o != null) && o instanceof SystemModel_c) { // put selected model's UUIDs of key (pjt) to map Vector<String> vector = getElementVector(((SystemModel_c) o) .getName()); updateEntries((SystemModel_c) o, s, vector); } } } private Map<NonRootModelElement, String> getEnabledElementsFromEntries( SystemModel_c system, String[] objects) { Map<NonRootModelElement, String> map = new HashMap<NonRootModelElement, String>(); // add all elements that match the ids from the given // array for (int i = 0; i < objects.length; i++) { Domain_c[] domains = Domain_c.getManyS_DOMsOnR28(system); for (int j = 0; j < domains.length; j++) { if (objects[i].startsWith(domains[j].getDom_id().toString())) { if (VerifierLaunchConfiguration.elementIsEnabled(objects[i])) { map.put(domains[j], VerifierLaunchConfiguration .getInternalElement(objects[i], VerifierLaunchConfiguration.ConfigurationAttribute.State)); } } } Component_c[] components = Component_c .getManyC_CsOnR4608(ComponentPackage_c .getManyCP_CPsOnR4606(system)); for (int j = 0; j < components.length; j++) { getEnabledElementsFromEntries(components[j], objects[i], map); } 
ComponentReference_c[] icomponents = ComponentReference_c .getManyCL_ICsOnR4201(components); for (int j = 0; j < icomponents.length; j++) { getEnabledElementsFromEntries(icomponents[j], objects[i], map); } Package_c[] packages = Package_c.getManyEP_PKGsOnR1401(system); for (int j = 0; j < packages.length; j++) { getEnabledElementsFromEntries(packages[j], objects[i], map); } } return map; } private void getEnabledElementsFromEntries(NonRootModelElement element, String object, Map<NonRootModelElement, String> map) { if (element instanceof Package_c) { Package_c pkg = (Package_c) element; if (object.startsWith(pkg.getPackage_id().toString())) { if (VerifierLaunchConfiguration.elementIsEnabled(object)) { String initializer = VerifierLaunchConfiguration .getInternalElement(object, VerifierLaunchConfiguration.ConfigurationAttribute.State); map.put(pkg, initializer); } } // descend hierarchy Package_c[] childPkgs = Package_c .getManyEP_PKGsOnR8001(PackageableElement_c .getManyPE_PEsOnR8000(pkg)); for (int k = 0; k < childPkgs.length; k++) { getEnabledElementsFromEntries(childPkgs[k], object, map); } Component_c[] childComps = Component_c .getManyC_CsOnR8001(PackageableElement_c .getManyPE_PEsOnR8000(pkg)); for (int k = 0; k < childComps.length; k++) { getEnabledElementsFromEntries(childComps[k], object, map); } ComponentReference_c[] childCompRefs = ComponentReference_c .getManyCL_ICsOnR8001(PackageableElement_c .getManyPE_PEsOnR8000(pkg)); for (int k = 0; k < childCompRefs.length; k++) { getEnabledElementsFromEntries(childCompRefs[k], object, map); } } else if (element instanceof Component_c) { Component_c comp = (Component_c) element; if (object.startsWith(comp.getId().toString())) { if (VerifierLaunchConfiguration.elementIsEnabled(object)) { String initializer = VerifierLaunchConfiguration .getInternalElement(object, VerifierLaunchConfiguration.ConfigurationAttribute.State); map.put(comp, initializer); } } // descend hierarchy Package_c[] childPkgs = Package_c 
.getManyEP_PKGsOnR8001(PackageableElement_c .getManyPE_PEsOnR8003(comp)); for (int k = 0; k < childPkgs.length; k++) { getEnabledElementsFromEntries(childPkgs[k], object, map); } Component_c[] childComps = Component_c .getManyC_CsOnR8001(PackageableElement_c .getManyPE_PEsOnR8003(comp)); for (int k = 0; k < childComps.length; k++) { getEnabledElementsFromEntries(childComps[k], object, map); } ComponentReference_c[] childCompRefs = ComponentReference_c .getManyCL_ICsOnR8001(PackageableElement_c .getManyPE_PEsOnR8003(comp)); for (int k = 0; k < childCompRefs.length; k++) { getEnabledElementsFromEntries(childCompRefs[k], object, map); } } else if (element instanceof ComponentReference_c) { ComponentReference_c compRef = (ComponentReference_c) element; if (object.startsWith(compRef.getId().toString())) { if (VerifierLaunchConfiguration.elementIsEnabled(object)) { String initializer = VerifierLaunchConfiguration .getInternalElement(object, VerifierLaunchConfiguration.ConfigurationAttribute.State); map.put(compRef, initializer); } } // no need to descend hierarchy } } private void updateEntries(SystemModel_c system, String idString, Vector<String> vector) { // add all elements that match the ids from the given // string String[] ids = VerifierLaunchConfiguration.getModelSelectionStrings(idString); for (int i = 0; i < ids.length; i++) { boolean found = false; Package_c[] pkgs = Package_c.getManyEP_PKGsOnR1401(system); for (int j = 0; j < pkgs.length; j++) { found = updateEntries(pkgs[j], ids[i], vector); if (found) { break; } } if (found) continue; Domain_c[] domains = Domain_c.getManyS_DOMsOnR28(system); for (int j = 0; j < domains.length; j++) { if (ids[i].startsWith(domains[j].getDom_id().toString())) { VerifierLaunchConfiguration.updateEntryInVector(ids[i], vector); found = true; // refresh columns for element tableTreeViewer.refresh(domains[j]); break; } } if (found) continue; Component_c[] components = Component_c .getManyC_CsOnR4608(ComponentPackage_c 
.getManyCP_CPsOnR4606(system)); for (int j = 0; j < components.length; j++) { found = updateEntries(components[j], ids[i], vector); if (found) { break; } } if (found) continue; ComponentReference_c[] icomponents = ComponentReference_c .getManyCL_ICsOnR4201(components); for (int j = 0; j < icomponents.length; j++) { found = updateEntries(icomponents[j], ids[i], vector); if (found) { break; } } // remove the entry if not found if (!found) { vector.remove(ids[i]); } } } private boolean updateEntries(NonRootModelElement element, String id, Vector<String> vector) { boolean found = false; if (element instanceof Package_c) { Package_c pkg = (Package_c) element; if (id.startsWith(pkg.getPackage_id().toString())) { VerifierLaunchConfiguration.updateEntryInVector(id, vector); found = true; // refresh columns for element tableTreeViewer.refresh(pkg); } if (!found) { // descend hierarchy Package_c[] childPkgs = Package_c .getManyEP_PKGsOnR8001(PackageableElement_c .getManyPE_PEsOnR8000(pkg)); for (int k = 0; k < childPkgs.length; k++) { found = updateEntries(childPkgs[k], id, vector); if (found) { break; } } if (!found) { Component_c[] childComps = Component_c .getManyC_CsOnR8001(PackageableElement_c .getManyPE_PEsOnR8000(pkg)); for (int k = 0; k < childComps.length; k++) { found = updateEntries(childComps[k], id, vector); if (found) { break; } } if (!found) { ComponentReference_c[] childCompRefs = ComponentReference_c .getManyCL_ICsOnR8001(PackageableElement_c .getManyPE_PEsOnR8000(pkg)); for (int k = 0; k < childCompRefs.length; k++) { found = updateEntries(childCompRefs[k], id, vector); if (found) { break; } } } } } } else if (element instanceof Component_c) { Component_c comp = (Component_c) element; if (id.startsWith(((Component_c) element).getId().toString())) { VerifierLaunchConfiguration.updateEntryInVector(id, vector); found = true; // refresh columns for element tableTreeViewer.refresh(comp); } if (!found) { // descend hierarchy Package_c[] childPkgs = Package_c 
.getManyEP_PKGsOnR8001(PackageableElement_c .getManyPE_PEsOnR8003(comp)); for (int k = 0; k < childPkgs.length; k++) { found = updateEntries(childPkgs[k], id, vector); if (found) { break; } } if (!found) { Component_c[] childComps = Component_c .getManyC_CsOnR8001(PackageableElement_c .getManyPE_PEsOnR8003(comp)); for (int k = 0; k < childComps.length; k++) { found = updateEntries(childComps[k], id, vector); if (found) { break; } } if (!found) { ComponentReference_c[] childCompRefs = ComponentReference_c .getManyCL_ICsOnR8001(PackageableElement_c .getManyPE_PEsOnR8003(comp)); for (int k = 0; k < childCompRefs.length; k++) { found = updateEntries(childCompRefs[k], id, vector); if (found) { break; } } } } } } else if (element instanceof ComponentReference_c) { if (id.startsWith(((ComponentReference_c) element).getId() .toString())) { VerifierLaunchConfiguration.updateEntryInVector(id, vector); found = true; // refresh columns for element tableTreeViewer.refresh((ComponentReference_c) element); } // No need to descend hierarchy } return found; } private void updateData(SystemModel_c system) { // update checked viewer from launch config Vector<String> selectMod = getElementVector(system.getName()); if ((selectMod != null) && (selectMod.size() > 0)) { Map<NonRootModelElement, String> selectionMap = getEnabledElementsFromEntries( system, selectMod.toArray(new String[selectMod.size()])); Set<NonRootModelElement> selectedSet = selectionMap.keySet(); Iterator iterator = selectedSet.iterator(); while (iterator.hasNext()) { NonRootModelElement element = (NonRootModelElement) iterator .next(); tableTreeViewer.setChecked(element, true); } } updateControls(); } public Map<String,Vector<String>> getMapOfElementsToStore() { // return a map of all elements // which have storable data, those // elements that are not checked, or // have not had the multiplicity or // initializer message set do not need // to get stored to disk Map<String, Vector<String>> map = new HashMap<String, 
Vector<String>>(); Set<String> projectSet = projectMap.keySet(); Iterator<String> iterator = projectSet.iterator(); while (iterator.hasNext()) { String projectName = (String) iterator.next(); Vector<String> elementVector = getElementVector(projectName); Vector<String> storableVector = getEntriesWithStorableData(elementVector); map.put(projectName, storableVector); } return map; } private Vector<String> getEntriesWithStorableData(Vector<String> entryVector) { Vector<String> storableVector = new Vector<String>(); Iterator<String> iterator = entryVector.iterator(); while (iterator.hasNext()) { String current = (String) iterator.next(); if (!VerifierLaunchConfiguration.isDefaultEntry(current)) { storableVector.add(current); } } return storableVector; } public boolean getActivityLogEnabled() { return enableLogInfo.getSelection(); } public boolean getSimulatedTimeEnabled() { // Always use simulated time when deterministic behavior is selected if (isDeterministic()) { return true; } else { return enableSimulatedTime.getSelection(); } } public boolean isDeterministic() { return enableDeterministicExecution.getSelection(); } public int getExecutionTimeout() { int result = 0; String strResult = executionTimeoutValue.getText(); // Note that the text is validated upon entry to assure an integer value if (!strResult.isEmpty()) { result = Integer.valueOf(strResult); } return result; } }
/* * Zed Attack Proxy (ZAP) and its related class files. * * ZAP is an HTTP/HTTPS proxy for assessing web application security. * * Copyright 2014 The ZAP Development Team * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.zaproxy.zap.extension.ascanrulesBeta; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.net.InetSocketAddress; import java.net.Socket; import java.net.SocketTimeoutException; import java.nio.ByteBuffer; import org.apache.commons.codec.binary.Hex; import org.apache.log4j.Logger; import org.parosproxy.paros.Constant; import org.parosproxy.paros.core.scanner.AbstractHostPlugin; import org.parosproxy.paros.core.scanner.Alert; import org.parosproxy.paros.core.scanner.Category; /** * A class to actively check if the web server is vulnerable to the HeartBleed OpenSSL vulnerability * Based on https://github.com/musalbas/heartbleed-masstest/blob/master/ssltest.py since the * developer there "disclaims copyright to this source code." * * @author 70pointer */ public class HeartBleedActiveScanner extends AbstractHostPlugin { /** the timeout, which is controlled by the Attack Strength */ private int timeoutMs = 0; /** the logger object */ private static Logger log = Logger.getLogger(HeartBleedActiveScanner.class); /** Prefix for internationalized messages used by this rule */ private static final String MESSAGE_PREFIX = "ascanbeta.heartbleed."; static final byte handShakeClientHello = 0x01; /** Alert Record Type. 
0x15. 21 in decimal */ static final byte alertRecordByte = 0x15; /** the value for the handshake record (aka the hello record byte) */ static final byte handshakeRecordByte = 0x16; /** all the various TLS versions that we will try */ static final String[] tlsNames = {"TLS 1.0", "TLS 1.1", "TLS 1.2"}; /** the binary codes for the various TLS versions that we will try */ static final byte[][] tlsBuffers = {{0x03, 0x01}, {0x03, 0x02}, {0x03, 0x03}}; /** * the trailer portion of the HELLO request that we wills end to say "Hello" to the SSL server */ static final byte[] helloBuffer = { (byte) 0x53, (byte) 0x43, (byte) 0x5b, (byte) 0x90, (byte) 0x9d, (byte) 0x9b, (byte) 0x72, (byte) 0x0b, (byte) 0xbc, (byte) 0x0c, (byte) 0xbc, (byte) 0x2b, (byte) 0x92, (byte) 0xa8, (byte) 0x48, (byte) 0x97, (byte) 0xcf, (byte) 0xbd, (byte) 0x39, (byte) 0x04, (byte) 0xcc, (byte) 0x16, (byte) 0x0a, (byte) 0x85, (byte) 0x03, (byte) 0x90, (byte) 0x9f, (byte) 0x77, (byte) 0x04, (byte) 0x33, (byte) 0xd4, (byte) 0xde, (byte) 0x00, /* //for the original implementation.. 
(byte)0x00, (byte)0x66, //Cipher suites length //followed by the cipher suites (byte)0xc0, (byte)0x14, (byte)0xc0, (byte)0x0a, (byte)0xc0, (byte)0x22, (byte)0xc0, (byte)0x21, (byte)0x00, (byte)0x39, (byte)0x00, (byte)0x38, (byte)0x00, (byte)0x88, (byte)0x00, (byte)0x87, (byte)0xc0, (byte)0x0f, (byte)0xc0, (byte)0x05, (byte)0x00, (byte)0x35, (byte)0x00, (byte)0x84, (byte)0xc0, (byte)0x12, (byte)0xc0, (byte)0x08, (byte)0xc0, (byte)0x1c, (byte)0xc0, (byte)0x1b, (byte)0x00, (byte)0x16, (byte)0x00, (byte)0x13, (byte)0xc0, (byte)0x0d, (byte)0xc0, (byte)0x03, (byte)0x00, (byte)0x0a, (byte)0xc0, (byte)0x13, (byte)0xc0, (byte)0x09, (byte)0xc0, (byte)0x1f, (byte)0xc0, (byte)0x1e, (byte)0x00, (byte)0x33, (byte)0x00, (byte)0x32, (byte)0x00, (byte)0x9a, (byte)0x00, (byte)0x99, (byte)0x00, (byte)0x45, (byte)0x00, (byte)0x44, (byte)0xc0, (byte)0x0e, (byte)0xc0, (byte)0x04, (byte)0x00, (byte)0x2f, (byte)0x00, (byte)0x96, (byte)0x00, (byte)0x41, (byte)0xc0, (byte)0x11, (byte)0xc0, (byte)0x07, (byte)0xc0, (byte)0x0c, (byte)0xc0, (byte)0x02, (byte)0x00, (byte)0x05, (byte)0x00, (byte)0x04, (byte)0x00, (byte)0x15, (byte)0x00, (byte)0x12, (byte)0x00, (byte)0x09, (byte)0x00, (byte)0x14, (byte)0x00, (byte)0x11, (byte)0x00, (byte)0x08, (byte)0x00, (byte)0x06, (byte)0x00, (byte)0x03, (byte)0x00, (byte)0xff, */ 0x02, 0x7C, // Cipher suites length: 636 bytes of data // followed by the individual cipher suites that we say we support. Ha! 
0x00, 0x00, // TLS_NULL_WITH_NULL_NULL 0x00, 0x01, // TLS_RSA_WITH_NULL_MD5 0x00, 0x02, // TLS_RSA_WITH_NULL_SHA 0x00, 0x03, // TLS_RSA_EXPORT_WITH_RC4_40_MD5 0x00, 0x04, // TLS_RSA_WITH_RC4_128_MD5 0x00, 0x05, // TLS_RSA_WITH_RC4_128_SHA 0x00, 0x06, // TLS_RSA_EXPORT_WITH_RC2_CBC_40_MD5 0x00, 0x07, // TLS_RSA_WITH_IDEA_CBC_SHA 0x00, 0x08, // TLS_RSA_EXPORT_WITH_DES40_CBC_SHA 0x00, 0x09, // TLS_RSA_WITH_DES_CBC_SHA 0x00, 0x0A, // TLS_RSA_WITH_3DES_EDE_CBC_SHA 0x00, 0x0B, // TLS_DH_DSS_EXPORT_WITH_DES40_CBC_SHA 0x00, 0x0C, // TLS_DH_DSS_WITH_DES_CBC_SHA 0x00, 0x0D, // TLS_DH_DSS_WITH_3DES_EDE_CBC_SHA 0x00, 0x0E, // TLS_DH_RSA_EXPORT_WITH_DES40_CBC_SHA 0x00, 0x0F, // TLS_DH_RSA_WITH_DES_CBC_SHA 0x00, 0x10, // TLS_DH_RSA_WITH_3DES_EDE_CBC_SHA 0x00, 0x11, // TLS_DHE_DSS_EXPORT_WITH_DES40_CBC_SHA 0x00, 0x12, // TLS_DHE_DSS_WITH_DES_CBC_SHA 0x00, 0x13, // TLS_DHE_DSS_WITH_3DES_EDE_CBC_SHA 0x00, 0x14, // TLS_DHE_RSA_EXPORT_WITH_DES40_CBC_SHA 0x00, 0x15, // TLS_DHE_RSA_WITH_DES_CBC_SHA 0x00, 0x16, // TLS_DHE_RSA_WITH_3DES_EDE_CBC_SHA 0x00, 0x17, // TLS_DH_anon_EXPORT_WITH_RC4_40_MD5 0x00, 0x18, // TLS_DH_anon_WITH_RC4_128_MD5 0x00, 0x19, // TLS_DH_anon_EXPORT_WITH_DES40_CBC_SHA 0x00, 0x1A, // TLS_DH_anon_WITH_DES_CBC_SHA 0x00, 0x1B, // TLS_DH_anon_WITH_3DES_EDE_CBC_SHA 0x00, 0x1E, // TLS_KRB5_WITH_DES_CBC_SHA 0x00, 0x1F, // TLS_KRB5_WITH_3DES_EDE_CBC_SHA 0x00, 0x20, // TLS_KRB5_WITH_RC4_128_SHA 0x00, 0x21, // TLS_KRB5_WITH_IDEA_CBC_SHA 0x00, 0x22, // TLS_KRB5_WITH_DES_CBC_MD5 0x00, 0x23, // TLS_KRB5_WITH_3DES_EDE_CBC_MD5 0x00, 0x24, // TLS_KRB5_WITH_RC4_128_MD5 0x00, 0x25, // TLS_KRB5_WITH_IDEA_CBC_MD5 0x00, 0x26, // TLS_KRB5_EXPORT_WITH_DES_CBC_40_SHA 0x00, 0x27, // TLS_KRB5_EXPORT_WITH_RC2_CBC_40_SHA 0x00, 0x28, // TLS_KRB5_EXPORT_WITH_RC4_40_SHA 0x00, 0x29, // TLS_KRB5_EXPORT_WITH_DES_CBC_40_MD5 0x00, 0x2A, // TLS_KRB5_EXPORT_WITH_RC2_CBC_40_MD5 0x00, 0x2B, // TLS_KRB5_EXPORT_WITH_RC4_40_MD5 0x00, 0x2C, // TLS_PSK_WITH_NULL_SHA 0x00, 0x2D, // TLS_DHE_PSK_WITH_NULL_SHA 
0x00, 0x2E, // TLS_RSA_PSK_WITH_NULL_SHA 0x00, 0x2F, // TLS_RSA_WITH_AES_128_CBC_SHA 0x00, 0x30, // TLS_DH_DSS_WITH_AES_128_CBC_SHA 0x00, 0x31, // TLS_DH_RSA_WITH_AES_128_CBC_SHA 0x00, 0x32, // TLS_DHE_DSS_WITH_AES_128_CBC_SHA 0x00, 0x33, // TLS_DHE_RSA_WITH_AES_128_CBC_SHA 0x00, 0x34, // TLS_DH_anon_WITH_AES_128_CBC_SHA 0x00, 0x35, // TLS_RSA_WITH_AES_256_CBC_SHA 0x00, 0x36, // TLS_DH_DSS_WITH_AES_256_CBC_SHA 0x00, 0x37, // TLS_DH_RSA_WITH_AES_256_CBC_SHA 0x00, 0x38, // TLS_DHE_DSS_WITH_AES_256_CBC_SHA 0x00, 0x39, // TLS_DHE_RSA_WITH_AES_256_CBC_SHA 0x00, 0x3A, // TLS_DH_anon_WITH_AES_256_CBC_SHA 0x00, 0x3B, // TLS_RSA_WITH_NULL_SHA256 0x00, 0x3C, // TLS_RSA_WITH_AES_128_CBC_SHA256 0x00, 0x3D, // TLS_RSA_WITH_AES_256_CBC_SHA256 0x00, 0x3E, // TLS_DH_DSS_WITH_AES_128_CBC_SHA256 0x00, 0x3F, // TLS_DH_RSA_WITH_AES_128_CBC_SHA256 0x00, 0x40, // TLS_DHE_DSS_WITH_AES_128_CBC_SHA256 0x00, 0x41, // TLS_RSA_WITH_CAMELLIA_128_CBC_SHA 0x00, 0x42, // TLS_DH_DSS_WITH_CAMELLIA_128_CBC_SHA 0x00, 0x43, // TLS_DH_RSA_WITH_CAMELLIA_128_CBC_SHA 0x00, 0x44, // TLS_DHE_DSS_WITH_CAMELLIA_128_CBC_SHA 0x00, 0x45, // TLS_DHE_RSA_WITH_CAMELLIA_128_CBC_SHA 0x00, 0x46, // TLS_DH_anon_WITH_CAMELLIA_128_CBC_SHA 0x00, 0x67, // TLS_DHE_RSA_WITH_AES_128_CBC_SHA256 0x00, 0x68, // TLS_DH_DSS_WITH_AES_256_CBC_SHA256 0x00, 0x69, // TLS_DH_RSA_WITH_AES_256_CBC_SHA256 0x00, 0x6A, // TLS_DHE_DSS_WITH_AES_256_CBC_SHA256 0x00, 0x6B, // TLS_DHE_RSA_WITH_AES_256_CBC_SHA256 0x00, 0x6C, // TLS_DH_anon_WITH_AES_128_CBC_SHA256 0x00, 0x6D, // TLS_DH_anon_WITH_AES_256_CBC_SHA256 0x00, (byte) 0x84, // TLS_RSA_WITH_CAMELLIA_256_CBC_SHA 0x00, (byte) 0x85, // TLS_DH_DSS_WITH_CAMELLIA_256_CBC_SHA 0x00, (byte) 0x86, // TLS_DH_RSA_WITH_CAMELLIA_256_CBC_SHA 0x00, (byte) 0x87, // TLS_DHE_DSS_WITH_CAMELLIA_256_CBC_SHA 0x00, (byte) 0x88, // TLS_DHE_RSA_WITH_CAMELLIA_256_CBC_SHA 0x00, (byte) 0x89, // TLS_DH_anon_WITH_CAMELLIA_256_CBC_SHA 0x00, (byte) 0x8A, // TLS_PSK_WITH_RC4_128_SHA 0x00, (byte) 0x8B, // 
TLS_PSK_WITH_3DES_EDE_CBC_SHA 0x00, (byte) 0x8C, // TLS_PSK_WITH_AES_128_CBC_SHA 0x00, (byte) 0x8D, // TLS_PSK_WITH_AES_256_CBC_SHA 0x00, (byte) 0x8E, // TLS_DHE_PSK_WITH_RC4_128_SHA 0x00, (byte) 0x8F, // TLS_DHE_PSK_WITH_3DES_EDE_CBC_SHA 0x00, (byte) 0x90, // TLS_DHE_PSK_WITH_AES_128_CBC_SHA 0x00, (byte) 0x91, // TLS_DHE_PSK_WITH_AES_256_CBC_SHA 0x00, (byte) 0x92, // TLS_RSA_PSK_WITH_RC4_128_SHA 0x00, (byte) 0x93, // TLS_RSA_PSK_WITH_3DES_EDE_CBC_SHA 0x00, (byte) 0x94, // TLS_RSA_PSK_WITH_AES_128_CBC_SHA 0x00, (byte) 0x95, // TLS_RSA_PSK_WITH_AES_256_CBC_SHA 0x00, (byte) 0x96, // TLS_RSA_WITH_SEED_CBC_SHA 0x00, (byte) 0x97, // TLS_DH_DSS_WITH_SEED_CBC_SHA 0x00, (byte) 0x98, // TLS_DH_RSA_WITH_SEED_CBC_SHA 0x00, (byte) 0x99, // TLS_DHE_DSS_WITH_SEED_CBC_SHA 0x00, (byte) 0x9A, // TLS_DHE_RSA_WITH_SEED_CBC_SHA 0x00, (byte) 0x9B, // TLS_DH_anon_WITH_SEED_CBC_SHA 0x00, (byte) 0x9C, // TLS_RSA_WITH_AES_128_GCM_SHA256 0x00, (byte) 0x9D, // TLS_RSA_WITH_AES_256_GCM_SHA384 0x00, (byte) 0x9E, // TLS_DHE_RSA_WITH_AES_128_GCM_SHA256 0x00, (byte) 0x9F, // TLS_DHE_RSA_WITH_AES_256_GCM_SHA384 0x00, (byte) 0xA0, // TLS_DH_RSA_WITH_AES_128_GCM_SHA256 0x00, (byte) 0xA1, // TLS_DH_RSA_WITH_AES_256_GCM_SHA384 0x00, (byte) 0xA2, // TLS_DHE_DSS_WITH_AES_128_GCM_SHA256 0x00, (byte) 0xA3, // TLS_DHE_DSS_WITH_AES_256_GCM_SHA384 0x00, (byte) 0xA4, // TLS_DH_DSS_WITH_AES_128_GCM_SHA256 0x00, (byte) 0xA5, // TLS_DH_DSS_WITH_AES_256_GCM_SHA384 0x00, (byte) 0xA6, // TLS_DH_anon_WITH_AES_128_GCM_SHA256 0x00, (byte) 0xA7, // TLS_DH_anon_WITH_AES_256_GCM_SHA384 0x00, (byte) 0xA8, // TLS_PSK_WITH_AES_128_GCM_SHA256 0x00, (byte) 0xA9, // TLS_PSK_WITH_AES_256_GCM_SHA384 0x00, (byte) 0xAA, // TLS_DHE_PSK_WITH_AES_128_GCM_SHA256 0x00, (byte) 0xAB, // TLS_DHE_PSK_WITH_AES_256_GCM_SHA384 0x00, (byte) 0xAC, // TLS_RSA_PSK_WITH_AES_128_GCM_SHA256 0x00, (byte) 0xAD, // TLS_RSA_PSK_WITH_AES_256_GCM_SHA384 0x00, (byte) 0xAE, // TLS_PSK_WITH_AES_128_CBC_SHA256 0x00, (byte) 0xAF, // 
TLS_PSK_WITH_AES_256_CBC_SHA384 0x00, (byte) 0xB0, // TLS_PSK_WITH_NULL_SHA256 0x00, (byte) 0xB1, // TLS_PSK_WITH_NULL_SHA384 0x00, (byte) 0xB2, // TLS_DHE_PSK_WITH_AES_128_CBC_SHA256 0x00, (byte) 0xB3, // TLS_DHE_PSK_WITH_AES_256_CBC_SHA384 0x00, (byte) 0xB4, // TLS_DHE_PSK_WITH_NULL_SHA256 0x00, (byte) 0xB5, // TLS_DHE_PSK_WITH_NULL_SHA384 0x00, (byte) 0xB6, // TLS_RSA_PSK_WITH_AES_128_CBC_SHA256 0x00, (byte) 0xB7, // TLS_RSA_PSK_WITH_AES_256_CBC_SHA384 0x00, (byte) 0xB8, // TLS_RSA_PSK_WITH_NULL_SHA256 0x00, (byte) 0xB9, // TLS_RSA_PSK_WITH_NULL_SHA384 0x00, (byte) 0xBA, // TLS_RSA_WITH_CAMELLIA_128_CBC_SHA256 0x00, (byte) 0xBB, // TLS_DH_DSS_WITH_CAMELLIA_128_CBC_SHA256 0x00, (byte) 0xBC, // TLS_DH_RSA_WITH_CAMELLIA_128_CBC_SHA256 0x00, (byte) 0xBD, // TLS_DHE_DSS_WITH_CAMELLIA_128_CBC_SHA256 0x00, (byte) 0xBE, // TLS_DHE_RSA_WITH_CAMELLIA_128_CBC_SHA256 0x00, (byte) 0xBF, // TLS_DH_anon_WITH_CAMELLIA_128_CBC_SHA256 0x00, (byte) 0xC0, // TLS_RSA_WITH_CAMELLIA_256_CBC_SHA256 0x00, (byte) 0xC1, // TLS_DH_DSS_WITH_CAMELLIA_256_CBC_SHA256 0x00, (byte) 0xC2, // TLS_DH_RSA_WITH_CAMELLIA_256_CBC_SHA256 0x00, (byte) 0xC3, // TLS_DHE_DSS_WITH_CAMELLIA_256_CBC_SHA256 0x00, (byte) 0xC4, // TLS_DHE_RSA_WITH_CAMELLIA_256_CBC_SHA256 0x00, (byte) 0xC5, // TLS_DH_anon_WITH_CAMELLIA_256_CBC_SHA256 (byte) 0xC0, 0x01, // TLS_ECDH_ECDSA_WITH_NULL_SHA (byte) 0xC0, 0x02, // TLS_ECDH_ECDSA_WITH_RC4_128_SHA (byte) 0xC0, 0x03, // TLS_ECDH_ECDSA_WITH_3DES_EDE_CBC_SHA (byte) 0xC0, 0x04, // TLS_ECDH_ECDSA_WITH_AES_128_CBC_SHA (byte) 0xC0, 0x05, // TLS_ECDH_ECDSA_WITH_AES_256_CBC_SHA (byte) 0xC0, 0x06, // TLS_ECDHE_ECDSA_WITH_NULL_SHA (byte) 0xC0, 0x07, // TLS_ECDHE_ECDSA_WITH_RC4_128_SHA (byte) 0xC0, 0x08, // TLS_ECDHE_ECDSA_WITH_3DES_EDE_CBC_SHA (byte) 0xC0, 0x09, // TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA (byte) 0xC0, 0x0A, // TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA (byte) 0xC0, 0x0B, // TLS_ECDH_RSA_WITH_NULL_SHA (byte) 0xC0, 0x0C, // TLS_ECDH_RSA_WITH_RC4_128_SHA (byte) 0xC0, 0x0D, // 
TLS_ECDH_RSA_WITH_3DES_EDE_CBC_SHA (byte) 0xC0, 0x0E, // TLS_ECDH_RSA_WITH_AES_128_CBC_SHA (byte) 0xC0, 0x0F, // TLS_ECDH_RSA_WITH_AES_256_CBC_SHA (byte) 0xC0, 0x10, // TLS_ECDHE_RSA_WITH_NULL_SHA (byte) 0xC0, 0x11, // TLS_ECDHE_RSA_WITH_RC4_128_SHA (byte) 0xC0, 0x12, // TLS_ECDHE_RSA_WITH_3DES_EDE_CBC_SHA (byte) 0xC0, 0x13, // TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA (byte) 0xC0, 0x14, // TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA (byte) 0xC0, 0x15, // TLS_ECDH_anon_WITH_NULL_SHA (byte) 0xC0, 0x16, // TLS_ECDH_anon_WITH_RC4_128_SHA (byte) 0xC0, 0x17, // TLS_ECDH_anon_WITH_3DES_EDE_CBC_SHA (byte) 0xC0, 0x18, // TLS_ECDH_anon_WITH_AES_128_CBC_SHA (byte) 0xC0, 0x19, // TLS_ECDH_anon_WITH_AES_256_CBC_SHA (byte) 0xC0, 0x1A, // TLS_SRP_SHA_WITH_3DES_EDE_CBC_SHA (byte) 0xC0, 0x1B, // TLS_SRP_SHA_RSA_WITH_3DES_EDE_CBC_SHA (byte) 0xC0, 0x1C, // TLS_SRP_SHA_DSS_WITH_3DES_EDE_CBC_SHA (byte) 0xC0, 0x1D, // TLS_SRP_SHA_WITH_AES_128_CBC_SHA (byte) 0xC0, 0x1E, // TLS_SRP_SHA_RSA_WITH_AES_128_CBC_SHA (byte) 0xC0, 0x1F, // TLS_SRP_SHA_DSS_WITH_AES_128_CBC_SHA (byte) 0xC0, 0x20, // TLS_SRP_SHA_WITH_AES_256_CBC_SHA (byte) 0xC0, 0x21, // TLS_SRP_SHA_RSA_WITH_AES_256_CBC_SHA (byte) 0xC0, 0x22, // TLS_SRP_SHA_DSS_WITH_AES_256_CBC_SHA (byte) 0xC0, 0x23, // TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256 (byte) 0xC0, 0x24, // TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384 (byte) 0xC0, 0x25, // TLS_ECDH_ECDSA_WITH_AES_128_CBC_SHA256 (byte) 0xC0, 0x26, // TLS_ECDH_ECDSA_WITH_AES_256_CBC_SHA384 (byte) 0xC0, 0x27, // TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256 (byte) 0xC0, 0x28, // TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384 (byte) 0xC0, 0x29, // TLS_ECDH_RSA_WITH_AES_128_CBC_SHA256 (byte) 0xC0, 0x2A, // TLS_ECDH_RSA_WITH_AES_256_CBC_SHA384 (byte) 0xC0, 0x2B, // TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256 (byte) 0xC0, 0x2C, // TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384 (byte) 0xC0, 0x2D, // TLS_ECDH_ECDSA_WITH_AES_128_GCM_SHA256 (byte) 0xC0, 0x2E, // TLS_ECDH_ECDSA_WITH_AES_256_GCM_SHA384 (byte) 0xC0, 0x2F, // 
TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256 (byte) 0xC0, 0x30, // TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384 (byte) 0xC0, 0x31, // TLS_ECDH_RSA_WITH_AES_128_GCM_SHA256 (byte) 0xC0, 0x32, // TLS_ECDH_RSA_WITH_AES_256_GCM_SHA384 (byte) 0xC0, 0x33, // TLS_ECDHE_PSK_WITH_RC4_128_SHA (byte) 0xC0, 0x34, // TLS_ECDHE_PSK_WITH_3DES_EDE_CBC_SHA (byte) 0xC0, 0x35, // TLS_ECDHE_PSK_WITH_AES_128_CBC_SHA (byte) 0xC0, 0x36, // TLS_ECDHE_PSK_WITH_AES_256_CBC_SHA (byte) 0xC0, 0x37, // TLS_ECDHE_PSK_WITH_AES_128_CBC_SHA256 (byte) 0xC0, 0x38, // TLS_ECDHE_PSK_WITH_AES_256_CBC_SHA384 (byte) 0xC0, 0x39, // TLS_ECDHE_PSK_WITH_NULL_SHA (byte) 0xC0, 0x3A, // TLS_ECDHE_PSK_WITH_NULL_SHA256 (byte) 0xC0, 0x3B, // TLS_ECDHE_PSK_WITH_NULL_SHA384 (byte) 0xC0, 0x3C, // TLS_RSA_WITH_ARIA_128_CBC_SHA256 (byte) 0xC0, 0x3D, // TLS_RSA_WITH_ARIA_256_CBC_SHA384 (byte) 0xC0, 0x3E, // TLS_DH_DSS_WITH_ARIA_128_CBC_SHA256 (byte) 0xC0, 0x3F, // TLS_DH_DSS_WITH_ARIA_256_CBC_SHA384 (byte) 0xC0, 0x40, // TLS_DH_RSA_WITH_ARIA_128_CBC_SHA256 (byte) 0xC0, 0x41, // TLS_DH_RSA_WITH_ARIA_256_CBC_SHA384 (byte) 0xC0, 0x42, // TLS_DHE_DSS_WITH_ARIA_128_CBC_SHA256 (byte) 0xC0, 0x43, // TLS_DHE_DSS_WITH_ARIA_256_CBC_SHA384 (byte) 0xC0, 0x44, // TLS_DHE_RSA_WITH_ARIA_128_CBC_SHA256 (byte) 0xC0, 0x45, // TLS_DHE_RSA_WITH_ARIA_256_CBC_SHA384 (byte) 0xC0, 0x46, // TLS_DH_anon_WITH_ARIA_128_CBC_SHA256 (byte) 0xC0, 0x47, // TLS_DH_anon_WITH_ARIA_256_CBC_SHA384 (byte) 0xC0, 0x48, // TLS_ECDHE_ECDSA_WITH_ARIA_128_CBC_SHA256 (byte) 0xC0, 0x49, // TLS_ECDHE_ECDSA_WITH_ARIA_256_CBC_SHA384 (byte) 0xC0, 0x4A, // TLS_ECDH_ECDSA_WITH_ARIA_128_CBC_SHA256 (byte) 0xC0, 0x4B, // TLS_ECDH_ECDSA_WITH_ARIA_256_CBC_SHA384 (byte) 0xC0, 0x4C, // TLS_ECDHE_RSA_WITH_ARIA_128_CBC_SHA256 (byte) 0xC0, 0x4D, // TLS_ECDHE_RSA_WITH_ARIA_256_CBC_SHA384 (byte) 0xC0, 0x4E, // TLS_ECDH_RSA_WITH_ARIA_128_CBC_SHA256 (byte) 0xC0, 0x4F, // TLS_ECDH_RSA_WITH_ARIA_256_CBC_SHA384 (byte) 0xC0, 0x50, // TLS_RSA_WITH_ARIA_128_GCM_SHA256 (byte) 0xC0, 0x51, // 
TLS_RSA_WITH_ARIA_256_GCM_SHA384 (byte) 0xC0, 0x52, // TLS_DHE_RSA_WITH_ARIA_128_GCM_SHA256 (byte) 0xC0, 0x53, // TLS_DHE_RSA_WITH_ARIA_256_GCM_SHA384 (byte) 0xC0, 0x54, // TLS_DH_RSA_WITH_ARIA_128_GCM_SHA256 (byte) 0xC0, 0x55, // TLS_DH_RSA_WITH_ARIA_256_GCM_SHA384 (byte) 0xC0, 0x56, // TLS_DHE_DSS_WITH_ARIA_128_GCM_SHA256 (byte) 0xC0, 0x57, // TLS_DHE_DSS_WITH_ARIA_256_GCM_SHA384 (byte) 0xC0, 0x58, // TLS_DH_DSS_WITH_ARIA_128_GCM_SHA256 (byte) 0xC0, 0x59, // TLS_DH_DSS_WITH_ARIA_256_GCM_SHA384 (byte) 0xC0, 0x5A, // TLS_DH_anon_WITH_ARIA_128_GCM_SHA256 (byte) 0xC0, 0x5B, // TLS_DH_anon_WITH_ARIA_256_GCM_SHA384 (byte) 0xC0, 0x5C, // TLS_ECDHE_ECDSA_WITH_ARIA_128_GCM_SHA256 (byte) 0xC0, 0x5D, // TLS_ECDHE_ECDSA_WITH_ARIA_256_GCM_SHA384 (byte) 0xC0, 0x5E, // TLS_ECDH_ECDSA_WITH_ARIA_128_GCM_SHA256 (byte) 0xC0, 0x5F, // TLS_ECDH_ECDSA_WITH_ARIA_256_GCM_SHA384 (byte) 0xC0, 0x60, // TLS_ECDHE_RSA_WITH_ARIA_128_GCM_SHA256 (byte) 0xC0, 0x61, // TLS_ECDHE_RSA_WITH_ARIA_256_GCM_SHA384 (byte) 0xC0, 0x62, // TLS_ECDH_RSA_WITH_ARIA_128_GCM_SHA256 (byte) 0xC0, 0x63, // TLS_ECDH_RSA_WITH_ARIA_256_GCM_SHA384 (byte) 0xC0, 0x64, // TLS_PSK_WITH_ARIA_128_CBC_SHA256 (byte) 0xC0, 0x65, // TLS_PSK_WITH_ARIA_256_CBC_SHA384 (byte) 0xC0, 0x66, // TLS_DHE_PSK_WITH_ARIA_128_CBC_SHA256 (byte) 0xC0, 0x67, // TLS_DHE_PSK_WITH_ARIA_256_CBC_SHA384 (byte) 0xC0, 0x68, // TLS_RSA_PSK_WITH_ARIA_128_CBC_SHA256 (byte) 0xC0, 0x69, // TLS_RSA_PSK_WITH_ARIA_256_CBC_SHA384 (byte) 0xC0, 0x6A, // TLS_PSK_WITH_ARIA_128_GCM_SHA256 (byte) 0xC0, 0x6B, // TLS_PSK_WITH_ARIA_256_GCM_SHA384 (byte) 0xC0, 0x6C, // TLS_DHE_PSK_WITH_ARIA_128_GCM_SHA256 (byte) 0xC0, 0x6D, // TLS_DHE_PSK_WITH_ARIA_256_GCM_SHA384 (byte) 0xC0, 0x6E, // TLS_RSA_PSK_WITH_ARIA_128_GCM_SHA256 (byte) 0xC0, 0x6F, // TLS_RSA_PSK_WITH_ARIA_256_GCM_SHA384 (byte) 0xC0, 0x70, // TLS_ECDHE_PSK_WITH_ARIA_128_CBC_SHA256 (byte) 0xC0, 0x71, // TLS_ECDHE_PSK_WITH_ARIA_256_CBC_SHA384 (byte) 0xC0, 0x72, // TLS_ECDHE_ECDSA_WITH_CAMELLIA_128_CBC_SHA256 (byte) 
0xC0, 0x73, // TLS_ECDHE_ECDSA_WITH_CAMELLIA_256_CBC_SHA384 (byte) 0xC0, 0x74, // TLS_ECDH_ECDSA_WITH_CAMELLIA_128_CBC_SHA256 (byte) 0xC0, 0x75, // TLS_ECDH_ECDSA_WITH_CAMELLIA_256_CBC_SHA384 (byte) 0xC0, 0x76, // TLS_ECDHE_RSA_WITH_CAMELLIA_128_CBC_SHA256 (byte) 0xC0, 0x77, // TLS_ECDHE_RSA_WITH_CAMELLIA_256_CBC_SHA384 (byte) 0xC0, 0x78, // TLS_ECDH_RSA_WITH_CAMELLIA_128_CBC_SHA256 (byte) 0xC0, 0x79, // TLS_ECDH_RSA_WITH_CAMELLIA_256_CBC_SHA384 (byte) 0xC0, 0x7A, // TLS_RSA_WITH_CAMELLIA_128_GCM_SHA256 (byte) 0xC0, 0x7B, // TLS_RSA_WITH_CAMELLIA_256_GCM_SHA384 (byte) 0xC0, 0x7C, // TLS_DHE_RSA_WITH_CAMELLIA_128_GCM_SHA256 (byte) 0xC0, 0x7D, // TLS_DHE_RSA_WITH_CAMELLIA_256_GCM_SHA384 (byte) 0xC0, 0x7E, // TLS_DH_RSA_WITH_CAMELLIA_128_GCM_SHA256 (byte) 0xC0, 0x7F, // TLS_DH_RSA_WITH_CAMELLIA_256_GCM_SHA384 (byte) 0xC0, (byte) 0x80, // TLS_DHE_DSS_WITH_CAMELLIA_128_GCM_SHA256 (byte) 0xC0, (byte) 0x81, // TLS_DHE_DSS_WITH_CAMELLIA_256_GCM_SHA384 (byte) 0xC0, (byte) 0x82, // TLS_DH_DSS_WITH_CAMELLIA_128_GCM_SHA256 (byte) 0xC0, (byte) 0x83, // TLS_DH_DSS_WITH_CAMELLIA_256_GCM_SHA384 (byte) 0xC0, (byte) 0x84, // TLS_DH_anon_WITH_CAMELLIA_128_GCM_SHA256 (byte) 0xC0, (byte) 0x85, // TLS_DH_anon_WITH_CAMELLIA_256_GCM_SHA384 (byte) 0xC0, (byte) 0x86, // TLS_ECDHE_ECDSA_WITH_CAMELLIA_128_GCM_SHA256 (byte) 0xC0, (byte) 0x87, // TLS_ECDHE_ECDSA_WITH_CAMELLIA_256_GCM_SHA384 (byte) 0xC0, (byte) 0x88, // TLS_ECDH_ECDSA_WITH_CAMELLIA_128_GCM_SHA256 (byte) 0xC0, (byte) 0x89, // TLS_ECDH_ECDSA_WITH_CAMELLIA_256_GCM_SHA384 (byte) 0xC0, (byte) 0x8A, // TLS_ECDHE_RSA_WITH_CAMELLIA_128_GCM_SHA256 (byte) 0xC0, (byte) 0x8B, // TLS_ECDHE_RSA_WITH_CAMELLIA_256_GCM_SHA384 (byte) 0xC0, (byte) 0x8C, // TLS_ECDH_RSA_WITH_CAMELLIA_128_GCM_SHA256 (byte) 0xC0, (byte) 0x8D, // TLS_ECDH_RSA_WITH_CAMELLIA_256_GCM_SHA384 (byte) 0xC0, (byte) 0x8E, // TLS_PSK_WITH_CAMELLIA_128_GCM_SHA256 (byte) 0xC0, (byte) 0x8F, // TLS_PSK_WITH_CAMELLIA_256_GCM_SHA384 (byte) 0xC0, (byte) 0x90, // 
TLS_DHE_PSK_WITH_CAMELLIA_128_GCM_SHA256 (byte) 0xC0, (byte) 0x91, // TLS_DHE_PSK_WITH_CAMELLIA_256_GCM_SHA384 (byte) 0xC0, (byte) 0x92, // TLS_RSA_PSK_WITH_CAMELLIA_128_GCM_SHA256 (byte) 0xC0, (byte) 0x93, // TLS_RSA_PSK_WITH_CAMELLIA_256_GCM_SHA384 (byte) 0xC0, (byte) 0x94, // TLS_PSK_WITH_CAMELLIA_128_CBC_SHA256 (byte) 0xC0, (byte) 0x95, // TLS_PSK_WITH_CAMELLIA_256_CBC_SHA384 (byte) 0xC0, (byte) 0x96, // TLS_DHE_PSK_WITH_CAMELLIA_128_CBC_SHA256 (byte) 0xC0, (byte) 0x97, // TLS_DHE_PSK_WITH_CAMELLIA_256_CBC_SHA384 (byte) 0xC0, (byte) 0x98, // TLS_RSA_PSK_WITH_CAMELLIA_128_CBC_SHA256 (byte) 0xC0, (byte) 0x99, // TLS_RSA_PSK_WITH_CAMELLIA_256_CBC_SHA384 (byte) 0xC0, (byte) 0x9A, // TLS_ECDHE_PSK_WITH_CAMELLIA_128_CBC_SHA256 (byte) 0xC0, (byte) 0x9B, // TLS_ECDHE_PSK_WITH_CAMELLIA_256_CBC_SHA384 (byte) 0xC0, (byte) 0x9C, // TLS_RSA_WITH_AES_128_CCM (byte) 0xC0, (byte) 0x9D, // TLS_RSA_WITH_AES_256_CCM (byte) 0xC0, (byte) 0x9E, // TLS_DHE_RSA_WITH_AES_128_CCM (byte) 0xC0, (byte) 0x9F, // TLS_DHE_RSA_WITH_AES_256_CCM (byte) 0xC0, (byte) 0xA0, // TLS_RSA_WITH_AES_128_CCM_8 (byte) 0xC0, (byte) 0xA1, // TLS_RSA_WITH_AES_256_CCM_8 (byte) 0xC0, (byte) 0xA2, // TLS_DHE_RSA_WITH_AES_128_CCM_8 (byte) 0xC0, (byte) 0xA3, // TLS_DHE_RSA_WITH_AES_256_CCM_8 (byte) 0xC0, (byte) 0xA4, // TLS_PSK_WITH_AES_128_CCM (byte) 0xC0, (byte) 0xA5, // TLS_PSK_WITH_AES_256_CCM (byte) 0xC0, (byte) 0xA6, // TLS_DHE_PSK_WITH_AES_128_CCM (byte) 0xC0, (byte) 0xA7, // TLS_DHE_PSK_WITH_AES_256_CCM (byte) 0xC0, (byte) 0xA8, // TLS_PSK_WITH_AES_128_CCM_8 (byte) 0xC0, (byte) 0xA9, // TLS_PSK_WITH_AES_256_CCM_8 (byte) 0xC0, (byte) 0xAA, // TLS_PSK_DHE_WITH_AES_128_CCM_8 (byte) 0xC0, (byte) 0xAB, // TLS_PSK_DHE_WITH_AES_256_CCM_8 (byte) 0xC0, (byte) 0xAC, // TLS_ECDHE_ECDSA_WITH_AES_128_CCM (byte) 0xC0, (byte) 0xAD, // TLS_ECDHE_ECDSA_WITH_AES_256_CCM (byte) 0xC0, (byte) 0xAE, // TLS_ECDHE_ECDSA_WITH_AES_128_CCM_8 (byte) 0xC0, (byte) 0xAF, // TLS_ECDHE_ECDSA_WITH_AES_256_CCM_8 0x00, (byte) 0xFF, // 
TLS_EMPTY_RENEGOTIATION_INFO_SCSV // compression methods length, etc (byte) 0x01, (byte) 0x00, (byte) 0x00, (byte) 0x49, (byte) 0x00, (byte) 0x0b, (byte) 0x00, (byte) 0x04, (byte) 0x03, (byte) 0x00, (byte) 0x01, (byte) 0x02, (byte) 0x00, (byte) 0x0a, (byte) 0x00, (byte) 0x34, (byte) 0x00, (byte) 0x32, (byte) 0x00, (byte) 0x0e, (byte) 0x00, (byte) 0x0d, (byte) 0x00, (byte) 0x19, (byte) 0x00, (byte) 0x0b, (byte) 0x00, (byte) 0x0c, (byte) 0x00, (byte) 0x18, (byte) 0x00, (byte) 0x09, (byte) 0x00, (byte) 0x0a, (byte) 0x00, (byte) 0x16, (byte) 0x00, (byte) 0x17, (byte) 0x00, (byte) 0x08, (byte) 0x00, (byte) 0x06, (byte) 0x00, (byte) 0x07, (byte) 0x00, (byte) 0x14, (byte) 0x00, (byte) 0x15, (byte) 0x00, (byte) 0x04, (byte) 0x00, (byte) 0x05, (byte) 0x00, (byte) 0x12, (byte) 0x00, (byte) 0x13, (byte) 0x00, (byte) 0x01, (byte) 0x00, (byte) 0x02, (byte) 0x00, (byte) 0x03, (byte) 0x00, (byte) 0x0f, (byte) 0x00, (byte) 0x10, (byte) 0x00, (byte) 0x11, (byte) 0x00, (byte) 0x23, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x0f, (byte) 0x00, (byte) 0x01, (byte) 0x01 }; /** * Heartbeat Record Type, for both requests and responses. aka "heartbeat content type" in * rfc6520 0x18 = 24 in decimal */ static final byte heartbeatRecordByte = 0x18; /** the beartbeat request that we will send to */ static final byte heartbeatBuffer[] = { 0x00, 0x03, // data Length = 0x00 0x03 = 3 in decimal. This length is important later... 0x01, // heartbeat message type. 0x01 = heartbeat request, 0x02 = heartbeat response 0x40, 0x00 // payload length to be sent back by the server. 0x40 0x00 = 16384 in decimal // Note: No actual payload sent! // Note: No actual padding sent! 
};

/** returns the plugin id */
@Override
public int getId() {
    return 20015;
}

/** returns the name of the plugin */
@Override
public String getName() {
    return Constant.messages.getString(MESSAGE_PREFIX + "name");
}

/** returns the description of the plugin */
@Override
public String getDescription() {
    return Constant.messages.getString(MESSAGE_PREFIX + "desc");
}

/** returns the scanner category (information gathering) */
@Override
public int getCategory() {
    return Category.INFO_GATHER;
}

/** returns the solution text for the alert */
@Override
public String getSolution() {
    return Constant.messages.getString(MESSAGE_PREFIX + "soln");
}

/** returns the references for the alert */
@Override
public String getReference() {
    return Constant.messages.getString(MESSAGE_PREFIX + "refs");
}

/**
 * Derives the per-connection timeout from the configured attack strength:
 * stronger scan settings are allowed to wait longer for the server.
 */
@Override
public void init() {
    switch (this.getAttackStrength()) {
        case LOW:
            this.timeoutMs = 1000; // 1 second
            break;
        case MEDIUM:
            this.timeoutMs = 3000; // 3 seconds
            break;
        case HIGH:
            this.timeoutMs = 6000; // 6 seconds
            break;
        case INSANE:
            this.timeoutMs = 12000; // 12 seconds
            break;
        default:
    }
}

/** scans the node for the vulnerability */
@Override
public void scan() {
    try {
        // get the network details for the attack
        String hostname = this.getBaseMsg().getRequestHeader().getURI().getHost();
        int portnumber = this.getBaseMsg().getRequestHeader().getURI().getPort();
        // use the default HTTPS port, if the URI did not contain an explicit port number
        // or if the URL was via HTTP, rather than via HTTPS (yes, we will still check it)
        if (portnumber == -1 || portnumber == 80) portnumber = 443;

        if (log.isDebugEnabled())
            log.debug("About to look for HeartBleed on " + hostname + ":" + portnumber);

        // try each of the known SSL/TLS protocol variants in turn
        for (int tlsIndex = 0; tlsIndex < tlsBuffers.length; tlsIndex++) {
            if (log.isDebugEnabled())
                log.debug(
                        "-------------------- Trying "
                                + tlsNames[tlsIndex]
                                + " --------------------");

            Socket socket = null;
            OutputStream os = null;
            InputStream is = null;
            try {
                // establish a raw socket connection, without proxying it (the request will
                // definitely not appear in Zap's history tab)
                socket = new Socket();
                try {
                    socket.connect(new InetSocketAddress(hostname, portnumber), this.timeoutMs);
                    if (log.isDebugEnabled()) log.debug("Connected");
                    // set a timeout on the socket for reads..
                    socket.setSoTimeout(this.timeoutMs);
                } catch (Exception e) {
                    // we cannot connect at all.. no point in continuing.
                    log.debug(
                            "Cannot establish a socket connection to "
                                    + hostname
                                    + ":"
                                    + portnumber
                                    + " for HeartBleed");
                    return;
                }

                // get the streams
                os = socket.getOutputStream();
                is = socket.getInputStream();

                // send the client Hello
                // prepare some length info - 3 byte message length, and 2 byte record length
                int messagelen = tlsBuffers[tlsIndex].length + helloBuffer.length;
                int recordlen = messagelen + 4;
                byte[] messageLenBytes = new byte[3];
                messageLenBytes[0] = (byte) (messagelen >> 16);
                messageLenBytes[1] = (byte) (messagelen >> 8);
                messageLenBytes[2] = (byte) (messagelen);
                byte[] recordLenBytes = new byte[2];
                recordLenBytes[0] = (byte) (recordlen >> 8);
                recordLenBytes[1] = (byte) (recordlen);

                // now write the Hello message
                os.write(handshakeRecordByte);
                os.write(tlsBuffers[tlsIndex]);
                os.write(recordLenBytes);
                os.write(handShakeClientHello);
                os.write(messageLenBytes);
                os.write(tlsBuffers[tlsIndex]);
                os.write(helloBuffer);
                if (log.isDebugEnabled()) log.debug("Wrote the Client Hello");
                getParent().notifyNewMessage(this);

                // read through messages until we get a handshake message back from the server
                // (0x0E = Server Hello Done)
                try {
                    while (true) {
                        SSLRecord sslRecord = recvmsg(is, this.timeoutMs);
                        if (sslRecord.typ == handshakeRecordByte
                                && sslRecord.len > 0
                                && sslRecord.pay[0] == 0x0E) {
                            break;
                        }
                        if (log.isDebugEnabled())
                            log.debug(
                                    "Got a reponse from the server, but it was not a server hello 'Done' message");
                    }
                } catch (SocketTimeoutException es) {
                    throw new IOException(
                            "The timeout was exceeded while attempting to read the Server Hello");
                } catch (IOException e) {
                    // if we do not get back a server hello, it is because
                    // the server does not support the SSL/TLS variant we passed it.
                    throw new IOException(
                            tlsNames[tlsIndex]
                                    + " is not supported by the server, or a common cipher suite could not be agreed");
                }

                if (log.isDebugEnabled()) log.debug("Got the Server Hello");

                // all the SSL initialisation is complete. So is the SSL server vulnerable?
                boolean vulnerable =
                        isVulnerable(
                                is,
                                os,
                                this.timeoutMs,
                                tlsBuffers[tlsIndex]); // put a timeout on the check for each of
                // the TLS variants
                if (vulnerable) {
                    if (log.isDebugEnabled()) log.debug("Vulnerable");
                    // bingo!
                    String extraInfo =
                            Constant.messages.getString(
                                    MESSAGE_PREFIX + "extrainfo", tlsNames[tlsIndex]);
                    bingo(
                            getRisk(),
                            Alert.CONFIDENCE_MEDIUM,
                            getName(),
                            getDescription(),
                            getBaseMsg().getRequestHeader().getURI().getURI(),
                            "", // param
                            "", // attack
                            extraInfo,
                            getSolution(),
                            "", // evidence
                            getBaseMsg());
                }
            } catch (Exception e) {
                // this particular variant is not vulnerable. skip to the next one..
                if (log.isDebugEnabled())
                    log.debug(
                            "The SSL server does not appear to be vulnerable, using "
                                    + tlsNames[tlsIndex]
                                    + ": "
                                    + e.getMessage());
            } finally {
                if (log.isDebugEnabled()) log.debug("Tidying up");
                // Best-effort cleanup. The original code let close() IOExceptions escape the
                // finally block, which aborted the loop over the remaining TLS variants (and
                // needed the outer catch to mop up); it also closed the streams twice on the
                // success path. closeQuietly() makes cleanup failure non-fatal.
                closeQuietly(is);
                closeQuietly(os);
                closeQuietly(socket);
            }
        }
    } catch (Exception e) {
        // guards against any unexpected error (e.g. URI parsing) so the scan fails gracefully
        log.error("Error scanning a node for HeartBleed: " + e.getMessage(), e);
    }
}

/**
 * Closes the given resource, swallowing any IOException, so that cleanup of one
 * connection cannot abort the processing of subsequent TLS variants. Null-safe.
 *
 * @param resource the stream/socket to close; may be null
 */
private static void closeQuietly(java.io.Closeable resource) {
    if (resource != null) {
        try {
            resource.close();
        } catch (IOException ignored) {
            // best-effort close; nothing useful can be done here
        }
    }
}

@Override
public int getRisk() {
    return Alert.RISK_HIGH;
}

@Override
public int getCweId() {
    return 119; // CWE 119: Failure to Constrain Operations within the Bounds of a Memory Buffer
}

@Override
public int getWascId() {
    return 20; // WASC-20: Improper Input Handling
}

/**
 * determines if the SSL server behind the streams is vulnerable based on its response to
 * malformed heartbeat message
 *
 * @param is the stream from which the server's responses are read
 * @param os the stream to which the malformed heartbeat is written
 * @param timeoutMs the overall time budget for the check, in milliseconds
 * @param tlsVersionBuffer the 2-byte SSL/TLS version to place in the heartbeat record
 * @return true or false
 * @throws IOException
 */
boolean isVulnerable(InputStream is, OutputStream os, int timeoutMs, byte[] tlsVersionBuffer)
        throws IOException {
    // send the heartbeat request first, then start the clock ticking. tick tock, tick tock.
    os.write(heartbeatRecordByte);
    os.write(tlsVersionBuffer);
    os.write(heartbeatBuffer);
    getParent().notifyNewMessage(this);
    if (log.isDebugEnabled()) log.debug("Wrote the dodgy heartbeat message");

    long startTime = System.currentTimeMillis();
    long timeoutTime = startTime + timeoutMs;
    long currentTime = startTime;

    // poll for responses until the time budget is spent
    // (the original condition carried a redundant "true &&")
    while (currentTime <= timeoutTime) {
        SSLRecord sslRecord = recvmsg(is, timeoutMs);
        if (log.isDebugEnabled())
            log.debug(
                    "Got a message of type 0x"
                            + Integer.toHexString(sslRecord.typ)
                            + " from the server: "
                            + Hex.encodeHexString(sslRecord.pay));
        if (sslRecord.typ == heartbeatRecordByte) {
            // received the heartbeat response
            if (sslRecord.len > 3) {
                if (log.isDebugEnabled())
                    log.debug("VULNERABLE. Got more data back than what we sent in");
                // Got > 3 bytes back. Vulnerable.
                return true;
            } else {
                // Got <=3 bytes back. NOT Vulnerable.
                if (log.isDebugEnabled())
                    log.debug("NOT VULNERABLE. Got back <=3 bytes. Boo hoo.");
                return false;
            }
        }
        // server returned a fatal alert. we will just ignore warning alerts for now and hope
        // for the best
        if (sslRecord.typ == alertRecordByte) {
            if (sslRecord.pay[0] == 0x02) {
                // Fatal alert
                if (log.isDebugEnabled()) {
                    log.debug("NOT VULNERABLE. We got a fatal alert back from the server");
                    log.debug("Alert Payload: 0x" + Hex.encodeHexString(sslRecord.pay));
                    log.debug("Alert reason: " + alertDescription(sslRecord.pay[1]));
                }
                return false;
            } else {
                // warning alert
                if (log.isDebugEnabled()) {
                    log.debug("Ignoring a warning alert from the server");
                }
            }
        }
        currentTime = System.currentTimeMillis();
    }
    // timed out.. and we haven't received a response to the heartbeat.. not vulnerable
    if (log.isDebugEnabled())
        log.debug("NOT VULNERABLE. No suitable heartbeat response within the timeout");
    return false;
}

/**
 * Maps a TLS alert description code (second byte of the alert payload) to its
 * symbolic name, per the TLS AlertDescription registry.
 *
 * @param alertCode the alert description byte received from the server
 * @return the symbolic name, or null if the code is not recognised
 */
private static String alertDescription(byte alertCode) {
    String msg = null;
    switch (alertCode) {
        case 0:
            msg = "close_notify";
            break;
        case 10:
            msg = "unexpected_message";
            break;
        case 20:
            msg = "bad_record_mac";
            break;
        case 21:
            msg = "decryption_failed_RESERVED";
            break;
        case 22:
            msg = "record_overflow";
            break;
        case 30:
            msg = "decompression_failure";
            break;
        case 40:
            msg = "handshake_failure";
            break;
        case 41:
            msg = "no_certificate_RESERVED";
            break;
        case 42:
            msg = "bad_certificate";
            break;
        case 43:
            msg = "unsupported_certificate";
            break;
        case 44:
            msg = "certificate_revoked";
            break;
        case 45:
            msg = "certificate_expired";
            break;
        case 46:
            msg = "certificate_unknown";
            break;
        case 47:
            msg = "illegal_parameter";
            break;
        case 48:
            msg = "unknown_ca";
            break;
        case 49:
            msg = "access_denied";
            break;
        case 50:
            msg = "decode_error";
            break;
        case 51:
            msg = "decrypt_error";
            break;
        case 60:
            msg = "export_restriction_RESERVED";
            break;
        case 70:
            msg = "protocol_version";
            break;
        case 71:
            msg = "insufficient_security";
            break;
        case 80:
            msg = "internal_error";
            break;
        case 90:
            msg = "user_canceled";
            break;
        case 100:
            msg = "no_renegotiation";
            break;
        case 110:
            msg = "unsupported_extension";
            break;
    }
    return msg;
}

/**
 * reads an SSL message from the inputstream
 *
 * @param is the stream to read the record from
 * @param timeoutMs the timeout in milliseconds
 * @return the parsed SSL record (type, version, length, payload)
 * @throws IOException if the header or payload cannot be read in time
 */
static SSLRecord recvmsg(InputStream is, int timeoutMs) throws IOException {
    byte[] messageHeader = recvall(is, 5, timeoutMs);
    // convert the 5 bytes to (big endian) 1 unsigned byte type, 2 unsigned bytes ver, 2
    // unsigned bytes len
    ByteBuffer bb = ByteBuffer.wrap(messageHeader);
    byte type = bb.get();
    short ver = bb.getShort();
    short len = bb.getShort();
    // read the specified number of bytes from the inputstream
    byte[] messagePayload = recvall(is, len, timeoutMs);
    return new SSLRecord(type, ver, len, messagePayload);
}

/**
 * reads the requested number of bytes from the inputstream, blocking if necessary
 *
 * @param s the inputstream from which to read
 * @param length the number of bytes to read
 * @param timeoutMs the overall time budget for the read, in milliseconds
 * @return a byte array containing the requested number of bytes from the inputstream
 * @throws IOException on end-of-stream, or if the bytes could not be read in time
 */
static byte[] recvall(InputStream s, int length, int timeoutMs) throws IOException {
    long startTime = System.currentTimeMillis();
    long timeoutTime = startTime + timeoutMs;
    long currentTime = startTime;
    byte[] buffer = new byte[length];
    int remainingtoread = length;
    while (remainingtoread > 0 && currentTime <= timeoutTime) {
        int read = s.read(buffer, length - remainingtoread, remainingtoread);
        if (read != -1) remainingtoread -= read;
        else
            throw new IOException(
                    "Failed to read "
                            + length
                            + " bytes. Read "
                            + (length - remainingtoread)
                            + " bytes");
        currentTime = System.currentTimeMillis();
    }
    // did we time out? Check the unread count, not the clock: the original compared
    // currentTime >= timeoutTime, which threw a timeout even when the final read had
    // actually completed successfully just as the budget expired.
    if (remainingtoread > 0)
        throw new IOException(
                "Failed to read "
                        + length
                        + " bytes in "
                        + timeoutMs
                        + "ms due to a timeout. Read "
                        + (length - remainingtoread)
                        + " bytes");
    return buffer;
}

/**
 * a helper class used to pass internal SSL details around
 *
 * @author 70pointer@gmail.com
 */
public static class SSLRecord {
    public byte typ; // record content type
    public short ver; // protocol version from the record header
    public short len; // declared payload length
    public byte[] pay; // payload bytes actually read

    SSLRecord(byte typ, short ver, short len, byte[] pay) {
        this.typ = typ;
        this.ver = ver;
        this.len = len;
        this.pay = pay;
    }
}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.axis2.addressing;

import org.apache.axis2.AxisFault;
import org.apache.axis2.Constants;
import org.apache.axis2.context.MessageContext;
import org.apache.axis2.description.AxisDescription;
import org.apache.axis2.description.AxisOperation;
import org.apache.axis2.description.Parameter;
import org.apache.axis2.util.LoggingControl;
import org.apache.axis2.util.Utils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import java.util.Map;

/**
 * Static utilities for interrogating and configuring the WS-Addressing related
 * state of a {@link MessageContext}, {@link AxisOperation} or {@link AxisDescription}:
 * whether replies/faults may flow back synchronously, whether they are redirected
 * to a non-anonymous endpoint, and the addressing-requirement / invocation-pattern
 * parameters.
 */
public class AddressingHelper {

    private static final Log log = LogFactory.getLog(AddressingHelper.class);

    /**
     * Returns true if the ReplyTo address matches one of the supported
     * anonymous urls. A missing ReplyTo is treated as anonymous, per the Final
     * spec; the AddressingInHandler should have set a non-null ReplyTo in the
     * 2004/08 case to preserve the different semantics there (per AXIS2-885).
     * Per the WS-Addressing Metadata spec, the none URI is accepted as well.
     *
     * @param messageContext the message whose ReplyTo is examined
     */
    public static boolean isSyncReplyAllowed(MessageContext messageContext) {
        final EndpointReference replyTo = messageContext.getReplyTo();
        if (replyTo != null) {
            return replyTo.hasAnonymousAddress() || replyTo.hasNoneAddress();
        }
        if (LoggingControl.debugLoggingAllowed && log.isDebugEnabled()) {
            log.debug(messageContext.getLogIDString()
                    + " isSyncReplyAllowed: ReplyTo is null. Returning true");
        }
        return true;
    }

    /**
     * Returns true if the FaultTo address matches one of the supported
     * anonymous urls. When FaultTo is unset, the decision falls back to the
     * ReplyTo, per the spec. Per the WS-Addressing Metadata spec, the none URI
     * is accepted as well.
     *
     * @param messageContext the message whose FaultTo is examined
     * @see #isSyncReplyAllowed(org.apache.axis2.context.MessageContext)
     */
    public static boolean isSyncFaultAllowed(MessageContext messageContext) {
        final EndpointReference faultTo = messageContext.getFaultTo();
        if (faultTo != null) {
            return faultTo.hasAnonymousAddress() || faultTo.hasNoneAddress();
        }
        if (LoggingControl.debugLoggingAllowed && log.isDebugEnabled()) {
            log.debug(messageContext.getLogIDString()
                    + " isSyncFaultAllowed: FaultTo is null. Returning isSyncReplyAllowed");
        }
        return isSyncReplyAllowed(messageContext);
    }

    /**
     * Returns true if the ReplyTo address does NOT match one of the supported
     * anonymous urls, i.e. the reply is sent elsewhere. A missing ReplyTo is
     * treated as anonymous, per the Final spec; the AddressingInHandler should
     * have set a non-null ReplyTo in the 2004/08 case (per AXIS2-885).
     *
     * @param messageContext the message whose ReplyTo is examined
     */
    public static boolean isReplyRedirected(MessageContext messageContext) {
        final EndpointReference replyTo = messageContext.getReplyTo();
        if (replyTo != null) {
            return !replyTo.hasAnonymousAddress();
        }
        if (LoggingControl.debugLoggingAllowed && log.isDebugEnabled()) {
            log.debug(messageContext.getLogIDString()
                    + " isReplyRedirected: ReplyTo is null. Returning false");
        }
        return false;
    }

    /**
     * Returns true if the FaultTo address does NOT match one of the supported
     * anonymous urls. When FaultTo is unset, the decision falls back to the
     * ReplyTo, per the spec.
     *
     * @param messageContext the message whose FaultTo is examined
     * @see #isReplyRedirected(org.apache.axis2.context.MessageContext)
     */
    public static boolean isFaultRedirected(MessageContext messageContext) {
        final EndpointReference faultTo = messageContext.getFaultTo();
        if (faultTo != null) {
            return !faultTo.hasAnonymousAddress();
        }
        if (LoggingControl.debugLoggingAllowed && log.isDebugEnabled()) {
            log.debug(messageContext.getLogIDString()
                    + " isFaultRedirected: FaultTo is null. Returning isReplyRedirected");
        }
        return isReplyRedirected(messageContext);
    }

    /**
     * If the inbound FaultTo header was itself invalid and caused the fault, the fault
     * must not be sent to it.
     *
     * @return true if the fault should be sent to the FaultTo
     */
    public static boolean shouldSendFaultToFaultTo(MessageContext messageContext) {
        // there are some information that the fault thrower wants to pass to the fault path.
        // Means that the fault is a ws-addressing one hence use the ws-addressing fault action.
        final Object faultInfoForHeaders =
                messageContext.getLocalProperty(Constants.FAULT_INFORMATION_FOR_HEADERS);
        if (faultInfoForHeaders == null) {
            return true;
        }
        // if the exception is due to a problem in the faultTo header itself, we can not use
        // that fault information to send the error. Try to send using replyTo, leave it to
        // transport.
        // TODO: This should probably store a QName instead of a String.. currently we rely on
        // prefix string matching!!
        final String problemHeaderName = (String) ((Map) faultInfoForHeaders)
                .get(AddressingConstants.Final.FAULT_HEADER_PROB_HEADER_QNAME);
        final String faultToHeaderName =
                AddressingConstants.WSA_DEFAULT_PREFIX + ":" + AddressingConstants.WSA_FAULT_TO;
        // equals(null) is false, so a missing problem header means the FaultTo is usable
        return !faultToHeaderName.equals(problemHeaderName);
    }

    /**
     * Reads the addressing-requirement parameter from the given AxisDescription and
     * returns its trimmed value, defaulting to
     * {@code AddressingConstants.ADDRESSING_UNSPECIFIED} when absent or empty.
     *
     * @param axisDescription the description to read the parameter from; may be null
     */
    public static String getAddressingRequirementParemeterValue(AxisDescription axisDescription) {
        String requirement = "";
        if (axisDescription != null) {
            final Parameter parameter = axisDescription
                    .getParameter(AddressingConstants.ADDRESSING_REQUIREMENT_PARAMETER);
            requirement = Utils.getParameterValue(parameter);
            if (requirement != null) {
                requirement = requirement.trim();
            }
            if (LoggingControl.debugLoggingAllowed && log.isDebugEnabled()) {
                log.debug("getAddressingRequirementParemeterValue: value: '" + requirement + "'");
            }
        }
        return (requirement == null || "".equals(requirement))
                ? AddressingConstants.ADDRESSING_UNSPECIFIED
                : requirement;
    }

    /**
     * Extract the parameter representing the Anonymous flag from the AxisOperation
     * and return the String value. Return the default of "optional" if not specified.
     *
     * @param axisOperation the operation to read the parameter from; may be null
     */
    public static String getInvocationPatternParameterValue(AxisOperation axisOperation) {
        String pattern = "";
        if (axisOperation != null) {
            final Parameter parameter = axisOperation
                    .getParameter(AddressingConstants.WSAM_INVOCATION_PATTERN_PARAMETER_NAME);
            pattern = Utils.getParameterValue(parameter);
            if (pattern != null) {
                pattern = pattern.trim();
            }
            if (LoggingControl.debugLoggingAllowed && log.isDebugEnabled()) {
                log.debug("getInvocationPatternParameterValue: value: '" + pattern + "'");
            }
        }
        return (pattern == null || "".equals(pattern))
                ? AddressingConstants.WSAM_INVOCATION_PATTERN_BOTH
                : pattern;
    }

    /**
     * Set the value of an existing unlocked invocation-pattern Parameter, or add a new
     * one if none exists. If a locked Parameter of the same name already exists the
     * method only traces and returns.
     *
     * @param axisOperation the operation whose parameter is updated
     * @param value the new value; a null value is ignored
     */
    public static void setInvocationPatternParameterValue(AxisOperation axisOperation,
                                                          String value) {
        if (value == null) {
            if (LoggingControl.debugLoggingAllowed && log.isDebugEnabled()) {
                log.debug("setInvocationPatternParameterValue: value passed in is null. return");
            }
            return;
        }

        Parameter param = axisOperation
                .getParameter(AddressingConstants.WSAM_INVOCATION_PATTERN_PARAMETER_NAME);
        if (param != null) {
            // update the existing parameter in place, unless it is locked
            if (LoggingControl.debugLoggingAllowed && log.isDebugEnabled()) {
                log.debug("setInvocationPatternParameterValue: Parameter already exists");
            }
            if (!param.isLocked()) {
                if (LoggingControl.debugLoggingAllowed && log.isDebugEnabled()) {
                    log.debug("setInvocationPatternParameterValue: Parameter not locked. Setting value: "
                            + value);
                }
                param.setValue(value);
            }
            return;
        }

        // no parameter yet - create one and attach it to the operation
        if (LoggingControl.debugLoggingAllowed && log.isDebugEnabled()) {
            log.debug("setInvocationPatternParameterValue: Parameter does not exist");
        }
        param = new Parameter();
        param.setName(AddressingConstants.WSAM_INVOCATION_PATTERN_PARAMETER_NAME);
        param.setValue(value);
        try {
            if (LoggingControl.debugLoggingAllowed && log.isDebugEnabled()) {
                log.debug("setInvocationPatternParameterValue: Adding parameter with value: "
                        + value);
            }
            axisOperation.addParameter(param);
        } catch (AxisFault af) {
            // Should be unreachable: addParameter only faults when a locked Parameter of the
            // same name already exists, and that case was handled above.
            if (LoggingControl.debugLoggingAllowed && log.isDebugEnabled()) {
                log.debug(
                        "setInvocationPatternParameterValue: addParameter failed: "
                                + af.getMessage());
            }
        }
    }

    /**
     * Set the value of an existing unlocked addressing-requirement Parameter, or add a
     * new one if none exists. If a locked Parameter of the same name already exists the
     * method only traces and returns.
     *
     * @param axisDescription the description whose parameter is updated
     * @param value the new value; a null value is ignored
     */
    public static void setAddressingRequirementParemeterValue(AxisDescription axisDescription,
                                                              String value) {
        if (value == null) {
            if (LoggingControl.debugLoggingAllowed && log.isDebugEnabled()) {
                log.debug("setAddressingRequirementParemeterValue: value passed in is null. return");
            }
            return;
        }

        Parameter param = axisDescription
                .getParameter(AddressingConstants.ADDRESSING_REQUIREMENT_PARAMETER);
        if (param != null) {
            // update the existing parameter in place, unless it is locked
            if (LoggingControl.debugLoggingAllowed && log.isDebugEnabled()) {
                log.debug("setAddressingRequirementParemeterValue: Parameter already exists");
            }
            if (!param.isLocked()) {
                if (LoggingControl.debugLoggingAllowed && log.isDebugEnabled()) {
                    log.debug("setAddressingRequirementParemeterValue: Parameter not locked. Setting value: "
                            + value);
                }
                param.setValue(value);
            }
            return;
        }

        // no parameter yet - create one and attach it to the description
        if (LoggingControl.debugLoggingAllowed && log.isDebugEnabled()) {
            log.debug("setAddressingRequirementParemeterValue: Parameter does not exist");
        }
        param = new Parameter();
        param.setName(AddressingConstants.ADDRESSING_REQUIREMENT_PARAMETER);
        param.setValue(value);
        try {
            if (LoggingControl.debugLoggingAllowed && log.isDebugEnabled()) {
                log.debug("setAddressingRequirementParemeterValue: Adding parameter with value: "
                        + value);
            }
            axisDescription.addParameter(param);
        } catch (AxisFault af) {
            // Should be unreachable: addParameter only faults when a locked Parameter of the
            // same name already exists, and that case was handled above.
            if (LoggingControl.debugLoggingAllowed && log.isDebugEnabled()) {
                log.debug(
                        "setAddressingRequirementParemeterValue: addParameter failed: "
                                + af.getMessage());
            }
        }
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hdfs; import java.io.IOException; import java.util.Date; import java.io.DataInputStream; import java.io.FileOutputStream; import java.io.InputStreamReader; import java.io.PrintStream; import java.io.File; import java.io.BufferedReader; import java.util.StringTokenizer; import java.net.InetAddress; import java.text.SimpleDateFormat; import java.util.Iterator; import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.Log; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.SequenceFile.CompressionType; import org.apache.hadoop.io.SequenceFile; import org.apache.hadoop.mapred.FileInputFormat; import org.apache.hadoop.mapred.FileOutputFormat; import org.apache.hadoop.mapred.Mapper; import org.apache.hadoop.mapred.SequenceFileInputFormat; import org.apache.hadoop.mapred.JobClient; import org.apache.hadoop.mapred.MapReduceBase; import 
org.apache.hadoop.mapred.Reporter; import org.apache.hadoop.mapred.OutputCollector; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.mapred.Reducer; /** * This program executes a specified operation that applies load to * the NameNode. * * When run simultaneously on multiple nodes, this program functions * as a stress-test and benchmark for namenode, especially when * the number of bytes written to each file is small. * * Valid operations are: * create_write * open_read * rename * delete * * NOTE: The open_read, rename and delete operations assume that the files * they operate on are already available. The create_write operation * must be run before running the other operations. */ public class NNBench { private static final Log LOG = LogFactory.getLog( "org.apache.hadoop.hdfs.NNBench"); protected static String CONTROL_DIR_NAME = "control"; protected static String OUTPUT_DIR_NAME = "output"; protected static String DATA_DIR_NAME = "data"; protected static final String DEFAULT_RES_FILE_NAME = "NNBench_results.log"; protected static final String NNBENCH_VERSION = "NameNode Benchmark 0.4"; public static String operation = "none"; public static long numberOfMaps = 1l; // default is 1 public static long numberOfReduces = 1l; // default is 1 public static long startTime = System.currentTimeMillis() + (120 * 1000); // default is 'now' + 2min public static long blockSize = 1l; // default is 1 public static int bytesToWrite = 0; // default is 0 public static long bytesPerChecksum = 1l; // default is 1 public static long numberOfFiles = 1l; // default is 1 public static short replicationFactorPerFile = 1; // default is 1 public static String baseDir = "/benchmarks/NNBench"; // default public static boolean readFileAfterOpen = false; // default is to not read // Supported operations private static final String OP_CREATE_WRITE = "create_write"; private static final String OP_OPEN_READ = "open_read"; private static final String OP_RENAME = "rename"; private 
static final String OP_DELETE = "delete"; // To display in the format that matches the NN and DN log format // Example: 2007-10-26 00:01:19,853 static SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd' 'HH:mm:ss','S"); private static Configuration config = new Configuration(); /** * Clean up the files before a test run * * @throws IOException on error */ private static void cleanupBeforeTestrun() throws IOException { FileSystem tempFS = FileSystem.get(config); // Delete the data directory only if it is the create/write operation if (operation.equals(OP_CREATE_WRITE)) { LOG.info("Deleting data directory"); tempFS.delete(new Path(baseDir, DATA_DIR_NAME), true); } tempFS.delete(new Path(baseDir, CONTROL_DIR_NAME), true); tempFS.delete(new Path(baseDir, OUTPUT_DIR_NAME), true); } /** * Create control files before a test run. * Number of files created is equal to the number of maps specified * * @throws IOException on error */ private static void createControlFiles() throws IOException { FileSystem tempFS = FileSystem.get(config); LOG.info("Creating " + numberOfMaps + " control files"); for (int i = 0; i < numberOfMaps; i++) { String strFileName = "NNBench_Controlfile_" + i; Path filePath = new Path(new Path(baseDir, CONTROL_DIR_NAME), strFileName); SequenceFile.Writer writer = null; try { writer = SequenceFile.createWriter(tempFS, config, filePath, Text.class, LongWritable.class, CompressionType.NONE); writer.append(new Text(strFileName), new LongWritable(0l)); } finally { if (writer != null) { writer.close(); } } } } /** * Display version */ private static void displayVersion() { System.out.println(NNBENCH_VERSION); } /** * Display usage */ private static void displayUsage() { String usage = "Usage: nnbench <options>\n" + "Options:\n" + "\t-operation <Available operations are " + OP_CREATE_WRITE + " " + OP_OPEN_READ + " " + OP_RENAME + " " + OP_DELETE + ". 
" + "This option is mandatory>\n" + "\t * NOTE: The open_read, rename and delete operations assume " + "that the files they operate on, are already available. " + "The create_write operation must be run before running the " + "other operations.\n" + "\t-maps <number of maps. default is 1. This is not mandatory>\n" + "\t-reduces <number of reduces. default is 1. This is not mandatory>\n" + "\t-startTime <time to start, given in seconds from the epoch. " + "Make sure this is far enough into the future, so all maps " + "(operations) will start at the same time. " + "default is launch time + 2 mins. This is not mandatory>\n" + "\t-blockSize <Block size in bytes. default is 1. " + "This is not mandatory>\n" + "\t-bytesToWrite <Bytes to write. default is 0. " + "This is not mandatory>\n" + "\t-bytesPerChecksum <Bytes per checksum for the files. default is 1. " + "This is not mandatory>\n" + "\t-numberOfFiles <number of files to create. default is 1. " + "This is not mandatory>\n" + "\t-replicationFactorPerFile <Replication factor for the files." + " default is 1. This is not mandatory>\n" + "\t-baseDir <base DFS path. default is /benchmarks/NNBench. " + "This is not mandatory>\n" + "\t-readFileAfterOpen <true or false. if true, it reads the file and " + "reports the average time to read. This is valid with the open_read " + "operation. default is false. 
This is not mandatory>\n" + "\t-help: Display the help statement\n"; System.out.println(usage); } /** * check for arguments and fail if the values are not specified * @param index positional number of an argument in the list of command * line's arguments * @param length total number of arguments */ public static void checkArgs(final int index, final int length) { if (index == length) { displayUsage(); System.exit(-1); } } /** * Parse input arguments * * @param args array of command line's parameters to be parsed */ public static void parseInputs(final String[] args) { // If there are no command line arguments, exit if (args.length == 0) { displayUsage(); System.exit(-1); } // Parse command line args for (int i = 0; i < args.length; i++) { if (args[i].equals("-operation")) { operation = args[++i]; } else if (args[i].equals("-maps")) { checkArgs(i + 1, args.length); numberOfMaps = Long.parseLong(args[++i]); } else if (args[i].equals("-reduces")) { checkArgs(i + 1, args.length); numberOfReduces = Long.parseLong(args[++i]); } else if (args[i].equals("-startTime")) { checkArgs(i + 1, args.length); startTime = Long.parseLong(args[++i]) * 1000; } else if (args[i].equals("-blockSize")) { checkArgs(i + 1, args.length); blockSize = Long.parseLong(args[++i]); } else if (args[i].equals("-bytesToWrite")) { checkArgs(i + 1, args.length); bytesToWrite = Integer.parseInt(args[++i]); } else if (args[i].equals("-bytesPerChecksum")) { checkArgs(i + 1, args.length); bytesPerChecksum = Long.parseLong(args[++i]); } else if (args[i].equals("-numberOfFiles")) { checkArgs(i + 1, args.length); numberOfFiles = Long.parseLong(args[++i]); } else if (args[i].equals("-replicationFactorPerFile")) { checkArgs(i + 1, args.length); replicationFactorPerFile = Short.parseShort(args[++i]); } else if (args[i].equals("-baseDir")) { checkArgs(i + 1, args.length); baseDir = args[++i]; } else if (args[i].equals("-readFileAfterOpen")) { checkArgs(i + 1, args.length); readFileAfterOpen = 
Boolean.parseBoolean(args[++i]); } else if (args[i].equals("-help")) { displayUsage(); System.exit(-1); } } LOG.info("Test Inputs: "); LOG.info(" Test Operation: " + operation); LOG.info(" Start time: " + sdf.format(new Date(startTime))); LOG.info(" Number of maps: " + numberOfMaps); LOG.info(" Number of reduces: " + numberOfReduces); LOG.info(" Block Size: " + blockSize); LOG.info(" Bytes to write: " + bytesToWrite); LOG.info(" Bytes per checksum: " + bytesPerChecksum); LOG.info(" Number of files: " + numberOfFiles); LOG.info(" Replication factor: " + replicationFactorPerFile); LOG.info(" Base dir: " + baseDir); LOG.info(" Read file after open: " + readFileAfterOpen); // Set user-defined parameters, so the map method can access the values config.set("test.nnbench.operation", operation); config.setLong("test.nnbench.maps", numberOfMaps); config.setLong("test.nnbench.reduces", numberOfReduces); config.setLong("test.nnbench.starttime", startTime); config.setLong("test.nnbench.blocksize", blockSize); config.setInt("test.nnbench.bytestowrite", bytesToWrite); config.setLong("test.nnbench.bytesperchecksum", bytesPerChecksum); config.setLong("test.nnbench.numberoffiles", numberOfFiles); config.setInt("test.nnbench.replicationfactor", (int) replicationFactorPerFile); config.set("test.nnbench.basedir", baseDir); config.setBoolean("test.nnbench.readFileAfterOpen", readFileAfterOpen); config.set("test.nnbench.datadir.name", DATA_DIR_NAME); config.set("test.nnbench.outputdir.name", OUTPUT_DIR_NAME); config.set("test.nnbench.controldir.name", CONTROL_DIR_NAME); } /** * Analyze the results * * @throws IOException on error */ private static void analyzeResults() throws IOException { final FileSystem fs = FileSystem.get(config); Path reduceFile = new Path(new Path(baseDir, OUTPUT_DIR_NAME), "part-00000"); DataInputStream in; in = new DataInputStream(fs.open(reduceFile)); BufferedReader lines; lines = new BufferedReader(new InputStreamReader(in)); long totalTimeAL1 = 0l; long 
totalTimeAL2 = 0l; long totalTimeTPmS = 0l; long lateMaps = 0l; long numOfExceptions = 0l; long successfulFileOps = 0l; long mapStartTimeTPmS = 0l; long mapEndTimeTPmS = 0l; String resultTPSLine1 = null; String resultTPSLine2 = null; String resultALLine1 = null; String resultALLine2 = null; String line; while((line = lines.readLine()) != null) { StringTokenizer tokens = new StringTokenizer(line, " \t\n\r\f%;"); String attr = tokens.nextToken(); if (attr.endsWith(":totalTimeAL1")) { totalTimeAL1 = Long.parseLong(tokens.nextToken()); } else if (attr.endsWith(":totalTimeAL2")) { totalTimeAL2 = Long.parseLong(tokens.nextToken()); } else if (attr.endsWith(":totalTimeTPmS")) { totalTimeTPmS = Long.parseLong(tokens.nextToken()); } else if (attr.endsWith(":latemaps")) { lateMaps = Long.parseLong(tokens.nextToken()); } else if (attr.endsWith(":numOfExceptions")) { numOfExceptions = Long.parseLong(tokens.nextToken()); } else if (attr.endsWith(":successfulFileOps")) { successfulFileOps = Long.parseLong(tokens.nextToken()); } else if (attr.endsWith(":mapStartTimeTPmS")) { mapStartTimeTPmS = Long.parseLong(tokens.nextToken()); } else if (attr.endsWith(":mapEndTimeTPmS")) { mapEndTimeTPmS = Long.parseLong(tokens.nextToken()); } } // Average latency is the average time to perform 'n' number of // operations, n being the number of files double avgLatency1 = (double) totalTimeAL1 / successfulFileOps; double avgLatency2 = (double) totalTimeAL2 / successfulFileOps; // The time it takes for the longest running map is measured. Using that, // cluster transactions per second is calculated. It includes time to // retry any of the failed operations double longestMapTimeTPmS = (double) (mapEndTimeTPmS - mapStartTimeTPmS); double totalTimeTPS = (longestMapTimeTPmS == 0) ? (1000 * successfulFileOps) : (double) (1000 * successfulFileOps) / longestMapTimeTPmS; // The time it takes to perform 'n' operations is calculated (in ms), // n being the number of files. 
Using that time, the average execution // time is calculated. It includes time to retry any of the // failed operations double AverageExecutionTime = (totalTimeTPmS == 0) ? (double) successfulFileOps : (double) totalTimeTPmS / successfulFileOps; if (operation.equals(OP_CREATE_WRITE)) { // For create/write/close, it is treated as two transactions, // since a file create from a client perspective involves create and close resultTPSLine1 = " TPS: Create/Write/Close: " + (int) (totalTimeTPS * 2); resultTPSLine2 = "Avg exec time (ms): Create/Write/Close: " + AverageExecutionTime; resultALLine1 = " Avg Lat (ms): Create/Write: " + avgLatency1; resultALLine2 = " Avg Lat (ms): Close: " + avgLatency2; } else if (operation.equals(OP_OPEN_READ)) { resultTPSLine1 = " TPS: Open/Read: " + (int) totalTimeTPS; resultTPSLine2 = " Avg Exec time (ms): Open/Read: " + AverageExecutionTime; resultALLine1 = " Avg Lat (ms): Open: " + avgLatency1; if (readFileAfterOpen) { resultALLine2 = " Avg Lat (ms): Read: " + avgLatency2; } } else if (operation.equals(OP_RENAME)) { resultTPSLine1 = " TPS: Rename: " + (int) totalTimeTPS; resultTPSLine2 = " Avg Exec time (ms): Rename: " + AverageExecutionTime; resultALLine1 = " Avg Lat (ms): Rename: " + avgLatency1; } else if (operation.equals(OP_DELETE)) { resultTPSLine1 = " TPS: Delete: " + (int) totalTimeTPS; resultTPSLine2 = " Avg Exec time (ms): Delete: " + AverageExecutionTime; resultALLine1 = " Avg Lat (ms): Delete: " + avgLatency1; } String resultLines[] = { "-------------- NNBench -------------- : ", " Version: " + NNBENCH_VERSION, " Date & time: " + sdf.format(new Date( System.currentTimeMillis())), "", " Test Operation: " + operation, " Start time: " + sdf.format(new Date(startTime)), " Maps to run: " + numberOfMaps, " Reduces to run: " + numberOfReduces, " Block Size (bytes): " + blockSize, " Bytes to write: " + bytesToWrite, " Bytes per checksum: " + bytesPerChecksum, " Number of files: " + numberOfFiles, " Replication factor: " + 
replicationFactorPerFile, " Successful file operations: " + successfulFileOps, "", " # maps that missed the barrier: " + lateMaps, " # exceptions: " + numOfExceptions, "", resultTPSLine1, resultTPSLine2, resultALLine1, resultALLine2, "", " RAW DATA: AL Total #1: " + totalTimeAL1, " RAW DATA: AL Total #2: " + totalTimeAL2, " RAW DATA: TPS Total (ms): " + totalTimeTPmS, " RAW DATA: Longest Map Time (ms): " + longestMapTimeTPmS, " RAW DATA: Late maps: " + lateMaps, " RAW DATA: # of exceptions: " + numOfExceptions, "" }; PrintStream res = new PrintStream(new FileOutputStream( new File(DEFAULT_RES_FILE_NAME), true)); // Write to a file and also dump to log for(int i = 0; i < resultLines.length; i++) { LOG.info(resultLines[i]); res.println(resultLines[i]); } } /** * Run the test * * @throws IOException on error */ public static void runTests() throws IOException { config.setLong("io.bytes.per.checksum", bytesPerChecksum); JobConf job = new JobConf(config, NNBench.class); job.setJobName("NNBench-" + operation); FileInputFormat.setInputPaths(job, new Path(baseDir, CONTROL_DIR_NAME)); job.setInputFormat(SequenceFileInputFormat.class); // Explicitly set number of max map attempts to 1. 
job.setMaxMapAttempts(1); // Explicitly turn off speculative execution job.setSpeculativeExecution(false); job.setMapperClass(NNBenchMapper.class); job.setReducerClass(NNBenchReducer.class); FileOutputFormat.setOutputPath(job, new Path(baseDir, OUTPUT_DIR_NAME)); job.setOutputKeyClass(Text.class); job.setOutputValueClass(Text.class); job.setNumReduceTasks((int) numberOfReduces); JobClient.runJob(job); } /** * Validate the inputs */ public static void validateInputs() { // If it is not one of the four operations, then fail if (!operation.equals(OP_CREATE_WRITE) && !operation.equals(OP_OPEN_READ) && !operation.equals(OP_RENAME) && !operation.equals(OP_DELETE)) { System.err.println("Error: Unknown operation: " + operation); displayUsage(); System.exit(-1); } // If number of maps is a negative number, then fail // Hadoop allows the number of maps to be 0 if (numberOfMaps < 0) { System.err.println("Error: Number of maps must be a positive number"); displayUsage(); System.exit(-1); } // If number of reduces is a negative number or 0, then fail if (numberOfReduces <= 0) { System.err.println("Error: Number of reduces must be a positive number"); displayUsage(); System.exit(-1); } // If blocksize is a negative number or 0, then fail if (blockSize <= 0) { System.err.println("Error: Block size must be a positive number"); displayUsage(); System.exit(-1); } // If bytes to write is a negative number, then fail if (bytesToWrite < 0) { System.err.println("Error: Bytes to write must be a positive number"); displayUsage(); System.exit(-1); } // If bytes per checksum is a negative number, then fail if (bytesPerChecksum < 0) { System.err.println("Error: Bytes per checksum must be a positive number"); displayUsage(); System.exit(-1); } // If number of files is a negative number, then fail if (numberOfFiles < 0) { System.err.println("Error: Number of files must be a positive number"); displayUsage(); System.exit(-1); } // If replication factor is a negative number, then fail if 
(replicationFactorPerFile < 0) { System.err.println("Error: Replication factor must be a positive number"); displayUsage(); System.exit(-1); } // If block size is not a multiple of bytesperchecksum, fail if (blockSize % bytesPerChecksum != 0) { System.err.println("Error: Block Size in bytes must be a multiple of " + "bytes per checksum: "); displayUsage(); System.exit(-1); } } /** * Main method for running the NNBench benchmarks * * @param args array of command line arguments * @throws IOException indicates a problem with test startup */ public static void main(String[] args) throws IOException { // Display the application version string displayVersion(); // Parse the inputs parseInputs(args); // Validate inputs validateInputs(); // Clean up files before the test run cleanupBeforeTestrun(); // Create control files before test run createControlFiles(); // Run the tests as a map reduce job runTests(); // Analyze results analyzeResults(); } /** * Mapper class */ static class NNBenchMapper extends Configured implements Mapper<Text, LongWritable, Text, Text> { FileSystem filesystem = null; private String hostName = null; long numberOfFiles = 1l; long blkSize = 1l; short replFactor = 1; int bytesToWrite = 0; String baseDir = null; String dataDirName = null; String op = null; boolean readFile = false; final int MAX_OPERATION_EXCEPTIONS = 1000; // Data to collect from the operation int numOfExceptions = 0; long startTimeAL = 0l; long totalTimeAL1 = 0l; long totalTimeAL2 = 0l; long successfulFileOps = 0l; /** * Constructor */ public NNBenchMapper() { } /** * Mapper base implementation */ public void configure(JobConf conf) { setConf(conf); try { filesystem = FileSystem.get(conf); } catch(Exception e) { throw new RuntimeException("Cannot get file system.", e); } try { hostName = InetAddress.getLocalHost().getHostName(); } catch(Exception e) { throw new RuntimeException("Error getting hostname", e); } } /** * Mapper base implementation */ public void close() throws 
IOException { } /** * Returns when the current number of seconds from the epoch equals * the command line argument given by <code>-startTime</code>. * This allows multiple instances of this program, running on clock * synchronized nodes, to start at roughly the same time. * @return true if the method was able to sleep for <code>-startTime</code> * without interruption; false otherwise */ private boolean barrier() { long startTime = getConf().getLong("test.nnbench.starttime", 0l); long currentTime = System.currentTimeMillis(); long sleepTime = startTime - currentTime; boolean retVal = false; // If the sleep time is greater than 0, then sleep and return if (sleepTime > 0) { LOG.info("Waiting in barrier for: " + sleepTime + " ms"); try { Thread.sleep(sleepTime); retVal = true; } catch (Exception e) { retVal = false; } } return retVal; } /** * Map method */ public void map(Text key, LongWritable value, OutputCollector<Text, Text> output, Reporter reporter) throws IOException { Configuration conf = filesystem.getConf(); numberOfFiles = conf.getLong("test.nnbench.numberoffiles", 1l); blkSize = conf.getLong("test.nnbench.blocksize", 1l); replFactor = (short) (conf.getInt("test.nnbench.replicationfactor", 1)); bytesToWrite = conf.getInt("test.nnbench.bytestowrite", 0); baseDir = conf.get("test.nnbench.basedir"); dataDirName = conf.get("test.nnbench.datadir.name"); op = conf.get("test.nnbench.operation"); readFile = conf.getBoolean("test.nnbench.readFileAfterOpen", false); long totalTimeTPmS = 0l; long startTimeTPmS = 0l; long endTimeTPms = 0l; numOfExceptions = 0; startTimeAL = 0l; totalTimeAL1 = 0l; totalTimeAL2 = 0l; successfulFileOps = 0l; if (barrier()) { if (op.equals(OP_CREATE_WRITE)) { startTimeTPmS = System.currentTimeMillis(); doCreateWriteOp("file_" + hostName + "_", reporter); } else if (op.equals(OP_OPEN_READ)) { startTimeTPmS = System.currentTimeMillis(); doOpenReadOp("file_" + hostName + "_", reporter); } else if (op.equals(OP_RENAME)) { startTimeTPmS = 
System.currentTimeMillis(); doRenameOp("file_" + hostName + "_", reporter); } else if (op.equals(OP_DELETE)) { startTimeTPmS = System.currentTimeMillis(); doDeleteOp("file_" + hostName + "_", reporter); } endTimeTPms = System.currentTimeMillis(); totalTimeTPmS = endTimeTPms - startTimeTPmS; } else { output.collect(new Text("l:latemaps"), new Text("1")); } // collect after the map end time is measured output.collect(new Text("l:totalTimeAL1"), new Text(String.valueOf(totalTimeAL1))); output.collect(new Text("l:totalTimeAL2"), new Text(String.valueOf(totalTimeAL2))); output.collect(new Text("l:numOfExceptions"), new Text(String.valueOf(numOfExceptions))); output.collect(new Text("l:successfulFileOps"), new Text(String.valueOf(successfulFileOps))); output.collect(new Text("l:totalTimeTPmS"), new Text(String.valueOf(totalTimeTPmS))); output.collect(new Text("min:mapStartTimeTPmS"), new Text(String.valueOf(startTimeTPmS))); output.collect(new Text("max:mapEndTimeTPmS"), new Text(String.valueOf(endTimeTPms))); } /** * Create and Write operation. * @param name of the prefix of the putput file to be created * @param reporter an instanse of (@link Reporter) to be used for * status' updates */ private void doCreateWriteOp(String name, Reporter reporter) { FSDataOutputStream out; byte[] buffer = new byte[bytesToWrite]; for (long l = 0l; l < numberOfFiles; l++) { Path filePath = new Path(new Path(baseDir, dataDirName), name + "_" + l); boolean successfulOp = false; while (! 
successfulOp && numOfExceptions < MAX_OPERATION_EXCEPTIONS) { try { // Set up timer for measuring AL (transaction #1) startTimeAL = System.currentTimeMillis(); // Create the file // Use a buffer size of 512 out = filesystem.create(filePath, true, 512, replFactor, blkSize); out.write(buffer); totalTimeAL1 += (System.currentTimeMillis() - startTimeAL); // Close the file / file output stream // Set up timers for measuring AL (transaction #2) startTimeAL = System.currentTimeMillis(); out.close(); totalTimeAL2 += (System.currentTimeMillis() - startTimeAL); successfulOp = true; successfulFileOps ++; reporter.setStatus("Finish "+ l + " files"); } catch (IOException e) { LOG.info("Exception recorded in op: " + "Create/Write/Close"); numOfExceptions++; } } } } /** * Open operation * @param name of the prefix of the putput file to be read * @param reporter an instanse of (@link Reporter) to be used for * status' updates */ private void doOpenReadOp(String name, Reporter reporter) { FSDataInputStream input; byte[] buffer = new byte[bytesToWrite]; for (long l = 0l; l < numberOfFiles; l++) { Path filePath = new Path(new Path(baseDir, dataDirName), name + "_" + l); boolean successfulOp = false; while (! 
successfulOp && numOfExceptions < MAX_OPERATION_EXCEPTIONS) { try { // Set up timer for measuring AL startTimeAL = System.currentTimeMillis(); input = filesystem.open(filePath); totalTimeAL1 += (System.currentTimeMillis() - startTimeAL); // If the file needs to be read (specified at command line) if (readFile) { startTimeAL = System.currentTimeMillis(); input.readFully(buffer); totalTimeAL2 += (System.currentTimeMillis() - startTimeAL); } input.close(); successfulOp = true; successfulFileOps ++; reporter.setStatus("Finish "+ l + " files"); } catch (IOException e) { LOG.info("Exception recorded in op: OpenRead " + e); numOfExceptions++; } } } } /** * Rename operation * @param name of prefix of the file to be renamed * @param reporter an instanse of (@link Reporter) to be used for * status' updates */ private void doRenameOp(String name, Reporter reporter) { for (long l = 0l; l < numberOfFiles; l++) { Path filePath = new Path(new Path(baseDir, dataDirName), name + "_" + l); Path filePathR = new Path(new Path(baseDir, dataDirName), name + "_r_" + l); boolean successfulOp = false; while (! successfulOp && numOfExceptions < MAX_OPERATION_EXCEPTIONS) { try { // Set up timer for measuring AL startTimeAL = System.currentTimeMillis(); filesystem.rename(filePath, filePathR); totalTimeAL1 += (System.currentTimeMillis() - startTimeAL); successfulOp = true; successfulFileOps ++; reporter.setStatus("Finish "+ l + " files"); } catch (IOException e) { LOG.info("Exception recorded in op: Rename"); numOfExceptions++; } } } } /** * Delete operation * @param name of prefix of the file to be deleted * @param reporter an instanse of (@link Reporter) to be used for * status' updates */ private void doDeleteOp(String name, Reporter reporter) { for (long l = 0l; l < numberOfFiles; l++) { Path filePath = new Path(new Path(baseDir, dataDirName), name + "_" + l); boolean successfulOp = false; while (! 
successfulOp && numOfExceptions < MAX_OPERATION_EXCEPTIONS) { try { // Set up timer for measuring AL startTimeAL = System.currentTimeMillis(); filesystem.delete(filePath, true); totalTimeAL1 += (System.currentTimeMillis() - startTimeAL); successfulOp = true; successfulFileOps ++; reporter.setStatus("Finish "+ l + " files"); } catch (IOException e) { LOG.info("Exception in recorded op: Delete"); numOfExceptions++; } } } } } /** * Reducer class */ static class NNBenchReducer extends MapReduceBase implements Reducer<Text, Text, Text, Text> { protected String hostName; public NNBenchReducer () { LOG.info("Starting NNBenchReducer !!!"); try { hostName = java.net.InetAddress.getLocalHost().getHostName(); } catch(Exception e) { hostName = "localhost"; } LOG.info("Starting NNBenchReducer on " + hostName); } /** * Reduce method */ public void reduce(Text key, Iterator<Text> values, OutputCollector<Text, Text> output, Reporter reporter ) throws IOException { String field = key.toString(); reporter.setStatus("starting " + field + " ::host = " + hostName); // sum long values if (field.startsWith("l:")) { long lSum = 0; while (values.hasNext()) { lSum += Long.parseLong(values.next().toString()); } output.collect(key, new Text(String.valueOf(lSum))); } if (field.startsWith("min:")) { long minVal = -1; while (values.hasNext()) { long value = Long.parseLong(values.next().toString()); if (minVal == -1) { minVal = value; } else { if (value != 0 && value < minVal) { minVal = value; } } } output.collect(key, new Text(String.valueOf(minVal))); } if (field.startsWith("max:")) { long maxVal = -1; while (values.hasNext()) { long value = Long.parseLong(values.next().toString()); if (maxVal == -1) { maxVal = value; } else { if (value > maxVal) { maxVal = value; } } } output.collect(key, new Text(String.valueOf(maxVal))); } reporter.setStatus("finished " + field + " ::host = " + hostName); } } }
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University // Copyright (c) 2011, 2012 Open Networking Foundation // Copyright (c) 2012, 2013 Big Switch Networks, Inc. // This library was generated by the LoxiGen Compiler. // See the file LICENSE.txt which should have been included in the source distribution // Automatically generated by LOXI from template of_class.java // Do not modify package org.projectfloodlight.openflow.protocol.ver13; import org.projectfloodlight.openflow.protocol.*; import org.projectfloodlight.openflow.protocol.action.*; import org.projectfloodlight.openflow.protocol.actionid.*; import org.projectfloodlight.openflow.protocol.bsntlv.*; import org.projectfloodlight.openflow.protocol.errormsg.*; import org.projectfloodlight.openflow.protocol.meterband.*; import org.projectfloodlight.openflow.protocol.instruction.*; import org.projectfloodlight.openflow.protocol.instructionid.*; import org.projectfloodlight.openflow.protocol.match.*; import org.projectfloodlight.openflow.protocol.stat.*; import org.projectfloodlight.openflow.protocol.oxm.*; import org.projectfloodlight.openflow.protocol.oxs.*; import org.projectfloodlight.openflow.protocol.queueprop.*; import org.projectfloodlight.openflow.types.*; import org.projectfloodlight.openflow.util.*; import org.projectfloodlight.openflow.exceptions.*; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.Set; import io.netty.buffer.ByteBuf; import com.google.common.hash.PrimitiveSink; import com.google.common.hash.Funnel; class OFBsnTlvIcmpIdVer13 implements OFBsnTlvIcmpId { private static final Logger logger = LoggerFactory.getLogger(OFBsnTlvIcmpIdVer13.class); // version: 1.3 final static byte WIRE_VERSION = 4; final static int LENGTH = 6; private final static int DEFAULT_VALUE = 0x0; // OF message fields private final int value; // // Immutable default instance final static OFBsnTlvIcmpIdVer13 DEFAULT = new OFBsnTlvIcmpIdVer13( DEFAULT_VALUE ); // 
package private constructor - used by readers, builders, and factory OFBsnTlvIcmpIdVer13(int value) { this.value = value; } // Accessors for OF message fields @Override public int getType() { return 0x46; } @Override public int getValue() { return value; } @Override public OFVersion getVersion() { return OFVersion.OF_13; } public OFBsnTlvIcmpId.Builder createBuilder() { return new BuilderWithParent(this); } static class BuilderWithParent implements OFBsnTlvIcmpId.Builder { final OFBsnTlvIcmpIdVer13 parentMessage; // OF message fields private boolean valueSet; private int value; BuilderWithParent(OFBsnTlvIcmpIdVer13 parentMessage) { this.parentMessage = parentMessage; } @Override public int getType() { return 0x46; } @Override public int getValue() { return value; } @Override public OFBsnTlvIcmpId.Builder setValue(int value) { this.value = value; this.valueSet = true; return this; } @Override public OFVersion getVersion() { return OFVersion.OF_13; } @Override public OFBsnTlvIcmpId build() { int value = this.valueSet ? this.value : parentMessage.value; // return new OFBsnTlvIcmpIdVer13( value ); } } static class Builder implements OFBsnTlvIcmpId.Builder { // OF message fields private boolean valueSet; private int value; @Override public int getType() { return 0x46; } @Override public int getValue() { return value; } @Override public OFBsnTlvIcmpId.Builder setValue(int value) { this.value = value; this.valueSet = true; return this; } @Override public OFVersion getVersion() { return OFVersion.OF_13; } // @Override public OFBsnTlvIcmpId build() { int value = this.valueSet ? 
this.value : DEFAULT_VALUE; return new OFBsnTlvIcmpIdVer13( value ); } } final static Reader READER = new Reader(); static class Reader implements OFMessageReader<OFBsnTlvIcmpId> { @Override public OFBsnTlvIcmpId readFrom(ByteBuf bb) throws OFParseError { int start = bb.readerIndex(); // fixed value property type == 0x46 short type = bb.readShort(); if(type != (short) 0x46) throw new OFParseError("Wrong type: Expected=0x46(0x46), got="+type); int length = U16.f(bb.readShort()); if(length != 6) throw new OFParseError("Wrong length: Expected=6(6), got="+length); if(bb.readableBytes() + (bb.readerIndex() - start) < length) { // Buffer does not have all data yet bb.readerIndex(start); return null; } if(logger.isTraceEnabled()) logger.trace("readFrom - length={}", length); int value = U16.f(bb.readShort()); OFBsnTlvIcmpIdVer13 bsnTlvIcmpIdVer13 = new OFBsnTlvIcmpIdVer13( value ); if(logger.isTraceEnabled()) logger.trace("readFrom - read={}", bsnTlvIcmpIdVer13); return bsnTlvIcmpIdVer13; } } public void putTo(PrimitiveSink sink) { FUNNEL.funnel(this, sink); } final static OFBsnTlvIcmpIdVer13Funnel FUNNEL = new OFBsnTlvIcmpIdVer13Funnel(); static class OFBsnTlvIcmpIdVer13Funnel implements Funnel<OFBsnTlvIcmpIdVer13> { private static final long serialVersionUID = 1L; @Override public void funnel(OFBsnTlvIcmpIdVer13 message, PrimitiveSink sink) { // fixed value property type = 0x46 sink.putShort((short) 0x46); // fixed value property length = 6 sink.putShort((short) 0x6); sink.putInt(message.value); } } public void writeTo(ByteBuf bb) { WRITER.write(bb, this); } final static Writer WRITER = new Writer(); static class Writer implements OFMessageWriter<OFBsnTlvIcmpIdVer13> { @Override public void write(ByteBuf bb, OFBsnTlvIcmpIdVer13 message) { // fixed value property type = 0x46 bb.writeShort((short) 0x46); // fixed value property length = 6 bb.writeShort((short) 0x6); bb.writeShort(U16.t(message.value)); } } @Override public String toString() { StringBuilder b = new 
StringBuilder("OFBsnTlvIcmpIdVer13("); b.append("value=").append(value); b.append(")"); return b.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; OFBsnTlvIcmpIdVer13 other = (OFBsnTlvIcmpIdVer13) obj; if( value != other.value) return false; return true; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + value; return result; } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.commons.compress.archivers.zip; import java.util.zip.ZipException; import org.apache.commons.compress.utils.ByteUtils; import static org.apache.commons.compress.archivers.zip.ZipConstants.DWORD; import static org.apache.commons.compress.archivers.zip.ZipConstants.WORD; /** * Holds size and other extended information for entries that use Zip64 * features. 
 *
 * <p>Currently Commons Compress doesn't support encrypting the
 * central directory so the note in APPNOTE.TXT about masking doesn't
 * apply.</p>
 *
 * <p>The implementation relies on data being read from the local file
 * header and assumes that both size values are always present.</p>
 *
 * @see <a href="https://www.pkware.com/documents/casestudies/APPNOTE.TXT">PKWARE
 * APPNOTE.TXT, section 4.5.3</a>
 *
 * @since 1.2
 * @NotThreadSafe
 */
public class Zip64ExtendedInformationExtraField implements ZipExtraField {

    /** Extra-field header id (0x0001) assigned to Zip64 extended information. */
    static final ZipShort HEADER_ID = new ZipShort(0x0001);

    private static final String LFH_MUST_HAVE_BOTH_SIZES_MSG =
        "Zip64 extended information must contain"
        + " both size values in the local file header.";

    // Payload fields of the extra field; each may be null when the
    // corresponding value is not present in the parsed data.
    private ZipEightByteInteger size, compressedSize, relativeHeaderOffset;
    private ZipLong diskStart;

    /**
     * Stored in {@link #parseFromCentralDirectoryData
     * parseFromCentralDirectoryData} so it can be reused when ZipFile
     * calls {@link #reparseCentralDirectoryData
     * reparseCentralDirectoryData}.
     *
     * <p>Not used for anything else</p>
     *
     * @since 1.3
     */
    private byte[] rawCentralDirectoryData;

    /**
     * This constructor should only be used by the code that reads
     * archives inside of Commons Compress.
     */
    public Zip64ExtendedInformationExtraField() { }

    /**
     * Creates an extra field based on the original and compressed size.
     *
     * @param size the entry's original size
     * @param compressedSize the entry's compressed size
     *
     * @throws IllegalArgumentException if size or compressedSize is null
     */
    public Zip64ExtendedInformationExtraField(final ZipEightByteInteger size,
                                              final ZipEightByteInteger compressedSize) {
        this(size, compressedSize, null, null);
    }

    /**
     * Creates an extra field based on all four possible values.
     *
     * @param size the entry's original size
     * @param compressedSize the entry's compressed size
     * @param relativeHeaderOffset the entry's offset
     * @param diskStart the disk start
     *
     * @throws IllegalArgumentException if size or compressedSize is null
     */
    public Zip64ExtendedInformationExtraField(final ZipEightByteInteger size,
                                              final ZipEightByteInteger compressedSize,
                                              final ZipEightByteInteger relativeHeaderOffset,
                                              final ZipLong diskStart) {
        this.size = size;
        this.compressedSize = compressedSize;
        this.relativeHeaderOffset = relativeHeaderOffset;
        this.diskStart = diskStart;
    }

    @Override
    public ZipShort getHeaderId() {
        return HEADER_ID;
    }

    @Override
    public ZipShort getLocalFileDataLength() {
        // In the local file header either both sizes are present (2 * 8
        // bytes) or the field is empty.
        return new ZipShort(size != null ? 2 * DWORD : 0);
    }

    @Override
    public ZipShort getCentralDirectoryLength() {
        // Each field is only written when present; sizes and offset are
        // 8 bytes each, the disk start number is 4 bytes.
        return new ZipShort((size != null ? DWORD : 0)
                            + (compressedSize != null ? DWORD : 0)
                            + (relativeHeaderOffset != null ? DWORD : 0)
                            + (diskStart != null ? WORD : 0));
    }

    @Override
    public byte[] getLocalFileDataData() {
        if (size != null || compressedSize != null) {
            // the local file header variant must carry both sizes or none
            if (size == null || compressedSize == null) {
                throw new IllegalArgumentException(LFH_MUST_HAVE_BOTH_SIZES_MSG);
            }
            final byte[] data = new byte[2 * DWORD];
            addSizes(data);
            return data;
        }
        return ByteUtils.EMPTY_BYTE_ARRAY;
    }

    @Override
    public byte[] getCentralDirectoryData() {
        final byte[] data = new byte[getCentralDirectoryLength().getValue()];
        int off = addSizes(data);
        if (relativeHeaderOffset != null) {
            System.arraycopy(relativeHeaderOffset.getBytes(), 0, data, off, DWORD);
            off += DWORD;
        }
        if (diskStart != null) {
            System.arraycopy(diskStart.getBytes(), 0, data, off, WORD);
            off += WORD; // NOSONAR - assignment as documentation
        }
        return data;
    }

    /**
     * Parses the extra field as found in the local file header: both
     * sizes are mandatory, offset and disk start are read if trailing
     * bytes remain.
     */
    @Override
    public void parseFromLocalFileData(final byte[] buffer, int offset, final int length)
        throws ZipException {
        if (length == 0) {
            // no local file data at all, may happen if an archive
            // only holds a ZIP64 extended information extra field
            // inside the central directory but not inside the local
            // file header
            return;
        }
        if (length < 2 * DWORD) {
            throw new ZipException(LFH_MUST_HAVE_BOTH_SIZES_MSG);
        }
        size = new ZipEightByteInteger(buffer, offset);
        offset += DWORD;
        compressedSize = new ZipEightByteInteger(buffer, offset);
        offset += DWORD;
        int remaining = length - 2 * DWORD;
        if (remaining >= DWORD) {
            relativeHeaderOffset = new ZipEightByteInteger(buffer, offset);
            offset += DWORD;
            remaining -= DWORD;
        }
        if (remaining >= WORD) {
            diskStart = new ZipLong(buffer, offset);
            offset += WORD; // NOSONAR - assignment as documentation
            remaining -= WORD; // NOSONAR - assignment as documentation
        }
    }

    /**
     * Parses the extra field as found in the central directory, where
     * all four values are optional. Only unambiguous lengths are
     * interpreted here; anything else waits for
     * {@link #reparseCentralDirectoryData}.
     */
    @Override
    public void parseFromCentralDirectoryData(final byte[] buffer, int offset, final int length)
        throws ZipException {
        // store for processing in reparseCentralDirectoryData
        rawCentralDirectoryData = new byte[length];
        System.arraycopy(buffer, offset, rawCentralDirectoryData, 0, length);

        // if there is no size information in here, we are screwed and
        // can only hope things will get resolved by LFH data later
        // But there are some cases that can be detected
        // * all data is there
        // * length == 24 -> both sizes and offset
        // * length % 8 == 4 -> at least we can identify the diskStart field
        if (length >= 3 * DWORD + WORD) {
            parseFromLocalFileData(buffer, offset, length);
        } else if (length == 3 * DWORD) {
            size = new ZipEightByteInteger(buffer, offset);
            offset += DWORD;
            compressedSize = new ZipEightByteInteger(buffer, offset);
            offset += DWORD;
            relativeHeaderOffset = new ZipEightByteInteger(buffer, offset);
        } else if (length % DWORD == WORD) {
            diskStart = new ZipLong(buffer, offset + length - WORD);
        }
    }

    /**
     * Parses the raw bytes read from the central directory extra
     * field with knowledge which fields are expected to be there.
     *
     * <p>All four fields inside the zip64 extended information extra
     * field are optional and must only be present if their corresponding
     * entry inside the central directory contains the correct magic
     * value.</p>
     *
     * @param hasUncompressedSize flag to read from central directory
     * @param hasCompressedSize flag to read from central directory
     * @param hasRelativeHeaderOffset flag to read from central directory
     * @param hasDiskStart flag to read from central directory
     * @throws ZipException on error
     */
    public void reparseCentralDirectoryData(final boolean hasUncompressedSize,
                                            final boolean hasCompressedSize,
                                            final boolean hasRelativeHeaderOffset,
                                            final boolean hasDiskStart)
        throws ZipException {
        if (rawCentralDirectoryData != null) {
            final int expectedLength = (hasUncompressedSize ? DWORD : 0)
                    + (hasCompressedSize ? DWORD : 0)
                    + (hasRelativeHeaderOffset ? DWORD : 0)
                    + (hasDiskStart ? WORD : 0);
            if (rawCentralDirectoryData.length < expectedLength) {
                throw new ZipException("Central directory zip64 extended"
                                       + " information extra field's length"
                                       + " doesn't match central directory"
                                       + " data.  Expected length "
                                       + expectedLength + " but is "
                                       + rawCentralDirectoryData.length);
            }
            int offset = 0;
            if (hasUncompressedSize) {
                size = new ZipEightByteInteger(rawCentralDirectoryData, offset);
                offset += DWORD;
            }
            if (hasCompressedSize) {
                compressedSize = new ZipEightByteInteger(rawCentralDirectoryData,
                                                         offset);
                offset += DWORD;
            }
            if (hasRelativeHeaderOffset) {
                relativeHeaderOffset =
                    new ZipEightByteInteger(rawCentralDirectoryData, offset);
                offset += DWORD;
            }
            if (hasDiskStart) {
                diskStart = new ZipLong(rawCentralDirectoryData, offset);
                offset += WORD; // NOSONAR - assignment as documentation
            }
        }
    }

    /**
     * The uncompressed size stored in this extra field.
     * @return The uncompressed size stored in this extra field.
     */
    public ZipEightByteInteger getSize() {
        return size;
    }

    /**
     * The uncompressed size stored in this extra field.
     * @param size The uncompressed size stored in this extra field.
     */
    public void setSize(final ZipEightByteInteger size) {
        this.size = size;
    }

    /**
     * The compressed size stored in this extra field.
     * @return The compressed size stored in this extra field.
     */
    public ZipEightByteInteger getCompressedSize() {
        return compressedSize;
    }

    /**
     * The compressed size stored in this extra field.
     * @param compressedSize The compressed size stored in this extra field.
     */
    public void setCompressedSize(final ZipEightByteInteger compressedSize) {
        this.compressedSize = compressedSize;
    }

    /**
     * The relative header offset stored in this extra field.
     * @return The relative header offset stored in this extra field.
     */
    public ZipEightByteInteger getRelativeHeaderOffset() {
        return relativeHeaderOffset;
    }

    /**
     * The relative header offset stored in this extra field.
     * @param rho The relative header offset stored in this extra field.
     */
    public void setRelativeHeaderOffset(final ZipEightByteInteger rho) {
        relativeHeaderOffset = rho;
    }

    /**
     * The disk start number stored in this extra field.
     * @return The disk start number stored in this extra field.
     */
    public ZipLong getDiskStartNumber() {
        return diskStart;
    }

    /**
     * The disk start number stored in this extra field.
     * @param ds The disk start number stored in this extra field.
     */
    public void setDiskStartNumber(final ZipLong ds) {
        diskStart = ds;
    }

    /**
     * Writes the size fields (when present) to the start of {@code data}
     * and returns the number of bytes written.
     */
    private int addSizes(final byte[] data) {
        int off = 0;
        if (size != null) {
            System.arraycopy(size.getBytes(), 0, data, 0, DWORD);
            off += DWORD;
        }
        if (compressedSize != null) {
            System.arraycopy(compressedSize.getBytes(), 0, data, off, DWORD);
            off += DWORD;
        }
        return off;
    }
}
package org.redisson;

import static org.assertj.core.api.Assertions.assertThat;

import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;

import org.junit.Test;
import org.redisson.api.RStream;
import org.redisson.api.StreamId;

/**
 * Integration tests for {@link RStream} covering add, size, range and
 * (blocking) read operations. Requires the running Redis instance
 * provided by {@code BaseTest}.
 */
public class RedissonStreamTest extends BaseTest {

    // Entries added later must come back first when ranging in reverse order.
    @Test
    public void testRangeReversed() {
        RStream<String, String> stream = redisson.getStream("test");
        assertThat(stream.size()).isEqualTo(0);

        Map<String, String> entries1 = new HashMap<>();
        entries1.put("1", "11");
        entries1.put("3", "31");
        stream.addAll(new StreamId(1), entries1, 1, false);
        assertThat(stream.size()).isEqualTo(1);

        Map<String, String> entries2 = new HashMap<>();
        entries2.put("5", "55");
        entries2.put("7", "77");
        stream.addAll(new StreamId(2), entries2, 1, false);

        Map<StreamId, Map<String, String>> r2 = stream.rangeReversed(10, StreamId.MAX, StreamId.MIN);
        assertThat(r2.keySet()).containsExactly(new StreamId(2), new StreamId(1));
        assertThat(r2.get(new StreamId(1))).isEqualTo(entries1);
        assertThat(r2.get(new StreamId(2))).isEqualTo(entries2);
    }

    // Range queries honor both a bounded id window and the full MIN..MAX window.
    @Test
    public void testRange() {
        RStream<String, String> stream = redisson.getStream("test");
        assertThat(stream.size()).isEqualTo(0);

        Map<String, String> entries1 = new HashMap<>();
        entries1.put("1", "11");
        entries1.put("3", "31");
        stream.addAll(new StreamId(1), entries1, 1, false);
        assertThat(stream.size()).isEqualTo(1);

        Map<String, String> entries2 = new HashMap<>();
        entries2.put("5", "55");
        entries2.put("7", "77");
        stream.addAll(new StreamId(2), entries2, 1, false);

        // window [0, 1] only contains the first entry
        Map<StreamId, Map<String, String>> r = stream.range(10, new StreamId(0), new StreamId(1));
        assertThat(r).hasSize(1);
        assertThat(r.get(new StreamId(1))).isEqualTo(entries1);

        // full window returns both entries in insertion order
        Map<StreamId, Map<String, String>> r2 = stream.range(10, StreamId.MIN, StreamId.MAX);
        assertThat(r2.keySet()).containsExactly(new StreamId(1), new StreamId(2));
        assertThat(r2.get(new StreamId(1))).isEqualTo(entries1);
        assertThat(r2.get(new StreamId(2))).isEqualTo(entries2);
    }

    // A blocking multi-key read should unblock once another thread adds an entry.
    @Test
    public void testPollMultiKeys() {
        RStream<String, String> stream = redisson.getStream("test");

        Map<String, String> entries1 = new LinkedHashMap<>();
        entries1.put("1", "11");
        entries1.put("3", "31");

        // producer thread publishes after ~2s while the main thread blocks in read()
        Thread t = new Thread() {
            @Override
            public void run() {
                try {
                    Thread.sleep(2000);
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }

                stream.addAll(new StreamId(1), entries1);
            }
        };
        t.start();

        long start = System.currentTimeMillis();
        Map<String, Map<StreamId, Map<String, String>>> s = stream.read(2, 5, TimeUnit.SECONDS, new StreamId(0), "test1", StreamId.NEWEST);
        assertThat(System.currentTimeMillis() - start).isBetween(1900L, 2200L);
        assertThat(s).hasSize(1);
        assertThat(s.get("test").get(new StreamId(1))).isEqualTo(entries1);
    }

    // Single-key blocking read; same producer/consumer timing as above.
    @Test
    public void testPoll() {
        RStream<String, String> stream = redisson.getStream("test");

        Map<String, String> entries1 = new LinkedHashMap<>();
        entries1.put("1", "11");
        entries1.put("3", "31");

        Thread t = new Thread() {
            @Override
            public void run() {
                try {
                    Thread.sleep(2000);
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }

                stream.addAll(new StreamId(1), entries1);
            }
        };
        t.start();

        long start = System.currentTimeMillis();
        Map<StreamId, Map<String, String>> s = stream.read(2, 5, TimeUnit.SECONDS, new StreamId(0));
        assertThat(System.currentTimeMillis() - start).isBetween(1900L, 2200L);
        assertThat(s).hasSize(1);
        assertThat(s.get(new StreamId(1))).isEqualTo(entries1);
    }

    // size() counts stream entries, not field/value pairs.
    @Test
    public void testSize() {
        RStream<String, String> stream = redisson.getStream("test");
        assertThat(stream.size()).isEqualTo(0);

        Map<String, String> entries1 = new HashMap<>();
        entries1.put("1", "11");
        entries1.put("3", "31");
        stream.addAll(new StreamId(1), entries1, 1, false);
        assertThat(stream.size()).isEqualTo(1);

        Map<String, String> entries2 = new HashMap<>();
        entries2.put("5", "55");
        entries2.put("7", "77");
        stream.addAll(new StreamId(2), entries2, 1, false);
        assertThat(stream.size()).isEqualTo(2);
    }

    // Multi-key read over two empty streams yields an empty result.
    @Test
    public void testReadMultiKeysEmpty() {
        RStream<String, String> stream = redisson.getStream("test2");
        Map<String, Map<StreamId, Map<String, String>>> s = stream.read(10, new StreamId(0), "test1", new StreamId(0));
        assertThat(s).isEmpty();
    }

    // Multi-key read returns entries from both streams keyed by stream name.
    @Test
    public void testReadMultiKeys() {
        RStream<String, String> stream1 = redisson.getStream("test1");
        Map<String, String> entries1 = new LinkedHashMap<>();
        entries1.put("1", "11");
        entries1.put("2", "22");
        entries1.put("3", "33");
        stream1.addAll(entries1);

        RStream<String, String> stream2 = redisson.getStream("test2");
        Map<String, String> entries2 = new LinkedHashMap<>();
        entries2.put("4", "44");
        entries2.put("5", "55");
        entries2.put("6", "66");
        stream2.addAll(entries2);

        Map<String, Map<StreamId, Map<String, String>>> s = stream2.read(10, new StreamId(0), "test1", new StreamId(0));
        assertThat(s).hasSize(2);
        assertThat(s.get("test1").values().iterator().next()).isEqualTo(entries1);
        assertThat(s.get("test2").values().iterator().next()).isEqualTo(entries2);
    }

    // Reading from id 0 returns every entry added with an explicit id.
    @Test
    public void testReadMulti() {
        RStream<String, String> stream = redisson.getStream("test");

        Map<String, String> entries1 = new LinkedHashMap<>();
        entries1.put("1", "11");
        entries1.put("3", "31");
        stream.addAll(new StreamId(1), entries1, 1, false);

        Map<String, String> entries2 = new LinkedHashMap<>();
        entries2.put("5", "55");
        entries2.put("7", "77");
        stream.addAll(new StreamId(2), entries2, 1, false);

        Map<String, String> entries3 = new LinkedHashMap<>();
        entries3.put("15", "05");
        entries3.put("17", "07");
        stream.addAll(new StreamId(3), entries3, 1, false);

        Map<StreamId, Map<String, String>> result = stream.read(10, new StreamId(0, 0));
        assertThat(result).hasSize(3);
        assertThat(result.get(new StreamId(4))).isNull();
        assertThat(result.get(new StreamId(1))).isEqualTo(entries1);
        assertThat(result.get(new StreamId(2))).isEqualTo(entries2);
        assertThat(result.get(new StreamId(3))).isEqualTo(entries3);
    }

    // Reading after a single trimmed add yields exactly that entry.
    @Test
    public void testReadSingle() {
        RStream<String, String> stream = redisson.getStream("test");
        Map<String, String> entries1 = new LinkedHashMap<>();
        entries1.put("1", "11");
        entries1.put("3", "31");
        stream.addAll(new StreamId(1), entries1, 1, true);

        Map<StreamId, Map<String, String>> result = stream.read(10, new StreamId(0, 0));
        assertThat(result).hasSize(1);
        assertThat(result.get(new StreamId(4))).isNull();
        assertThat(result.get(new StreamId(1))).isEqualTo(entries1);
    }

    // Non-blocking read on an empty stream returns an empty map.
    @Test
    public void testReadEmpty() {
        RStream<String, String> stream2 = redisson.getStream("test");
        Map<StreamId, Map<String, String>> result2 = stream2.read(10, new StreamId(0, 0));
        assertThat(result2).isEmpty();
    }

    // add() with auto-generated id returns a non-negative two-part id.
    @Test
    public void testAdd() {
        RStream<String, String> stream = redisson.getStream("test1");
        StreamId s = stream.add("12", "33");
        assertThat(s.getId0()).isNotNegative();
        assertThat(s.getId1()).isNotNegative();
        assertThat(stream.size()).isEqualTo(1);
    }

    // addAll() with explicit ids; each call creates one stream entry.
    @Test
    public void testAddAll() {
        RStream<String, String> stream = redisson.getStream("test1");
        assertThat(stream.size()).isEqualTo(0);

        Map<String, String> entries = new HashMap<>();
        entries.put("6", "61");
        entries.put("4", "41");
        stream.addAll(new StreamId(12, 42), entries, 10, false);
        assertThat(stream.size()).isEqualTo(1);

        entries.clear();
        entries.put("1", "11");
        entries.put("3", "31");
        stream.addAll(new StreamId(Long.MAX_VALUE), entries, 1, false);
        assertThat(stream.size()).isEqualTo(2);
    }
}
package org.apache.rya.mongodb;

/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import java.util.List;
import java.util.Properties;

import org.apache.accumulo.core.security.Authorizations;
import org.apache.hadoop.conf.Configuration;
import org.apache.rya.api.RdfCloudTripleStoreConfiguration;

import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import com.mongodb.MongoClient;

/**
 * Rya configuration backed by MongoDB. Exposes typed accessors over the
 * underlying Hadoop {@link Configuration} keys used to connect to Mongo
 * (host, port, database, collection prefix, credentials) and to register
 * secondary indexers.
 */
public class MongoDBRdfConfiguration extends RdfCloudTripleStoreConfiguration {
    // Configuration property keys.
    public static final String MONGO_INSTANCE = "mongo.db.instance";
    public static final String MONGO_INSTANCE_PORT = "mongo.db.port";
    public static final String MONGO_GEO_MAXDISTANCE = "mongo.geo.maxdist";
    public static final String MONGO_DB_NAME = "mongo.db.name";
    public static final String MONGO_COLLECTION_PREFIX = "mongo.db.collectionprefix";
    public static final String MONGO_USER = "mongo.db.user";
    public static final String MONGO_USER_PASSWORD = "mongo.db.userpassword";
    public static final String CONF_ADDITIONAL_INDEXERS = "ac.additional.indexers";
    public static final String USE_MOCK_MONGO = ".useMockInstance";

    // Optional pre-configured client; only set via setMongoClient, may be null.
    private MongoClient mongoClient;

    public MongoDBRdfConfiguration() {
        super();
    }

    public MongoDBRdfConfiguration(final Configuration other) {
        super(other);
    }

    /**
     * Creates a MongoRdfConfiguration object from a Properties file. This
     * method assumes that all values in the Properties file are Strings and
     * that the Properties file uses the keys below.
     *
     * <br>
     * <ul>
     * <li>"mongo.auths" - String of Mongo authorizations. Empty auths used by default.
     * <li>"mongo.visibilities" - String of Mongo visibilities assigned to ingested triples.
     * <li>"mongo.user" - Mongo user. Empty by default.
     * <li>"mongo.password" - Mongo password. Empty by default.
     * <li>"mongo.host" - Mongo host. Default host is "localhost"
     * <li>"mongo.port" - Mongo port. Default port is "27017".
     * <li>"mongo.db.name" - Name of MongoDB. Default name is "rya_triples".
     * <li>"mongo.collection.prefix" - Mongo collection prefix. Default is "rya_".
     * <li>"mongo.rya.prefix" - Prefix for Mongo Rya instance. Same as value of "mongo.collection.prefix".
     * <li>"use.mock" - Use an embedded Mongo instance as back-end for Rya instance. False by default.
     * <li>"use.display.plan" - Display query plan during evaluation. Useful for debugging. True by default.
     * <li>"use.inference" - Use backward chaining inference during query. False by default.
     * </ul>
     * <br>
     *
     * @param props
     *            - Properties file containing Mongo specific configuration
     *            parameters
     * @return MongoRdfConfiguration with properties set
     */
    public static MongoDBRdfConfiguration fromProperties(final Properties props) {
        return MongoDBRdfConfigurationBuilder.fromProperties(props);
    }

    public MongoDBRdfConfigurationBuilder getBuilder() {
        return new MongoDBRdfConfigurationBuilder();
    }

    @Override
    public MongoDBRdfConfiguration clone() {
        return new MongoDBRdfConfiguration(this);
    }

    /**
     * @return the configured authorizations, or the catch-all
     * authorizations when none are set
     */
    public Authorizations getAuthorizations() {
        final String[] auths = getAuths();
        if (auths == null || auths.length == 0) {
            return MongoDbRdfConstants.ALL_AUTHORIZATIONS;
        }
        return new Authorizations(auths);
    }

    /**
     * @return name of Mongo Collection containing Rya triples
     */
    public String getTriplesCollectionName() {
        return this.get(MONGO_COLLECTION_PREFIX, "rya") + "_triples";
    }

    /**
     * @return name of Mongo Collection
     */
    public String getCollectionName() {
        return this.get(MONGO_COLLECTION_PREFIX, "rya");
    }

    /**
     * Sets Mongo Collection name
     * @param name - name of Mongo Collection to connect to
     */
    public void setCollectionName(final String name) {
        Preconditions.checkNotNull(name);
        this.set(MONGO_COLLECTION_PREFIX, name);
    }

    /**
     * @return name of Mongo Host
     */
    public String getMongoInstance() {
        return this.get(MONGO_INSTANCE, "localhost");
    }

    /**
     * Sets name of Mongo Host
     * @param name - name of Mongo Host to connect to
     */
    public void setMongoInstance(final String name) {
        Preconditions.checkNotNull(name);
        this.set(MONGO_INSTANCE, name);
    }

    /**
     * @return port that Mongo is running on
     */
    public String getMongoPort() {
        return this.get(MONGO_INSTANCE_PORT, AbstractMongoDBRdfConfigurationBuilder.DEFAULT_MONGO_PORT);
    }

    /**
     * Sets port that Mongo will run on
     * @param name - Mongo port to connect to
     */
    public void setMongoPort(final String name) {
        Preconditions.checkNotNull(name);
        this.set(MONGO_INSTANCE_PORT, name);
    }

    /**
     * @return name of MongoDB
     */
    public String getMongoDBName() {
        return this.get(MONGO_DB_NAME, "rya");
    }

    /**
     * Sets name of MongoDB
     * @param name - name of MongoDB to connect to
     */
    public void setMongoDBName(final String name) {
        Preconditions.checkNotNull(name);
        this.set(MONGO_DB_NAME, name);
    }

    /**
     * Tells Rya to use an embedded Mongo instance as its backing
     * if set to true.  By default this is set to false.
     * @param useMock whether to use an embedded Mongo instance
     */
    public void setUseMock(final boolean useMock) {
        this.setBoolean(USE_MOCK_MONGO, useMock);
    }

    /**
     * Get whether an embedded Mongo is being used as the backing
     * for Rya.
     * @return true if embedded Mongo is being used, and false otherwise
     */
    public boolean getUseMock() {
        return getBoolean(USE_MOCK_MONGO, false);
    }

    /**
     * @return name of NameSpace Mongo Collection
     */
    public String getNameSpacesCollectionName() {
        return this.get(MONGO_COLLECTION_PREFIX, "rya") + "_ns";
    }

    /**
     * Sets name of Mongo User
     * @param user - name of Mongo user to connect to
     */
    public void setMongoUser(final String user) {
        Preconditions.checkNotNull(user);
        set(MONGO_USER, user);
    }

    /**
     * @return name of Mongo user
     */
    public String getMongoUser() {
        return get(MONGO_USER);
    }

    /**
     * Sets Mongo password
     * @param password - password to connect to Mongo
     */
    public void setMongoPassword(final String password) {
        Preconditions.checkNotNull(password);
        set(MONGO_USER_PASSWORD, password);
    }

    /**
     * @return Mongo password
     */
    public String getMongoPassword() {
        return get(MONGO_USER_PASSWORD);
    }

    /**
     * Registers additional secondary indexers by storing their class
     * names under {@link #CONF_ADDITIONAL_INDEXERS}.
     * @param indexers indexer classes to instantiate at runtime
     */
    public void setAdditionalIndexers(final Class<? extends MongoSecondaryIndex>... indexers) {
        final List<String> strs = Lists.newArrayList();
        for (final Class<?> ai : indexers){
            strs.add(ai.getName());
        }
        setStrings(CONF_ADDITIONAL_INDEXERS, strs.toArray(new String[]{}));
    }

    /**
     * @return instantiated secondary indexers registered via
     * {@link #setAdditionalIndexers}
     */
    public List<MongoSecondaryIndex> getAdditionalIndexers() {
        return getInstances(CONF_ADDITIONAL_INDEXERS, MongoSecondaryIndex.class);
    }

    /**
     * Sets the pre-built Mongo client this configuration should hand out.
     * @param client - the client to use; must not be null
     */
    public void setMongoClient(final MongoClient client) {
        Preconditions.checkNotNull(client);
        this.mongoClient = client;
    }

    /**
     * @return the Mongo client set via {@link #setMongoClient}, or null
     * if none was set
     */
    public MongoClient getMongoClient() {
        return mongoClient;
    }
}
/* * Copyright 2016 Crown Copyright * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package stroom.widget.tooltip.client.presenter; import com.google.gwt.dom.client.Style.Unit; import com.google.gwt.safecss.shared.SafeStyles; import com.google.gwt.safecss.shared.SafeStylesBuilder; import com.google.gwt.safehtml.shared.SafeHtml; import com.google.gwt.safehtml.shared.SafeHtmlBuilder; import com.google.gwt.safehtml.shared.SafeHtmlUtils; import java.util.Objects; import java.util.function.Consumer; import java.util.function.Function; public final class TooltipUtil { public static final SafeHtml NON_BREAKING_SPACE = SafeHtmlUtils.fromSafeConstant("&nbsp;"); public static final SafeHtml EN_SPACE = SafeHtmlUtils.fromSafeConstant("&ensp;"); public static final SafeHtml EM_SPACE = SafeHtmlUtils.fromSafeConstant("&emsp;"); private static final SafeHtml BREAK = SafeHtmlUtils.fromSafeConstant("<br/>"); private static final SafeHtml SEPARATOR = SafeHtmlUtils.fromSafeConstant("<hr/>"); private static final SafeHtml ITAlIC_OPEN = SafeHtmlUtils.fromSafeConstant("<i>"); private static final SafeHtml ITAlIC_CLOSE = SafeHtmlUtils.fromSafeConstant("</i>"); private static final SafeHtml BOLD_OPEN = SafeHtmlUtils.fromSafeConstant("<b>"); private static final SafeHtml BOLD_CLOSE = SafeHtmlUtils.fromSafeConstant("</b>"); private static final SafeHtml CODE_OPEN = SafeHtmlUtils.fromSafeConstant("<code>"); private static final SafeHtml CODE_CLOSE = SafeHtmlUtils.fromSafeConstant("</code>"); private 
static final SafeHtml PARA_OPEN = SafeHtmlUtils.fromSafeConstant("<p>"); private static final SafeHtml PARA_CLOSE = SafeHtmlUtils.fromSafeConstant("</p>"); private static final SafeHtml DIV_OPEN = SafeHtmlUtils.fromSafeConstant("<div>"); private static final SafeHtml DIV_CLOSE = SafeHtmlUtils.fromSafeConstant("</div>"); private static final SafeHtml SPAN_CLOSE = SafeHtmlUtils.fromSafeConstant("</span>"); private static final SafeHtml BLANK = SafeHtmlUtils.fromString(""); private TooltipUtil() { // Utility class. } public static SafeHtml italicText(final Object value) { return withFormatting(value, ITAlIC_OPEN, ITAlIC_CLOSE); } public static SafeHtml boldText(final Object value) { return withFormatting(value, BOLD_OPEN, BOLD_CLOSE); } public static SafeHtml styledSpan(final Object value, final Consumer<SafeStylesBuilder> stylesBuilderConsumer) { SafeStylesBuilder builder = new SafeStylesBuilder(); if (stylesBuilderConsumer != null) { stylesBuilderConsumer.accept(builder); } return styledSpan(value, builder.toSafeStyles()); } public static SafeHtml styledSpan(final Object value, final SafeStyles safeStyles) { return withFormatting( value, SafeHtmlUtils.fromTrustedString("<span style=\"" + safeStyles.asString() + "\">"), SPAN_CLOSE); } public static SafeHtml styledParagraph(final Object value, final Consumer<SafeStylesBuilder> stylesBuilderConsumer) { SafeStylesBuilder builder = new SafeStylesBuilder(); if (stylesBuilderConsumer != null) { stylesBuilderConsumer.accept(builder); } return styledParagraph(value, builder.toSafeStyles()); } public static SafeHtml styledParagraph(final Object value, final SafeStyles safeStyles) { return withFormatting( value, SafeHtmlUtils.fromTrustedString("<p style=\"" + safeStyles.asString() + "\">"), PARA_CLOSE); } public static SafeHtml styledDiv(final Object value, final Consumer<SafeStylesBuilder> stylesBuilderConsumer) { SafeStylesBuilder builder = new SafeStylesBuilder(); if (stylesBuilderConsumer != null) { 
stylesBuilderConsumer.accept(builder); } return styledDiv(value, builder.toSafeStyles()); } public static SafeHtml styledDiv(final Object value, final SafeStyles safeStyles) { return withFormatting( value, SafeHtmlUtils.fromTrustedString("<div style=\"" + safeStyles.asString() + "\">"), DIV_CLOSE); } public static SafeHtml boldItalicText(final Object value) { return withFormatting( value, new SafeHtmlBuilder() .append(BOLD_OPEN) .append(ITAlIC_OPEN) .toSafeHtml(), new SafeHtmlBuilder() .append(BOLD_CLOSE) .append(ITAlIC_CLOSE) .toSafeHtml()); } public static SafeHtml fixedWidthText(final Object value) { return withFormatting(value, CODE_OPEN, CODE_CLOSE); } private static SafeHtml withFormatting(final Object value, final SafeHtml openTag, final SafeHtml closeTag) { if (value != null) { SafeHtmlBuilder safeHtmlBuilder = new SafeHtmlBuilder() .append(openTag); if (value instanceof SafeHtml) { safeHtmlBuilder.append((SafeHtml) value); } else { String str = String.valueOf(value); if (str.length() > 0) { safeHtmlBuilder.appendEscaped(str); } else { safeHtmlBuilder.append(BLANK); } } return safeHtmlBuilder .append(closeTag) .toSafeHtml(); } else { return BLANK; } } public static Builder builder() { return new Builder(); } public static Builder builder(final Consumer<SafeStylesBuilder> stylesBuilderConsumer) { return new Builder(stylesBuilderConsumer); } private static SafeHtml objectToSafeHtml(final Object value) { final SafeHtml safeHtml; if (value == null) { safeHtml = BLANK; } else if (value instanceof SafeHtml) { safeHtml = (SafeHtml) value; } else { safeHtml = SafeHtmlUtils.fromString(String.valueOf(value)); } return safeHtml; } // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ public static final class Builder { private final SafeHtmlBuilder buffer; private final Consumer<SafeStylesBuilder> safeStylesBuilderConsumer; private Builder() { this(null); } private Builder(final Consumer<SafeStylesBuilder> stylesBuilderConsumer) { 
this.buffer = new SafeHtmlBuilder(); this.safeStylesBuilderConsumer = stylesBuilderConsumer; } public Builder addHeading(final String heading) { buffer.append(BOLD_OPEN); buffer.appendEscaped(heading); buffer.append(BOLD_CLOSE); buffer.append(BREAK); return this; } public Builder addLine(final String heading, final Object value) { addLine(heading, value, false); return this; } public Builder addLine(final String heading, final Object value, final boolean showBlank) { if (value != null) { final String s = String.valueOf(value); if (s.length() > 0 || showBlank) { buffer.appendEscaped(heading); buffer.appendEscaped(" : "); buffer.appendEscaped(s); buffer.append(BREAK); } } else { if (showBlank) { buffer.appendEscaped(heading); buffer.appendEscaped(": "); buffer.append(BREAK); } } return this; } /** * Add the text followed by a break tag */ public Builder addLine(final String value) { if (value != null && !value.isEmpty()) { buffer.appendEscaped(value); buffer.append(BREAK); } return this; } /** * Add the text inside paragraph tags */ public Builder addParagraph(final String value) { if (value != null && !value.isEmpty()) { buffer.append(PARA_OPEN); buffer.appendEscaped(value); buffer.append(PARA_CLOSE); } return this; } public Builder addBreak() { buffer.append(BREAK); return this; } public Builder addNonBreakingSpace() { buffer.append(NON_BREAKING_SPACE); return this; } public Builder addEnSpace() { buffer.append(EN_SPACE); return this; } public Builder addEmSpace() { buffer.append(EM_SPACE); return this; } public Builder addSafeHtml(final SafeHtml safeHtml) { buffer.append(safeHtml); return this; } public Builder addSeparator() { buffer.append(SEPARATOR); return this; } public Builder appendWithoutBreak(final String value) { if (value != null && !value.isEmpty()) { buffer.appendEscaped(value); } return this; } public Builder appendWithoutBreak(final SafeHtml value) { if (value != null && !value.asString().isEmpty()) { buffer.append(value); } return this; } public 
/**
 * Appends an HTML anchor ({@code <a>}) without a trailing line break.
 * The URL is HTML-escaped before being embedded in the {@code href} attribute and the
 * (optional) title is appended as escaped link text. The link opens in a new tab
 * ({@code target="_blank"}).
 */
Builder appendLinkWithoutBreak(final String url, final String title) {
    Objects.requireNonNull(url);
    // Escape the URL so it is safe inside the double-quoted href attribute.
    String escapedUrl = SafeHtmlUtils.htmlEscape(url);
    buffer.append(SafeHtmlUtils.fromTrustedString(
            "<a href=\"" + escapedUrl + "\" target=\"_blank\">"));
    if (title != null && !title.isEmpty()) {
        buffer.appendEscaped(title);
    }
    buffer.appendHtmlConstant("</a>");
    return this;
}

/**
 * Appends a two-column table built by the supplied function.
 * The function receives a fresh {@link TableBuilder2} and must return the finished table.
 */
public Builder addTwoColTable(Function<TableBuilder2, SafeHtml> tableBuilderFunc) {
    TableBuilder2 tableBuilder = new TableBuilder2();
    buffer.append(tableBuilderFunc.apply(tableBuilder));
    return this;
}

/**
 * Appends a three-column table built by the supplied function.
 * The function receives a fresh {@link TableBuilder3} and must return the finished table.
 */
public Builder addThreeColTable(Function<TableBuilder3, SafeHtml> tableBuilderFunc) {
    TableBuilder3 tableBuilder = new TableBuilder3();
    buffer.append(tableBuilderFunc.apply(tableBuilder));
    return this;
}

/**
 * Builds the final {@link SafeHtml}: the accumulated content is wrapped in a styled
 * {@code <div>} whose styles are contributed by the configured styles consumer (if any).
 */
public SafeHtml build() {
    final SafeHtml innerHtml = buffer.toSafeHtml();
    final SafeStylesBuilder safeStylesBuilder = new SafeStylesBuilder();
    // safeStylesBuilderConsumer is configured elsewhere on this builder; when present it
    // contributes the styles applied to the wrapping div.
    if (safeStylesBuilderConsumer != null) {
        safeStylesBuilderConsumer.accept(safeStylesBuilder);
    }
    final SafeHtmlBuilder outerHtml = new SafeHtmlBuilder();
    // styledDiv is a helper defined elsewhere in this file; presumably it wraps the inner
    // HTML in a <div> carrying the given styles — TODO confirm.
    outerHtml.append(styledDiv(innerHtml, safeStylesBuilder.toSafeStyles()));
    return outerHtml.toSafeHtml();
}
}

// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

/**
 * Fluent builder for a simple two-column HTML table.
 * Call the {@code addHeaderRow}/{@code addRow}/{@code addBlankRow} methods to accumulate
 * rows, then {@link #build()} to obtain the finished {@link SafeHtml}.
 */
public static class TableBuilder2 {

    // Accumulates the table markup; opened with <table> here and closed in build().
    private final SafeHtmlBuilder buffer;

    public TableBuilder2() {
        buffer = new SafeHtmlBuilder()
                .appendHtmlConstant("<table>");
    }

    /** Adds a header row with only the first column populated. */
    public TableBuilder2 addHeaderRow(final String key) {
        return addHeaderRow(key, "");
    }

    /** Adds a header row of two left-aligned, bolded cells. */
    public TableBuilder2 addHeaderRow(final String key, final String value) {
        // boldText is a helper defined elsewhere in this file; presumably it wraps the
        // text in a bold element — TODO confirm.
        buffer
                .appendHtmlConstant("<tr><th align=\"left\">")
                .append(boldText(key))
                .appendHtmlConstant("</th><th align=\"left\">")
                .append(boldText(value))
                .appendHtmlConstant("</th></tr>");
        return this;
    }

    /** Adds a header row with only the first column populated (pre-built HTML variant). */
    public TableBuilder2 addHeaderRow(final SafeHtml key) {
        // BLANK is a constant defined elsewhere in this file; presumably empty SafeHtml.
        return addHeaderRow(key, BLANK);
    }

    /** Adds a header row of two left-aligned cells from pre-built HTML. */
    public TableBuilder2 addHeaderRow(final SafeHtml key, final SafeHtml value) {
        buffer
                .appendHtmlConstant("<tr><th align=\"left\">")
                .append(key)
                .appendHtmlConstant("</th><th align=\"left\">")
                .append(value)
                .appendHtmlConstant("</th></tr>");
        return this;
    }

    /** Adds an empty spacer row (non-breaking space in each cell). */
    public TableBuilder2 addBlankRow() {
        buffer.appendHtmlConstant("<tr><td>&nbsp;</td><td>&nbsp;</td></tr>");
        return this;
    }

    // public TableBuilder2 addRow(final Object key) {
    //     return addRow(key, null, true);
    // }

    /** Adds a data row; the row is skipped entirely when the value renders empty. */
    public TableBuilder2 addRow(final Object key, final Object value) {
        return addRow(key, value, false, null);
    }

    /** Adds a data row; {@code showBlank} forces the row out even for an empty value. */
    public TableBuilder2 addRow(final Object key, final Object value, final boolean showBlank) {
        return addRow(key, value, showBlank, null);
    }

    /**
     * Adds a data row.
     *
     * Behavior visible here: when {@code value} renders to empty HTML and
     * {@code showBlank} is false, no row is emitted at all. When {@code value} is null
     * and {@code showBlank} is true, the key is emitted with an empty second cell.
     * {@code safeStyles}, when supplied, replaces the default key-cell padding.
     */
    public TableBuilder2 addRow(final Object key, final Object value, final boolean showBlank,
                                final SafeStyles safeStyles) {
        Objects.requireNonNull(key);
        // objectToSafeHtml is a helper defined elsewhere in this file; presumably it
        // converts arbitrary objects to escaped SafeHtml — TODO confirm.
        final SafeHtml safeKey = objectToSafeHtml(key);
        final String cellStyles = safeStyles != null
                ? safeStyles.asString()
                : "padding-right: 5px;";
        if (value != null) {
            final SafeHtml safeValue = value instanceof SafeHtml
                    ? (SafeHtml) value
                    : objectToSafeHtml(value);
            if (safeValue.asString().length() > 0 || showBlank) {
                // NOTE(review): appendHtmlConstant is called with a runtime-concatenated
                // string; GWT documents this method for compile-time constants only —
                // verify this is intended (cellStyles comes from SafeStyles, so it is
                // presumably already safe).
                buffer
                        .appendHtmlConstant("<tr><td style=\"" + cellStyles + "\">")
                        .append(safeKey)
                        .appendHtmlConstant("</td>")
                        .appendHtmlConstant("<td>")
                        .append(safeValue)
                        .appendHtmlConstant("</td></tr>");
            }
        } else {
            if (showBlank) {
                buffer
                        .appendHtmlConstant("<tr><td style=\"" + cellStyles + "\">")
                        .append(safeKey)
                        .appendHtmlConstant("</td>")
                        .appendHtmlConstant("<td/></tr>");
            }
        }
        return this;
    }

    /** Closes the table and wraps it in a div that makes its text selectable. */
    public SafeHtml build() {
        buffer.appendHtmlConstant("</table>");
        // Make the text selectable, e.g. for copy/pasting
        return new SafeHtmlBuilder()
                .appendHtmlConstant("<div style=\"user-select: text;\">")
                .append(buffer.toSafeHtml())
                .appendHtmlConstant("</div>")
                .toSafeHtml();
    }
}

// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

/**
 * Fluent builder for a simple three-column HTML table.
 * Mirrors {@link TableBuilder2}, with per-cell right padding of 8px on every data cell.
 */
public static class TableBuilder3 {

    // Accumulates the table markup; opened with <table> here and closed in build().
    private final SafeHtmlBuilder buffer;

    public TableBuilder3() {
        buffer = new SafeHtmlBuilder()
                .appendHtmlConstant("<table>");
    }

    /** Adds a header row with only the first column populated. */
    public TableBuilder3 addHeaderRow(final String col1) {
        return addHeaderRow(col1, "", "");
    }

    /** Adds a header row of three bolded cells. */
    public TableBuilder3 addHeaderRow(final String col1, final String col2, final String col3) {
        return addHeaderRow(
                boldText(col1),
                boldText(col2),
                boldText(col3));
    }

    /** Adds a header row with only the first column populated (pre-built HTML variant). */
    public TableBuilder3 addHeaderRow(final SafeHtml col1) {
        return addHeaderRow(col1, BLANK, BLANK);
    }

    /** Adds a header row of three left-aligned cells; first two carry right padding. */
    public TableBuilder3 addHeaderRow(final SafeHtml col1, final SafeHtml col2, final SafeHtml col3) {
        buffer
                .appendHtmlConstant("<tr><th align=\"left\" style=\"padding-right: 8px;\">")
                .append(col1)
                .appendHtmlConstant("</th><th align=\"left\" style=\"padding-right: 8px;\">")
                .append(col2)
                .appendHtmlConstant("</th><th align=\"left\">")
                .append(col3)
                .appendHtmlConstant("</th></tr>");
        return this;
    }

    /** Adds an empty spacer row (non-breaking space in each cell). */
    public TableBuilder3 addBlankRow() {
        buffer.appendHtmlConstant("<tr><td>&nbsp;</td><td>&nbsp;</td><td>&nbsp;</td></tr>");
        return this;
    }

    // public TableBuilder3 addRow(final Object key) {
    //     return addRow(key, null, true);
    // }

    /** Adds a data row; the row is skipped when both value columns render empty. */
    public TableBuilder3 addRow(final Object col1, final Object col2, final Object col3) {
        return addRow(col1, col2, col3, false, null);
    }

    /** Adds a data row; {@code showBlank} forces the row out even when empty. */
    public TableBuilder3 addRow(final Object col1, final Object col2, final Object col3,
                                final boolean showBlank) {
        return addRow(col1, col2, col3, showBlank, null);
    }

    /**
     * Adds a data row.
     *
     * Behavior visible here: when both {@code col2} and {@code col3} are null, the row is
     * only emitted if {@code showBlank} is true (with empty trailing cells). Supplied
     * {@code safeStyles} are appended after the default 8px right padding, so they can
     * extend or override it.
     */
    public TableBuilder3 addRow(final Object col1, final Object col2, final Object col3,
                                final boolean showBlank, final SafeStyles safeStyles) {
        Objects.requireNonNull(col1);
        final SafeHtml safeCol1 = objectToSafeHtml(col1);
        final SafeStylesBuilder cellStylesBuilder = new SafeStylesBuilder()
                .paddingRight(8, Unit.PX);
        if (safeStyles != null) {
            cellStylesBuilder.append(safeStyles);
        }
        final String cellStyles = cellStylesBuilder.toSafeStyles().asString();
        if (col2 != null || col3 != null) {
            final SafeHtml safeCol2 = objectToSafeHtml(col2);
            final SafeHtml safeCol3 = objectToSafeHtml(col3);
            if (safeCol2.asString().length() > 0 || safeCol3.asString().length() > 0 || showBlank) {
                buffer
                        .appendHtmlConstant("<tr><td style=\"" + cellStyles + "\">")
                        .append(safeCol1)
                        .appendHtmlConstant("</td>")
                        .appendHtmlConstant("<td style=\"" + cellStyles + "\">")
                        .append(safeCol2)
                        .appendHtmlConstant("</td>")
                        .appendHtmlConstant("<td style=\"" + cellStyles + "\">")
                        .append(safeCol3)
                        .appendHtmlConstant("</td></tr>");
            }
        } else {
            if (showBlank) {
                buffer
                        .appendHtmlConstant("<tr><td style=\"" + cellStyles + "\">")
                        .append(safeCol1)
                        .appendHtmlConstant("</td>")
                        .appendHtmlConstant("<td/>") // empty col2
                        .appendHtmlConstant("<td/>") // empty col3
                        .appendHtmlConstant("</tr>");
            }
        }
        return this;
    }

    /** Closes the table and wraps it in a div that makes its text selectable. */
    public SafeHtml build() {
        buffer.appendHtmlConstant("</table>");
        // Make the text selectable, e.g. for copy/pasting
        return new SafeHtmlBuilder()
                .appendHtmlConstant("<div style=\"user-select: text;\">")
                .append(buffer.toSafeHtml())
                .appendHtmlConstant("</div>")
                .toSafeHtml();
    }
}
}
package org.keyboardplaying.mapper.engine;

import java.beans.IntrospectionException;
import java.beans.PropertyDescriptor;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.util.Map;
import java.util.Objects;

import org.keyboardplaying.mapper.Defaults;
import org.keyboardplaying.mapper.annotation.Metadata;
import org.keyboardplaying.mapper.annotation.Nested;
import org.keyboardplaying.mapper.exception.MapperException;
import org.keyboardplaying.mapper.exception.MappingException;
import org.keyboardplaying.mapper.exception.ParserInitializationException;
import org.keyboardplaying.mapper.exception.ParsingException;
import org.keyboardplaying.mapper.parser.ElaborateParser;

/**
 * The mapping engine for mapping a flat map to a POJO (unmapping).
 *
 * @author Cyrille Chopelet (https://keyboardplaying.org)
 */
public class UnmappingEngine extends BaseEngine {

    private static final DefaultValueProvider DEFAULT_VALUES = new DefaultValueProvider();

    /**
     * Instantiates a new bean of specified type and unmaps metadata to it, based on the annotations in the bean.
     *
     * @param metadata the flat metadata
     * @param beanType the destination bean's type
     * @return the destination bean
     * @throws MapperException if the parser could not be initialized for a field or the mapping fails
     */
    public <T> T unmapToClass(Map<String, String> metadata, Class<T> beanType) throws MapperException {
        try {
            // Class.newInstance() is deprecated (it silently rethrows any checked exception
            // the constructor throws); getDeclaredConstructor().newInstance() wraps such
            // exceptions in InvocationTargetException instead.
            return unmapToBean(metadata, beanType.getDeclaredConstructor().newInstance());
        } catch (ReflectiveOperationException e) {
            throw new MappingException("Could not instantiate a new bean for type " + beanType.getSimpleName()
                    + ". Did you provide a public no-argument constructor?", e);
        }
    }

    /**
     * Unmaps metadata to a destination bean, based on the annotations in the bean.
     * <p/>
     * Non-annotated fields are not overwritten.
     *
     * @param metadata the flat metadata
     * @param bean the destination bean
     * @return the destination bean
     * @throws MapperException if the parser could not be initialized for a field or the mapping fails
     */
    public <T> T unmapToBean(Map<String, String> metadata, T bean) throws MapperException {
        /* Control the validity of arguments. */
        Objects.requireNonNull(bean, "The supplied bean was null.");
        Objects.requireNonNull(metadata, "The supplied metadata was null.");

        /* Now perform the unmapping. */
        performUnmapping(metadata, bean, bean.getClass());

        return bean;
    }

    /**
     * Walks the declared fields of {@code klass} (then recursively its superclasses) and
     * unmaps each field annotated with {@link Nested} or {@link Metadata}.
     *
     * @param metadata the flat metadata
     * @param bean the destination bean
     * @param klass the class whose declared fields are processed in this step
     * @throws MapperException if the parser could not be initialized for a field or the mapping fails
     */
    private <T> void performUnmapping(Map<String, String> metadata, T bean, Class<?> klass) throws MapperException {
        final Field[] fields = klass.getDeclaredFields();
        for (Field field : fields) {
            if (field.isAnnotationPresent(Nested.class)) {
                performNestedUnmapping(metadata, bean, field);
            } else if (field.isAnnotationPresent(Metadata.class)) {
                performFieldUnmapping(metadata, bean, field);
            }
        }

        /* Take care of inherited fields. */
        // getSuperclass() returns null for Object itself; guard against the NPE the
        // previous version hit when the bean was a plain Object.
        Class<?> superklass = klass.getSuperclass();
        if (superklass != null && !superklass.equals(Object.class)) {
            performUnmapping(metadata, bean, superklass);
        }
    }

    /**
     * Performs the unmapping to a field which is an inner bean (marked with the {@link Nested} annotation).
     * <p/>
     * If the nested bean is already present on the destination bean it is unmapped in place;
     * otherwise a new instance is created, unmapped, and set via the property's setter.
     *
     * @param metadata the flat metadata
     * @param bean the destination bean
     * @param field the field to set
     * @throws MapperException if the parser could not be initialized for a field or the mapping fails
     */
    private <T> void performNestedUnmapping(Map<String, String> metadata, T bean, Field field) throws MapperException {
        Nested annotation = field.getAnnotation(Nested.class);
        try {
            PropertyDescriptor descriptor = getPropertyDescriptor(bean, field);
            Object innerBean = get(bean, descriptor);
            if (innerBean == null) {
                innerBean = instantiateBeanAndUnmap(metadata, field, annotation.className());
                set(bean, descriptor, innerBean);
            } else {
                // unmap to bean
                unmapToBean(metadata, innerBean);
            }
        } catch (IllegalAccessException | InvocationTargetException | IntrospectionException e) {
            throw new MappingException("Error while unmapping nested bean " + field.getName() + " of "
                    + field.getDeclaringClass().getName(), e);
        } catch (MapperException e) {
            // A non-mandatory nested bean that fails to unmap is silently skipped by design.
            if (annotation.mandatory()) {
                throw e;
            }
            // XXX some log here would be great otherwise
        }
    }

    /**
     * Instantiates a nested bean (of the class named in the annotation, or of the field's
     * declared type when no class name is given) and unmaps the metadata to it.
     *
     * @param metadata the flat metadata
     * @param field the nested field being populated
     * @param className the optional class name from the {@link Nested} annotation
     * @return the freshly created and unmapped bean
     * @throws MapperException if the class cannot be found or the unmapping fails
     */
    private Object instantiateBeanAndUnmap(Map<String, String> metadata, Field field, String className)
            throws MapperException {
        try {
            // unmap to class
            // compilation fails when variable is inline (?!)
            Class<?> beanType = className == null || className.length() == 0
                    ? field.getType()
                    : Class.forName(className);
            return unmapToClass(metadata, beanType);
        } catch (ClassNotFoundException e) {
            throw new MappingException("Could not find class " + className + " when instantiating bean for inner field "
                    + field.getName() + " of class " + field.getDeclaringClass().getName(), e);
        }
    }

    /**
     * Performs the unmapping for a convertible-typed field.
     * <p/>
     * Resolution order: elaborate parser, then the metadata value, then the annotation's
     * default value, then a blank value if allowed; a mandatory field with no value raises.
     *
     * @param metadata the flat metadata
     * @param bean the destination bean
     * @param field the field to set
     * @throws ParserInitializationException if the parser for the field could not be initialized
     * @throws MappingException if the mapping fails
     */
    private <T> void performFieldUnmapping(Map<String, String> metadata, T bean, Field field)
            throws ParserInitializationException, MappingException {
        Metadata settings = field.getAnnotation(Metadata.class);
        Class<? extends ElaborateParser<?>> elaborate = settings.elaborate();
        String metadataName = settings.value();

        if (!ElaborateParser.None.class.equals(elaborate)) {
            // a custom setter was defined, overrides the default parser
            setElaborateField(bean, field, elaborate, metadata);
        } else if (Defaults.EMPTY.equals(metadataName)) {
            throw new MappingException("No key nor elaborate parser was provided for field " + field.getName()
                    + " of bean " + bean.getClass().getSimpleName());
        } else if (metadata.containsKey(metadataName)) {
            /* Set the value using the value provided with the metadata. */
            setField(bean, field, metadata.get(metadataName));
        } else if (!settings.defaultValue().equals(Defaults.EMPTY)) {
            /* Set the value using the provided default value. */
            setField(bean, field, settings.defaultValue());
        } else if (settings.blankDefaultValue()) {
            /* Set the field using a blank value. */
            setField(bean, field, Defaults.EMPTY);
        } else if (settings.mandatory()) {
            /* Data is absent though mandatory, raise an exception. */
            throw new MappingException("Mandatory data " + metadataName + " is missing from metadata map ("
                    + metadata.keySet().toString() + ").");
        }
        /* Otherwise, leave field as is. */
    }

    /**
     * Sets the field, using the default parser for the type of the field and the field's setter.
     * <p/>
     * This requires the bean to respect the bean notation.
     *
     * @param bean the destination bean
     * @param field the field to set
     * @param value the non-converted value for the field
     * @throws ParserInitializationException if the parser for the field could not be initialized
     * @throws MappingException if the mapping fails
     */
    private <T> void setField(T bean, Field field, String value)
            throws ParserInitializationException, MappingException {
        try {
            set(bean, field,
                    value == null ? DEFAULT_VALUES.getDefaultValue(field.getType())
                            : this.<T>getParser(field).convertFromString(value));
        } catch (IllegalAccessException | InvocationTargetException | IntrospectionException | ParsingException e) {
            throw new MappingException(
                    "Field " + field.getName() + " of " + field.getDeclaringClass().getName() + " could not be set.",
                    e);
        }
    }

    /**
     * Sets the field, using the supplied elaborate parser.
     * <p/>
     * The parser is instantiated reflectively and receives the whole flat metadata map;
     * a {@code null} result falls back to the default value for the field's type.
     *
     * @param bean the destination bean
     * @param field the field to set
     * @param parser the {@link ElaborateParser} to use
     * @param metadata the flat metadata
     * @throws MappingException if the mapping fails
     */
    private <T> void setElaborateField(T bean, Field field, Class<? extends ElaborateParser<?>> parser,
            Map<String, String> metadata) throws MappingException {
        try {
            // Same deprecation fix as unmapToClass: avoid Class.newInstance().
            Object value = parser.getDeclaredConstructor().newInstance().fromMap(metadata);
            set(bean, field, value == null ? DEFAULT_VALUES.getDefaultValue(field.getType()) : value);
        } catch (ReflectiveOperationException | IllegalArgumentException | IntrospectionException
                | ParsingException e) {
            throw new MappingException(
                    "Serialization through parser " + parser.getSimpleName() + " could not be performed for field "
                            + field.getName() + " of bean " + bean.getClass().getSimpleName(),
                    e);
        }
    }
}
/*L
 * Copyright Washington University in St. Louis, SemanticBits, Persistent Systems, Krishagni.
 *
 * Distributed under the OSI-approved BSD 3-Clause License.
 * See http://ncip.github.com/catissue-dao/LICENSE.txt for details.
 */

/**
 * <p>Title: ConnectionManager class</p>
 * <p>Description: Handles all Hibernate-specific operations such as opening and closing
 * the Hibernate connection, session, etc. The connection manager keeps a ThreadLocal
 * holding a map of session objects keyed by application name
 * (Map&lt;ApplicationName, Session&gt;), which allows multiple Hibernate sessions,
 * one per application, on the same thread.</p>
 *
 * @author kalpana_thakur
 * @version 1.0
 */
package edu.wustl.dao.connectionmanager;

import java.sql.Connection;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;

import org.hibernate.FlushMode;
import org.hibernate.HibernateException;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.Transaction;
import org.hibernate.cfg.Configuration;

import edu.wustl.common.exception.ErrorKey;
import edu.wustl.common.util.logger.Logger;
import edu.wustl.dao.exception.DAOException;
import edu.wustl.dao.util.DAOConstants;

/**
 * Manages Hibernate sessions and JDBC connections per application, with one session
 * map per thread.
 *
 * NOTE(review): the {@code transaction}, {@code cleanSession} and {@code cleanConnection}
 * fields are instance state shared across threads while the session map is thread-local;
 * this class does not look thread-safe for concurrent use of a single instance — confirm
 * the intended usage pattern.
 *
 * @author kalpana_thakur
 */
public class ConnectionManager implements IConnectionManager {

    /**
     * logger Logger - Generic logger.
     */
    private static org.apache.log4j.Logger logger = Logger.getLogger(ConnectionManager.class);

    /**
     * This member will store the name of the application.
     */
    protected String applicationName;

    /**
     * This member will store the configuration instance.
     */
    protected Configuration configuration;

    /**
     * This member will store the sessionFactory instance.
     */
    protected SessionFactory sessionFactory;

    /**
     * This member will store data source for JDBC connection.
     */
    protected String dataSource;

    /**
     * specify Transaction instance, started when the current session is first opened.
     */
    private Transaction transaction = null;

    /**
     * specify clean Session instance (a session outside the thread-local map,
     * obtained via {@link #getCleanSession()}).
     */
    private Session cleanSession = null;

    /**
     * specify clean connection instance (the JDBC connection of the clean session).
     */
    private Connection cleanConnection = null;

    /**
     * ThreadLocal holding the Session map for the current executing thread.
     * It holds Map(ApplicationName, session), allowing multiple Hibernate sessions,
     * one per application. The map itself is created lazily in {@link #currentSession()}.
     */
    private static final ThreadLocal<Map<String, Session>> SESSION_THREAD_LOCAL =
            new ThreadLocal<Map<String, Session>>();

    /**
     * This method will be called to close current connection.
     * @throws DAOException :Generic DAOException.
     */
    public void closeConnection() throws DAOException {
        closeSession();
    }

    /**
     * Closes the session associated with the current application on this thread,
     * if one exists, and removes it from the thread-local map.
     * @throws DAOException :Generic DAOException.
     */
    public void closeSession() throws DAOException {
        Map<String, Session> applicationSessionMap = SESSION_THREAD_LOCAL.get();
        // The map is created lazily by currentSession(); it is null when no session was
        // ever opened on this thread — the previous version NPE'd here in that case.
        if (applicationSessionMap != null && applicationSessionMap.containsKey(applicationName)) {
            Session session = applicationSessionMap.get(applicationName);
            if (session != null) {
                session.close();
            }
            applicationSessionMap.remove(applicationName);
            transaction = null;
        }
    }

    /**
     * Retrieves the current session for this thread and application, lazily creating
     * both the thread-local map and the session (with a started transaction) on first use.
     * @return session object.
     * @throws DAOException :Generic DAOException.
     */
    public Session currentSession() throws DAOException {
        Map<String, Session> appSessionMap = SESSION_THREAD_LOCAL.get();
        if (appSessionMap == null) {
            appSessionMap = new HashMap<String, Session>();
            SESSION_THREAD_LOCAL.set(appSessionMap);
        }
        // Open a new Session, if this Thread has none yet
        if (!(appSessionMap.containsKey(applicationName))) {
            Session session = newSession();
            transaction = session.beginTransaction();
            appSessionMap.put(applicationName, session);
        }
        return appSessionMap.get(applicationName);
    }

    /**
     * Creates a new session with COMMIT flush mode and auto-commit disabled.
     * @return session object.
     * @throws DAOException :Generic DAOException.
     */
    public Session newSession() throws DAOException {
        try {
            Session session = sessionFactory.openSession();
            session.setFlushMode(FlushMode.COMMIT);
            session.connection().setAutoCommit(false);
            return session;
        } catch (Exception excp) {
            ErrorKey errorKey = ErrorKey.getErrorKey("db.operation.error");
            throw new DAOException(errorKey, excp, "ConnectionManager.java :" + DAOConstants.NEW_SESSION_ERROR);
        }
    }

    /**
     * This method will be called to retrieve the current connection object.
     * @return Connection object
     * @throws DAOException :Generic DAOException.
     */
    public Connection getConnection() throws DAOException {
        return currentSession().connection();
    }

    /**
     * This will be called to retrieve configuration object.
     * @return configuration
     */
    public Configuration getConfiguration() {
        return configuration;
    }

    /**
     * This will be called to set the configuration object.
     * @param cfg configuration
     */
    public void setConfiguration(Configuration cfg) {
        this.configuration = cfg;
    }

    /**
     * This will be called to retrieve session factory object.
     * @return sessionFactory
     */
    public SessionFactory getSessionFactory() {
        return sessionFactory;
    }

    /**
     * This will be called to set session factory object.
     * @param sessionFactory : session factory.
     */
    public void setSessionFactory(SessionFactory sessionFactory) {
        this.sessionFactory = sessionFactory;
    }

    /**
     * This method will be called to set applicationName.
     * @param applicationName : Name of the application.
     */
    public void setApplicationName(String applicationName) {
        this.applicationName = applicationName;
    }

    /**
     * This method will be called to retrieve the application Name.
     * @return application name.
     */
    public String getApplicationName() {
        return applicationName;
    }

    /**
     * This method will be called to get the data source.
     * @return dataSource
     */
    public String getDataSource() {
        return dataSource;
    }

    /**
     * This method will be called to set the data source.
     * @param dataSource : JDBC connection name.
     */
    public void setDataSource(String dataSource) {
        this.dataSource = dataSource;
    }

    /**
     * Commit the database level changes.
     * @throws DAOException : It will throw DAOException.
     */
    public void commit() throws DAOException {
        if (transaction != null) {
            transaction.commit();
        }
    }

    /**
     * RollBack all the changes after last commit.
     * Declared in DAO class.
     * @throws DAOException : It will throw DAOException.
     */
    public void rollback() throws DAOException {
        if (transaction != null) {
            transaction.rollback();
        }
    }

    /**
     * Opens a clean (stand-alone) session and returns its JDBC connection.
     * @return Connection object
     * @throws DAOException :Generic DAOException.
     */
    public Connection getCleanConnection() throws DAOException {
        logger.debug("Get clean connection");
        cleanConnection = getCleanSession().connection();
        return cleanConnection;
    }

    /**
     * Closes the clean connection, if one was opened.
     * @throws DAOException :Generic DAOException.
     */
    public void closeCleanConnection() throws DAOException {
        logger.debug("Close clean connection");
        try {
            // Guard against closeCleanConnection() being called before
            // getCleanConnection(); the previous version NPE'd here.
            if (cleanConnection != null) {
                cleanConnection.close();
                cleanConnection = null;
            }
        } catch (SQLException sqlExp) {
            ErrorKey errorKey = ErrorKey.getErrorKey("db.operation.error");
            // Error message previously named DAOFactory.java; corrected to this class.
            throw new DAOException(errorKey, sqlExp, "ConnectionManager.java :" + DAOConstants.CLOSE_CONN_ERR);
        }
    }

    /**
     * Closes the clean session, if one was opened.
     * @throws DAOException :Generic DAOException.
     */
    public void closeCleanSession() throws DAOException {
        logger.debug("Close clean session");
        if (cleanSession != null) {
            cleanSession.close();
            cleanSession = null;
        }
    }

    /**
     * Opens and returns a clean (stand-alone) session, tracking it so that
     * {@link #closeCleanSession()} can close it.
     * @return session object.
     * @throws DAOException :Generic DAOException.
     */
    public Session getCleanSession() throws DAOException {
        try {
            // The previous version never stored the opened session, so
            // closeCleanSession() always dereferenced a null field.
            cleanSession = sessionFactory.openSession();
            return cleanSession;
        } catch (HibernateException exp) {
            ErrorKey errorKey = ErrorKey.getErrorKey("db.operation.error");
            throw new DAOException(errorKey, exp, "ConnectionManager.java :" + DAOConstants.NEW_SESSION_ERROR);
        }
    }
}
package com.github.elopteryx.reflect.tests;

import static org.junit.jupiter.api.Assertions.assertAll;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertThrows;

import com.github.elopteryx.reflect.BeanMirror;
import com.github.elopteryx.reflect.BeanMirrorException;
import org.junit.jupiter.api.Test;

import java.lang.invoke.MethodHandles;
import java.util.Objects;

/**
 * Tests for {@code BeanMirror} operating on object instances: field access, getter/setter
 * creation, method invocation and the standard Object methods.
 */
class ObjectMirrorTest {

    private static final MethodHandles.Lookup LOOKUP = MethodHandles.lookup();

    @SuppressWarnings("unused")
    private static class Parent {
        private int i = 3;
    }

    @SuppressWarnings("unused")
    private static class Child extends Parent {
        private int i = 4;
    }

    @Test
    void asType() {
        // The same field name exists on both classes; viewing the mirror as the parent
        // type must resolve the parent's shadowed field.
        final var childValue = BeanMirror.of(new Child(), LOOKUP).get("i", int.class);
        final var parentValue = BeanMirror.of(new Child(), LOOKUP).asType(Parent.class).get("i", int.class);
        assertAll(
                () -> assertEquals(3, (int) parentValue),
                () -> assertEquals(4, (int) childValue));
    }

    @Test
    void get() {
        assertNotNull(BeanMirror.of(new Child(), LOOKUP).get());
    }

    @SuppressWarnings("unused")
    private static class GetField {
        private String a = "a";
    }

    @Test
    void getField() {
        assertNotNull(BeanMirror.of(new GetField(), LOOKUP).get("a", String.class));
    }

    @Test
    void getMissingField() {
        final var exception = assertThrows(BeanMirrorException.class, () ->
                BeanMirror.of(new GetField(), LOOKUP).get("b", String.class));
        assertEquals(NoSuchFieldException.class, exception.getCause().getClass());
    }

    @SuppressWarnings("unused")
    private static class SetField {
        private String a = "a";
    }

    @Test
    void setField() {
        final var mirror = BeanMirror.of(new SetField(), LOOKUP);
        final var mirrorAfterSet = mirror.set("a", "b");
        assertEquals(mirror, mirrorAfterSet);
    }

    @Test
    void setMissingField() {
        // Previously this test called get("b", ...), duplicating getMissingField() and
        // leaving the set-path untested. NOTE(review): assumes set on a missing field
        // surfaces NoSuchFieldException like get does — confirm against BeanMirror.
        final var exception = assertThrows(BeanMirrorException.class, () ->
                BeanMirror.of(new SetField(), LOOKUP).set("b", "value"));
        assertEquals(NoSuchFieldException.class, exception.getCause().getClass());
    }

    @SuppressWarnings("unused")
    private static class FieldTarget {
        private String value = "field";
    }

    @Test
    void field() {
        final var mirror = BeanMirror.of(new FieldTarget(), LOOKUP).field("value", String.class);
        assertEquals("field", mirror.get());
    }

    @SuppressWarnings("unused")
    private static class GetterSetterTarget {

        private String value;

        private GetterSetterTarget() {
            this("");
        }

        private GetterSetterTarget(final String value) {
            this.value = value;
        }

        private String getValue() {
            return value;
        }
    }

    @Test
    void createGetter() {
        final var getter = BeanMirror.of(new GetterSetterTarget(), LOOKUP).createGetter("value", String.class);
        assertAll(
                () -> assertEquals("", getter.apply(new GetterSetterTarget())),
                () -> assertEquals("a", getter.apply(new GetterSetterTarget("a"))),
                () -> assertEquals("b", getter.apply(new GetterSetterTarget("b")))
        );
    }

    @Test
    void createSetter() {
        final var target = new GetterSetterTarget();
        final var setter = BeanMirror.of(target, LOOKUP).createSetter("value", String.class);
        assertAll(
                () -> assertEquals("", target.getValue()),
                () -> {
                    setter.accept(target, "a");
                    assertEquals("a", target.getValue());
                },
                () -> {
                    setter.accept(target, "b");
                    assertEquals("b", target.getValue());
                },
                () -> {
                    setter.accept(target, "c");
                    assertEquals("c", target.getValue());
                }
        );
    }

    @Test
    void createGetterAndSetter() {
        final var target = new GetterSetterTarget();
        final var mirror = BeanMirror.of(target, LOOKUP);
        final var getter = mirror.createGetter("value", String.class);
        final var setter = mirror.createSetter("value", String.class);
        assertAll(
                () -> assertEquals("", target.getValue()),
                () -> {
                    setter.accept(target, "a");
                    assertEquals("a", target.getValue());
                    assertEquals("a", getter.apply(target));
                },
                () -> {
                    setter.accept(target, "b");
                    assertEquals("b", target.getValue());
                    assertEquals("b", getter.apply(target));
                },
                () -> {
                    setter.accept(target, "c");
                    assertEquals("c", target.getValue());
                    assertEquals("c", getter.apply(target));
                }
        );
    }

    @SuppressWarnings("unused")
    private static class GetterSetterTargetStatic {

        private static String value;

        private static void init() {
            GetterSetterTargetStatic.value = "";
        }
    }

    @Test
    void createStaticGetterAndSetter() {
        final var mirror = BeanMirror.of(new GetterSetterTargetStatic(), LOOKUP);
        final var getter = mirror.createStaticGetter("value", String.class);
        final var setter = mirror.createStaticSetter("value", String.class);
        assertAll(
                () -> {
                    GetterSetterTargetStatic.init();
                    assertEquals("", GetterSetterTargetStatic.value);
                    assertEquals("", getter.get());
                },
                () -> {
                    setter.accept("a");
                    assertEquals("a", GetterSetterTargetStatic.value);
                    assertEquals("a", getter.get());
                },
                () -> {
                    setter.accept("b");
                    assertEquals("b", GetterSetterTargetStatic.value);
                    assertEquals("b", getter.get());
                },
                () -> {
                    setter.accept("c");
                    assertEquals("c", GetterSetterTargetStatic.value);
                    assertEquals("c", getter.get());
                }
        );
    }

    @SuppressWarnings("unused")
    private static class RunTarget {

        public void run(final String param) {
            Objects.requireNonNull(param);
        }
    }

    @Test
    void run() {
        final var mirror = BeanMirror.of(new RunTarget(), LOOKUP);
        mirror.run("run", "arg1").run("run", "arg2");

        final var exception = assertThrows(RuntimeException.class, () -> mirror.run("run", (String) null));
        assertEquals(NullPointerException.class, exception.getCause().getClass());
    }

    @Test
    void runMissing() {
        final var exception = assertThrows(BeanMirrorException.class, () ->
                BeanMirror.of(new RunTarget(), LOOKUP).run("runThis", (String) null));
        assertEquals(NoSuchMethodException.class, exception.getCause().getClass());
    }

    @SuppressWarnings("unused")
    private static class CallTarget {

        private String call() {
            return "callable";
        }
    }

    @Test
    void call() {
        final var mirror = BeanMirror.of(new CallTarget(), LOOKUP);
        assertEquals("callable", mirror.call(String.class, "call").get());
    }

    @Test
    void callMissing() {
        final var exception = assertThrows(BeanMirrorException.class, () ->
                BeanMirror.of(new CallTarget(), LOOKUP).call(String.class, "callThis"));
        assertEquals(NoSuchMethodException.class, exception.getCause().getClass());
    }

    private static class StandardObjectMethods {}

    @Test
    void forHashCode() {
        final var object = new StandardObjectMethods();
        final var mirror = BeanMirror.of(object, LOOKUP);
        assertEquals(object.hashCode(), mirror.hashCode());
    }

    @Test
    void forEquals() {
        final var object = new StandardObjectMethods();
        final var mirror = BeanMirror.of(object, LOOKUP);
        final var otherMirror = BeanMirror.of(object, LOOKUP);
        assertEquals(mirror, otherMirror);
    }

    @Test
    void forToString() {
        final var object = new StandardObjectMethods();
        final var mirror = BeanMirror.of(object, LOOKUP);
        assertEquals(object.toString(), mirror.toString());
    }
}
/******************************************************************************* * Copyright (c) 2005, 2011 IBM Corporation and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * IBM Corporation - initial API and implementation *******************************************************************************/ package org.eclipse.jdt.internal.ui.jarimport; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.net.URI; import java.util.HashSet; import java.util.Set; import java.util.zip.ZipEntry; import java.util.zip.ZipFile; import org.eclipse.swt.SWT; import org.eclipse.swt.events.ModifyEvent; import org.eclipse.swt.events.ModifyListener; import org.eclipse.swt.events.SelectionAdapter; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.layout.GridData; import org.eclipse.swt.layout.GridLayout; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.FileDialog; import org.eclipse.swt.widgets.Label; import org.eclipse.core.filesystem.URIUtil; import org.eclipse.core.runtime.Assert; import org.eclipse.core.runtime.CoreException; import org.eclipse.core.resources.ResourcesPlugin; import org.eclipse.jface.dialogs.Dialog; import org.eclipse.jface.dialogs.IDialogConstants; import org.eclipse.jface.viewers.DecoratingLabelProvider; import org.eclipse.jface.viewers.ISelectionChangedListener; import org.eclipse.jface.viewers.IStructuredSelection; import org.eclipse.jface.viewers.SelectionChangedEvent; import org.eclipse.jface.viewers.StructuredSelection; import org.eclipse.jface.viewers.TreeViewer; import org.eclipse.jface.wizard.WizardPage; import org.eclipse.ui.PlatformUI; import org.eclipse.ltk.core.refactoring.RefactoringCore; import 
org.eclipse.jdt.core.IClasspathEntry; import org.eclipse.jdt.core.IJavaElement; import org.eclipse.jdt.core.IJavaModel; import org.eclipse.jdt.core.IJavaProject; import org.eclipse.jdt.core.IPackageFragment; import org.eclipse.jdt.core.IPackageFragmentRoot; import org.eclipse.jdt.core.JavaCore; import org.eclipse.jdt.core.JavaModelException; import org.eclipse.jdt.core.refactoring.descriptors.JavaRefactoringDescriptor; import org.eclipse.jdt.ui.JavaElementComparator; import org.eclipse.jdt.ui.JavaElementLabelProvider; import org.eclipse.jdt.ui.ProblemsLabelDecorator; import org.eclipse.jdt.ui.StandardJavaElementContentProvider; import org.eclipse.jdt.internal.ui.IJavaHelpContextIds; import org.eclipse.jdt.internal.ui.JavaPlugin; import org.eclipse.jdt.internal.ui.filters.EmptyPackageFilter; import org.eclipse.jdt.internal.ui.jarpackager.JarPackagerUtil; import org.eclipse.jdt.internal.ui.refactoring.binary.BinaryRefactoringHistoryWizard; import org.eclipse.jdt.internal.ui.util.SWTUtil; import org.eclipse.jdt.internal.ui.wizards.buildpaths.ArchiveFileFilter; /** * Jar import wizard page. * * @since 3.2 */ public final class JarImportWizardPage extends WizardPage { /** The jar import wizard page name */ private static final String PAGE_NAME= "JarImportWizardPage"; //$NON-NLS-1$ /** The history dialog setting */ protected static final String SETTING_HISTORY= "org.eclipse.jdt.ui.refactoring.jarHistory"; //$NON-NLS-1$ /** Is the wizard page displayed for the first time? */ private boolean fFirstTime= true; /** Is the wizard part of an import wizard? */ private final boolean fImportWizard; /** The location control */ private RefactoringLocationControl fLocationControl= null; /** The java model viewer */ private TreeViewer fTreeViewer= null; /** The import wizard */ private final JarImportWizard fWizard; /** * Creates a new jar import wizard page. 
* * @param wizard * the jar import wizard * @param importWizard * <code>true</code> if the wizard is part of an import wizard, * <code>false</code> otherwise */ public JarImportWizardPage(final JarImportWizard wizard, final boolean importWizard) { super(PAGE_NAME); Assert.isNotNull(wizard); fWizard= wizard; fImportWizard= importWizard; if (fImportWizard) { setTitle(JarImportMessages.JarImportWizardPage_page_title); setDescription(JarImportMessages.JarImportWizardPage_page_description); } else { setTitle(JarImportMessages.JarImportWizardPage_page_replace_title); setDescription(JarImportMessages.JarImportWizardPage_page_replace_description); } } /** * {@inheritDoc} */ public void createControl(final Composite parent) { initializeDialogUnits(parent); final Composite composite= new Composite(parent, SWT.NONE); composite.setLayout(new GridLayout()); composite.setLayoutData(new GridData(GridData.VERTICAL_ALIGN_FILL | GridData.HORIZONTAL_ALIGN_FILL)); createLocationGroup(composite); if (fImportWizard) createInputGroup(composite); createRenameGroup(composite); setPageComplete(false); if (fImportWizard && !fTreeViewer.getControl().isEnabled()) setMessage(JarImportMessages.JarImportWizardPage_no_jar_files, INFORMATION); setControl(composite); Dialog.applyDialogFont(composite); PlatformUI.getWorkbench().getHelpSystem().setHelp(composite, IJavaHelpContextIds.JARIMPORT_WIZARD_PAGE); } /** * Creates a new grid data. * * @param flag * the flags to use * @param hspan * the horizontal span * @param indent * the indent * @return the grid data */ protected GridData createGridData(final int flag, final int hspan, final int indent) { final GridData data= new GridData(flag); data.horizontalIndent= indent; data.horizontalSpan= hspan; return data; } /** * Creates the input group. 
 *
 * @param parent
 *            the parent control
 */
protected void createInputGroup(final Composite parent) {
	Assert.isNotNull(parent);
	// Filler label keeps this group vertically separated from the one above.
	new Label(parent, SWT.NONE);
	final Label label= new Label(parent, SWT.NONE);
	label.setText(JarImportMessages.JarImportWizardPage_import_message);
	// Content provider restricted to the java model and project levels:
	// children below a project are its importable package fragment roots.
	final StandardJavaElementContentProvider contentProvider= new StandardJavaElementContentProvider() {

		@Override
		public Object[] getChildren(Object element) {
			if ((element instanceof IJavaProject) || (element instanceof IJavaModel))
				return super.getChildren(element);
			return new Object[0];
		}

		@Override
		protected Object[] getJavaProjects(final IJavaModel model) throws JavaModelException {
			// Only offer projects that contain at least one valid root.
			final Set<IJavaProject> set= new HashSet<IJavaProject>();
			final IJavaProject[] projects= model.getJavaProjects();
			for (int index= 0; index < projects.length; index++) {
				if (JarImportWizard.isValidJavaProject(projects[index])) {
					final Object[] roots= getPackageFragmentRoots(projects[index]);
					if (roots.length > 0)
						set.add(projects[index]);
				}
			}
			return set.toArray();
		}

		@Override
		protected Object[] getPackageFragmentRoots(final IJavaProject project) throws JavaModelException {
			// Only roots with a valid classpath entry that is not referenced
			// by another entry are candidates for import.
			final Set<IPackageFragmentRoot> set= new HashSet<IPackageFragmentRoot>();
			final IPackageFragmentRoot[] roots= project.getPackageFragmentRoots();
			for (int offset= 0; offset < roots.length; offset++) {
				IPackageFragmentRoot root= roots[offset];
				IClasspathEntry entry= root.getRawClasspathEntry();
				if (JarImportWizard.isValidClassPathEntry(entry) && root.getResolvedClasspathEntry().getReferencingEntry() == null)
					set.add(root);
			}
			return set.toArray();
		}

		@Override
		public boolean hasChildren(final Object element) {
			return (element instanceof IJavaProject) || (element instanceof IJavaModel);
		}
	};
	final DecoratingLabelProvider labelProvider= new DecoratingLabelProvider(new JavaElementLabelProvider(JavaElementLabelProvider.SHOW_BASICS | JavaElementLabelProvider.SHOW_OVERLAY_ICONS | JavaElementLabelProvider.SHOW_SMALL_ICONS), new ProblemsLabelDecorator(null));
	fTreeViewer= new TreeViewer(parent, SWT.SINGLE | SWT.BORDER);
	fTreeViewer.getTree().setLayoutData(createGridData(GridData.FILL_BOTH, 6, 0));
	fTreeViewer.setLabelProvider(labelProvider);
	fTreeViewer.setContentProvider(contentProvider);
	fTreeViewer.addFilter(new EmptyPackageFilter());
	fTreeViewer.setComparator(new JavaElementComparator());
	fTreeViewer.setAutoExpandLevel(2);
	fTreeViewer.setInput(JavaCore.create(ResourcesPlugin.getWorkspace().getRoot()));
	// Preselect and reveal the root the wizard was opened on, if any.
	final IPackageFragmentRoot root= fWizard.getPackageFragmentRoot();
	if (root != null) {
		fTreeViewer.setSelection(new StructuredSelection(new Object[] { root}), true);
		fTreeViewer.expandToLevel(root, 1);
	}
	fTreeViewer.addSelectionChangedListener(new ISelectionChangedListener() {

		public void selectionChanged(final SelectionChangedEvent event) {
			handleInputChanged();
		}
	});
	// Disable the viewer when the workspace offers nothing to import.
	if (contentProvider.getChildren(JavaCore.create(ResourcesPlugin.getWorkspace().getRoot())).length == 0) {
		fTreeViewer.getControl().setEnabled(false);
		label.setEnabled(false);
	}
}

/**
 * Creates the location group.
* * @param parent * the parent control */ protected void createLocationGroup(final Composite parent) { Assert.isNotNull(parent); new Label(parent, SWT.NONE).setText(JarImportMessages.JarImportWizardPage_import_label); final Composite composite= new Composite(parent, SWT.NONE); composite.setLayoutData(createGridData(GridData.FILL_HORIZONTAL, 6, 0)); composite.setLayout(new GridLayout(3, false)); final Label label= new Label(composite, SWT.NONE); label.setText(JarImportMessages.JarImportWizardPage_location_label); label.setLayoutData(createGridData(GridData.HORIZONTAL_ALIGN_BEGINNING, 1, 0)); fLocationControl= new RefactoringLocationControl(fWizard, composite, SETTING_HISTORY); fLocationControl.setLayoutData(createGridData(GridData.FILL_HORIZONTAL, 1, 0)); fLocationControl.loadHistory(); fLocationControl.getControl().addModifyListener(new ModifyListener() { public final void modifyText(final ModifyEvent event) { handleInputChanged(); } }); fLocationControl.getControl().addSelectionListener(new SelectionAdapter() { @Override public final void widgetSelected(final SelectionEvent event) { handleInputChanged(); } }); fLocationControl.setFocus(); final Button button= new Button(composite, SWT.PUSH); button.setText(JarImportMessages.JarImportWizardPage_browse_button_label); button.setLayoutData(createGridData(GridData.HORIZONTAL_ALIGN_FILL, 1, 0)); SWTUtil.setButtonDimensionHint(button); button.addSelectionListener(new SelectionAdapter() { @Override public final void widgetSelected(final SelectionEvent event) { handleBrowseButtonSelected(); } }); } /** * Creates the rename group. 
* * @param parent * the parent control */ protected void createRenameGroup(final Composite parent) { Assert.isNotNull(parent); final JarImportData data= fWizard.getImportData(); final Button button= new Button(parent, SWT.CHECK); button.setText(JarImportMessages.JarImportWizardPage_replace_jar_file); button.setSelection(!data.isRenameJarFile()); button.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(final SelectionEvent event) { data.setRenameJarFile(!button.getSelection()); } }); if (fImportWizard && !fTreeViewer.getControl().isEnabled()) button.setEnabled(false); if (!fImportWizard) { final GridData gd= new GridData(); gd.horizontalIndent= IDialogConstants.HORIZONTAL_MARGIN; button.setLayoutData(gd); } } /** * Handles the browse button selected event. */ protected void handleBrowseButtonSelected() { final FileDialog file= new FileDialog(getShell(), SWT.OPEN); file.setText(JarImportMessages.JarImportWizardPage_browse_caption); file.setFilterNames(ArchiveFileFilter.ALL_ARCHIVES_FILTER_EXTENSIONS); file.setFilterExtensions(ArchiveFileFilter.ALL_ARCHIVES_FILTER_EXTENSIONS); final String path= file.open(); if (path != null) { fLocationControl.setText(path); handleInputChanged(); } } /** * Handles the input changed event. */ protected void handleInputChanged() { final JarImportData data= fWizard.getImportData(); data.setRefactoringHistory(null); data.setRefactoringFileLocation(null); setErrorMessage(null); setMessage(null, NONE); setPageComplete(true); handleJarFileChanged(); if (isPageComplete()) handlePackageFragmentRootChanged(); if (fImportWizard && !fTreeViewer.getControl().isEnabled()) setErrorMessage(JarImportMessages.JarImportWizardPage_no_jar_files); fFirstTime= false; getContainer().updateButtons(); } /** * Handles the jar file changed event. 
 */
protected void handleJarFileChanged() {
	if (fLocationControl != null) {
		final String path= fLocationControl.getText();
		if ("".equals(path)) { //$NON-NLS-1$
			setErrorMessage(JarImportMessages.JarImportWizardPage_empty_location);
			setPageComplete(false);
			return;
		} else {
			final File file= new File(path);
			if (!file.exists()) {
				setErrorMessage(JarImportMessages.JarImportWizardPage_invalid_location);
				setPageComplete(false);
				return;
			}
			ZipFile zip= null;
			try {
				try {
					zip= new ZipFile(file, ZipFile.OPEN_READ);
				} catch (IOException exception) {
					// Exists but is not a readable zip archive.
					setErrorMessage(JarImportMessages.JarImportWizardPage_invalid_location);
					setPageComplete(false);
					return;
				}
				final JarImportData data= fWizard.getImportData();
				data.setRefactoringFileLocation(URIUtil.toURI(path));
				ZipEntry entry= zip.getEntry(JarPackagerUtil.getRefactoringsEntry());
				if (entry == null) {
					// Archive without refactoring information is still
					// importable; only inform the user.
					setMessage(JarImportMessages.JarImportWizardPage_no_refactorings, INFORMATION);
					setPageComplete(true);
					return;
				}
				handleTimeStampChanged();
				if (data.getExistingTimeStamp() > entry.getTime()) {
					// The archive already on the classpath is newer than the
					// selected one; warn but allow finishing.
					setMessage(JarImportMessages.JarImportWizardPage_version_warning, WARNING);
					setPageComplete(true);
					return;
				}
				InputStream stream= null;
				try {
					stream= zip.getInputStream(entry);
					data.setRefactoringHistory(RefactoringCore.getHistoryService().readRefactoringHistory(stream, JavaRefactoringDescriptor.JAR_MIGRATION | JavaRefactoringDescriptor.JAR_REFACTORING));
				} catch (IOException exception) {
					setErrorMessage(JarImportMessages.JarImportWizardPage_no_refactorings);
					setPageComplete(false);
					return;
				} catch (CoreException exception) {
					JavaPlugin.log(exception);
					setErrorMessage(JarImportMessages.JarImportWizardPage_no_refactorings);
					setPageComplete(false);
					return;
				} finally {
					if (stream != null) {
						try {
							stream.close();
						} catch (IOException exception) {
							// Do nothing
						}
					}
				}
			} finally {
				// Close the archive on every exit path above.
				if (zip != null) {
					try {
						zip.close();
					} catch (IOException e) {
						// Ignore: nothing sensible can be done on close failure.
					}
				}
			}
		}
	}
}

/**
 * Handles the package fragment root changed event.
 */
protected void handlePackageFragmentRootChanged() {
	if (fTreeViewer != null) {
		final IStructuredSelection selection= (IStructuredSelection) fTreeViewer.getSelection();
		final Object[] elements= selection.toArray();
		if (elements.length != 1) {
			// Exactly one archive must be selected.
			setErrorMessage(JarImportMessages.JarImportWizardPage_select_single_jar);
			setPageComplete(false);
			return;
		} else {
			final JarImportData data= fWizard.getImportData();
			final Object element= elements[0];
			if (element instanceof IPackageFragmentRoot)
				data.setPackageFragmentRoot((IPackageFragmentRoot) element);
			else if (element instanceof IPackageFragment) {
				// A package was selected: use its enclosing root instead.
				data.setPackageFragmentRoot((IPackageFragmentRoot) ((IJavaElement) element).getParent());
			} else {
				setErrorMessage(JarImportMessages.JarImportWizardPage_select_single_jar);
				setPageComplete(false);
			}
		}
	}
}

/**
 * Handles the time stamp changed event. Records the time stamp of the
 * refactoring entry of the archive currently on the classpath, for later
 * comparison with the selected archive.
 */
protected void handleTimeStampChanged() {
	final IPackageFragmentRoot root= fWizard.getPackageFragmentRoot();
	if (root != null) {
		try {
			final URI uri= BinaryRefactoringHistoryWizard.getLocationURI(root.getRawClasspathEntry());
			if (uri != null) {
				final File file= new File(uri);
				if (file.exists()) {
					ZipFile zip= null;
					try {
						zip= new ZipFile(file, ZipFile.OPEN_READ);
						ZipEntry entry= zip.getEntry(JarPackagerUtil.getRefactoringsEntry());
						if (entry != null) {
							fWizard.getImportData().setExistingTimeStamp(entry.getTime());
						}
					} catch (IOException exception) {
						// Just leave it
					} finally {
						if (zip != null) {
							try {
								zip.close();
							} catch (IOException e) {
								// Ignore close failures.
							}
						}
					}
				}
			}
		} catch (CoreException exception) {
			JavaPlugin.log(exception);
		}
	}
}

/**
 * Gets called if the wizard is finished.
*/ public void performFinish() { fLocationControl.saveHistory(); } /** * {@inheritDoc} */ @Override public void setErrorMessage(final String message) { if (!fFirstTime) super.setErrorMessage(message); else setMessage(message, NONE); } /** * {@inheritDoc} */ @Override public void setVisible(final boolean visible) { super.setVisible(visible); if (visible) handleInputChanged(); } }
/** * * LiquidFunProcessing | Copyright 2017 Thomas Diewald - www.thomasdiewald.com * * https://github.com/diwi/LiquidFunProcessing.git * * Box2d / LiquidFun Library for Processing. * MIT License: https://opensource.org/licenses/MIT * */ package liquidfun_DamBreak_LiquidFx; import com.thomasdiewald.liquidfun.java.DwWorld; import com.thomasdiewald.pixelflow.java.DwPixelFlow; import com.thomasdiewald.pixelflow.java.imageprocessing.filter.DwLiquidFX; import org.jbox2d.collision.shapes.ChainShape; import org.jbox2d.collision.shapes.PolygonShape; import org.jbox2d.common.Color3f; import org.jbox2d.common.Vec2; import org.jbox2d.dynamics.Body; import org.jbox2d.dynamics.BodyDef; import org.jbox2d.particle.ParticleGroupDef; import org.jbox2d.particle.ParticleType; import processing.core.*; import processing.opengl.PGraphics2D; public class liquidfun_DamBreak_LiquidFx extends PApplet { // // Simulation of a clash of two big particle-groups. // // required libraries: // - PixelFlow, https://github.com/diwi/PixelFlow // // // Controls: // // LMB ... drag bodies // LMB + SHIFT ... shoot bullet // MMB ... add particles // RMB ... remove particles // 'r' ... reset // 't' ... update/pause physics // 'f' ... toggle debug draw // 'g' ... 
// ... toggle DwLiquidFX
//

// Window size and placement.
int viewport_w = 1280;
int viewport_h = 720;
int viewport_x = 230;
int viewport_y = 0;

// Runtime toggles (see key bindings above).
boolean UPDATE_PHYSICS  = true;
boolean USE_DEBUG_DRAW  = false;
boolean APPLY_LIQUID_FX = true;

DwWorld     world;          // Box2d/LiquidFun world wrapper
DwPixelFlow pixelflow;      // GL context for the filters
DwLiquidFX  liquidfx;       // fluid post-processing filter
PGraphics2D pg_particles;   // offscreen layer the particles are drawn into

// PImage sprite;

public void settings(){
  size(viewport_w, viewport_h, P2D);
  smooth(8);
}

public void setup(){
  surface.setLocation(viewport_x, viewport_y);

  // sprite = loadImage("sprite.png");

  pixelflow = new DwPixelFlow(this);
  liquidfx = new DwLiquidFX(pixelflow);
  // Offscreen buffer matching the window; particles render here so the
  // LiquidFX filter can post-process them before compositing.
  pg_particles = (PGraphics2D) createGraphics(width, height, P2D);

  reset();
  frameRate(120);
}

// Releases the current world, if any.
public void release(){
  if(world != null) world.release();
  world = null;
}

// Rebuilds the world and the scene from scratch.
public void reset(){
  // release old resources
  release();

  world = new DwWorld(this, 18);
  world.particles.param.falloff_exp1 = 3;
  world.particles.param.falloff_exp2 = 1;
  world.particles.param.radius_scale = 2;

  // create scene: rigid bodies, particles, etc ...
  initScene();
}

public void draw(){

  if(UPDATE_PHYSICS){
    world.update();
  }

  int BACKGROUND = 32;

  if(USE_DEBUG_DRAW){
    // Raw debug rendering straight onto the main canvas.
    PGraphics2D canvas = (PGraphics2D) this.g;
    canvas.background(BACKGROUND);
    canvas.pushMatrix();
    world.applyTransform(canvas);
    world.drawBulletSpawnTrack(canvas);
    world.displayDebugDraw(canvas);
    canvas.popMatrix();
  } else {
    // Render particles into the offscreen layer first.
    PGraphics2D canvas = (PGraphics2D) pg_particles;

    canvas.beginDraw();
    canvas.clear();
    canvas.background(BACKGROUND, 0);
    world.applyTransform(canvas);
    world.particles.display(canvas, 0);
    canvas.endDraw();

    if(APPLY_LIQUID_FX) {
      // Filter parameters set every frame; see DwLiquidFX for semantics.
      liquidfx.param.base_LoD = 1;
      liquidfx.param.base_blur_radius = 2;
      liquidfx.param.base_threshold = 0.7f;
      liquidfx.param.highlight_enabled = true;
      liquidfx.param.highlight_LoD = 1;
      liquidfx.param.highlight_decay = 0.6f;
      liquidfx.param.sss_enabled = true;
      liquidfx.param.sss_LoD = 3;
      liquidfx.param.sss_decay = 0.5f;
      liquidfx.apply(canvas);
    }

    // Composite the filtered layer onto the main canvas.
    background(BACKGROUND);
    image(canvas, 0, 0);

    pushMatrix();
    world.applyTransform(this.g);
    world.drawBulletSpawnTrack(this.g);
    popMatrix();
  }

  // info
  int num_bodies = world.getBodyCount();
  int num_particles = world.getParticleCount();
  String txt_fps = String.format(getClass().getName()+ " [bodies: %d] [particles: %d] [fps %6.2f]", num_bodies, num_particles, frameRate);
  surface.setTitle(txt_fps);
}

//////////////////////////////////////////////////////////////////////////////
// User Interaction
//////////////////////////////////////////////////////////////////////////////

public void keyReleased(){
  if(key == 't') UPDATE_PHYSICS = !UPDATE_PHYSICS;
  if(key == 'r') reset();
  if(key == 'f') USE_DEBUG_DRAW = !USE_DEBUG_DRAW;
  if(key == 'g') APPLY_LIQUID_FX = !APPLY_LIQUID_FX;
}

//////////////////////////////////////////////////////////////////////////////
// Scene Setup
//////////////////////////////////////////////////////////////////////////////

// https://github.com/jbox2d/jbox2d/blob/master/jbox2d-testbed/src/main/java/org/jbox2d/testbed/tests/DamBreak.java
public void initScene() {

  float dimx = world.transform.box2d_dimx;
  float dimy = world.transform.box2d_dimy;

  float dimxh = dimx/2;
  float dimyh = dimy/2;

  {
    // Static ground: a closed chain around the visible area.
    BodyDef bd = new BodyDef();
    Body ground = world.createBody(bd);

    ChainShape shape = new ChainShape();
    Vec2[] vertices = {new Vec2(-dimxh, 0), new Vec2(dimxh, 0), new Vec2(dimxh, dimy), new Vec2(-dimxh, dimy)};
    shape.createLoop(vertices, 4);
    ground.createFixture(shape, 0.0f);
    world.bodies.add(ground, false, color(0), true, color(0), 1f);
  }

  {
    // Two particle columns, left (blue) and right (red), that collapse
    // towards each other. The same shape/def objects are reused for both.
    PolygonShape shape = new PolygonShape();
    ParticleGroupDef pd = new ParticleGroupDef();

    pd.flags = 0
        | ParticleType.b2_waterParticle
        | ParticleType.b2_viscousParticle
        | ParticleType.b2_colorMixingParticle
        // | ParticleType.b2_powderParticle
        // | ParticleType.b2_springParticle
        // | ParticleType.b2_tensileParticle
        ;

    float sx = dimxh * 0.25f;
    float sy = dimyh * 0.95f;

    shape.setAsBox(sx, sy, new Vec2(-dimxh/2, dimyh), 0);
    pd.shape = shape;
    pd.setColor(new Color3f(0.00f, 0.2f, 1));
    world.createParticleGroup(pd);

    shape.setAsBox(sx, sy, new Vec2(+dimxh/2, dimyh), 0);
    pd.shape = shape;
    pd.setColor(new Color3f(1.00f, 0.2f, 0.00f));
    world.createParticleGroup(pd);
  }
}

public static void main(String args[]) {
  PApplet.main(new String[] { liquidfun_DamBreak_LiquidFx.class.getName() });
}
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to you under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.calcite.test; import org.apache.calcite.adapter.enumerable.CallImplementor; import org.apache.calcite.adapter.enumerable.RexImpTable.NullAs; import org.apache.calcite.adapter.enumerable.RexToLixTranslator; import org.apache.calcite.adapter.java.ReflectiveSchema; import org.apache.calcite.jdbc.CalciteConnection; import org.apache.calcite.linq4j.Ord; import org.apache.calcite.linq4j.tree.Expression; import org.apache.calcite.linq4j.tree.Expressions; import org.apache.calcite.linq4j.tree.Types; import org.apache.calcite.rel.type.RelDataType; import org.apache.calcite.rel.type.RelDataTypeFactory; import org.apache.calcite.rel.type.RelProtoDataType; import org.apache.calcite.rex.RexCall; import org.apache.calcite.schema.FunctionParameter; import org.apache.calcite.schema.ImplementableFunction; import org.apache.calcite.schema.ScalarFunction; import org.apache.calcite.schema.SchemaPlus; import org.apache.calcite.schema.impl.AbstractSchema; import org.apache.calcite.schema.impl.ScalarFunctionImpl; import org.apache.calcite.schema.impl.ViewTable; import org.apache.calcite.sql.type.SqlTypeName; import org.apache.calcite.util.Smalls; import com.google.common.collect.ImmutableList; import 
org.junit.Ignore; import org.junit.Test; import java.lang.reflect.Method; import java.sql.Connection; import java.sql.DriverManager; import java.sql.ResultSet; import java.sql.Statement; import java.util.ArrayList; import java.util.List; import java.util.concurrent.atomic.AtomicInteger; import static org.hamcrest.CoreMatchers.is; import static org.junit.Assert.assertThat; /** * Tests for user-defined functions; * includes user-defined aggregate functions * but user-defined table functions are in {@link TableFunctionTest}. * * @see Smalls */ public class UdfTest { private CalciteAssert.AssertThat withUdf() { final String model = "{\n" + " version: '1.0',\n" + " schemas: [\n" + " {\n" + " name: 'adhoc',\n" + " tables: [\n" + " {\n" + " name: 'EMPLOYEES',\n" + " type: 'custom',\n" + " factory: '" + JdbcTest.EmpDeptTableFactory.class.getName() + "',\n" + " operand: {'foo': true, 'bar': 345}\n" + " }\n" + " ],\n" + " functions: [\n" + " {\n" + " name: 'MY_PLUS',\n" + " className: '" + Smalls.MyPlusFunction.class.getName() + "'\n" + " },\n" + " {\n" + " name: 'MY_DET_PLUS',\n" + " className: '" + Smalls.MyDeterministicPlusFunction.class.getName() + "'\n" + " },\n" + " {\n" + " name: 'MY_LEFT',\n" + " className: '" + Smalls.MyLeftFunction.class.getName() + "'\n" + " },\n" + " {\n" + " name: 'ABCDE',\n" + " className: '" + Smalls.MyAbcdeFunction.class.getName() + "'\n" + " },\n" + " {\n" + " name: 'MY_STR',\n" + " className: '" + Smalls.MyToStringFunction.class.getName() + "'\n" + " },\n" + " {\n" + " name: 'MY_DOUBLE',\n" + " className: '" + Smalls.MyDoubleFunction.class.getName() + "'\n" + " },\n" + " {\n" + " name: 'COUNT_ARGS',\n" + " className: '" + Smalls.CountArgs0Function.class.getName() + "'\n" + " },\n" + " {\n" + " name: 'COUNT_ARGS',\n" + " className: '" + Smalls.CountArgs1Function.class.getName() + "'\n" + " },\n" + " {\n" + " name: 'COUNT_ARGS',\n" + " className: '" + Smalls.CountArgs1NullableFunction.class.getName() + "'\n" + " },\n" + " {\n" + " name: 
'COUNT_ARGS',\n"
        + " className: '" + Smalls.CountArgs2Function.class.getName() + "'\n"
        + " },\n"
        + " {\n"
        + " name: 'MY_ABS',\n"
        + " className: '" + java.lang.Math.class.getName() + "',\n"
        + " methodName: 'abs'\n"
        + " },\n"
        + " {\n"
        + " className: '" + Smalls.MultipleFunction.class.getName() + "',\n"
        + " methodName: '*'\n"
        + " },\n"
        + " {\n"
        + " className: '" + Smalls.AllTypesFunction.class.getName() + "',\n"
        + " methodName: '*'\n"
        + " }\n"
        + " ]\n"
        + " }\n"
        + " ]\n"
        + "}";
    return CalciteAssert.model(model);
  }

  /** Tests a user-defined function that is defined in terms of a class with
   * non-static methods. */
  @Ignore("[CALCITE-1561] Intermittent test failures")
  @Test public void testUserDefinedFunction() throws Exception {
    final String sql = "select \"adhoc\".my_plus(\"deptno\", 100) as p\n"
        + "from \"adhoc\".EMPLOYEES";
    // The instance count is expected to grow by one per row (four rows).
    final AtomicInteger c = Smalls.MyPlusFunction.INSTANCE_COUNT;
    final int before = c.get();
    withUdf().query(sql).returnsUnordered("P=110", "P=120", "P=110", "P=110");
    final int after = c.get();
    assertThat(after, is(before + 4));
  }

  /** As {@link #testUserDefinedFunction()}, but checks that the class is
   * instantiated exactly once, per
   * <a href="https://issues.apache.org/jira/browse/CALCITE-1548">[CALCITE-1548]
   * Instantiate function objects once per query</a>.
 */
  @Test public void testUserDefinedFunctionInstanceCount() throws Exception {
    final String sql = "select \"adhoc\".my_det_plus(\"deptno\", 100) as p\n"
        + "from \"adhoc\".EMPLOYEES";
    // Unlike testUserDefinedFunction, the count grows by exactly one for
    // the whole query.
    final AtomicInteger c = Smalls.MyDeterministicPlusFunction.INSTANCE_COUNT;
    final int before = c.get();
    withUdf().query(sql).returnsUnordered("P=110", "P=120", "P=110", "P=110");
    final int after = c.get();
    assertThat(after, is(before + 1));
  }

  @Test public void testUserDefinedFunctionB() throws Exception {
    final String sql = "select \"adhoc\".my_double(\"deptno\") as p\n"
        + "from \"adhoc\".EMPLOYEES";
    final String expected = "P=20\n"
        + "P=40\n"
        + "P=20\n"
        + "P=20\n";
    withUdf().query(sql).returns(expected);
  }

  /** Test case for
   * <a href="https://issues.apache.org/jira/browse/CALCITE-937">[CALCITE-937]
   * User-defined function within view</a>. */
  @Test public void testUserDefinedFunctionInView() throws Exception {
    Class.forName("org.apache.calcite.jdbc.Driver");
    Connection connection = DriverManager.getConnection("jdbc:calcite:");
    CalciteConnection calciteConnection =
        connection.unwrap(CalciteConnection.class);
    SchemaPlus rootSchema = calciteConnection.getRootSchema();
    rootSchema.add("hr", new ReflectiveSchema(new JdbcTest.HrSchema()));
    // Register the UDF in the POST schema so the view can call it.
    SchemaPlus post = rootSchema.add("POST", new AbstractSchema());
    post.add("MY_INCREMENT",
        ScalarFunctionImpl.create(Smalls.MyIncrement.class, "eval"));
    final String viewSql = "select \"empid\" as EMPLOYEE_ID,\n"
        + " \"name\" || ' ' || \"name\" as EMPLOYEE_NAME,\n"
        + " \"salary\" as EMPLOYEE_SALARY,\n"
        + " POST.MY_INCREMENT(\"empid\", 10) as INCREMENTED_SALARY\n"
        + "from \"hr\".\"emps\"";
    post.add("V_EMP",
        ViewTable.viewMacro(post, viewSql, ImmutableList.<String>of(),
            ImmutableList.of("POST", "V_EMP"), null));
    final String result = ""
        + "EMPLOYEE_ID=100; EMPLOYEE_NAME=Bill Bill; EMPLOYEE_SALARY=10000.0; INCREMENTED_SALARY=110.0\n"
        + "EMPLOYEE_ID=200; EMPLOYEE_NAME=Eric Eric; EMPLOYEE_SALARY=8000.0; INCREMENTED_SALARY=220.0\n"
        + "EMPLOYEE_ID=150; EMPLOYEE_NAME=Sebastian Sebastian; EMPLOYEE_SALARY=7000.0; INCREMENTED_SALARY=165.0\n"
        + "EMPLOYEE_ID=110; EMPLOYEE_NAME=Theodore Theodore; EMPLOYEE_SALARY=11500.0; INCREMENTED_SALARY=121.0\n";
    // The view SQL executed directly and the view itself must agree.
    Statement statement = connection.createStatement();
    ResultSet resultSet = statement.executeQuery(viewSql);
    assertThat(CalciteAssert.toString(resultSet), is(result));
    resultSet.close();
    ResultSet viewResultSet =
        statement.executeQuery("select * from \"POST\".\"V_EMP\"");
    assertThat(CalciteAssert.toString(viewResultSet), is(result));
    statement.close();
    connection.close();
  }

  /**
   * Tests that IS NULL/IS NOT NULL is properly implemented for non-strict
   * functions.
   */
  @Test public void testNotNullImplementor() {
    final CalciteAssert.AssertThat with = withUdf();
    with.query(
        "select upper(\"adhoc\".my_str(\"name\")) as p from \"adhoc\".EMPLOYEES")
        .returns("P=<BILL>\n"
            + "P=<ERIC>\n"
            + "P=<SEBASTIAN>\n"
            + "P=<THEODORE>\n");
    with.query("select \"name\" as p from \"adhoc\".EMPLOYEES\n"
        + "where \"adhoc\".my_str(\"name\") is not null")
        .returns("P=Bill\n"
            + "P=Eric\n"
            + "P=Sebastian\n"
            + "P=Theodore\n");
    with.query("select \"name\" as p from \"adhoc\".EMPLOYEES\n"
        + "where \"adhoc\".my_str(upper(\"name\")) is not null")
        .returns("P=Bill\n"
            + "P=Eric\n"
            + "P=Sebastian\n"
            + "P=Theodore\n");
    with.query("select \"name\" as p from \"adhoc\".EMPLOYEES\n"
        + "where upper(\"adhoc\".my_str(\"name\")) is not null")
        .returns("P=Bill\n"
            + "P=Eric\n"
            + "P=Sebastian\n"
            + "P=Theodore\n");
    // MY_STR never returns null for these rows, so IS NULL filters all rows.
    with.query("select \"name\" as p from \"adhoc\".EMPLOYEES\n"
        + "where \"adhoc\".my_str(\"name\") is null")
        .returns("");
    with.query("select \"name\" as p from \"adhoc\".EMPLOYEES\n"
        + "where \"adhoc\".my_str(upper(\"adhoc\".my_str(\"name\")"
        + ")) ='8'")
        .returns("");
  }

  /** Tests derived return type of user-defined function.
 */
  @Test public void testUdfDerivedReturnType() {
    final CalciteAssert.AssertThat with = withUdf();
    with.query(
        "select max(\"adhoc\".my_double(\"deptno\")) as p from \"adhoc\".EMPLOYEES")
        .returns("P=40\n");
    // MAX over an input that the WHERE clause empties yields null.
    with.query("select max(\"adhoc\".my_str(\"name\")) as p\n"
        + "from \"adhoc\".EMPLOYEES\n"
        + "where \"adhoc\".my_str(\"name\") is null")
        .returns("P=null\n");
  }

  /** Tests a user-defined function that has multiple overloads. */
  @Test public void testUdfOverloaded() {
    final CalciteAssert.AssertThat with = withUdf();
    // Overload resolution by argument count: 0, 1 and 2 arguments.
    with.query("values (\"adhoc\".count_args(),\n"
        + " \"adhoc\".count_args(0),\n"
        + " \"adhoc\".count_args(0, 0))")
        .returns("EXPR$0=0; EXPR$1=1; EXPR$2=2\n");
    with.query("select max(\"adhoc\".count_args()) as p0,\n"
        + " min(\"adhoc\".count_args(0)) as p1,\n"
        + " max(\"adhoc\".count_args(0, 0)) as p2\n"
        + "from \"adhoc\".EMPLOYEES limit 1")
        .returns("P0=0; P1=1; P2=2\n");
  }

  @Test public void testUdfOverloadedNullable() {
    final CalciteAssert.AssertThat with = withUdf();
    // The nullable one-argument overload handles the NULL argument (-1).
    with.query("values (\"adhoc\".count_args(),\n"
        + " \"adhoc\".count_args(cast(null as smallint)),\n"
        + " \"adhoc\".count_args(0, 0))")
        .returns("EXPR$0=0; EXPR$1=-1; EXPR$2=2\n");
  }

  /** Tests passing parameters to user-defined function by name.
 */
  @Test public void testUdfArgumentName() {
    final CalciteAssert.AssertThat with = withUdf();
    // arguments in physical order
    with.query("values (\"adhoc\".my_left(\"s\" => 'hello', \"n\" => 3))")
        .returns("EXPR$0=hel\n");
    // arguments in reverse order
    with.query("values (\"adhoc\".my_left(\"n\" => 3, \"s\" => 'hello'))")
        .returns("EXPR$0=hel\n");
    // a named argument may be an arbitrary expression, not just a literal
    with.query("values (\"adhoc\".my_left(\"n\" => 1 + 2, \"s\" => 'hello'))")
        .returns("EXPR$0=hel\n");
    // duplicate argument names
    with.query("values (\"adhoc\".my_left(\"n\" => 3, \"n\" => 2, \"s\" => 'hello'))")
        .throws_("Duplicate argument name 'n'");
    // invalid argument names
    with.query("values (\"adhoc\".my_left(\"n\" => 3, \"m\" => 2, \"s\" => 'h'))")
        .throws_("No match found for function signature "
            + "MY_LEFT(n => <NUMERIC>, m => <NUMERIC>, s => <CHARACTER>)");
    // missing arguments
    with.query("values (\"adhoc\".my_left(\"n\" => 3))")
        .throws_("No match found for function signature MY_LEFT(n => <NUMERIC>)");
    with.query("values (\"adhoc\".my_left(\"s\" => 'hello'))")
        .throws_("No match found for function signature MY_LEFT(s => <CHARACTER>)");
    // arguments of wrong type
    with.query("values (\"adhoc\".my_left(\"n\" => 'hello', \"s\" => 'x'))")
        .throws_("No match found for function signature "
            + "MY_LEFT(n => <CHARACTER>, s => <CHARACTER>)");
    with.query("values (\"adhoc\".my_left(\"n\" => 1, \"s\" => 0))")
        .throws_("No match found for function signature "
            + "MY_LEFT(n => <NUMERIC>, s => <NUMERIC>)");
  }

  /** Tests calling a user-defined function some of whose parameters are
   * optional.
 */
  @Test public void testUdfArgumentOptional() {
    final CalciteAssert.AssertThat with = withUdf();
    // all five arguments supplied by name
    with.query("values (\"adhoc\".abcde(a=>1,b=>2,c=>3,d=>4,e=>5))")
        .returns("EXPR$0={a: 1, b: 2, c: 3, d: 4, e: 5}\n");
    // an explicit NULL is distinct from an omitted optional argument
    with.query("values (\"adhoc\".abcde(1,2,3,4,CAST(NULL AS INTEGER)))")
        .returns("EXPR$0={a: 1, b: 2, c: 3, d: 4, e: null}\n");
    // trailing optional arguments may be omitted
    with.query("values (\"adhoc\".abcde(a=>1,b=>2,c=>3,d=>4))")
        .returns("EXPR$0={a: 1, b: 2, c: 3, d: 4, e: null}\n");
    with.query("values (\"adhoc\".abcde(a=>1,b=>2,c=>3))")
        .returns("EXPR$0={a: 1, b: 2, c: 3, d: null, e: null}\n");
    // named arguments may skip optional parameters in the middle
    with.query("values (\"adhoc\".abcde(a=>1,e=>5,c=>3))")
        .returns("EXPR$0={a: 1, b: null, c: 3, d: null, e: 5}\n");
    // positional calls with fewer arguments
    with.query("values (\"adhoc\".abcde(1,2,3))")
        .returns("EXPR$0={a: 1, b: 2, c: 3, d: null, e: null}\n");
    with.query("values (\"adhoc\".abcde(1,2,3,4))")
        .returns("EXPR$0={a: 1, b: 2, c: 3, d: 4, e: null}\n");
    with.query("values (\"adhoc\".abcde(1,2,3,4,5))")
        .returns("EXPR$0={a: 1, b: 2, c: 3, d: 4, e: 5}\n");
    // too few arguments for the mandatory parameters
    with.query("values (\"adhoc\".abcde(1,2))")
        .throws_("No match found for function signature ABCDE(<NUMERIC>, <NUMERIC>)");
    // DEFAULT placeholder for an optional parameter
    with.query("values (\"adhoc\".abcde(1,DEFAULT,3))")
        .returns("EXPR$0={a: 1, b: null, c: 3, d: null, e: null}\n");
    with.query("values (\"adhoc\".abcde(1,DEFAULT,'abcde'))")
        .throws_("No match found for function signature ABCDE(<NUMERIC>, <ANY>, <CHARACTER>)");
    with.query("values (\"adhoc\".abcde(true))")
        .throws_("No match found for function signature ABCDE(<BOOLEAN>)");
    with.query("values (\"adhoc\".abcde(true,DEFAULT))")
        .throws_("No match found for function signature ABCDE(<BOOLEAN>, <ANY>)");
    with.query("values (\"adhoc\".abcde(1,DEFAULT,3,DEFAULT))")
        .returns("EXPR$0={a: 1, b: null, c: 3, d: null, e: null}\n");
    // DEFAULT is rejected for mandatory parameters
    with.query("values (\"adhoc\".abcde(1,2,DEFAULT))")
        .throws_("DEFAULT is only allowed for optional parameters");
    with.query("values (\"adhoc\".abcde(a=>1,b=>2,c=>DEFAULT))")
        .throws_("DEFAULT is only allowed for optional parameters");
    with.query("values (\"adhoc\".abcde(a=>1,b=>DEFAULT,c=>3))")
        .returns("EXPR$0={a: 1, b: null, c: 3, d: null, e: null}\n");
  }

  /** Test for
   * {@link org.apache.calcite.runtime.CalciteResource#requireDefaultConstructor(String)}. */
  @Test public void testUserDefinedFunction2() throws Exception {
    withBadUdf(Smalls.AwkwardFunction.class)
        .connectThrows(
            "Declaring class 'org.apache.calcite.util.Smalls$AwkwardFunction' of non-static user-defined function must have a public constructor with zero parameters");
  }

  /** Tests user-defined function, with multiple methods per class. */
  @Test public void testUserDefinedFunctionWithMethodName() throws Exception {
    // java.lang.Math has abs(int) and abs(double).
    final CalciteAssert.AssertThat with = withUdf();
    with.query("values abs(-4)").returnsValue("4");
    with.query("values abs(-4.5)").returnsValue("4.5");

    // 3 overloads of "fun1", another method "fun2", but method "nonStatic"
    // cannot be used as a function
    with.query("values \"adhoc\".\"fun1\"(2)").returnsValue("4");
    with.query("values \"adhoc\".\"fun1\"(2, 3)").returnsValue("5");
    with.query("values \"adhoc\".\"fun1\"('Foo Bar')").returnsValue("foo bar");
    with.query("values \"adhoc\".\"fun2\"(10)").returnsValue("30");
    with.query("values \"adhoc\".\"nonStatic\"(2)")
        .throws_("No match found for function signature nonStatic(<NUMERIC>)");
  }

  /** Tests user-defined aggregate function.
*/
  @Test public void testUserDefinedAggregateFunction() throws Exception {
    final String empDept = JdbcTest.EmpDeptTableFactory.class.getName();
    final String sum = Smalls.MyStaticSumFunction.class.getName();
    final String sum2 = Smalls.MySumFunction.class.getName();
    // Model registers EMPLOYEES plus two user-defined aggregates, MY_SUM
    // (static methods) and MY_SUM2 (instance methods).
    final CalciteAssert.AssertThat with = CalciteAssert.model("{\n"
        + " version: '1.0',\n"
        + " schemas: [\n"
        + " {\n"
        + " name: 'adhoc',\n"
        + " tables: [\n"
        + " {\n"
        + " name: 'EMPLOYEES',\n"
        + " type: 'custom',\n"
        + " factory: '" + empDept + "',\n"
        + " operand: {'foo': true, 'bar': 345}\n"
        + " }\n"
        + " ],\n"
        + " functions: [\n"
        + " {\n"
        + " name: 'MY_SUM',\n"
        + " className: '" + sum + "'\n"
        + " },\n"
        + " {\n"
        + " name: 'MY_SUM2',\n"
        + " className: '" + sum2 + "'\n"
        + " }\n"
        + " ]\n"
        + " }\n"
        + " ]\n"
        + "}")
        .withDefaultSchema("adhoc");
    // Fully-qualified call works even with no default schema.
    with.withDefaultSchema(null)
        .query(
            "select \"adhoc\".my_sum(\"deptno\") as p from \"adhoc\".EMPLOYEES\n")
        .returns("P=50\n");
    // Aggregate alongside a non-grouped column is a validation error.
    with.query("select my_sum(\"empid\"), \"deptno\" as p from EMPLOYEES\n")
        .throws_("Expression 'deptno' is not being grouped");
    with.query("select my_sum(\"deptno\") as p from EMPLOYEES\n")
        .returns("P=50\n");
    // Wrong argument type / arity: overload resolution fails.
    with.query("select my_sum(\"name\") as p from EMPLOYEES\n")
        .throws_("No match found for function signature MY_SUM(<CHARACTER>)");
    with.query("select my_sum(\"deptno\", 1) as p from EMPLOYEES\n")
        .throws_(
            "No match found for function signature MY_SUM(<NUMERIC>, <NUMERIC>)");
    with.query("select my_sum() as p from EMPLOYEES\n")
        .throws_(
            "No match found for function signature MY_SUM()");
    // Grouped aggregation works for both the static and instance variants.
    with.query("select \"deptno\", my_sum(\"deptno\") as p from EMPLOYEES\n"
        + "group by \"deptno\"")
        .returnsUnordered(
            "deptno=20; P=20",
            "deptno=10; P=30");
    with.query("select \"deptno\", my_sum2(\"deptno\") as p from EMPLOYEES\n"
        + "group by \"deptno\"")
        .returnsUnordered("deptno=20; P=20", "deptno=10; P=30");
  }

  /** Tests user-defined aggregate function.
*/
  @Test public void testUserDefinedAggregateFunctionWithMultipleParameters() throws Exception {
    final String empDept = JdbcTest.EmpDeptTableFactory.class.getName();
    final String sum21 = Smalls.MyTwoParamsSumFunctionFilter1.class.getName();
    final String sum22 = Smalls.MyTwoParamsSumFunctionFilter2.class.getName();
    final String sum31 = Smalls.MyThreeParamsSumFunctionWithFilter1.class.getName();
    final String sum32 = Smalls.MyThreeParamsSumFunctionWithFilter2.class.getName();
    // MY_SUM2 and MY_SUM3 are each registered twice, with different
    // implementing classes, to exercise multi-parameter overload resolution.
    final CalciteAssert.AssertThat with = CalciteAssert.model("{\n"
        + " version: '1.0',\n"
        + " schemas: [\n"
        + " {\n"
        + " name: 'adhoc',\n"
        + " tables: [\n"
        + " {\n"
        + " name: 'EMPLOYEES',\n"
        + " type: 'custom',\n"
        + " factory: '" + empDept + "',\n"
        + " operand: {'foo': true, 'bar': 345}\n"
        + " }\n"
        + " ],\n"
        + " functions: [\n"
        + " {\n"
        + " name: 'MY_SUM2',\n"
        + " className: '" + sum21 + "'\n"
        + " },\n"
        + " {\n"
        + " name: 'MY_SUM2',\n"
        + " className: '" + sum22 + "'\n"
        + " },\n"
        + " {\n"
        + " name: 'MY_SUM3',\n"
        + " className: '" + sum31 + "'\n"
        + " },\n"
        + " {\n"
        + " name: 'MY_SUM3',\n"
        + " className: '" + sum32 + "'\n"
        + " }\n"
        + " ]\n"
        + " }\n"
        + " ]\n"
        + "}")
        .withDefaultSchema("adhoc");
    with.withDefaultSchema(null)
        .query(
            "select \"adhoc\".my_sum3(\"deptno\",\"name\",'Eric') as p from \"adhoc\".EMPLOYEES\n")
        .returns("P=20\n");
    with.query("select \"adhoc\".my_sum3(\"empid\",\"deptno\",\"commission\") as p "
        + "from \"adhoc\".EMPLOYEES\n")
        .returns("P=330\n");
    // Non-grouped column next to the aggregate: validation error.
    with.query("select \"adhoc\".my_sum3(\"empid\",\"deptno\",\"commission\"),\"name\" as p "
        + "from \"adhoc\".EMPLOYEES\n")
        .throws_("Expression 'name' is not being grouped");
    with.query("select \"name\",\"adhoc\".my_sum3(\"empid\",\"deptno\",\"commission\") as p "
        + "from \"adhoc\".EMPLOYEES\n"
        + "group by \"name\"")
        .returnsUnordered("name=Theodore; P=0",
            "name=Eric; P=220",
            "name=Bill; P=110",
            "name=Sebastian; P=0");
    // No overload takes an approximate-numeric third argument.
    with.query("select \"adhoc\".my_sum3(\"empid\",\"deptno\",\"salary\") as p "
        + "from \"adhoc\".EMPLOYEES\n")
        .throws_("No match found for function signature MY_SUM3(<NUMERIC>, "
            + "<NUMERIC>, <APPROXIMATE_NUMERIC>)");
    // NOTE(review): no assertion is chained on this query; presumably it
    // only checks that the statement validates — confirm intent.
    with.query("select \"adhoc\".my_sum3(\"empid\",\"deptno\",\"name\") as p "
        + "from \"adhoc\".EMPLOYEES\n");
    with.query("select \"adhoc\".my_sum2(\"commission\",250) as p "
        + "from \"adhoc\".EMPLOYEES\n")
        .returns("P=1500\n");
    with.query("select \"adhoc\".my_sum2(\"name\",250) as p from \"adhoc\".EMPLOYEES\n")
        .throws_("No match found for function signature MY_SUM2(<CHARACTER>, <NUMERIC>)");
    with.query("select \"adhoc\".my_sum2(\"empid\",0.0) as p from \"adhoc\".EMPLOYEES\n")
        .throws_("No match found for function signature MY_SUM2(<NUMERIC>, <NUMERIC>)");
  }

  /** Test for
   * {@link org.apache.calcite.runtime.CalciteResource#firstParameterOfAdd(String)}. */
  @Test public void testUserDefinedAggregateFunction3() throws Exception {
    // An aggregate whose 'add' method's first parameter is not the
    // accumulator type must be rejected at connect time.
    withBadUdf(Smalls.SumFunctionBadIAdd.class).connectThrows(
        "Caused by: java.lang.RuntimeException: In user-defined aggregate class 'org.apache.calcite.util.Smalls$SumFunctionBadIAdd', first parameter to 'add' method must be the accumulator (the return type of the 'init' method)");
  }

  /** Test case for
   * <a href="https://issues.apache.org/jira/browse/CALCITE-1434">[CALCITE-1434]
   * AggregateFunctionImpl doesnt work if the class implements a generic
   * interface</a>.
*/ @Test public void testUserDefinedAggregateFunctionImplementsInterface() { final String empDept = JdbcTest.EmpDeptTableFactory.class.getName(); final String mySum3 = Smalls.MySum3.class.getName(); final String model = "{\n" + " version: '1.0',\n" + " schemas: [\n" + " {\n" + " name: 'adhoc',\n" + " tables: [\n" + " {\n" + " name: 'EMPLOYEES',\n" + " type: 'custom',\n" + " factory: '" + empDept + "',\n" + " operand: {'foo': true, 'bar': 345}\n" + " }\n" + " ],\n" + " functions: [\n" + " {\n" + " name: 'MY_SUM3',\n" + " className: '" + mySum3 + "'\n" + " }\n" + " ]\n" + " }\n" + " ]\n" + "}"; final CalciteAssert.AssertThat with = CalciteAssert.model(model) .withDefaultSchema("adhoc"); with.query("select my_sum3(\"deptno\") as p from EMPLOYEES\n") .returns("P=50\n"); with.withDefaultSchema(null) .query("select \"adhoc\".my_sum3(\"deptno\") as p\n" + "from \"adhoc\".EMPLOYEES\n") .returns("P=50\n"); with.query("select my_sum3(\"empid\"), \"deptno\" as p from EMPLOYEES\n") .throws_("Expression 'deptno' is not being grouped"); with.query("select my_sum3(\"deptno\") as p from EMPLOYEES\n") .returns("P=50\n"); with.query("select my_sum3(\"name\") as p from EMPLOYEES\n") .throws_("No match found for function signature MY_SUM3(<CHARACTER>)"); with.query("select my_sum3(\"deptno\", 1) as p from EMPLOYEES\n") .throws_("No match found for function signature " + "MY_SUM3(<NUMERIC>, <NUMERIC>)"); with.query("select my_sum3() as p from EMPLOYEES\n") .throws_("No match found for function signature MY_SUM3()"); with.query("select \"deptno\", my_sum3(\"deptno\") as p from EMPLOYEES\n" + "group by \"deptno\"") .returnsUnordered("deptno=20; P=20", "deptno=10; P=30"); } private static CalciteAssert.AssertThat withBadUdf(Class clazz) { final String empDept = JdbcTest.EmpDeptTableFactory.class.getName(); final String className = clazz.getName(); return CalciteAssert.model("{\n" + " version: '1.0',\n" + " schemas: [\n" + " {\n" + " name: 'adhoc',\n" + " tables: [\n" + " {\n" + " name: 
'EMPLOYEES',\n" + " type: 'custom',\n" + " factory: '" + empDept + "',\n" + " operand: {'foo': true, 'bar': 345}\n" + " }\n" + " ],\n" + " functions: [\n" + " {\n" + " name: 'AWKWARD',\n" + " className: '" + className + "'\n" + " }\n" + " ]\n" + " }\n" + " ]\n" + "}") .withDefaultSchema("adhoc"); } /** Tests user-defined aggregate function with FILTER. * * <p>Also tests that we do not try to push ADAF to JDBC source. */ @Test public void testUserDefinedAggregateFunctionWithFilter() throws Exception { final String sum = Smalls.MyStaticSumFunction.class.getName(); final String sum2 = Smalls.MySumFunction.class.getName(); final CalciteAssert.AssertThat with = CalciteAssert.model("{\n" + " version: '1.0',\n" + " schemas: [\n" + JdbcTest.SCOTT_SCHEMA + ",\n" + " {\n" + " name: 'adhoc',\n" + " functions: [\n" + " {\n" + " name: 'MY_SUM',\n" + " className: '" + sum + "'\n" + " },\n" + " {\n" + " name: 'MY_SUM2',\n" + " className: '" + sum2 + "'\n" + " }\n" + " ]\n" + " }\n" + " ]\n" + "}") .withDefaultSchema("adhoc"); with.query("select deptno, \"adhoc\".my_sum(deptno) as p\n" + "from scott.emp\n" + "group by deptno\n") .returns( "DEPTNO=20; P=100\n" + "DEPTNO=10; P=30\n" + "DEPTNO=30; P=180\n"); with.query("select deptno,\n" + " \"adhoc\".my_sum(deptno) filter (where job = 'CLERK') as c,\n" + " \"adhoc\".my_sum(deptno) filter (where job = 'XXX') as x\n" + "from scott.emp\n" + "group by deptno\n") .returns( "DEPTNO=20; C=40; X=0\n" + "DEPTNO=10; C=10; X=0\n" + "DEPTNO=30; C=30; X=0\n"); } /** Tests resolution of functions using schema paths. 
*/
  @Test public void testPath() throws Exception {
    final String name = Smalls.MyPlusFunction.class.getName();
    // Three schemas; adhoc3 declares a path that includes adhoc2, so its
    // lookups should also see adhoc2's functions without qualification.
    final CalciteAssert.AssertThat with = CalciteAssert.model("{\n"
        + " version: '1.0',\n"
        + " schemas: [\n"
        + " {\n"
        + " name: 'adhoc',\n"
        + " functions: [\n"
        + " {\n"
        + " name: 'MY_PLUS',\n"
        + " className: '" + name + "'\n"
        + " }\n"
        + " ]\n"
        + " },\n"
        + " {\n"
        + " name: 'adhoc2',\n"
        + " functions: [\n"
        + " {\n"
        + " name: 'MY_PLUS2',\n"
        + " className: '" + name + "'\n"
        + " }\n"
        + " ]\n"
        + " },\n"
        + " {\n"
        + " name: 'adhoc3',\n"
        + " path: ['adhoc2','adhoc3'],\n"
        + " functions: [\n"
        + " {\n"
        + " name: 'MY_PLUS3',\n"
        + " className: '" + name + "'\n"
        + " }\n"
        + " ]\n"
        + " }\n"
        + " ]\n"
        + "}");
    final String err = "No match found for function signature";
    final String res = "EXPR$0=2\n";
    // adhoc can see own function MY_PLUS but not adhoc2.MY_PLUS2 unless
    // qualified
    final CalciteAssert.AssertThat adhoc = with.withDefaultSchema("adhoc");
    adhoc.query("values MY_PLUS(1, 1)").returns(res);
    adhoc.query("values MY_PLUS2(1, 1)").throws_(err);
    adhoc.query("values \"adhoc2\".MY_PLUS(1, 1)").throws_(err);
    adhoc.query("values \"adhoc2\".MY_PLUS2(1, 1)").returns(res);
    // adhoc2 can see own function MY_PLUS2 but not adhoc2.MY_PLUS unless
    // qualified
    final CalciteAssert.AssertThat adhoc2 = with.withDefaultSchema("adhoc2");
    adhoc2.query("values MY_PLUS2(1, 1)").returns(res);
    adhoc2.query("values MY_PLUS(1, 1)").throws_(err);
    adhoc2.query("values \"adhoc\".MY_PLUS(1, 1)").returns(res);
    // adhoc3 can see own adhoc2.MY_PLUS2 because in path, with or without
    // qualification, but can only see adhoc.MY_PLUS with qualification
    final CalciteAssert.AssertThat adhoc3 = with.withDefaultSchema("adhoc3");
    adhoc3.query("values MY_PLUS2(1, 1)").returns(res);
    adhoc3.query("values MY_PLUS(1, 1)").throws_(err);
    adhoc3.query("values \"adhoc\".MY_PLUS(1, 1)").returns(res);
  }

  /** Test case for
   * <a href="https://issues.apache.org/jira/browse/CALCITE-986">[CALCITE-986]
   * User-defined function with
Date or Timestamp parameters</a>. */
  @Test public void testDate() {
    final CalciteAssert.AssertThat with = withUdf();
    // DATE arguments are passed to the UDF as days/millis since epoch;
    // the functions return -1 for SQL NULL (see the "cast(null ...)" cases).
    with.query("values \"adhoc\".\"dateFun\"(DATE '1970-01-01')")
        .returnsValue("0");
    with.query("values \"adhoc\".\"dateFun\"(DATE '1970-01-02')")
        .returnsValue("86400000");
    with.query("values \"adhoc\".\"dateFun\"(cast(null as date))")
        .returnsValue("-1");
    // TIME arguments: milliseconds since midnight.
    with.query("values \"adhoc\".\"timeFun\"(TIME '00:00:00')")
        .returnsValue("0");
    with.query("values \"adhoc\".\"timeFun\"(TIME '00:01:30')")
        .returnsValue("90000");
    with.query("values \"adhoc\".\"timeFun\"(cast(null as time))")
        .returnsValue("-1");
    // TIMESTAMP arguments: milliseconds since epoch.
    with.query("values \"adhoc\".\"timestampFun\"(TIMESTAMP '1970-01-01 00:00:00')")
        .returnsValue("0");
    with.query("values \"adhoc\".\"timestampFun\"(TIMESTAMP '1970-01-02 00:01:30')")
        .returnsValue("86490000");
    with.query("values \"adhoc\".\"timestampFun\"(cast(null as timestamp))")
        .returnsValue("-1");
  }

  /** Test case for
   * <a href="https://issues.apache.org/jira/browse/CALCITE-1041">[CALCITE-1041]
   * User-defined function returns DATE or TIMESTAMP value</a>.
*/
  @Test public void testReturnDate() {
    final CalciteAssert.AssertThat with = withUdf();
    // UDFs returning DATE: input is interpreted as days since epoch.
    with.query("values \"adhoc\".\"toDateFun\"(0)")
        .returnsValue("1970-01-01");
    with.query("values \"adhoc\".\"toDateFun\"(1)")
        .returnsValue("1970-01-02");
    // NULL in yields SQL NULL out.
    with.query("values \"adhoc\".\"toDateFun\"(cast(null as bigint))")
        .returnsValue(null);
    // UDFs returning TIME: input is milliseconds since midnight.
    with.query("values \"adhoc\".\"toTimeFun\"(0)")
        .returnsValue("00:00:00");
    with.query("values \"adhoc\".\"toTimeFun\"(90000)")
        .returnsValue("00:01:30");
    with.query("values \"adhoc\".\"toTimeFun\"(cast(null as bigint))")
        .returnsValue(null);
    // UDFs returning TIMESTAMP: input is milliseconds since epoch.
    with.query("values \"adhoc\".\"toTimestampFun\"(0)")
        .returnsValue("1970-01-01 00:00:00");
    with.query("values \"adhoc\".\"toTimestampFun\"(86490000)")
        .returnsValue("1970-01-02 00:01:30");
    with.query("values \"adhoc\".\"toTimestampFun\"(cast(null as bigint))")
        .returnsValue(null);
  }

  /** Test case for
   * <a href="https://issues.apache.org/jira/browse/CALCITE-1881">[CALCITE-1881]
   * Can't distinguish overloaded user-defined functions that have DATE and
   * TIMESTAMP arguments</a>.
*/
  @Test public void testDateAndTimestamp() {
    final CalciteAssert.AssertThat with = withUdf();
    // "toLong" is overloaded for DATE, TIMESTAMP and TIME; each overload
    // must be selected correctly (the CALCITE-1881 scenario).
    with.query("values \"adhoc\".\"toLong\"(DATE '1970-01-15')")
        .returns("EXPR$0=1209600000\n");
    with.query("values \"adhoc\".\"toLong\"(DATE '2002-08-11')")
        .returns("EXPR$0=1029024000000\n");
    with.query("values \"adhoc\".\"toLong\"(DATE '2003-04-11')")
        .returns("EXPR$0=1050019200000\n");
    with.query("values \"adhoc\".\"toLong\"(TIMESTAMP '2003-04-11 00:00:00')")
        .returns("EXPR$0=1050019200000\n");
    with.query("values \"adhoc\".\"toLong\"(TIMESTAMP '2003-04-11 00:00:06')")
        .returns("EXPR$0=1050019206000\n");
    with.query("values \"adhoc\".\"toLong\"(TIMESTAMP '2003-04-18 01:20:00')")
        .returns("EXPR$0=1050628800000\n");
    with.query("values \"adhoc\".\"toLong\"(TIME '00:20:00')")
        .returns("EXPR$0=1200000\n");
    with.query("values \"adhoc\".\"toLong\"(TIME '00:20:10')")
        .returns("EXPR$0=1210000\n");
    with.query("values \"adhoc\".\"toLong\"(TIME '01:20:00')")
        .returns("EXPR$0=4800000\n");
  }

  /** Test case for
   * <a href="https://issues.apache.org/jira/browse/CALCITE-1041">[CALCITE-1041]
   * User-defined function returns DATE or TIMESTAMP value</a>. */
  @Test public void testReturnDate2() {
    final CalciteAssert.AssertThat with = withUdf();
    // A TIMESTAMP-returning UDF used inside an IN list; the three variants
    // differ only in how the IN operands are written (cast, literal, string).
    with.query("select * from (values 0) as t(c)\n"
        + "where \"adhoc\".\"toTimestampFun\"(c) in (\n"
        + " cast('1970-01-01 00:00:00' as timestamp),\n"
        + " cast('1997-02-01 00:00:00' as timestamp))")
        .returnsValue("0");
    with.query("select * from (values 0) as t(c)\n"
        + "where \"adhoc\".\"toTimestampFun\"(c) in (\n"
        + " timestamp '1970-01-01 00:00:00',\n"
        + " timestamp '1997-02-01 00:00:00')")
        .returnsValue("0");
    with.query("select * from (values 0) as t(c)\n"
        + "where \"adhoc\".\"toTimestampFun\"(c) in (\n"
        + " '1970-01-01 00:00:00',\n"
        + " '1997-02-01 00:00:00')")
        .returnsValue("0");
  }

  /**
   * Test case for
   * <a href="https://issues.apache.org/jira/browse/CALCITE-1834">[CALCITE-1834]
   * User-defined function for Arrays</a>.
*/
  @Test public void testArrayUserDefinedFunction() throws Exception {
    // try-with-resources closes the connection; the original also called
    // connection.close() explicitly inside the block — redundant, removed.
    try (Connection connection = DriverManager.getConnection("jdbc:calcite:")) {
      CalciteConnection calciteConnection =
          connection.unwrap(CalciteConnection.class);
      SchemaPlus rootSchema = calciteConnection.getRootSchema();
      rootSchema.add("hr", new ReflectiveSchema(new JdbcTest.HrSchema()));
      SchemaPlus post = rootSchema.add("POST", new AbstractSchema());
      // Two overloads registered under the same name; the INTEGER overload
      // should be chosen for the integer "deptno" argument below.
      post.add("ARRAY_APPEND", new ArrayAppendDoubleFunction());
      post.add("ARRAY_APPEND", new ArrayAppendIntegerFunction());
      final String sql = "select \"empid\" as EMPLOYEE_ID,\n"
          + " \"name\" || ' ' || \"name\" as EMPLOYEE_NAME,\n"
          + " \"salary\" as EMPLOYEE_SALARY,\n"
          + " POST.ARRAY_APPEND(ARRAY[1,2,3], \"deptno\") as DEPARTMENTS\n"
          + "from \"hr\".\"emps\"";
      final String result = ""
          + "EMPLOYEE_ID=100; EMPLOYEE_NAME=Bill Bill;"
          + " EMPLOYEE_SALARY=10000.0; DEPARTMENTS=[1, 2, 3, 10]\n"
          + "EMPLOYEE_ID=200; EMPLOYEE_NAME=Eric Eric;"
          + " EMPLOYEE_SALARY=8000.0; DEPARTMENTS=[1, 2, 3, 20]\n"
          + "EMPLOYEE_ID=150; EMPLOYEE_NAME=Sebastian Sebastian;"
          + " EMPLOYEE_SALARY=7000.0; DEPARTMENTS=[1, 2, 3, 10]\n"
          + "EMPLOYEE_ID=110; EMPLOYEE_NAME=Theodore Theodore;"
          + " EMPLOYEE_SALARY=11500.0; DEPARTMENTS=[1, 2, 3, 10]\n";
      try (Statement statement = connection.createStatement();
           ResultSet resultSet = statement.executeQuery(sql)) {
        assertThat(CalciteAssert.toString(resultSet), is(result));
      }
    }
  }

  /**
   * Base class for functions that append arrays.
*/
  private abstract static class ArrayAppendScalarFunction
      implements ScalarFunction, ImplementableFunction {
    /** Builds one FunctionParameter per proto-type from {@link #getParams()};
     * parameters are named "arg0", "arg1", ... and are all mandatory. */
    public List<FunctionParameter> getParameters() {
      final List<FunctionParameter> parameters = new ArrayList<>();
      for (final Ord<RelProtoDataType> type : Ord.zip(getParams())) {
        parameters.add(
            new FunctionParameter() {
              public int getOrdinal() {
                return type.i;
              }

              public String getName() {
                return "arg" + type.i;
              }

              public RelDataType getType(RelDataTypeFactory typeFactory) {
                return type.e.apply(typeFactory);
              }

              public boolean isOptional() {
                return false;
              }
            });
      }
      return parameters;
    }

    /** Proto-types of this overload's parameters, supplied by subclasses. */
    protected abstract List<RelProtoDataType> getParams();

    @Override public CallImplementor getImplementor() {
      return new CallImplementor() {
        public Expression implement(RexToLixTranslator translator,
            RexCall call, NullAs nullAs) {
          // Both overloads delegate to the same Java implementation.
          Method lookupMethod =
              Types.lookupMethod(Smalls.AllTypesFunction.class,
                  "arrayAppendFun", List.class, Integer.class);
          return Expressions.call(lookupMethod,
              translator.translateList(call.getOperands(), nullAs));
        }
      };
    }
  }

  /** Function with signature "f(ARRAY OF INTEGER, INTEGER) returns ARRAY OF
   * INTEGER". */
  // FIX: made static — the class uses no enclosing-instance state, and a
  // non-static inner class carries a needless reference to the outer test.
  private static class ArrayAppendIntegerFunction extends ArrayAppendScalarFunction {
    @Override public RelDataType getReturnType(RelDataTypeFactory typeFactory) {
      return typeFactory.createArrayType(
          typeFactory.createSqlType(SqlTypeName.INTEGER), -1);
    }

    @Override public List<RelProtoDataType> getParams() {
      return ImmutableList.of(
          new RelProtoDataType() {
            public RelDataType apply(RelDataTypeFactory typeFactory) {
              return typeFactory.createArrayType(
                  typeFactory.createSqlType(SqlTypeName.INTEGER), -1);
            }
          },
          new RelProtoDataType() {
            public RelDataType apply(RelDataTypeFactory typeFactory) {
              return typeFactory.createSqlType(SqlTypeName.INTEGER);
            }
          });
    }
  }

  /** Function with signature "f(ARRAY OF DOUBLE, INTEGER) returns ARRAY OF
   * DOUBLE".
*/
  // FIX: made static (no enclosing-instance state is used) and added
  // @Override annotations, matching the Integer sibling overload.
  private static class ArrayAppendDoubleFunction extends ArrayAppendScalarFunction {
    @Override public RelDataType getReturnType(RelDataTypeFactory typeFactory) {
      return typeFactory.createArrayType(
          typeFactory.createSqlType(SqlTypeName.DOUBLE), -1);
    }

    @Override public List<RelProtoDataType> getParams() {
      return ImmutableList.of(
          new RelProtoDataType() {
            public RelDataType apply(RelDataTypeFactory typeFactory) {
              return typeFactory.createArrayType(
                  typeFactory.createSqlType(SqlTypeName.DOUBLE), -1);
            }
          },
          new RelProtoDataType() {
            public RelDataType apply(RelDataTypeFactory typeFactory) {
              return typeFactory.createSqlType(SqlTypeName.INTEGER);
            }
          });
    }
  }
}

// End UdfTest.java
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.sagemaker.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.protocol.StructuredPojo; import com.amazonaws.protocol.ProtocolMarshaller; /** * <p> * Defines the objective metric for a hyperparameter tuning job. Hyperparameter tuning uses the value of this metric to * evaluate the training jobs it launches, and returns the training job that results in either the highest or lowest * value for this metric, depending on the value you specify for the <code>Type</code> parameter. * </p> * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/sagemaker-2017-07-24/HyperParameterTuningJobObjective" * target="_top">AWS API Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class HyperParameterTuningJobObjective implements Serializable, Cloneable, StructuredPojo { /** * <p> * Whether to minimize or maximize the objective metric. * </p> */ private String type; /** * <p> * The name of the metric to use for the objective metric. * </p> */ private String metricName; /** * <p> * Whether to minimize or maximize the objective metric. * </p> * * @param type * Whether to minimize or maximize the objective metric. * @see HyperParameterTuningJobObjectiveType */ public void setType(String type) { this.type = type; } /** * <p> * Whether to minimize or maximize the objective metric. 
* </p> * * @return Whether to minimize or maximize the objective metric. * @see HyperParameterTuningJobObjectiveType */ public String getType() { return this.type; } /** * <p> * Whether to minimize or maximize the objective metric. * </p> * * @param type * Whether to minimize or maximize the objective metric. * @return Returns a reference to this object so that method calls can be chained together. * @see HyperParameterTuningJobObjectiveType */ public HyperParameterTuningJobObjective withType(String type) { setType(type); return this; } /** * <p> * Whether to minimize or maximize the objective metric. * </p> * * @param type * Whether to minimize or maximize the objective metric. * @return Returns a reference to this object so that method calls can be chained together. * @see HyperParameterTuningJobObjectiveType */ public HyperParameterTuningJobObjective withType(HyperParameterTuningJobObjectiveType type) { this.type = type.toString(); return this; } /** * <p> * The name of the metric to use for the objective metric. * </p> * * @param metricName * The name of the metric to use for the objective metric. */ public void setMetricName(String metricName) { this.metricName = metricName; } /** * <p> * The name of the metric to use for the objective metric. * </p> * * @return The name of the metric to use for the objective metric. */ public String getMetricName() { return this.metricName; } /** * <p> * The name of the metric to use for the objective metric. * </p> * * @param metricName * The name of the metric to use for the objective metric. * @return Returns a reference to this object so that method calls can be chained together. */ public HyperParameterTuningJobObjective withMetricName(String metricName) { setMetricName(metricName); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. 
* * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getType() != null) sb.append("Type: ").append(getType()).append(","); if (getMetricName() != null) sb.append("MetricName: ").append(getMetricName()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof HyperParameterTuningJobObjective == false) return false; HyperParameterTuningJobObjective other = (HyperParameterTuningJobObjective) obj; if (other.getType() == null ^ this.getType() == null) return false; if (other.getType() != null && other.getType().equals(this.getType()) == false) return false; if (other.getMetricName() == null ^ this.getMetricName() == null) return false; if (other.getMetricName() != null && other.getMetricName().equals(this.getMetricName()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getType() == null) ? 0 : getType().hashCode()); hashCode = prime * hashCode + ((getMetricName() == null) ? 0 : getMetricName().hashCode()); return hashCode; } @Override public HyperParameterTuningJobObjective clone() { try { return (HyperParameterTuningJobObjective) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } @com.amazonaws.annotation.SdkInternalApi @Override public void marshall(ProtocolMarshaller protocolMarshaller) { com.amazonaws.services.sagemaker.model.transform.HyperParameterTuningJobObjectiveMarshaller.getInstance().marshall(this, protocolMarshaller); } }
package com.mindscapehq.raygun4java.core;

import com.mindscapehq.raygun4java.core.handlers.offlinesupport.RaygunOnFailedSendOfflineStorageHandler;
import com.mindscapehq.raygun4java.core.handlers.requestfilters.RaygunDuplicateErrorFilterFactory;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.MockitoAnnotations;

import java.io.IOException;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

/**
 * Unit tests for {@link RaygunClientFactory}: version detection, handler
 * chain wiring, duplicate-error suppression, offline storage, and the
 * propagation of factory-level tags/data into clients it creates.
 */
public class RaygunClientFactoryTest {

    @Mock
    private IRaygunSendEventFactory<IRaygunOnBeforeSend> onBeforeSendhandlerFactory;
    @Mock
    private IRaygunSendEventFactory<IRaygunOnAfterSend> onAfterSendhandlerFactory;
    @Mock
    private IRaygunSendEventFactory<IRaygunOnFailedSend> onFailedSendhandlerFactory;

    @Before
    public void setup() {
        MockitoAnnotations.initMocks(this);
    }

    // Factory helpers so subclasses/tests construct clients uniformly.
    public RaygunClientFactory getFactory(String key) {
        return new RaygunClientFactory(key);
    }

    public RaygunClient getClient(IRaygunClientFactory factory) {
        return factory.newClient();
    }

    @Test
    public void shouldConstructFactoryWithDefaultVersionDetection() {
        IRaygunClientFactory factory = getFactory("apiKey");
        RaygunClient client = getClient(factory);
        // With nothing to detect the version from, the client reports a
        // "Not supplied" version string.
        assertEquals("Not supplied", client.string);
        assertEquals("apiKey", client.apiKey);
    }

    @Test
    public void shouldConstructFactoryWithVersionDetectionFromClass() {
        // Version is read from the jar manifest of the given class
        // (commons-io 2.5 here) — TODO confirm this stays in sync with the
        // commons-io version on the test classpath.
        IRaygunClientFactory factory = getFactory("apiKey").withVersionFrom(org.apache.commons.io.IOUtils.class);
        RaygunClient client = getClient(factory);
        assertEquals("2.5", client.string);
        assertEquals("apiKey", client.apiKey);
    }

    @Test
    public void shouldConstructFactoryWithSuppliedVersion() {
        IRaygunClientFactory factory = getFactory("apiKey").withVersion("1.2.3");
        RaygunClient client = getClient(factory);
        assertEquals("1.2.3", client.string);
        assertEquals("apiKey", client.apiKey);
    }

    @Test
    public void shouldConstructFactoryWithDuplicateErrorHandler() {
        IRaygunClientFactory factory = getFactory("apiKey");
        RaygunClient client = getClient(factory);
        // The duplicate-error filter is installed last in the before-send
        // chain and is the same instance as the first after-send handler.
        assertTrue(factory.getRaygunOnBeforeSendChainFactory().getLastFilterFactory() instanceof RaygunDuplicateErrorFilterFactory);
        assertEquals(factory.getRaygunOnAfterSendChainFactory().getHandlersFactory().get(0),
                factory.getRaygunOnBeforeSendChainFactory().getLastFilterFactory());
    }

    @Test
    public void shouldConstructFactoryWithOnBeforeSendHandler() {
        IRaygunOnBeforeSend handler = mock(IRaygunOnBeforeSend.class);
        when(onBeforeSendhandlerFactory.create()).thenReturn(handler);
        IRaygunClientFactory factory = getFactory("apiKey").withBeforeSend(onBeforeSendhandlerFactory);
        RaygunClient client = getClient(factory);
        assertEquals(factory.getRaygunOnBeforeSendChainFactory().getHandlersFactory().get(0), onBeforeSendhandlerFactory);
        assertEquals(((RaygunOnBeforeSendChain) getClient(factory).onBeforeSend).getHandlers().get(0), handler);
    }

    @Test
    public void shouldConstructFactoryWithOnAfterSendHandler() {
        IRaygunOnAfterSend handler = mock(IRaygunOnAfterSend.class);
        when(onAfterSendhandlerFactory.create()).thenReturn(handler);
        IRaygunClientFactory factory = getFactory("apiKey").withAfterSend(onAfterSendhandlerFactory);
        RaygunClient client = getClient(factory);
        // Index 1: slot 0 is occupied by the duplicate-error handler.
        assertEquals(factory.getRaygunOnAfterSendChainFactory().getHandlersFactory().get(1), onAfterSendhandlerFactory);
        assertEquals(((RaygunOnAfterSendChain) getClient(factory).onAfterSend).getHandlers().get(1), handler);
    }

    @Test
    public void shouldConstructFactoryWithOnFailedSendHandler() {
        IRaygunOnFailedSend handler = mock(IRaygunOnFailedSend.class);
        when(onFailedSendhandlerFactory.create()).thenReturn(handler);
        IRaygunClientFactory factory = getFactory("apiKey").withFailedSend(onFailedSendhandlerFactory);
        RaygunClient client = getClient(factory);
        assertEquals(factory.getRaygunOnFailedSendChainFactory().getHandlersFactory().get(0), onFailedSendhandlerFactory);
        assertEquals(((RaygunOnFailedSendChain) getClient(factory).onFailedSend).getHandlers().get(0), handler);
    }

    @Test
    public void shouldConstructFactoryDuplicateDetection() throws IOException {
        IRaygunClientFactory factory = getFactory("apiKey");
        RaygunClient client = getClient(factory);
        RaygunConnection raygunConnection = mock(RaygunConnection.class);
        client.setRaygunConnection(raygunConnection);
        HttpURLConnection httpURLConnection = mock(HttpURLConnection.class);
        when(httpURLConnection.getResponseCode()).thenReturn(202);
        when(httpURLConnection.getOutputStream()).thenReturn(mock(OutputStream.class));
        when(raygunConnection.getConnection(anyString())).thenReturn(httpURLConnection);
        Exception exception = new Exception("boom");
        // Sending the same exception twice should hit the wire only once.
        client.send(exception);
        client.send(exception);
        verify(raygunConnection, times(1)).getConnection(anyString());
        // and a new client
        Mockito.reset(raygunConnection);
        client = getClient(factory);
        client.setRaygunConnection(raygunConnection);
        // A fresh client has fresh duplicate-tracking state, so it sends again.
        client.send(exception);
        verify(raygunConnection, times(1)).getConnection(anyString());
    }

    @Test
    public void shouldSetBreadcrumbLocations() {
        RaygunClientFactory factory = getFactory("apiKey");
        // Off by default; enabled per-factory and inherited by new clients.
        assertFalse(getClient(factory).shouldProcessBreadcrumbLocation());
        factory.withBreadcrumbLocations();
        assertTrue(getClient(factory).shouldProcessBreadcrumbLocation());
    }

    @Test
    public void shouldSetOfflineStorageHandler() {
        IRaygunClientFactory factory = getFactory("apiKey").withOfflineStorage();
        // The same offline-storage handler instance is installed first in
        // both the before-send and failed-send chains.
        assertTrue(factory.getRaygunOnBeforeSendChainFactory().getHandlersFactory().get(0) instanceof RaygunOnFailedSendOfflineStorageHandler);
        assertEquals(factory.getRaygunOnBeforeSendChainFactory().getHandlersFactory().get(0),
                factory.getRaygunOnFailedSendChainFactory().getHandlersFactory().get(0));
    }

    @Test
    public void shouldAddTagsToFactory() {
        RaygunClientFactory f1 = getFactory("apiKey").withTag("a");
        assertTrue(f1.factoryTags.contains("a"));
        // Tags are per-factory; a second factory does not see the first's.
        RaygunClientFactory f2 = getFactory("apiKey").withTag("b");
        assertTrue(!f2.factoryTags.contains("a"));
        assertTrue(f2.factoryTags.contains("b"));
        RaygunClient c1 = getClient(f1);
        c1.withTag("a1");
        RaygunClient c2 = getClient(f2);
        c2.withTag("b1");
        // Clients see their own factory's tags plus their own, and nothing else.
        assertTrue(c1.getTags().contains("a"));
        assertTrue(c1.getTags().contains("a1"));
        assertTrue(!c1.getTags().contains("b"));
        assertTrue(!c1.getTags().contains("b1"));
        assertTrue(!c2.getTags().contains("a"));
        assertTrue(!c2.getTags().contains("a1"));
        assertTrue(c2.getTags().contains("b"));
        assertTrue(c2.getTags().contains("b1"));
        Set<String> errorTags = new HashSet<String>();
        errorTags.add("a2");
        // Per-error tags are merged for the send but do not leak back into
        // the factory's or client's tag sets.
        errorTags = c1.getTagsForError(errorTags);
        assertTrue(errorTags.contains("a"));
        assertTrue(errorTags.contains("a1"));
        assertTrue(errorTags.contains("a2"));
        assertTrue(!f1.factoryTags.contains("a2"));
        assertTrue(!c1.getTags().contains("a2"));
    }

    @Test
    public void shouldAddDataToFactory() {
        RaygunClientFactory f1 = getFactory("apiKey").withData("a", 1);
        assertTrue(f1.factoryData.containsKey("a"));
        RaygunClientFactory f2 = getFactory("apiKey").withData("b", 1);
        assertTrue(!f2.factoryData.containsKey("a"));
        assertTrue(f2.factoryData.containsKey("b"));
        RaygunClient c1 = getClient(f1);
        c1.withData("a1", 1);
        RaygunClient c2 = getClient(f2);
        c2.withData("b1", 1);
        // Same isolation rules as tags, but for custom data entries.
        assertTrue(c1.getData().containsKey("a"));
        assertTrue(c1.getData().containsKey("a1"));
        assertTrue(!c1.getData().containsKey("b"));
        assertTrue(!c1.getData().containsKey("b1"));
        assertTrue(!c2.getData().containsKey("a"));
        assertTrue(!c2.getData().containsKey("a1"));
        assertTrue(c2.getData().containsKey("b"));
        assertTrue(c2.getData().containsKey("b1"));
        Map errorData = new HashMap();
        errorData.put("a2", 1);
        // Per-error data is merged for the send but not persisted.
        errorData = c1.getDataForError(errorData);
        assertTrue(errorData.containsKey("a"));
        assertTrue(errorData.containsKey("a1"));
        assertTrue(errorData.containsKey("a2"));
        assertTrue(!f1.factoryData.containsKey("a2"));
        assertTrue(!c1.getData().containsKey("a2"));
    }
}
/* * reserved comment block * DO NOT REMOVE OR ALTER! */ /* * Copyright 1999-2004 The Apache Software Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.sun.org.apache.xml.internal.security.c14n.implementations; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.OutputStream; import java.io.UnsupportedEncodingException; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.ListIterator; import java.util.Map; import java.util.Set; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import com.sun.org.apache.xml.internal.security.c14n.CanonicalizationException; import com.sun.org.apache.xml.internal.security.c14n.CanonicalizerSpi; import com.sun.org.apache.xml.internal.security.c14n.helper.AttrCompare; import com.sun.org.apache.xml.internal.security.signature.NodeFilter; import com.sun.org.apache.xml.internal.security.signature.XMLSignatureInput; import com.sun.org.apache.xml.internal.security.utils.Constants; import com.sun.org.apache.xml.internal.security.utils.UnsyncByteArrayOutputStream; import com.sun.org.apache.xml.internal.security.utils.XMLUtils; import org.w3c.dom.Attr; import org.w3c.dom.Comment; import org.w3c.dom.Element; import org.w3c.dom.NamedNodeMap; import org.w3c.dom.Node; import org.w3c.dom.ProcessingInstruction; import org.xml.sax.SAXException; /** * Abstract base class for canonicalization 
algorithms. * * @author Christian Geuer-Pollmann <geuerp@apache.org> * @version $Revision: 1.5 $ */ public abstract class CanonicalizerBase extends CanonicalizerSpi { //Constants to be outputed, In char array form, so //less garbage is generate when outputed. private static final byte[] _END_PI = {'?','>'}; private static final byte[] _BEGIN_PI = {'<','?'}; private static final byte[] _END_COMM = {'-','-','>'}; private static final byte[] _BEGIN_COMM = {'<','!','-','-'}; private static final byte[] __XA_ = {'&','#','x','A',';'}; private static final byte[] __X9_ = {'&','#','x','9',';'}; private static final byte[] _QUOT_ = {'&','q','u','o','t',';'}; private static final byte[] __XD_ = {'&','#','x','D',';'}; private static final byte[] _GT_ = {'&','g','t',';'}; private static final byte[] _LT_ = {'&','l','t',';'}; private static final byte[] _END_TAG = {'<','/'}; private static final byte[] _AMP_ = {'&','a','m','p',';'}; final static AttrCompare COMPARE=new AttrCompare(); final static String XML="xml"; final static String XMLNS="xmlns"; final static byte[] equalsStr= {'=','\"'}; static final int NODE_BEFORE_DOCUMENT_ELEMENT = -1; static final int NODE_NOT_BEFORE_OR_AFTER_DOCUMENT_ELEMENT = 0; static final int NODE_AFTER_DOCUMENT_ELEMENT = 1; //The null xmlns definiton. protected static final Attr nullNode; static { try { nullNode=DocumentBuilderFactory.newInstance(). newDocumentBuilder().newDocument().createAttributeNS(Constants.NamespaceSpecNS,XMLNS); nullNode.setValue(""); } catch (Exception e) { throw new RuntimeException("Unable to create nullNode"/*,*/+e); } } List nodeFilter; boolean _includeComments; Set _xpathNodeSet = null; /** * The node to be skiped/excluded from the DOM tree * in subtree canonicalizations. 
*/
   Node _excludeNode = null;

   // Output sink; defaults to an in-memory buffer, replaceable via setWriter().
   OutputStream _writer = new UnsyncByteArrayOutputStream();//null;

   /**
    * Constructor CanonicalizerBase
    *
    * @param includeComments whether comments are kept in the canonical output
    */
   public CanonicalizerBase(boolean includeComments) {
      this._includeComments = includeComments;
   }

   /**
    * Method engineCanonicalizeSubTree: canonicalizes the whole subtree rooted
    * at {@code rootNode} with no excluded node.
    * @inheritDoc
    * @param rootNode
    * @throws CanonicalizationException
    */
   public byte[] engineCanonicalizeSubTree(Node rootNode)
           throws CanonicalizationException {
      return engineCanonicalizeSubTree(rootNode, (Node)null);
   }

   /**
    * Method engineCanonicalizeXPathNodeSet: canonicalizes the given XPath node
    * set, starting the traversal from the set's owner document.
    * @inheritDoc
    * @param xpathNodeSet
    * @throws CanonicalizationException
    */
   public byte[] engineCanonicalizeXPathNodeSet(Set xpathNodeSet)
           throws CanonicalizationException {
      this._xpathNodeSet = xpathNodeSet;
      return engineCanonicalizeXPathNodeSetInternal(XMLUtils.getOwnerDocument(this._xpathNodeSet));
   }

   /**
    * Canonicalizes a Subtree node. Dispatches on the input form: raw octets
    * are canonicalized as bytes, an element as a subtree, a node set via the
    * node-set traversal (with any configured node filters).
    * @param input the root of the subtree to canonicalize
    * @return The canonicalized stream, or null if the input matches no form.
    * @throws CanonicalizationException
    */
   public byte[] engineCanonicalize(XMLSignatureInput input)
           throws CanonicalizationException {
      try {
         if (input.isExcludeComments())
            _includeComments = false;
         byte[] bytes;
         if (input.isOctetStream()) {
            return engineCanonicalize(input.getBytes());
         }
         if (input.isElement()) {
            bytes = engineCanonicalizeSubTree(input.getSubNode(), input.getExcludeNode());
            return bytes;
         } else if (input.isNodeSet()) {
            nodeFilter = input.getNodeFilters();
            circumventBugIfNeeded(input);
            if (input.getSubNode() != null) {
               bytes = engineCanonicalizeXPathNodeSetInternal(input.getSubNode());
            } else {
               bytes = engineCanonicalizeXPathNodeSet(input.getNodeSet());
            }
            return bytes;
         }
         return null;
      } catch (CanonicalizationException ex) {
         throw new CanonicalizationException("empty", ex);
      } catch (ParserConfigurationException ex) {
         throw new CanonicalizationException("empty", ex);
      } catch (IOException ex) {
         throw new CanonicalizationException("empty", ex);
      } catch (SAXException ex) {
         throw new CanonicalizationException("empty", ex);
      }
   }

   /**
    * @param _writer The _writer to set.
    */
   public void setWriter(OutputStream _writer) {
      this._writer = _writer;
   }

   /**
    * Canonicalizes a Subtree node.
    *
    * @param rootNode
    *            the root of the subtree to canonicalize
    * @param excludeNode
    *            a node to be excluded from the canonicalize operation
    * @return The canonicalized stream.
    * @throws CanonicalizationException
    */
   byte[] engineCanonicalizeSubTree(Node rootNode, Node excludeNode)
           throws CanonicalizationException {
      this._excludeNode = excludeNode;
      try {
         NameSpaceSymbTable ns = new NameSpaceSymbTable();
         int nodeLevel = NODE_BEFORE_DOCUMENT_ELEMENT;
         if (rootNode instanceof Element) {
            // Fill the ns symbol table with the definitions of the parents of
            // the root subnode, so inherited namespaces are rendered.
            getParentNameSpaces((Element)rootNode, ns);
            nodeLevel = NODE_NOT_BEFORE_OR_AFTER_DOCUMENT_ELEMENT;
         }
         this.canonicalizeSubTree(rootNode, ns, rootNode, nodeLevel);
         this._writer.close();
         if (this._writer instanceof ByteArrayOutputStream) {
            byte[] result = ((ByteArrayOutputStream)this._writer).toByteArray();
            if (reset) {
               ((ByteArrayOutputStream)this._writer).reset();
            }
            return result;
         } else if (this._writer instanceof UnsyncByteArrayOutputStream) {
            byte[] result = ((UnsyncByteArrayOutputStream)this._writer).toByteArray();
            if (reset) {
               ((UnsyncByteArrayOutputStream)this._writer).reset();
            }
            return result;
         }
         // Caller supplied a custom writer: output already streamed, nothing to return.
         return null;
      } catch (UnsupportedEncodingException ex) {
         throw new CanonicalizationException("empty", ex);
      } catch (IOException ex) {
         throw new CanonicalizationException("empty", ex);
      }
   }

   /**
    * Method canonicalizeSubTree. NOTE(review): despite the original comment
    * calling this recursive, it is an iterative depth-first traversal driven
    * by sibling/parentNode pointers.
    *
    * @param currentNode
    * @param ns
    * @param endnode
    * @throws CanonicalizationException
    * @throws IOException
    */
   final void canonicalizeSubTree(Node currentNode, NameSpaceSymbTable ns, Node endnode,
           int documentLevel) throws CanonicalizationException, IOException {
      if (isVisibleInt(currentNode) == -1)
         return;
      Node sibling = null;
      Node parentNode = null;
      final OutputStream writer = this._writer;
      final Node excludeNode = this._excludeNode;
      final boolean includeComments = this._includeComments;
      // Cache of already-encoded names for UtfHelpper.
      Map cache = new HashMap();
      do {
         switch (currentNode.getNodeType()) {
         case Node.DOCUMENT_TYPE_NODE :
         default :
            break;
         case Node.ENTITY_NODE :
         case Node.NOTATION_NODE :
         case Node.ATTRIBUTE_NODE :
            // illegal node type during traversal
            throw new CanonicalizationException("empty");
         case Node.DOCUMENT_FRAGMENT_NODE :
         case Node.DOCUMENT_NODE :
            ns.outputNodePush();
            sibling = currentNode.getFirstChild();
            break;
         case Node.COMMENT_NODE :
            if (includeComments) {
               outputCommentToWriter((Comment) currentNode, writer, documentLevel);
            }
            break;
         case Node.PROCESSING_INSTRUCTION_NODE :
            outputPItoWriter((ProcessingInstruction) currentNode, writer, documentLevel);
            break;
         case Node.TEXT_NODE :
         case Node.CDATA_SECTION_NODE :
            outputTextToWriter(currentNode.getNodeValue(), writer);
            break;
         case Node.ELEMENT_NODE :
            documentLevel = NODE_NOT_BEFORE_OR_AFTER_DOCUMENT_ELEMENT;
            if (currentNode == excludeNode) {
               break;
            }
            Element currentElement = (Element) currentNode;
            // Add a level to the ns symbol table, so later it can be popped back.
            ns.outputNodePush();
            writer.write('<');
            String name = currentElement.getTagName();
            UtfHelpper.writeByte(name, writer, cache);
            Iterator attrs = this.handleAttributesSubtree(currentElement, ns);
            if (attrs != null) {
               // we output all Attrs which are available
               while (attrs.hasNext()) {
                  Attr attr = (Attr) attrs.next();
                  outputAttrToWriter(attr.getNodeName(), attr.getNodeValue(), writer, cache);
               }
            }
            writer.write('>');
            sibling = currentNode.getFirstChild();
            if (sibling == null) {
               // Empty element: emit the close tag immediately.
               writer.write(_END_TAG);
               UtfHelpper.writeStringToUtf8(name, writer);
               writer.write('>');
               // We finished with this level, pop to the previous definitions.
               ns.outputNodePop();
               if (parentNode != null) {
                  sibling = currentNode.getNextSibling();
               }
            } else {
               parentNode = currentElement;
            }
            break;
         }
         // Ascend while there is no sibling left, closing open elements.
         while (sibling == null && parentNode != null) {
            writer.write(_END_TAG);
            UtfHelpper.writeByte(((Element)parentNode).getTagName(), writer, cache);
            writer.write('>');
            // We finished with this level, pop to the previous definitions.
            ns.outputNodePop();
            if (parentNode == endnode)
               return;
            sibling = parentNode.getNextSibling();
            parentNode = parentNode.getParentNode();
            if (!(parentNode instanceof Element)) {
               documentLevel = NODE_AFTER_DOCUMENT_ELEMENT;
               parentNode = null;
            }
         }
         if (sibling == null)
            return;
         currentNode = sibling;
         sibling = currentNode.getNextSibling();
      } while (true);
   }

   // Drives the node-set traversal from doc and collects the output buffer.
   private byte[] engineCanonicalizeXPathNodeSetInternal(Node doc)
           throws CanonicalizationException {
      try {
         this.canonicalizeXPathNodeSet(doc, doc);
         this._writer.close();
         if (this._writer instanceof ByteArrayOutputStream) {
            byte[] sol = ((ByteArrayOutputStream)this._writer).toByteArray();
            if (reset) {
               ((ByteArrayOutputStream)this._writer).reset();
            }
            return sol;
         } else if (this._writer instanceof UnsyncByteArrayOutputStream) {
            byte[] result = ((UnsyncByteArrayOutputStream)this._writer).toByteArray();
            if (reset) {
               ((UnsyncByteArrayOutputStream)this._writer).reset();
            }
            return result;
         }
         return null;
      } catch (UnsupportedEncodingException ex) {
         throw new CanonicalizationException("empty", ex);
      } catch (IOException ex) {
         throw new CanonicalizationException("empty", ex);
      }
   }

   /**
    * Canonicalizes all the nodes included in the currentNode and contained in the
    * _xpathNodeSet field. Invisible elements still push a namespace scope so
    * their declarations remain in effect for visible descendants.
    *
    * @param currentNode
    * @param endnode
    * @throws CanonicalizationException
    * @throws IOException
    */
   final void canonicalizeXPathNodeSet(Node currentNode, Node endnode)
           throws CanonicalizationException, IOException {
      if (isVisibleInt(currentNode) == -1)
         return;
      boolean currentNodeIsVisible = false;
      NameSpaceSymbTable ns = new NameSpaceSymbTable();
      if (currentNode instanceof Element)
         getParentNameSpaces((Element)currentNode, ns);
      Node sibling = null;
      Node parentNode = null;
      OutputStream writer = this._writer;
      int documentLevel = NODE_BEFORE_DOCUMENT_ELEMENT;
      Map cache = new HashMap();
      do {
         switch (currentNode.getNodeType()) {
         case Node.DOCUMENT_TYPE_NODE :
         default :
            break;
         case Node.ENTITY_NODE :
         case Node.NOTATION_NODE :
         case Node.ATTRIBUTE_NODE :
            // illegal node type during traversal
            throw new CanonicalizationException("empty");
         case Node.DOCUMENT_FRAGMENT_NODE :
         case Node.DOCUMENT_NODE :
            ns.outputNodePush();
            //currentNode = currentNode.getFirstChild();
            sibling = currentNode.getFirstChild();
            break;
         case Node.COMMENT_NODE :
            if (this._includeComments && (isVisibleDO(currentNode, ns.getLevel()) == 1)) {
               outputCommentToWriter((Comment) currentNode, writer, documentLevel);
            }
            break;
         case Node.PROCESSING_INSTRUCTION_NODE :
            if (isVisible(currentNode))
               outputPItoWriter((ProcessingInstruction) currentNode, writer, documentLevel);
            break;
         case Node.TEXT_NODE :
         case Node.CDATA_SECTION_NODE :
            if (isVisible(currentNode)) {
               outputTextToWriter(currentNode.getNodeValue(), writer);
               // Coalesce adjacent text/CDATA siblings into one text run.
               for (Node nextSibling = currentNode.getNextSibling();
                       (nextSibling != null)
                       && ((nextSibling.getNodeType() == Node.TEXT_NODE)
                           || (nextSibling.getNodeType() == Node.CDATA_SECTION_NODE));
                       nextSibling = nextSibling.getNextSibling()) {
                  outputTextToWriter(nextSibling.getNodeValue(), writer);
                  currentNode = nextSibling;
                  sibling = currentNode.getNextSibling();
               }
            }
            break;
         case Node.ELEMENT_NODE :
            documentLevel = NODE_NOT_BEFORE_OR_AFTER_DOCUMENT_ELEMENT;
            Element currentElement = (Element) currentNode;
            // Add a level to the ns symbol table, so later it can be popped back.
            String name = null;
            int i = isVisibleDO(currentNode, ns.getLevel());
            if (i == -1) {
               // Subtree rejected outright: skip it entirely.
               sibling = currentNode.getNextSibling();
               break;
            }
            currentNodeIsVisible = (i == 1);
            if (currentNodeIsVisible) {
               ns.outputNodePush();
               writer.write('<');
               name = currentElement.getTagName();
               UtfHelpper.writeByte(name, writer, cache);
            } else {
               // Invisible element: keep its ns declarations in scope without output.
               ns.push();
            }
            Iterator attrs = handleAttributes(currentElement, ns);
            if (attrs != null) {
               // we output all Attrs which are available
               while (attrs.hasNext()) {
                  Attr attr = (Attr) attrs.next();
                  outputAttrToWriter(attr.getNodeName(), attr.getNodeValue(), writer, cache);
               }
            }
            if (currentNodeIsVisible) {
               writer.write('>');
            }
            sibling = currentNode.getFirstChild();
            if (sibling == null) {
               if (currentNodeIsVisible) {
                  writer.write(_END_TAG);
                  UtfHelpper.writeByte(name, writer, cache);
                  writer.write('>');
                  // We finished with this level, pop to the previous definitions.
                  ns.outputNodePop();
               } else {
                  ns.pop();
               }
               if (parentNode != null) {
                  sibling = currentNode.getNextSibling();
               }
            } else {
               parentNode = currentElement;
            }
            break;
         }
         while (sibling == null && parentNode != null) {
            if (isVisible(parentNode)) {
               writer.write(_END_TAG);
               UtfHelpper.writeByte(((Element)parentNode).getTagName(), writer, cache);
               writer.write('>');
               // We finished with this level, pop to the previous definitions.
               ns.outputNodePop();
            } else {
               ns.pop();
            }
            if (parentNode == endnode)
               return;
            sibling = parentNode.getNextSibling();
            parentNode = parentNode.getParentNode();
            if (!(parentNode instanceof Element)) {
               parentNode = null;
               documentLevel = NODE_AFTER_DOCUMENT_ELEMENT;
            }
         }
         if (sibling == null)
            return;
         currentNode = sibling;
         sibling = currentNode.getNextSibling();
      } while (true);
   }

   // Filter verdict with DO (descend-or-output) semantics: 1 = include,
   // 0 = exclude node, -1 = exclude whole subtree.
   int isVisibleDO(Node currentNode, int level) {
      if (nodeFilter != null) {
         Iterator it = nodeFilter.iterator();
         while (it.hasNext()) {
            int i = ((NodeFilter)it.next()).isNodeIncludeDO(currentNode, level);
            if (i != 1)
               return i;
         }
      }
      if ((this._xpathNodeSet != null) && !this._xpathNodeSet.contains(currentNode))
         return 0;
      return 1;
   }

   // Same verdict scale as isVisibleDO, via the non-DO filter callback.
   int isVisibleInt(Node currentNode) {
      if (nodeFilter != null) {
         Iterator it = nodeFilter.iterator();
         while (it.hasNext()) {
            int i = ((NodeFilter)it.next()).isNodeInclude(currentNode);
            if (i != 1)
               return i;
         }
      }
      if ((this._xpathNodeSet != null) && !this._xpathNodeSet.contains(currentNode))
         return 0;
      return 1;
   }

   // Boolean visibility: node passes every filter and is in the node set (if any).
   boolean isVisible(Node currentNode) {
      if (nodeFilter != null) {
         Iterator it = nodeFilter.iterator();
         while (it.hasNext()) {
            if (((NodeFilter)it.next()).isNodeInclude(currentNode) != 1)
               return false;
         }
      }
      if ((this._xpathNodeSet != null) && !this._xpathNodeSet.contains(currentNode))
         return false;
      return true;
   }

   // Records the namespace declarations of one ancestor element into ns.
   void handleParent(Element e, NameSpaceSymbTable ns) {
      if (!e.hasAttributes()) {
         return;
      }
      NamedNodeMap attrs = e.getAttributes();
      int attrsLength = attrs.getLength();
      for (int i = 0; i < attrsLength; i++) {
         Attr N = (Attr) attrs.item(i);
         // NOTE(review): reference comparison on the namespace URI — relies on
         // the DOM implementation interning Constants.NamespaceSpecNS.
         if (Constants.NamespaceSpecNS != N.getNamespaceURI()) {
            // Not a namespace definition, ignore.
            continue;
         }
         String NName = N.getLocalName();
         String NValue = N.getNodeValue();
         if (XML.equals(NName) && Constants.XML_LANG_SPACE_SpecNS.equals(NValue)) {
            continue;
         }
         ns.addMapping(NName, NValue, N);
      }
   }

   /**
    * Adds to ns the definitions from the parent elements of el.
    * @param el
    * @param ns
    */
   final void getParentNameSpaces(Element el, NameSpaceSymbTable ns) {
      List parents = new ArrayList(10);
      Node n1 = el.getParentNode();
      if (!(n1 instanceof Element)) {
         return;
      }
      // Obtain all the parents of the element.
      Element parent = (Element) n1;
      while (parent != null) {
         parents.add(parent);
         Node n = parent.getParentNode();
         if (!(n instanceof Element)) {
            break;
         }
         parent = (Element)n;
      }
      // Visit them in reverse order (outermost ancestor first).
      ListIterator it = parents.listIterator(parents.size());
      while (it.hasPrevious()) {
         Element ele = (Element)it.previous();
         handleParent(ele, ns);
      }
      Attr nsprefix;
      // If the inherited default namespace is empty, render xmlns="" explicitly.
      if (((nsprefix = ns.getMappingWithoutRendered("xmlns")) != null)
              && "".equals(nsprefix.getValue())) {
         ns.addMappingAndRender("xmlns", "", nullNode);
      }
   }

   /**
    * Obtain the attributes to output for this node in XPathNodeSet c14n.
    *
    * @param E
    * @param ns
    * @return the attributes nodes to output.
    * @throws CanonicalizationException
    */
   abstract Iterator handleAttributes(Element E, NameSpaceSymbTable ns)
           throws CanonicalizationException;

   /**
    * Obtain the attributes to output for this node in a Subtree c14n.
    *
    * @param E
    * @param ns
    * @return the attributes nodes to output.
    * @throws CanonicalizationException
    */
   abstract Iterator handleAttributesSubtree(Element E, NameSpaceSymbTable ns)
           throws CanonicalizationException;

   abstract void circumventBugIfNeeded(XMLSignatureInput input)
           throws CanonicalizationException, ParserConfigurationException, IOException, SAXException;

   /**
    * Outputs an Attribute to the internal Writer.
    *
    * The string value of the node is modified by replacing
    * <UL>
    * <LI>all ampersands (&) with <CODE>&amp;amp;</CODE></LI>
    * <LI>all open angle brackets (<) with <CODE>&amp;lt;</CODE></LI>
    * <LI>all quotation mark characters with <CODE>&amp;quot;</CODE></LI>
    * <LI>and the whitespace characters <CODE>#x9</CODE>, #xA, and #xD, with character
    * references. The character references are written in uppercase
    * hexadecimal with no leading zeroes (for example, <CODE>#xD</CODE> is represented
    * by the character reference <CODE>&amp;#xD;</CODE>)</LI>
    * </UL>
    *
    * @param name
    * @param value
    * @param writer
    * @throws IOException
    */
   static final void outputAttrToWriter(final String name, final String value,
           final OutputStream writer, final Map cache) throws IOException {
      writer.write(' ');
      UtfHelpper.writeByte(name, writer, cache);
      writer.write(equalsStr);
      byte[] toWrite;
      final int length = value.length();
      int i = 0;
      while (i < length) {
         char c = value.charAt(i++);
         switch (c) {
         case '&' :
            toWrite = _AMP_;
            break;
         case '<' :
            toWrite = _LT_;
            break;
         case '"' :
            toWrite = _QUOT_;
            break;
         case 0x09 :    // '\t'
            toWrite = __X9_;
            break;
         case 0x0A :    // '\n'
            toWrite = __XA_;
            break;
         case 0x0D :    // '\r'
            toWrite = __XD_;
            break;
         default :
            // ASCII goes out raw; anything else is UTF-8 encoded.
            if (c < 0x80) {
               writer.write(c);
            } else {
               UtfHelpper.writeCharToUtf8(c, writer);
            };
            continue;
         }
         writer.write(toWrite);
      }
      writer.write('\"');
   }

   /**
    * Outputs a PI to the internal Writer.
    *
    * @param currentPI
    * @param writer where to write the things
    * @throws IOException
    */
   static final void outputPItoWriter(ProcessingInstruction currentPI,
           OutputStream writer, int position) throws IOException {
      // A PI after the document element is preceded by a line break,
      // before it is followed by one (per C14N).
      if (position == NODE_AFTER_DOCUMENT_ELEMENT) {
         writer.write('\n');
      }
      writer.write(_BEGIN_PI);
      final String target = currentPI.getTarget();
      int length = target.length();
      for (int i = 0; i < length; i++) {
         char c = target.charAt(i);
         if (c == 0x0D) {
            writer.write(__XD_);
         } else {
            if (c < 0x80) {
               writer.write(c);
            } else {
               UtfHelpper.writeCharToUtf8(c, writer);
            };
         }
      }
      final String data = currentPI.getData();
      length = data.length();
      if (length > 0) {
         writer.write(' ');
         for (int i = 0; i < length; i++) {
            char c = data.charAt(i);
            if (c == 0x0D) {
               writer.write(__XD_);
            } else {
               UtfHelpper.writeCharToUtf8(c, writer);
            }
         }
      }
      writer.write(_END_PI);
      if (position == NODE_BEFORE_DOCUMENT_ELEMENT) {
         writer.write('\n');
      }
   }

   /**
    * Method outputCommentToWriter
    *
    * @param currentComment
    * @param writer writer where to write the things
    * @throws IOException
    */
   static final void outputCommentToWriter(Comment currentComment,
           OutputStream writer, int position) throws IOException {
      if (position == NODE_AFTER_DOCUMENT_ELEMENT) {
         writer.write('\n');
      }
      writer.write(_BEGIN_COMM);
      final String data = currentComment.getData();
      final int length = data.length();
      for (int i = 0; i < length; i++) {
         char c = data.charAt(i);
         if (c == 0x0D) {
            writer.write(__XD_);
         } else {
            if (c < 0x80) {
               writer.write(c);
            } else {
               UtfHelpper.writeCharToUtf8(c, writer);
            };
         }
      }
      writer.write(_END_COMM);
      if (position == NODE_BEFORE_DOCUMENT_ELEMENT) {
         writer.write('\n');
      }
   }

   /**
    * Outputs a Text of CDATA section to the internal Writer.
    *
    * @param text
    * @param writer writer where to write the things
    * @throws IOException
    */
   static final void outputTextToWriter(final String text, final OutputStream writer)
           throws IOException {
      final int length = text.length();
      byte[] toWrite;
      for (int i = 0; i < length; i++) {
         char c = text.charAt(i);
         switch (c) {
         case '&' :
            toWrite = _AMP_;
            break;
         case '<' :
            toWrite = _LT_;
            break;
         case '>' :
            toWrite = _GT_;
            break;
         case 0xD :
            toWrite = __XD_;
            break;
         default :
            if (c < 0x80) {
               writer.write(c);
            } else {
               UtfHelpper.writeCharToUtf8(c, writer);
            };
            continue;
         }
         writer.write(toWrite);
      }
   }
}
/* * Copyright (c) 2004-2007 Sun Microsystems, Inc. All rights reserved. * * The Sun Project JXTA(TM) Software License * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * 3. The end-user documentation included with the redistribution, if any, must * include the following acknowledgment: "This product includes software * developed by Sun Microsystems, Inc. for JXTA(TM) technology." * Alternately, this acknowledgment may appear in the software itself, if * and wherever such third-party acknowledgments normally appear. * * 4. The names "Sun", "Sun Microsystems, Inc.", "JXTA" and "Project JXTA" must * not be used to endorse or promote products derived from this software * without prior written permission. For written permission, please contact * Project JXTA at http://www.jxta.org. * * 5. Products derived from this software may not be called "JXTA", nor may * "JXTA" appear in their name, without prior written permission of Sun. * * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED WARRANTIES, * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL SUN * MICROSYSTEMS OR ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, * OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, * EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. * * JXTA is a registered trademark of Sun Microsystems, Inc. in the United * States and other countries. * * Please see the license information page at : * <http://www.jxta.org/project/www/license.html> for instructions on use of * the license in source files. * * ==================================================================== * * This software consists of voluntary contributions made by many individuals * on behalf of Project JXTA. For more information on Project JXTA, please see * http://www.jxta.org. * * This license is based on the BSD license adopted by the Apache Foundation. */ package tutorial.customgroupservice; import net.jxta.document.Advertisement; import net.jxta.document.AdvertisementFactory; import net.jxta.document.Attribute; import net.jxta.document.Element; import net.jxta.document.ExtendableAdvertisement; import net.jxta.document.MimeMediaType; import net.jxta.document.StructuredDocument; import net.jxta.document.XMLElement; import net.jxta.id.ID; import net.jxta.logging.Logging; import java.util.Enumeration; import java.util.logging.Logger; import net.jxta.document.Attributable; /** * Defines Gossip Service configuration parameters. * <p/> * A typical GossipServiceConfigAdv : * <tt><pre> * &lt;jxta:GossipServiceConfigAdv showOwn="true"> * &lt;Gossip> * Bonjour! 
* &lt;/Gossip> * &lt;/jxta:GossipServiceConfigAdv> * </pre></tt> */ public final class GossipServiceConfigAdv extends ExtendableAdvertisement implements Cloneable { /** * Logger */ private static final Logger LOG = Logger.getLogger(GossipServiceConfigAdv.class.getName()); /** * The advertisement index fields. (currently none). */ private static final String[] INDEX_FIELDS = {}; /** * The DOCTYPE */ private static final String advType = "jxta:GossipServiceConfigAdv"; /** * The name of the attribute which controls whether the gossip service * should show message from the local peer. */ private static final String SHOW_OWN_ATTR = "showOwn"; /** * The name of the tag which we use to store the gossip text. */ private static final String GOSSIP_TEXT_TAG = "gossip"; /** * Instantiator for GossipServiceConfigAdv */ public static class Instantiator implements AdvertisementFactory.Instantiator { /** * {@inheritDoc} */ public String getAdvertisementType() { return advType; } /** * {@inheritDoc} */ public Advertisement newInstance() { return new GossipServiceConfigAdv(); } /** * {@inheritDoc} */ public Advertisement newInstance(Element root) { if (!XMLElement.class.isInstance(root)) { throw new IllegalArgumentException(getClass().getName() + " only supports XLMElement"); } return new GossipServiceConfigAdv((XMLElement) root); } } /** * If {@code true} then the gossip service should show it's own gossips. If * {@code null} then the gossip service will use it's default. */ private Boolean showOwn = null; /** * The text we will "gossip". If {@code null} then the gossip service will * use it's default. */ private String gossip = null; /** * Returns the identifying type of this Advertisement. * <p/> * <b>Note:</b> This is a static method. It cannot be used to determine * the runtime type of an advertisement. ie. 
* </p><code><pre> * Advertisement adv = module.getSomeAdv(); * String advType = adv.getAdvertisementType(); * </pre></code> * <p/> * <b>This is wrong and does not work the way you might expect.</b> * This call is not polymorphic and calls * Advertisement.getAdvertisementType() no matter what the real type of the * advertisement. * * @return String the type of advertisement */ public static String getAdvertisementType() { return advType; } /** * Use the Instantiator through the factory */ private GossipServiceConfigAdv() { } /** * Use the Instantiator method to construct Peer Group Config Advs. * * @param doc the element */ private GossipServiceConfigAdv(XMLElement doc) { String doctype = doc.getName(); String typedoctype = ""; Attribute itsType = doc.getAttribute("type"); if (null != itsType) { typedoctype = itsType.getValue(); } if (!doctype.equals(getAdvertisementType()) && !getAdvertisementType().equals(typedoctype)) { throw new IllegalArgumentException("Could not construct : " + getClass().getName() + "from doc containing a " + doc.getName()); } /* Process attributes from root element */ Enumeration<Attribute> eachAttr = doc.getAttributes(); while (eachAttr.hasMoreElements()) { Attribute aConfigAttr = eachAttr.nextElement(); if (super.handleAttribute(aConfigAttr)) { // nothing to do } else if (SHOW_OWN_ATTR.equals(aConfigAttr.getName())) { setShowOwn(Boolean.valueOf(aConfigAttr.getValue().trim())); } else { Logging.logCheckedWarning(LOG, "Unhandled Attribute: " + aConfigAttr.getName()); } } /* process child elements of root */ Enumeration<XMLElement> elements = doc.getChildren(); while (elements.hasMoreElements()) { XMLElement elem = elements.nextElement(); if (!handleElement(elem)) { Logging.logCheckedWarning(LOG, "Unhandled Element: " + elem.toString()); } } } /** * {@inheritDoc} */ @Override protected boolean handleElement(Element raw) { if (super.handleElement(raw)) { return true; } XMLElement elem = (XMLElement) raw; if 
(GOSSIP_TEXT_TAG.equals(elem.getName())) { String value = elem.getTextValue(); if (null == value) { return false; } value = value.trim(); if (0 == value.length()) { return false; } gossip = value; return true; } return false; } /** * Make a clone of this GossipServiceConfigAdv. * * @return A copy of this GossipServiceConfigAdv. */ @Override public GossipServiceConfigAdv clone() { try { GossipServiceConfigAdv clone = (GossipServiceConfigAdv) super.clone(); clone.setShowOwn(getShowOwn()); clone.setGossip(getGossip()); return clone; } catch (CloneNotSupportedException impossible) { throw new Error("Object.clone() threw CloneNotSupportedException", impossible); } } /** * {@inheritDoc} */ @Override public String getAdvType() { return getAdvertisementType(); } /** * {@inheritDoc} */ @Override public final String getBaseAdvType() { return getAdvertisementType(); } /** * {@inheritDoc} */ @Override public ID getID() { return ID.nullID; } /** * {@inheritDoc} */ @Override public StructuredDocument getDocument(MimeMediaType encodeAs) { StructuredDocument adv = (StructuredDocument) super.getDocument(encodeAs); if (!(adv instanceof Attributable)) { throw new IllegalArgumentException("Only document types supporting atrributes are allowed"); } if (null != getShowOwn()) { ((Attributable) adv).addAttribute(SHOW_OWN_ATTR, Boolean.toString(getShowOwn())); } if (null != getGossip()) { Element e = adv.createElement(GOSSIP_TEXT_TAG, getGossip()); adv.appendChild(e); } return adv; } /** * {@inheritDoc} */ @Override public String[] getIndexFields() { return INDEX_FIELDS; } /** * Returns the gossip text which should be used by the gossip service. * * @return The gossip text which should be used by the gossip service or * {@code null} if the service should use it's default value. */ public String getGossip() { return gossip; } /** * Sets the gossip text which should be used by the gossip service. 
* * @param gossip The gossip text which should be used by the gossip service * or {@code null} to use service default. */ public void setGossip(String gossip) { this.gossip = gossip; } /** * Returns whether the gossip service should show it's own gossip text. * * @return If {@code true} then we should show our own gossip text, * {@code false} to omit it or {@code null} to use service default. */ public Boolean getShowOwn() { return showOwn; } /** * Sets whether the gossip service should show it's own gossip text. * * @param showOwn If {@code true} then we should show our own gossip text, * {@code false} to omit it or {@code null} to use service default. */ public void setShowOwn(Boolean showOwn) { this.showOwn = showOwn; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.commons.io;

import java.io.IOException;

import junit.framework.TestCase;

import org.apache.commons.io.output.ByteArrayOutputStream;

/**
 * Tests {@link HexDump}.
 *
 * @author Scott Sanders (sanders at apache dot org)
 * @author Marc Johnson (mjohnson at apache dot org)
 * @version $Revision: 919474 $ $Date: 2010-03-05 11:31:43 -0500 (Fri, 05 Mar 2010) $
 */
public class HexDumpTest extends TestCase {

    /** Width in bytes of one dump row, excluding the line separator. */
    private static final int ROW_WIDTH = 73;

    /**
     * Creates new HexDumpTest
     *
     * @param name the test case name
     */
    public HexDumpTest(String name) {
        super(name);
    }

    /** Maps the low nibble of {@code n} to an upper-case hex digit. */
    private char toHex(int n) {
        char[] hexChars = {
            '0', '1', '2', '3', '4', '5', '6', '7',
            '8', '9', 'A', 'B', 'C', 'D', 'E', 'F'
        };
        return hexChars[n % 16];
    }

    /** Printable ASCII (32..126) is shown as-is, everything else as '.'. */
    private char toAscii(int c) {
        char rval = '.';
        if ((c >= 32) && (c <= 126)) {
            rval = (char) c;
        }
        return rval;
    }

    /**
     * Builds the expected dump of all 256 byte values, where row {@code j}
     * holds the bytes 0xJ0..0xJF and the 8-digit offset column is
     * {@code offsetPrefix + toHex(j) + '0'}.
     *
     * @param offsetPrefix the first six hex digits of the displayed offset
     * @return the expected dump bytes, line separators included
     */
    private byte[] expectedFullDump(String offsetPrefix) {
        byte[] expected = new byte[16 * (ROW_WIDTH + HexDump.EOL.length())];
        for (int j = 0; j < 16; j++) {
            int offset = (ROW_WIDTH + HexDump.EOL.length()) * j;
            for (int i = 0; i < 6; i++) {
                expected[offset++] = (byte) offsetPrefix.charAt(i);
            }
            expected[offset++] = (byte) toHex(j);
            expected[offset++] = (byte) '0';
            expected[offset++] = (byte) ' ';
            // 16 hex columns: "JK " for the byte value 0xJK
            for (int k = 0; k < 16; k++) {
                expected[offset++] = (byte) toHex(j);
                expected[offset++] = (byte) toHex(k);
                expected[offset++] = (byte) ' ';
            }
            // 16 ASCII columns
            for (int k = 0; k < 16; k++) {
                expected[offset++] = (byte) toAscii((j * 16) + k);
            }
            System.arraycopy(HexDump.EOL.getBytes(), 0, expected, offset,
                    HexDump.EOL.getBytes().length);
        }
        return expected;
    }

    /**
     * Asserts two byte arrays are identical, reporting the first mismatching
     * position on failure.
     */
    private void assertDumpEquals(byte[] expected, byte[] actual) {
        assertEquals("array size mismatch", expected.length, actual.length);
        for (int j = 0; j < expected.length; j++) {
            assertEquals("array[ " + j + "] mismatch", expected[j], actual[j]);
        }
    }

    /**
     * test dump method
     *
     * @exception IOException on I/O failure writing the dump
     */
    public void testDump() throws IOException {
        byte[] testArray = new byte[256];

        for (int j = 0; j < 256; j++) {
            testArray[j] = (byte) j;
        }

        // verify proper behavior with zero offset
        ByteArrayOutputStream stream = new ByteArrayOutputStream();
        HexDump.dump(testArray, 0, stream, 0);
        assertDumpEquals(expectedFullDump("000000"), stream.toByteArray());

        // verify proper behavior with non-zero offset
        stream = new ByteArrayOutputStream();
        HexDump.dump(testArray, 0x10000000, stream, 0);
        assertDumpEquals(expectedFullDump("100000"), stream.toByteArray());

        // verify proper behavior with negative offset
        stream = new ByteArrayOutputStream();
        HexDump.dump(testArray, 0xFF000000, stream, 0);
        assertDumpEquals(expectedFullDump("FF0000"), stream.toByteArray());

        // verify proper behavior with non-zero index: only 127 bytes remain
        // (0x81..0xFF), so the final row is one ASCII character short.
        stream = new ByteArrayOutputStream();
        HexDump.dump(testArray, 0x10000000, stream, 0x81);
        byte[] outputArray = new byte[(8 * (ROW_WIDTH + HexDump.EOL.length())) - 1];
        for (int j = 0; j < 8; j++) {
            int offset = (ROW_WIDTH + HexDump.EOL.length()) * j;
            outputArray[offset++] = (byte) '1';
            outputArray[offset++] = (byte) '0';
            outputArray[offset++] = (byte) '0';
            outputArray[offset++] = (byte) '0';
            outputArray[offset++] = (byte) '0';
            outputArray[offset++] = (byte) '0';
            outputArray[offset++] = (byte) toHex(j + 8);
            outputArray[offset++] = (byte) '1';
            outputArray[offset++] = (byte) ' ';
            for (int k = 0; k < 16; k++) {
                int index = 0x81 + (j * 16) + k;
                if (index < 0x100) {
                    outputArray[offset++] = (byte) toHex(index / 16);
                    outputArray[offset++] = (byte) toHex(index);
                } else {
                    // past the end of the data: hex columns are blank-padded
                    outputArray[offset++] = (byte) ' ';
                    outputArray[offset++] = (byte) ' ';
                }
                outputArray[offset++] = (byte) ' ';
            }
            for (int k = 0; k < 16; k++) {
                int index = 0x81 + (j * 16) + k;
                if (index < 0x100) {
                    outputArray[offset++] = (byte) toAscii(index);
                }
            }
            System.arraycopy(HexDump.EOL.getBytes(), 0, outputArray, offset,
                    HexDump.EOL.getBytes().length);
        }
        assertDumpEquals(outputArray, stream.toByteArray());

        // verify proper behavior with negative index
        try {
            HexDump.dump(testArray, 0x10000000, new ByteArrayOutputStream(), -1);
            fail("should have caught ArrayIndexOutOfBoundsException on negative index");
        } catch (ArrayIndexOutOfBoundsException ignored) {
            // as expected
        }

        // verify proper behavior with index that is too large
        try {
            HexDump.dump(testArray, 0x10000000, new ByteArrayOutputStream(),
                    testArray.length);
            fail("should have caught ArrayIndexOutOfBoundsException on large index");
        } catch (ArrayIndexOutOfBoundsException ignored) {
            // as expected
        }

        // verify proper behavior with null stream
        // (fail message fixed: it previously claimed "negative index")
        try {
            HexDump.dump(testArray, 0x10000000, null, 0);
            fail("should have caught IllegalArgumentException on null stream");
        } catch (IllegalArgumentException ignored) {
            // as expected
        }
    }
}
/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.squareup.okhttp;

import com.squareup.okhttp.internal.DiskLruCache;
import com.squareup.okhttp.internal.Util;
import com.squareup.okhttp.internal.http.HttpMethod;
import java.io.BufferedWriter;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FilterInputStream;
import java.io.FilterOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.net.CacheRequest;
import java.net.CacheResponse;
import java.net.ResponseCache;
import java.net.URI;
import java.net.URLConnection;
import java.security.cert.Certificate;
import java.security.cert.CertificateEncodingException;
import java.security.cert.CertificateException;
import java.security.cert.CertificateFactory;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import okio.BufferedSource;
import okio.ByteString;
import okio.Okio;

import static com.squareup.okhttp.internal.Util.UTF_8;

/**
 * Caches HTTP and HTTPS responses to the filesystem so they may be reused,
 * saving time and bandwidth.
 *
 * <h3>Cache Optimization</h3>
 * To measure cache effectiveness, this class tracks three statistics:
 * <ul>
 *     <li><strong>{@link #getRequestCount() Request Count:}</strong> the number
 *         of HTTP requests issued since this cache was created.
 *     <li><strong>{@link #getNetworkCount() Network Count:}</strong> the
 *         number of those requests that required network use.
 *     <li><strong>{@link #getHitCount() Hit Count:}</strong> the number of
 *         those requests whose responses were served by the cache.
 * </ul>
 * Sometimes a request will result in a conditional cache hit. If the cache
 * contains a stale copy of the response, the client will issue a conditional
 * {@code GET}. The server will then send either the updated response if it has
 * changed, or a short 'not modified' response if the client's copy is still
 * valid. Such responses increment both the network count and hit count.
 *
 * <p>The best way to improve the cache hit rate is by configuring the web
 * server to return cacheable responses. Although this client honors all <a
 * href="http://www.ietf.org/rfc/rfc2616.txt">HTTP/1.1 (RFC 2616)</a> cache
 * headers, it doesn't cache partial responses.
 *
 * <h3>Force a Network Response</h3>
 * In some situations, such as after a user clicks a 'refresh' button, it may be
 * necessary to skip the cache, and fetch data directly from the server. To force
 * a full refresh, add the {@code no-cache} directive: <pre>   {@code
 *     connection.addRequestProperty("Cache-Control", "no-cache");
 * }</pre>
 * If it is only necessary to force a cached response to be validated by the
 * server, use the more efficient {@code max-age=0} instead: <pre>   {@code
 *     connection.addRequestProperty("Cache-Control", "max-age=0");
 * }</pre>
 *
 * <h3>Force a Cache Response</h3>
 * Sometimes you'll want to show resources if they are available immediately,
 * but not otherwise. This can be used so your application can show
 * <i>something</i> while waiting for the latest data to be downloaded. To
 * restrict a request to locally-cached resources, add the {@code
 * only-if-cached} directive: <pre>   {@code
 *     try {
 *         connection.addRequestProperty("Cache-Control", "only-if-cached");
 *         InputStream cached = connection.getInputStream();
 *         // the resource was cached! show it
 *     } catch (FileNotFoundException e) {
 *         // the resource was not cached
 *     }
 * }</pre>
 * This technique works even better in situations where a stale response is
 * better than no response. To permit stale cached responses, use the {@code
 * max-stale} directive with the maximum staleness in seconds: <pre>   {@code
 *     int maxStale = 60 * 60 * 24 * 28; // tolerate 4-weeks stale
 *     connection.addRequestProperty("Cache-Control", "max-stale=" + maxStale);
 * }</pre>
 */
public final class HttpResponseCache extends ResponseCache implements OkResponseCache {
  // TODO: add APIs to iterate the cache?
  private static final int VERSION = 201105;
  // DiskLruCache stream indices: one stream for the metadata, one for the body.
  private static final int ENTRY_METADATA = 0;
  private static final int ENTRY_BODY = 1;
  private static final int ENTRY_COUNT = 2;

  private final DiskLruCache cache;

  /* read and write statistics, all guarded by 'this' */
  private int writeSuccessCount;
  private int writeAbortCount;
  private int networkCount;
  private int hitCount;
  private int requestCount;

  public HttpResponseCache(File directory, long maxSize) throws IOException {
    cache = DiskLruCache.open(directory, VERSION, ENTRY_COUNT, maxSize);
  }

  // The java.net.ResponseCache entry points are deliberately unsupported;
  // callers must use the Request/Response overloads below.
  @Override public CacheResponse get(URI uri, String s, Map<String, List<String>> stringListMap)
      throws IOException {
    throw new UnsupportedOperationException("This is not a general purpose response cache.");
  }

  @Override public CacheRequest put(URI uri, URLConnection urlConnection) throws IOException {
    throw new UnsupportedOperationException("This is not a general purpose response cache.");
  }

  /** Derives the disk-cache key from a hash of the request's full URL. */
  private static String urlToKey(Request requst) {
    return Util.hash(requst.urlString());
  }

  /**
   * Returns the cached response for {@code request}, or null when there is no
   * usable entry (missing, unreadable, or failing the URL/method/Vary match).
   */
  @Override public Response get(Request request) {
    String key = urlToKey(request);
    DiskLruCache.Snapshot snapshot;
    Entry entry;
    try {
      snapshot = cache.get(key);
      if (snapshot == null) {
        return null;
      }
      entry = new Entry(snapshot.getInputStream(ENTRY_METADATA));
    } catch (IOException e) {
      // Give up because the cache cannot be read.
      return null;
    }

    Response response = entry.response(request, snapshot);

    if (!entry.matches(request, response)) {
      Util.closeQuietly(response.body());
      return null;
    }

    return response;
  }

  /**
   * Stores {@code response}, returning a CacheRequest the caller streams the
   * body into, or null when the response is not cacheable (non-GET,
   * cache-invalidating method, or {@code Vary: *}).
   */
  @Override public CacheRequest put(Response response) throws IOException {
    String requestMethod = response.request().method();

    if (maybeRemove(response.request())) {
      return null;
    }
    if (!requestMethod.equals("GET")) {
      // Don't cache non-GET responses. We're technically allowed to cache
      // HEAD requests and some POST requests, but the complexity of doing
      // so is high and the benefit is low.
      return null;
    }

    if (response.hasVaryAll()) {
      return null;
    }

    Entry entry = new Entry(response);
    DiskLruCache.Editor editor = null;
    try {
      editor = cache.edit(urlToKey(response.request()));
      if (editor == null) {
        return null;
      }
      entry.writeTo(editor);
      return new CacheRequestImpl(editor);
    } catch (IOException e) {
      abortQuietly(editor);
      return null;
    }
  }

  /**
   * Evicts the entry for {@code request} if its method (e.g. PUT/DELETE)
   * invalidates the cache. Returns true when the method was invalidating.
   */
  @Override public boolean maybeRemove(Request request) {
    if (HttpMethod.invalidatesCache(request.method())) {
      try {
        cache.remove(urlToKey(request));
      } catch (IOException ignored) {
        // The cache cannot be written.
      }
      return true;
    }
    return false;
  }

  /**
   * Replaces the stored metadata of {@code cached} with the headers from
   * {@code network} (a conditional-GET validation), keeping the cached body.
   */
  @Override public void update(Response cached, Response network) {
    Entry entry = new Entry(network);
    DiskLruCache.Snapshot snapshot = ((CacheResponseBody) cached.body()).snapshot;
    DiskLruCache.Editor editor = null;
    try {
      editor = snapshot.edit(); // Returns null if snapshot is not current.
      if (editor != null) {
        entry.writeTo(editor);
        editor.commit();
      }
    } catch (IOException e) {
      abortQuietly(editor);
    }
  }

  private void abortQuietly(DiskLruCache.Editor editor) {
    // Give up because the cache cannot be written.
    try {
      if (editor != null) {
        editor.abort();
      }
    } catch (IOException ignored) {
    }
  }

  /**
   * Closes the cache and deletes all of its stored values. This will delete
   * all files in the cache directory including files that weren't created by
   * the cache.
   */
  public void delete() throws IOException {
    cache.delete();
  }

  public synchronized int getWriteAbortCount() {
    return writeAbortCount;
  }

  public synchronized int getWriteSuccessCount() {
    return writeSuccessCount;
  }

  public long getSize() {
    return cache.size();
  }

  public long getMaxSize() {
    return cache.getMaxSize();
  }

  public void flush() throws IOException {
    cache.flush();
  }

  public void close() throws IOException {
    cache.close();
  }

  public File getDirectory() {
    return cache.getDirectory();
  }

  public boolean isClosed() {
    return cache.isClosed();
  }

  @Override public synchronized void trackResponse(ResponseSource source) {
    requestCount++;

    switch (source) {
      case CACHE:
        hitCount++;
        break;
      case CONDITIONAL_CACHE:
      case NETWORK:
        networkCount++;
        break;
    }
  }

  @Override public synchronized void trackConditionalCacheHit() {
    hitCount++;
  }

  public synchronized int getNetworkCount() {
    return networkCount;
  }

  public synchronized int getHitCount() {
    return hitCount;
  }

  public synchronized int getRequestCount() {
    return requestCount;
  }

  /** Streams a response body into a DiskLruCache editor, committing on close. */
  private final class CacheRequestImpl extends CacheRequest {
    private final DiskLruCache.Editor editor;
    private OutputStream cacheOut;
    private boolean done;
    private OutputStream body;

    public CacheRequestImpl(final DiskLruCache.Editor editor) throws IOException {
      this.editor = editor;
      this.cacheOut = editor.newOutputStream(ENTRY_BODY);
      this.body = new FilterOutputStream(cacheOut) {
        @Override public void close() throws IOException {
          synchronized (HttpResponseCache.this) {
            if (done) {
              return;
            }
            done = true;
            writeSuccessCount++;
          }
          super.close();
          editor.commit();
        }

        @Override public void write(byte[] buffer, int offset, int length) throws IOException {
          // Since we don't override "write(int oneByte)", we can write directly to "out"
          // and avoid the inefficient implementation from the FilterOutputStream.
          out.write(buffer, offset, length);
        }
      };
    }

    @Override public void abort() {
      synchronized (HttpResponseCache.this) {
        if (done) {
          return;
        }
        done = true;
        writeAbortCount++;
      }
      Util.closeQuietly(cacheOut);
      try {
        editor.abort();
      } catch (IOException ignored) {
      }
    }

    @Override public OutputStream getBody() throws IOException {
      return body;
    }
  }

  /** The serialized request/response metadata stored in the ENTRY_METADATA stream. */
  private static final class Entry {
    private final String url;
    private final Headers varyHeaders;
    private final String requestMethod;
    private final String statusLine;
    private final Headers responseHeaders;
    private final Handshake handshake;

    /**
     * Reads an entry from an input stream. A typical entry looks like this:
     * <pre>{@code
     *   http://google.com/foo
     *   GET
     *   2
     *   Accept-Language: fr-CA
     *   Accept-Charset: UTF-8
     *   HTTP/1.1 200 OK
     *   3
     *   Content-Type: image/png
     *   Content-Length: 100
     *   Cache-Control: max-age=600
     * }</pre>
     *
     * <p>A typical HTTPS file looks like this:
     * <pre>{@code
     *   https://google.com/foo
     *   GET
     *   2
     *   Accept-Language: fr-CA
     *   Accept-Charset: UTF-8
     *   HTTP/1.1 200 OK
     *   3
     *   Content-Type: image/png
     *   Content-Length: 100
     *   Cache-Control: max-age=600
     *
     *   AES_256_WITH_MD5
     *   2
     *   base64-encoded peerCertificate[0]
     *   base64-encoded peerCertificate[1]
     *   -1
     * }</pre>
     * The file is newline separated. The first two lines are the URL and
     * the request method. Next is the number of HTTP Vary request header
     * lines, followed by those lines.
     *
     * <p>Next is the response status line, followed by the number of HTTP
     * response header lines, followed by those lines.
     *
     * <p>HTTPS responses also contain SSL session information. This begins
     * with a blank line, and then a line containing the cipher suite. Next
     * is the length of the peer certificate chain. These certificates are
     * base64-encoded and appear each on their own line. The next line
     * contains the length of the local certificate chain. These
     * certificates are also base64-encoded and appear each on their own
     * line. A length of -1 is used to encode a null array.
     */
    public Entry(InputStream in) throws IOException {
      try {
        BufferedSource source = Okio.buffer(Okio.source(in));
        url = source.readUtf8LineStrict();
        requestMethod = source.readUtf8LineStrict();
        Headers.Builder varyHeadersBuilder = new Headers.Builder();
        int varyRequestHeaderLineCount = readInt(source);
        for (int i = 0; i < varyRequestHeaderLineCount; i++) {
          varyHeadersBuilder.addLine(source.readUtf8LineStrict());
        }
        varyHeaders = varyHeadersBuilder.build();

        statusLine = source.readUtf8LineStrict();
        Headers.Builder responseHeadersBuilder = new Headers.Builder();
        int responseHeaderLineCount = readInt(source);
        for (int i = 0; i < responseHeaderLineCount; i++) {
          responseHeadersBuilder.addLine(source.readUtf8LineStrict());
        }
        responseHeaders = responseHeadersBuilder.build();

        if (isHttps()) {
          String blank = source.readUtf8LineStrict();
          if (blank.length() > 0) {
            throw new IOException("expected \"\" but was \"" + blank + "\"");
          }
          String cipherSuite = source.readUtf8LineStrict();
          List<Certificate> peerCertificates = readCertificateList(source);
          List<Certificate> localCertificates = readCertificateList(source);
          handshake = Handshake.get(cipherSuite, peerCertificates, localCertificates);
        } else {
          handshake = null;
        }
      } finally {
        in.close();
      }
    }

    public Entry(Response response) {
      this.url = response.request().urlString();
      this.varyHeaders = response.request().headers().getAll(response.getVaryFields());
      this.requestMethod = response.request().method();
      this.statusLine = response.statusLine();
      this.responseHeaders = response.headers();
      this.handshake = response.handshake();
    }

    /** Serializes this entry into the editor's metadata stream (format above). */
    public void writeTo(DiskLruCache.Editor editor) throws IOException {
      OutputStream out = editor.newOutputStream(ENTRY_METADATA);
      Writer writer = new BufferedWriter(new OutputStreamWriter(out, UTF_8));

      writer.write(url + '\n');
      writer.write(requestMethod + '\n');
      writer.write(Integer.toString(varyHeaders.size()) + '\n');
      for (int i = 0; i < varyHeaders.size(); i++) {
        writer.write(varyHeaders.name(i) + ": " + varyHeaders.value(i) + '\n');
      }

      writer.write(statusLine + '\n');
      writer.write(Integer.toString(responseHeaders.size()) + '\n');
      for (int i = 0; i < responseHeaders.size(); i++) {
        writer.write(responseHeaders.name(i) + ": " + responseHeaders.value(i) + '\n');
      }

      if (isHttps()) {
        writer.write('\n');
        writer.write(handshake.cipherSuite() + '\n');
        writeCertArray(writer, handshake.peerCertificates());
        writeCertArray(writer, handshake.localCertificates());
      }
      writer.close();
    }

    private boolean isHttps() {
      return url.startsWith("https://");
    }

    private List<Certificate> readCertificateList(BufferedSource source) throws IOException {
      int length = readInt(source);
      if (length == -1) return Collections.emptyList(); // OkHttp v1.2 used -1 to indicate null.
      try {
        CertificateFactory certificateFactory = CertificateFactory.getInstance("X.509");
        List<Certificate> result = new ArrayList<Certificate>(length);
        for (int i = 0; i < length; i++) {
          String line = source.readUtf8LineStrict();
          byte[] bytes = ByteString.decodeBase64(line).toByteArray();
          result.add(certificateFactory.generateCertificate(new ByteArrayInputStream(bytes)));
        }
        return result;
      } catch (CertificateException e) {
        throw new IOException(e.getMessage());
      }
    }

    private void writeCertArray(Writer writer, List<Certificate> certificates) throws IOException {
      try {
        writer.write(Integer.toString(certificates.size()) + '\n');
        for (int i = 0, size = certificates.size(); i < size; i++) {
          byte[] bytes = certificates.get(i).getEncoded();
          String line = ByteString.of(bytes).base64();
          writer.write(line + '\n');
        }
      } catch (CertificateEncodingException e) {
        throw new IOException(e.getMessage());
      }
    }

    /** True when this entry's URL, method and Vary headers match the request. */
    public boolean matches(Request request, Response response) {
      return url.equals(request.urlString())
          && requestMethod.equals(request.method())
          && response.varyMatches(varyHeaders, request);
    }

    /** Builds a Response whose body streams from the given cache snapshot. */
    public Response response(Request request, DiskLruCache.Snapshot snapshot) {
      String contentType = responseHeaders.get("Content-Type");
      String contentLength = responseHeaders.get("Content-Length");
      return new Response.Builder()
          .request(request)
          .statusLine(statusLine)
          .headers(responseHeaders)
          .body(new CacheResponseBody(snapshot, contentType, contentLength))
          .handshake(handshake)
          .build();
    }
  }

  /** Parses one decimal-integer line of the metadata format. */
  private static int readInt(BufferedSource source) throws IOException {
    String line = source.readUtf8LineStrict();
    try {
      return Integer.parseInt(line);
    } catch (NumberFormatException e) {
      throw new IOException("Expected an integer but was \"" + line + "\"");
    }
  }

  /** A response body backed by a cache snapshot; closing it closes the snapshot. */
  private static class CacheResponseBody extends Response.Body {
    private final DiskLruCache.Snapshot snapshot;
    private final InputStream bodyIn;
    private final String contentType;
    private final String contentLength;

    public CacheResponseBody(final DiskLruCache.Snapshot snapshot,
        String contentType, String contentLength) {
      this.snapshot = snapshot;
      this.contentType = contentType;
      this.contentLength = contentLength;

      // This input stream closes the snapshot when the stream is closed.
      this.bodyIn = new FilterInputStream(snapshot.getInputStream(ENTRY_BODY)) {
        @Override public void close() throws IOException {
          snapshot.close();
          super.close();
        }
      };
    }

    @Override public boolean ready() throws IOException {
      return true;
    }

    @Override public MediaType contentType() {
      return contentType != null ? MediaType.parse(contentType) : null;
    }

    @Override public long contentLength() {
      try {
        return contentLength != null ? Long.parseLong(contentLength) : -1;
      } catch (NumberFormatException e) {
        return -1;
      }
    }

    @Override public InputStream byteStream() {
      return bodyIn;
    }
  }
}
/*
 * Copyright 2015 Red Hat Inc. and/or its affiliates and other contributors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jboss.qa.jcontainer;

import org.jboss.qa.jcontainer.util.ProcessUtils;
import org.jboss.qa.jcontainer.util.ReflectionUtils;
import org.jboss.qa.jcontainer.util.executor.ProcessBuilderExecutor;

import java.io.File;
import java.io.IOException;
import java.net.Socket;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;

import lombok.extern.slf4j.Slf4j;

/**
 * Base implementation of {@link Container}: launches the container as an
 * external Java process, tracks it via a {@code -Djcontainer.id=<id>} system
 * property injected into JAVA_OPTS, and stops it by running the registered
 * JVM shutdown hooks (which double as the stop tasks).
 */
@Slf4j
public abstract class AbstractContainer<T extends JavaConfiguration, U extends Client<T>, V extends User>
		implements Container<T, U, V> {

	public static final String JCONTAINER_ID = "jcontainer.id";

	// Unique per-instance id; also used to find the container's JVM process.
	private final long id;
	private final File stdoutLogFile;

	protected T configuration;
	protected U client;

	// Concrete configuration/client classes, recovered from the generic
	// parameters of the subclass via reflection.
	private Class<T> confClass;
	private Class<U> clientClass;

	// Non-empty exactly while the container is considered running; the hooks
	// are both JVM shutdown hooks and the tasks executed by stop().
	private volatile List<Thread> shutdownHooks = new ArrayList<>();

	public AbstractContainer(T configuration) {
		id = System.nanoTime();
		stdoutLogFile = new File(configuration.getDirectory(), String.format("stdout-%s.log", id));
		confClass = ReflectionUtils.getGenericClass(getClass(), 0);
		clientClass = ReflectionUtils.getGenericClass(getClass(), 1);
		this.configuration = configuration;
		client = createClient(configuration);
		log.info("container id = {}", id);
	}

	public long getId() {
		return id;
	}

	public File getStdoutLogFile() {
		return stdoutLogFile;
	}

	/**
	 * Returns command, which can be used by client.
	 *
	 * @return String command if client is supported, NULL otherwise
	 */
	protected abstract String getBasicCommand();

	/**
	 * Returns log directory.
	 */
	public File getLogDir() {
		try {
			return getLogDirInternal();
		} catch (Exception e) {
			throw new IllegalStateException("Log directory was not found", e);
		}
	}

	/**
	 * Returns default log file.
	 */
	public File getDefaultLogFile() {
		final File logFile = new File(getLogDir(), configuration.getLogFileName());
		if (!logFile.exists()) {
			log.warn("Log file does not exist: {}", logFile.getAbsoluteFile());
		}
		return logFile;
	}

	protected abstract File getLogDirInternal();

	// Registers the hook both locally (so stop() can run it) and with the JVM
	// (so an abrupt JVM exit still stops the container).
	protected void addShutdownHook(Thread hook) {
		shutdownHooks.add(hook);
		Runtime.getRuntime().addShutdownHook(hook);
	}

	/**
	 * Starts the container process asynchronously and blocks until its port is
	 * reachable and (when supported) the client connects.
	 *
	 * @throws IllegalStateException if another process already uses the port
	 *         or the container does not start in time
	 * @throws IllegalArgumentException if the configured directory is missing
	 */
	@Override
	public synchronized void start() throws Exception {
		if (isRunning()) {
			log.warn("Container is already started");
			return;
		}
		if (checkSocket()) {
			throw new IllegalStateException(String.format("Another container already uses %s:%d",
					configuration.getHost(), configuration.getBusyPort()));
		}
		if (configuration.getDirectory() == null || !configuration.getDirectory().exists()) {
			throw new IllegalArgumentException("Directory of container must exist");
		}
		final List<String> cmd = configuration.generateCommand();
		cmd.addAll(configuration.getParams());
		log.debug("Process arguments: " + cmd.toString());

		final ProcessBuilder processBuilder = new ProcessBuilder(cmd);
		processBuilder.environment().putAll(System.getenv());
		processBuilder.environment().putAll(configuration.getEnvProps());

		// Modify JAVA_OPTS: append the container id so the process can later
		// be identified by ProcessUtils.getJavaPidByContainerId().
		final StringBuilder javaOpts = new StringBuilder();
		final String oldJavaOpts = processBuilder.environment().get(configuration.getJavaOptsEnvName());
		if (oldJavaOpts != null) {
			javaOpts.append(oldJavaOpts);
		}
		javaOpts.append(String.format(" -D%s=%s", JCONTAINER_ID, id));
		processBuilder.environment().put(configuration.getJavaOptsEnvName(), javaOpts.toString());

		final Process process = ProcessBuilderExecutor.asyncExecute(processBuilder, getStdoutLogFile());

		// First hook: destroy the launcher process and wait for it to die.
		addShutdownHook(new Thread(new Runnable() {
			@Override
			public void run() {
				if (process != null) {
					process.destroy();
					try {
						process.waitFor();
					} catch (InterruptedException e) {
						log.debug("Container stop process was interrupted!");
						log.trace(e.getMessage(), e);
						Thread.currentThread().interrupt();
					}
				}
			}
		}));

		// Second hook: waits until the container's JVM process exits
		// (the launcher may spawn a separate JVM carrying our id property).
		addShutdownHook(new Thread(new Runnable() {
			@Override
			public void run() {
				try {
					String pid;
					while ((pid = ProcessUtils.getJavaPidByContainerId(getId())) != null) {
						log.debug("Stopping container (PID {}) ...", pid);
						Thread.sleep(TimeUnit.SECONDS.toMillis(1));
					}
				} catch (InterruptedException e) {
					log.trace(e.getMessage(), e);
					Thread.currentThread().interrupt();
				}
			}
		}));

		waitForStarted();
	}

	/**
	 * Stops the container by running all registered shutdown hooks on an
	 * executor, waiting up to the given timeout before cancelling them.
	 */
	public synchronized void stop(long timeout, TimeUnit timeUnit) throws Exception {
		if (isRunning()) {
			client.close();
			final ExecutorService service = Executors.newCachedThreadPool();
			final List<Future> futures = new ArrayList<>();
			for (Thread shutdownHook : shutdownHooks) {
				// Deregister from the JVM first; the hook now runs as a task.
				Runtime.getRuntime().removeShutdownHook(shutdownHook);
				futures.add(service.submit(shutdownHook));
			}
			service.shutdown();
			if (!service.awaitTermination(timeout, timeUnit)) {
				for (Future future : futures) {
					future.cancel(true);
				}
				log.warn("Container shutdown process didn't finish in {} {}!", timeout, timeUnit);
			} else {
				log.info("Container was stopped");
			}
			shutdownHooks.clear();
		}
	}

	@Override
	public synchronized void stop() throws Exception {
		stop(1, TimeUnit.MINUTES);
	}

	@Override
	public void close() throws IOException {
		try {
			stop();
		} catch (Exception e) {
			throw new IOException(e);
		}
	}

	// NOTE(review): "running" is approximated by the presence of registered
	// shutdown hooks, not by probing the actual process.
	@Override
	public boolean isRunning() throws Exception {
		return !shutdownHooks.isEmpty();
	}

	/**
	 * Polls the container port until it accepts connections (30 attempts,
	 * 5 s apart), then verifies the client connection.
	 *
	 * @throws IllegalStateException if the port never becomes reachable
	 */
	protected synchronized void waitForStarted() throws InterruptedException {
		int attempts = 30;
		while (!checkSocket()) {
			if (--attempts <= 0) {
				throw new IllegalStateException("Container was not started");
			}
			// wait() is used purely as an interruptible delay here; a spurious
			// wakeup just retries the socket check early.
			wait(TimeUnit.SECONDS.toMillis(5));
			log.info("Waiting for container...");
		}
		checkClient();
		log.info("Container was started");
	}

	/**
	 * Retries the basic client command until it succeeds (20 attempts,
	 * 5 s apart) when a client is supported.
	 *
	 * @throws IllegalStateException if the client never connects
	 */
	public void checkClient() {
		if (isClientSupported()) {
			final int clientAttempts = 20;
			final String basicCommand = getBasicCommand();
			for (int i = 0; i < clientAttempts; i++) {
				try {
					Thread.sleep(TimeUnit.SECONDS.toMillis(5));
					client.execute(basicCommand);
					log.debug("Client was connected to container");
					return;
				} catch (Exception e) {
					log.debug("Waiting for client...");
					log.trace(e.getMessage(), e);
				}
			}
			throw new IllegalStateException("Client was not connected to container");
		}
	}

	@Override
	public boolean isClientSupported() {
		return getBasicCommand() != null;
	}

	/** Returns true when something is listening on the configured host:port. */
	public synchronized boolean checkSocket() {
		try (Socket socket = new Socket(configuration.getHost(), configuration.getBusyPort())) {
			return true;
		} catch (Exception e) {
			log.trace(e.getMessage(), e);
			return false;
		}
	}

	@Override
	public T getConfiguration() {
		return configuration;
	}

	@Override
	public U getClient() {
		return client;
	}

	// Instantiates the client via its (configuration) constructor; returns
	// null on failure, which later surfaces as an NPE when the client is used.
	protected U createClient(T configuration) {
		try {
			return clientClass.getConstructor(confClass).newInstance(configuration);
		} catch (Exception e) {
			log.error("Client was not created");
			log.trace(e.getMessage(), e);
		}
		return null;
	}

	protected void checkMandatoryProperty(String name, Object value) {
		if (value == null) {
			throw new IllegalArgumentException(String.format("Property '%s' is mandatory", name));
		}
	}
}
/*
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.facebook.buck.cxx;

import com.facebook.buck.core.build.context.BuildContext;
import com.facebook.buck.core.filesystems.AbsPath;
import com.facebook.buck.core.filesystems.RelPath;
import com.facebook.buck.core.model.BuildTarget;
import com.facebook.buck.core.rulekey.AddToRuleKey;
import com.facebook.buck.core.rules.BuildRule;
import com.facebook.buck.core.rules.SourcePathRuleFinder;
import com.facebook.buck.core.rules.attr.SupportsInputBasedRuleKey;
import com.facebook.buck.core.rules.schedule.OverrideScheduleRule;
import com.facebook.buck.core.rules.schedule.RuleScheduleInfo;
import com.facebook.buck.core.sourcepath.ExplicitBuildTargetSourcePath;
import com.facebook.buck.core.sourcepath.SourcePath;
import com.facebook.buck.cxx.toolchain.LinkerMapMode;
import com.facebook.buck.cxx.toolchain.StripStyle;
import com.facebook.buck.cxx.toolchain.linker.HasImportLibrary;
import com.facebook.buck.cxx.toolchain.linker.HasLinkerMap;
import com.facebook.buck.cxx.toolchain.linker.Linker;
import com.facebook.buck.io.filesystem.ProjectFilesystem;
import com.facebook.buck.io.filesystem.impl.ProjectFilesystemUtils;
import com.facebook.buck.rules.args.Arg;
import com.facebook.buck.rules.modern.BuildCellRelativePathFactory;
import com.facebook.buck.rules.modern.Buildable;
import com.facebook.buck.rules.modern.ModernBuildRule;
import com.facebook.buck.rules.modern.OutputPathResolver;
import com.facebook.buck.rules.modern.PublicOutputPath;
import com.facebook.buck.step.Step;
import com.facebook.buck.step.fs.MkdirStep;
import com.facebook.buck.step.isolatedsteps.common.TouchStep;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableList.Builder;
import com.google.common.collect.ImmutableMap;
import java.nio.file.Path;
import java.util.Optional;
import java.util.stream.Stream;

/**
 * A BuildRule that runs the linker to produce the ThinLTO index for a set of c++ objects
 * (the link is invoked against {@code output}'s sibling {@code thinlto.objects} directory;
 * see {@link Impl#getBuildSteps}).
 */
public class CxxThinLTOIndex extends ModernBuildRule<CxxThinLTOIndex.Impl>
    implements SupportsInputBasedRuleKey, HasAppleDebugSymbolDeps, OverrideScheduleRule {

  private final Optional<RuleScheduleInfo> ruleScheduleInfo;
  private final boolean cacheable;

  // Stored here so we can access it without an OutputPathResolver.
  private final Path output;

  public CxxThinLTOIndex(
      BuildTarget buildTarget,
      ProjectFilesystem projectFilesystem,
      SourcePathRuleFinder ruleFinder,
      Linker linker,
      Path output,
      ImmutableList<Arg> args,
      Optional<RuleScheduleInfo> ruleScheduleInfo,
      boolean cacheable,
      boolean withDownwardApi) {
    super(
        buildTarget,
        projectFilesystem,
        ruleFinder,
        new Impl(linker, output, args, buildTarget, withDownwardApi));
    this.output = output;
    this.ruleScheduleInfo = ruleScheduleInfo;
    this.cacheable = cacheable;
    performChecks(buildTarget);
  }

  /** Rejects targets that carry strip flavors; stripping is handled by a separate rule. */
  private void performChecks(BuildTarget buildTarget) {
    Preconditions.checkArgument(
        !buildTarget.getFlavors().contains(CxxStrip.RULE_FLAVOR)
            || !StripStyle.FLAVOR_DOMAIN.containsAnyOf(buildTarget.getFlavors()),
        // Fixed: message previously named CxxLink (copy-paste from that rule).
        "CxxThinLTOIndex should not be created with CxxStrip flavors");
  }

  /** Buildable implementation of CxxThinLTOIndex. */
  public static class Impl implements Buildable {
    @AddToRuleKey private final BuildTarget targetName;
    @AddToRuleKey private final Linker linker;
    @AddToRuleKey private final ImmutableList<Arg> args;
    @AddToRuleKey private final PublicOutputPath output;
    @AddToRuleKey private final Optional<PublicOutputPath> linkerMapPath;
    @AddToRuleKey private final boolean withDownwardApi;

    public Impl(
        Linker linker,
        Path output,
        ImmutableList<Arg> args,
        BuildTarget buildTarget,
        boolean withDownwardApi) {
      this.linker = linker;
      this.output = new PublicOutputPath(output);
      this.withDownwardApi = withDownwardApi;
      // Only declare a linker-map output when the linker can produce one AND the
      // target has linker maps enabled.
      Optional<Path> linkerMapPath = getLinkerMapPath(linker, output);
      if (linkerMapPath.isPresent()
          && LinkerMapMode.isLinkerMapEnabledForBuildTarget(buildTarget)) {
        this.linkerMapPath = Optional.of(new PublicOutputPath(linkerMapPath.get()));
      } else {
        this.linkerMapPath = Optional.empty();
      }
      this.args = args;
      this.targetName = buildTarget;
    }

    @Override
    public ImmutableList<Step> getBuildSteps(
        BuildContext context,
        ProjectFilesystem filesystem,
        OutputPathResolver outputPathResolver,
        BuildCellRelativePathFactory buildCellPathFactory) {
      AbsPath scratchDir = filesystem.resolve(outputPathResolver.getTempPath());
      AbsPath argFilePath = scratchDir.resolve("linker.argsfile");
      AbsPath fileListPath = scratchDir.resolve("filelist.txt");

      RelPath outputPath = outputPathResolver.resolvePath(output);
      // The link is pointed at a sibling directory of the declared output.
      Path linkOutput = outputPath.getParent().resolve("thinlto.objects");

      Builder<Step> stepsBuilder =
          new Builder<Step>()
              .add(MkdirStep.of(buildCellPathFactory.from(outputPath.getParent())))
              .add(MkdirStep.of(buildCellPathFactory.from(outputPath)))
              .addAll(
                  CxxPrepareForLinkStep.create(
                      argFilePath.getPath(),
                      fileListPath.getPath(),
                      linker.fileList(fileListPath),
                      linkOutput,
                      args,
                      linker,
                      targetName.getCell(),
                      filesystem.getRootPath().getPath(),
                      context.getSourcePathResolver(),
                      // NOTE(review): empty cell-root map — this rule opts out of linker
                      // path normalization (see getPathNormalizationPrefix below).
                      ImmutableMap.of(),
                      ImmutableList.of()))
              .add(
                  new CxxLinkStep(
                      filesystem.getRootPath(),
                      ProjectFilesystemUtils.relativize(
                          filesystem.getRootPath(), context.getBuildCellRootPath()),
                      linker.getEnvironment(context.getSourcePathResolver()),
                      linker.getCommandPrefix(context.getSourcePathResolver()),
                      argFilePath.getPath(),
                      scratchDir.getPath(),
                      withDownwardApi));
      if (linkerMapPath.isPresent()) {
        // The linker-map path was already declared as an output above; make sure the
        // file exists even if the linker did not emit one for this invocation.
        // (Comment rewritten: the previous text about `dll_export` import libraries was
        // copy-pasted from CxxLink and did not describe this code.)
        stepsBuilder.add(new TouchStep(outputPathResolver.resolvePath(linkerMapPath.get())));
      }

      return stepsBuilder.build();
    }
  }

  @Override
  public Stream<BuildRule> getAppleDebugSymbolDeps() {
    return getBuildDeps().stream()
        .filter(x -> x instanceof Archive || x instanceof CxxPreprocessAndCompile);
  }

  @Override
  public Optional<String> getPathNormalizationPrefix() {
    // CxxThinLTOIndex does not support linker normalization args at all (i.e., it passes
    // an empty map for the cell roots to CxxPrepareForLinkStep), so there's no need to
    // implement this method, either.
    return Optional.empty();
  }

  @Override
  public SourcePath getSourcePathToOutput() {
    return getSourcePath(getBuildable().output);
  }

  /** @return The source path to be used to link against this binary. */
  SourcePath getSourcePathToOutputForLinking() {
    // On platforms with import libraries (e.g. Windows), shared libraries are linked
    // against via the import library rather than the library itself.
    if (isSharedLib() && getBuildable().linker instanceof HasImportLibrary) {
      HasImportLibrary impLibLinker = (HasImportLibrary) getBuildable().linker;
      return ExplicitBuildTargetSourcePath.of(
          getBuildTarget(), impLibLinker.importLibraryPath(output));
    }
    return getSourcePathToOutput();
  }

  private boolean isSharedLib() {
    return getBuildTarget().getFlavors().contains(CxxDescriptionEnhancer.SHARED_FLAVOR);
  }

  @Override
  public RuleScheduleInfo getRuleScheduleInfo() {
    return ruleScheduleInfo.orElse(RuleScheduleInfo.DEFAULT);
  }

  @Override
  public boolean isCacheable() {
    return cacheable;
  }

  /** @return the linker-map path for this rule's output, if the linker supports one. */
  public Optional<Path> getLinkerMapPath() {
    return getLinkerMapPath(getLinker(), output);
  }

  private static Optional<Path> getLinkerMapPath(Linker linker, Path output) {
    if (linker instanceof HasLinkerMap) {
      return Optional.of(((HasLinkerMap) linker).linkerMapPath(output));
    } else {
      return Optional.empty();
    }
  }

  public Linker getLinker() {
    return getBuildable().linker;
  }

  public ImmutableList<Arg> getArgs() {
    return getBuildable().args;
  }
}
/*
 * Copyright 2021 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package androidx.window.sample.embedding;

import static android.app.PendingIntent.FLAG_IMMUTABLE;

import android.app.Activity;
import android.app.PendingIntent;
import android.content.ComponentName;
import android.content.Intent;
import android.content.res.Configuration;
import android.os.Bundle;
import android.util.DisplayMetrics;
import android.util.LayoutDirection;
import android.util.Log;
import android.util.TypedValue;
import android.view.View;
import android.widget.CompoundButton;

import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AppCompatActivity;
import androidx.core.util.Consumer;
import androidx.window.core.ExperimentalWindowApi;
import androidx.window.embedding.ActivityFilter;
import androidx.window.embedding.ActivityRule;
import androidx.window.embedding.EmbeddingRule;
import androidx.window.embedding.SplitController;
import androidx.window.embedding.SplitInfo;
import androidx.window.embedding.SplitPairFilter;
import androidx.window.embedding.SplitPairRule;
import androidx.window.embedding.SplitPlaceholderRule;
import androidx.window.embedding.SplitRule;
import androidx.window.sample.databinding.ActivitySplitActivityLayoutBinding;

import java.util.HashSet;
import java.util.List;
import java.util.Set;

import kotlin.OptIn;

/**
 * Sample showcase of split activity rules. Allows the user to select some split configuration
 * options with checkboxes and launch activities with those options applied.
 */
@OptIn(markerClass = ExperimentalWindowApi.class)
public class SplitActivityBase extends AppCompatActivity
        implements CompoundButton.OnCheckedChangeListener {

    private static final String TAG = "SplitActivityTest";
    private static final float MIN_SPLIT_WIDTH_DP = 600f;
    static final float SPLIT_RATIO = 0.3f;
    static final String EXTRA_LAUNCH_C_TO_SIDE = "launch_c_to_side";

    private SplitController mSplitController;
    private SplitInfoCallback mCallback;

    private ActivitySplitActivityLayoutBinding mViewBinding;

    /** Flag indicating that the config is being updated from checkboxes changes in a loop. */
    private boolean mUpdatingConfigs;

    @Override
    protected void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        mViewBinding = ActivitySplitActivityLayoutBinding.inflate(getLayoutInflater());
        setContentView(mViewBinding.getRoot());

        // Setup activity launch buttons.
        mViewBinding.launchB.setOnClickListener((View v) ->
                startActivity(new Intent(this, SplitActivityB.class)));
        mViewBinding.launchBAndC.setOnClickListener((View v) -> {
            Intent bStartIntent = new Intent(this, SplitActivityB.class);
            bStartIntent.putExtra(EXTRA_LAUNCH_C_TO_SIDE, true);
            startActivity(bStartIntent);
        });
        mViewBinding.launchE.setOnClickListener((View v) ->
                startActivity(new Intent(this, SplitActivityE.class)));
        mViewBinding.launchF.setOnClickListener((View v) ->
                startActivity(new Intent(this, SplitActivityF.class)));
        mViewBinding.launchFPendingIntent.setOnClickListener((View v) -> {
            try {
                PendingIntent.getActivity(this, 0, new Intent(this, SplitActivityF.class),
                        FLAG_IMMUTABLE).send();
            } catch (PendingIntent.CanceledException e) {
                // Fixed: previously logged only e.getMessage() (which may be null) and
                // dropped the stack trace.
                Log.e(TAG, "Failed to send pending intent for SplitActivityF", e);
            }
        });

        // Listen for split configuration checkboxes to update the rules before launching
        // activities.
        mViewBinding.splitMainCheckBox.setOnCheckedChangeListener(this);
        mViewBinding.usePlaceholderCheckBox.setOnCheckedChangeListener(this);
        mViewBinding.useStickyPlaceholderCheckBox.setOnCheckedChangeListener(this);
        mViewBinding.splitBCCheckBox.setOnCheckedChangeListener(this);
        mViewBinding.finishBCCheckBox.setOnCheckedChangeListener(this);
        mViewBinding.fullscreenECheckBox.setOnCheckedChangeListener(this);
        mViewBinding.splitWithFCheckBox.setOnCheckedChangeListener(this);

        mSplitController = SplitController.Companion.getInstance();
    }

    @Override
    protected void onStart() {
        super.onStart();
        mCallback = new SplitInfoCallback();
        mSplitController.addSplitListener(this, Runnable::run, mCallback);
        updateEmbeddedStatus();
    }

    @Override
    protected void onStop() {
        super.onStop();
        mSplitController.removeSplitListener(mCallback);
        mCallback = null;
    }

    @Override
    public void onConfigurationChanged(@NonNull Configuration newConfig) {
        super.onConfigurationChanged(newConfig);
        updateEmbeddedStatus();
    }

    /** Syncs the checkbox state with the currently registered rules on each split change. */
    class SplitInfoCallback implements Consumer<List<SplitInfo>> {
        @Override
        public void accept(List<SplitInfo> splitInfoList) {
            updateCheckboxesFromCurrentConfig();
        }
    }

    @Override
    public void onCheckedChanged(@NonNull CompoundButton c, boolean isChecked) {
        // Ignore changes that we made ourselves while mirroring the current config.
        if (mUpdatingConfigs) {
            return;
        }
        // Dependent checkboxes are only meaningful when their parent option is enabled.
        if (c.getId() == mViewBinding.splitBCCheckBox.getId()) {
            if (isChecked) {
                mViewBinding.finishBCCheckBox.setEnabled(true);
            } else {
                mViewBinding.finishBCCheckBox.setEnabled(false);
                mViewBinding.finishBCCheckBox.setChecked(false);
            }
        } else if (c.getId() == mViewBinding.usePlaceholderCheckBox.getId()) {
            if (isChecked) {
                mViewBinding.useStickyPlaceholderCheckBox.setEnabled(true);
            } else {
                mViewBinding.useStickyPlaceholderCheckBox.setEnabled(false);
                mViewBinding.useStickyPlaceholderCheckBox.setChecked(false);
            }
        }
        updateRulesFromCheckboxes();
    }

    /** Mirrors the currently registered split rules into the checkbox UI. */
    void updateCheckboxesFromCurrentConfig() {
        mUpdatingConfigs = true;

        SplitPairRule splitMainConfig = getRuleFor(SplitActivityA.class, null);
        mViewBinding.splitMainCheckBox.setChecked(splitMainConfig != null);

        SplitPlaceholderRule placeholderForBConfig = getPlaceholderRule(SplitActivityB.class);
        mViewBinding.usePlaceholderCheckBox.setChecked(placeholderForBConfig != null);
        mViewBinding.useStickyPlaceholderCheckBox.setEnabled(placeholderForBConfig != null);
        mViewBinding.useStickyPlaceholderCheckBox.setChecked(placeholderForBConfig != null
                && placeholderForBConfig.isSticky());

        SplitPairRule bAndCPairConfig = getRuleFor(SplitActivityB.class,
                SplitActivityC.class);
        mViewBinding.splitBCCheckBox.setChecked(bAndCPairConfig != null);
        mViewBinding.finishBCCheckBox.setEnabled(bAndCPairConfig != null);
        mViewBinding.finishBCCheckBox.setChecked(bAndCPairConfig != null
                && bAndCPairConfig.getFinishPrimaryWithSecondary() == SplitRule.FINISH_ALWAYS
                && bAndCPairConfig.getFinishSecondaryWithPrimary() == SplitRule.FINISH_ALWAYS);

        SplitPairRule fConfig = getRuleFor(null, SplitActivityF.class);
        mViewBinding.splitWithFCheckBox.setChecked(fConfig != null);

        ActivityRule configE = getRuleFor(SplitActivityE.class);
        mViewBinding.fullscreenECheckBox.setChecked(configE != null
                && configE.getAlwaysExpand());

        mUpdatingConfigs = false;
    }

    /** Returns the split pair rule for the given activity pair, or {@code null} if none. */
    private SplitPairRule getRuleFor(Class<? extends Activity> a, Class<? extends Activity> b) {
        Set<EmbeddingRule> currentRules = mSplitController.getSplitRules();
        for (EmbeddingRule rule : currentRules) {
            if (rule instanceof SplitPairRule && isRuleFor(a, b, (SplitPairRule) rule)) {
                return (SplitPairRule) rule;
            }
        }
        return null;
    }

    /** Returns the placeholder rule for the given activity, or {@code null} if none. */
    SplitPlaceholderRule getPlaceholderRule(Class<? extends Activity> a) {
        Set<EmbeddingRule> currentRules = mSplitController.getSplitRules();
        for (EmbeddingRule rule : currentRules) {
            if (rule instanceof SplitPlaceholderRule) {
                for (ActivityFilter filter : ((SplitPlaceholderRule) rule).getFilters()) {
                    if (filter.getComponentName().getClassName().equals(a.getName())) {
                        return (SplitPlaceholderRule) rule;
                    }
                }
            }
        }
        return null;
    }

    /** Returns the activity rule for the given activity, or {@code null} if none. */
    private ActivityRule getRuleFor(Class<? extends Activity> a) {
        Set<EmbeddingRule> currentRules = mSplitController.getSplitRules();
        for (EmbeddingRule rule : currentRules) {
            if (rule instanceof ActivityRule && isRuleFor(a, (ActivityRule) rule)) {
                return (ActivityRule) rule;
            }
        }
        return null;
    }

    /** Whether the given rule matches the pair; a {@code null} class acts as a wildcard. */
    private boolean isRuleFor(Class<? extends Activity> a, Class<? extends Activity> b,
            SplitPairRule pairConfig) {
        return isRuleFor(a != null ? a.getName() : "*", b != null ? b.getName() : "*",
                pairConfig);
    }

    private boolean isRuleFor(String primaryActivityName, String secondaryActivityName,
            SplitPairRule pairConfig) {
        // NOTE: matching uses contains(), so "*" never matches a real class name here;
        // a wildcard argument effectively only matches filters registered with "*".
        for (SplitPairFilter filter : pairConfig.getFilters()) {
            if (filter.getPrimaryActivityName().getClassName().contains(primaryActivityName)
                    && filter.getSecondaryActivityName().getClassName()
                    .contains(secondaryActivityName)) {
                return true;
            }
        }
        return false;
    }

    private boolean isRuleFor(Class<? extends Activity> a, ActivityRule config) {
        return isRuleFor(a != null ? a.getName() : "*", config);
    }

    private boolean isRuleFor(String activityName, ActivityRule config) {
        for (ActivityFilter filter : config.getFilters()) {
            if (filter.getComponentName().getClassName().contains(activityName)) {
                return true;
            }
        }
        return false;
    }

    /** Rebuilds and re-registers all split rules from the current checkbox state. */
    private void updateRulesFromCheckboxes() {
        int minSplitWidth = minSplitWidth();
        mSplitController.clearRegisteredRules();

        Set<SplitPairFilter> pairFilters = new HashSet<>();
        pairFilters.add(new SplitPairFilter(componentName(SplitActivityA.class),
                componentName("*"), null));
        // Fixed: the 0 argument is minSmallestWidth, not minSplitWidth (label was a
        // copy-paste error; compare with the placeholder rule below).
        SplitPairRule rule = new SplitPairRule(pairFilters, SplitRule.FINISH_NEVER,
                SplitRule.FINISH_NEVER, true, minSplitWidth,
                /* minSmallestWidth */ 0, SPLIT_RATIO, LayoutDirection.LOCALE);
        if (mViewBinding.splitMainCheckBox.isChecked()) {
            mSplitController.registerRule(rule);
        }

        Set<ActivityFilter> activityFilters = new HashSet<>();
        activityFilters.add(new ActivityFilter(componentName(SplitActivityB.class), null));
        Intent intent = new Intent();
        intent.setComponent(
                componentName("androidx.window.sample.embedding.SplitActivityPlaceholder"));
        SplitPlaceholderRule placeholderRule = new SplitPlaceholderRule(activityFilters,
                intent, mViewBinding.useStickyPlaceholderCheckBox.isChecked(),
                SplitRule.FINISH_ADJACENT, minSplitWidth,
                0 /* minSmallestWidth */, SPLIT_RATIO, LayoutDirection.LOCALE);
        if (mViewBinding.usePlaceholderCheckBox.isChecked()) {
            mSplitController.registerRule(placeholderRule);
        }

        pairFilters = new HashSet<>();
        pairFilters.add(new SplitPairFilter(componentName(SplitActivityB.class),
                componentName(SplitActivityC.class), null));
        rule = new SplitPairRule(pairFilters,
                mViewBinding.finishBCCheckBox.isChecked()
                        ? SplitRule.FINISH_ALWAYS : SplitRule.FINISH_NEVER,
                mViewBinding.finishBCCheckBox.isChecked()
                        ? SplitRule.FINISH_ALWAYS : SplitRule.FINISH_NEVER,
                true, minSplitWidth,
                /* minSmallestWidth */ 0, SPLIT_RATIO, LayoutDirection.LOCALE);
        if (mViewBinding.splitBCCheckBox.isChecked()) {
            mSplitController.registerRule(rule);
        }

        pairFilters = new HashSet<>();
        pairFilters.add(new SplitPairFilter(componentName("androidx.window.*"),
                componentName(SplitActivityF.class), null));
        rule = new SplitPairRule(pairFilters, SplitRule.FINISH_NEVER,
                SplitRule.FINISH_NEVER, true, minSplitWidth,
                /* minSmallestWidth */ 0, SPLIT_RATIO, LayoutDirection.LOCALE);
        if (mViewBinding.splitWithFCheckBox.isChecked()) {
            mSplitController.registerRule(rule);
        }

        activityFilters = new HashSet<>();
        activityFilters.add(new ActivityFilter(componentName(SplitActivityE.class), null));
        ActivityRule activityRule = new ActivityRule(activityFilters, true);
        if (mViewBinding.fullscreenECheckBox.isChecked()) {
            mSplitController.registerRule(activityRule);
        }
    }

    /** Builds a component name in this app's package; a {@code null} class maps to "*". */
    ComponentName componentName(Class<? extends Activity> activityClass) {
        return new ComponentName(getPackageName(),
                activityClass != null ? activityClass.getName() : "*");
    }

    ComponentName componentName(String className) {
        return new ComponentName(getPackageName(), className);
    }

    /** Returns {@link #MIN_SPLIT_WIDTH_DP} converted to pixels for the current display. */
    int minSplitWidth() {
        DisplayMetrics dm = getResources().getDisplayMetrics();
        return (int) TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP,
                MIN_SPLIT_WIDTH_DP, dm);
    }

    /** Updates the status label that says when an activity is embedded. */
    private void updateEmbeddedStatus() {
        if (mSplitController.isActivityEmbedded(this)) {
            mViewBinding.activityEmbeddedStatusTextView.setVisibility(View.VISIBLE);
        } else {
            mViewBinding.activityEmbeddedStatusTextView.setVisibility(View.GONE);
        }
    }
}
/*
 * Copyright (c) 2008-2017, Hazelcast, Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hazelcast.map.impl.mapstore.writebehind;

import com.hazelcast.core.IMap;
import com.hazelcast.core.MapStore;
import com.hazelcast.core.MapStoreAdapter;
import com.hazelcast.test.AssertTask;
import com.hazelcast.test.HazelcastParallelClassRunner;
import com.hazelcast.test.HazelcastTestSupport;
import com.hazelcast.test.TestHazelcastInstanceFactory;
import com.hazelcast.test.annotation.ParallelTest;
import com.hazelcast.test.annotation.QuickTest;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;

import static java.lang.Integer.valueOf;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;

/**
 * Tests interactions between write-behind map stores and eviction: evicted entries must
 * still be persisted exactly once, and reads after eviction must see the latest value.
 */
@RunWith(HazelcastParallelClassRunner.class)
@Category({QuickTest.class, ParallelTest.class})
public class WriteBehindMapStoreWithEvictionsTest extends HazelcastTestSupport {

    @Test
    public void testWriteBehind_callEvictBeforePersisting() throws Exception {
        final MapStoreWithCounter<Integer, Integer> mapStore =
                new MapStoreWithCounter<Integer, Integer>();
        // Long write delay (100s) guarantees eviction happens before persistence.
        final IMap<Integer, Integer> map = TestMapUsingMapStoreBuilder.<Integer, Integer>create()
                .withMapStore(mapStore)
                .withNodeCount(1)
                .withNodeFactory(createHazelcastInstanceFactory(1))
                .withBackupCount(0)
                .withWriteDelaySeconds(100)
                .withPartitionCount(1)
                .build();
        final int numberOfItems = 1000;
        populateMap(map, numberOfItems);
        evictMap(map, numberOfItems);

        assertFinalValueEqualsForEachEntry(map, numberOfItems);
    }

    @Test
    public void testWriteBehind_callEvictBeforePersisting_onSameKey() throws Exception {
        final MapStoreWithCounter<Integer, Integer> mapStore =
                new MapStoreWithCounter<Integer, Integer>();
        final IMap<Integer, Integer> map = TestMapUsingMapStoreBuilder.<Integer, Integer>create()
                .withMapStore(mapStore)
                .withNodeCount(1)
                .withNodeFactory(createHazelcastInstanceFactory(1))
                .withBackupCount(0)
                .withWriteDelaySeconds(3)
                .withPartitionCount(1)
                .build();
        final int numberOfUpdates = 1000;
        final int key = 0;
        continuouslyUpdateKey(map, numberOfUpdates, key);
        map.evict(0);

        // A get after eviction must observe the last written value.
        final int expectedLastValue = numberOfUpdates - 1;
        assertFinalValueEquals(expectedLastValue, map.get(0));
    }

    @Test
    public void testWriteBehind_callEvictBeforePersisting_onSameKey_thenCallRemove()
            throws Exception {
        final MapStoreWithCounter<Integer, Integer> mapStore =
                new MapStoreWithCounter<Integer, Integer>();
        final IMap<Integer, Integer> map = TestMapUsingMapStoreBuilder.<Integer, Integer>create()
                .withMapStore(mapStore)
                .withNodeCount(1)
                .withNodeFactory(createHazelcastInstanceFactory(1))
                .withBackupCount(0)
                .withWriteDelaySeconds(100)
                .withPartitionCount(1)
                .build();
        final int numberOfUpdates = 1000;
        final int key = 0;
        continuouslyUpdateKey(map, numberOfUpdates, key);
        map.evict(0);

        // remove() must return the last value written before eviction.
        final Object previousValue = map.remove(0);
        final int expectedLastValue = numberOfUpdates - 1;
        assertFinalValueEquals(expectedLastValue, (Integer) previousValue);
    }

    @Test
    public void testWriteBehind_callEvictBeforePersisting_onSameKey_thenCallRemoveMultipleTimes()
            throws Exception {
        final MapStoreWithCounter<Integer, Integer> mapStore =
                new MapStoreWithCounter<Integer, Integer>();
        final IMap<Integer, Integer> map = TestMapUsingMapStoreBuilder.<Integer, Integer>create()
                .withMapStore(mapStore)
                .withNodeCount(1)
                .withNodeFactory(createHazelcastInstanceFactory(1))
                .withBackupCount(0)
                .withWriteDelaySeconds(100)
                .withPartitionCount(1)
                .build();
        final int numberOfUpdates = 1000;
        final int key = 0;
        continuouslyUpdateKey(map, numberOfUpdates, key);
        map.evict(0);

        map.remove(0);
        final Object previousValue = map.remove(0);
        // Fixed: the first argument of assertNull(String, Object) is the failure
        // message; it was previously passed as null.
        assertNull("second remove of an already removed key should return null", previousValue);
    }

    @Test
    public void evict_then_loadAll_onSameKey() throws Exception {
        final MapStoreWithCounter<Integer, Integer> mapStore =
                new MapStoreWithCounter<Integer, Integer>();
        final IMap<Integer, Integer> map = TestMapUsingMapStoreBuilder.<Integer, Integer>create()
                .withMapStore(mapStore)
                .withNodeCount(1)
                .withNodeFactory(createHazelcastInstanceFactory(1))
                .withBackupCount(0)
                .withWriteDelaySeconds(100)
                .withPartitionCount(1)
                .build();
        map.put(1, 100);

        // Write a stale value directly to the store; the unpersisted in-memory update
        // (still in the write-behind queue) must win over the store on loadAll.
        final Map<Integer, Integer> fill = new HashMap<Integer, Integer>();
        fill.put(1, -1);
        mapStore.storeAll(fill);

        map.evict(1);

        final Set<Integer> loadKeys = new HashSet<Integer>();
        loadKeys.add(1);
        map.loadAll(loadKeys, true);

        assertEquals(100, map.get(1).intValue());
    }

    @Test
    public void evictAll_then_loadAll_onSameKey() throws Exception {
        final MapStoreWithCounter<Integer, Integer> mapStore =
                new MapStoreWithCounter<Integer, Integer>();
        final IMap<Integer, Integer> map = TestMapUsingMapStoreBuilder.<Integer, Integer>create()
                .withMapStore(mapStore)
                .withNodeCount(1)
                .withNodeFactory(createHazelcastInstanceFactory(1))
                .withBackupCount(0)
                .withWriteDelaySeconds(100)
                .withPartitionCount(1)
                .build();
        map.put(1, 100);

        // Same scenario as evict_then_loadAll_onSameKey, but using evictAll().
        final Map<Integer, Integer> fill = new HashMap<Integer, Integer>();
        fill.put(1, -1);
        mapStore.storeAll(fill);

        map.evictAll();

        final Set<Integer> loadKeys = new HashSet<Integer>();
        loadKeys.add(1);
        map.loadAll(loadKeys, true);

        assertEquals(100, map.get(1).intValue());
    }

    @Test
    public void testWriteBehindFlushPersistsAllRecords_afterShutdownAll() throws Exception {
        int nodeCount = 2;
        final MapStoreWithCounter<Integer, Integer> mapStore =
                new MapStoreWithCounter<Integer, Integer>();
        final TestHazelcastInstanceFactory factory = createHazelcastInstanceFactory(nodeCount);
        final IMap<Integer, Integer> map = TestMapUsingMapStoreBuilder.<Integer, Integer>create()
                .withMapStore(mapStore)
                .withNodeCount(nodeCount)
                .withNodeFactory(factory)
                .withBackupCount(0)
                .withWriteDelaySeconds(100)
                .withPartitionCount(100)
                .build();
        final int numberOfItems = 1000;
        // add some expiration logic by setting a 10 seconds TTL to puts
        populateMap(map, numberOfItems, 10);
        factory.shutdownAll();

        // Shutdown must flush the write-behind queue; every record must reach the store.
        assertTrueEventually(new AssertTask() {
            @Override
            public void run() throws Exception {
                for (int i = 0; i < 1000; i++) {
                    assertEquals(valueOf(i), mapStore.store.get(i));
                }
            }
        });
    }

    @Test
    public void testWriteBehind_shouldNotMakeDuplicateStoreOperationForAKey_uponEviction()
            throws Exception {
        final AtomicInteger storeCount = new AtomicInteger(0);
        MapStore<Integer, Integer> store = createSlowMapStore(storeCount);
        IMap<Integer, Integer> map = TestMapUsingMapStoreBuilder.<Integer, Integer>create()
                .withMapStore(store)
                .withNodeCount(1)
                .withNodeFactory(createHazelcastInstanceFactory(1))
                .withBackupCount(0)
                .withWriteDelaySeconds(1)
                .build();
        map.put(1, 1);
        map.evict(1);

        // give some time to process write-behind
        sleepSeconds(2);

        assertStoreCount(1, storeCount);
    }

    @Test
    public void testTransientlyPutKeysAreNotReachable_afterEviction() throws Exception {
        int numberOfItems = 1000;
        IMap<Integer, Integer> map = createMapBackedByWriteBehindStore();

        // 1. these puts are used to create write-behind-queues on partitions
        for (int i = -1; i > -numberOfItems; i--) {
            map.put(i, i);
        }

        // 2. put transient entries
        for (int i = 0; i < numberOfItems; i++) {
            map.putTransient(i, i, 10, TimeUnit.SECONDS);
        }

        // 3. evict all transient entries
        for (int i = 0; i < numberOfItems; i++) {
            map.evict(i);
        }

        // 4. expecting all transiently put entries are not reachable
        assertEntriesRemoved(map, numberOfItems);
    }

    private IMap<Integer, Integer> createMapBackedByWriteBehindStore() {
        MapStoreWithCounter<Integer, Integer> mapStore =
                new MapStoreWithCounter<Integer, Integer>();
        return TestMapUsingMapStoreBuilder.<Integer, Integer>create()
                .withMapStore(mapStore)
                .withNodeCount(1)
                .withNodeFactory(createHazelcastInstanceFactory(1))
                .withWriteDelaySeconds(100)
                .build();
    }

    private void assertEntriesRemoved(IMap<Integer, Integer> map, int numberOfItems) {
        for (int i = 0; i < numberOfItems; i++) {
            assertNull(i + " should not be in this map", map.get(i));
        }
    }

    /** Asserts (with retries) that the store was invoked exactly {@code expected} times. */
    private void assertStoreCount(final int expected, final AtomicInteger storeCount) {
        assertTrueEventually(new AssertTask() {
            @Override
            public void run() throws Exception {
                assertEquals(expected, storeCount.get());
            }
        });
    }

    /** A store whose store() call takes 5s, so overlapping flushes are observable. */
    private MapStore<Integer, Integer> createSlowMapStore(final AtomicInteger storeCount) {
        return new MapStoreAdapter<Integer, Integer>() {
            @Override
            public void store(Integer key, Integer value) {
                storeCount.incrementAndGet();
                sleepSeconds(5);
            }
        };
    }

    // NOTE(review): both arguments are evaluated once by the caller, so retrying via
    // assertTrueEventually cannot observe a changed value — the retry loop only delays
    // a failure. If the intent was to wait for write-behind to settle, the map should
    // be re-read inside run(); TODO confirm before changing, one caller passes a
    // remove() return value rather than a live map read.
    private void assertFinalValueEquals(final int expected, final int actual) {
        assertTrueEventually(new AssertTask() {
            @Override
            public void run() throws Exception {
                assertEquals(expected, actual);
            }
        }, 20);
    }

    private void populateMap(IMap<Integer, Integer> map, int numberOfItems) {
        populateMap(map, numberOfItems, 0);
    }

    private void populateMap(IMap<Integer, Integer> map, int numberOfItems, int ttlSeconds) {
        for (int i = 0; i < numberOfItems; i++) {
            map.put(i, i, ttlSeconds, TimeUnit.SECONDS);
        }
    }

    private void continuouslyUpdateKey(IMap<Integer, Integer> map, int numberOfUpdates, int key) {
        for (int i = 0; i < numberOfUpdates; i++) {
            map.put(key, i);
        }
    }

    private void evictMap(IMap<Integer, Integer> map, int numberOfItems) {
        for (int i = 0; i < numberOfItems; i++) {
            map.evict(i);
        }
    }

    private void assertFinalValueEqualsForEachEntry(IMap<Integer, Integer> map,
                                                    int numberOfItems) {
        for (int i = 0; i < numberOfItems; i++) {
            assertFinalValueEquals(i, map.get(i));
        }
    }
}
/*
 * Copyright 2012 JBoss Inc
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package org.drools.workbench.screens.dtablexls.backend.server.conversion.builders;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.drools.decisiontable.parser.ActionType;
import org.drools.decisiontable.parser.RuleSheetParserUtil;
import org.drools.template.model.SnippetBuilder;
import org.drools.template.model.SnippetBuilder.SnippetType;
import org.drools.template.parser.DecisionTableParseException;
import org.drools.workbench.models.datamodel.oracle.DataType;
import org.drools.workbench.models.datamodel.rule.FreeFormLine;
import org.drools.workbench.models.guided.dtable.shared.conversion.ConversionMessageType;
import org.drools.workbench.models.guided.dtable.shared.conversion.ConversionResult;
import org.drools.workbench.models.guided.dtable.shared.model.BRLConditionColumn;
import org.drools.workbench.models.guided.dtable.shared.model.BRLConditionVariableColumn;
import org.drools.workbench.models.guided.dtable.shared.model.DTCellValue52;
import org.drools.workbench.models.guided.dtable.shared.model.GuidedDecisionTable52;

/**
 * Builder for Condition (LHS) columns. Converts one XLS decision table CONDITION
 * column definition, its code-snippet templates and its cell values into BRL
 * fragment Condition columns on a {@link GuidedDecisionTable52}.
 */
public class GuidedDecisionTableLHSBuilder implements HasColumnHeadings,
                                                      GuidedDecisionTableSourceBuilder {

    //Location (XLS row and column, zero-based) of this column's definition cell
    private final int headerRow;
    private final int headerCol;

    //DRL generation parameters derived from the column definition by preProcessColumnDefinition()
    private String colDefPrefix;
    private String colDefSuffix;
    private boolean hasPattern;
    private String andop;

    //Operators used to detect whether a template contains an operator or implies "=="
    private static final Set<String> OPERATORS = Collections.unmodifiableSet(
            new HashSet<String>( Arrays.asList( "==",
                                                "=",
                                                "!=",
                                                "<",
                                                ">",
                                                "<=",
                                                ">=",
                                                "contains",
                                                "matches",
                                                "memberOf",
                                                "str[startsWith]",
                                                "str[endsWith]",
                                                "str[length]" ) ) );

    //"( ) from ..." - empty parentheses immediately followed by a "from" clause
    private static final Pattern patParFrm = Pattern.compile( "\\(\\s*\\)\\s*from\\b" );
    //"... from ..." - a "from" clause without explicit parentheses
    private static final Pattern patFrm = Pattern.compile( "\\s+from\\s+" );
    //"( )" - empty parentheses marking where constraints should be inserted
    private static final Pattern patPar = Pattern.compile( "\\(\\s*\\)" );
    //trailing "eval" (optionally with empty parentheses) at the end of the definition
    private static final Pattern patEval = Pattern.compile( "\\beval\\s*(?:\\(\\s*\\)\\s*)?$" );

    //Map of column headers, keyed on XLS column index
    private final Map<Integer, String> columnHeaders = new HashMap<Integer, String>();

    //Map of column value parsers, keyed on XLS column index
    private final Map<Integer, ParameterizedValueBuilder> valueBuilders = new HashMap<Integer, ParameterizedValueBuilder>();

    //Utility class to convert XLS parameters to BRLFragment Template keys
    private final ParameterUtilities parameterUtilities;

    //Collector for conversion warnings and errors
    private final ConversionResult conversionResult;

    /**
     * @param row XLS row index of the column definition header
     * @param column XLS column index of the column definition header
     * @param colDefinition the CONDITION column definition text (may be null or empty)
     * @param parameterUtilities converts XLS snippet parameters to template keys
     * @param conversionResult collector for conversion messages
     */
    public GuidedDecisionTableLHSBuilder( final int row,
                                          final int column,
                                          final String colDefinition,
                                          final ParameterUtilities parameterUtilities,
                                          final ConversionResult conversionResult ) {
        this.headerRow = row;
        this.headerCol = column;
        this.parameterUtilities = parameterUtilities;
        this.conversionResult = conversionResult;
        preProcessColumnDefinition( colDefinition );
    }

    //Determine the DRL prefix/suffix and constraint separator implied by the
    //column definition's shape (eval, "from" clause, explicit parentheses, or bare Pattern)
    private void preProcessColumnDefinition( final String colDefinition ) {
        final String colDef = colDefinition == null ? "" : colDefinition;

        //No definition: cell snippets are free-standing DRL, not Pattern constraints
        if ( "".equals( colDef ) ) {
            colDefPrefix = colDefSuffix = "";
            hasPattern = false;
            andop = "";
            return;
        }

        hasPattern = true;

        // ...eval - snippets become the boolean expression inside eval(...), joined with "&&"
        final Matcher matEval = patEval.matcher( colDef );
        if ( matEval.find() ) {
            colDefPrefix = colDef.substring( 0, matEval.start() ) + "eval(";
            colDefSuffix = ")";
            andop = " && ";
            return;
        }

        //Pattern constraints are comma-separated
        andop = ", ";

        // ...(<b> ) from... - insert constraints into the empty parentheses before the "from"
        final Matcher matParFrm = patParFrm.matcher( colDef );
        if ( matParFrm.find() ) {
            colDefPrefix = colDef.substring( 0, matParFrm.start() ) + "(";
            colDefSuffix = ") from" + colDef.substring( matParFrm.end() );
            return;
        }

        // ...from... - no parentheses given; wrap constraints and re-attach the "from" clause
        final Matcher matFrm = patFrm.matcher( colDef );
        if ( matFrm.find() ) {
            colDefPrefix = colDef.substring( 0, matFrm.start() ) + "(";
            colDefSuffix = ") from " + colDef.substring( matFrm.end() );
            return;
        }

        // ...(<b> )... - insert constraints into the empty parentheses
        final Matcher matPar = patPar.matcher( colDef );
        if ( matPar.find() ) {
            colDefPrefix = colDef.substring( 0, matPar.start() ) + "(";
            colDefSuffix = ")" + colDef.substring( matPar.end() );
            return;
        }

        // <a> - a bare Pattern type; append parentheses for the constraints
        colDefPrefix = colDef + "(";
        colDefSuffix = ")";
    }

    @Override
    public void populateDecisionTable( final GuidedDecisionTable52 dtable,
                                       final int maxRowCount ) {
        if ( !hasPattern ) {
            //Add separate columns for each ValueBuilder
            addExplicitColumns( dtable,
                                maxRowCount );
        } else {
            //Add a single column for all ValueBuilders
            addPatternColumn( dtable,
                              maxRowCount );
        }
    }

    //An explicit column does not add constraints to a Pattern. It does not have a value in the OBJECT row
    private void addExplicitColumns( final GuidedDecisionTable52 dtable,
                                     final int maxRowCount ) {
        //Sort column builders by column index to ensure Actions are added in the correct sequence
        final Set<Integer> sortedIndexes = new TreeSet<Integer>( this.valueBuilders.keySet() );
        for ( Integer index : sortedIndexes ) {
            final ParameterizedValueBuilder vb = this.valueBuilders.get( index );
            if ( vb instanceof LiteralValueBuilder ) {
                addLiteralColumn( dtable,
                                  (LiteralValueBuilder) vb,
                                  maxRowCount,
                                  index );
            } else {
                addBRLFragmentColumn( dtable,
                                      vb,
                                      maxRowCount,
                                      index );
            }
        }
    }

    //Adds a parameter-less BRL fragment column for a literal snippet
    private void addLiteralColumn( final GuidedDecisionTable52 dtable,
                                   final LiteralValueBuilder vb,
                                   final int maxRowCount,
                                   final int index ) {
        //Create column - Everything is a BRL fragment (for now)
        final BRLConditionColumn column = new BRLConditionColumn();
        final FreeFormLine ffl = new FreeFormLine();
        ffl.setText( vb.getTemplate() );
        column.getDefinition().add( ffl );

        //A literal snippet has no parameters, so a single BOOLEAN child column holds the "use this row?" flag
        final BRLConditionVariableColumn parameterColumn = new BRLConditionVariableColumn( "",
                                                                                           DataType.TYPE_BOOLEAN );
        column.getChildColumns().add( parameterColumn );
        column.setHeader( this.columnHeaders.get( index ) );
        dtable.getConditions().add( column );

        //Add column data
        final List<List<DTCellValue52>> columnData = assertColumnData( vb,
                                                                       maxRowCount );
        final int iColIndex = dtable.getExpandedColumns().indexOf( column.getChildColumns().get( 0 ) );
        insertColumnData( dtable,
                          columnData,
                          iColIndex );
    }

    //Adds a BRL fragment column with one child column per snippet parameter
    private void addBRLFragmentColumn( final GuidedDecisionTable52 dtable,
                                       final ParameterizedValueBuilder vb,
                                       final int maxRowCount,
                                       final int index ) {
        //Create column - Everything is a BRL fragment (for now)
        final BRLConditionColumn column = new BRLConditionColumn();
        final FreeFormLine ffl = new FreeFormLine();
        ffl.setText( vb.getTemplate() );
        column.getDefinition().add( ffl );
        for ( String parameter : vb.getParameters() ) {
            final BRLConditionVariableColumn parameterColumn = new BRLConditionVariableColumn( parameter,
                                                                                               DataType.TYPE_OBJECT );
            column.getChildColumns().add( parameterColumn );
        }
        column.setHeader( this.columnHeaders.get( index ) );
        dtable.getConditions().add( column );

        //Add column data. We can use the index of the first child column to add all data
        final List<List<DTCellValue52>> columnData = assertColumnData( vb,
                                                                       maxRowCount );
        final int iColIndex = dtable.getExpandedColumns().indexOf( column.getChildColumns().get( 0 ) );
        insertColumnData( dtable,
                          columnData,
                          iColIndex );
    }

    //A Pattern column adds constraints to a Pattern. It has a value in the OBJECT row
    private void addPatternColumn( final GuidedDecisionTable52 dtable,
                                   final int maxRowCount ) {
        //Sort column builders by column index to ensure Actions are added in the correct sequence
        final TreeSet<Integer> sortedIndexes = new TreeSet<Integer>( this.valueBuilders.keySet() );

        //If the Pattern spans multiple columns create a column header
        String columnHeader = this.columnHeaders.get( sortedIndexes.first() );
        if ( sortedIndexes.size() > 1 ) {
            columnHeader = "Converted from cell ["
                    + RuleSheetParserUtil.rc2name( this.headerRow + 1,
                                                   this.headerCol ) + "]";
        }

        //Create column - Everything is a BRL fragment (for now)
        final BRLConditionColumn column = new BRLConditionColumn();
        dtable.getConditions().add( column );
        final FreeFormLine ffl = new FreeFormLine();
        column.getDefinition().add( ffl );

        //DRL prefix
        final StringBuilder drl = new StringBuilder();
        drl.append( this.colDefPrefix );

        String sep = "";
        int dataColumnIndex = 0;
        for ( Integer index : sortedIndexes ) {
            final ParameterizedValueBuilder vb = this.valueBuilders.get( index );

            //DRL fragment - constraints joined with the separator chosen in preProcessColumnDefinition()
            drl.append( sep ).append( vb.getTemplate() );
            sep = this.andop;

            //Add columns for parameters
            for ( String parameter : vb.getParameters() ) {
                final BRLConditionVariableColumn parameterColumn = new BRLConditionVariableColumn( parameter,
                                                                                                   DataType.TYPE_OBJECT );
                column.getChildColumns().add( parameterColumn );
            }

            //Add column data, offset by the parameters of the builders processed so far
            final List<List<DTCellValue52>> columnData = assertColumnData( vb,
                                                                           maxRowCount );
            final int iColIndex = dtable.getExpandedColumns().indexOf( column.getChildColumns().get( dataColumnIndex ) );
            insertColumnData( dtable,
                              columnData,
                              iColIndex );
            dataColumnIndex = dataColumnIndex + vb.getParameters().size();
        }

        //DRL suffix
        drl.append( this.colDefSuffix );
        ffl.setText( drl.toString() );

        //Set header after children have been added as it's copied into them
        column.setHeader( columnHeader );
    }

    //Inserts one cell per row of columnData into the table's data at the given expanded column index
    private void insertColumnData( final GuidedDecisionTable52 dtable,
                                   final List<List<DTCellValue52>> columnData,
                                   final int iColIndex ) {
        for ( int iRow = 0; iRow < columnData.size(); iRow++ ) {
            final List<DTCellValue52> rowData = dtable.getData().get( iRow );
            rowData.addAll( iColIndex,
                            columnData.get( iRow ) );
        }
    }

    @Override
    public void addTemplate( final int row,
                             final int column,
                             final String content ) {
        //Validate column template
        if ( valueBuilders.containsKey( column ) ) {
            final String message = "Internal error: Can't have a code snippet added twice to one spreadsheet column.";
            this.conversionResult.addMessage( message,
                                              ConversionMessageType.ERROR );
            return;
        }

        //Add new template
        final String template = content.trim();
        try {
            this.valueBuilders.put( column,
                                    getValueBuilder( template ) );
        } catch ( DecisionTableParseException pe ) {
            this.conversionResult.addMessage( pe.getMessage(),
                                              ConversionMessageType.WARNING );
        }
    }

    @Override
    public void setColumnHeader( final int column,
                                 final String value ) {
        this.columnHeaders.put( column,
                                value.trim() );
    }

    /**
     * Makes a ValueBuilder appropriate to the "template" type:-
     *   age                    ---> SnippetType.SINGLE (promoted to PARAM, see below)
     *   age ==                 ---> SnippetType.SINGLE (promoted to PARAM, see below)
     *   age == $param          ---> SnippetType.PARAM
     *   age == $1 || age == $2 ---> SnippetType.INDEXED
     *   forall{age < $}{,}     ---> SnippetType.FORALL (unsupported; throws)
     *
     * @throws DecisionTableParseException if the snippet type is not supported
     */
    private ParameterizedValueBuilder getValueBuilder( final String content ) {
        String template = content.trim();
        SnippetType type = SnippetBuilder.getType( template );

        //A SINGLE snippet is promoted to a single-parameter template: an implicit
        //"==" is appended unless the snippet already ends with an operator, and the
        //cell value is substituted as a quoted parameter
        if ( type == SnippetType.SINGLE ) {
            type = SnippetType.PARAM;
            boolean hasExplicitOperator = false;
            for ( String op : OPERATORS ) {
                if ( template.endsWith( op ) ) {
                    hasExplicitOperator = true;
                    break;
                }
            }
            if ( !hasExplicitOperator ) {
                template = template + " ==";
            }
            template = template + " \"";
            template = template + SnippetBuilder.PARAM_STRING + "\"";
        }

        //Make a ValueBuilder for the template
        switch ( type ) {
            case INDEXED:
                return new IndexedParametersValueBuilder( template,
                                                          parameterUtilities );
            case PARAM:
                return new SingleParameterValueBuilder( template,
                                                        parameterUtilities );
            case SINGLE:
                return new LiteralValueBuilder( template );
        }
        //Any other type (e.g. FORALL) is reported as a warning by addTemplate()
        throw new DecisionTableParseException( "SnippetBuilder.SnippetType '" + type.toString() + "' is not supported. The column will not be added." );
    }

    @Override
    public void addCellValue( final int row,
                              final int column,
                              final String value ) {
        //Add new row to column data
        final ParameterizedValueBuilder vb = this.valueBuilders.get( column );
        if ( vb == null ) {
            final String message = "No code snippet for CONDITION, above cell "
                    + RuleSheetParserUtil.rc2name( this.headerRow + 2,
                                                   this.headerCol );
            this.conversionResult.addMessage( message,
                                              ConversionMessageType.ERROR );
            return;
        }
        vb.addCellValue( row,
                         column,
                         value );
    }

    @Override
    public ActionType.Code getActionTypeCode() {
        return ActionType.Code.CONDITION;
    }

    @Override
    public String getResult() {
        //This builder populates a GuidedDecisionTable52 directly; it never emits DRL text
        throw new UnsupportedOperationException( "GuidedDecisionTableLHSBuilder does not return DRL." );
    }

    @Override
    public void clearValues() {
        throw new UnsupportedOperationException();
    }

    @Override
    public boolean hasValues() {
        throw new UnsupportedOperationException();
    }

    @Override
    public int getRowCount() {
        //The longest column of cell data defines the table's row count
        int maxRowCount = 0;
        for ( ParameterizedValueBuilder pvb : valueBuilders.values() ) {
            maxRowCount = Math.max( maxRowCount,
                                    pvb.getColumnData().size() );
        }
        return maxRowCount;
    }

    //Pads the builder's column data with empty cells up to maxRowCount so every
    //column contributes a value for every row
    private List<List<DTCellValue52>> assertColumnData( final ParameterizedValueBuilder pvb,
                                                        final int maxRowCount ) {
        final List<List<DTCellValue52>> columnData = pvb.getColumnData();
        final List<String> parameters = pvb.getParameters();
        if ( columnData.size() < maxRowCount ) {
            for ( int iRow = columnData.size(); iRow < maxRowCount; iRow++ ) {
                final List<DTCellValue52> brlFragmentData = new ArrayList<DTCellValue52>();
                for ( int iCol = 0; iCol < parameters.size(); iCol++ ) {
                    brlFragmentData.add( new DTCellValue52() );
                }
                columnData.add( brlFragmentData );
            }
        }
        return columnData;
    }

}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.beam.sdk.io.gcp.datastore; import static com.google.datastore.v1.PropertyFilter.Operator.EQUAL; import static com.google.datastore.v1.PropertyOrder.Direction.DESCENDING; import static com.google.datastore.v1.client.DatastoreHelper.makeAndFilter; import static com.google.datastore.v1.client.DatastoreHelper.makeDelete; import static com.google.datastore.v1.client.DatastoreHelper.makeFilter; import static com.google.datastore.v1.client.DatastoreHelper.makeKey; import static com.google.datastore.v1.client.DatastoreHelper.makeOrder; import static com.google.datastore.v1.client.DatastoreHelper.makeUpsert; import static com.google.datastore.v1.client.DatastoreHelper.makeValue; import static org.apache.beam.sdk.io.gcp.datastore.DatastoreV1.DATASTORE_BATCH_UPDATE_BYTES_LIMIT; import static org.apache.beam.sdk.io.gcp.datastore.DatastoreV1.Read.DEFAULT_BUNDLE_SIZE_BYTES; import static org.apache.beam.sdk.io.gcp.datastore.DatastoreV1.Read.QUERY_BATCH_LIMIT; import static org.apache.beam.sdk.io.gcp.datastore.DatastoreV1.Read.getEstimatedSizeBytes; import static org.apache.beam.sdk.io.gcp.datastore.DatastoreV1.Read.makeRequest; import static 
org.apache.beam.sdk.io.gcp.datastore.DatastoreV1.Read.translateGqlQueryWithLimitCheck; import static org.apache.beam.sdk.io.gcp.datastore.DatastoreV1.isValidKey; import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import static org.mockito.Matchers.any; import static org.mockito.Matchers.eq; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.verifyZeroInteractions; import static org.mockito.Mockito.when; import com.google.datastore.v1.CommitRequest; import com.google.datastore.v1.CommitResponse; import com.google.datastore.v1.Entity; import com.google.datastore.v1.EntityResult; import com.google.datastore.v1.GqlQuery; import com.google.datastore.v1.Key; import com.google.datastore.v1.Mutation; import com.google.datastore.v1.PartitionId; import com.google.datastore.v1.Query; import com.google.datastore.v1.QueryResultBatch; import com.google.datastore.v1.RunQueryRequest; import com.google.datastore.v1.RunQueryResponse; import com.google.datastore.v1.client.Datastore; import com.google.datastore.v1.client.DatastoreException; import com.google.datastore.v1.client.QuerySplitter; import com.google.protobuf.Int32Value; import com.google.rpc.Code; import java.util.ArrayList; import java.util.Date; import java.util.HashSet; import java.util.List; import java.util.Set; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.io.gcp.datastore.DatastoreV1.DatastoreWriterFn; import 
org.apache.beam.sdk.io.gcp.datastore.DatastoreV1.DeleteEntity; import org.apache.beam.sdk.io.gcp.datastore.DatastoreV1.DeleteEntityFn; import org.apache.beam.sdk.io.gcp.datastore.DatastoreV1.DeleteKey; import org.apache.beam.sdk.io.gcp.datastore.DatastoreV1.DeleteKeyFn; import org.apache.beam.sdk.io.gcp.datastore.DatastoreV1.Read.ReadFn; import org.apache.beam.sdk.io.gcp.datastore.DatastoreV1.Read.SplitQueryFn; import org.apache.beam.sdk.io.gcp.datastore.DatastoreV1.Read.V1Options; import org.apache.beam.sdk.io.gcp.datastore.DatastoreV1.UpsertFn; import org.apache.beam.sdk.io.gcp.datastore.DatastoreV1.V1DatastoreFactory; import org.apache.beam.sdk.io.gcp.datastore.DatastoreV1.Write; import org.apache.beam.sdk.options.PipelineOptions; import org.apache.beam.sdk.options.PipelineOptionsFactory; import org.apache.beam.sdk.options.ValueProvider; import org.apache.beam.sdk.options.ValueProvider.StaticValueProvider; import org.apache.beam.sdk.testing.TestPipeline; import org.apache.beam.sdk.transforms.DoFnTester; import org.apache.beam.sdk.transforms.DoFnTester.CloningBehavior; import org.apache.beam.sdk.transforms.PTransform; import org.apache.beam.sdk.transforms.display.DisplayData; import org.apache.beam.sdk.transforms.display.DisplayDataEvaluator; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PBegin; import org.apache.beam.sdk.values.PCollection; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; import org.mockito.Mock; import org.mockito.MockitoAnnotations; /** Tests for {@link DatastoreV1}. 
*/ @RunWith(JUnit4.class) public class DatastoreV1Test { private static final String PROJECT_ID = "testProject"; private static final String NAMESPACE = "testNamespace"; private static final String KIND = "testKind"; private static final Query QUERY; private static final String LOCALHOST = "localhost:9955"; private static final String GQL_QUERY = "SELECT * from " + KIND; private static final V1Options V_1_OPTIONS; static { Query.Builder q = Query.newBuilder(); q.addKindBuilder().setName(KIND); QUERY = q.build(); V_1_OPTIONS = V1Options.from(PROJECT_ID, NAMESPACE, null); } @Mock private Datastore mockDatastore; @Mock QuerySplitter mockQuerySplitter; @Mock V1DatastoreFactory mockDatastoreFactory; @Rule public final ExpectedException thrown = ExpectedException.none(); @Before public void setUp() { MockitoAnnotations.initMocks(this); DatastoreV1.Read initialRead = DatastoreIO.v1().read().withProjectId(PROJECT_ID).withQuery(QUERY).withNamespace(NAMESPACE); when(mockDatastoreFactory.getDatastore( any(PipelineOptions.class), any(String.class), any(String.class))) .thenReturn(mockDatastore); when(mockDatastoreFactory.getQuerySplitter()).thenReturn(mockQuerySplitter); } @Test public void testBuildRead() throws Exception { DatastoreV1.Read read = DatastoreIO.v1().read().withProjectId(PROJECT_ID).withQuery(QUERY).withNamespace(NAMESPACE); assertEquals(QUERY, read.getQuery()); assertEquals(PROJECT_ID, read.getProjectId().get()); assertEquals(NAMESPACE, read.getNamespace().get()); } @Test public void testBuildReadWithGqlQuery() throws Exception { DatastoreV1.Read read = DatastoreIO.v1() .read() .withProjectId(PROJECT_ID) .withLiteralGqlQuery(GQL_QUERY) .withNamespace(NAMESPACE); assertEquals(GQL_QUERY, read.getLiteralGqlQuery().get()); assertEquals(PROJECT_ID, read.getProjectId().get()); assertEquals(NAMESPACE, read.getNamespace().get()); } /** {@link #testBuildRead} but constructed in a different order. 
*/ @Test public void testBuildReadAlt() throws Exception { DatastoreV1.Read read = DatastoreIO.v1() .read() .withQuery(QUERY) .withNamespace(NAMESPACE) .withProjectId(PROJECT_ID) .withLocalhost(LOCALHOST); assertEquals(QUERY, read.getQuery()); assertEquals(PROJECT_ID, read.getProjectId().get()); assertEquals(NAMESPACE, read.getNamespace().get()); assertEquals(LOCALHOST, read.getLocalhost()); } @Test public void testReadValidationFailsQueryAndGqlQuery() throws Exception { DatastoreV1.Read read = DatastoreIO.v1() .read() .withProjectId(PROJECT_ID) .withLiteralGqlQuery(GQL_QUERY) .withQuery(QUERY); thrown.expect(IllegalArgumentException.class); thrown.expectMessage("withQuery() and withLiteralGqlQuery() are exclusive"); read.expand(null); } @Test public void testReadValidationFailsQueryLimitZero() throws Exception { Query invalidLimit = Query.newBuilder().setLimit(Int32Value.newBuilder().setValue(0)).build(); thrown.expect(IllegalArgumentException.class); thrown.expectMessage("Invalid query limit 0: must be positive"); DatastoreIO.v1().read().withQuery(invalidLimit); } @Test public void testReadValidationFailsQueryLimitNegative() throws Exception { Query invalidLimit = Query.newBuilder().setLimit(Int32Value.newBuilder().setValue(-5)).build(); thrown.expect(IllegalArgumentException.class); thrown.expectMessage("Invalid query limit -5: must be positive"); DatastoreIO.v1().read().withQuery(invalidLimit); } @Test public void testReadDisplayData() { DatastoreV1.Read read = DatastoreIO.v1().read().withProjectId(PROJECT_ID).withQuery(QUERY).withNamespace(NAMESPACE); DisplayData displayData = DisplayData.from(read); assertThat(displayData, hasDisplayItem("projectId", PROJECT_ID)); assertThat(displayData, hasDisplayItem("query", QUERY.toString())); assertThat(displayData, hasDisplayItem("namespace", NAMESPACE)); } @Test public void testReadDisplayDataWithGqlQuery() { DatastoreV1.Read read = DatastoreIO.v1() .read() .withProjectId(PROJECT_ID) .withLiteralGqlQuery(GQL_QUERY) 
.withNamespace(NAMESPACE); DisplayData displayData = DisplayData.from(read); assertThat(displayData, hasDisplayItem("projectId", PROJECT_ID)); assertThat(displayData, hasDisplayItem("gqlQuery", GQL_QUERY)); assertThat(displayData, hasDisplayItem("namespace", NAMESPACE)); } @Test public void testSourcePrimitiveDisplayData() { DisplayDataEvaluator evaluator = DisplayDataEvaluator.create(); int numSplits = 98; PTransform<PBegin, PCollection<Entity>> read = DatastoreIO.v1() .read() .withProjectId(PROJECT_ID) .withQuery(Query.newBuilder().build()) .withNumQuerySplits(numSplits); String assertMessage = "DatastoreIO read should include the '%s' in its primitive display data"; Set<DisplayData> displayData = evaluator.displayDataForPrimitiveSourceTransforms(read); assertThat( String.format(assertMessage, "project id"), displayData, hasItem(hasDisplayItem("projectId", PROJECT_ID))); assertThat( String.format(assertMessage, "number of query splits"), displayData, hasItem(hasDisplayItem("numQuerySplits", numSplits))); } @Test public void testWriteDisplayData() { Write write = DatastoreIO.v1().write().withProjectId(PROJECT_ID); DisplayData displayData = DisplayData.from(write); assertThat(displayData, hasDisplayItem("projectId", PROJECT_ID)); } @Test public void testDeleteEntityDisplayData() { DeleteEntity deleteEntity = DatastoreIO.v1().deleteEntity().withProjectId(PROJECT_ID); DisplayData displayData = DisplayData.from(deleteEntity); assertThat(displayData, hasDisplayItem("projectId", PROJECT_ID)); } @Test public void testDeleteKeyDisplayData() { DeleteKey deleteKey = DatastoreIO.v1().deleteKey().withProjectId(PROJECT_ID); DisplayData displayData = DisplayData.from(deleteKey); assertThat(displayData, hasDisplayItem("projectId", PROJECT_ID)); } @Test public void testWritePrimitiveDisplayData() { DisplayDataEvaluator evaluator = DisplayDataEvaluator.create(); PTransform<PCollection<Entity>, ?> write = DatastoreIO.v1().write().withProjectId("myProject"); Set<DisplayData> 
displayData = evaluator.displayDataForPrimitiveTransforms(write); assertThat( "DatastoreIO write should include the project in its primitive display data", displayData, hasItem(hasDisplayItem("projectId"))); assertThat( "DatastoreIO write should include the upsertFn in its primitive display data", displayData, hasItem(hasDisplayItem("upsertFn"))); } @Test public void testDeleteEntityPrimitiveDisplayData() { DisplayDataEvaluator evaluator = DisplayDataEvaluator.create(); PTransform<PCollection<Entity>, ?> write = DatastoreIO.v1().deleteEntity().withProjectId("myProject"); Set<DisplayData> displayData = evaluator.displayDataForPrimitiveTransforms(write); assertThat( "DatastoreIO write should include the project in its primitive display data", displayData, hasItem(hasDisplayItem("projectId"))); assertThat( "DatastoreIO write should include the deleteEntityFn in its primitive display data", displayData, hasItem(hasDisplayItem("deleteEntityFn"))); } @Test public void testDeleteKeyPrimitiveDisplayData() { DisplayDataEvaluator evaluator = DisplayDataEvaluator.create(); PTransform<PCollection<Key>, ?> write = DatastoreIO.v1().deleteKey().withProjectId("myProject"); Set<DisplayData> displayData = evaluator.displayDataForPrimitiveTransforms(write); assertThat( "DatastoreIO write should include the project in its primitive display data", displayData, hasItem(hasDisplayItem("projectId"))); assertThat( "DatastoreIO write should include the deleteKeyFn in its primitive display data", displayData, hasItem(hasDisplayItem("deleteKeyFn"))); } /** Test building a Write using builder methods. */ @Test public void testBuildWrite() throws Exception { DatastoreV1.Write write = DatastoreIO.v1().write().withProjectId(PROJECT_ID); assertEquals(PROJECT_ID, write.getProjectId()); } /** Test the detection of complete and incomplete keys. 
*/ @Test public void testHasNameOrId() { Key key; // Complete with name, no ancestor key = makeKey("bird", "finch").build(); assertTrue(isValidKey(key)); // Complete with id, no ancestor key = makeKey("bird", 123).build(); assertTrue(isValidKey(key)); // Incomplete, no ancestor key = makeKey("bird").build(); assertFalse(isValidKey(key)); // Complete with name and ancestor key = makeKey("bird", "owl").build(); key = makeKey(key, "bird", "horned").build(); assertTrue(isValidKey(key)); // Complete with id and ancestor key = makeKey("bird", "owl").build(); key = makeKey(key, "bird", 123).build(); assertTrue(isValidKey(key)); // Incomplete with ancestor key = makeKey("bird", "owl").build(); key = makeKey(key, "bird").build(); assertFalse(isValidKey(key)); key = makeKey().build(); assertFalse(isValidKey(key)); } /** Test that entities with incomplete keys cannot be updated. */ @Test public void testAddEntitiesWithIncompleteKeys() throws Exception { Key key = makeKey("bird").build(); Entity entity = Entity.newBuilder().setKey(key).build(); UpsertFn upsertFn = new UpsertFn(); thrown.expect(IllegalArgumentException.class); thrown.expectMessage("Entities to be written to the Cloud Datastore must have complete keys"); upsertFn.apply(entity); } @Test /** Test that entities with valid keys are transformed to upsert mutations. */ public void testAddEntities() throws Exception { Key key = makeKey("bird", "finch").build(); Entity entity = Entity.newBuilder().setKey(key).build(); UpsertFn upsertFn = new UpsertFn(); Mutation exceptedMutation = makeUpsert(entity).build(); assertEquals(upsertFn.apply(entity), exceptedMutation); } /** Test that entities with incomplete keys cannot be deleted. 
*/ @Test public void testDeleteEntitiesWithIncompleteKeys() throws Exception { Key key = makeKey("bird").build(); Entity entity = Entity.newBuilder().setKey(key).build(); DeleteEntityFn deleteEntityFn = new DeleteEntityFn(); thrown.expect(IllegalArgumentException.class); thrown.expectMessage("Entities to be deleted from the Cloud Datastore must have complete keys"); deleteEntityFn.apply(entity); } /** Test that entities with valid keys are transformed to delete mutations. */ @Test public void testDeleteEntities() throws Exception { Key key = makeKey("bird", "finch").build(); Entity entity = Entity.newBuilder().setKey(key).build(); DeleteEntityFn deleteEntityFn = new DeleteEntityFn(); Mutation exceptedMutation = makeDelete(entity.getKey()).build(); assertEquals(deleteEntityFn.apply(entity), exceptedMutation); } /** Test that incomplete keys cannot be deleted. */ @Test public void testDeleteIncompleteKeys() throws Exception { Key key = makeKey("bird").build(); DeleteKeyFn deleteKeyFn = new DeleteKeyFn(); thrown.expect(IllegalArgumentException.class); thrown.expectMessage("Keys to be deleted from the Cloud Datastore must be complete"); deleteKeyFn.apply(key); } /** Test that valid keys are transformed to delete mutations. */ @Test public void testDeleteKeys() { Key key = makeKey("bird", "finch").build(); DeleteKeyFn deleteKeyFn = new DeleteKeyFn(); Mutation exceptedMutation = makeDelete(key).build(); assertEquals(deleteKeyFn.apply(key), exceptedMutation); } @Test public void testDatastoreWriteFnDisplayData() { DatastoreWriterFn datastoreWriter = new DatastoreWriterFn(PROJECT_ID, null); DisplayData displayData = DisplayData.from(datastoreWriter); assertThat(displayData, hasDisplayItem("projectId", PROJECT_ID)); } /** Tests {@link DatastoreWriterFn} with entities less than one batch. 
*/ @Test public void testDatatoreWriterFnWithOneBatch() throws Exception { datastoreWriterFnTest(100); } /** Tests {@link DatastoreWriterFn} with entities of more than one batches, but not a multiple. */ @Test public void testDatatoreWriterFnWithMultipleBatches() throws Exception { datastoreWriterFnTest(DatastoreV1.DATASTORE_BATCH_UPDATE_ENTITIES_START * 3 + 100); } /** * Tests {@link DatastoreWriterFn} with entities of several batches, using an exact multiple of * write batch size. */ @Test public void testDatatoreWriterFnWithBatchesExactMultiple() throws Exception { datastoreWriterFnTest(DatastoreV1.DATASTORE_BATCH_UPDATE_ENTITIES_START * 2); } // A helper method to test DatastoreWriterFn for various batch sizes. private void datastoreWriterFnTest(int numMutations) throws Exception { // Create the requested number of mutations. List<Mutation> mutations = new ArrayList<>(numMutations); for (int i = 0; i < numMutations; ++i) { mutations.add( makeUpsert(Entity.newBuilder().setKey(makeKey("key" + i, i + 1)).build()).build()); } DatastoreWriterFn datastoreWriter = new DatastoreWriterFn( StaticValueProvider.of(PROJECT_ID), null, mockDatastoreFactory, new FakeWriteBatcher()); DoFnTester<Mutation, Void> doFnTester = DoFnTester.of(datastoreWriter); doFnTester.setCloningBehavior(CloningBehavior.DO_NOT_CLONE); doFnTester.processBundle(mutations); int start = 0; while (start < numMutations) { int end = Math.min(numMutations, start + DatastoreV1.DATASTORE_BATCH_UPDATE_ENTITIES_START); CommitRequest.Builder commitRequest = CommitRequest.newBuilder(); commitRequest.setMode(CommitRequest.Mode.NON_TRANSACTIONAL); commitRequest.addAllMutations(mutations.subList(start, end)); // Verify all the batch requests were made with the expected mutations. verify(mockDatastore, times(1)).commit(commitRequest.build()); start = end; } } /** * Tests {@link DatastoreWriterFn} with large entities that need to be split into more batches. 
*/ @Test public void testDatatoreWriterFnWithLargeEntities() throws Exception { List<Mutation> mutations = new ArrayList<>(); int entitySize = 0; for (int i = 0; i < 12; ++i) { Entity entity = Entity.newBuilder() .setKey(makeKey("key" + i, i + 1)) .putProperties( "long", makeValue(new String(new char[900_000])).setExcludeFromIndexes(true).build()) .build(); entitySize = entity.getSerializedSize(); // Take the size of any one entity. mutations.add(makeUpsert(entity).build()); } DatastoreWriterFn datastoreWriter = new DatastoreWriterFn( StaticValueProvider.of(PROJECT_ID), null, mockDatastoreFactory, new FakeWriteBatcher()); DoFnTester<Mutation, Void> doFnTester = DoFnTester.of(datastoreWriter); doFnTester.setCloningBehavior(CloningBehavior.DO_NOT_CLONE); doFnTester.processBundle(mutations); // This test is over-specific currently; it requires that we split the 12 entity writes into 3 // requests, but we only need each CommitRequest to be less than 10MB in size. int entitiesPerRpc = DATASTORE_BATCH_UPDATE_BYTES_LIMIT / entitySize; int start = 0; while (start < mutations.size()) { int end = Math.min(mutations.size(), start + entitiesPerRpc); CommitRequest.Builder commitRequest = CommitRequest.newBuilder(); commitRequest.setMode(CommitRequest.Mode.NON_TRANSACTIONAL); commitRequest.addAllMutations(mutations.subList(start, end)); // Verify all the batch requests were made with the expected mutations. verify(mockDatastore).commit(commitRequest.build()); start = end; } } /** Tests {@link DatastoreWriterFn} with a failed request which is retried. 
*/ @Test public void testDatatoreWriterFnRetriesErrors() throws Exception { List<Mutation> mutations = new ArrayList<>(); int numRpcs = 2; for (int i = 0; i < DatastoreV1.DATASTORE_BATCH_UPDATE_ENTITIES_START * numRpcs; ++i) { mutations.add( makeUpsert(Entity.newBuilder().setKey(makeKey("key" + i, i + 1)).build()).build()); } CommitResponse successfulCommit = CommitResponse.getDefaultInstance(); when(mockDatastore.commit(any(CommitRequest.class))) .thenReturn(successfulCommit) .thenThrow(new DatastoreException("commit", Code.DEADLINE_EXCEEDED, "", null)) .thenReturn(successfulCommit); DatastoreWriterFn datastoreWriter = new DatastoreWriterFn( StaticValueProvider.of(PROJECT_ID), null, mockDatastoreFactory, new FakeWriteBatcher()); DoFnTester<Mutation, Void> doFnTester = DoFnTester.of(datastoreWriter); doFnTester.setCloningBehavior(CloningBehavior.DO_NOT_CLONE); doFnTester.processBundle(mutations); } /** * Tests {@link DatastoreV1.Read#getEstimatedSizeBytes} to fetch and return estimated size for a * query. */ @Test public void testEstimatedSizeBytes() throws Exception { long entityBytes = 100L; // In seconds long timestamp = 1234L; RunQueryRequest latestTimestampRequest = makeRequest(makeLatestTimestampQuery(NAMESPACE), NAMESPACE); RunQueryResponse latestTimestampResponse = makeLatestTimestampResponse(timestamp); // Per Kind statistics request and response RunQueryRequest statRequest = makeRequest(makeStatKindQuery(NAMESPACE, timestamp), NAMESPACE); RunQueryResponse statResponse = makeStatKindResponse(entityBytes); when(mockDatastore.runQuery(latestTimestampRequest)).thenReturn(latestTimestampResponse); when(mockDatastore.runQuery(statRequest)).thenReturn(statResponse); assertEquals(entityBytes, getEstimatedSizeBytes(mockDatastore, QUERY, NAMESPACE)); verify(mockDatastore, times(1)).runQuery(latestTimestampRequest); verify(mockDatastore, times(1)).runQuery(statRequest); } /** Tests {@link SplitQueryFn} when number of query splits is specified. 
*/ @Test public void testSplitQueryFnWithNumSplits() throws Exception { int numSplits = 100; when(mockQuerySplitter.getSplits( eq(QUERY), any(PartitionId.class), eq(numSplits), any(Datastore.class))) .thenReturn(splitQuery(QUERY, numSplits)); SplitQueryFn splitQueryFn = new SplitQueryFn(V_1_OPTIONS, numSplits, mockDatastoreFactory); DoFnTester<Query, Query> doFnTester = DoFnTester.of(splitQueryFn); /** * Although Datastore client is marked transient in {@link SplitQueryFn}, when injected through * mock factory using a when clause for unit testing purposes, it is not serializable because it * doesn't have a no-arg constructor. Thus disabling the cloning to prevent the doFn from being * serialized. */ doFnTester.setCloningBehavior(CloningBehavior.DO_NOT_CLONE); List<Query> queries = doFnTester.processBundle(QUERY); assertEquals(queries.size(), numSplits); // Confirms that sub-queries are not equal to original when there is more than one split. for (Query subQuery : queries) { assertNotEquals(subQuery, QUERY); } verify(mockQuerySplitter, times(1)) .getSplits(eq(QUERY), any(PartitionId.class), eq(numSplits), any(Datastore.class)); verifyZeroInteractions(mockDatastore); } /** Tests {@link SplitQueryFn} when no query splits is specified. 
 */
@Test
public void testSplitQueryFnWithoutNumSplits() throws Exception {
  // Force SplitQueryFn to compute the number of query splits
  int numSplits = 0;
  int expectedNumSplits = 20;
  // Size the fake statistics so that entityBytes / DEFAULT_BUNDLE_SIZE_BYTES == expectedNumSplits.
  long entityBytes = expectedNumSplits * DEFAULT_BUNDLE_SIZE_BYTES;
  // In seconds
  long timestamp = 1234L;

  RunQueryRequest latestTimestampRequest =
      makeRequest(makeLatestTimestampQuery(NAMESPACE), NAMESPACE);
  RunQueryResponse latestTimestampResponse = makeLatestTimestampResponse(timestamp);

  // Per Kind statistics request and response
  RunQueryRequest statRequest = makeRequest(makeStatKindQuery(NAMESPACE, timestamp), NAMESPACE);
  RunQueryResponse statResponse = makeStatKindResponse(entityBytes);

  when(mockDatastore.runQuery(latestTimestampRequest)).thenReturn(latestTimestampResponse);
  when(mockDatastore.runQuery(statRequest)).thenReturn(statResponse);
  when(mockQuerySplitter.getSplits(
          eq(QUERY), any(PartitionId.class), eq(expectedNumSplits), any(Datastore.class)))
      .thenReturn(splitQuery(QUERY, expectedNumSplits));

  SplitQueryFn splitQueryFn = new SplitQueryFn(V_1_OPTIONS, numSplits, mockDatastoreFactory);
  DoFnTester<Query, Query> doFnTester = DoFnTester.of(splitQueryFn);
  // Mock-injected client is not serializable; disable cloning of the DoFn.
  doFnTester.setCloningBehavior(CloningBehavior.DO_NOT_CLONE);
  List<Query> queries = doFnTester.processBundle(QUERY);

  assertEquals(expectedNumSplits, queries.size());
  verify(mockQuerySplitter, times(1))
      .getSplits(eq(QUERY), any(PartitionId.class), eq(expectedNumSplits), any(Datastore.class));
  verify(mockDatastore, times(1)).runQuery(latestTimestampRequest);
  verify(mockDatastore, times(1)).runQuery(statRequest);
}

/** Tests {@link DatastoreV1.Read.SplitQueryFn} when the query has a user specified limit.
 */
@Test
public void testSplitQueryFnWithQueryLimit() throws Exception {
  // A query that carries an explicit user limit must be passed through unsplit.
  Query queryWithLimit = QUERY.toBuilder().setLimit(Int32Value.newBuilder().setValue(1)).build();

  SplitQueryFn splitQueryFn = new SplitQueryFn(V_1_OPTIONS, 10, mockDatastoreFactory);
  DoFnTester<Query, Query> doFnTester = DoFnTester.of(splitQueryFn);
  doFnTester.setCloningBehavior(CloningBehavior.DO_NOT_CLONE);
  List<Query> queries = doFnTester.processBundle(queryWithLimit);

  assertEquals(1, queries.size());
  // Neither the datastore nor the splitter should be consulted for a limited query.
  verifyNoMoreInteractions(mockDatastore);
  verifyNoMoreInteractions(mockQuerySplitter);
}

/** Tests {@link ReadFn} with a query limit less than one batch. */
@Test
public void testReadFnWithOneBatch() throws Exception {
  readFnTest(5);
}

/** Tests {@link ReadFn} with a query limit more than one batch, and not a multiple. */
@Test
public void testReadFnWithMultipleBatches() throws Exception {
  readFnTest(QUERY_BATCH_LIMIT + 5);
}

/** Tests {@link ReadFn} for several batches, using an exact multiple of batch size results. */
@Test
public void testReadFnWithBatchesExactMultiple() throws Exception {
  readFnTest(5 * QUERY_BATCH_LIMIT);
}

/** Tests that {@link ReadFn} retries after an error. */
@Test
public void testReadFnRetriesErrors() throws Exception {
  // An empty query to read entities.
  Query query = Query.newBuilder().setLimit(Int32Value.newBuilder().setValue(1)).build();

  // Use mockResponseForQuery to generate results; the first call throws a retryable
  // DEADLINE_EXCEEDED, subsequent calls succeed.
  when(mockDatastore.runQuery(any(RunQueryRequest.class)))
      .thenThrow(new DatastoreException("RunQuery", Code.DEADLINE_EXCEEDED, "", null))
      .thenAnswer(
          invocationOnMock -> {
            Query q = ((RunQueryRequest) invocationOnMock.getArguments()[0]).getQuery();
            return mockResponseForQuery(q);
          });

  ReadFn readFn = new ReadFn(V_1_OPTIONS, mockDatastoreFactory);
  DoFnTester<Query, Entity> doFnTester = DoFnTester.of(readFn);
  doFnTester.setCloningBehavior(CloningBehavior.DO_NOT_CLONE);
  // Must complete without throwing despite the transient error.
  doFnTester.processBundle(query);
}

@Test
public void testTranslateGqlQueryWithLimit() throws Exception {
  String gql = "SELECT * from DummyKind LIMIT 10";
  String gqlWithZeroLimit = gql + " LIMIT 0";
  GqlQuery gqlQuery = GqlQuery.newBuilder().setQueryString(gql).setAllowLiterals(true).build();
  GqlQuery gqlQueryWithZeroLimit =
      GqlQuery.newBuilder().setQueryString(gqlWithZeroLimit).setAllowLiterals(true).build();
  RunQueryRequest gqlRequest = makeRequest(gqlQuery, V_1_OPTIONS.getNamespace());
  RunQueryRequest gqlRequestWithZeroLimit =
      makeRequest(gqlQueryWithZeroLimit, V_1_OPTIONS.getNamespace());
  // The "LIMIT 0" probe is rejected (the query already has a limit), so the translation
  // falls back to running the original query.
  when(mockDatastore.runQuery(gqlRequestWithZeroLimit))
      .thenThrow(
          new DatastoreException(
              "runQuery",
              Code.INVALID_ARGUMENT,
              "invalid query",
              // dummy
              new RuntimeException()));
  when(mockDatastore.runQuery(gqlRequest))
      .thenReturn(RunQueryResponse.newBuilder().setQuery(QUERY).build());
  assertEquals(
      translateGqlQueryWithLimitCheck(gql, mockDatastore, V_1_OPTIONS.getNamespace()), QUERY);
  verify(mockDatastore, times(1)).runQuery(gqlRequest);
  verify(mockDatastore, times(1)).runQuery(gqlRequestWithZeroLimit);
}

@Test
public void testTranslateGqlQueryWithNoLimit() throws Exception {
  String gql = "SELECT * from DummyKind";
  String gqlWithZeroLimit = gql + " LIMIT 0";
  GqlQuery gqlQueryWithZeroLimit =
      GqlQuery.newBuilder().setQueryString(gqlWithZeroLimit).setAllowLiterals(true).build();
  RunQueryRequest gqlRequestWithZeroLimit =
      makeRequest(gqlQueryWithZeroLimit, V_1_OPTIONS.getNamespace());
  // With no user limit, the "LIMIT 0" probe succeeds and its translated query is used directly.
  when(mockDatastore.runQuery(gqlRequestWithZeroLimit))
      .thenReturn(RunQueryResponse.newBuilder().setQuery(QUERY).build());
  assertEquals(
      translateGqlQueryWithLimitCheck(gql, mockDatastore, V_1_OPTIONS.getNamespace()), QUERY);
  verify(mockDatastore, times(1)).runQuery(gqlRequestWithZeroLimit);
}

/** Test options used to exercise runtime {@link ValueProvider} handling. */
public interface RuntimeTestOptions extends PipelineOptions {
  ValueProvider<String> getDatastoreProject();

  void setDatastoreProject(ValueProvider<String> value);

  ValueProvider<String> getGqlQuery();

  void setGqlQuery(ValueProvider<String> value);

  ValueProvider<String> getNamespace();

  void setNamespace(ValueProvider<String> value);
}

/**
 * Test to ensure that {@link ValueProvider} values are not accessed at pipeline construction time
 * when built with {@link DatastoreV1.Read#withQuery(Query)}.
 */
@Test
public void testRuntimeOptionsNotCalledInApplyQuery() {
  RuntimeTestOptions options = PipelineOptionsFactory.as(RuntimeTestOptions.class);
  Pipeline pipeline = TestPipeline.create(options);
  pipeline
      .apply(
          DatastoreIO.v1()
              .read()
              .withProjectId(options.getDatastoreProject())
              .withQuery(QUERY)
              .withNamespace(options.getNamespace()))
      .apply(DatastoreIO.v1().write().withProjectId(options.getDatastoreProject()));
}

/**
 * Test to ensure that {@link ValueProvider} values are not accessed at pipeline construction time
 * when built with {@link DatastoreV1.Read#withLiteralGqlQuery(String)}.
 */
@Test
public void testRuntimeOptionsNotCalledInApplyGqlQuery() {
  RuntimeTestOptions options = PipelineOptionsFactory.as(RuntimeTestOptions.class);
  Pipeline pipeline = TestPipeline.create(options);
  pipeline
      .apply(
          DatastoreIO.v1()
              .read()
              .withProjectId(options.getDatastoreProject())
              .withLiteralGqlQuery(options.getGqlQuery()))
      .apply(DatastoreIO.v1().write().withProjectId(options.getDatastoreProject()));
}

@Test
public void testWriteBatcherWithoutData() {
  DatastoreV1.WriteBatcher writeBatcher = new DatastoreV1.WriteBatcherImpl();
  writeBatcher.start();
  // With no latency samples, the batcher uses the initial batch size.
  assertEquals(DatastoreV1.DATASTORE_BATCH_UPDATE_ENTITIES_START, writeBatcher.nextBatchSize(0));
}

@Test
public void testWriteBatcherFastQueries() {
  DatastoreV1.WriteBatcher writeBatcher = new DatastoreV1.WriteBatcherImpl();
  writeBatcher.start();
  writeBatcher.addRequestLatency(0, 1000, 200);
  writeBatcher.addRequestLatency(0, 1000, 200);
  // Fast requests allow the batch size to grow to its upper limit.
  assertEquals(DatastoreV1.DATASTORE_BATCH_UPDATE_ENTITIES_LIMIT, writeBatcher.nextBatchSize(0));
}

@Test
public void testWriteBatcherSlowQueries() {
  DatastoreV1.WriteBatcher writeBatcher = new DatastoreV1.WriteBatcherImpl();
  writeBatcher.start();
  writeBatcher.addRequestLatency(0, 10000, 200);
  writeBatcher.addRequestLatency(0, 10000, 200);
  // Slow requests shrink the batch size.
  assertEquals(100, writeBatcher.nextBatchSize(0));
}

@Test
public void testWriteBatcherSizeNotBelowMinimum() {
  DatastoreV1.WriteBatcher writeBatcher = new DatastoreV1.WriteBatcherImpl();
  writeBatcher.start();
  writeBatcher.addRequestLatency(0, 30000, 50);
  writeBatcher.addRequestLatency(0, 30000, 50);
  // Even very slow requests never push the batch size below the configured minimum.
  assertEquals(DatastoreV1.DATASTORE_BATCH_UPDATE_ENTITIES_MIN, writeBatcher.nextBatchSize(0));
}

@Test
public void testWriteBatcherSlidingWindow() {
  DatastoreV1.WriteBatcher writeBatcher = new DatastoreV1.WriteBatcherImpl();
  writeBatcher.start();
  writeBatcher.addRequestLatency(0, 30000, 50);
  writeBatcher.addRequestLatency(50000, 5000, 200);
  writeBatcher.addRequestLatency(100000, 5000, 200);
  // The early slow sample ages out of the sliding window; only recent latencies count.
  assertEquals(200, writeBatcher.nextBatchSize(150000));
}

/**
 * Helper Methods
 */

/** A helper function that verifies if all the queries have unique keys. */
private void verifyUniqueKeys(List<KV<Integer, Query>> queries) {
  Set<Integer> keys = new HashSet<>();
  for (KV<Integer, Query> kv : queries) {
    keys.add(kv.getKey());
  }
  // A duplicate key would collapse in the set and make the sizes differ.
  assertEquals(keys.size(), queries.size());
}

/**
 * A helper function that creates mock {@link Entity} results in response to a query. Always
 * indicates that more results are available, unless the batch is limited to fewer than {@link
 * DatastoreV1.Read#QUERY_BATCH_LIMIT} results.
 */
private static RunQueryResponse mockResponseForQuery(Query q) {
  // Every query DatastoreV1 sends should have a limit.
  assertTrue(q.hasLimit());

  // The limit should be in the range [1, QUERY_BATCH_LIMIT]
  int limit = q.getLimit().getValue();
  assertThat(limit, greaterThanOrEqualTo(1));
  assertThat(limit, lessThanOrEqualTo(QUERY_BATCH_LIMIT));

  // Create the requested number of entities.
  List<EntityResult> entities = new ArrayList<>(limit);
  for (int i = 0; i < limit; ++i) {
    entities.add(
        EntityResult.newBuilder()
            .setEntity(Entity.newBuilder().setKey(makeKey("key" + i, i + 1)))
            .build());
  }

  // Fill out the other parameters on the returned result batch.
  RunQueryResponse.Builder ret = RunQueryResponse.newBuilder();
  ret.getBatchBuilder()
      .addAllEntityResults(entities)
      .setEntityResultType(EntityResult.ResultType.FULL)
      .setMoreResults(
          limit == QUERY_BATCH_LIMIT
              ? QueryResultBatch.MoreResultsType.NOT_FINISHED
              : QueryResultBatch.MoreResultsType.NO_MORE_RESULTS);
  return ret.build();
}

/** Helper function to run a test reading from a {@link ReadFn}. */
private void readFnTest(int numEntities) throws Exception {
  // An empty query to read entities.
  Query query =
      Query.newBuilder().setLimit(Int32Value.newBuilder().setValue(numEntities)).build();

  // Use mockResponseForQuery to generate results.
  when(mockDatastore.runQuery(any(RunQueryRequest.class)))
      .thenAnswer(
          invocationOnMock -> {
            Query q = ((RunQueryRequest) invocationOnMock.getArguments()[0]).getQuery();
            return mockResponseForQuery(q);
          });

  ReadFn readFn = new ReadFn(V_1_OPTIONS, mockDatastoreFactory);
  DoFnTester<Query, Entity> doFnTester = DoFnTester.of(readFn);
  /*
   * Although Datastore client is marked transient in {@link ReadFn}, when injected through mock
   * factory using a when clause for unit testing purposes, it is not serializable because it
   * doesn't have a no-arg constructor. Thus disabling the cloning to prevent the test object from
   * being serialized.
   */
  doFnTester.setCloningBehavior(CloningBehavior.DO_NOT_CLONE);
  List<Entity> entities = doFnTester.processBundle(query);

  // One RPC per full-or-partial batch of QUERY_BATCH_LIMIT entities.
  int expectedNumCallsToRunQuery = (int) Math.ceil((double) numEntities / QUERY_BATCH_LIMIT);
  verify(mockDatastore, times(expectedNumCallsToRunQuery)).runQuery(any(RunQueryRequest.class));
  // Validate the number of results.
  assertEquals(numEntities, entities.size());
}

/** Builds a per-kind statistics response with the given entity size. */
private static RunQueryResponse makeStatKindResponse(long entitySizeInBytes) {
  RunQueryResponse.Builder statKindResponse = RunQueryResponse.newBuilder();
  Entity.Builder entity = Entity.newBuilder();
  entity.setKey(makeKey("dummyKind", "dummyId"));
  entity.putProperties("entity_bytes", makeValue(entitySizeInBytes).build());
  EntityResult.Builder entityResult = EntityResult.newBuilder();
  entityResult.setEntity(entity);
  QueryResultBatch.Builder batch = QueryResultBatch.newBuilder();
  batch.addEntityResults(entityResult);
  statKindResponse.setBatch(batch);
  return statKindResponse.build();
}

/** Builds a response of the given timestamp.
 */
private static RunQueryResponse makeLatestTimestampResponse(long timestamp) {
  RunQueryResponse.Builder timestampResponse = RunQueryResponse.newBuilder();
  Entity.Builder entity = Entity.newBuilder();
  entity.setKey(makeKey("dummyKind", "dummyId"));
  // The statistics timestamp is in seconds; java.util.Date wants milliseconds.
  entity.putProperties("timestamp", makeValue(new Date(timestamp * 1000)).build());
  EntityResult.Builder entityResult = EntityResult.newBuilder();
  entityResult.setEntity(entity);
  QueryResultBatch.Builder batch = QueryResultBatch.newBuilder();
  batch.addEntityResults(entityResult);
  timestampResponse.setBatch(batch);
  return timestampResponse.build();
}

/** Builds a per-kind statistics query for the given timestamp and namespace. */
private static Query makeStatKindQuery(String namespace, long timestamp) {
  Query.Builder statQuery = Query.newBuilder();
  // Per-namespace stats live in a differently named kind.
  if (namespace == null) {
    statQuery.addKindBuilder().setName("__Stat_Kind__");
  } else {
    statQuery.addKindBuilder().setName("__Stat_Ns_Kind__");
  }
  // Filter to the kind under test at the given stats timestamp (seconds -> microseconds).
  statQuery.setFilter(
      makeAndFilter(
          makeFilter("kind_name", EQUAL, makeValue(KIND).build()).build(),
          makeFilter("timestamp", EQUAL, makeValue(timestamp * 1000000L).build()).build()));
  return statQuery.build();
}

/** Builds a latest timestamp statistics query. */
private static Query makeLatestTimestampQuery(String namespace) {
  Query.Builder timestampQuery = Query.newBuilder();
  if (namespace == null) {
    timestampQuery.addKindBuilder().setName("__Stat_Total__");
  } else {
    timestampQuery.addKindBuilder().setName("__Stat_Ns_Total__");
  }
  // Newest statistics entry only.
  timestampQuery.addOrder(makeOrder("timestamp", DESCENDING));
  timestampQuery.setLimit(Int32Value.newBuilder().setValue(1));
  return timestampQuery.build();
}

/** Generate dummy query splits.
*/ private List<Query> splitQuery(Query query, int numSplits) { List<Query> queries = new ArrayList<>(); int offsetOfOriginal = query.getOffset(); for (int i = 0; i < numSplits; i++) { Query.Builder q = Query.newBuilder(); q.addKindBuilder().setName(KIND); // Making sub-queries unique (and not equal to the original query) by setting different // offsets. q.setOffset(++offsetOfOriginal); queries.add(q.build()); } return queries; } /** * A WriteBatcher for unit tests, which does no timing-based adjustments (so unit tests have * consistent results). */ static class FakeWriteBatcher implements DatastoreV1.WriteBatcher { @Override public void start() {} @Override public void addRequestLatency( long timeSinceEpochMillis, long latencyMillis, int numMutations) {} @Override public int nextBatchSize(long timeSinceEpochMillis) { return DatastoreV1.DATASTORE_BATCH_UPDATE_ENTITIES_START; } } }
// //////////////////////////////////////////////////////////////////////////////
//
// RMG - Reaction Mechanism Generator
//
// Copyright (c) 2002-2011 Prof. William H. Green (whgreen@mit.edu) and the
// RMG Team (rmg_dev@mit.edu)
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
//
// //////////////////////////////////////////////////////////////////////////////
package jing.rxnSys;

import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import jing.param.VersionInfo;

/**
 * The Logger class encapsulating logging functionality for RMG. During a normal RMG job, logging information will be
 * printed both to the console (stdout and stderr) and to a file. The amount of detail printed in each can be set
 * independently. This class contains only static methods (since we only want one instance of the logger at runtime).
To * use, call the initialize */ public class Logger { // These constants map integers to various levels of detail // Larger integers correspond to more detail public static final int CRITICAL = 0; // Critical (fatal) errors public static final int ERROR = 10; // Regular (non-fatal) errors public static final int WARNING = 20; // Warnings public static final int INFO = 30; // Normal information public static final int VERBOSE = 40; // Detailed information public static final int DEBUG = 50; // Debug information /** The level of detail to use for log messages printed to stdout. */ private static int consoleLevel = INFO; /** The level of detail to use for log messages printed to the file. */ private static int fileLevel = VERBOSE; /** The object representing the log file. */ private static BufferedWriter logFile = null; /** The newline character to use. */ private static String newLine = System.getProperty("line.separator"); /** * Initialize the logger. The log file will be opened; if this is not successful, the program will abort. If called * with no log file path, then "RMG.log" is assumed. */ public static void initialize() { initialize("RMG.log"); } /** * Initialize the logger. The specified log file will be opened; if this is not successful, the program will abort. */ public static void initialize(String logFilePath) { try { // Open the log file (throws IOException if unsuccessful) logFile = new BufferedWriter(new FileWriter(logFilePath)); } catch (IOException e) { // Log information is important, so we better stop if we're not // saving any! 
System.out.println(String.format( "Unable to open file \"%s\" for logging.", logFilePath)); System.exit(0); } // Set stderr to redirect to stdout // At the moment RMG's errors and warnings need to be placed in the // context of when they occur, so this is necessary System.setErr(System.out); // Print an initialization timestamp SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); info("RMG execution initiated at " + sdf.format(Calendar.getInstance().getTime())); info(""); } /** * Finish the logger. The log file will be closed; if this is not successful, the program will abort. */ public static void finish() { // Print a termination timestamp SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); info(""); info("RMG execution terminated at " + sdf.format(Calendar.getInstance().getTime())); try { // Close the log file (throws IOException if unsuccessful) logFile.close(); } catch (IOException e) { throw new RuntimeException(e); } } public static void flush() { // Flush the log file try { logFile.flush(); } catch (IOException e) { System.err .println("Couldn't flush RMG.log file. Did you initialize the Logger?"); throw new RuntimeException(e); } } /** * Set the level of detail to use for log messages printed to the console (stdout and stderr). Generally you should * try to use one of the predefined levels if possible, but this is not required. * * @param level * The level of detail to use for log messages printed to the console */ public static void setConsoleLevel(int level) { consoleLevel = level; } /** * Set the level of detail to use for log messages printed to the log file. Generally you should try to use one of * the predefined levels if possible, but this is not required. * * @param level * The level of detail to use for log messages printed to the log file */ public static void setFileLevel(int level) { fileLevel = level; } /** * Log a message with a specified level of detail. 
Wrapper methods have been provided for cases when level is one of * the predefined values; you generally should use those instead. * * @param level * The level of detail to use for this message * @param message * The message to log */ public static void log(int level, String message) { // Do nothing if neither log target accepts messages at that level of detail if (consoleLevel < level && fileLevel < level) return; if (message == null) return; // Replace all "\n" with the proper platform end-of-line character message = message.replaceAll("\n", newLine); // Prefix the message if an error or warning if (level == CRITICAL) message = "CRITICAL: " + message; else if (level > CRITICAL && level <= ERROR) message = "ERROR: " + message; else if (level > ERROR && level <= WARNING) message = "Warning: " + message; // Print the log message to the console // Errors and warnings are printed to stderr, others to stdout if (consoleLevel >= level) { if (level > WARNING) System.out.println(message); else System.err.println(message); } // Print the log message to the file try { if (logFile != null && fileLevel >= level) logFile.write(message + newLine); } catch (IOException e) { // What should we do here? throw new RuntimeException(e); } // If it was an error message, make sure the log file is up to date. if (level < WARNING && logFile != null) { // i.e. ERROR and CRITICAL flush(); } } /** * Log a message describing a critical (fatal) error. Note that this will *not* cause the program to halt, but * simply defines a unique level for fatal errors. The prefix "CRITICAL: " will automatically be added to the * message. * * @param message * The message to log */ public static void critical(String message) { log(CRITICAL, message); } /** * Log a message describing a general error. The prefix "ERROR: " will automatically be added to the message. 
* * @param message * The message to log */ public static void error(String message) { log(ERROR, message); } /** * Log a message describing a warning. The prefix "Warning: " will automatically be added to the message. * * @param message * The message to log */ public static void warning(String message) { log(WARNING, message); } /** * Log an informational message. This is the method to use for "normal" log messages. * * @param message * The message to log */ public static void info(String message) { log(INFO, message); } /** * Log a detailed informational message. * * @param message * The message to log */ public static void verbose(String message) { log(VERBOSE, message); } /** * Log a message containing debug information. * * @param message * The message to log */ public static void debug(String message) { log(DEBUG, message); } /** * Log the current stack trace to both the console and the log file. */ public static void logStackTrace(Throwable e) { String message = e.toString() + "\n"; StackTraceElement[] stackTrace = e.getStackTrace(); for (int index = 0; index < stackTrace.length; index++) { message += " at " + stackTrace[index].toString() + "\n"; } error(message); } /** * Logs a header containing information about RMG. 
*/ public static void logHeader() { String versionHash = VersionInfo.getVersionHash(); info("######################################################################"); info("# #"); info("# RMG - Reaction Mechanism Generator #"); info("# #"); info("# http://rmg.sourceforge.net/ #"); info("# #"); info("# This java code was compiled by ant at: #"); info(String.format("# %-60s #", VersionInfo.getBuildDate())); info("# The git repository was on the branch: #"); info(String.format("# %-60s #", VersionInfo.getBranchName())); info("# And at the commit with the hash: #"); info(String.format("# %-60s #", VersionInfo.getVersionHash())); info("# #"); info("# For details visit: #"); if (versionHash.startsWith("*")) // error messages should start with a * info("# http://github.com/GreenGroup/RMG-Java/ #"); else { info(String .format("# http://github.com/GreenGroup/RMG-Java/tree/%-17s #", versionHash.substring(0, 6))); info("# To see changes since then visit: #"); info(String .format("# http://github.com/GreenGroup/RMG-Java/compare/%-6s...master #", versionHash.substring(0, 6))); } info("# #"); info("# Copyright (c) 2002-2011 #"); info("# Prof. William H. Green and the RMG Team: #"); info("# Joshua W. Allen, Dr. Robert W. Ashcraft, Dr. Gregory J. Beran, #"); info("# Caleb A. Class, Connie Gao, Dr. C. Franklin Goldsmith, #"); info("# Michael R. Harper, Amrit Jalan, Gregory R. Magoon, #"); info("# Dr. David M. Matheu, Shamel S. Merchant, Jeffrey D. Mo, #"); info("# Sarah Petway, Sumathy Raman, Dr. Sandeep Sharma, #"); info("# Prof. Kevin M. Van Geem, Dr. Jing Song, Dr. John Wen, #"); info("# Dr. Richard H. West, Andrew Wong, Dr. Hsi-Wu Wong, #"); info("# Dr. Paul E. Yelvington, Dr. Joanna Yu #"); info("# #"); info("# The RMGVE graphical user interface to the RMG database #"); info("# was written by John Robotham. #"); info("# #"); info("# This software package incorporates parts of the following #"); info("# software packages: #"); info("# DASSL - Written by Prof. 
Linda Petzold et al #"); info("# http://www.cs.ucsb.edu/~cse/software.html #"); info("# CDK - Written by Prof. Cristoph Steinbeck et al #"); info("# http://cdk.sourceforge.net/ #"); info("# InChI - Available from IUPAC #"); info("# http://www.iupac.org/inchi/ #"); info("# cclib #"); info("# http://cclib.sourceforge.net #"); info("# #"); info("# For more information, including how to properly cite this #"); info("# program, see http://rmg.sourceforge.net/. #"); info("# #"); info("######################################################################"); info(""); } }
/*
 * Jitsi, the OpenSource Java VoIP and Instant Messaging client.
 *
 * Distributable under LGPL license.
 * See terms of license at gnu.org.
 */
package org.jitsi.impl.neomedia.codec.audio.silk;

import java.util.*;

/**
 * Per-state history buffers for the SILK delayed-decision noise-shaping quantizer.
 * Field-name suffixes (Q10, Q12, Q14, Q16) follow the SILK convention for fixed-point
 * formats — presumably Qn means n fractional bits; TODO confirm against the SILK spec.
 *
 * @author Dingxin Xu
 */
class NSQDelDecStruct {
    // Circular buffers of length DECISION_DELAY holding per-sample history.
    int RandState[] = new int[ Define.DECISION_DELAY ];
    int Q_Q10[] = new int[ Define.DECISION_DELAY ];
    int Xq_Q10[] = new int[ Define.DECISION_DELAY ];
    int Pred_Q16[] = new int[ Define.DECISION_DELAY ];
    int Shape_Q10[] = new int[ Define.DECISION_DELAY ];
    int Gain_Q16[] = new int[ Define.DECISION_DELAY ];
    // Short-term prediction state: one subframe of samples plus the LPC buffer length.
    int sLPC_Q14[] = new int[ Define.MAX_FRAME_LENGTH / Define.NB_SUBFR + Define.NSQ_LPC_BUF_LENGTH() ];
    int LF_AR_Q12;
    int Seed;
    int SeedInit;
    int RD_Q10;

    /**
     * Reset every field of this state to zero (the Java replacement for the C code's
     * SKP_memset over the whole struct).
     */
    public void FieldsInit() {
        Arrays.fill(this.RandState, 0);
        Arrays.fill(this.Q_Q10, 0);
        Arrays.fill(this.Xq_Q10, 0);
        Arrays.fill(this.Pred_Q16, 0);
        Arrays.fill(this.Shape_Q10, 0);
        Arrays.fill(this.Gain_Q16, 0);
        Arrays.fill(this.sLPC_Q14, 0);
        this.LF_AR_Q12 = 0;
        this.Seed = 0;
        this.SeedInit = 0;
        this.RD_Q10 = 0;
    }
}

/**
 * A single candidate sample produced by the delayed-decision quantizer; cloneable so
 * candidate states can be duplicated cheaply (all fields are primitives).
 *
 * @author Dingxin Xu
 */
class NSQ_sample_struct implements Cloneable {
    int Q_Q10;
    int RD_Q10;
    int xq_Q14;
    int LF_AR_Q12;
    int sLTP_shp_Q10;
    int LPC_exc_Q16;

    public Object clone() {
        NSQ_sample_struct clone = null;
        try {
            clone = (NSQ_sample_struct) super.clone();
        } catch (CloneNotSupportedException e) {
            // TODO Auto-generated catch block
            // NOTE(review): cannot happen in practice since this class implements
            // Cloneable, so this branch is effectively dead code.
            e.printStackTrace();
        }
        return clone;
    }
}

/**
 *
 * @author Dingxin Xu
 */
public class NSQDelDec {
    static void SKP_Silk_NSQ_del_dec(
        SKP_Silk_encoder_state psEncC, /* I/O Encoder State */
        SKP_Silk_encoder_control psEncCtrlC, /* I Encoder Control */
        SKP_Silk_nsq_state NSQ, /* I/O NSQ state */
        short x[], /* I Prefiltered input signal */
        byte q[], /* O Quantized pulse signal */
        int LSFInterpFactor_Q2, /* I LSF interpolation factor in Q2 */
        short PredCoef_Q12[], /* I Prediction coefs */
        short LTPCoef_Q14[], /* I LT prediction coefs */
        short AR2_Q13[], /* I */
        int HarmShapeGain_Q14[],/* I */
        int Tilt_Q14[], /* I
Spectral tilt */ int LF_shp_Q14[], /* I */ int Gains_Q16[], /* I */ int Lambda_Q10, /* I */ int LTP_scale_Q14 /* I LTP state scaling */ ) { int i, k, lag, start_idx, LSF_interpolation_flag, Winner_ind, subfr; int last_smple_idx, smpl_buf_idx, decisionDelay, subfr_length; short[] A_Q12; short[] B_Q14; short[] AR_shp_Q13; int A_Q12_offset, B_Q14_offset, AR_shp_Q13_offset; short[] pxq; int pxq_offset; int sLTP_Q16[] = new int[ 2 * Define.MAX_FRAME_LENGTH ]; short sLTP[] = new short[ 2 * Define.MAX_FRAME_LENGTH ]; int HarmShapeFIRPacked_Q14; int offset_Q10; int FiltState[] = new int[ Define.MAX_LPC_ORDER ], RDmin_Q10; int x_sc_Q10[] = new int[ Define.MAX_FRAME_LENGTH / Define.NB_SUBFR ]; NSQDelDecStruct psDelDec[] = new NSQDelDecStruct[ Define.DEL_DEC_STATES_MAX ]; /* * psDelDec is an array of references, which has to be created manually. */ { for(int psDelDecIni_i=0; psDelDecIni_i<Define.DEL_DEC_STATES_MAX; psDelDecIni_i++) { psDelDec[psDelDecIni_i] = new NSQDelDecStruct(); } } NSQDelDecStruct psDD; subfr_length = psEncC.frame_length / Define.NB_SUBFR; /* Set unvoiced lag to the previous one, overwrite later for voiced */ lag = NSQ.lagPrev; assert( NSQ.prev_inv_gain_Q16 != 0 ); //TODO: use a local copy of the parameter short x[], which is supposed to be input; short[] x_tmp = x.clone(); int x_tmp_offset = 0; //TODO: use a local copy of the parameter byte[] q, which is supposed to be output; byte[] q_tmp = q.clone(); int q_tmp_offset = 0; /* Initialize delayed decision states */ // SKP_memset( psDelDec, 0, psEncC.nStatesDelayedDecision * sizeof( NSQ_del_dec_struct ) ); //TODO: for(int inx=0; inx<psEncC.nStatesDelayedDecision; inx++) { psDelDec[inx].FieldsInit(); } for( k = 0; k < psEncC.nStatesDelayedDecision; k++ ) { psDD = psDelDec[ k ]; psDD.Seed = ( k + psEncCtrlC.Seed ) & 3; psDD.SeedInit = psDD.Seed; psDD.RD_Q10 = 0; psDD.LF_AR_Q12 = NSQ.sLF_AR_shp_Q12; psDD.Shape_Q10[ 0 ] = NSQ.sLTP_shp_Q10[ psEncC.frame_length - 1 ]; // SKP_memcpy( psDD.sLPC_Q14, NSQ.sLPC_Q14, 
NSQ_LPC_BUF_LENGTH * sizeof( SKP_int32 ) ); System.arraycopy(NSQ.sLPC_Q14, 0, psDD.sLPC_Q14, 0, Define.NSQ_LPC_BUF_LENGTH()); } offset_Q10 = TablesOther.SKP_Silk_Quantization_Offsets_Q10[ psEncCtrlC.sigtype ][ psEncCtrlC.QuantOffsetType ]; smpl_buf_idx = 0; /* index of oldest samples */ decisionDelay = ( Define.DECISION_DELAY < subfr_length ? Define.DECISION_DELAY:subfr_length ); /* For voiced frames limit the decision delay to lower than the pitch lag */ if( psEncCtrlC.sigtype == Define.SIG_TYPE_VOICED ) { for( k = 0; k < Define.NB_SUBFR; k++ ) { decisionDelay = ( decisionDelay < (psEncCtrlC.pitchL[ k ] - Define.LTP_ORDER / 2 - 1) ? decisionDelay:(psEncCtrlC.pitchL[ k ] - Define.LTP_ORDER / 2 - 1)); } } if( LSFInterpFactor_Q2 == ( 1 << 2 ) ) { LSF_interpolation_flag = 0; } else { LSF_interpolation_flag = 1; } /* Setup pointers to start of sub frame */ pxq = NSQ.xq; pxq_offset = psEncC.frame_length; NSQ.sLTP_shp_buf_idx = psEncC.frame_length; NSQ.sLTP_buf_idx = psEncC.frame_length; subfr = 0; for( k = 0; k < Define.NB_SUBFR; k++ ) { A_Q12 = PredCoef_Q12; A_Q12_offset = ( ( k >> 1 ) | ( 1 - LSF_interpolation_flag ) ) * Define.MAX_LPC_ORDER ; B_Q14 = LTPCoef_Q14; B_Q14_offset = k * Define.LTP_ORDER; AR_shp_Q13 = AR2_Q13; AR_shp_Q13_offset = k * Define.SHAPE_LPC_ORDER_MAX; NSQ.rewhite_flag = 0; if( psEncCtrlC.sigtype == Define.SIG_TYPE_VOICED ) { /* Voiced */ lag = psEncCtrlC.pitchL[ k ]; /* Re-whitening */ if( ( k & ( 3 - ( LSF_interpolation_flag << 1 ) ) ) == 0 ) { if( k == 2 ) { /* RESET DELAYED DECISIONS */ /* Find winner */ RDmin_Q10 = psDelDec[ 0 ].RD_Q10; Winner_ind = 0; for( i = 1; i < psEncC.nStatesDelayedDecision; i++ ) { if( psDelDec[ i ].RD_Q10 < RDmin_Q10 ) { RDmin_Q10 = psDelDec[ i ].RD_Q10; Winner_ind = i; } } for( i = 0; i < psEncC.nStatesDelayedDecision; i++ ) { if( i != Winner_ind ) { psDelDec[ i ].RD_Q10 += ( Integer.MAX_VALUE >> 4 ); assert( psDelDec[ i ].RD_Q10 >= 0 ); } } /* Copy final part of signals from winner state to output and long-term 
filter states */ psDD = psDelDec[ Winner_ind ]; last_smple_idx = smpl_buf_idx + decisionDelay; for( i = 0; i < decisionDelay; i++ ) { last_smple_idx = ( last_smple_idx - 1 ) & Define.DECISION_DELAY_MASK; // q[ i - decisionDelay ] = ( SKP_int )SKP_RSHIFT( psDD.Q_Q10[ last_smple_idx ], 10 ); q_tmp[ q_tmp_offset + i - decisionDelay ] = (byte) ( psDD.Q_Q10[ last_smple_idx ] >> 10 ); // pxq[ i - decisionDelay ] = ( SKP_int16 )SKP_SAT16( SKP_RSHIFT_ROUND( // SKP_SMULWW( psDD.Xq_Q10[ last_smple_idx ], // psDD.Gain_Q16[ last_smple_idx ] ), 10 ) ); pxq[ pxq_offset + i - decisionDelay ] = (short) SigProcFIX.SKP_SAT16( SigProcFIX.SKP_RSHIFT_ROUND( Macros.SKP_SMULWW( psDD.Xq_Q10[ last_smple_idx ], psDD.Gain_Q16[ last_smple_idx ] ), 10 ) ); NSQ.sLTP_shp_Q10[ NSQ.sLTP_shp_buf_idx - decisionDelay + i ] = psDD.Shape_Q10[ last_smple_idx ]; } subfr = 0; } /* Rewhiten with new A coefs */ start_idx = psEncC.frame_length - lag - psEncC.predictLPCOrder - Define.LTP_ORDER / 2; start_idx = SigProcFIX.SKP_LIMIT_int( start_idx, 0, psEncC.frame_length - psEncC.predictLPCOrder ); // SKP_memset( FiltState, 0, psEncC.predictLPCOrder * sizeof( SKP_int32 ) ); Arrays.fill(FiltState, 0, psEncC.predictLPCOrder, 0); MA.SKP_Silk_MA_Prediction( NSQ.xq, start_idx + k * psEncC.subfr_length, A_Q12, A_Q12_offset, FiltState, sLTP, start_idx, psEncC.frame_length - start_idx, psEncC.predictLPCOrder ); NSQ.sLTP_buf_idx = psEncC.frame_length; NSQ.rewhite_flag = 1; } } /* Noise shape parameters */ assert( HarmShapeGain_Q14[ k ] >= 0 ); HarmShapeFIRPacked_Q14 = ( HarmShapeGain_Q14[ k ] >> 2 ); HarmShapeFIRPacked_Q14 |= ( ( HarmShapeGain_Q14[ k ] >> 1 ) << 16 ); SKP_Silk_nsq_del_dec_scale_states( NSQ, psDelDec, x_tmp, x_tmp_offset, x_sc_Q10, subfr_length, sLTP, sLTP_Q16, k, psEncC.nStatesDelayedDecision, smpl_buf_idx, LTP_scale_Q14, Gains_Q16, psEncCtrlC.pitchL ); int smpl_buf_idx_ptr[] = new int[1]; smpl_buf_idx_ptr[0] = smpl_buf_idx; SKP_Silk_noise_shape_quantizer_del_dec( NSQ, psDelDec, psEncCtrlC.sigtype, 
x_sc_Q10, q_tmp, q_tmp_offset, pxq, pxq_offset, sLTP_Q16, A_Q12, A_Q12_offset, B_Q14, B_Q14_offset, AR_shp_Q13, AR_shp_Q13_offset, lag, HarmShapeFIRPacked_Q14, Tilt_Q14[ k ], LF_shp_Q14[ k ], Gains_Q16[ k ], Lambda_Q10, offset_Q10, psEncC.subfr_length, subfr++, psEncC.shapingLPCOrder, psEncC.predictLPCOrder, psEncC.nStatesDelayedDecision, smpl_buf_idx_ptr, decisionDelay ); smpl_buf_idx = smpl_buf_idx_ptr[0]; x_tmp_offset += psEncC.subfr_length; q_tmp_offset += psEncC.subfr_length; pxq_offset += psEncC.subfr_length; } /* Find winner */ RDmin_Q10 = psDelDec[ 0 ].RD_Q10; Winner_ind = 0; for( k = 1; k < psEncC.nStatesDelayedDecision; k++ ) { if( psDelDec[ k ].RD_Q10 < RDmin_Q10 ) { RDmin_Q10 = psDelDec[ k ].RD_Q10; Winner_ind = k; } } /* Copy final part of signals from winner state to output and long-term filter states */ psDD = psDelDec[ Winner_ind ]; psEncCtrlC.Seed = psDD.SeedInit; last_smple_idx = smpl_buf_idx + decisionDelay; for( i = 0; i < decisionDelay; i++ ) { last_smple_idx = ( last_smple_idx - 1 ) & Define.DECISION_DELAY_MASK; q_tmp[q_tmp_offset + i - decisionDelay] = ( byte )( psDD.Q_Q10[ last_smple_idx ] >> 10 ); pxq[ pxq_offset + i - decisionDelay ] = ( short )SigProcFIX.SKP_SAT16( SigProcFIX.SKP_RSHIFT_ROUND( Macros.SKP_SMULWW( psDD.Xq_Q10[ last_smple_idx ], psDD.Gain_Q16[ last_smple_idx ] ), 10 ) ); NSQ.sLTP_shp_Q10[ NSQ.sLTP_shp_buf_idx - decisionDelay + i ] = psDD.Shape_Q10[ last_smple_idx ]; sLTP_Q16[ NSQ.sLTP_buf_idx - decisionDelay + i ] = psDD.Pred_Q16[ last_smple_idx ]; } // SKP_memcpy( NSQ.sLPC_Q14, &psDD.sLPC_Q14[ psEncC.subfr_length ], NSQ_LPC_BUF_LENGTH * sizeof( SKP_int32 ) ); System.arraycopy(psDD.sLPC_Q14, psEncC.subfr_length, NSQ.sLPC_Q14, 0, Define.NSQ_LPC_BUF_LENGTH()); /* Update states */ NSQ.sLF_AR_shp_Q12 = psDD.LF_AR_Q12; NSQ.prev_inv_gain_Q16 = NSQ.prev_inv_gain_Q16; NSQ.lagPrev = psEncCtrlC.pitchL[ Define.NB_SUBFR - 1 ]; /* Save quantized speech and noise shaping signals */ // SKP_memcpy( NSQ.xq, &NSQ.xq[ psEncC.frame_length ], 
psEncC.frame_length * sizeof( SKP_int16 ) ); // SKP_memcpy( NSQ.sLTP_shp_Q10, &NSQ.sLTP_shp_Q10[ psEncC.frame_length ], psEncC.frame_length * sizeof( SKP_int32 ) ); System.arraycopy(NSQ.xq, psEncC.frame_length, NSQ.xq, 0, psEncC.frame_length); System.arraycopy(NSQ.sLTP_shp_Q10, psEncC.frame_length, NSQ.sLTP_shp_Q10, 0, psEncC.frame_length); //TODO: copy back the q_tmp to the output parameter q; System.arraycopy(q_tmp, 0, q, 0, q.length); } /** * Noise shape quantizer for one subframe. * @param NSQ NSQ state * @param psDelDec Delayed decision states * @param sigtype Signal type * @param x_Q10 * @param q * @param q_offset * @param xq * @param xq_offset * @param sLTP_Q16 LTP filter state * @param a_Q12 Short term prediction coefs * @param a_Q12_offset * @param b_Q14 Long term prediction coefs * @param b_Q14_offset * @param AR_shp_Q13 Noise shaping coefs * @param AR_shp_Q13_offset * @param lag Pitch lag * @param HarmShapeFIRPacked_Q14 * @param Tilt_Q14 Spectral tilt * @param LF_shp_Q14 * @param Gain_Q16 * @param Lambda_Q10 * @param offset_Q10 * @param length Input length * @param subfr Subframe number * @param shapingLPCOrder Shaping LPC filter order * @param predictLPCOrder Prediction LPC filter order * @param nStatesDelayedDecision Number of states in decision tree * @param smpl_buf_idx Index to newest samples in buffers * @param decisionDelay */ static void SKP_Silk_noise_shape_quantizer_del_dec( SKP_Silk_nsq_state NSQ, /* I/O NSQ state */ NSQDelDecStruct psDelDec[], /* I/O Delayed decision states */ int sigtype, /* I Signal type */ final int x_Q10[], /* I */ byte q[], /* O */ int q_offset, short xq[], /* O */ int xq_offset, int sLTP_Q16[], /* I/O LTP filter state */ final short a_Q12[], /* I Short term prediction coefs */ int a_Q12_offset, final short b_Q14[], /* I Long term prediction coefs */ int b_Q14_offset, final short AR_shp_Q13[], /* I Noise shaping coefs */ int AR_shp_Q13_offset, int lag, /* I Pitch lag */ int HarmShapeFIRPacked_Q14, /* I */ int Tilt_Q14, 
/* I Spectral tilt */ int LF_shp_Q14, /* I */ int Gain_Q16, /* I */ int Lambda_Q10, /* I */ int offset_Q10, /* I */ int length, /* I Input length */ int subfr, /* I Subframe number */ int shapingLPCOrder, /* I Shaping LPC filter order */ int predictLPCOrder, /* I Prediction LPC filter order */ int nStatesDelayedDecision, /* I Number of states in decision tree */ int []smpl_buf_idx, /* I Index to newest samples in buffers */ int decisionDelay /* I */ ) { int i, j, k, Winner_ind, RDmin_ind, RDmax_ind, last_smple_idx; int Winner_rand_state; int LTP_pred_Q14, LPC_pred_Q10, n_AR_Q10, n_LTP_Q14; int n_LF_Q10; int r_Q10, rr_Q20, rd1_Q10, rd2_Q10, RDmin_Q10, RDmax_Q10; int q1_Q10, q2_Q10; int dither; int exc_Q10, LPC_exc_Q10, xq_Q10; int tmp, sLF_AR_shp_Q10; int pred_lag_ptr[], shp_lag_ptr[]; int pred_lag_ptr_offset, shp_lag_ptr_offset; int []psLPC_Q14; int psLPC_Q14_offset; NSQ_sample_struct psSampleState[][] = new NSQ_sample_struct[ Define.DEL_DEC_STATES_MAX ][ 2 ]; /* * psSampleState is an two-dimension array of reference, which should be created manually. 
*/ { for(int Ini_i=0; Ini_i<Define.DEL_DEC_STATES_MAX; Ini_i++) { for(int Ini_j=0; Ini_j<2; Ini_j++) { psSampleState[Ini_i][Ini_j] = new NSQ_sample_struct(); } } } NSQDelDecStruct psDD; NSQ_sample_struct[] psSS; shp_lag_ptr = NSQ.sLTP_shp_Q10; shp_lag_ptr_offset = NSQ.sLTP_shp_buf_idx - lag + Define.HARM_SHAPE_FIR_TAPS / 2; pred_lag_ptr = sLTP_Q16; pred_lag_ptr_offset = NSQ.sLTP_buf_idx - lag + Define.LTP_ORDER / 2; for( i = 0; i < length; i++ ) { /* Perform common calculations used in all states */ /* Long-term prediction */ if( sigtype == Define.SIG_TYPE_VOICED ) { /* Unrolled loop */ LTP_pred_Q14 = Macros.SKP_SMULWB( pred_lag_ptr[ pred_lag_ptr_offset+0 ], b_Q14[ b_Q14_offset+0 ] ); LTP_pred_Q14 = Macros.SKP_SMLAWB( LTP_pred_Q14, pred_lag_ptr[ pred_lag_ptr_offset-1 ], b_Q14[ b_Q14_offset+1 ] ); LTP_pred_Q14 = Macros.SKP_SMLAWB( LTP_pred_Q14, pred_lag_ptr[ pred_lag_ptr_offset-2 ], b_Q14[ b_Q14_offset+2 ] ); LTP_pred_Q14 = Macros.SKP_SMLAWB( LTP_pred_Q14, pred_lag_ptr[ pred_lag_ptr_offset-3 ], b_Q14[ b_Q14_offset+3 ] ); LTP_pred_Q14 = Macros.SKP_SMLAWB( LTP_pred_Q14, pred_lag_ptr[ pred_lag_ptr_offset-4 ], b_Q14[ b_Q14_offset+4 ] ); pred_lag_ptr_offset++; } else { LTP_pred_Q14 = 0; } /* Long-term shaping */ if( lag > 0 ) { /* Symmetric, packed FIR coefficients */ n_LTP_Q14 = Macros.SKP_SMULWB( ( shp_lag_ptr[ shp_lag_ptr_offset+0 ] + shp_lag_ptr[ shp_lag_ptr_offset-2 ] ), HarmShapeFIRPacked_Q14 ); n_LTP_Q14 = Macros.SKP_SMLAWT( n_LTP_Q14, shp_lag_ptr[ shp_lag_ptr_offset-1 ], HarmShapeFIRPacked_Q14 ); // n_LTP_Q14 = SKP_LSHIFT( n_LTP_Q14, 6 ); n_LTP_Q14 = ( n_LTP_Q14 << 6 ); shp_lag_ptr_offset++; } else { n_LTP_Q14 = 0; } for( k = 0; k < nStatesDelayedDecision; k++ ) { /* Delayed decision state */ psDD = psDelDec[ k ]; /* Sample state */ psSS = psSampleState[ k ]; /* Generate dither */ psDD.Seed = SigProcFIX.SKP_RAND( psDD.Seed ); /* dither = rand_seed < 0 ? 
0xFFFFFFFF : 0; */ // dither = SKP_RSHIFT( psDD.Seed, 31 ); dither = ( psDD.Seed >> 31 ); /* Pointer used in short term prediction and shaping */ psLPC_Q14 = psDD.sLPC_Q14; psLPC_Q14_offset = Define.NSQ_LPC_BUF_LENGTH() - 1 + i; /* Short-term prediction */ assert( predictLPCOrder >= 10 ); /* check that unrolling works */ assert( ( predictLPCOrder & 1 ) == 0 ); /* check that order is even */ // SKP_assert( ( (SKP_int64)a_Q12 & 3 ) == 0 ); /* check that array starts at 4-byte aligned address */ /* Partially unrolled */ LPC_pred_Q10 = Macros.SKP_SMULWB( psLPC_Q14[ psLPC_Q14_offset+0 ], a_Q12[ a_Q12_offset+0 ] ); LPC_pred_Q10 = Macros.SKP_SMLAWB( LPC_pred_Q10, psLPC_Q14[ psLPC_Q14_offset-1 ], a_Q12[ a_Q12_offset+1 ] ); LPC_pred_Q10 = Macros.SKP_SMLAWB( LPC_pred_Q10, psLPC_Q14[ psLPC_Q14_offset-2 ], a_Q12[ a_Q12_offset+2 ] ); LPC_pred_Q10 = Macros.SKP_SMLAWB( LPC_pred_Q10, psLPC_Q14[ psLPC_Q14_offset-3 ], a_Q12[ a_Q12_offset+3 ] ); LPC_pred_Q10 = Macros.SKP_SMLAWB( LPC_pred_Q10, psLPC_Q14[ psLPC_Q14_offset-4 ], a_Q12[ a_Q12_offset+4 ] ); LPC_pred_Q10 = Macros.SKP_SMLAWB( LPC_pred_Q10, psLPC_Q14[ psLPC_Q14_offset-5 ], a_Q12[ a_Q12_offset+5 ] ); LPC_pred_Q10 = Macros.SKP_SMLAWB( LPC_pred_Q10, psLPC_Q14[ psLPC_Q14_offset-6 ], a_Q12[ a_Q12_offset+6 ] ); LPC_pred_Q10 = Macros.SKP_SMLAWB( LPC_pred_Q10, psLPC_Q14[ psLPC_Q14_offset-7 ], a_Q12[ a_Q12_offset+7 ] ); LPC_pred_Q10 = Macros.SKP_SMLAWB( LPC_pred_Q10, psLPC_Q14[ psLPC_Q14_offset-8 ], a_Q12[ a_Q12_offset+8 ] ); LPC_pred_Q10 = Macros.SKP_SMLAWB( LPC_pred_Q10, psLPC_Q14[ psLPC_Q14_offset-9 ], a_Q12[ a_Q12_offset+9 ] ); for( j = 10; j < predictLPCOrder; j ++ ) { LPC_pred_Q10 = Macros.SKP_SMLAWB( LPC_pred_Q10, psLPC_Q14[ psLPC_Q14_offset-j ], a_Q12[ a_Q12_offset+j]); } /* Noise shape feedback */ assert( ( shapingLPCOrder & 1 ) == 0 ); /* check that order is even */ // assert( ( (SKP_int64)AR_shp_Q13 & 3 ) == 0 ); /* check that array starts at 4-byte aligned address */ assert( shapingLPCOrder >= 12 ); /* check that unrolling 
works */ /* Partially unrolled */ n_AR_Q10 = Macros.SKP_SMULWB( psLPC_Q14[ psLPC_Q14_offset+0 ], AR_shp_Q13[ AR_shp_Q13_offset+0 ] ); n_AR_Q10 = Macros.SKP_SMLAWB( n_AR_Q10, psLPC_Q14[ psLPC_Q14_offset-1 ], AR_shp_Q13[ AR_shp_Q13_offset+1 ] ); n_AR_Q10 = Macros.SKP_SMLAWB( n_AR_Q10, psLPC_Q14[ psLPC_Q14_offset-2 ], AR_shp_Q13[ AR_shp_Q13_offset+2 ] ); n_AR_Q10 = Macros.SKP_SMLAWB( n_AR_Q10, psLPC_Q14[ psLPC_Q14_offset-3 ], AR_shp_Q13[ AR_shp_Q13_offset+3 ] ); n_AR_Q10 = Macros.SKP_SMLAWB( n_AR_Q10, psLPC_Q14[ psLPC_Q14_offset-4 ], AR_shp_Q13[ AR_shp_Q13_offset+4 ] ); n_AR_Q10 = Macros.SKP_SMLAWB( n_AR_Q10, psLPC_Q14[ psLPC_Q14_offset-5 ], AR_shp_Q13[ AR_shp_Q13_offset+5 ] ); n_AR_Q10 = Macros.SKP_SMLAWB( n_AR_Q10, psLPC_Q14[ psLPC_Q14_offset-6 ], AR_shp_Q13[ AR_shp_Q13_offset+6 ] ); n_AR_Q10 = Macros.SKP_SMLAWB( n_AR_Q10, psLPC_Q14[ psLPC_Q14_offset-7 ], AR_shp_Q13[ AR_shp_Q13_offset+7 ] ); n_AR_Q10 = Macros.SKP_SMLAWB( n_AR_Q10, psLPC_Q14[ psLPC_Q14_offset-8 ], AR_shp_Q13[ AR_shp_Q13_offset+8 ] ); n_AR_Q10 = Macros.SKP_SMLAWB( n_AR_Q10, psLPC_Q14[ psLPC_Q14_offset-9 ], AR_shp_Q13[ AR_shp_Q13_offset+9 ] ); n_AR_Q10 = Macros.SKP_SMLAWB( n_AR_Q10, psLPC_Q14[ psLPC_Q14_offset-10 ],AR_shp_Q13[ AR_shp_Q13_offset+10 ] ); n_AR_Q10 = Macros.SKP_SMLAWB( n_AR_Q10, psLPC_Q14[ psLPC_Q14_offset-11 ],AR_shp_Q13[ AR_shp_Q13_offset+11 ] ); for( j = 12; j < shapingLPCOrder; j ++ ) { n_AR_Q10 = Macros.SKP_SMLAWB( n_AR_Q10, psLPC_Q14[ psLPC_Q14_offset-j ], AR_shp_Q13[ AR_shp_Q13_offset+j ] ); } // n_AR_Q10 = SKP_RSHIFT( n_AR_Q10, 1 ); /* Q11 -> Q10 */ n_AR_Q10 = ( n_AR_Q10 >> 1 ); /* Q11 -> Q10 */ n_AR_Q10 = Macros.SKP_SMLAWB( n_AR_Q10, psDD.LF_AR_Q12, Tilt_Q14 ); n_LF_Q10 = ( Macros.SKP_SMULWB( psDD.Shape_Q10[ smpl_buf_idx[0] ], LF_shp_Q14 ) << 2 ); n_LF_Q10 = Macros.SKP_SMLAWT( n_LF_Q10, psDD.LF_AR_Q12, LF_shp_Q14 ); /* Input minus prediction plus noise feedback */ /* r = x[ i ] - LTP_pred - LPC_pred + n_AR + n_Tilt + n_LF + n_LTP */ tmp = ( LTP_pred_Q14 - n_LTP_Q14 ); /* Add Q14 
stuff */ tmp = SigProcFIX.SKP_RSHIFT_ROUND( tmp, 4 ); /* round to Q10 */ tmp = ( tmp + LPC_pred_Q10 ); /* add Q10 stuff */ tmp = ( tmp - n_AR_Q10 ); /* subtract Q10 stuff */ tmp = ( tmp - n_LF_Q10 ); /* subtract Q10 stuff */ r_Q10 = ( x_Q10[ i ] - tmp ); /* residual error Q10 */ /* Flip sign depending on dither */ r_Q10 = ( r_Q10 ^ dither ) - dither; r_Q10 = ( r_Q10 - offset_Q10 ); r_Q10 = SigProcFIX.SKP_LIMIT_32( r_Q10, -64 << 10, 64 << 10 ); /* Find two quantization level candidates and measure their rate-distortion */ if( r_Q10 < -1536 ) { q1_Q10 = ( SigProcFIX.SKP_RSHIFT_ROUND( r_Q10, 10 ) << 10 ); r_Q10 = ( r_Q10 - q1_Q10 ); rd1_Q10 = ( Macros.SKP_SMLABB( ( -( q1_Q10 + offset_Q10 ) * Lambda_Q10 ), r_Q10, r_Q10 ) >> 10 ); rd2_Q10 = ( rd1_Q10 + 1024 ); rd2_Q10 = ( rd2_Q10 - SigProcFIX.SKP_ADD_LSHIFT32( Lambda_Q10, r_Q10, 1 ) ); q2_Q10 = ( q1_Q10 + 1024 ); } else if( r_Q10 > 512 ) { q1_Q10 = ( SigProcFIX.SKP_RSHIFT_ROUND( r_Q10, 10 ) << 10 ); r_Q10 = ( r_Q10 - q1_Q10 ); rd1_Q10 = ( Macros.SKP_SMLABB( ( ( q1_Q10 + offset_Q10 ) * Lambda_Q10 ), r_Q10, r_Q10 ) >> 10 ); rd2_Q10 = ( rd1_Q10 + 1024 ); rd2_Q10 = ( rd2_Q10 - SigProcFIX.SKP_SUB_LSHIFT32( Lambda_Q10, r_Q10, 1 ) ); q2_Q10 = ( q1_Q10 - 1024 ); } else { /* r_Q10 >= -1536 && q1_Q10 <= 512 */ rr_Q20 = Macros.SKP_SMULBB( offset_Q10, Lambda_Q10 ); rd2_Q10 = ( Macros.SKP_SMLABB( rr_Q20, r_Q10, r_Q10 ) >> 10 ); rd1_Q10 = ( rd2_Q10 + 1024 ); rd1_Q10 = ( rd1_Q10 + SigProcFIX.SKP_SUB_RSHIFT32( SigProcFIX.SKP_ADD_LSHIFT32( Lambda_Q10, r_Q10, 1 ), rr_Q20, 9 ) ); q1_Q10 = -1024; q2_Q10 = 0; } if( rd1_Q10 < rd2_Q10 ) { psSS[ 0 ].RD_Q10 = ( psDD.RD_Q10 + rd1_Q10 ); psSS[ 1 ].RD_Q10 = ( psDD.RD_Q10 + rd2_Q10 ); psSS[ 0 ].Q_Q10 = q1_Q10; psSS[ 1 ].Q_Q10 = q2_Q10; } else { psSS[ 0 ].RD_Q10 = ( psDD.RD_Q10 + rd2_Q10 ); psSS[ 1 ].RD_Q10 = ( psDD.RD_Q10 + rd1_Q10 ); psSS[ 0 ].Q_Q10 = q2_Q10; psSS[ 1 ].Q_Q10 = q1_Q10; } /* Update states for best quantization */ /* Quantized excitation */ exc_Q10 = ( offset_Q10 + psSS[ 0 ].Q_Q10 ); 
exc_Q10 = ( exc_Q10 ^ dither ) - dither; /* Add predictions */ LPC_exc_Q10 = exc_Q10 + SigProcFIX.SKP_RSHIFT_ROUND( LTP_pred_Q14, 4 ); xq_Q10 = ( LPC_exc_Q10 + LPC_pred_Q10 ); /* Update states */ sLF_AR_shp_Q10 = ( xq_Q10 - n_AR_Q10 ); psSS[ 0 ].sLTP_shp_Q10 = ( sLF_AR_shp_Q10 - n_LF_Q10 ); psSS[ 0 ].LF_AR_Q12 = ( sLF_AR_shp_Q10 << 2 ); psSS[ 0 ].xq_Q14 = ( xq_Q10 << 4 ); psSS[ 0 ].LPC_exc_Q16 = ( LPC_exc_Q10 << 6 ); /* Update states for second best quantization */ /* Quantized excitation */ exc_Q10 = ( offset_Q10 + psSS[ 1 ].Q_Q10 ); exc_Q10 = ( exc_Q10 ^ dither ) - dither; /* Add predictions */ LPC_exc_Q10 = exc_Q10 + SigProcFIX.SKP_RSHIFT_ROUND( LTP_pred_Q14, 4 ); xq_Q10 = ( LPC_exc_Q10 + LPC_pred_Q10 ); /* Update states */ sLF_AR_shp_Q10 = ( xq_Q10 - n_AR_Q10 ); psSS[ 1 ].sLTP_shp_Q10 = ( sLF_AR_shp_Q10 - n_LF_Q10 ); psSS[ 1 ].LF_AR_Q12 = ( sLF_AR_shp_Q10 << 2 ); psSS[ 1 ].xq_Q14 = ( xq_Q10 << 4 ); psSS[ 1 ].LPC_exc_Q16 = ( LPC_exc_Q10 << 6 ); } smpl_buf_idx[0] = ( smpl_buf_idx[0] - 1 ) & Define.DECISION_DELAY_MASK; /* Index to newest samples */ last_smple_idx = ( smpl_buf_idx[0] + decisionDelay ) & Define.DECISION_DELAY_MASK; /* Index to decisionDelay old samples */ /* Find winner */ RDmin_Q10 = psSampleState[ 0 ][ 0 ].RD_Q10; Winner_ind = 0; for( k = 1; k < nStatesDelayedDecision; k++ ) { if( psSampleState[ k ][ 0 ].RD_Q10 < RDmin_Q10 ) { RDmin_Q10 = psSampleState[ k ][ 0 ].RD_Q10; Winner_ind = k; } } /* Increase RD values of expired states */ Winner_rand_state = psDelDec[ Winner_ind ].RandState[ last_smple_idx ]; for( k = 0; k < nStatesDelayedDecision; k++ ) { if( psDelDec[ k ].RandState[ last_smple_idx ] != Winner_rand_state ) { psSampleState[ k ][ 0 ].RD_Q10 = ( psSampleState[ k ][ 0 ].RD_Q10 + ( Integer.MAX_VALUE >> 4 ) ); psSampleState[ k ][ 1 ].RD_Q10 = ( psSampleState[ k ][ 1 ].RD_Q10 + ( Integer.MAX_VALUE >> 4 ) ); assert( psSampleState[ k ][ 0 ].RD_Q10 >= 0 ); } } /* Find worst in first set and best in second set */ RDmax_Q10 = psSampleState[ 0 ][ 0 
].RD_Q10; RDmin_Q10 = psSampleState[ 0 ][ 1 ].RD_Q10; RDmax_ind = 0; RDmin_ind = 0; for( k = 1; k < nStatesDelayedDecision; k++ ) { /* find worst in first set */ if( psSampleState[ k ][ 0 ].RD_Q10 > RDmax_Q10 ) { RDmax_Q10 = psSampleState[ k ][ 0 ].RD_Q10; RDmax_ind = k; } /* find best in second set */ if( psSampleState[ k ][ 1 ].RD_Q10 < RDmin_Q10 ) { RDmin_Q10 = psSampleState[ k ][ 1 ].RD_Q10; RDmin_ind = k; } } /* Replace a state if best from second set outperforms worst in first set */ if( RDmin_Q10 < RDmax_Q10 ) { // SKP_Silk_copy_del_dec_state( &psDelDec[ RDmax_ind ], &psDelDec[ RDmin_ind ], i ); SKP_Silk_copy_del_dec_state( psDelDec[ RDmax_ind ], psDelDec[ RDmin_ind ], i ); //TODO:how to copy a struct ??? // SKP_memcpy( &psSampleState[ RDmax_ind ][ 0 ], &psSampleState[ RDmin_ind ][ 1 ], sizeof( NSQ_sample_struct ) ); psSampleState[ RDmax_ind ][ 0 ] = (NSQ_sample_struct) psSampleState[ RDmin_ind ][ 1 ].clone(); } /* Write samples from winner to output and long-term filter states */ psDD = psDelDec[ Winner_ind ]; if( subfr > 0 || i >= decisionDelay ) { q[ q_offset + i - decisionDelay ] = ( byte )( psDD.Q_Q10[ last_smple_idx ] >> 10 ); xq[ xq_offset + i - decisionDelay ] = ( short )SigProcFIX.SKP_SAT16( SigProcFIX.SKP_RSHIFT_ROUND( Macros.SKP_SMULWW( psDD.Xq_Q10[ last_smple_idx ], psDD.Gain_Q16[ last_smple_idx ] ), 10 ) ); NSQ.sLTP_shp_Q10[ NSQ.sLTP_shp_buf_idx - decisionDelay ] = psDD.Shape_Q10[ last_smple_idx ]; sLTP_Q16[ NSQ.sLTP_buf_idx - decisionDelay ] = psDD.Pred_Q16[ last_smple_idx ]; } NSQ.sLTP_shp_buf_idx++; NSQ.sLTP_buf_idx++; /* Update states */ for( k = 0; k < nStatesDelayedDecision; k++ ) { psDD = psDelDec[ k ]; //TODO: psSS is an array of reference rather than a reference. 
// psSS = &psSampleState[ k ][ 0 ];
psSS = psSampleState[ k ];
/* Commit the winning (index 0) candidate of this state into the delayed-decision history. */
psDD.LF_AR_Q12 = psSS[0].LF_AR_Q12;
psDD.sLPC_Q14[ Define.NSQ_LPC_BUF_LENGTH() + i ] = psSS[0].xq_Q14;
psDD.Xq_Q10[ smpl_buf_idx[0] ] = ( psSS[0].xq_Q14 >> 4 );
psDD.Q_Q10[ smpl_buf_idx[0] ] = psSS[0].Q_Q10;
psDD.Pred_Q16[ smpl_buf_idx[0] ] = psSS[0].LPC_exc_Q16;
psDD.Shape_Q10[ smpl_buf_idx[0] ] = psSS[0].sLTP_shp_Q10;
psDD.Seed = SigProcFIX.SKP_ADD_RSHIFT32( psDD.Seed, psSS[0].Q_Q10, 10 );
psDD.RandState[ smpl_buf_idx[0] ] = psDD.Seed;
psDD.RD_Q10 = psSS[0].RD_Q10;
psDD.Gain_Q16[ smpl_buf_idx[0] ] = Gain_Q16;
} }
/* Update LPC states */
/* Shift each state's LPC history window down by 'length' samples for the next subframe. */
for( k = 0; k < nStatesDelayedDecision; k++ ) {
    psDD = psDelDec[ k ];
    System.arraycopy(psDD.sLPC_Q14, length, psDD.sLPC_Q14, 0, Define.NSQ_LPC_BUF_LENGTH());
}
}

/**
 * Rescales the NSQ and delayed-decision states so they match the inverse-gain
 * scaling of the current subframe's input, then scales the input itself into Q10.
 *
 * @param NSQ NSQ state
 * @param psDelDec Delayed decision states
 * @param x Input in Q0
 * @param x_offset offset of valid data.
 * @param x_sc_Q10 Input scaled with 1/Gain in Q10
 * @param length Length of input
 * @param sLTP Re-whitened LTP state in Q0
 * @param sLTP_Q16 LTP state matching scaled input
 * @param subfr Subframe number
 * @param nStatesDelayedDecision Number of del dec states
 * @param smpl_buf_idx Index to newest samples in buffers
 * @param LTP_scale_Q14 LTP state scaling
 * @param Gains_Q16 subframe gains in Q16
 * @param pitchL Pitch lag
 */
static void SKP_Silk_nsq_del_dec_scale_states(
    SKP_Silk_nsq_state NSQ,         /* I/O NSQ state */
    NSQDelDecStruct psDelDec[],     /* I/O Delayed decision states */
    final short x[],                /* I Input in Q0 */
    int x_offset,
    int x_sc_Q10[],                 /* O Input scaled with 1/Gain in Q10 */
    int length,                     /* I Length of input */
    short sLTP[],                   /* I Re-whitened LTP state in Q0 */
    int sLTP_Q16[],                 /* O LTP state matching scaled input */
    int subfr,                      /* I Subframe number */
    int nStatesDelayedDecision,     /* I Number of del dec states */
    int smpl_buf_idx,               /* I Index to newest samples in buffers */
    final int LTP_scale_Q14,        /* I LTP state scaling */
    final int Gains_Q16[],          /* I */
    final int pitchL[]              /* I Pitch lag */
)
{
    int i, k, scale_length, lag;
    int inv_gain_Q16, gain_adj_Q16, inv_gain_Q32;
    NSQDelDecStruct psDD;

    /* inv_gain = 1/gain in Q16, capped so it fits when cast to short below. */
    inv_gain_Q16 = ( Integer.MAX_VALUE / ( Gains_Q16[ subfr ] >> 1 ) );
    inv_gain_Q16 = ( inv_gain_Q16 < Short.MAX_VALUE ? inv_gain_Q16:Short.MAX_VALUE);
    lag = pitchL[ subfr ];

    /* After rewhitening the LTP state is un-scaled. So scale with inv_gain_Q16 */
    if( NSQ.rewhite_flag != 0) {
        inv_gain_Q32 = ( inv_gain_Q16 << 16 );
        if( subfr == 0 ) {
            /* Do LTP downscaling */
            inv_gain_Q32 = ( Macros.SKP_SMULWB( inv_gain_Q32, LTP_scale_Q14 ) << 2 );
        }
        for( i = NSQ.sLTP_buf_idx - lag - Define.LTP_ORDER / 2; i < NSQ.sLTP_buf_idx; i++ ) {
            assert( i < Define.MAX_FRAME_LENGTH );
            sLTP_Q16[ i ] = Macros.SKP_SMULWB( inv_gain_Q32, sLTP[ i ] );
        }
    }

    /* Adjust for changing gain */
    if( inv_gain_Q16 != NSQ.prev_inv_gain_Q16 ) {
        /* gain_adj = inv_gain / prev_inv_gain in Q16 — presumably what SKP_DIV32_varQ
           computes here; TODO confirm against Inlines.SKP_DIV32_varQ. */
        gain_adj_Q16 = Inlines.SKP_DIV32_varQ( inv_gain_Q16, NSQ.prev_inv_gain_Q16, 16 );
        for( k = 0; k < nStatesDelayedDecision; k++ ) {
            psDD = psDelDec[ k ];

            /* Scale scalar states */
            psDD.LF_AR_Q12 = Macros.SKP_SMULWW( gain_adj_Q16, psDD.LF_AR_Q12 );

            /* scale short term state */
            for( i = 0; i < Define.NSQ_LPC_BUF_LENGTH(); i++ ) {
                psDD.sLPC_Q14[ Define.NSQ_LPC_BUF_LENGTH() - i - 1 ] = Macros.SKP_SMULWW( gain_adj_Q16, psDD.sLPC_Q14[ Define.NSQ_LPC_BUF_LENGTH() - i - 1 ] );
            }
            /* Scale the per-state delayed-decision buffers. */
            for( i = 0; i < Define.DECISION_DELAY; i++ ) {
                psDD.Pred_Q16[ i ]  = Macros.SKP_SMULWW( gain_adj_Q16, psDD.Pred_Q16[ i ] );
                psDD.Shape_Q10[ i ] = Macros.SKP_SMULWW( gain_adj_Q16, psDD.Shape_Q10[ i ] );
            }
        }

        /* Scale long term shaping state */
        /* Calculate length to be scaled, Worst case: Next frame is voiced with max lag */
        scale_length = length * Define.NB_SUBFR;                                                          /* aprox max lag */
        scale_length = scale_length - Macros.SKP_SMULBB( Define.NB_SUBFR - ( subfr + 1 ), length );       /* subtract samples that will be too old in next frame */
        scale_length = Math.max( scale_length, lag + Define.LTP_ORDER );                                  /* make sure to scale whole pitch period if voiced */

        for( i = NSQ.sLTP_shp_buf_idx - scale_length; i < NSQ.sLTP_shp_buf_idx; i++ ) {
            NSQ.sLTP_shp_Q10[ i ] = Macros.SKP_SMULWW( gain_adj_Q16, NSQ.sLTP_shp_Q10[ i ] );
        }

        /* Scale LTP predict state */
        /* Only when not rewhitened above — otherwise sLTP_Q16 was just rebuilt with the new gain. */
        if( NSQ.rewhite_flag == 0 ) {
            for( i = NSQ.sLTP_buf_idx - lag - Define.LTP_ORDER / 2; i < NSQ.sLTP_buf_idx; i++ ) {
                sLTP_Q16[ i ] = Macros.SKP_SMULWW( gain_adj_Q16, sLTP_Q16[ i ] );
            }
        }
    }

    /* Scale input */
    for( i = 0; i < length; i++ ) {
        x_sc_Q10[ i ] = (Macros.SKP_SMULBB( x[ x_offset + i ], ( short )inv_gain_Q16 ) >> 6 );
    }

    /* save inv_gain */
    assert( inv_gain_Q16 != 0 );
    NSQ.prev_inv_gain_Q16 = inv_gain_Q16;
}

/**
 * Copies one delayed-decision state into another (field-by-field deep copy of the
 * fixed-size buffers plus the scalar members).
 *
 * @param DD_dst Dst del dec state
 * @param DD_src Src del dec state
 * @param LPC_state_idx Index to LPC buffer
 */
static void SKP_Silk_copy_del_dec_state(
    NSQDelDecStruct DD_dst,     /* I Dst del dec state */
    NSQDelDecStruct DD_src,     /* I Src del dec state */
    int LPC_state_idx           /* I Index to LPC buffer */
)
{
    System.arraycopy(DD_src.RandState, 0, DD_dst.RandState, 0, Define.DECISION_DELAY);
    System.arraycopy(DD_src.Q_Q10,     0, DD_dst.Q_Q10,     0, Define.DECISION_DELAY);
    System.arraycopy(DD_src.Pred_Q16,  0, DD_dst.Pred_Q16,  0, Define.DECISION_DELAY);
    System.arraycopy(DD_src.Shape_Q10, 0, DD_dst.Shape_Q10, 0, Define.DECISION_DELAY);
    System.arraycopy(DD_src.Xq_Q10,    0, DD_dst.Xq_Q10,    0, Define.DECISION_DELAY);
    /* Only the LPC window starting at LPC_state_idx is copied. */
    System.arraycopy(DD_src.sLPC_Q14, LPC_state_idx, DD_dst.sLPC_Q14, LPC_state_idx, Define.NSQ_LPC_BUF_LENGTH());
    DD_dst.LF_AR_Q12 = DD_src.LF_AR_Q12;
    DD_dst.Seed      = DD_src.Seed;
    DD_dst.SeedInit  = DD_src.SeedInit;
    DD_dst.RD_Q10    = DD_src.RD_Q10;
}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.felix.karaf.shell.commands.utils;

import java.io.InputStream;
import java.io.OutputStream;
import java.io.IOException;

//
// Based on Apache Ant 1.6.5
//

/**
 * Copies standard output and error of children streams to standard output and error of the parent.
 *
 * @version $Rev: 705608 $ $Date: 2008-10-17 15:28:45 +0200 (Fri, 17 Oct 2008) $
 */
public class PumpStreamHandler
{
    /** Source for the child's standard input; {@code null} when the child receives no input. */
    private final InputStream in;

    /** Sink for the child's standard output; never {@code null}. */
    private final OutputStream out;

    /** Sink for the child's standard error; never {@code null}. */
    private final OutputStream err;

    /** Daemon thread copying the child's stdout to {@link #out}; created lazily by attach(). */
    private Thread outputThread;

    /** Daemon thread copying the child's stderr to {@link #err}; created lazily by attach(). */
    private Thread errorThread;

    /** Pumper feeding {@link #in} to the child's stdin; {@code null} when there is no input. */
    private StreamPumper inputPump;

    //
    // NOTE: May want to use a ThreadPool here, 3 threads per/pair seems kinda expensive :-(
    //

    /**
     * Creates a handler that feeds {@code in} to the child's stdin and copies the child's
     * stdout/stderr into {@code out}/{@code err}.
     *
     * @param in  source for the child's standard input; may be {@code null} when the child
     *            gets no input (see {@link #setChildInputStream})
     * @param out sink for the child's standard output; must not be {@code null}
     * @param err sink for the child's standard error; must not be {@code null}
     */
    public PumpStreamHandler(final InputStream in, final OutputStream out, final OutputStream err) {
        // BUGFIX: the previous 'assert in != null' contradicted the delegating constructors
        // below, which deliberately pass in == null; with -ea every no-input handler failed.
        // 'in' is nullable by design — setChildInputStream() handles the null case.
        assert out != null;
        assert err != null;

        this.in = in;
        this.out = out;
        this.err = err;
    }

    /**
     * Creates a handler for a child that receives no input on stdin.
     */
    public PumpStreamHandler(final OutputStream out, final OutputStream err) {
        this(null, out, err);
    }

    /**
     * Creates a handler that merges the child's stdout and stderr into one stream.
     */
    public PumpStreamHandler(final OutputStream outAndErr) {
        this(outAndErr, outAndErr);
    }

    /**
     * Set the input stream from which to read the standard output of the child.
     */
    public void setChildOutputStream(final InputStream in) {
        assert in != null;

        // 'out' is guaranteed non-null by the constructor.
        createChildOutputPump(in, out);
    }

    /**
     * Set the input stream from which to read the standard error of the child.
     */
    public void setChildErrorStream(final InputStream in) {
        assert in != null;

        if (err != null) {
            createChildErrorPump(in, err);
        }
    }

    /**
     * Set the output stream by means of which input can be sent to the child.
     */
    public void setChildInputStream(final OutputStream out) {
        assert out != null;

        if (in != null) {
            inputPump = createInputPump(in, out, true);
        }
        else {
            // No input to feed: close the child's stdin so it sees EOF.
            try {
                out.close();
            } catch (IOException e) {
                // ignore — best-effort close; the child may already have gone away
            }
        }
    }

    /**
     * Attach to a child streams from the given process.
     *
     * @param p The process to attach to.
     */
    public void attach(final Process p) {
        assert p != null;

        setChildInputStream(p.getOutputStream());
        setChildOutputStream(p.getInputStream());
        setChildErrorStream(p.getErrorStream());
    }

    /**
     * Start pumping the streams.
     */
    public void start() {
        if (outputThread != null) {
            outputThread.start();
        }

        if (errorThread != null) {
            errorThread.start();
        }

        if (inputPump != null) {
            Thread inputThread = new Thread(inputPump);
            inputThread.setDaemon(true);
            inputThread.start();
        }
    }

    /**
     * Stop pumping the streams. Waits (without timeout) for the output and error pump
     * threads to finish, stops the input pump, then flushes both sinks best-effort.
     */
    public void stop() {
        if (outputThread != null) {
            try {
                outputThread.join();
            }
            catch (InterruptedException e) {
                // ignore
            }
        }

        if (errorThread != null) {
            try {
                errorThread.join();
            }
            catch (InterruptedException e) {
                // ignore
            }
        }

        if (inputPump != null) {
            inputPump.stop();
        }

        try {
            err.flush();
        } catch (IOException e) {
            // ignore — flushing is best-effort
        }
        try {
            out.flush();
        } catch (IOException e) {
            // ignore — flushing is best-effort
        }
    }

    /**
     * Create the pump to handle child output.
     */
    protected void createChildOutputPump(final InputStream in, final OutputStream out) {
        assert in != null;
        assert out != null;

        outputThread = createPump(in, out);
    }

    /**
     * Create the pump to handle error output.
     */
    protected void createChildErrorPump(final InputStream in, final OutputStream out) {
        assert in != null;
        assert out != null;

        errorThread = createPump(in, out);
    }

    /**
     * Creates a stream pumper to copy the given input stream to the given output stream.
     */
    protected Thread createPump(final InputStream in, final OutputStream out) {
        assert in != null;
        assert out != null;

        return createPump(in, out, false);
    }

    /**
     * Creates a stream pumper to copy the given input stream to the
     * given output stream.
     *
     * @param in                    The input stream to copy from.
     * @param out                   The output stream to copy to.
     * @param closeWhenExhausted    If true close the inputstream.
     * @return                      A thread object that does the pumping.
     */
    protected Thread createPump(final InputStream in, final OutputStream out, final boolean closeWhenExhausted) {
        assert in != null;
        assert out != null;

        final Thread result = new Thread(new StreamPumper(in, out, closeWhenExhausted));
        result.setDaemon(true);
        return result;
    }

    /**
     * Creates a stream pumper to copy the given input stream to the
     * given output stream. Used for standard input.
     */
    protected StreamPumper createInputPump(final InputStream in, final OutputStream out, final boolean closeWhenExhausted) {
        assert in != null;
        assert out != null;

        StreamPumper pumper = new StreamPumper(in, out, closeWhenExhausted);
        pumper.setAutoflush(true);
        return pumper;
    }
}
package org.broadinstitute.hellbender.utils.read.markduplicates;

import htsjdk.samtools.SAMRecord;
import htsjdk.samtools.SamReader;
import htsjdk.samtools.SamReaderFactory;
import htsjdk.samtools.util.CloseableIterator;
import htsjdk.samtools.util.CloserUtil;
import org.broadinstitute.hellbender.CommandLineProgramTest;
import org.testng.annotations.Test;

import java.io.File;

/**
 * This class defines the individual test cases to run. The actual running of the test is done
 * by AbstractMarkDuplicatesCommandLineProgramTester or children thereof (see getTester).
 */
public abstract class AbstractMarkDuplicatesCommandLineProgramTest extends CommandLineProgramTest {

    protected static final File TEST_DATA_DIR = new File(getTestDataDir(), "picard/sam/MarkDuplicates/");

    // Concrete subclasses supply the tester that builds the synthetic SAM input,
    // runs the tool under test, and checks which records were marked duplicate.
    protected abstract AbstractMarkDuplicatesTester getTester();

    // Base quality used for every synthetic read unless a test overrides it.
    protected final static int DEFAULT_BASE_QUALITY = 10;

    // ----- basic fragment / pair cases -----

    @Test
    public void testSingleUnmappedFragment() {
        final AbstractMarkDuplicatesTester tester = getTester();
        tester.addUnmappedFragment(-1, DEFAULT_BASE_QUALITY);
        tester.runTest();
    }

    @Test
    public void testSingleUnmappedPair() {
        final AbstractMarkDuplicatesTester tester = getTester();
        tester.addUnmappedPair(-1, DEFAULT_BASE_QUALITY);
        tester.runTest();
    }

    @Test
    public void testSingleMappedFragment() {
        final AbstractMarkDuplicatesTester tester = getTester();
        tester.addMappedFragment(1, 1, false, DEFAULT_BASE_QUALITY);
        tester.runTest();
    }

    @Test
    public void testTwoMappedFragments() {
        final AbstractMarkDuplicatesTester tester = getTester();
        tester.addMappedFragment(0, 1, false, DEFAULT_BASE_QUALITY);
        tester.addMappedFragment(0, 1, true, DEFAULT_BASE_QUALITY); // duplicate!!!
        tester.runTest();
    }

    @Test
    public void testSingleMappedPair() {
        final AbstractMarkDuplicatesTester tester = getTester();
        tester.addMappedPair(1, 1, 100, false, false, DEFAULT_BASE_QUALITY);
        tester.runTest();
    }

    @Test
    public void testSingleMappedFragmentAndSingleMappedPair() {
        final AbstractMarkDuplicatesTester tester = getTester();
        tester.addMappedFragment(1, 1, true, DEFAULT_BASE_QUALITY); // duplicate!!!
        tester.addMappedPair(1, 1, 100, false, false, DEFAULT_BASE_QUALITY);
        tester.runTest();
    }

    @Test
    public void testTwoMappedPairs() {
        final AbstractMarkDuplicatesTester tester = getTester();
        tester.addMappedPair(1, 1, 100, false, false, DEFAULT_BASE_QUALITY);
        tester.addMappedPair(1, 1, 100, true, true, DEFAULT_BASE_QUALITY); // duplicate!!!
        tester.runTest();
    }

    @Test
    public void testThreeMappedPairs() {
        final AbstractMarkDuplicatesTester tester = getTester();
        tester.addMappedPair(1, 1, 100, false, false, DEFAULT_BASE_QUALITY);
        tester.addMappedPair(1, 1, 100, true, true, DEFAULT_BASE_QUALITY); // duplicate!!!
        tester.addMappedPair(1, 1, 100, true, true, DEFAULT_BASE_QUALITY); // duplicate!!!
        tester.runTest();
    }

    @Test
    public void testSingleMappedFragmentAndTwoMappedPairs() {
        final AbstractMarkDuplicatesTester tester = getTester();
        tester.addMappedFragment(1, 1, true, DEFAULT_BASE_QUALITY); // duplicate!!!
        tester.addMappedPair(1, 1, 100, false, false, DEFAULT_BASE_QUALITY);
        tester.addMappedPair(1, 1, 100, true, true, DEFAULT_BASE_QUALITY); // duplicate!!!
        tester.runTest();
    }

    @Test
    public void testTwoMappedPairsAndTerminalUnmappedFragment() {
        final AbstractMarkDuplicatesTester tester = getTester();
        tester.addMappedPair(1, 1, 100, false, false, DEFAULT_BASE_QUALITY);
        tester.addMappedPair(1, 1, 100, true, true, DEFAULT_BASE_QUALITY); // duplicate!!!
        tester.addUnmappedFragment(-1, DEFAULT_BASE_QUALITY); // unmapped fragment at end of file
        tester.runTest();
    }

    @Test
    public void testTwoMappedPairsAndTerminalUnmappedPair() {
        final AbstractMarkDuplicatesTester tester = getTester();
        tester.addMappedPair(1, 1, 100, false, false, DEFAULT_BASE_QUALITY);
        tester.addMappedPair(1, 1, 100, true, true, DEFAULT_BASE_QUALITY); // duplicate!!!
        tester.addUnmappedPair(-1, DEFAULT_BASE_QUALITY); // unmapped pair at end of file
        tester.runTest();
    }

    // ----- optical-duplicate detection (tile/x/y encoded in the read name) -----

    @Test
    public void testOpticalDuplicateFinding() {
        final AbstractMarkDuplicatesTester tester = getTester();

        // explicitly creating 1 expected optical duplicate pair
        tester.setExpectedOpticalDuplicate(1);

        // pass in the read names manually, in order to control duplicates vs optical duplicates
        tester.addMatePair("READ0:1:1:1:1", 1, 1, 100, false, false, false, false,
                "50M", "50M", false, true, false, false, false, DEFAULT_BASE_QUALITY); // non-duplicate mapped pair to start
        tester.addMatePair("READ1:1:1:1:300", 1, 1, 100, false, false, true, true,
                "50M", "50M", false, true, false, false, false, DEFAULT_BASE_QUALITY); // duplicate pair, NOT optical duplicate (delta-Y > 100)
        tester.addMatePair("READ2:1:1:1:50", 1, 1, 100, false, false, true, true,
                "50M", "50M", false, true, false, false, false, DEFAULT_BASE_QUALITY); // duplicate pair, expected optical duplicate (delta-X and delta-Y < 100)
        tester.runTest();
    }

    @Test
    public void testOpticalDuplicateClusterSamePositionNoOpticalDuplicates() {
        final AbstractMarkDuplicatesTester tester = getTester();
        tester.setExpectedOpticalDuplicate(0);
        tester.addMatePair("RUNID:7:1203:2886:82292", 1, 485253, 485253, false, false, true, true,
                "42M59S", "59S42M", false, true, false, false, false, DEFAULT_BASE_QUALITY);
        tester.addMatePair("RUNID:7:1203:2884:16834", 1, 485253, 485253, false, false, false, false,
                "59S42M", "42M59S", true, false, false, false, false, DEFAULT_BASE_QUALITY);
        tester.runTest();
    }

    @Test
    public void testOpticalDuplicateClusterSamePositionNoOpticalDuplicatesWithinPixelDistance() {
        final AbstractMarkDuplicatesTester tester = getTester();
        tester.setExpectedOpticalDuplicate(0);
        tester.addMatePair("RUNID:7:1203:2886:16834", 1, 485253, 485253, false, false, true, true,
                "42M59S", "59S42M", false, true, false, false, false, DEFAULT_BASE_QUALITY);
        tester.addMatePair("RUNID:7:1203:2884:16835", 1, 485253, 485253, false, false, false, false,
                "59S42M", "42M59S", true, false, false, false, false, DEFAULT_BASE_QUALITY);
        tester.runTest();
    }

    @Test
    public void testOpticalDuplicateClusterSamePositionOneOpticalDuplicatesWithinPixelDistance() {
        final AbstractMarkDuplicatesTester tester = getTester();
        tester.setExpectedOpticalDuplicate(1);
        tester.addMatePair("RUNID:7:1203:2886:16834", 1, 485253, 485253, false, false, true, true,
                "45M", "45M", false, true, false, false, false, DEFAULT_BASE_QUALITY);
        tester.addMatePair("RUNID:7:1203:2884:16835", 1, 485253, 485253, false, false, false, false,
                "45M", "45M", false, true, false, false, false, DEFAULT_BASE_QUALITY);
        tester.runTest();
    }

    @Test
    public void testOpticalDuplicateClusterOneEndSamePositionOneCluster() {
        final AbstractMarkDuplicatesTester tester = getTester();
        tester.setExpectedOpticalDuplicate(1);
        tester.addMatePair("RUNID:7:2205:17939:39728", 1, 485328, 485312, false, false, false, false,
                "55M46S", "30S71M", false, true, false, false, false, DEFAULT_BASE_QUALITY);
        tester.addMatePair("RUNID:7:2205:17949:39745", 1, 485328, 485328, false, false, true, true,
                "55M46S", "46S55M", false, true, false, false, false, DEFAULT_BASE_QUALITY);
        tester.runTest();
    }

    // ----- secondary alignments, soft clipping, and one-end-unmapped pairs -----

    @Test
    public void testTwoMappedPairsAndMappedSecondaryFragment() {
        final AbstractMarkDuplicatesTester tester = getTester();
        tester.addMappedPair(1, 1, 100, false, false, DEFAULT_BASE_QUALITY);
        tester.addMappedPair(1, 1, 100, true, true, DEFAULT_BASE_QUALITY); // duplicate!!!
        tester.addMappedFragment(1, 200, false, DEFAULT_BASE_QUALITY, true); // mapped non-primary fragment
        tester.runTest();
    }

    @Test
    public void testMappedFragmentAndMappedPairFirstOfPairNonPrimary() {
        final AbstractMarkDuplicatesTester tester = getTester();
        tester.addMappedFragment(1, 1, false, DEFAULT_BASE_QUALITY);
        tester.addMatePair(1, 200, 0, false, true, false, false,
                "54M22S", null, false, false, true, true, false, DEFAULT_BASE_QUALITY);
        tester.runTest();
    }

    @Test
    public void testTwoMappedPairsMatesSoftClipped() {
        final AbstractMarkDuplicatesTester tester = getTester();
        tester.addMappedPair(1, 10022, 10051, false, false, "76M", "8S68M", false, true, false, DEFAULT_BASE_QUALITY);
        tester.addMappedPair(1, 10022, 10063, false, false, "76M", "5S71M", false, true, false, DEFAULT_BASE_QUALITY);
        tester.runTest();
    }

    @Test
    public void testTwoMappedPairsWithSoftClipping() {
        final AbstractMarkDuplicatesTester tester = getTester();
        // NB: no duplicates
        // 5'1: 2, 5'2:46+73M=118
        // 5'1: 2, 5'2:51+68M=118
        tester.addMappedPair(1, 2, 46, false, false, "6S42M28S", "3S73M", false, DEFAULT_BASE_QUALITY);
        tester.addMappedPair(1, 2, 51, true, true, "6S42M28S", "8S68M", false, DEFAULT_BASE_QUALITY);
        tester.runTest();
    }

    @Test
    public void testTwoMappedPairsWithSoftClippingFirstOfPairOnlyNoMateCigar() {
        final AbstractMarkDuplicatesTester tester = getTester();
        tester.setNoMateCigars(true);
        // NB: no duplicates
        // 5'1: 2, 5'2:46+73M=118
        // 5'1: 2, 5'2:51+68M=118
        tester.addMappedPair(1, 12, 46, false, false, "6S42M28S", null, true, DEFAULT_BASE_QUALITY); // only add the first one
        tester.addMappedPair(1, 12, 51, false, false, "6S42M28S", null, true, DEFAULT_BASE_QUALITY); // only add the first one
        tester.runTest();
    }

    @Test
    public void testTwoMappedPairsWithSoftClippingBoth() {
        final AbstractMarkDuplicatesTester tester = getTester();
        tester.addMappedPair(1, 10046, 10002, true, true, "3S73M", "6S42M28S", true, false, false, DEFAULT_BASE_QUALITY);
        tester.addMappedPair(1, 10051, 10002, false, false, "8S68M", "6S48M22S", true, false, false, DEFAULT_BASE_QUALITY);
        tester.runTest();
    }

    @Test
    public void testMatePairSecondUnmapped() {
        final AbstractMarkDuplicatesTester tester = getTester();
        tester.addMatePair(1, 10049, 10049, false, true, false, false,
                "11M2I63M", null, false, false, false, false, false, DEFAULT_BASE_QUALITY); // neither are duplicates
        tester.runTest();
    }

    @Test
    public void testMatePairFirstUnmapped() {
        final AbstractMarkDuplicatesTester tester = getTester();
        tester.addMatePair(1, 10056, 10056, true, false, false, false,
                null, "54M22S", false, false, false, false, false, DEFAULT_BASE_QUALITY); // neither are duplicates
        tester.runTest();
    }

    @Test
    public void testMappedFragmentAndMatePairSecondUnmapped() {
        final AbstractMarkDuplicatesTester tester = getTester();
        tester.addMatePair(1, 10049, 10049, false, true, false, false,
                "11M2I63M", null, false, false, false, false, false, DEFAULT_BASE_QUALITY);
        tester.addMappedFragment(1, 10049, true, DEFAULT_BASE_QUALITY); // duplicate
        tester.runTest();
    }

    @Test
    public void testMappedFragmentAndMatePairFirstUnmapped() {
        final AbstractMarkDuplicatesTester tester = getTester();
        tester.addMatePair(1, 10049, 10049, true, false, false, false,
                null, "11M2I63M", false, false, false, false, false, DEFAULT_BASE_QUALITY);
        tester.addMappedFragment(1, 10049, true, DEFAULT_BASE_QUALITY); // duplicate
        tester.runTest();
    }

    @Test
    public void testMappedPairAndMatePairSecondUnmapped() {
        final AbstractMarkDuplicatesTester tester = getTester();
        tester.addMatePair(1, 10040, 10040, false, true, true, false,
                "76M", null, false, false, false, false, false, DEFAULT_BASE_QUALITY); // second a duplicate,
        // second end unmapped
        tester.addMappedPair(1, 10189, 10040, false, false, "41S35M", "65M11S", true, false, false, DEFAULT_BASE_QUALITY); // mapped OK
        tester.runTest();
    }

    @Test
    public void testMappedPairAndMatePairFirstUnmapped() {
        final AbstractMarkDuplicatesTester tester = getTester();
        tester.addMatePair(1, 10040, 10040, true, false, false, true,
                null, "76M", false, false, false, false, false, DEFAULT_BASE_QUALITY); // first a duplicate,
        // first end unmapped
        tester.addMappedPair(1, 10189, 10040, false, false, "41S35M", "65M11S", true, false, false, DEFAULT_BASE_QUALITY); // mapped OK
        tester.runTest();
    }

    // TODO: fails on MarkDuplicatesWithMateCigar
    @Test
    public void testMappedPairAndMatePairFirstOppositeStrandSecondUnmapped() {
        final AbstractMarkDuplicatesTester tester = getTester();
        // first end mapped OK -, second end unmapped
        tester.addMatePair(1, 484071, 484071, false, true, false, false,
                "66S35M", null, true, false, false, false, false, DEFAULT_BASE_QUALITY);
        // mapped OK +/-
        tester.addMappedPair(1, 484105, 484075, false, false, "35M66S", "30S71M", false, true, false, DEFAULT_BASE_QUALITY);
        tester.runTest();
    }

    @Test
    public void testMappedPairAndMappedFragmentAndMatePairSecondUnmapped() {
        final AbstractMarkDuplicatesTester tester = getTester();
        tester.addMatePair(1, 10040, 10040, false, true, true, false,
                "76M", null, false, false, false, false, false, DEFAULT_BASE_QUALITY); // first a duplicate,
        // second end unmapped
        tester.addMappedPair(1, 10189, 10040, false, false, "41S35M", "65M11S", true, false, false, DEFAULT_BASE_QUALITY); // mapped OK
        tester.addMappedFragment(1, 10040, true, DEFAULT_BASE_QUALITY); // duplicate
        tester.runTest();
    }

    @Test
    public void testMappedPairAndMappedFragmentAndMatePairFirstUnmapped() {
        final AbstractMarkDuplicatesTester tester = getTester();
        tester.addMatePair(1, 10040, 10040, true, false, false, true,
                null, "76M", false, false, false, false, false, DEFAULT_BASE_QUALITY); // first a duplicate,
        // first end unmapped
        tester.addMappedPair(1, 10189, 10040, false, false, "41S35M", "65M11S", true, false, false, DEFAULT_BASE_QUALITY); // mapped OK
        tester.addMappedFragment(1, 10040, true, DEFAULT_BASE_QUALITY); // duplicate
        tester.runTest();
    }

    // ----- orientation and position edge cases -----

    @Test
    public void testTwoMappedPairsWithOppositeOrientations() {
        final AbstractMarkDuplicatesTester tester = getTester();
        tester.addMappedPair(1, 10182, 10038, true, true, "32S44M", "66M10S", true, false, false, DEFAULT_BASE_QUALITY); // -/+
        tester.addMappedPair(1, 10038, 10182, false, false, "70M6S", "32S44M", false, true, false, DEFAULT_BASE_QUALITY); // +/-, both are duplicates
        tester.runTest();
    }

    @Test
    public void testTwoMappedPairsWithOppositeOrientationsNumberTwo() {
        final AbstractMarkDuplicatesTester tester = getTester();
        tester.addMappedPair(1, 10038, 10182, false, false, "70M6S", "32S44M", false, true, false, DEFAULT_BASE_QUALITY); // +/-, both are duplicates
        tester.addMappedPair(1, 10182, 10038, true, true, "32S44M", "66M10S", true, false, false, DEFAULT_BASE_QUALITY); // -/+
        tester.runTest();
    }

    @Test
    public void testThreeMappedPairsWithMatchingSecondMate() {
        final AbstractMarkDuplicatesTester tester = getTester();
        // Read0 and Read2 are duplicates
        // 10181+41=10220, 10058
        tester.addMappedPair(1, 10181, 10058, false, false, "41S35M", "47M29S", true, false, false, DEFAULT_BASE_QUALITY); // -/+
        // 10181+37=10216, 10058
        tester.addMappedPair(1, 10181, 10058, true, true, "37S39M", "44M32S", true, false, false, DEFAULT_BASE_QUALITY); // -/+
        // 10180+36=10216, 10058
        tester.addMappedPair(1, 10180, 10058, false, false, "36S40M", "50M26S", true, false, false, DEFAULT_BASE_QUALITY); // -/+, both are duplicates
        tester.runTest();
    }

    @Test
    public void testMappedPairWithSamePosition() {
        final AbstractMarkDuplicatesTester tester = getTester();
        tester.addMappedPair(1, 4914, 4914, false, false, "37M39S", "73M3S", false, false, false, DEFAULT_BASE_QUALITY); // +/+
        tester.runTest();
    }

    @Test
    public void testMappedPairWithSamePositionSameCigar() {
        final AbstractMarkDuplicatesTester tester = getTester();
        tester.addMappedPair(1, 4914, 4914, false, false, "37M39S", "37M39S", false, false, false, DEFAULT_BASE_QUALITY); // +/+
        tester.runTest();
    }

    @Test
    public void testTwoMappedPairWithSamePosition() {
        final AbstractMarkDuplicatesTester tester = getTester();
        tester.addMappedPair(0, 5604914, 5604914, false, false, "37M39S", "73M3S", false, false, false, DEFAULT_BASE_QUALITY); // +/+
        tester.addMappedPair(0, 5604914, 5604914, true, true, "37M39S", "73M3S", false, false, false, DEFAULT_BASE_QUALITY); // +/+
        tester.runTest();
    }

    @Test
    public void testTwoMappedPairWithSamePositionDifferentStrands() {
        final AbstractMarkDuplicatesTester tester = getTester();
        tester.addMappedPair(0, 5604914, 5604914, false, false, "50M", "50M", true, false, false, DEFAULT_BASE_QUALITY); // +/-
        tester.addMappedPair(0, 5604914, 5604914, true, true, "50M", "50M", false, true, false, DEFAULT_BASE_QUALITY); // -/+
        tester.runTest();
    }

    @Test
    public void testTwoMappedPairWithSamePositionDifferentStrands2() {
        final AbstractMarkDuplicatesTester tester = getTester();
        tester.addMappedPair(0, 5604914, 5604915, false, false, "50M", "50M", true, false, false, DEFAULT_BASE_QUALITY); // +/-
        tester.addMappedPair(0, 5604915, 5604914, true, true, "50M", "50M", false, true, false, DEFAULT_BASE_QUALITY); // -/+
        tester.runTest();
    }

    @Test
    public void testMappedPairWithFirstEndSamePositionAndOther() {
        final AbstractMarkDuplicatesTester tester = getTester();
        tester.addMappedPair(0, 5604914, 5605914, false, false, "37M39S", "73M3S", false, false, false, DEFAULT_BASE_QUALITY); // +/+
        tester.addMappedPair(0, 5604914, 5604914, false, false, "37M39S", "73M3S", false, false, false, DEFAULT_BASE_QUALITY); // +/+
        tester.runTest();
    }

    // ----- duplicate groups split across chromosomes -----

    @Test
    public void testTwoGroupsOnDifferentChromosomesOfTwoFragments() {
        final AbstractMarkDuplicatesTester tester = getTester();
        tester.addMappedFragment(0, 1, false, DEFAULT_BASE_QUALITY);
        tester.addMappedFragment(0, 1, true, DEFAULT_BASE_QUALITY); // duplicate!!!
        tester.addMappedFragment(1, 1, false, DEFAULT_BASE_QUALITY);
        tester.addMappedFragment(1, 1, true, DEFAULT_BASE_QUALITY); // duplicate!!!
        tester.runTest();
    }

    @Test
    public void testTwoGroupsOnDifferentChromosomesOfTwoMappedPairs() {
        final AbstractMarkDuplicatesTester tester = getTester();
        tester.addMappedPair(0, 1, 100, false, false, DEFAULT_BASE_QUALITY);
        tester.addMappedPair(0, 1, 100, true, true, DEFAULT_BASE_QUALITY); // duplicate!!!
        tester.addMappedPair(1, 1, 100, false, false, DEFAULT_BASE_QUALITY);
        tester.addMappedPair(1, 1, 100, true, true, DEFAULT_BASE_QUALITY); // duplicate!!!
        tester.runTest();
    }

    @Test
    public void testTwoGroupsOnDifferentChromosomesOfThreeMappedPairs() {
        final AbstractMarkDuplicatesTester tester = getTester();
        tester.addMappedPair(0, 1, 100, false, false, DEFAULT_BASE_QUALITY);
        tester.addMappedPair(0, 1, 100, true, true, DEFAULT_BASE_QUALITY); // duplicate!!!
        tester.addMappedPair(0, 1, 100, true, true, DEFAULT_BASE_QUALITY); // duplicate!!!
        tester.addMappedPair(1, 1, 100, false, false, DEFAULT_BASE_QUALITY);
        tester.addMappedPair(1, 1, 100, true, true, DEFAULT_BASE_QUALITY); // duplicate!!!
        tester.addMappedPair(1, 1, 100, true, true, DEFAULT_BASE_QUALITY); // duplicate!!!
        tester.runTest();
    }

    @Test
    public void testThreeGroupsOnDifferentChromosomesOfThreeMappedPairs() {
        final AbstractMarkDuplicatesTester tester = getTester();
        tester.addMappedPair(0, 1, 100, false, false, DEFAULT_BASE_QUALITY);
        tester.addMappedPair(0, 1, 100, true, true, DEFAULT_BASE_QUALITY); // duplicate!!!
        tester.addMappedPair(0, 1, 100, true, true, DEFAULT_BASE_QUALITY); // duplicate!!!
        tester.addMappedPair(1, 1, 100, false, false, DEFAULT_BASE_QUALITY);
        tester.addMappedPair(1, 1, 100, true, true, DEFAULT_BASE_QUALITY); // duplicate!!!
        tester.addMappedPair(1, 1, 100, true, true, DEFAULT_BASE_QUALITY); // duplicate!!!
        tester.addMappedPair(2, 1, 100, false, false, DEFAULT_BASE_QUALITY);
        tester.addMappedPair(2, 1, 100, true, true, DEFAULT_BASE_QUALITY); // duplicate!!!
        tester.addMappedPair(2, 1, 100, true, true, DEFAULT_BASE_QUALITY); // duplicate!!!
        tester.runTest();
    }

    // ----- bulk / stress and pathological-ordering cases -----

    @Test
    public void testBulkFragmentsNoDuplicates() {
        final AbstractMarkDuplicatesTester tester = getTester();
        for (int position = 1; position <= 10000; position += 1) {
            tester.addMappedFragment(0, position, false, "100M", DEFAULT_BASE_QUALITY);
        }
        tester.runTest();
    }

    @Test
    public void testBulkFragmentsWithDuplicates() {
        final AbstractMarkDuplicatesTester tester = getTester();
        // One original plus four duplicates at every position.
        for (int position = 1; position <= 10000; position += 1) {
            tester.addMappedFragment(0, position, false, "100M", DEFAULT_BASE_QUALITY);
            tester.addMappedFragment(0, position, true, "100M", DEFAULT_BASE_QUALITY);
            tester.addMappedFragment(0, position, true, "100M", DEFAULT_BASE_QUALITY);
            tester.addMappedFragment(0, position, true, "100M", DEFAULT_BASE_QUALITY);
            tester.addMappedFragment(0, position, true, "100M", DEFAULT_BASE_QUALITY);
        }
        tester.runTest();
    }

    @Test
    public void testStackOverFlowPairSetSwap() {
        final AbstractMarkDuplicatesTester tester = getTester();
        // Regression input is read from a checked-in SAM file rather than built inline.
        // NOTE(review): reader is closed via CloserUtil below rather than
        // try-with-resources; a failure mid-loop would leak it.
        File input = new File(TEST_DATA_DIR, "markDuplicatesWithMateCigar.pairSet.swap.sam");
        SamReader reader = SamReaderFactory.makeDefault().open(input);
        tester.setHeader(reader.getFileHeader());
        for (final SAMRecord record : reader) {
            tester.addRecord(record);
        }
        CloserUtil.close(reader);
        tester.setExpectedOpticalDuplicate(1);
        tester.runTest();
    }

    @Test
    public void testSecondEndIsBeforeFirstInCoordinate() {
        final AbstractMarkDuplicatesTester tester = getTester();
        tester.addMappedPair(0, 108855339, 108855323, false, false, "33S35M", "17S51M", true, true, false, DEFAULT_BASE_QUALITY); // +/-
        tester.runTest();
    }

    @Test
    public void testPathologicalOrderingAtTheSamePosition() {
        final AbstractMarkDuplicatesTester tester = getTester();

        tester.setExpectedOpticalDuplicate(1);

        tester.addMatePair("RUNID:3:1:15013:113051", 0, 129384554, 129384554, false, false, false, false,
                "68M", "68M", false, false, false, false, false, DEFAULT_BASE_QUALITY);
        tester.addMatePair("RUNID:3:1:15029:113060", 0, 129384554, 129384554, false, false, true, true,
                "68M", "68M", false, false, false, false, false, DEFAULT_BASE_QUALITY);

        // Create the pathology
        // Rewrites base qualities per record (two pairs = four records, one
        // qualityOffset entry each) to force an unusual best-record ordering.
        CloseableIterator<SAMRecord> iterator = tester.getRecordIterator();
        int[] qualityOffset = {20, 30, 10, 40}; // creates an interesting pathological ordering
        int index = 0;
        while (iterator.hasNext()) {
            final SAMRecord record = iterator.next();
            byte[] quals = new byte[record.getReadLength()];
            for (int i = 0; i < record.getReadLength(); i++) {
                quals[i] = (byte) (qualityOffset[index] + 10);
            }
            record.setBaseQualities(quals);
            index++;
        }
        iterator.close();

        // Run the test
        tester.runTest();
    }

    @Test
    public void testDifferentChromosomesInOppositeOrder() {
        final AbstractMarkDuplicatesTester tester = getTester();
        tester.setExpectedOpticalDuplicate(1);
        tester.addMatePair("RUNID:6:101:17642:6835", 0, 1, 123989, 18281, false, false, true, true,
                "37S64M", "52M49S", false, false, false, false, false, DEFAULT_BASE_QUALITY);
        tester.addMatePair("RUNID:6:101:17616:6888", 1, 0, 18281, 123989, false, false, false, false,
                "52M49S", "37S64M", false, false, false, false, false, DEFAULT_BASE_QUALITY);
        tester.runTest();
    }

    @Test
    public void testOpticalDuplicateClustersAddingSecondEndFirstSameCoordinate() {
        final AbstractMarkDuplicatesTester tester = getTester();
        tester.setExpectedOpticalDuplicate(1);
        tester.addMatePair("RUNID:1:1:15993:13361", 2, 41212324, 41212310, false, false, false, false,
                "33S35M", "19S49M", true, true, false, false, false, DEFAULT_BASE_QUALITY);
        tester.addMatePair("RUNID:1:1:16020:13352", 2, 41212324, 41212319, false, false, true, true,
                "33S35M", "28S40M", true, true, false, false, false, DEFAULT_BASE_QUALITY);
        tester.runTest();
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.deltaspike.jsf.impl.config.view;

import org.apache.deltaspike.core.api.config.ConfigResolver;
import org.apache.deltaspike.core.api.config.view.metadata.Aggregated;
import org.apache.deltaspike.core.api.config.view.ViewConfig;
import org.apache.deltaspike.core.api.config.view.metadata.SkipMetaDataMerge;
import org.apache.deltaspike.core.api.config.view.metadata.ViewMetaData;
import org.apache.deltaspike.core.api.config.view.metadata.ConfigDescriptor;
import org.apache.deltaspike.core.spi.config.view.ConfigNodeConverter;
import org.apache.deltaspike.core.spi.config.view.ConfigPreProcessor;
import org.apache.deltaspike.core.spi.config.view.ViewConfigNode;
import org.apache.deltaspike.core.util.AnnotationUtils;
import org.apache.deltaspike.core.util.ClassUtils;
import org.apache.deltaspike.core.util.ExceptionUtils;
import org.apache.deltaspike.core.util.metadata.AnnotationInstanceProvider;
import org.apache.deltaspike.jsf.api.config.view.Folder;
import org.apache.deltaspike.jsf.api.config.view.View;
import org.apache.deltaspike.jsf.impl.util.ViewConfigUtils;

import javax.enterprise.inject.Stereotype;
import java.lang.annotation.Annotation;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * Default {@link ConfigNodeConverter}: merges a view-config node's own and
 * inherited meta-data annotations, runs any configured pre-processors, and
 * produces either a folder descriptor or a view-path descriptor.
 */
public class DefaultConfigNodeConverter implements ConfigNodeConverter
{
    /**
     * Converts the given node into a {@link ConfigDescriptor}: a folder
     * descriptor for folder configs, otherwise a view-path descriptor whose
     * view-id is assembled from the merged {@link View} meta-data.
     */
    @Override
    public ConfigDescriptor convert(ViewConfigNode node)
    {
        List<Annotation> mergedMetaData = mergeMetaData(node.getMetaData(), node.getInheritedMetaData());
        //e.g. replace default placeholders needed for the merge with real default values
        mergedMetaData = preProcessMetaData(mergedMetaData, node);

        Class sourceClass = node.getSource();

        if (ViewConfigUtils.isFolderConfig(sourceClass))
        {
            Folder folderAnnotation = findMetaDataByType(mergedMetaData, Folder.class);
            return new DefaultFolderConfigDescriptor(folderAnnotation.name(), node.getSource(),
                    mergedMetaData, node.getCallbackDescriptors());
        }
        else if (ViewConfig.class.isAssignableFrom(sourceClass))
        {
            View viewAnnotation = findMetaDataByType(mergedMetaData, View.class);

            // view-id = basePath + name + "." + extension (all from merged @View)
            String viewId = viewAnnotation.basePath() + viewAnnotation.name() + "." + viewAnnotation.extension();
            return new DefaultViewPathConfigDescriptor(viewId, (Class<? extends ViewConfig>) node.getSource(),
                    filterInheritedFolderMetaData(mergedMetaData), node.getCallbackDescriptors());
        }
        else
        {
            throw new IllegalStateException(node.getSource() + " isn't a valid view-config");
        }
    }

    /**
     * Returns the first annotation of the given type from the list,
     * or null when none is present.
     */
    private <T> T findMetaDataByType(List<Annotation> metaData, Class<T> target)
    {
        for (Annotation annotation : metaData)
        {
            if (target.equals(annotation.annotationType()))
            {
                return (T) annotation;
            }
        }
        return null;
    }

    /**
     * Collects the node's own @ViewMetaData annotations (including those
     * contributed via stereotypes) and merges them with inherited meta-data.
     */
    private List<Annotation> mergeMetaData(Set<Annotation> metaData, List<Annotation> inheritedMetaData)
    {
        //TODO add qualifier support
        List<Annotation> nodeViewMetaData = new ArrayList<Annotation>();
        List<Annotation> viewMetaDataFromStereotype = new ArrayList<Annotation>();

        for (Annotation annotation : metaData)
        {
            if (annotation.annotationType().isAnnotationPresent(ViewMetaData.class))
            {
                nodeViewMetaData.add(annotation);
            }

            //TODO move to stereotype-util, improve it and merge it with DefaultViewConfigInheritanceStrategy
            if (annotation.annotationType().isAnnotationPresent(Stereotype.class))
            {
                // a stereotype can carry @ViewMetaData-annotated meta-annotations
                for (Annotation metaAnnotation : annotation.annotationType().getAnnotations())
                {
                    if (metaAnnotation.annotationType().isAnnotationPresent(ViewMetaData.class))
                    {
                        viewMetaDataFromStereotype.add(metaAnnotation);
                    }
                }
            }
        }

        //merge meta-data of same level
        List<Annotation> result = mergeAnnotationInstances(viewMetaDataFromStereotype, nodeViewMetaData);

        if (inheritedMetaData != null && !inheritedMetaData.isEmpty())
        {
            //merge meta-data with levels above
            result = mergeAnnotationInstances(inheritedMetaData, result);
        }
        return result;
    }

    /**
     * Merges inherited annotation instances into the node-level ones.
     * Aggregated annotations are kept as-is (both levels remain); for
     * non-aggregated ones the node-level instance wins attribute-by-attribute.
     * NOTE: relies on findInResult REMOVING the matched entry from the list
     * it searches — nodeMetaData/mergedResult shrink as matches are consumed.
     */
    private List<Annotation> mergeAnnotationInstances(List<Annotation> inheritedMetaData, List<Annotation> nodeMetaData)
    {
        List<Annotation> mergedResult = new ArrayList<Annotation>();

        for (Annotation inheritedAnnotation : inheritedMetaData)
        {
            ViewMetaData viewMetaData = inheritedAnnotation.annotationType().getAnnotation(ViewMetaData.class);

            if (viewMetaData == null)
            {
                continue;
            }

            Aggregated aggregated = inheritedAnnotation.annotationType().getAnnotation(Aggregated.class);

            if (aggregated == null)
            {
                // fall back to @Aggregated on the ViewMetaData annotation type itself
                // NOTE(review): if ViewMetaData isn't annotated @Aggregated either,
                // aggregated stays null and aggregated.value() below throws NPE —
                // confirm ViewMetaData always carries a default @Aggregated.
                aggregated = viewMetaData.annotationType().getAnnotation(Aggregated.class);
            }

            if (aggregated.value()) //aggregation for the whole annotation is allowed
            {
                mergedResult.add(inheritedAnnotation);
            }
            else
            {
                Annotation currentNodeMetaData = findInResult(nodeMetaData, inheritedAnnotation);
                if (currentNodeMetaData == null)
                {
                    // not overridden at node level; may still need merging with an
                    // instance inherited from a level merged earlier
                    Annotation mergedMetaData = findInResult(mergedResult, inheritedAnnotation);

                    if (mergedMetaData == null)
                    {
                        mergedResult.add(inheritedAnnotation);
                    }
                    else
                    {
                        Annotation mergedAnnotation = mergeAnnotationInstance(mergedMetaData, inheritedAnnotation);
                        mergedResult.add(mergedAnnotation);
                    }
                }
                else
                {
                    Annotation mergedAnnotation = mergeAnnotationInstance(currentNodeMetaData, inheritedAnnotation);
                    mergedResult.add(mergedAnnotation);
                }
            }
        }

        //add all annotations at the beginning which weren't used for the merge
        mergedResult.addAll(0, nodeMetaData);
        return mergedResult;
    }

    /**
     * Merges two instances of the same annotation type attribute-by-attribute:
     * an attribute keeps the existing (node-level) value unless it equals the
     * type's default, in which case the inherited value is used — unless the
     * attribute is marked @SkipMetaDataMerge or the inherited value is also
     * the default.
     */
    private Annotation mergeAnnotationInstance(Annotation existingMetaData, Annotation inheritedMetaData)
    {
        Map<String, Object> values = new HashMap<String, Object>();

        for (Method annotationMethod : existingMetaData.annotationType().getDeclaredMethods())
        {
            annotationMethod.setAccessible(true); //TODO

            // instance with all-default values, used to detect "unset" attributes
            Annotation defaultAnnotation = AnnotationInstanceProvider.of(existingMetaData.annotationType());
            try
            {
                Object defaultValue = null;
                try
                {
                    defaultValue = annotationMethod.invoke(defaultAnnotation);
                }
                catch (Exception e) //happens with primitive data-types without default values
                {
                    defaultValue = null;
                }

                Object existingValue = annotationMethod.invoke(existingMetaData);

                if (existingValue == null /*possible with literal instances*/ || existingValue.equals(defaultValue))
                {
                    Object inheritedValue = annotationMethod.invoke(inheritedMetaData);

                    if (inheritedValue == null /*possible with literal instances*/ ||
                            inheritedValue.equals(defaultValue) ||
                            annotationMethod.isAnnotationPresent(SkipMetaDataMerge.class))
                    {
                        values.put(annotationMethod.getName(), defaultValue);
                    }
                    else
                    {
                        values.put(annotationMethod.getName(), inheritedValue);
                    }
                }
                else
                {
                    values.put(annotationMethod.getName(), existingValue);
                }
            }
            catch (Exception e)
            {
                ExceptionUtils.throwAsRuntimeException(e);
            }
        }

        //TODO add aggregation in case of arrays
        return AnnotationInstanceProvider.of(existingMetaData.annotationType(), values);
    }

    /**
     * Runs each annotation's configured {@link ConfigPreProcessor} (possibly
     * replaced via ConfigResolver) and propagates any replaced annotation
     * instance back into the node's own and inherited meta-data.
     */
    private List<Annotation> preProcessMetaData(List<Annotation> mergedMetaData, ViewConfigNode node)
    {
        List<Annotation> result = new ArrayList<Annotation>(mergedMetaData.size());

        for (Annotation annotation : mergedMetaData)
        {
            ViewMetaData viewMetaData = annotation.annotationType().getAnnotation(ViewMetaData.class);
            Class<? extends ConfigPreProcessor> preProcessorClass = viewMetaData.preProcessor();

            if (!ConfigPreProcessor.class.equals(preProcessorClass))
            {
                // allow replacing the default pre-processor via configuration
                String customPreProcessorClassName =
                        ConfigResolver.getPropertyValue(preProcessorClass.getName(), null);

                if (customPreProcessorClassName != null)
                {
                    Class<? extends ConfigPreProcessor> customPreProcessorClass =
                            ClassUtils.tryToLoadClassForName(customPreProcessorClassName, ConfigPreProcessor.class);

                    if (customPreProcessorClass != null)
                    {
                        preProcessorClass = customPreProcessorClass;
                    }
                    else
                    {
                        throw new IllegalStateException(customPreProcessorClassName + " is configured to replace " +
                                preProcessorClass.getName() + ", but it wasn't possible to load it.");
                    }
                }

                ConfigPreProcessor preProcessor = ClassUtils.tryToInstantiateClass(preProcessorClass);
                Annotation resultToAdd = preProcessor.beforeAddToConfig(annotation, node);

                //it isn't possible to detect changed annotations
                if (resultToAdd != annotation) //check if the annotation(-instance) was changed
                {
                    validateAnnotationChange(annotation);
                    rewriteMetaDataOfNode(node.getMetaData(), annotation, resultToAdd);
                    rewriteMetaDataOfNode(node.getInheritedMetaData(), annotation, resultToAdd);
                }
                result.add(resultToAdd);
            }
            else
            {
                result.add(annotation);
            }
        }
        return result;
    }

    /**
     * Finds (and REMOVES) the entry with the same annotation type from the
     * given list; returns null when absent. The removal side effect is what
     * lets mergeAnnotationInstances track "unused" node-level annotations.
     */
    private Annotation findInResult(List<Annotation> annotationList, Annotation annotationToFind)
    {
        for (Annotation annotation : annotationList)
        {
            if (annotationToFind.annotationType().equals(annotation.annotationType()))
            {
                // safe despite iterating: we return immediately after the removal
                annotationList.remove(annotation);
                return annotation;
            }
        }
        return null;
    }

    /**
     * Strips @Folder meta-data (inherited from parent folders) before it is
     * attached to a view-path descriptor.
     */
    private List<Annotation> filterInheritedFolderMetaData(List<Annotation> mergedMetaData)
    {
        List<Annotation> result = new ArrayList<Annotation>();

        for (Annotation metaData : mergedMetaData)
        {
            if (!Folder.class.equals(metaData.annotationType()))
            {
                result.add(metaData);
            }
        }
        return result;
    }

    /**
     * Rejects pre-processor changes to aggregated meta-data (other than
     * @Folder/@View), because inheritance can't propagate such changes.
     */
    protected void validateAnnotationChange(Annotation annotation)
    {
        Class<? extends Annotation> annotationType = annotation.annotationType();
        if (Folder.class.equals(annotationType) || View.class.equals(annotationType))
        {
            return;
        }

        ViewMetaData viewMetaData = annotationType.getAnnotation(ViewMetaData.class);

        if (viewMetaData == null)
        {
            return;
        }

        // NOTE(review): this inspects @Aggregated on ViewMetaData.class itself,
        // not on the changed annotation's type — confirm that is intentional.
        Aggregated aggregated = viewMetaData.annotationType().getAnnotation(Aggregated.class);

        if (aggregated != null && aggregated.value())
        {
            throw new IllegalStateException("it isn't supported to change aggregated meta-data," +
                    "because inheritance won't work correctly");
        }
    }

    /**
     * Replaces oldMetaData with newMetaData in the given collection, matching
     * by qualifier hash-code; stops after the first replacement.
     */
    protected void rewriteMetaDataOfNode(Collection<Annotation> metaData,
                                         Annotation oldMetaData, Annotation newMetaData)
    {
        Iterator<Annotation> metaDataIterator = metaData.iterator();

        while (metaDataIterator.hasNext())
        {
            Annotation currentMetaData = metaDataIterator.next();
            if (AnnotationUtils.getQualifierHashCode(currentMetaData) ==
                    AnnotationUtils.getQualifierHashCode(oldMetaData))
            {
                // remove via the iterator first, then add — avoids
                // ConcurrentModificationException on the subsequent add
                metaDataIterator.remove();
                metaData.add(newMetaData);
                break;
            }
        }
    }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.addthis.hydra.store.util;

import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

import com.addthis.basis.util.LessBytes;

import com.addthis.codec.annotations.FieldConfig;
import com.addthis.codec.codables.SuperCodable;

import com.addthis.hydra.common.hash.PluggableHashFunction;

import com.fasterxml.jackson.annotation.JsonAutoDetect;

/**
 * A simple, codable Bloom Filter adhering to the SeenFilter interface.
 * <p>A Bloom filter is a space-efficient probabilistic data structure that is used
 * to test whether an element is a member of a set. False positive matches are
 * possible, but false negatives are not; i.e. a query returns either
 * "inside set (may be wrong)" or "definitely not in set". Elements can be added to
 * the set, but not removed. The more elements that are added to the set,
 * the larger the probability of false positives.
 *
 * <p>NOTE(review): instances are codable/serialized, so the hash functions below
 * define a persisted format -- do not change their output without a migration plan.
 *
 * @user-reference
 */
@JsonAutoDetect(getterVisibility = JsonAutoDetect.Visibility.NONE,
                isGetterVisibility = JsonAutoDetect.Visibility.NONE,
                setterVisibility = JsonAutoDetect.Visibility.NONE)
public class SeenFilterBasic<K> implements SeenFilter<K>, SuperCodable {

    public static final int HASH_HASHCODE = 0; /* mostly bad */
    public static final int HASH_HASHCODE_SHIFT_REV = 1; /* mostly bad */
    public static final int HASH_HASHCODE_LONG_REV = 2; /* mostly bad */
    public static final int HASH_MD5 = 3; /* marginally better accuracy, much slower */
    public static final int HASH_PLUGGABLE_SHIFT = 4; /* default, best blend of speed and accuracy */

    /**
     * for one of the hash types (HASH_MD5).
     * NOTE(review): MessageDigest is NOT thread-safe; this shared static instance is
     * reset/used without synchronization in customHash -- concurrent HASH_MD5 use
     * would corrupt digests. Confirm all callers are single-threaded.
     */
    private static MessageDigest md5;

    static {
        try {
            md5 = java.security.MessageDigest.getInstance("MD5");
        } catch (NoSuchAlgorithmException e) {
            // MD5 is mandated by the JCA spec, so this should never happen;
            // if it does, md5 stays null and HASH_MD5 will NPE on first use.
            e.printStackTrace();
        }
    }

    /**
     * Optionally specify the initial state of the bloom filter.
     * If this field is specified then the {@link #bitsfree} field
     * must also be specified.
     */
    @FieldConfig(codable = true)
    private int[] bitset;

    /**
     * Cardinality of the bloom filter
     * (total number of bits allocated to the filter).
     * This field must be 32 or greater. This field is required.
     */
    @FieldConfig(codable = true, required = true)
    private int bits;

    /**
     * Number of hash function evaluations for each insertion
     * operation. This parameter is usually referred to as
     * the "k" parameter in the literature. This field is required.
     */
    @FieldConfig(codable = true, required = true)
    private int bitsper;

    /**
     * Type of hash function that is used. The following types are available.
     * <p>0 - HASH_HASHCODE : mostly bad
     * <p>1 - HASH_HASHCODE_SHIFT_REV : mostly bad
     * <p>2 - HASH_HASHCODE_LONG_REV : mostly bad
     * <p>3 - HASH_MD5 : marginally better accuracy, much slower
     * <p>4 - HASH_PLUGGABLE_SHIFT : best blend of speed and accuracy
     * <p>This field is required. It is strongly recommended that you use "4".
     */
    @FieldConfig(codable = true, required = true)
    private int hash;

    /**
     * If {@link #bitset} is specified the you must populate
     * this field with the number of 0 bits in the initial bloom filter.
     */
    @FieldConfig(codable = true)
    private int bitsfree;

    /** No-arg constructor for the codec; fields are filled in by decoding. */
    public SeenFilterBasic() {
    }

    /** Creates a filter with the default hash type (HASH_PLUGGABLE_SHIFT). */
    public SeenFilterBasic(int bits, int bitsper) {
        this(bits, bitsper, HASH_PLUGGABLE_SHIFT);
    }

    /**
     * Creates a filter.
     *
     * @param bits    requested cardinality; rounded DOWN to a multiple of 32
     * @param bitsper hash evaluations per insertion ("k")
     * @param hash    one of the HASH_* constants
     * @throws RuntimeException if bits &lt; 32
     */
    public SeenFilterBasic(int bits, int bitsper, int hash) {
        if (bits < 32) {
            throw new RuntimeException("invalid bits @ " + bits);
        }
        this.hash = hash;
        this.bits = (bits / 32) * 32;
        // NOTE(review): bitsfree is set to the UN-rounded value while this.bits is
        // rounded down -- for bits not a multiple of 32 the saturation accounting
        // starts slightly off; confirm whether this is intended.
        this.bitsfree = bits;
        this.bitsper = bitsper;
        this.bitset = new int[bits / 32];
    }

    /** Returns an empty filter with the same bits/bitsper/hash settings. */
    public SeenFilterBasic<K> newInstance() {
        return new SeenFilterBasic<>(bits, bitsper, hash);
    }

    /** Returns an empty filter with the given cardinality (or this filter's, if bits &lt;= 0). */
    public SeenFilterBasic<K> newInstance(int bits) {
        return new SeenFilterBasic<>(bits > 0 ? bits : this.bits, bitsper, hash);
    }

    @Override
    public String toString() {
        return "SeenFilterBasic[" + bits + "," + bitset.length + "," + hash + "]";
    }

    /**
     * first stage byte set generator to feed hash algorithms.
     * Raw and Long get a direct byte encoding; everything else goes
     * through toString() (so the hash depends on the toString format).
     */
    private static byte[] generatePreHash(Object o) {
        Class<?> clazz = o.getClass();
        if (clazz == Raw.class) {
            return ((Raw) o).toBytes();
        }
        if (clazz == Long.class) {
            return LessBytes.toBytes((Long) o);
        }
        return Raw.get(o.toString()).toBytes();
    }

    /**
     * call a given hash method for generating a single entry in a hash bit set.
     *
     * @param o     object to hash
     * @param index evaluation index (0..bitsper-1); salts the MD5/pluggable variants
     * @param hash  one of the HASH_* constants (unknown values fall through to HASH_HASHCODE)
     * @return 64-bit hash value (may be negative; callers apply Math.abs)
     */
    public static long customHash(Object o, int index, int hash) {
        switch (hash) {
            default:
            case HASH_HASHCODE:
                return o.hashCode();
            case HASH_HASHCODE_SHIFT_REV:
                int hc = o.hashCode();
                // NOTE(review): hc is an int, so (hc << 32) is a NO-OP (JLS 15.19 masks the
                // shift distance to 5 bits for int) -- the intended ((long) hc << 32) was
                // probably meant. Left as-is because the output is a persisted format.
                return (hc << 32) | LessBytes.reverseBits(hc);
            case HASH_HASHCODE_LONG_REV:
                long lhc = (long) o.hashCode();
                return lhc | LessBytes.reverseBits(lhc);
            case HASH_MD5:
                // r2 is r1 reversed and XORed with the index, so each index
                // yields a different digest pair
                byte[] r1 = generatePreHash(o);
                byte[] r2 = new byte[r1.length];
                for (int i = 0; i < r1.length; i++) {
                    r2[r2.length - i - 1] = (byte) (r1[i] ^ index);
                }
                md5.reset();
                r1 = md5.digest(r1);
                r2 = md5.digest(r2);
                return (((long) PluggableHashFunction.hash(r1)) << 32L) | ((long) PluggableHashFunction.hash(r2));
            case HASH_PLUGGABLE_SHIFT:
                // same pre-hash construction as HASH_MD5, but fed straight into
                // the pluggable hash (no digest step)
                r1 = generatePreHash(o);
                r2 = new byte[r1.length];
                for (int i = 0; i < r1.length; i++) {
                    r2[r2.length - i - 1] = (byte) (r1[i] ^ index);
                }
                return (((long) PluggableHashFunction.hash(r1)) << 32) | ((long) PluggableHashFunction.hash(r2));
        }
    }

    /**
     * return number of bits backing this filter
     */
    public int getBits() {
        return bitset.length * 32;
    }

    /** Returns the backing int[] (NOT a copy -- mutations affect the filter). */
    public int[] getBitStore() {
        return bitset;
    }

    /** Returns the configured cardinality (the {@link #bits} field). */
    public int getBitCount() {
        return bits;
    }

    /** Returns "k", the number of hash evaluations per insertion. */
    public int getBitsPerEntry() {
        return bitsper;
    }

    /** Returns the configured HASH_* constant. */
    public int getHashMethod() {
        return hash;
    }

    /**
     * return used bit saturation (0-100) as a percent
     */
    public int getSaturation() {
        try {
            return 100 - (int) ((bitsfree * 100L) / (bitset.length * 32L));
        } catch (Exception ex) {
            // defensive: divide-by-zero / NPE if bitset is empty or null
            System.out.println(hashCode() + " >> " + ex + " >> " + bits + " , " + bitsper + " , " + hash + " , " + bitsfree + " , " + bitset);
            return 0;
        }
    }

    /**
     * generate a single entry in hash offset set .
     * will be called with an index from 0-bitsper
     * to generate each bit in the hash set.
     * override this in subclasses and hash type will
     * be ignored.
     */
    public long generateHash(K o, int index) {
        return customHash(o, index, hash);
    }

    /**
     * generate a bit hash offset set.
     * NOTE(review): Math.abs(Long.MIN_VALUE) is still negative -- an astronomically
     * unlikely hash value of Long.MIN_VALUE would yield a negative offset downstream.
     */
    public long[] getHashSet(K o) {
        long[] bs = new long[bitsper];
        for (int i = 0; i < bitsper; i++) {
            bs[i] = Math.abs(generateHash(o, i));
        }
        return bs;
    }

    /**
     * return true (seen) if all bits set
     */
    public boolean checkHashSet(long[] bs) {
        for (long l : bs) {
            if (!getBit(Math.abs((int) (l % bits)))) {
                return false;
            }
        }
        return true;
    }

    /**
     * warning: like setHashSet but does not update bitsfree
     */
    public void updateHashSet(long[] bs) {
        for (int i = 0; i < bitsper; i++) {
            long hash = bs[i];
            int offset = (int) (hash % bits);
            int byteoff = offset / 32;
            // 1 << 31 is a negative int; widening to long sign-extends, but the
            // compound |= narrows back to int, so only the intended bit is set
            long val = (1 << (offset % 32));
            bitset[byteoff] |= val;
        }
    }

    /**
     * set all bits from this hash offset set
     */
    public void setHashSet(long[] bs) {
        for (long l : bs) {
            setBit(Math.abs((int) (l % bits)));
        }
    }

    /**
     * sets this offset bit
     */
    public void setBit(int offset) {
        int byteoff = offset / 32;
        long val = (1 << (offset % 32));
        // only decrement the free-bit counter on a 0 -> 1 transition
        if ((bitset[byteoff] & val) == 0 && bitsfree > 0) {
            bitsfree--;
        }
        bitset[byteoff] |= val;
    }

    /**
     * returns true of this offset bit is set
     */
    public boolean getBit(int offset) {
        int byteoff = offset / 32;
        long val = (1 << (offset % 32));
        // for bit 31 both sides sign-extend identically in the long comparison,
        // so the == val test still isolates the single intended bit
        return (bitset[byteoff] & val) == val;
    }

    /**
     * return true if seen before.
     * NOTE(review): the computed value is the AND of "bit was previously CLEAR",
     * i.e. this returns true when the key had NOT been seen -- the opposite of
     * this javadoc (compare getSetSeen, which returns true when already seen).
     * Verify callers before changing either the doc or the code.
     */
    public boolean updateSeen(K o) {
        boolean allset = true;
        for (int i = 0; i < bitsper; i++) {
            long hash = Math.abs(generateHash(o, i));
            int offset = (int) (hash % bits);
            int byteoff = offset / 32;
            long val = (1 << (offset % 32));
            allset = allset & ((bitset[byteoff] & val) == 0);
            bitset[byteoff] |= val;
        }
        return allset;
    }

    /** Convenience wrapper around {@link #merge(SeenFilter)} for the basic type. */
    public SeenFilterBasic<K> mergeSeen(SeenFilterBasic<?> merge) {
        return merge((SeenFilter<K>) merge);
    }

    /**
     * Returns a NEW filter containing the union of this filter's bits and the
     * argument's bits; neither input is modified. bitsfree is recomputed by
     * counting set bits in the union.
     *
     * @throws IllegalArgumentException if the filters differ in hash type,
     *         cardinality, bitsper or backing-array length
     */
    @Override
    public SeenFilterBasic<K> merge(SeenFilter<K> merge) {
        if (!(merge instanceof SeenFilterBasic)) {
            throw new IllegalArgumentException(merge + " incompatible filter with " + this);
        }
        SeenFilterBasic<K> filterMerge = (SeenFilterBasic<K>) merge;
        if (!(filterMerge.hash == hash && filterMerge.bits == bits)) {
            throw new IllegalArgumentException(merge + " settings differ from " + this);
        }
        SeenFilterBasic<K> filterNew = new SeenFilterBasic<>();
        if (filterMerge.bits != bits || filterMerge.bitsper != bitsper || filterMerge.bitset.length != bitset.length) {
            throw new IllegalArgumentException("cannot merge dissimilar blooms");
        }
        filterNew.hash = hash;
        filterNew.bits = bits;
        filterNew.bitsfree = bits;
        filterNew.bitsper = bitsper;
        filterNew.bitset = new int[bitset.length];
        for (int i = 0; i < bitset.length; i++) {
            filterNew.bitset[i] = bitset[i] | filterMerge.bitset[i];
            // popcount the merged word to keep bitsfree consistent; only the low
            // 32 bits are examined, so the sign extension into v is harmless
            long v = filterNew.bitset[i];
            for (int j = 0; j < 32; j++) {
                if ((v & 1) == 1) {
                    filterNew.bitsfree--;
                }
                v >>= 1;
            }
        }
        return filterNew;
    }

    /** Clears all bits. NOTE(review): bitsfree is NOT reset here -- confirm intent. */
    @Override
    public void clear() {
        bitset = new int[bitset.length];
    }

    /** Marks the key as seen (sets all k bit positions). */
    @Override
    public void setSeen(K o) {
        for (int i = 0; i < bitsper; i++) {
            long hash = Math.abs(generateHash(o, i));
            setBit((int) (hash % bits));
        }
    }

    /** Returns true if the key may have been seen (all k bits set); false means definitely not. */
    @Override
    public boolean getSeen(K o) {
        for (int i = 0; i < bitsper; i++) {
            long hash = Math.abs(generateHash(o, i));
            if (!getBit((int) (hash % bits))) {
                return false;
            }
        }
        return true;
    }

    /** Atomically-in-spirit test-and-set: returns prior seen state, setting any missing bits. */
    @Override
    public boolean getSetSeen(K o) {
        boolean seen = true;
        for (int i = 0; i < bitsper; i++) {
            long hash = Math.abs(generateHash(o, i));
            int bit = (int) (hash % bits);
            if (getBit(bit)) {
                continue;
            }
            setBit(bit);
            seen = false;
        }
        return seen;
    }

    /** Codec hook: validates bits and lazily allocates the bitset when it was not persisted. */
    @Override
    public void postDecode() {
        if (bits <= 0) {
            throw new RuntimeException("invalid bits @ 0");
        }
        if (bitset == null) {
            this.bitset = new int[bits / 32];
            this.bitsfree = bits;
        }
    }

    /** Codec hook: nothing to do before encoding. */
    @Override
    public void preEncode() {
    }
}
/*
 * Copyright 2015 Adaptris Ltd.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.adaptris.transform.ff;

import java.io.PrintWriter;
import java.util.ArrayList;

import org.w3c.dom.Node;
import org.w3c.dom.NodeList;

/**
 * Parses one record type of a flat file according to a DOM descriptor node
 * and emits it as an XML fragment (&lt;record_ID&gt;...&lt;/record_ID&gt;).
 *
 * @author sellidge
 */
public class RecordHandler extends Handler {
  // Holds array of FieldHandlers
  ArrayList<FieldHandler> Fields = new ArrayList<FieldHandler>();

  // Fields describing record structure
  String separator;       // record separator; "ascNNN" means the char with code NNN
  String rec_id;          // identifying token of this record type ("" = match anything)
  String field_sep;       // field separator used when locating the id in variable records
  boolean optional = true; // whether the record may be absent
  int length = 0;          // fixed record length (0 = separator-delimited)
  int rec_id_start = 0;    // 1-based position of the record id
  int rec_id_len = 0;      // length of the record id (0 = id is a separated field)

  /**
   * Constructor - creates a record parser based on file descriptor node
   *
   * @param input - the input description node
   */
  RecordHandler(Node input) {
    //DEBUG=true;
    separator = getAttribute(input, "separator");

    // "ascNNN" encodes a separator as a decimal character code
    if ( separator.startsWith("asc") ) {
      separator = new String(new char[] {(char)Integer.parseInt(separator.substring(3))});
    }

    rec_id    = getAttribute(input, "rec_id");
    field_sep = getAttribute(input, "field_sep");

    length       = getIntAttribute(input, "length");
    rec_id_start = getIntAttribute(input, "rec_id_start");
    rec_id_len   = getIntAttribute(input, "rec_id_len");

    try {
      if(getAttribute(input, "optional").equalsIgnoreCase("false")) {
        optional = false;
      }
    } catch(Exception e) {
      // do nothing - attribute "optional" is optional!!
    }

    setCount(getIntAttribute(input, "repetitions"));

    // each child element describes one field of the record
    NodeList children = input.getChildNodes();
    for ( int i=0; i < children.getLength(); i++ ) {
      if ( children.item(i).getNodeType() == Node.ELEMENT_NODE) {
        Fields.add(new FieldHandler(children.item(i)));
      }
    }
  }

  /** process - parses input file according to record descriptor
   *  Writes &lt;record_ID&gt;, each field's output, then &lt;/record_ID&gt;.
   *  Parse failures are logged and the closing tag is still emitted.
   *
   *  @param sp - the input StreamParser
   *  @param output - output message StringBuffer
   */
  @Override
  public void process(StreamParser sp, PrintWriter output) {
    String record = "";
    // String passback = "";
    //TM.reset();
    //TM.start();
    debug("Beginning to process record " + rec_id);

    output.print("<record_" + rec_id + ">");
    debug("Processing record " + rec_id);

    try {
      // assumes the caller (isThisHandler with rewind=false) has already
      // consumed this record into the parser's current content
      record = sp.getContent();
      StreamParser fieldParser = new StreamParser(record);

      debug("Extracted record : " + record);

      for ( int i=0; i < Fields.size(); i++ ) {
        ((FieldHandler)Fields.get(i)).process(fieldParser, output);
      }
    } catch (Exception e) {
      e.printStackTrace();
      log("WARNING", "Failed to read record " + rec_id + ", separator='" + separator + "', length='" + length +"'");
      log("EXCEPTION", e.getMessage());
    }

    output.print("</record_" + rec_id + ">");
    //debug("Read in record " + output);
    //TM.stop();
    //logP.debug("Record " + rec_id + " took " + TM.getDuration() + " milliseconds to process");
    //return passback;
  }

  /** Checks to see if next record is this one.
   *  Three matching strategies: no rec_id (match any non-empty record),
   *  fixed-position id (rec_id_len &gt; 0), or id as the rec_id_start'th
   *  separated field. Elements read while probing are pushed back via
   *  rewindElement so the stream is left untouched on a miss.
   *
   *  @param sp - StreamParser to check against
   *  @param rewind - Decides whether to leave the record read (if successful) or return it to the Stream
   *  @return true if next is this record, false otherwise
   */
  @Override
  public boolean isThisHandler(StreamParser sp, boolean rewind) {
    String tmp = new String();
    try {
      if ( rec_id.length() == 0) {
        // no record id configured: any non-empty next record matches
        if ( length > 0 ) {
          sp.setParseRule(StreamParser.FIXED_LENGTH, length);
        } else {
          sp.setParseRule(StreamParser.SEPARATED_STRING, separator.charAt(0));
        }

        try {
          sp.readElement();
        } catch (Exception e) {
          e.printStackTrace();
          log("ERROR", "Failed to read element");
          throw new Exception();
        }

        if (sp.getContent().length() > 0) {
          if (rewind) {
            sp.rewindElement(sp.getContent());
          }
          return true;
        } else {
          sp.rewindElement(sp.getContent());
          return false;
        }
      } else {
        if ( rec_id_len > 0 ) {
          // fixed-position id: read just enough to cover the id, then compare
          sp.setParseRule(StreamParser.FIXED_LENGTH, (rec_id_start-1 + rec_id_len));

          try {
            sp.readElement();
          } catch (Exception e) {
            e.printStackTrace();
            log("ERROR", "Failed to read element");
            throw new Exception();
          }

          tmp = sp.getContent();
          debug("Checking fixed record... id='" + rec_id + "', extract='" + tmp + "'");

          try {
            if ( rec_id.equals(tmp.substring(rec_id_start-1))) {
              debug("This record " + tmp);
              // push the probe back, then (unless rewinding) re-read the
              // FULL record under the real parse rule
              sp.rewindElement(tmp);

              if (length > 0) {
                sp.setParseRule(StreamParser.FIXED_LENGTH, length);
              } else {
                sp.setParseRule(StreamParser.SEPARATED_STRING, separator.charAt(0));
              }

              if (! rewind) {
                try {
                  sp.readElement();
                } catch (Exception exc) {
                  logP.error("Error reading record " + rec_id);
                  throw new Exception();
                }
                logP.debug("Read record is " + sp.getContent());
              }
              return true;
            } else {
              sp.rewindElement(tmp);
              return false;
            }
          } catch (StringIndexOutOfBoundsException se) {
            // record shorter than rec_id_start-1 chars -> cannot be this type
            debug("Record not big enough");
            sp.rewindElement(tmp);
            return false;
          }
        } else {
          // id is the rec_id_start'th separated field: read fields, remember
          // them in `history` so they can all be pushed back afterwards
          sp.setParseRule(StreamParser.QUOTED_STRING, field_sep.charAt(0), '"');

          ArrayList<String> history = new ArrayList<String>();

          for ( int i=0; i<rec_id_start; i++ ) {
            debug("Looping to ID field");

            try {
              sp.readElement();
              history.add(sp.getContent());
            } catch (Exception e) {
              for (int r=0; r<history.size(); r++) {
                sp.rewindElement(history.get(r).toString());
              }
              return false;
            }
          }

          debug("Checking variable record... id='" + rec_id + "', extract='" + sp.getContent() + "'");

          if (sp.getContent().equals(rec_id)) {
            for (int i=0; i<history.size(); i++) {
              sp.rewindElement(history.get(i).toString());
            }

            if (! rewind) {
              sp.setParseRule(StreamParser.SEPARATED_STRING, separator.charAt(0));

              try {
                sp.readElement();
              } catch (Exception e) {
                log("ERROR", "Error reading record");
                e.printStackTrace();
              }
            }
            return true;
          }

          for (int i=0; i<history.size(); i++) {
            sp.rewindElement(history.get(i).toString());
          }
          return false;
        }
      }
    } catch (Exception e) {
      // any unexpected failure means "not this handler"
      return false;
    }
  }

  /** Sets repetitions value */
  @Override
  public void setCount(int i) {
    COUNT = i;
  }

  /** Returns repetitions value */
  @Override
  public int getCount() {
    return COUNT;
  }

  /** Returns whether this record type may be absent from the input. */
  @Override
  public boolean isOptional() {
    return optional;
  }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.sip;

import java.net.URI;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Properties;

import javax.sip.InvalidArgumentException;
import javax.sip.ListeningPoint;
import javax.sip.SipFactory;
import javax.sip.SipStack;
import javax.sip.address.Address;
import javax.sip.address.AddressFactory;
import javax.sip.address.SipURI;
import javax.sip.header.CSeqHeader;
import javax.sip.header.CallIdHeader;
import javax.sip.header.ContactHeader;
import javax.sip.header.ContentTypeHeader;
import javax.sip.header.EventHeader;
import javax.sip.header.ExpiresHeader;
import javax.sip.header.ExtensionHeader;
import javax.sip.header.FromHeader;
import javax.sip.header.HeaderFactory;
import javax.sip.header.MaxForwardsHeader;
import javax.sip.header.ToHeader;
import javax.sip.header.ViaHeader;
import javax.sip.message.MessageFactory;
import javax.sip.message.Request;

import org.apache.camel.spi.Metadata;
import org.apache.camel.spi.UriParam;
import org.apache.camel.spi.UriParams;
import org.apache.camel.spi.UriPath;
import org.apache.camel.util.ObjectHelper;
import org.apache.camel.util.URISupport;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Holds all configuration for a SIP endpoint (JAIN-SIP / gov.nist implementation):
 * URI parsing, factory creation, header construction and stack properties.
 */
@UriParams
public class SipConfiguration {
    private static final Logger LOG = LoggerFactory.getLogger(SipConfiguration.class);
    // JAIN-SIP reference implementation path name
    private static final String IMPLEMENTATION = "gov.nist";

    private SipComponent component;
    private String protocol;
    private Map<String, Object> parameters;

    @UriPath @Metadata(required = "true")
    private URI uri;
    @UriParam(label = "advanced")
    private AddressFactory addressFactory;
    @UriParam(label = "advanced")
    private MessageFactory messageFactory;
    @UriParam(label = "advanced")
    private HeaderFactory headerFactory;
    @UriParam(label = "advanced")
    private SipStack sipStack;
    @UriParam(label = "advanced")
    private ListeningPoint listeningPoint;
    @UriParam(label = "advanced")
    private SipURI sipUri;
    @UriParam(label = "common", defaultValue = "NAME_NOT_SET")
    private String stackName = "NAME_NOT_SET";
    @UriParam(label = "common", defaultValue = "tcp", enums = "tcp,udp")
    private String transport = "tcp";
    @UriParam(label = "proxy")
    private int maxForwards;
    @UriParam(label = "consumer")
    private boolean consumer;
    @UriParam(label = "common")
    private String eventHeaderName;
    @UriParam(label = "common")
    private String eventId;
    @UriParam(label = "common", defaultValue = "3600")
    private int msgExpiration = 3600;
    @UriParam(label = "proxy")
    private boolean useRouterForAllUris;
    @UriParam(label = "common", defaultValue = "10000")
    private long receiveTimeoutMillis = 10000;
    @UriParam(label = "advanced", defaultValue = "1048576")
    private int maxMessageSize = 1048576;
    @UriParam(label = "common")
    private boolean cacheConnections;
    @UriParam(label = "common", defaultValue = "text")
    private String contentType = "text";
    @UriParam(label = "common", defaultValue = "plain")
    private String contentSubType = "plain";
    @UriParam(label = "logging")
    private String implementationServerLogFile;
    @UriParam(label = "logging")
    private String implementationDebugLogFile;
    @UriParam(label = "logging", defaultValue = "0")
    private String implementationTraceLevel = "0";
    @UriParam(label = "advanced")
    private SipFactory sipFactory;
    @UriParam(label = "common")
    private String fromUser;
    @UriParam(label = "common")
    private String fromHost;
    @UriParam(label = "common")
    private int fromPort;
    @UriParam(label = "common")
    private String toUser;
    @UriParam(label = "common")
    private String toHost;
    @UriParam(label = "common")
    private int toPort;
    @UriParam(label = "consumer")
    private boolean presenceAgent;
    @UriParam(label = "advanced")
    private FromHeader fromHeader;
    @UriParam(label = "advanced")
    private ToHeader toHeader;
    @UriParam(label = "advanced")
    private List<ViaHeader> viaHeaders;
    @UriParam(label = "advanced")
    private ContentTypeHeader contentTypeHeader;
    @UriParam(label = "advanced")
    private CallIdHeader callIdHeader;
    @UriParam(label = "advanced")
    private MaxForwardsHeader maxForwardsHeader;
    @UriParam(label = "advanced")
    private ContactHeader contactHeader;
    @UriParam(label = "advanced")
    private EventHeader eventHeader;
    @UriParam(label = "advanced")
    private ExtensionHeader extensionHeader;
    @UriParam(label = "advanced")
    private ExpiresHeader expiresHeader;

    /** Creates the configuration bound to the gov.nist JAIN-SIP implementation. */
    public SipConfiguration() {
        sipFactory = SipFactory.getInstance();
        sipFactory.setPathName(IMPLEMENTATION);
    }

    /** Stores the endpoint URI, its parameters and the owning component for later parsing. */
    public void initialize(URI uri, Map<String, Object> parameters, SipComponent component) {
        this.setParameters(parameters);
        this.setComponent(component);
        this.setUri(uri);
    }

    /**
     * Parses the endpoint URI and its query parameters into this configuration,
     * creates the JAIN-SIP factories/headers, and validates mandatory settings.
     * From/To defaults depend on the role: producers take To from the URI
     * authority, consumers take From from it.
     *
     * @throws Exception on an unsupported scheme, unparsable value or failed validation
     */
    public void parseURI() throws Exception {
        protocol = uri.getScheme();

        if ((!protocol.equalsIgnoreCase("sip")) && (!protocol.equalsIgnoreCase("sips"))) {
            throw new IllegalArgumentException("Unrecognized SIP protocol: " + protocol + " for uri: " + uri);
        }

        Map<String, Object> settings = URISupport.parseParameters(uri);

        if (settings.containsKey("stackName")) {
            setStackName((String) settings.get("stackName"));
        }
        if (settings.containsKey("transport")) {
            setTransport((String) settings.get("transport"));
        }
        if (settings.containsKey("maxMessageSize")) {
            setMaxMessageSize(Integer.parseInt((String) settings.get("maxMessageSize")));
        }
        if (settings.containsKey("cacheConnections")) {
            setCacheConnections(Boolean.valueOf((String) settings.get("cacheConnections")));
        }
        if (settings.containsKey("contentType")) {
            setContentType((String) settings.get("contentType"));
        }
        if (settings.containsKey("contentSubType")) {
            setContentSubType((String) settings.get("contentSubType"));
        }
        if (settings.containsKey("maxForwards")) {
            setMaxForwards(Integer.parseInt((String) settings.get("maxForwards")));
        }
        if (settings.containsKey("receiveTimeoutMillis")) {
            setReceiveTimeoutMillis(Long.parseLong((String) settings.get("receiveTimeoutMillis")));
        }
        if (settings.containsKey("eventHeaderName")) {
            setEventHeaderName((String) settings.get("eventHeaderName"));
        }
        if (settings.containsKey("eventId")) {
            setEventId((String) settings.get("eventId"));
        }
        if (settings.containsKey("useRouterForAllUris")) {
            setUseRouterForAllUris(Boolean.valueOf((String) settings.get("useRouterForAllUris")));
        }
        if (settings.containsKey("msgExpiration")) {
            setMsgExpiration(Integer.parseInt((String) settings.get("msgExpiration")));
        }
        if (settings.containsKey("presenceAgent")) {
            setPresenceAgent(Boolean.valueOf((String) settings.get("presenceAgent")));
        }

        if (!consumer) {
            // producer: From comes from query params, To from the URI authority
            if (settings.containsKey("fromUser")) {
                setFromUser((String) settings.get("fromUser"));
            }
            if (settings.containsKey("fromHost")) {
                setFromHost((String) settings.get("fromHost"));
            }
            if (settings.containsKey("fromPort")) {
                setFromPort(Integer.parseInt((String) settings.get("fromPort")));
            }
            setToUser(uri.getUserInfo());
            setToHost(uri.getHost());
            setToPort(uri.getPort());
        } else {
            // consumer: From comes from the URI authority; To only matters
            // when not acting as a presence agent
            setFromUser(uri.getUserInfo());
            setFromHost(uri.getHost());
            setFromPort(uri.getPort());
            if (!presenceAgent) {
                if (settings.containsKey("toUser")) {
                    setToUser((String) settings.get("toUser"));
                }
                if (settings.containsKey("toHost")) {
                    setToHost((String) settings.get("toHost"));
                }
                if (settings.containsKey("toPort")) {
                    setToPort(Integer.parseInt((String) settings.get("toPort")));
                }
            }
        }

        implementationDebugLogFile = component.getAndRemoveParameter(parameters, "implementationDebugLogFile", String.class, null);
        implementationServerLogFile = component.getAndRemoveParameter(parameters, "implementationServerLogFile", String.class, null);
        implementationTraceLevel = component.getAndRemoveParameter(parameters, "implementationTraceLevel", String.class, "0");

        LOG.trace("Consumer:" + consumer + " StackName:" + stackName);
        LOG.trace("From User: " + getFromUser() + " From host: " + getFromHost() + " From Port: " + getFromPort());

        createFactoriesAndHeaders(parameters, component);

        sipUri = component.resolveAndRemoveReferenceParameter(parameters, "sipUri", SipURI.class, null);
        if (sipUri == null) {
            sipUri = addressFactory.createSipURI(getToUser(), getToHost() + ":" + getToPort());
        }

        ObjectHelper.notNull(fromUser, "From User");
        ObjectHelper.notNull(fromHost, "From Host");
        // NOTE(review): fromPort is a primitive int (autoboxed here), so this
        // notNull check can never fail -- a port of 0/-1 passes silently.
        ObjectHelper.notNull(fromPort, "From Port");
        ObjectHelper.notNull(eventHeader, "Event Header");
        ObjectHelper.notNull(eventHeaderName, "Event Header Name");
        ObjectHelper.notNull(eventId, "Event Id");
    }

    /**
     * Creates the JAIN-SIP factories and all request headers. Each header can be
     * overridden by a registry reference parameter; otherwise a default is built
     * from this configuration. callIdHeader and extensionHeader have no default.
     */
    @SuppressWarnings("unchecked")
    private void createFactoriesAndHeaders(Map<String, Object> parameters, SipComponent component) throws Exception {
        headerFactory = sipFactory.createHeaderFactory();
        addressFactory = sipFactory.createAddressFactory();
        setMessageFactory(sipFactory.createMessageFactory());

        fromHeader = component.resolveAndRemoveReferenceParameter(parameters, "fromHeader", FromHeader.class, null);
        if (fromHeader == null) {
            createFromHeader();
        }
        if (!presenceAgent) {
            toHeader = component.resolveAndRemoveReferenceParameter(parameters, "toHeader", ToHeader.class, null);
            if (toHeader == null) {
                createToHeader();
            }
        }
        viaHeaders = component.resolveAndRemoveReferenceParameter(parameters, "viaHeaders", List.class, null);
        if (viaHeaders == null) {
            createViaHeaders();
        }
        contentTypeHeader = component.resolveAndRemoveReferenceParameter(parameters, "contentTypeHeader", ContentTypeHeader.class, null);
        if (contentTypeHeader == null) {
            createContentTypeHeader();
        }
        callIdHeader = component.resolveAndRemoveReferenceParameter(parameters, "callIdHeader", CallIdHeader.class, null);

        maxForwardsHeader = component.resolveAndRemoveReferenceParameter(parameters, "maxForwardsHeader", MaxForwardsHeader.class, null);
        if (maxForwardsHeader == null) {
            createMaxForwardsHeader();
        }

        // Optional Headers
        eventHeader = component.resolveAndRemoveReferenceParameter(parameters, "eventHeader", EventHeader.class, null);
        if (eventHeader == null) {
            createEventHeader();
        }
        contactHeader = component.resolveAndRemoveReferenceParameter(parameters, "contactHeader", ContactHeader.class, null);
        if (contactHeader == null) {
            createContactHeader();
        }
        expiresHeader = component.resolveAndRemoveReferenceParameter(parameters, "expiresHeader", ExpiresHeader.class, null);
        if (expiresHeader == null) {
            createExpiresHeader();
        }
        extensionHeader = component.resolveAndRemoveReferenceParameter(parameters, "extensionHeader", ExtensionHeader.class, null);
    }

    /**
     * Builds a SIP request from this configuration's headers.
     *
     * @param sequenceNumber CSeq sequence number
     * @param requestMethod SIP method (e.g. Request.INVITE)
     * @param body request payload, attached with the configured Content-Type
     * @return the assembled request
     * @throws ParseException if a header value cannot be parsed
     * @throws InvalidArgumentException if a numeric header argument is invalid
     */
    public Request createSipRequest(long sequenceNumber, String requestMethod, Object body) throws ParseException, InvalidArgumentException {
        //SipConfiguration configuration = sipPublisher.getConfiguration();
        CSeqHeader cSeqHeader = getHeaderFactory().createCSeqHeader(sequenceNumber, requestMethod);

        // Create the request.
        Request request = getMessageFactory().createRequest(
            getSipUri(),
            requestMethod,
            getCallIdHeader(),
            cSeqHeader,
            getFromHeader(),
            getToHeader(),
            getViaHeaders(),
            getMaxForwardsHeader());

        // optional headers are only added when configured
        if (getEventHeader() != null) {
            request.addHeader(getEventHeader());
        }
        if (getExpiresHeader() != null) {
            request.addHeader(getExpiresHeader());
        }
        if (getContactHeader() != null) {
            request.addHeader(getContactHeader());
        }
        if (getExtensionHeader() != null) {
            request.addHeader(getExtensionHeader());
        }
        request.setContent(body, getContentTypeHeader());

        return request;
    }

    /** Builds the default From header (tag "&lt;fromUser&gt;_Header") from fromUser/fromHost/fromPort. */
    private void createFromHeader() throws ParseException {
        SipURI fromAddress = getAddressFactory().createSipURI(getFromUser(), getFromHost());
        fromAddress.setPort(Integer.valueOf(getFromPort()).intValue());
        Address fromNameAddress = addressFactory.createAddress(fromAddress);
        fromNameAddress.setDisplayName(getFromUser());

        setFromHeader(headerFactory.createFromHeader(fromNameAddress, getFromUser() + "_Header"));
    }

    /** Builds the default To header (tag "&lt;toUser&gt;_Header") from toUser/toHost/toPort. */
    private void createToHeader() throws ParseException {
        SipURI toAddress = getAddressFactory().createSipURI(getToUser(), getToHost());
        toAddress.setPort(getToPort());
        Address toNameAddress = addressFactory.createAddress(toAddress);
        toNameAddress.setDisplayName(getToUser());

        setToHeader(headerFactory.createToHeader(toNameAddress, getToUser() + "_Header"));
    }

    /** Builds the default Via header list (single entry for the From host/port/transport). */
    private void createViaHeaders() throws ParseException, InvalidArgumentException {
        viaHeaders = new ArrayList<ViaHeader>();
        ViaHeader viaHeader = headerFactory.createViaHeader(getFromHost(), getFromPort(),
                getTransport(), null);

        viaHeaders.add(viaHeader);
    }

    /** Builds the default Content-Type header from contentType/contentSubType. */
    private void createContentTypeHeader() throws ParseException {
        setContentTypeHeader(headerFactory.createContentTypeHeader(getContentType(), getContentSubType()));
    }

    /** Builds the default Max-Forwards header from maxForwards. */
    private void createMaxForwardsHeader() throws ParseException, InvalidArgumentException {
        setMaxForwardsHeader(headerFactory.createMaxForwardsHeader(getMaxForwards()));
    }

    /** Builds the default Event header from eventHeaderName/eventId. */
    private void createEventHeader() throws ParseException {
        eventHeader = getHeaderFactory().createEventHeader(getEventHeaderName());
        eventHeader.setEventId(getEventId());
    }

    /** Builds the default Contact header from the From user/host/port and transport. */
    private void createContactHeader() throws ParseException {
        SipURI contactURI = addressFactory.createSipURI(getFromUser(), getFromHost());
        contactURI.setTransportParam(getTransport());
        contactURI.setPort(Integer.valueOf(getFromPort()).intValue());
        Address contactAddress = addressFactory.createAddress(contactURI);

        // Add the contact address.
        contactAddress.setDisplayName(getFromUser());

        contactHeader = headerFactory.createContactHeader(contactAddress);
    }

    /** Builds the default Expires header from msgExpiration. */
    private void createExpiresHeader() throws ParseException, InvalidArgumentException {
        expiresHeader = getHeaderFactory().createExpiresHeader(getMsgExpiration());
    }

    /**
     * Builds the property set used to create the SipStack. The gov.nist debug,
     * server-log and trace-level properties are only set when BOTH log files
     * are configured.
     */
    Properties createInitialProperties() {
        Properties properties = new Properties();
        properties.setProperty("javax.sip.STACK_NAME", getStackName());
        properties.setProperty("gov.nist.javax.sip.MAX_MESSAGE_SIZE", "" + getMaxMessageSize());
        properties.setProperty("gov.nist.javax.sip.CACHE_CLIENT_CONNECTIONS", "" + isCacheConnections());
        properties.setProperty("javax.sip.USE_ROUTER_FOR_ALL_URIS", "" + isUseRouterForAllUris());
        if ((implementationDebugLogFile != null) && (implementationServerLogFile != null)) {
            properties.setProperty("gov.nist.javax.sip.DEBUG_LOG", implementationDebugLogFile);
            properties.setProperty("gov.nist.javax.sip.SERVER_LOG", implementationServerLogFile);
            properties.setProperty("gov.nist.javax.sip.TRACE_LEVEL", implementationTraceLevel);
        }

        return properties;
    }

    public AddressFactory getAddressFactory() {
        return addressFactory;
    }

    /**
     * To use a custom AddressFactory
     */
    public void setAddressFactory(AddressFactory addressFactory) {
        this.addressFactory = addressFactory;
    }

    public MessageFactory getMessageFactory() {
        return messageFactory;
    }

    /**
     * To use a custom MessageFactory
     */
    public void setMessageFactory(MessageFactory messageFactory) {
        this.messageFactory = messageFactory;
    }

    public HeaderFactory getHeaderFactory() {
        return headerFactory;
    }

    /**
     * To use a custom HeaderFactory
     */
    public void setHeaderFactory(HeaderFactory headerFactory) {
        this.headerFactory = headerFactory;
    }

    public SipStack getSipStack() {
        return sipStack;
    }

    /**
     * To use a custom SipStack
     */
    public void setSipStack(SipStack sipStack) {
        this.sipStack = sipStack;
    }

    public SipURI getSipUri() {
        return sipUri;
    }

    /**
     * To use a custom SipURI. If none configured, then the SipUri fallback to use the options toUser toHost:toPort
     */
    public void setSipUri(SipURI sipUri) {
        this.sipUri = sipUri;
    }

    public String getStackName() {
        return stackName;
    }

    /**
     * Name of the SIP Stack instance associated with an SIP Endpoint.
     */
    public void setStackName(String stackName) {
        this.stackName = stackName;
    }

    public String getTransport() {
        return transport;
    }

    /**
     * Setting for choice of transport protocol. Valid choices are "tcp" or "udp".
     */
    public void setTransport(String transport) {
        this.transport = transport;
    }

    public int getMaxMessageSize() {
        return maxMessageSize;
    }

    /**
     * Setting for maximum allowed Message size in bytes.
     */
    public void setMaxMessageSize(int maxMessageSize) {
        this.maxMessageSize = maxMessageSize;
    }

    public boolean isCacheConnections() {
        return cacheConnections;
    }

    /**
     * Should connections be cached by the SipStack to reduce cost of connection creation. This is useful if the connection is used for long running conversations.
     */
    public void setCacheConnections(boolean cacheConnections) {
        this.cacheConnections = cacheConnections;
    }

    public ListeningPoint getListeningPoint() {
        return listeningPoint;
    }

    /**
     * To use a custom ListeningPoint implementation
     */
    public void setListeningPoint(ListeningPoint listeningPoint) {
        this.listeningPoint = listeningPoint;
    }

    /**
     * Setting for contentType can be set to any valid MimeType.
     */
    public void setContentType(String contentType) {
        this.contentType = contentType;
    }

    public String getContentType() {
        return contentType;
    }

    /**
     * Setting for contentSubType can be set to any valid MimeSubType.
     */
    public void setContentSubType(String contentSubType) {
        this.contentSubType = contentSubType;
    }

    public String getContentSubType() {
        return contentSubType;
    }

    /**
     * Number of maximum proxy forwards
     */
    public void setMaxForwards(int maxForwards) {
        this.maxForwards = maxForwards;
    }

    public int getMaxForwards() {
        return maxForwards;
    }

    /**
     * Setting for specifying amount of time to wait for a Response and/or Acknowledgement can be received from another SIP stack
     */
    public void setReceiveTimeoutMillis(long receiveTimeoutMillis) {
        this.receiveTimeoutMillis = receiveTimeoutMillis;
    }

    public long getReceiveTimeoutMillis() {
        return receiveTimeoutMillis;
    }

    public void setParameters(Map<String, Object> parameters) {
        this.parameters = parameters;
    }

    public Map<String, Object> getParameters() {
        return parameters;
    }

    public void setComponent(SipComponent component) {
        this.component = component;
    }

    public SipComponent getComponent() {
        return component;
    }

    public String getImplementationServerLogFile() {
        return implementationServerLogFile;
    }

    /**
     * Name of server log file to use for logging
     */
    public void setImplementationServerLogFile(String implementationServerLogFile) {
        this.implementationServerLogFile = implementationServerLogFile;
    }

    public String getImplementationDebugLogFile() {
        return implementationDebugLogFile;
    }

    /**
     * Name of client debug log file to use for logging
     */
    public void setImplementationDebugLogFile(String implementationDebugLogFile) {
        this.implementationDebugLogFile = implementationDebugLogFile;
    }

    public String getImplementationTraceLevel() {
        return implementationTraceLevel;
    }

    /**
     * Logging level for tracing
     */
    public void setImplementationTraceLevel(String implementationTraceLevel) {
        this.implementationTraceLevel = implementationTraceLevel;
    }

    public SipFactory getSipFactory() {
        return sipFactory;
    }

    /**
     * To use a custom SipFactory to create the SipStack to be used
     */
    public void setSipFactory(SipFactory sipFactory) {
        this.sipFactory = sipFactory;
    }

    public String getFromUser() {
        return fromUser;
    }

    /**
     * Username of the message originator. Mandatory setting unless a registry based custom FromHeader is specified.
     */
    public void setFromUser(String fromUser) {
        this.fromUser = fromUser;
    }

    public String getFromHost() {
        return fromHost;
    }

    /**
     * Hostname of the message originator. Mandatory setting unless a registry based FromHeader is specified
     */
    public void setFromHost(String fromHost) {
        this.fromHost = fromHost;
    }

    public int getFromPort() {
        return fromPort;
    }

    /**
     * Port of the message originator. Mandatory setting unless a registry based FromHeader is specified
     */
    public void setFromPort(int fromPort) {
        this.fromPort = fromPort;
    }

    public String getToUser() {
        return toUser;
    }

    /**
     * Username of the message receiver. Mandatory setting unless a registry based custom ToHeader is specified.
     */
    public void setToUser(String toUser) {
        this.toUser = toUser;
    }

    public String getToHost() {
        return toHost;
    }

    /**
     * Hostname of the message receiver. Mandatory setting unless a registry based ToHeader is specified
     */
    public void setToHost(String toHost) {
        this.toHost = toHost;
    }

    public int getToPort() {
        return toPort;
    }

    /**
     * Port of the message receiver. Mandatory setting unless a registry based ToHeader is specified
     */
    public void setToPort(int toPort) {
        this.toPort = toPort;
    }

    public FromHeader getFromHeader() {
        return fromHeader;
    }

    /**
     * A custom Header object containing message originator settings. Must implement the type javax.sip.header.FromHeader
     */
    public void setFromHeader(FromHeader fromHeader) {
        this.fromHeader = fromHeader;
    }

    public ToHeader getToHeader() {
        return toHeader;
    }

    /**
     * A custom Header object containing message receiver settings. Must implement the type javax.sip.header.ToHeader
     */
    public void setToHeader(ToHeader toHeader) {
        this.toHeader = toHeader;
    }

    public List<ViaHeader> getViaHeaders() {
        return viaHeaders;
    }

    /**
     * List of custom Header objects of the type javax.sip.header.ViaHeader.
* Each ViaHeader containing a proxy address for request forwarding. (Note this header is automatically updated by each proxy when the request arrives at its listener) */ public void setViaHeaders(List<ViaHeader> viaHeaders) { this.viaHeaders = viaHeaders; } public ContentTypeHeader getContentTypeHeader() { return contentTypeHeader; } /** * A custom Header object containing message content details. Must implement the type javax.sip.header.ContentTypeHeader */ public void setContentTypeHeader(ContentTypeHeader contentTypeHeader) { this.contentTypeHeader = contentTypeHeader; } public CallIdHeader getCallIdHeader() { return callIdHeader; } /** * A custom Header object containing call details. Must implement the type javax.sip.header.CallIdHeader */ public void setCallIdHeader(CallIdHeader callIdHeader) { this.callIdHeader = callIdHeader; } public MaxForwardsHeader getMaxForwardsHeader() { return maxForwardsHeader; } /** * A custom Header object containing details on maximum proxy forwards. * This header places a limit on the viaHeaders possible. Must implement the type javax.sip.header.MaxForwardsHeader */ public void setMaxForwardsHeader(MaxForwardsHeader maxForwardsHeader) { this.maxForwardsHeader = maxForwardsHeader; } public ContactHeader getContactHeader() { return contactHeader; } /** * An optional custom Header object containing verbose contact details (email, phone number etc). Must implement the type javax.sip.header.ContactHeader */ public void setContactHeader(ContactHeader contactHeader) { this.contactHeader = contactHeader; } public ExtensionHeader getExtensionHeader() { return extensionHeader; } /** * A custom Header object containing user/application specific details. 
Must implement the type javax.sip.header.ExtensionHeader */ public void setExtensionHeader(ExtensionHeader extensionHeader) { this.extensionHeader = extensionHeader; } /** * URI of the SIP server to connect to (the username and password can be included such as: john:secret@myserver:9999) */ public void setUri(URI uri) { this.uri = uri; } public URI getUri() { return uri; } /** * This setting is used to determine whether the kind of header (FromHeader,ToHeader etc) that needs to be created for this endpoint */ public void setConsumer(boolean consumer) { this.consumer = consumer; } public boolean isConsumer() { return consumer; } /** * A custom Header object containing event details. Must implement the type javax.sip.header.EventHeader */ public void setEventHeader(EventHeader eventHeader) { this.eventHeader = eventHeader; } public EventHeader getEventHeader() { return eventHeader; } /** * Setting for a String based event type. */ public void setEventHeaderName(String eventHeaderName) { this.eventHeaderName = eventHeaderName; } public String getEventHeaderName() { return eventHeaderName; } /** * Setting for a String based event Id. Mandatory setting unless a registry based FromHeader is specified */ public void setEventId(String eventId) { this.eventId = eventId; } public String getEventId() { return eventId; } /** * This setting is used when requests are sent to the Presence Agent via a proxy. */ public void setUseRouterForAllUris(boolean useRouterForAllUris) { this.useRouterForAllUris = useRouterForAllUris; } public boolean isUseRouterForAllUris() { return useRouterForAllUris; } public int getMsgExpiration() { return msgExpiration; } /** * The amount of time a message received at an endpoint is considered valid */ public void setMsgExpiration(int msgExpiration) { this.msgExpiration = msgExpiration; } public ExpiresHeader getExpiresHeader() { return expiresHeader; } /** * A custom Header object containing message expiration details. 
Must implement the type javax.sip.header.ExpiresHeader */ public void setExpiresHeader(ExpiresHeader expiresHeader) { this.expiresHeader = expiresHeader; } public boolean isPresenceAgent() { return presenceAgent; } /** * This setting is used to distinguish between a Presence Agent & a consumer. * This is due to the fact that the SIP Camel component ships with a basic Presence Agent (for testing purposes only). Consumers have to set this flag to true. */ public void setPresenceAgent(boolean presenceAgent) { this.presenceAgent = presenceAgent; } }
/** * Copyright 2005-2016 Red Hat, Inc. * * Red Hat licenses this file to you under the Apache License, version * 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. */ package io.fabric8.maven; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileReader; import java.io.IOException; import java.io.InputStream; import java.io.Reader; import java.lang.reflect.Field; import java.net.URL; import java.net.URLConnection; import java.net.URLStreamHandler; import java.net.URLStreamHandlerFactory; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.EnumSet; import java.util.Enumeration; import java.util.HashMap; import java.util.Hashtable; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.TreeSet; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.regex.Pattern; import io.fabric8.agent.download.DownloadManager; import io.fabric8.agent.download.DownloadManagers; import io.fabric8.agent.model.BundleInfo; import io.fabric8.agent.model.ConfigFile; import io.fabric8.agent.model.Feature; import io.fabric8.agent.model.Repository; import io.fabric8.agent.service.Agent; import io.fabric8.agent.service.MetadataBuilder; import io.fabric8.common.util.MultiException; import org.apache.felix.utils.version.VersionRange; import org.apache.karaf.deployer.blueprint.BlueprintTransformer; import 
org.apache.maven.artifact.Artifact;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.plugin.descriptor.PluginDescriptor;
import org.apache.maven.plugins.annotations.Component;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;
import org.eclipse.aether.RepositorySystem;
import org.osgi.framework.Constants;
import org.osgi.framework.ServiceReference;
import org.osgi.service.url.URLStreamHandlerService;

import static io.fabric8.agent.DeploymentAgent.getMetadata;
import static io.fabric8.agent.DeploymentAgent.getPrefixedProperties;
import static io.fabric8.agent.utils.AgentUtils.downloadRepositories;

/**
 * Verifies that the features listed in the given feature repository descriptors
 * can be resolved against a simulated Karaf system bundle, without actually
 * provisioning anything.
 */
@Mojo(name = "verify-features")
public class VerifyFeatureResolutionMojo extends AbstractMojo {

    /** URIs of the feature repository descriptors to load. */
    @Parameter(property = "descriptors")
    private Set<String> descriptors;

    /** Glob patterns of feature ids (name or name/version) to verify; all features when empty. */
    @Parameter(property = "features")
    private Set<String> features;

    /** Framework feature names registered under "feature.framework.*" for the resolver. */
    @Parameter(property = "framework")
    private Set<String> framework;

    /** Coordinates of the Karaf distribution whose config.properties defines the system bundle. */
    @Parameter(property = "distribution", defaultValue = "org.apache.karaf:apache-karaf")
    private String distribution;

    /** Java specification version to resolve against; defaults to the running JVM's. */
    @Parameter(property = "javase")
    private String javase;

    /** Directory inside the distribution archive; derived from the artifact when unset. */
    @Parameter(property = "dist-dir")
    private String distDir;

    /** Optional properties file with extra resolver metadata. */
    @Parameter(property = "additional-metadata")
    private File additionalMetadata;

    /** "first" fails on the first unresolvable feature, "end" collects all failures. */
    @Parameter(property = "fail")
    private String fail = "end";

    /** When true, also verify features pulled in transitively by the descriptors. */
    @Parameter(property = "verify-transitive")
    private boolean verifyTransitive = false;

    @Component
    protected PluginDescriptor pluginDescriptor;

    @Component
    protected RepositorySystem repositorySystem;

    @Override
    public void execute() throws MojoExecutionException, MojoFailureException {
        // Reset the JVM-wide URL stream handler factory so ours can be installed:
        // URL.setURLStreamHandlerFactory may only be called once per JVM.
        try {
            Field field = URL.class.getDeclaredField("factory");
            field.setAccessible(true);
            field.set(null, null);
        } catch (Exception e) {
            // was: e.printStackTrace() -- report through the Maven plugin log instead
            getLog().warn("Unable to reset the URL stream handler factory", e);
        }
        URL.setURLStreamHandlerFactory(new CustomBundleURLStreamHandlerFactory());

        System.setProperty("karaf.home", "target/karaf");
        System.setProperty("karaf.data", "target/karaf/data");

        ScheduledExecutorService executor = Executors.newScheduledThreadPool(8);

        Hashtable<String, String> properties = new Hashtable<>();
        if (additionalMetadata != null) {
            // NOTE(review): FileReader uses the platform default charset -- confirm
            // the metadata file is always written in that encoding.
            try (Reader reader = new FileReader(additionalMetadata)) {
                Properties metadata = new Properties();
                metadata.load(reader);
                for (Enumeration<?> e = metadata.propertyNames(); e.hasMoreElements(); ) {
                    Object key = e.nextElement();
                    Object val = metadata.get(key);
                    properties.put(key.toString(), val.toString());
                }
            } catch (IOException e) {
                throw new MojoExecutionException("Unable to load additional metadata from " + additionalMetadata, e);
            }
        }

        DownloadManager manager;
        MavenResolver resolver;
        final Map<String, Repository> repositories;
        Map<String, Feature[]> repos = new HashMap<>();
        Map<String, Feature> allFeatures = new HashMap<>();
        try {
            resolver = MavenResolvers.createMavenResolver(null, properties, "org.ops4j.pax.url.mvn", repositorySystem);
            manager = DownloadManagers.createDownloadManager(resolver, executor);
            repositories = downloadRepositories(manager, descriptors).call();
            for (String repoUri : repositories.keySet()) {
                // was: local named "features", shadowing the Set<String> features field
                Feature[] repoFeatures = repositories.get(repoUri).getFeatures();
                // Hack: rewrite bundle "file:" locations so they point at the
                // feature's inlined configuration file urls instead.
                for (Feature feature : repoFeatures) {
                    for (BundleInfo bi : feature.getBundles()) {
                        String loc = bi.getLocation();
                        String nloc = null;
                        if (loc.contains("file:")) {
                            for (ConfigFile cfi : feature.getConfigurationFiles()) {
                                if (cfi.getFinalname().substring(1)
                                        .equals(loc.substring(loc.indexOf("file:") + "file:".length()))) {
                                    nloc = cfi.getLocation();
                                }
                            }
                        }
                        if (nloc != null) {
                            bi.setLocation(loc.substring(0, loc.indexOf("file:")) + nloc);
                        }
                    }
                    allFeatures.put(feature.getId(), feature);
                }
                repos.put(repoUri, repoFeatures);
            }
        } catch (Exception e) {
            throw new MojoExecutionException("Unable to load features descriptors", e);
        }

        List<Feature> featuresToTest = new ArrayList<>();
        if (verifyTransitive) {
            for (Feature[] repoFeatures : repos.values()) {
                featuresToTest.addAll(Arrays.asList(repoFeatures));
            }
        } else {
            for (String uri : descriptors) {
                Feature[] repoFeatures = repos.get(uri);
                if (repoFeatures == null) {
                    // was: NullPointerException when a descriptor did not resolve to a repository
                    throw new MojoExecutionException("No features repository found for descriptor " + uri);
                }
                featuresToTest.addAll(Arrays.asList(repoFeatures));
            }
        }
        if (features != null && !features.isEmpty()) {
            // Build a single regex that matches any requested feature id in
            // "name/version" form; "." is escaped and "*" becomes a wildcard.
            StringBuilder sb = new StringBuilder();
            for (String feature : features) {
                if (sb.length() > 0) {
                    sb.append("|");
                }
                String p = feature.replaceAll("\\.", "\\\\.").replaceAll("\\*", ".*");
                sb.append(p);
                if (!feature.contains("/")) {
                    sb.append("/.*");
                }
            }
            Pattern pattern = Pattern.compile(sb.toString());
            for (Iterator<Feature> iterator = featuresToTest.iterator(); iterator.hasNext();) {
                Feature feature = iterator.next();
                String id = feature.getName() + "/" + feature.getVersion();
                if (!pattern.matcher(id).matches()) {
                    iterator.remove();
                }
            }
        }

        if (framework != null) {
            // was: NullPointerException when the "framework" parameter was not configured
            for (String fmk : framework) {
                properties.put("feature.framework." + fmk, fmk);
            }
        }

        List<Throwable> failures = new ArrayList<>();
        for (Feature feature : featuresToTest) {
            try {
                String id = feature.getName() + "/" + feature.getVersion();
                // Fresh download manager per feature so providers reflect only this run.
                manager = DownloadManagers.createDownloadManager(resolver, executor);
                verifyResolution(manager, allFeatures, id, properties);
                getLog().info("Verification of feature " + id + " succeeded");
            } catch (Exception e) {
                getLog().warn(e.getMessage());
                failures.add(e);
                if ("first".equals(fail)) {
                    throw e;
                }
            }
        }
        if ("end".equals(fail) && !failures.isEmpty()) {
            throw new MojoExecutionException("Verification failures", new MultiException("Verification failures", failures));
        }
    }

    /**
     * Simulates provisioning of a single feature against a fake system bundle.
     *
     * @throws MojoExecutionException when resolution fails, including the set of
     *                                resources the download manager knew about
     */
    private void verifyResolution(DownloadManager manager, Map<String, Feature> allFeatures, String feature, Hashtable<String, String> properties) throws MojoExecutionException {
        try {
            properties.put("feature.totest", feature);
            FakeSystemBundle systemBundle = getSystemBundleResource(getMetadata(properties, "metadata#"));
            FakeServiceReference profileHandlerSR = new FakeServiceReference(URLStreamHandlerService.class.getName(), "(url.handler.protocol=profile)");
            systemBundle.setServiceReferences(URLStreamHandlerService.class.getName(), null, new ServiceReference[] { profileHandlerSR });
            systemBundle.setService(profileHandlerSR, new Object());

            Agent agent = new Agent(null, systemBundle, manager);
            // Simulate + Silent: resolve only, never install, and keep the agent quiet.
            agent.setOptions(EnumSet.of(
                    io.fabric8.agent.service.Constants.Option.Simulate,
                    io.fabric8.agent.service.Constants.Option.Silent
            ));
            try {
                agent.provision(
                        allFeatures,
                        getPrefixedProperties(properties, "feature."),
                        getPrefixedProperties(properties, "bundle."),
                        getPrefixedProperties(properties, "req."),
                        getPrefixedProperties(properties, "override."),
                        getPrefixedProperties(properties, "optional."),
                        getMetadata(properties, "metadata#")
                );
            } catch (Exception e) {
                Set<String> resources = new TreeSet<>(manager.getProviders().keySet());
                throw new MojoExecutionException("Feature resolution failed for " + feature
                        + "\nMessage: " + e.toString()
                        + "\nResources: " + toString(resources), e);
            }
        } catch (MojoExecutionException e) {
            throw e;
        } catch (Exception e) {
            throw new MojoExecutionException("Error verifying feature " + feature + "\nMessage: " + e.getMessage(), e);
        }
    }

    /** Renders a collection as a brace-wrapped, one-entry-per-line listing. */
    private String toString(Collection<String> collection) {
        StringBuilder sb = new StringBuilder();
        sb.append("{\n");
        for (String s : collection) {
            sb.append("\t").append(s).append("\n");
        }
        sb.append("}");
        return sb.toString();
    }

    /**
     * Builds a fake "system bundle" whose headers mirror the system packages and
     * capabilities declared by the configured Karaf distribution.
     */
    private FakeSystemBundle getSystemBundleResource(Map<String, Map<VersionRange, Map<String, String>>> metadata) throws Exception {
        Artifact karafDistro = pluginDescriptor.getArtifactMap().get(distribution);
        String dir = distDir;
        if (dir == null) {
            dir = karafDistro.getArtifactId() + "-" + karafDistro.getBaseVersion();
        }
        URL configPropURL = new URL("jar:file:" + karafDistro.getFile() + "!/" + dir + "/etc/config.properties");
        org.apache.felix.utils.properties.Properties configProps = PropertiesLoader.loadPropertiesFile(configPropURL, true);

        if (javase == null) {
            configProps.put("java.specification.version", System.getProperty("java.specification.version"));
        } else {
            configProps.put("java.specification.version", javase);
        }
        configProps.substitute();

        Hashtable<String, String> headers = new Hashtable<>();
        headers.put(Constants.BUNDLE_MANIFESTVERSION, "2");
        headers.put(Constants.BUNDLE_SYMBOLICNAME, "system-bundle");
        headers.put(Constants.BUNDLE_VERSION, "0.0.0");

        String exportPackages = configProps.getProperty("org.osgi.framework.system.packages");
        if (configProps.containsKey("org.osgi.framework.system.packages.extra")) {
            if (!"".equals(configProps.getProperty("org.osgi.framework.system.packages.extra").trim())) {
                exportPackages += "," + configProps.getProperty("org.osgi.framework.system.packages.extra");
            }
        }
        headers.put(Constants.EXPORT_PACKAGE, exportPackages);

        String systemCaps = configProps.getProperty("org.osgi.framework.system.capabilities");
        headers.put(Constants.PROVIDE_CAPABILITY, systemCaps);

        new MetadataBuilder(metadata).overrideHeaders(headers);
        return new FakeSystemBundle(headers);
    }

    /**
     * URL handlers for the "wrap:", "blueprint:" and "war:" protocols so feature
     * bundle locations can be opened outside an OSGi framework.
     */
    public static class CustomBundleURLStreamHandlerFactory implements URLStreamHandlerFactory {

        public URLStreamHandler createURLStreamHandler(String protocol) {
            if (protocol.equals("wrap")) {
                return new URLStreamHandler() {
                    @Override
                    protected URLConnection openConnection(URL url) throws IOException {
                        return new URLConnection(url) {
                            @Override
                            public void connect() throws IOException {
                            }

                            @Override
                            public InputStream getInputStream() throws IOException {
                                WrapUrlParser parser = new WrapUrlParser(url.getPath());
                                // BndUtils is not thread-safe; serialize bundle creation.
                                synchronized (CustomBundleURLStreamHandlerFactory.class) {
                                    return org.ops4j.pax.swissbox.bnd.BndUtils.createBundle(
                                            parser.getWrappedJarURL().openStream(),
                                            parser.getWrappingProperties(),
                                            url.toExternalForm(),
                                            parser.getOverwriteMode()
                                    );
                                }
                            }
                        };
                    }
                };
            } else if (protocol.equals("blueprint")) {
                return new URLStreamHandler() {
                    @Override
                    protected URLConnection openConnection(URL url) throws IOException {
                        return new URLConnection(url) {
                            @Override
                            public void connect() throws IOException {
                            }

                            @Override
                            public InputStream getInputStream() throws IOException {
                                try {
                                    ByteArrayOutputStream os = new ByteArrayOutputStream();
                                    BlueprintTransformer.transform(new URL(url.getPath()), os);
                                    os.close();
                                    return new ByteArrayInputStream(os.toByteArray());
                                } catch (Exception e) {
                                    throw (IOException) new IOException("Error opening blueprint xml url").initCause(e);
                                }
                            }
                        };
                    }
                };
            } else if (protocol.equals("war")) {
                return new org.ops4j.pax.url.war.Handler();
            } else {
                // Unknown protocol: defer to the JVM's built-in handlers.
                return null;
            }
        }
    }
}
package nl.esciencecenter.neon.util; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /* Copyright 2013 Netherlands eScience Center * * Licensed under the Apache License, Version 2.0 (the "License") * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * A singleton pattern generic Settings File reader for use in OpenGL * applications. Pair this with a settings.properties file in your project root. * * @author Maarten van Meersbergen <m.van.meersbergen@esciencecenter.nl> * @see TypedProperties */ public class Settings { private static final Logger logger = LoggerFactory.getLogger(Settings.class); private boolean stereoRendering = true; private boolean stereoSwitched = true; private float stereoOcularDistanceMin = 0f; private float stereoOcularDistanceDef = .2f; private float stereoOcularDistanceMax = 1f; // Size settings for default startup and screenshots private int defaultScreenWidth = 1920; private int defaultScreenHeight = 720; // Settings for the initial view private int initialSimulationFrame = 0; private float initialRotationX = 17f; private float initialRotationY = -25f; private float initialZoom = -390.0f; // Setting per movie frame private boolean movieRotate = true; private float movieRotationSpeedMin = -1f; private float movieRotationSpeedMax = 1f; private float movieRotationSpeedDef = -0.25f; // Settings for the gas cloud octree private int maxOctreeDepth = 25; private float octreeEdges = 800f; // Settings that should never change, but are listed here to make sure they // can be found if necessary private int 
maxExpectedModels = 1000; private String screenshotPath = System.getProperty("user.dir") + System.getProperty("path.separator"); private String[] acceptableNetcdfExtenstions = { ".nc" }; private String currentNetcdfExtenstion = "nc"; private boolean touchConnectionEnabled = false; private int interfaceWidth = 240; private int interfaceHeight = 720; private static class SingletonHolder { public final static Settings instance = new Settings(); } public static Settings getInstance() { return SingletonHolder.instance; } public Settings() { try { TypedProperties props = new TypedProperties(); props.loadFromClassPath("settings.properties"); stereoRendering = props.getBooleanProperty("STEREO_RENDERING"); stereoSwitched = props.getBooleanProperty("STEREO_SWITCHED"); stereoOcularDistanceMin = props.getFloatProperty("STEREO_OCULAR_DISTANCE_MIN"); stereoOcularDistanceMax = props.getFloatProperty("STEREO_OCULAR_DISTANCE_MAX"); stereoOcularDistanceDef = props.getFloatProperty("STEREO_OCULAR_DISTANCE_DEF"); // Size settings for default startup and screenshots defaultScreenWidth = props.getIntProperty("DEFAULT_SCREEN_WIDTH"); defaultScreenHeight = props.getIntProperty("DEFAULT_SCREEN_HEIGHT"); interfaceWidth = props.getIntProperty("INTERFACE_WIDTH"); interfaceHeight = props.getIntProperty("INTERFACE_HEIGHT"); // Settings for the initial view initialSimulationFrame = props.getIntProperty("INITIAL_SIMULATION_FRAME"); initialRotationX = props.getFloatProperty("INITIAL_ROTATION_X"); initialRotationY = props.getFloatProperty("INITIAL_ROTATION_Y"); initialZoom = props.getFloatProperty("INITIAL_ZOOM"); screenshotPath = props.getProperty("SCREENSHOT_PATH"); } catch (NumberFormatException e) { logger.warn(e.getMessage()); } } public boolean getStereo() { return stereoRendering; } public void setStereo(int stateChange) { if (stateChange == 1) { stereoRendering = true; } if (stateChange == 2) { stereoRendering = false; } } public boolean getStereoSwitched() { return stereoSwitched; } public 
void setStereoSwitched(int stateChange) { if (stateChange == 1) { stereoSwitched = true; } if (stateChange == 2) { stereoSwitched = false; } } public float getStereoOcularDistanceMin() { return stereoOcularDistanceMin; } public float getStereoOcularDistanceMax() { return stereoOcularDistanceMax; } public float getStereoOcularDistance() { return stereoOcularDistanceDef; } public void setStereoOcularDistance(float value) { stereoOcularDistanceDef = value; } public int getDefaultScreenWidth() { return defaultScreenWidth; } public int getDefaultScreenHeight() { return defaultScreenHeight; } public int getMaxOctreeDepth() { return maxOctreeDepth; } public float getOctreeEdges() { return octreeEdges; } public int getMaxExpectedModels() { return maxExpectedModels; } public float getInitialRotationX() { return initialRotationX; } public float getInitialRotationY() { return initialRotationY; } public float getInitialZoom() { return initialZoom; } public void setMovieRotate(int stateChange) { if (stateChange == 1) { movieRotate = true; } if (stateChange == 2) { movieRotate = false; } } public boolean getMovieRotate() { return movieRotate; } public void setMovieRotationSpeed(float value) { movieRotationSpeedDef = value; } public float getMovieRotationSpeedMin() { return movieRotationSpeedMin; } public float getMovieRotationSpeedMax() { return movieRotationSpeedMax; } public float getMovieRotationSpeedDef() { return movieRotationSpeedDef; } public int getInitialSimulationFrame() { return initialSimulationFrame; } public void setInitialSimulationFrame(int initialSimulationFrame) { this.initialSimulationFrame = initialSimulationFrame; } public void setInitialRotationX(float initialRotationX) { this.initialRotationX = initialRotationX; } public void setInitialRotationY(float initialRotationY) { this.initialRotationY = initialRotationY; } public String getScreenshotPath() { return screenshotPath; } public void setScreenshotPath(String newPath) { screenshotPath = newPath; } public 
String getCurrentNetCDFExtension() { return currentNetcdfExtenstion; } public String[] getAcceptableNetCDFExtensions() { return acceptableNetcdfExtenstions.clone(); } public boolean isTouchConnected() { return touchConnectionEnabled; } public int getInterfaceWidth() { return interfaceWidth; } public int getInterfaceHeight() { return interfaceHeight; } /** * Getter for stereoRendering. * * @return the stereoRendering. */ public boolean isStereoRendering() { return stereoRendering; } /** * Setter for stereoRendering. * * @param stereoRendering * the stereoRendering to set */ public void setStereoRendering(boolean stereoRendering) { this.stereoRendering = stereoRendering; } /** * Getter for stereoOcularDistanceDef. * * @return the stereoOcularDistanceDef. */ public float getStereoOcularDistanceDef() { return stereoOcularDistanceDef; } /** * Setter for stereoOcularDistanceDef. * * @param stereoOcularDistanceDef * the stereoOcularDistanceDef to set */ public void setStereoOcularDistanceDef(float stereoOcularDistanceDef) { this.stereoOcularDistanceDef = stereoOcularDistanceDef; } /** * Getter for acceptableNetcdfExtenstions. * * @return the acceptableNetcdfExtenstions. */ public String[] getAcceptableNetcdfExtenstions() { return acceptableNetcdfExtenstions.clone(); } /** * Setter for acceptableNetcdfExtenstions. * * @param acceptableNetcdfExtenstions * the acceptableNetcdfExtenstions to set */ public void setAcceptableNetcdfExtenstions(String[] acceptableNetcdfExtenstions) { this.acceptableNetcdfExtenstions = acceptableNetcdfExtenstions.clone(); } /** * Getter for currentNetcdfExtenstion. * * @return the currentNetcdfExtenstion. */ public String getCurrentNetcdfExtenstion() { return currentNetcdfExtenstion; } /** * Setter for currentNetcdfExtenstion. 
* * @param currentNetcdfExtenstion * the currentNetcdfExtenstion to set */ public void setCurrentNetcdfExtenstion(String currentNetcdfExtenstion) { this.currentNetcdfExtenstion = currentNetcdfExtenstion; } /** * Getter for touchConnectionEnabled. * * @return the touchConnectionEnabled. */ public boolean isTouchConnectionEnabled() { return touchConnectionEnabled; } /** * Setter for touchConnectionEnabled. * * @param touchConnectionEnabled * the touchConnectionEnabled to set */ public void setTouchConnectionEnabled(boolean touchConnectionEnabled) { this.touchConnectionEnabled = touchConnectionEnabled; } /** * Setter for stereoSwitched. * * @param stereoSwitched * the stereoSwitched to set */ public void setStereoSwitched(boolean stereoSwitched) { this.stereoSwitched = stereoSwitched; } /** * Setter for stereoOcularDistanceMin. * * @param stereoOcularDistanceMin * the stereoOcularDistanceMin to set */ public void setStereoOcularDistanceMin(float stereoOcularDistanceMin) { this.stereoOcularDistanceMin = stereoOcularDistanceMin; } /** * Setter for stereoOcularDistanceMax. * * @param stereoOcularDistanceMax * the stereoOcularDistanceMax to set */ public void setStereoOcularDistanceMax(float stereoOcularDistanceMax) { this.stereoOcularDistanceMax = stereoOcularDistanceMax; } /** * Setter for defaultScreenWidth. * * @param defaultScreenWidth * the defaultScreenWidth to set */ public void setDefaultScreenWidth(int defaultScreenWidth) { this.defaultScreenWidth = defaultScreenWidth; } /** * Setter for defaultScreenHeight. * * @param defaultScreenHeight * the defaultScreenHeight to set */ public void setDefaultScreenHeight(int defaultScreenHeight) { this.defaultScreenHeight = defaultScreenHeight; } /** * Setter for initialZoom. * * @param initialZoom * the initialZoom to set */ public void setInitialZoom(float initialZoom) { this.initialZoom = initialZoom; } /** * Setter for movieRotate. 
* * @param movieRotate * the movieRotate to set */ public void setMovieRotate(boolean movieRotate) { this.movieRotate = movieRotate; } /** * Setter for movieRotationSpeedMin. * * @param movieRotationSpeedMin * the movieRotationSpeedMin to set */ public void setMovieRotationSpeedMin(float movieRotationSpeedMin) { this.movieRotationSpeedMin = movieRotationSpeedMin; } /** * Setter for movieRotationSpeedMax. * * @param movieRotationSpeedMax * the movieRotationSpeedMax to set */ public void setMovieRotationSpeedMax(float movieRotationSpeedMax) { this.movieRotationSpeedMax = movieRotationSpeedMax; } /** * Setter for movieRotationSpeedDef. * * @param movieRotationSpeedDef * the movieRotationSpeedDef to set */ public void setMovieRotationSpeedDef(float movieRotationSpeedDef) { this.movieRotationSpeedDef = movieRotationSpeedDef; } /** * Setter for maxOctreeDepth. * * @param maxOctreeDepth * the maxOctreeDepth to set */ public void setMaxOctreeDepth(int maxOctreeDepth) { this.maxOctreeDepth = maxOctreeDepth; } /** * Setter for octreeEdges. * * @param octreeEdges * the octreeEdges to set */ public void setOctreeEdges(float octreeEdges) { this.octreeEdges = octreeEdges; } /** * Setter for maxExpectedModels. * * @param maxExpectedModels * the maxExpectedModels to set */ public void setMaxExpectedModels(int maxExpectedModels) { this.maxExpectedModels = maxExpectedModels; } /** * Setter for interfaceWidth. * * @param interfaceWidth * the interfaceWidth to set */ public void setInterfaceWidth(int interfaceWidth) { this.interfaceWidth = interfaceWidth; } /** * Setter for interfaceHeight. * * @param interfaceHeight * the interfaceHeight to set */ public void setInterfaceHeight(int interfaceHeight) { this.interfaceHeight = interfaceHeight; } }
package org.moparscape.elysium.net.handler; import org.moparscape.elysium.Server; import org.moparscape.elysium.def.ItemWieldableDef; import org.moparscape.elysium.entity.*; import org.moparscape.elysium.entity.component.Inventory; import org.moparscape.elysium.net.Packets; import org.moparscape.elysium.net.Session; import org.moparscape.elysium.net.codec.decoder.message.*; import org.moparscape.elysium.task.timed.ItemDropTask; import org.moparscape.elysium.task.timed.ItemPickupTask; import org.moparscape.elysium.util.Formulae; import org.moparscape.elysium.world.Point; import org.moparscape.elysium.world.Region; import java.util.List; import java.util.Map; /** * Created by daniel on 13/02/2015. */ public final class ItemMessageHandlers { public static final class ItemDoorMessageHandler extends MessageHandler<ItemDoorMessage> { @Override public boolean handle(Session session, Player player, ItemDoorMessage message) { //To change body of implemented methods use File | Settings | File Templates. return true; } } public static final class ItemDropMessageHandler extends MessageHandler<ItemDropMessage> { @Override public boolean handle(Session session, Player player, ItemDropMessage message) { int itemIndex = message.getIndex(); Inventory inventory = player.getInventory(); if (itemIndex >= inventory.size()) return true; player.setState(PlayerState.ITEM_DROP); Server.getInstance().submitTimedTask(new ItemDropTask(player, itemIndex, player.getActionCount())); return true; } } public static final class ItemGameObjectMessageHandler extends MessageHandler<ItemGameObjectMessage> { @Override public boolean handle(Session session, Player player, ItemGameObjectMessage message) { //To change body of implemented methods use File | Settings | File Templates. 
return true; } } public static final class ItemGroundItemMessageHandler extends MessageHandler<ItemGroundItemMessage> { @Override public boolean handle(Session session, Player player, ItemGroundItemMessage message) { //To change body of implemented methods use File | Settings | File Templates. return true; } } public static final class ItemItemMessageHandler extends MessageHandler<ItemItemMessage> { @Override public boolean handle(Session session, Player player, ItemItemMessage message) { //To change body of implemented methods use File | Settings | File Templates. return true; } } public static final class ItemNpcMessageHandler extends MessageHandler<ItemNpcMessage> { @Override public boolean handle(Session session, Player player, ItemNpcMessage message) { //To change body of implemented methods use File | Settings | File Templates. return true; } } public static final class ItemPickupMessageHandler extends MessageHandler<ItemPickupMessage> { @Override public boolean handle(Session session, Player player, ItemPickupMessage message) { Point itemLoc = message.getLocation(); Region r = Region.getRegion(itemLoc); Item targetItem = r.getItem(message.getItemId(), itemLoc); if (targetItem != null && targetItem.isVisibleTo(player)) { player.setState(PlayerState.ITEM_PICKUP); Server.getInstance().submitTimedTask(new ItemPickupTask(player, targetItem)); } return true; } } public static final class ItemPlayerMessageHandler extends MessageHandler<ItemPlayerMessage> { @Override public boolean handle(Session session, Player player, ItemPlayerMessage message) { //To change body of implemented methods use File | Settings | File Templates. 
return true; } } public static final class ItemUnwieldMessageHandler extends MessageHandler<ItemUnwieldMessage> { @Override public boolean handle(Session session, Player player, ItemUnwieldMessage message) { int itemIndex = message.getItemIndex(); Inventory inventory = player.getInventory(); if (itemIndex >= inventory.size()) return true; InvItem item = inventory.get(itemIndex); if (item == null || !item.isWielded()) return true; if (!item.isWieldable()) { // Might want to mark this as cheating. return true; } ItemWieldableDef wieldableDef = item.getWieldableDef(); Appearance appearance = player.getAppearance(); item.setWielded(false); player.setWornItem(wieldableDef.getWieldPos(), appearance.getSprite(wieldableDef.getWieldPos())); Packets.sendSound(player, "click"); Packets.sendInventory(player); return true; } } public static final class ItemWieldMessageHandler extends MessageHandler<ItemWieldMessage> { @Override public boolean handle(Session session, Player player, ItemWieldMessage message) { int itemIndex = message.getItemIndex(); Inventory inventory = player.getInventory(); if (itemIndex >= inventory.size()) return true; InvItem item = inventory.get(itemIndex); if (item == null || item.isWielded()) return true; if (!item.isWieldable()) { // Might want to mark this as cheating. return true; } ItemWieldableDef wieldableDef = item.getWieldableDef(); // Check stats required. StringBuilder outer = null; StringBuilder inner = null; for (Map.Entry<Integer, Integer> e : wieldableDef.getStatsRequired()) { if (player.getMaxStat(e.getKey()) < e.getValue()) { if (outer == null) { outer = new StringBuilder(100); inner = new StringBuilder(50); outer.append("You must have at least "); } inner.append(e.getValue()).append(" ") .append(Formulae.statArray[e.getKey()]) .append(", "); } } // If they lack one or more stats, message them the requirements. 
if (outer != null) { outer.append(inner.substring(0, inner.length() - 2)) .append(" to use this item."); Packets.sendMessage(player, outer.toString()); return true; } // If the item is female-only and they're male then tell them // that they can't wear the item. Appearance appearance = player.getAppearance(); if (wieldableDef.femaleOnly() && appearance.isMale()) { Packets.sendMessage(player, "Only females can wear that item."); return true; } List<InvItem> items = inventory.getItems(); for (InvItem i : items) { if (i.isWielded() && item.wieldingAffectsItem(i)) { i.setWielded(false); ItemWieldableDef def = i.getWieldableDef(); player.setWornItem(def.getWieldPos(), appearance.getSprite(def.getWieldPos())); } } item.setWielded(true); player.setWornItem(wieldableDef.getWieldPos(), wieldableDef.getSprite()); Packets.sendSound(player, "click"); Packets.sendInventory(player); return true; } } }
package cz.metacentrum.perun.webgui.tabs; import com.google.gwt.core.client.GWT; import com.google.gwt.core.client.JavaScriptObject; import cz.metacentrum.perun.webgui.client.PerunWebSession; import cz.metacentrum.perun.webgui.client.UiElements; import cz.metacentrum.perun.webgui.client.resources.PerunEntity; import cz.metacentrum.perun.webgui.json.GetEntityById; import cz.metacentrum.perun.webgui.json.JsonCallbackEvents; import cz.metacentrum.perun.webgui.model.GeneralObject; import cz.metacentrum.perun.webgui.model.SecurityTeam; import java.util.ArrayList; import java.util.HashMap; import java.util.Map; /** * Class for parsion URL and loading proper tabs * * @author Vaclav Mach <374430@mail.muni.cz> */ public class UrlMapper { private PerunWebSession session = PerunWebSession.getInstance(); static public final String TAB_SEPARATOR = ";"; static public final String TAB_NAME_SEPARATOR = "/"; static private boolean parsingRunning = false; /** * UrlMapper instance */ public UrlMapper() {} /** * Parses URL, and calls the ***Tabs classes. 
* * URL EXAMPLE: * vo-admin/detail?vo=123&dummy=blabla;perun-admin/users?searchString=Vaclav;group-admin/members * * @param url */ public void parseUrl(String url) { // disable multiple parsing if(parsingRunning) return; parsingRunning = true; session.getUiElements().setLogText("Parsing url:" + url); String[] parts = url.split(TAB_SEPARATOR); // if url leads to tabs without any "active", add it to first of them if (parts.length >= 1 && !url.contains("active=1")) { if (parts[0].contains("?")) { parts[0] = parts[0]+"&active=1"; } else { parts[0] = parts[0]+"?active=1"; } } for(String tabString : parts) { parseTab(tabString); } parsingRunning = false; // reloads the menu session.getUiElements().getMenu().updateLinks(); } /** * Called */ private boolean parseTab(String tabString) { String[] parts = tabString.split("\\?", 2); String[] packageSlashName = parts[0].split("/", 2); if (packageSlashName.length != 2) { return false; } if(packageSlashName[0] == null || packageSlashName[1] == null) { return false; } String tabPackage = packageSlashName[0]; String tabName = packageSlashName[1]; Map<String, String> parameters; if(parts.length == 2) { parameters = parseParameters(parts[1], "&"); } else { parameters = new HashMap<String, String>(); } // try to open tab try { if (getAndOpenTab(tabPackage, tabName, parameters)) { return true; } } catch (RuntimeException e) { UiElements.generateAlert("URL parsing error", "This tab couldn't be opened: " + tabPackage + " / " + tabName + ". 
"); } session.getUiElements().setLogText("No entry exists for: " + tabPackage + " | " + tabName); return false; } private boolean getAndOpenTab(String tabPackage, String tabName, Map<String, String> parameters) { // homepage, info if(tabPackage.equals(OtherTabs.URL)) { OtherTabs tabs = new OtherTabs(); tabs.loadTab(tabName, parameters); } // VO if(tabPackage.equals(VosTabs.URL)) { VosTabs tabs = new VosTabs(); tabs.loadTab(tabName, parameters); return true; } // Group if(tabPackage.equals(GroupsTabs.URL)) { GroupsTabs tabs = new GroupsTabs(); tabs.loadTab(tabName, parameters); return true; } // Facilities if(tabPackage.equals(FacilitiesTabs.URL)) { FacilitiesTabs tabs = new FacilitiesTabs(); tabs.loadTab(tabName, parameters); return true; } // Perun if(tabPackage.equals(PerunAdminTabs.URL)) { PerunAdminTabs tabs = new PerunAdminTabs(); tabs.loadTab(tabName, parameters); return true; } // Users if(tabPackage.equals(UsersTabs.URL)) { UsersTabs tabs = new UsersTabs(); tabs.loadTab(tabName, parameters); return true; } // Cabinet if(tabPackage.equals(CabinetTabs.URL)) { CabinetTabs tabs = new CabinetTabs(); tabs.loadTab(tabName, parameters); return true; } // Attributes if(tabPackage.equals(AttributesTabs.URL)) { AttributesTabs tabs = new AttributesTabs(); tabs.loadTab(tabName, parameters); return true; } // Services if(tabPackage.equals(ServicesTabs.URL)) { ServicesTabs tabs = new ServicesTabs(); tabs.loadTab(tabName, parameters); return true; } // Resources if(tabPackage.equals(ResourcesTabs.URL)) { ResourcesTabs tabs = new ResourcesTabs(); tabs.loadTab(tabName, parameters); return true; } // member if(tabPackage.equals(MembersTabs.URL)) { MembersTabs tabs = new MembersTabs(); tabs.loadTab(tabName, parameters); return true; } // registrar if(tabPackage.equals(RegistrarTabs.URL)) { RegistrarTabs tabs = new RegistrarTabs(); tabs.loadTab(tabName, parameters); return true; } // security if(tabPackage.equals(SecurityTabs.URL)) { SecurityTabs tabs = new SecurityTabs(); 
tabs.loadTab(tabName, parameters); return true; } // test if(tabPackage.equals(TestTabs.URL)) { TestTabs tabs = new TestTabs(); tabs.loadTab(tabName, parameters); return true; } return false; } /** * Parses the parameters * * @param parameters String with parameters to parse * @param character Character, which splits the multiple parameters - usually & * @return */ private Map<String, String> parseParameters(String parameters, String character){ Map<String, String> paramMap = new HashMap<String, String>(); if(parameters == null){ return paramMap; } String[] pairs = parameters.split(character); for(int i = 0; i < pairs.length; i++) { String[] keyValue = pairs[i].split("=", 2); if(keyValue.length == 2){ paramMap.put(keyValue[0], keyValue[1]); } } return paramMap; } /** * Parses a list of objects from URL parameters. * The returned list is NOT COMPLETE immediately after the method finishes, * it is still loading the entities. * The list's length must be equal to the number returned by method: * "parseListLengthFromUrl" * * * @param paramName Parameter name * @param entity Entity which load from IDs * @param parameters * @return */ static public <T extends JavaScriptObject> ArrayList<T> parseListFromUrl(String paramName, PerunEntity entity, Map<String, String> parameters){ ArrayList<T> list = new ArrayList<T>(); int i = 0; String param = paramName + "[" + i + "]"; while(parameters.containsKey(param)){ String idStr = parameters.get(param); int id = Integer.parseInt(idStr); addEntityToList(entity, id, i, list); i++; param = paramName + "[" + i + "]"; } return list; } /** * Returns the number of entities in URL. 
* * * @param paramName * @param parameters * @return */ static public int parseListLengthFromUrl(String paramName, Map<String, String> parameters){ int i = 0; String param = paramName + "[" + i + "]"; while(parameters.containsKey(param)){ i++; param = paramName + "[" + i + "]"; } return i; } /** * Private helper method * @param entity * @param id * @param i * @param list */ static private <T extends JavaScriptObject> void addEntityToList(PerunEntity entity, int id, final int i, final ArrayList<T> list) { JsonCallbackEvents events = new JsonCallbackEvents(){ @SuppressWarnings("unchecked") public void onFinished(JavaScriptObject jso){ JavaScriptObject obj = jso.cast(); list.add(i, (T) obj.cast()); } }; switch (entity) { case USER: new GetEntityById(PerunEntity.USER, id, events).retrieveData(); break; case VIRTUAL_ORGANIZATION: new GetEntityById(PerunEntity.VIRTUAL_ORGANIZATION, id, events).retrieveData(); break; case GROUP: new GetEntityById(PerunEntity.GROUP, id, events).retrieveData(); break; case MEMBER: new GetEntityById(PerunEntity.MEMBER, id, events).retrieveData(); break; case FACILITY: new GetEntityById(PerunEntity.FACILITY, id, events).retrieveData(); break; case RESOURCE: new GetEntityById(PerunEntity.RESOURCE, id, events).retrieveData(); break; case SERVICE: new GetEntityById(PerunEntity.SERVICE, id, events).retrieveData(); break; default: throw new RuntimeException("This entity is not supported."); } } /** * Returns a URL from the list of perun entities * * @param paramName * @param list * @return */ static public <T extends JavaScriptObject> String getUrlFromList(String paramName, ArrayList<T> list) { String url = ""; int i = 0; for(T obj : list) { GeneralObject go = obj.cast(); url += paramName + "[" + i + "]=" + go.getId() + "&"; i++; } return url.substring(0, url.length() - 1); } }
/*
 * Copyright 2016 The Error Prone Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.errorprone.bugpatterns;

import static com.google.errorprone.matchers.Description.NO_MATCH;
import static com.google.errorprone.matchers.Matchers.anyOf;
import static com.google.errorprone.matchers.Matchers.instanceMethod;
import static com.google.errorprone.matchers.Matchers.staticMethod;
import static com.google.errorprone.matchers.Matchers.symbolHasAnnotation;
import static com.google.errorprone.util.ASTHelpers.getReceiver;
import static com.google.errorprone.util.ASTHelpers.getSymbol;
import static com.google.errorprone.util.ASTHelpers.getType;

import com.google.errorprone.VisitorState;
import com.google.errorprone.annotations.FormatMethod;
import com.google.errorprone.bugpatterns.BugChecker.BinaryTreeMatcher;
import com.google.errorprone.bugpatterns.BugChecker.CompoundAssignmentTreeMatcher;
import com.google.errorprone.bugpatterns.BugChecker.MethodInvocationTreeMatcher;
import com.google.errorprone.fixes.Fix;
import com.google.errorprone.matchers.Description;
import com.google.errorprone.matchers.Matcher;
import com.google.errorprone.predicates.TypePredicate;
import com.sun.source.tree.BinaryTree;
import com.sun.source.tree.CompoundAssignmentTree;
import com.sun.source.tree.ExpressionTree;
import com.sun.source.tree.MethodInvocationTree;
import com.sun.source.tree.Tree;
import com.sun.source.tree.Tree.Kind;
import com.sun.source.util.TreePath;
import com.sun.tools.javac.code.Type;
import com.sun.tools.javac.code.Type.MethodType;
import java.util.List;
import java.util.Optional;
import javax.lang.model.type.TypeKind;

/**
 * An abstract matcher for implicit and explicit calls to {@code Object.toString()}, for use on
 * types that do not have a human-readable {@code toString()} implementation.
 *
 * <p>See examples in {@link StreamToString} and {@link ArrayToString}.
 */
public abstract class AbstractToString extends BugChecker
    implements BinaryTreeMatcher, MethodInvocationTreeMatcher, CompoundAssignmentTreeMatcher {

  /** The type to match on. */
  protected abstract TypePredicate typePredicate();

  /**
   * Constructs a fix for an implicit toString call, e.g. from string concatenation or from passing
   * an argument to {@code println} or {@code StringBuilder.append}.
   *
   * @param tree the tree node for the expression being converted to a String
   */
  protected abstract Optional<Fix> implicitToStringFix(ExpressionTree tree, VisitorState state);

  /** Adds the description message for match on the type without fixes. */
  protected Optional<String> descriptionMessageForDefaultMatch(Type type, VisitorState state) {
    return Optional.empty();
  }

  /** Whether this kind of toString call is allowable for this check. */
  protected boolean allowableToStringKind(ToStringKind toStringKind) {
    return false;
  }

  /**
   * Constructs a fix for an explicit toString call, e.g. from {@code Object.toString()} or {@code
   * String.valueOf()}.
   *
   * @param parent the expression's parent (e.g. {@code String.valueOf(expression)})
   */
  protected abstract Optional<Fix> toStringFix(
      Tree parent, ExpressionTree expression, VisitorState state);

  // Any instance method named "toString" with no parameters, on any class.
  private static final Matcher<ExpressionTree> TO_STRING =
      instanceMethod().anyClass().named("toString").withNoParameters();

  // Flogger log statements; their arguments are stringified lazily.
  private static final Matcher<ExpressionTree> FLOGGER_LOG =
      instanceMethod().onDescendantOf("com.google.common.flogger.LoggingApi").named("log");

  // Any method annotated @FormatMethod (printf-style formatting).
  private static final Matcher<ExpressionTree> FORMAT_METHOD =
      symbolHasAnnotation(FormatMethod.class);

  private static final Matcher<ExpressionTree> STRING_FORMAT =
      staticMethod().onClass("java.lang.String").named("format");

  private static final Matcher<ExpressionTree> VALUE_OF =
      staticMethod()
          .onClass("java.lang.String")
          .named("valueOf")
          .withParameters("java.lang.Object");

  // Sinks that implicitly stringify an Object argument.
  private static final Matcher<ExpressionTree> PRINT_STRING =
      anyOf(
          instanceMethod()
              .onDescendantOf("java.io.PrintStream")
              .namedAnyOf("print", "println")
              .withParameters("java.lang.Object"),
          instanceMethod()
              .onExactClass("java.lang.StringBuilder")
              .named("append")
              .withParameters("java.lang.Object"));

  /**
   * Whether {@code argTree} is being passed in the varargs position of {@code
   * methodInvocationTree}, i.e. at or after the last declared parameter.
   */
  private static boolean isInVarargsPosition(
      ExpressionTree argTree, MethodInvocationTree methodInvocationTree, VisitorState state) {
    int parameterCount = getSymbol(methodInvocationTree).getParameters().size();
    List<? extends ExpressionTree> arguments = methodInvocationTree.getArguments();
    // Don't match if we're passing an array into a varargs parameter, but do match if there are
    // other parameters along with it.
    return (arguments.size() > parameterCount || !state.getTypes().isArray(getType(argTree)))
        && arguments.indexOf(argTree) >= parameterCount - 1;
  }

  @Override
  public Description matchMethodInvocation(MethodInvocationTree tree, VisitorState state) {
    // Each branch below reports matches via handleStringifiedTree -> state.reportMatch,
    // so this method itself always returns NO_MATCH.
    if (PRINT_STRING.matches(tree, state)) {
      for (ExpressionTree argTree : tree.getArguments()) {
        handleStringifiedTree(argTree, ToStringKind.IMPLICIT, state);
      }
    }
    if (VALUE_OF.matches(tree, state)) {
      for (ExpressionTree argTree : tree.getArguments()) {
        // Extend the tree path down to the argument so downstream fixes see it.
        handleStringifiedTree(
            tree,
            argTree,
            ToStringKind.EXPLICIT,
            state.withPath(new TreePath(state.getPath(), argTree)));
      }
    }
    if (TO_STRING.matches(tree, state)) {
      ExpressionTree receiver = getReceiver(tree);
      if (receiver != null) {
        handleStringifiedTree(tree, receiver, ToStringKind.EXPLICIT, state);
      }
    }
    if (FORMAT_METHOD.matches(tree, state)) {
      for (ExpressionTree argTree : tree.getArguments()) {
        if (isInVarargsPosition(argTree, tree, state)) {
          handleStringifiedTree(argTree, ToStringKind.FORMAT_METHOD, state);
        }
      }
    }
    if (STRING_FORMAT.matches(tree, state)) {
      for (ExpressionTree argTree : tree.getArguments()) {
        if (isInVarargsPosition(argTree, tree, state)) {
          handleStringifiedTree(argTree, ToStringKind.IMPLICIT, state);
        }
      }
    }
    if (FLOGGER_LOG.matches(tree, state)) {
      for (ExpressionTree argTree : tree.getArguments()) {
        handleStringifiedTree(argTree, ToStringKind.FLOGGER, state);
      }
    }
    return NO_MATCH;
  }

  @Override
  public Description matchBinary(BinaryTree tree, VisitorState state) {
    // Only string concatenation can implicitly stringify an operand.
    if (!state.getTypes().isSameType(getType(tree), state.getSymtab().stringType)) {
      return NO_MATCH;
    }
    if (tree.getKind() == Kind.PLUS) {
      handleStringifiedTree(tree.getLeftOperand(), ToStringKind.IMPLICIT, state);
      handleStringifiedTree(tree.getRightOperand(), ToStringKind.IMPLICIT, state);
    }
    // NOTE(review): PLUS_ASSIGNMENT looks unreachable here since compound
    // assignments arrive as CompoundAssignmentTree, not BinaryTree - confirm.
    if (tree.getKind() == Kind.PLUS_ASSIGNMENT) {
      handleStringifiedTree(tree.getRightOperand(), ToStringKind.IMPLICIT, state);
    }
    return NO_MATCH;
  }

  @Override
  public Description matchCompoundAssignment(CompoundAssignmentTree tree, VisitorState state) {
    // str += expr implicitly stringifies expr.
    if (state.getTypes().isSameType(getType(tree.getVariable()), state.getSymtab().stringType)
        && tree.getKind() == Kind.PLUS_ASSIGNMENT) {
      handleStringifiedTree(tree.getExpression(), ToStringKind.IMPLICIT, state);
    }
    return NO_MATCH;
  }

  /** Convenience overload where the stringified tree is its own fix anchor. */
  private void handleStringifiedTree(
      ExpressionTree tree, ToStringKind toStringKind, VisitorState state) {
    handleStringifiedTree(tree, tree, toStringKind, state);
  }

  /**
   * Reports a match for {@code tree} when its type matches {@link #typePredicate()} and this kind
   * of stringification is not allowed by {@link #allowableToStringKind}.
   *
   * @param parent the enclosing expression used to anchor explicit-toString fixes
   */
  private void handleStringifiedTree(
      Tree parent, ExpressionTree tree, ToStringKind toStringKind, VisitorState state) {
    Type type = type(tree);
    if (type.getKind() == TypeKind.NULL
        || !typePredicate().apply(type, state)
        || allowableToStringKind(toStringKind)) {
      return;
    }
    state.reportMatch(maybeFix(tree, state, type, getFix(tree, state, parent, toStringKind)));
  }

  /** Unwraps method references/invocations to the type that is actually stringified. */
  private static Type type(ExpressionTree tree) {
    Type type = getType(tree);
    if (type instanceof MethodType) {
      return type.getReturnType();
    }
    return type;
  }

  /** Picks the fix constructor matching how the value was stringified. */
  private Optional<Fix> getFix(
      ExpressionTree tree, VisitorState state, Tree parent, ToStringKind toStringKind) {
    switch (toStringKind) {
      case IMPLICIT:
      case FLOGGER:
      case FORMAT_METHOD:
        return implicitToStringFix(tree, state);
      case EXPLICIT:
        return toStringFix(parent, tree, state);
      case NONE:
        // fall out
    }
    // NONE (or an unexpected value) should never reach here.
    throw new AssertionError();
  }

  /** Builds the description, attaching the fix and custom message when present. */
  private Description maybeFix(Tree tree, VisitorState state, Type matchedType, Optional<Fix> fix) {
    Description.Builder description = buildDescription(tree);
    fix.ifPresent(description::addFix);
    descriptionMessageForDefaultMatch(matchedType, state).ifPresent(description::setMessage);
    return description.build();
  }

  /** How a value ended up being converted to a String. */
  enum ToStringKind {
    /** String concatenation, or an enclosing print method. */
    IMPLICIT,
    /** {@code String.valueOf()} or {@code #toString()}. */
    EXPLICIT,
    FORMAT_METHOD,
    FLOGGER,
    NONE,
  }
}
/* * Copyright (C) 2005-2008 Jive Software. All rights reserved. * Copyright 2008-2016 Robert Marcano * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jivesoftware.openfire.admin; import java.sql.Connection; import java.sql.DriverManager; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.util.ArrayList; import java.util.List; import org.jivesoftware.database.DbConnectionManager; import org.jivesoftware.openfire.XMPPServerInfo; import org.jivesoftware.util.JiveGlobals; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.xmpp.packet.JID; /** * The JDBC admin provider allows you to use an external database to define the administrators * users. It is best used with the JDBCAuthProvider &amp; JDBCGroupProvider to provide integration * between your external system and Openfire. 
All data is treated as read-only so any * set operations will result in an exception.<p> * * To enable this provider, set the following in the system properties: * * <ul> * <li>{@code provider.admin.className = org.jivesoftware.openfire.admin.JDBCAdminProvider}</li> * </ul> * * Then you need to set your driver, connection string and SQL statements: * <ul> * <li>{@code jdbcProvider.driver = com.mysql.jdbc.Driver}</li> * <li>{@code jdbcProvider.connectionString = jdbc:mysql://localhost/dbname?user=username&amp;password=secret}</li> * <li>{@code jdbcAdminProvider.getAdminsSQL = SELECT user FROM myAdmins}</li> * </ul> * <p> * If you want to be able to update the admin users via the UI, add the following properties: * <ul> * <li>{@code jdbcAdminProvider.insertAdminsSQL = INSERT INTO myAdmins (user) VALUES (?)}</li> * <li>{@code jdbcAdminProvider.deleteAdminsSQL = DELETE FROM myAdmins WHERE user = ?}</li> * </ul> * <p> * In order to use the configured JDBC connection provider do not use a JDBC * connection string, set the following property * * <ul> * <li>{@code jdbcAdminProvider.useConnectionProvider = true}</li> * </ul> * * * @author Robert Marcano */ public class JDBCAdminProvider implements AdminProvider { private static final Logger Log = LoggerFactory.getLogger(JDBCAdminProvider.class); private final String getAdminsSQL; private final String insertAdminsSQL; private final String deleteAdminsSQL; private final String xmppDomain; private final boolean useConnectionProvider; private String connectionString; /** * Constructs a new JDBC admin provider. 
*/ public JDBCAdminProvider() { // Convert XML based provider setup to Database based JiveGlobals.migrateProperty("jdbcProvider.driver"); JiveGlobals.migrateProperty("jdbcProvider.connectionString"); JiveGlobals.migrateProperty("jdbcAdminProvider.getAdminsSQL"); xmppDomain = XMPPServerInfo.XMPP_DOMAIN.getValue(); useConnectionProvider = JiveGlobals.getBooleanProperty("jdbcAdminProvider.useConnectionProvider"); // Load database statement for reading admin list getAdminsSQL = JiveGlobals.getProperty("jdbcAdminProvider.getAdminsSQL"); insertAdminsSQL = JiveGlobals.getProperty("jdbcAdminProvider.insertAdminsSQL", ""); deleteAdminsSQL = JiveGlobals.getProperty("jdbcAdminProvider.deleteAdminsSQL", ""); // Load the JDBC driver and connection string if (!useConnectionProvider) { String jdbcDriver = JiveGlobals.getProperty("jdbcProvider.driver"); try { Class.forName(jdbcDriver).getDeclaredConstructor().newInstance(); } catch (Exception e) { Log.error("Unable to load JDBC driver: " + jdbcDriver, e); return; } connectionString = JiveGlobals.getProperty("jdbcProvider.connectionString"); } } /** * XMPP disallows some characters in identifiers, requiring them to be escaped. * * This implementation assumes that the database returns properly escaped identifiers, * but can apply escaping by setting the value of the 'jdbcAdminProvider.isEscaped' * property to 'false'. * * @return 'false' if this implementation needs to escape database content before processing. */ protected boolean assumePersistedDataIsEscaped() { return JiveGlobals.getBooleanProperty( "jdbcAdminProvider.isEscaped", true ); } @Override public synchronized List<JID> getAdmins() { Connection con = null; PreparedStatement pstmt = null; ResultSet rs = null; List<JID> jids = new ArrayList<>(); try { con = getConnection(); pstmt = con.prepareStatement(getAdminsSQL); rs = pstmt.executeQuery(); while (rs.next()) { // OF-1837: When the database does not hold escaped data, escape values before processing them further. 
final String username; if (assumePersistedDataIsEscaped()) { username = rs.getString(1); } else { username = JID.escapeNode( rs.getString(1) ); } jids.add(new JID(username + "@" + xmppDomain)); } return jids; } catch (SQLException e) { throw new RuntimeException(e); } finally { DbConnectionManager.closeConnection(rs, pstmt, con); } } private void changeAdmins(final Connection con, final String sql, final List<JID> admins) throws SQLException { if (!admins.isEmpty()) { try (final PreparedStatement pstmt = con.prepareStatement(sql)) { for (final JID jid : admins) { // OF-1837: When the database does not hold escaped data, our query should use unescaped values in the 'where' clause. final String queryValue = assumePersistedDataIsEscaped() ? jid.getNode() : JID.unescapeNode( jid.getNode() ); pstmt.setString(1, queryValue); pstmt.execute(); } } } } @Override public synchronized void setAdmins(List<JID> newAdmins) { if (isReadOnly()) { // Reject the operation since the provider is read-only throw new UnsupportedOperationException(); } final List<JID> currentAdmins = getAdmins(); // Get a list of everyone in the new list not in the current list final List<JID> adminsToAdd = new ArrayList<>(newAdmins); adminsToAdd.removeAll(currentAdmins); // Get a list of everyone in the current list not in the new list currentAdmins.removeAll(newAdmins); try (final Connection con = getConnection()) { changeAdmins(con, insertAdminsSQL, adminsToAdd); changeAdmins(con, deleteAdminsSQL, currentAdmins); } catch (SQLException e) { throw new RuntimeException(e); } } @Override public boolean isReadOnly() { return insertAdminsSQL.isEmpty() || deleteAdminsSQL.isEmpty(); } private Connection getConnection() throws SQLException { if (useConnectionProvider) { return DbConnectionManager.getConnection(); } return DriverManager.getConnection(connectionString); } }
/*
 * Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
 *
 * WSO2 Inc. licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package io.siddhi.core.query.input.stream.state;

import io.siddhi.core.event.ComplexEvent;
import io.siddhi.core.event.ComplexEventChunk;
import io.siddhi.core.event.state.StateEvent;
import io.siddhi.core.event.stream.StreamEvent;
import io.siddhi.query.api.execution.query.input.state.LogicalStateElement;
import io.siddhi.query.api.execution.query.input.stream.StateInputStream;

import java.util.Iterator;

/**
 * Logical and &amp; or processor.
 *
 * Pre-state processor for logical (AND / OR) pattern elements. Works in tandem
 * with a partner processor for the other operand; the two share one lock (see
 * {@link #setPartnerStatePreProcessor}) so state updates stay consistent.
 */
public class LogicalPreStateProcessor extends StreamPreStateProcessor {

    // Whether this element is the AND or the OR side of the logical pattern.
    protected LogicalStateElement.Type logicalType;
    // The processor handling the other operand of the logical element.
    protected LogicalPreStateProcessor partnerStatePreProcessor;

    public LogicalPreStateProcessor(LogicalStateElement.Type type, StateInputStream.Type stateType) {
        super(stateType);
        this.logicalType = type;
    }

    /**
     * Registers a new state event with this processor and mirrors it to the
     * partner so both operands track the same logical attempt.
     */
    protected void addState(StateEvent stateEvent, StreamPreState state) {
        lock.lock();
        try {
            if (isStartState || stateType == StateInputStream.Type.SEQUENCE) {
                // Start states / sequences keep at most one pending attempt.
                if (state.getNewAndEveryStateEventList().isEmpty()) {
                    state.getNewAndEveryStateEventList().add(stateEvent);
                }
                if (partnerStatePreProcessor != null && partnerStatePreProcessor.isNewAndEveryStateEventListEmpty()) {
                    partnerStatePreProcessor.addEventToNewAndEveryStateEventList(stateEvent);
                }
            } else {
                state.getNewAndEveryStateEventList().add(stateEvent);
                if (partnerStatePreProcessor != null) {
                    partnerStatePreProcessor.addEventToNewAndEveryStateEventList(stateEvent);
                }
            }
        } finally {
            lock.unlock();
        }
    }

    /**
     * Clones the state event for an "every" repetition, clearing this element's
     * stream slot (and all later slots) so the repetition starts fresh.
     */
    @Override
    public void addEveryState(StateEvent stateEvent) {
        StateEvent clonedEvent = stateEventCloner.copyStateEvent(stateEvent);
        clonedEvent.setType(ComplexEvent.Type.CURRENT);
        // Clear this element's slot and every slot after it.
        clonedEvent.setEvent(stateId, null);
        for (int i = stateId; i < clonedEvent.getStreamEvents().length; i++) {
            clonedEvent.setEvent(i, null);
        }
        StreamPreState state = stateHolder.getState();
        lock.lock();
        try {
            state.getNewAndEveryStateEventList().add(clonedEvent);
            if (partnerStatePreProcessor != null) {
                // Also clear the partner's slot before handing the clone over.
                clonedEvent.setEvent(partnerStatePreProcessor.stateId, null);
                partnerStatePreProcessor.addEventToNewAndEveryStateEventList(clonedEvent);
            }
        } finally {
            lock.unlock();
            stateHolder.returnState(state);
        }
    }

    /**
     * Clears pending events on both sides. For AND, clearing only happens once
     * both sides have the same number of pending events; OR clears immediately.
     */
    @Override
    public void resetState() {
        StreamPreState state = stateHolder.getState();
        lock.lock();
        try {
            if (logicalType == LogicalStateElement.Type.OR ||
                    state.getPendingStateEventList().size() ==
                            partnerStatePreProcessor.getPendingStateEventList().size()) {
                state.getPendingStateEventList().clear();
                partnerStatePreProcessor.getPendingStateEventList().clear();

                if (isStartState && state.getNewAndEveryStateEventList().isEmpty()) {
                    // NOTE(review): for sequences without an "every" successor,
                    // re-init is skipped while the next element still has
                    // pending events - presumably to avoid duplicate starts;
                    // confirm against StreamPreStateProcessor.init().
                    if (stateType == StateInputStream.Type.SEQUENCE &&
                            thisStatePostProcessor.nextEveryStatePreProcessor == null &&
                            !((StreamPreStateProcessor) thisStatePostProcessor.nextStatePreProcessor)
                                    .getPendingStateEventList().isEmpty()) {
                        return;
                    }
                    init();
                }
            }
        } finally {
            lock.unlock();
            stateHolder.returnState(state);
        }
    }

    /**
     * Promotes newly-arrived events to the pending list (time-ordered) on both
     * this processor and its partner.
     */
    @Override
    public void updateState() {
        StreamPreState state = stateHolder.getState();
        lock.lock();
        try {
            state.getNewAndEveryStateEventList().sort(eventTimeComparator);
            state.getPendingStateEventList().addAll(state.getNewAndEveryStateEventList());
            state.getNewAndEveryStateEventList().clear();
            partnerStatePreProcessor.moveAllNewAndEveryStateEventListEventsToPendingStateEventList();
        } finally {
            lock.unlock();
            stateHolder.returnState(state);
        }
    }

    /**
     * Fills each pending state event's slot with the incoming stream event and
     * runs the post processing chain, collecting any events it emits.
     */
    @Override
    public ComplexEventChunk<StateEvent> processAndReturn(ComplexEventChunk complexEventChunk) {
        ComplexEventChunk<StateEvent> returnEventChunk = new ComplexEventChunk<StateEvent>(false);
        complexEventChunk.reset();
        StreamEvent streamEvent = (StreamEvent) complexEventChunk.next(); //Sure only one will be sent
        StreamPreState state = stateHolder.getState();
        lock.lock();
        try {
            for (Iterator<StateEvent> iterator = state.getPendingStateEventList().iterator(); iterator.hasNext(); ) {
                StateEvent stateEvent = iterator.next();
                // OR is already satisfied once the partner's slot is filled;
                // drop this side's pending copy.
                if (logicalType == LogicalStateElement.Type.OR &&
                        stateEvent.getStreamEvent(partnerStatePreProcessor.getStateId()) != null) {
                    iterator.remove();
                    continue;
                }
                stateEvent.setEvent(stateId, streamEventCloner.copyStreamEvent(streamEvent));
                process(stateEvent);
                if (this.thisLastProcessor.isEventReturned()) {
                    this.thisLastProcessor.clearProcessedEvent();
                    returnEventChunk.add(stateEvent);
                }
                if (state.isStateChanged()) {
                    iterator.remove();
                } else {
                    // No transition: undo the slot assignment; sequences also
                    // discard the attempt entirely.
                    switch (stateType) {
                        case PATTERN:
                            stateEvent.setEvent(stateId, null);
                            break;
                        case SEQUENCE:
                            stateEvent.setEvent(stateId, null);
                            iterator.remove();
                            break;
                    }
                }
            }
        } finally {
            lock.unlock();
            stateHolder.returnState(state);
        }
        return returnEventChunk;
    }

    /**
     * Wires up the partner processor and shares this processor's lock with it
     * so both operands are guarded by the same monitor.
     */
    public void setPartnerStatePreProcessor(LogicalPreStateProcessor partnerStatePreProcessor) {
        this.partnerStatePreProcessor = partnerStatePreProcessor;
        partnerStatePreProcessor.lock = lock;
    }

    // Partner-side half of updateState(); expected to run while the caller
    // holds the shared lock (see updateState()).
    public void moveAllNewAndEveryStateEventListEventsToPendingStateEventList() {
        StreamPreState state = stateHolder.getState();
        try {
            state.getNewAndEveryStateEventList().sort(eventTimeComparator);
            state.getPendingStateEventList().addAll(state.getNewAndEveryStateEventList());
            state.getNewAndEveryStateEventList().clear();
        } finally {
            stateHolder.returnState(state);
        }
    }

    // True when no new/"every" events are queued on this side.
    public boolean isNewAndEveryStateEventListEmpty() {
        StreamPreState state = stateHolder.getState();
        try {
            return state.getNewAndEveryStateEventList().isEmpty();
        } finally {
            stateHolder.returnState(state);
        }
    }

    // Queues an event on this side; callers hold the shared lock.
    public void addEventToNewAndEveryStateEventList(StateEvent event) {
        StreamPreState state = stateHolder.getState();
        try {
            state.getNewAndEveryStateEventList().add(event);
        } finally {
            stateHolder.returnState(state);
        }
    }
}
/*
 * Copyright 2012 Medical Research Council Harwell.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.mousephenotype.dcc.entities.qc;

import java.io.Serializable;
import java.util.Collection;
import java.util.Date;
import javax.persistence.Basic;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.NamedQueries;
import javax.persistence.NamedQuery;
import javax.persistence.OneToMany;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlTransient;

/**
 * JPA entity for a QC issue raised against a data context (an_issue table).
 * Soft deletion is modelled by {@code isDeleted}; all named queries filter on it.
 * Natural ordering (see {@link #compareTo(AnIssue)}) sorts by status cid with
 * resolved issues forced to the bottom.
 *
 * @author Gagarine Yaikhom <g.yaikhom@har.mrc.ac.uk>
 */
@Entity
@Table(name = "an_issue", catalog = "PHENODCC_QC_DATABASE_NAME", schema = "")
@XmlRootElement
@NamedQueries({
    @NamedQuery(name = "AnIssue.findAll", query = "SELECT a FROM AnIssue a WHERE a.isDeleted = 0 ORDER BY a.lastUpdate DESC"),
    @NamedQuery(name = "AnIssue.findById", query = "SELECT a FROM AnIssue a WHERE (a.isDeleted = 0 AND a.id = :id)"),
    @NamedQuery(name = "AnIssue.findByTitle", query = "SELECT a FROM AnIssue a WHERE (a.isDeleted = 0 AND a.title = :title)"),
    @NamedQuery(name = "AnIssue.findByPriority", query = "SELECT a FROM AnIssue a WHERE (a.isDeleted = 0 AND a.priority = :priority) ORDER BY a.lastUpdate DESC"),
    @NamedQuery(name = "AnIssue.findByLastUpdate", query = "SELECT a FROM AnIssue a WHERE (a.isDeleted = 0 AND a.lastUpdate = :lastUpdate)"),
    @NamedQuery(name = "AnIssue.findByContextId", query = "SELECT a FROM AnIssue a WHERE (a.isDeleted = 0 AND a.contextId.id = :contextId) ORDER BY a.lastUpdate DESC"),
    @NamedQuery(name = "AnIssue.findByAssignedTo", query = "SELECT a FROM AnIssue a WHERE (a.isDeleted = 0 AND a.assignedTo = :assignedTo) ORDER BY a.lastUpdate DESC"),
    @NamedQuery(name = "AnIssue.findByRaisedBy", query = "SELECT a FROM AnIssue a WHERE (a.isDeleted = 0 AND a.raisedBy = :raisedBy) ORDER BY a.lastUpdate DESC"),
    @NamedQuery(name = "AnIssue.findByStatus", query = "SELECT a FROM AnIssue a WHERE (a.isDeleted = 0 AND a.status = :status) ORDER BY a.lastUpdate DESC"),
    @NamedQuery(name = "AnIssue.findByCentreId", query = "SELECT a FROM AnIssue a, DataContext d WHERE (a.isDeleted = 0 AND a.contextId = d AND d.cid = :centreId) ORDER BY a.lastUpdate DESC")
})
public class AnIssue implements Serializable, Comparable<AnIssue> {

    // Pin the serialized form: the class is Serializable but previously relied on
    // the compiler-generated (fragile) serialVersionUID.
    private static final long serialVersionUID = 1L;

    // Sort key used to force resolved issues to the bottom of any sorted view
    // (assumes real status cids are well below this value — TODO confirm).
    private static final int RESOLVED_SORT_KEY = 100;

    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    @Basic(optional = false)
    @Column(nullable = false)
    private Long id;
    @Basic(optional = false)
    @Column(nullable = false, length = 256)
    private String title;
    @Basic(optional = false)
    @Column(nullable = false)
    private short priority;
    @Basic(optional = false)
    @Column(name = "control_setting", nullable = false)
    private Integer controlSetting;
    // Maintained by the database (DEFAULT/ON UPDATE CURRENT_TIMESTAMP); never written by JPA.
    @Basic(optional = false)
    @Column(name = "last_update", columnDefinition = "TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP", nullable = false, insertable = false, updatable = false)
    @Temporal(TemporalType.TIMESTAMP)
    private Date lastUpdate;
    @Basic(optional = false)
    @Column(name = "raised_by", nullable = false)
    private Integer raisedBy;
    @Basic(optional = false)
    @Column(name = "assigned_to", nullable = false)
    private Integer assignedTo;
    @JoinColumn(name = "status", referencedColumnName = "id", nullable = false)
    @ManyToOne(cascade = CascadeType.ALL, optional = false)
    private IssueStatus status;
    @JoinColumn(name = "context_id", referencedColumnName = "id", nullable = false)
    @ManyToOne(cascade = CascadeType.ALL, optional = false)
    private DataContext contextId;
    @OneToMany(cascade = CascadeType.ALL, mappedBy = "issueId")
    private Collection<AnAction> anActionCollection;
    @OneToMany(cascade = CascadeType.ALL, mappedBy = "issueId")
    private Collection<CitedDataPoint> citedDataPointsCollection;
    // Soft-delete flag: 0 = live, non-zero = deleted (filtered out by all named queries).
    @Basic(optional = false)
    @Column(name = "is_deleted", nullable = false)
    private int isDeleted;

    public AnIssue() {
    }

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public String getTitle() {
        return title;
    }

    public void setTitle(String title) {
        this.title = title;
    }

    public short getPriority() {
        return priority;
    }

    /**
     * Returns a human-readable label for the numeric priority.
     * Unknown values fall back to "Low".
     */
    public String getPriorityString() {
        String returnValue;
        switch (priority) {
            case 2:
                returnValue = "Medium";
                break;
            case 3:
                returnValue = "High";
                break;
            case 1:
            default:
                returnValue = "Low";
        }
        return returnValue;
    }

    public void setPriority(short priority) {
        this.priority = priority;
    }

    public Integer getControlSetting() {
        return controlSetting;
    }

    public void setControlSetting(Integer controlSetting) {
        this.controlSetting = controlSetting;
    }

    public Date getLastUpdate() {
        return lastUpdate;
    }

    public void setLastUpdate(Date lastUpdate) {
        this.lastUpdate = lastUpdate;
    }

    public Integer getRaisedBy() {
        return raisedBy;
    }

    public void setRaisedBy(Integer raisedBy) {
        this.raisedBy = raisedBy;
    }

    public Integer getAssignedTo() {
        return assignedTo;
    }

    public void setAssignedTo(Integer assignedTo) {
        this.assignedTo = assignedTo;
    }

    public IssueStatus getStatus() {
        return status;
    }

    public void setStatus(IssueStatus status) {
        this.status = status;
    }

    public DataContext getContextId() {
        return contextId;
    }

    public void setContextId(DataContext contextId) {
        this.contextId = contextId;
    }

    @XmlTransient
    public Collection<AnAction> getAnActionCollection() {
        return anActionCollection;
    }

    public void setAnActionCollection(Collection<AnAction> anActionCollection) {
        this.anActionCollection = anActionCollection;
    }

    @XmlTransient
    public Collection<CitedDataPoint> getCitedDataPointsCollection() {
        return citedDataPointsCollection;
    }

    public void setCitedDataPointsCollection(Collection<CitedDataPoint> citedDataPointsCollection) {
        this.citedDataPointsCollection = citedDataPointsCollection;
    }

    public int getIsDeleted() {
        return isDeleted;
    }

    public void setIsDeleted(int isDeleted) {
        this.isDeleted = isDeleted;
    }

    /**
     * Orders issues by their status cid, forcing "resolved" issues to the
     * bottom regardless of cid.
     *
     * NOTE: this ordering is not consistent with equals (the class does not
     * override equals/hashCode), which is acceptable for display sorting only.
     */
    @Override
    public int compareTo(AnIssue another) {
        return Integer.compare(sortKeyOf(this.status), sortKeyOf(another.status));
    }

    // Maps a status to its sort key, demoting resolved issues to RESOLVED_SORT_KEY.
    private static int sortKeyOf(IssueStatus status) {
        if ("resolved".equals(status.getShortName())) {
            return RESOLVED_SORT_KEY;
        }
        return status.getCid();
    }
}
/*
 * BytecodeAstLanguage.java
 *
 * Copyright (c) 2013 Mike Strobel
 *
 * This source code is based on Mono.Cecil from Jb Evain, Copyright (c) Jb Evain;
 * and ILSpy/ICSharpCode from SharpDevelop, Copyright (c) AlphaSierraPapa.
 *
 * This source code is subject to terms and conditions of the Apache License, Version 2.0.
 * A copy of the license can be found in the License.html file at the root of this distribution.
 * By using this source code in any fashion, you are agreeing to be bound by the terms of the
 * Apache License, Version 2.0.
 *
 * You must not remove this notice, or any other, from this software.
 */

package com.strobel.decompiler.languages;

import com.strobel.assembler.metadata.*;
import com.strobel.core.ArrayUtilities;
import com.strobel.core.ExceptionUtilities;
import com.strobel.core.StringUtilities;
import com.strobel.core.VerifyArgument;
import com.strobel.decompiler.DecompilationOptions;
import com.strobel.decompiler.DecompilerContext;
import com.strobel.decompiler.DecompilerHelpers;
import com.strobel.decompiler.ITextOutput;
import com.strobel.decompiler.NameSyntax;
import com.strobel.decompiler.PlainTextOutput;
import com.strobel.decompiler.ast.AstBuilder;
import com.strobel.decompiler.ast.AstOptimizationStep;
import com.strobel.decompiler.ast.AstOptimizer;
import com.strobel.decompiler.ast.Block;
import com.strobel.decompiler.ast.Expression;
import com.strobel.decompiler.ast.Variable;

import javax.lang.model.element.Modifier;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

/**
 * A {@link Language} that renders types and methods as the decompiler's
 * intermediate bytecode AST rather than as Java source. Optimization can be
 * stopped early ({@code _abortBeforeStep}) to expose intermediate AST forms;
 * {@link #getDebugLanguages()} builds one such language per optimization step.
 */
public class BytecodeAstLanguage extends Language {
    private final String _name;
    // When true, single-use temporaries are inlined into their consuming expression.
    private final boolean _inlineVariables;
    // Optimization is run only up to (not including) this step; None means full optimization.
    private final AstOptimizationStep _abortBeforeStep;

    public BytecodeAstLanguage() {
        this("Bytecode AST", true, AstOptimizationStep.None);
    }

    private BytecodeAstLanguage(final String name, final boolean inlineVariables, final AstOptimizationStep abortBeforeStep) {
        _name = name;
        _inlineVariables = inlineVariables;
        _abortBeforeStep = abortBeforeStep;
    }

    @Override
    public String getName() {
        return _name;
    }

    @Override
    public String getFileExtension() {
        return ".jvm";
    }

    /**
     * Writes the type header, then each declared method (blank-line separated),
     * then nested types unless excluded by the settings. Always closes the brace
     * and unindents, even if a method fails to decompile.
     */
    @Override
    public TypeDecompilationResults decompileType(final TypeDefinition type, final ITextOutput output, final DecompilationOptions options) {
        writeTypeHeader(type, output);

        output.writeLine(" {");
        output.indent();

        try {
            boolean first = true;

            for (final MethodDefinition method : type.getDeclaredMethods()) {
                if (!first) {
                    output.writeLine();
                } else {
                    first = false;
                }

                decompileMethod(method, output, options);
            }

            if (!options.getSettings().getExcludeNestedTypes()) {
                for (final TypeDefinition innerType : type.getDeclaredTypes()) {
                    output.writeLine();
                    decompileType(innerType, output, options);
                }
            }
        } finally {
            output.unindent();
            output.writeLine("}");
        }

        return new TypeDecompilationResults(null /*no line number mapping*/);
    }

    /**
     * Writes the method header followed by its AST body: the (optionally
     * partially-optimized) statements preceded by declarations of all
     * non-parameter variables. Abstract/native methods (no body) are emitted
     * as a bare declaration. Decompilation failures are written into the
     * output as a stack-trace comment rather than propagated.
     */
    @Override
    @SuppressWarnings("ConstantConditions")
    public void decompileMethod(final MethodDefinition method, final ITextOutput output, final DecompilationOptions options) {
        VerifyArgument.notNull(method, "method");
        VerifyArgument.notNull(output, "output");
        VerifyArgument.notNull(options, "options");

        writeMethodHeader(method, output);

        final MethodBody body = method.getBody();

        if (body == null) {
            // No bytecode (e.g. abstract or native): emit "...;" and stop.
            output.writeDelimiter(";");
            output.writeLine();
            return;
        }

        final DecompilerContext context = new DecompilerContext();

        context.setCurrentMethod(method);
        context.setCurrentType(method.getDeclaringType());

        final Block methodAst = new Block();

        output.writeLine(" {");
        output.indent();

        try {
            methodAst.getBody().addAll(AstBuilder.build(body, _inlineVariables, context));

            if (_abortBeforeStep != null) {
                AstOptimizer.optimize(context, methodAst, _abortBeforeStep);
            }

            // Collect every distinct local variable (parameters excluded) so it can
            // be declared, with its type, before the statements.
            final Set<Variable> allVariables = new LinkedHashSet<>();

            for (final Expression e : methodAst.getSelfAndChildrenRecursive(Expression.class)) {
                final Object operand = e.getOperand();

                if (operand instanceof Variable && !((Variable) operand).isParameter()) {
                    allVariables.add((Variable) operand);
                }
            }

            if (!allVariables.isEmpty()) {
                for (final Variable variable : allVariables) {
                    output.writeDefinition(variable.getName(), variable);

                    final TypeReference type = variable.getType();

                    if (type != null) {
                        output.writeDelimiter(" : ");
                        DecompilerHelpers.writeType(output, type, NameSyntax.SHORT_TYPE_NAME);
                    }

                    if (variable.isGenerated()) {
                        output.write(" [generated]");
                    }

                    output.writeLine();
                }

                output.writeLine();
            }

            methodAst.writeTo(output);
        } catch (final Throwable t) {
            writeError(output, t);
        } finally {
            output.unindent();
            output.writeLine("}");
        }
    }

    // Writes a throwable's stack trace into the output as line comments
    // (tabs flattened to spaces so the comment stays on one line each).
    private static void writeError(final ITextOutput output, final Throwable t) {
        final List<String> lines = StringUtilities.split(
            ExceptionUtilities.getStackTraceString(t),
            true,
            '\r',
            '\n'
        );

        for (final String line : lines) {
            output.writeComment("// " + line.replace("\t", " "));
            output.writeLine();
        }
    }

    /**
     * Writes modifiers and the kind keyword (class / interface / @interface /
     * enum) followed by the type name. Interfaces drop the implicit ABSTRACT
     * flag; enums keep only their access flags.
     */
    private void writeTypeHeader(final TypeDefinition type, final ITextOutput output) {
        long flags = type.getFlags() & (Flags.ClassFlags | Flags.STATIC | Flags.FINAL);

        if (type.isInterface()) {
            flags &= ~Flags.ABSTRACT;
        } else if (type.isEnum()) {
            flags &= Flags.AccessFlags;
        }

        for (final Modifier modifier : Flags.asModifierSet(flags)) {
            output.writeKeyword(modifier.toString());
            output.write(' ');
        }

        if (type.isInterface()) {
            if (type.isAnnotation()) {
                output.writeKeyword("@interface");
            } else {
                output.writeKeyword("interface");
            }
        } else if (type.isEnum()) {
            output.writeKeyword("enum");
        } else {
            output.writeKeyword("class");
        }

        output.write(' ');

        DecompilerHelpers.writeType(output, type, NameSyntax.TYPE_NAME, true);
    }

    /**
     * Writes a method signature: modifiers (skipped for interface members),
     * generic parameters, return type, name (declaring type name for
     * constructors), and the parameter list. A type initializer is rendered
     * as the bare keyword "static".
     */
    private void writeMethodHeader(final MethodDefinition method, final ITextOutput output) {
        if (method.isTypeInitializer()) {
            output.writeKeyword("static");
            return;
        }

        if (!method.getDeclaringType().isInterface()) {
            for (final Modifier modifier : Flags.asModifierSet(method.getFlags() & Flags.MethodFlags)) {
                output.writeKeyword(modifier.toString());
                output.write(' ');
            }
        }

        final List<GenericParameter> genericParameters = method.getGenericParameters();

        if (!genericParameters.isEmpty()) {
            output.writeDelimiter("<");

            for (int i = 0; i < genericParameters.size(); i++) {
                final GenericParameter gp = genericParameters.get(i);

                if (i != 0) {
                    output.writeDelimiter(", ");
                }

                DecompilerHelpers.writeType(output, gp, NameSyntax.TYPE_NAME);
            }

            output.writeDelimiter(">");
            output.write(' ');
        }

        if (!method.isTypeInitializer()) {
            DecompilerHelpers.writeType(output, method.getReturnType(), NameSyntax.TYPE_NAME);
            output.write(' ');

            if (method.isConstructor()) {
                output.writeReference(method.getDeclaringType().getName(), method.getDeclaringType());
            } else {
                output.writeReference(method.getName(), method);
            }

            output.writeDelimiter("(");

            final List<ParameterDefinition> parameters = method.getParameters();

            for (int i = 0; i < parameters.size(); i++) {
                final ParameterDefinition parameter = parameters.get(i);

                if (i != 0) {
                    output.writeDelimiter(", ");
                }

                DecompilerHelpers.writeType(output, parameter.getParameterType(), NameSyntax.TYPE_NAME);
                output.write(' ');
                output.writeReference(parameter.getName(), parameter);
            }

            output.writeDelimiter(")");
        }
    }

    @Override
    public String typeToString(final TypeReference type, final boolean includePackage) {
        final ITextOutput output = new PlainTextOutput();
        DecompilerHelpers.writeType(output, type, includePackage ? NameSyntax.TYPE_NAME : NameSyntax.SHORT_TYPE_NAME);
        return output.toString();
    }

    /**
     * Builds one language per optimization step: index 0 is the raw
     * (unoptimized, no variable inlining) AST; language i aborts before
     * steps[i - 1], so each successive entry shows the AST after one more
     * optimization. The returned list is unmodifiable.
     */
    public static List<BytecodeAstLanguage> getDebugLanguages() {
        final AstOptimizationStep[] steps = AstOptimizationStep.values();
        final BytecodeAstLanguage[] languages = new BytecodeAstLanguage[steps.length];

        languages[0] = new BytecodeAstLanguage("Bytecode AST (Unoptimized)", false, steps[0]);

        String nextName = "Bytecode AST (Variable Splitting)";

        for (int i = 1; i < languages.length; i++) {
            languages[i] = new BytecodeAstLanguage(nextName, true, steps[i - 1]);
            nextName = "Bytecode AST (After " + steps[i - 1].name() + ")";
        }

        return ArrayUtilities.asUnmodifiableList(languages);
    }
}
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package org.chromium.chrome.browser.sync;

import android.accounts.Account;
import android.app.Activity;
import android.content.Context;
import android.util.Log;

import org.chromium.base.ThreadUtils;
import org.chromium.chrome.browser.invalidation.InvalidationController;
import org.chromium.chrome.browser.signin.SigninManager;
import org.chromium.chrome.browser.signin.SigninManager.SignInFlowObserver;
import org.chromium.sync.AndroidSyncSettings;
import org.chromium.sync.signin.AccountManagerHelper;
import org.chromium.sync.signin.ChromeSigninController;

/**
 * SyncController handles the coordination of sync state between the invalidation controller,
 * the Android sync settings, and the native sync code.
 *
 * Sync state can be changed from four places:
 *
 * - The Chrome UI, which will call SyncController directly.
 * - Native sync, which can disable it via a dashboard stop and clear.
 * - Android's Chrome sync setting.
 * - Android's master sync setting.
 *
 * SyncController implements listeners for the last three cases. When master sync is disabled, we
 * are careful to not change the Android Chrome sync setting so we know whether to turn sync back
 * on when it is re-enabled.
 */
public class SyncController implements ProfileSyncService.SyncStateChangedListener,
        AndroidSyncSettings.AndroidSyncSettingsObserver {
    private static final String TAG = "SyncController";

    // Lazily created singleton; guarded by the UI-thread-only access in get().
    private static SyncController sInstance;

    private final Context mContext;
    private final ChromeSigninController mChromeSigninController;
    private final AndroidSyncSettings mAndroidSyncSettings;
    private final ProfileSyncService mProfileSyncService;

    // TODO(maxbogue): Make final once it's constructed in this class.
    private SyncNotificationController mSyncNotificationController = null;

    // Registers this instance as both an AndroidSyncSettings observer and a
    // ProfileSyncService sync-state listener, and ensures GCM is initialized.
    private SyncController(Context context) {
        mContext = context;
        mChromeSigninController = ChromeSigninController.get(mContext);
        mAndroidSyncSettings = AndroidSyncSettings.get(context);
        mAndroidSyncSettings.registerObserver(this);
        mProfileSyncService = ProfileSyncService.get(mContext);
        mProfileSyncService.addSyncStateChangedListener(this);

        mChromeSigninController.ensureGcmIsInitialized();
    }

    /**
     * Retrieve the singleton instance of this class.
     *
     * Must be called on the UI thread; the singleton is created on first use
     * with the application context (not the passed activity/context).
     *
     * @param context the current context.
     * @return the singleton instance.
     */
    public static SyncController get(Context context) {
        ThreadUtils.assertOnUiThread();
        if (sInstance == null) {
            sInstance = new SyncController(context.getApplicationContext());
        }
        return sInstance;
    }

    /**
     * Trigger Chromium sign in of the given account.
     *
     * This also ensure that sync setup is not in progress anymore, so sync will start after
     * sync initialization has happened.
     *
     * @param activity the current activity.
     * @param accountName the full account name.
     */
    public void signIn(Activity activity, String accountName) {
        final Account account = AccountManagerHelper.createAccountFromName(accountName);

        // The SigninManager handles most of the sign-in flow, and doFinishSignIn handles the
        // ChromeShell specific details.
        SigninManager signinManager = SigninManager.get(mContext);
        signinManager.onFirstRunCheckDone();
        final boolean passive = false;
        signinManager.startSignIn(activity, account, passive, new SignInFlowObserver() {
            @Override
            public void onSigninComplete() {
                SigninManager.get(mContext).logInSignedInUser();
                // Mark setup complete before signing sync in so sync can start.
                mProfileSyncService.setSetupInProgress(false);
                mProfileSyncService.syncSignIn();
                start();
            }

            @Override
            public void onSigninCancelled() {
                stop();
            }
        });
    }

    /**
     * Updates sync to reflect the state of the Android sync settings.
     */
    public void updateSyncStateFromAndroid() {
        if (mAndroidSyncSettings.isSyncEnabled()) {
            start();
        } else {
            stop();
        }
    }

    /**
     * Starts sync if the master sync flag is enabled.
     *
     * Affects native sync, the invalidation controller, and the Android sync settings.
     */
    public void start() {
        ThreadUtils.assertOnUiThread();
        if (mAndroidSyncSettings.isMasterSyncEnabled()) {
            Log.d(TAG, "Enabling sync");
            Account account = mChromeSigninController.getSignedInUser();
            InvalidationController.get(mContext).start();
            mProfileSyncService.enableSync();
            mAndroidSyncSettings.enableChromeSync(account);
        }
    }

    /**
     * Stops Sync if a user is currently signed in.
     *
     * Affects native sync, the invalidation controller, and the Android sync settings.
     */
    public void stop() {
        ThreadUtils.assertOnUiThread();
        if (mChromeSigninController.isSignedIn()) {
            Log.d(TAG, "Disabling sync");
            Account account = mChromeSigninController.getSignedInUser();
            InvalidationController.get(mContext).stop();
            mProfileSyncService.disableSync();
            if (mAndroidSyncSettings.isMasterSyncEnabled()) {
                // Only disable Android's Chrome sync setting if we weren't disabled
                // by the master sync setting. This way, when master sync is enabled
                // they will both be on and sync will start again.
                mAndroidSyncSettings.disableChromeSync(account);
            }
        }
    }

    /**
     * From {@link ProfileSyncService.SyncStateChangedListener}.
     *
     * Changes the invalidation controller and Android sync setting state to match
     * the new native sync state.
     */
    @Override
    public void syncStateChanged() {
        ThreadUtils.assertOnUiThread();
        Account account = mChromeSigninController.getSignedInUser();
        // Don't do anything if there isn't an account.
        if (account == null) return;
        // Native sync being "start suppressed" means it is effectively off.
        boolean isSyncActive = !mProfileSyncService.isStartSuppressed();
        // Make the Java state match the native state.
        if (isSyncActive) {
            InvalidationController.get(mContext).start();
            mAndroidSyncSettings.enableChromeSync(account);
        } else {
            InvalidationController.get(mContext).stop();
            if (mAndroidSyncSettings.isMasterSyncEnabled()) {
                // See comment in stop().
                mAndroidSyncSettings.disableChromeSync(account);
            }
        }
    }

    /**
     * From {@link AndroidSyncSettings.AndroidSyncSettingsObserver}.
     */
    @Override
    public void androidSyncSettingsChanged() {
        // The settings callback may arrive off the UI thread; hop to it before
        // touching sync state.
        ThreadUtils.runOnUiThread(new Runnable() {
            @Override
            public void run() {
                updateSyncStateFromAndroid();
            }
        });
    }

    /**
     * Sets the SyncNotificationController.
     *
     * This is a temporary method for transferring ownership of SyncNotificationController
     * upstream. Once all of SNC's dependencies are upstreamed, it will be created in the
     * SyncController constructor and this method won't exist.
     */
    public void setSyncNotificationController(SyncNotificationController snc) {
        assert mSyncNotificationController == null;
        mSyncNotificationController = snc;
        mProfileSyncService.addSyncStateChangedListener(mSyncNotificationController);
    }

    /**
     * Returns the SyncNotificationController.
     */
    public SyncNotificationController getSyncNotificationController() {
        return mSyncNotificationController;
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.irc;

import java.io.UnsupportedEncodingException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URLDecoder;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;

import org.apache.camel.RuntimeCamelException;
import org.apache.camel.spi.Metadata;
import org.apache.camel.spi.UriParam;
import org.apache.camel.spi.UriParams;
import org.apache.camel.spi.UriPath;
import org.apache.camel.util.ObjectHelper;
import org.apache.camel.util.URISupport;
import org.apache.camel.util.UnsafeUriCharactersEncoder;
import org.apache.camel.util.jsse.SSLContextParameters;
import org.schwering.irc.lib.ssl.SSLDefaultTrustManager;
import org.schwering.irc.lib.ssl.SSLTrustManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Configuration for the Camel IRC component/endpoint: connection details
 * (host, ports, credentials, SSL) plus flags controlling which IRC events
 * the consumer reacts to.
 */
@UriParams
public class IrcConfiguration implements Cloneable {
    private static final Logger LOG = LoggerFactory.getLogger(IrcConfiguration.class);

    // Channels (with optional keys) the endpoint joins.
    private List<IrcChannel> channels = new ArrayList<IrcChannel>();
    @UriPath @Metadata(required = "true")
    private String hostname;
    @UriPath(defaultValue = "6667,6668,6669")
    private int port;
    // Fallback ports tried in order when no explicit port is given.
    private int[] ports = {6667, 6668, 6669};
    @UriParam
    private String password;
    @UriParam
    private String nickname;
    @UriParam
    private String realname;
    @UriParam
    private String username;
    private SSLTrustManager trustManager = new SSLDefaultTrustManager();
    private boolean usingSSL;
    @UriParam(defaultValue = "true")
    private boolean persistent = true;
    @UriParam(defaultValue = "true")
    private boolean colors = true;
    @UriParam(defaultValue = "true")
    private boolean onNick = true;
    @UriParam(defaultValue = "true")
    private boolean onQuit = true;
    @UriParam(defaultValue = "true")
    private boolean onJoin = true;
    @UriParam(defaultValue = "true")
    private boolean onKick = true;
    @UriParam(defaultValue = "true")
    private boolean onMode = true;
    @UriParam(defaultValue = "true")
    private boolean onPart = true;
    @UriParam
    private boolean onReply;
    @UriParam(defaultValue = "true")
    private boolean onTopic = true;
    @UriParam(defaultValue = "true")
    private boolean onPrivmsg = true;
    @UriParam(defaultValue = "true")
    private boolean autoRejoin = true;
    private SSLContextParameters sslContextParameters;
    @UriParam
    private String nickPassword;

    public IrcConfiguration() {
    }

    public IrcConfiguration(String hostname, String nickname, String displayname, List<IrcChannel> channels) {
        this(hostname, null, null, nickname, displayname, channels);
    }

    public IrcConfiguration(String hostname, String username, String password, String nickname, String displayname, List<IrcChannel> channels) {
        this.channels = channels;
        this.hostname = hostname;
        this.username = username;
        this.password = password;
        this.nickname = nickname;
        this.realname = displayname;
    }

    /**
     * Returns a copy of this configuration.
     * <p>
     * The channel list is copied as well, so adding channels to the copy
     * (via {@link #setChannel(String)}) does not mutate the original
     * configuration. (A plain {@code clone()} would alias the list.)
     */
    public IrcConfiguration copy() {
        try {
            IrcConfiguration copy = (IrcConfiguration) clone();
            // FIX: clone() is shallow; give the copy its own channel list so
            // later mutations on either instance stay independent.
            copy.channels = new ArrayList<IrcChannel>(this.channels);
            return copy;
        } catch (CloneNotSupportedException e) {
            throw new RuntimeCamelException(e);
        }
    }

    /**
     * Key identifying the underlying IRC connection: one connection per
     * host/nick pair is shared between endpoints.
     */
    public String getCacheKey() {
        return hostname + ":" + nickname;
    }

    /**
     * Returns a space separated list of channel names, without passwords/keys.
     */
    public String getListOfChannels() {
        // Use a StringBuilder instead of repeated String concatenation
        // (the original was accidentally O(n^2)).
        StringBuilder retval = new StringBuilder();
        for (IrcChannel channel : channels) {
            if (retval.length() > 0) {
                retval.append(' ');
            }
            retval.append(channel.getName());
        }
        return retval.toString();
    }

    /**
     * Configures hostname, port, credentials and SSL flag from the endpoint
     * URI. Query parameters are stripped before parsing; a channel given in
     * the URI path is only warned about (use the @channel query parameter).
     *
     * @throws URISyntaxException if the (fixed-up) URI cannot be parsed
     */
    public void configure(String uriStr) throws URISyntaxException, UnsupportedEncodingException {
        // fix provided URI and handle that we can use # to indicate the IRC room
        if (uriStr.startsWith("ircs")) {
            setUsingSSL(true);
            if (!uriStr.startsWith("ircs://")) {
                uriStr = uriStr.replace("ircs:", "ircs://");
            }
        } else if (!uriStr.startsWith("irc://")) {
            uriStr = uriStr.replace("irc:", "irc://");
        }

        if (uriStr.contains("?")) {
            uriStr = ObjectHelper.before(uriStr, "?");
        }

        URI uri = new URI(uriStr);

        // Because we can get a "sanitized" URI, we need to deal with the situation where the
        // user info includes the username and password together or else we get a mangled username
        // that includes the user's secret being sent to the server.
        String userInfo = uri.getUserInfo();
        String username = null;
        String password = null;
        if (userInfo != null) {
            int colonIndex = userInfo.indexOf(":");
            if (colonIndex != -1) {
                username = userInfo.substring(0, colonIndex);
                password = userInfo.substring(colonIndex + 1);
            } else {
                username = userInfo;
            }
        }

        if (uri.getPort() != -1) {
            setPorts(new int[] {uri.getPort()});
            setPort(uri.getPort());
        }

        // nickname/realname default to the username unless set via query parameters
        setNickname(username);
        setUsername(username);
        setRealname(username);
        setPassword(password);
        setHostname(uri.getHost());

        String path = uri.getPath();
        if (path != null && !path.isEmpty()) {
            LOG.warn("Channel {} should not be specified in the URI path. Use an @channel query parameter instead.", path);
        }
    }

    /**
     * Adds a channel, given as "name" or "name!key".
     */
    public void setChannel(String channel) {
        channels.add(createChannel(channel));
    }

    /**
     * Adds several channels, each given as "name" or "name!key".
     */
    public void setChannel(List<String> channels) {
        for (String ci : channels) {
            this.channels.add(createChannel(ci));
        }
    }

    public List<IrcChannel> getChannels() {
        return channels;
    }

    /**
     * Returns the configured channel with the given name, or null if unknown.
     */
    public IrcChannel findChannel(String name) {
        for (IrcChannel channel : channels) {
            if (channel.getName().equals(name)) {
                return channel;
            }
        }
        return null;
    }

    /**
     * The trust manager used to verify the SSL server's certificate.
     */
    public void setTrustManager(SSLTrustManager trustManager) {
        this.trustManager = trustManager;
    }

    public SSLTrustManager getTrustManager() {
        return trustManager;
    }

    public boolean getUsingSSL() {
        return usingSSL;
    }

    // Derived from the URI scheme ("ircs"), hence private.
    private void setUsingSSL(boolean usingSSL) {
        this.usingSSL = usingSSL;
    }

    public String getHostname() {
        return hostname;
    }

    /**
     * Hostname for the IRC chat server
     */
    public void setHostname(String hostname) {
        this.hostname = hostname;
    }

    public String getPassword() {
        return password;
    }

    /**
     * The IRC server password.
     */
    public void setPassword(String password) {
        this.password = password;
    }

    public String getNickname() {
        return nickname;
    }

    /**
     * The nickname used in chat.
     */
    public void setNickname(String nickname) {
        this.nickname = nickname;
    }

    public String getRealname() {
        return realname;
    }

    /**
     * The IRC user's actual name.
     */
    public void setRealname(String realname) {
        this.realname = realname;
    }

    public String getUsername() {
        return username;
    }

    /**
     * The IRC server user name.
     */
    public void setUsername(String username) {
        this.username = username;
    }

    public int[] getPorts() {
        return ports;
    }

    /**
     * Port numbers for the IRC chat server
     */
    public void setPorts(int[] ports) {
        this.ports = ports;
    }

    public int getPort() {
        return port;
    }

    /**
     * Port number for the IRC chat server
     */
    public void setPort(int port) {
        this.port = port;
    }

    public boolean isPersistent() {
        return persistent;
    }

    /**
     * Use persistent messages.
     * @deprecated not in use
     */
    @Deprecated
    public void setPersistent(boolean persistent) {
        this.persistent = persistent;
    }

    public boolean isColors() {
        return colors;
    }

    /**
     * Whether or not the server supports color codes.
     */
    public void setColors(boolean colors) {
        this.colors = colors;
    }

    public boolean isOnNick() {
        return onNick;
    }

    /**
     * Handle nickname change events.
     */
    public void setOnNick(boolean onNick) {
        this.onNick = onNick;
    }

    public boolean isOnQuit() {
        return onQuit;
    }

    /**
     * Handle user quit events.
     */
    public void setOnQuit(boolean onQuit) {
        this.onQuit = onQuit;
    }

    public boolean isOnJoin() {
        return onJoin;
    }

    /**
     * Handle user join events.
     */
    public void setOnJoin(boolean onJoin) {
        this.onJoin = onJoin;
    }

    public boolean isOnKick() {
        return onKick;
    }

    /**
     * Handle kick events.
     */
    public void setOnKick(boolean onKick) {
        this.onKick = onKick;
    }

    public boolean isOnMode() {
        return onMode;
    }

    /**
     * Handle mode change events.
     */
    public void setOnMode(boolean onMode) {
        this.onMode = onMode;
    }

    public boolean isOnPart() {
        return onPart;
    }

    /**
     * Handle user part events.
     */
    public void setOnPart(boolean onPart) {
        this.onPart = onPart;
    }

    public boolean isOnReply() {
        return onReply;
    }

    /**
     * Whether or not to handle general responses to commands or informational messages.
     */
    public void setOnReply(boolean onReply) {
        this.onReply = onReply;
    }

    public boolean isOnTopic() {
        return onTopic;
    }

    /**
     * Handle topic change events.
     */
    public void setOnTopic(boolean onTopic) {
        this.onTopic = onTopic;
    }

    public boolean isOnPrivmsg() {
        return onPrivmsg;
    }

    /**
     * Handle private message events.
     */
    public void setOnPrivmsg(boolean onPrivmsg) {
        this.onPrivmsg = onPrivmsg;
    }

    public boolean isAutoRejoin() {
        return autoRejoin;
    }

    /**
     * Whether to auto re-join when being kicked
     */
    public void setAutoRejoin(boolean autoRejoin) {
        this.autoRejoin = autoRejoin;
    }

    public SSLContextParameters getSslContextParameters() {
        return sslContextParameters;
    }

    /**
     * Used for configuring security using SSL.
     * Reference to a org.apache.camel.util.jsse.SSLContextParameters in the Registry.
     * This reference overrides any configured SSLContextParameters at the component level.
     * Note that this setting overrides the trustManager option.
     */
    public void setSslContextParameters(SSLContextParameters sslContextParameters) {
        this.sslContextParameters = sslContextParameters;
    }

    public String getNickPassword() {
        return nickPassword;
    }

    /**
     * Your IRC server nickname password.
     */
    public void setNickPassword(String nickPassword) {
        this.nickPassword = nickPassword;
    }

    @Override
    public String toString() {
        // deliberately excludes password/nickPassword so secrets are never logged
        return "IrcConfiguration[hostname: " + hostname + ", ports=" + Arrays.toString(ports) + ", username=" + username + "]";
    }

    // Parses "name" or "name!key" into an IrcChannel.
    private static IrcChannel createChannel(String channelInfo) {
        String[] pair = channelInfo.split("!");
        return new IrcChannel(pair[0], pair.length > 1 ? pair[1] : null);
    }

    /**
     * Normalizes a legacy IRC endpoint URI: merges duplicated user/nick
     * parameters, moves path channels into the @channel parameter (pairing
     * them with @keys using the "channel!key" convention) and rebuilds a
     * predictable URI string.
     *
     * @deprecated may be removed in camel-3.0.0
     */
    @Deprecated
    public static String sanitize(String uri) {
        // make sure it's an URL first
        int colon = uri.indexOf(':');
        if (colon != -1 && uri.indexOf("://") != colon) {
            uri = uri.substring(0, colon) + "://" + uri.substring(colon + 1);
        }

        try {
            URI u = new URI(UnsafeUriCharactersEncoder.encode(uri));
            String[] userInfo = u.getUserInfo() != null ? u.getUserInfo().split(":") : null;
            String username = userInfo != null ? userInfo[0] : null;
            String password = userInfo != null && userInfo.length > 1 ? userInfo[1] : null;

            String path = URLDecoder.decode(u.getPath() != null ? u.getPath() : "", "UTF-8");
            if (path.startsWith("/")) {
                path = path.substring(1);
            }
            // a single leading '#' is just URI noise; '##' is a real channel prefix
            if (path.startsWith("#") && !path.startsWith("##")) {
                path = path.substring(1);
            }

            Map<String, Object> parameters = URISupport.parseParameters(u);
            String user = (String)parameters.get("username");
            String nick = (String)parameters.get("nickname");
            // not specified in authority
            if (user != null) {
                if (username == null) {
                    username = user;
                } else if (!username.equals(user)) {
                    // NOTE: fixed typo "paramter" -> "parameter" in this warning
                    LOG.warn("Username specified twice in endpoint URI with different values. "
                        + "The userInfo value ('{}') will be used, parameter ('{}') ignored", username, user);
                }
                parameters.remove("username");
            }
            if (nick != null) {
                if (username == null) {
                    username = nick;
                }
                if (username.equals(nick)) {
                    parameters.remove("nickname"); // redundant
                }
            }
            if (username == null) {
                throw new RuntimeCamelException("IrcEndpoint URI with no user/nick specified is invalid");
            }

            String pwd = (String)parameters.get("password");
            if (pwd != null) {
                password = pwd;
                parameters.remove("password");
            }

            // Remove unneeded '#' channel prefixes per convention
            // and replace ',' separators and merge channel and key using convention "channel!key"
            List<String> cl = new ArrayList<String>();
            String channels = (String)parameters.get("channels");
            String keys = (String)parameters.get("keys");
            // if @keys ends with a ',' it will miss the last empty key after split(",")
            keys = keys == null ? keys : keys + " ";
            if (channels != null) {
                String[] chs = channels.split(",");
                String[] ks = keys != null ? keys.split(",") : null;
                parameters.remove("channels");
                int count = chs.length;
                if (ks != null) {
                    parameters.remove("keys");
                    if (!path.isEmpty()) {
                        LOG.warn("Specifying a channel '{}' in the URI path is ambiguous"
                            + " when @channels and @keys are provided and will be ignored", path);
                        path = "";
                    }
                    if (ks.length != chs.length) {
                        // pair only as many channels/keys as both lists provide
                        count = count < ks.length ? count : ks.length;
                        LOG.warn("Different count of @channels and @keys. Only the first {} are used.", count);
                    }
                }
                for (int i = 0; i < count; i++) {
                    String channel = chs[i].trim();
                    String key = ks != null ? ks[i].trim() : null;
                    if (channel.startsWith("#") && !channel.startsWith("##")) {
                        channel = channel.substring(1);
                    }
                    if (key != null && !key.isEmpty()) {
                        channel += "!" + key;
                    }
                    cl.add(channel);
                }
            } else {
                if (path.isEmpty()) {
                    LOG.warn("No channel specified for the irc endpoint");
                }
                cl.add(path);
            }
            parameters.put("channel", cl);

            StringBuilder sb = new StringBuilder();
            sb.append(u.getScheme());
            sb.append("://");
            sb.append(username);
            sb.append(password == null ? "" : ":" + password);
            sb.append("@");
            sb.append(u.getHost());
            sb.append(u.getPort() == -1 ? "" : ":" + u.getPort());
            // ignore the path we have it as a @channel now
            String query = formatQuery(parameters);
            if (!query.isEmpty()) {
                sb.append("?");
                sb.append(query);
            }
            // make things a bit more predictable
            return sb.toString();
        } catch (Exception e) {
            throw new RuntimeCamelException(e);
        }
    }

    // Renders the parameter map back into a query string; list values expand
    // into repeated key=value pairs.
    private static String formatQuery(Map<String, Object> params) {
        if (params == null || params.size() == 0) {
            return "";
        }
        StringBuilder result = new StringBuilder();
        for (Map.Entry<String, Object> pair : params.entrySet()) {
            Object value = pair.getValue();
            // the value may be a list since the same key has multiple values
            if (value instanceof List) {
                List<?> list = (List<?>)value;
                for (Object s : list) {
                    addQueryParameter(result, pair.getKey(), s);
                }
            } else {
                addQueryParameter(result, pair.getKey(), value);
            }
        }
        return result.toString();
    }

    // Appends "&key=encodedValue" (omits '=' for null/empty values).
    private static void addQueryParameter(StringBuilder sb, String key, Object value) {
        sb.append(sb.length() == 0 ? "" : "&");
        sb.append(key);
        if (value != null) {
            String s = value.toString();
            sb.append(s.isEmpty() ? "" : "=" + UnsafeUriCharactersEncoder.encode(s));
        }
    }
}
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v9/services/campaign_bid_modifier_service.proto package com.google.ads.googleads.v9.services; /** * <pre> * A single operation (create, remove, update) on a campaign bid modifier. * </pre> * * Protobuf type {@code google.ads.googleads.v9.services.CampaignBidModifierOperation} */ public final class CampaignBidModifierOperation extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v9.services.CampaignBidModifierOperation) CampaignBidModifierOperationOrBuilder { private static final long serialVersionUID = 0L; // Use CampaignBidModifierOperation.newBuilder() to construct. private CampaignBidModifierOperation(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CampaignBidModifierOperation() { } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new CampaignBidModifierOperation(); } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private CampaignBidModifierOperation( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.ads.googleads.v9.resources.CampaignBidModifier.Builder subBuilder = null; if (operationCase_ == 1) { subBuilder = ((com.google.ads.googleads.v9.resources.CampaignBidModifier) operation_).toBuilder(); } operation_ = 
input.readMessage(com.google.ads.googleads.v9.resources.CampaignBidModifier.parser(), extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom((com.google.ads.googleads.v9.resources.CampaignBidModifier) operation_); operation_ = subBuilder.buildPartial(); } operationCase_ = 1; break; } case 18: { com.google.ads.googleads.v9.resources.CampaignBidModifier.Builder subBuilder = null; if (operationCase_ == 2) { subBuilder = ((com.google.ads.googleads.v9.resources.CampaignBidModifier) operation_).toBuilder(); } operation_ = input.readMessage(com.google.ads.googleads.v9.resources.CampaignBidModifier.parser(), extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom((com.google.ads.googleads.v9.resources.CampaignBidModifier) operation_); operation_ = subBuilder.buildPartial(); } operationCase_ = 2; break; } case 26: { java.lang.String s = input.readStringRequireUtf8(); operationCase_ = 3; operation_ = s; break; } case 34: { com.google.protobuf.FieldMask.Builder subBuilder = null; if (updateMask_ != null) { subBuilder = updateMask_.toBuilder(); } updateMask_ = input.readMessage(com.google.protobuf.FieldMask.parser(), extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(updateMask_); updateMask_ = subBuilder.buildPartial(); } break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v9.services.CampaignBidModifierServiceProto.internal_static_google_ads_googleads_v9_services_CampaignBidModifierOperation_descriptor; } @java.lang.Override protected 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v9.services.CampaignBidModifierServiceProto.internal_static_google_ads_googleads_v9_services_CampaignBidModifierOperation_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v9.services.CampaignBidModifierOperation.class, com.google.ads.googleads.v9.services.CampaignBidModifierOperation.Builder.class); } private int operationCase_ = 0; private java.lang.Object operation_; public enum OperationCase implements com.google.protobuf.Internal.EnumLite, com.google.protobuf.AbstractMessage.InternalOneOfEnum { CREATE(1), UPDATE(2), REMOVE(3), OPERATION_NOT_SET(0); private final int value; private OperationCase(int value) { this.value = value; } /** * @param value The number of the enum to look for. * @return The enum associated with the given number. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static OperationCase valueOf(int value) { return forNumber(value); } public static OperationCase forNumber(int value) { switch (value) { case 1: return CREATE; case 2: return UPDATE; case 3: return REMOVE; case 0: return OPERATION_NOT_SET; default: return null; } } public int getNumber() { return this.value; } }; public OperationCase getOperationCase() { return OperationCase.forNumber( operationCase_); } public static final int UPDATE_MASK_FIELD_NUMBER = 4; private com.google.protobuf.FieldMask updateMask_; /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 4;</code> * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return updateMask_ != null; } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 4;</code> * @return The updateMask. 
*/ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 4;</code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return getUpdateMask(); } public static final int CREATE_FIELD_NUMBER = 1; /** * <pre> * Create operation: No resource name is expected for the new campaign bid * modifier. * </pre> * * <code>.google.ads.googleads.v9.resources.CampaignBidModifier create = 1;</code> * @return Whether the create field is set. */ @java.lang.Override public boolean hasCreate() { return operationCase_ == 1; } /** * <pre> * Create operation: No resource name is expected for the new campaign bid * modifier. * </pre> * * <code>.google.ads.googleads.v9.resources.CampaignBidModifier create = 1;</code> * @return The create. */ @java.lang.Override public com.google.ads.googleads.v9.resources.CampaignBidModifier getCreate() { if (operationCase_ == 1) { return (com.google.ads.googleads.v9.resources.CampaignBidModifier) operation_; } return com.google.ads.googleads.v9.resources.CampaignBidModifier.getDefaultInstance(); } /** * <pre> * Create operation: No resource name is expected for the new campaign bid * modifier. * </pre> * * <code>.google.ads.googleads.v9.resources.CampaignBidModifier create = 1;</code> */ @java.lang.Override public com.google.ads.googleads.v9.resources.CampaignBidModifierOrBuilder getCreateOrBuilder() { if (operationCase_ == 1) { return (com.google.ads.googleads.v9.resources.CampaignBidModifier) operation_; } return com.google.ads.googleads.v9.resources.CampaignBidModifier.getDefaultInstance(); } public static final int UPDATE_FIELD_NUMBER = 2; /** * <pre> * Update operation: The campaign bid modifier is expected to have a valid * resource name. 
* </pre> * * <code>.google.ads.googleads.v9.resources.CampaignBidModifier update = 2;</code> * @return Whether the update field is set. */ @java.lang.Override public boolean hasUpdate() { return operationCase_ == 2; } /** * <pre> * Update operation: The campaign bid modifier is expected to have a valid * resource name. * </pre> * * <code>.google.ads.googleads.v9.resources.CampaignBidModifier update = 2;</code> * @return The update. */ @java.lang.Override public com.google.ads.googleads.v9.resources.CampaignBidModifier getUpdate() { if (operationCase_ == 2) { return (com.google.ads.googleads.v9.resources.CampaignBidModifier) operation_; } return com.google.ads.googleads.v9.resources.CampaignBidModifier.getDefaultInstance(); } /** * <pre> * Update operation: The campaign bid modifier is expected to have a valid * resource name. * </pre> * * <code>.google.ads.googleads.v9.resources.CampaignBidModifier update = 2;</code> */ @java.lang.Override public com.google.ads.googleads.v9.resources.CampaignBidModifierOrBuilder getUpdateOrBuilder() { if (operationCase_ == 2) { return (com.google.ads.googleads.v9.resources.CampaignBidModifier) operation_; } return com.google.ads.googleads.v9.resources.CampaignBidModifier.getDefaultInstance(); } public static final int REMOVE_FIELD_NUMBER = 3; /** * <pre> * Remove operation: A resource name for the removed campaign bid modifier * is expected, in this format: * `customers/{customer_id}/CampaignBidModifiers/{campaign_id}~{criterion_id}` * </pre> * * <code>string remove = 3;</code> * @return Whether the remove field is set. */ public boolean hasRemove() { return operationCase_ == 3; } /** * <pre> * Remove operation: A resource name for the removed campaign bid modifier * is expected, in this format: * `customers/{customer_id}/CampaignBidModifiers/{campaign_id}~{criterion_id}` * </pre> * * <code>string remove = 3;</code> * @return The remove. 
*/ public java.lang.String getRemove() { java.lang.Object ref = ""; if (operationCase_ == 3) { ref = operation_; } if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (operationCase_ == 3) { operation_ = s; } return s; } } /** * <pre> * Remove operation: A resource name for the removed campaign bid modifier * is expected, in this format: * `customers/{customer_id}/CampaignBidModifiers/{campaign_id}~{criterion_id}` * </pre> * * <code>string remove = 3;</code> * @return The bytes for remove. */ public com.google.protobuf.ByteString getRemoveBytes() { java.lang.Object ref = ""; if (operationCase_ == 3) { ref = operation_; } if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); if (operationCase_ == 3) { operation_ = b; } return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (operationCase_ == 1) { output.writeMessage(1, (com.google.ads.googleads.v9.resources.CampaignBidModifier) operation_); } if (operationCase_ == 2) { output.writeMessage(2, (com.google.ads.googleads.v9.resources.CampaignBidModifier) operation_); } if (operationCase_ == 3) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, operation_); } if (updateMask_ != null) { output.writeMessage(4, getUpdateMask()); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (operationCase_ 
== 1) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, (com.google.ads.googleads.v9.resources.CampaignBidModifier) operation_); } if (operationCase_ == 2) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, (com.google.ads.googleads.v9.resources.CampaignBidModifier) operation_); } if (operationCase_ == 3) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, operation_); } if (updateMask_ != null) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(4, getUpdateMask()); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v9.services.CampaignBidModifierOperation)) { return super.equals(obj); } com.google.ads.googleads.v9.services.CampaignBidModifierOperation other = (com.google.ads.googleads.v9.services.CampaignBidModifierOperation) obj; if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask() .equals(other.getUpdateMask())) return false; } if (!getOperationCase().equals(other.getOperationCase())) return false; switch (operationCase_) { case 1: if (!getCreate() .equals(other.getCreate())) return false; break; case 2: if (!getUpdate() .equals(other.getUpdate())) return false; break; case 3: if (!getRemove() .equals(other.getRemove())) return false; break; case 0: default: } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } switch (operationCase_) { case 1: hash = (37 * hash) + CREATE_FIELD_NUMBER; hash = (53 * hash) + getCreate().hashCode(); break; case 2: hash = (37 
* hash) + UPDATE_FIELD_NUMBER; hash = (53 * hash) + getUpdate().hashCode(); break; case 3: hash = (37 * hash) + REMOVE_FIELD_NUMBER; hash = (53 * hash) + getRemove().hashCode(); break; case 0: default: } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.googleads.v9.services.CampaignBidModifierOperation parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v9.services.CampaignBidModifierOperation parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v9.services.CampaignBidModifierOperation parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v9.services.CampaignBidModifierOperation parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v9.services.CampaignBidModifierOperation parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v9.services.CampaignBidModifierOperation parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v9.services.CampaignBidModifierOperation parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 
.parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v9.services.CampaignBidModifierOperation parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v9.services.CampaignBidModifierOperation parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v9.services.CampaignBidModifierOperation parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v9.services.CampaignBidModifierOperation parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v9.services.CampaignBidModifierOperation parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.ads.googleads.v9.services.CampaignBidModifierOperation prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * A single operation (create, remove, update) on a campaign bid modifier. * </pre> * * Protobuf type {@code google.ads.googleads.v9.services.CampaignBidModifierOperation} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v9.services.CampaignBidModifierOperation) com.google.ads.googleads.v9.services.CampaignBidModifierOperationOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v9.services.CampaignBidModifierServiceProto.internal_static_google_ads_googleads_v9_services_CampaignBidModifierOperation_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v9.services.CampaignBidModifierServiceProto.internal_static_google_ads_googleads_v9_services_CampaignBidModifierOperation_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v9.services.CampaignBidModifierOperation.class, com.google.ads.googleads.v9.services.CampaignBidModifierOperation.Builder.class); } // Construct using com.google.ads.googleads.v9.services.CampaignBidModifierOperation.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } @java.lang.Override public Builder clear() { super.clear(); if (updateMaskBuilder_ == null) { updateMask_ = null; } else { updateMask_ = null; 
updateMaskBuilder_ = null; } operationCase_ = 0; operation_ = null; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.ads.googleads.v9.services.CampaignBidModifierServiceProto.internal_static_google_ads_googleads_v9_services_CampaignBidModifierOperation_descriptor; } @java.lang.Override public com.google.ads.googleads.v9.services.CampaignBidModifierOperation getDefaultInstanceForType() { return com.google.ads.googleads.v9.services.CampaignBidModifierOperation.getDefaultInstance(); } @java.lang.Override public com.google.ads.googleads.v9.services.CampaignBidModifierOperation build() { com.google.ads.googleads.v9.services.CampaignBidModifierOperation result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v9.services.CampaignBidModifierOperation buildPartial() { com.google.ads.googleads.v9.services.CampaignBidModifierOperation result = new com.google.ads.googleads.v9.services.CampaignBidModifierOperation(this); if (updateMaskBuilder_ == null) { result.updateMask_ = updateMask_; } else { result.updateMask_ = updateMaskBuilder_.build(); } if (operationCase_ == 1) { if (createBuilder_ == null) { result.operation_ = operation_; } else { result.operation_ = createBuilder_.build(); } } if (operationCase_ == 2) { if (updateBuilder_ == null) { result.operation_ = operation_; } else { result.operation_ = updateBuilder_.build(); } } if (operationCase_ == 3) { result.operation_ = operation_; } result.operationCase_ = operationCase_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor 
field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.ads.googleads.v9.services.CampaignBidModifierOperation) { return mergeFrom((com.google.ads.googleads.v9.services.CampaignBidModifierOperation)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v9.services.CampaignBidModifierOperation other) { if (other == com.google.ads.googleads.v9.services.CampaignBidModifierOperation.getDefaultInstance()) return this; if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } switch (other.getOperationCase()) { case CREATE: { mergeCreate(other.getCreate()); break; } case UPDATE: { mergeUpdate(other.getUpdate()); break; } case REMOVE: { operationCase_ = 3; operation_ = other.operation_; onChanged(); break; } case OPERATION_NOT_SET: { break; } } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.ads.googleads.v9.services.CampaignBidModifierOperation parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { 
parsedMessage = (com.google.ads.googleads.v9.services.CampaignBidModifierOperation) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int operationCase_ = 0; private java.lang.Object operation_; public OperationCase getOperationCase() { return OperationCase.forNumber( operationCase_); } public Builder clearOperation() { operationCase_ = 0; operation_ = null; onChanged(); return this; } private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 4;</code> * @return Whether the updateMask field is set. */ public boolean hasUpdateMask() { return updateMaskBuilder_ != null || updateMask_ != null; } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 4;</code> * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 4;</code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; onChanged(); } else { updateMaskBuilder_.setMessage(value); } return this; } /** * <pre> * FieldMask that determines which resource fields are modified in an update. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 4;</code> */ public Builder setUpdateMask( com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); onChanged(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } return this; } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 4;</code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (updateMask_ != null) { updateMask_ = com.google.protobuf.FieldMask.newBuilder(updateMask_).mergeFrom(value).buildPartial(); } else { updateMask_ = value; } onChanged(); } else { updateMaskBuilder_.mergeFrom(value); } return this; } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 4;</code> */ public Builder clearUpdateMask() { if (updateMaskBuilder_ == null) { updateMask_ = null; onChanged(); } else { updateMask_ = null; updateMaskBuilder_ = null; } return this; } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 4;</code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 4;</code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * <pre> * FieldMask that determines which resource fields are modified in an update. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 4;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } private com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v9.resources.CampaignBidModifier, com.google.ads.googleads.v9.resources.CampaignBidModifier.Builder, com.google.ads.googleads.v9.resources.CampaignBidModifierOrBuilder> createBuilder_; /** * <pre> * Create operation: No resource name is expected for the new campaign bid * modifier. * </pre> * * <code>.google.ads.googleads.v9.resources.CampaignBidModifier create = 1;</code> * @return Whether the create field is set. */ @java.lang.Override public boolean hasCreate() { return operationCase_ == 1; } /** * <pre> * Create operation: No resource name is expected for the new campaign bid * modifier. * </pre> * * <code>.google.ads.googleads.v9.resources.CampaignBidModifier create = 1;</code> * @return The create. */ @java.lang.Override public com.google.ads.googleads.v9.resources.CampaignBidModifier getCreate() { if (createBuilder_ == null) { if (operationCase_ == 1) { return (com.google.ads.googleads.v9.resources.CampaignBidModifier) operation_; } return com.google.ads.googleads.v9.resources.CampaignBidModifier.getDefaultInstance(); } else { if (operationCase_ == 1) { return createBuilder_.getMessage(); } return com.google.ads.googleads.v9.resources.CampaignBidModifier.getDefaultInstance(); } } /** * <pre> * Create operation: No resource name is expected for the new campaign bid * modifier. 
* </pre> * * <code>.google.ads.googleads.v9.resources.CampaignBidModifier create = 1;</code> */ public Builder setCreate(com.google.ads.googleads.v9.resources.CampaignBidModifier value) { if (createBuilder_ == null) { if (value == null) { throw new NullPointerException(); } operation_ = value; onChanged(); } else { createBuilder_.setMessage(value); } operationCase_ = 1; return this; } /** * <pre> * Create operation: No resource name is expected for the new campaign bid * modifier. * </pre> * * <code>.google.ads.googleads.v9.resources.CampaignBidModifier create = 1;</code> */ public Builder setCreate( com.google.ads.googleads.v9.resources.CampaignBidModifier.Builder builderForValue) { if (createBuilder_ == null) { operation_ = builderForValue.build(); onChanged(); } else { createBuilder_.setMessage(builderForValue.build()); } operationCase_ = 1; return this; } /** * <pre> * Create operation: No resource name is expected for the new campaign bid * modifier. * </pre> * * <code>.google.ads.googleads.v9.resources.CampaignBidModifier create = 1;</code> */ public Builder mergeCreate(com.google.ads.googleads.v9.resources.CampaignBidModifier value) { if (createBuilder_ == null) { if (operationCase_ == 1 && operation_ != com.google.ads.googleads.v9.resources.CampaignBidModifier.getDefaultInstance()) { operation_ = com.google.ads.googleads.v9.resources.CampaignBidModifier.newBuilder((com.google.ads.googleads.v9.resources.CampaignBidModifier) operation_) .mergeFrom(value).buildPartial(); } else { operation_ = value; } onChanged(); } else { if (operationCase_ == 1) { createBuilder_.mergeFrom(value); } createBuilder_.setMessage(value); } operationCase_ = 1; return this; } /** * <pre> * Create operation: No resource name is expected for the new campaign bid * modifier. 
* </pre> * * <code>.google.ads.googleads.v9.resources.CampaignBidModifier create = 1;</code> */ public Builder clearCreate() { if (createBuilder_ == null) { if (operationCase_ == 1) { operationCase_ = 0; operation_ = null; onChanged(); } } else { if (operationCase_ == 1) { operationCase_ = 0; operation_ = null; } createBuilder_.clear(); } return this; } /** * <pre> * Create operation: No resource name is expected for the new campaign bid * modifier. * </pre> * * <code>.google.ads.googleads.v9.resources.CampaignBidModifier create = 1;</code> */ public com.google.ads.googleads.v9.resources.CampaignBidModifier.Builder getCreateBuilder() { return getCreateFieldBuilder().getBuilder(); } /** * <pre> * Create operation: No resource name is expected for the new campaign bid * modifier. * </pre> * * <code>.google.ads.googleads.v9.resources.CampaignBidModifier create = 1;</code> */ @java.lang.Override public com.google.ads.googleads.v9.resources.CampaignBidModifierOrBuilder getCreateOrBuilder() { if ((operationCase_ == 1) && (createBuilder_ != null)) { return createBuilder_.getMessageOrBuilder(); } else { if (operationCase_ == 1) { return (com.google.ads.googleads.v9.resources.CampaignBidModifier) operation_; } return com.google.ads.googleads.v9.resources.CampaignBidModifier.getDefaultInstance(); } } /** * <pre> * Create operation: No resource name is expected for the new campaign bid * modifier. 
* </pre> * * <code>.google.ads.googleads.v9.resources.CampaignBidModifier create = 1;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v9.resources.CampaignBidModifier, com.google.ads.googleads.v9.resources.CampaignBidModifier.Builder, com.google.ads.googleads.v9.resources.CampaignBidModifierOrBuilder> getCreateFieldBuilder() { if (createBuilder_ == null) { if (!(operationCase_ == 1)) { operation_ = com.google.ads.googleads.v9.resources.CampaignBidModifier.getDefaultInstance(); } createBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v9.resources.CampaignBidModifier, com.google.ads.googleads.v9.resources.CampaignBidModifier.Builder, com.google.ads.googleads.v9.resources.CampaignBidModifierOrBuilder>( (com.google.ads.googleads.v9.resources.CampaignBidModifier) operation_, getParentForChildren(), isClean()); operation_ = null; } operationCase_ = 1; onChanged();; return createBuilder_; } private com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v9.resources.CampaignBidModifier, com.google.ads.googleads.v9.resources.CampaignBidModifier.Builder, com.google.ads.googleads.v9.resources.CampaignBidModifierOrBuilder> updateBuilder_; /** * <pre> * Update operation: The campaign bid modifier is expected to have a valid * resource name. * </pre> * * <code>.google.ads.googleads.v9.resources.CampaignBidModifier update = 2;</code> * @return Whether the update field is set. */ @java.lang.Override public boolean hasUpdate() { return operationCase_ == 2; } /** * <pre> * Update operation: The campaign bid modifier is expected to have a valid * resource name. * </pre> * * <code>.google.ads.googleads.v9.resources.CampaignBidModifier update = 2;</code> * @return The update. 
*/ @java.lang.Override public com.google.ads.googleads.v9.resources.CampaignBidModifier getUpdate() { if (updateBuilder_ == null) { if (operationCase_ == 2) { return (com.google.ads.googleads.v9.resources.CampaignBidModifier) operation_; } return com.google.ads.googleads.v9.resources.CampaignBidModifier.getDefaultInstance(); } else { if (operationCase_ == 2) { return updateBuilder_.getMessage(); } return com.google.ads.googleads.v9.resources.CampaignBidModifier.getDefaultInstance(); } } /** * <pre> * Update operation: The campaign bid modifier is expected to have a valid * resource name. * </pre> * * <code>.google.ads.googleads.v9.resources.CampaignBidModifier update = 2;</code> */ public Builder setUpdate(com.google.ads.googleads.v9.resources.CampaignBidModifier value) { if (updateBuilder_ == null) { if (value == null) { throw new NullPointerException(); } operation_ = value; onChanged(); } else { updateBuilder_.setMessage(value); } operationCase_ = 2; return this; } /** * <pre> * Update operation: The campaign bid modifier is expected to have a valid * resource name. * </pre> * * <code>.google.ads.googleads.v9.resources.CampaignBidModifier update = 2;</code> */ public Builder setUpdate( com.google.ads.googleads.v9.resources.CampaignBidModifier.Builder builderForValue) { if (updateBuilder_ == null) { operation_ = builderForValue.build(); onChanged(); } else { updateBuilder_.setMessage(builderForValue.build()); } operationCase_ = 2; return this; } /** * <pre> * Update operation: The campaign bid modifier is expected to have a valid * resource name. 
* </pre> * * <code>.google.ads.googleads.v9.resources.CampaignBidModifier update = 2;</code> */ public Builder mergeUpdate(com.google.ads.googleads.v9.resources.CampaignBidModifier value) { if (updateBuilder_ == null) { if (operationCase_ == 2 && operation_ != com.google.ads.googleads.v9.resources.CampaignBidModifier.getDefaultInstance()) { operation_ = com.google.ads.googleads.v9.resources.CampaignBidModifier.newBuilder((com.google.ads.googleads.v9.resources.CampaignBidModifier) operation_) .mergeFrom(value).buildPartial(); } else { operation_ = value; } onChanged(); } else { if (operationCase_ == 2) { updateBuilder_.mergeFrom(value); } updateBuilder_.setMessage(value); } operationCase_ = 2; return this; } /** * <pre> * Update operation: The campaign bid modifier is expected to have a valid * resource name. * </pre> * * <code>.google.ads.googleads.v9.resources.CampaignBidModifier update = 2;</code> */ public Builder clearUpdate() { if (updateBuilder_ == null) { if (operationCase_ == 2) { operationCase_ = 0; operation_ = null; onChanged(); } } else { if (operationCase_ == 2) { operationCase_ = 0; operation_ = null; } updateBuilder_.clear(); } return this; } /** * <pre> * Update operation: The campaign bid modifier is expected to have a valid * resource name. * </pre> * * <code>.google.ads.googleads.v9.resources.CampaignBidModifier update = 2;</code> */ public com.google.ads.googleads.v9.resources.CampaignBidModifier.Builder getUpdateBuilder() { return getUpdateFieldBuilder().getBuilder(); } /** * <pre> * Update operation: The campaign bid modifier is expected to have a valid * resource name. 
* </pre> * * <code>.google.ads.googleads.v9.resources.CampaignBidModifier update = 2;</code> */ @java.lang.Override public com.google.ads.googleads.v9.resources.CampaignBidModifierOrBuilder getUpdateOrBuilder() { if ((operationCase_ == 2) && (updateBuilder_ != null)) { return updateBuilder_.getMessageOrBuilder(); } else { if (operationCase_ == 2) { return (com.google.ads.googleads.v9.resources.CampaignBidModifier) operation_; } return com.google.ads.googleads.v9.resources.CampaignBidModifier.getDefaultInstance(); } } /** * <pre> * Update operation: The campaign bid modifier is expected to have a valid * resource name. * </pre> * * <code>.google.ads.googleads.v9.resources.CampaignBidModifier update = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v9.resources.CampaignBidModifier, com.google.ads.googleads.v9.resources.CampaignBidModifier.Builder, com.google.ads.googleads.v9.resources.CampaignBidModifierOrBuilder> getUpdateFieldBuilder() { if (updateBuilder_ == null) { if (!(operationCase_ == 2)) { operation_ = com.google.ads.googleads.v9.resources.CampaignBidModifier.getDefaultInstance(); } updateBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v9.resources.CampaignBidModifier, com.google.ads.googleads.v9.resources.CampaignBidModifier.Builder, com.google.ads.googleads.v9.resources.CampaignBidModifierOrBuilder>( (com.google.ads.googleads.v9.resources.CampaignBidModifier) operation_, getParentForChildren(), isClean()); operation_ = null; } operationCase_ = 2; onChanged();; return updateBuilder_; } /** * <pre> * Remove operation: A resource name for the removed campaign bid modifier * is expected, in this format: * `customers/{customer_id}/CampaignBidModifiers/{campaign_id}~{criterion_id}` * </pre> * * <code>string remove = 3;</code> * @return Whether the remove field is set. 
*/ @java.lang.Override public boolean hasRemove() { return operationCase_ == 3; } /** * <pre> * Remove operation: A resource name for the removed campaign bid modifier * is expected, in this format: * `customers/{customer_id}/CampaignBidModifiers/{campaign_id}~{criterion_id}` * </pre> * * <code>string remove = 3;</code> * @return The remove. */ @java.lang.Override public java.lang.String getRemove() { java.lang.Object ref = ""; if (operationCase_ == 3) { ref = operation_; } if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (operationCase_ == 3) { operation_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <pre> * Remove operation: A resource name for the removed campaign bid modifier * is expected, in this format: * `customers/{customer_id}/CampaignBidModifiers/{campaign_id}~{criterion_id}` * </pre> * * <code>string remove = 3;</code> * @return The bytes for remove. */ @java.lang.Override public com.google.protobuf.ByteString getRemoveBytes() { java.lang.Object ref = ""; if (operationCase_ == 3) { ref = operation_; } if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); if (operationCase_ == 3) { operation_ = b; } return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * Remove operation: A resource name for the removed campaign bid modifier * is expected, in this format: * `customers/{customer_id}/CampaignBidModifiers/{campaign_id}~{criterion_id}` * </pre> * * <code>string remove = 3;</code> * @param value The remove to set. * @return This builder for chaining. 
*/ public Builder setRemove( java.lang.String value) { if (value == null) { throw new NullPointerException(); } operationCase_ = 3; operation_ = value; onChanged(); return this; } /** * <pre> * Remove operation: A resource name for the removed campaign bid modifier * is expected, in this format: * `customers/{customer_id}/CampaignBidModifiers/{campaign_id}~{criterion_id}` * </pre> * * <code>string remove = 3;</code> * @return This builder for chaining. */ public Builder clearRemove() { if (operationCase_ == 3) { operationCase_ = 0; operation_ = null; onChanged(); } return this; } /** * <pre> * Remove operation: A resource name for the removed campaign bid modifier * is expected, in this format: * `customers/{customer_id}/CampaignBidModifiers/{campaign_id}~{criterion_id}` * </pre> * * <code>string remove = 3;</code> * @param value The bytes for remove to set. * @return This builder for chaining. */ public Builder setRemoveBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); operationCase_ = 3; operation_ = value; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.ads.googleads.v9.services.CampaignBidModifierOperation) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v9.services.CampaignBidModifierOperation) private static final com.google.ads.googleads.v9.services.CampaignBidModifierOperation DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v9.services.CampaignBidModifierOperation(); } public static com.google.ads.googleads.v9.services.CampaignBidModifierOperation getDefaultInstance() { 
return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<CampaignBidModifierOperation> PARSER = new com.google.protobuf.AbstractParser<CampaignBidModifierOperation>() { @java.lang.Override public CampaignBidModifierOperation parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new CampaignBidModifierOperation(input, extensionRegistry); } }; public static com.google.protobuf.Parser<CampaignBidModifierOperation> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<CampaignBidModifierOperation> getParserForType() { return PARSER; } @java.lang.Override public com.google.ads.googleads.v9.services.CampaignBidModifierOperation getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
// 2.3.3 Set of Stacks // Implement a stack which consists of multiple stacks each no bigger than some max value import java.lang.*; import java.util.*; public class SetOfStacks { List<Node> stacks; Node currentSubstack; int currentHeight; final int maxHeight; SetOfStacks(int height) { if(height < 1) { throw new ArrayIndexOutOfBoundsException(); } this.maxHeight = height; stacks = new ArrayList<Node>(); currentHeight = 0; currentSubstack = null; } public int peek() { if(currentSubstack == null) { if(stacks.size() == 0) { throw new EmptyStackException(); } int lastStack = stacks.size() - 1; Node tempStack = stacks.get(lastStack); return tempStack.data; } return currentSubstack.data; } public void push(int value) { currentHeight++; Node newNode = new Node(value); if(currentSubstack == null) { currentSubstack = newNode; return; } if(currentHeight > maxHeight) { stacks.add(currentSubstack); currentSubstack = newNode; currentHeight = 1; return; } newNode.next = currentSubstack; currentSubstack = newNode; } public void moveToNextStack() { int stacksSize = stacks.size(); if(stacksSize == 0) { throw new EmptyStackException(); } currentSubstack = stacks.remove(stacksSize - 1); currentHeight = currentSubstack.size(); } public int pop() { while(currentSubstack == null) { moveToNextStack(); } int val = currentSubstack.data; currentSubstack = currentSubstack.next; currentHeight--; return val; } public int popAt(int stackIndex) { int stacksSize = stacks.size(); if(stackIndex > stacksSize) { throw new EmptyStackException(); } if(stackIndex == stacksSize) { if(currentSubstack == null) { throw new EmptyStackException(); } int val = currentSubstack.data; currentSubstack = currentSubstack.next; return val; } Node thisStack = stacks.get(stackIndex); if(thisStack.size() == 1) { stacks.remove(stackIndex); return thisStack.data; } stacks.set(stackIndex, thisStack.next); return thisStack.data; } public static void main(String[] args) { if(args.length != 1) { printUsage(); return; } int n 
= 0; try { n = Integer.parseInt(args[0]); } catch(Exception e) { printUsage(); return; } SetOfStacks stack = new SetOfStacks(n); Scanner scanner = new Scanner(System.in); while(true) { String s = scanner.nextLine(); int value = 0; String[] tokens = s.split(" "); String operation = tokens[0].toLowerCase(); if(operation.equals("x")) { return; } if(tokens.length == 2) { try { value = Integer.parseInt(tokens[1]); } catch(Exception e) { System.out.println("Expected a number but got something else: " + e); continue; } } if(operation.equals("push")) { if(tokens.length != 2) { System.out.println("Usage: push n where n is an integer"); continue; } stack.push(value); } if(operation.equals("pop")) { if(tokens.length != 1) { System.out.println("Usage: pop"); continue; } try { value = stack.pop(); } catch(Exception e) { System.out.println("There are no more values in stack " + value); continue; } System.out.println(value); } if(operation.equals("popat")) { if(tokens.length != 2) { System.out.println("Usage: popAt"); continue; } try { value = stack.popAt(value); } catch(Exception e) { System.out.println("There are no more values in stack " + value); continue; } System.out.println(value); } if(operation.equals("peek")) { if(tokens.length != 1) { System.out.println("Usage: peek"); continue; } try { value = stack.peek(); } catch(Exception e) { System.out.println("There are no more values in stack " + value); continue; } System.out.println(value); } } } static void printUsage() { System.out.println("Usage: java SetOfStacks n"); System.out.println("Where n is the maximum sub-stack height"); System.out.println("StdIn: A series of commands to push, peek, pop or view the min"); System.out.println("Eg: push 120 would push the value 120 onto the stack"); System.out.println("Exit with x"); } }
package micro16;

import java.util.Arrays;

/**
 * Micro16 CPU: executes a microprogram from a control store against a 16-entry
 * register file (3 read-only constants, MAR, R0..R10, MBR) and a {@link Memory}.
 */
class CPU {
    // 11 named registers (R0..10), MAR, MBR and
    // 3 constant registers (0, 1, -1)
    public static final int REGISTER_COUNT = 16;
    public static final int INSTRUCTION_LENGTH = 32;

    public static final int ALU_NOP = 0;
    public static final int ALU_ADD = 1;
    public static final int ALU_AND = 2;
    public static final int ALU_NOT = 3;

    public static final int SH_NOP = 0;
    public static final int SH_LEFT = 1;
    public static final int SH_RIGHT = 2;

    public static final int COND_IF_N = 1;
    public static final int COND_IF_Z = 2;
    public static final int COND_GOTO = 3;

    public static final int R0_IDX = 4;
    public static final int R10_IDX = 14;
    public static final int MAR_REGISTER_IDX = 3;
    public static final int MBR_REGISTER_IDX = 15;

    private final short[] registers;
    private final int[] controlStore;
    private byte instructionCounter;
    private boolean negativeFlag;
    private boolean zeroFlag;
    private Memory memory;

    /**
     * @param program the microinstructions to execute (one 32-bit word each)
     */
    public CPU(int[] program) {
        instructionCounter = 0;
        controlStore = program;
        registers = new short[REGISTER_COUNT];
        // Constant registers: 0, 1, -1.
        registers[0] = 0;
        registers[1] = 1;
        registers[2] = -1;
        zeroFlag = false;
        negativeFlag = false;
        memory = new Memory();
    }

    /**
     * Processes one Micro16 instruction.
     */
    public void step() {
        negativeFlag = zeroFlag = false;
        int raw = controlStore[instructionCounter++];
        if (raw == 0) {
            // All-zero word is a no-op.
            return;
        }
        Instruction instr = new Instruction(raw);
        // A-MUX==true gets the value from the MAR
        byte aBus = instr.A_MUX() ? MAR_REGISTER_IDX : instr.A_BUS();
        byte bBus = instr.B_BUS();
        // Destination register: S_BUS normally; when ENS is clear but MBR is
        // set, the result goes to the MBR instead.
        // (Simplified from the original's redundant `if (ENS) sBus = S_BUS()`.)
        byte sBus = instr.S_BUS();
        if (!instr.ENS() && instr.MBR()) {
            sBus = MBR_REGISTER_IDX;
        }
        if (instr.ENS() && sBus < 3) {
            throw new IllegalArgumentException("Can't write to read-only registers!");
        }
        if (instr.MAR()) {
            // MAR load takes the B bus directly; nothing else happens this cycle.
            registers[MAR_REGISTER_IDX] = registers[bBus];
            return;
        }

        short aluResult = aluOp(instr.ALU(), aBus, bBus);
        // Check the flags for the ALU result only
        negativeFlag = aluResult < 0;
        zeroFlag = aluResult == 0;
        short shifterResult = shifterOp(instr.SH(), aluResult);
        if (instr.COND() != 0) {
            condOp(instr.COND(), instr.ADDR());
        }
        // Write result to the register
        // NOTE(review): the write happens even when ENS is clear (and MBR is not
        // set); confirm against the Micro16 spec whether that is intended.
        registers[sBus] = shifterResult;

        // Memory IO
        if (instr.MS()) {
            short MAR = registers[MAR_REGISTER_IDX];
            short MBR = registers[MBR_REGISTER_IDX];
            // read
            if (instr.RD_WR()) {
                memory.read(MAR, registers);
            } else {
                memory.write(MAR, MBR);
            }
        }
    }

    /** Runs the program until the instruction counter passes the last word. */
    public void stepUntilCompletion() {
        while (instructionCounter < controlStore.length) {
            step();
        }
    }

    public byte getInstructionCounter() {
        return instructionCounter;
    }

    /**
     * Resets this CPU to its original state.
     */
    public void reset() {
        this.instructionCounter = 0;
        Arrays.fill(registers, (short) 0);
        registers[0] = 0;
        registers[1] = 1;
        registers[2] = -1;
        zeroFlag = false;
        negativeFlag = false;
        this.memory.reset();
    }

    /**
     * Returns the number of instructions of the
     * program this CPU executes.
     */
    public int getProgramLength() {
        return controlStore.length;
    }

    Memory getMemory() {
        return memory;
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("Register contents:\n");
        for (int i = R0_IDX; i <= R10_IDX; i++) {
            sb.append("R").append(i - 4).append(" = ").append(registers[i]).append("\n");
        }
        sb.append("MAR = ").append(registers[MAR_REGISTER_IDX]).append("\n");
        sb.append("MBR = ").append(registers[MBR_REGISTER_IDX]).append("\n");
        sb.append("Negative flag: ").append(negativeFlag).append(" \n");
        sb.append("Zero flag: ").append(zeroFlag).append("\n");
        sb.append("Instruction counter: ").append(instructionCounter).append("\n");
        sb.append("\nMemory:\n");
        sb.append(memory.toString()).append("\n");
        return sb.toString();
    }

    public short[] getRegisters() {
        return registers;
    }

    /**
     * Applies the ALU operation selected by {@code aluFlag} to the registers
     * addressed by the A and B buses.
     *
     * @throws IllegalArgumentException for an unknown ALU flag
     */
    private short aluOp(byte aluFlag, byte aBus, byte bBus) {
        switch (aluFlag) {
            case ALU_NOP:
                return registers[aBus];
            case ALU_ADD:
                return (short) (registers[aBus] + registers[bBus]);
            case ALU_AND:
                return (short) (registers[aBus] & registers[bBus]);
            case ALU_NOT:
                return (short) (~registers[aBus]);
            default:
                throw new IllegalArgumentException("Not a valid ALU flag.");
        }
    }

    /**
     * Applies the shifter stage to the ALU result.
     *
     * @throws IllegalArgumentException for an unknown shifter flag
     */
    private short shifterOp(byte shFlag, short aluResult) {
        switch (shFlag) {
            case SH_NOP:
                return aluResult;
            case SH_LEFT:
                return (short) (aluResult << 1);
            case SH_RIGHT:
                // FIX: the original `result >>>= 1` sign-extends the short to an
                // int before shifting and then narrows back, which for negative
                // values behaves like an ARITHMETIC shift. Mask to 16 bits first
                // so `>>>` performs the logical shift its use implies.
                return (short) ((aluResult & 0xFFFF) >>> 1);
            default:
                throw new IllegalArgumentException("Invalid SH command.");
        }
    }

    /**
     * Applies the branch condition: jump to {@code addr} when the selected
     * flag is set, or unconditionally for GOTO.
     *
     * @throws IllegalArgumentException for an unknown condition flag
     */
    private void condOp(byte condFlag, byte addr) {
        switch (condFlag) {
            case COND_IF_N:
                if (negativeFlag) {
                    instructionCounter = addr;
                }
                break;
            case COND_IF_Z:
                if (zeroFlag) {
                    instructionCounter = addr;
                }
                break;
            case COND_GOTO:
                instructionCounter = addr;
                break;
            default:
                throw new IllegalArgumentException("Invalid COND command.");
        }
    }
}
/* * Copyright (c) 2017 Michael Krotscheck * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy * of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * limitations under the License. * */ package net.krotscheck.kangaroo.authz.admin.v1.resource; import net.krotscheck.kangaroo.authz.admin.Scope; import net.krotscheck.kangaroo.authz.common.authenticator.AuthenticatorType; import net.krotscheck.kangaroo.authz.common.database.entity.AbstractAuthzEntity; import net.krotscheck.kangaroo.authz.common.database.entity.Application; import net.krotscheck.kangaroo.authz.common.database.entity.Authenticator; import net.krotscheck.kangaroo.authz.common.database.entity.ClientType; import net.krotscheck.kangaroo.authz.common.database.entity.OAuthToken; import net.krotscheck.kangaroo.authz.common.database.entity.User; import net.krotscheck.kangaroo.authz.common.database.entity.UserIdentity; import net.krotscheck.kangaroo.authz.oauth2.exception.RFC6749.InvalidScopeException; import net.krotscheck.kangaroo.common.hibernate.id.IdUtil; import net.krotscheck.kangaroo.common.hibernate.id.MalformedIdException; import net.krotscheck.kangaroo.common.response.ListResponseEntity; import org.hibernate.Criteria; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import javax.ws.rs.core.GenericType; import javax.ws.rs.core.Response; import javax.ws.rs.core.Response.Status; import javax.ws.rs.core.UriBuilder; import java.net.URI; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; 
import java.util.stream.Collectors;

import static org.junit.Assert.assertTrue;

/**
 * Test the list and filter methods of the user identity service.
 *
 * @author Michael Krotscheck
 */
@RunWith(Parameterized.class)
public final class UserIdentityServiceBrowseTest
        extends AbstractServiceBrowseTest<UserIdentity> {

    /**
     * Generic type declaration for list decoding.
     */
    private static final GenericType<ListResponseEntity<UserIdentity>> LIST_TYPE =
            new GenericType<ListResponseEntity<UserIdentity>>() {

            };

    /**
     * Create a new instance of this parameterized test.
     *
     * @param clientType The type of client.
     * @param tokenScope The client scope to issue.
     * @param createUser Whether to create a new user.
     */
    public UserIdentityServiceBrowseTest(final ClientType clientType,
                                         final String tokenScope,
                                         final Boolean createUser) {
        super(clientType, tokenScope, createUser);
    }

    /**
     * Test parameters: every combination of client type, token scope, and
     * user-creation flag exercised by this browse suite.
     *
     * @return The list of parameters.
     */
    @Parameterized.Parameters
    public static Collection<Object[]> parameters() {
        // FIX: declare the element type instead of using the raw Collection.
        // The JUnit Parameterized runner consumes Object[] rows; the raw type
        // only suppressed compiler checking without changing behavior.
        return Arrays.asList(
                new Object[]{
                        ClientType.Implicit,
                        Scope.IDENTITY_ADMIN,
                        false
                },
                new Object[]{
                        ClientType.Implicit,
                        Scope.IDENTITY,
                        false
                },
                new Object[]{
                        ClientType.Implicit,
                        Scope.IDENTITY_ADMIN,
                        true
                },
                new Object[]{
                        ClientType.Implicit,
                        Scope.IDENTITY,
                        true
                },
                new Object[]{
                        ClientType.ClientCredentials,
                        Scope.IDENTITY_ADMIN,
                        false
                },
                new Object[]{
                        ClientType.ClientCredentials,
                        Scope.IDENTITY,
                        false
                });
    }

    /**
     * Return the token scope required for admin access on this test.
     *
     * @return The correct scope string.
     */
    @Override
    protected String getAdminScope() {
        return Scope.IDENTITY_ADMIN;
    }

    /**
     * Return the token scope required for generic user access.
     *
     * @return The correct scope string.
     */
    @Override
    protected String getRegularScope() {
        return Scope.IDENTITY;
    }

    /**
     * Return the list type used to decode browse results.
     *
     * @return The list type.
     */
    @Override
    protected GenericType<ListResponseEntity<UserIdentity>> getListType() {
        return LIST_TYPE;
    }

    /**
     * Return the list of entities which should be accessible given a
     * specific token.
     *
     * @param token The oauth token to test against.
     * @return A list of entities (could be empty).
     */
    @Override
    protected List<UserIdentity> getAccessibleEntities(final OAuthToken token) {
        // If you're an admin, you get to see everything. If you're not, you
        // only get to see what you own.
        OAuthToken attachedToken = getAttached(token);
        if (!attachedToken.getScopes().containsKey(getAdminScope())) {
            return getOwnedEntities(attachedToken);
        }

        // We know you're an admin. Get all applications in the system.
        Criteria c = getSession().createCriteria(Application.class);

        // Get all the owned clients.
        // NOTE(review): unchecked cast — Hibernate's Criteria.list() returns a
        // raw List; the query above only selects Application, so the cast is
        // safe in practice.
        // Flatten every application's users into their identities, de-duped.
        return ((List<Application>) c.list())
                .stream()
                .flatMap(a -> a.getUsers().stream())
                .flatMap(u -> u.getIdentities().stream())
                .distinct()
                .collect(Collectors.toList());
    }

    /**
     * Return the list of entities which are owned by the given oauth token.
     *
     * @param owner The owner of these entities.
     * @return A list of entities (could be empty).
     */
    @Override
    protected List<UserIdentity> getOwnedEntities(final User owner) {
        // Get all the owned clients.
        // Same flattening as the admin path, restricted to the owner's apps.
        return owner.getApplications()
                .stream()
                .flatMap(a -> a.getUsers().stream())
                .flatMap(u -> u.getIdentities().stream())
                .distinct()
                .collect(Collectors.toList());
    }

    /**
     * Construct the request URL for this test given a specific resource ID.
     *
     * @param id The ID to use.
     * @return The resource URL.
     */
    @Override
    protected URI getUrlForId(final String id) {
        return UriBuilder.fromPath("/v1/identity/")
                .path(id)
                .build();
    }

    /**
     * Construct the request URL for this test given a specific resource ID.
     *
     * @param entity The entity to use.
     * @return The resource URL.
     */
    @Override
    protected URI getUrlForEntity(final AbstractAuthzEntity entity) {
        return getUrlForId(IdUtil.toString(entity.getId()));
    }

    /**
     * Ensure that we can filter by the identity's user.
     */
    @Test
    public void testBrowseFilterByUser() {
        // Filter on the admin context's own identity's user.
        User filtered = getAdminContext().getUserIdentity().getUser();
        Map<String, String> params = new HashMap<>();
        params.put("user", IdUtil.toString(filtered.getId()));
        Response r = browse(params, getAdminToken());

        // Compute the expected result set locally from the accessible entities.
        List<UserIdentity> expectedResults =
                getAccessibleEntities(getAdminToken())
                        .stream()
                        .filter((identity) -> identity.getUser().equals(filtered))
                        .distinct()
                        .collect(Collectors.toList());
        Integer expectedTotal = expectedResults.size();
        // First page only: result size is capped at the default limit of 10.
        int expectedResultSize = Math.min(10, expectedTotal);
        Integer expectedOffset = 0;
        Integer expectedLimit = 10;

        if (isLimitedByClientCredentials()) {
            // Client-credentials clients without the right scope are rejected.
            assertErrorResponse(r, new InvalidScopeException());
        } else if (isAccessible(filtered, getAdminToken())) {
            assertListResponse(r,
                    expectedResultSize,
                    expectedOffset,
                    expectedLimit,
                    expectedTotal);
        } else {
            assertErrorResponse(r, Status.BAD_REQUEST);
        }
    }

    /**
     * Ensure that we cannot filter by an invalid user.
     */
    @Test
    public void testBrowseFilterByInvalidUser() {
        // A well-formed but nonexistent user id.
        Map<String, String> params = new HashMap<>();
        params.put("user", IdUtil.toString(IdUtil.next()));
        Response r = browse(params, getAdminToken());

        if (isLimitedByClientCredentials()) {
            assertErrorResponse(r, new InvalidScopeException());
        } else {
            assertErrorResponse(r, new MalformedIdException());
        }
    }

    /**
     * Ensure that we cannot filter by a malformed user.
     */
    @Test
    public void testBrowseFilterByMalformedUser() {
        // A syntactically invalid id fails regardless of token scope.
        Map<String, String> params = new HashMap<>();
        params.put("user", "malformed");
        Response r = browse(params, getAdminToken());

        assertErrorResponse(r, new MalformedIdException());
    }

    /**
     * Ensure that we can filter by the identity's Authenticator.
     */
    @Test
    public void testBrowseFilterByType() {
        // Filter on the admin context's authenticator type.
        Authenticator authenticator = getAdminContext().getAuthenticator();
        AuthenticatorType type = authenticator.getType();

        Map<String, String> params = new HashMap<>();
        params.put("type", type.toString());
        Response r = browse(params, getAdminToken());

        // Compute the expected result set locally from the accessible entities.
        List<UserIdentity> expectedResults =
                getAccessibleEntities(getAdminToken())
                        .stream()
                        .filter((identity) -> identity.getType().equals(type))
                        .distinct()
                        .collect(Collectors.toList());
        Integer expectedTotal = expectedResults.size();
        int expectedResultSize = Math.min(10, expectedTotal);
        Integer expectedOffset = 0;
        Integer expectedLimit = 10;

        if (isLimitedByClientCredentials()) {
            assertErrorResponse(r, new InvalidScopeException());
        } else {
            // Sanity check: the fixture must contain at least one match.
            assertTrue(expectedTotal > 0);
            assertListResponse(r,
                    expectedResultSize,
                    expectedOffset,
                    expectedLimit,
                    expectedTotal);
        }
    }

    /**
     * Ensure that we cannot filter by a malformed Authenticator.
     */
    @Test
    public void testBrowseFilterByInvalidType() {
        Map<String, String> params = new HashMap<>();
        params.put("type", "malformed");
        Response r = browse(params, getAdminToken());

        assertErrorResponse(r, Status.NOT_FOUND);
    }
}
/*
 * Copyright 2002-2016 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.jdbc.datasource.init;

import java.io.IOException;
import java.io.LineNumberReader;
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.SQLWarning;
import java.sql.Statement;
import java.util.LinkedList;
import java.util.List;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import org.springframework.core.io.Resource;
import org.springframework.core.io.support.EncodedResource;
import org.springframework.util.Assert;
import org.springframework.util.StringUtils;

/**
 * Generic utility methods for working with SQL scripts.
 *
 * <p>Mainly for internal use within the framework.
 *
 * @author Thomas Risberg
 * @author Sam Brannen
 * @author Juergen Hoeller
 * @author Keith Donald
 * @author Dave Syer
 * @author Chris Beams
 * @author Oliver Gierke
 * @author Chris Baldwin
 * @author Nicolas Debeissat
 * @since 4.0.3
 */
// Declared abstract as a static-utility holder; only static members are
// visible in this class, so it is presumably never instantiated.
public abstract class ScriptUtils {

	/**
	 * Default statement separator within SQL scripts: {@code ";"}.
	 */
	public static final String DEFAULT_STATEMENT_SEPARATOR = ";";

	/**
	 * Fallback statement separator within SQL scripts: {@code "\n"}.
	 * <p>Used if neither a custom separator nor the
	 * {@link #DEFAULT_STATEMENT_SEPARATOR} is present in a given script.
	 */
	public static final String FALLBACK_STATEMENT_SEPARATOR = "\n";

	/**
	 * End of file (EOF) SQL statement separator: {@code "^^^ END OF SCRIPT ^^^"}.
	 * <p>This value may be supplied as the {@code separator} to {@link
	 * #executeSqlScript(Connection, EncodedResource, boolean, boolean, String, String, String, String)}
	 * to denote that an SQL script contains a single statement (potentially
	 * spanning multiple lines) with no explicit statement separator. Note that
	 * such a script should not actually contain this value; it is merely a
	 * <em>virtual</em> statement separator.
	 */
	public static final String EOF_STATEMENT_SEPARATOR = "^^^ END OF SCRIPT ^^^";

	/**
	 * Default prefix for single-line comments within SQL scripts: {@code "--"}.
	 */
	public static final String DEFAULT_COMMENT_PREFIX = "--";

	/**
	 * Default start delimiter for block comments within SQL scripts: {@code "/*"}.
	 */
	public static final String DEFAULT_BLOCK_COMMENT_START_DELIMITER = "/*";

	/**
	 * Default end delimiter for block comments within SQL scripts: <code>"*&#47;"</code>.
	 */
	public static final String DEFAULT_BLOCK_COMMENT_END_DELIMITER = "*/";


	private static final Log logger = LogFactory.getLog(ScriptUtils.class);


	/**
	 * Split an SQL script into separate statements delimited by the provided
	 * separator character. Each individual statement will be added to the
	 * provided {@code List}.
	 * <p>Within the script, {@value #DEFAULT_COMMENT_PREFIX} will be used as the
	 * comment prefix; any text beginning with the comment prefix and extending to
	 * the end of the line will be omitted from the output. Similarly,
	 * {@value #DEFAULT_BLOCK_COMMENT_START_DELIMITER} and
	 * {@value #DEFAULT_BLOCK_COMMENT_END_DELIMITER} will be used as the
	 * <em>start</em> and <em>end</em> block comment delimiters: any text enclosed
	 * in a block comment will be omitted from the output. In addition, multiple
	 * adjacent whitespace characters will be collapsed into a single space.
	 * @param script the SQL script
	 * @param separator character separating each statement &mdash; typically a ';'
	 * @param statements the list that will contain the individual statements
	 * @throws ScriptException if an error occurred while splitting the SQL script
	 * @see #splitSqlScript(String, String, List)
	 * @see #splitSqlScript(EncodedResource, String, String, String, String, String, List)
	 */
	public static void splitSqlScript(String script, char separator, List<String> statements) throws ScriptException {
		// Convenience overload: delegate to the String-separator variant.
		splitSqlScript(script, String.valueOf(separator), statements);
	}

	/**
	 * Split an SQL script into separate statements delimited by the provided
	 * separator string. Each individual statement will be added to the
	 * provided {@code List}.
	 * <p>Within the script, {@value #DEFAULT_COMMENT_PREFIX} will be used as the
	 * comment prefix; any text beginning with the comment prefix and extending to
	 * the end of the line will be omitted from the output. Similarly,
	 * {@value #DEFAULT_BLOCK_COMMENT_START_DELIMITER} and
	 * {@value #DEFAULT_BLOCK_COMMENT_END_DELIMITER} will be used as the
	 * <em>start</em> and <em>end</em> block comment delimiters: any text enclosed
	 * in a block comment will be omitted from the output. In addition, multiple
	 * adjacent whitespace characters will be collapsed into a single space.
	 * @param script the SQL script
	 * @param separator text separating each statement &mdash; typically a ';' or newline character
	 * @param statements the list that will contain the individual statements
	 * @throws ScriptException if an error occurred while splitting the SQL script
	 * @see #splitSqlScript(String, char, List)
	 * @see #splitSqlScript(EncodedResource, String, String, String, String, String, List)
	 */
	public static void splitSqlScript(String script, String separator, List<String> statements) throws ScriptException {
		// Delegate to the fully-parameterized variant with the default comment
		// and block-comment delimiters; no resource is associated here.
		splitSqlScript(null, script, separator, DEFAULT_COMMENT_PREFIX, DEFAULT_BLOCK_COMMENT_START_DELIMITER,
				DEFAULT_BLOCK_COMMENT_END_DELIMITER, statements);
	}

	/**
	 * Split an SQL script into separate statements delimited by the provided
	 * separator string. Each individual statement will be added to the provided
	 * {@code List}.
	 * <p>Within the script, the provided {@code commentPrefix} will be honored:
	 * any text beginning with the comment prefix and extending to the end of the
	 * line will be omitted from the output. Similarly, the provided
	 * {@code blockCommentStartDelimiter} and {@code blockCommentEndDelimiter}
	 * delimiters will be honored: any text enclosed in a block comment will be
	 * omitted from the output. In addition, multiple adjacent whitespace characters
	 * will be collapsed into a single space.
	 * @param resource the resource from which the script was read
	 * @param script the SQL script; never {@code null} or empty
	 * @param separator text separating each statement &mdash; typically a ';' or
	 * newline character; never {@code null}
	 * @param commentPrefix the prefix that identifies SQL line comments &mdash;
	 * typically "--"; never {@code null} or empty
	 * @param blockCommentStartDelimiter the <em>start</em> block comment delimiter;
	 * never {@code null} or empty
	 * @param blockCommentEndDelimiter the <em>end</em> block comment delimiter;
	 * never {@code null} or empty
	 * @param statements the list that will contain the individual statements
	 * @throws ScriptException if an error occurred while splitting the SQL script
	 */
	public static void splitSqlScript(EncodedResource resource, String script, String separator, String commentPrefix,
			String blockCommentStartDelimiter, String blockCommentEndDelimiter, List<String> statements)
			throws ScriptException {

		Assert.hasText(script, "script must not be null or empty");
		Assert.notNull(separator, "separator must not be null");
		Assert.hasText(commentPrefix, "commentPrefix must not be null or empty");
		Assert.hasText(blockCommentStartDelimiter, "blockCommentStartDelimiter must not be null or empty");
		Assert.hasText(blockCommentEndDelimiter, "blockCommentEndDelimiter must not be null or empty");

		// Character-level scan tracking quote and escape state so separators,
		// comments, and whitespace inside string literals are left untouched.
		StringBuilder sb = new StringBuilder();
		boolean inSingleQuote = false;
		boolean inDoubleQuote = false;
		boolean inEscape = false;
		char[] content = script.toCharArray();
		for (int i = 0; i < script.length(); i++) {
			char c = content[i];
			if (inEscape) {
				// Previous character was a backslash: copy this one verbatim.
				inEscape = false;
				sb.append(c);
				continue;
			}
			// MySQL style escapes
			if (c == '\\') {
				inEscape = true;
				sb.append(c);
				continue;
			}
			// Toggle quote state; a quote of one kind inside the other is literal.
			if (!inDoubleQuote && (c == '\'')) {
				inSingleQuote = !inSingleQuote;
			}
			else if (!inSingleQuote && (c == '"')) {
				inDoubleQuote = !inDoubleQuote;
			}
			if (!inSingleQuote && !inDoubleQuote) {
				if (script.startsWith(separator, i)) {
					// we've reached the end of the current statement
					if (sb.length() > 0) {
						statements.add(sb.toString());
						sb = new StringBuilder();
					}
					i += separator.length() - 1;
					continue;
				}
				else if (script.startsWith(commentPrefix, i)) {
					// skip over any content from the start of the comment to the EOL
					int indexOfNextNewline = script.indexOf("\n", i);
					if (indexOfNextNewline > i) {
						i = indexOfNextNewline;
						continue;
					}
					else {
						// if there's no EOL, we must be at the end
						// of the script, so stop here.
						break;
					}
				}
				else if (script.startsWith(blockCommentStartDelimiter, i)) {
					// skip over any block comments
					int indexOfCommentEnd = script.indexOf(blockCommentEndDelimiter, i);
					if (indexOfCommentEnd > i) {
						i = indexOfCommentEnd + blockCommentEndDelimiter.length() - 1;
						continue;
					}
					else {
						// An unterminated block comment is a hard parse error.
						throw new ScriptParseException(String.format("Missing block comment end delimiter [%s].",
								blockCommentEndDelimiter), resource);
					}
				}
				else if (c == ' ' || c == '\n' || c == '\t') {
					// avoid multiple adjacent whitespace characters
					if (sb.length() > 0 && sb.charAt(sb.length() - 1) != ' ') {
						c = ' ';
					}
					else {
						continue;
					}
				}
			}
			sb.append(c);
		}
		// Flush the trailing statement, if any non-whitespace text remains.
		if (StringUtils.hasText(sb)) {
			statements.add(sb.toString());
		}
	}

	/**
	 * Read a script from the given resource, using "{@code --}" as the comment prefix
	 * and "{@code ;}" as the statement separator, and build a String containing the lines.
	 * @param resource the {@code EncodedResource} to be read
	 * @return {@code String} containing the script lines
	 * @throws IOException in case of I/O errors
	 */
	static String readScript(EncodedResource resource) throws IOException {
		// Convenience overload with the default comment prefix and separator.
		return readScript(resource, DEFAULT_COMMENT_PREFIX, DEFAULT_STATEMENT_SEPARATOR);
	}

	/**
	 * Read a script from the provided resource, using the supplied comment prefix
	 * and statement separator, and build a {@code String} containing the lines.
	 * <p>Lines <em>beginning</em> with the comment prefix are excluded from the
	 * results; however, line comments anywhere else &mdash; for example, within
	 * a statement &mdash; will be included in the results.
* @param resource the {@code EncodedResource} containing the script * to be processed * @param commentPrefix the prefix that identifies comments in the SQL script &mdash; * typically "--" * @param separator the statement separator in the SQL script &mdash; typically ";" * @return a {@code String} containing the script lines * @throws IOException in case of I/O errors */ private static String readScript(EncodedResource resource, String commentPrefix, String separator) throws IOException { LineNumberReader lnr = new LineNumberReader(resource.getReader()); try { return readScript(lnr, commentPrefix, separator); } finally { lnr.close(); } } /** * Read a script from the provided {@code LineNumberReader}, using the supplied * comment prefix and statement separator, and build a {@code String} containing * the lines. * <p>Lines <em>beginning</em> with the comment prefix are excluded from the * results; however, line comments anywhere else &mdash; for example, within * a statement &mdash; will be included in the results. 
* @param lineNumberReader the {@code LineNumberReader} containing the script * to be processed * @param commentPrefix the prefix that identifies comments in the SQL script &mdash; * typically "--" * @param separator the statement separator in the SQL script &mdash; typically ";" * @return a {@code String} containing the script lines * @throws IOException in case of I/O errors */ public static String readScript(LineNumberReader lineNumberReader, String commentPrefix, String separator) throws IOException { String currentStatement = lineNumberReader.readLine(); StringBuilder scriptBuilder = new StringBuilder(); while (currentStatement != null) { if (commentPrefix != null && !currentStatement.startsWith(commentPrefix)) { if (scriptBuilder.length() > 0) { scriptBuilder.append('\n'); } scriptBuilder.append(currentStatement); } currentStatement = lineNumberReader.readLine(); } appendSeparatorToScriptIfNecessary(scriptBuilder, separator); return scriptBuilder.toString(); } private static void appendSeparatorToScriptIfNecessary(StringBuilder scriptBuilder, String separator) { if (separator == null) { return; } String trimmed = separator.trim(); if (trimmed.length() == separator.length()) { return; } // separator ends in whitespace, so we might want to see if the script is trying // to end the same way if (scriptBuilder.lastIndexOf(trimmed) == scriptBuilder.length() - trimmed.length()) { scriptBuilder.append(separator.substring(trimmed.length())); } } /** * Does the provided SQL script contain the specified delimiter? 
	 * @param script the SQL script
	 * @param delim String delimiting each statement - typically a ';' character
	 */
	public static boolean containsSqlScriptDelimiters(String script, String delim) {
		// NOTE(review): this scan only tracks single-quoted literals; unlike
		// splitSqlScript it ignores double quotes, backslash escapes, and
		// comments, so a delimiter inside those constructs is still "found".
		// Verify whether tightening this would change the separator fallback
		// behavior before changing it.
		boolean inLiteral = false;
		char[] content = script.toCharArray();
		for (int i = 0; i < script.length(); i++) {
			if (content[i] == '\'') {
				inLiteral = !inLiteral;
			}
			if (!inLiteral && script.startsWith(delim, i)) {
				return true;
			}
		}
		return false;
	}

	/**
	 * Execute the given SQL script using default settings for statement
	 * separators, comment delimiters, and exception handling flags.
	 * <p>Statement separators and comments will be removed before executing
	 * individual statements within the supplied script.
	 * <p><strong>Warning</strong>: this method does <em>not</em> release the
	 * provided {@link Connection}.
	 * @param connection the JDBC connection to use to execute the script; already
	 * configured and ready to use
	 * @param resource the resource to load the SQL script from; encoded with the
	 * current platform's default encoding
	 * @throws ScriptException if an error occurred while executing the SQL script
	 * @see #executeSqlScript(Connection, EncodedResource, boolean, boolean, String, String, String, String)
	 * @see #DEFAULT_STATEMENT_SEPARATOR
	 * @see #DEFAULT_COMMENT_PREFIX
	 * @see #DEFAULT_BLOCK_COMMENT_START_DELIMITER
	 * @see #DEFAULT_BLOCK_COMMENT_END_DELIMITER
	 * @see org.springframework.jdbc.datasource.DataSourceUtils#getConnection
	 * @see org.springframework.jdbc.datasource.DataSourceUtils#releaseConnection
	 */
	public static void executeSqlScript(Connection connection, Resource resource) throws ScriptException {
		// Wrap in an EncodedResource using the platform default encoding.
		executeSqlScript(connection, new EncodedResource(resource));
	}

	/**
	 * Execute the given SQL script using default settings for statement
	 * separators, comment delimiters, and exception handling flags.
	 * <p>Statement separators and comments will be removed before executing
	 * individual statements within the supplied script.
	 * <p><strong>Warning</strong>: this method does <em>not</em> release the
	 * provided {@link Connection}.
	 * @param connection the JDBC connection to use to execute the script; already
	 * configured and ready to use
	 * @param resource the resource (potentially associated with a specific encoding)
	 * to load the SQL script from
	 * @throws ScriptException if an error occurred while executing the SQL script
	 * @see #executeSqlScript(Connection, EncodedResource, boolean, boolean, String, String, String, String)
	 * @see #DEFAULT_STATEMENT_SEPARATOR
	 * @see #DEFAULT_COMMENT_PREFIX
	 * @see #DEFAULT_BLOCK_COMMENT_START_DELIMITER
	 * @see #DEFAULT_BLOCK_COMMENT_END_DELIMITER
	 * @see org.springframework.jdbc.datasource.DataSourceUtils#getConnection
	 * @see org.springframework.jdbc.datasource.DataSourceUtils#releaseConnection
	 */
	public static void executeSqlScript(Connection connection, EncodedResource resource) throws ScriptException {
		// Delegate with fail-fast flags and all default delimiters.
		executeSqlScript(connection, resource, false, false, DEFAULT_COMMENT_PREFIX, DEFAULT_STATEMENT_SEPARATOR,
				DEFAULT_BLOCK_COMMENT_START_DELIMITER, DEFAULT_BLOCK_COMMENT_END_DELIMITER);
	}

	/**
	 * Execute the given SQL script.
	 * <p>Statement separators and comments will be removed before executing
	 * individual statements within the supplied script.
	 * <p><strong>Warning</strong>: this method does <em>not</em> release the
	 * provided {@link Connection}.
	 * @param connection the JDBC connection to use to execute the script; already
	 * configured and ready to use
	 * @param resource the resource (potentially associated with a specific encoding)
	 * to load the SQL script from
	 * @param continueOnError whether or not to continue without throwing an exception
	 * in the event of an error
	 * @param ignoreFailedDrops whether or not to continue in the event of specifically
	 * an error on a {@code DROP} statement
	 * @param commentPrefix the prefix that identifies single-line comments in the
	 * SQL script &mdash; typically "--"
	 * @param separator the script statement separator; defaults to
	 * {@value #DEFAULT_STATEMENT_SEPARATOR} if not specified and falls back to
	 * {@value #FALLBACK_STATEMENT_SEPARATOR} as a last resort; may be set to
	 * {@value #EOF_STATEMENT_SEPARATOR} to signal that the script contains a
	 * single statement without a separator
	 * @param blockCommentStartDelimiter the <em>start</em> block comment delimiter; never
	 * {@code null} or empty
	 * @param blockCommentEndDelimiter the <em>end</em> block comment delimiter; never
	 * {@code null} or empty
	 * @throws ScriptException if an error occurred while executing the SQL script
	 * @see #DEFAULT_STATEMENT_SEPARATOR
	 * @see #FALLBACK_STATEMENT_SEPARATOR
	 * @see #EOF_STATEMENT_SEPARATOR
	 * @see org.springframework.jdbc.datasource.DataSourceUtils#getConnection
	 * @see org.springframework.jdbc.datasource.DataSourceUtils#releaseConnection
	 */
	public static void executeSqlScript(Connection connection, EncodedResource resource, boolean continueOnError,
			boolean ignoreFailedDrops, String commentPrefix, String separator, String blockCommentStartDelimiter,
			String blockCommentEndDelimiter) throws ScriptException {

		try {
			if (logger.isInfoEnabled()) {
				logger.info("Executing SQL script from " + resource);
			}
			long startTime = System.currentTimeMillis();

			String script;
			try {
				script = readScript(resource, commentPrefix, separator);
			}
			catch (IOException ex) {
				// Wrap I/O failures in the dedicated script exception type.
				throw new CannotReadScriptException(resource, ex);
			}

			// Resolve the effective separator: default, then newline fallback
			// when the script contains no occurrence of the chosen separator.
			if (separator == null) {
				separator = DEFAULT_STATEMENT_SEPARATOR;
			}
			if (!EOF_STATEMENT_SEPARATOR.equals(separator) && !containsSqlScriptDelimiters(script, separator)) {
				separator = FALLBACK_STATEMENT_SEPARATOR;
			}

			List<String> statements = new LinkedList<>();
			splitSqlScript(resource, script, separator, commentPrefix, blockCommentStartDelimiter,
					blockCommentEndDelimiter, statements);

			int stmtNumber = 0;
			Statement stmt = connection.createStatement();
			try {
				for (String statement : statements) {
					stmtNumber++;
					try {
						stmt.execute(statement);
						int rowsAffected = stmt.getUpdateCount();
						if (logger.isDebugEnabled()) {
							logger.debug(rowsAffected + " returned as update count for SQL: " + statement);
							// Log and drain any SQL warnings at debug level.
							SQLWarning warningToLog = stmt.getWarnings();
							while (warningToLog != null) {
								logger.debug("SQLWarning ignored: SQL state '" + warningToLog.getSQLState() +
										"', error code '" + warningToLog.getErrorCode() +
										"', message [" + warningToLog.getMessage() + "]");
								warningToLog = warningToLog.getNextWarning();
							}
						}
					}
					catch (SQLException ex) {
						// Tolerate the failure when configured to continue, or
						// when it is a DROP and failed drops are being ignored.
						boolean dropStatement = StringUtils.startsWithIgnoreCase(statement.trim(), "drop");
						if (continueOnError || (dropStatement && ignoreFailedDrops)) {
							if (logger.isDebugEnabled()) {
								logger.debug(ScriptStatementFailedException.buildErrorMessage(statement, stmtNumber, resource), ex);
							}
						}
						else {
							throw new ScriptStatementFailedException(statement, stmtNumber, resource, ex);
						}
					}
				}
			}
			finally {
				try {
					stmt.close();
				}
				catch (Throwable ex) {
					// Best-effort close; never mask the primary exception.
					logger.debug("Could not close JDBC Statement", ex);
				}
			}

			long elapsedTime = System.currentTimeMillis() - startTime;
			if (logger.isInfoEnabled()) {
				logger.info("Executed SQL script from " + resource + " in " + elapsedTime + " ms.");
			}
		}
		catch (Exception ex) {
			// Rethrow known script exceptions unchanged; wrap anything else.
			if (ex instanceof ScriptException) {
				throw (ScriptException) ex;
			}
			throw new UncategorizedScriptException(
				"Failed to execute database script from resource [" + resource + "]", ex);
		}
	}

}
package nl.mycompany.questionaire.process;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;

import java.util.List;

import nl.mycompany.questionaire.conf.ApplicationConfiguration;
import nl.mycompany.questionaire.domain.Question;
import nl.mycompany.questionaire.identity.AccessDeniedException;
import nl.mycompany.questionaire.identity.Groups;
import nl.mycompany.questionaire.repository.QuestionRepository;
import nl.mycompany.questionaire.service.Result;
import nl.mycompany.questionaire.service.process.AnswerQuestionService;

import org.activiti.engine.IdentityService;
import org.activiti.engine.RuntimeService;
import org.activiti.engine.TaskService;
import org.activiti.engine.identity.User;
import org.activiti.engine.impl.identity.Authentication;
import org.activiti.engine.runtime.ProcessInstance;
import org.activiti.engine.task.Task;
import org.activiti.engine.test.ActivitiRule;
import org.activiti.engine.test.Deployment;
import org.apache.log4j.Logger;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;

/**
 * Integration tests for the "answer question" BPMN process: starting the
 * process, claiming/answering the question task, and auditing the answer.
 */
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(classes = ApplicationConfiguration.class)
public class AnswerQuestionTest {

	@Autowired
	private RuntimeService runtimeService;

	@Autowired
	private TaskService taskService;

	@Autowired
	private IdentityService identityService;

	@Autowired
	QuestionRepository repo;

	// Injected Activiti rule doubling as a JUnit @Rule to manage deployments.
	@Autowired
	@Rule
	public ActivitiRule activitiSpringRule;

	@Autowired
	public AnswerQuestionService service;

	private static final Logger LOG = Logger
			.getLogger(AnswerQuestionTest.class);

	/**
	 * Happy path: client answers, auditor accepts; the accepted result must
	 * be persisted on the question.
	 */
	@Test
	@Deployment(resources = "diagrams/AnswerQuestion.bpmn")
	public void acceptedAuditQuestionTaskTest() {

		Question question = createQuestion();

		// set the authenticated user for authorization
		Authentication.setAuthenticatedUserId("testClient");

		service.startQuestionProcess(question);

		// check if the process is there
		ProcessInstance processInstance = service
				.findProcessInstanceForQuestion(question);
		assertNotNull(processInstance);

		// claim the task
		service.claimCurrentTaskByCurrentUser(processInstance.getId());

		service.answerQuestion(question, "de kaboutertjes");

		// set the authenticated user for authorization
		Authentication.setAuthenticatedUserId("testAuditor");

		service.auditQuestion(question, Result.RESULT_ACCEPTED);

		// check the database
		// NOTE(review): Optional.get() without isPresent() — fails with
		// NoSuchElementException rather than an assertion if the question
		// is missing; acceptable in a test, but verify intent.
		String remark = repo.findOne(question.getId()).get().getAuditResult();
		assertEquals(Result.RESULT_ACCEPTED, remark);
	}

	/**
	 * Denied path: after the auditor denies, the result is persisted and the
	 * process loops back to a new "answer question" task.
	 */
	@Test
	@Deployment(resources = "diagrams/AnswerQuestion.bpmn")
	public void deniedAuditQuestionTaskTest() {

		Question question = createQuestion();

		// set the authenticated user for authorization
		Authentication.setAuthenticatedUserId("testClient");

		service.startQuestionProcess(question);

		// check if the process is there
		ProcessInstance processInstance = service
				.findProcessInstanceForQuestion(question);
		assertNotNull(processInstance);

		// claim the task
		service.claimCurrentTaskByCurrentUser(processInstance.getId());

		service.answerQuestion(question, "de kaboutertjes");

		// set the authenticated user for authorization
		Authentication.setAuthenticatedUserId("testAuditor");

		service.auditQuestion(question, Result.RESULT_DENIED);

		// check the database
		String remark = repo.findOne(question.getId()).get().getAuditResult();
		assertEquals(Result.RESULT_DENIED, remark);

		// Check if a answer question task is available
		Task theAuditTask = taskService.createTaskQuery()
				.processInstanceId(processInstance.getId()).singleResult();
		assertNotNull(theAuditTask);
		assertEquals(processInstance.getId(),
				theAuditTask.getProcessInstanceId());
		LOG.debug("Task: " + theAuditTask.getName());
		assertEquals("answerQuestionTask", theAuditTask.getName());
	}

	@Test
@Deployment(resources = "diagrams/AnswerQuestion.bpmn") public void answerQuestionTaskTest() { Question question = createQuestion(); // set the authenticated user for authorization Authentication.setAuthenticatedUserId("testClient"); service.startQuestionProcess(question); // check if the process is there ProcessInstance processInstance = service .findProcessInstanceForQuestion(question); assertNotNull(processInstance); // claim the task Task theAnswerTask = taskService.createTaskQuery() .processInstanceId(processInstance.getId()).active().singleResult(); taskService.setAssignee(theAnswerTask.getId(), "testClient"); service.answerQuestion(question, "de kaboutertjes"); // Check if a task is available, queued for an auditor Task theAuditTask = taskService.createTaskQuery() .taskCandidateGroup("auditors").singleResult(); assertNotNull(theAuditTask); assertEquals(processInstance.getId(), theAuditTask.getProcessInstanceId()); LOG.debug("Task: " + theAuditTask.getName()); assertEquals("auditQuestionTask", theAuditTask.getName()); } @Test(expected = AccessDeniedException.class) @Deployment(resources = "diagrams/AnswerQuestion.bpmn") public void unauthorizedAnswerQuestionTaskTest() { Question question = createQuestion(); // set the authenticated user for authorization Authentication.setAuthenticatedUserId("testManager"); service.startQuestionProcess(question); } @Test @Deployment(resources = "diagrams/AnswerQuestion.bpmn") public void simpleProcessTest() { ProcessInstance processInstance = runtimeService .startProcessInstanceByKey("answerQuestionProcess"); Assert.assertNotNull(processInstance); // Check if a task is available for the given process Assert.assertEquals( 1, taskService.createTaskQuery() .processInstanceId(processInstance.getId()).count()); } @Before public void setUp() { User client = identityService.newUser("testClient"); client.setFirstName("Vincent"); client.setLastName("testClient"); identityService.saveUser(client); 
identityService.createMembership("testClient", Groups.GROUP_CLIENTS); User manager = identityService.newUser("testManager"); client.setFirstName("Willem"); client.setLastName("testManager"); identityService.saveUser(manager); identityService.createMembership("testManager", Groups.GROUP_MANAGERS); User auditor = identityService.newUser("testAuditor"); client.setFirstName("Wendelien"); client.setLastName("testAuditor"); identityService.saveUser(auditor); identityService.createMembership("testAuditor", Groups.GROUP_AUDITORS); } @After public void tearDown() { deleteQuestions(); try { identityService.deleteUser("testClient"); identityService.deleteUser("testManager"); identityService.deleteUser("testAuditor"); } catch (Throwable t) { // ignore } } private Question createQuestion() { Question q = new Question(); q.setDomain("test"); q.setQuestionText("Wie heeft er in de prullenbak gepoept?"); LOG.debug("Question: " + q.getId() + " is created"); repo.save(q); LOG.debug("Question: " + q.getId() + " is saved"); return q; } private void deleteQuestions() { // clean up database List<Question> questions = repo.findAll(); for (Question question : questions) { LOG.debug("deleting question: " + question.getId()); repo.delete(question); } } }
/* * Copyright (c) 2016 simplity.org * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package org.simplity.pet.service; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import org.simplity.json.JSONException; import org.simplity.json.JSONObject; import org.simplity.json.JSONWriter; import org.simplity.kernel.ApplicationError; import org.simplity.kernel.FormattedMessage; import org.simplity.kernel.db.DbDriver; import org.simplity.service.AbstractService; import org.simplity.service.ServiceData; /** * @author simplity.org * */ public class FilterOwners extends AbstractService { private static final String OWNERS_SQL = "SELECT id AS \"ownerId\", first_name AS \"firstName\", last_name AS \"lastName\" , address, city, telephone FROM owners"; private static final String WHERE = " WHERE last_name LIKE ? 
"; private static final String PETS_SQL = "SELECT name AS \"petName\" FROM pets WHERE owner_id = ?"; private static final String MSG_NAME = "alphaOnly"; private static final String LAST_NAME = "lastName"; /* * (non-Javadoc) * * @see org.simplity.service.ServiceInterface#respond(org.simplity .service. * ServiceData) */ @Override public ServiceData respond(ServiceData inputData) { Connection con = null; try { ServiceData outData = new ServiceData(); String lastName = this.getLastName(inputData.getPayLoad()); if (lastName == null) { outData.addMessage(new FormattedMessage(MSG_NAME, null, LAST_NAME, null, 0)); return outData; } JSONWriter writer = new JSONWriter(); /** * json is of the form * * <pre> * { * owners: [ * { * ownerId:1, * firstName:"..", * .... * petDetails:[ * {petName:"pinky"}, * {petname:"vinky"} * ] * }, * { * ownerId:2, * .... * } * ] * } * </pre> */ writer.object(); writer.key("owners"); writer.array(); con = DbDriver.getConnection(); PreparedStatement stmt = null; /* * we check whether last name is specified */ if (lastName.isEmpty()) { stmt = con.prepareStatement(OWNERS_SQL); } else { stmt = con.prepareStatement(OWNERS_SQL + WHERE); stmt.setString(1, '%' + lastName + '%'); } ResultSet rs = stmt.executeQuery(); while (rs.next()) { long ownerId = rs.getLong(1); writer.object(); this.extractOwnerData(rs, writer); writer.key("petDetails"); writer.array(); this.extractPetDetails(con, ownerId, writer); writer.endArray(); writer.endObject(); } writer.endArray(); writer.endObject(); outData.setPayLoad(writer.toString()); return outData; } catch (SQLException e) { throw new ApplicationError(e, "Error while filtering rows from owners"); } finally { if (con != null) { try { con.close(); } catch (Exception ignore) { // } } } } /** * @param payLoad * @return */ private String getLastName(String payload) { String lastName = ""; if (payload == null || payload.isEmpty()) { return lastName; } JSONObject json = new JSONObject(payload); lastName = 
json.optString(LAST_NAME).trim(); if (lastName.isEmpty() || lastName.matches("^[a-zA-Z]*$")) { return lastName; } /* * it is in error.. */ return null; } /** * extract all columns from a row of owners into json writer * * @param rs * @param writer * @throws JSONException * @throws SQLException */ private void extractOwnerData(ResultSet rs, JSONWriter writer) throws JSONException, SQLException { writer.key("ownerId"); writer.value(rs.getLong(1)); writer.key("firstName"); writer.value(rs.getString(2)); writer.key("lastName"); writer.value(rs.getString(3)); writer.key("address"); writer.value(rs.getString(4)); writer.key("city"); writer.value(rs.getString(5)); writer.key("telephone"); writer.value(rs.getString(6)); } /** * extract pet names for the given owner int josn writer * * @param con * @param ownerId * @param writer * @throws SQLException */ private void extractPetDetails(Connection con, long ownerId, JSONWriter writer) throws SQLException { PreparedStatement stmt = con.prepareStatement(PETS_SQL); stmt.setLong(1, ownerId); ResultSet rs = stmt.executeQuery(); while (rs.next()) { writer.object(); writer.key("petName"); writer.value(rs.getString(1)); writer.endObject(); } rs.close(); stmt.close(); } }
/* * The MIT License (MIT) * * Copyright (c) 2015 The MsgCodec Authors * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.cinnober.msgcodec.blink; import com.cinnober.msgcodec.io.ByteSource; import com.cinnober.msgcodec.DecodeException; import java.io.IOException; import java.math.BigDecimal; import java.math.BigInteger; /** * Methods for reading primitive Native Blink data types. * * <p>Read methods for variable size data types come in two flavors: * <code>readInlineXxx</code> and <code>readDataXxx</code> for inline and data area respectively. * * @see BlinkOutput * @author mikael.brannstrom * */ public class NativeBlinkInput { private NativeBlinkInput() { } /** * Read a signed 8-bit integer. * @param in the input stream to read from, not null. * @return the value * @throws IOException if the input stream throws an exception. * @throws DecodeException if the value could not be parsed. 
*/ public static byte readInt8(ByteSource in) throws IOException { return (byte) in.read(); } /** * Read a signed 16-bit integer. * @param in the input stream to read from, not null. * @return the value * @throws IOException if the input stream throws an exception. * @throws DecodeException if the value could not be parsed. */ public static short readInt16(ByteSource in) throws IOException { return (short)(in.read() | in.read() << 8); } /** * Read a signed 32-bit integer. * @param in the input stream to read from, not null. * @return the value * @throws IOException if the input stream throws an exception. * @throws DecodeException if the value could not be parsed. */ public static int readInt32(ByteSource in) throws IOException { return in.readIntLE(); } /** * Read a signed 64-bit integer. * @param in the input stream to read from, not null. * @return the value * @throws IOException if the input stream throws an exception. * @throws DecodeException if the value could not be parsed. */ public static long readInt64(ByteSource in) throws IOException { return in.readLongLE(); } /** * Read a nullable signed 8-bit integer. * @param in the input stream to read from, not null. * @return the value, or null. * @throws IOException if the input stream throws an exception. * @throws DecodeException if the value could not be parsed. */ public static Byte readInt8Null(ByteSource in) throws IOException { if (in.read() != 0) { return (byte) in.read(); } else { in.skip(1); return null; } } /** * Read a nullable signed 16-bit integer. * @param in the input stream to read from, not null. * @return the value, or null. * @throws IOException if the input stream throws an exception. * @throws DecodeException if the value could not be parsed. */ public static Short readInt16Null(ByteSource in) throws IOException { if (in.read() != 0) { return readInt16(in); } else { in.skip(2); return null; } } /** * Read a nullable signed 32-bit integer. 
* @param in the input stream to read from, not null. * @return the value, or null. * @throws IOException if the input stream throws an exception. * @throws DecodeException if the value could not be parsed. */ public static Integer readInt32Null(ByteSource in) throws IOException { if (in.read() != 0) { return in.readIntLE(); } else { in.skip(4); return null; } } /** * Read a nullable signed 64-bit integer. * @param in the input stream to read from, not null. * @return the value, or null. * @throws IOException if the input stream throws an exception. * @throws DecodeException if the value could not be parsed. */ public static Long readInt64Null(ByteSource in) throws IOException { if (in.read() != 0) { return in.readLongLE(); } else { in.skip(8); return null; } } public static byte readUInt8(ByteSource in) throws IOException { return readInt8(in); } public static Byte readUInt8Null(ByteSource in) throws IOException { return readInt8Null(in); } public static short readUInt16(ByteSource in) throws IOException { return readInt16(in); } public static Short readUInt16Null(ByteSource in) throws IOException { return readInt16Null(in); } public static int readUInt32(ByteSource in) throws IOException { return readInt32(in); } public static Integer readUInt32Null(ByteSource in) throws IOException { return readInt32Null(in); } public static long readUInt64(ByteSource in) throws IOException { return readInt64(in); } public static Long readUInt64Null(ByteSource in) throws IOException { return readInt64Null(in); } /** * Read a signed big integer. * @param in the input stream to read from, not null. * @param maxLength the maximum bigint length (bytes) that is allowed, or -1 for no limit. * @return the value, not null. * @throws IOException if the input stream throws an exception. * @throws DecodeException if the value could not be parsed. 
*/ public static BigInteger readDataBigInt(ByteSource in, int maxLength) throws IOException { byte[] bytes = readDataBinary(in, maxLength); if (bytes.length == 0) { throw new DecodeException("Cannot decode BigInt from zero length data"); } // convert bytes from LE to BE for (int i=0, i2=bytes.length-1; i<i2; i++, i2--) { byte b = bytes[i]; bytes[i] = bytes[i2]; bytes[i2] = b; } return new BigInteger(bytes); } /** * Read a 32-bit floating point number. * * <p>Since the Blink protocol does not support 32-bit floating point numbers, * the value is read as a 64-bit floating point number. * * @param in the input stream to read from, not null. * @return the value * @see #readFloat64(ByteSource) * @throws IOException if the input stream throws an exception. * @throws DecodeException if the value could not be parsed. */ public static float readFloat32(ByteSource in) throws IOException { return (float) readFloat64(in); } /** * Read a nullable 32-bit floating point number. * * <p>Since the Blink protocol does not support 32-bit floating point numbers, * the value is read as a 64-bit floating point number. * * @param in the input stream to read from, not null. * @return the value, or null. * @see #readFloat64(ByteSource) * @throws IOException if the input stream throws an exception. * @throws DecodeException if the value could not be parsed. */ public static Float readFloat32Null(ByteSource in) throws IOException { Double value = readFloat64Null(in); if (value == null) { return null; } else { return value.floatValue(); } } /** * Read a 64-bit floating point number. * @param in the input stream to read from, not null. * @return the value * @throws IOException if the input stream throws an exception. * @throws DecodeException if the value could not be parsed. */ public static double readFloat64(ByteSource in) throws IOException { long value = in.readLongLE(); return Double.longBitsToDouble(value); } /** * Read a nullable 64-bit floating point number. 
* @param in the input stream to read from, not null. * @return the value, or null * @throws IOException if the input stream throws an exception. * @throws DecodeException if the value could not be parsed. */ public static Double readFloat64Null(ByteSource in) throws IOException { if (in.read() != 0) { return readFloat64(in); } else { in.skip(8); return null; } } /** * Read a decimal number. * @param in the input stream to read from, not null. * @return the value, not null * @throws IOException if the input stream throws an exception. * @throws DecodeException if the value could not be parsed. */ public static BigDecimal readDecimal(ByteSource in) throws IOException { int exp = readInt8(in); long mantissa = readInt64(in); return BigDecimal.valueOf(mantissa, -exp); } /** * Read a nullable decimal number. * @param in the input stream to read from, not null. * @return the value, or null * @throws IOException if the input stream throws an exception. * @throws DecodeException if the value could not be parsed. */ public static BigDecimal readDecimalNull(ByteSource in) throws IOException { if (in.read() != 0) { return readDecimal(in); } else { in.skip(9); return null; } } /** * Read a big decimal number. * @param in the input stream to read from, not null. * @param maxLength the maximum data length (bytes) that is allowed for the mantissa, or -1 for no limit. * @return the value, not null * @throws IOException if the input stream throws an exception. * @throws DecodeException if the value could not be parsed. */ public static BigDecimal readDataBigDecimal(ByteSource in, int maxLength) throws IOException { int exp = readInt32(in); BigInteger mantissa = readDataBigInt(in, maxLength); return new BigDecimal(mantissa, -exp); } /** * Read a boolean value. * @param in the input stream to read from, not null. * @return the value * @throws IOException if the input stream throws an exception. * @throws DecodeException if the value could not be parsed. 
*/ public static boolean readBoolean(ByteSource in) throws IOException { return in.read() != 0; } /** * Read a nullable boolean value. * @param in the input stream to read from, not null. * @return the value, or null * @throws IOException if the input stream throws an exception. * @throws DecodeException if the value could not be parsed. */ public static Boolean readBooleanNull(ByteSource in) throws IOException { if (in.read() != 0) { return in.read() != 0; } else { in.skip(0); return null; } } /** * Read a unicode string. * @param in the input stream to read from, not null. * @param maxLength the maximum string length (bytes), in the range [1, 255]. * @return the value, not null. * @throws IOException if the input stream throws an exception. * @throws DecodeException if the value could not be parsed. */ public static String readInlineStringUTF8(ByteSource in, int maxLength) throws IOException { int size = in.read(); if (size > maxLength) { throw new DecodeException("String length (" + size + ") exceeds limit (" + maxLength + ")"); } String str = in.readStringUtf8(size); in.skip(maxLength - size); return str; } /** * Read a nullable unicode string. * @param in the input stream to read from, not null. * @param maxLength the maximum string length (bytes), in the range [1, 255]. * @return the value, or null. * @throws IOException if the input stream throws an exception. * @throws DecodeException if the value could not be parsed. */ public static String readInlineStringUTF8Null(ByteSource in, int maxLength) throws IOException { if (in.read() != 0) { return readInlineStringUTF8(in, maxLength); } else { in.skip(1 + maxLength); return null; } } /** * Read a unicode string. * @param in the input stream to read from, not null. * @return the value, not null. * @throws IOException if the input stream throws an exception. * @throws DecodeException if the value could not be parsed. 
*/ public static String readDataStringUTF8(ByteSource in) throws IOException { return readDataStringUTF8(in, -1); } /** * Read a unicode string. * @param in the input stream to read from, not null. * @param maxLength the maximum string length (bytes) that is allowed, or -1 for no limit. * @return the value, not null. * @throws IOException if the input stream throws an exception. * @throws DecodeException if the value could not be parsed. */ public static String readDataStringUTF8(ByteSource in, int maxLength) throws IOException { int size = readUInt32(in); if (size < 0) { throw new DecodeException("Cannot read string larger than " + Integer.MAX_VALUE + " bytes."); } if (size > maxLength && maxLength >= 0) { throw new DecodeException("String length (" + size + ") exceeds limit (" + maxLength + ")"); } return in.readStringUtf8(size); } /** * Read a binary value. * @param in the input stream to read from, not null. * @param maxLength the maximum binary length (bytes), in the range [1, 255]. * @return the value, not null. * @throws IOException if the input stream throws an exception. * @throws DecodeException if the value could not be parsed. */ public static byte[] readInlineBinary(ByteSource in, int maxLength) throws IOException { int size = in.read(); if (size > maxLength) { throw new DecodeException("Binary length (" + size + ") exceeds limit (" + maxLength + ")"); } byte[] value = new byte[size]; in.read(value); return value; } /** * Read a nullable binary value. * @param in the input stream to read from, not null. * @param maxLength the maximum binary length (bytes), in the range [1, 255]. * @return the value, or null. * @throws IOException if the input stream throws an exception. * @throws DecodeException if the value could not be parsed. 
*/ public static byte[] readInlineBinaryNull(ByteSource in, int maxLength) throws IOException { if (in.read() != 0) { return readInlineBinary(in, maxLength); } else { in.skip(1 + maxLength); return null; } } /** * Read binary data. * * @param in the input stream to read from, not null. * @param maxLength the maximum binary length (bytes) that is allowed, or -1 for no limit. * @return the value, not null. * @throws IOException if the input stream throws an exception. * @throws DecodeException if the value could not be parsed. */ public static byte[] readDataBinary(ByteSource in, int maxLength) throws IOException { int size = readUInt32(in); if (size < 0) { throw new DecodeException("Cannot read binary larger than " + Integer.MAX_VALUE + " bytes."); } if (size > maxLength && maxLength >= 0) { throw new DecodeException("Binary length (" + size + ") exceeds limit (" + maxLength + ")"); } byte[] data = new byte[size]; in.read(data); return data; } }
package nisui.h2_store;

import java.sql.Array;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import nisui.core.*;
import nisui.core.util.IterWithSeparator;
import nisui.core.util.QueryChunk;
import nisui.core.util.ResultSetIterator;

/**
 * Base class for H2-backed storage operations over data points (type D) and
 * experiment results (type R).
 *
 * <p>Each concrete nested class builds its SQL lazily, caches it in the shared
 * {@code stmt} prepared statement, and releases it via {@link #close()}.
 */
public abstract class H2Operations<D, R> implements AutoCloseable {
    private static Logger logger = LoggerFactory.getLogger(H2Operations.class);

    // connection wrapper owned by the caller; supplies table names and handlers
    protected H2ResultsStorage<D, R>.Connection con;
    // lazily-created prepared statement, reused across invocations
    protected PreparedStatement stmt;

    public H2Operations(H2ResultsStorage<D, R>.Connection con) {
        this.con = con;
    }

    /** Closes the cached prepared statement, if one was ever created. */
    @Override
    public void close() throws SQLException {
        if (stmt != null) {
            stmt.close();
        }
    }

    /**
     * Converts a field value to its database representation: enum values are
     * stored as their string form, everything else is passed through.
     */
    private static <V> Object toDbRep(ExperimentValuesHandler<V>.Field field, V experimentValue) {
        Object value = field.get(experimentValue);
        if (value != null && field.getType().isEnum()) {
            value = value.toString();
        }
        return value;
    }

    /**
     * Converts a database value back to the field's type: strings are parsed
     * by the field (covers the enum-as-string case), others pass through.
     */
    private static <V> Object fromDbRep(ExperimentValuesHandler<V>.Field field, Object dbRep) {
        if (dbRep instanceof String) {
            return field.parseString((String)dbRep);
        } else {
            return dbRep;
        }
    }

    /**
     * Appends a parsed query chunk to {@code sql}, replacing each embedded
     * parameter with a "?" placeholder and collecting the parameter values in
     * order into {@code parameters}.
     */
    private static void applyQueryChunk(QueryChunk chunk, StringBuilder sql, List<Object> parameters) {
        chunk.scan(sql::append, (param) -> {
            sql.append(" ? ");
            parameters.add(param);
        });
    }

    /**
     * Inserts rows into the data-points table:
     * (num_planned, num_performed, field...).
     */
    public static class InsertDataPoint<D, R> extends H2Operations<D, R> implements DataPointInserter<D> {
        InsertDataPoint(H2ResultsStorage<D, R>.Connection con) {
            super(con);
            // Build "INSERT INTO <table>(num_planned, num_performed, f1, ...) VALUES(?, ?, ...)" once.
            StringBuilder sql = new StringBuilder();
            sql.append("INSERT INTO ").append(con.DATA_POINTS_TABLE_NAME).append("(");
            sql.append("num_planned").append(", ").append("num_performed");
            for (ExperimentValuesHandler<D>.Field field : con.parent().dataPointHandler.fields()) {
                sql.append(", ").append(field.getName());
            }
            sql.append(") VALUES(");
            // 2 placeholders for the counters plus one per data-point field
            int numberOfFields = con.parent().dataPointHandler.fields().size();
            for (int i = 0; i < 2 + numberOfFields; ++i) {
                if (0 < i) {
                    sql.append(", ");
                }
                sql.append("?");
            }
            sql.append(");");
            stmt = con.createPreparedStatement(sql.toString());
        }

        @Override
        public void insert(long numPlanned, long numPerformed, D dataPoint) throws SQLException {
            stmt.clearParameters();
            stmt.setLong(1, numPlanned);
            stmt.setLong(2, numPerformed);
            // field parameters start at index 3, in handler field order
            int paramIndex = 3;
            for (ExperimentValuesHandler<D>.Field field : con.parent().dataPointHandler.fields()) {
                stmt.setObject(paramIndex, toDbRep(field, dataPoint));
                ++paramIndex;
            }
            stmt.executeUpdate();
        }
    }

    /**
     * Reads data points, optionally restricted by user-supplied boolean filter
     * expressions that are ANDed together.
     */
    public static class ReadDataPoints<D, R> extends H2Operations<D, R> implements DataPointsReader<D> {
        String[] filters;

        ReadDataPoints(H2ResultsStorage<D, R>.Connection con, String... filters) {
            super(con);
            this.filters = filters;
        }

        @Override
        public Iterator<DataPoint<D>> iterator() {
            // Filter parameters collected while the SQL is first built.
            // NOTE(review): on a second call stmt != null, so this list stays
            // empty and the parameters bound on the first call are reused by
            // the prepared statement — confirm this reuse is intended.
            LinkedList<Object> parameters = new LinkedList<>();
            if (stmt == null) {
                StringBuilder sql = new StringBuilder();
                sql.append("SELECT id, num_planned, num_performed");
                for (ExperimentValuesHandler<D>.Field field : con.parent().dataPointHandler.fields()) {
                    sql.append(", ").append(field.getName());
                }
                sql.append(" FROM ").append(con.DATA_POINTS_TABLE_NAME);
                if (0 < filters.length) {
                    // each filter becomes a parenthesized predicate; first gets
                    // WHERE, the rest are ANDed on
                    H2QueryParser queryParser = new H2QueryParser();
                    for (int i = 0; i < filters.length; ++i) {
                        if (0 == i) {
                            sql.append(" WHERE ");
                        } else {
                            sql.append(" AND ");
                        }
                        sql.append("(");
                        applyQueryChunk(queryParser.parseBoolean(filters[i]), sql, parameters);
                        sql.append(")");
                    }
                }
                sql.append(';');
                stmt = con.createPreparedStatement(sql.toString());
            }
            try {
                {
                    // bind the filter parameters in collection order
                    int i = 1;
                    for (Object parameter : parameters) {
                        stmt.setObject(i, parameter);
                        i += 1;
                    }
                }
                return new ResultSetIterator<>(stmt.executeQuery(), rs -> {
                    D value = con.parent().dataPointHandler.createValue();
                    // columns 1-3 are id/num_planned/num_performed; fields start at 4
                    int i = 4;
                    for (ExperimentValuesHandler<D>.Field field : con.parent().dataPointHandler.fields()) {
                        field.set(value, fromDbRep(field, rs.getObject(i)));
                        ++i;
                    }
                    return new H2DataPoint<>(rs.getLong(1), rs.getLong(2), rs.getLong(3), value);
                });
            } catch (SQLException e) {
                throw new RuntimeException(e);
            }
        }
    }

    /**
     * Inserts an experiment result row and bumps num_performed on its data
     * point (two statements, executed back-to-back).
     */
    public static class InsertExperimentResult<D, R> extends H2Operations<D, R> implements ExperimentResultInserter<R> {
        // secondary statement that increments num_performed on the data point
        private PreparedStatement updateDpStatement;

        @Override
        public void close() throws SQLException {
            // close both statements; finally guarantees super.close() runs
            try {
                if (updateDpStatement != null) {
                    updateDpStatement.close();
                }
            } finally {
                super.close();
            }
        }

        InsertExperimentResult(H2ResultsStorage<D, R>.Connection con) {
            super(con);
            // "INSERT INTO <results>(data_point_id, seed, f1, ...) VALUES(?, ?, ...)"
            StringBuilder sql = new StringBuilder();
            sql.append("INSERT INTO ").append(con.EXPERIMENT_RESULTS_TABLE_NAME).append("(");
            sql.append("data_point_id").append(", ").append("seed");
            for (ExperimentValuesHandler<R>.Field field : con.parent().experimentResultHandler.fields()) {
                sql.append(", ").append(field.getName());
            }
            sql.append(") VALUES(");
            int numberOfFields = con.parent().experimentResultHandler.fields().size();
            for (int i = 0; i < 2 + numberOfFields; ++i) {
                if (0 < i) {
                    sql.append(", ");
                }
                sql.append("?");
            }
            sql.append(");");
            stmt = con.createPreparedStatement(sql.toString());
            updateDpStatement = con.createPreparedStatement(String.format("UPDATE %s SET num_performed = num_performed + 1 WHERE id = ?;", con.DATA_POINTS_TABLE_NAME));
        }

        @Override
        public void insert(DataPoint<?> dataPoint, long seed, R experimentResult) throws SQLException {
            stmt.clearParameters();
            stmt.setLong(1, ((H2DataPoint<?>)dataPoint).getId());
            stmt.setLong(2, seed);
            // result-field parameters start at index 3
            int paramIndex = 3;
            for (ExperimentValuesHandler<R>.Field field : con.parent().experimentResultHandler.fields()) {
                stmt.setObject(paramIndex, toDbRep(field, experimentResult));
                ++paramIndex;
            }
            updateDpStatement.setLong(1, ((H2DataPoint<?>)dataPoint).getId());
            // insert the result first, then bump the counter
            stmt.executeUpdate();
            updateDpStatement.executeUpdate();
        }
    }

    /**
     * Reads experiment results belonging to a given set of data points, using
     * an H2 array parameter for the data_point_id IN (...) restriction.
     */
    public static class ReadExperimentResults<D, R> extends H2Operations<D, R> implements ExperimentResultsReader<D, R> {
        // id -> data point, used to re-attach each result to its data point
        private HashMap<Long, H2DataPoint<D>> dataPoints;

        ReadExperimentResults(H2ResultsStorage<D, R>.Connection con, Iterable<DataPoint<D>> dataPoints) {
            super(con);
            this.dataPoints = new HashMap<>();
            for (DataPoint<D> dp : dataPoints) {
                H2DataPoint<D> h2dp = (H2DataPoint<D>)dp;
                this.dataPoints.put(h2dp.getId(), h2dp);
            }
        }

        @Override
        public Iterator<ExperimentResult<D, R>> iterator() {
            if (stmt == null) {
                StringBuilder sql = new StringBuilder();
                sql.append("SELECT id");
                sql.append(", ").append("data_point_id");
                sql.append(", ").append("seed");
                for (ExperimentValuesHandler<R>.Field field : con.parent().experimentResultHandler.fields()) {
                    sql.append(", ").append(field.getName());
                }
                sql.append(" FROM ").append(con.EXPERIMENT_RESULTS_TABLE_NAME);
                // H2-specific TABLE() construct: unnests the bound BIGINT array
                sql.append(" WHERE data_point_id IN (SELECT * FROM TABLE(id BIGINT = ?))");
                sql.append(';');
                stmt = con.createPreparedStatement(sql.toString());
            }
            try {
                // bind the data-point ids as a SQL array parameter
                Array array = stmt.getConnection().createArrayOf("BIGINT", dataPoints.keySet().toArray());
                stmt.setArray(1, array);
                return new ResultSetIterator<>(stmt.executeQuery(), rs -> {
                    R value = con.parent().experimentResultHandler.createValue();
                    // columns 1-3 are id/data_point_id/seed; fields start at 4
                    int i = 4;
                    for (ExperimentValuesHandler<R>.Field field : con.parent().experimentResultHandler.fields()) {
                        field.set(value, fromDbRep(field, rs.getObject(i)));
                        ++i;
                    }
                    return new H2ExperimentResult<>(rs.getLong(1), dataPoints.get(rs.getLong(2)), rs.getLong(3), value);
                });
            } catch (SQLException e) {
                throw new RuntimeException(e);
            }
        }
    }
}
package org.broadinstitute.hellbender.tools.walkers.genotyper.afcalc;

import com.google.common.annotations.VisibleForTesting;
import htsjdk.variant.variantcontext.Allele;
import org.broadinstitute.hellbender.utils.MathUtils;

import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

/**
 * TODO this class (+AFCalculator) is a bit messy: it combines "debugging" state (unnecessarily
 * TODO adding CPU cost in production) with an important part of the AF calculation state. It seems
 * TODO the second part could simply be blended into AFCalculator. On one hand that would reduce
 * TODO per-class code size, but the two classes are so intertwined that it is hard to tell what is
 * TODO going on either way.
 *
 * Keeps track of the state information during the exact model AF calculation.
 *
 * Tracks things like the MLE and MAP AC values, their corresponding likelihood and posterior
 * values, the likelihood of the AF == 0 state, and the number of evaluations needed
 * by the calculation to compute the P(AF == 0)
 */
final class StateTracker {
    // Sentinel for "no likelihood/posterior recorded yet"; any real log10 value exceeds it.
    private static final double VALUE_NOT_CALCULATED = Double.NEGATIVE_INFINITY;
    static final double MAX_LOG10_ERROR_TO_STOP_EARLY = 6; // we want the calculation to be accurate to 1 / 10^6

    /**
     * These variables are intended to contain the MLE and MAP (and their corresponding allele counts)
     * of the site over all alternate alleles
     */
    private double log10MLE;
    private double log10MAP;

    /**
     * Returns a vector with maxAltAlleles values containing AC values at the MLE
     *
     * The values of the ACs for this call are stored in the getAllelesUsedInGenotyping order,
     * starting from index 0 (i.e., the first alt allele is at 0). The vector is always
     * maxAltAlleles in length, and so only the first getAllelesUsedInGenotyping.size() - 1 values
     * are meaningful.
     */
    private int[] alleleCountsOfMLE;
    private int[] alleleCountsOfMAP;

    /**
     * A vector of log10 likelihood values seen, for future summation. When the size of the
     * vector is exceeed -- because we've pushed more posteriors than there's space to hold
     * -- we simply sum up the existing values, make that the first value, and continue.
     */
    private final double[] log10LikelihoodsForAFGt0 = new double[LIKELIHOODS_CACHE_SIZE];
    private static final int LIKELIHOODS_CACHE_SIZE = 5000;
    // Next free slot in the cache; only indices < this value hold meaningful data.
    private int log10LikelihoodsForAFGt0CacheIndex = 0;

    /**
     * The actual sum of the likelihoods. Null if the sum hasn't been computed yet
     */
    private Double log10LikelihoodsForAFGt0Sum = null;

    /**
     * Contains the likelihood for the site's being monomorphic (i.e. AF=0 for all alternate alleles)
     */
    private double log10LikelihoodOfAFzero = 0.0;

    /**
     * The list of alleles actually used in computing the AF
     */
    private List<Allele> allelesUsedInGenotyping = null;

    /**
     * Create a results object capability of storing results for calls with up to maxAltAlleles
     *
     * @param maxAltAlleles an integer >= 0 (zero is accepted by the check below; callers can grow
     *                      capacity later via {@link #ensureMaximumAlleleCapacity})
     */
    StateTracker(final int maxAltAlleles) {
        if ( maxAltAlleles < 0 ) {
            throw new IllegalArgumentException("maxAltAlleles must be >= 0, saw " + maxAltAlleles);
        }
        alleleCountsOfMLE = new int[maxAltAlleles];
        alleleCountsOfMAP = new int[maxAltAlleles];
        reset();
    }

    /**
     * Is the likelihood of configuration K too low to consider, related to the
     * maximum likelihood seen already?
     *
     * @param log10LofK the log10 likelihood of the configuration we're considering analyzing
     * @return true if the configuration cannot meaningfully contribute to our likelihood sum
     */
    private boolean tooLowLikelihood(final double log10LofK) {
        return log10LofK < log10MLE - MAX_LOG10_ERROR_TO_STOP_EARLY;
    }

    /**
     * @return true iff all ACs in this object are less than or equal to their corresponding ACs in the provided set
     */
    private boolean isLowerAC(final ExactACcounts otherACs, final boolean otherACsContainsReference) {
        final int[] otherACcounts = otherACs.getCounts();
        // When otherACs includes the reference at index 0, skip it: our MLE counts
        // only cover alternate alleles, so indexes must be shifted by one.
        final int firstAltAlleleIndex = otherACsContainsReference ? 1 : 0;
        for ( int i = firstAltAlleleIndex; i < otherACcounts.length; i++ ) {
            if ( alleleCountsOfMLE[i - firstAltAlleleIndex] > otherACcounts[i] ) {
                return false;
            }
        }
        return true;
    }

    /**
     * Should we stop exploring paths from ACs, given it's log10LofK
     *
     * @param log10LofK the log10LofK of these ACs
     * @param ACs the ACs of this state
     * @param exactACcountsContainReference whether the {@code ACs} contains the reference allele count (index == 0) beside all other alternative alleles.
     * @return return true if there's no reason to continue with subpaths of AC, or false otherwise
     */
    @VisibleForTesting
    boolean abort(final double log10LofK, final ExactACcounts ACs, final boolean enforceLowerACs, final boolean exactACcountsContainReference) {
        return tooLowLikelihood(log10LofK) && (!enforceLowerACs || isLowerAC(ACs,exactACcountsContainReference));
    }

    @VisibleForTesting
    int[] getAlleleCountsOfMAP() {
        return alleleCountsOfMAP;
    }

    /**
     * @return the likelihoods summed across all AC values for AC > 0
     */
    private double getLog10LikelihoodOfAFNotZero() {
        if ( log10LikelihoodsForAFGt0Sum == null ) {
            if ( log10LikelihoodsForAFGt0CacheIndex == 0 ){ // there's nothing to sum up, so make the sum equal to the smallest thing we have
                log10LikelihoodsForAFGt0Sum = MathUtils.LOG10_P_OF_ZERO;
            } else {
                // Only entries [0, cacheIndex) are meaningful; anything beyond is sentinel fill.
                log10LikelihoodsForAFGt0Sum = MathUtils.log10SumLog10(log10LikelihoodsForAFGt0, 0, log10LikelihoodsForAFGt0CacheIndex);
            }
        }
        return log10LikelihoodsForAFGt0Sum;
    }

    /**
     * @return the log10 likelihood of AF == 0
     */
    private double getLog10LikelihoodOfAFzero() {
        return log10LikelihoodOfAFzero;
    }

    /**
     * Convert this state to an corresponding AFCalcResult.
     *
     * Assumes that the values in this state have been filled in with meaningful values during the calculation.
     * For example, that the allelesUsedInGenotyping has been set, that the alleleCountsOfMLE contains meaningful
     * values, etc.
     *
     * @param log10PriorsByAC the priors by AC
     *
     * @return an AFCalcResult summarizing the final results of this calculation
     */
    AFCalculationResult toAFCalculationResult(final double[] log10PriorsByAC) {
        // Only the first (#alleles - 1) MLE counts are meaningful (alt alleles only).
        final int [] subACOfMLE = Arrays.copyOf(alleleCountsOfMLE, allelesUsedInGenotyping.size() - 1);
        final double[] log10Likelihoods = MathUtils.normalizeLog10(new double[]{getLog10LikelihoodOfAFzero(), getLog10LikelihoodOfAFNotZero()});
        final double[] log10Priors = MathUtils.normalizeLog10(new double[]{log10PriorsByAC[0], MathUtils.log10SumLog10(log10PriorsByAC, 1)});

        final Map<Allele, Double> log10pRefByAllele = new LinkedHashMap<>(allelesUsedInGenotyping.size());
        for ( int i = 0; i < subACOfMLE.length; i++ ) {
            // i + 1 because index 0 of allelesUsedInGenotyping is the reference allele.
            final Allele allele = allelesUsedInGenotyping.get(i+1);
            final double log10PRef = alleleCountsOfMAP[i] > 0 ? -10000 : 0; // TODO -- a total hack but in effect what the old behavior was
            log10pRefByAllele.put(allele, log10PRef);
        }

        return new AFCalculationResult(subACOfMLE, allelesUsedInGenotyping, log10Likelihoods, log10Priors, log10pRefByAllele);
    }

    // --------------------------------------------------------------------------------
    //
    // Protected mutational methods only for use within the calculation models themselves
    //
    // --------------------------------------------------------------------------------

    /**
     * Reset the data in this results object, so that it can be used in a subsequent AF calculation
     *
     * Resetting of the data is done by the calculation model itself, so shouldn't be done by callers any longer
     *
     * @param ensureAltAlleleCapacity indicate the minimum number of alt-alleles that should be supported by the
     *                                tracker.
     */
    void reset(final int ensureAltAlleleCapacity) {
        log10MLE = log10MAP = log10LikelihoodOfAFzero = VALUE_NOT_CALCULATED;
        log10LikelihoodsForAFGt0CacheIndex = 0;
        log10LikelihoodsForAFGt0Sum = null;
        allelesUsedInGenotyping = null;
        if (alleleCountsOfMAP.length < ensureAltAlleleCapacity) {
            // Grow geometrically (at least doubling) so repeated resets don't reallocate each time.
            final int newCapacity = Math.max(ensureAltAlleleCapacity, alleleCountsOfMAP.length << 1);
            alleleCountsOfMAP = new int[newCapacity];
            alleleCountsOfMLE = new int[newCapacity];
        } else {
            Arrays.fill(alleleCountsOfMLE, 0);
            Arrays.fill(alleleCountsOfMAP, 0);
        }
        // Sentinel fill; only indices below the cache index are ever summed
        // (see getLog10LikelihoodOfAFNotZero), so the fill value is never read as data.
        Arrays.fill(log10LikelihoodsForAFGt0, Double.POSITIVE_INFINITY);
    }

    /**
     * Reset the data in this results object, so that it can be used in a subsequent AF calculation
     *
     * Resetting of the data is done by the calculation model itself, so shouldn't be done by callers any longer
     */
    void reset() {
        log10MLE = log10MAP = log10LikelihoodOfAFzero = VALUE_NOT_CALCULATED;
        log10LikelihoodsForAFGt0CacheIndex = 0;
        log10LikelihoodsForAFGt0Sum = null;
        allelesUsedInGenotyping = null;
        Arrays.fill(alleleCountsOfMLE, 0);
        Arrays.fill(alleleCountsOfMAP, 0);
        // Same sentinel fill as reset(int); see comment there.
        Arrays.fill(log10LikelihoodsForAFGt0, Double.POSITIVE_INFINITY);
    }

    /**
     * Update the maximum log10 likelihoods seen, if log10LofKs is higher, and the corresponding ACs of this state
     *
     * @param log10LofK the likelihood of our current configuration state, cannot be the 0 state
     * @param alleleCountsForK the allele counts for this state
     */
    void updateMLEifNeeded(final double log10LofK, final int[] alleleCountsForK) {
        // Every non-zero-AF likelihood is accumulated for the AF>0 sum, regardless of
        // whether it improves the MLE.
        addToLikelihoodsCache(log10LofK);

        if ( log10LofK > log10MLE ) {
            log10MLE = log10LofK;
            System.arraycopy(alleleCountsForK, 0, alleleCountsOfMLE, 0, alleleCountsForK.length);
        }
    }

    /**
     * Update the maximum log10 posterior seen, if log10PofKs is higher, and the corresponding ACs of this state
     *
     * @param log10PofK the posterior of our current configuration state
     * @param alleleCountsForK the allele counts for this state
     */
    void updateMAPifNeeded(final double log10PofK, final int[] alleleCountsForK) {
        if ( log10PofK > log10MAP ) {
            log10MAP = log10PofK;
            System.arraycopy(alleleCountsForK, 0, alleleCountsOfMAP, 0, alleleCountsForK.length);
        }
    }

    private void addToLikelihoodsCache(final double log10LofK) {
        // add to the cache
        log10LikelihoodsForAFGt0[log10LikelihoodsForAFGt0CacheIndex++] = log10LofK;

        // if we've filled up the cache, then condense by summing up all of the values and placing the sum back into the first cell
        if ( log10LikelihoodsForAFGt0CacheIndex == LIKELIHOODS_CACHE_SIZE) {
            final double temporarySum = MathUtils.log10SumLog10(log10LikelihoodsForAFGt0, 0, log10LikelihoodsForAFGt0CacheIndex);
            Arrays.fill(log10LikelihoodsForAFGt0, Double.POSITIVE_INFINITY);
            log10LikelihoodsForAFGt0[0] = temporarySum;
            log10LikelihoodsForAFGt0CacheIndex = 1;
        }
    }

    void setLog10LikelihoodOfAFzero(final double log10LikelihoodOfAFzero) {
        this.log10LikelihoodOfAFzero = log10LikelihoodOfAFzero;
        // The AF==0 state also competes for the MLE; if it wins, the MLE AC vector is all zeros.
        if ( log10LikelihoodOfAFzero > log10MLE ) {
            log10MLE = log10LikelihoodOfAFzero;
            Arrays.fill(alleleCountsOfMLE, 0);
        }
    }

    void setLog10PosteriorOfAFzero(final double log10PosteriorOfAFzero) {
        // Same idea as setLog10LikelihoodOfAFzero, but for the MAP.
        if ( log10PosteriorOfAFzero > log10MAP ) {
            log10MAP = log10PosteriorOfAFzero;
            Arrays.fill(alleleCountsOfMAP, 0);
        }
    }

    /**
     * Set the list of alleles used in genotyping
     *
     * @param allelesUsedInGenotyping the list of alleles, where the first allele is reference
     */
    void setAllelesUsedInGenotyping(final List<Allele> allelesUsedInGenotyping) {
        if ( allelesUsedInGenotyping == null || allelesUsedInGenotyping.isEmpty() ) {
            throw new IllegalArgumentException("allelesUsedInGenotyping cannot be null or empty");
        }
        if ( allelesUsedInGenotyping.get(0).isNonReference() ) {
            throw new IllegalArgumentException("The first element of allelesUsedInGenotyping must be the reference allele");
        }

        this.allelesUsedInGenotyping = allelesUsedInGenotyping;
    }

    /**
     * Grows the MLE/MAP count arrays (via a full reset) if they cannot hold {@code capacity} alt alleles.
     * Note that growing implies resetting all tracked state.
     */
    public void ensureMaximumAlleleCapacity(final int capacity) {
        if (this.alleleCountsOfMAP.length < capacity) {
            reset(capacity);
        }
    }
}
/*
 * Copyright 2014 The Netty Project
 *
 * The Netty Project licenses this file to you under the Apache License, version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at:
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package io.netty.handler.codec.http2;

import static io.netty.handler.codec.http.HttpMethod.GET;
import static io.netty.handler.codec.http.HttpMethod.POST;
import static io.netty.handler.codec.http.HttpVersion.HTTP_1_1;
import static io.netty.handler.codec.http2.Http2TestUtil.as;
import static io.netty.util.CharsetUtil.UTF_8;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyBoolean;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Matchers.anyShort;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import io.netty.bootstrap.Bootstrap;
import io.netty.bootstrap.ServerBootstrap;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.channel.Channel;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.ChannelPipeline;
import io.netty.channel.ChannelPromise;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.nio.NioServerSocketChannel;
import io.netty.channel.socket.nio.NioSocketChannel;
import io.netty.handler.codec.http.DefaultFullHttpRequest;
import io.netty.handler.codec.http.DefaultHttpContent;
import io.netty.handler.codec.http.DefaultHttpRequest;
import io.netty.handler.codec.http.DefaultLastHttpContent;
import io.netty.handler.codec.http.FullHttpRequest;
import io.netty.handler.codec.http.HttpHeaderNames;
import io.netty.handler.codec.http.HttpHeaders;
import io.netty.handler.codec.http.HttpRequest;
import io.netty.handler.codec.http.LastHttpContent;
import io.netty.handler.codec.http2.Http2TestUtil.FrameCountDown;
import io.netty.util.NetUtil;
import io.netty.util.concurrent.Future;

import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.CountDownLatch;

import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;

/**
 * Testing the {@link HttpToHttp2ConnectionHandler} for {@link FullHttpRequest} objects into HTTP/2 frames
 *
 * Each test boots a real client/server NIO pair on the loopback interface, sends an
 * HTTP/1.1 request through the client-side translating handler, and verifies via the
 * mocked server-side {@link Http2FrameListener} which HTTP/2 frames arrived.
 */
public class HttpToHttp2ConnectionHandlerTest {
    // Upper bound for all blocking waits (latches and write futures) in seconds.
    private static final int WAIT_TIME_SECONDS = 5;

    @Mock
    private Http2FrameListener clientListener;

    @Mock
    private Http2FrameListener serverListener;

    private ServerBootstrap sb;
    private Bootstrap cb;
    private Channel serverChannel;
    private Channel clientChannel;
    // Counts every frame the server receives; tests size it via bootstrapEnv.
    private CountDownLatch requestLatch;
    private CountDownLatch serverSettingsAckLatch;
    // Non-null only for tests that expect trailing headers (see bootstrapEnv).
    private CountDownLatch trailersLatch;
    private FrameCountDown serverFrameCountDown;

    @Before
    public void setup() throws Exception {
        MockitoAnnotations.initMocks(this);
    }

    @After
    public void teardown() throws Exception {
        serverChannel.close().sync();
        // Shut down all three event loop groups and wait for each to terminate.
        Future<?> serverGroup = sb.group().shutdownGracefully(0, 0, MILLISECONDS);
        Future<?> serverChildGroup = sb.childGroup().shutdownGracefully(0, 0, MILLISECONDS);
        Future<?> clientGroup = cb.group().shutdownGracefully(0, 0, MILLISECONDS);
        serverGroup.sync();
        serverChildGroup.sync();
        clientGroup.sync();
    }

    @Test
    public void testJustHeadersRequest() throws Exception {
        bootstrapEnv(2, 1, 0);
        final FullHttpRequest request = new DefaultFullHttpRequest(HTTP_1_1, GET, "/example");
        final HttpHeaders httpHeaders = request.headers();
        // Pin the stream id explicitly so the verification below can match on eq(5).
        httpHeaders.setInt(HttpUtil.ExtensionHeaderNames.STREAM_ID.text(), 5);
        httpHeaders.set(HttpHeaderNames.HOST, "http://my-user_name@www.example.org:5555/example");
        httpHeaders.set(HttpUtil.ExtensionHeaderNames.AUTHORITY.text(), "www.example.org:5555");
        httpHeaders.set(HttpUtil.ExtensionHeaderNames.SCHEME.text(), "http");
        httpHeaders.add("foo", "goo");
        httpHeaders.add("foo", "goo2");
        httpHeaders.add("foo2", "goo2");
        final Http2Headers http2Headers = new DefaultHttp2Headers().method(as("GET")).path(as("/example"))
                .authority(as("www.example.org:5555")).scheme(as("http"))
                .add(as("foo"), as("goo")).add(as("foo"), as("goo2"))
                .add(as("foo2"), as("goo2"));
        ChannelPromise writePromise = newPromise();
        ChannelFuture writeFuture = clientChannel.writeAndFlush(request, writePromise);

        assertTrue(writePromise.awaitUninterruptibly(WAIT_TIME_SECONDS, SECONDS));
        assertTrue(writePromise.isSuccess());
        assertTrue(writeFuture.awaitUninterruptibly(WAIT_TIME_SECONDS, SECONDS));
        assertTrue(writeFuture.isSuccess());
        awaitRequests();
        // A bodiless GET must arrive as a single HEADERS frame with endStream == true
        // and no DATA frames at all.
        verify(serverListener).onHeadersRead(any(ChannelHandlerContext.class), eq(5), eq(http2Headers), eq(0),
                anyShort(), anyBoolean(), eq(0), eq(true));
        verify(serverListener, never()).onDataRead(any(ChannelHandlerContext.class), anyInt(),
                any(ByteBuf.class), anyInt(), anyBoolean());
    }

    @Test
    public void testRequestWithBody() throws Exception {
        final String text = "foooooogoooo";
        final List<String> receivedBuffers = Collections.synchronizedList(new ArrayList<String>());
        // Capture the DATA payload as a String before Netty releases the buffer.
        doAnswer(new Answer<Void>() {
            @Override
            public Void answer(InvocationOnMock in) throws Throwable {
                receivedBuffers.add(((ByteBuf) in.getArguments()[2]).toString(UTF_8));
                return null;
            }
        }).when(serverListener).onDataRead(any(ChannelHandlerContext.class), eq(3),
                any(ByteBuf.class), eq(0), eq(true));
        bootstrapEnv(3, 1, 0);
        final FullHttpRequest request = new DefaultFullHttpRequest(HTTP_1_1, POST, "/example",
                Unpooled.copiedBuffer(text, UTF_8));
        final HttpHeaders httpHeaders = request.headers();
        httpHeaders.set(HttpHeaderNames.HOST, "http://your_user-name123@www.example.org:5555/example");
        httpHeaders.add("foo", "goo");
        httpHeaders.add("foo", "goo2");
        httpHeaders.add("foo2", "goo2");
        final Http2Headers http2Headers = new DefaultHttp2Headers().method(as("POST")).path(as("/example"))
                .authority(as("www.example.org:5555")).scheme(as("http"))
                .add(as("foo"), as("goo")).add(as("foo"), as("goo2"))
                .add(as("foo2"), as("goo2"));
        ChannelPromise writePromise = newPromise();
        ChannelFuture writeFuture = clientChannel.writeAndFlush(request, writePromise);

        assertTrue(writePromise.awaitUninterruptibly(WAIT_TIME_SECONDS, SECONDS));
        assertTrue(writePromise.isSuccess());
        assertTrue(writeFuture.awaitUninterruptibly(WAIT_TIME_SECONDS, SECONDS));
        assertTrue(writeFuture.isSuccess());
        awaitRequests();
        // HEADERS (endStream == false) followed by the body in a DATA frame ending the stream.
        verify(serverListener).onHeadersRead(any(ChannelHandlerContext.class), eq(3), eq(http2Headers), eq(0),
                anyShort(), anyBoolean(), eq(0), eq(false));
        verify(serverListener).onDataRead(any(ChannelHandlerContext.class), eq(3), any(ByteBuf.class), eq(0),
                eq(true));
        assertEquals(1, receivedBuffers.size());
        assertEquals(text, receivedBuffers.get(0));
    }

    @Test
    public void testRequestWithBodyAndTrailingHeaders() throws Exception {
        final String text = "foooooogoooo";
        final List<String> receivedBuffers = Collections.synchronizedList(new ArrayList<String>());
        // Body DATA frame must NOT end the stream here (endStream == false),
        // because the trailing headers terminate it instead.
        doAnswer(new Answer<Void>() {
            @Override
            public Void answer(InvocationOnMock in) throws Throwable {
                receivedBuffers.add(((ByteBuf) in.getArguments()[2]).toString(UTF_8));
                return null;
            }
        }).when(serverListener).onDataRead(any(ChannelHandlerContext.class), eq(3),
                any(ByteBuf.class), eq(0), eq(false));
        bootstrapEnv(4, 1, 1);
        final FullHttpRequest request = new DefaultFullHttpRequest(HTTP_1_1, POST, "/example",
                Unpooled.copiedBuffer(text, UTF_8));
        final HttpHeaders httpHeaders = request.headers();
        httpHeaders.set(HttpHeaderNames.HOST, "http://your_user-name123@www.example.org:5555/example");
        httpHeaders.add("foo", "goo");
        httpHeaders.add("foo", "goo2");
        httpHeaders.add("foo2", "goo2");
        final Http2Headers http2Headers = new DefaultHttp2Headers().method(as("POST")).path(as("/example"))
                .authority(as("www.example.org:5555")).scheme(as("http"))
                .add(as("foo"), as("goo")).add(as("foo"), as("goo2"))
                .add(as("foo2"), as("goo2"));
        request.trailingHeaders().add("trailing", "bar");
        final Http2Headers http2TrailingHeaders = new DefaultHttp2Headers().add(as("trailing"), as("bar"));
        ChannelPromise writePromise = newPromise();
        ChannelFuture writeFuture = clientChannel.writeAndFlush(request, writePromise);

        assertTrue(writePromise.awaitUninterruptibly(WAIT_TIME_SECONDS, SECONDS));
        assertTrue(writePromise.isSuccess());
        assertTrue(writeFuture.awaitUninterruptibly(WAIT_TIME_SECONDS, SECONDS));
        assertTrue(writeFuture.isSuccess());
        awaitRequests();
        // HEADERS, then DATA, then a second HEADERS frame carrying the trailers with endStream == true.
        verify(serverListener).onHeadersRead(any(ChannelHandlerContext.class), eq(3), eq(http2Headers), eq(0),
                anyShort(), anyBoolean(), eq(0), eq(false));
        verify(serverListener).onDataRead(any(ChannelHandlerContext.class), eq(3), any(ByteBuf.class), eq(0),
                eq(false));
        verify(serverListener).onHeadersRead(any(ChannelHandlerContext.class), eq(3), eq(http2TrailingHeaders),
                eq(0), anyShort(), anyBoolean(), eq(0), eq(true));
        assertEquals(1, receivedBuffers.size());
        assertEquals(text, receivedBuffers.get(0));
    }

    @Test
    public void testChunkedRequestWithBodyAndTrailingHeaders() throws Exception {
        final String text = "foooooo";
        final String text2 = "goooo";
        final List<String> receivedBuffers = Collections.synchronizedList(new ArrayList<String>());
        doAnswer(new Answer<Void>() {
            @Override
            public Void answer(InvocationOnMock in) throws Throwable {
                receivedBuffers.add(((ByteBuf) in.getArguments()[2]).toString(UTF_8));
                return null;
            }
        }).when(serverListener).onDataRead(any(ChannelHandlerContext.class), eq(3),
                any(ByteBuf.class), eq(0), eq(false));
        bootstrapEnv(4, 1, 1);
        // Chunked transfer: headers, then two separate content writes, flushed together.
        final HttpRequest request = new DefaultHttpRequest(HTTP_1_1, POST, "/example");
        final HttpHeaders httpHeaders = request.headers();
        httpHeaders.set(HttpHeaderNames.HOST, "http://your_user-name123@www.example.org:5555/example");
        httpHeaders.add(HttpHeaderNames.TRANSFER_ENCODING, "chunked");
        httpHeaders.add("foo", "goo");
        httpHeaders.add("foo", "goo2");
        httpHeaders.add("foo2", "goo2");
        final Http2Headers http2Headers = new DefaultHttp2Headers().method(as("POST")).path(as("/example"))
                .authority(as("www.example.org:5555")).scheme(as("http"))
                .add(as("foo"), as("goo")).add(as("foo"), as("goo2"))
                .add(as("foo2"), as("goo2"));
        final DefaultHttpContent httpContent = new DefaultHttpContent(Unpooled.copiedBuffer(text, UTF_8));
        final LastHttpContent lastHttpContent = new DefaultLastHttpContent(Unpooled.copiedBuffer(text2, UTF_8));

        lastHttpContent.trailingHeaders().add("trailing", "bar");

        final Http2Headers http2TrailingHeaders = new DefaultHttp2Headers().add(as("trailing"), as("bar"));
        ChannelPromise writePromise = newPromise();
        ChannelFuture writeFuture = clientChannel.write(request, writePromise);
        ChannelPromise contentPromise = newPromise();
        ChannelFuture contentFuture = clientChannel.write(httpContent, contentPromise);
        ChannelPromise lastContentPromise = newPromise();
        ChannelFuture lastContentFuture = clientChannel.write(lastHttpContent, lastContentPromise);
        clientChannel.flush();

        assertTrue(writePromise.awaitUninterruptibly(WAIT_TIME_SECONDS, SECONDS));
        assertTrue(writePromise.isSuccess());
        assertTrue(writeFuture.awaitUninterruptibly(WAIT_TIME_SECONDS, SECONDS));
        assertTrue(writeFuture.isSuccess());
        assertTrue(contentPromise.awaitUninterruptibly(WAIT_TIME_SECONDS, SECONDS));
        assertTrue(contentPromise.isSuccess());
        assertTrue(contentFuture.awaitUninterruptibly(WAIT_TIME_SECONDS, SECONDS));
        assertTrue(contentFuture.isSuccess());
        assertTrue(lastContentPromise.awaitUninterruptibly(WAIT_TIME_SECONDS, SECONDS));
        assertTrue(lastContentPromise.isSuccess());
        assertTrue(lastContentFuture.awaitUninterruptibly(WAIT_TIME_SECONDS, SECONDS));
        assertTrue(lastContentFuture.isSuccess());
        awaitRequests();
        verify(serverListener).onHeadersRead(any(ChannelHandlerContext.class), eq(3), eq(http2Headers), eq(0),
                anyShort(), anyBoolean(), eq(0), eq(false));
        verify(serverListener).onDataRead(any(ChannelHandlerContext.class), eq(3), any(ByteBuf.class), eq(0),
                eq(false));
        verify(serverListener).onHeadersRead(any(ChannelHandlerContext.class), eq(3), eq(http2TrailingHeaders),
                eq(0), anyShort(), anyBoolean(), eq(0), eq(true));
        assertEquals(1, receivedBuffers.size());
        // NOTE(review): both chunks appear to be coalesced into one DATA frame — confirmed
        // by the single-element receivedBuffers assertion above.
        assertEquals(text + text2, receivedBuffers.get(0));
    }

    /**
     * Builds the client/server channel pair for a test.
     *
     * @param requestCountDown        total number of frames the server must receive
     * @param serverSettingsAckCount  number of SETTINGS ACKs to wait for
     * @param trailersCount           number of trailing-header frames expected; 0 disables the trailers latch
     */
    private void bootstrapEnv(int requestCountDown, int serverSettingsAckCount, int trailersCount) throws Exception {
        requestLatch = new CountDownLatch(requestCountDown);
        serverSettingsAckLatch = new CountDownLatch(serverSettingsAckCount);
        trailersLatch = trailersCount == 0 ? null : new CountDownLatch(trailersCount);

        sb = new ServerBootstrap();
        cb = new Bootstrap();

        sb.group(new NioEventLoopGroup(), new NioEventLoopGroup());
        sb.channel(NioServerSocketChannel.class);
        sb.childHandler(new ChannelInitializer<Channel>() {
            @Override
            protected void initChannel(Channel ch) throws Exception {
                ChannelPipeline p = ch.pipeline();
                // Wrap the mocked listener so frame arrivals also count down the latches.
                serverFrameCountDown = new FrameCountDown(serverListener, serverSettingsAckLatch,
                        requestLatch, null, trailersLatch);
                p.addLast(new HttpToHttp2ConnectionHandler(true, serverFrameCountDown));
            }
        });

        cb.group(new NioEventLoopGroup());
        cb.channel(NioSocketChannel.class);
        cb.handler(new ChannelInitializer<Channel>() {
            @Override
            protected void initChannel(Channel ch) throws Exception {
                ChannelPipeline p = ch.pipeline();
                p.addLast(new HttpToHttp2ConnectionHandler(false, clientListener));
            }
        });

        // Bind to an ephemeral port, then connect the client to whatever port was assigned.
        serverChannel = sb.bind(new InetSocketAddress(0)).sync().channel();
        int port = ((InetSocketAddress) serverChannel.localAddress()).getPort();

        ChannelFuture ccf = cb.connect(new InetSocketAddress(NetUtil.LOCALHOST, port));
        assertTrue(ccf.awaitUninterruptibly().isSuccess());
        clientChannel = ccf.channel();
    }

    // Blocks until all expected frames (and, when applicable, trailers) have arrived.
    private void awaitRequests() throws Exception {
        assertTrue(requestLatch.await(WAIT_TIME_SECONDS, SECONDS));
        if (trailersLatch != null) {
            assertTrue(trailersLatch.await(WAIT_TIME_SECONDS, SECONDS));
        }
        assertTrue(serverSettingsAckLatch.await(WAIT_TIME_SECONDS, SECONDS));
    }

    private ChannelHandlerContext ctx() {
        return clientChannel.pipeline().firstContext();
    }

    private ChannelPromise newPromise() {
        return ctx().newPromise();
    }
}
package com.njnu.kai.practice.danmaku;

import android.animation.Animator;
import android.animation.ObjectAnimator;
import android.animation.ValueAnimator;
import android.annotation.TargetApi;
import android.content.Context;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.NinePatch;
import android.graphics.drawable.NinePatchDrawable;
import android.os.Build;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Message;
import android.support.v4.util.ArrayMap;
import android.util.AttributeSet;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.animation.LinearInterpolator;
import android.widget.FrameLayout;
import android.widget.ImageView;
import android.widget.TextView;

import com.njnu.kai.practice.R;
import com.njnu.kai.practice.danmaku.data.ExpPkg;
import com.njnu.kai.practice.danmaku.data.SongDanmaku;
import com.njnu.kai.support.LogUtils;
import com.njnu.kai.support.StringUtils;
import com.njnu.kai.support.ToastUtils;

import java.io.InputStream;
import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Random;

/**
 * A {@link FrameLayout} that plays "danmaku" (scrolling bullet comments) synchronized to a
 * song's playback position. Data lookup and bitmap decoding run on a private
 * {@link HandlerThread}; view binding and animation run on the main thread.
 *
 * @author hongkai.qian
 * @version 1.0.0
 * @since 15-8-18
 */
public class DanmakuView extends FrameLayout implements ValueAnimator.AnimatorUpdateListener, Animator.AnimatorListener, Handler.Callback {

    private static final String TAG = "DanmakuView";

    // Danmaku entries scanned by play time in findDanmaku(); assumed to be
    // sorted ascending by getTimeMs() -- TODO confirm with the data source.
    private List<SongDanmaku> mSongDanmakuList;
    // Expression-package name -> package, rebuilt in setExpPkgList().
    private ArrayMap<String, ExpPkg> mExpPkgMap;
    // Expression md5 -> expression, for fast lookup via SongDanmaku.getMd5().
    private HashMap<String, ExpPkg.Exp> mExpMap;

    // How long one item takes to scroll across the view, in milliseconds.
    private static final int DANMAKU_DURATION_MS = 5000;
    // Number of horizontal "columns" that new items cycle through.
    private static final int MAX_COUNT_PER_LINE = 3;

    private long mPrePlayTime;
    private long mPlayTime;
    // Index of the last danmaku consumed from mSongDanmakuList; -1 means none yet.
    private int mIndex = -1;
    // Next column (0..MAX_COUNT_PER_LINE-1) used to place a newly animated item.
    private int mHorizontalPos = 0;
    private static final Random RANDOM = new Random(System.currentTimeMillis());

    // Worker thread for data lookup/decoding; created on attach, quit on detach.
    private HandlerThread mHandlerThread;
    private Handler mThreadHandler;
    private Handler mMainHandler = new MainHandler(this);

    // Posted from the worker thread to the main thread once a danmaku's data is ready.
    private static final int WHAT_DATA_PREPARD = 0;
    // Posted to the worker thread when the playback position changes.
    private static final int WHAT_SET_PLAY_TIME = 1;

    /**
     * Main-thread handler that holds the view only weakly so pending messages
     * cannot leak the view (and its Activity context).
     */
    static class MainHandler extends Handler {
        private WeakReference<DanmakuView> mWeakReference;

        public MainHandler(DanmakuView view) {
            mWeakReference = new WeakReference<DanmakuView>(view);
        }

        @Override
        public void handleMessage(Message msg) {
            if (mWeakReference != null && mWeakReference.get() != null) {
                mWeakReference.get().handleMessage(msg);
            }
        }
    }

    public DanmakuView(Context context) {
        super(context);
        initView(context);
    }

    public DanmakuView(Context context, AttributeSet attrs) {
        super(context, attrs);
        initView(context);
    }

    public DanmakuView(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
        initView(context);
    }

    @TargetApi(Build.VERSION_CODES.LOLLIPOP)
    public DanmakuView(Context context, AttributeSet attrs, int defStyleAttr, int defStyleRes) {
        super(context, attrs, defStyleAttr, defStyleRes);
        initView(context);
    }

    // Intentionally empty; kept as the common initialization hook for all constructors.
    private void initView(Context context) {
    }

    /** Resets playback bookkeeping so the danmaku list is scanned from the start again. */
    public void reset() {
        mIndex = -1;
        mPrePlayTime = 0;
        mPlayTime = 0;
    }

    @Override
    protected void onAttachedToWindow() {
        super.onAttachedToWindow();
        if (mHandlerThread == null) {
            mHandlerThread = new HandlerThread("songDanmakuThread");
            mHandlerThread.start();
            // This view itself is the Handler.Callback for the worker thread.
            mThreadHandler = new Handler(mHandlerThread.getLooper(), this);
        }
    }

    @Override
    protected void onDetachedFromWindow() {
        super.onDetachedFromWindow();
        if (mHandlerThread != null) {
            mHandlerThread.quit();
            mHandlerThread = null;
            mThreadHandler = null;
        }
    }

    /**
     * Rebuilds both expression lookup maps from the given packages, and
     * back-fills each Exp with the name of its owning package.
     */
    public void setExpPkgList(List<ExpPkg> expPkgList) {
        if (mExpPkgMap == null) {
            mExpPkgMap = new ArrayMap<>();
        } else {
            mExpPkgMap.clear();
        }
        if (mExpMap == null) {
            mExpMap = new HashMap<>();
        } else {
            mExpMap.clear();
        }
        if (expPkgList != null) {
            for (ExpPkg expPkg : expPkgList) {
                mExpPkgMap.put(expPkg.getPkg(), expPkg);
                final ArrayList<ExpPkg.Exp> expList = expPkg.getExpList();
                if (expList != null && !expList.isEmpty()) {
                    for (ExpPkg.Exp exp : expList) {
                        exp.setPkg(expPkg.getPkg());
                        mExpMap.put(exp.getMd5(), exp);
                    }
                }
            }
        }
    }

    public void setSongDanmakuList(List<SongDanmaku> songDanmakuList) {
        mSongDanmakuList = songDanmakuList;
    }

    /**
     * Reports the current playback position in ms. Processing happens on the
     * worker thread; any not-yet-handled position message is dropped first so
     * only the latest position is processed.
     * NOTE(review): assumes the view is attached (mThreadHandler non-null) -- confirm callers.
     */
    public void setPlayTime(int time) {
        mThreadHandler.removeMessages(WHAT_SET_PLAY_TIME);
        final Message message = mThreadHandler.obtainMessage(WHAT_SET_PLAY_TIME, time, 0);
        mThreadHandler.sendMessage(message);
    }

    // Main thread: binds prepared data to a (recycled) item view and starts its animation.
    private void giveViewMagic(DataCarrier dataCarrier) {
        final View view = obtainView();
        view.setVisibility(View.VISIBLE);
        giveViewData(view, dataCarrier.mSongDanmaku, dataCarrier.mExp, dataCarrier.mBitmap);
        animateView(view);
    }

    private ExpPkg.Exp getExp(SongDanmaku danmaku) {
        return mExpMap.get(danmaku.getMd5());
    }

    // Scrolls the item vertically from the bottom edge to just above the top edge,
    // placed in the next horizontal column; columns advance round-robin.
    private void animateView(View view) {
        final ViewGroup.LayoutParams layoutParams = view.getLayoutParams();
        ValueAnimator verticalTranslateAnim = ObjectAnimator.ofFloat(view, "y", getBottom(), -layoutParams.height);
        view.setX(mHorizontalPos * layoutParams.width);
        verticalTranslateAnim.setDuration(DANMAKU_DURATION_MS);
//        verticalTranslateAnim.setRepeatCount(ValueAnimator.INFINITE);
//        verticalTranslateAnim.setRepeatMode(ValueAnimator.REVERSE);
        verticalTranslateAnim.setInterpolator(new LinearInterpolator());
        verticalTranslateAnim.addUpdateListener(this);
        verticalTranslateAnim.addListener(this);
        verticalTranslateAnim.start();
        ++mHorizontalPos;
        if (mHorizontalPos >= MAX_COUNT_PER_LINE) {
            mHorizontalPos = 0;
        }
    }

    private void giveViewData(View view, SongDanmaku danmaku, ExpPkg.Exp exp, Bitmap bitmap) {
        ViewHolder viewHolder = (ViewHolder) view.getTag(R.id.tag_view_holder);
        viewHolder.flushView(danmaku, exp, bitmap);
    }

    // Simple view pool; views come back via recycleView() when their animation ends.
    private ArrayList<View> mViewPool;

    // Returns a pooled item view, inflating (and adding) a new one when the pool is empty.
    private View obtainView() {
        if (mViewPool == null) {
            mViewPool = new ArrayList<View>();
        }
        if (mViewPool.isEmpty()) {
            View view = LayoutInflater.from(getContext()).inflate(R.layout.song_danmaku_item, null);
            // Square cells: one column's share of this view's width.
            final int width = getWidth() / MAX_COUNT_PER_LINE;
            FrameLayout.LayoutParams layoutParams = new FrameLayout.LayoutParams(width, width);
            addView(view, layoutParams);
            view.setOnClickListener(mOnClickListener);
            view.setTag(R.id.tag_view_holder, new ViewHolder(view));
            return view;
        } else {
            return mViewPool.remove(mViewPool.size() - 1);
        }
    }

    private void recycleView(View view) {
        ViewHolder viewHolder = (ViewHolder) view.getTag(R.id.tag_view_holder);
        viewHolder.recycle();
        mViewPool.add(view);
    }

    /**
     * Worker thread: advances mIndex over entries whose time has already been
     * reached and returns the latest such entry, or null if no entry is due.
     */
    private SongDanmaku findDanmaku() {
        int danmakuCount = mSongDanmakuList != null ? mSongDanmakuList.size() : 0;
        if (danmakuCount > 0 && mIndex < danmakuCount) {
            SongDanmaku preDanmaku;
            SongDanmaku danmaku = null;
            for (int idx = mIndex + 1; idx < danmakuCount; ++idx) {
                preDanmaku = danmaku;
                danmaku = mSongDanmakuList.get(idx);
                if (danmaku.getTimeMs() > mPlayTime) {
                    if (preDanmaku != null) {
                        mIndex = idx - 1;
                        return preDanmaku;
                    } else {
                        return null;
                    }
                }
            }
            // Every remaining entry is due; consume through the end of the list.
            mIndex = danmakuCount - 1;
            return danmaku;
        }
        return null;
    }

    // 50 ms in nanoseconds; part of the disabled redraw-throttling experiment below.
    private static final long DRAW_INTERVAL = 50 * 1000 * 1000;
    private long mNanoTime;

    @Override
    public void onAnimationUpdate(ValueAnimator animation) {
        if (animation instanceof ObjectAnimator) {
            ObjectAnimator objectAnimator = (ObjectAnimator) animation;
            final View view = (View) objectAnimator.getTarget();
            ViewHolder viewHolder = (ViewHolder) view.getTag(R.id.tag_view_holder);
        } else {
            LogUtils.e(TAG, "onAnimationUpdate not ObjectAnimator type=%s", animation.getClass().getSimpleName());
        }
        // Disabled throttle: would limit invalidate() to once per DRAW_INTERVAL.
//        long curNanoTime = System.nanoTime();
//        boolean draw = curNanoTime - mNanoTime > DRAW_INTERVAL;
//        if (draw) {
//            mNanoTime = curNanoTime;
        invalidate();
//        }
    }

    @Override
    public void onAnimationStart(Animator animation) {
//        LogUtils.e(TAG, "onAnimationStart type=%s", animation.getClass().getSimpleName());
    }

    @Override
    public void onAnimationEnd(Animator animation) {
//        LogUtils.e(TAG, "onAnimationEnd type=%s", animation.getClass().getSimpleName());
        if (animation instanceof ObjectAnimator) {
            ObjectAnimator objectAnimator = (ObjectAnimator) animation;
            final View view = (View) objectAnimator.getTarget();
            // Item has scrolled off screen; return it to the pool.
            recycleView(view);
        } else {
            LogUtils.e(TAG, "onAnimationEnd not ObjectAnimator type=%s", animation.getClass().getSimpleName());
        }
    }

    @Override
    public void onAnimationCancel(Animator animation) {
        LogUtils.e(TAG, "onAnimationCancel type=%s", animation.getClass().getSimpleName());
    }

    @Override
    public void onAnimationRepeat(Animator animation) {
        LogUtils.e(TAG, "onAnimationRepeat type=%s", animation.getClass().getSimpleName());
    }

    // Debug helper: tapping an item shows its timestamp and text in a toast.
    private View.OnClickListener mOnClickListener = new OnClickListener() {
        @Override
        public void onClick(View v) {
            ViewHolder viewHolder = (ViewHolder) v.getTag(R.id.tag_view_holder);
            final SongDanmaku songDanmaku = viewHolder.mSongDanmaku;
            ToastUtils.showToast(String.format("time=%d txt=%s", songDanmaku.getTimeMs(), songDanmaku.getText()));
        }
    };

    /**
     * Handler.Callback shared by both handlers: WHAT_SET_PLAY_TIME arrives on
     * the worker thread (lookup + bitmap decode), WHAT_DATA_PREPARD arrives on
     * the main thread (view binding + animation).
     */
    @Override
    public boolean handleMessage(Message msg) {
        boolean dealed = true;
        switch (msg.what) {
            case WHAT_SET_PLAY_TIME:
                mPrePlayTime = mPlayTime;
                mPlayTime = msg.arg1;
                final SongDanmaku danmaku = findDanmaku();
                if (danmaku != null) {
                    final ExpPkg.Exp exp = getExp(danmaku);
                    if (exp != null) {
                        Bitmap bitmap = getExpBitmapInBkg(exp);
                        mMainHandler.sendMessage(mMainHandler.obtainMessage(WHAT_DATA_PREPARD, new DataCarrier(danmaku, exp, bitmap)));
                    }
                }
                break;
            case WHAT_DATA_PREPARD:
                giveViewMagic((DataCarrier) msg.obj);
                break;
            default:
                dealed = false;
                break;
        }
        return dealed;
    }

    public static final String ANDROID_ASSET = "file:///android_asset/";

    // Worker thread: resolves the expression's full path and decodes its bitmap.
    // Only asset-backed packages are supported here; anything else yields null.
    private Bitmap getExpBitmapInBkg(ExpPkg.Exp exp) {
        final ExpPkg expPkg = mExpPkgMap.get(exp.getPkg());
        String expFullPath = expPkg.getFullPkgPath() + exp.getFileName();
        if (expFullPath.startsWith(ANDROID_ASSET)) {
            final String assetPath = expFullPath.substring(ANDROID_ASSET.length(), expFullPath.length());
            return decodeAssetBitmap(getContext(), assetPath);
        } else {
            return null;
        }
    }

    // Decodes a bitmap from the app's assets; returns null on any failure.
    private static Bitmap decodeAssetBitmap(Context context, String assetPath) {
        InputStream inputStream = null;
        try {
            inputStream = context.getResources().getAssets().open(assetPath);
            return BitmapFactory.decodeStream(inputStream);
        } catch (Throwable t) {
            t.printStackTrace();
            return null;
        } finally {
            if (inputStream != null) {
                try {
                    inputStream.close();
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }
    }

    // Payload shipped from the worker thread to the main thread via WHAT_DATA_PREPARD.
    private static class DataCarrier {
        SongDanmaku mSongDanmaku;
        ExpPkg.Exp mExp;
        Bitmap mBitmap;

        DataCarrier(SongDanmaku songDanmaku, ExpPkg.Exp exp, Bitmap bitmap) {
            mSongDanmaku = songDanmaku;
            mExp = exp;
            mBitmap = bitmap;
        }
    }

    // Per-item view cache; stored on the item view via the R.id.tag_view_holder tag.
    private final static class ViewHolder {
        private View mRootView;
        private TextView mTvLeft;
        private TextView mTvRight;
        private ImageView mImageView;
        private SongDanmaku mSongDanmaku;
        private ExpPkg.Exp mExp;

        private ViewHolder(View view) {
            mRootView = view;
            mImageView = (ImageView) view.findViewById(R.id.imageview);
            mTvLeft = (TextView) view.findViewById(R.id.tv_left);
            mTvRight = (TextView) view.findViewById(R.id.tv_right);
            // Resource backgrounds are the fallback; the nine-patch assets below override them.
            mTvLeft.setBackgroundResource(R.drawable.left_txt_bkg);
            mTvRight.setBackgroundResource(R.drawable.right_txt_bkg);
            final Resources resources = mRootView.getContext().getResources();
            final Bitmap leftBitmap = decodeAssetBitmap(view.getContext(), "textBoxPkg/default/left_blue.9.png");
            if (leftBitmap != null) {
                final NinePatchDrawable leftNinePatchDrawable = dealNinePathDrawable(resources, leftBitmap);
                if (leftNinePatchDrawable != null) {
                    mTvLeft.setBackgroundDrawable(leftNinePatchDrawable);
                }
            }
            final Bitmap rightBitmap = decodeAssetBitmap(view.getContext(), "textBoxPkg/default/right_blue.9.png");
            if (rightBitmap != null) {
                final NinePatchDrawable rightNinePatchDrawable = dealNinePathDrawable(resources, rightBitmap);
                if (rightNinePatchDrawable != null) {
                    mTvRight.setBackgroundDrawable(rightNinePatchDrawable);
                }
            }
        }

        // Clears content and hides the root so the view can go back to the pool.
        private void recycle() {
            mTvLeft.setText(null);
            mTvRight.setText(null);
            mImageView.setImageDrawable(null);
            mRootView.setVisibility(View.GONE);
            mSongDanmaku = null;
            mExp = null;
        }

        // Binds one danmaku: the expression bitmap (or a placeholder) plus its
        // text shown randomly in either the left or the right bubble.
        public void flushView(SongDanmaku danmaku, ExpPkg.Exp exp, Bitmap bitmap) {
            mSongDanmaku = danmaku;
            mExp = exp;
            if (bitmap != null) {
                mImageView.setImageBitmap(bitmap);
            } else {
                mImageView.setImageResource(R.drawable.ic_launcher);
            }
            final String text = danmaku.getText();
            if (!StringUtils.isEmpty(text)) {
                if (RANDOM.nextBoolean()) {
                    mTvLeft.setText(text);
                    mTvLeft.setVisibility(View.VISIBLE);
                    mTvRight.setVisibility(View.GONE);
                } else {
                    mTvRight.setText(text);
                    mTvLeft.setVisibility(View.GONE);
                    mTvRight.setVisibility(View.VISIBLE);
                }
            } else {
                mTvLeft.setVisibility(View.GONE);
                mTvRight.setVisibility(View.GONE);
            }
        }
    }

    // Wraps a *.9.png bitmap (whose nine-patch chunk is still attached) in a
    // NinePatchDrawable; returns null when the bitmap carries no valid chunk.
    private static NinePatchDrawable dealNinePathDrawable(Resources resources, Bitmap bitmap) {
        if (bitmap != null) {
            byte[] nineChunk = bitmap.getNinePatchChunk();
            if (nineChunk != null && NinePatch.isNinePatchChunk(nineChunk)) {
                final NinePatchChunk npc = NinePatchChunk.deserialize(nineChunk);
                return new NinePatchDrawable(resources, bitmap, nineChunk, npc.getPaddingRect(), null);
            }
        }
        return null;
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.oozie.sla;

import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.List;

import org.apache.oozie.AppType;
import org.apache.oozie.FaultInjection;
import org.apache.oozie.client.event.SLAEvent.EventStatus;
import org.apache.oozie.client.rest.JsonBean;
import org.apache.oozie.command.SkipCommitFaultInjection;
import org.apache.oozie.executor.jpa.BatchQueryExecutor;
import org.apache.oozie.executor.jpa.BatchQueryExecutor.UpdateEntry;
import org.apache.oozie.executor.jpa.SLASummaryQueryExecutor;
import org.apache.oozie.executor.jpa.SLASummaryQueryExecutor.SLASummaryQuery;
import org.apache.oozie.service.JPAService;
import org.apache.oozie.service.Services;
import org.apache.oozie.test.XDataTestCase;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

/**
 * Testcase to check db operations on the SLA_SUMMARY table: plain insert,
 * insert followed by update, and rollback of a failed batch transaction.
 */
public class TestSLACalculationJPAExecutor extends XDataTestCase {

    Services services;
    Calendar cal;

    /** Boots the Oozie services container and a fresh Calendar per test. */
    @Override
    @Before
    protected void setUp() throws Exception {
        super.setUp();
        services = new Services();
        services.init();
        cal = Calendar.getInstance();
    }

    @Override
    @After
    protected void tearDown() throws Exception {
        services.destroy();
        super.tearDown();
    }

    /**
     * Test simple insert: a single SLASummaryBean written via the batch
     * executor must come back field-for-field identical.
     *
     * @throws Exception
     */
    @Test
    public void testInsert() throws Exception {
        JPAService jpaService = Services.get().get(JPAService.class);
        assertNotNull(jpaService);
        String wfId = "workflow-1";
        // Expected window: two days ago to three days ago; actual window: now to two days ahead.
        cal.setTime(new Date());
        cal.add(Calendar.DAY_OF_MONTH, -2);
        Date expStart = cal.getTime();
        cal.add(Calendar.DAY_OF_MONTH, -1);
        Date expEnd = cal.getTime();
        Date actStart = new Date();
        cal.add(Calendar.DAY_OF_MONTH, 2);
        Date actEnd = cal.getTime();
        SLASummaryBean bean2 = _createSLASummaryBean(wfId, "RUNNING", EventStatus.START_MISS, expStart, expEnd, 1000,
                actStart, actEnd, 2000, (byte) 1, actEnd);
        List<JsonBean> insertList = new ArrayList<JsonBean>();
        insertList.add(bean2);
        BatchQueryExecutor.getInstance().executeBatchInsertUpdateDelete(insertList, null, null);
        SLASummaryBean sBean = SLASummaryQueryExecutor.getInstance().get(SLASummaryQuery.GET_SLA_SUMMARY, wfId);
        // Round-trip check: every persisted field matches what was inserted.
        assertEquals(wfId, sBean.getId());
        assertEquals("RUNNING", sBean.getJobStatus());
        assertEquals(EventStatus.START_MISS, sBean.getEventStatus());
        assertEquals(expStart, sBean.getExpectedStart());
        assertEquals(expEnd, sBean.getExpectedEnd());
        assertEquals(1000, sBean.getExpectedDuration());
        assertEquals(actStart, sBean.getActualStart());
        assertEquals(actEnd, sBean.getActualEnd());
        assertEquals(2000, sBean.getActualDuration());
        assertEquals(actEnd, sBean.getLastModifiedTime());
    }

    /**
     * Test insert + update: an existing row updated via
     * UPDATE_SLA_SUMMARY_ALL must reflect the new values while keeping the
     * untouched columns.
     *
     * @throws Exception
     */
    @Test
    public void testInsertUpdate() throws Exception {
        JPAService jpaService = Services.get().get(JPAService.class);
        assertNotNull(jpaService);
        String wfId = "workflow-1";
        // initial insert
        cal.setTime(new Date());
        cal.add(Calendar.DAY_OF_MONTH, -2);
        Date expStart = cal.getTime();
        cal.add(Calendar.DAY_OF_MONTH, -1);
        Date expEnd = cal.getTime();
        Date actStart = new Date();
        SLASummaryBean bean2 = _createSLASummaryBean(wfId, "RUNNING", EventStatus.START_MISS, expStart, expEnd, 1000,
                actStart, null, 2000, (byte) 0, actStart);
        List<JsonBean> insertList = new ArrayList<JsonBean>();
        insertList.add(bean2);
        BatchQueryExecutor.getInstance().executeBatchInsertUpdateDelete(insertList, null, null);
        // update existing record
        Date newDate = new Date();
        bean2 = _createSLASummaryBean(wfId, "RUNNING", EventStatus.DURATION_MISS, expStart, expEnd, 1000, actStart,
                newDate, 2000, (byte) 1, newDate);
        bean2.setAppType(AppType.WORKFLOW_ACTION);
        // Fix: removed a dead "List<UpdateEntry> updateList" local that was
        // created but never used -- the update goes through executeUpdate directly.
        SLASummaryQueryExecutor.getInstance().executeUpdate(SLASummaryQuery.UPDATE_SLA_SUMMARY_ALL, bean2);
        SLASummaryBean sBean = SLASummaryQueryExecutor.getInstance().get(SLASummaryQuery.GET_SLA_SUMMARY, wfId);
        // check updated + original fields
        assertEquals(wfId, sBean.getId());
        assertEquals(EventStatus.DURATION_MISS, sBean.getEventStatus());
        assertEquals(expStart, sBean.getExpectedStart());
        assertEquals(expEnd, sBean.getExpectedEnd());
        assertEquals(1000, sBean.getExpectedDuration());
        assertEquals(actStart, sBean.getActualStart());
        assertEquals(newDate, sBean.getActualEnd());
        assertEquals(2000, sBean.getActualDuration());
        assertEquals(newDate, sBean.getLastModifiedTime());
    }

    /**
     * Test inserts and updates rollback: when commit is forced to fail via
     * fault injection, neither the update to the existing row nor the new
     * insert may be visible afterwards.
     *
     * @throws Exception
     */
    @Test
    public void testRollback() throws Exception {
        JPAService jpaService = Services.get().get(JPAService.class);
        assertNotNull(jpaService);
        String wfId1 = "workflow-1";
        String wfId2 = "workflow-2";
        // initial insert
        SLASummaryBean bean1 = _createSLASummaryBean(wfId1, "RUNNING", EventStatus.START_MISS, new Date(), new Date(),
                1000, null, null, 2000, 0, null);
        List<JsonBean> list = new ArrayList<JsonBean>();
        list.add(bean1);
        BatchQueryExecutor.getInstance().executeBatchInsertUpdateDelete(list, null, null);
        // update existing record and insert another
        Date newDate = new Date();
        bean1 = new SLASummaryBean();
        bean1.setId(wfId1);
        bean1.setActualEnd(newDate);
        List<UpdateEntry> updateList = new ArrayList<UpdateEntry>();
        updateList.add(new UpdateEntry<SLASummaryQuery>(SLASummaryQuery.UPDATE_SLA_SUMMARY_ALL, bean1));
        SLASummaryBean bean2 = _createSLASummaryBean(wfId2, "RUNNING", EventStatus.END_MISS, new Date(), new Date(),
                1000, null, null, 2000, 0, null);
        List<JsonBean> insertList = new ArrayList<JsonBean>();
        insertList.add(bean2);
        // set fault injection to true, so the transaction is rolled back
        setSystemProperty(FaultInjection.FAULT_INJECTION, "true");
        setSystemProperty(SkipCommitFaultInjection.ACTION_FAILOVER_FAULT_INJECTION, "true");
        try {
            BatchQueryExecutor.getInstance().executeBatchInsertUpdateDelete(insertList, updateList, null);
            fail("Expected exception due to commit failure but didn't get any");
        }
        catch (Exception expected) {
            // expected: the injected fault makes the commit fail
        }
        FaultInjection.deactivate("org.apache.oozie.command.SkipCommitFaultInjection");
        // Check whether transactions are rolled back or not
        SLASummaryBean sBean = SLASummaryQueryExecutor.getInstance().get(SLASummaryQuery.GET_SLA_SUMMARY, wfId1);
        // actualEnd should be null as before the failed update
        assertNull(sBean.getActualEnd());
        sBean = SLASummaryQueryExecutor.getInstance().get(SLASummaryQuery.GET_SLA_SUMMARY, wfId2);
        assertNull(sBean); // new bean should not have been inserted due to rollback
    }

    /**
     * Builds an SLASummaryBean with the given field values.
     *
     * NOTE(review): the {@code slaProc} parameter is accepted but never applied
     * to the bean -- confirm whether an sla-processed column setter was meant
     * to be called here.
     */
    private SLASummaryBean _createSLASummaryBean(String jobId, String status, EventStatus slaType, Date eStart,
            Date eEnd, long eDur, Date aStart, Date aEnd, long aDur, int slaProc, Date lastMod) {
        SLASummaryBean bean = new SLASummaryBean();
        bean.setId(jobId);
        bean.setJobStatus(status);
        bean.setEventStatus(slaType);
        bean.setExpectedStart(eStart);
        bean.setExpectedEnd(eEnd);
        bean.setExpectedDuration(eDur);
        bean.setActualStart(aStart);
        bean.setActualEnd(aEnd);
        bean.setActualDuration(aDur);
        bean.setLastModifiedTime(lastMod);
        return bean;
    }
}
/* * (C) Johannes Kepler University Linz, Austria, 2005-2013 * Institute for Systems Engineering and Automation (SEA) * * The software may only be used for academic purposes (teaching, scientific * research). Any redistribution or commercialization of the software program * and documentation (or any part thereof) requires prior written permission of * the JKU. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * This software program and documentation are copyrighted by Johannes Kepler * University Linz, Austria (the JKU). The software program and documentation * are supplied AS IS, without any accompanying services from the JKU. The JKU * does not warrant that the operation of the program will be uninterrupted or * error-free. The end-user understands that the program was developed for * research purposes and is advised not to rely exclusively on the program for * any reason. * * IN NO EVENT SHALL THE AUTHOR BE LIABLE TO ANY PARTY FOR DIRECT, INDIRECT, * SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES, INCLUDING LOST PROFITS, * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN IF THE * AUTHOR HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. THE AUTHOR * SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. * THE SOFTWARE PROVIDED HEREUNDER IS ON AN "AS IS" BASIS, AND THE AUTHOR HAS * NO OBLIGATIONS TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, * OR MODIFICATIONS. 
*/ /* * Workspace.java created on 13.03.2013 * * (c) alexander noehrer * (c) andreas demuth */ package at.jku.sea.cloud; import java.util.Collection; import java.util.Map; import java.util.Set; import at.jku.sea.cloud.exceptions.ArtifactConflictException; import at.jku.sea.cloud.exceptions.ArtifactDoesNotExistException; import at.jku.sea.cloud.exceptions.ArtifactNotPushOrPullableException; import at.jku.sea.cloud.exceptions.CollectionArtifactDoesNotExistException; import at.jku.sea.cloud.exceptions.MetaModelDoesNotExistException; import at.jku.sea.cloud.exceptions.PackageDoesNotExistException; import at.jku.sea.cloud.exceptions.ProjectDoesNotExistException; import at.jku.sea.cloud.exceptions.PropertyConflictException; import at.jku.sea.cloud.exceptions.PropertyDoesNotExistException; import at.jku.sea.cloud.exceptions.PropertyNotCommitableException; import at.jku.sea.cloud.exceptions.PropertyNotPushOrPullableException; import at.jku.sea.cloud.exceptions.VersionConflictException; import at.jku.sea.cloud.exceptions.WorkspaceConflictException; import at.jku.sea.cloud.exceptions.WorkspaceCycleException; import at.jku.sea.cloud.exceptions.WorkspaceEmptyException; import at.jku.sea.cloud.exceptions.WorkspaceExpiredException; import at.jku.sea.cloud.exceptions.WorkspaceNotEmptyException; import at.jku.sea.cloud.listeners.EventSource; import at.jku.sea.cloud.listeners.WorkspaceListener; import at.jku.sea.cloud.listeners.events.artifact.CollectionAddedElement; /** * A workspace is a private working area and the only way to create and adapt {@link Artifact}s and {@link Property}s. A workspace is defined through a unique negative identifier (id), a developer obtaining the workspace (owner), the tool the developer is working with (Tool) and the obtained * revision (base). Owner and tool record necessary traceability information in revision control, to answer the question: who did what, and when? 
* * Changes contained in the workspace can be committed, in order to publicize those, an update (i.e., a setting a new base version) is as well supported. * * A workspace only stores the new states of {@link Artifact}s and {@link Property}s with regard to the obtained base version.<br> * Consider the public accessible {@link Property} <br> * (aid=100,version=5,isAlive=true,isReference=false,name="name",value="Class")<br> * Changing this {@link Property} in a workspace (id=-1), would result in the entry:<br> * (aid=100,version=-1,isAlive=true,isReference=false,name="name",value="Class1") <br> * Committing the change: (aid=100,version=6,isAlive=true,isReference=false,name="name",value="Class1") * * A base version of a workspace, can as well be another existing workspace. The workspace which's base version is another workspace then see's all performed adaptations of the other workspace. * * Implementation Note:<br> * Implementation wise, there is actually no difference between a public available version, and a workspace, both are stored in the same table. The only difference being is that workspace have negative version numbers. * * @author alexander noehrer * @author mriedl */ public interface Workspace extends Version, EventSource<WorkspaceListener> { /** * Returns the negative id of the workspace. * * @return the negative id of the workspace. */ public long getId(); /** * Returns {@literal true} if the workspace was closed * * @return {@literal true} if the workspace was closed */ public boolean isClosed(); /** * Returns the base {@link Version} of the workspace---either {@link Workspace} or {@link PublicVersion}. * * @return the base {@link Version} of the workspace. */ public Version getBaseVersion(); /** * Returns the specified parent {@link Workspace} of this workspace. 
* * @return the specified parent {@link Workspace} of this workspace, can be {@literal null} if no parent is specified */ public Workspace getParent(); /** * Sets the specified {@link Workspace} as the new parent of this workspace. Necessary push/pull operations are called automatically. * * @param parent * the specified parent {@link Workspace} to be set as new parent, can be {@literal null}. * * @throws IllegalArgumentException * if the parent workspace has a different baseVersion */ public void setParent(Workspace parent) throws IllegalArgumentException, WorkspaceExpiredException, WorkspaceConflictException, WorkspaceCycleException; /** * Returns the collection of workspaces that are children to this {@link Workspace}. * * @return the collection of workspaces that are children to this {@link Workspace}. */ public Collection<Workspace> getChildren(); /** * Returns the currently employed push {@link PropagtionType} strategy of the workspace * * @return the currently employed push {@link PropagtionType} strategy */ public PropagationType getPush(); /** * Sets the to be employed push strategy of the workspace If set to instant all contents of the workspace are pushed * * @param type * the to be set {@link PropagtionType} * @throws WorkspaceConflictException * if push is set from triggered to instant and the parent workspace and this workspace contain conflicting changes */ public void setPush(PropagationType type) throws WorkspaceConflictException; /** * Returns the currently employed pull {@link PropagtionType} strategy of the workspace * * @return the currently employed pull {@link PropagtionType} strategy */ public PropagationType getPull(); /*** * Sets the to be employed pull strategy of the workspace. If set to instant all contents of the workspace are pulled. * * @param type * the to be set {@link PropagtionType} */ public void setPull(PropagationType type); // create new artifacts /** * Returns the newly created {@link Package}. 
* * @return the newly created {@link Package} * @throws WorkspaceExpiredException * if the workspace is closed */ public Package createPackage() throws WorkspaceExpiredException; /** * Returns the newly created {@link Package} with a name. * * @return the newly created {@link Package} * @throws WorkspaceExpiredException * if the workspace is closed */ public Package createPackage(String name) throws WorkspaceExpiredException; /** * Returns a newly created {@link Package} that itself is contained within the specified {@link Package}. * * @return a newly created {@link Package} that itself is contained within the specified {@link Package} * @throws WorkspaceExpiredException * if the workspace is closed */ public Package createPackage(final Package parent) throws WorkspaceExpiredException; /** * Returns a newly created {@link Package} with a name that itself is contained within the specified {@link Package}. * * @return a newly created {@link Package} with a name that itself is contained within the specified {@link Package} * @throws WorkspaceExpiredException * if the workspace is closed */ public Package createPackage(final Package parent, String name) throws WorkspaceExpiredException; /** * Returns the newly created {@link Project}. * * @return the newly created {@link Project} * @throws WorkspaceExpiredException * if the workspace is closed. */ public Project createProject() throws WorkspaceExpiredException; /** * Returns the newly created {@link Project}. * * @return the newly created {@link Project} * @throws WorkspaceExpiredException * if the workspace is closed. */ public Project createProject(String name) throws WorkspaceExpiredException; /** * @deprecated * * Returns a newly created {@link Project} that is contained within the specified {@link Package}. * * @return a newly created {@link Project} that is contained within the specified {@link Package}. * @throws WorkspaceExpiredException * if the workspace is closed. 
 */
@Deprecated
public Project createProject(final Package pckg) throws WorkspaceExpiredException;

/**
 * Returns a newly created {@link MetaModel}.
 *
 * @return a newly created {@link MetaModel}
 * @throws WorkspaceExpiredException
 *             if the workspace is closed.
 */
public MetaModel createMetaModel() throws WorkspaceExpiredException;

/**
 * Returns a newly created {@link MetaModel} that is contained within the specified {@link Package}.
 *
 * @param pckg
 *            the package that will contain the newly created {@link MetaModel}
 * @return a newly created {@link MetaModel} that is contained within the specified {@link Package}
 * @throws WorkspaceExpiredException
 *             if the workspace is closed.
 */
public MetaModel createMetaModel(final Package pckg) throws WorkspaceExpiredException;

/**
 * Returns a newly created {@link Artifact}.
 *
 * @return a newly created {@link Artifact}
 * @throws WorkspaceExpiredException
 *             if the workspace is closed.
 */
public Artifact createArtifact() throws WorkspaceExpiredException;

/**
 * Returns a newly created {@link Artifact} of the specified type.
 *
 * @param type
 *            the artifact that serves as the type of the new artifact
 * @return a newly created {@link Artifact} of the specified type.
 * @throws WorkspaceExpiredException
 *             if the workspace is closed.
 */
public Artifact createArtifact(final Artifact type) throws WorkspaceExpiredException;

/**
 * Returns a newly created {@link Artifact} of the specified type, placed in the specified
 * {@link Container}, {@link MetaModel}, and {@link Project}, and initialized with the given
 * properties.
 *
 * @param type
 *            the artifact that serves as the type of the new artifact
 * @param container
 *            the container the new artifact is added to
 * @param metamodel
 *            the meta model the new artifact belongs to
 * @param project
 *            the project the new artifact belongs to
 * @param properties
 *            initial property name/value pairs for the new artifact
 * @return the newly created {@link Artifact}
 * @throws WorkspaceExpiredException
 *             if the workspace is closed.
 */
public Artifact createArtifact(final Artifact type, final Container container, final MetaModel metamodel,
        final Project project, final Map<String, Object> properties) throws WorkspaceExpiredException;

/**
 * Returns a newly created {@link Artifact} that is contained within the specified {@link Package}.
 *
 * @param pckg
 *            the package that will contain the newly created {@link Artifact}
 * @return the newly created {@link Artifact}
 * @throws WorkspaceExpiredException
 *             if the workspace is closed.
 */
public Artifact createArtifact(final Package pckg) throws WorkspaceExpiredException;

/**
 * Returns a newly created {@link Artifact} of the specified type and contained within the specified {@link Package}.
 *
 * @param type
 *            the artifact that serves as the type of the new artifact
 * @param pckg
 *            the package that will contain the newly created {@link Artifact}
 * @return a newly created {@link Artifact} of the specified type and contained within the specified {@link Package}.
 * @throws WorkspaceExpiredException
 *             if the workspace is closed.
 */
public Artifact createArtifact(final Artifact type, final Package pckg) throws WorkspaceExpiredException;

/**
 * Returns a newly created {@link CollectionArtifact}.
 *
 * @param containsOnlyArtifacts
 *            if the {@link CollectionArtifact} should <tt>only</tt> contain objects of type {@link Artifact}.
 * @return the newly created {@link CollectionArtifact}.
 * @throws WorkspaceExpiredException
 *             if the workspace is closed
 */
public CollectionArtifact createCollection(final boolean containsOnlyArtifacts) throws WorkspaceExpiredException;

/**
 * Returns a newly created {@link CollectionArtifact} that is contained within the specified {@link Package}.
 *
 * @param containsOnlyArtifacts
 *            if the {@link CollectionArtifact} should <tt>only</tt> contain objects of type {@link Artifact}.
 * @param pckg
 *            the package that will contain the newly created {@link CollectionArtifact}
 * @return the newly created {@link CollectionArtifact}.
 * @throws WorkspaceExpiredException
 *             if the workspace is closed
 */
public CollectionArtifact createCollection(final boolean containsOnlyArtifacts, final Package pckg)
        throws WorkspaceExpiredException;

/**
 * Returns a newly created {@link CollectionArtifact} that is contained within the specified
 * {@link Package}, pre-filled with the given elements and properties.
 *
 * @param containsOnlyArtifacts
 *            if the {@link CollectionArtifact} should <tt>only</tt> contain objects of type {@link Artifact}.
 * @param pckg
 *            the package that will contain the newly created {@link CollectionArtifact}
 * @param elements
 *            the initial elements of the collection
 * @param properties
 *            initial property name/value pairs for the new collection
 * @return the newly created {@link CollectionArtifact}.
 * @throws WorkspaceExpiredException
 *             if the workspace is closed
 */
public CollectionArtifact createCollection(final boolean containsOnlyArtifacts, final Package pckg,
        Collection<?> elements, Map<String, Object> properties) throws WorkspaceExpiredException;

/**
 * Returns a newly created {@link MapArtifact}.
 *
 * @return the newly created {@link MapArtifact}
 * @throws WorkspaceExpiredException
 *             if the workspace is closed
 */
public MapArtifact createMap() throws WorkspaceExpiredException;

/**
 * Returns a newly created {@link MapArtifact} that is contained within the specified {@link Package}.
 *
 * @param pckg
 *            which contains the newly created {@link MapArtifact}
 * @return the newly created {@link MapArtifact}
 * @throws WorkspaceExpiredException
 *             if the workspace is closed
 */
public MapArtifact createMap(Package pckg) throws WorkspaceExpiredException;

/**
 * Returns a newly created {@link Resource} with the specified fully qualified name.
 *
 * @param fullQualifiedName
 *            the fully qualified name of the new resource
 * @return the newly created {@link Resource}
 */
public Resource createResource(String fullQualifiedName);

/**
 * Returns a newly created {@link Resource} with the specified fully qualified name, contained in the
 * given {@link Package} and {@link Project} and holding the given artifacts.
 *
 * @param fullQualifiedName
 *            the fully qualified name of the new resource
 * @param pckg
 *            the package that will contain the newly created resource
 * @param project
 *            the project the new resource belongs to
 * @param artifacts
 *            the artifacts held by the new resource
 * @return the newly created {@link Resource}
 */
public Resource createResource(String fullQualifiedName, Package pckg, Project project,
        Collection<Artifact> artifacts);

/**
 * Returns the {@link Resource} with the specified id.
 *
 * @param id
 *            the specified resource id
 * @return the {@link Resource} with the specified id
 */
public Resource getResource(long id);

/**
 * Returns all currently available {@link Resource}s.
 *
 * @return the collection of currently available {@link Resource}s
 */
public Collection<Resource> getResources();

/**
 * Returns all currently available {@link Resource}s matching the specified fully qualified name.
 *
 * @param fullQualifiedName
 *            the fully qualified name to match
 * @return the matching {@link Resource}s
 */
public Collection<Resource> getResources(String fullQualifiedName);

// get artifacts

/**
 * Returns the {@link Artifact} with the specified id, its version will be the id of the workspace.
 *
 * @param id
 *            the specified artifact id.
 * @return the {@link Artifact} with the specified id.
 * @throws WorkspaceExpiredException
 *             if the workspace is closed
 * @throws ArtifactDoesNotExistException
 *             if an {@link Artifact} with the specified id does not exist.
 */
public Artifact getArtifact(final long id) throws WorkspaceExpiredException, ArtifactDoesNotExistException;

/**
 * Returns a collection of all currently available {@link Artifact}s.<br>
 * Speaking with sets: (BaseVersion U WorkspaceAdded) / WorkspaceRemoved
 *
 * @return the collection of currently available {@link Artifact}s.
 * @throws WorkspaceExpiredException
 *             if the workspace is closed.
 */
public Collection<Artifact> getArtifacts() throws WorkspaceExpiredException;

/**
 * Returns the currently available {@link Artifact}s with the specified ids.
 *
 * @param ids
 *            the artifact ids to look up
 * @return the matching {@link Artifact}s
 * @throws WorkspaceExpiredException
 *             if the workspace is closed.
 */
public Collection<Artifact> getArtifacts(Set<Long> ids) throws WorkspaceExpiredException;

/**
 * Returns a raw representation of the artifact with the specified id.
 * NOTE(review): the element semantics of the returned array are not visible here — confirm
 * against the implementation before relying on positions.
 *
 * @param id
 *            the specified artifact id
 * @return the raw representation of the artifact
 */
public Object[] getArtifactRepresentation(long id);

/**
 * Returns all currently available {@link Artifact}s matching the specified type filters.
 *
 * @param filters
 *            artifact types to filter by
 * @return the matching {@link Artifact}s
 * @throws WorkspaceExpiredException
 *             if the workspace is closed.
 */
public Collection<Artifact> getArtifacts(final Artifact... filters) throws WorkspaceExpiredException;

/**
 * Returns all currently available {@link Artifact}s that have the specified property set to the
 * specified value, optionally restricted by type filters.
 *
 * @param propertyName
 *            the property name to match
 * @param propertyValue
 *            the property value to match
 * @param filters
 *            artifact types to filter by
 * @return the matching {@link Artifact}s
 * @throws WorkspaceExpiredException
 *             if the workspace is closed.
 */
public Collection<Artifact> getArtifactsWithProperty(final String propertyName, final Object propertyValue,
        final Artifact... filters) throws WorkspaceExpiredException;

/**
 * Returns all currently available {@link Artifact}s that have the specified property set to the
 * specified value, optionally restricted to alive artifacts and by type filters.
 *
 * @param propertyName
 *            the property name to match
 * @param propertyValue
 *            the property value to match
 * @param alive
 *            whether only alive artifacts should be returned
 * @param filters
 *            artifact types to filter by
 * @return the matching {@link Artifact}s
 * @throws WorkspaceExpiredException
 *             if the workspace is closed.
 */
public Collection<Artifact> getArtifactsWithProperty(final String propertyName, final Object propertyValue,
        boolean alive, final Artifact... filters) throws WorkspaceExpiredException;

/**
 * Returns all currently available {@link Artifact}s that match every property name/value pair in
 * the given map, optionally restricted by type filters.
 *
 * @param propertyToValue
 *            property name/value pairs that must all match
 * @param filters
 *            artifact types to filter by
 * @return the matching {@link Artifact}s
 * @throws WorkspaceExpiredException
 *             if the workspace is closed.
 */
public Collection<Artifact> getArtifactsWithProperty(final Map<String, Object> propertyToValue,
        final Artifact... filters) throws WorkspaceExpiredException;

/**
 * Returns all currently available {@link Artifact}s that match every property name/value pair in
 * the given map, optionally restricted to alive artifacts and by type filters.
 *
 * @param propertyToValue
 *            property name/value pairs that must all match
 * @param alive
 *            whether only alive artifacts should be returned
 * @param filters
 *            artifact types to filter by
 * @return the matching {@link Artifact}s
 * @throws WorkspaceExpiredException
 *             if the workspace is closed.
 */
public Collection<Artifact> getArtifactsWithProperty(final Map<String, Object> propertyToValue, boolean alive,
        final Artifact... filters) throws WorkspaceExpiredException;

/**
 * Returns all currently available {@link Artifact}s that reference the specified artifact,
 * optionally restricted by type filters.
 *
 * @param artifact
 *            the referenced artifact
 * @param filters
 *            artifact types to filter by
 * @return the referencing {@link Artifact}s
 * @throws WorkspaceExpiredException
 *             if the workspace is closed.
 */
public Collection<Artifact> getArtifactsWithReference(Artifact artifact, final Artifact... filters)
        throws WorkspaceExpiredException;

/**
 * Returns the currently available {@link Artifact}s together with their property maps, optionally
 * restricted by type filters.
 *
 * @param filters
 *            artifact types to filter by
 * @return a map from each matching {@link Artifact} to its property name/value map
 * @throws WorkspaceExpiredException
 *             if the workspace is closed.
 */
public Map<Artifact, Map<String, Object>> getArtifactsAndPropertyMap(final Artifact... filters)
        throws WorkspaceExpiredException;

/**
 * Returns the currently available {@link Artifact}s that have the specified property set to the
 * specified value, together with their property maps.
 *
 * @param propertyName
 *            the property name to match
 * @param propertyValue
 *            the property value to match
 * @param alive
 *            whether only alive artifacts should be returned
 * @param filters
 *            artifact types to filter by
 * @return a map from each matching {@link Artifact} to its property name/value map
 * @throws WorkspaceExpiredException
 *             if the workspace is closed.
 */
public Map<Artifact, Map<String, Object>> getArtifactsAndPropertyMap(final String propertyName,
        final Object propertyValue, boolean alive, final Artifact... filters) throws WorkspaceExpiredException;

/**
 * Returns the currently available {@link Artifact}s that match every property name/value pair in
 * the given map, together with their property maps.
 *
 * @param propertyToValue
 *            property name/value pairs that must all match
 * @param alive
 *            whether only alive artifacts should be returned
 * @param filters
 *            artifact types to filter by
 * @return a map from each matching {@link Artifact} to its property name/value map
 * @throws WorkspaceExpiredException
 *             if the workspace is closed.
 */
public Map<Artifact, Map<String, Object>> getArtifactsAndPropertyMap(final Map<String, Object> propertyToValue,
        boolean alive, final Artifact... filters) throws WorkspaceExpiredException;

/**
 * Returns, for each of the given artifacts, a map of the requested property names to their values.
 *
 * @param artifacts
 *            the artifacts whose properties should be fetched
 * @param properties
 *            the property names to fetch
 * @return a map from each {@link Artifact} to the requested property name/value pairs
 * @throws WorkspaceExpiredException
 *             if the workspace is closed.
 */
public Map<Artifact, Map<String, Object>> getArtifactsPropertyMap(final Set<Artifact> artifacts,
        final Set<String> properties) throws WorkspaceExpiredException;

/**
 * Returns, for each of the given artifacts, a map of all its property names to their values.
 *
 * @param artifacts
 *            the artifacts whose properties should be fetched
 * @return a map from each {@link Artifact} to its property name/value pairs
 * @throws WorkspaceExpiredException
 *             if the workspace is closed.
 */
public Map<Artifact, Map<String, Object>> getArtifactsPropertyMap(final Set<Artifact> artifacts)
        throws WorkspaceExpiredException;

/**
 * Returns the {@link Package} with the specified id.
 *
 * @param id
 *            the specified package id
 * @return the {@link Package} with the specified id
 * @throws WorkspaceExpiredException
 *             if the workspace is closed.
 * @throws PackageDoesNotExistException
 *             if a {@link Package} with the specified id does not exist.
 */
public Package getPackage(final long id) throws WorkspaceExpiredException, PackageDoesNotExistException;

/**
 * Returns all currently available {@link Package}s.
 *
 * @return the collection of currently available {@link Package}s
 * @throws WorkspaceExpiredException
 *             if the workspace is closed.
 */
public Collection<Package> getPackages() throws WorkspaceExpiredException;

/**
 * Returns the {@link Project} with the specified id.
 *
 * @param id
 *            the specified project id
 * @return the {@link Project} with the specified id
 * @throws WorkspaceExpiredException
 *             if the workspace is closed.
 * @throws ProjectDoesNotExistException
 *             if a {@link Project} with the specified id does not exist.
 */
public Project getProject(final long id) throws WorkspaceExpiredException, ProjectDoesNotExistException;

/**
 * Returns all currently available {@link Project}s.
 *
 * @return the collection of currently available {@link Project}s
 * @throws WorkspaceExpiredException
 *             if the workspace is closed.
 */
public Collection<Project> getProjects() throws WorkspaceExpiredException;

/**
 * Returns the {@link MetaModel} with the specified id.
 *
 * @param id
 *            the specified meta model id
 * @return the {@link MetaModel} with the specified id
 * @throws WorkspaceExpiredException
 *             if the workspace is closed.
 * @throws MetaModelDoesNotExistException
 *             if a {@link MetaModel} with the specified id does not exist.
 */
public MetaModel getMetaModel(final long id) throws WorkspaceExpiredException, MetaModelDoesNotExistException;

/**
 * Returns all currently available {@link MetaModel}s.
 *
 * @return the collection of currently available {@link MetaModel}s
 * @throws WorkspaceExpiredException
 *             if the workspace is closed.
 */
public Collection<MetaModel> getMetaModels() throws WorkspaceExpiredException;

/**
 * Returns the {@link MetaModel} the specified artifact's meta model belongs to.
 *
 * @param artifact
 *            the specified artifact
 * @return the meta meta model of the specified artifact
 * @throws WorkspaceExpiredException
 *             if the workspace is closed.
 */
public MetaModel getMetaMetaModel(final Artifact artifact) throws WorkspaceExpiredException;

/**
 * Returns the {@link CollectionArtifact} with the specified id.
 *
 * @param id
 *            the specified collection artifact id
 * @return the {@link CollectionArtifact} with the specified id
 * @throws WorkspaceExpiredException
 *             if the workspace is closed.
 * @throws CollectionArtifactDoesNotExistException
 *             if a {@link CollectionArtifact} with the specified id does not exist.
 */
public CollectionArtifact getCollectionArtifact(final long id)
        throws WorkspaceExpiredException, CollectionArtifactDoesNotExistException;

/**
 * Returns all currently available {@link CollectionArtifact}s.
 *
 * @return the collection of currently available {@link CollectionArtifact}s
 * @throws WorkspaceExpiredException
 *             if the workspace is closed.
 */
public Collection<CollectionArtifact> getCollectionArtifacts() throws WorkspaceExpiredException;

// commit and rollback artifacts

/**
 * Rebases the workspace to the specified {@link Version} (i.e., sets a new base version). This operation only
 * returns the set of {@link Artifact}s and {@link Property}s that are in conflict. Meaning, that private
 * adaptations overwrite the public adaptations.
 *
 * @param version
 *            the version it should be rebased to.
 * @return the set of {@link Artifact}s and {@link Property}s that are in conflict with the applied private
 *         adaptations.
 * @throws WorkspaceExpiredException
 *             if the workspace is closed.
 */
public Map<Artifact, Set<Property>> rebase(Version version) throws WorkspaceExpiredException;

/**
 * Rebases the workspace to the current existing head {@link Version} (i.e., sets a new base version). This
 * operation only returns the set of {@link Artifact}s and {@link Property}s that are in conflict. Meaning, that
 * private adaptations overwrite the public adaptations.
 *
 * @return the set of {@link Artifact}s and {@link Property}s that are in conflict with the applied private
 *         adaptations.
 * @throws WorkspaceExpiredException
 *             if the workspace is closed.
 */
public Map<Artifact, Set<Property>> rebaseToHeadVersion() throws WorkspaceExpiredException;

/**
 * Commits (i.e., publishes) the specified artifact ({@link Artifact}), creates a new public version, and
 * implicitly sets the base version of the workspace to the newly introduced version
 *
 * @param artifact
 *            the specified artifact ({@link Artifact}) to commit
 * @param message
 *            the commit message, can be set to null.
 * @return the new head revision number
 * @throws ArtifactDoesNotExistException
 *             if the {@link Artifact} does no longer exist.
 * @throws WorkspaceExpiredException
 *             if the workspace is closed.
 * @throws ArtifactConflictException
 *             if newer versions than the base version as well contain changes to this {@link Artifact}
 * @throws WorkspaceEmptyException
 *             if there are no changes for this workspace
 */
public long commitArtifact(final Artifact artifact, final String message) throws ArtifactDoesNotExistException,
        WorkspaceExpiredException, ArtifactConflictException, WorkspaceEmptyException;

/**
 * Deletes all stored changes for the specified artifact ({@link Artifact}) in the workspace ({@link Workspace}).
 * If the artifact is only contained in the workspace (i.e., has no previous public version) the artifact ceases
 * to exist. In this case, all of its properties are as well deleted.
 *
 * @param artifact
 *            the specified artifact for which changes should be deleted.
 * @throws ArtifactDoesNotExistException
 *             if the artifact does not exist.
 * @throws WorkspaceExpiredException
 *             if the workspace is closed.
 */
public void rollbackArtifact(final Artifact artifact)
        throws ArtifactDoesNotExistException, WorkspaceExpiredException;

/**
 * Commits (i.e., publishes) the specified property ({@link Property}), creates a new public version, and
 * implicitly sets the base version of the workspace to the newly introduced version
 *
 * @param property
 *            the specified property to commit.
 * @param message
 *            the commit message, can be set to null.
 * @return the new head version number
 * @throws ArtifactDoesNotExistException
 *             if the artifact ({@link Artifact}) to which the specified property belongs does no longer exist
 * @throws PropertyDoesNotExistException
 *             if the property does not exist.
 * @throws WorkspaceExpiredException
 *             if the workspace is closed.
 * @throws PropertyConflictException
 *             if the newer versions than the base version as well contain changes to the specified property.
 * @throws PropertyNotCommitableException
 *             if the artifact the property belongs to only exists in the workspace.
 */
public long commitProperty(final Property property, final String message) throws ArtifactDoesNotExistException,
        PropertyDoesNotExistException, WorkspaceExpiredException, PropertyConflictException,
        PropertyNotCommitableException;

/**
 * Deletes all stored changes for the specified property ({@link Property}) in the workspace ({@link Workspace}).
 *
 * @param property
 *            the specified property for which changes should be deleted.
 * @throws ArtifactDoesNotExistException
 *             if the artifact ({@link Artifact}) to which the property belongs does not exist.
 * @throws PropertyDoesNotExistException
 *             if the property does not exist.
 * @throws WorkspaceExpiredException
 *             if the workspace is closed.
 */
public void rollbackProperty(final Property property)
        throws ArtifactDoesNotExistException, PropertyDoesNotExistException, WorkspaceExpiredException;

/**
 * Commits (i.e., publishes) the contents of the workspace, creates a new public version, and implicitly sets the
 * base version of the workspace to the newly introduced version
 *
 * @param message
 *            the commit message, can be set to null.
 * @return the new head version number.
 * @throws WorkspaceExpiredException
 *             if the workspace is closed.
 * @throws VersionConflictException
 *             if the newer versions than the base version contains similar changes.
 */
public long commitAll(final String message) throws WorkspaceExpiredException, VersionConflictException;

/**
 * Deletes all stored changes in the {@link Workspace}.
 *
 * @throws WorkspaceExpiredException
 *             if the workspace is closed.
 */
public void rollbackAll() throws WorkspaceExpiredException;

/**
 * Pushes the specified artifact ({@link Artifact}) to the parent of this workspace. If no parent is specified,
 * implicitly {@link #commitArtifact(Artifact, String)} is called with an empty string.
 *
 * @param artifact
 *            the specified artifact to be pushed.
 * @throws WorkspaceExpiredException
 *             if either the workspace, or its parent is closed.
 */
public void pushArtifact(final Artifact artifact)
        throws WorkspaceExpiredException, ArtifactDoesNotExistException, ArtifactNotPushOrPullableException;

/**
 * Pushes the specified property ({@link Property}) including properties) to the parent of this workspace. If no
 * parent is specified, implicitly {@link #commitProperty(Property, String)} is called with an empty commit
 * message string.
 *
 * @param property
 *            the specified property to be pushed.
 * @throws WorkspaceExpiredException
 *             if either the workspace, or its parent is closed.
 * @throws ArtifactDoesNotExistException
 *             if the artifact ({@link Artifact}) to which the property belongs does not exist.
 * @throws PropertyDoesNotExistException
 *             if the property does not exist.
 * @throws PropertyNotPushOrPullableException
 *             if the property cannot be pushed as it references or belongs to an artifact only available in the
 *             parent workspace.
 */
public void pushProperty(final Property property) throws WorkspaceExpiredException, ArtifactDoesNotExistException,
        PropertyDoesNotExistException, PropertyNotPushOrPullableException;

/**
 * Pushes all changes of the workspace ({@link Workspace}) to the parent of this workspace. If no parent is
 * specified, implicitly {@link #commitAll(String)} is called with an empty string.
 *
 * @throws WorkspaceExpiredException
 *             if either the workspace, or its parent is closed.
 */
public void pushAll() throws WorkspaceExpiredException;

/**
 * Pulls changes of the specified artifact ({@link Artifact} including properties) from the parent workspace.
 *
 * @param artifact
 *            the specified artifact for which changes should be pulled.
 * @throws WorkspaceExpiredException
 *             if either the workspace, or its parent is closed.
 * @throws ArtifactDoesNotExistException
 *             if the artifact does not exist.
 */
public void pullArtifact(final Artifact artifact)
        throws WorkspaceExpiredException, ArtifactDoesNotExistException, ArtifactNotPushOrPullableException;

/**
 * Pulls changes of the specified property ({@link Property}) from the parent workspace.
 *
 * @param property
 *            the specified property for which changes should be pulled.
 * @throws WorkspaceExpiredException
 *             if either the workspace, or its parent is closed.
 * @throws ArtifactDoesNotExistException
 *             if the artifact ({@link Artifact}) to which the property belongs does not exist.
 * @throws PropertyDoesNotExistException
 *             if the property does not exist.
 * @throws PropertyNotPushOrPullableException
 *             if the property cannot be pulled as it references or belongs to an artifact only available in the
 *             parent workspace.
 */
public void pullProperty(final Property property) throws WorkspaceExpiredException, ArtifactDoesNotExistException,
        PropertyDoesNotExistException, PropertyNotPushOrPullableException;

/**
 * Pulls all changes from the parent workspace.
 *
 * @return the set of {@link Artifact}s and {@link Property}s that are in conflict after pulling
 * @throws WorkspaceExpiredException
 *             if either the workspace, or its parent is closed.
 */
public Map<Artifact, Set<Property>> pullAll() throws WorkspaceExpiredException;

/**
 * Returns the set of artifacts ({@link Artifact}), for which changes exist in the workspace and in the version
 * history from the workspace's baseVersion to the current head version.
 *
 * @return the set of artifacts that are in conflict with the version history.
 * @throws WorkspaceExpiredException
 *             if the workspace is closed.
 */
public Set<Artifact> getArtifactConflicts() throws WorkspaceExpiredException;

/**
 * Returns the set of artifacts ({@link Artifact}), for which changes exist in the workspace and in the version
 * history from the workspace's baseVersion to the specified publicVersion.
 *
 * @param publicVersion
 *            the specified version up to which conflicts should be checked.
 * @return the set of artifacts that are in conflict with the version history.
 * @throws WorkspaceExpiredException
 *             if the workspace is closed.
 * @throws IllegalArgumentException
 *             if publicVersion &gt; headVersion || publicVersion &lt; baseVersion.
 */
public Set<Artifact> getArtifactConflicts(final long publicVersion)
        throws WorkspaceExpiredException, IllegalArgumentException;

/**
 * Returns the set of properties ({@link Property}), for which changes exist in the workspace and in the version
 * history from the workspace's baseVersion to the current head version.
 *
 * @return the set of properties that are in conflict with the version history.
 * @throws WorkspaceExpiredException
 *             if the workspace is closed.
 */
public Set<Property> getPropertyConflicts() throws WorkspaceExpiredException;

/**
 * Returns the set of properties ({@link Property}), for which changes exist in the workspace and in the version
 * history from the workspace's baseVersion to the specified publicVersion.
 *
 * @param publicVersion
 *            the specified version up to which conflicts should be checked.
 * @return the set of properties that are in conflict with the version history.
 * @throws WorkspaceExpiredException
 *             if the workspace is closed.
 * @throws IllegalArgumentException
 *             if publicVersion &gt; headVersion || publicVersion &lt; baseVersion.
 */
public Set<Property> getPropertyConflicts(final long publicVersion)
        throws WorkspaceExpiredException, IllegalArgumentException;

/**
 * Closes the workspace, so it ceases to exist.
 *
 * @throws WorkspaceExpiredException
 *             if the workspace is already closed.
 * @throws WorkspaceNotEmptyException
 *             if the workspace still contains uncommitted changes.
 */
public void close() throws WorkspaceExpiredException, WorkspaceNotEmptyException;
}
/* Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.camunda.bpm.engine.impl.db;

import java.util.Collection;
import java.util.List;
import java.util.Map;

import org.camunda.bpm.engine.AuthorizationException;
import org.camunda.bpm.engine.BadUserRequestException;
import org.camunda.bpm.engine.OptimisticLockingException;
import org.camunda.bpm.engine.ProcessEngineException;
import org.camunda.bpm.engine.SuspendedEntityInteractionException;
import org.camunda.bpm.engine.WrongDbException;
import org.camunda.bpm.engine.exception.NotValidException;
import org.camunda.bpm.engine.impl.ProcessEngineLogger;
import org.camunda.bpm.engine.impl.db.entitymanager.cache.CachedDbEntity;
import org.camunda.bpm.engine.impl.db.entitymanager.cache.DbEntityState;
import org.camunda.bpm.engine.impl.db.entitymanager.operation.DbOperation;
import org.camunda.bpm.engine.impl.persistence.entity.ExecutionEntity;
import org.camunda.bpm.engine.impl.util.ClassNameUtil;
import org.camunda.bpm.model.xml.instance.ModelElementInstance;

/**
 * Logger for the persistence layer of the process engine. Every log/exception-factory
 * method passes a unique three-digit message id ("001".."062") as the first argument of
 * the inherited log/exceptionMessage helpers, so log output can be correlated to a
 * specific call site.
 *
 * @author Stefan Hentschel.
 */
public class EnginePersistenceLogger extends ProcessEngineLogger {

  protected static final String HINT_TEXT = "Hint: Set <property name=\"databaseSchemaUpdate\" to value=\"true\" or " +
                                            "value=\"create-drop\" (use create-drop for testing only!) in bean " +
                                            "processEngineConfiguration in camunda.cfg.xml for automatic schema creation";

  /**
   * Renders the given collection as a bracketed, newline-separated string for log output.
   *
   * @param list the elements to render
   * @param isSQL when {@code TRUE}, each line is prefixed with "SQL: " (null is treated as false)
   * @return the rendered string
   */
  protected String buildStringFromList(Collection<?> list, Boolean isSQL) {
    StringBuilder message = new StringBuilder();
    message.append("[");
    message.append("\n");
    for( Object object : list ) {
      message.append("  ");
      // Boolean.TRUE.equals guards against a null boxed flag (plain unboxing would NPE)
      if(Boolean.TRUE.equals(isSQL)) {
        message.append("SQL: ");
      }
      message.append(object.toString());
      message.append("\n");
    }
    message.append("]");

    return message.toString();
  }

  /**
   * Renders the given map as a bracketed, newline-separated "key: value" string for log output.
   *
   * @param map the entries to render
   * @return the rendered string
   */
  private String buildStringFromMap(Map<String, ?> map) {
    StringBuilder message = new StringBuilder();
    message.append("[");
    message.append("\n");
    for( Map.Entry<String, ?> entry : map.entrySet() ) {
      message.append("  ");
      message.append(entry.getKey());
      message.append(": ");
      message.append(entry.getValue().toString());
      message.append("\n");
    }
    message.append("]");
    return message.toString();
  }

  public <T extends DbEntity> ProcessEngineException entityCacheLookupException(Class<T> type, String id,
      Class<? extends DbEntity> entity, Throwable cause) {
    return new ProcessEngineException(exceptionMessage(
      "001",
      "Could not lookup entity of type '{}' and id '{}': found entity of type '{}'.",
      type,
      id,
      entity
    ), cause);
  }

  public ProcessEngineException entityCacheDuplicateEntryException(String currentState, String id,
      Class<? extends DbEntity> entityClass, DbEntityState foundState) {
    return new ProcessEngineException(exceptionMessage(
      "002",
      "Cannot add {} entity with id '{}' and type '{}' into cache. An entity with the same id and type is already in state '{}'",
      currentState,
      id,
      entityClass,
      foundState
    ));
  }

  public ProcessEngineException alreadyMarkedEntityInEntityCacheException(String id,
      Class<? extends DbEntity> entityClass, DbEntityState state) {

    return new ProcessEngineException(exceptionMessage(
      "003",
      "Inserting an entity with Id '{}' and type '{}' which is already marked with state '{}'",
      id,
      entityClass,
      state
    ));
  }

  public ProcessEngineException flushDbOperationException(List<DbOperation> operationsToFlush, DbOperation operation,
      Throwable cause) {

    return new ProcessEngineException(exceptionMessage(
      "004",
      "Exception while executing Database Operation '{}' with message '{}'. Flush summary: \n {}",
      operation.toString(),
      cause.getMessage(),
      buildStringFromList(operationsToFlush, false)
    ), cause);
  }

  public OptimisticLockingException concurrentUpdateDbEntityException(DbOperation operation) {
    return new OptimisticLockingException(exceptionMessage(
      "005",
      "Execution of '{}' failed. Entity was updated by another transaction concurrently.",
      operation
    ));
  }

  public void flushedCacheState(List<CachedDbEntity> cachedEntities) {
    if(isDebugEnabled()) {
      logDebug("006", "Cache state after flush: {}", buildStringFromList(cachedEntities, false));
    }
  }

  public ProcessEngineException mergeDbEntityException(DbEntity entity) {
    return new ProcessEngineException(exceptionMessage("007", "Cannot merge DbEntity '{}' without id", entity));
  }

  public void databaseFlushSummary(Collection<DbOperation> operations) {
    if(isDebugEnabled()) {
      logDebug("008", "Flush Summary: {}", buildStringFromList(operations, false));
    }
  }

  public void executeDatabaseOperation(String operationType, Object parameter) {
    if(isDebugEnabled()) {
      String message;
      if(parameter != null) {
        message = parameter.toString();
      }
      else {
        message = "null";
      }

      // for entities, log a compact "Type[id=...]" form instead of the full toString()
      if(parameter instanceof DbEntity) {
        DbEntity dbEntity = (DbEntity) parameter;
        message = ClassNameUtil.getClassNameWithoutPackage(dbEntity) + "[id=" + dbEntity.getId() + "]";
      }

      logDebug("009", "SQL operation: '{}'; Entity: '{}'", operationType, message);
    }
  }

  public void executeDatabaseBulkOperation(String operationType, String statement, Object parameter) {
    logDebug("010", "SQL bulk operation: '{}'; Statement: '{}'; Parameter: '{}'", operationType, statement, parameter);
  }

  public void fetchDatabaseTables(String source, List<String> tableNames) {
    if(isDebugEnabled()) {
      logDebug(
        "011",
        "Retrieving process engine tables from: '{}'. Retrieved tables: {}",
        source,
        buildStringFromList(tableNames, false)
      );
    }
  }

  public void missingSchemaResource(String resourceName, String operation) {
    logDebug("012", "There is no schema resource '{}' for operation '{}'.", resourceName, operation);
  }

  public ProcessEngineException missingSchemaResourceException(String resourceName, String operation) {
    return new ProcessEngineException(
      exceptionMessage("013", "There is no schema resource '{}' for operation '{}'.", resourceName, operation));
  }

  public ProcessEngineException missingSchemaResourceFileException(String fileName, Throwable cause) {
    return new ProcessEngineException(
      exceptionMessage("014", "Cannot find schema resource file with name '{}'",fileName), cause);
  }

  public void failedDatabaseOperation(String operation, String statement, Throwable cause) {
    logError(
      "015",
      "Problem during schema operation '{}' with statement '{}'. Cause: '{}'",
      operation,
      statement,
      cause.getMessage()
    );
  }

  public void performedDatabaseOperation(String operation, String component, String resourceName,
      List<String> logLines) {
    logInfo(
      "016",
      "Performed operation '{}' on component '{}' with resource '{}': {}",
      operation,
      component,
      resourceName,
      buildStringFromList(logLines, true));
  }

  public void successfulDatabaseOperation(String operation, String component) {
    // FIX: the message id ("062") was missing; without it the template was consumed as the
    // id and the parameters shifted, producing a malformed log line (every other call in
    // this class passes an id first).
    logDebug("062", "Database schema operation '{}' for component '{}' was successful.", operation, component);
  }

  public ProcessEngineException performDatabaseOperationException(String operation, String sql, Throwable cause) {
    return new ProcessEngineException(exceptionMessage(
      "017",
      "Could not perform operation '{}' on database schema for SQL Statement: '{}'.",
      operation,
      sql
    ), cause);
  }

  public ProcessEngineException checkDatabaseTableException(Throwable cause) {
    return new ProcessEngineException(
      exceptionMessage("018", "Could not check if tables are already present using metadata."), cause);
  }

  public ProcessEngineException getDatabaseTableNameException(Throwable cause) {
    return new ProcessEngineException(exceptionMessage("019", "Unable to fetch process engine table names."), cause);
  }

  public ProcessEngineException missingRelationMappingException(String relation) {
    return new ProcessEngineException(
      exceptionMessage("020", "There is no mapping for the relation '{}' registered.", relation));
  }

  public ProcessEngineException databaseHistoryLevelException(String level) {
    return new ProcessEngineException(
      exceptionMessage("021", "historyLevel '{}' is higher then 'none' and dbHistoryUsed is set to false.", level));
  }

  public ProcessEngineException invokeSchemaResourceToolException(int length) {
    return new ProcessEngineException(exceptionMessage(
      "022",
      "Schema resource tool was invoked with '{}' parameters." +
      "Schema resource tool must be invoked with exactly 2 parameters:" +
      "\n - 1st parameter is the process engine configuration file," +
      "\n - 2nd parameter is the schema resource file name",
      length
    ));
  }

  public ProcessEngineException loadModelException(String type, String modelName, String id, Throwable cause) {
    return new ProcessEngineException(exceptionMessage(
      "023",
      "Could not load {} Model for {} definition with id '{}'.",
      type,
      modelName,
      id
    ), cause);
  }

  public void removeEntryFromDeploymentCacheFailure(String modelName, String id, Throwable cause) {
    logWarn(
      "024",
      "Could not remove {} definition with id '{}' from the cache. Reason: '{}'",
      modelName,
      id,
      cause.getMessage()
    );
  }

  public ProcessEngineException engineAuthorizationTypeException(int usedType, int global, int grant, int revoke) {
    return new ProcessEngineException(exceptionMessage(
      "025",
      "Unrecognized authorization type '{}'. Must be one of ['{}', '{}', '{}']",
      usedType,
      global,
      grant,
      revoke
    ));
  }

  public IllegalStateException permissionStateException(String methodName, String type) {
    return new IllegalStateException(
      exceptionMessage("026", "Method '{}' cannot be used for authorization with type '{}'.", methodName, type));
  }

  public ProcessEngineException notUsableGroupIdForGlobalAuthorizationException() {
    return new ProcessEngineException(exceptionMessage("027", "Cannot use 'groupId' for GLOBAL authorization"));
  }

  public ProcessEngineException illegalValueForUserIdException(String id, String expected) {
    return new ProcessEngineException(
      exceptionMessage("028", "Illegal value '{}' for userId for GLOBAL authorization. Must be '{}'", id, expected));
  }

  public AuthorizationException notAMemberException(String id, String group) {
    return new AuthorizationException(
      exceptionMessage("029", "The user with id '{}' is not a member of the group with id '{}'", id, group));
  }

  public void createChildExecution(ExecutionEntity child, ExecutionEntity parent) {
    if(isDebugEnabled()) {
      logDebug("030", "Child execution '{}' created with parent '{}'.", child.toString(), parent.toString());
    }
  }

  public void initializeExecution(ExecutionEntity entity) {
    logDebug("031", "Initializing execution '{}'", entity.toString());
  }

  public void initializeTimerDeclaration(ExecutionEntity entity) {
    logDebug("032", "Initializing timer declaration '{}'", entity.toString());
  }

  public ProcessEngineException requiredAsyncContinuationException(String id) {
    return new ProcessEngineException(exceptionMessage(
      "033",
      "Asynchronous Continuation for activity with id '{}' requires a message job declaration",
      id
    ));
  }

  public ProcessEngineException restoreProcessInstanceException(ExecutionEntity entity) {
    return new ProcessEngineException(exceptionMessage(
      "034",
      "Can only restore process instances. This method must be called on a process instance execution but was called on '{}'",
      entity.toString()
    ));
  }

  public ProcessEngineException executionNotFoundException(String id) {
    return new ProcessEngineException(exceptionMessage("035", "Unable to find execution for id '{}'", id));
  }

  public ProcessEngineException castModelInstanceException(ModelElementInstance instance, String toElement, String type,
      String namespace, Throwable cause) {
    return new ProcessEngineException(exceptionMessage(
      "036",
      "Cannot cast '{}' to '{}'. Element is of type '{}' with namespace '{}'.",
      instance,
      toElement,
      type,
      namespace
    ), cause);
  }

  public BadUserRequestException requestedProcessInstanceNotFoundException(String id) {
    return new BadUserRequestException(exceptionMessage("037", "No process instance found for id '{}'", id));
  }

  public NotValidException queryExtensionException(String extendedClassName, String extendingClassName) {
    return new NotValidException(exceptionMessage(
      "038",
      "Unable to extend a query of class '{}' by a query of class '{}'.",
      extendedClassName,
      extendingClassName
    ));
  }

  public ProcessEngineException unsupportedResourceTypeException(String type) {
    return new ProcessEngineException(exceptionMessage("039", "Unsupported resource type '{}'", type));
  }

  public ProcessEngineException serializerNotDefinedException(Object entity) {
    return new ProcessEngineException(exceptionMessage("040", "No serializer defined for variable instance '{}'", entity));
  }

  public ProcessEngineException serializerOutOfContextException() {
    return new ProcessEngineException(exceptionMessage("041", "Cannot work with serializers outside of command context."));
  }

  public ProcessEngineException taskIsAlreadyAssignedException(String usedId, String foundId) {
    return new ProcessEngineException(
      exceptionMessage("042", "Cannot assign '{}' to a task assignment that has already '{}' set.", usedId, foundId));
  }

  public SuspendedEntityInteractionException suspendedEntityException(String type, String id) {
    return new SuspendedEntityInteractionException(exceptionMessage("043", "{} with id '{}' is suspended.", type, id));
  }

  public ProcessEngineException updateUnrelatedProcessDefinitionEntityException() {
    return new ProcessEngineException(exceptionMessage("044", "Cannot update entity from an unrelated process definition"));
  }

  public ProcessEngineException toManyProcessDefinitionsException(int count, String key, Integer version) {
    return new ProcessEngineException(exceptionMessage(
      "045",
      "There are '{}' results for a process definition with key '{}' and version '{}'.",
      count,
      key,
      version
    ));
  }

  public ProcessEngineException notAllowedIdException(String id) {
    return new ProcessEngineException(
      exceptionMessage("046", "Cannot set id '{}'. Only the provided id generation is allowed for properties.", id));
  }

  public void countRowsPerProcessEngineTable(Map<String, Long> map) {
    if(isDebugEnabled()) {
      logDebug("047", "Number of rows per process engine table: {}", buildStringFromMap(map));
    }
  }

  public ProcessEngineException countTableRowsException(Throwable cause) {
    return new ProcessEngineException(exceptionMessage("048", "Could not fetch table counts."), cause);
  }

  public void selectTableCountForTable(String name) {
    logDebug("049", "Selecting table count for table with name '{}'", name);
  }

  public ProcessEngineException retrieveMetadataException(Throwable cause) {
    return new ProcessEngineException(
      exceptionMessage("050", "Could not retrieve database metadata. Reason: '{}'", cause.getMessage()), cause);
  }

  public ProcessEngineException invokeTaskListenerException(Throwable cause) {
    return new ProcessEngineException(exceptionMessage(
      "051",
      "There was an exception while invoking the TaskListener. Message: '{}'",
      cause.getMessage()
    ), cause);
  }

  public BadUserRequestException uninitializedFormKeyException() {
    return new BadUserRequestException(exceptionMessage(
      "052",
      "The form key is not initialized. You must call initializeFormKeys() on the task query before you can " +
      "retrieve the form key."
    ));
  }

  public ProcessEngineException disabledHistoryException() {
    return new ProcessEngineException(exceptionMessage("053", "History is not enabled."));
  }

  public ProcessEngineException instantiateSessionException(String name, Throwable cause) {
    return new ProcessEngineException(exceptionMessage(
      "054",
      "Could not instantiate class '{}'. Message: '{}'",
      name,
      cause.getMessage()
    ), cause);
  }

  public WrongDbException wrongDbVersionException(String version, String dbVersion) {
    return new WrongDbException(exceptionMessage(
      "055",
      "Version mismatch: activiti library version is '{}' and db version is '{}'. " + HINT_TEXT,
      version,
      dbVersion
    ), version, dbVersion);
  }

  public ProcessEngineException missingTableException(List<String> components) {
    return new ProcessEngineException(exceptionMessage(
      "056",
      "Tables are missing for the following components: {}",
      buildStringFromList(components, false)
    ));
  }

  public ProcessEngineException missingActivitiTablesException() {
    return new ProcessEngineException(exceptionMessage(
      "057",
      "There are no activiti tables in the database." + HINT_TEXT
    ));
  }

  public ProcessEngineException unableToFetchDbSchemaVersion(Throwable cause) {
    return new ProcessEngineException(exceptionMessage("058", "Could not fetch the database schema version."), cause);
  }

  public void failedTofetchVariableValue(Throwable cause) {
    logDebug("059", "Could not fetch value for variable.", cause);
  }

  public ProcessEngineException historicDecisionInputInstancesNotFetchedException() {
    return new ProcessEngineException(exceptionMessage(
      "060",
      "The input instances for the historic decision instance are not fetched. You must call 'includeInputs()' on the query to enable fetching."
    ));
  }

  public ProcessEngineException historicDecisionOutputInstancesNotFetchedException() {
    return new ProcessEngineException(exceptionMessage(
      "061",
      "The output instances for the historic decision instance are not fetched. You must call 'includeOutputs()' on the query to enable fetching."
    ));
  }

}
/* * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.workdocs.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.protocol.StructuredPojo; import com.amazonaws.protocol.ProtocolMarshaller; /** * <p> * Describes the users or user groups. * </p> * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/workdocs-2016-05-01/Participants" target="_top">AWS API * Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class Participants implements Serializable, Cloneable, StructuredPojo { /** * <p> * The list of users. * </p> */ private java.util.List<UserMetadata> users; /** * <p> * The list of user groups. * </p> */ private java.util.List<GroupMetadata> groups; /** * <p> * The list of users. * </p> * * @return The list of users. */ public java.util.List<UserMetadata> getUsers() { return users; } /** * <p> * The list of users. * </p> * * @param users * The list of users. */ public void setUsers(java.util.Collection<UserMetadata> users) { if (users == null) { this.users = null; return; } this.users = new java.util.ArrayList<UserMetadata>(users); } /** * <p> * The list of users. * </p> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if any). Use * {@link #setUsers(java.util.Collection)} or {@link #withUsers(java.util.Collection)} if you want to override the * existing values. * </p> * * @param users * The list of users. 
* @return Returns a reference to this object so that method calls can be chained together. */ public Participants withUsers(UserMetadata... users) { if (this.users == null) { setUsers(new java.util.ArrayList<UserMetadata>(users.length)); } for (UserMetadata ele : users) { this.users.add(ele); } return this; } /** * <p> * The list of users. * </p> * * @param users * The list of users. * @return Returns a reference to this object so that method calls can be chained together. */ public Participants withUsers(java.util.Collection<UserMetadata> users) { setUsers(users); return this; } /** * <p> * The list of user groups. * </p> * * @return The list of user groups. */ public java.util.List<GroupMetadata> getGroups() { return groups; } /** * <p> * The list of user groups. * </p> * * @param groups * The list of user groups. */ public void setGroups(java.util.Collection<GroupMetadata> groups) { if (groups == null) { this.groups = null; return; } this.groups = new java.util.ArrayList<GroupMetadata>(groups); } /** * <p> * The list of user groups. * </p> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if any). Use * {@link #setGroups(java.util.Collection)} or {@link #withGroups(java.util.Collection)} if you want to override the * existing values. * </p> * * @param groups * The list of user groups. * @return Returns a reference to this object so that method calls can be chained together. */ public Participants withGroups(GroupMetadata... groups) { if (this.groups == null) { setGroups(new java.util.ArrayList<GroupMetadata>(groups.length)); } for (GroupMetadata ele : groups) { this.groups.add(ele); } return this; } /** * <p> * The list of user groups. * </p> * * @param groups * The list of user groups. * @return Returns a reference to this object so that method calls can be chained together. 
*/ public Participants withGroups(java.util.Collection<GroupMetadata> groups) { setGroups(groups); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getUsers() != null) sb.append("Users: ").append(getUsers()).append(","); if (getGroups() != null) sb.append("Groups: ").append(getGroups()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof Participants == false) return false; Participants other = (Participants) obj; if (other.getUsers() == null ^ this.getUsers() == null) return false; if (other.getUsers() != null && other.getUsers().equals(this.getUsers()) == false) return false; if (other.getGroups() == null ^ this.getGroups() == null) return false; if (other.getGroups() != null && other.getGroups().equals(this.getGroups()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getUsers() == null) ? 0 : getUsers().hashCode()); hashCode = prime * hashCode + ((getGroups() == null) ? 0 : getGroups().hashCode()); return hashCode; } @Override public Participants clone() { try { return (Participants) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } @com.amazonaws.annotation.SdkInternalApi @Override public void marshall(ProtocolMarshaller protocolMarshaller) { com.amazonaws.services.workdocs.model.transform.ParticipantsMarshaller.getInstance().marshall(this, protocolMarshaller); } }
package com.algorelpublic.zambia.fragments; import android.os.Bundle; import android.support.annotation.Nullable; import android.support.v4.app.FragmentManager; import android.support.v7.app.AppCompatActivity; import android.support.v7.widget.GridLayoutManager; import android.support.v7.widget.RecyclerView; import android.text.Html; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import com.algorelpublic.zambia.R; import com.algorelpublic.zambia.Zambia; import com.algorelpublic.zambia.adapter.AdapterResult; import com.algorelpublic.zambia.model.SearchResultModel; import com.algorelpublic.zambia.utils.Constants; import com.google.gson.Gson; import java.util.ArrayList; import static android.widget.LinearLayout.VERTICAL; import static com.algorelpublic.zambia.fragments.AdvanceSearchStepsFragment.noOfPersons; import static com.algorelpublic.zambia.fragments.AdvanceSearchStepsFragment.queryList; import static com.algorelpublic.zambia.fragments.AdvanceSearchStepsFragment.selectionList; /** * Created by Adil Nazir on 16/07/2017. 
*/ public class SearchResultFragment extends BaseFragment implements View.OnClickListener { public static SearchResultFragment instance; private static ArrayList<ArrayList<String>> resultIdList; public ArrayList<SearchResultModel.Results> searchList = new ArrayList<>(); private View view; private SearchResultModel searchResultModel; private RecyclerView rvResults; private boolean isItemAdded = false; public static SearchResultFragment newInstance(ArrayList<ArrayList<String>> mList) { resultIdList = mList; instance = new SearchResultFragment(); return instance; } @Override public void onCreate(Bundle savedInstanceState) { try { setToolBar(); } catch (NullPointerException ex) { ex.printStackTrace(); } super.onCreate(savedInstanceState); } private void setToolBar() throws NullPointerException { AppCompatActivity appCompatActivity = (AppCompatActivity) getActivity(); appCompatActivity.getSupportActionBar().setTitle(Html.fromHtml("<font color='#ffffff'>Results</font>")); } @Nullable @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { view = inflater.inflate(R.layout.fragment_result, container, false); init(); addListener(); Gson gson = new Gson(); searchResultModel = gson.fromJson(Zambia.db.getString(Constants.RESPONSE_GSON_SEARCH_RESULT), SearchResultModel.class); getResult(); return view; } private void init() { rvResults = (RecyclerView) view.findViewById(R.id.rvResults); } private void addListener() { // btnForgotPassword.setOnClickListener(this); } @Override public void onClick(View v) { switch (v.getId()) { } } public void getResult() { if (searchList.size() > 0) searchList.clear(); for (int i = 0; i < resultIdList.size(); i++) { searchResult(resultIdList.get(i), i); } setRecyclerAdapter(); } private void searchResult(ArrayList<String> personDataList, int p) { switch (personDataList.size()) { case 0: break; case 1: for (int i = 0; i < searchResultModel.resultsList.size(); i++) { if 
(searchResultModel.resultsList.get(i).step_1_id.equalsIgnoreCase(personDataList.get(0))) { searchList.add(searchResultModel.resultsList.get(i)); isItemAdded = true; break; } } if (isItemAdded) { isItemAdded = false; } else { searchList.add(null); isItemAdded = false; } break; case 2: for (int i = 0; i < searchResultModel.resultsList.size(); i++) { if (searchResultModel.resultsList.get(i).step_1_id.equalsIgnoreCase(personDataList.get(0)) && searchResultModel.resultsList.get(i).step_2_id.equalsIgnoreCase(personDataList.get(1))) { searchList.add(searchResultModel.resultsList.get(i)); isItemAdded = true; break; } } if (isItemAdded) { isItemAdded = false; } else { searchList.add(null); isItemAdded = false; } break; case 3: for (int i = 0; i < searchResultModel.resultsList.size(); i++) { if (searchResultModel.resultsList.get(i).step_1_id.equalsIgnoreCase(personDataList.get(0)) && searchResultModel.resultsList.get(i).step_2_id.equalsIgnoreCase(personDataList.get(1)) && searchResultModel.resultsList.get(i).step_3_id.equalsIgnoreCase(personDataList.get(2))) { searchList.add(searchResultModel.resultsList.get(i)); isItemAdded = true; break; } } if (isItemAdded) { isItemAdded = false; } else { searchList.add(null); isItemAdded = false; } break; case 4: try{ for (int i = 0; i < searchResultModel.resultsList.size(); i++) { if (searchResultModel.resultsList.get(i).step_1_id.equalsIgnoreCase(personDataList.get(0)) && searchResultModel.resultsList.get(i).step_2_id.equalsIgnoreCase(personDataList.get(1)) && searchResultModel.resultsList.get(i).step_3_id.equalsIgnoreCase(personDataList.get(2)) && searchResultModel.resultsList.get(i).step_4_id.equalsIgnoreCase(personDataList.get(3))) { searchList.add(searchResultModel.resultsList.get(i)); isItemAdded = true; break; } } }catch (NullPointerException e){ searchList.add(null); } if (isItemAdded) { isItemAdded = false; } else { searchList.add(null); isItemAdded = false; } break; case 5: try{ for (int i = 0; i < 
searchResultModel.resultsList.size(); i++) { if (searchResultModel.resultsList.get(i).step_1_id.equalsIgnoreCase(personDataList.get(0)) && searchResultModel.resultsList.get(i).step_2_id.equalsIgnoreCase(personDataList.get(1)) && searchResultModel.resultsList.get(i).step_3_id.equalsIgnoreCase(personDataList.get(2)) && searchResultModel.resultsList.get(i).step_4_id.equalsIgnoreCase(personDataList.get(3)) && searchResultModel.resultsList.get(i).step_5_id.equalsIgnoreCase(personDataList.get(4))) { searchList.add(searchResultModel.resultsList.get(i)); isItemAdded = true; break; } } }catch (NullPointerException e){ searchList.add(null); } if (isItemAdded) { isItemAdded = false; } else { searchList.add(null); isItemAdded = false; } break; case 6: try{ for (int i = 0; i < searchResultModel.resultsList.size(); i++) { if (searchResultModel.resultsList.get(i).step_1_id.equalsIgnoreCase(personDataList.get(0)) && searchResultModel.resultsList.get(i).step_2_id.equalsIgnoreCase(personDataList.get(1)) && searchResultModel.resultsList.get(i).step_3_id.equalsIgnoreCase(personDataList.get(2)) && searchResultModel.resultsList.get(i).step_4_id.equalsIgnoreCase(personDataList.get(3)) && searchResultModel.resultsList.get(i).step_5_id.equalsIgnoreCase(personDataList.get(4)) && searchResultModel.resultsList.get(i).step_6_id.equalsIgnoreCase(personDataList.get(5))) { searchList.add(searchResultModel.resultsList.get(i)); isItemAdded = true; break; } } }catch (NullPointerException e){ searchList.add(null); } if (isItemAdded) { isItemAdded = false; } else { searchList.add(null); isItemAdded = false; } break; case 7: try{ for (int i = 0; i < searchResultModel.resultsList.size(); i++) { if (searchResultModel.resultsList.get(i).step_1_id.equalsIgnoreCase(personDataList.get(0)) && searchResultModel.resultsList.get(i).step_2_id.equalsIgnoreCase(personDataList.get(1)) && searchResultModel.resultsList.get(i).step_3_id.equalsIgnoreCase(personDataList.get(2)) && 
searchResultModel.resultsList.get(i).step_4_id.equalsIgnoreCase(personDataList.get(3)) && searchResultModel.resultsList.get(i).step_5_id.equalsIgnoreCase(personDataList.get(4)) && searchResultModel.resultsList.get(i).step_6_id.equalsIgnoreCase(personDataList.get(5)) && searchResultModel.resultsList.get(i).step_7_id.equalsIgnoreCase(personDataList.get(6))) { searchList.add(searchResultModel.resultsList.get(i)); isItemAdded = true; break; } } }catch (NullPointerException e){ searchList.add(null); } if (isItemAdded) { isItemAdded = false; } else { searchList.add(null); isItemAdded = false; } break; case 8: try{ for (int i = 0; i < searchResultModel.resultsList.size(); i++) { if (searchResultModel.resultsList.get(i).step_1_id.equalsIgnoreCase(personDataList.get(0)) && searchResultModel.resultsList.get(i).step_2_id.equalsIgnoreCase(personDataList.get(1)) && searchResultModel.resultsList.get(i).step_3_id.equalsIgnoreCase(personDataList.get(2)) && searchResultModel.resultsList.get(i).step_4_id.equalsIgnoreCase(personDataList.get(3)) && searchResultModel.resultsList.get(i).step_5_id.equalsIgnoreCase(personDataList.get(4)) && searchResultModel.resultsList.get(i).step_6_id.equalsIgnoreCase(personDataList.get(5)) && searchResultModel.resultsList.get(i).step_7_id.equalsIgnoreCase(personDataList.get(6)) && searchResultModel.resultsList.get(i).step_8_id.equalsIgnoreCase(personDataList.get(7))) { searchList.add(searchResultModel.resultsList.get(i)); isItemAdded = true; break; } } }catch (NullPointerException e){ searchList.add(null); } if (isItemAdded) { isItemAdded = false; } else { searchList.add(null); isItemAdded = false; } break; case 9: try{ for (int i = 0; i < searchResultModel.resultsList.size(); i++) { if (searchResultModel.resultsList.get(i).step_1_id.equalsIgnoreCase(personDataList.get(0)) && searchResultModel.resultsList.get(i).step_2_id.equalsIgnoreCase(personDataList.get(1)) && searchResultModel.resultsList.get(i).step_3_id.equalsIgnoreCase(personDataList.get(2)) && 
searchResultModel.resultsList.get(i).step_4_id.equalsIgnoreCase(personDataList.get(3)) && searchResultModel.resultsList.get(i).step_5_id.equalsIgnoreCase(personDataList.get(4)) && searchResultModel.resultsList.get(i).step_6_id.equalsIgnoreCase(personDataList.get(5)) && searchResultModel.resultsList.get(i).step_7_id.equalsIgnoreCase(personDataList.get(6)) && searchResultModel.resultsList.get(i).step_8_id.equalsIgnoreCase(personDataList.get(7)) && searchResultModel.resultsList.get(i).step_9_id.equalsIgnoreCase(personDataList.get(8))) { searchList.add(searchResultModel.resultsList.get(i)); isItemAdded = true; break; } } }catch (NullPointerException e){ searchList.add(null); } if (isItemAdded) { isItemAdded = false; } else { searchList.add(null); isItemAdded = false; } break; } } public void setRecyclerAdapter() { rvResults.setLayoutManager(new GridLayoutManager(getActivity(), 1, VERTICAL, false)); rvResults.setHasFixedSize(true); rvResults.setAdapter(new AdapterResult(getActivity(), searchList)); } public void allowBackPressed(){ noOfPersons = 0; selectionList.clear(); queryList.clear(); FragmentManager fm = getActivity() .getSupportFragmentManager(); fm.popBackStack ("AdvanceSearchFragment", FragmentManager.POP_BACK_STACK_INCLUSIVE); callFragmentWithReplace(R.id.container, AdvanceSearchFragment.newInstance(), "AdvanceSearchFragment"); } }
/* * Copyright 2020 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/analytics/admin/v1alpha/analytics_admin.proto package com.google.analytics.admin.v1alpha; /** * * * <pre> * Request message for ListDisplayVideo360AdvertiserLinkProposals RPC. * </pre> * * Protobuf type {@code * google.analytics.admin.v1alpha.ListDisplayVideo360AdvertiserLinkProposalsRequest} */ public final class ListDisplayVideo360AdvertiserLinkProposalsRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.analytics.admin.v1alpha.ListDisplayVideo360AdvertiserLinkProposalsRequest) ListDisplayVideo360AdvertiserLinkProposalsRequestOrBuilder { private static final long serialVersionUID = 0L; // Use ListDisplayVideo360AdvertiserLinkProposalsRequest.newBuilder() to construct. 
private ListDisplayVideo360AdvertiserLinkProposalsRequest( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListDisplayVideo360AdvertiserLinkProposalsRequest() { parent_ = ""; pageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListDisplayVideo360AdvertiserLinkProposalsRequest(); } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ListDisplayVideo360AdvertiserLinkProposalsRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { java.lang.String s = input.readStringRequireUtf8(); parent_ = s; break; } case 16: { pageSize_ = input.readInt32(); break; } case 26: { java.lang.String s = input.readStringRequireUtf8(); pageToken_ = s; break; } default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.analytics.admin.v1alpha.AnalyticsAdminProto .internal_static_google_analytics_admin_v1alpha_ListDisplayVideo360AdvertiserLinkProposalsRequest_descriptor; } @java.lang.Override 
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.analytics.admin.v1alpha.AnalyticsAdminProto .internal_static_google_analytics_admin_v1alpha_ListDisplayVideo360AdvertiserLinkProposalsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.analytics.admin.v1alpha.ListDisplayVideo360AdvertiserLinkProposalsRequest .class, com.google.analytics.admin.v1alpha.ListDisplayVideo360AdvertiserLinkProposalsRequest .Builder.class); } public static final int PARENT_FIELD_NUMBER = 1; private volatile java.lang.Object parent_; /** * * * <pre> * Required. Example format: properties/1234 * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. Example format: properties/1234 * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PAGE_SIZE_FIELD_NUMBER = 2; private int pageSize_; /** * * * <pre> * The maximum number of resources to return. * If unspecified, at most 50 resources will be returned. * The maximum value is 200 (higher values will be coerced to the maximum). 
* </pre> * * <code>int32 page_size = 2;</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } public static final int PAGE_TOKEN_FIELD_NUMBER = 3; private volatile java.lang.Object pageToken_; /** * * * <pre> * A page token, received from a previous * `ListDisplayVideo360AdvertiserLinkProposals` call. Provide this to retrieve * the subsequent page. * When paginating, all other parameters provided to * `ListDisplayVideo360AdvertiserLinkProposals` must match the call that * provided the page token. * </pre> * * <code>string page_token = 3;</code> * * @return The pageToken. */ @java.lang.Override public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } } /** * * * <pre> * A page token, received from a previous * `ListDisplayVideo360AdvertiserLinkProposals` call. Provide this to retrieve * the subsequent page. * When paginating, all other parameters provided to * `ListDisplayVideo360AdvertiserLinkProposals` must match the call that * provided the page token. * </pre> * * <code>string page_token = 3;</code> * * @return The bytes for pageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (pageSize_ != 0) { output.writeInt32(2, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (pageSize_ != 0) { size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.analytics.admin.v1alpha.ListDisplayVideo360AdvertiserLinkProposalsRequest)) { return super.equals(obj); } 
com.google.analytics.admin.v1alpha.ListDisplayVideo360AdvertiserLinkProposalsRequest other = (com.google.analytics.admin.v1alpha.ListDisplayVideo360AdvertiserLinkProposalsRequest) obj; if (!getParent().equals(other.getParent())) return false; if (getPageSize() != other.getPageSize()) return false; if (!getPageToken().equals(other.getPageToken())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER; hash = (53 * hash) + getPageSize(); hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getPageToken().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.google.analytics.admin.v1alpha.ListDisplayVideo360AdvertiserLinkProposalsRequest parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.analytics.admin.v1alpha.ListDisplayVideo360AdvertiserLinkProposalsRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.analytics.admin.v1alpha.ListDisplayVideo360AdvertiserLinkProposalsRequest parseFrom(com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.analytics.admin.v1alpha.ListDisplayVideo360AdvertiserLinkProposalsRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { 
return PARSER.parseFrom(data, extensionRegistry); } public static com.google.analytics.admin.v1alpha.ListDisplayVideo360AdvertiserLinkProposalsRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.analytics.admin.v1alpha.ListDisplayVideo360AdvertiserLinkProposalsRequest parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.analytics.admin.v1alpha.ListDisplayVideo360AdvertiserLinkProposalsRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.analytics.admin.v1alpha.ListDisplayVideo360AdvertiserLinkProposalsRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.analytics.admin.v1alpha.ListDisplayVideo360AdvertiserLinkProposalsRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.analytics.admin.v1alpha.ListDisplayVideo360AdvertiserLinkProposalsRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.analytics.admin.v1alpha.ListDisplayVideo360AdvertiserLinkProposalsRequest parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.analytics.admin.v1alpha.ListDisplayVideo360AdvertiserLinkProposalsRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.analytics.admin.v1alpha.ListDisplayVideo360AdvertiserLinkProposalsRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for ListDisplayVideo360AdvertiserLinkProposals RPC. 
* </pre> * * Protobuf type {@code * google.analytics.admin.v1alpha.ListDisplayVideo360AdvertiserLinkProposalsRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.analytics.admin.v1alpha.ListDisplayVideo360AdvertiserLinkProposalsRequest) com.google.analytics.admin.v1alpha .ListDisplayVideo360AdvertiserLinkProposalsRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.analytics.admin.v1alpha.AnalyticsAdminProto .internal_static_google_analytics_admin_v1alpha_ListDisplayVideo360AdvertiserLinkProposalsRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.analytics.admin.v1alpha.AnalyticsAdminProto .internal_static_google_analytics_admin_v1alpha_ListDisplayVideo360AdvertiserLinkProposalsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.analytics.admin.v1alpha.ListDisplayVideo360AdvertiserLinkProposalsRequest .class, com.google.analytics.admin.v1alpha.ListDisplayVideo360AdvertiserLinkProposalsRequest .Builder.class); } // Construct using // com.google.analytics.admin.v1alpha.ListDisplayVideo360AdvertiserLinkProposalsRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} } @java.lang.Override public Builder clear() { super.clear(); parent_ = ""; pageSize_ = 0; pageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.analytics.admin.v1alpha.AnalyticsAdminProto 
.internal_static_google_analytics_admin_v1alpha_ListDisplayVideo360AdvertiserLinkProposalsRequest_descriptor; } @java.lang.Override public com.google.analytics.admin.v1alpha.ListDisplayVideo360AdvertiserLinkProposalsRequest getDefaultInstanceForType() { return com.google.analytics.admin.v1alpha.ListDisplayVideo360AdvertiserLinkProposalsRequest .getDefaultInstance(); } @java.lang.Override public com.google.analytics.admin.v1alpha.ListDisplayVideo360AdvertiserLinkProposalsRequest build() { com.google.analytics.admin.v1alpha.ListDisplayVideo360AdvertiserLinkProposalsRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.analytics.admin.v1alpha.ListDisplayVideo360AdvertiserLinkProposalsRequest buildPartial() { com.google.analytics.admin.v1alpha.ListDisplayVideo360AdvertiserLinkProposalsRequest result = new com.google.analytics.admin.v1alpha.ListDisplayVideo360AdvertiserLinkProposalsRequest( this); result.parent_ = parent_; result.pageSize_ = pageSize_; result.pageToken_ = pageToken_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return 
super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.analytics.admin.v1alpha.ListDisplayVideo360AdvertiserLinkProposalsRequest) { return mergeFrom( (com.google.analytics.admin.v1alpha.ListDisplayVideo360AdvertiserLinkProposalsRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.analytics.admin.v1alpha.ListDisplayVideo360AdvertiserLinkProposalsRequest other) { if (other == com.google.analytics.admin.v1alpha.ListDisplayVideo360AdvertiserLinkProposalsRequest .getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; onChanged(); } if (other.getPageSize() != 0) { setPageSize(other.getPageSize()); } if (!other.getPageToken().isEmpty()) { pageToken_ = other.pageToken_; onChanged(); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.analytics.admin.v1alpha.ListDisplayVideo360AdvertiserLinkProposalsRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.google.analytics.admin.v1alpha.ListDisplayVideo360AdvertiserLinkProposalsRequest) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private java.lang.Object parent_ = ""; /** * * * <pre> * Required. Example format: properties/1234 * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. 
*/ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. Example format: properties/1234 * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. Example format: properties/1234 * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; onChanged(); return this; } /** * * * <pre> * Required. Example format: properties/1234 * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); onChanged(); return this; } /** * * * <pre> * Required. Example format: properties/1234 * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. 
*/ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; onChanged(); return this; } private int pageSize_; /** * * * <pre> * The maximum number of resources to return. * If unspecified, at most 50 resources will be returned. * The maximum value is 200 (higher values will be coerced to the maximum). * </pre> * * <code>int32 page_size = 2;</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } /** * * * <pre> * The maximum number of resources to return. * If unspecified, at most 50 resources will be returned. * The maximum value is 200 (higher values will be coerced to the maximum). * </pre> * * <code>int32 page_size = 2;</code> * * @param value The pageSize to set. * @return This builder for chaining. */ public Builder setPageSize(int value) { pageSize_ = value; onChanged(); return this; } /** * * * <pre> * The maximum number of resources to return. * If unspecified, at most 50 resources will be returned. * The maximum value is 200 (higher values will be coerced to the maximum). * </pre> * * <code>int32 page_size = 2;</code> * * @return This builder for chaining. */ public Builder clearPageSize() { pageSize_ = 0; onChanged(); return this; } private java.lang.Object pageToken_ = ""; /** * * * <pre> * A page token, received from a previous * `ListDisplayVideo360AdvertiserLinkProposals` call. Provide this to retrieve * the subsequent page. * When paginating, all other parameters provided to * `ListDisplayVideo360AdvertiserLinkProposals` must match the call that * provided the page token. * </pre> * * <code>string page_token = 3;</code> * * @return The pageToken. 
*/ public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A page token, received from a previous * `ListDisplayVideo360AdvertiserLinkProposals` call. Provide this to retrieve * the subsequent page. * When paginating, all other parameters provided to * `ListDisplayVideo360AdvertiserLinkProposals` must match the call that * provided the page token. * </pre> * * <code>string page_token = 3;</code> * * @return The bytes for pageToken. */ public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A page token, received from a previous * `ListDisplayVideo360AdvertiserLinkProposals` call. Provide this to retrieve * the subsequent page. * When paginating, all other parameters provided to * `ListDisplayVideo360AdvertiserLinkProposals` must match the call that * provided the page token. * </pre> * * <code>string page_token = 3;</code> * * @param value The pageToken to set. * @return This builder for chaining. */ public Builder setPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } pageToken_ = value; onChanged(); return this; } /** * * * <pre> * A page token, received from a previous * `ListDisplayVideo360AdvertiserLinkProposals` call. Provide this to retrieve * the subsequent page. * When paginating, all other parameters provided to * `ListDisplayVideo360AdvertiserLinkProposals` must match the call that * provided the page token. 
* </pre> * * <code>string page_token = 3;</code> * * @return This builder for chaining. */ public Builder clearPageToken() { pageToken_ = getDefaultInstance().getPageToken(); onChanged(); return this; } /** * * * <pre> * A page token, received from a previous * `ListDisplayVideo360AdvertiserLinkProposals` call. Provide this to retrieve * the subsequent page. * When paginating, all other parameters provided to * `ListDisplayVideo360AdvertiserLinkProposals` must match the call that * provided the page token. * </pre> * * <code>string page_token = 3;</code> * * @param value The bytes for pageToken to set. * @return This builder for chaining. */ public Builder setPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); pageToken_ = value; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.analytics.admin.v1alpha.ListDisplayVideo360AdvertiserLinkProposalsRequest) } // @@protoc_insertion_point(class_scope:google.analytics.admin.v1alpha.ListDisplayVideo360AdvertiserLinkProposalsRequest) private static final com.google.analytics.admin.v1alpha .ListDisplayVideo360AdvertiserLinkProposalsRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.analytics.admin.v1alpha.ListDisplayVideo360AdvertiserLinkProposalsRequest(); } public static com.google.analytics.admin.v1alpha.ListDisplayVideo360AdvertiserLinkProposalsRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListDisplayVideo360AdvertiserLinkProposalsRequest> PARSER = new com.google.protobuf.AbstractParser< 
ListDisplayVideo360AdvertiserLinkProposalsRequest>() { @java.lang.Override public ListDisplayVideo360AdvertiserLinkProposalsRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new ListDisplayVideo360AdvertiserLinkProposalsRequest( input, extensionRegistry); } }; public static com.google.protobuf.Parser<ListDisplayVideo360AdvertiserLinkProposalsRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListDisplayVideo360AdvertiserLinkProposalsRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.analytics.admin.v1alpha.ListDisplayVideo360AdvertiserLinkProposalsRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
/*
 * Copyright 2010-2013 Ning, Inc.
 * Copyright 2014 Groupon, Inc
 * Copyright 2014 The Billing Project, LLC
 *
 * The Billing Project licenses this file to you under the Apache License, version 2.0
 * (the "License"); you may not use this file except in compliance with the
 * License.  You may obtain a copy of the License at:
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package org.killbill.billing.jaxrs;

import java.math.BigDecimal;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import java.util.UUID;

import javax.annotation.Nullable;

import org.killbill.billing.ErrorCode;
import org.killbill.billing.ObjectType;
import org.killbill.billing.client.KillBillClientException;
import org.killbill.billing.client.model.Account;
import org.killbill.billing.client.model.Accounts;
import org.killbill.billing.client.model.AuditLog;
import org.killbill.billing.client.model.CustomField;
import org.killbill.billing.client.model.InvoicePayments;
import org.killbill.billing.client.model.PaymentMethod;
import org.killbill.billing.client.model.PaymentMethodPluginDetail;
import org.killbill.billing.client.model.Tag;
import org.killbill.billing.util.api.AuditLevel;
import org.testng.Assert;
import org.testng.annotations.Test;

import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertTrue;
import static org.testng.Assert.fail;

/**
 * Integration tests for the account JAX-RS endpoints, exercised end-to-end through
 * {@code killBillClient}. Each test is a sequence of stateful remote calls, so the
 * statement order within a test is significant.
 */
public class TestAccount extends TestJaxrsBase {

    @Test(groups = "slow", description = "Verify no PII data is required")
    public void testEmptyAccount() throws Exception {
        // An account with no fields set at all must still be creatable;
        // the server is expected to populate external key, name and email.
        final Account emptyAccount = new Account();

        final Account account = killBillClient.createAccount(emptyAccount, createdBy, reason, comment);
        Assert.assertNotNull(account.getExternalKey());
        Assert.assertNotNull(account.getName());
        Assert.assertNotNull(account.getEmail());
    }

    @Test(groups = "slow", description = "Verify external key is unique")
    public void testUniqueExternalKey() throws Exception {
        // Verify the external key is not mandatory
        final Account inputWithNoExternalKey = getAccount(UUID.randomUUID().toString(), null, UUID.randomUUID().toString());
        Assert.assertNull(inputWithNoExternalKey.getExternalKey());

        final Account account = killBillClient.createAccount(inputWithNoExternalKey, createdBy, reason, comment);
        Assert.assertNotNull(account.getExternalKey());

        // Re-using an existing external key must be rejected with ACCOUNT_ALREADY_EXISTS
        final Account inputWithSameExternalKey = getAccount(UUID.randomUUID().toString(), account.getExternalKey(), UUID.randomUUID().toString());
        try {
            killBillClient.createAccount(inputWithSameExternalKey, createdBy, reason, comment);
            Assert.fail();
        } catch (final KillBillClientException e) {
            Assert.assertEquals(e.getBillingException().getCode(), (Integer) ErrorCode.ACCOUNT_ALREADY_EXISTS.getCode());
        }
    }

    @Test(groups = "slow", description = "Can create, retrieve, search and update accounts")
    public void testAccountOk() throws Exception {
        final Account input = createAccount();

        // Retrieves by external key
        final Account retrievedAccount = killBillClient.getAccount(input.getExternalKey());
        Assert.assertTrue(retrievedAccount.equals(input));

        // Try search endpoint
        searchAccount(input, retrievedAccount);

        // Update Account: every searchable field (name, email, company, ...) is changed,
        // which is why the search below expects no match on the original values.
        final Account newInput = new Account(input.getAccountId(),
                                             "zozo", 4, input.getExternalKey(), "rr@google.com", 18, "USD", null, "UTC",
                                             "bl1", "bh2", "", "", "ca", "San Francisco", "usa", "en", "415-255-2991",
                                             false, false, null, null);
        final Account updatedAccount = killBillClient.updateAccount(newInput, createdBy, reason, comment);
        Assert.assertTrue(updatedAccount.equals(newInput));

        // Try search endpoint — the original field values should no longer be found
        searchAccount(input, null);
    }

    @Test(groups = "slow", description = "Can retrieve the account balance")
    public void testAccountWithBalance() throws Exception {
        final Account accountJson = createAccountNoPMBundleAndSubscriptionAndWaitForFirstInvoice();

        // Second flag requests the balance; an invoiced, unpaid account must owe money
        final Account accountWithBalance = killBillClient.getAccount(accountJson.getAccountId(), true, false);
        final BigDecimal accountBalance = accountWithBalance.getAccountBalance();
        Assert.assertTrue(accountBalance.compareTo(BigDecimal.ZERO) > 0);
    }

    @Test(groups = "slow", description = "Cannot update a non-existent account")
    public void testUpdateNonExistentAccount() throws Exception {
        final Account input = getAccount();

        // The account was never created, so the update returns null rather than throwing
        Assert.assertNull(killBillClient.updateAccount(input, createdBy, reason, comment));
    }

    @Test(groups = "slow", description = "Cannot retrieve non-existent account")
    public void testAccountNonExistent() throws Exception {
        // Both the UUID and external-key lookup variants should return null
        Assert.assertNull(killBillClient.getAccount(UUID.randomUUID()));
        Assert.assertNull(killBillClient.getAccount(UUID.randomUUID().toString()));
    }

    @Test(groups = "slow", description = "Can CRUD payment methods")
    public void testAccountPaymentMethods() throws Exception {
        final Account accountJson = createAccount();
        assertNotNull(accountJson);

        // Create a first (default) credit-card payment method
        final PaymentMethodPluginDetail info = new PaymentMethodPluginDetail();
        info.setProperties(getPaymentMethodCCProperties());
        PaymentMethod paymentMethodJson = new PaymentMethod(null, UUID.randomUUID().toString(), accountJson.getAccountId(), true, PLUGIN_NAME, info);
        final PaymentMethod paymentMethodCC = killBillClient.createPaymentMethod(paymentMethodJson, createdBy, reason, comment);
        assertTrue(paymentMethodCC.getIsDefault());

        //
        // Add another payment method
        //
        final PaymentMethodPluginDetail info2 = new PaymentMethodPluginDetail();
        info2.setProperties(getPaymentMethodPaypalProperties());
        paymentMethodJson = new PaymentMethod(null, UUID.randomUUID().toString(), accountJson.getAccountId(), false, PLUGIN_NAME, info2);
        final PaymentMethod paymentMethodPP = killBillClient.createPaymentMethod(paymentMethodJson, createdBy, reason, comment);
        assertFalse(paymentMethodPP.getIsDefault());

        //
        // FETCH ALL PAYMENT METHODS
        //
        List<PaymentMethod> paymentMethods = killBillClient.getPaymentMethodsForAccount(accountJson.getAccountId());
        assertEquals(paymentMethods.size(), 2);

        //
        // CHANGE DEFAULT
        //
        assertTrue(killBillClient.getPaymentMethod(paymentMethodCC.getPaymentMethodId()).getIsDefault());
        assertFalse(killBillClient.getPaymentMethod(paymentMethodPP.getPaymentMethodId()).getIsDefault());
        killBillClient.updateDefaultPaymentMethod(accountJson.getAccountId(), paymentMethodPP.getPaymentMethodId(), createdBy, reason, comment);
        assertTrue(killBillClient.getPaymentMethod(paymentMethodPP.getPaymentMethodId()).getIsDefault());
        assertFalse(killBillClient.getPaymentMethod(paymentMethodCC.getPaymentMethodId()).getIsDefault());

        //
        // DELETE NON DEFAULT PM
        //
        killBillClient.deletePaymentMethod(paymentMethodCC.getPaymentMethodId(), false, createdBy, reason, comment);

        //
        // FETCH ALL PAYMENT METHODS
        //
        paymentMethods = killBillClient.getPaymentMethodsForAccount(accountJson.getAccountId());
        assertEquals(paymentMethods.size(), 1);

        //
        // DELETE DEFAULT PAYMENT METHOD (without special flag first)
        //
        try {
            killBillClient.deletePaymentMethod(paymentMethodPP.getPaymentMethodId(), false, createdBy, reason, comment);
            fail();
        } catch (final KillBillClientException e) {
            // Expected: deleting the default payment method requires the force flag
        }

        //
        // RETRY TO DELETE DEFAULT PAYMENT METHOD (with special flag this time)
        //
        killBillClient.deletePaymentMethod(paymentMethodPP.getPaymentMethodId(), true, createdBy, reason, comment);

        // CHECK ACCOUNT IS NOW AUTO_PAY_OFF
        final List<Tag> tagsJson = killBillClient.getAccountTags(accountJson.getAccountId());
        Assert.assertEquals(tagsJson.size(), 1);
        final Tag tagJson = tagsJson.get(0);
        Assert.assertEquals(tagJson.getTagDefinitionName(), "AUTO_PAY_OFF");
        Assert.assertEquals(tagJson.getTagDefinitionId(), new UUID(0, 1));

        // FETCH ACCOUNT AGAIN AND CHECK THERE IS NO DEFAULT PAYMENT METHOD SET
        final Account updatedAccount = killBillClient.getAccount(accountJson.getAccountId());
        Assert.assertEquals(updatedAccount.getAccountId(), accountJson.getAccountId());
        Assert.assertNull(updatedAccount.getPaymentMethodId());

        //
        // FINALLY TRY TO REMOVE AUTO_PAY_OFF WITH NO DEFAULT PAYMENT METHOD ON ACCOUNT
        //
        try {
            killBillClient.deleteAccountTag(accountJson.getAccountId(), new UUID(0, 1), createdBy, reason, comment);
        } catch (final KillBillClientException e) {
            // NOTE(review): exception is silently ignored and nothing is asserted here —
            // presumably removing AUTO_PAY_OFF without a default PM is expected to fail; confirm intent.
        }
    }

    @Test(groups = "slow")
    public void testAccountPaymentsWithRefund() throws Exception {
        final Account accountJson = createAccountWithPMBundleAndSubscriptionAndWaitForFirstInvoice();

        // Verify payments: the first invoice must have produced exactly one payment
        final InvoicePayments objFromJson = killBillClient.getInvoicePaymentsForAccount(accountJson.getAccountId());
        Assert.assertEquals(objFromJson.size(), 1);
    }

    @Test(groups = "slow", description = "Add tags to account")
    public void testTags() throws Exception {
        final Account input = createAccount();
        // Use tag definition for AUTO_PAY_OFF
        final UUID autoPayOffId = new UUID(0, 1);

        // Add a tag
        killBillClient.createAccountTag(input.getAccountId(), autoPayOffId, createdBy, reason, comment);

        // Retrieves all tags
        final List<Tag> tags1 = killBillClient.getAccountTags(input.getAccountId(), AuditLevel.FULL);
        Assert.assertEquals(tags1.size(), 1);
        Assert.assertEquals(tags1.get(0).getTagDefinitionId(), autoPayOffId);

        // Verify adding the same tag a second time doesn't do anything
        killBillClient.createAccountTag(input.getAccountId(), autoPayOffId, createdBy, reason, comment);

        // Retrieves all tags again
        // NOTE(review): this third createAccountTag call looks like a copy-paste duplicate
        // of the idempotency check above — verify whether it is intentional.
        killBillClient.createAccountTag(input.getAccountId(), autoPayOffId, createdBy, reason, comment);
        final List<Tag> tags2 = killBillClient.getAccountTags(input.getAccountId(), AuditLevel.FULL);
        Assert.assertEquals(tags2, tags1);

        // Verify audit logs: a single INSERT entry carrying the caller-supplied metadata
        Assert.assertEquals(tags2.get(0).getAuditLogs().size(), 1);
        final AuditLog auditLogJson = tags2.get(0).getAuditLogs().get(0);
        Assert.assertEquals(auditLogJson.getChangeType(), "INSERT");
        Assert.assertEquals(auditLogJson.getChangedBy(), createdBy);
        Assert.assertEquals(auditLogJson.getReasonCode(), reason);
        Assert.assertEquals(auditLogJson.getComments(), comment);
        Assert.assertNotNull(auditLogJson.getChangeDate());
        Assert.assertNotNull(auditLogJson.getUserToken());
    }

    @Test(groups = "slow", description = "Add custom fields to account")
    public void testCustomFields() throws Exception {
        final Account accountJson = createAccount();
        assertNotNull(accountJson);

        // Create three custom fields in one call
        final Collection<CustomField> customFields = new LinkedList<CustomField>();
        customFields.add(new CustomField(null, accountJson.getAccountId(), ObjectType.ACCOUNT, "1", "value1", null));
        customFields.add(new CustomField(null, accountJson.getAccountId(), ObjectType.ACCOUNT, "2", "value2", null));
        customFields.add(new CustomField(null, accountJson.getAccountId(), ObjectType.ACCOUNT, "3", "value3", null));

        killBillClient.createAccountCustomFields(accountJson.getAccountId(), customFields, createdBy, reason, comment);

        final List<CustomField> accountCustomFields = killBillClient.getAccountCustomFields(accountJson.getAccountId());
        assertEquals(accountCustomFields.size(), 3);

        // Delete all custom fields for account
        killBillClient.deleteAccountCustomFields(accountJson.getAccountId(), createdBy, reason, comment);

        final List<CustomField> remainingCustomFields = killBillClient.getAccountCustomFields(accountJson.getAccountId());
        assertEquals(remainingCustomFields.size(), 0);
    }

    @Test(groups = "slow", description = "Can paginate through all accounts")
    public void testAccountsPagination() throws Exception {
        for (int i = 0; i < 5; i++) {
            createAccount();
        }

        final Accounts allAccounts = killBillClient.getAccounts();
        Assert.assertEquals(allAccounts.size(), 5);

        // Walk the pages one account at a time and check they line up with the full listing
        Accounts page = killBillClient.getAccounts(0L, 1L);
        for (int i = 0; i < 5; i++) {
            Assert.assertNotNull(page);
            Assert.assertEquals(page.size(), 1);
            Assert.assertEquals(page.get(0), allAccounts.get(i));
            page = page.getNext();
        }
        Assert.assertNull(page);
    }

    /**
     * Exercises the search endpoint for each searchable field of {@code input}.
     *
     * @param input  the account whose field values are used as search keys
     * @param output the account expected to be found, or null when no match is expected
     *               (the external-key search is asserted unconditionally since the key is never updated)
     */
    private void searchAccount(final Account input, @Nullable final Account output) throws Exception {
        // Search by id
        if (output != null) {
            doSearchAccount(input.getAccountId().toString(), output);
        }

        // Search by name
        doSearchAccount(input.getName(), output);

        // Search by email
        doSearchAccount(input.getEmail(), output);

        // Search by company name
        doSearchAccount(input.getCompany(), output);

        // Search by external key.
        // Note: we will always find a match since we don't update it
        final List<Account> accountsByExternalKey = killBillClient.searchAccounts(input.getExternalKey());
        Assert.assertEquals(accountsByExternalKey.size(), 1);
        Assert.assertEquals(accountsByExternalKey.get(0).getAccountId(), input.getAccountId());
        Assert.assertEquals(accountsByExternalKey.get(0).getExternalKey(), input.getExternalKey());
    }

    /**
     * Searches accounts by {@code key} and asserts either exactly one match equal to
     * {@code output}, or no match when {@code output} is null.
     */
    private void doSearchAccount(final String key, @Nullable final Account output) throws Exception {
        final List<Account> accountsByKey = killBillClient.searchAccounts(key);
        if (output == null) {
            Assert.assertEquals(accountsByKey.size(), 0);
        } else {
            Assert.assertEquals(accountsByKey.size(), 1);
            Assert.assertEquals(accountsByKey.get(0), output);
        }
    }
}
package org.basex.query.value.array;

import org.basex.query.util.fingertree.*;
import org.basex.query.value.*;
import org.basex.util.*;

/**
 * A builder for creating an {@link XQArray} by prepending and appending elements.
 *
 * Layout: elements near the ends of the array live in a fixed-size ring buffer
 * {@code vals}; the left digit occupies positions {@code mid - inLeft .. mid - 1}
 * (mod {@code CAP}) and the right digit {@code mid .. mid + inRight - 1}
 * (mod {@code CAP}). Overflow is pushed as leaf nodes into the finger-tree
 * builder {@code tree} that holds the middle of the array.
 *
 * @author BaseX Team 2005-20, BSD License
 * @author Leo Woerteler
 */
public final class ArrayBuilder {
  /** Capacity of the root. */
  private static final int CAP = 2 * XQArray.MAX_DIGIT;
  /** Size of inner nodes. */
  private static final int NODE_SIZE = (XQArray.MIN_LEAF + XQArray.MAX_LEAF + 1) / 2;

  /** Ring buffer containing the root-level elements. */
  private final Value[] vals = new Value[CAP];

  /** Number of elements in left digit. */
  private int inLeft;
  /** Middle between left and right digit in the buffer. */
  private int mid = CAP / 2;
  /** Number of elements in right digit. */
  private int inRight;
  /** Builder for the middle tree. */
  private final FingerTreeBuilder<Value> tree = new FingerTreeBuilder<>();

  /**
   * Adds an element to the start of the array.
   * @param elem element to add
   * @return self reference for convenience
   */
  public ArrayBuilder prepend(final Value elem) {
    if(inLeft < XQArray.MAX_DIGIT) {
      // just insert the element
      vals[(mid - inLeft + CAP - 1) % CAP] = elem;
      inLeft++;
    } else if(tree.isEmpty() && inRight < XQArray.MAX_DIGIT) {
      // move the middle to the left: frees one slot on the left by rotating
      // the boundary, so the element still fits into the ring buffer
      mid = (mid + CAP - 1) % CAP;
      vals[(mid - inLeft + CAP) % CAP] = elem;
      inRight++;
    } else {
      // push leaf node into the tree: the NODE_SIZE elements closest to the
      // middle of the left digit become a finger-tree leaf
      final Value[] leaf = new Value[NODE_SIZE];
      final int start = (mid - NODE_SIZE + CAP) % CAP;
      for(int i = 0; i < NODE_SIZE; i++) leaf[i] = vals[(start + i) % CAP];
      tree.prepend(new LeafNode(leaf));

      // move rest of the nodes to the right
      final int rest = inLeft - NODE_SIZE;
      final int p0 = (mid - inLeft + CAP) % CAP;
      for(int i = 0; i < rest; i++) {
        final int from = (p0 + i) % CAP, to = (from + NODE_SIZE) % CAP;
        vals[to] = vals[from];
      }

      // insert the element
      vals[(mid - rest + CAP - 1) % CAP] = elem;
      inLeft = rest + 1;
    }
    return this;
  }

  /**
   * Adds an element to the end of the array.
   * @param elem element to add
   * @return self reference for convenience
   */
  public ArrayBuilder append(final Value elem) {
    if(inRight < XQArray.MAX_DIGIT) {
      // just insert the element
      vals[(mid + inRight) % CAP] = elem;
      inRight++;
    } else if(tree.isEmpty() && inLeft < XQArray.MAX_DIGIT) {
      // move the middle to the right: mirror image of the case in prepend()
      mid = (mid + 1) % CAP;
      vals[(mid + inRight + CAP - 1) % CAP] = elem;
      inLeft++;
    } else {
      // push leaf node into the tree: the NODE_SIZE elements closest to the
      // middle of the right digit become a finger-tree leaf
      final Value[] leaf = new Value[NODE_SIZE];
      final int start = mid;
      for(int i = 0; i < NODE_SIZE; i++) leaf[i] = vals[(start + i) % CAP];
      tree.append(new LeafNode(leaf));

      // move rest of the nodes to the right
      final int rest = inRight - NODE_SIZE;
      for(int i = 0; i < rest; i++) {
        final int to = (mid + i) % CAP, from = (to + NODE_SIZE) % CAP;
        vals[to] = vals[from];
      }

      // insert the element
      vals[(mid + rest) % CAP] = elem;
      inRight = rest + 1;
    }
    return this;
  }

  /**
   * Appends the given array to this builder.
   * @param arr array to append
   * @return self reference for convenience
   */
  public ArrayBuilder append(final XQArray arr) {
    // simple case: the array is small, append its members one by one
    if(!(arr instanceof BigArray)) {
      for(final Value value : arr.members()) append(value);
      return this;
    }

    final BigArray big = (BigArray) arr;
    final Value[] ls = big.left, rs = big.right;
    final FingerTree<Value, Value> midTree = big.middle;
    if(midTree.isEmpty()) {
      // the big array has no middle tree: its digits fit element-wise
      for(final Value l : big.left) append(l);
      for(final Value r : big.right) append(r);
      return this;
    }

    // merge middle digits
    if(tree.isEmpty()) {
      // this builder has no middle tree yet: adopt the big array's tree and
      // re-insert the buffered elements around it.
      // copy the current ring-buffer contents (possibly wrapping) into temp
      final int k = inLeft + inRight;
      final Value[] temp = new Value[k];
      final int l = (mid - inLeft + CAP) % CAP, m = CAP - l;
      if(k <= m) {
        Array.copyToStart(vals, l, k, temp);
      } else {
        Array.copyToStart(vals, l, m, temp);
        Array.copyFromStart(vals, k - m, temp, m);
      }
      inLeft = inRight = 0;
      tree.append(midTree);
      // prepend in reverse order so the original ordering is preserved
      for(int i = ls.length; --i >= 0;) prepend(ls[i]);
      for(int i = k; --i >= 0;) prepend(temp[i]);
      for(final Value r : rs) append(r);
      return this;
    }

    // both sides have a middle tree: distribute this builder's right digit and
    // the big array's left digit over evenly sized leaves (at most MAX_LEAF each)
    final int inMiddle = inRight + big.left.length,
        leaves = (inMiddle + XQArray.MAX_LEAF - 1) / XQArray.MAX_LEAF,
        leafSize = (inMiddle + leaves - 1) / leaves;
    for(int i = 0, l = 0; l < leaves; l++) {
      final int inLeaf = Math.min(leafSize, inMiddle - i);
      final Value[] leaf = new Value[inLeaf];
      for(int p = 0; p < inLeaf; p++) {
        // first drain this builder's right digit, then the big array's left digit
        leaf[p] = i < inRight ? vals[(mid + i) % CAP] : big.left[i - inRight];
        i++;
      }
      tree.append(new LeafNode(leaf));
    }

    tree.append(big.middle);
    inRight = 0;
    for(final Value r : big.right) append(r);
    return this;
  }

  /**
   * Creates an {@link XQArray} containing the elements of this builder.
   * @return resulting array
   */
  public XQArray freeze() {
    final int n = inLeft + inRight;
    if(n == 0) return XQArray.empty();

    final int start = (mid - inLeft + CAP) % CAP;
    if(n <= XQArray.MAX_SMALL) {
      // small int array, fill directly
      final Value[] small = new Value[n];
      for(int i = 0; i < n; i++) small[i] = vals[(start + i) % CAP];
      return new SmallArray(small);
    }

    // deep array: split the buffer into left/right digits around the middle tree
    final int a = tree.isEmpty() ? n / 2 : inLeft, b = n - a;
    final Value[] ls = new Value[a], rs = new Value[b];
    for(int i = 0; i < a; i++) ls[i] = vals[(start + i) % CAP];
    for(int i = a; i < n; i++) rs[i - a] = vals[(start + i) % CAP];
    return new BigArray(ls, tree.freeze(), rs);
  }

  @Override
  public String toString() {
    final StringBuilder sb = new StringBuilder(Util.className(this)).append('[');
    if(tree.isEmpty()) {
      // all elements are in the ring buffer
      final int n = inLeft + inRight, first = (mid - inLeft + CAP) % CAP;
      if(n > 0) {
        sb.append(vals[first]);
        for(int i = 1; i < n; i++) sb.append(", ").append(vals[(first + i) % CAP]);
      }
    } else {
      // left digit, middle tree, right digit
      final int first = (mid - inLeft + CAP) % CAP;
      sb.append(vals[first]);
      for(int i = 1; i < inLeft; i++) sb.append(", ").append(vals[(first + i) % CAP]);
      for(final Value value : tree) sb.append(", ").append(value);
      for(int i = 0; i < inRight; i++) sb.append(", ").append(vals[(mid + i) % CAP]);
    }
    return sb.append(']').toString();
  }
}
/*
 * Copyright 2014 Open Networking Laboratory
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.onosproject.net.flow.instructions;

import org.onlab.packet.MacAddress;
import org.onlab.packet.MplsLabel;
import org.onlab.packet.VlanId;

import java.util.Objects;

import static com.google.common.base.MoreObjects.toStringHelper;

/**
 * Abstraction of a single traffic treatment step.
 */
public abstract class L2ModificationInstruction implements Instruction {

    /**
     * Represents the type of traffic treatment.
     */
    public enum L2SubType {
        /**
         * Ether src modification.
         */
        ETH_SRC,

        /**
         * Ether dst modification.
         */
        ETH_DST,

        /**
         * VLAN id modification.
         */
        VLAN_ID,

        /**
         * VLAN priority modification.
         */
        VLAN_PCP,

        /**
         * Strips the vlan.
         */
        STRIP_VLAN,

        /**
         * MPLS Label modification.
         */
        MPLS_LABEL,

        /**
         * MPLS Push modification.
         */
        MPLS_PUSH,

        /**
         * MPLS Pop modification.
         */
        MPLS_POP,

        /**
         * MPLS TTL modification.
         */
        DEC_MPLS_TTL,

        /**
         * VLAN Pop modification.
         */
        VLAN_POP,

        /**
         * VLAN Push modification.
         */
        VLAN_PUSH
    }

    // TODO: Create factory class 'Instructions' that will have various factory
    // to create specific instructions.

    /**
     * Returns the L2 subtype of this modification instruction.
     *
     * @return the subtype
     */
    public abstract L2SubType subtype();

    @Override
    public final Type type() {
        return Type.L2MODIFICATION;
    }

    /**
     * Represents a L2 src/dst modification instruction.
     */
    public static final class ModEtherInstruction extends L2ModificationInstruction {

        private final L2SubType subtype;
        private final MacAddress mac;

        // The subtype (ETH_SRC or ETH_DST) is supplied by the caller
        ModEtherInstruction(L2SubType subType, MacAddress addr) {
            this.subtype = subType;
            this.mac = addr;
        }

        @Override
        public L2SubType subtype() {
            return this.subtype;
        }

        /**
         * Returns the MAC address to set.
         *
         * @return the MAC address
         */
        public MacAddress mac() {
            return this.mac;
        }

        @Override
        public String toString() {
            return toStringHelper(subtype().toString())
                    .add("mac", mac).toString();
        }

        @Override
        public int hashCode() {
            return Objects.hash(type(), subtype, mac);
        }

        @Override
        public boolean equals(Object obj) {
            if (this == obj) {
                return true;
            }
            if (obj instanceof ModEtherInstruction) {
                ModEtherInstruction that = (ModEtherInstruction) obj;
                return Objects.equals(mac, that.mac) &&
                        Objects.equals(subtype, that.subtype);
            }
            return false;
        }
    }

    // TODO This instruction is reused for Pop-Mpls. Consider renaming.
    public static final class PushHeaderInstructions extends
            L2ModificationInstruction {

        private final L2SubType subtype;
        private final short ethernetType; // uint16_t

        PushHeaderInstructions(L2SubType subType, short ethernetType) {
            this.subtype = subType;
            this.ethernetType = ethernetType;
        }

        /**
         * Returns the Ethernet type as an unsigned 16-bit value.
         *
         * @return the Ethernet type
         */
        public int ethernetType() {
            return Short.toUnsignedInt(ethernetType);
        }

        @Override
        public L2SubType subtype() {
            return this.subtype;
        }

        @Override
        public String toString() {
            return toStringHelper(subtype().toString())
                    .add("ethernetType", String.format("0x%04x", ethernetType()))
                    .toString();
        }

        @Override
        public int hashCode() {
            return Objects.hash(type(), subtype, ethernetType);
        }

        @Override
        public boolean equals(Object obj) {
            if (this == obj) {
                return true;
            }
            if (obj instanceof PushHeaderInstructions) {
                PushHeaderInstructions that = (PushHeaderInstructions) obj;
                return Objects.equals(subtype, that.subtype) &&
                        Objects.equals(this.ethernetType, that.ethernetType);
            }
            return false;
        }
    }

    /**
     * Represents a VLAN id modification instruction.
     */
    public static final class ModVlanIdInstruction extends L2ModificationInstruction {

        private final VlanId vlanId;

        ModVlanIdInstruction(VlanId vlanId) {
            this.vlanId = vlanId;
        }

        @Override
        public L2SubType subtype() {
            return L2SubType.VLAN_ID;
        }

        /**
         * Returns the VLAN id to set.
         *
         * @return the VLAN id
         */
        public VlanId vlanId() {
            return this.vlanId;
        }

        @Override
        public String toString() {
            return toStringHelper(subtype().toString())
                    .add("id", vlanId).toString();
        }

        @Override
        public int hashCode() {
            return Objects.hash(type(), subtype(), vlanId);
        }

        @Override
        public boolean equals(Object obj) {
            if (this == obj) {
                return true;
            }
            if (obj instanceof ModVlanIdInstruction) {
                ModVlanIdInstruction that = (ModVlanIdInstruction) obj;
                return Objects.equals(vlanId, that.vlanId);
            }
            return false;
        }
    }

    /**
     * Represents a VLAN PCP modification instruction.
     */
    public static final class ModVlanPcpInstruction extends L2ModificationInstruction {

        private final Byte vlanPcp;

        ModVlanPcpInstruction(Byte vlanPcp) {
            this.vlanPcp = vlanPcp;
        }

        @Override
        public L2SubType subtype() {
            return L2SubType.VLAN_PCP;
        }

        /**
         * Returns the VLAN priority (PCP) to set.
         *
         * @return the VLAN priority
         */
        public Byte vlanPcp() {
            return this.vlanPcp;
        }

        @Override
        public String toString() {
            return toStringHelper(subtype().toString())
                    .add("pcp", Long.toHexString(vlanPcp)).toString();
        }

        @Override
        public int hashCode() {
            return Objects.hash(type(), subtype(), vlanPcp);
        }

        @Override
        public boolean equals(Object obj) {
            if (this == obj) {
                return true;
            }
            if (obj instanceof ModVlanPcpInstruction) {
                ModVlanPcpInstruction that = (ModVlanPcpInstruction) obj;
                return Objects.equals(vlanPcp, that.vlanPcp);
            }
            return false;
        }
    }

    /**
     * Represents a VLAN strip instruction; carries no parameters, so all
     * instances are equal to each other.
     */
    public static final class StripVlanInstruction extends L2ModificationInstruction {

        StripVlanInstruction() {}

        @Override
        public L2SubType subtype() {
            return L2SubType.STRIP_VLAN;
        }

        @Override
        public String toString() {
            return toStringHelper(subtype().toString())
                    .toString();
        }

        @Override
        public int hashCode() {
            return Objects.hash(type(), subtype());
        }

        @Override
        public boolean equals(Object obj) {
            if (this == obj) {
                return true;
            }
            if (obj instanceof StripVlanInstruction) {
                return true;
            }
            return false;
        }
    }

    /**
     * Represents a VLAN POP modification instruction.
     */
    public static final class PopVlanInstruction extends L2ModificationInstruction {

        private final L2SubType subtype;

        PopVlanInstruction(L2SubType subType) {
            this.subtype = subType;
        }

        @Override
        public L2SubType subtype() {
            return subtype;
        }

        @Override
        public String toString() {
            return toStringHelper(subtype().toString())
                    .toString();
        }

        @Override
        public int hashCode() {
            return Objects.hash(type(), subtype);
        }

        @Override
        public boolean equals(Object obj) {
            if (this == obj) {
                return true;
            }
            if (obj instanceof PopVlanInstruction) {
                PopVlanInstruction that = (PopVlanInstruction) obj;
                return Objects.equals(subtype, that.subtype);
            }
            return false;
        }
    }

    /**
     * Represents a MPLS label modification.
     */
    public static final class ModMplsLabelInstruction extends L2ModificationInstruction {

        private final MplsLabel mplsLabel;

        ModMplsLabelInstruction(MplsLabel mplsLabel) {
            this.mplsLabel = mplsLabel;
        }

        /**
         * Returns the MPLS label as an integer.
         *
         * @return the label value
         */
        public Integer label() {
            return mplsLabel.toInt();
        }

        @Override
        public L2SubType subtype() {
            return L2SubType.MPLS_LABEL;
        }

        @Override
        public String toString() {
            return toStringHelper(subtype().toString())
                    .add("mpls", mplsLabel).toString();
        }

        @Override
        public int hashCode() {
            return Objects.hash(type(), subtype(), mplsLabel);
        }

        @Override
        public boolean equals(Object obj) {
            if (this == obj) {
                return true;
            }
            if (obj instanceof ModMplsLabelInstruction) {
                ModMplsLabelInstruction that = (ModMplsLabelInstruction) obj;
                return Objects.equals(mplsLabel, that.mplsLabel);
            }
            return false;
        }
    }

    /**
     * Represents a MPLS TTL modification.
*/ public static final class ModMplsTtlInstruction extends L2ModificationInstruction { ModMplsTtlInstruction() { } @Override public L2SubType subtype() { return L2SubType.DEC_MPLS_TTL; } @Override public String toString() { return toStringHelper(subtype().toString()) .toString(); } @Override public int hashCode() { return Objects.hash(type(), subtype()); } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj instanceof ModMplsTtlInstruction) { return true; } return false; } } }
package com.brentvatne.exoplayer;

import android.annotation.SuppressLint;
import android.content.Context;
import android.media.AudioManager;
import android.net.Uri;
import android.os.Handler;
import android.os.Message;
import android.text.TextUtils;
import android.util.Log;
import android.widget.FrameLayout;

import com.brentvatne.react.R;
import com.brentvatne.receiver.AudioBecomingNoisyReceiver;
import com.brentvatne.receiver.BecomingNoisyListener;
import com.facebook.react.bridge.LifecycleEventListener;
import com.facebook.react.uimanager.ThemedReactContext;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.DefaultLoadControl;
import com.google.android.exoplayer2.ExoPlaybackException;
import com.google.android.exoplayer2.ExoPlayer;
import com.google.android.exoplayer2.ExoPlayerFactory;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.SimpleExoPlayer;
import com.google.android.exoplayer2.Timeline;
import com.google.android.exoplayer2.extractor.DefaultExtractorsFactory;
import com.google.android.exoplayer2.mediacodec.MediaCodecRenderer;
import com.google.android.exoplayer2.mediacodec.MediaCodecUtil;
import com.google.android.exoplayer2.source.ExtractorMediaSource;
import com.google.android.exoplayer2.source.LoopingMediaSource;
import com.google.android.exoplayer2.source.MediaSource;
import com.google.android.exoplayer2.source.TrackGroupArray;
import com.google.android.exoplayer2.source.dash.DashMediaSource;
import com.google.android.exoplayer2.source.dash.DefaultDashChunkSource;
import com.google.android.exoplayer2.source.hls.HlsMediaSource;
import com.google.android.exoplayer2.source.smoothstreaming.DefaultSsChunkSource;
import com.google.android.exoplayer2.source.smoothstreaming.SsMediaSource;
import com.google.android.exoplayer2.trackselection.AdaptiveVideoTrackSelection;
import com.google.android.exoplayer2.trackselection.DefaultTrackSelector;
import com.google.android.exoplayer2.trackselection.MappingTrackSelector;
import com.google.android.exoplayer2.trackselection.TrackSelection;
import com.google.android.exoplayer2.trackselection.TrackSelectionArray;
import com.google.android.exoplayer2.upstream.DataSource;
import com.google.android.exoplayer2.upstream.DefaultBandwidthMeter;
import com.google.android.exoplayer2.util.Util;

import java.net.CookieHandler;
import java.net.CookieManager;
import java.net.CookiePolicy;

/**
 * React Native view that hosts a {@link SimpleExoPlayer} and bridges its state
 * to JS via {@link VideoEventEmitter}. Owns the player lifecycle (created on
 * attach/resume, released on detach/destroy), audio focus, the
 * becoming-noisy receiver, and a Handler-driven progress ticker.
 */
@SuppressLint("ViewConstructor")
class ReactExoplayerView extends FrameLayout implements
        LifecycleEventListener,
        ExoPlayer.EventListener,
        BecomingNoisyListener,
        AudioManager.OnAudioFocusChangeListener {

    private static final String TAG = "ReactExoplayerView";

    private static final DefaultBandwidthMeter BANDWIDTH_METER = new DefaultBandwidthMeter();
    private static final CookieManager DEFAULT_COOKIE_MANAGER;
    // Message id for the periodic progress tick handled by progressHandler.
    private static final int SHOW_PROGRESS = 1;

    static {
        DEFAULT_COOKIE_MANAGER = new CookieManager();
        DEFAULT_COOKIE_MANAGER.setCookiePolicy(CookiePolicy.ACCEPT_ORIGINAL_SERVER);
    }

    private final VideoEventEmitter eventEmitter;

    private Handler mainHandler;
    private Timeline.Window window;
    private ExoPlayerView exoPlayerView;

    private DataSource.Factory mediaDataSourceFactory;
    private SimpleExoPlayer player;
    private MappingTrackSelector trackSelector;
    // True when a (new) media source must be prepared on the next initializePlayer().
    private boolean playerNeedsSource;
    private boolean shouldRestorePosition;
    private int playerWindow;
    private long playerPosition;
    private boolean loadVideoStarted;
    private boolean isPaused = true;
    private boolean isBuffering;
    private boolean isTimelineStatic;

    // Props from React
    private Uri srcUri;
    private String extension;
    private boolean repeat;
    private boolean disableFocus;
    // \ End props

    // React
    private final ThemedReactContext themedReactContext;
    private final AudioManager audioManager;
    private final AudioBecomingNoisyReceiver audioBecomingNoisyReceiver;

    // Emits progressChanged roughly once per second while the player is
    // READY and playing, re-scheduling itself aligned to second boundaries.
    private final Handler progressHandler = new Handler() {
        @Override
        public void handleMessage(Message msg) {
            switch (msg.what) {
                case SHOW_PROGRESS:
                    if (player != null
                            && player.getPlaybackState() == ExoPlayer.STATE_READY
                            && player.getPlayWhenReady()
                            ) {
                        long pos = player.getCurrentPosition();
                        eventEmitter.progressChanged(pos, player.getBufferedPercentage());
                        msg = obtainMessage(SHOW_PROGRESS);
                        sendMessageDelayed(msg, 1000 - (pos % 1000));
                    }
                    break;
            }
        }
    };

    public ReactExoplayerView(ThemedReactContext context) {
        super(context);
        createViews();
        this.eventEmitter = new VideoEventEmitter(context);
        this.themedReactContext = context;
        audioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
        themedReactContext.addLifecycleEventListener(this);
        audioBecomingNoisyReceiver = new AudioBecomingNoisyReceiver(themedReactContext);
    }

    @Override
    public void setId(int id) {
        super.setId(id);
        // Events emitted to JS are routed by view id.
        eventEmitter.setViewId(id);
    }

    // Builds the child ExoPlayerView and installs the shared cookie manager.
    private void createViews() {
        mediaDataSourceFactory = buildDataSourceFactory(true);
        mainHandler = new Handler();
        window = new Timeline.Window();
        if (CookieHandler.getDefault() != DEFAULT_COOKIE_MANAGER) {
            CookieHandler.setDefault(DEFAULT_COOKIE_MANAGER);
        }

        LayoutParams layoutParams = new LayoutParams(
                LayoutParams.MATCH_PARENT,
                LayoutParams.MATCH_PARENT);
        exoPlayerView = new ExoPlayerView(getContext());
        exoPlayerView.setLayoutParams(layoutParams);

        addView(exoPlayerView, 0, layoutParams);
    }

    @Override
    protected void onAttachedToWindow() {
        super.onAttachedToWindow();
        initializePlayer();
    }

    @Override
    protected void onDetachedFromWindow() {
        super.onDetachedFromWindow();
        stopPlayback();
    }

    // LifecycleEventListener implementation

    @Override
    public void onHostResume() {
        startPlayback();
    }

    @Override
    public void onHostPause() {
        setPlayWhenReady(false);
    }

    @Override
    public void onHostDestroy() {
        stopPlayback();
    }

    public void cleanUpResources() {
        stopPlayback();
    }

    // Internal methods

    // Creates the player on first call and, whenever a source is pending,
    // builds and prepares the media source (wrapped for looping if repeat).
    private void initializePlayer() {
        if (player == null) {
            TrackSelection.Factory videoTrackSelectionFactory = new AdaptiveVideoTrackSelection.Factory(BANDWIDTH_METER);
            trackSelector = new DefaultTrackSelector(videoTrackSelectionFactory);
            player = ExoPlayerFactory.newSimpleInstance(getContext(), trackSelector, new DefaultLoadControl());
            player.addListener(this);
            exoPlayerView.setPlayer(player);
            if (isTimelineStatic) {
                // Restore the previous window/position saved by releasePlayer().
                if (playerPosition == C.TIME_UNSET) {
                    player.seekToDefaultPosition(playerWindow);
                } else {
                    player.seekTo(playerWindow, playerPosition);
                }
            }
            audioBecomingNoisyReceiver.setListener(this);
            setPlayWhenReady(!isPaused);
            playerNeedsSource = true;
        }
        if (playerNeedsSource && srcUri != null) {
            MediaSource mediaSource = buildMediaSource(srcUri, extension);
            mediaSource = repeat ? new LoopingMediaSource(mediaSource) : mediaSource;
            player.prepare(mediaSource, !shouldRestorePosition, true);
            playerNeedsSource = false;

            eventEmitter.loadStart();
            loadVideoStarted = true;
        }
    }

    // Picks SS/DASH/HLS/progressive based on the override extension (if any)
    // or the URI's last path segment.
    private MediaSource buildMediaSource(Uri uri, String overrideExtension) {
        int type = Util.inferContentType(!TextUtils.isEmpty(overrideExtension) ? "." + overrideExtension
                : uri.getLastPathSegment());
        switch (type) {
            case C.TYPE_SS:
                return new SsMediaSource(uri, buildDataSourceFactory(false),
                        new DefaultSsChunkSource.Factory(mediaDataSourceFactory), mainHandler, null);
            case C.TYPE_DASH:
                return new DashMediaSource(uri, buildDataSourceFactory(false),
                        new DefaultDashChunkSource.Factory(mediaDataSourceFactory), mainHandler, null);
            case C.TYPE_HLS:
                return new HlsMediaSource(uri, mediaDataSourceFactory, mainHandler, null);
            case C.TYPE_OTHER:
                return new ExtractorMediaSource(uri, mediaDataSourceFactory, new DefaultExtractorsFactory(),
                        mainHandler, null);
            default: {
                throw new IllegalStateException("Unsupported type: " + type);
            }
        }
    }

    // Saves play state and (if seekable) the current window/position, then
    // releases the player and stops the progress ticker and receivers.
    private void releasePlayer() {
        if (player != null) {
            isPaused = player.getPlayWhenReady();
            shouldRestorePosition = false;
            playerWindow = player.getCurrentWindowIndex();
            playerPosition = C.TIME_UNSET;
            Timeline timeline = player.getCurrentTimeline();
            if (!timeline.isEmpty() && timeline.getWindow(playerWindow, window).isSeekable) {
                playerPosition = player.getCurrentPosition();
            }
            player.release();
            player = null;
            trackSelector = null;
        }
        progressHandler.removeMessages(SHOW_PROGRESS);
        themedReactContext.removeLifecycleEventListener(this);
        audioBecomingNoisyReceiver.removeListener();
    }

    // Returns true when focus was granted, or when focus handling is disabled.
    private boolean requestAudioFocus() {
        if (disableFocus) {
            return true;
        }
        int result = audioManager.requestAudioFocus(this,
                AudioManager.STREAM_MUSIC,
                AudioManager.AUDIOFOCUS_GAIN);
        return result == AudioManager.AUDIOFOCUS_REQUEST_GRANTED;
    }

    // Starts playback only if audio focus is available; pausing never needs focus.
    private void setPlayWhenReady(boolean playWhenReady) {
        if (player == null) {
            return;
        }

        if (playWhenReady) {
            boolean hasAudioFocus = requestAudioFocus();
            if (hasAudioFocus) {
                player.setPlayWhenReady(true);
            }
        } else {
            player.setPlayWhenReady(false);
        }
    }

    // (Re)initializes the player when idle/ended, otherwise just resumes.
    private void startPlayback() {
        if (player != null) {
            switch (player.getPlaybackState()) {
                case ExoPlayer.STATE_IDLE:
                case ExoPlayer.STATE_ENDED:
                    initializePlayer();
                    break;
                case ExoPlayer.STATE_BUFFERING:
                case ExoPlayer.STATE_READY:
                    if (!player.getPlayWhenReady()) {
                        setPlayWhenReady(true);
                    }
                    break;
                default:
                    break;
            }

        } else {
            initializePlayer();
        }
        if (!disableFocus) {
            setKeepScreenOn(true);
        }
    }

    private void pausePlayback() {
        if (player != null) {
            if (player.getPlayWhenReady()) {
                setPlayWhenReady(false);
            }
        }
        setKeepScreenOn(false);
    }

    private void stopPlayback() {
        onStopPlayback();
        releasePlayer();
    }

    private void onStopPlayback() {
        setKeepScreenOn(false);
        audioManager.abandonAudioFocus(this);
    }

    /**
     * Returns a new DataSource factory.
     *
     * @param useBandwidthMeter Whether to set {@link #BANDWIDTH_METER} as a listener to the new
     *     DataSource factory.
     * @return A new DataSource factory.
     */
    private DataSource.Factory buildDataSourceFactory(boolean useBandwidthMeter) {
        return DataSourceUtil.getDefaultDataSourceFactory(getContext(), useBandwidthMeter ? BANDWIDTH_METER : null);
    }

    // AudioManager.OnAudioFocusChangeListener implementation

    @Override
    public void onAudioFocusChange(int focusChange) {
        switch (focusChange) {
            case AudioManager.AUDIOFOCUS_LOSS:
                eventEmitter.audioFocusChanged(false);
                break;
            case AudioManager.AUDIOFOCUS_GAIN:
                eventEmitter.audioFocusChanged(true);
                break;
            default:
                break;
        }

        if (player != null) {
            if (focusChange == AudioManager.AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK) {
                // Lower the volume
                player.setVolume(0.8f);
            } else if (focusChange == AudioManager.AUDIOFOCUS_GAIN) {
                // Raise it back to normal
                player.setVolume(1);
            }
        }
    }

    // AudioBecomingNoisyListener implementation

    @Override
    public void onAudioBecomingNoisy() {
        eventEmitter.audioBecomingNoisy();
    }

    // ExoPlayer.EventListener implementation

    @Override
    public void onLoadingChanged(boolean isLoading) {
        // Do nothing.
    }

    // Maps ExoPlayer state transitions to JS events; READY additionally
    // kicks off the progress ticker and the one-shot load event.
    @Override
    public void onPlayerStateChanged(boolean playWhenReady, int playbackState) {
        String text = "onStateChanged: playWhenReady=" + playWhenReady + ", playbackState=";
        switch (playbackState) {
            case ExoPlayer.STATE_IDLE:
                text += "idle";
                eventEmitter.idle();
                break;
            case ExoPlayer.STATE_BUFFERING:
                text += "buffering";
                onBuffering(true);
                break;
            case ExoPlayer.STATE_READY:
                text += "ready";
                eventEmitter.ready();
                onBuffering(false);
                startProgressHandler();
                videoLoaded();
                break;
            case ExoPlayer.STATE_ENDED:
                text += "ended";
                eventEmitter.end();
                onStopPlayback();
                break;
            default:
                text += "unknown";
                break;
        }
        Log.d(TAG, text);
    }

    private void startProgressHandler() {
        progressHandler.sendEmptyMessage(SHOW_PROGRESS);
    }

    // Emits the load event exactly once per prepared source (guarded by
    // loadVideoStarted set in initializePlayer()).
    private void videoLoaded() {
        if (loadVideoStarted) {
            loadVideoStarted = false;
            Format videoFormat = player.getVideoFormat();
            int width = videoFormat != null ? videoFormat.width : 0;
            int height = videoFormat != null ? videoFormat.height : 0;
            eventEmitter.load(player.getDuration(), player.getCurrentPosition(), width, height);
        }
    }

    // De-bounces buffering notifications so JS only sees changes.
    private void onBuffering(boolean buffering) {
        if (isBuffering == buffering) {
            return;
        }

        isBuffering = buffering;
        if (buffering) {
            eventEmitter.buffering(true);
        } else {
            eventEmitter.buffering(false);
        }
    }

    @Override
    public void onPositionDiscontinuity() {
        // Do nothing.
    }

    @Override
    public void onTimelineChanged(Timeline timeline, Object manifest) {
        // Static (non-dynamic) timelines allow position restore on re-init.
        isTimelineStatic = !timeline.isEmpty()
                && !timeline.getWindow(timeline.getWindowCount() - 1, window).isDynamic;
    }

    @Override
    public void onTracksChanged(TrackGroupArray trackGroups, TrackSelectionArray trackSelections) {
        // Do Nothing.
    }

    // Translates decoder-initialization failures into user-facing resource
    // strings; any reported error also marks the source for re-preparation.
    @Override
    public void onPlayerError(ExoPlaybackException e) {
        String errorString = null;
        if (e.type == ExoPlaybackException.TYPE_RENDERER) {
            Exception cause = e.getRendererException();
            if (cause instanceof MediaCodecRenderer.DecoderInitializationException) {
                // Special case for decoder initialization failures.
                MediaCodecRenderer.DecoderInitializationException decoderInitializationException =
                        (MediaCodecRenderer.DecoderInitializationException) cause;
                if (decoderInitializationException.decoderName == null) {
                    if (decoderInitializationException.getCause() instanceof MediaCodecUtil.DecoderQueryException) {
                        errorString = getResources().getString(R.string.error_querying_decoders);
                    } else if (decoderInitializationException.secureDecoderRequired) {
                        errorString = getResources().getString(R.string.error_no_secure_decoder,
                                decoderInitializationException.mimeType);
                    } else {
                        errorString = getResources().getString(R.string.error_no_decoder,
                                decoderInitializationException.mimeType);
                    }
                } else {
                    errorString = getResources().getString(R.string.error_instantiating_decoder,
                            decoderInitializationException.decoderName);
                }
            }
        }
        if (errorString != null) {
            eventEmitter.error(errorString, e);
        }
        playerNeedsSource = true;
    }

    // ReactExoplayerViewManager public api

    public void setSrc(final Uri uri, final String extension) {
        if (uri != null) {
            this.srcUri = uri;
            this.extension = extension;
            this.mediaDataSourceFactory = DataSourceUtil.getDefaultDataSourceFactory(getContext(), BANDWIDTH_METER);
        }
    }

    public void setRawSrc(final Uri uri, final String extension) {
        if (uri != null) {
            this.srcUri = uri;
            this.extension = extension;
            this.mediaDataSourceFactory = DataSourceUtil.getRawDataSourceFactory(getContext());
        }
    }

    public void setResizeModeModifier(@ResizeMode.Mode int resizeMode) {
        exoPlayerView.setResizeMode(resizeMode);
    }

    public void setRepeatModifier(boolean repeat) {
        this.repeat = repeat;
    }

    public void setPausedModifier(boolean paused) {
        isPaused = paused;
        if (player != null) {
            if (!paused) {
                startPlayback();
            } else {
                pausePlayback();
            }
        }
    }

    public void setMutedModifier(boolean muted) {
        if (player != null) {
            player.setVolume(muted ? 0 : 1);
        }
    }

    public void setVolumeModifier(float volume) {
        if (player != null) {
            player.setVolume(volume);
        }
    }

    public void seekTo(long positionMs) {
        if (player != null) {
            eventEmitter.seek(player.getCurrentPosition(), positionMs);
            player.seekTo(positionMs);
        }
    }

    public void setRateModifier(float rate) {
        // TODO: waiting on ExoPlayer implementation
        // https://github.com/google/ExoPlayer/issues/26
    }

    public void setPlayInBackground(boolean playInBackground) {
        // TODO: implement
    }

    public void setDisableFocus(boolean disableFocus) {
        this.disableFocus = disableFocus;
    }
}
package uk.ac.ebi.subs.api.documentation;

import com.fasterxml.jackson.databind.ObjectMapper;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.data.rest.webmvc.RestMediaTypes;
import org.springframework.http.MediaType;
import org.springframework.restdocs.JUnitRestDocumentation;
import org.springframework.restdocs.mockmvc.MockMvcRestDocumentationConfigurer;
import org.springframework.security.test.context.support.WithMockUser;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.web.context.WebApplicationContext;
import uk.ac.ebi.subs.ApiApplication;
import uk.ac.ebi.subs.DocumentationProducer;
import uk.ac.ebi.subs.api.Helpers;
import uk.ac.ebi.subs.api.aap.TeamCreationService;
import uk.ac.ebi.subs.api.aap.TeamDto;
import uk.ac.ebi.subs.data.component.Team;
import uk.ac.ebi.tsc.aap.client.model.Domain;
import uk.ac.ebi.tsc.aap.client.model.Profile;
import uk.ac.ebi.tsc.aap.client.repo.DomainService;
import uk.ac.ebi.tsc.aap.client.repo.ProfileService;

import java.util.Arrays;

import static org.springframework.restdocs.hypermedia.HypermediaDocumentation.halLinks;
import static org.springframework.restdocs.hypermedia.HypermediaDocumentation.linkWithRel;
import static org.springframework.restdocs.hypermedia.HypermediaDocumentation.links;
import static org.springframework.restdocs.mockmvc.MockMvcRestDocumentation.document;
import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.get;
import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.post;
import static org.springframework.restdocs.operation.preprocess.Preprocessors.preprocessRequest;
import static org.springframework.restdocs.operation.preprocess.Preprocessors.preprocessResponse;
import static org.springframework.restdocs.operation.preprocess.Preprocessors.prettyPrint;
import static org.springframework.restdocs.payload.PayloadDocumentation.fieldWithPath;
import static org.springframework.restdocs.payload.PayloadDocumentation.responseFields;
import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import static uk.ac.ebi.subs.api.documentation.DocumentationHelper.addAuthTokenHeader;

/**
 * Spring REST Docs test that exercises the team endpoints and, as a side
 * effect, generates API documentation snippets ("create-team", "get-team",
 * "get-teams"). The AAP team/domain/profile services are mocked so no
 * external authentication service is required.
 */
@RunWith(SpringRunner.class)
@SpringBootTest(classes = ApiApplication.class)
@Category(DocumentationProducer.class)
@WithMockUser(username = "team_docs_usi_user", roles = {Helpers.TEAM_NAME})
public class TeamDocumentation {

    // Captures request/response pairs for snippet generation.
    @Rule
    public final JUnitRestDocumentation restDocumentation = DocumentationHelper.jUnitRestDocumentation();

    // Host/port/scheme rendered into the documented example URIs;
    // overridable via usi.docs.* properties.
    @Value("${usi.docs.hostname:localhost}")
    private String host;
    @Value("${usi.docs.port:8080}")
    private int port;
    @Value("${usi.docs.scheme:http}")
    private String scheme;

    @Autowired
    private WebApplicationContext context;

    private ObjectMapper objectMapper;
    private MockMvc mockMvc;

    @MockBean
    private TeamCreationService teamCreationService;

    @MockBean
    private DomainService domainService;

    @MockBean
    private ProfileService profileService;

    // Canned AAP domain/profile returned by the mocked services.
    private Domain fakeDomain;
    private Profile fakeProfile;

    @Before
    public void setUp() {
        MockMvcRestDocumentationConfigurer docConfig = DocumentationHelper.docConfig(restDocumentation, scheme, host, port);
        this.mockMvc = DocumentationHelper.mockMvc(this.context, docConfig);
        this.objectMapper = DocumentationHelper.mapper();

        fakeDomain = Domain.builder()
                .withDescription("An example team")
                .withName("subs.team-1234")
                .withReference("foo")
                .build();

        fakeProfile = Profile.builder()
                .withAttribute("centre name", "My Institute")
                .build();
    }

    // Documents POST /api/user/teams (201 Created) as "create-team".
    @Test
    public void createTeam() throws Exception {
        TeamDto teamDto = new TeamDto();
        teamDto.setDescription("My lab group");
        teamDto.setCentreName("An Institute");

        String teamDescJson = objectMapper.writeValueAsString(teamDto);

        Team team = Team.build("subs.team-1234");

        Mockito.when(teamCreationService.createTeam(Mockito.anyObject(), Mockito.anyObject()))
                .thenReturn(team);

        this.mockMvc.perform(
                post("/api/user/teams", Helpers.TEAM_NAME)
                        .contentType(MediaType.APPLICATION_JSON)
                        .accept(RestMediaTypes.HAL_JSON)
                        .content(teamDescJson)
        ).andDo(print())
                .andExpect(status().isCreated())
                .andDo(
                        document("create-team",
                                preprocessRequest(prettyPrint(), addAuthTokenHeader()),
                                preprocessResponse(prettyPrint()),
                                links(
                                        halLinks(),
                                        linkWithRel("self").description("This resource"),
                                        linkWithRel("submissions").description("Collection of submissions within this team"),
                                        linkWithRel("submissions:create").description("Collection of submissions within this team"),
                                        linkWithRel("items").description("Items owned by this team")
                                ),
                                responseFields(
                                        DocumentationHelper.linksResponseField(),
                                        fieldWithPath("name").description("Name of this team")
                                )
                        )
                );
    }

    // Omitting centreName must be rejected with 400 Bad Request.
    @Test
    public void createTeam_requiresCentreName() throws Exception {
        TeamDto teamDto = new TeamDto();
        teamDto.setDescription("My lab group");

        String teamDescJson = objectMapper.writeValueAsString(teamDto);

        this.mockMvc.perform(
                post("/api/user/teams", Helpers.TEAM_NAME)
                        .contentType(MediaType.APPLICATION_JSON)
                        .accept(RestMediaTypes.HAL_JSON)
                        .content(teamDescJson)
        ).andExpect(status().isBadRequest());
    }

    // Documents GET /api/teams/{teamName} as "get-team".
    @Test
    public void team() throws Exception {
        this.mockMvc.perform(
                get("/api/teams/{teamName}", Helpers.TEAM_NAME)
                        .accept(RestMediaTypes.HAL_JSON)
        ).andExpect(status().isOk())
                .andDo(
                        document("get-team",
                                preprocessRequest(prettyPrint(), addAuthTokenHeader()),
                                preprocessResponse(prettyPrint()),
                                links(
                                        halLinks(),
                                        linkWithRel("self").description("This resource"),
                                        linkWithRel("submissions").description("Collection of submissions within this team"),
                                        linkWithRel("submissions:create").description("Collection of submissions within this team"),
                                        linkWithRel("items").description("Items owned by this team")
                                ),
                                responseFields(
                                        DocumentationHelper.linksResponseField(),
                                        fieldWithPath("name").description("Name of this team")
                                )
                        )
                );
    }

    // Documents the paged GET /api/user/teams listing as "get-teams",
    // backed by the mocked domain and profile services.
    @Test
    public void teams() throws Exception {
        Mockito.when(domainService.getMyDomains(Mockito.anyString())).thenReturn(
                Arrays.asList(fakeDomain)
        );
        Mockito.when(profileService.getDomainProfile(Mockito.anyString(), Mockito.anyString())).thenReturn(
                fakeProfile
        );

        this.mockMvc.perform(
                get("/api/user/teams")
                        .header(DocumentationHelper.AUTHORIZATION_HEADER_NAME, DocumentationHelper.AUTHORIZATION_HEADER_VALUE)
                        .accept(RestMediaTypes.HAL_JSON)
        ).andExpect(status().isOk())
                .andDo(
                        document("get-teams",
                                preprocessRequest(prettyPrint()),
                                preprocessResponse(prettyPrint()),
                                links(
                                        linkWithRel("self").description("This resource list")
                                ),
                                responseFields(
                                        fieldWithPath("_links").description("<<resources-page-links,Links>> to other resources"),
                                        fieldWithPath("_embedded").description("The list of resources"),
                                        fieldWithPath("_embedded.teams[].name").description("Name of this team"),
                                        fieldWithPath("page.size").description("The number of resources in this page"),
                                        fieldWithPath("page.totalElements").description("The total number of resources"),
                                        fieldWithPath("page.totalPages").description("The total number of pages"),
                                        fieldWithPath("page.number").description("The page number")
                                )
                        )
                );
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.pig.backend.hadoop.executionengine.physicalLayer.expressionOperators; import java.io.IOException; import java.io.ObjectInputStream; import java.util.ArrayList; import java.util.List; import java.util.Map; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.pig.FuncSpec; import org.apache.pig.LoadCaster; import org.apache.pig.LoadFunc; import org.apache.pig.PigException; import org.apache.pig.ResourceSchema.ResourceFieldSchema; import org.apache.pig.backend.executionengine.ExecException; import org.apache.pig.backend.hadoop.executionengine.physicalLayer.PhysicalOperator; import org.apache.pig.backend.hadoop.executionengine.physicalLayer.POStatus; import org.apache.pig.backend.hadoop.executionengine.physicalLayer.Result; import org.apache.pig.backend.hadoop.executionengine.physicalLayer.plans.PhyPlanVisitor; import org.apache.pig.data.DataBag; import org.apache.pig.data.DataByteArray; import org.apache.pig.data.DataType; import org.apache.pig.data.Tuple; import org.apache.pig.impl.PigContext; import org.apache.pig.impl.plan.OperatorKey; import org.apache.pig.impl.plan.NodeIdGenerator; import 
org.apache.pig.impl.plan.VisitorException;
import org.apache.pig.StreamToPig;
import org.apache.pig.impl.util.CastUtils;

/**
 * This is just a cast that converts DataByteArray into either String or
 * Integer. Just added it for testing the POUnion. Need the full operator
 * implementation.
 */
public class POCast extends ExpressionOperator {
    // FuncSpec of the load function / stream deserializer whose LoadCaster is
    // used to turn raw bytes into typed values.
    private FuncSpec funcSpec = null;
    // Rebuilt from funcSpec after deserialization — see readObject().
    transient private LoadCaster caster;
    transient private Log log = LogFactory.getLog(getClass());
    // Set once we discover a "bytearray" input is really already the target
    // complex type; used only by the tuple/bag/map casts below.
    private boolean castNotNeeded = false;
    // Cached runtime type of a non-DataByteArray value seen where a bytearray
    // was expected, so DataType.findType() is not called for every record.
    private Byte realType = null;
    // Lazily built single-element child list — see getChildExpressions().
    private transient List<ExpressionOperator> child;
    // Target schema used when casting bytes to tuple/bag; also shown by name().
    private ResourceFieldSchema fieldSchema = null;
    private static final long serialVersionUID = 1L;

    public POCast(OperatorKey k) {
        super(k);
    }

    public POCast(OperatorKey k, int rp) {
        super(k, rp);
    }

    /**
     * Instantiates the LoadCaster from funcSpec. No-op when the caster already
     * exists or when no funcSpec was supplied.
     *
     * @throws IOException if funcSpec resolves to a class that is neither a
     *         LoadFunc nor a StreamToPig
     */
    private void instantiateFunc() throws IOException {
        if (caster != null) return;
        if (funcSpec != null) {
            Object obj = PigContext.instantiateFuncFromSpec(funcSpec);
            if (obj instanceof LoadFunc) {
                caster = ((LoadFunc)obj).getLoadCaster();
            } else if (obj instanceof StreamToPig) {
                caster = ((StreamToPig)obj).getLoadCaster();
            } else {
                throw new IOException("Invalid class type " + funcSpec.getClassName());
            }
        }
    }

    /** Sets the FuncSpec and eagerly instantiates its LoadCaster. */
    public void setFuncSpec(FuncSpec lf) throws IOException {
        this.funcSpec = lf;
        instantiateFunc();
    }

    @Override
    public void visit(PhyPlanVisitor v) throws VisitorException {
        v.visitCast(this);
    }

    @Override
    public String name() {
        // For bag/tuple targets the cast schema is included in the display name.
        if (resultType==DataType.BAG||resultType==DataType.TUPLE)
            return "Cast" + "[" + DataType.findTypeName(resultType)+":"
                + fieldSchema.calcCastString() + "]" + " - " + mKey.toString();
        else
            return "Cast" + "[" + DataType.findTypeName(resultType) + "]" + " - "
                + mKey.toString();
    }

    @Override
    public boolean supportsMultipleInputs() {
        return false;
    }

    /**
     * Returns the input value cast to Integer. Dispatches on the declared
     * result type of the single input operator; bag/tuple/map inputs cannot be
     * cast to int and yield STATUS_ERR.
     */
    @Override
    public Result getNext(Integer i) throws ExecException {
        PhysicalOperator in = inputs.get(0);
        Byte resultType = in.getResultType();
        switch (resultType) {

        case DataType.BAG: {
            Result res = new Result();
            res.returnStatus = POStatus.STATUS_ERR;
            return res;
        }

        case DataType.TUPLE: {
            Result res = new Result();
            res.returnStatus = POStatus.STATUS_ERR;
            return res;
        }

        case DataType.BYTEARRAY: {
            DataByteArray dba = null;
            Result res = in.getNext(dba);
            if (res.returnStatus == POStatus.STATUS_OK && res.result != null) {
                try {
                    dba = (DataByteArray) res.result;
                } catch (ClassCastException e) {
                    // res.result is not of type ByteArray. But it can be one of
                    // the types from which cast is still possible.
                    if (realType == null)
                        // Find the type and cache it.
                        realType = DataType.findType(res.result);
                    try {
                        res.result = DataType.toInteger(res.result, realType);
                    } catch (ClassCastException cce) {
                        // Type has changed. Need to find type again and try
                        // casting it again.
                        realType = DataType.findType(res.result);
                        res.result = DataType.toInteger(res.result, realType);
                    }
                    return res;
                }
                try {
                    if (null != caster) {
                        res.result = caster.bytesToInteger(dba.get());
                    } else {
                        int errCode = 1075;
                        String msg = "Received a bytearray from the UDF. Cannot determine how to convert the bytearray to int.";
                        throw new ExecException(msg, errCode, PigException.INPUT);
                    }
                } catch (ExecException ee) {
                    throw ee;
                } catch (IOException e) {
                    // NOTE(review): conversion failure is only logged; the raw
                    // bytearray result is passed through unchanged.
                    log.error("Error while casting from ByteArray to Integer");
                }
            }
            return res;
        }

        case DataType.MAP: {
            Result res = new Result();
            res.returnStatus = POStatus.STATUS_ERR;
            return res;
        }

        case DataType.BOOLEAN: {
            Boolean b = null;
            Result res = in.getNext(b);
            if (res.returnStatus == POStatus.STATUS_OK && res.result != null) {
                // true -> 1, false -> 0
                if (((Boolean) res.result) == true)
                    res.result = Integer.valueOf(1);
                else
                    res.result = Integer.valueOf(0);
            }
            return res;
        }

        case DataType.INTEGER: {
            // Already an int: pass through.
            Result res = in.getNext(i);
            return res;
        }

        case DataType.DOUBLE: {
            Double d = null;
            Result res = in.getNext(d);
            if (res.returnStatus == POStatus.STATUS_OK && res.result != null) {
                // res.result = DataType.toInteger(res.result);
                res.result = Integer.valueOf(((Double) res.result).intValue());
            }
            return res;
        }

        case DataType.LONG: {
            Long l = null;
            Result res = in.getNext(l);
            if (res.returnStatus == POStatus.STATUS_OK && res.result != null) {
                res.result = Integer.valueOf(((Long) res.result).intValue());
            }
            return res;
        }

        case DataType.FLOAT: {
            Float f = null;
            Result res = in.getNext(f);
            if (res.returnStatus == POStatus.STATUS_OK && res.result != null) {
                res.result = Integer.valueOf(((Float) res.result).intValue());
            }
            return res;
        }

        case DataType.CHARARRAY: {
            String str = null;
            Result res = in.getNext(str);
            if (res.returnStatus == POStatus.STATUS_OK && res.result != null) {
                res.result = CastUtils.stringToInteger((String)res.result);
            }
            return res;
        }

        }
        // Unknown input type: report error.
        Result res = new Result();
        res.returnStatus = POStatus.STATUS_ERR;
        return res;
    }

    /**
     * Returns the input value cast to Long. Same dispatch scheme as
     * getNext(Integer); bag/tuple/map inputs yield STATUS_ERR.
     */
    @Override
    public Result getNext(Long l) throws ExecException {
        PhysicalOperator in = inputs.get(0);
        Byte resultType = in.getResultType();
        switch (resultType) {

        case DataType.BAG: {
            Result res = new Result();
            res.returnStatus = POStatus.STATUS_ERR;
            return res;
        }

        case DataType.TUPLE: {
            Result res = new Result();
            res.returnStatus = POStatus.STATUS_ERR;
            return res;
        }

        case DataType.MAP: {
            Result res = new Result();
            res.returnStatus = POStatus.STATUS_ERR;
            return res;
        }

        case DataType.BYTEARRAY: {
            DataByteArray dba = null;
            Result res = in.getNext(dba);
            if (res.returnStatus == POStatus.STATUS_OK && res.result != null) {
                try {
                    dba = (DataByteArray) res.result;
                } catch (ClassCastException e) {
                    // res.result is not of type ByteArray. But it can be one of
                    // the types from which cast is still possible.
                    if (realType == null)
                        // Find the type in first call and cache it.
                        realType = DataType.findType(res.result);
                    try {
                        res.result = DataType.toLong(res.result, realType);
                    } catch (ClassCastException cce) {
                        // Type has changed. Need to find type again and try
                        // casting it again.
                        realType = DataType.findType(res.result);
                        res.result = DataType.toLong(res.result, realType);
                    }
                    return res;
                }
                try {
                    if (null != caster) {
                        res.result = caster.bytesToLong(dba.get());
                    } else {
                        int errCode = 1075;
                        String msg = "Received a bytearray from the UDF. Cannot determine how to convert the bytearray to long.";
                        throw new ExecException(msg, errCode, PigException.INPUT);
                    }
                } catch (ExecException ee) {
                    throw ee;
                } catch (IOException e) {
                    log.error("Error while casting from ByteArray to Long");
                }
            }
            return res;
        }

        case DataType.BOOLEAN: {
            Boolean b = null;
            Result res = in.getNext(b);
            if (res.returnStatus == POStatus.STATUS_OK && res.result != null) {
                if (((Boolean) res.result) == true)
                    res.result = Long.valueOf(1);
                else
                    res.result = Long.valueOf(0);
            }
            return res;
        }

        case DataType.INTEGER: {
            Integer dummyI = null;
            Result res = in.getNext(dummyI);
            if (res.returnStatus == POStatus.STATUS_OK && res.result != null) {
                res.result = Long.valueOf(((Integer) res.result).longValue());
            }
            return res;
        }

        case DataType.DOUBLE: {
            Double d = null;
            Result res = in.getNext(d);
            if (res.returnStatus == POStatus.STATUS_OK && res.result != null) {
                // res.result = DataType.toInteger(res.result);
                res.result = Long.valueOf(((Double) res.result).longValue());
            }
            return res;
        }

        case DataType.LONG: {
            // Already a long: pass through.
            Result res = in.getNext(l);
            return res;
        }

        case DataType.FLOAT: {
            Float f = null;
            Result res = in.getNext(f);
            if (res.returnStatus == POStatus.STATUS_OK && res.result != null) {
                res.result = Long.valueOf(((Float) res.result).longValue());
            }
            return res;
        }

        case DataType.CHARARRAY: {
            String str = null;
            Result res = in.getNext(str);
            if (res.returnStatus == POStatus.STATUS_OK && res.result != null) {
                res.result = CastUtils.stringToLong((String)res.result);
            }
            return res;
        }

        }
        Result res = new Result();
        res.returnStatus = POStatus.STATUS_ERR;
        return res;
    }

    /**
     * Returns the input value cast to Double. Same dispatch scheme as
     * getNext(Integer); bag/tuple/map inputs yield STATUS_ERR.
     */
    @Override
    public Result getNext(Double d) throws ExecException {
        PhysicalOperator in = inputs.get(0);
        Byte resultType = in.getResultType();
        switch (resultType) {

        case DataType.BAG: {
            Result res = new Result();
            res.returnStatus = POStatus.STATUS_ERR;
            return res;
        }

        case DataType.TUPLE: {
            Result res = new Result();
            res.returnStatus = POStatus.STATUS_ERR;
            return res;
        }

        case DataType.MAP: {
            Result res = new Result();
            res.returnStatus = POStatus.STATUS_ERR;
            return res;
        }

        case DataType.BYTEARRAY: {
            DataByteArray dba = null;
            Result res = in.getNext(dba);
            if (res.returnStatus == POStatus.STATUS_OK && res.result != null) {
                try {
                    dba = (DataByteArray) res.result;
                } catch (ClassCastException e) {
                    // res.result is not of type ByteArray. But it can be one of
                    // the types from which cast is still possible.
                    if (realType == null)
                        // Find the type in first call and cache it.
                        realType = DataType.findType(res.result);
                    try {
                        res.result = DataType.toDouble(res.result, realType);
                    } catch (ClassCastException cce) {
                        // Type has changed. Need to find type again and try
                        // casting it again.
                        realType = DataType.findType(res.result);
                        res.result = DataType.toDouble(res.result, realType);
                    }
                    return res;
                }
                try {
                    if (null != caster) {
                        res.result = caster.bytesToDouble(dba.get());
                    } else {
                        int errCode = 1075;
                        String msg = "Received a bytearray from the UDF. Cannot determine how to convert the bytearray to double.";
                        throw new ExecException(msg, errCode, PigException.INPUT);
                    }
                } catch (ExecException ee) {
                    throw ee;
                } catch (IOException e) {
                    log.error("Error while casting from ByteArray to Double");
                }
            }
            return res;
        }

        case DataType.BOOLEAN: {
            Boolean b = null;
            Result res = in.getNext(b);
            if (res.returnStatus == POStatus.STATUS_OK && res.result != null) {
                if (((Boolean) res.result) == true)
                    res.result = new Double(1);
                else
                    res.result = new Double(0);
            }
            return res;
        }

        case DataType.INTEGER: {
            Integer dummyI = null;
            Result res = in.getNext(dummyI);
            if (res.returnStatus == POStatus.STATUS_OK && res.result != null) {
                res.result = new Double(((Integer) res.result).doubleValue());
            }
            return res;
        }

        case DataType.DOUBLE: {
            // Already a double: pass through.
            Result res = in.getNext(d);
            return res;
        }

        case DataType.LONG: {
            Long l = null;
            Result res = in.getNext(l);
            if (res.returnStatus == POStatus.STATUS_OK && res.result != null) {
                res.result = new Double(((Long) res.result).doubleValue());
            }
            return res;
        }

        case DataType.FLOAT: {
            Float f = null;
            Result res = in.getNext(f);
            if (res.returnStatus == POStatus.STATUS_OK && res.result != null) {
                res.result = new Double(((Float) res.result).doubleValue());
            }
            return res;
        }

        case DataType.CHARARRAY: {
            String str = null;
            Result res = in.getNext(str);
            if (res.returnStatus == POStatus.STATUS_OK && res.result != null) {
                res.result = CastUtils.stringToDouble((String)res.result);
            }
            return res;
        }

        }
        Result res = new Result();
        res.returnStatus = POStatus.STATUS_ERR;
        return res;
    }

    /**
     * Returns the input value cast to Float. Same dispatch scheme as
     * getNext(Integer); bag/tuple/map inputs yield STATUS_ERR.
     */
    @Override
    public Result getNext(Float f) throws ExecException {
        PhysicalOperator in = inputs.get(0);
        Byte resultType = in.getResultType();
        switch (resultType) {

        case DataType.BAG: {
            Result res = new Result();
            res.returnStatus = POStatus.STATUS_ERR;
            return res;
        }

        case DataType.TUPLE: {
            Result res = new Result();
            res.returnStatus = POStatus.STATUS_ERR;
            return res;
        }

        case DataType.MAP: {
            Result res = new Result();
            res.returnStatus = POStatus.STATUS_ERR;
            return res;
        }

        case DataType.BYTEARRAY: {
            DataByteArray dba = null;
            Result res = in.getNext(dba);
            if (res.returnStatus == POStatus.STATUS_OK && res.result != null) {
                try {
                    dba = (DataByteArray) res.result;
                } catch (ClassCastException e) {
                    // res.result is not of type ByteArray. But it can be one of
                    // the types from which cast is still possible.
                    if (realType == null)
                        // Find the type in first call and cache it.
                        realType = DataType.findType(res.result);
                    try {
                        res.result = DataType.toFloat(res.result, realType);
                    } catch (ClassCastException cce) {
                        // Type has changed. Need to find type again and try
                        // casting it again.
                        realType = DataType.findType(res.result);
                        res.result = DataType.toFloat(res.result, realType);
                    }
                    return res;
                }
                try {
                    if (null != caster) {
                        res.result = caster.bytesToFloat(dba.get());
                    } else {
                        int errCode = 1075;
                        String msg = "Received a bytearray from the UDF. Cannot determine how to convert the bytearray to float.";
                        throw new ExecException(msg, errCode, PigException.INPUT);
                    }
                } catch (ExecException ee) {
                    throw ee;
                } catch (IOException e) {
                    log.error("Error while casting from ByteArray to Float");
                }
            }
            return res;
        }

        case DataType.BOOLEAN: {
            Boolean b = null;
            Result res = in.getNext(b);
            if (res.returnStatus == POStatus.STATUS_OK && res.result != null) {
                if (((Boolean) res.result) == true)
                    res.result = new Float(1);
                else
                    res.result = new Float(0);
            }
            return res;
        }

        case DataType.INTEGER: {
            Integer dummyI = null;
            Result res = in.getNext(dummyI);
            if (res.returnStatus == POStatus.STATUS_OK && res.result != null) {
                res.result = new Float(((Integer) res.result).floatValue());
            }
            return res;
        }

        case DataType.DOUBLE: {
            Double d = null;
            Result res = in.getNext(d);
            if (res.returnStatus == POStatus.STATUS_OK && res.result != null) {
                // res.result = DataType.toInteger(res.result);
                res.result = new Float(((Double) res.result).floatValue());
            }
            return res;
        }

        case DataType.LONG: {
            Long l = null;
            Result res = in.getNext(l);
            if (res.returnStatus == POStatus.STATUS_OK && res.result != null) {
                res.result = new Float(((Long) res.result).floatValue());
            }
            return res;
        }

        case DataType.FLOAT: {
            // Already a float: pass through.
            Result res = in.getNext(f);
            return res;
        }

        case DataType.CHARARRAY: {
            String str = null;
            Result res = in.getNext(str);
            if (res.returnStatus == POStatus.STATUS_OK && res.result != null) {
                res.result = CastUtils.stringToFloat((String)res.result);
            }
            return res;
        }

        }
        Result res = new Result();
        res.returnStatus = POStatus.STATUS_ERR;
        return res;
    }

    /**
     * Returns the input value cast to String (chararray). Same dispatch scheme
     * as getNext(Integer); bag/tuple/map inputs yield STATUS_ERR.
     */
    @Override
    public Result getNext(String str) throws ExecException {
        PhysicalOperator in = inputs.get(0);
        Byte resultType = in.getResultType();
        switch (resultType) {

        case DataType.BAG: {
            Result res = new Result();
            res.returnStatus = POStatus.STATUS_ERR;
            return res;
        }

        case DataType.TUPLE: {
            Result res = new Result();
            res.returnStatus = POStatus.STATUS_ERR;
            return res;
        }

        case DataType.MAP: {
            Result res = new Result();
            res.returnStatus = POStatus.STATUS_ERR;
            return res;
        }

        case DataType.BYTEARRAY: {
            DataByteArray dba = null;
            Result res = in.getNext(dba);
            if (res.returnStatus == POStatus.STATUS_OK && res.result != null) {
                try {
                    dba = (DataByteArray) res.result;
                } catch (ClassCastException e) {
                    // res.result is not of type ByteArray. But it can be one of
                    // the types from which cast is still possible.
                    if (realType == null)
                        // Find the type in first call and cache it.
                        realType = DataType.findType(res.result);
                    try {
                        res.result = DataType.toString(res.result, realType);
                    } catch (ClassCastException cce) {
                        // Type has changed. Need to find type again and try
                        // casting it again.
                        realType = DataType.findType(res.result);
                        res.result = DataType.toString(res.result, realType);
                    }
                    return res;
                }
                try {
                    if (null != caster) {
                        res.result = caster.bytesToCharArray(dba.get());
                    } else {
                        int errCode = 1075;
                        String msg = "Received a bytearray from the UDF. Cannot determine how to convert the bytearray to string.";
                        throw new ExecException(msg, errCode, PigException.INPUT);
                    }
                } catch (ExecException ee) {
                    throw ee;
                } catch (IOException e) {
                    log.error("Error while casting from ByteArray to CharArray");
                }
            }
            return res;
        }

        case DataType.BOOLEAN: {
            Boolean b = null;
            Result res = in.getNext(b);
            if (res.returnStatus == POStatus.STATUS_OK && res.result != null) {
                // true -> "1", false -> "0"
                if (((Boolean) res.result) == true)
                    res.result = "1";
                else
                    res.result = "0";
            }
            return res;
        }

        case DataType.INTEGER: {
            Integer dummyI = null;
            Result res = in.getNext(dummyI);
            if (res.returnStatus == POStatus.STATUS_OK && res.result != null) {
                res.result = ((Integer) res.result).toString();
            }
            return res;
        }

        case DataType.DOUBLE: {
            Double d = null;
            Result res = in.getNext(d);
            if (res.returnStatus == POStatus.STATUS_OK && res.result != null) {
                // res.result = DataType.toInteger(res.result);
                res.result = ((Double) res.result).toString();
            }
            return res;
        }

        case DataType.LONG: {
            Long l = null;
            Result res = in.getNext(l);
            if (res.returnStatus == POStatus.STATUS_OK && res.result != null) {
                res.result = ((Long) res.result).toString();
            }
            return res;
        }

        case DataType.FLOAT: {
            Float f = null;
            Result res = in.getNext(f);
            if (res.returnStatus == POStatus.STATUS_OK && res.result != null) {
                res.result = ((Float) res.result).toString();
            }
            return res;
        }

        case DataType.CHARARRAY: {
            // Already a chararray: pass through.
            Result res = in.getNext(str);
            return res;
        }

        }
        Result res = new Result();
        res.returnStatus = POStatus.STATUS_ERR;
        return res;
    }

    /**
     * Returns the input value cast to Tuple. Only a tuple (pass-through) or a
     * bytearray input can succeed; every other type yields STATUS_ERR.
     */
    @Override
    public Result getNext(Tuple t) throws ExecException {
        PhysicalOperator in = inputs.get(0);
        Byte castToType = DataType.TUPLE;
        Byte resultType = in.getResultType();
        switch (resultType) {

        case DataType.TUPLE: {
            Result res = in.getNext(t);
            return res;
        }

        case DataType.BYTEARRAY: {
            DataByteArray dba = null;
            Result res = in.getNext(dba);
            if (res.returnStatus == POStatus.STATUS_OK && res.result != null) {
                // res.result = new
                // String(((DataByteArray)res.result).toString());
                if (castNotNeeded) {
                    // we examined the data once before and
                    // determined that the input is the same
                    // type as the type we are casting to
                    // so just send the input out as output
                    return res;
                }
                try {
                    dba = (DataByteArray) res.result;
                } catch (ClassCastException e) {
                    // check if the type of res.result is
                    // same as the type we are trying to cast to
                    if (DataType.findType(res.result) == castToType) {
                        // remember this for future calls
                        castNotNeeded = true;
                        // just return the output
                        return res;
                    } else {
                        // the input is a different type
                        // rethrow the exception
                        int errCode = 1081;
                        String msg = "Cannot cast to tuple. Expected bytearray but received: " + DataType.findTypeName(res.result);
                        throw new ExecException(msg, errCode, PigException.INPUT, e);
                    }
                }
                try {
                    if (null != caster) {
                        res.result = caster.bytesToTuple(dba.get(), fieldSchema);
                    } else {
                        int errCode = 1075;
                        String msg = "Received a bytearray from the UDF. Cannot determine how to convert the bytearray to tuple.";
                        throw new ExecException(msg, errCode, PigException.INPUT);
                    }
                } catch (ExecException ee) {
                    throw ee;
                } catch (IOException e) {
                    log.error("Error while casting from ByteArray to Tuple");
                }
            }
            return res;
        }

        case DataType.BAG:
        case DataType.MAP:
        case DataType.INTEGER:
        case DataType.DOUBLE:
        case DataType.LONG:
        case DataType.FLOAT:
        case DataType.CHARARRAY:
        case DataType.BOOLEAN: {
            Result res = new Result();
            res.returnStatus = POStatus.STATUS_ERR;
            return res;
        }

        }
        Result res = new Result();
        res.returnStatus = POStatus.STATUS_ERR;
        return res;
    }

    /**
     * Returns the input value cast to DataBag. Only a bag (pass-through) or a
     * bytearray input can succeed; every other type yields STATUS_ERR.
     */
    @Override
    public Result getNext(DataBag bag) throws ExecException {
        PhysicalOperator in = inputs.get(0);
        Byte castToType = DataType.BAG;
        Byte resultType = in.getResultType();
        switch (resultType) {

        case DataType.BAG: {
            Result res = in.getNext(bag);
            return res;
        }

        case DataType.BYTEARRAY: {
            DataByteArray dba = null;
            Result res = in.getNext(dba);
            if (res.returnStatus == POStatus.STATUS_OK && res.result != null) {
                // res.result = new
                // String(((DataByteArray)res.result).toString());
                if (castNotNeeded) {
                    // we examined the data once before and
                    // determined that the input is the same
                    // type as the type we are casting to
                    // so just send the input out as output
                    return res;
                }
                try {
                    dba = (DataByteArray) res.result;
                } catch (ClassCastException e) {
                    // check if the type of res.result is
                    // same as the type we are trying to cast to
                    if (DataType.findType(res.result) == castToType) {
                        // remember this for future calls
                        castNotNeeded = true;
                        // just return the output
                        return res;
                    } else {
                        // the input is a different type
                        // rethrow the exception
                        int errCode = 1081;
                        String msg = "Cannot cast to bag. Expected bytearray but received: " + DataType.findTypeName(res.result);
                        throw new ExecException(msg, errCode, PigException.INPUT, e);
                    }
                }
                try {
                    if (null != caster) {
                        res.result = caster.bytesToBag(dba.get(), fieldSchema);
                    } else {
                        int errCode = 1075;
                        String msg = "Received a bytearray from the UDF. Cannot determine how to convert the bytearray to bag.";
                        throw new ExecException(msg, errCode, PigException.INPUT);
                    }
                } catch (ExecException ee) {
                    throw ee;
                } catch (IOException e) {
                    log.error("Error while casting from ByteArray to DataBag");
                }
            }
            return res;
        }

        case DataType.TUPLE:
        case DataType.MAP:
        case DataType.INTEGER:
        case DataType.DOUBLE:
        case DataType.LONG:
        case DataType.FLOAT:
        case DataType.CHARARRAY:
        case DataType.BOOLEAN: {
            Result res = new Result();
            res.returnStatus = POStatus.STATUS_ERR;
            return res;
        }

        }
        Result res = new Result();
        res.returnStatus = POStatus.STATUS_ERR;
        return res;
    }

    /**
     * Returns the input value cast to Map. Only a map (pass-through) or a
     * bytearray input can succeed; every other type yields STATUS_ERR.
     */
    @Override
    public Result getNext(Map m) throws ExecException {
        PhysicalOperator in = inputs.get(0);
        Byte castToType = DataType.MAP;
        Byte resultType = in.getResultType();
        switch (resultType) {

        case DataType.MAP: {
            Result res = in.getNext(m);
            return res;
        }

        case DataType.BYTEARRAY: {
            DataByteArray dba = null;
            Result res = in.getNext(dba);
            if (res.returnStatus == POStatus.STATUS_OK && res.result != null) {
                // res.result = new
                // String(((DataByteArray)res.result).toString());
                if (castNotNeeded) {
                    // we examined the data once before and
                    // determined that the input is the same
                    // type as the type we are casting to
                    // so just send the input out as output
                    return res;
                }
                try {
                    dba = (DataByteArray) res.result;
                } catch (ClassCastException e) {
                    // check if the type of res.result is
                    // same as the type we are trying to cast to
                    if (DataType.findType(res.result) == castToType) {
                        // remember this for future calls
                        castNotNeeded = true;
                        // just return the output
                        return res;
                    } else {
                        // the input is a different type
                        // rethrow the exception
                        int errCode = 1081;
                        String msg = "Cannot cast to map. Expected bytearray but received: " + DataType.findTypeName(res.result);
                        throw new ExecException(msg, errCode, PigException.INPUT, e);
                    }
                }
                try {
                    if (null != caster) {
                        res.result = caster.bytesToMap(dba.get());
                    } else {
                        int errCode = 1075;
                        String msg = "Received a bytearray from the UDF. Cannot determine how to convert the bytearray to map.";
                        throw new ExecException(msg, errCode, PigException.INPUT);
                    }
                } catch (ExecException ee) {
                    throw ee;
                } catch (IOException e) {
                    log.error("Error while casting from ByteArray to Map");
                }
            }
            return res;
        }

        case DataType.TUPLE:
        case DataType.BAG:
        case DataType.INTEGER:
        case DataType.DOUBLE:
        case DataType.LONG:
        case DataType.FLOAT:
        case DataType.CHARARRAY:
        case DataType.BOOLEAN: {
            Result res = new Result();
            res.returnStatus = POStatus.STATUS_ERR;
            return res;
        }

        }
        Result res = new Result();
        res.returnStatus = POStatus.STATUS_ERR;
        return res;
    }

    // Re-creates the transient LoadCaster after default deserialization.
    private void readObject(ObjectInputStream is) throws IOException, ClassNotFoundException {
        is.defaultReadObject();
        instantiateFunc();
    }

    @Override
    public POCast clone() throws CloneNotSupportedException {
        // A fresh operator key is generated for the clone; funcSpec and
        // fieldSchema are shared (not deep-copied).
        POCast clone = new POCast(new OperatorKey(mKey.scope, NodeIdGenerator.getGenerator().getNextNodeId(mKey.scope)));
        clone.cloneHelper(this);
        clone.funcSpec = funcSpec;
        clone.fieldSchema = fieldSchema;
        try {
            clone.instantiateFunc();
        } catch (IOException e) {
            // CloneNotSupportedException has no (cause) constructor; attach it
            // via initCause instead.
            CloneNotSupportedException cnse = new CloneNotSupportedException();
            cnse.initCause(e);
            throw cnse;
        }
        return clone;
    }

    /**
     * Get child expression of this expression
     */
    @Override
    public List<ExpressionOperator> getChildExpressions() {
        if (child == null) {
            child = new ArrayList<ExpressionOperator>();
            if (inputs.get(0) instanceof ExpressionOperator) {
                child.add( (ExpressionOperator)inputs.get(0));
            }
        }
        return child;
    }

    public void setFieldSchema(ResourceFieldSchema s) {
        fieldSchema = s;
    }

    public FuncSpec getFuncSpec() {
        return funcSpec;
    }

}
package ch.qos.logback.core.util;

import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.atomic.AtomicBoolean;

/**
 * A GC-free lock-free thread-safe implementation of the {@link List} interface for use cases where iterations over the list vastly out-number modifications on the list.
 *
 * <p>Underneath, it wraps an instance of {@link CopyOnWriteArrayList} and exposes a copy of the array used by that instance.
 *
 * <p>Typical use:</p>
 *
 * <pre>
 *   COWArrayList&lt;Integer&gt; list = new COWArrayList(new Integer[0]);
 *
 *   // modify the list
 *   list.add(1);
 *   list.add(2);
 *
 *   Integer[] intArray = list.asTypedArray();
 *   int sum = 0;
 *   // iteration over the array is thread-safe
 *   for(int i = 0; i &lt; intArray.length; i++) {
 *     sum += intArray[i];
 *   }
 * </pre>
 *
 * <p>If the list is not modified, then repetitive calls to {@link #asTypedArray()}, {@link #toArray()} and
 * {@link #toArray(Object[])} are guaranteed to be GC-free. Note that iterating over the list using
 * {@link COWArrayList#iterator()} and {@link COWArrayList#listIterator()} are <b>not</b> GC-free.</p>
 *
 * @author Ceki Gulcu
 * @since 1.1.10
 */
public class COWArrayList<E> implements List<E> {

    // Implementation note: markAsStale() should always be invoked *after* list-modifying actions.
    // If not, readers might get a stale array until the next write. The potential problem is nicely
    // explained by Rob Eden. See https://github.com/qos-ch/logback/commit/32a2047a1adfc#commitcomment-20791176

    // true while ourCopy mirrors underlyingList; cleared by every mutator.
    AtomicBoolean fresh = new AtomicBoolean(false);
    CopyOnWriteArrayList<E> underlyingList = new CopyOnWriteArrayList<E>();
    // Cached array snapshot handed out by toArray()/asTypedArray().
    E[] ourCopy;
    // Prototype array (typically zero-length) used to type the snapshot.
    final E[] modelArray;

    public COWArrayList(E[] modelArray) {
        this.modelArray = modelArray;
    }

    @Override
    public int size() {
        return underlyingList.size();
    }

    @Override
    public boolean isEmpty() {
        return underlyingList.isEmpty();
    }

    @Override
    public boolean contains(Object o) {
        return underlyingList.contains(o);
    }

    @Override
    public Iterator<E> iterator() {
        // Delegates to the COW list's snapshot iterator (allocates — not GC-free).
        return underlyingList.iterator();
    }

    private void refreshCopyIfNecessary() {
        if (!isFresh()) {
            refreshCopy();
        }
    }

    private boolean isFresh() {
        return fresh.get();
    }

    private void refreshCopy() {
        ourCopy = underlyingList.toArray(modelArray);
        fresh.set(true);
    }

    @Override
    public Object[] toArray() {
        refreshCopyIfNecessary();
        return ourCopy;
    }

    @SuppressWarnings("unchecked")
    @Override
    public <T> T[] toArray(T[] a) {
        // NOTE(review): deliberately ignores the supplied array and returns the
        // cached copy for GC-freeness (see class javadoc); this deviates from
        // the strict List.toArray(T[]) contract.
        refreshCopyIfNecessary();
        return (T[]) ourCopy;
    }

    /**
     * Return an array of type E[]. The returned array is intended to be iterated over.
     * If the list is modified, subsequent calls to this method will return different/modified
     * array instances.
     *
     * @return
     */
    public E[] asTypedArray() {
        refreshCopyIfNecessary();
        return ourCopy;
    }

    private void markAsStale() {
        fresh.set(false);
    }

    // Adds e only if not already present; the underlying boolean result is
    // intentionally discarded.
    public void addIfAbsent(E e) {
        underlyingList.addIfAbsent(e);
        markAsStale();
    }

    @Override
    public boolean add(E e) {
        boolean result = underlyingList.add(e);
        markAsStale();
        return result;
    }

    @Override
    public boolean remove(Object o) {
        boolean result = underlyingList.remove(o);
        markAsStale();
        return result;
    }

    @Override
    public boolean containsAll(Collection<?> c) {
        return underlyingList.containsAll(c);
    }

    @Override
    public boolean addAll(Collection<? extends E> c) {
        boolean result = underlyingList.addAll(c);
        markAsStale();
        return result;
    }

    @Override
    public boolean addAll(int index, Collection<? extends E> col) {
        boolean result = underlyingList.addAll(index, col);
        markAsStale();
        return result;
    }

    @Override
    public boolean removeAll(Collection<?> col) {
        boolean result = underlyingList.removeAll(col);
        markAsStale();
        return result;
    }

    @Override
    public boolean retainAll(Collection<?> col) {
        boolean result = underlyingList.retainAll(col);
        markAsStale();
        return result;
    }

    @Override
    public void clear() {
        underlyingList.clear();
        markAsStale();
    }

    @Override
    public E get(int index) {
        // Reads from the cached snapshot, not the underlying list.
        refreshCopyIfNecessary();
        return (E) ourCopy[index];
    }

    @Override
    public E set(int index, E element) {
        E e = underlyingList.set(index, element);
        markAsStale();
        return e;
    }

    @Override
    public void add(int index, E element) {
        underlyingList.add(index, element);
        markAsStale();
    }

    @Override
    public E remove(int index) {
        E e = (E) underlyingList.remove(index);
        markAsStale();
        return e;
    }

    @Override
    public int indexOf(Object o) {
        return underlyingList.indexOf(o);
    }

    @Override
    public int lastIndexOf(Object o) {
        return underlyingList.lastIndexOf(o);
    }

    @Override
    public ListIterator<E> listIterator() {
        return underlyingList.listIterator();
    }

    @Override
    public ListIterator<E> listIterator(int index) {
        return underlyingList.listIterator(index);
    }

    @Override
    public List<E> subList(int fromIndex, int toIndex) {
        return underlyingList.subList(fromIndex, toIndex);
    }

}
/////////////////////////////////////////////////////////////////////////////// // Copyright (c) 2001, Eric D. Friedman All Rights Reserved. // Copyright (c) 2009, Rob Eden All Rights Reserved. // Copyright (c) 2009, Jeff Randall All Rights Reserved. // // This library is free software; you can redistribute it and/or // modify it under the terms of the GNU Lesser General Public // License as published by the Free Software Foundation; either // version 2.1 of the License, or (at your option) any later version. // // This library is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // // You should have received a copy of the GNU Lesser General Public // License along with this program; if not, write to the Free Software // Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. /////////////////////////////////////////////////////////////////////////////// package gnu.trove.impl.hash; import gnu.trove.procedure.*; import gnu.trove.impl.HashFunctions; import java.io.ObjectOutput; import java.io.ObjectInput; import java.io.IOException; ////////////////////////////////////////////////// // THIS IS A GENERATED CLASS. DO NOT HAND EDIT! // ////////////////////////////////////////////////// /** * An open addressed hashing implementation for int/byte primitive entries. * * Created: Sun Nov 4 08:56:06 2001 * * @author Eric D. 
Friedman * @author Rob Eden * @author Jeff Randall * @version $Id: _K__V_Hash.template,v 1.1.2.6 2009/11/07 03:36:44 robeden Exp $ */ abstract public class TIntByteHash extends TPrimitiveHash { static final long serialVersionUID = 1L; /** the set of ints */ public transient int[] _set; /** * key that represents null * * NOTE: should not be modified after the Hash is created, but is * not final because of Externalization * */ protected int no_entry_key; /** * value that represents null * * NOTE: should not be modified after the Hash is created, but is * not final because of Externalization * */ protected byte no_entry_value; protected boolean consumeFreeSlot; /** * Creates a new <code>T#E#Hash</code> instance with the default * capacity and load factor. */ public TIntByteHash() { super(); no_entry_key = ( int ) 0; no_entry_value = ( byte ) 0; } /** * Creates a new <code>T#E#Hash</code> instance whose capacity * is the next highest prime above <tt>initialCapacity + 1</tt> * unless that value is already prime. * * @param initialCapacity an <code>int</code> value */ public TIntByteHash( int initialCapacity ) { super( initialCapacity ); no_entry_key = ( int ) 0; no_entry_value = ( byte ) 0; } /** * Creates a new <code>TIntByteHash</code> instance with a prime * value at or near the specified capacity and load factor. * * @param initialCapacity used to find a prime capacity for the table. * @param loadFactor used to calculate the inlineThreshold over which * rehashing takes place. */ public TIntByteHash( int initialCapacity, float loadFactor ) { super(initialCapacity, loadFactor); no_entry_key = ( int ) 0; no_entry_value = ( byte ) 0; } /** * Creates a new <code>TIntByteHash</code> instance with a prime * value at or near the specified capacity and load factor. * * @param initialCapacity used to find a prime capacity for the table. * @param loadFactor used to calculate the inlineThreshold over which * rehashing takes place. 
* @param no_entry_value value that represents null */ public TIntByteHash( int initialCapacity, float loadFactor, int no_entry_key, byte no_entry_value ) { super(initialCapacity, loadFactor); this.no_entry_key = no_entry_key; this.no_entry_value = no_entry_value; } /** * Returns the value that is used to represent null as a key. The default * value is generally zero, but can be changed during construction * of the collection. * * @return the value that represents null */ public int getNoEntryKey() { return no_entry_key; } /** * Returns the value that is used to represent null. The default * value is generally zero, but can be changed during construction * of the collection. * * @return the value that represents null */ public byte getNoEntryValue() { return no_entry_value; } /** * initializes the hashtable to a prime capacity which is at least * <tt>initialCapacity + 1</tt>. * * @param initialCapacity an <code>int</code> value * @return the actual capacity chosen */ protected int setUp( int initialCapacity ) { int capacity; capacity = super.setUp( initialCapacity ); _set = new int[capacity]; return capacity; } /** * Searches the set for <tt>val</tt> * * @param val an <code>int</code> value * @return a <code>boolean</code> value */ public boolean contains( int val ) { return index(val) >= 0; } /** * Executes <tt>procedure</tt> for each key in the map. * * @param procedure a <code>TIntProcedure</code> value * @return false if the loop over the set terminated because * the procedure returned false for some value. */ public boolean forEach( TIntProcedure procedure ) { byte[] states = _states; int[] set = _set; for ( int i = set.length; i-- > 0; ) { if ( states[i] == FULL && ! procedure.execute( set[i] ) ) { return false; } } return true; } /** * Releases the element currently stored at <tt>index</tt>. 
* * @param index an <code>int</code> value */ protected void removeAt( int index ) { _set[index] = no_entry_key; super.removeAt( index ); } /** * Locates the index of <tt>val</tt>. * * @param key an <code>int</code> value * @return the index of <tt>val</tt> or -1 if it isn't in the set. */ protected int index( int key ) { int hash, probe, index, length; final byte[] states = _states; final int[] set = _set; length = states.length; hash = HashFunctions.hash( key ) & 0x7fffffff; index = hash % length; byte state = states[index]; if (state == FREE) return -1; if (state == FULL && set[index] == key) return index; return indexRehashed(key, index, hash, state); } int indexRehashed(int key, int index, int hash, byte state) { // see Knuth, p. 529 int length = _set.length; int probe = 1 + (hash % (length - 2)); final int loopIndex = index; do { index -= probe; if (index < 0) { index += length; } state = _states[index]; // if (state == FREE) return -1; // if (key == _set[index] && state != REMOVED) return index; } while (index != loopIndex); return -1; } /** * Locates the index at which <tt>val</tt> can be inserted. if * there is already a value equal()ing <tt>val</tt> in the set, * returns that value as a negative integer. 
 *
 * @param val an <code>int</code> value
 * @return an <code>int</code> value: the insertion index, or
 *         <tt>-index - 1</tt> if <tt>val</tt> was already present at <tt>index</tt>
 */
protected int insertKey( int val ) {
    int hash, index;

    hash = HashFunctions.hash(val) & 0x7fffffff;
    index = hash % _states.length;
    byte state = _states[index];

    // Side channel for callers: true when the insert consumed a FREE slot
    // (as opposed to reusing a REMOVED one or matching an existing key).
    consumeFreeSlot = false;

    if (state == FREE) {
        consumeFreeSlot = true;
        insertKeyAt(index, val);

        return index;       // empty, all done
    }

    if (state == FULL && _set[index] == val) {
        return -index - 1;   // already stored
    }

    // already FULL or REMOVED, must probe
    return insertKeyRehash(val, index, hash, state);
}

/**
 * Double-hash probing for an insertion slot after a collision at the
 * natural slot. Remembers the first REMOVED slot seen so it can be reused
 * if the key turns out not to be present.
 */
int insertKeyRehash(int val, int index, int hash, byte state) {
    // compute the double hash
    final int length = _set.length;
    int probe = 1 + (hash % (length - 2));
    final int loopIndex = index;
    int firstRemoved = -1;

    /**
     * Look until FREE slot or we start to loop
     */
    do {
        // Identify first removed slot
        if (state == REMOVED && firstRemoved == -1)
            firstRemoved = index;

        index -= probe;
        if (index < 0) {
            index += length;
        }
        state = _states[index];

        // A FREE slot stops the search
        if (state == FREE) {
            if (firstRemoved != -1) {
                // Prefer reusing the tombstone closest to the natural slot.
                insertKeyAt(firstRemoved, val);
                return firstRemoved;
            } else {
                consumeFreeSlot = true;
                insertKeyAt(index, val);
                return index;
            }
        }

        if (state == FULL && _set[index] == val) {
            return -index - 1;
        }

        // Detect loop
    } while (index != loopIndex);

    // We inspected all reachable slots and did not find a FREE one
    // If we found a REMOVED slot we return the first one found
    if (firstRemoved != -1) {
        insertKeyAt(firstRemoved, val);
        return firstRemoved;
    }

    // Can a resizing strategy be found that resizes the set?
    throw new IllegalStateException("No free or removed slots available. Key set full?!!");
}

// Stores val at index and marks the slot occupied.
void insertKeyAt(int index, int val) {
    _set[index] = val;  // insert value
    _states[index] = FULL;
}

// NOTE(review): apparently a legacy, pre-refactoring variant of insertKey
// (the 'X' prefix suggests it is retained for reference only) — confirm it
// is unused before relying on or removing it.
protected int XinsertKey( int key ) {
    int hash, probe, index, length;

    final byte[] states = _states;
    final int[] set = _set;
    length = states.length;
    hash = HashFunctions.hash( key ) & 0x7fffffff;
    index = hash % length;
    byte state = states[index];

    consumeFreeSlot = false;

    if ( state == FREE ) {
        consumeFreeSlot = true;
        set[index] = key;
        states[index] = FULL;

        return index;       // empty, all done
    } else if ( state == FULL && set[index] == key ) {
        return -index -1;   // already stored
    } else {                // already FULL or REMOVED, must probe
        // compute the double hash
        probe = 1 + ( hash % ( length - 2 ) );

        // if the slot we landed on is FULL (but not removed), probe
        // until we find an empty slot, a REMOVED slot, or an element
        // equal to the one we are trying to insert.
        // finding an empty slot means that the value is not present
        // and that we should use that slot as the insertion point;
        // finding a REMOVED slot means that we need to keep searching,
        // however we want to remember the offset of that REMOVED slot
        // so we can reuse it in case a "new" insertion (i.e. not an update)
        // is possible.
        // finding a matching value means that we've found that our desired
        // key is already in the table
        if ( state != REMOVED ) {
            // starting at the natural offset, probe until we find an
            // offset that isn't full.
            do {
                index -= probe;
                if (index < 0) {
                    index += length;
                }
                state = states[index];
            } while ( state == FULL && set[index] != key );
        }

        // if the index we found was removed: continue probing until we
        // locate a free location or an element which equal()s the
        // one we have.
        if ( state == REMOVED) {
            int firstRemoved = index;
            while ( state != FREE && ( state == REMOVED || set[index] != key ) ) {
                index -= probe;
                if (index < 0) {
                    index += length;
                }
                state = states[index];
            }

            if (state == FULL) {
                return -index -1;
            } else {
                set[index] = key;
                states[index] = FULL;

                return firstRemoved;
            }
        }

        // if it's full, the key is already stored
        if (state == FULL) {
            return -index -1;
        } else {
            consumeFreeSlot = true;
            set[index] = key;
            states[index] = FULL;

            return index;
        }
    }
}

/** {@inheritDoc} */
public void writeExternal( ObjectOutput out ) throws IOException {
    // VERSION
    out.writeByte( 0 );

    // SUPER
    super.writeExternal( out );

    // NO_ENTRY_KEY
    out.writeInt( no_entry_key );

    // NO_ENTRY_VALUE
    out.writeByte( no_entry_value );
}

/** {@inheritDoc} */
public void readExternal( ObjectInput in ) throws IOException, ClassNotFoundException {
    // VERSION — read order must mirror writeExternal exactly.
    in.readByte();

    // SUPER
    super.readExternal( in );

    // NO_ENTRY_KEY
    no_entry_key = in.readInt();

    // NO_ENTRY_VALUE
    no_entry_value = in.readByte();
}
} // TIntByteHash
/* * Copyright 2016-2018 Red Hat, Inc. and/or its affiliates * and other contributors as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.wildfly.galleon.plugin; import java.io.BufferedReader; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.net.URL; import java.net.URLClassLoader; import java.nio.charset.StandardCharsets; import java.nio.file.FileAlreadyExistsException; import java.nio.file.FileSystem; import java.nio.file.FileSystems; import java.nio.file.FileVisitOption; import java.nio.file.FileVisitResult; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.SimpleFileVisitor; import java.nio.file.StandardCopyOption; import java.nio.file.attribute.BasicFileAttributes; import java.util.ArrayList; import java.util.Collections; import java.util.EnumSet; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.stream.Stream; import nu.xom.Attribute; import nu.xom.Builder; import nu.xom.Document; import nu.xom.Element; import nu.xom.Elements; import nu.xom.ParsingException; import nu.xom.Serializer; import org.jboss.galleon.ArtifactCoords; import org.jboss.galleon.Errors; import 
org.jboss.galleon.MessageWriter; import org.jboss.galleon.ProvisioningException; import org.jboss.galleon.plugin.InstallPlugin; import org.jboss.galleon.plugin.PluginOption; import org.jboss.galleon.plugin.ProvisioningPluginWithOptions; import org.jboss.galleon.runtime.FeaturePackRuntime; import org.jboss.galleon.runtime.PackageRuntime; import org.jboss.galleon.runtime.ProvisioningRuntime; import org.jboss.galleon.util.IoUtils; import org.jboss.galleon.util.CollectionUtils; import org.jboss.galleon.util.PropertyUtils; import org.jboss.galleon.util.ZipUtils; import org.wildfly.galleon.plugin.config.CopyArtifact; import org.wildfly.galleon.plugin.config.CopyPath; import org.wildfly.galleon.plugin.config.DeletePath; import org.wildfly.galleon.plugin.config.FilePermission; import org.wildfly.galleon.plugin.config.WildFlyPackageTasks; import org.wildfly.galleon.plugin.server.CliScriptRunner; /** * * @author Alexey Loubyansky */ public class WfInstallPlugin extends ProvisioningPluginWithOptions implements InstallPlugin { private static final String CONFIG_GEN_METHOD = "generate"; private static final String CONFIG_GEN_PATH = "wildfly/wildfly-config-gen.jar"; private static final String CONFIG_GEN_CLASS = "org.wildfly.galleon.plugin.config.generator.WfConfigGenerator"; private ProvisioningRuntime runtime; private PropertyResolver versionResolver; private List<Path> installationClassPath = new ArrayList<>(); private PropertyResolver tasksProps; private boolean thinServer; private Set<String> schemaGroups = Collections.emptySet(); private final PluginOption mavenDistOption = PluginOption.builder("jboss-maven-dist").hasNoValue().build(); private List<DeletePath> pathsToDelete = Collections.emptyList(); @Override protected List<PluginOption> initPluginOptions() { return Collections.singletonList(mavenDistOption); } /* (non-Javadoc) * @see org.jboss.galleon.util.plugin.ProvisioningPlugin#execute() */ @Override public void postInstall(ProvisioningRuntime runtime) throws 
ProvisioningException { final MessageWriter messageWriter = runtime.getMessageWriter(); messageWriter.verbose("WildFly Galleon install plugin"); this.runtime = runtime; thinServer = runtime.isOptionSet(mavenDistOption); Properties provisioningProps = new Properties(); final Map<String, String> artifactVersions = new HashMap<>(); for(FeaturePackRuntime fp : runtime.getFeaturePacks()) { final Path wfRes = fp.getResource(WfConstants.WILDFLY); if(!Files.exists(wfRes)) { continue; } final Path artifactProps = wfRes.resolve(WfConstants.ARTIFACT_VERSIONS_PROPS); if(Files.exists(artifactProps)) { try (Stream<String> lines = Files.lines(artifactProps)) { final Iterator<String> iterator = lines.iterator(); while (iterator.hasNext()) { final String line = iterator.next(); final int i = line.indexOf('='); if (i < 0) { throw new ProvisioningException("Failed to locate '=' character in " + line); } artifactVersions.put(line.substring(0, i), line.substring(i + 1)); } } catch (IOException e) { throw new ProvisioningException(Errors.readFile(artifactProps), e); } } final Path tasksPropsPath = wfRes.resolve(WfConstants.WILDFLY_TASKS_PROPS); if(Files.exists(tasksPropsPath)) { if(!provisioningProps.isEmpty()) { provisioningProps = new Properties(provisioningProps); } try(InputStream in = Files.newInputStream(tasksPropsPath)) { provisioningProps.load(in); } catch (IOException e) { throw new ProvisioningException(Errors.readFile(tasksPropsPath), e); } } if(fp.containsPackage(WfConstants.DOCS_SCHEMA)) { final Path schemaGroupsTxt = fp.getPackage(WfConstants.DOCS_SCHEMA).getResource( WfConstants.PM, WfConstants.WILDFLY, WfConstants.SCHEMA_GROUPS_TXT); try(BufferedReader reader = Files.newBufferedReader(schemaGroupsTxt)) { String line = reader.readLine(); while(line != null) { schemaGroups = CollectionUtils.add(schemaGroups, line); line = reader.readLine(); } } catch (IOException e) { throw new ProvisioningException(Errors.readFile(schemaGroupsTxt), e); } } } tasksProps = new 
MapPropertyResolver(provisioningProps); versionResolver = new MapPropertyResolver(artifactVersions); for(FeaturePackRuntime fp : runtime.getFeaturePacks()) { processPackages(fp); } generateConfigs(runtime, messageWriter); // TODO this needs to be revisited for(FeaturePackRuntime fp : runtime.getFeaturePacks()) { final Path finalizeCli = fp.getResource(WfConstants.WILDFLY, WfConstants.SCRIPTS, "finalize.cli"); if(Files.exists(finalizeCli)) { CliScriptRunner.runCliScript(runtime.getStagedDir(), finalizeCli, messageWriter); } } if(!pathsToDelete.isEmpty()) { deletePaths(); } } private void generateConfigs(ProvisioningRuntime runtime, final MessageWriter messageWriter) throws ProvisioningException { if(!runtime.hasConfigs()) { return; } final Path configGenJar = runtime.getResource(CONFIG_GEN_PATH); if(!Files.exists(configGenJar)) { throw new ProvisioningException(Errors.pathDoesNotExist(configGenJar)); } final List<URL> cp = new ArrayList<>(); try { cp.add(configGenJar.toUri().toURL()); for(Path p : installationClassPath) { cp.add(p.toUri().toURL()); } } catch (IOException e) { throw new ProvisioningException("Failed to init classpath for " + runtime.getStagedDir(), e); } final ClassLoader originalCl = Thread.currentThread().getContextClassLoader(); final URLClassLoader configGenCl = new URLClassLoader(cp.toArray(new URL[cp.size()]), originalCl); Thread.currentThread().setContextClassLoader(configGenCl); try { final Class<?> configHandlerCls = configGenCl.loadClass(CONFIG_GEN_CLASS); final Constructor<?> ctor = configHandlerCls.getConstructor(); final Method m = configHandlerCls.getMethod(CONFIG_GEN_METHOD, ProvisioningRuntime.class); final Object generator = ctor.newInstance(); m.invoke(generator, runtime); } catch(InvocationTargetException e) { final Throwable cause = e.getCause(); if(cause instanceof ProvisioningException) { throw (ProvisioningException)cause; } else { throw new ProvisioningException("Failed to invoke config generator " + CONFIG_GEN_CLASS, cause); 
} } catch (Throwable e) { throw new ProvisioningException("Failed to initialize config generator " + CONFIG_GEN_CLASS, e); } finally { Thread.currentThread().setContextClassLoader(originalCl); try { configGenCl.close(); } catch (IOException e) { } } } private void processPackages(final FeaturePackRuntime fp) throws ProvisioningException { for(PackageRuntime pkg : fp.getPackages()) { final Path pmWfDir = pkg.getResource(WfConstants.PM, WfConstants.WILDFLY); if(!Files.exists(pmWfDir)) { continue; } final Path moduleDir = pmWfDir.resolve(WfConstants.MODULE); if(Files.exists(moduleDir)) { processModules(fp.getGav(), pkg.getName(), moduleDir); } final Path tasksXml = pmWfDir.resolve(WfConstants.TASKS_XML); if(Files.exists(tasksXml)) { final WildFlyPackageTasks pkgTasks = WildFlyPackageTasks.load(tasksXml); if(pkgTasks.hasCopyArtifacts()) { copyArtifacts(pkgTasks); } if(pkgTasks.hasCopyPaths()) { copyPaths(pkgTasks, pmWfDir); } if(pkgTasks.hasMkDirs()) { mkdirs(pkgTasks, this.runtime.getStagedDir()); } if (pkgTasks.hasFilePermissions() && !PropertyUtils.isWindows()) { processFeaturePackFilePermissions(pkgTasks, this.runtime.getStagedDir()); } if(pkgTasks.hasDeletePaths()) { if(pathsToDelete.isEmpty()) { pathsToDelete = new ArrayList<>(pkgTasks.getDeletePaths()); } else { pathsToDelete.addAll(pkgTasks.getDeletePaths()); } } } } } private void processModules(ArtifactCoords.Gav fp, String pkgName, Path fpModuleDir) throws ProvisioningException { try { final Path installDir = runtime.getStagedDir(); Files.walkFileTree(fpModuleDir, new SimpleFileVisitor<Path>() { @Override public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException { final Path targetDir = installDir.resolve(fpModuleDir.relativize(dir)); try { Files.copy(dir, targetDir); } catch (FileAlreadyExistsException e) { if (!Files.isDirectory(targetDir)) { throw e; } } return FileVisitResult.CONTINUE; } @Override public FileVisitResult visitFile(Path file, BasicFileAttributes 
attrs) throws IOException { if(file.getFileName().toString().equals(WfConstants.MODULE_XML)) { processModuleTemplate(fpModuleDir, installDir, file); } else { Files.copy(file, installDir.resolve(fpModuleDir.relativize(file)), StandardCopyOption.REPLACE_EXISTING); } return FileVisitResult.CONTINUE; } }); } catch (IOException e) { throw new ProvisioningException("Failed to process modules from package " + pkgName + " from feature-pack " + fp, e); } } private void processModuleTemplate(Path fpModuleDir, final Path installDir, Path moduleTemplate) throws IOException { final Builder builder = new Builder(false); final Document document; try (BufferedReader reader = Files.newBufferedReader(moduleTemplate, StandardCharsets.UTF_8)) { document = builder.build(reader); } catch (ParsingException e) { throw new IOException("Failed to parse document", e); } final Path targetPath = installDir.resolve(fpModuleDir.relativize(moduleTemplate)); final Element rootElement = document.getRootElement(); if (! rootElement.getLocalName().equals("module")) { // just copy the content and leave Files.copy(moduleTemplate, targetPath, StandardCopyOption.REPLACE_EXISTING); return; } // replace version, if any final Attribute versionAttribute = rootElement.getAttribute("version"); if (versionAttribute != null) { final String versionExpr = versionAttribute.getValue(); if (versionExpr.startsWith("${") && versionExpr.endsWith("}")) { final String exprBody = versionExpr.substring(2, versionExpr.length() - 1); final int optionsIndex = exprBody.indexOf('?'); final String artifactName; if (optionsIndex > 0) { artifactName = exprBody.substring(0, optionsIndex); } else { artifactName = exprBody; } final String resolved = versionResolver.resolveProperty(artifactName); if (resolved != null) { final ArtifactCoords coords = fromJBossModules(resolved, "jar"); versionAttribute.setValue(coords.getVersion()); } } } // replace all artifact declarations final Element resourcesElement = 
rootElement.getFirstChildElement("resources", rootElement.getNamespaceURI()); if (resourcesElement != null) { final Elements artifacts = resourcesElement.getChildElements("artifact", rootElement.getNamespaceURI()); final int artifactCount = artifacts.size(); for (int i = 0; i < artifactCount; i ++) { final Element element = artifacts.get(i); assert element.getLocalName().equals("artifact"); final Attribute attribute = element.getAttribute("name"); final String nameExpr = attribute.getValue(); if (nameExpr.startsWith("${") && nameExpr.endsWith("}")) { final String exprBody = nameExpr.substring(2, nameExpr.length() - 1); final int optionsIndex = exprBody.indexOf('?'); final String artifactName; final boolean jandex; if (optionsIndex >= 0) { artifactName = exprBody.substring(0, optionsIndex); jandex = nameExpr.indexOf("jandex", optionsIndex) >= 0; } else { artifactName = exprBody; jandex = false; } final String resolved = versionResolver.resolveProperty(artifactName); if (resolved != null) { final ArtifactCoords coords = fromJBossModules(resolved, "jar"); final Path moduleArtifact; try { moduleArtifact = runtime.resolveArtifact(coords); } catch (ProvisioningException e) { throw new IOException(e); } if (thinServer) { // ignore jandex variable, just resolve coordinates to a string attribute.setValue(resolved); addToInstallationCp(moduleArtifact); } else { final Path targetDir = installDir.resolve(fpModuleDir.relativize(moduleTemplate.getParent())); final String artifactFileName = moduleArtifact.getFileName().toString(); final String finalFileName; if (jandex) { final int lastDot = artifactFileName.lastIndexOf("."); final File target = new File(targetDir.toFile(), new StringBuilder() .append(artifactFileName.substring(0, lastDot)) .append("-jandex") .append(artifactFileName.substring(lastDot)).toString() ); JandexIndexer.createIndex(moduleArtifact.toFile(), new FileOutputStream(target), runtime.getMessageWriter()); finalFileName = target.getName(); } else { 
finalFileName = artifactFileName; final Path targetModulePath = targetDir.resolve(artifactFileName); Files.copy(moduleArtifact, targetModulePath, StandardCopyOption.REPLACE_EXISTING); addToInstallationCp(targetModulePath); } element.setLocalName("resource-root"); attribute.setLocalName("path"); attribute.setValue(finalFileName); } if (schemaGroups.contains(coords.getGroupId())) { extractSchemas(moduleArtifact); } } } // if any step fails, don't change anything at all for that artifact } } // now serialize the result try (OutputStream outputStream = Files.newOutputStream(targetPath)) { new Serializer(outputStream).write(document); } catch (Throwable t) { try { Files.deleteIfExists(targetPath); } catch (Throwable t2) { t2.addSuppressed(t); throw t2; } throw t; } } private void extractSchemas(Path moduleArtifact) throws IOException { final Path targetSchemasDir = this.runtime.getStagedDir().resolve(WfConstants.DOCS).resolve(WfConstants.SCHEMA); Files.createDirectories(targetSchemasDir); try (FileSystem jarFS = FileSystems.newFileSystem(moduleArtifact, null)) { final Path schemaSrc = jarFS.getPath(WfConstants.SCHEMA); if (Files.exists(schemaSrc)) { ZipUtils.copyFromZip(schemaSrc.toAbsolutePath(), targetSchemasDir); } } } private void copyArtifacts(final WildFlyPackageTasks tasks) throws ProvisioningException { for(CopyArtifact copyArtifact : tasks.getCopyArtifacts()) { final String gavString = versionResolver.resolveProperty(copyArtifact.getArtifact()); try { final ArtifactCoords coords = fromJBossModules(gavString, "jar"); final Path jarSrc = runtime.resolveArtifact(coords); String location = copyArtifact.getToLocation(); if (!location.isEmpty() && location.charAt(location.length() - 1) == '/') { // if the to location ends with a / then it is a directory // so we need to append the artifact name location += jarSrc.getFileName(); } final Path jarTarget = runtime.getStagedDir().resolve(location); Files.createDirectories(jarTarget.getParent()); if 
(copyArtifact.isExtract()) { extractArtifact(jarSrc, jarTarget, copyArtifact); } else { IoUtils.copy(jarSrc, jarTarget); addToInstallationCp(jarTarget); } runtime.getMessageWriter().verbose(" Copying artifact %s to %s", jarSrc, jarTarget); if(schemaGroups.contains(coords.getGroupId())) { extractSchemas(jarSrc); } } catch (IOException e) { throw new ProvisioningException("Failed to copy artifact " + gavString, e); } } } private void copyPaths(final WildFlyPackageTasks tasks, final Path pmWfDir) throws ProvisioningException { for(CopyPath copyPath : tasks.getCopyPaths()) { final Path src = pmWfDir.resolve(copyPath.getSrc()); if (!Files.exists(src)) { throw new ProvisioningException(Errors.pathDoesNotExist(src)); } final Path target = copyPath.getTarget() == null ? runtime.getStagedDir() : runtime.getStagedDir().resolve(copyPath.getTarget()); if (copyPath.isReplaceProperties()) { if (!Files.exists(target.getParent())) { try { Files.createDirectories(target.getParent()); } catch (IOException e) { throw new ProvisioningException(Errors.mkdirs(target.getParent()), e); } } try { Files.walkFileTree(src, EnumSet.of(FileVisitOption.FOLLOW_LINKS), Integer.MAX_VALUE, new SimpleFileVisitor<Path>() { @Override public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException { final Path targetDir = target.resolve(src.relativize(dir)); try { Files.copy(dir, targetDir); } catch (FileAlreadyExistsException e) { if (!Files.isDirectory(targetDir)) { throw e; } } return FileVisitResult.CONTINUE; } @Override public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { PropertyReplacer.copy(file, target.resolve(src.relativize(file)), tasksProps); return FileVisitResult.CONTINUE; } }); } catch (IOException e) { throw new ProvisioningException(Errors.copyFile(src, target), e); } } else { try { IoUtils.copy(src, target); } catch (IOException e) { throw new ProvisioningException(Errors.copyFile(src, target)); } } } } private 
void deletePaths() throws ProvisioningException { for(DeletePath deletePath : pathsToDelete) { final Path path = runtime.getStagedDir().resolve(deletePath.getPath()); if (!Files.exists(path)) { continue; } if(deletePath.isRecursive()) { IoUtils.recursiveDelete(path); } else { try { Files.delete(path); } catch (IOException e) { throw new ProvisioningException(Errors.deletePath(path), e); } } } } private void extractArtifact(Path artifact, Path target, CopyArtifact copy) throws IOException { if(!Files.exists(target)) { Files.createDirectories(target); } try (FileSystem zipFS = FileSystems.newFileSystem(artifact, null)) { for(Path zipRoot : zipFS.getRootDirectories()) { Files.walkFileTree(zipRoot, EnumSet.of(FileVisitOption.FOLLOW_LINKS), Integer.MAX_VALUE, new SimpleFileVisitor<Path>() { @Override public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException { String entry = dir.toString().substring(1); if(entry.isEmpty()) { return FileVisitResult.CONTINUE; } if(!entry.endsWith("/")) { entry += '/'; } if(!copy.includeFile(entry)) { return FileVisitResult.SKIP_SUBTREE; } final Path targetDir = target.resolve(zipRoot.relativize(dir).toString()); try { Files.copy(dir, targetDir); } catch (FileAlreadyExistsException e) { if (!Files.isDirectory(targetDir)) throw e; } return FileVisitResult.CONTINUE; } @Override public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { if(copy.includeFile(file.toString().substring(1))) { final Path targetPath = target.resolve(zipRoot.relativize(file).toString()); Files.copy(file, targetPath); if(targetPath.getFileName().endsWith(".jar")) { addToInstallationCp(targetPath); } } return FileVisitResult.CONTINUE; } }); } } } private void addToInstallationCp(Path p) { installationClassPath.add(p); } private static void mkdirs(final WildFlyPackageTasks tasks, Path installDir) throws ProvisioningException { // make dirs for (String dirName : tasks.getMkDirs()) { final Path dir = 
installDir.resolve(dirName); if(!Files.exists(dir)) { try { Files.createDirectories(dir); } catch (IOException e) { throw new ProvisioningException(Errors.mkdirs(dir)); } } } } private static void processFeaturePackFilePermissions(WildFlyPackageTasks tasks, Path installDir) throws ProvisioningException { final List<FilePermission> filePermissions = tasks.getFilePermissions(); try { Files.walkFileTree(installDir, new SimpleFileVisitor<Path>() { @Override public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException { final String relative = installDir.relativize(dir).toString(); for (FilePermission perm : filePermissions) { if (perm.includeFile(relative)) { Files.setPosixFilePermissions(dir, perm.getPermission()); continue; } } return FileVisitResult.CONTINUE; } @Override public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { final String relative = installDir.relativize(file).toString(); for (FilePermission perm : filePermissions) { if (perm.includeFile(relative)) { Files.setPosixFilePermissions(file, perm.getPermission()); continue; } } return FileVisitResult.CONTINUE; } }); } catch (IOException e) { throw new ProvisioningException("Failed to set file permissions", e); } } private static ArtifactCoords fromJBossModules(String str, String extension) { final String[] parts = str.split(":"); if(parts.length < 2) { throw new IllegalArgumentException("Unexpected artifact coordinates format: " + str); } final String groupId = parts[0]; final String artifactId = parts[1]; String version = null; String classifier = null; if(parts.length > 2) { if(!parts[2].isEmpty()) { version = parts[2]; } if(parts.length > 3 && !parts[3].isEmpty()) { classifier = parts[3]; if(parts.length > 4) { throw new IllegalArgumentException("Unexpected artifact coordinates format: " + str); } } } return new ArtifactCoords(groupId, artifactId, version, classifier, extension); } }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ambari.view.filebrowser; import static org.easymock.EasyMock.createNiceMock; import static org.easymock.EasyMock.expect; import static org.easymock.EasyMock.replay; import java.io.BufferedWriter; import java.io.File; import java.io.FileInputStream; import java.io.FileWriter; import java.io.InputStream; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.ws.rs.core.HttpHeaders; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import javax.ws.rs.core.UriInfo; import org.apache.ambari.view.ViewContext; import org.apache.ambari.view.ViewResourceHandler; import org.apache.ambari.view.commons.hdfs.FileOperationService; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.hdfs.MiniDFSCluster; import org.json.simple.JSONArray; import org.json.simple.JSONObject; import org.junit.After; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import com.google.inject.AbstractModule; import com.google.inject.Guice; import com.google.inject.Injector; import com.sun.jersey.core.header.FormDataContentDisposition; import com.sun.jersey.multipart.FormDataBodyPart; public 
class FilebrowserTest{ private ViewResourceHandler handler; private ViewContext context; private HttpHeaders httpHeaders; private UriInfo uriInfo; private Map<String, String> properties; private FileBrowserService fileBrowserService; private MiniDFSCluster hdfsCluster; public static final String BASE_URI = "http://localhost:8084/myapp/"; @Before public void setUp() throws Exception { handler = createNiceMock(ViewResourceHandler.class); context = createNiceMock(ViewContext.class); httpHeaders = createNiceMock(HttpHeaders.class); uriInfo = createNiceMock(UriInfo.class); properties = new HashMap<String, String>(); File baseDir = new File("./target/hdfs/" + "FilebrowserTest") .getAbsoluteFile(); FileUtil.fullyDelete(baseDir); Configuration conf = new Configuration(); conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, baseDir.getAbsolutePath()); conf.set("hadoop.proxyuser." + System.getProperty("user.name") + ".groups", "*"); conf.set("hadoop.proxyuser." + System.getProperty("user.name") + ".hosts", "*"); MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(conf); hdfsCluster = builder.build(); String hdfsURI = hdfsCluster.getURI() + "/"; properties.put("webhdfs.url", hdfsURI); expect(context.getProperties()).andReturn(properties).anyTimes(); expect(context.getUsername()).andReturn(System.getProperty("user.name")).anyTimes(); replay(handler, context, httpHeaders, uriInfo); fileBrowserService = getService(FileBrowserService.class, handler, context); FileOperationService.MkdirRequest request = new FileOperationService.MkdirRequest(); request.path = "/tmp"; fileBrowserService.fileOps().mkdir(request); } @After public void tearDown() { hdfsCluster.shutdown(); } @Test public void testListDir() throws Exception { FileOperationService.MkdirRequest request = new FileOperationService.MkdirRequest(); request.path = "/tmp1"; fileBrowserService.fileOps().mkdir(request); Response response = fileBrowserService.fileOps().listdir("/", null); JSONObject responseObject = (JSONObject) 
response.getEntity(); JSONArray statuses = (JSONArray) responseObject.get("files"); System.out.println(response.getEntity()); Assert.assertEquals(200, response.getStatus()); Assert.assertTrue(statuses.size() > 0); System.out.println(statuses); } private Response uploadFile(String path, String fileName, String fileExtension, String fileContent) throws Exception { File tempFile = File.createTempFile(fileName, fileExtension); BufferedWriter bw = new BufferedWriter(new FileWriter(tempFile)); bw.write(fileContent); bw.close(); InputStream content = new FileInputStream(tempFile); FormDataBodyPart inputStreamBody = new FormDataBodyPart( FormDataContentDisposition.name("file") .fileName(fileName + fileExtension).build(), content, MediaType.APPLICATION_OCTET_STREAM_TYPE); Response response = fileBrowserService.upload().uploadFile(content, inputStreamBody.getFormDataContentDisposition(), "/tmp/"); return response; } @Test public void testUploadFile() throws Exception { Response response = uploadFile("/tmp/", "testUpload", ".tmp", "Hello world"); Assert.assertEquals(200, response.getStatus()); Response listdir = fileBrowserService.fileOps().listdir("/tmp", null); JSONObject responseObject = (JSONObject) listdir.getEntity(); JSONArray statuses = (JSONArray) responseObject.get("files"); System.out.println(statuses.size()); Response response2 = fileBrowserService.download().browse("/tmp/testUpload.tmp", false, false, httpHeaders, uriInfo); Assert.assertEquals(200, response2.getStatus()); } private void createDirectoryWithFiles(String dirPath) throws Exception { FileOperationService.MkdirRequest request = new FileOperationService.MkdirRequest(); request.path = dirPath; File file = new File(dirPath); String fileName = file.getName(); fileBrowserService.fileOps().mkdir(request); for (int i = 0; i < 10; i++) { uploadFile(dirPath, fileName + i, ".txt", "Hello world" + i); } } @Test public void testStreamingGzip() throws Exception { String gzipDir = "/tmp/testGzip"; 
createDirectoryWithFiles(gzipDir); DownloadService.DownloadRequest dr = new DownloadService.DownloadRequest(); dr.entries = new String[] { gzipDir }; Response result = fileBrowserService.download().downloadGZip(dr); } @Test public void testStreamingDownloadGzipName() throws Exception { String gzipDir = "/tmp/testGzip1"; createDirectoryWithFiles(gzipDir); // test download 1 folder validateDownloadZipName(new String[]{gzipDir}, "testGzip1.zip" ); // test download 1 folder validateDownloadZipName(new String[]{gzipDir + "/testGzip11.txt"}, "testGzip11.txt.zip" ); String gzipDir2 = "/tmp/testGzip2"; createDirectoryWithFiles(gzipDir2); // test download 2 folders validateDownloadZipName(new String[] { gzipDir, gzipDir2 }, "hdfs.zip" ); // test download 2 files of same folder validateDownloadZipName(new String[] { gzipDir + "/testGzip11", gzipDir + "/testGzip12" }, "hdfs.zip" ); // test download 2 files of different folder -- although I think UI does not allow it validateDownloadZipName(new String[] { gzipDir + "/testGzip11", gzipDir2 + "/testGzip21" }, "hdfs.zip" ); } private void validateDownloadZipName(String[] entries, String downloadedFileName) { DownloadService.DownloadRequest dr = new DownloadService.DownloadRequest(); dr.entries = entries; Response result = fileBrowserService.download().downloadGZip(dr); List<Object> contentDisposition = result.getMetadata().get("Content-Disposition"); Assert.assertEquals("inline; filename=\"" + downloadedFileName +"\"",contentDisposition.get(0)); } @Test public void testUsername() throws Exception { Assert.assertEquals(System.getProperty("user.name"), fileBrowserService.upload().getDoAsUsername(context)); properties.put("webhdfs.username", "test-user"); Assert.assertEquals("test-user", fileBrowserService.upload().getDoAsUsername(context)); } private static <T> T getService(Class<T> clazz, final ViewResourceHandler viewResourceHandler, final ViewContext viewInstanceContext) { Injector viewInstanceInjector = Guice.createInjector(new 
AbstractModule() { @Override protected void configure() { bind(ViewResourceHandler.class).toInstance(viewResourceHandler); bind(ViewContext.class).toInstance(viewInstanceContext); } }); return viewInstanceInjector.getInstance(clazz); } }
/* * Copyright 2000-2016 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.ide.actions; import com.intellij.ide.BrowserUtil; import com.intellij.ide.IdeBundle; import com.intellij.ide.ui.UISettings; import com.intellij.openapi.Disposable; import com.intellij.openapi.actionSystem.AnActionEvent; import com.intellij.openapi.actionSystem.CustomShortcutSet; import com.intellij.openapi.application.ApplicationInfo; import com.intellij.openapi.application.ex.ApplicationInfoEx; import com.intellij.openapi.application.impl.ApplicationInfoImpl; import com.intellij.openapi.ide.CopyPasteManager; import com.intellij.openapi.project.DumbAwareAction; import com.intellij.openapi.ui.GraphicsConfig; import com.intellij.openapi.ui.popup.JBPopup; import com.intellij.openapi.ui.popup.JBPopupFactory; import com.intellij.openapi.util.Disposer; import com.intellij.openapi.util.IconLoader; import com.intellij.openapi.util.SystemInfo; import com.intellij.openapi.util.registry.Registry; import com.intellij.openapi.util.text.StringUtil; import com.intellij.ui.Gray; import com.intellij.ui.JBColor; import com.intellij.ui.LicensingFacade; import com.intellij.ui.UI; import com.intellij.ui.awt.RelativePoint; import com.intellij.util.Alarm; import com.intellij.util.text.DateFormatUtil; import com.intellij.util.ui.GraphicsUtil; import com.intellij.util.ui.JBUI; import com.intellij.util.ui.UIUtil; import com.intellij.util.ui.accessibility.AccessibleContextUtil; import 
org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.accessibility.AccessibleAction;
import javax.accessibility.AccessibleContext;
import javax.swing.*;
import java.awt.*;
import java.awt.datatransfer.StringSelection;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.awt.event.MouseMotionAdapter;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.List;

/**
 * Shows the "About" popup: the product image with licensing/build/VM information
 * painted over it, clickable links, and a fade-in "Copy to Clipboard" action.
 *
 * @author Konstantin Bulenkov
 */
public class AboutPopup {
  // Pseudo-URL used for the "Copy to Clipboard" link so it can share the Link machinery.
  private static final String COPY_URL = "copy://";
  private static JBPopup ourPopup;

  public static void show(@Nullable Window window) {
    ApplicationInfoEx appInfo = (ApplicationInfoEx)ApplicationInfo.getInstance();

    final PopupPanel panel = new PopupPanel(new BorderLayout());
    Icon image = IconLoader.getIcon(appInfo.getAboutImageUrl());
    if (appInfo.showLicenseeInfo()) {
      // Licensee info is painted on top of the image by InfoSurface.
      final InfoSurface infoSurface = new InfoSurface(image);
      infoSurface.setPreferredSize(new Dimension(image.getIconWidth(), image.getIconHeight()));
      panel.setInfoSurface(infoSurface);
    }
    else {
      panel.add(new JLabel(image), BorderLayout.NORTH);
    }

    // Center the popup over the owner window, or over the default screen if none.
    RelativePoint location;
    if (window != null) {
      Rectangle r = window.getBounds();
      location = new RelativePoint(window, new Point((r.width - image.getIconWidth()) / 2, (r.height - image.getIconHeight()) / 2));
    }
    else {
      Rectangle r = GraphicsEnvironment.getLocalGraphicsEnvironment().getDefaultScreenDevice().getDefaultConfiguration().getBounds();
      location = new RelativePoint(new Point((r.width - image.getIconWidth()) / 2, (r.height - image.getIconHeight()) / 2));
    }

    ourPopup = JBPopupFactory.getInstance().createComponentPopupBuilder(panel, panel)
      .setRequestFocus(true)
      .setFocusable(true)
      .setResizable(false)
      .setMovable(false)
      .setModalContext(false)
      .setShowShadow(true)
      .setShowBorder(false)
      .setCancelKeyEnabled(true)
      .setCancelOnClickOutside(true)
      .setCancelOnOtherWindowOpen(true)
      .createPopup();

    // Clear the static reference when the popup goes away.
    Disposer.register(ourPopup, new Disposable() {
      @Override
      public void dispose() {
        ourPopup = null;
      }
    });

    ourPopup.show(location);
  }

  private static void copyInfoToClipboard(String text) {
    try {
      CopyPasteManager.getInstance().setContents(new StringSelection(text));
    }
    catch (Exception ignore) {
      // Best-effort: clipboard failures are silently ignored.
    }
  }

  /**
   * Panel that paints the about image plus the textual build/license info,
   * tracks link hot-spots under the mouse, and animates the copy button alpha.
   */
  private static class InfoSurface extends JPanel {
    private final Color myColor;
    private final Color myLinkColor;
    private final Icon myImage;
    private Font myFont;
    private Font myBoldFont;
    private final List<AboutBoxLine> myLines = new ArrayList<>();
    // Plain-text mirror of myLines, used for the clipboard copy.
    private StringBuilder myInfo = new StringBuilder();
    private final List<Link> myLinks = new ArrayList<>();
    private Link myActiveLink;
    private boolean myShowCopy = false;
    private float myShowCopyAlpha;
    private Alarm myAlarm = new Alarm();

    public InfoSurface(Icon image) {
      ApplicationInfoImpl appInfo = (ApplicationInfoImpl)ApplicationInfoEx.getInstanceEx();

      myImage = image;
      //noinspection UseJBColor
      myColor = Color.white;
      myLinkColor = appInfo.getAboutLinkColor() != null ? appInfo.getAboutLinkColor() : UI.getColor("link.foreground");

      setOpaque(false);
      setBackground(myColor);
      setFocusable(true);
      Calendar cal = appInfo.getBuildDate();
      myLines.add(new AboutBoxLine(appInfo.getFullApplicationName(), true, null));
      appendLast();

      String buildInfo = IdeBundle.message("about.box.build.number", appInfo.getBuild().asString());
      String buildDate = "";
      if (appInfo.getBuild().isSnapshot()) {
        // Snapshot builds also show the build time of day.
        buildDate = new SimpleDateFormat("HH:mm, ").format(cal.getTime());
      }
      buildDate += DateFormatUtil.formatAboutDialogDate(cal.getTime());
      buildInfo += IdeBundle.message("about.box.build.date", buildDate);
      myLines.add(new AboutBoxLine(buildInfo));
      appendLast();

      myLines.add(new AboutBoxLine(""));

      LicensingFacade provider = LicensingFacade.getInstance();
      if (provider != null) {
        myLines.add(new AboutBoxLine(provider.getLicensedToMessage(), true, null));
        appendLast();
        for (String message : provider.getLicenseRestrictionsMessages()) {
          myLines.add(new AboutBoxLine(message));
          appendLast();
        }
      }

      myLines.add(new AboutBoxLine(""));

      Properties properties = System.getProperties();
      String javaVersion = properties.getProperty("java.runtime.version", properties.getProperty("java.version", "unknown"));
      String arch = properties.getProperty("os.arch", "");
      myLines.add(new AboutBoxLine(IdeBundle.message("about.box.jre", javaVersion, arch)));
      appendLast();

      String vmVersion = properties.getProperty("java.vm.name", "unknown");
      String vmVendor = properties.getProperty("java.vendor", "unknown");
      myLines.add(new AboutBoxLine(IdeBundle.message("about.box.vm", vmVersion, vmVendor)));
      appendLast();

      String thirdParty = appInfo.getThirdPartySoftwareURL();
      if (thirdParty != null) {
        myLines.add(new AboutBoxLine(""));
        myLines.add(new AboutBoxLine(""));
        // keepWithNext keeps "Powered by " and the link on the same rendered line.
        myLines.add(new AboutBoxLine("Powered by ").keepWithNext());
        myLines.add(new AboutBoxLine("open-source software", false, thirdParty));
      }

      addMouseListener(new MouseAdapter() {
        @Override
        public void mouseClicked(MouseEvent event) {
          if (myActiveLink != null) {
            event.consume();
            // The copy pseudo-link copies the info text and closes the popup.
            if (COPY_URL.equals(myActiveLink.myUrl)) {
              copyInfoToClipboard(myInfo.toString());
              if (ourPopup != null) {
                ourPopup.cancel();
              }
              return;
            }
            BrowserUtil.browse(myActiveLink.myUrl);
          }
        }

        // Fade animation parameters for the "Copy to Clipboard" button.
        final static double maxAlpha = 0.5;
        final static double fadeStep = 0.05;
        final static int animationDelay = 15;

        @Override
        public void mouseEntered(MouseEvent e) {
          if (!myShowCopy) {
            myShowCopy = true;
            myAlarm.cancelAllRequests();
            // Repeatedly bump the alpha until it reaches maxAlpha (fade in).
            myAlarm.addRequest(new Runnable() {
              @Override
              public void run() {
                if (myShowCopyAlpha < maxAlpha) {
                  myShowCopyAlpha += fadeStep;
                  repaint();
                  myAlarm.addRequest(this, animationDelay);
                }
              }
            }, animationDelay);
          }
        }

        @Override
        public void mouseExited(MouseEvent e) {
          if (myShowCopy) {
            myShowCopy = false;
            myAlarm.cancelAllRequests();
            // Fade the button back out.
            myAlarm.addRequest(new Runnable() {
              @Override
              public void run() {
                if (myShowCopyAlpha > 0) {
                  myShowCopyAlpha -= fadeStep;
                  repaint();
                  myAlarm.addRequest(this, animationDelay);
                }
              }
            }, animationDelay);
          }
        }
      });

      addMouseMotionListener(new MouseMotionAdapter() {
        @Override
        public void mouseMoved(MouseEvent event) {
          // Track which link (if any) is under the cursor and switch the cursor shape.
          boolean hadLink = (myActiveLink != null);
          myActiveLink = null;
          for (Link link : myLinks) {
            if (link.myRectangle.contains(event.getPoint())) {
              myActiveLink = link;
              if (!hadLink) {
                setCursor(Cursor.getPredefinedCursor(Cursor.HAND_CURSOR));
              }
              break;
            }
          }
          if (hadLink && myActiveLink == null) {
            setCursor(Cursor.getDefaultCursor());
          }
        }
      });
    }

    // Appends the text of the most recently added line to the copyable info buffer.
    private void appendLast() {
      myInfo.append(myLines.get(myLines.size() - 1).getText()).append("\n");
    }

    @Override
    protected void paintChildren(Graphics g) {
      super.paintChildren(g);
      Graphics2D g2 = (Graphics2D)g;
      UISettings.setupAntialiasing(g);

      Font labelFont = JBUI.Fonts.label();
      if (SystemInfo.isWindows) {
        labelFont = JBUI.Fonts.create("Tahoma", 12);
      }

      // Try decreasing font sizes until the text fits inside the renderer bounds
      // (render throws OverflowException when it does not fit).
      int startFontSize = Registry.is("ide.new.about") ? 14 : 10;
      for (int labelSize = JBUI.scale(startFontSize); labelSize != JBUI.scale(6); labelSize -= 1) {
        myLinks.clear();
        g2.setPaint(myColor);
        myImage.paintIcon(this, g2, 0, 0);

        g2.setColor(myColor);
        TextRenderer renderer = createTextRenderer(g2);
        UIUtil.setupComposite(g2);
        myFont = labelFont.deriveFont(Font.PLAIN, labelSize);
        myBoldFont = labelFont.deriveFont(Font.BOLD, labelSize + 1);
        try {
          renderer.render(30, 0, myLines);
          break;
        }
        catch (TextRenderer.OverflowException ignore) {
          // Too big — retry with a smaller font.
        }
      }

      ApplicationInfo appInfo = ApplicationInfo.getInstance();
      Rectangle aboutLogoRect = appInfo.getAboutLogoRect();
      if (aboutLogoRect != null) {
        myLinks.add(new Link(aboutLogoRect, appInfo.getCompanyURL()));
      }

      if (appInfo instanceof ApplicationInfoImpl) {
        g2.setColor(((ApplicationInfoImpl)appInfo).getCopyrightForeground());
        if (SystemInfo.isMac) {
          g2.setFont(JBUI.Fonts.miniFont());
        }
        else {
          g2.setFont(JBUI.Fonts.create("Tahoma", 10));
        }
      } else {
        g2.setColor(JBColor.BLACK);
      }

      if (Registry.is("ide.new.about")) {
        g2.setColor(Gray.x33);
        g2.setFont(JBUI.Fonts.label(12));
      }

      final int copyrightX = Registry.is("ide.new.about") ? JBUI.scale(140) : JBUI.scale(30);
      final int copyrightY = Registry.is("ide.new.about") ? JBUI.scale(390) : JBUI.scale(284);
      g2.drawString(getCopyrightText(), copyrightX, copyrightY);
    }

    @NotNull
    private String getCopyrightText() {
      // NOTE(review): appInfo is unused here; kept as-is in this documentation pass.
      ApplicationInfo appInfo = ApplicationInfo.getInstance();
      return "\u00A9 2000\u2013" + Calendar.getInstance(Locale.US).get(Calendar.YEAR) + " JetBrains s.r.o. All rights reserved.";
    }

    @NotNull
    private TextRenderer createTextRenderer(Graphics2D g) {
      // Bounds differ between the new and old about-dialog layouts.
      if (Registry.is("ide.new.about")) {
        return new TextRenderer(18, 200, 500, 220, g);
      }
      return new TextRenderer(0, 165, 398, 120, g);
    }

    public String getText() {
      return myInfo.toString();
    }

    /**
     * Word-wrapping text renderer that draws the about lines into a fixed box,
     * records link hot-spots as it goes, and throws {@link OverflowException}
     * when the text does not fit vertically.
     */
    private class TextRenderer {
      private final int xBase;
      private final int yBase;
      private final int w;
      private final int h;
      private final Graphics2D g2;

      private int x = 0;
      private int y = 0;
      private FontMetrics fontmetrics;
      private int fontAscent;
      private int fontHeight;
      private Font font;

      public class OverflowException extends Exception { }

      public TextRenderer(final int xBase, final int yBase, final int w, final int h, final Graphics2D g2) {
        this.xBase = JBUI.scale(xBase);
        this.yBase = JBUI.scale(yBase);
        this.w = JBUI.scale(w);
        this.h = JBUI.scale(h);
        this.g2 = g2;

        if (SystemInfo.isWindows) {
          g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_OFF);
        }
      }

      public void render(int indentX, int indentY, List<AboutBoxLine> lines) throws OverflowException {
        x = indentX;
        y = indentY;
        ApplicationInfoEx appInfo = (ApplicationInfoEx)ApplicationInfo.getInstance();
        boolean showCopyButton = myShowCopy || myShowCopyAlpha > 0;
        for (AboutBoxLine line : lines) {
          final String s = line.getText();
          setFont(line.isBold() ? myBoldFont : myFont);
          if (line.getUrl() != null) {
            g2.setColor(myLinkColor);
            FontMetrics metrics = g2.getFontMetrics(font);
            myLinks.add(new Link(new Rectangle(xBase + x, yBase + y - fontAscent, metrics.stringWidth(s + " "), fontHeight), line.getUrl()));
          }
          else {
            g2.setColor(Registry.is("ide.new.about") ? Gray.x33 : appInfo.getAboutForeground());
          }
          renderString(s, indentX);
          if (showCopyButton) {
            // Draw the semi-transparent "Copy to Clipboard" pseudo-link once,
            // aligned to the right edge of the image, on the first rendered line.
            final FontMetrics metrics = g2.getFontMetrics(myFont);
            String copyString = "Copy to Clipboard";
            final int width = metrics.stringWidth(copyString);
            g2.setFont(myFont);
            g2.setColor(myLinkColor);
            final int xOffset = myImage.getIconWidth() - width - 10;
            final GraphicsConfig config = GraphicsUtil.paintWithAlpha(g2, Math.max(0, Math.min(1, myShowCopyAlpha)));
            g2.drawString(copyString, xOffset, yBase + y);
            config.restore();
            myLinks.add(new Link(new Rectangle(xOffset, yBase + y - fontAscent, width, fontHeight), COPY_URL));
            showCopyButton = false;
          }
          if (!line.isKeepWithNext() && !line.equals(lines.get(lines.size()-1))) {
            lineFeed(indentX, s);
          }
        }
      }

      private void renderString(final String s, final int indentX) throws OverflowException {
        final List<String> words = StringUtil.split(s, " ");
        for (String word : words) {
          int wordWidth = fontmetrics.stringWidth(word);
          if (x + wordWidth >= w) {
            // Word would overflow the right edge — wrap first.
            lineFeed(indentX, word);
          }
          else {
            // Draw the separating space before the word when it still fits.
            char c = ' ';
            final int cW = fontmetrics.charWidth(c);
            if (x + cW < w) {
              g2.drawChars(new char[]{c}, 0, 1, xBase + x, yBase + y);
              x += cW;
            }
          }
          renderWord(word, indentX);
        }
      }

      private void renderWord(final String s, final int indentX) throws OverflowException {
        for (int j = 0; j != s.length(); ++j) {
          final char c = s.charAt(j);
          Font f = null;
          FontMetrics fm = null;
          try {
            // Fall back to a monospaced font for glyphs the current font lacks.
            if (!g2.getFont().canDisplay(c)) {
              f = g2.getFont();
              fm = fontmetrics;
              g2.setFont(new Font("Monospaced", f.getStyle(), f.getSize()));
              fontmetrics = g2.getFontMetrics();
            }
            final int cW = fontmetrics.charWidth(c);
            if (x + cW >= w) {
              lineFeed(indentX, s);
            }
            g2.drawChars(new char[]{c}, 0, 1, xBase + x, yBase + y);
            x += cW;
          }
          finally {
            // Restore the original font after a fallback glyph.
            if (f != null) {
              g2.setFont(f);
              fontmetrics = fm;
            }
          }
        }
      }

      private void lineFeed(int indent, final String s) throws OverflowException {
        x = indent;
        if (s.length() == 0) {
          // Blank lines advance only a third of the line height.
          y += fontHeight / 3;
        }
        else {
          y += fontHeight;
        }
        if (y >= h) {
          throw new OverflowException();
        }
      }

      private void setFont(Font font) {
        this.font = font;
        fontmetrics = g2.getFontMetrics(font);
        g2.setFont(font);
        fontAscent = fontmetrics.getAscent();
        fontHeight = fontmetrics.getHeight();
      }
    }

    /** One line of the about text: content plus bold/link/keep-with-next attributes. */
    private static class AboutBoxLine {
      private final String myText;
      private final boolean myBold;
      private final String myUrl;
      private boolean myKeepWithNext;

      public AboutBoxLine(final String text, final boolean bold, final String url) {
        myText = text;
        myBold = bold;
        myUrl = url;
      }

      public AboutBoxLine(final String text) {
        myText = text;
        myBold = false;
        myUrl = null;
      }

      public String getText() {
        return myText;
      }

      public boolean isBold() {
        return myBold;
      }

      public String getUrl() {
        return myUrl;
      }

      public boolean isKeepWithNext() {
        return myKeepWithNext;
      }

      public AboutBoxLine keepWithNext() {
        myKeepWithNext = true;
        return this;
      }
    }

    /** A clickable screen rectangle mapped to a target URL (or the copy pseudo-URL). */
    private static class Link {
      private final Rectangle myRectangle;
      private final String myUrl;

      private Link(Rectangle rectangle, String url) {
        myRectangle = rectangle;
        myUrl = url;
      }
    }

    @Override
    public AccessibleContext getAccessibleContext() {
      if (accessibleContext == null) {
        accessibleContext = new AccessibleInfoSurface();
      }
      return accessibleContext;
    }

    protected class AccessibleInfoSurface extends AccessibleJPanel {
      @Override
      public String getAccessibleName() {
        String text = "System Information\n" + getText() + "\n" + getCopyrightText();
        return AccessibleContextUtil.replaceLineSeparatorsWithPunctuation(text);
      }
    }
  }

  /** Popup container; exposes a Copy accessibility action and the copy shortcut. */
  public static class PopupPanel extends JPanel {
    private InfoSurface myInfoSurface;

    public PopupPanel(LayoutManager layout) {
      super(layout);
    }

    @Override
    public AccessibleContext getAccessibleContext() {
      if (accessibleContext == null) {
        accessibleContext = new AccessiblePopupPanel();
      }
      return accessibleContext;
    }

    public void setInfoSurface(InfoSurface infoSurface) {
      myInfoSurface = infoSurface;
      add(infoSurface, BorderLayout.NORTH);
      // Cmd-C / Ctrl-C copies the about text while the popup is focused.
      new DumbAwareAction() {
        @Override
        public void actionPerformed(AnActionEvent e) {
          copyInfoToClipboard(myInfoSurface.getText());
        }
      }.registerCustomShortcutSet(CustomShortcutSet.fromString("meta C", "control C"), this);
    }

    protected class AccessiblePopupPanel extends AccessibleJPanel implements AccessibleAction {
      @Override
      public String getAccessibleName() {
        ApplicationInfoEx appInfo = (ApplicationInfoEx)ApplicationInfo.getInstance();
        return "About " + appInfo.getFullApplicationName();
      }

      @Override
      public String getAccessibleDescription() {
        if (myInfoSurface != null) {
          return "Press Copy key to copy system information to clipboard";
        }
        return null;
      }

      @Override
      public AccessibleAction getAccessibleAction() {
        return this;
      }

      @Override
      public int getAccessibleActionCount() {
        if(myInfoSurface != null) return 1;
        return 0;
      }

      @Override
      public String getAccessibleActionDescription(int i) {
        if (i == 0 && myInfoSurface != null) return "Copy system information to clipboard";
        return null;
      }

      @Override
      public boolean doAccessibleAction(int i) {
        if (i == 0 && myInfoSurface != null) {
          copyInfoToClipboard(myInfoSurface.getText());
          return true;
        }
        return false;
      }
    }
  }
}
/* * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.comcast.cdn.traffic_control.traffic_router.core.util; import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.FileReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.StringReader; import java.net.URI; import java.net.URISyntaxException; import java.nio.channels.FileLock; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; import java.util.zip.GZIPInputStream; import org.apache.commons.io.IOUtils; import org.apache.log4j.Logger; import org.asynchttpclient.AsyncCompletionHandler; import org.asynchttpclient.AsyncHttpClient; import org.asynchttpclient.DefaultAsyncHttpClient; import org.asynchttpclient.DefaultAsyncHttpClientConfig; import org.asynchttpclient.Request; import org.asynchttpclient.Response; import static org.apache.commons.codec.digest.DigestUtils.md5Hex; /** * * @author jlaue * */ public class PeriodicResourceUpdater { private static final Logger LOGGER = Logger.getLogger(PeriodicResourceUpdater.class); private AsyncHttpClient asyncHttpClient; protected String databaseLocation; protected final ResourceUrl urls; protected ScheduledExecutorService executorService = Executors.newSingleThreadScheduledExecutor(); protected long pollingInterval; private 
static final String GZIP_ENCODING_STRING = "gzip"; protected ScheduledFuture<?> scheduledService; public PeriodicResourceUpdater(final AbstractUpdatable listener, final ResourceUrl urls, final String location, final int interval, final boolean pauseTilLoaded) { this.listener = listener; this.urls = urls; databaseLocation = location; pollingInterval = interval; this.pauseTilLoaded = pauseTilLoaded; } public void destroy() { executorService.shutdownNow(); while (!asyncHttpClient.isClosed()) { try { asyncHttpClient.close(); } catch (IOException e) { LOGGER.error(e.getMessage()); } } } /** * Gets pollingInterval. * * @return the pollingInterval */ public long getPollingInterval() { if(pollingInterval == 0) { return 66000; } return pollingInterval; } final private Runnable updater = new Runnable() { @Override public void run() { updateDatabase(); } }; private boolean hasBeenLoaded = false; final private AbstractUpdatable listener; final private boolean pauseTilLoaded; public void init() { asyncHttpClient = newAsyncClient(); putCurrent(); LOGGER.info("Starting schedule with interval: "+getPollingInterval() + " : "+TimeUnit.MILLISECONDS); scheduledService = executorService.scheduleWithFixedDelay(updater, 0, getPollingInterval(), TimeUnit.MILLISECONDS); // wait here until something is loaded final File existingDB = new File(databaseLocation); if(pauseTilLoaded ) { while(!existingDB.exists()) { LOGGER.info("Waiting for valid: " + databaseLocation); try { Thread.sleep(getPollingInterval()); } catch (InterruptedException e) { } } } } private AsyncHttpClient newAsyncClient() { return new DefaultAsyncHttpClient( new DefaultAsyncHttpClientConfig.Builder() .setFollowRedirect(true) .setConnectTimeout(10000) .build()); } private synchronized void putCurrent() { final File existingDB = new File(databaseLocation); if(existingDB.exists()) { try { listener.update(IOUtils.toString(new FileReader(existingDB))); } catch (Exception e) { LOGGER.warn(e,e); } } } public synchronized boolean 
updateDatabase() { final File existingDB = new File(databaseLocation); try { if (!hasBeenLoaded || needsUpdating(existingDB)) { final Request request = getRequest(urls.nextUrl()); if (request != null) { request.getHeaders().add("Accept-Encoding", GZIP_ENCODING_STRING); if ((asyncHttpClient!=null) && (!asyncHttpClient.isClosed())) { asyncHttpClient.executeRequest(request, new UpdateHandler(request)); // AsyncHandlers are NOT thread safe; one instance per request } return true; } } else { LOGGER.info("Database " + existingDB.getAbsolutePath() + " does not require updating."); } } catch (final Exception e) { LOGGER.warn(e.getMessage(), e); } return false; } public boolean updateDatabase(final String newDB) { final File existingDB = new File(databaseLocation); try { if (newDB != null && !filesEqual(existingDB, newDB)) { listener.cancelUpdate(); if (listener.update(newDB)) { copyDatabase(existingDB, newDB); LOGGER.info("updated " + existingDB.getAbsolutePath()); listener.setLastUpdated(System.currentTimeMillis()); listener.complete(); } else { LOGGER.warn("File rejected: " + existingDB.getAbsolutePath()); } } else { listener.noChange(); } hasBeenLoaded = true; return true; } catch (final Exception e) { LOGGER.warn(e.getMessage(), e); } return false; } public void setDatabaseLocation(final String databaseLocation) { this.databaseLocation = databaseLocation; } /** * Sets executorService. * * @param es * the executorService to set */ public void setExecutorService(final ScheduledExecutorService es) { executorService = es; } /** * Sets pollingInterval. 
* * @param pollingInterval * the pollingInterval to set */ public void setPollingInterval(final long pollingInterval) { this.pollingInterval = pollingInterval; } private String fileMd5(final File file) throws IOException { try (FileInputStream stream = new FileInputStream(file)) { return md5Hex(stream); } } boolean filesEqual(final File a, final String newDB) throws IOException { if (!a.exists()) { return newDB == null; } if (newDB == null) { return false; } if (a.length() != newDB.length()) { return false; } try (InputStream newDBStream = IOUtils.toInputStream(newDB)) { return fileMd5(a).equals(md5Hex(newDBStream)); } } protected synchronized void copyDatabase(final File existingDB, final String newDB) throws IOException { try (final StringReader in = new StringReader(newDB); final FileOutputStream out = new FileOutputStream(existingDB); final FileLock lock = out.getChannel().tryLock()) { if (lock == null) { LOGGER.error("Database " + existingDB.getAbsolutePath() + " locked by another process."); return; } IOUtils.copy(in, out); existingDB.setReadable(true, false); existingDB.setWritable(true, true); lock.release(); } } protected boolean needsUpdating(final File existingDB) { final long now = System.currentTimeMillis(); final long fileTime = existingDB.lastModified(); final long pollingIntervalInMS = getPollingInterval(); return ((fileTime + pollingIntervalInMS) < now); } private class UpdateHandler extends AsyncCompletionHandler<Object> { final Request request; public UpdateHandler(final Request request) { this.request = request; } @Override public Integer onCompleted(final Response response) throws IOException { // Do something with the Response final int code = response.getStatusCode(); if (code != 200) { if (code >= 400) { LOGGER.warn("failed to GET " + response.getUri() + " - returned status code " + code); } return code; } final String responseBody; if (GZIP_ENCODING_STRING.equals(response.getHeader("Content-Encoding"))) { final StringBuilder stringBuilder = 
new StringBuilder(); final GZIPInputStream zippedInputStream = new GZIPInputStream(response.getResponseBodyAsStream()); final BufferedReader r = new BufferedReader(new InputStreamReader(zippedInputStream)); String line; while((line = r.readLine()) != null) { stringBuilder.append(line); } responseBody = stringBuilder.toString(); } else { responseBody = response.getResponseBody(); } updateDatabase(responseBody); return code; } @Override public void onThrowable(final Throwable t){ LOGGER.warn("Failed request " + request.getUrl() + ": " + t, t); } }; private Request getRequest(final String url) { try { new URI(url); return asyncHttpClient.prepareGet(url).setFollowRedirect(true).build(); } catch (URISyntaxException e) { LOGGER.fatal("Cannot update database from Bad URI - " + url); return null; } } }
/*
 * Copyright 2017-present Open Networking Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.onlab.packet;

import org.slf4j.Logger;

import java.nio.ByteBuffer;
import java.util.Objects;

import static com.google.common.base.Preconditions.checkNotNull;
import static org.slf4j.LoggerFactory.getLogger;

/*
 * Entry for RIP version 2 - RFC 2453
 */
public class RIPV2Entry extends BasePacket {
    /** Wire length of one RIPv2 route entry in bytes (5 x 32-bit words). */
    public static final int ENTRY_LEN = 20;
    /** Address Family Identifier for IP routes. */
    public static final short AFI_IP = 2;
    /** RFC 2453: a metric of 16 means "unreachable". */
    public static final byte INFINITY_METRIC = 16;
    // Unsigned value 0xFFFF: an entry with this AFI carries authentication
    // data instead of a route (RFC 2453 section 4.1 / 5.2).
    public static final int AFI_AUTHENTICATION = 0xffff;
    public static final byte NEXTHOP_METRIC = -128; // 0x80 as an unsigned byte

    private final Logger log = getLogger(getClass());

    protected short addressFamilyId;
    protected short routeTag;
    protected Ip4Address ipAddress;
    protected Ip4Address subnetMask;
    protected Ip4Address nextHop;
    protected int metric;

    /**
     * Serializes this entry into its 20-byte RIPv2 wire format.
     *
     * @return byte array in network order: AFI, route tag, address, mask,
     *         next hop, metric
     */
    @Override
    public byte[] serialize() {
        ByteBuffer byteBuffer;
        byteBuffer = ByteBuffer.allocate(ENTRY_LEN);
        byteBuffer.putShort(addressFamilyId);
        byteBuffer.putShort(routeTag);
        byteBuffer.putInt(ipAddress.toInt());
        byteBuffer.putInt(subnetMask.toInt());
        byteBuffer.putInt(nextHop.toInt());
        byteBuffer.putInt(metric);
        return byteBuffer.array();
    }

    /**
     * Deserializer function for RIPv2 entry.
     *
     * @return deserializer function
     */
    public static Deserializer<RIPV2Entry> deserializer() {
        return (data, offset, length) -> {
            RIPV2Entry ripEntry = new RIPV2Entry();

            checkNotNull(data);

            if (offset < 0 || length < 0 ||
                length > data.length || offset >= data.length ||
                offset + length > data.length) {
                throw new DeserializationException("Illegal offset or length");
            }
            ByteBuffer bb = ByteBuffer.wrap(data, offset, length);
            if (bb.remaining() < ENTRY_LEN) {
                throw new DeserializationException(
                        "Buffer underflow while reading RIP entry");
            }
            ripEntry.addressFamilyId = bb.getShort();
            // Skip the authentication entry.
            // BUG FIX: addressFamilyId is a short, so (short) 0xFFFF widens to
            // -1 and the original comparison "== 0xffff" was always false;
            // mask to the unsigned 16-bit value before comparing.
            if ((ripEntry.addressFamilyId & 0xffff) == AFI_AUTHENTICATION) {
                return ripEntry;
            }
            ripEntry.routeTag = bb.getShort();
            ripEntry.ipAddress = Ip4Address.valueOf(bb.getInt());
            ripEntry.subnetMask = Ip4Address.valueOf(bb.getInt());
            ripEntry.nextHop = Ip4Address.valueOf(bb.getInt());
            ripEntry.metric = bb.getInt();
            return ripEntry;
        };
    }

    /*
     * (non-Javadoc)
     *
     * @see java.lang.Object#hashCode()
     */
    @Override
    public int hashCode() {
        return Objects.hash(super.hashCode(), nextHop.toInt(), subnetMask.toInt(),
                            ipAddress.toInt(), addressFamilyId, metric, routeTag);
    }

    /*
     * (non-Javadoc)
     *
     * @see java.lang.Object#equals(java.lang.Object)
     */
    @Override
    public boolean equals(final Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof RIPV2Entry)) {
            return false;
        }
        final RIPV2Entry that = (RIPV2Entry) obj;
        // Primitive fields compared directly; object fields null-safely.
        return super.equals(that) &&
                this.routeTag == that.routeTag &&
                this.metric == that.metric &&
                this.addressFamilyId == that.addressFamilyId &&
                Objects.equals(ipAddress, that.ipAddress) &&
                Objects.equals(nextHop, that.nextHop) &&
                Objects.equals(subnetMask, that.subnetMask);
    }

    /**
     * @return the Address Family Identifier
     */
    public short getAddressFamilyId() {
        return this.addressFamilyId;
    }

    /**
     * @param addressFamilyIdentifier the address family identifier to set
     * @return this
     */
    public RIPV2Entry setAddressFamilyId(final short addressFamilyIdentifier) {
        this.addressFamilyId = addressFamilyIdentifier;
        return this;
    }

    /**
     * @return the route tag
     */
    public short getRouteTag() {
        return this.routeTag;
    }

    /**
     * @param routetag the route tag to set
     * @return this
     */
    public RIPV2Entry setRouteTag(final short routetag) {
        this.routeTag = routetag;
        return this;
    }

    /**
     * @return the ip address
     */
    public Ip4Address getipAddress() {
        return this.ipAddress;
    }

    /**
     * @param ipaddress the Ip Address to set
     * @return this
     */
    public RIPV2Entry setIpAddress(final Ip4Address ipaddress) {
        this.ipAddress = ipaddress;
        return this;
    }

    /**
     * @return the subnet mask
     */
    public Ip4Address getSubnetMask() {
        return this.subnetMask;
    }

    /**
     * @param subnetmask the subnet mask to set
     * @return this
     */
    public RIPV2Entry setSubnetMask(final Ip4Address subnetmask) {
        this.subnetMask = subnetmask;
        return this;
    }

    /**
     * @return the next hop
     */
    public Ip4Address getNextHop() {
        return this.nextHop;
    }

    /**
     * @param nexthop the ip address if the next hop to set
     * @return this
     */
    public RIPV2Entry setNextHop(final Ip4Address nexthop) {
        this.nextHop = nexthop;
        return this;
    }

    /**
     * @return the metric
     */
    public int getMetric() {
        return this.metric;
    }

    /**
     * @param metric the route metric to set
     * @return this
     */
    public RIPV2Entry setMetric(final int metric) {
        this.metric = metric;
        return this;
    }

    /*
     * (non-Javadoc)
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        // FIX: "Mext hop" typo corrected to "Next hop".
        return "RIPV2Entry [address family Id=" + this.addressFamilyId
                + ", route tag=" + this.routeTag
                + ", Address=" + this.ipAddress
                + ", Subnet mask=" + this.subnetMask
                + ", Next hop=" + this.nextHop
                + ", metric = " + this.metric + "]";
    }
}
/*
 * Licensed to The Apereo Foundation under one or more contributor license
 * agreements.  See the NOTICE file distributed with this work for
 * additional information regarding copyright ownership.
 *
 * The Apereo Foundation licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file except in
 * compliance with the License. You may obtain a copy of the License at:
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package org.unitime.timetable.gwt.shared;

import java.util.ArrayList;
import java.util.List;

import org.unitime.timetable.gwt.command.client.GwtRpcRequest;
import org.unitime.timetable.gwt.command.client.GwtRpcResponse;
import org.unitime.timetable.gwt.command.client.GwtRpcResponseBoolean;
import org.unitime.timetable.gwt.command.client.GwtRpcResponseList;
import org.unitime.timetable.gwt.command.client.GwtRpcResponseLong;
import org.unitime.timetable.gwt.command.client.GwtRpcResponseNull;

import com.google.gwt.user.client.rpc.IsSerializable;

/**
 * Shared GWT-RPC transfer objects for the Point-In-Time Data (PITD) reports
 * feature. Every nested type here is an {@code IsSerializable} DTO that
 * crosses the client/server boundary, so fields are kept simple and
 * mutable with default constructors for GWT RPC serialization.
 *
 * @author Stephanie Schluttenhofer
 */
public class PointInTimeDataReportsInterface implements IsSerializable {

	/** A report flag: a bit value paired with its display text. */
	public static class Flag implements IsSerializable {
		private int iValue;      // bit value, combined into Report.iFlags
		private String iText;    // human-readable label for the flag

		public Flag() {}

		public int getValue() { return iValue; }
		public void setValue(int value) { iValue = value; }

		public String getText() { return iText; }
		public void setText(String text) { iText = text; }
	}

	/**
	 * A simple (value, text) pair, e.g. one selectable option of a report
	 * parameter. Ordered alphabetically by its display text.
	 */
	public static class IdValue implements IsSerializable, Comparable<IdValue> {
		private String iValue, iText;

		public IdValue() {}

		public IdValue(String value, String text) {
			iValue = value; iText = text;
		}

		public String getValue() { return iValue; }
		public void setValue(String value) { iValue = value; }

		public String getText() { return iText; }
		public void setText(String text) { iText = text; }

		@Override
		public int compareTo(IdValue o) {
			// Sort by display text, not by the underlying value.
			return getText().compareTo(o.getText());
		}

		@Override
		public String toString() { return getValue() + ": " + getText(); }
	}

	/**
	 * A report parameter: either a (possibly multi-select) list of
	 * {@link IdValue} options, or a free-text field with a default value.
	 */
	public static class Parameter implements IsSerializable {
		private String iType, iName;
		private List<IdValue> iValues = new ArrayList<IdValue>();
		private boolean iMultiSelect;       // true: user may pick several values
		private boolean iTextField;         // true: render as a text box instead of a list
		private String iDefaultTextValue;   // initial content when iTextField is set

		public Parameter() {}

		public String getType() { return iType; }
		public void setType(String type) { iType = type; }

		public String getName() { return iName; }
		public void setName(String name) { iName = name; }

		/** Live (mutable) list of selectable values for this parameter. */
		public List<IdValue> values() { return iValues; }

		public boolean isMultiSelect() { return iMultiSelect; }
		public void setMultiSelect(boolean multiSelect) { iMultiSelect = multiSelect; }

		public boolean isTextField() { return iTextField; }
		public void setTextField(boolean textField) { this.iTextField = textField; }

		public String getDefaultTextValue() { return iDefaultTextValue; }
		public void setDefaultTextValue(String defaultValue) { this.iDefaultTextValue = defaultValue; }
	}

	/** Definition of a stored PITD report: id, name, description, parameters, flags. */
	public static class Report implements GwtRpcResponse {
		private String iId = null;
		private String iName, iDescription;
		private List<Parameter> iParameters = new ArrayList<Parameter>();
		private int iFlags = 0;   // bit-or of Flag values

		public Report() {}

		public String getId() { return iId; }
		public void setId(String id) { iId = id; }

		public String getName() { return iName; }
		public void setName(String name) { iName = name; }

		/** Never returns null; a missing description comes back as the empty string. */
		public String getDescription() { return iDescription == null ? "" : iDescription; }
		public void setDescription(String description) { iDescription = description; }

		public int getFlags() { return iFlags; }
		public void setFlags(int flags) { iFlags = flags; }

		public void addParameter(Parameter parameter) { iParameters.add(parameter); }
		public List<Parameter> getParameters() { return iParameters; }

		@Override
		public String toString() { return getName(); }

		/**
		 * Returns true when this report declares at least one parameter of
		 * the given type; false for a null or empty type.
		 */
		public boolean parametersContain(String type) {
			if (type == null || type.isEmpty()){
				return(false);
			}
			boolean found = false;
			for (Parameter param : getParameters()){
				if (type.equals(param.getType())){
					found = true;
					break;
				}
			}
			return(found);
		}
	}

	/** Response carrying the available flags and parameters plus an edit permission bit. */
	public static class PITDParametersInterface implements GwtRpcResponse {
		private List<Flag> iFlags = new ArrayList<Flag>();
		private List<Parameter> iParameters = new ArrayList<Parameter>();
		private boolean iEditable = false;

		public PITDParametersInterface() {}

		public void addFlag(Flag flag) { iFlags.add(flag); }
		public List<Flag> getFlags() { return iFlags; }

		public void addParameter(Parameter parameter) { iParameters.add(parameter); }
		public List<Parameter> getParameters() { return iParameters; }

		public void setEditable(boolean editable) { iEditable = editable; }
		public boolean isEditable() { return iEditable; }
	}

	/** Request for the available PITD parameters (no request payload). */
	public static class PITDParametersRpcRequest implements GwtRpcRequest<PITDParametersInterface> {
		@Override
		// NOTE(review): returns null rather than a descriptive string —
		// presumably tolerated by the RPC logging; confirm before changing.
		public String toString() { return null; }
	}

	/** Request for the list of stored PITD reports (no request payload). */
	public static class PITDQueriesRpcRequest implements GwtRpcRequest<GwtRpcResponseList<Report>> {
		public PITDQueriesRpcRequest() {}
	}

	/** A simple row-oriented table of strings: the result of executing a report. */
	public static class Table implements GwtRpcResponse {
		private List<String[]> iData = new ArrayList<String[]>();

		public Table() {}

		/** Appends one row; the varargs array is stored as-is. */
		public void add(String... line) { iData.add(line); }
		public int size() { return iData.size(); }
		public String[] get(int row) { return iData.get(row); }
	}

	/** Request to execute a report with a set of (value, text) parameter bindings. */
	public static class PITDExecuteRpcRequest implements GwtRpcRequest<Table> {
		private Report iReport;
		private List<IdValue> iParameters = new ArrayList<IdValue>();

		public PITDExecuteRpcRequest() {}

		public void setReport(Report report) { iReport = report; }
		public Report getReport() { return iReport; }

		public void addParameter(String value, String text) { iParameters.add(new IdValue(value, text)); }
		public List<IdValue> getParameters() { return iParameters; }

		@Override
		public String toString() { return iReport.getName() + " {options: " + getParameters() + "}"; }
	}

	/**
	 * Request to store (create or update) a report; the server replies with
	 * the stored report's id.
	 */
	public static class PITDStoreRpcRequest extends Report implements GwtRpcRequest<GwtRpcResponseLong> {
		public PITDStoreRpcRequest() {
			super();
		}

		// NOTE(review): only id, name, description and flags are copied —
		// the source report's parameters are NOT. Confirm this is intended.
		public PITDStoreRpcRequest(Report query) {
			super();
			setId(query.getId());
			setName(query.getName());
			setDescription(query.getDescription());
			setFlags(query.getFlags());
		}
	}

	/** Request to delete a stored report by its numeric id. */
	public static class PITDDeleteRpcRequest implements GwtRpcRequest<GwtRpcResponseBoolean> {
		private Long iId = null;

		public PITDDeleteRpcRequest() {}

		public PITDDeleteRpcRequest(Long id) {
			iId = id;
		}

		public Long getId() { return iId; }
		public void setId(Long id) { iId = id; }

		@Override
		public String toString() { return iId == null ? "null" : iId.toString(); }
	}

	/** Request to record a "back" navigation state (history token plus ids). */
	public static class PITDSetBackRpcRequest implements GwtRpcRequest<GwtRpcResponseNull> {
		private String iHistory;
		private List<Long> iIds = new ArrayList<Long>();
		private String iType;

		public PITDSetBackRpcRequest() {}

		public String getHistory() { return iHistory; }
		public void setHistory(String history) { iHistory = history; }

		/** Adds an id, skipping duplicates. */
		public void addId(Long id) { if (!iIds.contains(id)) iIds.add(id); }
		public List<Long> getIds() { return iIds; }

		public void setType(String type) { iType = type; }
		public String getType() { return iType; }

		@Override
		// NOTE(review): hardcodes "courses" while iType goes unused here —
		// presumably this should be getType() + "#" + getHistory(); verify
		// against whatever consumes this history token before changing.
		public String toString() { return "courses" + "#" + getHistory(); }
	}
}
package org.multibit.hd.ui.views.wizards;

import com.google.common.base.Optional;
import com.google.common.eventbus.Subscribe;
import org.multibit.hd.core.services.ApplicationEventService;
import org.multibit.hd.core.services.CoreServices;
import org.multibit.hd.hardware.core.HardwareWalletService;
import org.multibit.hd.hardware.core.events.HardwareWalletEvent;
import org.multibit.hd.hardware.core.events.HardwareWalletEvents;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.swing.*;

/**
 * <p>Abstract base class to provide the following to UI:</p>
 * <ul>
 * <li>Provision of common methods to wizards that support hardware wallet operations</li>
 * </ul>
 *
 * <p>Each {@code handleXxx} method marshals the hardware wallet event onto the EDT,
 * pushes the current component models into the active panel view, asks the wizard
 * model to transition state, and then shows the resulting panel.</p>
 *
 * @param <M> the hardware wallet wizard model
 *
 * @since 0.0.1
 */
public abstract class AbstractHardwareWalletWizard<M extends AbstractHardwareWalletWizardModel> extends AbstractWizard<M> {

  private static final Logger log = LoggerFactory.getLogger(AbstractHardwareWalletWizard.class);

  /**
   * @param wizardModel     The overall wizard data model containing the aggregate information of all components in the wizard
   * @param isExiting       True if the exit button should trigger an application shutdown
   * @param wizardParameter An optional parameter that can be referenced during construction
   */
  protected AbstractHardwareWalletWizard(M wizardModel, boolean isExiting, Optional wizardParameter) {
    this(wizardModel, isExiting, wizardParameter, true);
  }

  /**
   * @param wizardModel     The overall wizard data model containing the aggregate information of all components in the wizard
   * @param isExiting       True if the exit button should trigger an application shutdown
   * @param wizardParameter An optional parameter that can be referenced during construction
   * @param escapeIsCancel  A press of the ESC key cancels the wizard
   */
  protected AbstractHardwareWalletWizard(M wizardModel, boolean isExiting, Optional wizardParameter, boolean escapeIsCancel) {
    super(wizardModel, isExiting, wizardParameter, escapeIsCancel);

    // All hardware wallet wizards can receive hardware wallet events
    HardwareWalletEvents.subscribe(this);
  }

  /**
   * Unregister from hardware wallet events - called during the hide process
   */
  @Override
  public void unsubscribe() {
    super.unsubscribe();
    HardwareWalletEvents.unsubscribe(this);
  }

  @Override
  public void hide(final String panelName, final boolean isExitCancel) {

    log.debug("Hide requested for {} with exitCancel {} ", panelName, isExitCancel);

    if (!wizardViewMap.containsKey(panelName)) {
      // FIX: the original passed only one argument for two "{}" placeholders,
      // so the offending panel name was never logged
      log.error(
        "'{}' is not a valid panel name. Check the panel has been registered in the view map. Registered panels are\n{}",
        panelName,
        wizardViewMap.keySet()
      );
      return;
    }

    final AbstractWizardPanelView wizardPanelView = wizardViewMap.get(panelName);

    // Provide warning that the panel is about to be hidden
    if (wizardPanelView.beforeHide(isExitCancel)) {

      // Ensure the hardware wallet is reset
      getWizardModel().requestCancel();

      // No cancellation so go ahead with the hide
      handleHide(panelName, isExitCancel, wizardPanelView);
    }
  }

  /**
   * Pushes the current component models into the active wizard panel view, if both
   * the panel name and its view are available. Must be called on the EDT.
   * (The button is outside of the panel itself, hence the explicit push.)
   */
  private void updateCurrentPanelView() {
    String panelName = getWizardModel().getPanelName();
    if (panelName != null && getWizardPanelView(panelName) != null) {
      getWizardPanelView(panelName).updateFromComponentModels(Optional.absent());
    }
  }

  /**
   * <p>Inform the wizard model of a "device failed"</p>
   *
   * @param event The originating event containing payload and context
   */
  public void handleDeviceFailed(final HardwareWalletEvent event) {
    SwingUtilities.invokeLater(
      new Runnable() {
        @Override
        public void run() {
          updateCurrentPanelView();

          // Move to the "device failed" state
          getWizardModel().showDeviceFailed(event);

          // Show the panel
          show(getWizardModel().getPanelName());
        }
      });
  }

  /**
   * <p>Inform the wizard model of a "device ready"</p>
   *
   * @param event The originating event containing payload and context
   */
  public void handleDeviceReady(final HardwareWalletEvent event) {
    SwingUtilities.invokeLater(
      new Runnable() {
        @Override
        public void run() {
          updateCurrentPanelView();

          String oldPanel = getWizardModel().getPanelName();

          // Move to the "device ready" state
          getWizardModel().showDeviceReady(event);

          // Show the panel only if the state transition changed it
          String newPanel = getWizardModel().getPanelName();
          if (oldPanel == null || !oldPanel.equals(newPanel)) {
            show(getWizardModel().getPanelName());
          }
        }
      });
  }

  /**
   * <p>Inform the wizard model of a "device detached"</p>
   *
   * @param event The originating event containing payload and context
   */
  public void handleDeviceDetached(final HardwareWalletEvent event) {
    SwingUtilities.invokeLater(
      new Runnable() {
        @Override
        public void run() {
          updateCurrentPanelView();

          // Move to the "device detached" state
          getWizardModel().showDeviceDetached(event);

          // Show the panel
          show(getWizardModel().getPanelName());
        }
      });
  }

  /**
   * <p>Inform the wizard model of a "device stopped"</p>
   *
   * @param event The originating event containing payload and context
   */
  public void handleDeviceStopped(final HardwareWalletEvent event) {
    SwingUtilities.invokeLater(
      new Runnable() {
        @Override
        public void run() {
          updateCurrentPanelView();

          // Move to the "device stopped" state
          getWizardModel().showDeviceStopped(event);

          // Show the panel
          show(getWizardModel().getPanelName());
        }
      });
  }

  /**
   * <p>Inform the wizard model of a "PIN entry"</p>
   *
   * @param event The originating event containing payload and context
   */
  public void handlePINEntry(final HardwareWalletEvent event) {
    SwingUtilities.invokeLater(
      new Runnable() {
        @Override
        public void run() {
          updateCurrentPanelView();

          // Move to the "PIN entry" state
          getWizardModel().showPINEntry(event);

          // Show the panel
          show(getWizardModel().getPanelName());
        }
      });
  }

  /**
   * <p>Inform the wizard model of a "passphrase entry"</p>
   *
   * @param event The originating event containing payload and context
   */
  public void handlePassphraseEntry(final HardwareWalletEvent event) {
    SwingUtilities.invokeLater(
      new Runnable() {
        @Override
        public void run() {
          updateCurrentPanelView();

          // Move to the "passphrase entry" state
          getWizardModel().showPassphraseEntry(event);

          // Show the panel
          show(getWizardModel().getPanelName());
        }
      });
  }

  /**
   * <p>Inform the wizard model of a "button press"</p>
   *
   * @param event The originating event containing payload and context
   */
  public void handleButtonPress(final HardwareWalletEvent event) {
    SwingUtilities.invokeLater(
      new Runnable() {
        @Override
        public void run() {
          if (getWizardModel().getPanelName() != null) {
            log.debug("Wizard panel name {}", getWizardModel().getPanelName());
          }
          updateCurrentPanelView();

          // Move to the "button press" state
          getWizardModel().showButtonPress(event);

          // Show the panel
          show(getWizardModel().getPanelName());
        }
      });
  }

  /**
   * <p>Inform the wizard model of an "operation succeeded"</p>
   *
   * @param event The originating event containing payload and context
   */
  private void handleOperationSucceeded(final HardwareWalletEvent event) {
    SwingUtilities.invokeLater(
      new Runnable() {
        @Override
        public void run() {
          updateCurrentPanelView();

          // Move to the "operation succeeded" state
          getWizardModel().showOperationSucceeded(event);

          // Show the panel (best effort - the wizard may already be hiding)
          try {
            show(getWizardModel().getPanelName());
          } catch (IllegalStateException ise) {
            // Carry on
            log.debug(ise.getMessage());
          }
        }
      });
  }

  /**
   * <p>Inform the wizard model of an "operation failed"</p>
   *
   * @param event The originating event containing payload and context
   */
  public void handleOperationFailed(final HardwareWalletEvent event) {
    SwingUtilities.invokeLater(
      new Runnable() {
        @Override
        public void run() {
          updateCurrentPanelView();

          // Move to the "operation failed" state
          getWizardModel().showOperationFailed(event);

          // Show the panel
          show(getWizardModel().getPanelName());
        }
      });
  }

  /**
   * <p>Inform the wizard model of a "provide entropy"</p>
   *
   * @param event The originating event containing payload and context
   */
  public void handleProvideEntropy(final HardwareWalletEvent event) {
    SwingUtilities.invokeLater(
      new Runnable() {
        @Override
        public void run() {
          // FIX: now null-guarded like the other handlers (the original
          // dereferenced the panel view unconditionally, risking an NPE)
          updateCurrentPanelView();

          // Move to the "provide entropy" state
          getWizardModel().showProvideEntropy(event);

          // Show the panel
          show(getWizardModel().getPanelName());
        }
      });
  }

  /**
   * <p>Inform the wizard model of a "received address"</p>
   *
   * @param event The originating event containing payload and context
   */
  public void handleReceivedAddress(final HardwareWalletEvent event) {
    SwingUtilities.invokeLater(
      new Runnable() {
        @Override
        public void run() {
          // FIX: now null-guarded like the other handlers
          updateCurrentPanelView();

          // Move to the "received address" state
          getWizardModel().receivedAddress(event);

          // Show the panel
          show(getWizardModel().getPanelName());
        }
      });
  }

  /**
   * <p>Inform the wizard model of a "received public key"</p>
   *
   * @param event The originating event containing payload and context
   */
  public void handleReceivedPublicKey(final HardwareWalletEvent event) {
    SwingUtilities.invokeLater(
      new Runnable() {
        @Override
        public void run() {
          updateCurrentPanelView();

          // Move to the "received public key" state
          getWizardModel().receivedPublicKey(event);

          // Show the panel
          show(getWizardModel().getPanelName());
        }
      });
  }

  /**
   * <p>Inform the wizard model of a "received deterministic hierarchy"</p>
   *
   * @param event The originating event containing payload and context
   */
  public void handleReceivedDeterministicHierarchy(final HardwareWalletEvent event) {
    SwingUtilities.invokeLater(
      new Runnable() {
        @Override
        public void run() {
          updateCurrentPanelView();

          // Move to the "received deterministic hierarchy" state
          getWizardModel().receivedDeterministicHierarchy(event);

          // Show the panel
          show(getWizardModel().getPanelName());
        }
      });
  }

  /**
   * <p>Inform the wizard model of a "received message signature"</p>
   *
   * @param event The originating event containing payload and context
   */
  public void handleReceivedMessageSignature(final HardwareWalletEvent event) {
    SwingUtilities.invokeLater(
      new Runnable() {
        @Override
        public void run() {
          // FIX: now null-guarded like the other handlers
          updateCurrentPanelView();

          // Move to the "received message signature" state
          getWizardModel().receivedMessageSignature(event);

          // Show the panel
          show(getWizardModel().getPanelName());
        }
      });
  }

  /**
   * <p>Respond to hardware wallet events. Not SHOW_DEVICE_* since the MainController handles them.</p>
   * <p>Translate the hardware event into an action on the EDT for easier integration with the existing
   * framework</p>
   *
   * @param event The hardware wallet event indicating a state change
   */
  @Subscribe
  public void onHardwareWalletEvent(HardwareWalletEvent event) {

    log.debug("{} Received hardware event: '{}'.", this, event.getEventType().name());

    if (!ApplicationEventService.isHardwareWalletEventAllowed()) {
      // FIX: original passed an argument with no "{}" placeholder
      log.debug("Ignoring device event {} due to 'ignore threshold' still in force", event);
      return;
    }

    // Check if this is the first event from the hardware wallet
    Optional<HardwareWalletService> currentHardwareWalletService = CoreServices.getCurrentHardwareWalletService();
    if (!currentHardwareWalletService.isPresent()) {
      // Allow time for the current hardware wallet to initialise
      CoreServices.useFirstReadyHardwareWalletService();
    }

    switch (event.getEventType()) {
      case SHOW_DEVICE_FAILED:
        handleDeviceFailed(event);
        break;
      case SHOW_DEVICE_READY:
        handleDeviceReady(event);
        break;
      case SHOW_DEVICE_DETACHED:
        handleDeviceDetached(event);
        break;
      case SHOW_DEVICE_STOPPED:
        handleDeviceStopped(event);
        break;
      case SHOW_PIN_ENTRY:
        handlePINEntry(event);
        break;
      case SHOW_PASSPHRASE_ENTRY:
        handlePassphraseEntry(event);
        break;
      case SHOW_BUTTON_PRESS:
        handleButtonPress(event);
        break;
      case SHOW_OPERATION_SUCCEEDED:
        handleOperationSucceeded(event);
        break;
      case SHOW_OPERATION_FAILED:
        handleOperationFailed(event);
        break;
      case PROVIDE_ENTROPY:
        handleProvideEntropy(event);
        break;
      case ADDRESS:
        handleReceivedAddress(event);
        break;
      case PUBLIC_KEY:
        handleReceivedPublicKey(event);
        break;
      case DETERMINISTIC_HIERARCHY:
        handleReceivedDeterministicHierarchy(event);
        break;
      case MESSAGE_SIGNATURE:
        handleReceivedMessageSignature(event);
        break;
      case SHOW_WORD_ENTRY:
        // Handled elsewhere
        break;
      default:
        log.warn("Unknown hardware wallet event type: {}", event.getEventType().name());
        break;
    }
  }
}
/*
 * Copyright 2015 Morgan Redshaw
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 **/

package com.example.assignment1;

import java.util.Calendar;
import java.util.List;

import android.app.Activity;
import android.app.DialogFragment;
import android.content.Intent;
import android.os.Bundle;
import android.text.Editable;
import android.text.TextWatcher;
import android.text.method.KeyListener;
import android.util.Pair;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ListView;
import android.widget.TextView;

import com.example.assignment1.dialogs.ChangeDateDialogFragment;
import com.example.assignment1.dialogs.ChangeDateListener;
import com.example.assignment1.dialogs.DisplayCurrencyUsageInfoDialogFragment;

// Will be responsible for displaying a TravelClaim.
// Acts as an MVC view: registers with a TravelClaimController and is
// refreshed via update(TravelClaim). All user edits are pushed straight
// back to the controller.
public class TravelClaimActivity extends Activity implements FView<TravelClaim>,
        ChangeDateListener, TravelExpenseArrayAdapterListener {
    // Ids passed to the date-change dialog so dateChanged() knows which
    // date (start or end) the user edited.
    private static final int START_DATE_ID = 0;
    private static final int END_DATE_ID = 1;
    // Intent-extra key: position of the claim to display within the owner.
    public static final String ARGUMENT_CLAIM_POSITION = "ClaimPosition";

    private TravelClaimController controller;
    private TextView currentStatus;
    private EditText name;
    private EditText description;
    private TextView startDateText;
    private TextView endDateText;
    private Button startDateButton;
    private Button endDateButton;
    private Button createExpenseButton;
    private TravelExpenseArrayAdapter expenseAdapter;
    // Kept so updateStatesMenuBar can toggle items after menu creation.
    private Menu menu;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_travel_claim);
        findLayoutItems();
        setUpClaimToDisplay();
    }

    // Looks up the claim's controller from the intent extra and registers
    // this activity as one of its views.
    private void setUpClaimToDisplay() {
        int claimPosition = getIntent().getIntExtra(ARGUMENT_CLAIM_POSITION, 0);
        TravelClaimOwner owner = TravelApplication.getMainOwner();
        controller = owner.getTravelClaimController(claimPosition);
        controller.addView(this);
    }

    // Wires up every widget reference and listener used by this activity.
    private void findLayoutItems() {
        findButtons();
        findTextBoxes();
        setUpDescriptionTextBox();
        setUpNameTextBox();
        setUpExpensesList();
    }

    @Override
    protected void onResume() {
        super.onResume();
        // Ask the controller to push the current model state into update().
        controller.requestUpdate();
    }

    private void findButtons() {
        startDateButton = (Button) findViewById(R.id.change_start_date);
        endDateButton = (Button) findViewById(R.id.change_end_date);
        createExpenseButton = (Button) findViewById(R.id.new_expense);
    }

    private void findTextBoxes() {
        currentStatus = (TextView) findViewById(R.id.current_status);
        startDateText = (TextView) findViewById(R.id.start_date_text);
        endDateText = (TextView) findViewById(R.id.end_date_text);
    }

    // Pushes every description edit to the controller as the user types.
    private void setUpDescriptionTextBox() {
        description = (EditText) findViewById(R.id.description);
        description.addTextChangedListener(new TextWatcher() {
            @Override
            public void onTextChanged(CharSequence s, int start, int before, int count) {
            }

            @Override
            public void beforeTextChanged(CharSequence s, int start, int count, int after) {
            }

            @Override
            public void afterTextChanged(Editable s) {
                controller.setDescription(s.toString());
            }
        });
        // Save it so that it can be restored later
        // (disableEditing() sets the KeyListener to null; enableEditing()
        // restores the original from this tag).
        description.setTag(description.getKeyListener());
    }

    // Pushes every name edit to the controller as the user types.
    private void setUpNameTextBox() {
        name = (EditText) findViewById(R.id.name);
        name.addTextChangedListener(new TextWatcher() {
            @Override
            public void onTextChanged(CharSequence s, int start, int before, int count) {
            }

            @Override
            public void beforeTextChanged(CharSequence s, int start, int count, int after) {
            }

            @Override
            public void afterTextChanged(Editable s) {
                controller.setName(s.toString());
            }
        });
        // Save it so that it can be restored later
        name.setTag(name.getKeyListener());
    }

    private void setUpExpensesList() {
        expenseAdapter = new TravelExpenseArrayAdapter(this, this);
        ListView list = (ListView) findViewById(R.id.expense_list);
        list.setAdapter(expenseAdapter);
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        // Deregister so the controller does not hold a stale reference.
        controller.deleteView(this);
    }

    // View callback: re-renders the whole screen from the claim model.
    @Override
    public void update(TravelClaim model) {
        if (model.mayBeEdited()) {
            enableEditing();
        } else {
            disableEditing();
        }
        updateStatus(model);
        updateDates(model);
        updateName(model);
        updateDescription(model);
        updateStatesMenuBar(model);
        updateTravelExpenseList(model);
    }

    // Restores the saved KeyListeners and re-enables all edit controls.
    private void enableEditing() {
        description.setKeyListener((KeyListener) description.getTag());
        name.setKeyListener((KeyListener) name.getTag());
        startDateButton.setEnabled(true);
        endDateButton.setEnabled(true);
        createExpenseButton.setEnabled(true);
        expenseAdapter.setEditable(true);
    }

    // Makes the text boxes read-only (null KeyListener) and disables buttons.
    private void disableEditing() {
        description.setKeyListener(null);
        name.setKeyListener(null);
        startDateButton.setEnabled(false);
        endDateButton.setEnabled(false);
        createExpenseButton.setEnabled(false);
        expenseAdapter.setEditable(false);
    }

    private void updateStatus(TravelClaim model) {
        String text = getResources().getString(R.string.current_status, model.getState());
        currentStatus.setText(text);
    }

    // Refreshes both date labels and rebinds the buttons with the model's
    // current dates (a fresh listener captures the latest Calendar).
    private void updateDates(TravelClaim model) {
        startDateText.setText(Utilities.getFormattedDateString(this, R.string.start_date,
                model.getStartDate()));
        startDateButton.setOnClickListener(new DateButtonClickListener(model.getStartDate(),
                START_DATE_ID));

        endDateText.setText(Utilities.getFormattedDateString(this, R.string.end_date,
                model.getEndDate()));
        endDateButton.setOnClickListener(new DateButtonClickListener(model.getEndDate(),
                END_DATE_ID));
    }

    // Opens the date-change dialog pre-populated with the captured date;
    // dateId tells dateChanged() which date was edited.
    private class DateButtonClickListener implements OnClickListener {
        private final Calendar initialDate;
        private final int dateId;

        DateButtonClickListener(Calendar initialDate, int dateId) {
            this.initialDate = initialDate;
            this.dateId = dateId;
        }

        @Override
        public void onClick(View v) {
            displayDialogFragment(ChangeDateDialogFragment.newInstance(initialDate, dateId));
        }
    }

    private void updateName(TravelClaim model) {
        String newText = model.getName();
        // This way, it will keep the position in the edit text
        if (!newText.equals(name.getText().toString())) {
            name.setText(newText);
        }
    }

    private void updateDescription(TravelClaim model) {
        String newText = model.getDescription();
        // This way, it will keep the position in the edit text
        if (!newText.equals(description.getText().toString())) {
            description.setText(newText);
        }
    }

    // Shows only the state-change menu items that are valid transitions
    // from the model's current state. No-op until the menu is created.
    private void updateStatesMenuBar(TravelClaim model) {
        if (menu == null)
            return;
        boolean shouldShowSubmit = model.isValidStateChange(TravelClaimStates.SUBMITTED);
        menu.findItem(R.id.submit).setVisible(shouldShowSubmit);

        boolean shouldShowReturned = model.isValidStateChange(TravelClaimStates.RETURNED);
        menu.findItem(R.id.returned).setVisible(shouldShowReturned);

        boolean shouldShowApproved = model.isValidStateChange(TravelClaimStates.APPROVED);
        menu.findItem(R.id.approved).setVisible(shouldShowApproved);
    }

    private void updateTravelExpenseList(TravelClaim model) {
        expenseAdapter.setAllExpenses(model.getAllExpenses());
    }

    // Callback from the date dialog: routes the chosen date to the right
    // controller setter based on the id passed at dialog creation.
    @Override
    public void dateChanged(Calendar newCalendarDate, int dateId) {
        if (dateId == START_DATE_ID) {
            controller.setStartDate(newCalendarDate);
        } else if (dateId == END_DATE_ID) {
            controller.setEndDate(newCalendarDate);
        }
    }

    private void displayDialogFragment(DialogFragment fragment) {
        fragment.show(getFragmentManager(), "dialogfragment");
    }

    // These two functions are for allowing the user to change the state of the
    // program
    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        MenuInflater inflater = getMenuInflater();
        inflater.inflate(R.menu.travel_claim_states, menu);
        this.menu = menu;
        // Re-run update() so the freshly created menu items get their
        // correct visibility.
        controller.requestUpdate();
        return super.onCreateOptionsMenu(menu);
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Handle presses on the action bar items
        switch (item.getItemId()) {
        case R.id.submit:
            controller.setState(TravelClaimStates.SUBMITTED);
            return true;
        case R.id.returned:
            controller.setState(TravelClaimStates.RETURNED);
            return true;
        case R.id.approved:
            controller.setState(TravelClaimStates.APPROVED);
            return true;
        default:
            return super.onOptionsItemSelected(item);
        }
    }

    // Button handler (wired in XML): emails the claim.
    public void createEmail(View v) {
        AndroidEmailSender sender = new AndroidEmailSender(this);
        sender.sendEmail(controller.getTravelClaim());
    }

    // Button handler (wired in XML): shows the per-currency totals dialog.
    public void displayCurrency(View v) {
        List<Pair<String, Float>> mergedPayments = controller.getCurrencyInformation();

        DisplayCurrencyUsageInfoDialogFragment dialogFragment = DisplayCurrencyUsageInfoDialogFragment
                .newInstance(mergedPayments);
        displayDialogFragment(dialogFragment);
    }

    @Override
    public void deleteExpense(TravelExpense expense) {
        controller.deleteExpense(expense);
    }

    // Button handler (wired in XML): creates a new expense and opens it.
    // Note: the new expense's position is read BEFORE creation, so it equals
    // the index the expense will occupy once appended.
    public void createExpense(View v) {
        int newExpensePos = controller.getNumberOfExpenses();
        controller.createExpense();
        displayExpense(newExpensePos);
    }

    @Override
    public void editExpense(TravelExpense expense) {
        int expensePosition = controller.getExpensePosition(expense);
        displayExpense(expensePosition);
    }

    // Launches TravelExpenseActivity for the expense at the given position
    // within the current claim.
    private void displayExpense(int expensePosition) {
        int claimPosition = getIntent().getIntExtra(ARGUMENT_CLAIM_POSITION, 0);

        Intent intent = new Intent(this, TravelExpenseActivity.class);
        intent.putExtra(TravelExpenseActivity.ARGUMENT_CLAIM_POSITION, claimPosition);
        intent.putExtra(TravelExpenseActivity.ARGUMENT_EXPENSE_POSITION, expensePosition);
        startActivity(intent);
    }
}
/************************************************************************************** * Copyright (c) 2013-2015, Finnish Social Science Data Archive/University of Tampere * * * * All rights reserved. * * * * Redistribution and use in source and binary forms, with or without modification, * * are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this * * list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright notice, * * this list of conditions and the following disclaimer in the documentation * * and/or other materials provided with the distribution. * * 3. Neither the name of the copyright holder nor the names of its contributors * * may be used to endorse or promote products derived from this software * * without specific prior written permission. * * * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR * * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON * * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
* **************************************************************************************/ package fi.uta.fsd.metka.ddi.builder; import codebook25.*; import fi.uta.fsd.Logger; import fi.uta.fsd.metka.enums.Language; import fi.uta.fsd.metka.model.access.calls.*; import fi.uta.fsd.metka.model.access.enums.StatusCode; import fi.uta.fsd.metka.model.configuration.*; import fi.uta.fsd.metka.model.data.RevisionData; import fi.uta.fsd.metka.model.data.container.*; import fi.uta.fsd.metka.mvc.services.ReferenceService; import fi.uta.fsd.metka.names.Fields; import fi.uta.fsd.metka.names.Lists; import fi.uta.fsd.metka.storage.repository.RevisionRepository; import fi.uta.fsd.metka.storage.repository.enums.ReturnResult; import fi.uta.fsd.metka.transfer.reference.ReferenceOption; import org.apache.commons.lang3.tuple.Pair; import org.springframework.util.StringUtils; import org.jsoup.*; import org.jsoup.nodes.*; import org.jsoup.select.*; import java.util.*; class DDIWriteStudyDescription extends DDIWriteSectionBase { DDIWriteStudyDescription(RevisionData revision, Language language, CodeBookType codeBook, Configuration configuration, RevisionRepository revisions, ReferenceService references) { super(revision, language, codeBook, configuration, revisions, references); } void write() { // Add study description to codebook StdyDscrType stdyDscrType = codeBook.addNewStdyDscr(); addCitationInfo(stdyDscrType); addStudyAuthorization(stdyDscrType); addStudyInfo(stdyDscrType); addMethod(stdyDscrType); addDataAccess(stdyDscrType); addOtherStudyMaterial(stdyDscrType); } private void addCitationInfo(StdyDscrType stdyDscrType) { // Add citation CitationType citationType = stdyDscrType.addNewCitation(); addCitationTitle(citationType); addCitationRspStatement(citationType); addCitationProdStatement(citationType); addCitationDistStatement(citationType); // Add SerStmt addCitationSerStatement(citationType); // Add VerStmt addCitationVerStatement(citationType); // Add biblcit Pair<StatusCode, 
ValueDataField> valueFieldPair = revision.dataField(ValueDataFieldCall.get(Fields.BIBLCIT)); if(hasValue(valueFieldPair, language)) { fillTextType(citationType.addNewBiblCit(), valueFieldPair, language); } } private void addCitationProdStatement(CitationType citationType) { ProdStmtType prodStmtType = citationType.addNewProdStmt(); Pair<StatusCode, ContainerDataField> containerPair = revision.dataField(ContainerDataFieldCall.get(Fields.PRODUCERS)); String path = "producers."; if(containerPair.getLeft() == StatusCode.FIELD_FOUND && containerPair.getRight().hasRowsFor(Language.DEFAULT)) { for(DataRow row : containerPair.getRight().getRowsFor(Language.DEFAULT)) { if(row.getRemoved()) { continue; } String rowRoot = path+row.getRowId()+"."; String organisation = getReferenceTitle(rowRoot + Fields.PRODUCERORGANISATION); String agency = getReferenceTitle(rowRoot + Fields.PRODUCERAGENCY); String section = getReferenceTitle(rowRoot + Fields.PRODUCERSECTION); ProducerType d; if(!StringUtils.hasText(agency) && !StringUtils.hasText(section)) { if(!StringUtils.hasText(organisation)) { continue; } d = fillTextType(prodStmtType.addNewProducer(), organisation); } else { String producer = (StringUtils.hasText(agency)) ? agency : ""; producer += (StringUtils.hasText(producer) && StringUtils.hasText(section)) ? ". " : ""; producer += (StringUtils.hasText(section)) ? section : ""; if(!StringUtils.hasText(producer)) { continue; } d = fillTextType(prodStmtType.addNewProducer(), producer); } String abbr = getReferenceTitle(rowRoot + Fields.PRODUCERSECTIONABBR); abbr = (StringUtils.hasText(abbr)) ? abbr : getReferenceTitle(rowRoot + Fields.PRODUCERAGENCYABBR); abbr = (StringUtils.hasText(abbr)) ? 
abbr : getReferenceTitle(rowRoot + Fields.PRODUCERORGANISATIONABBR); d.setAbbr(abbr); if(StringUtils.hasText(agency) || StringUtils.hasText(section)) { if(StringUtils.hasText(organisation)) { d.setAffiliation(organisation); } } Pair<StatusCode, ValueDataField> fieldPair = row.dataField(ValueDataFieldCall.get(Fields.PRODUCERROLE)); if(hasValue(fieldPair, Language.DEFAULT)) { String role = fieldPair.getRight().getActualValueFor(Language.DEFAULT); SelectionList list = configuration.getRootSelectionList(configuration.getField(Fields.PRODUCERROLE).getSelectionList()); Option option = list.getOptionWithValue(role); if(option != null) { d.setRole(option.getTitleFor(language)); } } } } // Add copyright fillTextType(prodStmtType.addNewCopyright(), getDDIText(language, "COPYRIGHT_STDY")); } private void addCitationDistStatement(CitationType citationType) { DistStmtType distStmtType = citationType.addNewDistStmt(); DistrbtrType d = fillTextType(distStmtType.addNewDistrbtr(), getDDIText(language, "DISTRIBUTR")); d.setAbbr(getDDIText(language, "DISTRIBUTR_ABB")); d.setURI(getDDIText(language, "DISTRIBUTR_URI")); } private void addCitationRspStatement(CitationType citationType) { RspStmtType rsp = citationType.addNewRspStmt(); Pair<StatusCode, ContainerDataField> containerPair = revision.dataField(ContainerDataFieldCall.get(Fields.AUTHORS)); if(containerPair.getLeft() == StatusCode.FIELD_FOUND && containerPair.getRight().hasRowsFor(Language.DEFAULT)) { String pathRoot = "authors."; for(DataRow row : containerPair.getRight().getRowsFor(Language.DEFAULT)) { if (row.getRemoved()) { continue; } String rowRoot = pathRoot + row.getRowId() + "."; Pair<StatusCode, ValueDataField> pair = row.dataField(ValueDataFieldCall.get(Fields.AUTHORTYPE)); if (!hasValue(pair, Language.DEFAULT)) { // We require a type for collector before we can move forward continue; } if(!pair.getRight().getActualValueFor(Language.DEFAULT).equals("1")) { continue; } // We have a person author pair = 
row.dataField(ValueDataFieldCall.get(Fields.AUTHOR)); if (!hasValue(pair, Language.DEFAULT)) { // We must have a collector continue; } AuthEntyType d = fillTextType(rsp.addNewAuthEnty(), pair, Language.DEFAULT); String organisation = getReferenceTitle(rowRoot + Fields.AUTHORORGANISATION); String agency = getReferenceTitle(rowRoot + Fields.AUTHORAGENCY); String section = getReferenceTitle(rowRoot + Fields.AUTHORSECTION); String affiliation = (StringUtils.hasText(organisation)) ? organisation : ""; affiliation += (StringUtils.hasText(affiliation) && StringUtils.hasText(agency)) ? ". " : ""; affiliation += (StringUtils.hasText(agency)) ? agency : ""; affiliation += (StringUtils.hasText(affiliation) && StringUtils.hasText(section)) ? ". " : ""; affiliation += (StringUtils.hasText(section)) ? section : ""; if (StringUtils.hasText(affiliation)) { d.setAffiliation(affiliation); } } } containerPair = revision.dataField(ContainerDataFieldCall.get(Fields.OTHERAUTHORS)); if(containerPair.getLeft() == StatusCode.FIELD_FOUND && containerPair.getRight().hasRowsFor(Language.DEFAULT)) { String pathRoot = "otherauthors."; for(DataRow row : containerPair.getRight().getRowsFor(Language.DEFAULT)) { if (row.getRemoved()) { continue; } String rowRoot = pathRoot + row.getRowId() + "."; Pair<StatusCode, ValueDataField> pair = row.dataField(ValueDataFieldCall.get(Fields.OTHERAUTHORTYPE)); if(!hasValue(pair, Language.DEFAULT)) { // We require a type for collector before we can move forward continue; } String colltype = pair.getRight().getActualValueFor(Language.DEFAULT); // It's easier to dublicate some functionality and make a clean split from the top than to evaluate each value separately if(colltype.equals("1")) { // We have a person collector pair = row.dataField(ValueDataFieldCall.get(Fields.AUTHOR)); if(!hasValue(pair, Language.DEFAULT)) { // We must have a collector continue; } OthIdType d = fillTextType(rsp.addNewOthId(), pair, Language.DEFAULT); String organisation = 
getReferenceTitle(rowRoot + Fields.AUTHORORGANISATION); String agency = getReferenceTitle(rowRoot + Fields.AUTHORAGENCY); String section = getReferenceTitle(rowRoot + Fields.AUTHORSECTION); String affiliation = (StringUtils.hasText(organisation)) ? organisation : ""; affiliation += (StringUtils.hasText(affiliation) && StringUtils.hasText(agency)) ? ". " : ""; affiliation += (StringUtils.hasText(agency)) ? agency : ""; affiliation += (StringUtils.hasText(affiliation) && StringUtils.hasText(section)) ? ". " : ""; affiliation += (StringUtils.hasText(section)) ? section : ""; if(StringUtils.hasText(affiliation)) { d.setAffiliation(affiliation); } } else if(colltype.equals("2")) { // We have an organisation collector String organisation = getReferenceTitle(rowRoot + Fields.AUTHORORGANISATION); String agency = getReferenceTitle(rowRoot + Fields.AUTHORAGENCY); String section = getReferenceTitle(rowRoot + Fields.AUTHORSECTION); String collector = ""; OthIdType d; if(!StringUtils.hasText(agency) && !StringUtils.hasText(section)) { if(!StringUtils.hasText(organisation)) { continue; } d = fillTextType(rsp.addNewOthId(), organisation); } else { collector = organisation; collector += (StringUtils.hasText(agency)) ? ". " + agency : ""; collector += (StringUtils.hasText(collector) && StringUtils.hasText(section)) ? ". " : ""; collector += (StringUtils.hasText(section)) ? 
section : ""; if(!StringUtils.hasText(collector)) { continue; } d = fillTextType(rsp.addNewOthId(), organisation); } if(StringUtils.hasText(agency) || StringUtils.hasText(section)) { if(StringUtils.hasText(collector)) { d.setAffiliation(collector); } } } else if(colltype.equals("3")) { pair = row.dataField(ValueDataFieldCall.get(Fields.OTHERAUTHORGROUP)); if(hasValue(pair, language)) { fillTextType(rsp.addNewOthId(), pair, language); } } } } } private void addCitationSerStatement(CitationType citationType) { // Add series statement, excel row #70 Pair<StatusCode, ValueDataField> valueFieldPair = revision.dataField(ValueDataFieldCall.get(Fields.SERIES)); if(hasValue(valueFieldPair, Language.DEFAULT)) { Pair<ReturnResult, RevisionData> revisionPair = revisions.getRevisionData(valueFieldPair.getRight().getActualValueFor(Language.DEFAULT)); if(revisionPair.getLeft() == ReturnResult.REVISION_FOUND) { RevisionData series = revisionPair.getRight(); valueFieldPair = series.dataField(ValueDataFieldCall.get(Fields.SERIESABBR)); String seriesAbbr = null; if(hasValue(valueFieldPair, Language.DEFAULT)) { seriesAbbr = valueFieldPair.getRight().getActualValueFor(Language.DEFAULT); } if(seriesAbbr != null) { SerStmtType serStmtType = citationType.addNewSerStmt(); serStmtType.setURI(getDDIText(language, "SERIES_URI_PREFIX")+seriesAbbr); valueFieldPair = series.dataField(ValueDataFieldCall.get(Fields.SERIESNAME)); SerNameType serName; if(hasValue(valueFieldPair, language)) { serName = fillTextType(serStmtType.addNewSerName(), valueFieldPair, language); } else { serName = fillTextType(serStmtType.addNewSerName(), ""); } serName.setAbbr(seriesAbbr); valueFieldPair = series.dataField(ValueDataFieldCall.get(Fields.SERIESDESC)); if(hasValue(valueFieldPair, language)) { fillTextType(serStmtType.addNewSerInfo(), valueFieldPair, language); } } } else { Logger.error(getClass(), "Did not find referenced SERIES with id: "+valueFieldPair.getRight().getActualValueFor(Language.DEFAULT)); } } } 
private void addCitationVerStatement(CitationType citationType) { VerStmtType verStmtType = citationType.addNewVerStmt(); // Add version, repeatable Pair<StatusCode, ContainerDataField> containerPair = revision.dataField(ContainerDataFieldCall.get(Fields.DATAVERSIONS)); if(containerPair.getLeft() == StatusCode.FIELD_FOUND && containerPair.getRight().hasRowsFor(Language.DEFAULT)) { for(DataRow row : containerPair.getRight().getRowsFor(Language.DEFAULT)) { Pair<StatusCode, ValueDataField> valueFieldPair = row.dataField(ValueDataFieldCall.get(Fields.DATAVERSION)); if(hasValue(valueFieldPair, Language.DEFAULT)) { fillTextAndDateType(verStmtType.addNewVersion(), valueFieldPair, Language.DEFAULT); } } } } private void addCitationTitle(CitationType citationType) { Pair<StatusCode, ValueDataField> valueFieldPair = revision.dataField(ValueDataFieldCall.get(Fields.TITLE)); TitlStmtType titlStmtType = citationType.addNewTitlStmt(); if(hasValue(valueFieldPair, language)) { // Add title of requested language fillTextType(titlStmtType.addNewTitl(), valueFieldPair, language); } addAltTitles(titlStmtType); addParTitles(titlStmtType); String agency = ""; valueFieldPair = revision.dataField(ValueDataFieldCall.get(Fields.STUDYID)); if(hasValue(valueFieldPair, Language.DEFAULT)) { String id = valueFieldPair.getRight().getActualValueFor(Language.DEFAULT); // Get agency from study id SelectionList list = configuration.getRootSelectionList(Lists.ID_PREFIX_LIST); if(list != null) { for(Option option : list.getOptions()) { if(id.indexOf(option.getValue()) == 0) { agency = option.getValue(); break; } } } // Add study id as id no IDNoType idNoType = fillTextType(titlStmtType.addNewIDNo(), valueFieldPair, Language.DEFAULT); idNoType.setAgency(agency); } // Add DDI pid for the current language as idNO // TODO: Should this be the DDI package urn /*valueFieldPair = revisionData.dataField(ValueDataFieldCall.get(Fields.PIDDDI+getXmlLang(language))); if(hasValue(valueFieldPair, Language.DEFAULT)) { 
IDNoType idNoType = fillTextType(titlStmtType.addNewIDNo(), valueFieldPair, Language.DEFAULT); idNoType.setAgency(agency); }*/ } private void addParTitles(TitlStmtType titlStmtType) { Pair<StatusCode, ValueDataField> valueFieldPair = revision.dataField(ValueDataFieldCall.get(Fields.TITLE)); Set<String> usedLanguages = new HashSet<>(); usedLanguages.add(getXmlLang(language)); for(Language l : Language.values()) { if(l == language) { continue; } if(hasValue(valueFieldPair, l)) { SimpleTextType stt = fillTextType(titlStmtType.addNewParTitl(), valueFieldPair, l); stt.setXmlLang(getXmlLang(l)); usedLanguages.add(getXmlLang(l)); } } Pair<StatusCode, ContainerDataField> containerPair = revision.dataField(ContainerDataFieldCall.get(Fields.PARTITLES)); if(containerPair.getLeft() == StatusCode.FIELD_FOUND && containerPair.getRight().hasRowsFor(Language.DEFAULT)) { for(DataRow row : containerPair.getRight().getRowsFor(Language.DEFAULT)) { valueFieldPair = row.dataField(ValueDataFieldCall.get(Fields.PARTITLE)); String partitle = null; if(hasValue(valueFieldPair, Language.DEFAULT)) { partitle = valueFieldPair.getRight().getActualValueFor(Language.DEFAULT); } valueFieldPair = row.dataField(ValueDataFieldCall.get(Fields.PARTITLELANG)); String partitlelang = null; if(hasValue(valueFieldPair, Language.DEFAULT)) { partitlelang = valueFieldPair.getRight().getActualValueFor(Language.DEFAULT); } if(partitle != null && partitlelang != null) { if(!usedLanguages.contains(partitlelang)) { SimpleTextType stt = fillTextType(titlStmtType.addNewParTitl(), partitle); stt.setXmlLang(partitlelang); usedLanguages.add(partitlelang); } } } } } private void addAltTitles(TitlStmtType titlStmtType) { Pair<StatusCode, ValueDataField> valueFieldPair;// Add alternative titles Pair<StatusCode, ContainerDataField> containerPair = revision.dataField(ContainerDataFieldCall.get(Fields.ALTTITLES)); // TODO: Do we translate alternate titles or do the alternate titles have translations? 
if(containerPair.getLeft() == StatusCode.FIELD_FOUND && containerPair.getRight().hasRowsFor(Language.DEFAULT)) { for(DataRow row : containerPair.getRight().getRowsFor(Language.DEFAULT)) { valueFieldPair = row.dataField(ValueDataFieldCall.get(Fields.ALTTITLE)); if(hasValue(valueFieldPair, language)) { fillTextType(titlStmtType.addNewAltTitl(), valueFieldPair, language); } } } } private void addStudyAuthorization(StdyDscrType stdyDscrType) { Pair<StatusCode, ContainerDataField> containerPair = revision.dataField(ContainerDataFieldCall.get(Fields.AUTHORS)); String path = "authors."; if(containerPair.getLeft() == StatusCode.FIELD_FOUND && containerPair.getRight().hasRowsFor(Language.DEFAULT)) { StudyAuthorizationType sa = stdyDscrType.addNewStudyAuthorization(); for(DataRow row : containerPair.getRight().getRowsFor(Language.DEFAULT)) { if(row.getRemoved()) { continue; } Pair<StatusCode, ValueDataField> pair = row.dataField(ValueDataFieldCall.get(Fields.AUTHORTYPE)); if(!hasValue(pair, Language.DEFAULT)) { continue; } // If author type is person then it's not correct for this entity if(pair.getRight().getActualValueFor(Language.DEFAULT).equals("1")) { continue; } String rowRoot = path+row.getRowId()+"."; String organisation = getReferenceTitle(rowRoot + Fields.AUTHORORGANISATION); String agency = getReferenceTitle(rowRoot + Fields.AUTHORAGENCY); String section = getReferenceTitle(rowRoot + Fields.AUTHORSECTION); AuthorizingAgencyType d; if(!StringUtils.hasText(agency) && !StringUtils.hasText(section)) { if(!StringUtils.hasText(organisation)) { continue; } d = fillTextType(sa.addNewAuthorizingAgency(), organisation); } else { String authorizer = (StringUtils.hasText(agency)) ? agency : ""; authorizer += (StringUtils.hasText(authorizer) && StringUtils.hasText(section)) ? ". " : ""; authorizer += (StringUtils.hasText(section)) ? 
section : ""; if(!StringUtils.hasText(authorizer)) { continue; } d = fillTextType(sa.addNewAuthorizingAgency(), authorizer); } String abbr = getReferenceTitle(rowRoot + Fields.PRODUCERSECTIONABBR); abbr = (StringUtils.hasText(abbr)) ? abbr : getReferenceTitle(rowRoot + Fields.PRODUCERAGENCYABBR); abbr = (StringUtils.hasText(abbr)) ? abbr : getReferenceTitle(rowRoot + Fields.PRODUCERORGANISATIONABBR); d.setAbbr(abbr); if(StringUtils.hasText(agency) || StringUtils.hasText(section)) { if(StringUtils.hasText(organisation)) { d.setAffiliation(organisation); } } } } } private void addStudyInfo(StdyDscrType stdyDscrType) { StdyInfoType stdyInfo = stdyDscrType.addNewStdyInfo(); addStudyInfoSubject(stdyInfo); Pair<StatusCode, ValueDataField> valueFieldPair = revision.dataField( ValueDataFieldCall.get(Fields.ABSTRACT)); if(hasValue(valueFieldPair, language)) { fillTextType(stdyInfo.addNewAbstract(), valueFieldPair, language); } addStudyInfoSumDesc(stdyInfo); } private void addStudyInfoSubject(StdyInfoType stdyInfo) { SubjectType subject= stdyInfo.addNewSubject(); // Add subject Pair<StatusCode, ContainerDataField> containerPair = revision.dataField(ContainerDataFieldCall.get(Fields.KEYWORDS)); if(containerPair.getLeft() == StatusCode.FIELD_FOUND && containerPair.getRight().hasRowsFor(Language.DEFAULT)) { addStudyInfoSubjectKeywords(subject, containerPair.getRight()); } // Add topic containerPair = revision.dataField(ContainerDataFieldCall.get(Fields.TOPICS)); if(containerPair.getLeft() == StatusCode.FIELD_FOUND && containerPair.getRight().hasRowsFor(Language.DEFAULT)) { addStudyInfoSubjectTopics(subject, containerPair.getRight()); } } private void addStudyInfoSubjectKeywords(SubjectType subject, ContainerDataField container) { // Let's hardcode the path since we know exactly what we are looking for. 
String pathRoot = "keywords."; for(DataRow row : container.getRowsFor(Language.DEFAULT)) { if(row.getRemoved()) { continue; } String rowRoot = pathRoot + row.getRowId() + "."; String keyword = null; String keywordvocaburi = null; ReferenceOption keywordvocab = references.getCurrentFieldOption(language, revision, configuration, rowRoot + Fields.KEYWORDVOCAB, true); keywordvocaburi = getReferenceTitle(rowRoot + Fields.KEYWORDVOCABURI); SelectionList keywordvocab_list = configuration.getSelectionList(Lists.KEYWORDVOCAB_LIST); if(keywordvocab == null || keywordvocab_list.getFreeText().contains(keywordvocab.getValue())) { Pair<StatusCode, ValueDataField> keywordnovocabPair = row.dataField(ValueDataFieldCall.get(Fields.KEYWORDNOVOCAB)); if(hasValue(keywordnovocabPair, language)) { keyword = keywordnovocabPair.getRight().getActualValueFor(language); } } else { Pair<StatusCode, ValueDataField> keywordPair = row.dataField(ValueDataFieldCall.get(Fields.KEYWORD)); if(hasValue(keywordPair, language)) { keyword = keywordPair.getRight().getActualValueFor(language); } } if(!StringUtils.hasText(keyword)) { continue; } KeywordType kwt = fillTextType(subject.addNewKeyword(), keyword); if(keywordvocab != null) { kwt.setVocab(keywordvocab.getTitle().getValue()); } if(StringUtils.hasText(keywordvocaburi)) { kwt.setVocabURI(keywordvocaburi); } } } private void addStudyInfoSubjectTopics(SubjectType subject, ContainerDataField container) { // Let's hardcode the path since we know exactly what we are looking for. 
String pathRoot = "topics."; for(DataRow row : container.getRowsFor(Language.DEFAULT)) { if(row.getRemoved()) { continue; } String rowRoot = pathRoot + row.getRowId() + "."; String topic = null; String topictop = null; String topicvocab = null; String topicvocaburi = null; topicvocab = getReferenceTitle(rowRoot + Fields.TOPICVOCAB); if(!StringUtils.hasText(topicvocab)) { continue; } topictop = getReferenceTitle(rowRoot + Fields.TOPICTOP); if(!StringUtils.hasText(topictop)) { continue; } topic = getReferenceTitle(rowRoot + Fields.TOPIC); if(!StringUtils.hasText(topic)) { continue; } topicvocaburi = getReferenceTitle(rowRoot + Fields.TOPICVOCABURI); // Keyword should always be non null at this point TopcClasType tt = fillTextType(subject.addNewTopcClas(), topic); if(topicvocab != null) { tt.setVocab(topicvocab); } if(topicvocaburi != null) { tt.setVocabURI(topicvocaburi); } } } private void addStudyInfoSumDesc(StdyInfoType stdyInfo) { SumDscrType sumDscrType = stdyInfo.addNewSumDscr(); Pair<StatusCode, ContainerDataField> containerPair = revision.dataField(ContainerDataFieldCall.get(Fields.TIMEPERIODS)); if(containerPair.getLeft() == StatusCode.FIELD_FOUND && containerPair.getRight().hasRowsFor(Language.DEFAULT)) { addStudyInfoSumDescTimePrd(sumDscrType, containerPair.getRight()); } containerPair = revision.dataField(ContainerDataFieldCall.get(Fields.COLLTIME)); if(containerPair.getLeft() == StatusCode.FIELD_FOUND && containerPair.getRight().hasRowsFor(Language.DEFAULT)) { addStudyInfoSumDescCollDate(sumDscrType, containerPair.getRight()); } containerPair = revision.dataField(ContainerDataFieldCall.get(Fields.COUNTRIES)); if(containerPair.getLeft() == StatusCode.FIELD_FOUND && containerPair.getRight().hasRowsFor(Language.DEFAULT)) { addStudyInfoSumDescNation(sumDscrType, containerPair.getRight()); for(DataRow row : containerPair.getRight().getRowsFor(language)) { if (row.getRemoved()) { continue; } Pair<StatusCode, ValueDataField> fieldPair = 
row.dataField(ValueDataFieldCall.get(Fields.COUNTRY)); if(hasValue(fieldPair, language)) { fillTextType(sumDscrType.addNewGeogCover(), fieldPair, language); } } } containerPair = revision.dataField(ContainerDataFieldCall.get(Fields.GEOGCOVERS)); if(containerPair.getLeft() == StatusCode.FIELD_FOUND && containerPair.getRight().hasRowsFor(language)) { for(DataRow row : containerPair.getRight().getRowsFor(language)) { if (row.getRemoved()) { continue; } Pair<StatusCode, ValueDataField> fieldPair = row.dataField(ValueDataFieldCall.get(Fields.GEOGCOVER)); if(hasValue(fieldPair, language)) { fillTextType(sumDscrType.addNewGeogCover(), fieldPair, language); } } } containerPair = revision.dataField(ContainerDataFieldCall.get(Fields.ANALYSIS)); if(containerPair.getLeft() == StatusCode.FIELD_FOUND && containerPair.getRight().hasRowsFor(Language.DEFAULT)) { addStudyInfoSumDescAnlyUnit(sumDscrType, containerPair.getRight()); } containerPair = revision.dataField(ContainerDataFieldCall.get(Fields.UNIVERSES)); if(containerPair.getLeft() == StatusCode.FIELD_FOUND && containerPair.getRight().hasRowsFor(Language.DEFAULT)) { addStudyInfoSumDescUniverse(sumDscrType, containerPair); } Pair<StatusCode, ValueDataField> fieldPair = revision.dataField(ValueDataFieldCall.get(Fields.DATAKIND)); if(hasValue(fieldPair, Language.DEFAULT)) { SelectionList list = configuration.getRootSelectionList(configuration.getField(Fields.DATAKIND).getSelectionList()); Option option = list.getOptionWithValue(fieldPair.getRight().getActualValueFor(Language.DEFAULT)); if(option != null) { fillTextType(sumDscrType.addNewDataKind(), option.getTitleFor(language)); } } } private void addStudyInfoSumDescAnlyUnit(SumDscrType sumDscr, ContainerDataField container) { // Let's hardcode the path since we know exactly what we are looking for. 
String pathRoot = "analysis."; for(DataRow row : container.getRowsFor(Language.DEFAULT)) { if(row.getRemoved()) { continue; } String rowRoot = pathRoot + row.getRowId() + "."; String txt = null; String analysisunit = null; String analysisunituri = null; String analysisunitvocab = null; String analysisunitvocaburi = null; analysisunitvocab = getReferenceTitle(rowRoot + Fields.ANALYSISUNITVOCAB); if(!StringUtils.hasText(analysisunitvocab)) { continue; } analysisunit = getReferenceTitle(rowRoot + Fields.ANALYSISUNIT); if(!StringUtils.hasText(analysisunit)) { continue; } analysisunituri = getReferenceTitle(rowRoot + Fields.ANALYSISUNITURI); if(!StringUtils.hasText(analysisunituri)) { continue; } analysisunitvocaburi = getReferenceTitle(rowRoot + Fields.ANALYSISUNITVOCABURI); Pair<StatusCode, ValueDataField> valueFieldPair = row.dataField(ValueDataFieldCall.get(Fields.ANALYSISUNITOTHER)); if(hasValue(valueFieldPair, language)) { txt = valueFieldPair.getRight().getActualValueFor(language); } // Keyword should always be non null at this point AnlyUnitType t = sumDscr.addNewAnlyUnit(); ConceptType c = fillTextType(t.addNewConcept(), analysisunituri); if(analysisunitvocab != null) { c.setVocab(analysisunitvocab); } if(analysisunitvocaburi != null) { c.setVocabURI(analysisunitvocaburi); } if(txt != null) { fillTextType(t.addNewTxt(), txt); } } } private void addStudyInfoSumDescUniverse(SumDscrType sumDscrType, Pair<StatusCode, ContainerDataField> containerPair) { for(DataRow row : containerPair.getRight().getRowsFor(Language.DEFAULT)) { if (row.getRemoved()) { continue; } Pair<StatusCode, ValueDataField> fieldPair = row.dataField(ValueDataFieldCall.get(Fields.UNIVERSE)); if(hasValue(fieldPair, language)) { UniverseType t = fillTextType(sumDscrType.addNewUniverse(), fieldPair, language); fieldPair = row.dataField(ValueDataFieldCall.get(Fields.UNIVERSECLUSION)); if(hasValue(fieldPair, Language.DEFAULT)) { switch(fieldPair.getRight().getActualValueFor(Language.DEFAULT)) { case 
"I": t.setClusion(UniverseType.Clusion.I); break; case "E": t.setClusion(UniverseType.Clusion.E); break; } } } } } private void addStudyInfoSumDescTimePrd(SumDscrType sumDscr, ContainerDataField container) { for(DataRow row : container.getRowsFor(Language.DEFAULT)) { if(row.getRemoved()) { continue; } Pair<StatusCode, ValueDataField> valuePair = row.dataField(ValueDataFieldCall.get(Fields.TIMEPERIODTEXT)); String timeperiodtext = hasValue(valuePair, language) ? valuePair.getRight().getActualValueFor(language) : null; valuePair = row.dataField(ValueDataFieldCall.get(Fields.TIMEPERIOD)); if(StringUtils.hasText(timeperiodtext) || hasValue(valuePair, Language.DEFAULT)) { TimePrdType t = sumDscr.addNewTimePrd(); if(StringUtils.hasText(timeperiodtext)) { fillTextType(t, timeperiodtext); } if(hasValue(valuePair, Language.DEFAULT)) { t.setDate(valuePair.getRight().getActualValueFor(Language.DEFAULT)); } valuePair = row.dataField(ValueDataFieldCall.get(Fields.TIMEPERIODEVENT)); if(hasValue(valuePair, Language.DEFAULT)) { switch(valuePair.getRight().getActualValueFor(Language.DEFAULT)) { case "start": t.setEvent(TimePrdType.Event.START); break; case "end": t.setEvent(TimePrdType.Event.END); break; case "single": t.setEvent(TimePrdType.Event.SINGLE); break; } } } } } private void addStudyInfoSumDescCollDate(SumDscrType sumDscr, ContainerDataField container) { for(DataRow row : container.getRowsFor(Language.DEFAULT)) { if(row.getRemoved()) { continue; } Pair<StatusCode, ValueDataField> valuePair = row.dataField(ValueDataFieldCall.get(Fields.COLLDATETEXT)); String colldatetext = hasValue(valuePair, language) ? 
valuePair.getRight().getActualValueFor(language) : null; valuePair = row.dataField(ValueDataFieldCall.get(Fields.COLLDATE)); if(StringUtils.hasText(colldatetext) || hasValue(valuePair, Language.DEFAULT)) { CollDateType t = sumDscr.addNewCollDate(); if(StringUtils.hasText(colldatetext)) { fillTextType(t, colldatetext); } if(hasValue(valuePair, Language.DEFAULT)) { t.setDate(valuePair.getRight().getActualValueFor(Language.DEFAULT)); } valuePair = row.dataField(ValueDataFieldCall.get(Fields.COLLDATEEVENT)); if(hasValue(valuePair, Language.DEFAULT)) { switch(valuePair.getRight().getActualValueFor(Language.DEFAULT)) { case "start": t.setEvent(CollDateType.Event.START); break; case "end": t.setEvent(CollDateType.Event.END); break; case "single": t.setEvent(CollDateType.Event.SINGLE); break; } } } } } private void addStudyInfoSumDescNation(SumDscrType sumDscr, ContainerDataField container) { for (DataRow row : container.getRowsFor(language)) { if (row.getRemoved()) { continue; } Pair<StatusCode,ValueDataField> valueFieldPair = row.dataField(ValueDataFieldCall.get(Fields.COUNTRY)); if (valueFieldPair.getLeft() != StatusCode.FIELD_FOUND && !valueFieldPair.getRight().hasValueFor(language)) return; NationType n = fillTextType(sumDscr.addNewNation(), valueFieldPair,language); valueFieldPair = row.dataField(ValueDataFieldCall.get(Fields.COUNTRYABBR)); if (valueFieldPair.getLeft() == StatusCode.FIELD_FOUND && valueFieldPair.getRight().hasValueFor(language)) { n.setAbbr(valueFieldPair.getValue().getActualValueFor(language)); } } } private void addMethod(StdyDscrType stdyDscrType) { MethodType methodType = stdyDscrType.addNewMethod(); addMethodDataColl(methodType); Pair<StatusCode, ValueDataField> valueFieldPair = revision.dataField(ValueDataFieldCall.get(Fields.DATAPROSESSING)); if(hasValue(valueFieldPair, language)) { fillTextType(methodType.addNewNotes(), valueFieldPair, language); } addMethodAnalyzeInfo(methodType); } private void addMethodDataColl(MethodType methodType) { // 
// Add data collection section: delegate each populated container to its builder.
DataCollType dataCollType = methodType.addNewDataColl();
Pair<StatusCode, ContainerDataField> containerPair = revision.dataField(ContainerDataFieldCall.get(Fields.TIMEMETHODS));
if(containerPair.getLeft() == StatusCode.FIELD_FOUND && containerPair.getRight().hasRowsFor(Language.DEFAULT)) {
    addMethodDataCollTimeMeth(dataCollType, containerPair.getRight());
}
containerPair = revision.dataField(ContainerDataFieldCall.get(Fields.SAMPPROCS));
if(containerPair.getLeft() == StatusCode.FIELD_FOUND && containerPair.getRight().hasRowsFor(Language.DEFAULT)) {
    addMethodDataCollSampProc(dataCollType, containerPair.getRight());
}
containerPair = revision.dataField(ContainerDataFieldCall.get(Fields.COLLMODES));
if(containerPair.getLeft() == StatusCode.FIELD_FOUND && containerPair.getRight().hasRowsFor(Language.DEFAULT)) {
    addMethodDataCollCollMode(dataCollType, containerPair.getRight());
}
containerPair = revision.dataField(ContainerDataFieldCall.get(Fields.INSTRUMENTS));
if(containerPair.getLeft() == StatusCode.FIELD_FOUND && containerPair.getRight().hasRowsFor(Language.DEFAULT)) {
    addMethodDataCollResInstru(dataCollType, containerPair.getRight());
}
containerPair = revision.dataField(ContainerDataFieldCall.get(Fields.COLLECTORS));
if(containerPair.getLeft() == StatusCode.FIELD_FOUND && containerPair.getRight().hasRowsFor(Language.DEFAULT)) {
    addMethodDataCollDataCollector(dataCollType, containerPair.getRight());
}
addMethodDataCollSources(dataCollType);
addMethodDataCollWeight(dataCollType);
}

/**
 * Adds DDI <timeMeth> elements from the timemethods container. Each row needs
 * vocabulary and term (URI optional); TIMEMETHODOTHER becomes the <txt> element.
 */
private void addMethodDataCollTimeMeth(DataCollType dataColl, ContainerDataField container) {
    // Let's hardcode the path since we know exactly what we are looking for.
    String pathRoot = "timemethods.";
    for(DataRow row : container.getRowsFor(Language.DEFAULT)) {
        if(row.getRemoved()) {
            continue;
        }
        String rowRoot = pathRoot + row.getRowId() + ".";
        String txt = null;
        String timemethod = null;
        String timemethodvocab = null;
        String timemethodvocaburi = null;
        // Vocabulary and term are both required; skip the row otherwise.
        timemethodvocab = getReferenceTitle(rowRoot + Fields.TIMEMETHODVOCAB);
        if(!StringUtils.hasText(timemethodvocab)) {
            continue;
        }
        timemethod = getReferenceTitle(rowRoot + Fields.TIMEMETHOD);
        if(!StringUtils.hasText(timemethod)) {
            continue;
        }
        timemethodvocaburi = getReferenceTitle(rowRoot + Fields.TIMEMETHODVOCABURI);
        Pair<StatusCode, ValueDataField> valueFieldPair = row.dataField(ValueDataFieldCall.get(Fields.TIMEMETHODOTHER));
        if(hasValue(valueFieldPair, language)) {
            txt = valueFieldPair.getRight().getActualValueFor(language);
        }
        // timemethod is guaranteed non-null at this point (guard above).
        TimeMethType t = dataColl.addNewTimeMeth();
        ConceptType c = fillTextType(t.addNewConcept(), timemethod);
        if(timemethodvocab != null) {
            c.setVocab(timemethodvocab);
        }
        if(timemethodvocaburi != null) {
            c.setVocabURI(timemethodvocaburi);
        }
        if(txt != null) {
            fillTextType(t.addNewTxt(), txt);
        }
    }
}

private void addMethodDataCollDataCollector(DataCollType dataColl, ContainerDataField container) {
    // Let's hardcode the path since we know exactly what we are looking for.
// Builds DDI <dataCollector> elements. COLLECTORTYPE selects the branch:
// "1" = person collector (affiliation assembled from organisation/agency/section),
// "2" = organisation collector (name assembled from agency/section, abbr from
// the most specific available abbreviation).
String pathRoot = "collectors.";
for(DataRow row : container.getRowsFor(Language.DEFAULT)) {
    if(row.getRemoved()) {
        continue;
    }
    String rowRoot = pathRoot + row.getRowId() + ".";
    Pair<StatusCode, ValueDataField> pair = row.dataField(ValueDataFieldCall.get(Fields.COLLECTORTYPE));
    if(!hasValue(pair, Language.DEFAULT)) {
        // We require a type for collector before we can move forward
        continue;
    }
    String colltype = pair.getRight().getActualValueFor(Language.DEFAULT);
    // It's easier to duplicate some functionality and make a clean split from the top
    // than to evaluate each value separately
    if(colltype.equals("1")) {
        // We have a person collector
        pair = row.dataField(ValueDataFieldCall.get(Fields.COLLECTOR));
        if(!hasValue(pair, Language.DEFAULT)) {
            // We must have a collector
            continue;
        }
        DataCollectorType d = fillTextType(dataColl.addNewDataCollector(), pair, Language.DEFAULT);
        String organisation = getReferenceTitle(rowRoot + Fields.COLLECTORORGANISATION);
        String agency = getReferenceTitle(rowRoot + Fields.COLLECTORAGENCY);
        String section = getReferenceTitle(rowRoot + Fields.COLLECTORSECTION);
        // Affiliation is "organisation. agency. section" with empty parts elided.
        String affiliation = (StringUtils.hasText(organisation)) ? organisation : "";
        affiliation += (StringUtils.hasText(affiliation) && StringUtils.hasText(agency)) ? ". " : "";
        affiliation += (StringUtils.hasText(agency)) ? agency : "";
        affiliation += (StringUtils.hasText(affiliation) && StringUtils.hasText(section)) ? ". " : "";
        affiliation += (StringUtils.hasText(section)) ? section : "";
        if(StringUtils.hasText(affiliation)) {
            d.setAffiliation(affiliation);
        }
    } else if(colltype.equals("2")) {
        // We have an organisation collector
        String organisation = getReferenceTitle(rowRoot + Fields.COLLECTORORGANISATION);
        String agency = getReferenceTitle(rowRoot + Fields.COLLECTORAGENCY);
        String section = getReferenceTitle(rowRoot + Fields.COLLECTORSECTION);
        DataCollectorType d;
        if(!StringUtils.hasText(agency) && !StringUtils.hasText(section)) {
            // Only the organisation title can name the collector.
            if(!StringUtils.hasText(organisation)) {
                continue;
            }
            d = fillTextType(dataColl.addNewDataCollector(), organisation);
        } else {
            // NOTE(review): when only the agency has text (no section) this falls
            // through to `continue` and the row is dropped — confirm that an
            // agency-only collector is really meant to be skipped.
            String collector = (StringUtils.hasText(agency)) ? agency : "";
            if(StringUtils.hasText(collector) && StringUtils.hasText(section)) {
                collector += ". "+section;
            } else if(StringUtils.hasText(section)) {
                collector = section;
            } else {
                continue;
            }
            d = fillTextType(dataColl.addNewDataCollector(), collector);
        }
        // Abbreviation: prefer section, then agency, then organisation.
        String abbr = getReferenceTitle(rowRoot + Fields.COLLECTORSECTIONABBR);
        abbr = (StringUtils.hasText(abbr)) ? abbr : getReferenceTitle(rowRoot + Fields.COLLECTORAGENCYABBR);
        abbr = (StringUtils.hasText(abbr)) ? abbr : getReferenceTitle(rowRoot + Fields.COLLECTORORGANISATIONABBR);
        d.setAbbr(abbr);
        if(StringUtils.hasText(agency) || StringUtils.hasText(section)) {
            if(StringUtils.hasText(organisation)) {
                d.setAffiliation(organisation);
            }
        }
    }
}
}

private void addMethodDataCollSampProc(DataCollType dataColl, ContainerDataField container) {
    // Let's hardcode the path since we know exactly what we are looking for.
String pathRoot = "sampprocs.";
for(DataRow row : container.getRowsFor(Language.DEFAULT)) {
    if(row.getRemoved()) {
        continue;
    }
    String rowRoot = pathRoot + row.getRowId() + ".";
    String txt = null;
    String sampproc = null;
    String sampprocuri = null;
    String sampprocvocab = null;
    String sampprocvocaburi = null;
    Document sampproctext = null;
    // Vocabulary, term and URI are all required; skip the row otherwise.
    sampprocvocab = getReferenceTitle(rowRoot + Fields.SAMPPROCVOCAB);
    if(!StringUtils.hasText(sampprocvocab)) {
        continue;
    }
    sampproc = getReferenceTitle(rowRoot + Fields.SAMPPROC);
    if(!StringUtils.hasText(sampproc)) {
        continue;
    }
    sampprocuri = getReferenceTitle(rowRoot + Fields.SAMPPROCURI);
    if(!StringUtils.hasText(sampprocuri)) {
        continue;
    }
    sampprocvocaburi = getReferenceTitle(rowRoot + Fields.SAMPPROCVOCABURI);
    Pair<StatusCode, ValueDataField> valueFieldPair = row.dataField(ValueDataFieldCall.get(Fields.SAMPPROCOTHER));
    if(hasValue(valueFieldPair, language)) {
        txt = valueFieldPair.getRight().getActualValueFor(language);
    }
    // sampproc values are guaranteed non-null at this point (guards above).
    ConceptualTextType t = dataColl.addNewSampProc();
    ConceptType c = fillTextType(t.addNewConcept(), sampprocuri);
    // Add sampproctext if present and extract all text paragraphs.
    // BUG FIX: the original called getActualValueFor() on SAMPPROCTEXT without the
    // hasValue() guard used for every other field, so a missing/empty SAMPPROCTEXT
    // caused a NullPointerException. Parse only when the value actually exists.
    valueFieldPair = row.dataField(ValueDataFieldCall.get(Fields.SAMPPROCTEXT));
    Elements paragraphs = null;
    if(hasValue(valueFieldPair, language)) {
        // We don't really need line breaks, so let's get rid of them all
        sampproctext = Jsoup.parse(valueFieldPair.getRight().getActualValueFor(language).replaceAll("<p><br></p>", ""));
        paragraphs = sampproctext.select("p");
    }
    if(sampprocvocab != null) {
        c.setVocab(sampprocvocab);
    }
    if(sampprocvocaburi != null) {
        c.setVocabURI(sampprocvocaburi);
    }
    if(txt != null) {
        fillTextType(t.addNewTxt(), txt);
    }
    if(paragraphs != null) {
        for(Element p : paragraphs) {
            fillTextType(t.addNewP(), p.text());
        }
    }
}
}

private void addMethodDataCollCollMode(DataCollType dataColl, ContainerDataField container) {
    // Let's hardcode the path since we know exactly what we are looking for.
// Builds DDI <collMode> elements from the collmodes container. Each row needs
// vocabulary, term and URI; COLLMODEOTHER becomes the <txt> element.
String pathRoot = "collmodes.";
for(DataRow row : container.getRowsFor(Language.DEFAULT)) {
    if(row.getRemoved()) {
        continue;
    }
    String rowRoot = pathRoot + row.getRowId() + ".";
    String txt = null;
    String collmode = null;
    String collmodeuri = null;
    String collmodevocab = null;
    String collmodevocaburi = null;
    collmodevocab = getReferenceTitle(rowRoot + Fields.COLLMODEVOCAB);
    if(!StringUtils.hasText(collmodevocab)) {
        continue;
    }
    collmode = getReferenceTitle(rowRoot + Fields.COLLMODE);
    if(!StringUtils.hasText(collmode)) {
        continue;
    }
    collmodeuri = getReferenceTitle(rowRoot + Fields.COLLMODEURI);
    if(!StringUtils.hasText(collmodeuri)) {
        continue;
    }
    // Vocabulary URI is optional.
    collmodevocaburi = getReferenceTitle(rowRoot + Fields.COLLMODEVOCABURI);
    Pair<StatusCode, ValueDataField> valueFieldPair = row.dataField(ValueDataFieldCall.get(Fields.COLLMODEOTHER));
    if(hasValue(valueFieldPair, language)) {
        txt = valueFieldPair.getRight().getActualValueFor(language);
    }
    // collmode values are guaranteed non-null at this point (guards above).
    ConceptualTextType t = dataColl.addNewCollMode();
    ConceptType c = fillTextType(t.addNewConcept(), collmodeuri);
    if(collmodevocab != null) {
        c.setVocab(collmodevocab);
    }
    if(collmodevocaburi != null) {
        c.setVocabURI(collmodevocaburi);
    }
    if(txt != null) {
        fillTextType(t.addNewTxt(), txt);
    }
}
}

private void addMethodDataCollResInstru(DataCollType dataColl, ContainerDataField container) {
    // Let's hardcode the path since we know exactly what we are looking for.
// Builds DDI <resInstru> elements from the instruments container. Each row needs
// vocabulary and term (URI optional); INSTRUMENTOTHER becomes the <txt> element.
String pathRoot = "instruments.";
for(DataRow row : container.getRowsFor(Language.DEFAULT)) {
    if(row.getRemoved()) {
        continue;
    }
    String rowRoot = pathRoot + row.getRowId() + ".";
    String txt = null;
    String instrument = null;
    String instrumentvocab = null;
    String instrumentvocaburi = null;
    instrumentvocab = getReferenceTitle(rowRoot + Fields.INSTRUMENTVOCAB);
    if(!StringUtils.hasText(instrumentvocab)) {
        continue;
    }
    instrument = getReferenceTitle(rowRoot + Fields.INSTRUMENT);
    if(!StringUtils.hasText(instrument)) {
        continue;
    }
    instrumentvocaburi = getReferenceTitle(rowRoot + Fields.INSTRUMENTVOCABURI);
    Pair<StatusCode, ValueDataField> valueFieldPair = row.dataField(ValueDataFieldCall.get(Fields.INSTRUMENTOTHER));
    if(hasValue(valueFieldPair, language)) {
        txt = valueFieldPair.getRight().getActualValueFor(language);
    }
    // instrument is guaranteed non-null at this point (guard above).
    ResInstruType t = dataColl.addNewResInstru();
    ConceptType c = fillTextType(t.addNewConcept(), instrument);
    if(instrumentvocab != null) {
        c.setVocab(instrumentvocab);
    }
    if(instrumentvocaburi != null) {
        c.setVocabURI(instrumentvocaburi);
    }
    if(txt != null) {
        fillTextType(t.addNewTxt(), txt);
    }
}
}

/** Adds one DDI <dataSrc> per DATASOURCE value gathered for the export language. */
private void addMethodDataCollSources(DataCollType dataCollType) {
    List<ValueDataField> fields = gatherFields(revision, Fields.DATASOURCES, Fields.DATASOURCE, language, language);
    SourcesType sources = dataCollType.addNewSources();
    for(ValueDataField field : fields) {
        fillTextType(sources.addNewDataSrc(), field, language);
    }
}

/**
 * Adds the DDI <weight> element. When WEIGHTYESNO is true the localized
 * "WEIGHT_NO" boilerplate is used (presumably "no weighting" — confirm against
 * the DDI text resources); otherwise the free-text WEIGHT field is used.
 */
private void addMethodDataCollWeight(DataCollType dataCollType) {
    Pair<StatusCode, ValueDataField> valueFieldPair = revision.dataField(ValueDataFieldCall.get(Fields.WEIGHTYESNO));
    if(hasValue(valueFieldPair, Language.DEFAULT) && valueFieldPair.getRight().getValueFor(Language.DEFAULT).valueAsBoolean()) {
        fillTextType(dataCollType.addNewWeight(), getDDIText(language, "WEIGHT_NO"));
    } else {
        valueFieldPair = revision.dataField(ValueDataFieldCall.get(Fields.WEIGHT));
        if(hasValue(valueFieldPair, language)) {
            fillTextType(dataCollType.addNewWeight(), valueFieldPair, language);
        }
    }
}

/**
 * Builds the DDI <anlyInfo> section: response rate plus repeatable data
 * appraisal paragraphs (HTML is parsed with Jsoup and flattened to <p> text).
 */
private void addMethodAnalyzeInfo(MethodType methodType) {
    AnlyInfoType anlyInfoType = methodType.addNewAnlyInfo();
    // Add response rate
    Pair<StatusCode, ValueDataField> valueFieldPair = revision.dataField(ValueDataFieldCall.get(Fields.RESPRATE));
    if(hasValue(valueFieldPair, Language.DEFAULT)) {
        fillTextType(anlyInfoType.addNewRespRate(), valueFieldPair, Language.DEFAULT);
    }
    // Add data appraisal, repeatable
    Pair<StatusCode, ContainerDataField> containerPair = revision.dataField(ContainerDataFieldCall.get(Fields.APPRAISALS));
    if(containerPair.getLeft() == StatusCode.FIELD_FOUND && containerPair.getRight().hasRowsFor(language)) {
        for (DataRow row : containerPair.getRight().getRowsFor(language)) {
            valueFieldPair = row.dataField(ValueDataFieldCall.get(Fields.APPRAISAL));
            if(hasValue(valueFieldPair, language)) {
                // We don't really need line breaks, so let's get rid of them all
                String s = valueFieldPair.getRight().getActualValueFor(language).replaceAll("<p><br></p>", "");
                Document dataAppr = Jsoup.parse(s);
                Elements paragraphs = dataAppr.select("p");
                DataApprType d = anlyInfoType.addNewDataAppr();
                for(Element p : paragraphs){
                    fillTextType(d.addNewP(), p.text());
                }
            }
        }
    }
}

/** Builds the DDI <dataAccs> section: set availability, use statement and notes. */
private void addDataAccess(StdyDscrType stdyDscrType) {
    DataAccsType dataAccs = stdyDscrType.addNewDataAccs();
    addDataAccessSetAvail(dataAccs);
    addDataAccessUseStatement(dataAccs);
    // Add notes
    Pair<StatusCode, ValueDataField> valueFieldPair = revision.dataField(ValueDataFieldCall.get(Fields.DATASETNOTES));
    if(hasValue(valueFieldPair, language)) {
        fillTextType(dataAccs.addNewNotes(), valueFieldPair, language);
    }
}

private void addDataAccessSetAvail(DataAccsType dataAccs) {
    // Add set availability
    SetAvailType setAvail = dataAccs.addNewSetAvail();
    // Add access place (localized boilerplate text + URI)
    AccsPlacType acc = fillTextType(setAvail.addNewAccsPlac(), getDDIText(language, "ACCS_PLAC"));
    acc.setURI(getDDIText(language, "ACCS_PLAC_URI"));
    // Add
// Add original archive
Pair<StatusCode, ValueDataField> valueFieldPair = revision.dataField(ValueDataFieldCall.get(Fields.ORIGINALLOCATION));
if(hasValue(valueFieldPair, Language.DEFAULT)) {
    fillTextType(setAvail.addNewOrigArch(), valueFieldPair, Language.DEFAULT);
}
// Add collection size
valueFieldPair = revision.dataField(ValueDataFieldCall.get(Fields.COLLSIZE));
if(hasValue(valueFieldPair, language)) {
    fillTextType(setAvail.addNewCollSize(), valueFieldPair, language);
}
// Add complete
valueFieldPair = revision.dataField(ValueDataFieldCall.get(Fields.COMPLETE));
if(hasValue(valueFieldPair, language)) {
    fillTextType(setAvail.addNewComplete(), valueFieldPair, language);
}
}

/**
 * Builds the DDI <useStmt> section: special permissions, restrictions and the
 * fixed citation/deposition/disclaimer boilerplate texts for the export language.
 */
private void addDataAccessUseStatement(DataAccsType dataAccs) {
    // Add use statement
    UseStmtType useStmt = dataAccs.addNewUseStmt();
    // Add special permissions.
    // NOTE(review): presence is checked for Language.DEFAULT but the element is
    // filled for `language` — confirm this mixed-language handling is intended.
    Pair<StatusCode, ValueDataField> valueFieldPair = revision.dataField(ValueDataFieldCall.get(Fields.SPECIALTERMSOFUSE));
    if(hasValue(valueFieldPair, Language.DEFAULT)) {
        fillTextType(useStmt.addNewSpecPerm(), valueFieldPair, language);
    }
    // Add restrictions, excel row #164
    valueFieldPair = revision.dataField(ValueDataFieldCall.get(Fields.TERMSOFUSE));
    if(hasValue(valueFieldPair, Language.DEFAULT)) {
        fillTextType(useStmt.addNewRestrctn(), getDDIRestriction(language, valueFieldPair.getRight().getActualValueFor(Language.DEFAULT)));
    }
    // Add citation required
    fillTextType(useStmt.addNewCitReq(), getDDIText(language, "CIT_REQ"));
    // Add deposition required
    fillTextType(useStmt.addNewDeposReq(), getDDIText(language, "DEPOS_REQ"));
    // Add disclaimer required
    fillTextType(useStmt.addNewDisclaimer(), getDDIText(language, "DISCLAIMER"));
}

/**
 * Builds the DDI <othrStdyMat> section: related materials, related studies,
 * related publications and publication comments.
 */
private void addOtherStudyMaterial(StdyDscrType stdyDscrType) {
    OthrStdyMatType othr = stdyDscrType.addNewOthrStdyMat();
    // Add related materials
    List<ValueDataField> fields = gatherFields(revision, Fields.RELATEDMATERIALS, Fields.RELATEDMATERIAL, language, language);
    for(ValueDataField field : fields) {
fillTextType(othr.addNewRelMat(), field, language);
}
// Add related studies (studyID + study name)
Pair<StatusCode, ReferenceContainerDataField> referenceContainerPair = revision.dataField(ReferenceContainerDataFieldCall.get(Fields.RELATEDSTUDIES));
if(referenceContainerPair.getLeft() == StatusCode.FIELD_FOUND && !referenceContainerPair.getRight().getReferences().isEmpty()) {
    for(ReferenceRow row : referenceContainerPair.getRight().getReferences()) {
        if(row.getRemoved() || !row.hasValue()) {
            continue;
        }
        // Resolve the referenced study revision; a dangling reference is logged and skipped.
        Pair<ReturnResult, RevisionData> revisionPair = revisions.getRevisionData(row.getReference().getValue());
        if(revisionPair.getLeft() != ReturnResult.REVISION_FOUND) {
            Logger.error(getClass(), "Could not find referenced study with ID: "+row.getReference().getValue());
            continue;
        }
        // "-" placeholders keep the output well-formed when ID or title is missing.
        String studyID = "-";
        String title = "-";
        RevisionData study = revisionPair.getRight();
        Pair<StatusCode, ValueDataField> valueFieldPair = study.dataField(ValueDataFieldCall.get(Fields.STUDYID));
        if(hasValue(valueFieldPair, Language.DEFAULT)) {
            studyID = valueFieldPair.getRight().getActualValueFor(Language.DEFAULT);
        }
        valueFieldPair = study.dataField(ValueDataFieldCall.get(Fields.TITLE));
        if(hasValue(valueFieldPair, language)) {
            title = valueFieldPair.getRight().getActualValueFor(language);
        }
        fillTextType(othr.addNewRelStdy(), studyID+" "+title);
    }
}
// Add related publications (publications -> publicationrelpubl)
referenceContainerPair = revision.dataField(ReferenceContainerDataFieldCall.get(Fields.PUBLICATIONS));
if(referenceContainerPair.getLeft() == StatusCode.FIELD_FOUND && !referenceContainerPair.getRight().getReferences().isEmpty()) {
    for(ReferenceRow row : referenceContainerPair.getRight().getReferences()) {
        if (row.getRemoved() || !row.hasValue()) {
            continue;
        }
        Pair<ReturnResult, RevisionData> revisionPair = revisions.getRevisionData(row.getReference().getValue());
        if (revisionPair.getLeft() != ReturnResult.REVISION_FOUND) {
            Logger.error(getClass(), "Could not find referenced publication with ID: " + row.getReference().getValue());
            continue;
        }
        RevisionData publication = revisionPair.getRight();
        Pair<StatusCode, ValueDataField> valueFieldPair = publication.dataField(ValueDataFieldCall.get(Fields.PUBLICATIONRELPUBL));
        if(hasValue(valueFieldPair, Language.DEFAULT)) {
            fillTextType(othr.addNewRelPubl(), valueFieldPair, Language.DEFAULT);
        }
    }
}
// Add publication comments
fields = gatherFields(revision, Fields.PUBLICATIONCOMMENTS, Fields.PUBLICATIONCOMMENT, language, language);
for(ValueDataField field : fields) {
    fillTextType(othr.addNewOthRefs(), field, language);
}
}
}
package org.cqframework.cql.tools.xsd2modelinfo;

import org.apache.ws.commons.schema.*;
import org.hl7.cql.model.*;
import org.hl7.elm_modelinfo.r1.*;

import javax.xml.bind.JAXB;
import javax.xml.namespace.QName;
import java.io.IOException;
import java.io.InputStream;
import java.util.*;
import java.util.stream.Collectors;

import static org.cqframework.cql.tools.xsd2modelinfo.ModelImporterOptions.ChoiceTypePolicy.USE_CHOICE;

/**
 * Imports an XML Schema (XSD) and produces a CQL {@link ModelInfo} describing
 * its types. Type resolution results are cached in {@code dataTypes}; options
 * control the model name and type-mapping behavior.
 */
public class ModelImporter {

    // Catalog of the CQL System model types, keyed by qualified name
    // ("<model>.<type>"), loaded once from the bundled system-modelinfo.xml.
    private static final Map<String, DataType> SYSTEM_CATALOG = getSystemCatalog();

    /** Loads the System model info resource and indexes its simple and class types. */
    private static Map<String, DataType> getSystemCatalog() {
        ModelInfo systemModelInfo = JAXB.unmarshal(
                ModelImporter.class.getResourceAsStream("/org/hl7/elm/r1/system-modelinfo.xml"),
                ModelInfo.class);
        final Map<String, DataType> map = new HashMap<>();
        for (TypeInfo info : systemModelInfo.getTypeInfo()) {
            if (info instanceof SimpleTypeInfo) {
                SimpleTypeInfo sInfo = (SimpleTypeInfo) info;
                String qualifiedName = getQualifiedName(systemModelInfo.getName(), sInfo.getName());
                map.put(qualifiedName, new SimpleType(qualifiedName));
            } else if (info instanceof ClassInfo) {
                ClassInfo cInfo = (ClassInfo) info;
                String qualifiedName = getQualifiedName(systemModelInfo.getName(), cInfo.getName());
                map.put(qualifiedName, new ClassType(qualifiedName));
            }
        }
        return map;
    }

    /** Returns "modelName.name". */
    private static String getQualifiedName(String modelName, String name) {
        return String.format("%s.%s", modelName, name);
    }

    private final XmlSchema schema;
    private final ModelInfo config;
    private final ModelImporterOptions options;
    // Cache of resolved types keyed by qualified type name.
    private final Map<String, DataType> dataTypes;
    // Maps XML namespace URI -> model name (seeded with the schema's target namespace).
    private final Map<String, String> namespaces;

    /**
     * Builds an importer for the given schema. Default options are loaded from
     * the bundled properties resource; any passed-in options are applied on top.
     *
     * @throws IllegalStateException if the default options resource cannot be read
     */
    private ModelImporter(XmlSchema schema, ModelImporterOptions options, ModelInfo config) {
        this.schema = schema;
        this.config = config;
        this.dataTypes = new HashMap<>();
        this.namespaces = new HashMap<>();

        // Load default options first
        ModelImporterOptions tmpOptions;
        try (InputStream defaultPropertiesIS = getClass().getResourceAsStream("default_options.properties")) {
            tmpOptions = ModelImporterOptions.loadFromProperties(defaultPropertiesIS);
        } catch (IOException e) {
            throw new IllegalStateException("Could not load default properties", e);
        }

        // If options were passed in, apply them on top of the default options
        if (options != null) {
            tmpOptions.applyProperties(options.exportProperties());
        }
        this.options = tmpOptions;
    }

    /** Entry point: imports the schema and returns the resulting ModelInfo. */
    public static ModelInfo fromXsd(XmlSchema schema, ModelImporterOptions options, ModelInfo config) {
        ModelImporter importer = new ModelImporter(schema, options, config);
        return importer.importXsd();
    }

    /**
     * Resolves every named type in the schema and assembles the ModelInfo.
     * Patient-class settings are copied from the optional config.
     *
     * @throws IllegalArgumentException if no model name has been configured
     */
    public ModelInfo importXsd() {
        if (options.getModel() == null || options.getModel().isEmpty()) {
            throw new IllegalArgumentException("Model name is required.");
        }
        namespaces.put(schema.getTargetNamespace(), options.getModel());

        for (XmlSchemaType schemaType : schema.getSchemaTypes().values()) {
            resolveType(schemaType);
        }

        return new ModelInfo()
                .withName(options.getModel())
                .withTargetQualifier(new QName(options.getModel().toLowerCase()))
                .withUrl(schema.getTargetNamespace())
                .withPatientClassName(config != null ? config.getPatientClassName() : null)
                .withPatientClassIdentifier(config != null ? config.getPatientClassIdentifier() : null)
                .withPatientBirthDatePropertyName(config != null ? config.getPatientBirthDatePropertyName() : null)
                .withTypeInfo(dataTypes.values().stream()
                        .map(this::toTypeInfo)
                        .collect(Collectors.toList()));
    }

    /** Dispatches a resolved DataType to the matching TypeInfo converter. */
    private TypeInfo toTypeInfo(DataType dataType) {
        if (dataType == null) {
            throw new IllegalArgumentException("dataType is null");
        }

        if (dataType instanceof SimpleType) {
            return toSimpleTypeInfo((SimpleType) dataType);
        } else if (dataType instanceof ClassType) {
            return toClassInfo((ClassType) dataType);
        } else if (dataType instanceof IntervalType) {
            return toIntervalTypeInfo((IntervalType) dataType);
        } else if (dataType instanceof ListType) {
            return toListTypeInfo((ListType) dataType);
        } else if (dataType instanceof TupleType) {
            return toTupleTypeInfo((TupleType) dataType);
        } else {
            throw new IllegalArgumentException(String.format("Unknown data type class: %s", dataType.getClass().getName()));
        }
    }

    /** Formats a named type as "Model.Name", or just "Name" when it has no model. */
    private String toTypeName(NamedTypeSpecifier typeSpecifier) {
        if (typeSpecifier.getModelName() != null) {
            return String.format("%s.%s", typeSpecifier.getModelName(), typeSpecifier.getName());
        }
        return typeSpecifier.getName();
    }

    /** Sets either baseType (simple name) or baseTypeSpecifier (structured) on the info. */
    private void setBaseType(TypeInfo typeInfo, DataType baseType) {
        TypeSpecifier baseTypeSpecifier = toTypeSpecifier(baseType);
        if (baseTypeSpecifier instanceof NamedTypeSpecifier) {
            typeInfo.setBaseType(toTypeName((NamedTypeSpecifier)baseTypeSpecifier));
        } else {
            typeInfo.setBaseTypeSpecifier(baseTypeSpecifier);
        }
    }

    /** Converts a SimpleType to its ModelInfo representation. */
    private SimpleTypeInfo toSimpleTypeInfo(SimpleType dataType) {
        SimpleTypeInfo result = new SimpleTypeInfo();
        result.setName(dataType.getSimpleName());
        if (dataType.getBaseType() != null) {
            setBaseType(result, dataType.getBaseType());
        }
        return result;
    }

    /**
     * Converts a ClassType to ClassInfo, carrying over label (or deriving it by
     * stripping the configured normalize prefix), identifier, retrievability,
     * primary code path and elements.
     */
    private ClassInfo toClassInfo(ClassType dataType) {
        ClassInfo result = new ClassInfo();
        result.setName(dataType.getSimpleName());
        if (dataType.getBaseType() != null) {
            setBaseType(result, dataType.getBaseType());
        }
        if (dataType.getLabel() != null) {
            result.setLabel(dataType.getLabel());
        } else if (options.getNormalizePrefix() != null &&
dataType.getName().startsWith(options.getNormalizePrefix())) {
            // Derive the label by stripping the configured normalize prefix.
            result.setLabel(dataType.getName().substring(options.getNormalizePrefix().length()));
        }

        result.setIdentifier(dataType.getIdentifier());
        result.setRetrievable(dataType.isRetrievable());
        result.setPrimaryCodePath(dataType.getPrimaryCodePath());

        for (ClassTypeElement element : dataType.getElements()) {
            ClassInfoElement cie = new ClassInfoElement().withName(element.getName());
            // Named element types are written as plain names; structured ones as specifiers.
            TypeSpecifier elementTypeSpecifier = toTypeSpecifier(element.getType());
            if (elementTypeSpecifier instanceof NamedTypeSpecifier) {
                cie.setType(toTypeName((NamedTypeSpecifier)elementTypeSpecifier));
            } else {
                cie.setTypeSpecifier(elementTypeSpecifier);
            }
            if (element.isProhibited()) {
                cie.setProhibited(true);
            }
            result.getElement().add(cie);
        }
        return result;
    }

    /** Converts an IntervalType to IntervalTypeInfo (point type as name or specifier). */
    private IntervalTypeInfo toIntervalTypeInfo(IntervalType dataType) {
        IntervalTypeInfo result = new IntervalTypeInfo();
        TypeSpecifier pointTypeSpecifier = toTypeSpecifier(dataType.getPointType());
        if (pointTypeSpecifier instanceof NamedTypeSpecifier) {
            result.setPointType(toTypeName((NamedTypeSpecifier)pointTypeSpecifier));
        } else {
            result.setPointTypeSpecifier(pointTypeSpecifier);
        }
        return result;
    }

    /** Converts a ListType to ListTypeInfo (element type as name or specifier). */
    private ListTypeInfo toListTypeInfo(ListType dataType) {
        ListTypeInfo result = new ListTypeInfo();
        TypeSpecifier elementTypeSpecifier = toTypeSpecifier(dataType.getElementType());
        if (elementTypeSpecifier instanceof NamedTypeSpecifier) {
            result.setElementType(toTypeName((NamedTypeSpecifier)elementTypeSpecifier));
        } else {
            result.setElementTypeSpecifier(elementTypeSpecifier);
        }
        return result;
    }

    /** Converts a TupleType to TupleTypeInfo, mapping each tuple element. */
    private TupleTypeInfo toTupleTypeInfo(TupleType dataType) {
        TupleTypeInfo result = new TupleTypeInfo();
        if (dataType.getBaseType() != null) {
            setBaseType(result, dataType.getBaseType());
        }
        for (TupleTypeElement element : dataType.getElements()) {
            TupleTypeInfoElement infoElement = new TupleTypeInfoElement()
                    .withName(element.getName());
            TypeSpecifier elementTypeSpecifier = toTypeSpecifier(element.getType());
            if (elementTypeSpecifier instanceof NamedTypeSpecifier) {
                infoElement.setType(toTypeName((NamedTypeSpecifier)elementTypeSpecifier));
            } else {
                infoElement.setTypeSpecifier(elementTypeSpecifier);
            }
            result.getElement().add(infoElement);
        }
        return result;
    }

    /**
     * Dispatches a DataType to the matching TypeSpecifier converter.
     * Tuple types are rejected: they cannot appear in type specifiers.
     */
    private TypeSpecifier toTypeSpecifier(DataType dataType) {
        if (dataType == null) {
            throw new IllegalArgumentException("dataType is null");
        }

        if (dataType instanceof SimpleType) {
            return toNamedTypeSpecifier((SimpleType) dataType);
        } else if (dataType instanceof ClassType) {
            return toNamedTypeSpecifier((ClassType) dataType);
        } else if (dataType instanceof IntervalType) {
            return toIntervalTypeSpecifier((IntervalType) dataType);
        } else if (dataType instanceof ListType) {
            return toListTypeSpecifier((ListType) dataType);
        } else if (dataType instanceof ChoiceType) {
            return toChoiceTypeSpecifier((ChoiceType) dataType);
        } else if (dataType instanceof TupleType) {
            throw new IllegalArgumentException("Tuple types cannot be used in type specifiers.");
        } else {
            throw new IllegalArgumentException(String.format("Unknown data type class: %s", dataType.getClass().getName()));
        }
    }

    /** Wraps a named type's namespace and simple name in a NamedTypeSpecifier. */
    private TypeSpecifier toNamedTypeSpecifier(NamedType dataType) {
        NamedTypeSpecifier namedTypeSpecifier = new NamedTypeSpecifier()
                .withModelName(dataType.getNamespace())
                .withName(dataType.getSimpleName());
        return namedTypeSpecifier;
    }

    /** Builds an IntervalTypeSpecifier (point type as name or specifier). */
    private TypeSpecifier toIntervalTypeSpecifier(IntervalType dataType) {
        IntervalTypeSpecifier intervalTypeSpecifier = new IntervalTypeSpecifier();
        TypeSpecifier pointTypeSpecifier = toTypeSpecifier(dataType.getPointType());
        if (pointTypeSpecifier instanceof NamedTypeSpecifier) {
            intervalTypeSpecifier.setPointType(toTypeName((NamedTypeSpecifier)pointTypeSpecifier));
        } else {
            intervalTypeSpecifier.setPointTypeSpecifier(pointTypeSpecifier);
        }
        return intervalTypeSpecifier;
    }

    /** Builds a ListTypeSpecifier (element type as name or specifier). */
    private TypeSpecifier toListTypeSpecifier(ListType dataType) {
        ListTypeSpecifier listTypeSpecifier = new ListTypeSpecifier();
        TypeSpecifier elementTypeSpecifier = toTypeSpecifier(dataType.getElementType());
        if (elementTypeSpecifier instanceof NamedTypeSpecifier) {
            listTypeSpecifier.setElementType(toTypeName((NamedTypeSpecifier)elementTypeSpecifier));
        } else {
            listTypeSpecifier.setElementTypeSpecifier(elementTypeSpecifier);
        }
        return listTypeSpecifier;
    }

    /** Builds a ChoiceTypeSpecifier from each alternative's specifier. */
    private TypeSpecifier toChoiceTypeSpecifier(ChoiceType dataType) {
        List<TypeSpecifier> choiceTypes = new ArrayList<>();
        for (DataType choice : dataType.getTypes()) {
            choiceTypes.add(toTypeSpecifier(choice));
        }
        ChoiceTypeSpecifier choiceTypeSpecifier = new ChoiceTypeSpecifier()
                .withChoice(choiceTypes);
        return choiceTypeSpecifier;
    }

    /**
     * Maps a schema QName to a model-qualified type name, learning namespace ->
     * model-name mappings from the QName prefix as a fallback. Dashes in local
     * parts are replaced with underscores.
     */
    private String getTypeName(QName schemaTypeName, Map<String, String> namespaces) {
        if (schemaTypeName == null) {
            throw new IllegalArgumentException("schemaTypeName is null");
        }

        String modelName = namespaces.get(schemaTypeName.getNamespaceURI());
        if (modelName == null) {
            modelName = schemaTypeName.getPrefix(); // Doesn't always work, but should be okay for a fallback position...
            if (modelName != null && ! modelName.isEmpty()) {
                namespaces.put(schemaTypeName.getNamespaceURI(), modelName);
            }
        }

        if (modelName != null && ! modelName.isEmpty()) {
            return modelName + '.'
+ schemaTypeName.getLocalPart().replace('-', '_');
        }
        return schemaTypeName.getLocalPart();
    }

    /**
     * Resolves a type by QName. RETYPE mappings short-circuit to the system
     * catalog; unknown names (not declared in the schema) become cached
     * SimpleTypes, optionally extending a mapped system base type.
     */
    private DataType resolveType(QName schemaTypeName) {
        if (schemaTypeName == null) {
            return null;
        }

        ModelImporterMapperValue mapping = options.getTypeMap().get(schemaTypeName);
        if (mapping != null && mapping.getRelationship() == ModelImporterMapperValue.Relationship.RETYPE) {
            return SYSTEM_CATALOG.get(mapping.getTargetSystemClass());
        }

        XmlSchemaType schemaType = schema.getTypeByName(schemaTypeName);
        if (schemaType == null) {
            String typeName = getTypeName(schemaTypeName, namespaces);
            DataType resultType = dataTypes.get(typeName);
            if (resultType == null) {
                if (mapping != null && mapping.getRelationship() == ModelImporterMapperValue.Relationship.EXTEND) {
                    resultType = new SimpleType(typeName, SYSTEM_CATALOG.get(mapping.getTargetSystemClass()));
                } else {
                    resultType = new SimpleType(typeName);
                }
                dataTypes.put(typeName, resultType);
            }
            return resultType;
        } else {
            return resolveType(schemaType);
        }
    }

    /** Dispatches on the concrete schema type; anything else resolves to null. */
    private DataType resolveType(XmlSchemaType schemaType) {
        if (schemaType instanceof XmlSchemaSimpleType) {
            return resolveSimpleType((XmlSchemaSimpleType)schemaType);
        } else if (schemaType instanceof XmlSchemaComplexType) {
            return resolveComplexType((XmlSchemaComplexType)schemaType);
        }
        return null;
    }

    /**
     * Resolves an XSD simple type. Anonymous types yield null. RETYPE mappings
     * short-circuit to the system catalog. Otherwise the base type comes from an
     * EXTEND mapping or the restriction base, and the restriction policy decides
     * whether to keep the new type or retype to the base (USE_BASETYPE default).
     */
    private DataType resolveSimpleType(XmlSchemaSimpleType simpleType) {
        if (simpleType.isAnonymous()) {
            return null;
        }

        ModelImporterMapperValue mapping = options.getTypeMap().get(simpleType.getQName());
        if (mapping != null && mapping.getRelationship() == ModelImporterMapperValue.Relationship.RETYPE) {
            return SYSTEM_CATALOG.get(mapping.getTargetSystemClass());
        }

        String typeName = getTypeName(simpleType.getQName(), namespaces);
        DataType resultType = dataTypes.get(typeName);
        if (resultType == null) {
            DataType baseType = null;
            boolean retypeToBase = false;
            if (mapping != null && mapping.getRelationship() == ModelImporterMapperValue.Relationship.EXTEND) {
                baseType = SYSTEM_CATALOG.get(mapping.getTargetSystemClass());
            } else if (simpleType.getContent() instanceof XmlSchemaSimpleTypeRestriction) {
                baseType = resolveType(((XmlSchemaSimpleTypeRestriction) simpleType.getContent()).getBaseTypeName());
                switch (options.getSimpleTypeRestrictionPolicy()) {
                    case EXTEND_BASETYPE:
                        break;
                    case IGNORE:
                        baseType = null;
                        break;
                    case USE_BASETYPE:
                    default:
                        retypeToBase = true;
                }
            }

            if (retypeToBase) {
                // Note: the retyped base is intentionally NOT cached under typeName.
                resultType = baseType;
            } else {
                resultType = new SimpleType(typeName, baseType);
                dataTypes.put(typeName, resultType);
            }
        }
        return resultType;
    }

    /**
     * Copies identifier/label/retrievable/primaryCodePath onto the class type
     * from a matching ClassInfo entry in the optional config (matched by name).
     */
    private void applyConfig(ClassType classType) {
        if (config != null) {
            for (int i = 0; i < config.getTypeInfo().size(); i++) {
                TypeInfo typeConfig = config.getTypeInfo().get(i);
                if (typeConfig instanceof ClassInfo) {
                    ClassInfo classConfig = (ClassInfo)typeConfig;
                    if (classConfig.getName().equals(classType.getName())) {
                        classType.setIdentifier(classConfig.getIdentifier());
                        classType.setLabel(classConfig.getLabel());
                        classType.setRetrievable(classConfig.isRetrievable());
                        classType.setPrimaryCodePath(classConfig.getPrimaryCodePath());
                    }
                }
            }
        }
    }

    // NOTE(review): this method is truncated at the end of the visible chunk;
    // the remainder of its body is outside this view.
    private DataType resolveComplexType(XmlSchemaComplexType complexType) {
        if (complexType.isAnonymous()) {
            return null;
        }

        ModelImporterMapperValue mapping = options.getTypeMap().get(complexType.getQName());
        if (mapping != null && mapping.getRelationship() == ModelImporterMapperValue.Relationship.RETYPE) {
            return SYSTEM_CATALOG.get(mapping.getTargetSystemClass());
        }

        String typeName = getTypeName(complexType.getQName(), namespaces);
        DataType resultType = dataTypes.get(typeName);
        if (resultType == null) {
            // Resolve the base type, if any
            DataType baseType = null;
            if (mapping != null && mapping.getRelationship() == ModelImporterMapperValue.Relationship.EXTEND) {
                baseType = SYSTEM_CATALOG.get(mapping.getTargetSystemClass());
            } else if (complexType.getBaseSchemaTypeName() != null) {
                baseType = resolveType(schema.getTypeByName(complexType.getBaseSchemaTypeName()));
            }
            // Create and
register the type ClassType classType = new ClassType(typeName, baseType); dataTypes.put(typeName, classType); applyConfig(classType); List<ClassTypeElement> elements = new ArrayList<>(); List<XmlSchemaAttributeOrGroupRef> attributeContent; XmlSchemaParticle particleContent; if (complexType.getContentModel() != null) { XmlSchemaContent content = complexType.getContentModel().getContent(); if (content instanceof XmlSchemaComplexContentRestriction) { XmlSchemaComplexContentRestriction restrictionContent = (XmlSchemaComplexContentRestriction)content; attributeContent = restrictionContent.getAttributes(); particleContent = restrictionContent.getParticle(); } else if (content instanceof XmlSchemaComplexContentExtension) { XmlSchemaComplexContentExtension extensionContent = (XmlSchemaComplexContentExtension)content; attributeContent = extensionContent.getAttributes(); particleContent = extensionContent.getParticle(); } // For complex types with simple content, create a new class type with a value element for the content else if (content instanceof XmlSchemaSimpleContentRestriction) { XmlSchemaSimpleContentRestriction restrictionContent = (XmlSchemaSimpleContentRestriction)content; DataType valueType = resolveType(restrictionContent.getBaseTypeName()); ClassTypeElement valueElement = new ClassTypeElement("value", valueType, false, false); elements.add(valueElement); attributeContent = restrictionContent.getAttributes(); particleContent = null; } else if (content instanceof XmlSchemaSimpleContentExtension) { XmlSchemaSimpleContentExtension extensionContent = (XmlSchemaSimpleContentExtension)content; attributeContent = extensionContent.getAttributes(); particleContent = null; DataType valueType = resolveType(extensionContent.getBaseTypeName()); ClassTypeElement valueElement = new ClassTypeElement("value", valueType, false, false); elements.add(valueElement); } else { throw new IllegalArgumentException("Unrecognized Schema Content: " + content.toString()); } } else { 
attributeContent = complexType.getAttributes(); particleContent = complexType.getParticle(); } for (XmlSchemaAttributeOrGroupRef attribute : attributeContent) { resolveClassTypeElements(attribute, elements); } if (particleContent != null) { XmlSchemaParticle particle = particleContent; resolveClassTypeElements(particle, elements); } // TODO: Map elements to basetype if this or one of its parents is a configured extension of a CQL basetype. // This could get complicated... // Filter out elements already in the base class if (baseType instanceof ClassType) { ClassType cBase = (ClassType) baseType; elements.removeAll(cBase.getAllElements()); } for (ClassTypeElement element : elements) { try { classType.addElement(element); } catch (InvalidRedeclarationException e) { switch (options.getElementRedeclarationPolicy()) { case FAIL_INVALID_REDECLARATIONS: System.err.println("Redeclaration failed. Either fix the XSD or choose a different element-redeclaration-policy."); throw e; case DISCARD_INVALID_REDECLARATIONS: System.err.printf("%s. Discarding element redeclaration.%n", e.getMessage()); break; case RENAME_INVALID_REDECLARATIONS: default: String tName = getTypeName(element.getType()); StringBuilder name = new StringBuilder(element.getName()).append(Character.toUpperCase(tName.charAt(0))); if (tName.length() > 1) { name.append(tName.substring(1)); } System.err.printf("%s. 
Renaming element to %s.%n", e.getMessage(), name.toString()); classType.addElement(new ClassTypeElement(name.toString(), element.getType(), element.isProhibited(), element.isOneBased())); } } } resultType = classType; } return resultType; } private String getTypeName(DataType type) { String typeName; if (type instanceof ClassType) { typeName = ((ClassType) type).getSimpleName(); } else if (type instanceof SimpleType) { typeName = ((SimpleType) type).getSimpleName(); } else if (type instanceof ListType) { DataType elementType = ((ListType) type).getElementType(); typeName = getTypeName(elementType) + "List"; } else if (type instanceof IntervalType) { DataType pointType = ((IntervalType) type).getPointType(); typeName = getTypeName(pointType) + "Interval"; } else if (type instanceof TupleType) { typeName = "Tuple"; } else if (type instanceof TypeParameter) { typeName = "Parameter"; } else { typeName = "Type"; } return typeName; } private int indexOfFirstDifference(String original, String comparison) { // Returns the index of the first difference between the two strings if (original == null) { throw new IllegalArgumentException("original is null"); } if (comparison == null) { throw new IllegalArgumentException("comparison is null"); } int result = -1; do { result++; if (result >= original.length() || result >= comparison.length() || original.charAt(result) != comparison.charAt(result)) { break; } } while (true); return result; } private void resolveClassTypeElements(XmlSchemaParticle particle, List<ClassTypeElement> elements) { if (particle instanceof XmlSchemaElement) { ClassTypeElement element = resolveClassTypeElement((XmlSchemaElement)particle); if (element != null) { elements.add(element); } } else if (particle instanceof XmlSchemaSequence) { XmlSchemaSequence sequence = (XmlSchemaSequence)particle; for (XmlSchemaSequenceMember member : sequence.getItems()) { if (member instanceof XmlSchemaParticle) { resolveClassTypeElements((XmlSchemaParticle) member, 
elements); } } } else if (particle instanceof XmlSchemaAll) { XmlSchemaAll all = (XmlSchemaAll)particle; for (XmlSchemaAllMember member : all.getItems()) { if (member instanceof XmlSchemaParticle) { resolveClassTypeElements((XmlSchemaParticle) member, elements); } } } else if (particle instanceof XmlSchemaChoice) { XmlSchemaChoice choice = (XmlSchemaChoice)particle; boolean choiceCreated = false; if (options.getChoiceTypePolicy() == USE_CHOICE) { List<DataType> choices = new ArrayList<DataType>(); String elementName = null; for (XmlSchemaChoiceMember member : choice.getItems()) { ClassTypeElement choiceElement = resolveClassTypeElement((XmlSchemaElement) member); if (choiceElement != null) { if (elementName == null) { elementName = choiceElement.getName(); } else { int firstDifference = indexOfFirstDifference(elementName, choiceElement.getName()); if (firstDifference < elementName.length()) { elementName = elementName.substring(0, firstDifference); } } choices.add(choiceElement.getType()); } } if (elementName != null && !elementName.isEmpty()) { ChoiceType choiceType = new ChoiceType(choices); ClassTypeElement element = new ClassTypeElement(elementName, choiceType, false, false); elements.add(element); choiceCreated = true; } } // Some choices don't have a prefix (e.g. 
FHIR.ResourceContainer) // In this case, create an expanded type if (!choiceCreated) { for (XmlSchemaChoiceMember member : choice.getItems()) { if (member instanceof XmlSchemaElement) { ClassTypeElement element = resolveClassTypeElement((XmlSchemaElement) member); if (element != null) { elements.add(element); } } } } } else if (particle instanceof XmlSchemaGroupRef) { XmlSchemaGroupRef ref = (XmlSchemaGroupRef)particle; resolveClassTypeElements(ref.getParticle(), elements); } } private ClassTypeElement resolveClassTypeElement(XmlSchemaElement element) { boolean isList = element.getMaxOccurs() > 1; if (element.isRef()) { element = element.getRef().getTarget(); } DataType elementType = null; XmlSchemaType schemaType = element.getSchemaType(); if (schemaType != null) { elementType = resolveType(schemaType); } else { QName schemaTypeName = element.getSchemaTypeName(); if (schemaTypeName != null) { elementType = resolveType(schemaTypeName); } } if (elementType == null) { return null; // The type is anonymous and will not be represented within the imported model //throw new IllegalStateException(String.format("Unable to resolve type %s of element %s.", element.getSchemaType().getName(), element.getName())); } if (isList) { elementType = new ListType(elementType); } boolean isProhibited = element.getMinOccurs() == 0L && element.getMaxOccurs() == 0L; return new ClassTypeElement(element.getName(), elementType, isProhibited, false); } private ClassTypeElement resolveClassTypeElement(XmlSchemaAttribute attribute) { if (attribute.isRef()) { attribute = attribute.getRef().getTarget(); } DataType elementType = null; XmlSchemaType schemaType = attribute.getSchemaType(); if (schemaType != null) { elementType = resolveType(schemaType); } else { QName schemaTypeName = attribute.getSchemaTypeName(); if (schemaTypeName != null) { elementType = resolveType(schemaTypeName); } } if (elementType == null) { return null; // The type is anonymous and will not be represented in the imported 
model //throw new IllegalStateException(String.format("Unable to resolve type %s of attribute %s.", attribute.getSchemaTypeName(), attribute.getName())); } return new ClassTypeElement(attribute.getName(), elementType, attribute.getUse() == XmlSchemaUse.PROHIBITED, false); } private void resolveClassTypeElements(XmlSchemaAttributeOrGroupRef attribute, List<ClassTypeElement> elements) { if (attribute instanceof XmlSchemaAttribute) { ClassTypeElement element = resolveClassTypeElement((XmlSchemaAttribute)attribute); if (element != null) { elements.add(element); } } else if (attribute instanceof XmlSchemaAttributeGroupRef) { resolveClassTypeElements(((XmlSchemaAttributeGroupRef)attribute).getRef().getTarget(), elements); } } private void resolveClassTypeElements(XmlSchemaAttributeGroup attributeGroup, List<ClassTypeElement> elements) { for (XmlSchemaAttributeGroupMember member : attributeGroup.getAttributes()) { if (member instanceof XmlSchemaAttribute) { ClassTypeElement element = resolveClassTypeElement((XmlSchemaAttribute)member); if (element != null) { elements.add(element); } } else if (member instanceof XmlSchemaAttributeGroupRef) { resolveClassTypeElements(((XmlSchemaAttributeGroupRef)member).getRef().getTarget(), elements); } else if (member instanceof XmlSchemaAttributeGroup) { resolveClassTypeElements((XmlSchemaAttributeGroup)member, elements); } } } }
/*===========================================================================
 * Licensed Materials - Property of IBM
 * "Restricted Materials of IBM"
 *
 * IBM SDK, Java(tm) Technology Edition, v8
 * (C) Copyright IBM Corp. 1997, 2016. All Rights Reserved
 *
 * US Government Users Restricted Rights - Use, duplication or disclosure
 * restricted by GSA ADP Schedule Contract with IBM Corp.
 *===========================================================================
 */
/*
 * Copyright (c) 1997, 2013, Oracle and/or its affiliates. All rights reserved.
 * ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 */

package java.security;

import java.util.ArrayList;
import java.util.Collections;
import java.util.Enumeration;
import java.util.List;
import java.util.Map;
import java.util.WeakHashMap;
import sun.misc.JavaSecurityProtectionDomainAccess;
import static sun.misc.JavaSecurityProtectionDomainAccess.ProtectionDomainCache;
import sun.security.util.Debug;
import sun.security.util.SecurityConstants;
import sun.misc.JavaSecurityAccess;
import sun.misc.SharedSecrets;

/**
 *
 *<p>
 * This ProtectionDomain class encapsulates the characteristics of a domain,
 * which encloses a set of classes whose instances are granted a set
 * of permissions when being executed on behalf of a given set of Principals.
 * <p>
 * A static set of permissions can be bound to a ProtectionDomain when it is
 * constructed; such permissions are granted to the domain regardless of the
 * Policy in force. However, to support dynamic security policies, a
 * ProtectionDomain can also be constructed such that it is dynamically
 * mapped to a set of permissions by the current Policy whenever a permission
 * is checked.
 * <p>
 *
 * @author Li Gong
 * @author Roland Schemers
 * @author Gary Ellison
 */
public class ProtectionDomain {

    /**
     * Bridge published to sun.misc.SharedSecrets so code outside
     * java.security can run privileged actions under the intersection of
     * two AccessControlContexts.
     */
    private static class JavaSecurityAccessImpl implements JavaSecurityAccess {

        private JavaSecurityAccessImpl() {
        }

        @Override
        public <T> T doIntersectionPrivilege(
                PrivilegedAction<T> action,
                final AccessControlContext stack,
                final AccessControlContext context) {
            if (action == null) {
                throw new NullPointerException();
            }
            return AccessController.doPrivileged(
                action,
                getCombinedACC(context, stack)
            );
        }

        @Override
        public <T> T doIntersectionPrivilege(
                PrivilegedAction<T> action,
                AccessControlContext context) {
            return doIntersectionPrivilege(action,
                AccessController.getContext(), context);
        }

        // Combines the supplied context with the current stack's context
        // (keeping the stack's DomainCombiner) and optimizes the result.
        private static AccessControlContext getCombinedACC(AccessControlContext context,
                                                           AccessControlContext stack) {
            AccessControlContext acc =
                new AccessControlContext(context, stack.getCombiner(), true);
            return new AccessControlContext(stack.getContext(), acc).optimize();
        }
    }

    static {
        // Set up JavaSecurityAccess in SharedSecrets
        SharedSecrets.setJavaSecurityAccess(new JavaSecurityAccessImpl());
    }

    /* CodeSource */
    private CodeSource codesource;

    /* ClassLoader the protection domain was consed from */
    private ClassLoader classloader;

    /* Principals running-as within this protection domain */
    private Principal[] principals;

    /* the rights this protection domain is granted */
    private PermissionCollection permissions;

    /* if the permissions object has AllPermission */
    private boolean hasAllPerm = false;

    /* the PermissionCollection is static (pre 1.4 constructor)
       or dynamic (via a policy refresh) */
    private boolean staticPermissions;

    /*
     * An object used as a key when the ProtectionDomain is stored in a Map.
     */
    final Key key = new Key();

    private static final Debug debug = Debug.getInstance("domain");

    /**
     * Creates a new ProtectionDomain with the given CodeSource and
     * Permissions. If the permissions object is not null, then
     * {@code setReadOnly()} will be called on the passed in
     * Permissions object. The only permissions granted to this domain
     * are the ones specified; the current Policy will not be consulted.
     *
     * @param codesource the codesource associated with this domain
     * @param permissions the permissions granted to this domain
     */
    public ProtectionDomain(CodeSource codesource,
                            PermissionCollection permissions) {
        this.codesource = codesource;
        if (permissions != null) {
            this.permissions = permissions;
            this.permissions.setReadOnly();
            // Fast-path flag so implies() can skip the policy entirely.
            if (permissions instanceof Permissions &&
                ((Permissions)permissions).allPermission != null) {
                hasAllPerm = true;
            }
        }
        this.classloader = null;
        this.principals = new Principal[0];
        staticPermissions = true;
    }

    /**
     * Creates a new ProtectionDomain qualified by the given CodeSource,
     * Permissions, ClassLoader and array of Principals. If the
     * permissions object is not null, then {@code setReadOnly()}
     * will be called on the passed in Permissions object.
     * The permissions granted to this domain are dynamic; they include
     * both the static permissions passed to this constructor, and any
     * permissions granted to this domain by the current Policy at the
     * time a permission is checked.
     * <p>
     * This constructor is typically used by
     * {@link SecureClassLoader ClassLoaders}
     * and {@link DomainCombiner DomainCombiners} which delegate to
     * {@code Policy} to actively associate the permissions granted to
     * this domain. This constructor affords the
     * Policy provider the opportunity to augment the supplied
     * PermissionCollection to reflect policy changes.
     * <p>
     *
     * @param codesource the CodeSource associated with this domain
     * @param permissions the permissions granted to this domain
     * @param classloader the ClassLoader associated with this domain
     * @param principals the array of Principals associated with this
     * domain. The contents of the array are copied to protect against
     * subsequent modification.
     * @see Policy#refresh
     * @see Policy#getPermissions(ProtectionDomain)
     * @since 1.4
     */
    public ProtectionDomain(CodeSource codesource,
                            PermissionCollection permissions,
                            ClassLoader classloader,
                            Principal[] principals) {
        this.codesource = codesource;
        if (permissions != null) {
            this.permissions = permissions;
            this.permissions.setReadOnly();
            if (permissions instanceof Permissions &&
                ((Permissions)permissions).allPermission != null) {
                hasAllPerm = true;
            }
        }
        this.classloader = classloader;
        // Defensive copy: callers must not be able to mutate our principals.
        this.principals = (principals != null ? principals.clone() :
                           new Principal[0]);
        staticPermissions = false;
    }

    /**
     * Returns the CodeSource of this domain.
     * @return the CodeSource of this domain which may be null.
     * @since 1.2
     */
    public final CodeSource getCodeSource() {
        return this.codesource;
    }

    /**
     * Returns the ClassLoader of this domain.
     * @return the ClassLoader of this domain which may be null.
     *
     * @since 1.4
     */
    public final ClassLoader getClassLoader() {
        return this.classloader;
    }

    /**
     * Returns an array of principals for this domain.
     * @return a non-null array of principals for this domain.
     * Returns a new array each time this method is called.
     *
     * @since 1.4
     */
    public final Principal[] getPrincipals() {
        return this.principals.clone();
    }

    /**
     * Returns the static permissions granted to this domain.
     *
     * @return the static set of permissions for this domain which may be null.
     * @see Policy#refresh
     * @see Policy#getPermissions(ProtectionDomain)
     */
    public final PermissionCollection getPermissions() {
        return permissions;
    }

    /**
     * Check and see if this ProtectionDomain implies the permissions
     * expressed in the Permission object.
     * <p>
     * The set of permissions evaluated is a function of whether the
     * ProtectionDomain was constructed with a static set of permissions
     * or it was bound to a dynamically mapped set of permissions.
     * <p>
     * If the ProtectionDomain was constructed to a
     * {@link #ProtectionDomain(CodeSource, PermissionCollection)
     * statically bound} PermissionCollection then the permission will
     * only be checked against the PermissionCollection supplied at
     * construction.
     * <p>
     * However, if the ProtectionDomain was constructed with
     * the constructor variant which supports
     * {@link #ProtectionDomain(CodeSource, PermissionCollection,
     * ClassLoader, java.security.Principal[]) dynamically binding}
     * permissions, then the permission will be checked against the
     * combination of the PermissionCollection supplied at construction and
     * the current Policy binding.
     * <p>
     *
     * @param permission the Permission object to check.
     *
     * @return true if "permission" is implicit to this ProtectionDomain.
     */
    public boolean implies(Permission permission) {

        if (hasAllPerm) {
            // internal permission collection already has AllPermission -
            // no need to go to policy
            return true;
        }

        if (!staticPermissions &&
            Policy.getPolicyNoCheck().implies(this, permission))
            return true;
        if (permissions != null)
            return permissions.implies(permission);

        return false;
    }

    // called by the VM -- do not remove
    boolean impliesCreateAccessControlContext() {
        return implies(SecurityConstants.CREATE_ACC_PERMISSION);
    }

    /**
     * Convert a ProtectionDomain to a String.
     */
    @Override
    public String toString() {
        String pals = "<no principals>";
        if (principals != null && principals.length > 0) {
            StringBuilder palBuf = new StringBuilder("(principals ");

            for (int i = 0; i < principals.length; i++) {
                palBuf.append(principals[i].getClass().getName() +
                            " \"" + principals[i].getName() +
                            "\"");
                if (i < principals.length-1)
                    palBuf.append(",\n");
                else
                    palBuf.append(")\n");
            }
            pals = palBuf.toString();
        }

        // Check if policy is set; we don't want to load
        // the policy prematurely here
        PermissionCollection pc = Policy.isSet() && seeAllp() ?
                                      mergePermissions():
                                      getPermissions();

        return "ProtectionDomain "+
            " "+codesource+"\n"+
            " "+classloader+"\n"+
            " "+pals+"\n"+
            " "+pc+"\n";
    }

    /**
     * Return true (merge policy permissions) in the following cases:
     *
     * . SecurityManager is null
     *
     * . SecurityManager is not null,
     *          debug is not null,
     *          SecurityManager implementation is in bootclasspath,
     *          Policy implementation is in bootclasspath
     *          (the bootclasspath restrictions avoid recursion)
     *
     * . SecurityManager is not null,
     *          debug is null,
     *          caller has Policy.getPolicy permission
     */
    private static boolean seeAllp() {
        SecurityManager sm = System.getSecurityManager();

        if (sm == null) {
            return true;
        } else {
            if (debug != null) {
                // A null ClassLoader means the class was loaded by the
                // bootstrap loader.
                if (sm.getClass().getClassLoader() == null &&
                    Policy.getPolicyNoCheck().getClass().getClassLoader()
                                                                == null) {
                    return true;
                }
            } else {
                try {
                    sm.checkPermission(SecurityConstants.GET_POLICY_PERMISSION);
                    return true;
                } catch (SecurityException se) {
                    // fall thru and return false
                }
            }
        }

        return false;
    }

    // Merges the domain's static permissions with the current Policy's
    // permissions for this domain, de-duplicating by class/name/actions.
    // Used only by toString() above.
    private PermissionCollection mergePermissions() {
        if (staticPermissions)
            return permissions;

        PermissionCollection perms =
            java.security.AccessController.doPrivileged
            (new java.security.PrivilegedAction<PermissionCollection>() {
                    public PermissionCollection run() {
                        Policy p = Policy.getPolicyNoCheck();
                        return p.getPermissions(ProtectionDomain.this);
                    }
                });

        Permissions mergedPerms = new Permissions();
        int swag = 32;
        int vcap = 8;
        Enumeration<Permission> e;
        List<Permission> pdVector = new ArrayList<>(vcap);
        List<Permission> plVector = new ArrayList<>(swag);

        //
        // Build a vector of domain permissions for subsequent merge
        if (permissions != null) {
            synchronized (permissions) {
                e = permissions.elements();
                while (e.hasMoreElements()) {
                    pdVector.add(e.nextElement());
                }
            }
        }

        //
        // Build a vector of Policy permissions for subsequent merge
        if (perms != null) {
            synchronized (perms) {
                e = perms.elements();
                while (e.hasMoreElements()) {
                    plVector.add(e.nextElement());
                    vcap++;
                }
            }
        }

        if (perms != null && permissions != null) {
            //
            // Weed out the duplicates from the policy. Unless a refresh
            // has occurred since the pd was consed this should result in
            // an empty vector.
            synchronized (permissions) {
                e = permissions.elements();   // domain vs policy
                while (e.hasMoreElements()) {
                    Permission pdp = e.nextElement();
                    Class<?> pdpClass = pdp.getClass();
                    String pdpActions = pdp.getActions();
                    String pdpName = pdp.getName();
                    for (int i = 0; i < plVector.size(); i++) {
                        Permission pp = plVector.get(i);
                        if (pdpClass.isInstance(pp)) {
                            // The equals() method on some permissions
                            // have some side effects so this manual
                            // comparison is sufficient.
                            if (pdpName.equals(pp.getName()) &&
                                pdpActions.equals(pp.getActions())) {
                                plVector.remove(i);
                                break;
                            }
                        }
                    }
                }
            }
        }

        if (perms !=null) {
            // the order of adding to merged perms and permissions
            // needs to preserve the bugfix 4301064
            for (int i = plVector.size()-1; i >= 0; i--) {
                mergedPerms.add(plVector.get(i));
            }
        }
        if (permissions != null) {
            for (int i = pdVector.size()-1; i >= 0; i--) {
                mergedPerms.add(pdVector.get(i));
            }
        }

        return mergedPerms;
    }

    /**
     * Used for storing ProtectionDomains as keys in a Map.
     */
    final class Key {}

    static {
        SharedSecrets.setJavaSecurityProtectionDomainAccess(
            new JavaSecurityProtectionDomainAccess() {
                public ProtectionDomainCache getProtectionDomainCache() {
                    return new ProtectionDomainCache() {
                        // WeakHashMap keyed on the domain's Key so cache
                        // entries die with their ProtectionDomain.
                        private final Map<Key, PermissionCollection> map =
                            Collections.synchronizedMap
                                (new WeakHashMap<Key, PermissionCollection>());
                        public void put(ProtectionDomain pd,
                                        PermissionCollection pc) {
                            map.put((pd == null ? null : pd.key), pc);
                        }
                        public PermissionCollection get(ProtectionDomain pd) {
                            return pd == null ? map.get(null) : map.get(pd.key);
                        }
                    };
                }

                @Override
                public boolean getStaticPermissionsField(ProtectionDomain pd) {
                    return pd.staticPermissions;
                }
            });
    }
}
/*
 * Copyright (c) 2012-2014 Sonatype, Inc. All rights reserved.
 *
 * This program is licensed to you under the Apache License Version 2.0,
 * and you may not use this file except in compliance with the Apache License Version 2.0.
 * You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the Apache License Version 2.0 is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
 */
package org.asynchttpclient.providers.grizzly;

import static java.lang.Boolean.TRUE;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static org.glassfish.grizzly.utils.Exceptions.makeIOException;

import org.asynchttpclient.Body;
import org.asynchttpclient.BodyGenerator;
import org.glassfish.grizzly.Buffer;
import org.glassfish.grizzly.CompletionHandler;
import org.glassfish.grizzly.Connection;
import org.glassfish.grizzly.OutputSink;
import org.glassfish.grizzly.WriteHandler;
import org.glassfish.grizzly.WriteResult;
import org.glassfish.grizzly.filterchain.FilterChain;
import org.glassfish.grizzly.filterchain.FilterChainContext;
import org.glassfish.grizzly.http.HttpContent;
import org.glassfish.grizzly.http.HttpContext;
import org.glassfish.grizzly.http.HttpRequestPacket;
import org.glassfish.grizzly.impl.FutureImpl;
import org.glassfish.grizzly.ssl.SSLBaseFilter;
import org.glassfish.grizzly.ssl.SSLFilter;
import org.glassfish.grizzly.threadpool.Threads;
import org.glassfish.grizzly.utils.Futures;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.concurrent.ExecutionException;

import static org.glassfish.grizzly.ssl.SSLUtils.getSSLEngine;

/**
 * {@link BodyGenerator} which may return just part of the payload at the time
 * handler is requesting it. If it happens - PartialBodyGenerator becomes responsible
 * for finishing payload transferring asynchronously.
 *
 * @author The Grizzly Team
 * @since 1.7.0
 */
public class FeedableBodyGenerator implements BodyGenerator {

    /**
     * There is no limit on bytes waiting to be written. This configuration
     * value should be used with caution as it could lead to out-of-memory
     * conditions.
     */
    @SuppressWarnings("UnusedDeclaration")
    public static final int UNBOUND = -1;

    /**
     * Defer to whatever the connection has been configured for max pending bytes.
     */
    public static final int DEFAULT = -2;

    // State handed over by initializeAsynchronousTransfer(); volatile because
    // the feeder may run on a different thread than the initiator.
    private volatile HttpRequestPacket requestPacket;
    private volatile FilterChainContext context;
    private volatile HttpContent.Builder contentBuilder;

    private final EmptyBody EMPTY_BODY = new EmptyBody();

    private Feeder feeder;
    // Connection's original max-async-write-queue size, saved so it can be
    // restored after the transfer (restore site not visible in this chunk —
    // TODO confirm).
    private int origMaxPendingBytes;
    private int configuredMaxPendingBytes = DEFAULT;
    private boolean asyncTransferInitiated;

    // ---------------------------------------------- Methods from BodyGenerator

    /**
     * {@inheritDoc}
     */
    @Override
    public Body createBody() throws IOException {
        return EMPTY_BODY;
    }

    // ---------------------------------------------------------- Public Methods

    /**
     * Configured the maximum number of bytes that may be pending to be written
     * to the wire. If not explicitly configured, the connection's current
     * configuration will be used instead.
     * <p/>
     * Once all data has been fed, the connection's max pending bytes configuration
     * will be restored to its original value.
     *
     * @param maxPendingBytes maximum number of bytes that may be queued to
     *                        be written to the wire.
     * @throws IllegalStateException if called after {@link #initializeAsynchronousTransfer(FilterChainContext, HttpRequestPacket)}
     *                               has been called by the {@link GrizzlyAsyncHttpProvider}.
     * @throws IllegalArgumentException if maxPendingBytes is less than zero and is
     *                                  not {@link #UNBOUND} or {@link #DEFAULT}.
     */
    @SuppressWarnings("UnusedDeclaration")
    public synchronized void setMaxPendingBytes(final int maxPendingBytes) {
        if (maxPendingBytes < DEFAULT) {
            throw new IllegalArgumentException("Invalid maxPendingBytes value: " + maxPendingBytes);
        }
        if (asyncTransferInitiated) {
            throw new IllegalStateException("Unable to set max pending bytes after async data transfer has been initiated.");
        }
        configuredMaxPendingBytes = maxPendingBytes;
    }

    /**
     * Add a {@link Feeder} implementation that will be invoked when writing
     * without blocking is possible. This method must be set before dispatching
     * the request this feeder is associated with.
     *
     * @param feeder the {@link Feeder} responsible for providing data.
     * @throws IllegalStateException if called after {@link #initializeAsynchronousTransfer(FilterChainContext, HttpRequestPacket)}
     *                               has been called by the {@link GrizzlyAsyncHttpProvider}.
     * @throws IllegalArgumentException if <code>feeder</code> is <code>null</code>
     */
    @SuppressWarnings("UnusedDeclaration")
    public synchronized void setFeeder(final Feeder feeder) {
        if (asyncTransferInitiated) {
            throw new IllegalStateException("Unable to set Feeder after async data transfer has been initiated.");
        }
        if (feeder == null) {
            throw new IllegalArgumentException("Feeder argument cannot be null.");
        }
        this.feeder = feeder;
    }

    // ------------------------------------------------- Package Private Methods

    /**
     * Even though this method is public, it's not intended to be called by
     * Developers directly. Please avoid doing so.
     *
     * Captures the request context, tightens the connection's async write
     * queue limit if configured, and kicks off the feeder — deferring the
     * first flush until after the SSL handshake when the request is secure
     * and no SSLEngine exists yet.
     */
    public synchronized void initializeAsynchronousTransfer(final FilterChainContext context,
                                                            final HttpRequestPacket requestPacket)
    throws IOException {

        if (asyncTransferInitiated) {
            throw new IllegalStateException("Async transfer has already been initiated.");
        }
        if (feeder == null) {
            throw new IllegalStateException("No feeder available to perform the transfer.");
        }
        assert (context != null);
        assert (requestPacket != null);

        this.requestPacket = requestPacket;
        this.contentBuilder = HttpContent.builder(requestPacket);
        final Connection c = context.getConnection();
        origMaxPendingBytes = c.getMaxAsyncWriteQueueSize();
        if (configuredMaxPendingBytes != DEFAULT) {
            c.setMaxAsyncWriteQueueSize(configuredMaxPendingBytes);
        }
        this.context = context;
        asyncTransferInitiated = true;
        final Runnable r = new Runnable() {
            @Override
            public void run() {
                try {
                    if (requestPacket.isSecure() &&
                            (getSSLEngine(context.getConnection()) == null)) {
                        flushOnSSLHandshakeComplete();
                    } else {
                        feeder.flush();
                    }
                } catch (IOException ioe) {
                    throwError(ioe);
                }
            }
        };

        // If the current thread is a selector thread, we need to execute
        // the remainder of the task on the worker thread to prevent
        // it from being blocked.
        if (isServiceThread()) {
            c.getTransport().getWorkerThreadPool().execute(r);
        } else {
            r.run();
        }
    }

    // --------------------------------------------------------- Private Methods

    private boolean isServiceThread() {
        return Threads.isService();
    }

    // Registers a one-shot handshake listener that flushes the feeder once
    // the SSL handshake for THIS connection completes, then starts the
    // handshake.
    private void flushOnSSLHandshakeComplete() throws IOException {
        final FilterChain filterChain = context.getFilterChain();
        final int idx = filterChain.indexOfType(SSLFilter.class);
        assert (idx != -1);
        final SSLFilter filter = (SSLFilter) filterChain.get(idx);
        final Connection c = context.getConnection();
        filter.addHandshakeListener(new SSLBaseFilter.HandshakeListener() {
            public void onStart(Connection connection) {
            }

            public void onComplete(Connection connection) {
                if (c.equals(connection)) {
                    filter.removeHandshakeListener(this);
                    try {
                        feeder.flush();
                    } catch (IOException ioe) {
                        throwError(ioe);
                    }
                }
            }
        });
        filter.handshake(context.getConnection(), null);
    }

    // Aborts the in-flight HTTP transaction with the given error.
    private void throwError(final Throwable t) {
        HttpTxContext httpTxContext = HttpTxContext.get(context);
        httpTxContext.abort(t);
    }

    // ----------------------------------------------------------- Inner Classes

    /**
     * Placeholder Body returned from createBody(): yields no bytes; closing
     * it recycles the captured context and clears the transfer state.
     */
    private final class EmptyBody implements Body {

        @Override
        public long getContentLength() {
            return -1;
        }

        @Override
        public long read(final ByteBuffer buffer) throws IOException {
            return 0;
        }

        @Override
        public void close() throws IOException {
            context.completeAndRecycle();
            context = null;
            requestPacket = null;
            contentBuilder = null;
        }
    } // END EmptyBody

    // ---------------------------------------------------------- Nested Classes

    /**
     * Specifies the functionality all Feeders must implement. Typically,
     * developers need not worry about implementing this interface directly.
     * It should be sufficient, for most use-cases, to simply use the {@link NonBlockingFeeder}
     * or {@link SimpleFeeder} implementations.
     */
    public interface Feeder {

        /**
         * This method will be invoked when it's possible to begin feeding
         * data downstream. Implementations of this method must use {@link #feed(Buffer, boolean)}
         * to perform the actual write.
         *
         * @throws IOException if an I/O error occurs.
         */
        void flush() throws IOException;

        /**
         * This method will write the specified {@link Buffer} to the connection.
         * Be aware that this method may block depending if data is being fed
         * faster than it can write. How much data may be queued is dictated
         * by {@link #setMaxPendingBytes(int)}. Once this threshold is exceeded,
         * the method will block until the write queue length drops below the
         * aforementioned threshold.
         *
         * @param buffer the {@link Buffer} to write.
         * @param last flag indicating if this is the last buffer to send.
         * @throws IOException if an I/O error occurs.
         * @throws java.lang.IllegalArgumentException if <code>buffer</code>
         *  is <code>null</code>.
         * @throws java.lang.IllegalStateException if this method is invoked
         *  before asynchronous transferring has been initiated.
         * @see #setMaxPendingBytes(int)
         */
        @SuppressWarnings("UnusedDeclaration")
        void feed(final Buffer buffer, final boolean last) throws IOException;

    } // END Feeder

    /**
     * Base class for {@link Feeder} implementations. This class provides
     * an implementation for the contract defined by the {@link #feed} method.
     */
    public static abstract class BaseFeeder implements Feeder {

        protected final FeedableBodyGenerator feedableBodyGenerator;

        // -------------------------------------------------------- Constructors

        protected BaseFeeder(FeedableBodyGenerator feedableBodyGenerator) {
            this.feedableBodyGenerator = feedableBodyGenerator;
        }

        // --------------------------------------------- Package Private Methods

        /**
         * {@inheritDoc}
         */
        @SuppressWarnings("UnusedDeclaration")
        public final synchronized void feed(final Buffer buffer, final boolean last)
        throws IOException {
            if (buffer == null) {
                throw new NullPointerException("buffer");
            }
            if (!feedableBodyGenerator.asyncTransferInitiated) {
                throw new IllegalStateException("Asynchronous transfer has not been initiated.");
            }
            // May block until the async write queue drains below the limit.
            blockUntilQueueFree(feedableBodyGenerator.context);
            final HttpContent content =
                    feedableBodyGenerator.contentBuilder.content(buffer).last(last).build();
            final CompletionHandler<WriteResult> handler =
                    ((last) ? new LastPacketCompletionHandler() : null);
            feedableBodyGenerator.context.write(content, handler);
        }

        /**
         * This method will block if the async write queue is currently larger
         * than the configured maximum. The amount of time that this method
         * will block is dependent on the write timeout of the transport
         * associated with the specified connection.
*/ private static void blockUntilQueueFree(final FilterChainContext ctx) { HttpContext httpContext = HttpContext.get(ctx); final OutputSink outputSink = httpContext.getOutputSink(); if (!outputSink.canWrite()) { final FutureImpl<Boolean> future = Futures.createSafeFuture(); outputSink.notifyCanWrite(new WriteHandler() { @Override public void onWritePossible() throws Exception { future.result(TRUE); } @Override public void onError(Throwable t) { future.failure(makeIOException(t)); } }); block(ctx, future); } } private static void block(final FilterChainContext ctx, final FutureImpl<Boolean> future) { try { final long writeTimeout = ctx.getConnection().getTransport().getWriteTimeout(MILLISECONDS); if (writeTimeout != -1) { future.get(writeTimeout, MILLISECONDS); } else { future.get(); } } catch (ExecutionException e) { HttpTxContext httpTxContext = HttpTxContext.get(ctx); httpTxContext.abort(e.getCause()); } catch (Exception e) { HttpTxContext httpTxContext = HttpTxContext.get(ctx); httpTxContext.abort(e); } } // ------------------------------------------------------- Inner Classes private final class LastPacketCompletionHandler implements CompletionHandler<WriteResult> { private final CompletionHandler<WriteResult> delegate; private final Connection c; private final int origMaxPendingBytes; // -------------------------------------------------------- Constructors @SuppressWarnings("unchecked") private LastPacketCompletionHandler() { delegate = ((!feedableBodyGenerator.requestPacket.isCommitted()) ? 
feedableBodyGenerator.context.getTransportContext() .getCompletionHandler() : null); c = feedableBodyGenerator.context.getConnection(); origMaxPendingBytes = feedableBodyGenerator.origMaxPendingBytes; } // -------------------------------------- Methods from CompletionHandler @Override public void cancelled() { c.setMaxAsyncWriteQueueSize(origMaxPendingBytes); if (delegate != null) { delegate.cancelled(); } } @Override public void failed(Throwable throwable) { c.setMaxAsyncWriteQueueSize(origMaxPendingBytes); if (delegate != null) { delegate.failed(throwable); } } @Override public void completed(WriteResult result) { c.setMaxAsyncWriteQueueSize(origMaxPendingBytes); if (delegate != null) { delegate.completed(result); } } @Override public void updated(WriteResult result) { if (delegate != null) { delegate.updated(result); } } } // END LastPacketCompletionHandler } // END Feeder /** * Implementations of this class provide the framework to read data from * some source and feed data to the {@link FeedableBodyGenerator} * without blocking. */ @SuppressWarnings("UnusedDeclaration") public static abstract class NonBlockingFeeder extends BaseFeeder { // -------------------------------------------------------- Constructors /** * Constructs the <code>NonBlockingFeeder</code> with the associated * {@link FeedableBodyGenerator}. */ public NonBlockingFeeder(final FeedableBodyGenerator feedableBodyGenerator) { super(feedableBodyGenerator); } // ------------------------------------------------------ Public Methods /** * Notification that it's possible to send another block of data via * {@link #feed(org.glassfish.grizzly.Buffer, boolean)}. * <p/> * It's important to only invoke {@link #feed(Buffer, boolean)} * once per invocation of {@link #canFeed()}. */ public abstract void canFeed() throws IOException; /** * @return <code>true</code> if all data has been fed by this feeder, * otherwise returns <code>false</code>. 
*/ public abstract boolean isDone(); /** * @return <code>true</code> if data is available to be fed, otherwise * returns <code>false</code>. When this method returns <code>false</code>, * the {@link FeedableBodyGenerator} will call {@link #notifyReadyToFeed(ReadyToFeedListener)} * by which this {@link NonBlockingFeeder} implementation may signal data is once * again available to be fed. */ public abstract boolean isReady(); /** * Callback registration to signal the {@link FeedableBodyGenerator} that * data is available once again to continue feeding. Once this listener * has been invoked, the NonBlockingFeeder implementation should no longer maintain * a reference to the listener. */ public abstract void notifyReadyToFeed(final ReadyToFeedListener listener); // ------------------------------------------------- Methods from Feeder /** * {@inheritDoc} */ @Override public synchronized void flush() throws IOException { final HttpContext httpContext = HttpContext.get(feedableBodyGenerator.context); final OutputSink outputSink = httpContext.getOutputSink(); if (isReady()) { final boolean notReady = writeUntilFullOrDone(outputSink); if (!isDone()) { if (notReady) { notifyReadyToFeed(new ReadyToFeedListenerImpl()); } else { // write queue is full, leverage WriteListener to let us know // when it is safe to write again. outputSink.notifyCanWrite(new WriteHandlerImpl()); } } } else { notifyReadyToFeed(new ReadyToFeedListenerImpl()); } } // ----------------------------------------------------- Private Methods private boolean writeUntilFullOrDone(final OutputSink outputSink) throws IOException { while (outputSink.canWrite()) { if (isReady()) { canFeed(); } else { return true; } } return false; } // ------------------------------------------------------- Inner Classes /** * Listener to signal that data is available to be fed. */ public interface ReadyToFeedListener { /** * Data is once again ready to be fed. 
*/ @SuppressWarnings("UnusedDeclaration") void ready(); } // END ReadyToFeedListener private final class WriteHandlerImpl implements WriteHandler { private final Connection c; private final FilterChainContext ctx; // -------------------------------------------------------- Constructors private WriteHandlerImpl() { this.c = feedableBodyGenerator.context.getConnection(); this.ctx = feedableBodyGenerator.context; } // ------------------------------------------ Methods from WriteListener @Override public void onWritePossible() throws Exception { flush(); } @Override public void onError(Throwable t) { if (!Utils.isSpdyConnection(c)) { c.setMaxAsyncWriteQueueSize(feedableBodyGenerator.origMaxPendingBytes); } feedableBodyGenerator.throwError(t); } } // END WriteHandlerImpl private final class ReadyToFeedListenerImpl implements NonBlockingFeeder.ReadyToFeedListener { // ------------------------------------ Methods from ReadyToFeedListener @Override public void ready() { try { flush(); } catch (IOException e) { final Connection c = feedableBodyGenerator.context.getConnection(); if (!Utils.isSpdyConnection(c)) { c.setMaxAsyncWriteQueueSize(feedableBodyGenerator.origMaxPendingBytes); } feedableBodyGenerator.throwError(e); } } } // END ReadToFeedListenerImpl } // END NonBlockingFeeder /** * This simple {@link Feeder} implementation allows the implementation to * feed data in whatever fashion is deemed appropriate. */ @SuppressWarnings("UnusedDeclaration") public abstract static class SimpleFeeder extends BaseFeeder { // -------------------------------------------------------- Constructors /** * Constructs the <code>SimpleFeeder</code> with the associated * {@link FeedableBodyGenerator}. */ public SimpleFeeder(FeedableBodyGenerator feedableBodyGenerator) { super(feedableBodyGenerator); } } // END SimpleFeeder }
/** * @author Manit Singh Kalsi */ package edu.asu.poly.se.staticanalyzer.plugin.views; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Display; import org.eclipse.swt.widgets.Event; import org.eclipse.swt.widgets.Listener; import org.eclipse.swt.widgets.Shell; import org.eclipse.ui.part.*; import org.eclipse.ui.texteditor.IDocumentProvider; import org.eclipse.ui.texteditor.ITextEditor; import edu.asu.poly.se.staticanalyzer.StaticAnalyzer; import edu.asu.poly.se.staticanalyzer.results.Results; import edu.asu.poly.se.staticanalyzer.results.Error; import org.eclipse.jface.viewers.*; import org.eclipse.swt.layout.GridData; import org.eclipse.swt.layout.GridLayout; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.logging.FileHandler; import java.util.logging.Level; import java.util.logging.Logger; import java.util.logging.SimpleFormatter; import org.eclipse.core.resources.IFile; import org.eclipse.core.resources.IMarker; import org.eclipse.core.resources.IProject; import org.eclipse.core.resources.IResource; import org.eclipse.core.resources.IResourceChangeEvent; import org.eclipse.core.resources.IResourceChangeListener; import org.eclipse.core.resources.IResourceDelta; import org.eclipse.core.resources.IWorkspace; import org.eclipse.core.resources.IWorkspaceRoot; import org.eclipse.core.resources.ResourcesPlugin; import org.eclipse.core.runtime.IPath; import org.eclipse.core.runtime.Path; import org.eclipse.jface.action.*; import org.eclipse.jface.text.IDocument; import org.eclipse.jface.text.IRegion; import org.eclipse.jface.window.Window; import org.eclipse.ui.*; import org.eclipse.ui.ide.IDE; import org.eclipse.swt.widgets.Table; import org.eclipse.swt.widgets.TableColumn; import org.eclipse.swt.SWT; import org.eclipse.swt.custom.StyledText; import 
org.eclipse.swt.events.KeyEvent; import org.eclipse.swt.events.KeyListener; import org.eclipse.swt.events.SelectionAdapter; import org.eclipse.swt.events.SelectionEvent; public class PluginView extends ViewPart { public static final String ID = "edu.asu.poly.se.staticanalyzer.plugin.views.PluginView"; private TableViewer viewer; private Action doubleClickAction; private Results results = new Results(); private boolean useRecommendations = false; // private boolean useDevMode = false; private boolean showWarning = false; // private boolean workspaceListenerInit = false; private final static Logger LOGGER = Logger.getLogger(PluginView.class.getName()); private static FileHandler fileTxt; private static SimpleFormatter formatterTxt; public PluginView() { LOGGER.setLevel(Level.INFO); try { fileTxt = new FileHandler("%hedu.asu.poly.staticanalyzer.log"); formatterTxt = new SimpleFormatter(); fileTxt.setFormatter(formatterTxt); LOGGER.addHandler(fileTxt); } catch (SecurityException | IOException e) { e.printStackTrace(); } } public void createPartControl(Composite parent) { GridLayout outerLayout = new GridLayout(1, true); parent.setLayout(outerLayout); Button runBtn = new Button(parent, SWT.PUSH); runBtn.setText("Run"); runBtn.addSelectionListener(new SelectionAdapter() { public void widgetSelected(SelectionEvent e) { Display.getDefault().asyncExec(new Runnable() { public void run() { IEditorPart editorPart = getSite().getWorkbenchWindow().getActivePage().getActiveEditor(); if(editorPart != null) { IFileEditorInput input = (IFileEditorInput)editorPart.getEditorInput(); IFile file = input.getFile(); IProject activeProject = file.getProject(); String[] args = new String[2]; args[0] = "--source="+activeProject.getLocation().toOSString(); args[1] = "--recommendations=yes"; results.getErrors().clear(); results.getWarnings().clear(); results = StaticAnalyzer.runStaticAnalyzer(args, true); removeExistingMarkers(); updateViewer(); results.getErrors().forEach(error -> { 
LOGGER.info("------------------DEFECT ANALYSIS------------------"); LOGGER.info(error.getErrorType() + " " + error.getDesc() + " " + error.getFileName() + " " + Integer.toString(error.getRowNumber()) + " " + Integer.toString(error.getColumnNumber())); LOGGER.info("------------------DEFECT ANALYSIS------------------"); }); } } }); } }); runBtn.setLayoutData(new GridData()); Button reportBtn = new Button(parent, SWT.PUSH); reportBtn.setText("Report"); reportBtn.addSelectionListener(new SelectionAdapter() { public void widgetSelected(SelectionEvent e) { ReportDialog dialog = new ReportDialog(PlatformUI.getWorkbench().getActiveWorkbenchWindow().getShell()); dialog.create(); if (dialog.open() == Window.OK) { LOGGER.info("------------------DEFECT REPORT------------------"); LOGGER.info(dialog.getUid() + " " + dialog.getHowWasDefectFound() + " " + dialog.getHowToReproduceDefect() + " " + dialog.getLineNumber() + " " + dialog.getFileName() + " " + dialog.getDescription()); LOGGER.info("------------------DEFECT REPORT------------------"); } } }); reportBtn.setLayoutData(new GridData()); Button recommendationCheck = new Button(parent, SWT.CHECK); recommendationCheck.setText("Use Recommendations"); recommendationCheck.addSelectionListener(new SelectionAdapter() { public void widgetSelected(SelectionEvent e) { useRecommendations = recommendationCheck.getSelection(); if(useRecommendations) { for(final TableColumn column : viewer.getTable().getColumns()) { if(column.getText().equals("Fix Recommendation")) { if(column.getWidth() == 0) { column.setWidth(800); } } } } else { for(final TableColumn column : viewer.getTable().getColumns()) { if(column.getText().equals("Fix Recommendation")) { if(column.getWidth() == 800) { column.setWidth(0); } } } } } }); recommendationCheck.setLayoutData(new GridData()); // Button devMode = new Button(parent, SWT.CHECK); // devMode.setText("Enable Dev Mode"); // devMode.addSelectionListener(new SelectionAdapter() // { // public void 
widgetSelected(SelectionEvent e) // { // useDevMode = devMode.getSelection(); // } // }); // Button showWarnings = new Button(parent, SWT.CHECK); // showWarnings.setText("Show Warnings"); // showWarnings.addSelectionListener(new SelectionAdapter() // { // public void widgetSelected(SelectionEvent e) // { // showWarning = showWarnings.getSelection(); // } // }); // if(!workspaceListenerInit) { // ResourcesPlugin.getWorkspace().addResourceChangeListener(new IResourceChangeListener() { // @Override // public void resourceChanged(IResourceChangeEvent event) { // workspaceListenerInit = true; // if(event.getType() == IResourceChangeEvent.POST_CHANGE) { // IEditorPart editorPart = getSite().getWorkbenchWindow().getActivePage().getActiveEditor(); // if(editorPart != null && useDevMode) // { // IFileEditorInput input = (IFileEditorInput)editorPart.getEditorInput() ; // IFile file = input.getFile(); // IProject activeProject = file.getProject(); // System.out.println(activeProject.getRawLocation().toOSString()); // String[] args = new String[2]; // args[0] = "--source="+activeProject.getRawLocation().toOSString(); // args[1] = "--recommendations=yes"; // results.getErrors().clear(); // results.getWarnings().clear(); // results = StaticAnalyzer.runStaticAnalyzer(args, true); // updateViewer(); // } // } // } // }); // } createViewer(parent); } private void generateMarkers() { List<Error> errorsShown = results.getErrors(); IEditorReference[] refs = PlatformUI.getWorkbench().getActiveWorkbenchWindow().getActivePage().getEditorReferences(); for(int i=0; i<refs.length; i++) { try { IEditorInput input = refs[i].getEditorInput(); IFile file = ((IFileEditorInput)input).getFile(); String srcFile = file.getLocation().toOSString(); for(int j=0;j<errorsShown.size();j++) { Error err = errorsShown.get(j); if(srcFile.equals(err.getFileName())) { IMarker m; try { m = file.createMarker(IMarker.PROBLEM); m.setAttribute(IMarker.LINE_NUMBER, err.getRowNumber()); m.setAttribute(IMarker.MESSAGE, 
err.getErrorType() + " " + err.getDesc()); m.setAttribute(IMarker.PRIORITY, IMarker.PRIORITY_HIGH); m.setAttribute(IMarker.SEVERITY, IMarker.SEVERITY_ERROR); } catch (Exception e) { e.printStackTrace(); } } } } catch (Exception e) { e.printStackTrace(); } } } private void removeExistingMarkers() { IEditorReference[] refs = PlatformUI.getWorkbench().getActiveWorkbenchWindow().getActivePage().getEditorReferences(); for(int i=0; i<refs.length; i++) { try { IEditorInput input = refs[i].getEditorInput(); IFile file = ((IFileEditorInput)input).getFile(); IMarker[] markers = file.findMarkers(IMarker.PROBLEM, true, IFile.DEPTH_ZERO); for(int j=0; j<markers.length;j++) { markers[j].delete(); } } catch (Exception e) { e.printStackTrace(); } } } private void updateViewer() { viewer.getTable().removeAll(); if ((results != null) && (results.getErrors().size() <= 0)) { Error err = new Error("No error found","There was no error found","",0,0); err.setFixRecommendation("Nothing here"); results.setError(err); viewer.setInput(results.getErrors()); } else { if(showWarning) { List<Error> totalResults = results.getErrors(); results.getWarnings().forEach(warning -> { Error err = new Error(warning.getWarningType(), warning.getDesc(), warning.getFileName(), warning.getRowNumber(), warning.getColumnNumber()); err.setFixRecommendation(warning.getFixRecommendation()); totalResults.add(err); }); Collections.sort(totalResults, (r1,r2) -> r1.getRowNumber() - r2.getRowNumber()); viewer.setInput(totalResults); removeExistingMarkers(); generateMarkers(); } else { Collections.sort(results.getErrors(), (r1,r2) -> r1.getRowNumber() - r2.getRowNumber()); viewer.setInput(results.getErrors()); removeExistingMarkers(); generateMarkers(); } } } private void createViewer(Composite parent) { viewer = new TableViewer(parent, SWT.MULTI | SWT.H_SCROLL | SWT.V_SCROLL | SWT.FULL_SELECTION | SWT.BORDER); createColumns(parent, viewer); final Table table = viewer.getTable(); table.setHeaderVisible(true); 
table.setLinesVisible(true); viewer.setContentProvider(new ArrayContentProvider()); Error err = new Error("Not Initialized","Please open any file in a project and click on run","",0,0); err.setFixRecommendation("Please open any file in a project and click on run"); results.setError(err); viewer.setInput(results.getErrors()); getSite().setSelectionProvider(viewer); makeActions(); hookDoubleClickAction(); GridData gridData = new GridData(); gridData.verticalAlignment = GridData.FILL; gridData.horizontalSpan = 2; gridData.grabExcessHorizontalSpace = true; gridData.grabExcessVerticalSpace = true; gridData.horizontalAlignment = GridData.FILL; viewer.getControl().setLayoutData(gridData); initHandler(); } private void initHandler() { Shell activeShell = PlatformUI.getWorkbench().getActiveWorkbenchWindow().getShell(); activeShell.getDisplay().addFilter(SWT.KeyDown, new Listener() { @Override public void handleEvent(Event e) { System.out.println("Filter-mask: " + e.stateMask); System.out.println("Filter-char: " + e.keyCode); System.out.println(e.character); LOGGER.info("------------------KEY LOGGING------------------"); LOGGER.info("Keycode : " + e.keyCode + " Character : " + e.character + " StateMask : " + e.stateMask); LOGGER.info("------------------KEY LOGGING------------------"); } }); } private void createColumns(final Composite parent, final TableViewer viewer) { String[] titles = { "Error Type", "Description", "Source File Name", "Location(row number)", "Location(column number)", "Fix Recommendation" }; int[] bounds = { 250, 800, 400, 200, 200}; // first column TableViewerColumn col = createTableViewerColumn(titles[0], bounds[0], 0); col.setLabelProvider(new ColumnLabelProvider() { @Override public String getText(Object element) { return ((Error)element).getErrorType(); } }); // second column col = createTableViewerColumn(titles[1], bounds[1], 1); col.setLabelProvider(new ColumnLabelProvider() { @Override public String getText(Object element) { return 
((Error)element).getDesc(); } }); // Third column col = createTableViewerColumn(titles[2], bounds[2], 2); col.setLabelProvider(new ColumnLabelProvider() { @Override public String getText(Object element) { return ((Error)element).getFileName(); } }); // Fourth column col = createTableViewerColumn(titles[3], bounds[3], 3); col.setLabelProvider(new ColumnLabelProvider() { @Override public String getText(Object element) { return String.valueOf(((Error)element).getRowNumber()); } }); // Fifth column col = createTableViewerColumn(titles[4], bounds[4], 4); col.setLabelProvider(new ColumnLabelProvider() { @Override public String getText(Object element) { return String.valueOf(((Error)element).getColumnNumber()); } }); // Sixth column col = createTableViewerColumn(titles[5], 0, 5); col.setLabelProvider(new ColumnLabelProvider() { @Override public String getText(Object element) { return String.valueOf(((Error)element).getFixRecommendation()); } }); } private TableViewerColumn createTableViewerColumn(String title, int bound, final int colNumber) { final TableViewerColumn viewerColumn = new TableViewerColumn(viewer, SWT.NONE); final TableColumn column = viewerColumn.getColumn(); column.setText(title); column.setWidth(bound); column.setResizable(true); column.setMoveable(true); return viewerColumn; } private void makeActions() { doubleClickAction = new Action() { public void run() { ISelection selection = viewer.getSelection(); Object obj = ((IStructuredSelection)selection).getFirstElement(); goToLine(obj); } }; } private void hookDoubleClickAction() { viewer.addDoubleClickListener(new IDoubleClickListener() { public void doubleClick(DoubleClickEvent event) { doubleClickAction.run(); } }); } private void goToLine(Object obj) { Error err = (Error)obj; int lineNumber = err.getRowNumber(); String fileName = err.getFileName(); IWorkspace workspace = ResourcesPlugin.getWorkspace(); IWorkspaceRoot workspaceRoot = workspace.getRoot(); IPath path = new Path(fileName); IFile file = 
workspaceRoot.getFileForLocation(path); try { ITextEditor textEditor = (ITextEditor)IDE.openEditor(PlatformUI.getWorkbench().getActiveWorkbenchWindow().getActivePage(), file, true ); IDocumentProvider provider = textEditor.getDocumentProvider(); IDocument document = provider.getDocument(textEditor.getEditorInput()); IRegion loc = document.getLineInformation(lineNumber - 1); textEditor.selectAndReveal(loc.getOffset(), loc.getLength()); generateMarkers(); } catch (Exception e) { e.printStackTrace(); } } public void setFocus() { viewer.getControl().setFocus(); } }
/* ----------------------------------------------------------------------------- * Rule_cmdAputByte.java * ----------------------------------------------------------------------------- * * Producer : com.parse2.aparse.Parser 2.3 * Produced : Fri Apr 12 10:40:21 MUT 2013 * * ----------------------------------------------------------------------------- */ package com.litecoding.smali2java.parser.cmd.get7put.aput; import java.util.ArrayList; import com.litecoding.smali2java.builder.Visitor; import com.litecoding.smali2java.parser.ParserContext; import com.litecoding.smali2java.parser.Rule; import com.litecoding.smali2java.parser.Terminal_StringValue; import com.litecoding.smali2java.parser.smali.Rule_codeRegister; import com.litecoding.smali2java.parser.smali.Rule_codeRegisterV; import com.litecoding.smali2java.parser.smali.Rule_commentSequence; import com.litecoding.smali2java.parser.smali.Rule_listSeparator; import com.litecoding.smali2java.parser.smali.Rule_optPadding; import com.litecoding.smali2java.parser.smali.Rule_padding; import com.litecoding.smali2java.parser.text.Rule_CRLF; final public class Rule_cmdAputByte extends Rule { private Rule_cmdAputByte(String spelling, ArrayList<Rule> rules) { super(spelling, rules); } public Object accept(Visitor visitor) { return visitor.visit(this); } public static Rule_cmdAputByte parse(ParserContext context) { context.push("cmdAputByte"); boolean parsed = true; int s0 = context.index; ArrayList<Rule> e0 = new ArrayList<Rule>(); Rule rule; parsed = false; if (!parsed) { { ArrayList<Rule> e1 = new ArrayList<Rule>(); int s1 = context.index; parsed = true; if (parsed) { boolean f1 = true; int c1 = 0; for (int i1 = 0; i1 < 1 && f1; i1++) { rule = Rule_optPadding.parse(context); if ((f1 = rule != null)) { e1.add(rule); c1++; } } parsed = c1 == 1; } if (parsed) { boolean f1 = true; int c1 = 0; for (int i1 = 0; i1 < 1 && f1; i1++) { rule = Terminal_StringValue.parse(context, "aput-byte"); if ((f1 = rule != null)) { e1.add(rule); 
c1++; } } parsed = c1 == 1; } if (parsed) { boolean f1 = true; int c1 = 0; for (int i1 = 0; i1 < 1 && f1; i1++) { rule = Rule_padding.parse(context); if ((f1 = rule != null)) { e1.add(rule); c1++; } } parsed = c1 == 1; } if (parsed) { boolean f1 = true; int c1 = 0; for (int i1 = 0; i1 < 1 && f1; i1++) { rule = Rule_codeRegisterV.parse(context); if ((f1 = rule != null)) { e1.add(rule); c1++; } } parsed = c1 == 1; } if (parsed) { boolean f1 = true; int c1 = 0; for (int i1 = 0; i1 < 1 && f1; i1++) { rule = Rule_listSeparator.parse(context); if ((f1 = rule != null)) { e1.add(rule); c1++; } } parsed = c1 == 1; } if (parsed) { boolean f1 = true; int c1 = 0; for (int i1 = 0; i1 < 1 && f1; i1++) { rule = Rule_codeRegister.parse(context); if ((f1 = rule != null)) { e1.add(rule); c1++; } } parsed = c1 == 1; } if (parsed) { boolean f1 = true; int c1 = 0; for (int i1 = 0; i1 < 1 && f1; i1++) { rule = Rule_listSeparator.parse(context); if ((f1 = rule != null)) { e1.add(rule); c1++; } } parsed = c1 == 1; } if (parsed) { boolean f1 = true; int c1 = 0; for (int i1 = 0; i1 < 1 && f1; i1++) { rule = Rule_codeRegister.parse(context); if ((f1 = rule != null)) { e1.add(rule); c1++; } } parsed = c1 == 1; } if (parsed) { boolean f1 = true; int c1 = 0; for (int i1 = 0; i1 < 1 && f1; i1++) { rule = Rule_optPadding.parse(context); if ((f1 = rule != null)) { e1.add(rule); c1++; } } parsed = c1 == 1; } if (parsed) { boolean f1 = true; @SuppressWarnings("unused") int c1 = 0; for (int i1 = 0; i1 < 1 && f1; i1++) { int g1 = context.index; parsed = false; if (!parsed) { { ArrayList<Rule> e2 = new ArrayList<Rule>(); int s2 = context.index; parsed = true; if (parsed) { boolean f2 = true; int c2 = 0; for (int i2 = 0; i2 < 1 && f2; i2++) { rule = Rule_padding.parse(context); if ((f2 = rule != null)) { e2.add(rule); c2++; } } parsed = c2 == 1; } if (parsed) { boolean f2 = true; int c2 = 0; for (int i2 = 0; i2 < 1 && f2; i2++) { rule = Rule_commentSequence.parse(context); if ((f2 = rule != null)) { 
e2.add(rule); c2++; } } parsed = c2 == 1; } if (parsed) e1.addAll(e2); else context.index = s2; } } f1 = context.index > g1; if (parsed) c1++; } parsed = true; } if (parsed) { boolean f1 = true; int c1 = 0; for (int i1 = 0; i1 < 1 && f1; i1++) { rule = Rule_CRLF.parse(context); if ((f1 = rule != null)) { e1.add(rule); c1++; } } parsed = c1 == 1; } if (parsed) e0.addAll(e1); else context.index = s1; } } rule = null; if (parsed) rule = new Rule_cmdAputByte(context.text.substring(s0, context.index), e0); else context.index = s0; context.pop("cmdAputByte", parsed); return (Rule_cmdAputByte)rule; } } /* ----------------------------------------------------------------------------- * eof * ----------------------------------------------------------------------------- */
/*
 * Copyright 2003-2015 Dave Griffith, Bas Leijdekkers
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.siyeh.ig.migration;

import com.intellij.codeInspection.ProblemDescriptor;
import com.intellij.codeInspection.ui.SingleCheckboxOptionsPanel;
import com.intellij.openapi.project.Project;
import com.intellij.psi.*;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtil;
import com.intellij.util.IncorrectOperationException;
import com.siyeh.InspectionGadgetsBundle;
import com.siyeh.ig.BaseInspection;
import com.siyeh.ig.BaseInspectionVisitor;
import com.siyeh.ig.InspectionGadgetsFix;
import com.siyeh.ig.PsiReplacementUtil;
import com.siyeh.ig.psiutils.*;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import java.util.HashMap;
import java.util.Map;

/**
 * Inspection that reports unnecessary boxing of primitive values, i.e.
 * {@code new Integer(i)} constructor calls and {@code Integer.valueOf(i)}-style
 * calls whose argument is already a primitive, in contexts where the boxed
 * wrapper would be auto-unboxed anyway (Java 5+ only, see
 * {@link #shouldInspect}). Offers a quick fix that removes the boxing call.
 */
public class UnnecessaryBoxingInspection extends BaseInspection {

  // Serialized inspection option: when true, only report boxing whose result
  // is immediately consumed as a primitive (i.e. the boxing is superfluous
  // even for the type checker). Public so the settings framework can persist it.
  @SuppressWarnings("PublicField")
  public boolean onlyReportSuperfluouslyBoxed = false;

  // Maps each boxed wrapper class name (e.g. "java.lang.Integer") to the
  // canonical text of its primitive type (e.g. "int"). Populated once below;
  // package-private and never mutated after the static initializer.
  @NonNls static final Map<String, String> boxedPrimitiveMap = new HashMap<String, String>(8);

  static {
    boxedPrimitiveMap.put(CommonClassNames.JAVA_LANG_INTEGER, "int");
    boxedPrimitiveMap.put(CommonClassNames.JAVA_LANG_SHORT, "short");
    boxedPrimitiveMap.put(CommonClassNames.JAVA_LANG_BOOLEAN, "boolean");
    boxedPrimitiveMap.put(CommonClassNames.JAVA_LANG_LONG, "long");
    boxedPrimitiveMap.put(CommonClassNames.JAVA_LANG_BYTE, "byte");
    boxedPrimitiveMap.put(CommonClassNames.JAVA_LANG_FLOAT, "float");
    boxedPrimitiveMap.put(CommonClassNames.JAVA_LANG_DOUBLE, "double");
    boxedPrimitiveMap.put(CommonClassNames.JAVA_LANG_CHARACTER, "char");
  }

  /** Returns the localized display name shown in the inspection settings UI. */
  @Override
  @NotNull
  public String getDisplayName() {
    return InspectionGadgetsBundle.message("unnecessary.boxing.display.name");
  }

  /** This inspection is on by default. */
  @Override
  public boolean isEnabledByDefault() {
    return true;
  }

  /** Options panel with a single checkbox bound to {@link #onlyReportSuperfluouslyBoxed}. */
  @Nullable
  @Override
  public JComponent createOptionsPanel() {
    return new SingleCheckboxOptionsPanel(InspectionGadgetsBundle.message("unnecessary.boxing.superfluous.option"),
                                          this, "onlyReportSuperfluouslyBoxed");
  }

  /** Localized problem message used for every registered error. */
  @Override
  @NotNull
  protected String buildErrorString(Object... infos) {
    return InspectionGadgetsBundle.message("unnecessary.boxing.problem.descriptor");
  }

  /** Always offers the same quick fix: remove the boxing call. */
  @Override
  public InspectionGadgetsFix buildFix(Object... infos) {
    return new UnnecessaryBoxingFix();
  }

  /**
   * Quick fix that replaces a boxing call ({@code new Integer(x)} or
   * {@code Integer.valueOf(x)}) with its single argument, inserting a
   * primitive cast when the argument's type differs from the wrapper's
   * primitive type (e.g. {@code new Short(i)} becomes {@code (short)i}).
   */
  private static class UnnecessaryBoxingFix extends InspectionGadgetsFix {

    @Override
    @NotNull
    public String getFamilyName() {
      return getName();
    }

    @Override
    @NotNull
    public String getName() {
      return InspectionGadgetsBundle.message("unnecessary.boxing.remove.quickfix");
    }

    /**
     * Replaces the highlighted boxing call with its (possibly cast) argument.
     * Bails out silently whenever the PSI no longer matches the expected
     * shape (null types, missing argument list, wrong arity), which can
     * happen if the code changed between highlighting and fix application.
     */
    @Override
    public void doFix(@NotNull Project project, ProblemDescriptor descriptor) throws IncorrectOperationException {
      final PsiCallExpression expression = (PsiCallExpression)descriptor.getPsiElement();
      final PsiType boxedType = expression.getType();
      if (boxedType == null) {
        return;
      }
      final PsiExpressionList argumentList = expression.getArgumentList();
      if (argumentList == null) {
        return;
      }
      final PsiExpression[] arguments = argumentList.getExpressions();
      if (arguments.length != 1) {
        return;
      }
      final PsiExpression unboxedExpression = arguments[0];
      final PsiType unboxedType = unboxedExpression.getType();
      if (unboxedType == null) {
        return;
      }
      final String cast = getCastString(unboxedType, boxedType);
      if (cast == null) {
        return;
      }
      final int precedence = ParenthesesUtils.getPrecedence(unboxedExpression);
      // If a cast is prepended and the argument binds more loosely than a
      // cast, parenthesize the argument to preserve evaluation order.
      if (!cast.isEmpty() && precedence > ParenthesesUtils.TYPE_CAST_PRECEDENCE) {
        PsiReplacementUtil.replaceExpression(expression, cast + '(' + unboxedExpression.getText() + ')');
      }
      else {
        PsiReplacementUtil.replaceExpression(expression, cast + unboxedExpression.getText());
      }
    }

    /**
     * Computes the cast prefix needed when unwrapping {@code fromType} out of
     * boxed {@code toType}: empty string if the types already match,
     * {@code "(prim)"} if a narrowing/widening cast is needed, or null if
     * {@code toType} is not a known wrapper class.
     */
    @Nullable
    private static String getCastString(@NotNull PsiType fromType, @NotNull PsiType toType) {
      final String toTypeText = toType.getCanonicalText();
      final String fromTypeText = fromType.getCanonicalText();
      final String unboxedType = boxedPrimitiveMap.get(toTypeText);
      if (unboxedType == null) {
        return null;
      }
      if (fromTypeText.equals(unboxedType)) {
        return "";
      }
      else {
        return '(' + unboxedType + ')';
      }
    }
  }

  /** Boxing only exists since Java 5, so skip files at lower language levels. */
  @Override
  public boolean shouldInspect(PsiFile file) {
    return PsiUtil.isLanguageLevel5OrHigher(file);
  }

  @Override
  public BaseInspectionVisitor buildVisitor() {
    return new UnnecessaryBoxingVisitor();
  }

  /**
   * Visitor that finds the two boxing shapes: wrapper constructor calls
   * ({@code new Integer(i)}) and static {@code valueOf} calls on a wrapper
   * class with a primitive argument ({@code Integer.valueOf(i)}).
   */
  private class UnnecessaryBoxingVisitor extends BaseInspectionVisitor {

    /**
     * Reports {@code new Wrapper(primitive)} when the constructed type is a
     * known wrapper, the single constructor parameter is the matching
     * primitive (this excludes e.g. {@code new Integer(String)}), and the
     * surrounding context tolerates the unboxed value.
     */
    @Override
    public void visitNewExpression(@NotNull PsiNewExpression expression) {
      super.visitNewExpression(expression);
      final PsiExpressionList argumentList = expression.getArgumentList();
      if (argumentList == null) {
        return;
      }
      final PsiType constructorType = expression.getType();
      if (constructorType == null) {
        return;
      }
      final String constructorTypeText = constructorType.getCanonicalText();
      if (!boxedPrimitiveMap.containsKey(constructorTypeText)) {
        return;
      }
      final PsiMethod constructor = expression.resolveConstructor();
      if (constructor == null) {
        return;
      }
      final PsiParameterList parameterList = constructor.getParameterList();
      if (parameterList.getParametersCount() != 1) {
        return;
      }
      final PsiParameter[] parameters = parameterList.getParameters();
      final PsiParameter parameter = parameters[0];
      final PsiType parameterType = parameter.getType();
      final String parameterTypeText = parameterType.getCanonicalText();
      final String boxableConstructorType = boxedPrimitiveMap.get(constructorTypeText);
      // Only the primitive-taking constructor is boxing; String-taking
      // constructors (e.g. new Integer("1")) are parsing, not boxing.
      if (!boxableConstructorType.equals(parameterTypeText)) {
        return;
      }
      if (!canBeUnboxed(expression)) {
        return;
      }
      if (onlyReportSuperfluouslyBoxed) {
        // Only report when the expected type at this position is primitive,
        // i.e. the wrapper would immediately be unboxed again.
        final PsiType expectedType = ExpectedTypeUtils.findExpectedType(expression, false, true);
        if (!(expectedType instanceof PsiPrimitiveType)) {
          return;
        }
      }
      registerError(expression);
    }

    /**
     * Reports {@code Wrapper.valueOf(primitive)} calls. Matches on the
     * reference name "valueOf" with a wrapper-class qualifier and a single
     * primitive-typed argument, then applies the same context check as for
     * constructors.
     */
    @Override
    public void visitMethodCallExpression(PsiMethodCallExpression expression) {
      super.visitMethodCallExpression(expression);
      final PsiExpressionList argumentList = expression.getArgumentList();
      final PsiExpression[] arguments = argumentList.getExpressions();
      if (arguments.length != 1) {
        return;
      }
      // valueOf(String) overloads are conversions, not boxing — skip them.
      if (!(arguments[0].getType() instanceof PsiPrimitiveType)) {
        return;
      }
      final PsiReferenceExpression methodExpression = expression.getMethodExpression();
      @NonNls final String referenceName = methodExpression.getReferenceName();
      if (!"valueOf".equals(referenceName)) {
        return;
      }
      final PsiExpression qualifierExpression = methodExpression.getQualifierExpression();
      if (!(qualifierExpression instanceof PsiReferenceExpression)) {
        return;
      }
      final PsiReferenceExpression referenceExpression = (PsiReferenceExpression)qualifierExpression;
      final String canonicalText = referenceExpression.getCanonicalText();
      if (!boxedPrimitiveMap.containsKey(canonicalText)) {
        return;
      }
      if (!canBeUnboxed(expression)) {
        return;
      }
      registerError(expression);
    }

    /**
     * Determines whether removing the boxing at this position preserves
     * semantics, by inspecting the (parenthesis-stripped) parent:
     * <ul>
     * <li>expression statement or qualifier of a reference — the wrapper
     *     object itself is used, so removal is unsafe;</li>
     * <li>cast to a type parameter — the primitive would not satisfy it;</li>
     * <li>conditional — the other branch must be primitive, otherwise the
     *     ternary's type (and null behavior) could change;</li>
     * <li>binary expression — delegate to
     *     {@link #canBinaryExpressionBeUnboxed};</li>
     * <li>argument of a call — unboxing must not change overload resolution
     *     (checked via {@link #isSameMethodCalledWithoutBoxing}).</li>
     * </ul>
     */
    private boolean canBeUnboxed(PsiCallExpression expression) {
      PsiElement parent = expression.getParent();
      while (parent instanceof PsiParenthesizedExpression) {
        parent = parent.getParent();
      }
      if (parent instanceof PsiExpressionStatement || parent instanceof PsiReferenceExpression) {
        return false;
      }
      else if (parent instanceof PsiTypeCastExpression) {
        final PsiTypeCastExpression castExpression = (PsiTypeCastExpression)parent;
        if (TypeUtils.isTypeParameter(castExpression.getType())) {
          return false;
        }
      }
      else if (parent instanceof PsiConditionalExpression) {
        final PsiConditionalExpression conditionalExpression = (PsiConditionalExpression)parent;
        final PsiExpression thenExpression = conditionalExpression.getThenExpression();
        final PsiExpression elseExpression = conditionalExpression.getElseExpression();
        if (elseExpression == null || thenExpression == null) {
          return false;
        }
        if (PsiTreeUtil.isAncestor(thenExpression, expression, false)) {
          final PsiType type = elseExpression.getType();
          return type instanceof PsiPrimitiveType;
        }
        else if (PsiTreeUtil.isAncestor(elseExpression, expression, false)) {
          final PsiType type = thenExpression.getType();
          return type instanceof PsiPrimitiveType;
        }
        else {
          // Boxing sits in the condition; unboxing a boolean there is fine.
          return true;
        }
      }
      else if (parent instanceof PsiBinaryExpression) {
        final PsiBinaryExpression binaryExpression = (PsiBinaryExpression)parent;
        final PsiExpression lhs = binaryExpression.getLOperand();
        final PsiExpression rhs = binaryExpression.getROperand();
        if (rhs == null) {
          return false;
        }
        // Check the boxing expression against the *other* operand.
        return PsiTreeUtil.isAncestor(rhs, expression, false)
               ? canBinaryExpressionBeUnboxed(lhs, rhs)
               : canBinaryExpressionBeUnboxed(rhs, lhs);
      }
      final PsiCallExpression containingMethodCallExpression = getParentMethodCallExpression(expression);
      return containingMethodCallExpression == null ||
             isSameMethodCalledWithoutBoxing(containingMethodCallExpression, expression);
    }

    /**
     * Checks whether the boxed operand {@code rhs} may be unboxed next to
     * {@code lhs} in a binary expression: {@code lhs} must be primitive (or
     * annotated not-null, so its own auto-unboxing cannot NPE), and the
     * unboxed type of {@code rhs} must be assignable from {@code lhs}'s type.
     */
    private boolean canBinaryExpressionBeUnboxed(PsiExpression lhs, PsiExpression rhs) {
      final PsiType rhsType = rhs.getType();
      if (rhsType == null) {
        return false;
      }
      final PsiType lhsType = lhs.getType();
      if (lhsType == null) {
        return false;
      }
      if (!(lhsType instanceof PsiPrimitiveType) && !ExpressionUtils.isAnnotatedNotNull(lhs)) {
        return false;
      }
      final PsiPrimitiveType unboxedType = PsiPrimitiveType.getUnboxedType(rhsType);
      return unboxedType != null && unboxedType.isAssignableFrom(lhsType);
    }

    /**
     * Walks up through parentheses and argument lists to the enclosing call
     * expression, or returns null when the expression is not (directly) a
     * call argument.
     */
    @Nullable
    private PsiCallExpression getParentMethodCallExpression(@NotNull PsiElement expression) {
      final PsiElement parent = expression.getParent();
      if (parent instanceof PsiParenthesizedExpression || parent instanceof PsiExpressionList) {
        return getParentMethodCallExpression(parent);
      }
      else if (parent instanceof PsiCallExpression) {
        return (PsiCallExpression)parent;
      }
      else {
        return null;
      }
    }

    /**
     * Verifies that replacing the boxing argument with its unboxed form in
     * the enclosing call still resolves to the same method — i.e. unboxing
     * does not silently switch to a different overload. Compared by PSI
     * identity, which is how resolve results are normally compared.
     */
    private boolean isSameMethodCalledWithoutBoxing(@NotNull PsiCallExpression methodCallExpression,
                                                    @NotNull PsiCallExpression boxingExpression) {
      final PsiExpressionList boxedArgumentList = boxingExpression.getArgumentList();
      if (boxedArgumentList == null) {
        return false;
      }
      final PsiExpression[] arguments = boxedArgumentList.getExpressions();
      if (arguments.length != 1) {
        return false;
      }
      final PsiExpression unboxedExpression = arguments[0];
      final PsiMethod originalMethod = methodCallExpression.resolveMethod();
      final PsiMethod otherMethod =
        MethodCallUtils.findMethodWithReplacedArgument(methodCallExpression, boxingExpression, unboxedExpression);
      return originalMethod == otherMethod;
    }
  }
}
package com.sothree.slidinguppanel; import android.annotation.SuppressLint; import android.content.Context; import android.content.res.TypedArray; import android.graphics.Canvas; import android.graphics.Paint; import android.graphics.PixelFormat; import android.graphics.Rect; import android.graphics.drawable.Drawable; import android.os.Build; import android.os.Parcel; import android.os.Parcelable; import android.support.v4.view.MotionEventCompat; import android.support.v4.view.ViewCompat; import android.util.AttributeSet; import android.view.Gravity; import android.view.MotionEvent; import android.view.View; import android.view.ViewGroup; import android.view.accessibility.AccessibilityEvent; import com.nineoldandroids.view.animation.AnimatorProxy; import com.sothree.slidinguppanel.library.R; public class SlidingUpPanelLayout extends ViewGroup { private static final String TAG = SlidingUpPanelLayout.class.getSimpleName(); /** * Default peeking out panel height */ private static final int DEFAULT_PANEL_HEIGHT = 68; // dp; /** * Default anchor point height */ private static final float DEFAULT_ANCHOR_POINT = 1.0f; // In relative % /** * Default initial state for the component */ private static PanelState DEFAULT_SLIDE_STATE = PanelState.COLLAPSED; /** * Default height of the shadow above the peeking out panel */ private static final int DEFAULT_SHADOW_HEIGHT = 4; // dp; /** * If no fade color is given by default it will fade to 80% gray. 
*/ private static final int DEFAULT_FADE_COLOR = 0x99000000; /** * Default Minimum velocity that will be detected as a fling */ private static final int DEFAULT_MIN_FLING_VELOCITY = 400; // dips per second /** * Default is set to false because that is how it was written */ private static final boolean DEFAULT_OVERLAY_FLAG = false; /** * Default attributes for layout */ private static final int[] DEFAULT_ATTRS = new int[] { android.R.attr.gravity }; /** * Minimum velocity that will be detected as a fling */ private int mMinFlingVelocity = DEFAULT_MIN_FLING_VELOCITY; /** * The fade color used for the panel covered by the slider. 0 = no fading. */ private int mCoveredFadeColor = DEFAULT_FADE_COLOR; /** * Default paralax length of the main view */ private static final int DEFAULT_PARALAX_OFFSET = 0; /** * The paint used to dim the main layout when sliding */ private final Paint mCoveredFadePaint = new Paint(); /** * Drawable used to draw the shadow between panes. */ private final Drawable mShadowDrawable; /** * The size of the overhang in pixels. */ private int mPanelHeight = -1; /** * The size of the shadow in pixels. */ private int mShadowHeight = -1; /** * Paralax offset */ private int mParallaxOffset = -1; /** * True if the collapsed panel should be dragged up. */ private boolean mIsSlidingUp; /** * Panel overlays the windows instead of putting it underneath it. */ private boolean mOverlayContent = DEFAULT_OVERLAY_FLAG; /** * If provided, the panel can be dragged by only this view. Otherwise, the entire panel can be * used for dragging. */ private View mDragView; /** * If provided, the panel can be dragged by only this view. Otherwise, the entire panel can be * used for dragging. */ private int mDragViewResId = -1; /** * The child view that can slide, if any. */ private View mSlideableView; /** * The main view */ private View mMainView; /** * Current state of the slideable view. 
*/ public enum PanelState { EXPANDED, COLLAPSED, ANCHORED, HIDDEN, DRAGGING } private PanelState mSlideState = PanelState.COLLAPSED; /** * How far the panel is offset from its expanded position. * range [0, 1] where 0 = collapsed, 1 = expanded. */ private float mSlideOffset; /** * How far in pixels the slideable panel may move. */ private int mSlideRange; /** * A panel view is locked into internal scrolling or another condition that * is preventing a drag. */ private boolean mIsUnableToDrag; /** * Flag indicating that sliding feature is enabled\disabled */ private boolean mIsSlidingEnabled; /** * Flag indicating if a drag view can have its own touch events. If set * to true, a drag view can scroll horizontally and have its own click listener. * * Default is set to false. */ private boolean mIsUsingDragViewTouchEvents; private float mInitialMotionX; private float mInitialMotionY; private float mAnchorPoint = 1.f; private PanelSlideListener mPanelSlideListener; private final ViewDragHelper mDragHelper; /** * Stores whether or not the pane was expanded the last time it was slideable. * If expand/collapse operations are invoked this state is modified. Used by * instance state save/restore. */ private boolean mFirstLayout = true; private final Rect mTmpRect = new Rect(); /** * Listener for monitoring events about sliding panes. */ public interface PanelSlideListener { /** * Called when a sliding pane's position changes. * @param panel The child view that was moved * @param slideOffset The new offset of this sliding pane within its range, from 0-1 */ public void onPanelSlide(View panel, float slideOffset); /** * Called when a sliding panel becomes slid completely collapsed. * @param panel The child view that was slid to an collapsed position */ public void onPanelCollapsed(View panel); /** * Called when a sliding panel becomes slid completely expanded. 
* @param panel The child view that was slid to a expanded position */ public void onPanelExpanded(View panel); /** * Called when a sliding panel becomes anchored. * @param panel The child view that was slid to a anchored position */ public void onPanelAnchored(View panel); /** * Called when a sliding panel becomes completely hidden. * @param panel The child view that was slid to a hidden position */ public void onPanelHidden(View panel); } /** * No-op stubs for {@link PanelSlideListener}. If you only want to implement a subset * of the listener methods you can extend this instead of implement the full interface. */ public static class SimplePanelSlideListener implements PanelSlideListener { @Override public void onPanelSlide(View panel, float slideOffset) { } @Override public void onPanelCollapsed(View panel) { } @Override public void onPanelExpanded(View panel) { } @Override public void onPanelAnchored(View panel) { } @Override public void onPanelHidden(View panel) { } } public SlidingUpPanelLayout(Context context) { this(context, null); } public SlidingUpPanelLayout(Context context, AttributeSet attrs) { this(context, attrs, 0); } public SlidingUpPanelLayout(Context context, AttributeSet attrs, int defStyle) { super(context, attrs, defStyle); if(isInEditMode()) { mShadowDrawable = null; mDragHelper = null; return; } if (attrs != null) { TypedArray defAttrs = context.obtainStyledAttributes(attrs, DEFAULT_ATTRS); if (defAttrs != null) { int gravity = defAttrs.getInt(0, Gravity.NO_GRAVITY); if (gravity != Gravity.TOP && gravity != Gravity.BOTTOM) { throw new IllegalArgumentException("gravity must be set to either top or bottom"); } mIsSlidingUp = gravity == Gravity.BOTTOM; } defAttrs.recycle(); TypedArray ta = context.obtainStyledAttributes(attrs, R.styleable.SlidingUpPanelLayout); if (ta != null) { mPanelHeight = ta.getDimensionPixelSize(R.styleable.SlidingUpPanelLayout_panelHeight, -1); mShadowHeight = 
ta.getDimensionPixelSize(R.styleable.SlidingUpPanelLayout_shadowHeight, -1); mParallaxOffset = ta.getDimensionPixelSize(R.styleable.SlidingUpPanelLayout_paralaxOffset, -1); mMinFlingVelocity = ta.getInt(R.styleable.SlidingUpPanelLayout_flingVelocity, DEFAULT_MIN_FLING_VELOCITY); mCoveredFadeColor = ta.getColor(R.styleable.SlidingUpPanelLayout_fadeColor, DEFAULT_FADE_COLOR); mDragViewResId = ta.getResourceId(R.styleable.SlidingUpPanelLayout_dragView, -1); mOverlayContent = ta.getBoolean(R.styleable.SlidingUpPanelLayout_overlay,DEFAULT_OVERLAY_FLAG); mAnchorPoint = ta.getFloat(R.styleable.SlidingUpPanelLayout_anchorPoint, DEFAULT_ANCHOR_POINT); mSlideState = PanelState.values()[ta.getInt(R.styleable.SlidingUpPanelLayout_initialState, DEFAULT_SLIDE_STATE.ordinal())]; } ta.recycle(); } final float density = context.getResources().getDisplayMetrics().density; if (mPanelHeight == -1) { mPanelHeight = (int) (DEFAULT_PANEL_HEIGHT * density + 0.5f); } if (mShadowHeight == -1) { mShadowHeight = (int) (DEFAULT_SHADOW_HEIGHT * density + 0.5f); } if (mParallaxOffset == -1) { mParallaxOffset = (int) (DEFAULT_PARALAX_OFFSET * density); } // If the shadow height is zero, don't show the shadow if (mShadowHeight > 0) { if (mIsSlidingUp) { mShadowDrawable = getResources().getDrawable(R.drawable.above_shadow); } else { mShadowDrawable = getResources().getDrawable(R.drawable.below_shadow); } } else { mShadowDrawable = null; } setWillNotDraw(false); mDragHelper = ViewDragHelper.create(this, 0.5f, new DragHelperCallback()); mDragHelper.setMinVelocity(mMinFlingVelocity * density); mIsSlidingEnabled = true; } /** * Set the Drag View after the view is inflated */ @Override protected void onFinishInflate() { super.onFinishInflate(); if (mDragViewResId != -1) { setDragView(findViewById(mDragViewResId)); } } /** * Set the color used to fade the pane covered by the sliding pane out when the pane * will become fully covered in the expanded state. 
* * @param color An ARGB-packed color value */ public void setCoveredFadeColor(int color) { mCoveredFadeColor = color; invalidate(); } /** * @return The ARGB-packed color value used to fade the fixed pane */ public int getCoveredFadeColor() { return mCoveredFadeColor; } /** * Set sliding enabled flag * @param enabled flag value */ public void setSlidingEnabled(boolean enabled) { mIsSlidingEnabled = enabled; } public boolean isSlidingEnabled() { return mIsSlidingEnabled && mSlideableView != null; } /** * Set the collapsed panel height in pixels * * @param val A height in pixels */ public void setPanelHeight(int val) { mPanelHeight = val; requestLayout(); } /** * @return The current collapsed panel height */ public int getPanelHeight() { return mPanelHeight; } /** * @return The current paralax offset */ public int getCurrentParalaxOffset() { // Clamp slide offset at zero for parallax computation; int offset = (int)(mParallaxOffset * Math.max(mSlideOffset, 0)); return mIsSlidingUp ? -offset : offset; } /** * Set parallax offset for the panel * * @param val A height in pixels */ public void setParalaxOffset(int val) { mParallaxOffset = val; requestLayout(); } /** * Sets the panel slide listener * @param listener */ public void setPanelSlideListener(PanelSlideListener listener) { mPanelSlideListener = listener; } /** * Set the draggable view portion. Use to null, to allow the whole panel to be draggable * * @param dragView A view that will be used to drag the panel. */ public void setDragView(View dragView) { if (mDragView != null) { mDragView.setOnClickListener(null); } mDragView = dragView; if (mDragView != null) { mDragView.setClickable(true); mDragView.setFocusable(false); mDragView.setFocusableInTouchMode(false); } } /** * Set an anchor point where the panel can stop during sliding * * @param anchorPoint A value between 0 and 1, determining the position of the anchor point * starting from the top of the layout. 
*/ public void setAnchorPoint(float anchorPoint) { if (anchorPoint > 0 && anchorPoint <= 1) { mAnchorPoint = anchorPoint; } } /** * Gets the currently set anchor point * * @return the currently set anchor point */ public float getAnchorPoint() { return mAnchorPoint; } /** * Sets whether or not the panel overlays the content * @param overlayed */ public void setOverlayed(boolean overlayed) { mOverlayContent = overlayed; } /** * Check if the panel is set as an overlay. */ public boolean isOverlayed() { return mOverlayContent; } void dispatchOnPanelSlide(View panel) { if (mPanelSlideListener != null) { mPanelSlideListener.onPanelSlide(panel, mSlideOffset); } } void dispatchOnPanelExpanded(View panel) { if (mPanelSlideListener != null) { mPanelSlideListener.onPanelExpanded(panel); } sendAccessibilityEvent(AccessibilityEvent.TYPE_WINDOW_STATE_CHANGED); } void dispatchOnPanelCollapsed(View panel) { if (mPanelSlideListener != null) { mPanelSlideListener.onPanelCollapsed(panel); } sendAccessibilityEvent(AccessibilityEvent.TYPE_WINDOW_STATE_CHANGED); } void dispatchOnPanelAnchored(View panel) { if (mPanelSlideListener != null) { mPanelSlideListener.onPanelAnchored(panel); } sendAccessibilityEvent(AccessibilityEvent.TYPE_WINDOW_STATE_CHANGED); } void dispatchOnPanelHidden(View panel) { if (mPanelSlideListener != null) { mPanelSlideListener.onPanelHidden(panel); } sendAccessibilityEvent(AccessibilityEvent.TYPE_WINDOW_STATE_CHANGED); } void updateObscuredViewVisibility() { if (getChildCount() == 0) { return; } final int leftBound = getPaddingLeft(); final int rightBound = getWidth() - getPaddingRight(); final int topBound = getPaddingTop(); final int bottomBound = getHeight() - getPaddingBottom(); final int left; final int right; final int top; final int bottom; if (mSlideableView != null && hasOpaqueBackground(mSlideableView)) { left = mSlideableView.getLeft(); right = mSlideableView.getRight(); top = mSlideableView.getTop(); bottom = mSlideableView.getBottom(); } else { left 
= right = top = bottom = 0; } View child = getChildAt(0); final int clampedChildLeft = Math.max(leftBound, child.getLeft()); final int clampedChildTop = Math.max(topBound, child.getTop()); final int clampedChildRight = Math.min(rightBound, child.getRight()); final int clampedChildBottom = Math.min(bottomBound, child.getBottom()); final int vis; if (clampedChildLeft >= left && clampedChildTop >= top && clampedChildRight <= right && clampedChildBottom <= bottom) { vis = INVISIBLE; } else { vis = VISIBLE; } child.setVisibility(vis); } void setAllChildrenVisible() { for (int i = 0, childCount = getChildCount(); i < childCount; i++) { final View child = getChildAt(i); if (child.getVisibility() == INVISIBLE) { child.setVisibility(VISIBLE); } } } private static boolean hasOpaqueBackground(View v) { final Drawable bg = v.getBackground(); return bg != null && bg.getOpacity() == PixelFormat.OPAQUE; } @Override protected void onAttachedToWindow() { super.onAttachedToWindow(); mFirstLayout = true; } @Override protected void onDetachedFromWindow() { super.onDetachedFromWindow(); mFirstLayout = true; } @Override protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { final int widthMode = MeasureSpec.getMode(widthMeasureSpec); final int widthSize = MeasureSpec.getSize(widthMeasureSpec); final int heightMode = MeasureSpec.getMode(heightMeasureSpec); final int heightSize = MeasureSpec.getSize(heightMeasureSpec); if (widthMode != MeasureSpec.EXACTLY) { throw new IllegalStateException("Width must have an exact value or MATCH_PARENT"); } else if (heightMode != MeasureSpec.EXACTLY) { throw new IllegalStateException("Height must have an exact value or MATCH_PARENT"); } final int childCount = getChildCount(); if (childCount != 2) { throw new IllegalStateException("Sliding up panel layout must have exactly 2 children!"); } mMainView = getChildAt(0); mSlideableView = getChildAt(1); if (mDragView == null) { setDragView(mSlideableView); } // If the sliding panel is not 
visible, then put the whole view in the hidden state if (mSlideableView.getVisibility() == GONE) { mSlideState = PanelState.HIDDEN; } int layoutHeight = heightSize - getPaddingTop() - getPaddingBottom(); // First pass. Measure based on child LayoutParams width/height. for (int i = 0; i < childCount; i++) { final View child = getChildAt(i); final LayoutParams lp = (LayoutParams) child.getLayoutParams(); // We always measure the sliding panel in order to know it's height (needed for show panel) if (child.getVisibility() == GONE && i == 0) { continue; } int height = layoutHeight; if (child == mMainView && !mOverlayContent && mSlideState != PanelState.HIDDEN) { height -= mPanelHeight; } int childWidthSpec; if (lp.width == LayoutParams.WRAP_CONTENT) { childWidthSpec = MeasureSpec.makeMeasureSpec(widthSize, MeasureSpec.AT_MOST); } else if (lp.width == LayoutParams.MATCH_PARENT) { childWidthSpec = MeasureSpec.makeMeasureSpec(widthSize, MeasureSpec.EXACTLY); } else { childWidthSpec = MeasureSpec.makeMeasureSpec(lp.width, MeasureSpec.EXACTLY); } int childHeightSpec; if (lp.height == LayoutParams.WRAP_CONTENT) { childHeightSpec = MeasureSpec.makeMeasureSpec(height, MeasureSpec.AT_MOST); } else if (lp.height == LayoutParams.MATCH_PARENT) { childHeightSpec = MeasureSpec.makeMeasureSpec(height, MeasureSpec.EXACTLY); } else { childHeightSpec = MeasureSpec.makeMeasureSpec(lp.height, MeasureSpec.EXACTLY); } child.measure(childWidthSpec, childHeightSpec); if (child == mSlideableView) { mSlideRange = mSlideableView.getMeasuredHeight() - mPanelHeight; } } setMeasuredDimension(widthSize, heightSize); } @Override protected void onLayout(boolean changed, int l, int t, int r, int b) { final int paddingLeft = getPaddingLeft(); final int paddingTop = getPaddingTop(); final int childCount = getChildCount(); if (mFirstLayout) { switch (mSlideState) { case EXPANDED: mSlideOffset = 1.0f; break; case ANCHORED: mSlideOffset = mAnchorPoint; break; case HIDDEN: int newTop = 
computePanelTopPosition(0.0f) + (mIsSlidingUp ? +mPanelHeight : -mPanelHeight); mSlideOffset = computeSlideOffset(newTop); break; default: mSlideOffset = 0.f; break; } } for (int i = 0; i < childCount; i++) { final View child = getChildAt(i); // Always layout the sliding view on the first layout if (child.getVisibility() == GONE && (i == 0 || mFirstLayout)) { continue; } final int childHeight = child.getMeasuredHeight(); int childTop = paddingTop; if (child == mSlideableView) { childTop = computePanelTopPosition(mSlideOffset); } if (!mIsSlidingUp) { if (child == mMainView && !mOverlayContent) { childTop = computePanelTopPosition(mSlideOffset) + mSlideableView.getMeasuredHeight(); } } final int childBottom = childTop + childHeight; final int childLeft = paddingLeft; final int childRight = childLeft + child.getMeasuredWidth(); child.layout(childLeft, childTop, childRight, childBottom); } if (mFirstLayout) { updateObscuredViewVisibility(); } mFirstLayout = false; } @Override protected void onSizeChanged(int w, int h, int oldw, int oldh) { super.onSizeChanged(w, h, oldw, oldh); // Recalculate sliding panes and their details if (h != oldh) { mFirstLayout = true; } } /** * Set if the drag view can have its own touch events. If set * to true, a drag view can scroll horizontally and have its own click listener. * * Default is set to false. 
*/ public void setEnableDragViewTouchEvents(boolean enabled) { mIsUsingDragViewTouchEvents = enabled; } @Override public void setEnabled(boolean enabled) { if (!enabled) { collapsePanel(); } super.setEnabled(enabled); } @Override public boolean onInterceptTouchEvent(MotionEvent ev) { final int action = MotionEventCompat.getActionMasked(ev); if (!isEnabled() || !mIsSlidingEnabled || (mIsUnableToDrag && action != MotionEvent.ACTION_DOWN)) { mDragHelper.cancel(); return super.onInterceptTouchEvent(ev); } if (action == MotionEvent.ACTION_CANCEL || action == MotionEvent.ACTION_UP) { mDragHelper.cancel(); return false; } final float x = ev.getX(); final float y = ev.getY(); switch (action) { case MotionEvent.ACTION_DOWN: { mIsUnableToDrag = false; mInitialMotionX = x; mInitialMotionY = y; break; } case MotionEvent.ACTION_MOVE: { final float adx = Math.abs(x - mInitialMotionX); final float ady = Math.abs(y - mInitialMotionY); final int dragSlop = mDragHelper.getTouchSlop(); // Handle any horizontal scrolling on the drag view. 
if (mIsUsingDragViewTouchEvents && adx > dragSlop && ady < dragSlop) { return super.onInterceptTouchEvent(ev); } if ((ady > dragSlop && adx > ady) || !isDragViewUnder((int)mInitialMotionX, (int)mInitialMotionY)) { mDragHelper.cancel(); mIsUnableToDrag = true; return false; } break; } } return mDragHelper.shouldInterceptTouchEvent(ev); } @Override public boolean onTouchEvent(MotionEvent ev) { if (!isSlidingEnabled()) { return super.onTouchEvent(ev); } mDragHelper.processTouchEvent(ev); return true; } private boolean isDragViewUnder(int x, int y) { if (mDragView == null) return false; int[] viewLocation = new int[2]; mDragView.getLocationOnScreen(viewLocation); int[] parentLocation = new int[2]; this.getLocationOnScreen(parentLocation); int screenX = parentLocation[0] + x; int screenY = parentLocation[1] + y; return screenX >= viewLocation[0] && screenX < viewLocation[0] + mDragView.getWidth() && screenY >= viewLocation[1] && screenY < viewLocation[1] + mDragView.getHeight(); } private boolean expandPanel(View pane, int initialVelocity, float mSlideOffset) { return mFirstLayout || smoothSlideTo(mSlideOffset, initialVelocity); } private boolean collapsePanel(View pane, int initialVelocity) { return mFirstLayout || smoothSlideTo(0.0f, initialVelocity); } /* * Computes the top position of the panel based on the slide offset. */ private int computePanelTopPosition(float slideOffset) { int slidingViewHeight = mSlideableView != null ? mSlideableView.getMeasuredHeight() : 0; int slidePixelOffset = (int) (slideOffset * mSlideRange); // Compute the top of the panel if its collapsed return mIsSlidingUp ? 
getMeasuredHeight() - getPaddingBottom() - mPanelHeight - slidePixelOffset : getPaddingTop() - slidingViewHeight + mPanelHeight + slidePixelOffset; } /* * Computes the slide offset based on the top position of the panel */ private float computeSlideOffset(int topPosition) { // Compute the panel top position if the panel is collapsed (offset 0) final int topBoundCollapsed = computePanelTopPosition(0); // Determine the new slide offset based on the collapsed top position and the new required // top position return (mIsSlidingUp ? (float) (topBoundCollapsed - topPosition) / mSlideRange : (float) (topPosition - topBoundCollapsed) / mSlideRange); } /** * Collapse the sliding pane if it is currently slideable. If first layout * has already completed this will animate. * * @return true if the pane was slideable and is now collapsed/in the process of collapsing */ public boolean collapsePanel() { if (mFirstLayout) { mSlideState = PanelState.COLLAPSED; return true; } else { if (mSlideState == PanelState.HIDDEN || mSlideState == PanelState.COLLAPSED) return false; return collapsePanel(mSlideableView, 0); } } /** * Expand the sliding pane if it is currently slideable. * * @return true if the pane was slideable and is now expanded/in the process of expading */ public boolean expandPanel() { if (mFirstLayout) { mSlideState = PanelState.EXPANDED; return true; } else { return expandPanel(1.0f); } } /** * Expand the sliding pane to the anchor point if it is currently slideable. * * @return true if the pane was slideable and is now expanded/in the process of expading */ public boolean anchorPanel() { if (mFirstLayout) { mSlideState = PanelState.ANCHORED; return true; } else { return expandPanel(mAnchorPoint); } } /** * Partially expand the sliding panel up to a specific offset * * @param slideOffset Value between 0 and 1, where 0 is completely expanded. 
 * @return true if the pane was slideable and is now expanded/in the process of expanding
 */
public boolean expandPanel(float slideOffset) {
    // Already fully expanded (or no sliding child) -> nothing to do.
    if (mSlideableView == null || (mSlideState == PanelState.EXPANDED && slideOffset == 1.0f)) return false;
    mSlideableView.setVisibility(View.VISIBLE);
    return expandPanel(mSlideableView, 0, slideOffset);
}

/**
 * Returns the current state of the panel as an enum.
 * @return the current panel state
 */
public PanelState getPanelState() {
    return mSlideState;
}

/**
 * Check if the sliding panel in this layout is fully expanded.
 *
 * @return true if sliding panel is completely expanded
 */
public boolean isPanelExpanded() {
    return mSlideState == PanelState.EXPANDED;
}

/**
 * Check if the sliding panel in this layout is anchored.
 *
 * @return true if sliding panel is anchored
 */
public boolean isPanelAnchored() {
    return mSlideState == PanelState.ANCHORED;
}

/**
 * Check if the sliding panel in this layout is hidden.
 *
 * @return true if the sliding panel is hidden (not visible).
 */
public boolean isPanelHidden() {
    return mSlideState == PanelState.HIDDEN;
}

/**
 * Shows the panel from the hidden state
 */
public void showPanel() {
    if (mFirstLayout) {
        // Before the first layout pass we only record the desired state;
        // the layout code is expected to apply it later.
        mSlideState = PanelState.COLLAPSED;
    } else {
        if (mSlideableView == null || mSlideState != PanelState.HIDDEN) return;
        mSlideableView.setVisibility(View.VISIBLE);
        requestLayout();
        smoothSlideTo(0, 0);
    }
}

/**
 * Hides the sliding panel entirely.
 */
public void hidePanel() {
    if (mFirstLayout) {
        mSlideState = PanelState.HIDDEN;
    } else {
        if (mSlideState == PanelState.DRAGGING || mSlideState == PanelState.HIDDEN) return;
        // Move the panel one full panel-height past the collapsed position so it
        // ends up off-screen in the sliding direction.
        int newTop = computePanelTopPosition(0.0f) + (mIsSlidingUp ? +mPanelHeight : -mPanelHeight);
        smoothSlideTo(computeSlideOffset(newTop), 0);
    }
}

@SuppressLint("NewApi")
private void onPanelDragged(int newTop) {
    mSlideState = PanelState.DRAGGING;
    // Recompute the slide offset based on the new top position
    mSlideOffset = computeSlideOffset(newTop);
    // Update the parallax based on the new slide offset
    if (mParallaxOffset > 0 && mSlideOffset >= 0) {
        int mainViewOffset = getCurrentParalaxOffset();
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
            mMainView.setTranslationY(mainViewOffset);
        } else {
            // Pre-Honeycomb fallback: NineOldAndroids-style proxy for translationY.
            AnimatorProxy.wrap(mMainView).setTranslationY(mainViewOffset);
        }
    }
    // Dispatch the slide event
    dispatchOnPanelSlide(mSlideableView);
    // If the slide offset is negative, and overlay is not on, we need to increase the
    // height of the main content
    if (mSlideOffset <= 0 && !mOverlayContent) {
        // expand the main view
        LayoutParams lp = (LayoutParams)mMainView.getLayoutParams();
        lp.height = mIsSlidingUp ? (newTop - getPaddingBottom()) : (getHeight() - getPaddingBottom() - mSlideableView.getMeasuredHeight() - newTop);
        mMainView.requestLayout();
    }
}

@Override
protected boolean drawChild(Canvas canvas, View child, long drawingTime) {
    boolean result;
    final int save = canvas.save(Canvas.CLIP_SAVE_FLAG);

    if (isSlidingEnabled() && mSlideableView != child) {
        // Clip against the slider; no sense drawing what will immediately be covered,
        // Unless the panel is set to overlay content
        canvas.getClipBounds(mTmpRect);
        if (!mOverlayContent) {
            if (mIsSlidingUp) {
                mTmpRect.bottom = Math.min(mTmpRect.bottom, mSlideableView.getTop());
            } else {
                mTmpRect.top = Math.max(mTmpRect.top, mSlideableView.getBottom());
            }
        }
        canvas.clipRect(mTmpRect);
        if (mCoveredFadeColor != 0 && mSlideOffset > 0) {
            // Scale the fade color's alpha channel by the slide offset so the
            // covered content darkens progressively as the panel expands.
            final int baseAlpha = (mCoveredFadeColor & 0xff000000) >>> 24;
            final int imag = (int) (baseAlpha * mSlideOffset);
            final int color = imag << 24 | (mCoveredFadeColor & 0xffffff);
            mCoveredFadePaint.setColor(color);
            canvas.drawRect(mTmpRect, mCoveredFadePaint);
        }
    }

    result = super.drawChild(canvas, child, drawingTime);
    canvas.restoreToCount(save);

    return result;
}

/**
 * Smoothly animate the panel to the target slide offset within its range.
 *
 * @param slideOffset position to animate to
 * @param velocity initial velocity in case of fling, or 0.
 */
boolean smoothSlideTo(float slideOffset, int velocity) {
    if (!isSlidingEnabled()) {
        // Nothing to do.
        return false;
    }

    int panelTop = computePanelTopPosition(slideOffset);
    if (mDragHelper.smoothSlideViewTo(mSlideableView, mSlideableView.getLeft(), panelTop)) {
        setAllChildrenVisible();
        ViewCompat.postInvalidateOnAnimation(this);
        return true;
    }
    return false;
}

@Override
public void computeScroll() {
    if (mDragHelper != null && mDragHelper.continueSettling(true)) {
        if (!isSlidingEnabled()) {
            mDragHelper.abort();
            return;
        }
        ViewCompat.postInvalidateOnAnimation(this);
    }
}

@Override
public void draw(Canvas c) {
    super.draw(c);

    if (!isSlidingEnabled()) {
        // No need to draw a shadow if we don't have one.
        return;
    }

    // Compute the shadow strip just outside the panel's leading edge.
    final int right = mSlideableView.getRight();
    final int top;
    final int bottom;
    if (mIsSlidingUp) {
        top = mSlideableView.getTop() - mShadowHeight;
        bottom = mSlideableView.getTop();
    } else {
        top = mSlideableView.getBottom();
        bottom = mSlideableView.getBottom() + mShadowHeight;
    }
    final int left = mSlideableView.getLeft();
    if (mShadowDrawable != null) {
        mShadowDrawable.setBounds(left, top, right, bottom);
        mShadowDrawable.draw(c);
    }
}

/**
 * Tests scrollability within child views of v given a delta of dx.
 *
 * @param v View to test for horizontal scrollability
 * @param checkV Whether the view v passed should itself be checked for scrollability (true),
 *               or just its children (false).
 * @param dx Delta scrolled in pixels
 * @param x X coordinate of the active touch point
 * @param y Y coordinate of the active touch point
 * @return true if child views of v can be scrolled by delta of dx.
 */
protected boolean canScroll(View v, boolean checkV, int dx, int x, int y) {
    if (v instanceof ViewGroup) {
        final ViewGroup group = (ViewGroup) v;
        final int scrollX = v.getScrollX();
        final int scrollY = v.getScrollY();
        final int count = group.getChildCount();
        // Count backwards - let topmost views consume scroll distance first.
        for (int i = count - 1; i >= 0; i--) {
            final View child = group.getChildAt(i);
            // Recurse only into children that actually contain the touch point
            // (coordinates translated into the child's frame).
            if (x + scrollX >= child.getLeft() && x + scrollX < child.getRight() &&
                    y + scrollY >= child.getTop() && y + scrollY < child.getBottom() &&
                    canScroll(child, true, dx, x + scrollX - child.getLeft(),
                            y + scrollY - child.getTop())) {
                return true;
            }
        }
    }
    return checkV && ViewCompat.canScrollHorizontally(v, -dx);
}

@Override
protected ViewGroup.LayoutParams generateDefaultLayoutParams() {
    return new LayoutParams();
}

@Override
protected ViewGroup.LayoutParams generateLayoutParams(ViewGroup.LayoutParams p) {
    return p instanceof MarginLayoutParams ? new LayoutParams((MarginLayoutParams) p) : new LayoutParams(p);
}

@Override
protected boolean checkLayoutParams(ViewGroup.LayoutParams p) {
    return p instanceof LayoutParams && super.checkLayoutParams(p);
}

@Override
public ViewGroup.LayoutParams generateLayoutParams(AttributeSet attrs) {
    return new LayoutParams(getContext(), attrs);
}

@Override
public Parcelable onSaveInstanceState() {
    // Persist only the panel state; everything else is recomputed on layout.
    Parcelable superState = super.onSaveInstanceState();
    SavedState ss = new SavedState(superState);
    ss.mSlideState = mSlideState;
    return ss;
}

@Override
public void onRestoreInstanceState(Parcelable state) {
    SavedState ss = (SavedState) state;
    super.onRestoreInstanceState(ss.getSuperState());
    mSlideState = ss.mSlideState;
}

/**
 * Receives drag callbacks from the ViewDragHelper and translates them into
 * panel-state transitions and listener dispatches.
 */
private class DragHelperCallback extends ViewDragHelper.Callback {

    @Override
    public boolean tryCaptureView(View child, int pointerId) {
        if (mIsUnableToDrag) {
            return false;
        }
        // Only the sliding panel itself may be dragged.
        return child == mSlideableView;
    }

    @Override
    public void onViewDragStateChanged(int state) {
        // Once the drag settles, resolve the final offset into a discrete
        // panel state and notify the corresponding listener exactly once.
        if (mDragHelper.getViewDragState() == ViewDragHelper.STATE_IDLE) {
            mSlideOffset = computeSlideOffset(mSlideableView.getTop());

            if (mSlideOffset == 1) {
                if (mSlideState != PanelState.EXPANDED) {
                    updateObscuredViewVisibility();
                    mSlideState = PanelState.EXPANDED;
                    dispatchOnPanelExpanded(mSlideableView);
                }
            } else if (mSlideOffset == 0) {
                if (mSlideState != PanelState.COLLAPSED) {
                    mSlideState = PanelState.COLLAPSED;
                    dispatchOnPanelCollapsed(mSlideableView);
                }
            } else if (mSlideOffset < 0) {
                // Negative offset means the panel settled past the collapsed
                // position, i.e. it is hidden off-screen.
                mSlideState = PanelState.HIDDEN;
                mSlideableView.setVisibility(View.GONE);
                dispatchOnPanelHidden(mSlideableView);
            } else if (mSlideState != PanelState.ANCHORED) {
                updateObscuredViewVisibility();
                mSlideState = PanelState.ANCHORED;
                dispatchOnPanelAnchored(mSlideableView);
            }
        }
    }

    @Override
    public void onViewCaptured(View capturedChild, int activePointerId) {
        setAllChildrenVisible();
    }

    @Override
    public void onViewPositionChanged(View changedView, int left, int top, int dx, int dy) {
        onPanelDragged(top);
        invalidate();
    }

    @Override
    public void onViewReleased(View releasedChild, float xvel, float yvel) {
        int target = 0;

        // direction is always positive if we are sliding in the expanded direction
        float direction = mIsSlidingUp ? -yvel : yvel;

        if (direction > 0) {
            // swipe up -> expand: settle at the anchor first, or fully expand
            // if we are already past the anchor point.
            if(mSlideOffset >= mAnchorPoint) {
                target = computePanelTopPosition(1.0f);
            }else {
                target = computePanelTopPosition(mAnchorPoint);
            }
        } else if (direction < 0) {
            // swipe down -> collapse: settle at the anchor first, or fully
            // collapse if we are already below the anchor point.
            if(mSlideOffset <= mAnchorPoint) {
                target = computePanelTopPosition(0.0f);
            }else {
                target = computePanelTopPosition(mAnchorPoint);
            }
        } else if (mAnchorPoint != 1 && mSlideOffset >= (1.f + mAnchorPoint) / 2) {
            // zero velocity, and far enough from anchor point => expand to the top
            target = computePanelTopPosition(1.0f);
        } else if (mAnchorPoint == 1 && mSlideOffset >= 0.5f) {
            // zero velocity, and far enough from anchor point => expand to the top
            target = computePanelTopPosition(1.0f);
        } else if (mAnchorPoint != 1 && mSlideOffset >= mAnchorPoint) {
            // zero velocity, between anchor and the expand threshold => anchor
            target = computePanelTopPosition(mAnchorPoint);
        } else if (mAnchorPoint != 1 && mSlideOffset >= mAnchorPoint / 2) {
            // zero velocity, closer to the anchor than to the bottom => anchor
            target = computePanelTopPosition(mAnchorPoint);
        } else {
            // settle at the bottom
            target = computePanelTopPosition(0.0f);
        }

        mDragHelper.settleCapturedViewAt(releasedChild.getLeft(), target);
        invalidate();
    }

    @Override
    public int getViewVerticalDragRange(View child) {
        return mSlideRange;
    }

    @Override
    public int clampViewPositionVertical(View child, int top, int dy) {
        // Constrain the panel's top between its fully-collapsed and
        // fully-expanded positions (order depends on the slide direction).
        final int collapsedTop = computePanelTopPosition(0.f);
        final int expandedTop = computePanelTopPosition(1.0f);
        if (mIsSlidingUp) {
            return Math.min(Math.max(top, expandedTop), collapsedTop);
        } else {
            return Math.min(Math.max(top, collapsedTop), expandedTop);
        }
    }
}

/**
 * Layout parameters for children of this panel layout; currently only
 * margin parameters plus the (declared but unused here) layout_weight attr.
 */
public static class LayoutParams extends ViewGroup.MarginLayoutParams {
    private static final int[] ATTRS = new int[] {
        android.R.attr.layout_weight
    };

    public LayoutParams() {
        super(MATCH_PARENT, MATCH_PARENT);
    }

    public LayoutParams(int width, int height) {
        super(width, height);
    }

    public LayoutParams(android.view.ViewGroup.LayoutParams source) {
        super(source);
    }

    public LayoutParams(MarginLayoutParams source) {
        super(source);
    }

    public LayoutParams(LayoutParams source) {
        super(source);
    }

    public LayoutParams(Context c, AttributeSet attrs) {
        super(c, attrs);
        // NOTE(review): the styled attributes are obtained and immediately
        // recycled without reading layout_weight — presumably a leftover from
        // an upstream version; confirm before removing ATTRS.
        final TypedArray a = c.obtainStyledAttributes(attrs, ATTRS);
        a.recycle();
    }
}

/**
 * Saved instance state: persists only the discrete panel state across
 * configuration changes.
 */
static class SavedState extends BaseSavedState {
    PanelState mSlideState;

    SavedState(Parcelable superState) {
        super(superState);
    }

    private SavedState(Parcel in) {
        super(in);
        try {
            mSlideState = Enum.valueOf(PanelState.class, in.readString());
        } catch (IllegalArgumentException e) {
            // Unknown/renamed enum constant in the parcel -> fall back to a
            // safe default instead of crashing on restore.
            mSlideState = PanelState.COLLAPSED;
        }
    }

    @Override
    public void writeToParcel(Parcel out, int flags) {
        super.writeToParcel(out, flags);
        out.writeString(mSlideState.toString());
    }

    public static final Parcelable.Creator<SavedState> CREATOR =
            new Parcelable.Creator<SavedState>() {
        @Override
        public SavedState createFromParcel(Parcel in) {
            return new SavedState(in);
        }

        @Override
        public SavedState[] newArray(int size) {
            return new SavedState[size];
        }
    };
}
}
/* * Copyright 2010 JBoss Inc * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.drools.jsr94.rules; import java.io.IOException; import java.io.InputStreamReader; import java.io.Reader; import java.rmi.RemoteException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.rules.ConfigurationException; import javax.rules.ObjectFilter; import javax.rules.RuleServiceProvider; import javax.rules.RuleServiceProviderManager; import javax.rules.StatelessRuleSession; import javax.rules.admin.LocalRuleExecutionSetProvider; import javax.rules.admin.RuleExecutionSet; import javax.rules.admin.RuleExecutionSetCreateException; import javax.rules.admin.RuleExecutionSetRegisterException; import org.junit.After; import org.junit.Before; import org.junit.Ignore; import org.junit.Test; import static org.junit.Assert.*; /** * Test the <code>StatelessRuleSession</code> implementation. * * @see StatelessRuleSession */ public class StatelessRuleSessionTest { private ExampleRuleEngineFacade sessionBuilder; private final String bindUri = "sisters.drl"; private final String bindUri_drl = "sisters_expander.drl"; private final String bindUri_dsl = "sisters_expander.dsl"; private final String bindUri_xml = "sisters.xml"; private final String bindUri_globals = "sisters_globals.drl"; /** * Setup the test case. 
* normal drl, drl with dsl, drl with global */ @Before public void setUp() throws Exception { this.sessionBuilder = new ExampleRuleEngineFacade(); this.sessionBuilder.addRuleExecutionSet( this.bindUri, StatelessRuleSessionTest.class.getResourceAsStream( this.bindUri ) ); final Map map = new HashMap(); final Reader reader = new InputStreamReader( StatelessRuleSessionTest.class.getResourceAsStream( this.bindUri_dsl ) ); map.put( "dsl", this.getDSLText( reader ).toString() ); this.sessionBuilder.addRuleExecutionSet( this.bindUri_drl, StatelessRuleSessionTest.class.getResourceAsStream( this.bindUri_drl ), map ); final Map map_xml = new HashMap(); map_xml.put( "source", "xml" ); this.sessionBuilder.addRuleExecutionSet( this.bindUri_xml, StatelessRuleSessionTest.class.getResourceAsStream( this.bindUri_xml ), map_xml ); this.sessionBuilder.addRuleExecutionSet( this.bindUri_globals, StatelessRuleSessionTest.class.getResourceAsStream( this.bindUri_globals ) ); } /* * Taken from DRLParser */ private StringBuffer getDSLText(final Reader reader) throws IOException { final StringBuffer text = new StringBuffer(); final char[] buf = new char[1024]; int len = 0; while ( (len = reader.read( buf )) >= 0 ) { text.append( buf, 0, len ); } return text; } @Test public void testCreateRuleExecutionSetFromStreamWithXml() { try { final Map map = new HashMap(); map.put( "source", "xml" ); RuleServiceProvider ruleServiceProvider; RuleServiceProviderManager.registerRuleServiceProvider( "http://drools.org/", RuleServiceProviderImpl.class ); ruleServiceProvider = RuleServiceProviderManager.getRuleServiceProvider( "http://drools.org/" ); LocalRuleExecutionSetProvider ruleSetProvider = ruleServiceProvider.getRuleAdministrator().getLocalRuleExecutionSetProvider( null ); final RuleExecutionSet ruleExecutionSet = ruleSetProvider.createRuleExecutionSet( StatelessRuleSessionTest.class.getResourceAsStream( this.bindUri_xml ), map ); assertNotNull( ruleExecutionSet ); } catch ( RemoteException e ) { 
fail(); } catch ( ConfigurationException e ) { fail(); } catch ( RuleExecutionSetCreateException e ) { fail(); } catch ( IOException e ) { fail(); } } @Test public void testCreateRuleExecutionSetFromStreamReaderWithXml() { try { final Map map = new HashMap(); map.put( "source", "xml" ); RuleServiceProvider ruleServiceProvider; RuleServiceProviderManager.registerRuleServiceProvider( "http://drools.org/", RuleServiceProviderImpl.class ); ruleServiceProvider = RuleServiceProviderManager.getRuleServiceProvider( "http://drools.org/" ); LocalRuleExecutionSetProvider ruleSetProvider = ruleServiceProvider.getRuleAdministrator().getLocalRuleExecutionSetProvider( null ); final Reader ruleReader = new InputStreamReader( StatelessRuleSessionTest.class.getResourceAsStream( this.bindUri_xml ) ); final RuleExecutionSet ruleExecutionSet = ruleSetProvider.createRuleExecutionSet( ruleReader, map ); assertNotNull( ruleExecutionSet ); } catch ( RemoteException e ) { fail(); } catch ( ConfigurationException e ) { fail(); } catch ( RuleExecutionSetCreateException e ) { fail(); } catch ( IOException e ) { fail(); } } /** * Test executeRules with globals. 
*/ @Test public void testExecuteRulesGlobals() throws Exception { final java.util.Map map = new HashMap(); java.util.Vector v = new java.util.Vector(); map.put( "vector", v ); final StatelessRuleSession statelessSession = this.sessionBuilder.getStatelessRuleSession( this.bindUri_globals, map ); final List inObjects = new ArrayList(); final Person bob = new Person( "bob" ); inObjects.add( bob ); final Person jeannie = new Person( "jeannie" ); jeannie.addSister( "rebecca" ); inObjects.add( jeannie ); final Person rebecca = new Person( "rebecca" ); rebecca.addSister( "jeannie" ); inObjects.add( rebecca ); // execute the rules final List outList = statelessSession.executeRules( inObjects ); assertEquals( "incorrect size", 5, outList.size() ); assertContains( outList, bob ); assertContains( outList, rebecca ); assertContains( outList, jeannie ); assertContains( outList, "rebecca and jeannie are sisters" ); assertContains( outList, "jeannie and rebecca are sisters" ); v = (java.util.Vector) map.get( "vector" ); assertNotNull( "Global Vector null", v ); assertContains( v, "rebecca and jeannie are sisters" ); assertContains( v, "jeannie and rebecca are sisters" ); assertEquals( "Vector v incorrect size", 2, v.size() ); statelessSession.release(); } /** * Test executeRules with normal drl. 
*/ @Test public void testExecuteRules() throws Exception { final StatelessRuleSession statelessSession = this.sessionBuilder.getStatelessRuleSession( this.bindUri ); final List inObjects = new ArrayList(); final Person bob = new Person( "bob" ); inObjects.add( bob ); final Person jeannie = new Person( "jeannie" ); jeannie.addSister( "rebecca" ); inObjects.add( jeannie ); final Person rebecca = new Person( "rebecca" ); rebecca.addSister( "jeannie" ); inObjects.add( rebecca ); // execute the rules final List outList = statelessSession.executeRules( inObjects ); assertEquals( "incorrect size", 5, outList.size() ); assertContains( outList, bob ); assertContains( outList, rebecca ); assertContains( outList, jeannie ); assertContains( outList, "rebecca and jeannie are sisters" ); assertContains( outList, "jeannie and rebecca are sisters" ); statelessSession.release(); } /** * Test executeRules with normal drl. */ @Test public void testExecuteRulesWithXml() throws Exception { final StatelessRuleSession statelessSession = this.sessionBuilder.getStatelessRuleSession( this.bindUri_xml ); final List inObjects = new ArrayList(); final Person bob = new Person( "bob" ); inObjects.add( bob ); final Person jeannie = new Person( "jeannie" ); jeannie.addSister( "rebecca" ); inObjects.add( jeannie ); final Person rebecca = new Person( "rebecca" ); rebecca.addSister( "jeannie" ); inObjects.add( rebecca ); //execute the rules final List outList = statelessSession.executeRules( inObjects ); assertEquals( "incorrect size", 5, outList.size() ); assertContains( outList, bob ); assertContains( outList, rebecca ); assertContains( outList, jeannie ); assertContains( outList, "rebecca and jeannie are sisters" ); assertContains( outList, "jeannie and rebecca are sisters" ); statelessSession.release(); } /** * Test executeRules drl with dsl. 
*/ @Test public void testExecuteRules_dsl() throws Exception { final StatelessRuleSession statelessSession = this.sessionBuilder.getStatelessRuleSession( this.bindUri_drl ); final List inObjects = new ArrayList(); final Person bob = new Person( "bob" ); inObjects.add( bob ); final Person jeannie = new Person( "jeannie" ); jeannie.addSister( "rebecca" ); inObjects.add( jeannie ); final Person rebecca = new Person( "rebecca" ); rebecca.addSister( "jeannie" ); inObjects.add( rebecca ); // execute the rules final List outList = statelessSession.executeRules( inObjects ); assertEquals( "incorrect size", 5, outList.size() ); assertContains( outList, bob ); assertContains( outList, rebecca ); assertContains( outList, jeannie ); assertContains( outList, "rebecca and jeannie are sisters" ); assertContains( outList, "jeannie and rebecca are sisters" ); statelessSession.release(); } /** * Test executeRules with ObjectFilter. */ @Test public void testExecuteRulesWithFilter() throws Exception { final StatelessRuleSession statelessSession = this.sessionBuilder.getStatelessRuleSession( this.bindUri ); final List inObjects = new ArrayList(); final Person bob = new Person( "bob" ); inObjects.add( bob ); final Person rebecca = new Person( "rebecca" ); rebecca.addSister( "jeannie" ); inObjects.add( rebecca ); final Person jeannie = new Person( "jeannie" ); jeannie.addSister( "rebecca" ); inObjects.add( jeannie ); // execute the rules final List outList = statelessSession.executeRules( inObjects, new PersonFilter() ); assertEquals( "incorrect size", 3, outList.size() ); assertTrue( "where is bob", outList.contains( bob ) ); assertTrue( "where is rebecca", outList.contains( rebecca ) ); assertTrue( "where is jeannie", outList.contains( jeannie ) ); } /** * Test executeRules with ObjectFilter drl with dsl. 
*/ @Test public void testExecuteRulesWithFilter_dsl() throws Exception { final StatelessRuleSession statelessSession = this.sessionBuilder.getStatelessRuleSession( this.bindUri_drl ); final List inObjects = new ArrayList(); final Person bob = new Person( "bob" ); inObjects.add( bob ); final Person rebecca = new Person( "rebecca" ); rebecca.addSister( "jeannie" ); inObjects.add( rebecca ); final Person jeannie = new Person( "jeannie" ); jeannie.addSister( "rebecca" ); inObjects.add( jeannie ); // execute the rules final List outList = statelessSession.executeRules( inObjects, new PersonFilter() ); assertEquals( "incorrect size", 3, outList.size() ); assertTrue( "where is bob", outList.contains( bob ) ); assertTrue( "where is rebecca", outList.contains( rebecca ) ); assertTrue( "where is jeannie", outList.contains( jeannie ) ); } /** * Filter accepts only objects of type Person. */ static class PersonFilter implements ObjectFilter { public Object filter(final Object object) { return (object instanceof Person ? object : null); } public void reset() { // nothing to reset } } protected void assertContains(final List expected, final Object object) { if ( expected.contains( object ) ) { return; } fail( object + " not in " + expected ); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied.
 *
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 *
 */
package org.apache.jmeter.visualizers;

import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Component;
import java.awt.Dimension;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.ItemEvent;
import java.awt.event.ItemListener;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import javax.swing.ComboBoxModel;
import javax.swing.DefaultComboBoxModel;
import javax.swing.ImageIcon;
import javax.swing.JCheckBox;
import javax.swing.JComboBox;
import javax.swing.JScrollPane;
import javax.swing.JSplitPane;
import javax.swing.JTabbedPane;
import javax.swing.JTree;
import javax.swing.event.TreeSelectionEvent;
import javax.swing.event.TreeSelectionListener;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.DefaultTreeCellRenderer;
import javax.swing.tree.DefaultTreeModel;
import javax.swing.tree.TreePath;
import javax.swing.tree.TreeSelectionModel;

import org.apache.commons.lang3.StringUtils;
import org.apache.jmeter.assertions.AssertionResult;
import org.apache.jmeter.gui.util.VerticalPanel;
import org.apache.jmeter.samplers.Clearable;
import org.apache.jmeter.samplers.SampleResult;
import org.apache.jmeter.util.JMeterUtils;
import org.apache.jmeter.visualizers.gui.AbstractVisualizer;
import org.apache.jorphan.logging.LoggingManager;
import org.apache.log.Logger;

/**
 * Base for ViewResults: shows sample results in a tree on the left and a
 * pluggable renderer (selected via a combo box) on the right.
 */
public class ViewResultsFullVisualizer extends AbstractVisualizer
implements ActionListener, TreeSelectionListener, Clearable, ItemListener {

    private static final long serialVersionUID = 7338676747296593842L;

    private static final Logger log = LoggingManager.getLoggerForClass();

    // Colors used by subclasses/renderers to flag HTTP result categories.
    public static final Color SERVER_ERROR_COLOR = Color.red;

    public static final Color CLIENT_ERROR_COLOR = Color.blue;

    public static final Color REDIRECT_COLOR = Color.green;

    private JSplitPane mainSplit;

    // Invisible root under which each sample result becomes a child node.
    private DefaultMutableTreeNode root;

    private DefaultTreeModel treeModel;

    private JTree jTree;

    private Component leftSide;

    private JTabbedPane rightSide;

    private JComboBox<ResultRenderer> selectRenderPanel;

    private int selectedTab;

    protected static final String COMBO_CHANGE_COMMAND = "change_combo"; // $NON-NLS-1$

    private static final ImageIcon imageSuccess = JMeterUtils.getImage(
            JMeterUtils.getPropDefault("viewResultsTree.success",  //$NON-NLS-1$
                    "icon_success_sml.gif")); //$NON-NLS-1$

    private static final ImageIcon imageFailure = JMeterUtils.getImage(
            JMeterUtils.getPropDefault("viewResultsTree.failure",  //$NON-NLS-1$
                    "icon_warning_sml.gif")); //$NON-NLS-1$

    // Maximum size that we will display
    private static final int MAX_DISPLAY_SIZE =
        JMeterUtils.getPropDefault("view.results.tree.max_size", 200 * 1024); // $NON-NLS-1$

    // default display order
    private static final String VIEWERS_ORDER =
        JMeterUtils.getPropDefault("view.results.tree.renderers_order", ""); // $NON-NLS-1$ //$NON-NLS-2$

    private ResultRenderer resultsRender = null;

    private TreeSelectionEvent lastSelectionEvent;

    private JCheckBox autoScrollCB;

    /**
     * Constructor
     */
    public ViewResultsFullVisualizer() {
        super();
        // NOTE(review): init() is overridable and called from the constructor;
        // subclasses overriding it run before their own fields are initialized.
        init();
    }

    /** {@inheritDoc} */
    @Override
    public void add(final SampleResult sample) {
        // Marshal the GUI update onto the AWT event thread.
        JMeterUtils.runSafe(new Runnable() {
            @Override
            public void run() {
                updateGui(sample);
            }
        });
    }

    /**
     * Update the visualizer with new data.
     */
    private synchronized void updateGui(SampleResult res) {
        // Add sample
        DefaultMutableTreeNode currNode = new DefaultMutableTreeNode(res);
        treeModel.insertNodeInto(currNode, root, root.getChildCount());
        addSubResults(currNode, res);
        // Add any assertion that failed as children of the sample node
        AssertionResult assertionResults[] = res.getAssertionResults();
        int assertionIndex = currNode.getChildCount();
        for (int j = 0; j < assertionResults.length; j++) {
            AssertionResult item = assertionResults[j];

            if (item.isFailure() || item.isError()) {
                DefaultMutableTreeNode assertionNode = new DefaultMutableTreeNode(item);
                treeModel.insertNodeInto(assertionNode, currNode, assertionIndex++);
            }
        }

        if (root.getChildCount() == 1) {
            jTree.expandPath(new TreePath(root));
        }
        if (autoScrollCB.isSelected() && root.getChildCount() > 1) {
            // Keep the newest sample visible when auto-scroll is enabled.
            jTree.scrollPathToVisible(new TreePath(new Object[] { root,
                    treeModel.getChild(root, root.getChildCount() - 1) }));
        }
    }

    /** Recursively adds sub-results (and their failed assertions) under currNode. */
    private void addSubResults(DefaultMutableTreeNode currNode, SampleResult res) {
        SampleResult[] subResults = res.getSubResults();

        int leafIndex = 0;

        for (int i = 0; i < subResults.length; i++) {
            SampleResult child = subResults[i];

            if (log.isDebugEnabled()) {
                log.debug("updateGui1 : child sample result - " + child);
            }
            DefaultMutableTreeNode leafNode = new DefaultMutableTreeNode(child);

            treeModel.insertNodeInto(leafNode, currNode, leafIndex++);
            addSubResults(leafNode, child);
            // Add any assertion that failed as children of the sample node
            AssertionResult assertionResults[] = child.getAssertionResults();
            int assertionIndex = leafNode.getChildCount();
            for (int j = 0; j < assertionResults.length; j++) {
                AssertionResult item = assertionResults[j];

                if (item.isFailure() || item.isError()) {
                    DefaultMutableTreeNode assertionNode = new DefaultMutableTreeNode(item);
                    treeModel.insertNodeInto(assertionNode, leafNode, assertionIndex++);
                }
            }
        }
    }

    /** {@inheritDoc} */
    @Override
    public synchronized void clearData() {
        while (root.getChildCount() > 0) {
            // the child to be removed will always be 0 'cos as the nodes are
            // removed the nth node will become (n-1)th
            treeModel.removeNodeFromParent((DefaultMutableTreeNode) root.getChildAt(0));
        }
        resultsRender.clearData();
    }

    /** {@inheritDoc} */
    @Override
    public String getLabelResource() {
        return "view_results_tree_title"; // $NON-NLS-1$
    }

    /**
     * Initialize this visualizer: title, tree on the left, renderer tabs on
     * the right, joined by a horizontal split pane.
     */
    protected void init() {
        log.debug("init() - pass");
        setLayout(new BorderLayout(0, 5));
        setBorder(makeBorder());
        add(makeTitlePanel(), BorderLayout.NORTH);

        leftSide = createLeftPanel();
        // Prepare the common tab
        rightSide = new JTabbedPane();

        // Create the split pane
        mainSplit = new JSplitPane(JSplitPane.HORIZONTAL_SPLIT, leftSide, rightSide);
        add(mainSplit, BorderLayout.CENTER);
        // init right side with first render
        resultsRender.setRightSide(rightSide);
        resultsRender.init();
    }

    /** {@inheritDoc} */
    @Override
    public void valueChanged(TreeSelectionEvent e) {
        lastSelectionEvent = e;
        DefaultMutableTreeNode node = null;
        synchronized (this) {
            node = (DefaultMutableTreeNode) jTree.getLastSelectedPathComponent();
        }

        if (node != null) {
            // to restore last tab used
            if (rightSide.getTabCount() > selectedTab) {
                resultsRender.setLastSelectedTab(rightSide.getSelectedIndex());
            }
            Object userObject = node.getUserObject();
            resultsRender.setSamplerResult(userObject);
            resultsRender.setupTabPane(); // Processes Assertions
            // display a SampleResult
            if (userObject instanceof SampleResult) {
                SampleResult sampleResult = (SampleResult) userObject;
                if (isTextDataType(sampleResult)){
                    resultsRender.renderResult(sampleResult);
                } else {
                    // Non-text response data is handed to the image renderer.
                    byte[] responseBytes = sampleResult.getResponseData();
                    if (responseBytes != null) {
                        resultsRender.renderImage(sampleResult);
                    }
                }
            }
        }
    }

    /**
     * @param sampleResult SampleResult
     * @return true if sampleResult is text or has empty content type
     */
    protected static boolean isTextDataType(SampleResult sampleResult) {
        return (SampleResult.TEXT).equals(sampleResult.getDataType())
                || StringUtils.isEmpty(sampleResult.getDataType());
    }

    /** Builds the left-hand panel: renderer combo, results tree, auto-scroll box. */
    private synchronized Component createLeftPanel() {
        SampleResult rootSampleResult = new SampleResult();
        rootSampleResult.setSampleLabel("Root");
        rootSampleResult.setSuccessful(true);
        root = new DefaultMutableTreeNode(rootSampleResult);

        treeModel = new DefaultTreeModel(root);
        jTree = new JTree(treeModel);
        jTree.setCellRenderer(new ResultsNodeRenderer());
        jTree.getSelectionModel().setSelectionMode(TreeSelectionModel.SINGLE_TREE_SELECTION);
        jTree.addTreeSelectionListener(this);
        jTree.setRootVisible(false);
        jTree.setShowsRootHandles(true);
        JScrollPane treePane = new JScrollPane(jTree);
        treePane.setPreferredSize(new Dimension(200, 300));

        VerticalPanel leftPane = new VerticalPanel();
        leftPane.add(treePane, BorderLayout.CENTER);
        leftPane.add(createComboRender(), BorderLayout.NORTH);
        autoScrollCB = new JCheckBox(JMeterUtils.getResString("view_results_autoscroll")); // $NON-NLS-1$
        autoScrollCB.setSelected(false);
        autoScrollCB.addItemListener(this);
        leftPane.add(autoScrollCB, BorderLayout.SOUTH);
        return leftPane;
    }

    /**
     * Create the drop-down list to changer render
     * @return List of all render (implement ResultsRender)
     */
    private Component createComboRender() {
        ComboBoxModel<ResultRenderer> nodesModel = new DefaultComboBoxModel<>();
        // drop-down list for renderer
        selectRenderPanel = new JComboBox<>(nodesModel);
        selectRenderPanel.setActionCommand(COMBO_CHANGE_COMMAND);
        selectRenderPanel.addActionListener(this);

        // if no results render in jmeter.properties, load Standard (default)
        List<String> classesToAdd = Collections.<String>emptyList();
        try {
            classesToAdd = JMeterUtils.findClassesThatExtend(ResultRenderer.class);
        } catch (IOException e1) {
            // ignored
        }
        String textRenderer = JMeterUtils.getResString("view_results_render_text"); // $NON-NLS-1$
        Object textObject = null;
        Map<String, ResultRenderer> map = new HashMap<String, ResultRenderer>(classesToAdd.size());
        for (String clazz : classesToAdd) {
            try {
                // Instantiate render classes
                final ResultRenderer renderer = (ResultRenderer) Class.forName(clazz).newInstance();
                if (textRenderer.equals(renderer.toString())){
                    textObject=renderer;
                }
                renderer.setBackgroundColor(getBackground());
                map.put(renderer.getClass().getName(), renderer);
            } catch (Exception e) {
                log.warn("Error loading result renderer:" + clazz, e);
            }
        }
        if(VIEWERS_ORDER.length()>0) {
            // Honour the user-configured renderer order first; a leading '.'
            // is shorthand for the default visualizers package.
            String[] keys = VIEWERS_ORDER.split(",");
            for (String key : keys) {
                if(key.startsWith(".")) {
                    key = "org.apache.jmeter.visualizers"+key; //$NON-NLS-1$
                }
                ResultRenderer renderer = map.remove(key);
                if(renderer != null) {
                    selectRenderPanel.addItem(renderer);
                } else {
                    log.warn("Missing (check spelling error in renderer name) or already added(check doublon) "
                            + "result renderer, check property 'view.results.tree.renderers_order', renderer name:'"+key+"'");
                }
            }
        }
        // Add remaining (plugins or missed in property)
        for (ResultRenderer renderer : map.values()) {
            selectRenderPanel.addItem(renderer);
        }
        nodesModel.setSelectedItem(textObject); // preset to "Text" option
        return selectRenderPanel;
    }

    /** {@inheritDoc} */
    @Override
    public void actionPerformed(ActionEvent event) {
        String command = event.getActionCommand();
        if (COMBO_CHANGE_COMMAND.equals(command)) {
            JComboBox<?> jcb = (JComboBox<?>) event.getSource();

            if (jcb != null) {
                resultsRender = (ResultRenderer) jcb.getSelectedItem();
                if (rightSide != null) {
                    // to restore last selected tab (better user-friendly)
                    selectedTab = rightSide.getSelectedIndex();
                    // Remove old right side
                    mainSplit.remove(rightSide);

                    // create and add a new right side
                    rightSide = new JTabbedPane();
                    mainSplit.add(rightSide);
                    resultsRender.setRightSide(rightSide);
                    resultsRender.setLastSelectedTab(selectedTab);
                    log.debug("selectedTab=" + selectedTab);
                    resultsRender.init();
                    // To display current sampler result before change
                    this.valueChanged(lastSelectionEvent);
                }
            }
        }
    }

    /**
     * Returns the response data as a String, truncated (with an explanatory
     * prefix) when it exceeds the configured maximum display size; null for
     * non-text responses.
     */
    public static String getResponseAsString(SampleResult res) {
        String response = null;
        if (isTextDataType(res)) {
            // Showing large strings can be VERY costly, so we will avoid
            // doing so if the response
            // data is larger than 200K. TODO: instead, we could delay doing
            // the result.setText
            // call until the user chooses the "Response data" tab. Plus we
            // could warn the user
            // if this happens and revert the choice if he doesn't confirm
            // he's ready to wait.
            int len = res.getResponseDataAsString().length();
            if (MAX_DISPLAY_SIZE > 0 && len > MAX_DISPLAY_SIZE) {
                StringBuilder builder = new StringBuilder(MAX_DISPLAY_SIZE+100);
                builder.append(JMeterUtils.getResString("view_results_response_too_large_message")) //$NON-NLS-1$
                    .append(len).append(" > Max: ").append(MAX_DISPLAY_SIZE)
                    .append(", ").append(JMeterUtils.getResString("view_results_response_partial_message")) // $NON-NLS-1$
                    .append("\n").append(res.getResponseDataAsString().substring(0, MAX_DISPLAY_SIZE)).append("\n...");
                response = builder.toString();
            } else {
                response = res.getResponseDataAsString();
            }
        }
        return response;
    }

    /** Tree cell renderer: red text + warning icon for failed samples/assertions. */
    private static class ResultsNodeRenderer extends DefaultTreeCellRenderer {

        private static final long serialVersionUID = 4159626601097711565L;

        @Override
        public Component getTreeCellRendererComponent(JTree tree, Object value,
                boolean sel, boolean expanded, boolean leaf, int row, boolean focus) {
            super.getTreeCellRendererComponent(tree, value, sel, expanded, leaf, row, focus);
            boolean failure = true;
            Object userObject = ((DefaultMutableTreeNode) value).getUserObject();
            if (userObject instanceof SampleResult) {
                failure = !(((SampleResult) userObject).isSuccessful());
            } else if (userObject instanceof AssertionResult) {
                AssertionResult assertion = (AssertionResult) userObject;
                failure = assertion.isError() || assertion.isFailure();
            }

            // Set the status for the node
            if (failure) {
                this.setForeground(Color.red);
                this.setIcon(imageFailure);
            } else {
                this.setIcon(imageSuccess);
            }
            return this;
        }
    }

    /**
     * Handler for Checkbox
     */
    @Override
    public void itemStateChanged(ItemEvent e) {
        // NOOP state is held by component
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.accumulo.core.rpc;

import static java.util.Objects.requireNonNull;

import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

import javax.security.auth.callback.CallbackHandler;
import javax.security.sasl.Sasl;

import org.apache.accumulo.core.client.security.tokens.AuthenticationToken;
import org.apache.accumulo.core.client.security.tokens.KerberosToken;
import org.apache.accumulo.core.clientImpl.ClientConfConverter;
import org.apache.accumulo.core.clientImpl.DelegationTokenImpl;
import org.apache.accumulo.core.conf.AccumuloConfiguration;
import org.apache.accumulo.core.conf.ClientProperty;
import org.apache.accumulo.core.conf.Property;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authentication.util.KerberosUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Connection parameters for setting up a TSaslTransportFactory
 */
public class SaslConnectionParams {
  private static final Logger log = LoggerFactory.getLogger(SaslConnectionParams.class);

  /**
   * Enumeration around {@link Sasl#QOP}
   */
  public enum QualityOfProtection {
    AUTH("auth"), AUTH_INT("auth-int"), AUTH_CONF("auth-conf");

    // The literal value placed into the Sasl.QOP property map.
    private final String quality;

    private QualityOfProtection(String quality) {
      this.quality = quality;
    }

    public String getQuality() {
      return quality;
    }

    /**
     * Resolves a QOP from its SASL property value (e.g. "auth-conf").
     *
     * @throws IllegalArgumentException
     *           if the name matches no known QOP
     */
    public static QualityOfProtection get(String name) {
      if (AUTH.quality.equals(name)) {
        return AUTH;
      } else if (AUTH_INT.quality.equals(name)) {
        return AUTH_INT;
      } else if (AUTH_CONF.quality.equals(name)) {
        return AUTH_CONF;
      }
      throw new IllegalArgumentException("No value for " + name);
    }

    @Override
    public String toString() {
      return quality;
    }
  }

  /**
   * The SASL mechanism to use for authentication
   */
  public enum SaslMechanism {
    GSSAPI("GSSAPI"), // Kerberos
    DIGEST_MD5("DIGEST-MD5"); // Delegation Tokens

    // The mechanism name as understood by the SASL layer.
    private final String mechanismName;

    private SaslMechanism(String mechanismName) {
      this.mechanismName = mechanismName;
    }

    public String getMechanismName() {
      return mechanismName;
    }

    /**
     * Resolves a mechanism from its SASL name (e.g. "GSSAPI").
     *
     * @throws IllegalArgumentException
     *           if the name matches no known mechanism
     */
    public static SaslMechanism get(String mechanismName) {
      if (GSSAPI.mechanismName.equals(mechanismName)) {
        return GSSAPI;
      } else if (DIGEST_MD5.mechanismName.equals(mechanismName)) {
        return DIGEST_MD5;
      }
      throw new IllegalArgumentException("No value for " + mechanismName);
    }
  }

  // Default Kerberos realm, resolved once at class load; falls back to a
  // sentinel when no krb5 configuration is available.
  private static String defaultRealm;

  static {
    try {
      defaultRealm = KerberosUtil.getDefaultRealm();
    } catch (Exception ke) {
      // FIX: message previously said "empty" although the value set is "UNKNOWN"
      log.debug("Kerberos krb5 configuration not found, setting default realm to UNKNOWN");
      defaultRealm = "UNKNOWN";
    }
  }

  protected String principal;
  protected QualityOfProtection qop;
  protected String kerberosServerPrimary;
  protected SaslMechanism mechanism;
  protected CallbackHandler callbackHandler;
  protected final Map<String,String> saslProperties;

  public SaslConnectionParams(AccumuloConfiguration conf, AuthenticationToken token) {
    this(ClientConfConverter.toProperties(conf), token);
  }

  public SaslConnectionParams(Properties properties, AuthenticationToken token) {
    requireNonNull(properties, "Properties was null");
    requireNonNull(token, "AuthenticationToken was null");

    saslProperties = new HashMap<>();
    // Order matters: the principal comes from the current Hadoop UGI login,
    // then configuration supplies QOP/server-primary, then the token picks
    // the mechanism and (optionally) the callback handler.
    updatePrincipalFromUgi();
    updateFromConfiguration(properties);
    updateFromToken(token);
  }

  /**
   * Factory that returns null when SASL is disabled in the configuration, and a fully populated
   * instance otherwise.
   */
  public static SaslConnectionParams from(AccumuloConfiguration config, AuthenticationToken token) {
    if (!config.getBoolean(Property.INSTANCE_RPC_SASL_ENABLED)) {
      return null;
    }
    return new SaslConnectionParams(config, token);
  }

  /**
   * Chooses the SASL mechanism (and callback handler) from the token type: Kerberos tokens use
   * GSSAPI, delegation tokens use DIGEST-MD5.
   */
  protected void updateFromToken(AuthenticationToken token) {
    if (token instanceof KerberosToken) {
      mechanism = SaslMechanism.GSSAPI;
      // No callbackhandlers necessary for GSSAPI
      callbackHandler = null;
    } else if (token instanceof DelegationTokenImpl) {
      mechanism = SaslMechanism.DIGEST_MD5;
      callbackHandler = new SaslClientDigestCallbackHandler((DelegationTokenImpl) token);
    } else {
      throw new IllegalArgumentException(
          "Cannot determine SASL mechanism for token class: " + token.getClass());
    }
  }

  /**
   * Reads the principal (full user name) from the current Hadoop UGI login.
   *
   * @throws RuntimeException
   *           if Hadoop security is not enabled or the current user cannot be determined
   */
  protected void updatePrincipalFromUgi() {
    // Ensure we're using Kerberos auth for Hadoop UGI
    if (!UserGroupInformation.isSecurityEnabled()) {
      throw new RuntimeException("Cannot use SASL if Hadoop security is not enabled");
    }

    // Get the current user
    UserGroupInformation currentUser;
    try {
      currentUser = UserGroupInformation.getCurrentUser();
    } catch (IOException e) {
      throw new RuntimeException("Failed to get current user", e);
    }

    // The full name is our principal
    this.principal = currentUser.getUserName();
    if (this.principal == null) {
      throw new RuntimeException("Got null username from " + currentUser);
    }
  }

  /**
   * Populates QOP, the SASL property map, and the expected Kerberos server primary from client
   * properties.
   */
  protected void updateFromConfiguration(Properties properties) {
    // Get the quality of protection to use
    final String qopValue = ClientProperty.SASL_QOP.getValue(properties);
    this.qop = QualityOfProtection.get(qopValue);

    // Add in the SASL properties to a map so we don't have to repeatedly construct this map
    this.saslProperties.put(Sasl.QOP, this.qop.getQuality());

    // The primary from the KRB principal on each server (e.g. primary/instance@realm)
    this.kerberosServerPrimary = ClientProperty.SASL_KERBEROS_SERVER_PRIMARY.getValue(properties);
  }

  /** Unmodifiable view of the SASL properties passed to the transport. */
  public Map<String,String> getSaslProperties() {
    return Collections.unmodifiableMap(saslProperties);
  }

  /**
   * The quality of protection used with SASL. See {@link Sasl#QOP} for more information.
   */
  public QualityOfProtection getQualityOfProtection() {
    return qop;
  }

  /**
   * The 'primary' component from the Kerberos principals that servers are configured to use.
   */
  public String getKerberosServerPrimary() {
    return kerberosServerPrimary;
  }

  /**
   * The principal of the logged in user for SASL
   */
  public String getPrincipal() {
    return principal;
  }

  /**
   * The SASL mechanism to use for authentication
   */
  public SaslMechanism getMechanism() {
    return mechanism;
  }

  /**
   * The SASL callback handler for this mechanism, may be null.
   */
  public CallbackHandler getCallbackHandler() {
    return callbackHandler;
  }

  @Override
  public int hashCode() {
    HashCodeBuilder hcb = new HashCodeBuilder(23, 29);
    hcb.append(kerberosServerPrimary).append(saslProperties).append(qop.hashCode())
        .append(principal).append(mechanism).append(callbackHandler);
    return hcb.toHashCode();
  }

  @Override
  public boolean equals(Object o) {
    if (o instanceof SaslConnectionParams) {
      SaslConnectionParams other = (SaslConnectionParams) o;
      if (!kerberosServerPrimary.equals(other.kerberosServerPrimary)) {
        return false;
      }
      if (qop != other.qop) {
        return false;
      }
      if (!principal.equals(other.principal)) {
        return false;
      }
      // enums are singletons, so identity comparison is the idiomatic form
      // (behavior identical to the previous equals() call)
      if (mechanism != other.mechanism) {
        return false;
      }
      if (callbackHandler == null) {
        if (other.callbackHandler != null) {
          return false;
        }
      } else if (!callbackHandler.equals(other.callbackHandler)) {
        return false;
      }
      return saslProperties.equals(other.saslProperties);
    }
    return false;
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder(64);
    sb.append("SaslConnectionParams[").append("kerberosServerPrimary=")
        .append(kerberosServerPrimary).append(", qualityOfProtection=").append(qop);
    sb.append(", principal=").append(principal).append(", mechanism=").append(mechanism)
        .append(", callbackHandler=").append(callbackHandler).append("]");
    return sb.toString();
  }

  public static String getDefaultRealm() {
    return defaultRealm;
  }
}
/*
 * Copyright 2002-2007 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package org.springframework.richclient.application.mdi;

import org.springframework.richclient.application.*;
import org.springframework.richclient.application.mdi.contextmenu.DesktopCommandGroupFactory;
import org.springframework.richclient.application.support.AbstractApplicationPage;
import org.springframework.richclient.util.Assert;
import org.springframework.richclient.util.PopupMenuMouseListener;

import javax.swing.*;
import javax.swing.event.InternalFrameAdapter;
import javax.swing.event.InternalFrameEvent;
import java.beans.PropertyVetoException;
import java.util.HashMap;
import java.util.Map;

/**
 * An application page that shows each page component in its own
 * JInternalFrame on a JDesktopPane (MDI style).
 *
 * @author Peter De Bruycker
 */
public class DesktopApplicationPage extends AbstractApplicationPage implements PageLayoutBuilder {
    private JDesktopPane control;

    // PageComponent -> JInternalFrame; raw Map kept for the file's
    // pre-generics style (do not assume key/value types beyond this use).
    private Map frames = new HashMap();

    private int dragMode;

    // Whether the desktop is wrapped in a JScrollPane; may only be changed
    // before the control is created (see setScrollable).
    private boolean scrollable = true;

    private final DesktopCommandGroupFactory desktopCommandGroupFactory;

    public DesktopApplicationPage(ApplicationWindow window, PageDescriptor pageDescriptor, int dragMode,
            DesktopCommandGroupFactory desktopCommandGroupFactory) {
        super(window, pageDescriptor);
        this.desktopCommandGroupFactory = desktopCommandGroupFactory;

        Assert.isTrue(dragMode == JDesktopPane.LIVE_DRAG_MODE || dragMode == JDesktopPane.OUTLINE_DRAG_MODE,
                "dragMode must be JDesktopPane.LIVE_DRAG_MODE or JDesktopPane.OUTLINE_DRAG_MODE");

        this.dragMode = dragMode;
    }

    /**
     * Sets whether the desktop pane is wrapped in a scroll pane. Must be
     * called before the control is created.
     *
     * @throws IllegalStateException if the control has already been created
     */
    public void setScrollable(boolean scrollable) {
        if (isControlCreated()) {
            throw new IllegalStateException("scrollable-property can only be set before creation of control");
        }

        this.scrollable = scrollable;
    }

    /**
     * Deiconifies and selects the internal frame of the given component, then
     * requests focus for the component's control.
     */
    protected boolean giveFocusTo(PageComponent pageComponent) {
        if (getActiveComponent() == pageComponent) {
            return true;
        }

        JInternalFrame frame = getInternalFrame(pageComponent);
        if (frame == null) {
            return false;
        }

        try {
            if (frame.isIcon()) {
                frame.setIcon(false);
            }
            frame.setSelected(true);
        } catch (PropertyVetoException e) {
            // ignore: a veto simply leaves the frame in its current state
        }

        return pageComponent.getControl().requestFocusInWindow();
    }

    public void addView(String viewDescriptorId) {
        showView(viewDescriptorId);
    }

    protected void doAddPageComponent(PageComponent pageComponent) {
        JInternalFrame frame = createInternalFrame(pageComponent);
        frame.setVisible(true);
        control.add(frame);
    }

    /**
     * Creates and configures the internal frame hosting the given page
     * component; closing the frame closes the component, activating it makes
     * the component the page's active component.
     */
    protected JInternalFrame createInternalFrame(final PageComponent pageComponent) {
        JInternalFrame internalFrame = new JInternalFrame(pageComponent.getDisplayName());

        // Close is routed through close(pageComponent) in the listener below
        // rather than letting the frame dispose itself.
        internalFrame.setDefaultCloseOperation(JInternalFrame.DO_NOTHING_ON_CLOSE);
        configureFrame(pageComponent, internalFrame);
        keepFrameDetails(pageComponent, internalFrame);

        internalFrame.addInternalFrameListener(new InternalFrameAdapter() {
            public void internalFrameClosing(InternalFrameEvent e) {
                close(pageComponent);
            }

            public void internalFrameActivated(InternalFrameEvent e) {
                if (!e.getInternalFrame().isIcon()) {
                    setActiveComponent(pageComponent);
                }
            }
        });
        internalFrame.getContentPane().add(pageComponent.getControl());
        internalFrame.pack();
        return internalFrame;
    }

    /**
     * Having this method allows subclasses to enrich/wrap the internal frame, for instance with a visible resizer.
     */
    protected void keepFrameDetails(final PageComponent pageComponent, JInternalFrame internalFrame) {
        frames.put(pageComponent, internalFrame);
    }

    /**
     * Applies icon and window-decoration settings; a DesktopViewDescriptor
     * controls the flags individually, otherwise everything is enabled.
     */
    protected void configureFrame(PageComponent component, JInternalFrame frame) {
        if (component.getIcon() != null) {
            frame.setFrameIcon(component.getIcon());
        }

        ViewDescriptor descriptor = getViewDescriptor(component.getId());
        if (descriptor instanceof DesktopViewDescriptor) {
            DesktopViewDescriptor desktopViewDescriptor = (DesktopViewDescriptor) descriptor;
            frame.setResizable(desktopViewDescriptor.isResizable());
            frame.setMaximizable(desktopViewDescriptor.isMaximizable());
            frame.setIconifiable(desktopViewDescriptor.isIconifiable());
            frame.setClosable(desktopViewDescriptor.isClosable());
        } else {
            frame.setResizable(true);
            frame.setMaximizable(true);
            frame.setIconifiable(true);
            frame.setClosable(true);
        }
    }

    protected JInternalFrame getInternalFrame(PageComponent pageComponent) {
        return (JInternalFrame) frames.get(pageComponent);
    }

    protected void doRemovePageComponent(PageComponent pageComponent) {
        // not used
        JInternalFrame frame = getInternalFrame(pageComponent);
        if (frame != null) {
            frame.dispose();
            frames.remove(pageComponent);
        }
    }

    /**
     * Creates the desktop pane, applies the drag mode, builds the initial
     * layout, and optionally wraps the pane in a JScrollPane.
     */
    protected JComponent createControl() {
        control = createDesktopPane();
        control.setDragMode(dragMode);

        getPageDescriptor().buildInitialLayout(this);

        if (scrollable) {
            return new JScrollPane(control);
        } else {
            return control;
        }
    }

    /**
     * Creates the JDesktopPane (a ScrollingDesktopPane when scrollable) and
     * installs the desktop context menu via the command-group factory.
     */
    protected JDesktopPane createDesktopPane() {
        final JDesktopPane control;
        if (scrollable) {
            control = new ScrollingDesktopPane();
        } else {
            control = new JDesktopPane();
        }
        control.addMouseListener(new PopupMenuMouseListener() {
            protected JPopupMenu getPopupMenu() {
                return desktopCommandGroupFactory.createContextMenuCommandGroup(getWindow().getCommandManager(),
                        control).createPopupMenu();
            }
        });
        return control;
    }

    /**
     * Synchronizes the frame's icon, title, and tool tip with the page
     * component's current display properties.
     */
    protected void updatePageComponentProperties(PageComponent pageComponent) {
        JInternalFrame frame = getInternalFrame(pageComponent);

        if (pageComponent.getIcon() != null) {
            frame.setFrameIcon(pageComponent.getIcon());
        }
        frame.setTitle(pageComponent.getDisplayName());
        frame.setToolTipText(pageComponent.getCaption());
    }

    /**
     * Overridden so it will leave iconified frames iconified.
     */
    protected void setActiveComponent() {
        // getAllFrames returns the frames in z-order (i.e. the first one in the
        // list is the last one used)
        JInternalFrame[] frames = control.getAllFrames();
        for (int i = 0; i < frames.length; i++) {
            JInternalFrame frame = frames[i];
            if (!frame.isIcon()) {
                try {
                    frame.setSelected(true);
                } catch (PropertyVetoException ignore) {
                    // a veto leaves the previous selection in place
                }
                break;
            }
        }
    }
}
/****************************************************************
 * Licensed to the Apache Software Foundation (ASF) under one   *
 * or more contributor license agreements.  See the NOTICE file *
 * distributed with this work for additional information        *
 * regarding copyright ownership.  The ASF licenses this file   *
 * to you under the Apache License, Version 2.0 (the            *
 * "License"); you may not use this file except in compliance   *
 * with the License.  You may obtain a copy of the License at   *
 *                                                              *
 *   http://www.apache.org/licenses/LICENSE-2.0                 *
 *                                                              *
 * Unless required by applicable law or agreed to in writing,   *
 * software distributed under the License is distributed on an  *
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY       *
 * KIND, either express or implied.  See the License for the    *
 * specific language governing permissions and limitations      *
 * under the License.                                           *
 ****************************************************************/
package org.apache.james.mailbox.store;

import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

import org.apache.james.mailbox.exception.MailboxException;
import org.apache.james.mailbox.model.Content;
import org.apache.james.mailbox.model.MessageResult;
import org.apache.james.mailbox.model.MessageResult.FetchGroup;
import org.apache.james.mailbox.model.MessageResult.MimePath;
import org.apache.james.mailbox.store.mail.model.Message;
import org.apache.james.mailbox.store.streaming.PartContentBuilder;
import org.apache.james.mime4j.MimeException;
import org.apache.james.mime4j.parser.AbstractContentHandler;
import org.apache.james.mime4j.parser.MimeStreamParser;
import org.apache.james.mime4j.stream.Field;
import org.apache.james.mime4j.stream.MimeConfig;
import org.apache.james.mime4j.stream.RawField;
import org.apache.james.mime4j.util.ByteSequence;
import org.apache.james.mime4j.util.ContentUtil;

/**
 * Static helpers that turn a stored {@link Message} into a {@link MessageResult},
 * optionally resolving individual MIME parts (headers, body, full content)
 * according to a {@link FetchGroup}.
 */
public class ResultUtils {

    public static final byte[] BYTES_NEW_LINE = { 0x0D, 0x0A };

    public static final byte[] BYTES_HEADER_FIELD_VALUE_SEP = { 0x3A, 0x20 };

    static final Charset US_ASCII = Charset.forName("US-ASCII");

    /**
     * Parses the header section of the given message with mime4j and returns
     * the headers as a list, preserving the raw (unfolded) field values where
     * possible.
     *
     * @param document the stored message whose header content is parsed
     * @return list of parsed headers, in order of appearance
     * @throws IOException if the header stream cannot be read or parsed
     */
    public static List<MessageResult.Header> createHeaders(final Message<?> document) throws IOException {
        final List<MessageResult.Header> results = new ArrayList<MessageResult.Header>();
        // No line/header length limits: accept arbitrarily long header lines.
        MimeConfig config = MimeConfig.custom().setMaxLineLen(-1).setMaxHeaderLen(-1).build();
        final MimeStreamParser parser = new MimeStreamParser(config);
        parser.setContentHandler(new AbstractContentHandler() {
            @Override
            public void endHeader() {
                // Only the header section is wanted; stop before the body.
                parser.stop();
            }

            @Override
            public void field(Field field) throws MimeException {
                String fieldValue;
                if (field instanceof RawField) {
                    // check if we can access the body in the raw form so no unfolding was done under the hood
                    ByteSequence raw = field.getRaw();
                    int len = raw.length();
                    int off = ((RawField) field).getDelimiterIdx() + 1;
                    // Skip the single space that conventionally follows ':'.
                    if (len > off + 1 && (raw.byteAt(off) & 0xff) == 0x20) off++;
                    fieldValue = ContentUtil.decode(raw, off, len - off);
                } else {
                    fieldValue = field.getBody();
                }
                // NOTE(review): "\r\f" (CR + form-feed) looks like it may have
                // been intended as "\r\n" — preserved as-is; confirm upstream.
                if (fieldValue.endsWith("\r\f")) {
                    fieldValue = fieldValue.substring(0, fieldValue.length() - 2);
                }
                if (fieldValue.startsWith(" ")) {
                    fieldValue = fieldValue.substring(1);
                }
                final ResultHeader resultHeader = new ResultHeader(field.getName(), fieldValue);
                results.add(resultHeader);
            }
        });
        try {
            parser.parse(document.getHeaderContent());
        } catch (MimeException e) {
            throw new IOException("Unable to parse headers of message " + document, e);
        }
        return results;
    }

    /**
     * Return the {@link MessageResult} for the given {@link Message} and {@link FetchGroup}
     *
     * @param message the stored message
     * @param fetchGroup which pieces of content to resolve; may be null
     * @return result
     * @throws MailboxException if the message cannot be parsed
     * @throws UnsupportedOperationException if the fetch group requests
     *         content beyond HEADERS, BODY_CONTENT, FULL_CONTENT, and
     *         MIME_DESCRIPTOR
     */
    public static MessageResult loadMessageResult(final Message<?> message, final FetchGroup fetchGroup)
            throws MailboxException {
        try {
            MessageResultImpl messageResult = new MessageResultImpl(message);
            if (fetchGroup != null) {
                // Strip off the flags this method knows how to satisfy; any
                // remainder indicates an unsupported request.
                int content = fetchGroup.content();
                if ((content & FetchGroup.HEADERS) > 0) {
                    content -= FetchGroup.HEADERS;
                }
                if ((content & FetchGroup.BODY_CONTENT) > 0) {
                    content -= FetchGroup.BODY_CONTENT;
                }
                if ((content & FetchGroup.FULL_CONTENT) > 0) {
                    content -= FetchGroup.FULL_CONTENT;
                }
                if ((content & FetchGroup.MIME_DESCRIPTOR) > 0) {
                    content -= FetchGroup.MIME_DESCRIPTOR;
                }
                if (content != 0) {
                    throw new UnsupportedOperationException("Unsupported result: " + content);
                }

                addPartContent(fetchGroup, message, messageResult);
            }
            return messageResult;
        } catch (IOException e) {
            throw new MailboxException("Unable to parse message", e);
        } catch (MimeException e) {
            throw new MailboxException("Unable to parse message", e);
        }
    }

    /** Resolves every part-content descriptor of the fetch group into the result. */
    private static void addPartContent(final FetchGroup fetchGroup, Message<?> message,
            MessageResultImpl messageResult) throws MailboxException, IOException, MimeException {
        Collection<FetchGroup.PartContentDescriptor> partContent = fetchGroup.getPartContentDescriptors();
        if (partContent != null) {
            for (FetchGroup.PartContentDescriptor descriptor : partContent) {
                addPartContent(descriptor, message, messageResult);
            }
        }
    }

    /** Resolves one descriptor: each requested content kind for its MIME path. */
    private static void addPartContent(FetchGroup.PartContentDescriptor descriptor, Message<?> message,
            MessageResultImpl messageResult) throws MailboxException, IOException, MimeException {
        final MimePath mimePath = descriptor.path();
        final int content = descriptor.content();
        if ((content & MessageResult.FetchGroup.FULL_CONTENT) > 0) {
            addFullContent(message, messageResult, mimePath);
        }
        if ((content & MessageResult.FetchGroup.BODY_CONTENT) > 0) {
            addBodyContent(message, messageResult, mimePath);
        }
        if ((content & MessageResult.FetchGroup.MIME_CONTENT) > 0) {
            addMimeBodyContent(message, messageResult, mimePath);
        }
        if ((content & MessageResult.FetchGroup.HEADERS) > 0) {
            addHeaders(message, messageResult, mimePath);
        }
        if ((content & MessageResult.FetchGroup.MIME_HEADERS) > 0) {
            addMimeHeaders(message, messageResult, mimePath);
        }
    }

    /**
     * Parses the full message and navigates to the part addressed by the
     * given MIME path. A missing part yields an empty builder rather than an
     * error.
     */
    private static PartContentBuilder build(int[] path, final Message<?> message)
            throws IOException, MimeException {
        final InputStream stream = message.getFullContent();
        PartContentBuilder result = new PartContentBuilder();
        result.parse(stream);
        try {
            for (int i = 0; i < path.length; i++) {
                final int next = path[i];
                result.to(next);
            }
        } catch (PartContentBuilder.PartNotFoundException e) {
            // Missing parts should return zero sized content
            // See http://markmail.org/message/2jconrj7scvdi5dj
            result.markEmpty();
        }
        return result;
    }

    /** Null-safe extraction of the position array from a MIME path. */
    private static final int[] path(MimePath mimePath) {
        final int[] result;
        if (mimePath == null) {
            result = null;
        } else {
            result = mimePath.getPositions();
        }
        return result;
    }

    private static void addHeaders(Message<?> message, MessageResultImpl messageResult, MimePath mimePath)
            throws IOException, MimeException {
        final int[] path = path(mimePath);
        if (path != null) {
            final PartContentBuilder builder = build(path, message);
            final List<MessageResult.Header> headers = builder.getMessageHeaders();
            messageResult.setHeaders(mimePath, headers.iterator());
        }
    }

    private static void addMimeHeaders(Message<?> message, MessageResultImpl messageResult, MimePath mimePath)
            throws IOException, MimeException {
        final int[] path = path(mimePath);
        if (path != null) {
            final PartContentBuilder builder = build(path, message);
            final List<MessageResult.Header> headers = builder.getMimeHeaders();
            messageResult.setMimeHeaders(mimePath, headers.iterator());
        }
    }

    private static void addBodyContent(Message<?> message, MessageResultImpl messageResult, MimePath mimePath)
            throws IOException, MimeException {
        final int[] path = path(mimePath);
        if (path != null) {
            final PartContentBuilder builder = build(path, message);
            final Content content = builder.getMessageBodyContent();
            messageResult.setBodyContent(mimePath, content);
        }
    }

    private static void addMimeBodyContent(Message<?> message, MessageResultImpl messageResult, MimePath mimePath)
            throws IOException, MimeException {
        final int[] path = path(mimePath);
        // FIX: guard against a null path, consistent with every sibling
        // add*() method above — previously this would NPE inside build().
        if (path != null) {
            final PartContentBuilder builder = build(path, message);
            final Content content = builder.getMimeBodyContent();
            messageResult.setMimeBodyContent(mimePath, content);
        }
    }

    private static void addFullContent(Message<?> message, MessageResultImpl messageResult, MimePath mimePath)
            throws MailboxException, IOException, MimeException {
        final int[] path = path(mimePath);
        if (path != null) {
            final PartContentBuilder builder = build(path, message);
            final Content content = builder.getFullContent();
            messageResult.setFullContent(mimePath, content);
        }
    }
}