gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.gemstone.gemfire.internal.admin.remote;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.io.Serializable;
import java.util.Set;

import com.gemstone.gemfire.DataSerializable;
import com.gemstone.gemfire.DataSerializer;
import com.gemstone.gemfire.cache.Cache;
import com.gemstone.gemfire.cache.ClientSession;
import com.gemstone.gemfire.distributed.DistributedMember;
import com.gemstone.gemfire.cache.InterestRegistrationListener;
import com.gemstone.gemfire.cache.server.ServerLoad;
import com.gemstone.gemfire.cache.server.ServerLoadProbe;
import com.gemstone.gemfire.cache.server.ServerLoadProbeAdapter;
import com.gemstone.gemfire.cache.server.ServerMetrics;
import com.gemstone.gemfire.cache.server.ClientSubscriptionConfig;
import com.gemstone.gemfire.internal.InternalDataSerializer;
import com.gemstone.gemfire.internal.Version;
import com.gemstone.gemfire.internal.admin.AdminBridgeServer;
import com.gemstone.gemfire.internal.cache.AbstractCacheServer;
import com.gemstone.gemfire.internal.cache.CacheServerImpl;
import com.gemstone.gemfire.internal.i18n.LocalizedStrings;

/**
 * A remote (serializable) implementation of <code>BridgeServer</code>
 * that is passed between administration VMs and VMs that host caches
 * with bridge servers.
 *
 * <p>This is a read-only snapshot of a live {@link CacheServerImpl}: all
 * lifecycle operations ({@link #start()}, {@link #stop()}, cache access,
 * client-session / interest-listener operations) intentionally throw
 * {@link UnsupportedOperationException}.  The snapshot travels over the
 * wire via {@link #toData} / {@link #fromData}; the field order in those
 * two methods is the wire format and must stay in sync.
 *
 * @author David Whitlock
 * @since 4.0
 */
public class RemoteBridgeServer extends AbstractCacheServer
    implements AdminBridgeServer, DataSerializable {

  private static final long serialVersionUID = 8417391824652384959L;

  /** Is this bridge server running? (captured at snapshot time) */
  private boolean isRunning;

  /** The id of this bridge server (identity hash of the server impl in the host VM) */
  private int id;

  // /**
  //  * The name of the directory in which to store overflowed files for client ha
  //  * queue
  //  */
  // private String overflowDirectory=null;

  ////////////////////// Constructors //////////////////////

  /**
   * A "copy constructor" that creates a <code>RemoteBridgeServer</code>
   * from the contents of the given <code>BridgeServerImpl</code>.
   *
   * <p>Copies every externally-visible configuration attribute of the live
   * server; non-serializable load probes are replaced by a descriptive stub
   * (see {@link #getProbe}).
   */
  RemoteBridgeServer(CacheServerImpl impl) {
    super(null);
    this.port = impl.getPort();
    this.bindAddress = impl.getBindAddress();
    this.hostnameForClients = impl.getHostnameForClients();
    // notify-by-subscription is only copied when the legacy override flag is set
    if (CacheServerImpl.ENABLE_NOTIFY_BY_SUBSCRIPTION_FALSE) {
      this.notifyBySubscription = impl.getNotifyBySubscription();
    }
    this.socketBufferSize = impl.getSocketBufferSize();
    this.maximumTimeBetweenPings = impl.getMaximumTimeBetweenPings();
    this.isRunning = impl.isRunning();
    this.maxConnections = impl.getMaxConnections();
    this.maxThreads = impl.getMaxThreads();
    // identity hash serves as a per-VM id for correlating admin views
    this.id = System.identityHashCode(impl);
    this.maximumMessageCount = impl.getMaximumMessageCount();
    this.messageTimeToLive = impl.getMessageTimeToLive();
    this.groups = impl.getGroups();
    this.loadProbe = getProbe(impl.getLoadProbe());
    this.loadPollInterval = impl.getLoadPollInterval();
    this.tcpNoDelay = impl.getTcpNoDelay();
    // added for configuration of ha overflow
    ClientSubscriptionConfig cscimpl = impl.getClientSubscriptionConfig();
    this.clientSubscriptionConfig.setEvictionPolicy(cscimpl.getEvictionPolicy());
    this.clientSubscriptionConfig.setCapacity(cscimpl.getCapacity());
    // disk store and overflow directory are mutually exclusive: a named disk
    // store takes precedence, otherwise the plain overflow directory is kept
    String diskStoreName = cscimpl.getDiskStoreName();
    if (diskStoreName != null) {
      this.clientSubscriptionConfig.setDiskStoreName(diskStoreName);
    } else {
      this.clientSubscriptionConfig.setOverflowDirectory(cscimpl.getOverflowDirectory());
    }
  }

  /**
   * Returns a probe that is safe to serialize: the probe itself when it is
   * {@link Serializable}, otherwise a {@link RemoteLoadProbe} stub carrying
   * only the probe's string description ("" when the probe is null).
   */
  private ServerLoadProbe getProbe(ServerLoadProbe probe) {
    if(probe == null) {
      return new RemoteLoadProbe("");
    }
    if(probe instanceof Serializable) {
      return probe;
    } else {
      return new RemoteLoadProbe(probe.toString());
    }
  }

  /**
   * Constructor for de-serialization
   */
  public RemoteBridgeServer() {
    super(null);
  }

  //////////////////// Instance Methods ////////////////////

  /** Not supported on a remote snapshot; always throws. */
  @Override
  public void start() throws IOException {
    throw new UnsupportedOperationException(LocalizedStrings.RemoteBridgeServer_A_REMOTE_BRIDGESERVER_CANNOT_BE_STARTED.toLocalizedString());
  }

  /** Returns the running state captured when the snapshot was taken. */
  public boolean isRunning() {
    return this.isRunning;
  }

  /** Not supported on a remote snapshot; always throws. */
  public void stop() {
    throw new UnsupportedOperationException(LocalizedStrings.RemoteBridgeServer_A_REMOTE_BRIDGESERVER_CANNOT_BE_STOPPED.toLocalizedString());
  }

  /**
   * Returns the cache that is served by this bridge server or
   * <code>null</code> if this server is not running.
   *
   * <p>NOTE(review): despite the inherited javadoc contract above, this
   * remote implementation never returns — it always throws, because the
   * cache lives in the host VM, not the admin VM.
   */
  @Override
  public Cache getCache() {
    throw new UnsupportedOperationException(LocalizedStrings.RemoteBridgeServer_CANNOT_GET_THE_CACHE_OF_A_REMOTE_BRIDGESERVER.toLocalizedString());
  }

  /** Not supported on a remote snapshot; always throws. */
  public ClientSession getClientSession(String durableClientId) {
    String s = LocalizedStrings.RemoteBridgeServer_CANNOT_GET_CLIENT_SESSION.toLocalizedString();
    throw new UnsupportedOperationException(s);
  }

  /** Not supported on a remote snapshot; always throws. */
  public ClientSession getClientSession(DistributedMember member) {
    String s = LocalizedStrings.RemoteBridgeServer_CANNOT_GET_CLIENT_SESSION.toLocalizedString();
    throw new UnsupportedOperationException(s);
  }

  /** Not supported on a remote snapshot; always throws. */
  public Set getAllClientSessions() {
    String s = LocalizedStrings.RemoteBridgeServer_CANNOT_GET_ALL_CLIENT_SESSIONS.toLocalizedString();
    throw new UnsupportedOperationException(s);
  }

  /** Returns the (copied) client HA-queue subscription configuration. */
  public ClientSubscriptionConfig getClientSubscriptionConfig(){
    return this.clientSubscriptionConfig;
  }

  /** Returns the snapshot id (identity hash of the server impl in the host VM). */
  public int getId() {
    return this.id;
  }

  /**
   * Writes this snapshot to the stream.  The field order here IS the wire
   * format and must mirror {@link #fromData} exactly.  The tcpNoDelay flag
   * is only written for peers at version GFE 8.0 or later.
   */
  public void toData(DataOutput out)
    throws IOException {
    out.writeInt(this.port);
    out.writeBoolean(this.notifyBySubscription);
    out.writeBoolean(this.isRunning);
    out.writeInt(this.maxConnections);
    out.writeInt(this.id);
    out.writeInt(this.maximumTimeBetweenPings);
    out.writeInt(this.maximumMessageCount);
    out.writeInt(this.messageTimeToLive);
    out.writeInt(this.maxThreads);
    DataSerializer.writeString(this.bindAddress, out);
    DataSerializer.writeStringArray(this.groups, out);
    DataSerializer.writeString(this.hostnameForClients, out);
    DataSerializer.writeObject(this.loadProbe, out);
    DataSerializer.writePrimitiveLong(this.loadPollInterval, out);
    out.writeInt(this.socketBufferSize);
    if (InternalDataSerializer.getVersionForDataStream(out).compareTo(Version.GFE_80) >= 0) {
      out.writeBoolean(this.tcpNoDelay);
    }
    out.writeInt(this.getClientSubscriptionConfig().getCapacity());
    DataSerializer.writeString(this.getClientSubscriptionConfig()
        .getEvictionPolicy(), out);
    DataSerializer.writeString(this.getClientSubscriptionConfig()
        .getDiskStoreName(), out);
    // overflow directory is only on the wire when no disk store is named
    // (the reader keys off the null-ness of the diskStoreName it just read)
    if (this.getClientSubscriptionConfig().getDiskStoreName() == null) {
      DataSerializer.writeString(this.getClientSubscriptionConfig()
          .getOverflowDirectory(), out);
    }
  }

  /**
   * Reads a snapshot written by {@link #toData}; field order must match it
   * exactly.  The tcpNoDelay flag is only present for streams from peers at
   * version GFE 8.0 or later.
   */
  public void fromData(DataInput in)
    throws IOException, ClassNotFoundException {
    this.port = in.readInt();
    this.notifyBySubscription = in.readBoolean();
    this.isRunning = in.readBoolean();
    this.maxConnections = in.readInt();
    this.id = in.readInt();
    this.maximumTimeBetweenPings = in.readInt();
    this.maximumMessageCount = in.readInt();
    this.messageTimeToLive = in.readInt();
    this.maxThreads = in.readInt();
    setBindAddress(DataSerializer.readString(in));
    setGroups(DataSerializer.readStringArray(in));
    setHostnameForClients(DataSerializer.readString(in));
    setLoadProbe((ServerLoadProbe)DataSerializer.readObject(in));
    setLoadPollInterval(DataSerializer.readPrimitiveLong(in));
    this.socketBufferSize = in.readInt();
    if (InternalDataSerializer.getVersionForDataStream(in).compareTo(Version.GFE_80) >= 0) {
      this.tcpNoDelay = in.readBoolean();
    }
    this.getClientSubscriptionConfig().setCapacity(in.readInt());
    this.getClientSubscriptionConfig().setEvictionPolicy(
        DataSerializer.readString(in));
    // mirrors the writer: overflow directory only follows a null disk store name
    String diskStoreName = DataSerializer.readString(in);
    if (diskStoreName != null) {
      this.getClientSubscriptionConfig().setDiskStoreName(diskStoreName);
    } else {
      this.getClientSubscriptionConfig().setOverflowDirectory(
          DataSerializer.readString(in));
    }
  }

  /**
   * Serializable stand-in for a non-serializable {@link ServerLoadProbe}:
   * keeps only the original probe's description and reports no load.
   */
  private static class RemoteLoadProbe extends ServerLoadProbeAdapter {
    /** The description of this callback */
    private final String desc;

    public RemoteLoadProbe(String desc) {
      this.desc = desc;
    }

    /** Always null — a remote stub cannot measure load. */
    public ServerLoad getLoad(ServerMetrics metrics) {
      return null;
    }

    @Override
    public String toString() {
      return desc;
    }
  }

  /**
   * Registers a new <code>InterestRegistrationListener</code> with the set of
   * <code>InterestRegistrationListener</code>s.  Not supported on a remote
   * snapshot; always throws.
   *
   * @param listener
   *          The <code>InterestRegistrationListener</code> to register
   *
   * @since 5.8Beta
   */
  public void registerInterestRegistrationListener(
      InterestRegistrationListener listener) {
    final String s = LocalizedStrings.
      RemoteBridgeServer_INTERESTREGISTRATIONLISTENERS_CANNOT_BE_REGISTERED_ON_A_REMOTE_BRIDGESERVER
        .toLocalizedString();
    throw new UnsupportedOperationException(s);
  }

  /**
   * Unregisters an existing <code>InterestRegistrationListener</code> from
   * the set of <code>InterestRegistrationListener</code>s.  Not supported on
   * a remote snapshot; always throws.
   *
   * @param listener
   *          The <code>InterestRegistrationListener</code> to unregister
   *
   * @since 5.8Beta
   */
  public void unregisterInterestRegistrationListener(
      InterestRegistrationListener listener) {
    final String s = LocalizedStrings.
      RemoteBridgeServer_INTERESTREGISTRATIONLISTENERS_CANNOT_BE_UNREGISTERED_FROM_A_REMOTE_BRIDGESERVER
        .toLocalizedString();
    throw new UnsupportedOperationException(s);
  }

  /**
   * Returns a read-only set of <code>InterestRegistrationListener</code>s
   * registered with this notifier.  Not supported on a remote snapshot;
   * always throws.
   *
   * @return a read-only set of <code>InterestRegistrationListener</code>s
   *         registered with this notifier
   *
   * @since 5.8Beta
   */
  public Set getInterestRegistrationListeners() {
    final String s = LocalizedStrings.
      RemoteBridgeServer_INTERESTREGISTRATIONLISTENERS_CANNOT_BE_RETRIEVED_FROM_A_REMOTE_BRIDGESERVER
        .toLocalizedString();
    throw new UnsupportedOperationException(s);
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.jackrabbit.oak.spi.security.authorization.cug.impl;

import java.security.Principal;
import java.util.Collections;
import java.util.Set;
import javax.annotation.CheckForNull;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;

import com.google.common.collect.ImmutableSet;
import org.apache.jackrabbit.JcrConstants;
import org.apache.jackrabbit.oak.api.PropertyState;
import org.apache.jackrabbit.oak.api.Root;
import org.apache.jackrabbit.oak.api.Tree;
import org.apache.jackrabbit.oak.api.Type;
import org.apache.jackrabbit.oak.commons.PathUtils;
import org.apache.jackrabbit.oak.namepath.NamePathMapper;
import org.apache.jackrabbit.oak.plugins.tree.factories.RootFactory;
import org.apache.jackrabbit.oak.plugins.tree.factories.TreeFactory;
import org.apache.jackrabbit.oak.plugins.tree.TreeLocation;
import org.apache.jackrabbit.oak.plugins.tree.TreeType;
import org.apache.jackrabbit.oak.plugins.tree.TreeTypeProvider;
import org.apache.jackrabbit.oak.plugins.version.ReadOnlyVersionManager;
import org.apache.jackrabbit.oak.spi.security.Context;
import org.apache.jackrabbit.oak.spi.security.authorization.permission.AggregatedPermissionProvider;
import org.apache.jackrabbit.oak.spi.security.authorization.permission.Permissions;
import org.apache.jackrabbit.oak.spi.security.authorization.permission.RepositoryPermission;
import org.apache.jackrabbit.oak.spi.security.authorization.permission.TreePermission;
import org.apache.jackrabbit.oak.spi.security.privilege.PrivilegeBits;
import org.apache.jackrabbit.oak.spi.security.privilege.PrivilegeConstants;
import org.apache.jackrabbit.oak.spi.state.NodeState;
import org.apache.jackrabbit.oak.spi.state.NodeStateUtils;
import org.apache.jackrabbit.oak.plugins.tree.TreeUtil;

/**
 * Permission provider implementing "closed user group" (CUG) evaluation:
 * it only ever grants (or denies) READ to trees located inside a configured
 * set of supported paths, based on whether one of the current principals is
 * listed in the effective CUG policy.  All non-read permissions are outside
 * its scope and reported as not supported to the aggregating provider.
 */
class CugPermissionProvider implements AggregatedPermissionProvider, CugConstants {

    /** The only privileges this provider can ever grant: the read aggregate and its parts. */
    private static final Set<String> READ_PRIVILEGE_NAMES = ImmutableSet.of(
            PrivilegeConstants.JCR_READ,
            PrivilegeConstants.REP_READ_NODES,
            PrivilegeConstants.REP_READ_PROPERTIES
    );

    // live root backing this provider; re-snapshotted on refresh()
    private final Root root;
    private final String workspaceName;
    // names of the subject's principals, matched against rep:principalNames on a CUG
    private final String[] principalNames;

    private final TreeTypeProvider typeProvider;
    private final Context ctx;

    // paths below which CUG policies may exist at all
    private final SupportedPaths supportedPaths;

    // read-only snapshot of 'root'; replaced on refresh()
    private Root immutableRoot;
    // lazily created, reset on refresh()
    private ReadOnlyVersionManager versionManager;
    // cache of top-level CUG-bearing paths; rebuilt on refresh()
    private TopLevelPaths topPaths;

    CugPermissionProvider(@Nonnull Root root,
                          @Nonnull String workspaceName,
                          @Nonnull Set<Principal> principals,
                          @Nonnull Set<String> supportedPaths,
                          @Nonnull Context ctx) {
        this.root = root;
        this.workspaceName = workspaceName;

        immutableRoot = RootFactory.createReadOnlyRoot(root);
        principalNames = new String[principals.size()];
        int i = 0;
        for (Principal p : principals) {
            principalNames[i++] = p.getName();
        }

        this.supportedPaths = new SupportedPaths(supportedPaths);
        this.typeProvider = new TreeTypeProvider(ctx);
        this.ctx = ctx;

        topPaths = new TopLevelPaths(immutableRoot);
    }

    /**
     * Creates the tree permission for the child identified by {@code childName}
     * and {@code childState} below {@code parent}, resolving the child's
     * {@link TreeType} from the parent's type first.
     */
    @Nonnull
    TreePermission getTreePermission(@Nonnull Tree parent, @Nonnull TreeType parentType, @Nonnull String childName, @Nonnull NodeState childState, @Nonnull AbstractTreePermission parentPermission) {
        Tree t = TreeFactory.createReadOnlyTree(parent, childName, childState);
        TreeType type = typeProvider.getType(t, parentType);
        return getTreePermission(t, type, parentPermission);
    }

    /**
     * Returns {@code true} if any of this provider's principal names appears
     * in the rep:principalNames multi-value property of the given CUG tree.
     */
    boolean isAllow(@Nonnull Tree cugTree) {
        PropertyState princNamesState = cugTree.getProperty(REP_PRINCIPAL_NAMES);
        if (princNamesState != null) {
            for (String pName : princNamesState.getValue(Type.STRINGS)) {
                for (String pN : principalNames) {
                    if (pName.equals(pN)) {
                        return true;
                    }
                }
            }
        }
        return false;
    }

    //-------------------------------------------------< PermissionProvider >---
    @Override
    public void refresh() {
        // re-snapshot the root and invalidate everything derived from it
        immutableRoot = RootFactory.createReadOnlyRoot(root);
        versionManager = null;
        topPaths = new TopLevelPaths(immutableRoot);
    }

    @Nonnull
    @Override
    public Set<String> getPrivileges(@Nullable Tree tree) {
        // either the full read set or nothing — this provider knows no other privileges
        if (tree != null && canRead(tree)) {
            return READ_PRIVILEGE_NAMES;
        } else {
            return Collections.emptySet();
        }
    }

    @Override
    public boolean hasPrivileges(@Nullable Tree tree, @Nonnull String... privilegeNames) {
        if (tree == null) {
            return false;
        }
        // any non-read privilege in the request makes it unanswerable here
        for (String privilegeName : privilegeNames) {
            if (!READ_PRIVILEGE_NAMES.contains(privilegeName)) {
                return false;
            }
        }
        return canRead(tree);
    }

    @Nonnull
    @Override
    public RepositoryPermission getRepositoryPermission() {
        // repository-level permissions are never granted by CUGs
        return RepositoryPermission.EMPTY;
    }

    @Nonnull
    @Override
    public TreePermission getTreePermission(@Nonnull Tree tree, @Nonnull TreePermission parentPermission) {
        if (TreePermission.NO_RECOURSE == parentPermission) {
            // the aggregator must not descend below a NO_RECOURSE answer
            throw new IllegalStateException("Attempt to create tree permission for path '"+ tree.getPath() +"', which is either not supported or doesn't contain any CUGs.");
        }
        Tree immutableTree = getImmutableTree(tree);
        TreeType type = typeProvider.getType(immutableTree);
        return getTreePermission(immutableTree, type, parentPermission);
    }

    @Override
    public boolean isGranted(@Nonnull Tree tree, PropertyState property, long permissions) {
        // only pure read permission requests can ever be granted
        if (isRead(permissions)) {
            return canRead(tree);
        } else {
            return false;
        }
    }

    @Override
    public boolean isGranted(@Nonnull String oakPath, @Nonnull String jcrActions) {
        TreeLocation location = TreeLocation.create(immutableRoot, oakPath);
        // locations owned by the authorization context itself (e.g. the CUG
        // policy nodes) and hidden paths are never granted
        if (ctx.definesLocation(location) || NodeStateUtils.isHiddenPath(oakPath)) {
            return false;
        }

        long permissions = Permissions.getPermissions(jcrActions, location, false);
        return isGranted(location, permissions);
    }

    //---------------------------------------< AggregatedPermissionProvider >---
    @Nonnull
    @Override
    public PrivilegeBits supportedPrivileges(@Nullable Tree tree, @Nullable PrivilegeBits privilegeBits) {
        if (tree == null) {
            return PrivilegeBits.EMPTY;
        }

        PrivilegeBits pb;
        if (privilegeBits == null) {
            pb = PrivilegeBits.BUILT_IN.get(PrivilegeConstants.JCR_READ);
        } else {
            // intersect the requested bits with jcr:read — everything else is unsupported
            pb = PrivilegeBits.getInstance(privilegeBits);
            pb.retain(PrivilegeBits.BUILT_IN.get(PrivilegeConstants.JCR_READ));
        }

        if (pb.isEmpty() || !includesCug(tree)) {
            return PrivilegeBits.EMPTY;
        } else {
            return pb;
        }
    }

    @Override
    public long supportedPermissions(@Nullable Tree tree, @Nullable PropertyState property, long permissions) {
        if (tree == null) {
            // repository level permissions are not supported
            return Permissions.NO_PERMISSION;
        }

        long supported = permissions & Permissions.READ;
        if (supported != Permissions.NO_PERMISSION && includesCug(tree)) {
            return supported;
        } else {
            return Permissions.NO_PERMISSION;
        }
    }

    @Override
    public long supportedPermissions(@Nonnull TreeLocation location, long permissions) {
        long supported = permissions & Permissions.READ;
        if (supported != Permissions.NO_PERMISSION && includesCug(getTreeFromLocation(location))) {
            return supported;
        } else {
            return Permissions.NO_PERMISSION;
        }
    }

    @Override
    public long supportedPermissions(@Nonnull TreePermission treePermission, @Nullable PropertyState property, long permissions) {
        long supported = permissions & Permissions.READ;
        if (supported != Permissions.NO_PERMISSION && (treePermission instanceof CugTreePermission) && ((CugTreePermission) treePermission).isInCug()) {
            return supported;
        } else {
            return Permissions.NO_PERMISSION;
        }
    }

    @Override
    public boolean isGranted(@Nonnull TreeLocation location, long permissions) {
        if (isRead(permissions)) {
            Tree tree = getTreeFromLocation(location);
            if (tree != null) {
                return isGranted(tree, location.getProperty(), permissions);
            }
        }
        return false;
    }

    /**
     * Core dispatch: builds the appropriate {@link TreePermission} for the
     * given immutable tree, returning {@link TreePermission#NO_RECOURSE} as
     * soon as it is certain no CUG can affect the subtree.
     */
    @Nonnull
    public TreePermission getTreePermission(@Nonnull Tree immutableTree, @Nonnull TreeType type, @Nonnull TreePermission parentPermission) {
        if (!isSupportedType(type) || !topPaths.hasAny()) {
            return TreePermission.NO_RECOURSE;
        }

        TreePermission tp;
        boolean parentIsCugPermission = (parentPermission instanceof CugTreePermission);
        if (TreeType.VERSION == type) {
            tp = createVersionPermission(immutableTree, type, parentPermission, parentIsCugPermission);
        } else {
            if (parentIsCugPermission) {
                // already inside a CUG-governed subtree: inherit
                tp = new CugTreePermission(immutableTree, type, parentPermission, this);
            } else {
                String path = immutableTree.getPath();
                if (includes(path)) {
                    if (topPaths.contains(path)) {
                        tp = new CugTreePermission(immutableTree, type, parentPermission, this);
                    } else {
                        // supported path but no CUG at/below it
                        tp = TreePermission.NO_RECOURSE;
                    }
                } else if (mayContain(path) || isJcrSystemPath(immutableTree)) {
                    // an ancestor of a supported path (or jcr:system): keep descending
                    tp = new EmptyCugTreePermission(immutableTree, type, this);
                } else {
                    tp = TreePermission.NO_RECOURSE;
                }
            }
        }
        return tp;
    }

    //--------------------------------------------------------------------------

    private static boolean isJcrSystemPath(@Nonnull Tree tree) {
        return JcrConstants.JCR_SYSTEM.equals(tree.getName());
    }

    /** True only for the pure read permissions (node, property, or both). */
    private static boolean isRead(long permission) {
        return permission == Permissions.READ_NODE || permission == Permissions.READ_PROPERTY || permission == Permissions.READ;
    }

    private static boolean isSupportedType(@Nonnull TreeType type) {
        return type == TreeType.DEFAULT || type == TreeType.VERSION;
    }

    /** True if a CUG policy is effective at the given tree (possibly inherited). */
    private boolean includesCug(@CheckForNull Tree tree) {
        if (tree != null) {
            Tree immutableTree = getImmutableTree(tree);
            TreeType type = typeProvider.getType(immutableTree);
            if (isSupportedType(type) && topPaths.hasAny()) {
                return getCugRoot(immutableTree, type) != null;
            }
        }
        return false;
    }

    private boolean includes(@Nonnull String path) {
        return supportedPaths.includes(path);
    }

    private boolean mayContain(@Nonnull String path) {
        return supportedPaths.mayContainCug(path) && topPaths.contains(path);
    }

    /**
     * Returns the {@code tree} that holds a CUG policy in the ancestry of the
     * given {@code tree} with the specified {@code path} or {@code null} if no
     * such tree exists and thus no CUG is effective at the specified path.
     *
     * @param immutableTree The target tree.
     * @param type the type of this tree.
     * @return the {@code tree} holding the CUG policy that effects the specified
     * path or {@code null} if no such policy exists.
     */
    @CheckForNull
    private Tree getCugRoot(@Nonnull Tree immutableTree, @Nonnull TreeType type) {
        Tree tree = immutableTree;
        String p = immutableTree.getPath();
        // inside the version store, evaluate against the versionable node instead
        if (TreeType.VERSION == type && !ReadOnlyVersionManager.isVersionStoreTree(tree)) {
            tree = getVersionManager().getVersionable(immutableTree, workspaceName);
            if (tree == null) {
                return null;
            }
            p = tree.getPath();
        }
        if (!includes(p)) {
            return null;
        }
        if (CugUtil.hasCug(tree)) {
            return tree;
        }
        String parentPath;
        // walk up looking for an inherited CUG.
        // NOTE(review): 'p' is never reassigned inside this loop, so
        // 'parentPath' evaluates to the same value on every iteration even
        // though 'tree' keeps climbing; the includes(parentPath) guard is
        // therefore only checked against the immediate parent's path.
        // Presumably this should track tree.getPath() (or update p) — verify
        // against upstream before relying on the early-break behavior.
        while (!tree.isRoot()) {
            parentPath = PathUtils.getParentPath(p);
            if (!includes(parentPath)) {
                break;
            }
            tree = tree.getParent();
            if (CugUtil.hasCug(tree)) {
                return tree;
            }
        }
        return null;
    }

    /** True if an effective CUG exists for the tree and it lists one of our principals. */
    private boolean canRead(@Nonnull Tree tree) {
        Tree immutableTree = getImmutableTree(tree);
        TreeType type = typeProvider.getType(immutableTree);
        if (!isSupportedType(type) || !topPaths.hasAny()) {
            return false;
        }
        Tree cugRoot = getCugRoot(immutableTree, type);
        if (cugRoot != null) {
            Tree cugTree = CugUtil.getCug(cugRoot);
            if (cugTree != null) {
                return isAllow(cugTree);
            }
        }
        return false;
    }

    @Nonnull
    private Tree getImmutableTree(@Nonnull Tree tree) {
        return TreeUtil.isReadOnlyTree(tree) ? tree : immutableRoot.getTree(tree.getPath());
    }

    /**
     * Resolves the tree backing a location: the location's own tree for a
     * node, the parent's tree for a property, walking up until a tree exists
     * or the root is reached.
     */
    @CheckForNull
    private static Tree getTreeFromLocation(@Nonnull TreeLocation location) {
        Tree tree = (location.getProperty() == null) ? location.getTree() : location.getParent().getTree();
        while (tree == null && !PathUtils.denotesRoot(location.getPath())) {
            location = location.getParent();
            tree = location.getTree();
        }
        return tree;
    }

    /**
     * Builds the permission for a tree of type VERSION by mapping it back to
     * its versionable node and evaluating the CUG state there; version-store
     * trees themselves only propagate/inherit the parent's CUG state.
     */
    @Nonnull
    private TreePermission createVersionPermission(@Nonnull Tree tree, @Nonnull TreeType type, @Nonnull TreePermission parent, boolean parentIsCugPermission) {
        if (ReadOnlyVersionManager.isVersionStoreTree(tree)) {
            if (parentIsCugPermission) {
                return new CugTreePermission(tree, type, parent, this);
            } else {
                return new EmptyCugTreePermission(tree, type, this);
            }
        } else {
            Tree versionableTree = getVersionManager().getVersionable(tree, workspaceName);
            if (versionableTree == null) {
                return TreePermission.NO_RECOURSE;
            }
            TreeType versionableType = typeProvider.getType(versionableTree);
            if (!isSupportedType(versionableType)) {
                return TreePermission.NO_RECOURSE;
            }

            String path = versionableTree.getPath();
            boolean isSupportedPath = false;

            // test if the versionable node holds a cug
            Tree cug = null;
            if (parentIsCugPermission) {
                cug = CugUtil.getCug(versionableTree);
            } else if (includes(path)) {
                isSupportedPath = true;
                // the versionable tree might be included in a cug defined by
                // a parent node -> need to search for inherited cugs as well.
                Tree cugRoot = getCugRoot(versionableTree, versionableType);
                if (cugRoot != null) {
                    cug = CugUtil.getCug(cugRoot);
                }
            }

            TreePermission tp;
            if (cug != null) {
                // backing versionable tree holds a cug
                tp = new CugTreePermission(tree, type, parent, this, true, isAllow(cug), CugUtil.hasNestedCug(cug));
            } else if (parentIsCugPermission) {
                CugTreePermission ctp = (CugTreePermission) parent;
                tp = new CugTreePermission(tree, type, parent, this, ctp.isInCug(), ctp.isAllow(), ctp.hasNestedCug());
            } else if (isSupportedPath) {
                tp = new CugTreePermission(tree, type, parent, this, false, false, false);
            } else if (mayContain(path)) {
                tp = new EmptyCugTreePermission(tree, type, this);
            } else {
                tp = TreePermission.NO_RECOURSE;
            }
            return tp;
        }
    }

    /** Lazily creates the read-only version manager for the current snapshot. */
    @Nonnull
    private ReadOnlyVersionManager getVersionManager() {
        if (versionManager == null) {
            versionManager = ReadOnlyVersionManager.getInstance(immutableRoot, NamePathMapper.DEFAULT);
        }
        return versionManager;
    }
}
/**
 * Copyright 2017 esutdal
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.reactivetechnologies.mq.disk;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;
import java.util.Collection;
import java.util.NoSuchElementException;

import com.reactivetechnologies.mq.DataSerializable;
import com.reactivetechnologies.mq.exceptions.BlazeInternalException;

/**
 * A wrapper on {@linkplain FileBackedQueue} to be used as a generic queue.
 *
 * <p>Elements are serialized via their {@link DataSerializable} contract into
 * a byte array and stored at the tail of the backing file; dequeue reads the
 * head and reconstructs an element reflectively via the element type's no-arg
 * constructor (so {@code E} must provide one).
 *
 * <p>All I/O failures are wrapped in the unchecked
 * {@link BlazeInternalException}.  Not thread-safe unless the underlying
 * {@link FileBackedQueue} is.
 *
 * @author esutdal
 *
 * @param <E> the element type; must expose an accessible no-arg constructor
 */
public class LocalDurableQueue<E extends DataSerializable> implements Closeable {

  /**
   * Get the absolute path of the underlying file.
   * @return absolute file-system path of the queue file
   */
  public String getPath() {
    return file.getFilePath().toAbsolutePath().toString();
  }

  /**
   * Convenience constructor that creates the backing file if absent.
   * Equivalent to {@code LocalDurableQueue(type, fileName, dir, true)}.
   *
   * @param type element class, used for reflective instantiation on dequeue
   * @param fileName name of the backing queue file
   * @param dir directory containing the queue file
   */
  public LocalDurableQueue(Class<E> type, String fileName, String dir) {
    this(type, fileName, dir, true);
  }

  private String directory;

  /**
   * Lists the queue database files present in this queue's directory.
   *
   * @return file names ending with {@link FileBackedQueue#DB_FILE_SUFF}, or
   *         {@code null} if the configured directory does not exist or is not
   *         a directory (callers must null-check)
   */
  public String[] listFilesInDir() {
    File fir = new File(directory);
    if (fir.isDirectory()) {
      return fir.list(new FilenameFilter() {
        @Override
        public boolean accept(File dir, String name) {
          File f = new File(dir, name);
          return f.isFile() && name.endsWith(FileBackedQueue.DB_FILE_SUFF);
        }
      });
    }
    return null;
  }

  /**
   * Opens (and optionally creates) the backing queue file.
   *
   * @param type element class, used for reflective instantiation on dequeue
   * @param fileName name of the backing queue file
   * @param dir directory containing the queue file
   * @param createIfAbsent whether to create the file when it does not exist
   * @throws BlazeInternalException if the queue file cannot be opened
   *         (the underlying {@link IOException} is attached as the cause)
   */
  public LocalDurableQueue(Class<E> type, String fileName, String dir, boolean createIfAbsent) {
    super();
    this.type = type;
    directory = dir;
    try {
      file = new FileBackedQueue(dir, fileName, createIfAbsent);
    } catch (IOException e) {
      throw new BlazeInternalException("Unable to open queue file", e);
    }
  }

  private FileBackedQueue file;
  private final Class<E> type;

  /**
   * Add all items one by one. This method simply invokes {@link #add} iteratively.
   * @param items items to enqueue, in iteration order
   * @return always {@code true} (a failing add throws instead)
   */
  public boolean addAll(Collection<? extends E> items) {
    for (E each : items) {
      add(each);
    }
    return true;
  }

  /**
   * Delete the associated file and release resources.
   */
  public void destroy() {
    file.delete();
  }

  /**
   * If queue is empty
   * @return {@code true} when no element remains to be dequeued
   */
  public boolean isEmpty() {
    return file.isEmpty();
  }

  /**
   * Current size of the queue. Note, the file size is always incremental though.
   * So it is advisable to {@link #destroy()} an empty instance to free up file system.
   * @return number of elements currently enqueued
   */
  public int size() {
    return file.size();
  }

  /**
   * Add next item to queue at tail.
   * @param item element to enqueue (a {@code null} item is stored as an empty record)
   * @return always {@code true} (a failing add throws {@link BlazeInternalException})
   */
  public boolean add(E item) {
    try {
      file.addTail(objectToBytes(item));
      return true;
    } catch (IOException e) {
      throw new BlazeInternalException("While enqueuing to QueuedFile", e);
    }
  }

  /**
   * Serializes an element via {@link DataSerializable#writeData}; a null
   * element yields an empty byte array.
   */
  private byte[] objectToBytes(E e) throws IOException {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    if (e != null) {
      e.writeData(new DataOutputStream(out));
    }
    return out.toByteArray();
  }

  /**
   * Reconstructs an element from its serialized form.  A fresh instance is
   * created reflectively through the element type's no-arg constructor
   * ({@code Class.newInstance()} is deprecated since Java 9 and propagates
   * the constructor's checked exceptions unchecked, so the explicit
   * constructor lookup is used instead); a null payload yields a
   * default-constructed, unpopulated instance.
   */
  private E bytesToObject(byte[] b) throws ReflectiveOperationException, IOException {
    E e = type.getDeclaredConstructor().newInstance();
    if (b != null) {
      e.readData(new DataInputStream(new ByteArrayInputStream(b)));
    }
    return e;
  }

  /**
   * Dequeue and return the head of the queue if available, else return null.
   * @return the head element, or {@code null} when the queue is empty
   */
  public E poll() {
    if (file.isEmpty())
      return null;
    return take();
  }

  /** Removes the head record and deserializes it; wraps any failure. */
  private E take() {
    try {
      byte[] b = file.getHead();
      return bytesToObject(b);
    } catch (Exception e) {
      throw new BlazeInternalException("While dequeuing from QueuedFile", e);
    }
  }

  /**
   * Remove and return the head of the queue. This method is similar to {@link #poll()}
   * only that it will throw an exception if no item remains to be dequeued.
   * @return the head element
   * @throws NoSuchElementException if the queue is empty
   */
  public E remove() {
    if (isEmpty())
      throw new NoSuchElementException();
    return take();
  }

  /**
   * Close the underlying file. This does not delete the file. Use {@link #destroy()}
   * to do that.
   */
  @Override
  public void close() throws IOException {
    file.close();
  }
}
/******************************************************************************* * * Copyright 2015 Walmart, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * *******************************************************************************/ package com.oneops.daq.dao; import com.oneops.daq.Util; import com.oneops.daq.domain.Chart; import com.oneops.daq.domain.Series; import com.oneops.ops.dao.CassandraConstants; import com.oneops.ops.dao.PerfDataAccessor; import me.prettyprint.cassandra.serializers.BytesArraySerializer; import me.prettyprint.cassandra.serializers.DoubleSerializer; import me.prettyprint.cassandra.serializers.LongSerializer; import me.prettyprint.cassandra.serializers.StringSerializer; import me.prettyprint.hector.api.beans.HColumn; import me.prettyprint.hector.api.beans.HSuperColumn; import me.prettyprint.hector.api.mutation.Mutator; import org.apache.log4j.Logger; import org.springframework.stereotype.Component; import java.io.File; import java.io.IOException; import java.nio.ByteBuffer; import java.nio.channels.FileChannel; import java.nio.file.Paths; import java.nio.file.StandardOpenOption; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicLong; /** * PerfDao - encapsulates cassandra data access for performance data. 
*/ @Component public class PerfDao implements CassandraConstants { protected static final StringSerializer stringSerializer = StringSerializer.get(); protected static final BytesArraySerializer bytesSerializer = BytesArraySerializer.get(); protected static final LongSerializer longSerializer = LongSerializer.get(); protected static final DoubleSerializer doubleSerializer = DoubleSerializer.get(); protected static final String DATA_CF = "data"; protected static final String HEADER_SCF = "header"; protected static final String CI_METRIC_CF = "ci_metric"; protected static final String CHART_SCF = "chart"; private static Logger logger = Logger.getLogger(PerfDao.class); private static FileChannel statChannel; private String stateFilename; private PerfDataAccessor perfDataAccessor; public final AtomicLong eventCounter = new AtomicLong(); public final AtomicLong hectorExceptionCount = new AtomicLong(); public final AtomicLong jmsExceptionCount = new AtomicLong(); public final AtomicLong batchDuration = new AtomicLong(); // tmp counters to show conversion public final AtomicLong oldCount = new AtomicLong(); public final AtomicLong newCount = new AtomicLong(); /** * Sets the state filename and open a file channel for writing. * * @param filename file name of state */ public void setStateFilename(String filename) { stateFilename = filename; File sFile = new File(stateFilename); try { if (!sFile.exists()) { sFile.createNewFile(); } logger.info("Creating the file channel for " + stateFilename); statChannel = FileChannel.open(Paths.get(stateFilename), StandardOpenOption.WRITE); } catch (Exception ex) { logger.error("Error setting stat file." + sFile.getAbsolutePath(), ex); System.exit(1); } } /** * Append stats string to the perfsink.state file. * * @param stat stat message * @throws IOException throws if there is any error writing to the channel. 
*/ public void appendStat(String stat) throws IOException { statChannel.write(ByteBuffer.wrap(stat.getBytes())); statChannel.force(false); } /** * Write stats string to the perfsink.state file. This will truncate the * existing content. * * @param stat stat message * @throws IOException throws if there is any error writing to the channel. */ public void writeStat(String stat) throws IOException { statChannel.truncate(0); appendStat(stat); } /** * Closes the stat file channel. */ public void closeStatFile() { if (statChannel != null && statChannel.isOpen()) { try { statChannel.close(); } catch (IOException ex) { logger.error("Error closing the stat channel for " + stateFilename, ex); } } } /** * Gets the chart. * * @param key the key * @return the chart */ public Chart getChart(String key) { Chart chart = new Chart(); chart.setKey(key); List<HSuperColumn<String, String, String>> superColumns = perfDataAccessor.getChart(key); // create maps for ds,rra,cdp (aggregator will use the zoneMap) Map<String, Series> seriesMap = new HashMap<>(); chart.setSeriesMap(seriesMap); for (int i = 0; i < superColumns.size(); i++) { List<HColumn<String, String>> columns = superColumns.get(i).getColumns(); // SC to Map<String,String> String scName = superColumns.get(i).getName(); Map<String, String> sc = new HashMap<>(); for (int j = 0; j < columns.size(); j++) { sc.put(columns.get(j).getName(), columns.get(j).getValue()); } // chart attributes if (scName.equalsIgnoreCase(CHART)) { chart.setCreated(sc.get("created")); chart.setCreator(sc.get("creator")); chart.setName(sc.get(NAME)); chart.setType(sc.get(TYPE)); chart.setTitle(sc.get("title")); chart.setDescription(sc.get("description")); chart.setStart(sc.get("start")); chart.setEnd(sc.get("end")); chart.setYmax(sc.get("ymax")); chart.setYmin(sc.get("ymin")); chart.setTheme(sc.get("theme")); chart.setStep(sc.get(STEP)); chart.setUpdated(sc.get(UPDATED)); // series attributes } else if (scName.indexOf(SERIES) == 0) { Series series = new 
Series(); String seriesName = sc.get(NAME); series.setName(seriesName); series.setType(sc.get(TYPE)); series.setDatasource(sc.get("datasource")); series.setxAxisId(sc.get("xAxisId")); series.setyAxisId(sc.get("yAxisId")); series.setStackGroup(sc.get("stackGroup")); series.setColor(sc.get("color")); series.setWeight(sc.get("weight")); series.setRenderer(sc.get("renderer")); series.setOffset(Integer.parseInt(sc.get("yAxisId"))); seriesMap.put(seriesName, series); } else { Util.logMapString("couldnt map: " + scName, sc, logger); } } return chart; } /** * Sets the chart. * * @param chart the new chart */ public void setChart(Chart chart) { Map<String, String> columnsMap = new HashMap<>(); columnsMap.put(NAME, chart.getName()); columnsMap.put(TYPE, chart.getType()); columnsMap.put(STEP, chart.getStep()); columnsMap.put("start", chart.getStart()); columnsMap.put("end", chart.getEnd()); columnsMap.put("title", chart.getTitle()); columnsMap.put("creator", chart.getCreator()); columnsMap.put("created", chart.getCreated()); columnsMap.put("updated", chart.getUpdated()); columnsMap.put("ymax", chart.getYmax()); columnsMap.put("ymin", chart.getYmin()); columnsMap.put("height", chart.getHeight()); columnsMap.put("width", chart.getWidth()); columnsMap.put("theme", chart.getTheme()); columnsMap.put("description", chart.getDescription()); Mutator<String> mutator = perfDataAccessor.newMutator(); //TODO: mutator could be contained inside PerfDataCollector. 
perfDataAccessor.insert(mutator, chart.getKey(), columnsMap, CHART, CHART); logger.debug("write CHART: "); StringBuilder pendingKeys = new StringBuilder("chart"); // update datasource headers Map<String, Series> seriesMap = chart.getSeries(); for (String seriesKey : seriesMap.keySet()) { Series series = seriesMap.get(seriesKey); columnsMap = new HashMap<>(); columnsMap.put(NAME, series.getName()); columnsMap.put(TYPE, series.getType()); columnsMap.put("datasource", series.getDatasource()); columnsMap.put("xAxisId", series.getxAxisId()); columnsMap.put("yAxisId", series.getyAxisId()); columnsMap.put("stackGroup", series.getStackGroup()); columnsMap.put("color", series.getColor()); columnsMap.put("weight", series.getWeight()); columnsMap.put("renderer", series.getRenderer()); columnsMap.put("offset", Integer.valueOf(series.getOffset()).toString()); perfDataAccessor.insert(mutator, chart.getKey(), columnsMap, CHART, SERIES + "_" + series.getName()); pendingKeys.append(", " + seriesKey); } logger.debug("write keys:" + pendingKeys); perfDataAccessor.execute(mutator); } public PerfDataAccessor getPerfDataAccessor() { return perfDataAccessor; } public void setPerfDataAccessor(PerfDataAccessor perfDataAccessor) { this.perfDataAccessor = perfDataAccessor; } }
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.memory; import com.facebook.presto.ExceededMemoryLimitException; import com.facebook.presto.execution.TaskId; import com.facebook.presto.execution.TaskStateMachine; import com.facebook.presto.memory.context.LocalMemoryContext; import com.facebook.presto.memory.context.MemoryTrackingContext; import com.facebook.presto.operator.DriverContext; import com.facebook.presto.operator.DriverStats; import com.facebook.presto.operator.OperatorContext; import com.facebook.presto.operator.OperatorStats; import com.facebook.presto.operator.PipelineContext; import com.facebook.presto.operator.PipelineStats; import com.facebook.presto.operator.TaskContext; import com.facebook.presto.operator.TaskStats; import com.facebook.presto.spi.QueryId; import com.facebook.presto.spi.memory.MemoryPoolId; import com.facebook.presto.spiller.SpillSpaceTracker; import com.facebook.presto.sql.planner.plan.PlanNodeId; import io.airlift.stats.TestingGcMonitor; import io.airlift.units.DataSize; import org.testng.annotations.AfterClass; import org.testng.annotations.BeforeClass; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; import java.util.concurrent.ExecutorService; import java.util.concurrent.ScheduledExecutorService; import java.util.function.Consumer; import java.util.regex.Pattern; import static com.facebook.presto.testing.TestingSession.testSessionBuilder; import static 
io.airlift.concurrent.Threads.daemonThreadsNamed;
import static io.airlift.testing.Assertions.assertInstanceOf;
import static io.airlift.units.DataSize.Unit.GIGABYTE;
import static java.util.concurrent.Executors.newCachedThreadPool;
import static java.util.concurrent.Executors.newScheduledThreadPool;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertTrue;
import static org.testng.Assert.fail;

/**
 * Verifies that memory reservations made at the operator level are reflected
 * at every level of the context hierarchy (operator, driver, pipeline, task)
 * and in the user/system memory pools. Single-threaded: the test methods
 * share mutable fixture fields rebuilt in {@link #setUpTest()}.
 */
@Test(singleThreaded = true)
public class TestMemoryTracking
{
    private static final DataSize queryMaxMemory = new DataSize(1, GIGABYTE);
    private static final DataSize memoryPoolSize = new DataSize(1, GIGABYTE);
    private static final DataSize systemMemoryPoolSize = new DataSize(1, GIGABYTE);
    private static final DataSize maxSpillSize = new DataSize(1, GIGABYTE);
    private static final DataSize queryMaxSpillSize = new DataSize(1, GIGABYTE);
    private static final SpillSpaceTracker spillSpaceTracker = new SpillSpaceTracker(maxSpillSize);

    private QueryContext queryContext;
    private TaskContext taskContext;
    private PipelineContext pipelineContext;
    private DriverContext driverContext;
    private OperatorContext operatorContext;
    private MemoryPool userPool;
    private MemoryPool systemPool;
    private ExecutorService notificationExecutor;
    private ScheduledExecutorService yieldExecutor;

    @BeforeClass
    public void setUp()
    {
        notificationExecutor = newCachedThreadPool(daemonThreadsNamed("local-query-runner-executor-%s"));
        yieldExecutor = newScheduledThreadPool(2, daemonThreadsNamed("local-query-runner-scheduler-%s"));
    }

    @AfterClass(alwaysRun = true)
    public void tearDown()
    {
        notificationExecutor.shutdownNow();
        yieldExecutor.shutdownNow();
        queryContext = null;
        taskContext = null;
        pipelineContext = null;
        driverContext = null;
        operatorContext = null;
        userPool = null;
        systemPool = null;
    }

    /** Rebuilds fresh pools and a fresh context hierarchy before each test method. */
    @BeforeMethod
    public void setUpTest()
    {
        userPool = new MemoryPool(new MemoryPoolId("test"), memoryPoolSize);
        systemPool = new MemoryPool(new MemoryPoolId("testSystem"), systemMemoryPoolSize);
        queryContext = new QueryContext(
                new QueryId("test_query"),
                queryMaxMemory,
                userPool,
                systemPool,
                new TestingGcMonitor(),
                notificationExecutor,
                yieldExecutor,
                queryMaxSpillSize,
                spillSpaceTracker);
        taskContext = queryContext.addTaskContext(
                new TaskStateMachine(new TaskId("query", 0, 0), notificationExecutor),
                testSessionBuilder().build(),
                true,
                true);
        pipelineContext = taskContext.addPipelineContext(0, true, true);
        driverContext = pipelineContext.addDriverContext();
        operatorContext = driverContext.addOperatorContext(1, new PlanNodeId("a"), "test-operator");
    }

    @Test
    public void testOperatorAllocations()
    {
        MemoryTrackingContext operatorMemoryContext = operatorContext.getOperatorMemoryContext();
        LocalMemoryContext systemMemory = operatorContext.newLocalSystemMemoryContext();
        LocalMemoryContext userMemory = operatorContext.localUserMemoryContext();
        LocalMemoryContext revocableMemory = operatorContext.localRevocableMemoryContext();
        userMemory.setBytes(100);
        assertOperatorMemoryAllocations(operatorMemoryContext, 100, 0, 0);
        systemMemory.setBytes(1_000_000);
        assertOperatorMemoryAllocations(operatorMemoryContext, 100, 1_000_000, 0);
        systemMemory.setBytes(2_000_000);
        assertOperatorMemoryAllocations(operatorMemoryContext, 100, 2_000_000, 0);
        userMemory.setBytes(500);
        assertOperatorMemoryAllocations(operatorMemoryContext, 500, 2_000_000, 0);
        userMemory.setBytes(userMemory.getBytes() - 500);
        assertOperatorMemoryAllocations(operatorMemoryContext, 0, 2_000_000, 0);
        revocableMemory.setBytes(300);
        assertOperatorMemoryAllocations(operatorMemoryContext, 0, 2_000_000, 300);
        assertAllocationFails((ignored) -> userMemory.setBytes(userMemory.getBytes() - 500), "bytes cannot be negative");
        operatorContext.destroy();
        assertOperatorMemoryAllocations(operatorMemoryContext, 0, 0, 0);
    }

    @Test
    public void testLocalSystemAllocations()
    {
        long pipelineLocalAllocation = 1_000_000;
        long taskLocalAllocation = 10_000_000;
        LocalMemoryContext pipelineLocalSystemMemoryContext = pipelineContext.localSystemMemoryContext();
        pipelineLocalSystemMemoryContext.setBytes(pipelineLocalAllocation);
        assertLocalMemoryAllocations(pipelineContext.getPipelineMemoryContext(), 0, 0, pipelineLocalAllocation, pipelineLocalAllocation);
        LocalMemoryContext taskLocalSystemMemoryContext = taskContext.localSystemMemoryContext();
        taskLocalSystemMemoryContext.setBytes(taskLocalAllocation);
        assertLocalMemoryAllocations(
                taskContext.getTaskMemoryContext(),
                0,
                0,
                taskLocalAllocation + pipelineLocalAllocation, // at the pool level we should observe both
                taskLocalAllocation);
        assertEquals(pipelineContext.getPipelineStats().getSystemMemoryReservation().toBytes(),
                pipelineLocalAllocation,
                "task level allocations should not be visible at the pipeline level");
        pipelineLocalSystemMemoryContext.setBytes(pipelineLocalSystemMemoryContext.getBytes() - pipelineLocalAllocation);
        assertLocalMemoryAllocations(
                pipelineContext.getPipelineMemoryContext(),
                0,
                0,
                taskLocalAllocation,
                0);
        taskLocalSystemMemoryContext.setBytes(taskLocalSystemMemoryContext.getBytes() - taskLocalAllocation);
        assertLocalMemoryAllocations(
                taskContext.getTaskMemoryContext(),
                0,
                0,
                0,
                0);
    }

    @Test
    public void testStats()
    {
        LocalMemoryContext systemMemory = operatorContext.newLocalSystemMemoryContext();
        LocalMemoryContext userMemory = operatorContext.localUserMemoryContext();
        userMemory.setBytes(100_000_000);
        systemMemory.setBytes(200_000_000);
        assertStats(
                operatorContext.getOperatorStats(),
                driverContext.getDriverStats(),
                pipelineContext.getPipelineStats(),
                taskContext.getTaskStats(),
                100_000_000,
                0,
                200_000_000);

        // allocate more and check peak memory reservation
        userMemory.setBytes(600_000_000);
        assertStats(
                operatorContext.getOperatorStats(),
                driverContext.getDriverStats(),
                pipelineContext.getPipelineStats(),
                taskContext.getTaskStats(),
                600_000_000,
                0,
                200_000_000);

        userMemory.setBytes(userMemory.getBytes() - 300_000_000);
        assertStats(
                operatorContext.getOperatorStats(),
                driverContext.getDriverStats(),
                pipelineContext.getPipelineStats(),
                taskContext.getTaskStats(),
                300_000_000,
                0,
                200_000_000);

        userMemory.setBytes(userMemory.getBytes() - 300_000_000);
        assertStats(
                operatorContext.getOperatorStats(),
                driverContext.getDriverStats(),
                pipelineContext.getPipelineStats(),
                taskContext.getTaskStats(),
                0,
                0,
                200_000_000);

        operatorContext.destroy();
        assertStats(
                operatorContext.getOperatorStats(),
                driverContext.getDriverStats(),
                pipelineContext.getPipelineStats(),
                taskContext.getTaskStats(),
                0,
                0,
                0);
    }

    @Test
    public void testRevocableMemoryAllocations()
    {
        LocalMemoryContext systemMemory = operatorContext.newLocalSystemMemoryContext();
        LocalMemoryContext userMemory = operatorContext.localUserMemoryContext();
        LocalMemoryContext revocableMemory = operatorContext.localRevocableMemoryContext();
        revocableMemory.setBytes(100_000_000);
        assertStats(
                operatorContext.getOperatorStats(),
                driverContext.getDriverStats(),
                pipelineContext.getPipelineStats(),
                taskContext.getTaskStats(),
                0,
                100_000_000,
                0);
        userMemory.setBytes(100_000_000);
        systemMemory.setBytes(100_000_000);
        revocableMemory.setBytes(200_000_000);
        assertStats(
                operatorContext.getOperatorStats(),
                driverContext.getDriverStats(),
                pipelineContext.getPipelineStats(),
                taskContext.getTaskStats(),
                100_000_000,
                200_000_000,
                100_000_000);
    }

    @Test
    public void testTrySetBytes()
    {
        LocalMemoryContext localMemoryContext = operatorContext.localUserMemoryContext();
        assertTrue(localMemoryContext.trySetBytes(100_000_000));
        assertStats(
                operatorContext.getOperatorStats(),
                driverContext.getDriverStats(),
                pipelineContext.getPipelineStats(),
                taskContext.getTaskStats(),
                100_000_000,
                0,
                0);

        assertTrue(localMemoryContext.trySetBytes(200_000_000));
        assertStats(
                operatorContext.getOperatorStats(),
                driverContext.getDriverStats(),
                pipelineContext.getPipelineStats(),
                taskContext.getTaskStats(),
                200_000_000,
                0,
                0);

        assertTrue(localMemoryContext.trySetBytes(100_000_000));
        assertStats(
                operatorContext.getOperatorStats(),
                driverContext.getDriverStats(),
                pipelineContext.getPipelineStats(),
                taskContext.getTaskStats(),
                100_000_000,
                0,
                0);

        // allocating more than the pool size should fail and we should have the same stats as before
        assertFalse(localMemoryContext.trySetBytes(userPool.getMaxBytes() + 1));
        assertStats(
                operatorContext.getOperatorStats(),
                driverContext.getDriverStats(),
                pipelineContext.getPipelineStats(),
                taskContext.getTaskStats(),
                100_000_000,
                0,
                0);
    }

    @Test
    public void testTransferMemoryToTaskContext()
    {
        LocalMemoryContext userMemory = operatorContext.localUserMemoryContext();
        userMemory.setBytes(300_000_000);
        assertEquals(operatorContext.getOperatorMemoryContext().getUserMemory(), 300_000_000);
        assertEquals(driverContext.getDriverMemoryContext().getUserMemory(), 300_000_000);
        assertEquals(pipelineContext.getPipelineMemoryContext().getUserMemory(), 300_000_000);
        assertEquals(taskContext.getTaskMemoryContext().getUserMemory(), 300_000_000);

        LocalMemoryContext transferredBytesMemoryContext = taskContext.createNewTransferredBytesMemoryContext();
        operatorContext.transferMemoryToTaskContext(500_000_000, transferredBytesMemoryContext);
        assertEquals(operatorContext.getOperatorMemoryContext().getUserMemory(), 0);
        assertEquals(driverContext.getDriverMemoryContext().getUserMemory(), 0);
        assertEquals(pipelineContext.getPipelineMemoryContext().getUserMemory(), 0);
        assertEquals(taskContext.getTaskMemoryContext().getUserMemory(), 500_000_000);
        assertLocalMemoryAllocations(taskContext.getTaskMemoryContext(), 500_000_000, 500_000_000, 0, 0);
        transferredBytesMemoryContext.close();
        assertLocalMemoryAllocations(taskContext.getTaskMemoryContext(), 0, 0, 0, 0);

        // do another set of allocations where transferMemoryToTaskContext() will be called
        // with exactly the same number of bytes as in the operator user memory context
        userMemory.setBytes(1000);
        assertEquals(operatorContext.getOperatorMemoryContext().getUserMemory(), 1000);
        assertEquals(driverContext.getDriverMemoryContext().getUserMemory(), 1000);
        assertEquals(pipelineContext.getPipelineMemoryContext().getUserMemory(), 1000);
        assertEquals(taskContext.getTaskMemoryContext().getUserMemory(), 1000);
        transferredBytesMemoryContext = taskContext.createNewTransferredBytesMemoryContext();
        operatorContext.transferMemoryToTaskContext(1000, transferredBytesMemoryContext);
        assertEquals(operatorContext.getOperatorMemoryContext().getUserMemory(), 0);
        assertEquals(driverContext.getDriverMemoryContext().getUserMemory(), 0);
        assertEquals(pipelineContext.getPipelineMemoryContext().getUserMemory(), 0);
        assertEquals(taskContext.getTaskMemoryContext().getUserMemory(), 1000);
        assertLocalMemoryAllocations(taskContext.getTaskMemoryContext(), 1000, 1000, 0, 0);
        transferredBytesMemoryContext.close();
        assertLocalMemoryAllocations(taskContext.getTaskMemoryContext(), 0, 0, 0, 0);

        // exhaust the pool
        userMemory.setBytes(memoryPoolSize.toBytes());
        assertEquals(operatorContext.getOperatorMemoryContext().getUserMemory(), memoryPoolSize.toBytes());
        assertEquals(driverContext.getDriverMemoryContext().getUserMemory(), memoryPoolSize.toBytes());
        assertEquals(pipelineContext.getPipelineMemoryContext().getUserMemory(), memoryPoolSize.toBytes());
        assertEquals(taskContext.getTaskMemoryContext().getUserMemory(), memoryPoolSize.toBytes());
        transferredBytesMemoryContext = taskContext.createNewTransferredBytesMemoryContext();
        try {
            operatorContext.transferMemoryToTaskContext(memoryPoolSize.toBytes() + 1000, transferredBytesMemoryContext);
            // BUG FIX: previously there was no fail() here, so this test
            // passed silently when no exception was thrown (compare with
            // assertAllocationFails(), which does fail on a missing throw).
            fail("Expected ExceededMemoryLimitException");
        }
        catch (Throwable t) {
            assertInstanceOf(t, ExceededMemoryLimitException.class);
            assertEquals(transferredBytesMemoryContext.getBytes(), 0);
        }
    }

    @Test
    public void testDestroy()
    {
        LocalMemoryContext newLocalSystemMemoryContext = operatorContext.newLocalSystemMemoryContext();
        LocalMemoryContext newLocalUserMemoryContext = operatorContext.localUserMemoryContext();
        LocalMemoryContext newLocalRevocableMemoryContext = operatorContext.localRevocableMemoryContext();
        newLocalSystemMemoryContext.setBytes(100_000);
        newLocalRevocableMemoryContext.setBytes(200_000);
        newLocalUserMemoryContext.setBytes(400_000);
        assertEquals(operatorContext.getOperatorMemoryContext().getSystemMemory(), 100_000);
        assertEquals(operatorContext.getOperatorMemoryContext().getUserMemory(), 400_000);
        operatorContext.destroy();
        assertOperatorMemoryAllocations(operatorContext.getOperatorMemoryContext(), 0, 0, 0);
    }

    // verifies the given user/revocable/system reservations at every stats level
    private void assertStats(
            OperatorStats operatorStats,
            DriverStats driverStats,
            PipelineStats pipelineStats,
            TaskStats taskStats,
            long expectedUserMemory,
            long expectedRevocableMemory,
            long expectedSystemMemory)
    {
        assertEquals(operatorStats.getUserMemoryReservation().toBytes(), expectedUserMemory);
        assertEquals(driverStats.getUserMemoryReservation().toBytes(), expectedUserMemory);
        assertEquals(pipelineStats.getUserMemoryReservation().toBytes(), expectedUserMemory);
        assertEquals(taskStats.getUserMemoryReservation().toBytes(), expectedUserMemory);

        assertEquals(operatorStats.getSystemMemoryReservation().toBytes(), expectedSystemMemory);
        assertEquals(driverStats.getSystemMemoryReservation().toBytes(), expectedSystemMemory);
        assertEquals(pipelineStats.getSystemMemoryReservation().toBytes(), expectedSystemMemory);
        assertEquals(taskStats.getSystemMemoryReservation().toBytes(), expectedSystemMemory);

        assertEquals(operatorStats.getRevocableMemoryReservation().toBytes(), expectedRevocableMemory);
        assertEquals(driverStats.getRevocableMemoryReservation().toBytes(), expectedRevocableMemory);
        assertEquals(pipelineStats.getRevocableMemoryReservation().toBytes(), expectedRevocableMemory);
        assertEquals(taskStats.getRevocableMemoryReservation().toBytes(), expectedRevocableMemory);
    }

    // asserts that the allocation throws IllegalArgumentException with a message matching expectedPattern
    private void assertAllocationFails(Consumer<Void> allocationFunction, String expectedPattern)
    {
        try {
            allocationFunction.accept(null);
            fail("Expected exception");
        }
        catch (IllegalArgumentException e) {
            assertTrue(Pattern.matches(expectedPattern, e.getMessage()),
                    "\nExpected (re) :" + expectedPattern + "\nActual :" + e.getMessage());
        }
    }

    // the allocations that are done at the operator level are reflected at that level and all the way up to the pools
    private void assertOperatorMemoryAllocations(
            MemoryTrackingContext memoryTrackingContext,
            long expectedUserMemory,
            long expectedSystemMemory,
            long expectedRevocableMemory)
    {
        assertEquals(memoryTrackingContext.getUserMemory(), expectedUserMemory, "User memory verification failed");
        assertEquals(userPool.getReservedBytes(), expectedUserMemory, "User pool memory verification failed");
        assertEquals(memoryTrackingContext.getSystemMemory(), expectedSystemMemory, "System memory verification failed");
        assertEquals(systemPool.getReservedBytes(), expectedSystemMemory, "System pool memory verification failed");
        assertEquals(memoryTrackingContext.getRevocableMemory(), expectedRevocableMemory, "Revocable memory verification failed");
    }

    // the local allocations are reflected only at that level and all the way up to the pools
    private void assertLocalMemoryAllocations(
            MemoryTrackingContext memoryTrackingContext,
            long expectedUserPoolMemory,
            long expectedContextUserMemory,
            long expectedSystemPoolMemory,
            long expectedContextSystemMemory)
    {
        assertEquals(memoryTrackingContext.getUserMemory(), expectedContextUserMemory, "User memory verification failed");
        assertEquals(userPool.getReservedBytes(), expectedUserPoolMemory, "User pool memory verification failed");
        assertEquals(memoryTrackingContext.localSystemMemoryContext().getBytes(), expectedContextSystemMemory, "Local system memory verification failed");
        assertEquals(systemPool.getReservedBytes(), expectedSystemPoolMemory, "System pool memory verification failed");
    }
}
/* The contents of this file are subject to the license and copyright terms * detailed in the license directory at the root of the source tree (also * available online at http://fedora-commons.org/license/). */ package org.fcrepo.server.storage; import java.io.ByteArrayOutputStream; import java.io.InputStream; import java.net.URI; import java.net.URISyntaxException; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.Map; import org.jrdf.graph.ObjectNode; import org.jrdf.graph.Triple; import org.trippi.RDFFormat; import org.trippi.TripleIterator; import org.trippi.TrippiException; import org.fcrepo.common.Constants; import org.fcrepo.common.PID; import org.fcrepo.common.rdf.SimpleLiteral; import org.fcrepo.common.rdf.SimpleTriple; import org.fcrepo.common.rdf.SimpleURIReference; import org.fcrepo.server.Context; import org.fcrepo.server.Server; import org.fcrepo.server.errors.GeneralException; import org.fcrepo.server.errors.ObjectIntegrityException; import org.fcrepo.server.errors.ServerException; import org.fcrepo.server.storage.translation.DOTranslator; import org.fcrepo.server.storage.types.Datastream; import org.fcrepo.server.storage.types.DigitalObject; import org.fcrepo.server.storage.types.XMLDatastreamProcessor; import org.fcrepo.server.utilities.FilteredTripleIterator; import org.fcrepo.server.validation.ValidationUtility; /** * A DigitalObject-backed DOWriter. * <p> * This interface supports transaction behavior with the commit(String) and * rollBack() methods. When a DOWriter is instantiated, there is an implicit * transaction. Write methods may be called, but they won't affect the the * underlying data store until commit(String) is invoked. This also has the * effect of creating another implicit transaction. If temporary changes are no * longer wanted, rollBack() may be called to return the object to it's original * form. rollBack() is only valid for the current transaction. 
 * </p>
 * <p>
 * The read methods of DOWriter reflect on the composition of the object in the
 * context of the current transaction.
 * </p>
 *
 * @author Chris Wilper
 */
public class SimpleDOWriter
        extends SimpleDOReader
        implements Constants, DOWriter {

    // NOTE(review): these exceptions are constructed once and shared, so every
    // throw re-uses the same instance (and its original stack trace).
    private static ObjectIntegrityException ERROR_PENDING_REMOVAL =
            new ObjectIntegrityException("That can't be done because you said "
                    + "I should remove the object and i assume that's what you "
                    + "want unless you call rollback()");

    private static ObjectIntegrityException ERROR_INVALIDATED =
            new ObjectIntegrityException("The handle is no longer valid "
                    + "... this object has already been committed or explicitly"
                    + " invalidated.");

    // the in-memory object this writer mutates until commit(String) is called
    private final DigitalObject m_obj;

    private final Context m_context;

    private final DefaultDOManager m_mgr;

    // set by remove(); the actual removal happens at commit time
    private boolean m_pendingRemoval = false;

    // set by invalidate(); once true, further writes are rejected
    private boolean m_invalidated = false;

    private boolean m_committed = false;

    public SimpleDOWriter(Context context,
                          DefaultDOManager mgr,
                          DOTranslator translator,
                          String exportFormat,
                          String encoding,
                          DigitalObject obj) {
        super(context, mgr, translator, exportFormat, encoding, obj);
        m_context = context;
        m_obj = obj;
        m_mgr = mgr;
    }

    // Sets the object-level state; rejected once invalidated or pending removal.
    public void setState(String state) throws ObjectIntegrityException {
        assertNotInvalidated();
        assertNotPendingRemoval();
        m_obj.setState(state);
    }

    public void setOwnerId(String ownerId) throws ObjectIntegrityException {
        assertNotInvalidated();
        assertNotPendingRemoval();
        m_obj.setOwnerId(ownerId);
    }

    public void setDatastreamState(String datastreamID, String dsState)
            throws ServerException {
        assertNotInvalidated();
        assertNotPendingRemoval();
        // Set all versions of this datastreamID to the specified state
        for (Datastream ds : m_obj.datastreams(datastreamID)) {
            ds.DSState = dsState;
        }
    }

    public void setDatastreamVersionable(String datastreamID,
                                         boolean versionable)
            throws ServerException {
        assertNotInvalidated();
        assertNotPendingRemoval();
        // Set all versions of this datastreamID to the specified versionable
        // status
        for (Datastream ds : m_obj.datastreams(datastreamID)) {
            ds.DSVersionable = versionable;
        }
    }

    public void setLabel(String label) throws ObjectIntegrityException {
        assertNotInvalidated();
        assertNotPendingRemoval();
        // an empty label is normalized to null
        if (label != null && label.equals("")) {
            label = null;
        }
        m_obj.setLabel(label);
    }

    /**
     * Removes the entire digital object.
     *
     * @throws ServerException
     *         If any type of error occurred fulfilling the request.
     */
    public void remove() throws ObjectIntegrityException {
        assertNotInvalidated();
        assertNotPendingRemoval();
        // removal is deferred: only a flag is set here, the actual delete
        // happens when commit(String) calls doCommit with m_pendingRemoval
        m_pendingRemoval = true;
    }

    /**
     * Adds a datastream to the object.
     *
     * @param datastream
     *        The datastream.
     * @throws ServerException
     *         If any type of error occurred fulfilling the request.
     */
    public void addDatastream(Datastream datastream, boolean addNewVersion)
            throws ServerException {
        assertNotInvalidated();
        assertNotPendingRemoval();
        // use this call to handle versionable
        m_obj.addDatastreamVersion(datastream, addNewVersion);
    }

    /**
     * Removes a datastream from the object.
     *
     * @param id
     *        The id of the datastream.
     * @param start
     *        The start date (inclusive) of versions to remove. If
     *        <code>null</code>, this is taken to be the smallest possible
     *        value.
     * @param end
     *        The end date (inclusive) of versions to remove. If
     *        <code>null</code>, this is taken to be the greatest possible
     *        value.
     * @throws ServerException
     *         If any type of error occurred fulfilling the request.
     */
    public Date[] removeDatastream(String id, Date start, Date end)
            throws ServerException {
        assertNotInvalidated();
        assertNotPendingRemoval();
        ArrayList<Datastream> removeList = new ArrayList<Datastream>();
        for (Datastream ds : m_obj.datastreams(id)) {
            // a version is removed when its creation date falls inside the
            // [start, end] window; a null bound is treated as unbounded
            boolean doRemove = false;
            if (start != null) {
                if (end != null) {
                    if (ds.DSCreateDT.compareTo(start) >= 0
                            && ds.DSCreateDT.compareTo(end) <= 0) {
                        doRemove = true;
                    }
                } else {
                    if (ds.DSCreateDT.compareTo(start) >= 0) {
                        doRemove = true;
                    }
                }
            } else {
                if (end != null) {
                    if (ds.DSCreateDT.compareTo(end) <= 0) {
                        doRemove = true;
                    }
                } else {
                    doRemove = true;
                }
            }
            if (doRemove) {
                // Note: We don't remove old audit records by design.
                // add this datastream to the datastream to-be-removed list.
                removeList.add(ds);
            }
        }

        /* Now that we've identified all ds versions to remove, remove 'em */
        for (Datastream toRemove : removeList) {
            m_obj.removeDatastreamVersion(toRemove);
        }

        // finally, return the dates of each deleted item
        Date[] deletedDates = new Date[removeList.size()];
        for (int i = 0; i < removeList.size(); i++) {
            deletedDates[i] = (removeList.get(i)).DSCreateDT;
        }
        return deletedDates;
    }

    // from the relationship subject, determine which datastream to modify etc
    // (object URI -> RELS-EXT, datastream URI under the object -> RELS-INT)
    private String resolveSubjectToDatastream(String subject)
            throws ServerException {
        String dsId = null;
        String pidURI = PID.toURI(m_obj.getPid());
        if (subject.equals(pidURI)) {
            dsId = "RELS-EXT";
        } else {
            if (subject.startsWith(pidURI + "/")) {
                dsId = "RELS-INT";
            } else {
                throw new GeneralException("Cannot determine which relationship datastream to update for subject "
                        + subject
                        + ". Relationship subjects must be the URI of the object or the URI of a datastream within the object.");
            }
        }
        return dsId;
    }

    // convenience overload: routes to RELS-EXT or RELS-INT based on the subject
    public boolean addRelationship(String subject,
                                   String relationship,
                                   String object,
                                   boolean isLiteral,
                                   String datatype) throws ServerException {
        return addRelationship(resolveSubjectToDatastream(subject),
                subject,
                relationship,
                object,
                isLiteral,
                datatype);
    }

    // Adds a relationship triple to the given RELS datastream, creating the
    // datastream if absent or writing a new version if the triple is new.
    // Returns false when the relationship already exists.
    public boolean addRelationship(String dsId,
                                   String subject,
                                   String relationship,
                                   String object,
                                   boolean isLiteral,
                                   String datatype) throws ServerException {
        Triple toAdd =
                createTriple(subject, relationship, object, isLiteral, datatype);
        Datastream relsDatastream = GetDatastream(dsId, null);
        XMLDatastreamProcessor dsxml = null;
        if (relsDatastream == null) {
            // no RELS datastream yet: serialize just the new triple and
            // create the initial version (dsId + ".0")
            ByteArrayOutputStream out = new ByteArrayOutputStream();
            Map<String, String> map = new HashMap<String, String>();
            // namespaces for RELS-EXT
            if (dsId.equals("RELS-EXT")) {
                map.put(RELS_EXT.prefix, RELS_EXT.uri);
                map.put(MODEL.prefix, MODEL.uri);
            }
            map.put(RDF.prefix, RDF.uri);
            try {
                TripleIterator triples =
                        new FilteredTripleIterator(map, toAdd, true);
                triples.toStream(out, RDFFormat.RDF_XML, false);
            } catch (TrippiException e) {
                throw new GeneralException(e.getMessage(), e);
            }
            dsxml = new XMLDatastreamProcessor(dsId);
            Datastream newds = dsxml.getDatastream();
            newds.DatastreamAltIDs = new String[0];
            // formats for internal datastreams
            if (dsId.equals("RELS-EXT")) {
                newds.DSFormatURI = RELS_EXT1_0.uri;
            } else {
                if (dsId.equals("RELS-INT"))
                    newds.DSFormatURI = RELS_INT1_0.uri;
            }
            newds.DSMIME = "application/rdf+xml";
            //newds.DSControlGrp = "X"; set by XMLDatastreamProcessor instead
            newds.DSInfoType = null;
            newds.DSState = "A";
            newds.DSVersionable = false;
            newds.DSVersionID = dsId + ".0";
            newds.DSLabel = "Relationships";
            newds.DSCreateDT = Server.getCurrentDate(m_context);
            newds.DSLocation = null;
            newds.DSLocationType = null;
            newds.DSChecksumType = Datastream.getDefaultChecksumType();
            dsxml.setXMLContent(out.toByteArray());
            newds.DSSize = dsxml.getXMLContent().length;
            ValidationUtility.validateReservedDatastream(PID.getInstance(m_obj.getPid()),
                    newds.DatastreamID,
                    newds);
            addDatastream(newds, false);
        } else { // (relsDatastream != null)
            // existing RELS datastream: merge the triple into the current
            // content; only write a new version if it was actually added
            dsxml = new XMLDatastreamProcessor(relsDatastream);
            FilteredTripleIterator newIter = null;
            try {
                ByteArrayOutputStream out = new ByteArrayOutputStream();
                TripleIterator iter =
                        TripleIterator.fromStream(relsDatastream.getContentStream(),
                                RDFFormat.RDF_XML);
                newIter = new FilteredTripleIterator(iter, toAdd, true);
                newIter.toStream(out, RDFFormat.RDF_XML, false);
                if (newIter.wasChangeMade()) {
                    XMLDatastreamProcessor newdsxml = dsxml.newVersion();
                    Datastream newds = newdsxml.getDatastream();
                    // TODO: only for XML Metadata datastream
                    newdsxml.setDSMDClass(dsxml.getDSMDClass());
                    newds.DatastreamID = relsDatastream.DatastreamID;
                    newds.DatastreamAltIDs = relsDatastream.DatastreamAltIDs;
                    newds.DSFormatURI = relsDatastream.DSFormatURI;
                    newds.DSMIME = relsDatastream.DSMIME;
                    // newds.DSControlGrp = "X"; set by XMLDatastreamProcessor
                    newds.DSInfoType = relsDatastream.DSInfoType;
                    newds.DSState = relsDatastream.DSState;
                    newds.DSVersionable = relsDatastream.DSVersionable;
                    newds.DSVersionID = newDatastreamID(dsId);
                    newds.DSLabel = relsDatastream.DSLabel;
                    newds.DSCreateDT = Server.getCurrentDate(m_context);
                    newds.DSLocation = null;
                    newds.DSLocationType = null;
                    newds.DSChecksumType = relsDatastream.DSChecksumType;
                    newdsxml.setXMLContent(out.toByteArray());
                    newds.DSSize = newdsxml.getXMLContent().length;
                    ValidationUtility.validateReservedDatastream(PID.getInstance(m_obj.getPid()),
                            newds.DatastreamID,
                            newds);
                    addDatastream(newds, newds.DSVersionable);
                } else {
                    // relationship already exists
                    return false;
                }
            } catch (TrippiException e) {
                throw new GeneralException(e.getMessage(), e);
            } finally {
                try {
                    if (newIter != null) {
                        newIter.close();
                    }
                } catch (TrippiException e) {
                    throw new GeneralException(e.getMessage(), e);
                }
            }
        }
        return true;
    }

    // convenience overload: routes to RELS-EXT or RELS-INT based on the subject
    public boolean purgeRelationship(String subject,
                                     String relationship,
                                     String object,
                                     boolean isLiteral,
                                     String datatype) throws ServerException {
        return purgeRelationship(resolveSubjectToDatastream(subject),
                subject,
                relationship,
                object,
                isLiteral,
                datatype);
    }

    // Removes a relationship triple from the given RELS datastream, writing a
    // new version if the triple was present. Returns false when the
    // relationship (or the datastream itself) does not exist.
    public boolean purgeRelationship(String dsId,
                                     String subject,
                                     String relationship,
                                     String object,
                                     boolean isLiteral,
                                     String datatype) throws ServerException {
        Triple toPurge =
                createTriple(subject, relationship, object, isLiteral, datatype);
        Datastream relsDatastream = GetDatastream(dsId, null);
        if (relsDatastream == null) {
            // relationship does not exist
            return false;
        } else { // (relsExt != null)
            XMLDatastreamProcessor dsxml = new XMLDatastreamProcessor(relsDatastream);
            InputStream relsDatastreamIS = relsDatastream.getContentStream();

            TripleIterator iter = null;
            FilteredTripleIterator newIter = null;
            try {
                iter = TripleIterator.fromStream(relsDatastreamIS,
                        RDFFormat.RDF_XML);
                newIter = new FilteredTripleIterator(iter, toPurge, false);
                ByteArrayOutputStream out = new ByteArrayOutputStream();
                newIter.toStream(out, RDFFormat.RDF_XML, false);
                if (newIter.wasChangeMade()) {
                    XMLDatastreamProcessor newdsxml = dsxml.newVersion();
                    Datastream newds = newdsxml.getDatastream();
                    // TODO: setting of this on DatastreamAsXML
                    // TODO: wrap original datastream in handler class
                    newdsxml.setDSMDClass(dsxml.getDSMDClass());
                    newds.DatastreamID = dsId;
                    newds.DatastreamAltIDs = relsDatastream.DatastreamAltIDs;
                    newds.DSFormatURI = relsDatastream.DSFormatURI;
                    newds.DSMIME = relsDatastream.DSMIME;
                    // newds.DSControlGrp = "X"; set by XMLDatastreamProcessor
                    newds.DSInfoType = relsDatastream.DSInfoType;
                    newds.DSState = relsDatastream.DSState;
                    newds.DSVersionable = relsDatastream.DSVersionable;
                    newds.DSVersionID = newDatastreamID(dsId);
                    newds.DSLabel = relsDatastream.DSLabel;
                    newds.DSCreateDT = Server.getCurrentDate(m_context);
                    newds.DSLocation = null;
                    newds.DSLocationType = null;
                    newds.DSChecksumType = relsDatastream.DSChecksumType;
                    newdsxml.setXMLContent(out.toByteArray());
                    newds.DSSize = newdsxml.getXMLContent().length;
                    ValidationUtility.validateReservedDatastream(PID.getInstance(m_obj.getPid()),
                            newds.DatastreamID,
                            newds);
                    addDatastream(newds, newds.DSVersionable);
                } else {
                    // relationship does not exist
                    return false;
                }
            } catch (TrippiException e) {
                throw new GeneralException(e.getMessage(), e);
            } finally {
                try {
                    if (newIter != null) {
                        newIter.close(); // also closes the contained iter
                    }
                } catch (TrippiException e) {
                    throw new GeneralException(e.getMessage(), e);
                }
            }
        }
        return true;
    }

    // Builds a JRDF triple; the object is a (possibly typed) literal when
    // isLiteral is true, otherwise a URI reference.
    private static Triple createTriple(String subject,
                                       String predicate,
                                       String object,
                                       boolean isLiteral,
                                       String datatype) throws ServerException {
        ObjectNode o = null;
        try {
            if (isLiteral) {
                if (datatype == null || datatype.length() == 0) {
                    o = new SimpleLiteral(object);
                } else {
                    o = new SimpleLiteral(object, new URI(datatype));
                }
            } else {
                o = new SimpleURIReference(new URI(object));
            }
            return new SimpleTriple(new SimpleURIReference(new URI(subject)),
                    new SimpleURIReference(new URI(predicate)),
                    o);
        } catch (URISyntaxException e) {
            throw new GeneralException(e.getMessage(), e);
        }
    }

    /**
     * Saves the changes thus far to the permanent copy of the digital object.
     * After a successful commit the writer is invalidated and may not be
     * used again.
     *
     * @param logMessage
     *        An explanation of the change(s).
     * @throws ServerException
     *         If any type of error occurred fulfilling the request.
     */
    public void commit(String logMessage) throws ServerException {
        assertNotInvalidated();
        m_mgr.doCommit(Server.USE_DEFINITIVE_STORE,
                m_context,
                m_obj,
                logMessage,
                m_pendingRemoval);
        m_committed = true;
        invalidate();
    }

    // Marks this writer as invalid; subsequent write calls will be rejected.
    public void invalidate() {
        m_invalidated = true;
    }

    /**
     * Generate a unique id for a datastream.
     */
    public String newDatastreamID() {
        return m_obj.newDatastreamID();
    }

    /**
     * Generate a unique id for a datastream version.
     */
    public String newDatastreamID(String dsID) {
        return m_obj.newDatastreamID(dsID);
    }

    /**
     * Generate a unique id for an audit record.
*/ public String newAuditRecordID() { return m_obj.newAuditRecordID(); } private void assertNotPendingRemoval() throws ObjectIntegrityException { if (m_pendingRemoval) { throw ERROR_PENDING_REMOVAL; } } private void assertNotInvalidated() throws ObjectIntegrityException { if (m_invalidated) { throw ERROR_INVALIDATED; } } public boolean isCommitted() { return m_committed; } public boolean isNew() { return m_obj.isNew(); } }
/* * */ package net.community.apps.tools.xslapply; import java.awt.BorderLayout; import java.awt.Container; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.io.File; import java.io.FileOutputStream; import java.io.OutputStream; import java.util.Collection; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.TreeMap; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import javax.swing.JOptionPane; import javax.swing.JSplitPane; import javax.swing.JToolBar; import javax.xml.transform.Transformer; import javax.xml.transform.TransformerConfigurationException; import javax.xml.transform.dom.DOMSource; import javax.xml.transform.stream.StreamResult; import net.community.apps.common.BaseMainFrame; import net.community.apps.tools.xslapply.resources.ResourcesAnchor; import net.community.chest.dom.DOMUtils; import net.community.chest.io.FileUtil; import net.community.chest.lang.ExceptionUtil; import net.community.chest.resources.XmlAnchoredResourceAccessor; import net.community.chest.resources.XmlDocumentRetriever; import net.community.chest.swing.component.table.DefaultTableScroll; import net.community.chest.swing.component.table.JTableReflectiveProxy; import net.community.chest.swing.options.BaseOptionPane; import net.community.chest.ui.components.tree.document.BaseDocumentPanel; import net.community.chest.ui.components.tree.document.TitledEditableFilePathDocumentPanel; import net.community.chest.ui.helpers.panel.input.LRFieldWithLabelPanel; import net.community.chest.util.logging.LoggerWrapper; import net.community.chest.util.logging.factory.WrapperFactoryManager; import org.w3c.dom.Document; import org.w3c.dom.Element; /** * <P>Copyright 2008 as per GPLv2</P> * * @author Lyor G. 
* @since Dec 10, 2008 12:12:56 PM */ final class MainFrame extends BaseMainFrame<ResourcesAnchor> { /** * */ private static final long serialVersionUID = 7131233476426179222L; private static LoggerWrapper _logger /* =null */; /* * @see net.community.apps.common.BaseMainFrame#getLogger() */ @Override protected synchronized LoggerWrapper getLogger () { if (null == _logger) _logger = WrapperFactoryManager.getLogger(getClass()); return _logger; } /* * @see net.community.apps.common.BaseMainFrame#getResourcesAnchor() */ @Override public ResourcesAnchor getResourcesAnchor () { return ResourcesAnchor.getInstance(); } private TitledEditableFilePathDocumentPanel _srcPanel; public String getSourceXMLFilePath () { return (null == _srcPanel) ? null : _srcPanel.getFilePath(); } private SelectionFilesList _xslFiles /* =null */; public Collection<File> getSelectedXSLFiles () { return (null == _xslFiles) ? null : _xslFiles.getSelectedFiles(); } protected void loadXSLFiles (final Collection<? extends File> fl) { if (_xslFiles != null) _xslFiles.addFiles(fl); } private static final String XSL_LOAD_ELEMNAME="load-xsl-dialog"; /* * @see net.community.apps.common.BaseMainFrame#loadFile(java.io.File, java.lang.String, org.w3c.dom.Element) */ @Override public void loadFile (File f, String cmd, Element dlgElement) { final String filePath=(null == f) ? null : f.getAbsolutePath(); if ((null == filePath) || (filePath.length() <= 0)) return; try { if (_srcPanel != null) { final Document doc=DOMUtils.loadDocument(filePath); _srcPanel.clearContent(); _srcPanel.setDocument(doc); _srcPanel.setFilePath(filePath); } } catch(Exception e) { getLogger().error("loadFile(" + filePath + ") " + e.getClass().getName() + ": " + e.getMessage(), e); BaseOptionPane.showMessageDialog(this, e); } } /* * @see net.community.apps.common.FilesLoadMainFrame#loadFiles(java.lang.String, org.w3c.dom.Element, java.util.List) */ @Override public void loadFiles (String cmd, Element dlgElement, List<? 
extends File> fl) { final String fileType=(null == dlgElement) ? null : dlgElement.getAttribute(XmlAnchoredResourceAccessor.SECTION_NAME_ATTR); if (XSL_LOAD_ELEMNAME.equalsIgnoreCase(fileType)) loadXSLFiles(fl); else super.loadFiles(cmd, dlgElement, fl); } protected TitledEditableFilePathDocumentPanel _dstPanel; public String getDestinationXMLFilePath () { return (null == _dstPanel) ? null : _dstPanel.getFilePath(); } private static final Transformer getSavedFileTransformer (final File tgtFile) throws TransformerConfigurationException { final String n=(null == tgtFile) ? null : tgtFile.getAbsolutePath(); final int nLen=(null == n) ? 0 : n.length(), sPos=(nLen <= 2) /* must be a.b */ ? (-1) : n.lastIndexOf('.'); final String sfx=(sPos > 0) ? n.substring(sPos) : null; if (XmlDocumentRetriever.XML_SUFFIX.equalsIgnoreCase(sfx)) return DOMUtils.getDefaultXmlTransformer(); else if (".htm".equalsIgnoreCase(sfx) || ".html".equalsIgnoreCase(sfx)) return DOMUtils.getDefaultHtmlTransformer(); else throw new TransformerConfigurationException("Unknown output file suffix: " + n); } /* * @see net.community.apps.common.BaseMainFrame#saveFile(java.io.File, org.w3c.dom.Element) */ @Override public void saveFile (final File tgtFile, final Element dlgElement) { final String filePath=(null == tgtFile) ? null : tgtFile.getAbsolutePath(); if ((null == filePath) || (filePath.length() <= 0)) return; try { final Document doc=(null == _dstPanel) ? 
null : _dstPanel.getDocument(); if (null == doc) // ignore if no document return; if (tgtFile.exists()) { if (JOptionPane.showConfirmDialog(this, "Overwrite existing file ?", "File already exist", JOptionPane.YES_NO_OPTION) != JOptionPane.YES_OPTION) return; } final Transformer t=getSavedFileTransformer(tgtFile); OutputStream out=null; try { out = new FileOutputStream(tgtFile); t.transform(new DOMSource(doc), new StreamResult(out)); } finally { FileUtil.closeAll(out); } _dstPanel.setFilePath(filePath); JOptionPane.showMessageDialog(this, "File successfully written", filePath, JOptionPane.INFORMATION_MESSAGE); } catch(Exception e) { getLogger().error("saveFile(" + filePath + ") " + e.getClass().getName() + ": " + e.getMessage(), e); BaseOptionPane.showMessageDialog(this, e); } } /* * @see net.community.apps.common.BaseMainFrame#saveFile() */ @Override public void saveFile () { final String dstPath=getDestinationXMLFilePath(); if ((dstPath != null) && (dstPath.length() > 0)) { if (JOptionPane.showConfirmDialog(this, "Re-use existing file ?", dstPath, JOptionPane.YES_NO_OPTION) == JOptionPane.YES_OPTION) { saveFile(new File(dstPath), null); return; } } super.saveFile(); } public Element getLoadXSLDialogElement () throws Exception { return getResourcesAnchor().getSection(XSL_LOAD_ELEMNAME); } protected void loadXSLFiles () throws Exception { loadFile(LOAD_CMD, getLoadXSLDialogElement()); } protected void apply () throws Exception { final Collection<File> xslFiles=getSelectedXSLFiles(); if ((null == xslFiles) || (xslFiles.size() <= 0)) return; // ignore if not set final Document doc=(null == _srcPanel) ? 
null : _srcPanel.getDocument(); if (null == doc) return; // ignore if not loaded if (_dstPanel != null) { final ExecutorService es=Executors.newSingleThreadExecutor(); es.submit(new Runnable() { /* * @see java.lang.Runnable#run() */ @Override public void run () { final LoggerWrapper l=getLogger(); Document xlt=doc; for (final File xf : xslFiles) { if (null == xf) // should not happen continue; l.info("run() - processing XSL=" + xf); try { xlt = DOMUtils.xlateDocument(xlt, xf); l.info("run() - processed XSL=" + xf); } catch(Exception e) { l.error("apply(" + xf + ") " + e.getClass().getName() + ": " + e.getMessage(), e); BaseOptionPane.showMessageDialog(getMainFrame(), e); return; } } JOptionPane.showMessageDialog(getMainFrame(), "Done."); _dstPanel.setDocument(xlt); } }); } } private ActionListener _applyListener /* =null */; protected synchronized ActionListener getApplyListener () { if (null == _applyListener) _applyListener = new ActionListener() { /* * @see java.awt.event.ActionListener#actionPerformed(java.awt.event.ActionEvent) */ @Override public void actionPerformed (ActionEvent event) { try { apply(); } catch(Exception e) { getLogger().error("actionPerformed(" + ((null == event) ? null : event.getActionCommand()) + ") " + e.getClass().getName() + ": " + e.getMessage(), e); BaseOptionPane.showMessageDialog(getMainFrameInstance(), e); } } }; return _applyListener; } private ActionListener _xslLoader /* =null */; protected synchronized ActionListener getXSLLoaderListener () { if (null == _xslLoader) _xslLoader = new ActionListener() { /* * @see java.awt.event.ActionListener#actionPerformed(java.awt.event.ActionEvent) */ @Override public void actionPerformed (ActionEvent event) { try { loadXSLFiles(); } catch(Exception e) { getLogger().error("actionPerformed(" + ((null == event) ? 
null : event.getActionCommand()) + ") " + e.getClass().getName() + ": " + e.getMessage(), e); BaseOptionPane.showMessageDialog(getMainFrameInstance(), e); } } }; return _xslLoader; } protected void clearDisplay () { final BaseDocumentPanel[] pa={ _srcPanel, _dstPanel }; for (final BaseDocumentPanel p : pa) { final Document doc=(null == p) ? null : p.getDocument(); if (p == _srcPanel) p.clearContent(); else if (doc != null) p.setDocument(null); } } private ActionListener _clrListener /* =null */; protected ActionListener getClearListener () { if (null == _clrListener) _clrListener = new ActionListener() { /* * @see java.awt.event.ActionListener#actionPerformed(java.awt.event.ActionEvent) */ @Override public void actionPerformed (ActionEvent e) { clearDisplay(); } }; return _clrListener; } /* * @see net.community.apps.common.BaseMainFrame#getActionListenersMap(boolean) */ @Override protected Map<String,? extends ActionListener> getActionListenersMap (boolean createIfNotExist) { final Map<String,? extends ActionListener> org=super.getActionListenersMap(createIfNotExist); if (((org != null) && (org.size() > 0)) || (!createIfNotExist)) return org; final Map<String,ActionListener> lm=new TreeMap<String,ActionListener>(String.CASE_INSENSITIVE_ORDER); lm.put("loadXML", getLoadFileListener()); lm.put("loadXSL", getXSLLoaderListener()); lm.put(SAVE_CMD, getSaveFileListener()); lm.put("new", getClearListener()); lm.put("apply", getApplyListener()); lm.put(EXIT_CMD, getExitActionListener()); lm.put(ABOUT_CMD, getShowManifestActionListener()); setActionListenersMap(lm); return lm; } private static final File getFileArgument (final String argType, final int aIndex, final String ... args) { final int numArgs=(null == args) ? 0 : args.length; if (aIndex >= numArgs) throw new IllegalArgumentException("Missing " + argType + " argument"); return new File(args[aIndex+1]); } private void processMainArguments (final String ... args) throws Exception { final int numArgs=(null == args) ? 
0 : args.length; boolean autoRun=false, autoSave=false; File inputFile=null, saveFile=null; Collection<File> xslFiles=null; for (int aIndex=0; aIndex < numArgs; aIndex++) { final String av=args[aIndex]; if ((null == av) || (av.length() <= 0)) continue; if ("-r".equalsIgnoreCase(av)) autoRun = true; else if ("-s".equalsIgnoreCase(av)) autoSave = true; else if ("-i".equalsIgnoreCase(av)) { if (inputFile != null) throw new IllegalArgumentException("Re-specified input file argument"); inputFile = getFileArgument("input file", aIndex, args); aIndex++; } else if ("-o".equalsIgnoreCase(av)) { if (saveFile != null) throw new IllegalArgumentException("Re-specified output file argument"); saveFile = getFileArgument("output file", aIndex, args); aIndex++; } else if ("-t".equalsIgnoreCase(av)) { final File xf=getFileArgument("transformer file", aIndex, args); if (null == xslFiles) xslFiles = new LinkedList<File>(); xslFiles.add(xf); aIndex++; } else throw new IllegalArgumentException("unknown option: " + av); } if ((inputFile != null) && (_srcPanel != null)) loadFile(inputFile, LOAD_CMD, null); if ((xslFiles != null) && (xslFiles.size() > 0)) loadXSLFiles(xslFiles); if ((saveFile != null) && (_dstPanel != null)) _dstPanel.setFilePath(saveFile.getAbsolutePath()); if (autoRun) apply(); if (autoSave) saveFile(); } /* * @see net.community.apps.common.BaseMainFrame#layoutSection(java.lang.String, org.w3c.dom.Element) */ @Override public void layoutSection (String name, Element elem) throws RuntimeException { try { if ("xsl-files-list".equalsIgnoreCase(name)) { if (null == _xslFiles) _xslFiles = new SelectionFilesList(); final Object o=JTableReflectiveProxy.TBL.fromXml(_xslFiles, elem); if (o != _xslFiles) throw new IllegalStateException("Mismatched re-constructed instances"); } } catch(Exception e) { throw ExceptionUtil.toRuntimeException(e); } super.layoutSection(name, elem); } /* * @see net.community.apps.common.BaseMainFrame#layoutComponent() */ @Override public void 
layoutComponent () throws RuntimeException { super.layoutComponent(); final Container ctPane=getContentPane(); try { final JToolBar b=getMainToolBar(); setToolBarHandlers(b); ctPane.add(b, BorderLayout.NORTH); } catch(Exception e) { throw ExceptionUtil.toRuntimeException(e); } if (null == _srcPanel) _srcPanel = new TitledEditableFilePathDocumentPanel() { /** * */ private static final long serialVersionUID = -2136995495503771786L; /* * @see net.community.chest.ui.helpers.tree.trees.TitledEditableFilePathDocumentPanel#createFilePathComponent() */ @Override protected LRFieldWithLabelPanel createFilePathComponent () { final LRFieldWithLabelPanel p=super.createFilePathComponent(); if (p != null) p.setTitle("Source file:"); return p; } }; if (null == _dstPanel) _dstPanel = new TitledEditableFilePathDocumentPanel() { /** * */ private static final long serialVersionUID = 4698458642775059233L; /* * @see net.community.chest.ui.helpers.tree.trees.TitledEditableFilePathDocumentPanel#createFilePathComponent() */ @Override protected LRFieldWithLabelPanel createFilePathComponent () { final LRFieldWithLabelPanel p=super.createFilePathComponent(); if (p != null) p.setTitle("Result file:"); return p; } }; // split the XML structure trees horizontally final JSplitPane spDocs=new JSplitPane(JSplitPane.HORIZONTAL_SPLIT, _srcPanel, _dstPanel); spDocs.setResizeWeight(0.5); // split the main view final JSplitPane spMain= new JSplitPane(JSplitPane.VERTICAL_SPLIT, spDocs, new DefaultTableScroll(_xslFiles)); spMain.setResizeWeight(0.75); ctPane.add(spMain, BorderLayout.CENTER); } /** * @param args original arguments as received by <I>main</I> entry point * @throws Exception if unable to start main frame and application */ MainFrame (final String ... args) throws Exception { super(args); try { processMainArguments(args); } catch(Exception e) { getLogger().error(e.getClass().getName() + " while process arguments: " + e.getMessage()); BaseOptionPane.showMessageDialog(this, e); } } }
// Copyright 2015 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.docgen; import static com.google.common.truth.Truth.assertThat; import static com.google.common.truth.Truth.assertWithMessage; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.devtools.build.docgen.annot.DocumentMethods; import com.google.devtools.build.docgen.annot.StarlarkConstructor; import com.google.devtools.build.docgen.starlark.StarlarkBuiltinDoc; import com.google.devtools.build.docgen.starlark.StarlarkConstructorMethodDoc; import com.google.devtools.build.docgen.starlark.StarlarkDocExpander; import com.google.devtools.build.docgen.starlark.StarlarkMethodDoc; import com.google.devtools.build.lib.analysis.starlark.StarlarkModules; import com.google.devtools.build.lib.analysis.starlark.StarlarkRuleContext; import com.google.devtools.build.lib.collect.nestedset.Depset; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.TreeMap; import java.util.stream.Collectors; import net.starlark.java.annot.Param; import net.starlark.java.annot.StarlarkBuiltin; import net.starlark.java.annot.StarlarkMethod; import net.starlark.java.eval.Dict; import net.starlark.java.eval.Sequence; import net.starlark.java.eval.StarlarkList; import net.starlark.java.eval.StarlarkValue; import 
net.starlark.java.eval.Tuple; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** Tests for Starlark documentation. */ @RunWith(JUnit4.class) public class StarlarkDocumentationTest { private static final ImmutableList<String> DEPRECATED_UNDOCUMENTED_TOP_LEVEL_SYMBOLS = ImmutableList.of("Actions"); private static final StarlarkDocExpander expander = new StarlarkDocExpander(null) { @Override public String expand(String docString) { return docString; } }; @Test public void testStarlarkRuleClassBuiltInItemsAreDocumented() throws Exception { ImmutableMap.Builder<String, Object> env = ImmutableMap.builder(); StarlarkModules.addPredeclared(env); checkStarlarkTopLevelEnvItemsAreDocumented(env.buildOrThrow()); } private void checkStarlarkTopLevelEnvItemsAreDocumented(Map<String, Object> globals) throws Exception { Map<String, String> docMap = new HashMap<>(); Map<String, StarlarkBuiltinDoc> modules = new TreeMap<>(StarlarkDocumentationCollector.getAllModules(expander)); StarlarkBuiltinDoc topLevel = modules.remove(StarlarkDocumentationCollector.getTopLevelModule().name()); for (StarlarkMethodDoc method : topLevel.getMethods()) { docMap.put(method.getName(), method.getDocumentation()); } for (Map.Entry<String, StarlarkBuiltinDoc> entry : modules.entrySet()) { docMap.put(entry.getKey(), entry.getValue().getDocumentation()); } List<String> undocumentedItems = new ArrayList<>(); // All built in variables are registered in the Starlark global environment. for (String varname : globals.keySet()) { if (docMap.containsKey(varname)) { if (docMap.get(varname).isEmpty()) { undocumentedItems.add(varname); } } else { undocumentedItems.add(varname); } } // These constants are currently undocumented. // If they need documentation, the easiest approach would be // to hard-code it in StarlarkDocumentationCollector. 
undocumentedItems.remove("True"); undocumentedItems.remove("False"); undocumentedItems.remove("None"); assertWithMessage("Undocumented items: " + undocumentedItems) .that(undocumentedItems) .containsExactlyElementsIn(DEPRECATED_UNDOCUMENTED_TOP_LEVEL_SYMBOLS); } // TODO(bazel-team): come up with better Starlark specific tests. @Test public void testDirectJavaMethodsAreGenerated() throws Exception { assertThat(collect(StarlarkRuleContext.class)).isNotEmpty(); } /** MockClassA */ @StarlarkBuiltin(name = "MockClassA", doc = "MockClassA") private static class MockClassA implements StarlarkValue { @StarlarkMethod(name = "get", doc = "MockClassA#get") public Integer get() { return 0; } } /** MockClassD */ @StarlarkBuiltin(name = "MockClassD", doc = "MockClassD") private static class MockClassD implements StarlarkValue { @StarlarkMethod( name = "test", doc = "MockClassD#test", parameters = { @Param(name = "a"), @Param(name = "b"), @Param(name = "c", named = true, positional = false), @Param(name = "d", named = true, positional = false, defaultValue = "1"), }) public Integer test(int a, int b, int c, int d) { return 0; } } /** MockClassE */ @StarlarkBuiltin(name = "MockClassE", doc = "MockClassE") private static class MockClassE extends MockClassA { @Override public Integer get() { return 1; } } /** MockClassF */ @StarlarkBuiltin(name = "MockClassF", doc = "MockClassF") private static class MockClassF implements StarlarkValue { @StarlarkMethod( name = "test", doc = "MockClassF#test", parameters = { @Param(name = "a", named = false, positional = true), @Param(name = "b", named = true, positional = true), @Param(name = "c", named = true, positional = false), @Param(name = "d", named = true, positional = false, defaultValue = "1"), }, extraPositionals = @Param(name = "myArgs")) public Integer test(int a, int b, int c, int d, Sequence<?> args) { return 0; } } /** MockClassG */ @StarlarkBuiltin(name = "MockClassG", doc = "MockClassG") private static class MockClassG implements 
StarlarkValue { @StarlarkMethod( name = "test", doc = "MockClassG#test", parameters = { @Param(name = "a", named = false, positional = true), @Param(name = "b", named = true, positional = true), @Param(name = "c", named = true, positional = false), @Param(name = "d", named = true, positional = false, defaultValue = "1"), }, extraKeywords = @Param(name = "myKwargs")) public Integer test(int a, int b, int c, int d, Dict<?, ?> kwargs) { return 0; } } /** MockClassH */ @StarlarkBuiltin(name = "MockClassH", doc = "MockClassH") private static class MockClassH implements StarlarkValue { @StarlarkMethod( name = "test", doc = "MockClassH#test", parameters = { @Param(name = "a", named = false, positional = true), @Param(name = "b", named = true, positional = true), @Param(name = "c", named = true, positional = false), @Param(name = "d", named = true, positional = false, defaultValue = "1"), }, extraPositionals = @Param(name = "myArgs"), extraKeywords = @Param(name = "myKwargs")) public Integer test(int a, int b, int c, int d, Sequence<?> args, Dict<?, ?> kwargs) { return 0; } } /** MockClassI */ @StarlarkBuiltin(name = "MockClassI", doc = "MockClassI") private static class MockClassI implements StarlarkValue { @StarlarkMethod( name = "test", doc = "MockClassI#test", parameters = { @Param(name = "a", named = false, positional = true), @Param(name = "b", named = true, positional = true), @Param(name = "c", named = true, positional = false), @Param(name = "d", named = true, positional = false, defaultValue = "1"), @Param( name = "e", named = true, positional = false, documented = false, defaultValue = "2"), }, extraPositionals = @Param(name = "myArgs")) public Integer test(int a, int b, int c, int d, int e, Sequence<?> args) { return 0; } } /** * MockGlobalLibrary. While nothing directly depends on it, a test method in * StarlarkDocumentationTest checks all of the classes under a wide classpath and ensures this one * shows up. 
*/ @DocumentMethods @SuppressWarnings("unused") private static class MockGlobalLibrary { @StarlarkMethod( name = "MockGlobalCallable", doc = "GlobalCallable documentation", parameters = { @Param(name = "a", named = false, positional = true), @Param(name = "b", named = true, positional = true), @Param(name = "c", named = true, positional = false), @Param(name = "d", named = true, positional = false, defaultValue = "1"), }, extraPositionals = @Param(name = "myArgs"), extraKeywords = @Param(name = "myKwargs")) public Integer test(int a, int b, int c, int d, Sequence<?> args, Dict<?, ?> kwargs) { return 0; } } /** MockClassWithContainerReturnValues */ @StarlarkBuiltin( name = "MockClassWithContainerReturnValues", doc = "MockClassWithContainerReturnValues") private static class MockClassWithContainerReturnValues implements StarlarkValue { @StarlarkMethod(name = "depset", doc = "depset") public Depset /*<Integer>*/ getNestedSet() { return null; } @StarlarkMethod(name = "tuple", doc = "tuple") public Tuple getTuple() { return null; } @StarlarkMethod(name = "immutable", doc = "immutable") public ImmutableList<Integer> getImmutableList() { return null; } @StarlarkMethod(name = "mutable", doc = "mutable") public StarlarkList<Integer> getMutableList() { return null; } @StarlarkMethod(name = "starlark", doc = "starlark") public Sequence<Integer> getStarlarkList() { return null; } } /** MockClassCommonNameOne */ @StarlarkBuiltin(name = "MockClassCommonName", doc = "MockClassCommonName") private static class MockClassCommonNameOne implements StarlarkValue { @StarlarkMethod(name = "one", doc = "one") public Integer one() { return 1; } } /** SubclassOfMockClassCommonNameOne */ @StarlarkBuiltin(name = "MockClassCommonName", doc = "MockClassCommonName") private static class SubclassOfMockClassCommonNameOne extends MockClassCommonNameOne { @StarlarkMethod(name = "two", doc = "two") public Integer two() { return 1; } } /** PointsToCommonNameOneWithSubclass */ @StarlarkBuiltin( name = 
"PointsToCommonNameOneWithSubclass", doc = "PointsToCommonNameOneWithSubclass") private static class PointsToCommonNameOneWithSubclass implements StarlarkValue { @StarlarkMethod(name = "one", doc = "one") public MockClassCommonNameOne getOne() { return null; } @StarlarkMethod(name = "one_subclass", doc = "one_subclass") public SubclassOfMockClassCommonNameOne getOneSubclass() { return null; } } /** MockClassCommonNameOneUndocumented */ @StarlarkBuiltin(name = "MockClassCommonName", documented = false, doc = "") private static class MockClassCommonNameUndocumented implements StarlarkValue { @StarlarkMethod(name = "two", doc = "two") public Integer two() { return 1; } } /** PointsToCommonNameAndUndocumentedModule */ @StarlarkBuiltin( name = "PointsToCommonNameAndUndocumentedModule", doc = "PointsToCommonNameAndUndocumentedModule") private static class PointsToCommonNameAndUndocumentedModule implements StarlarkValue { @StarlarkMethod(name = "one", doc = "one") public MockClassCommonNameOne getOne() { return null; } @StarlarkMethod(name = "undocumented_module", doc = "undocumented_module") public MockClassCommonNameUndocumented getUndocumented() { return null; } } /** A module which has a selfCall method which constructs copies of MockClassA. 
*/
// Mock module whose selfCall method doubles as a Starlark constructor for MockClassA.
@StarlarkBuiltin(
    name = "MockClassWithSelfCallConstructor",
    doc = "MockClassWithSelfCallConstructor")
private static class MockClassWithSelfCallConstructor implements StarlarkValue {
  @StarlarkMethod(name = "one", doc = "one")
  public MockClassCommonNameOne getOne() {
    // Return value is irrelevant for doc generation; only the annotation matters.
    return null;
  }

  // selfCall = true makes the module itself callable; @StarlarkConstructor marks the
  // result type (MockClassA) as constructed by this call.
  @StarlarkMethod(name = "makeMockClassA", selfCall = true, doc = "makeMockClassA")
  @StarlarkConstructor
  public MockClassA makeMockClassA() {
    return new MockClassA();
  }
}

// Verifies the rendered signature for a method with positional, keyword-only, and
// defaulted parameters (a, b, *, c, d=1).
@Test
public void testStarlarkCallableParameters() throws Exception {
  Map<String, StarlarkBuiltinDoc> objects = collect(MockClassD.class);
  StarlarkBuiltinDoc moduleDoc = objects.get("MockClassD");
  assertThat(moduleDoc.getDocumentation()).isEqualTo("MockClassD");
  assertThat(moduleDoc.getMethods()).hasSize(1);
  StarlarkMethodDoc methodDoc = moduleDoc.getMethods().iterator().next();
  assertThat(methodDoc.getDocumentation()).isEqualTo("MockClassD#test");
  assertThat(methodDoc.getSignature())
      .isEqualTo(
          "<a class=\"anchor\" href=\"int.html\">int</a> MockClassD.test(a, b, *, c, d=1)");
  assertThat(methodDoc.getParams()).hasSize(4);
}

// Same as above plus a *args parameter (rendered as *myArgs).
@Test
public void testStarlarkCallableParametersAndArgs() throws Exception {
  Map<String, StarlarkBuiltinDoc> objects = collect(MockClassF.class);
  StarlarkBuiltinDoc moduleDoc = objects.get("MockClassF");
  assertThat(moduleDoc.getDocumentation()).isEqualTo("MockClassF");
  assertThat(moduleDoc.getMethods()).hasSize(1);
  StarlarkMethodDoc methodDoc = moduleDoc.getMethods().iterator().next();
  assertThat(methodDoc.getDocumentation()).isEqualTo("MockClassF#test");
  assertThat(methodDoc.getSignature())
      .isEqualTo(
          "<a class=\"anchor\" href=\"int.html\">int</a> "
              + "MockClassF.test(a, b, *, c, d=1, *myArgs)");
  assertThat(methodDoc.getParams()).hasSize(5);
}

// Same as the base case plus a **kwargs parameter (rendered as **myKwargs).
@Test
public void testStarlarkCallableParametersAndKwargs() throws Exception {
  Map<String, StarlarkBuiltinDoc> objects = collect(MockClassG.class);
  StarlarkBuiltinDoc moduleDoc = objects.get("MockClassG");
  assertThat(moduleDoc.getDocumentation()).isEqualTo("MockClassG");
  assertThat(moduleDoc.getMethods()).hasSize(1);
  StarlarkMethodDoc methodDoc = moduleDoc.getMethods().iterator().next();
  assertThat(methodDoc.getDocumentation()).isEqualTo("MockClassG#test");
  assertThat(methodDoc.getSignature())
      .isEqualTo(
          "<a class=\"anchor\" href=\"int.html\">int</a> "
              + "MockClassG.test(a, b, *, c, d=1, **myKwargs)");
  assertThat(methodDoc.getParams()).hasSize(5);
}

// Both *args and **kwargs present: six documented parameters in total.
@Test
public void testStarlarkCallableParametersAndArgsAndKwargs() throws Exception {
  Map<String, StarlarkBuiltinDoc> objects = collect(MockClassH.class);
  StarlarkBuiltinDoc moduleDoc = objects.get("MockClassH");
  assertThat(moduleDoc.getDocumentation()).isEqualTo("MockClassH");
  assertThat(moduleDoc.getMethods()).hasSize(1);
  StarlarkMethodDoc methodDoc = moduleDoc.getMethods().iterator().next();
  assertThat(methodDoc.getDocumentation()).isEqualTo("MockClassH#test");
  assertThat(methodDoc.getSignature())
      .isEqualTo(
          "<a class=\"anchor\" href=\"int.html\">int</a> "
              + "MockClassH.test(a, b, *, c, d=1, *myArgs, **myKwargs)");
  assertThat(methodDoc.getParams()).hasSize(6);
}

// Parameters without doc strings still appear in the signature and param list.
@Test
public void testStarlarkUndocumentedParameters() throws Exception {
  Map<String, StarlarkBuiltinDoc> objects = collect(MockClassI.class);
  StarlarkBuiltinDoc moduleDoc = objects.get("MockClassI");
  assertThat(moduleDoc.getDocumentation()).isEqualTo("MockClassI");
  assertThat(moduleDoc.getMethods()).hasSize(1);
  StarlarkMethodDoc methodDoc = moduleDoc.getMethods().iterator().next();
  assertThat(methodDoc.getDocumentation()).isEqualTo("MockClassI#test");
  assertThat(methodDoc.getSignature())
      .isEqualTo(
          "<a class=\"anchor\" href=\"int.html\">int</a> "
              + "MockClassI.test(a, b, *, c, d=1, *myArgs)");
  assertThat(methodDoc.getParams()).hasSize(5);
}

// A @GlobalMethods-style callable must surface under the top-level module with its
// full signature. Scans all top-level methods since ordering is not guaranteed.
@Test
public void testStarlarkGlobalLibraryCallable() throws Exception {
  StarlarkBuiltinDoc topLevel =
      StarlarkDocumentationCollector.getAllModules(expander)
          .get(StarlarkDocumentationCollector.getTopLevelModule().name());

  boolean foundGlobalLibrary = false;
  for (StarlarkMethodDoc methodDoc : topLevel.getMethods()) {
    if (methodDoc.getName().equals("MockGlobalCallable")) {
      assertThat(methodDoc.getDocumentation()).isEqualTo("GlobalCallable documentation");
      assertThat(methodDoc.getSignature())
          .isEqualTo(
              "<a class=\"anchor\" href=\"int.html\">int</a> "
                  + "MockGlobalCallable(a, b, *, c, d=1, *myArgs, **myKwargs)");
      foundGlobalLibrary = true;
      break;
    }
  }
  assertThat(foundGlobalLibrary).isTrue();
}

// An override keeps the superclass doc string ("MockClassA#get") but renders the
// signature against the subclass (MockClassE.get()).
@Test
public void testStarlarkCallableOverriding() throws Exception {
  Map<String, StarlarkBuiltinDoc> objects =
      collect(ImmutableList.of(MockClassA.class, MockClassE.class));
  StarlarkBuiltinDoc moduleDoc = objects.get("MockClassE");
  assertThat(moduleDoc.getDocumentation()).isEqualTo("MockClassE");
  assertThat(moduleDoc.getMethods()).hasSize(1);
  StarlarkMethodDoc methodDoc = moduleDoc.getMethods().iterator().next();
  assertThat(methodDoc.getDocumentation()).isEqualTo("MockClassA#get");
  assertThat(methodDoc.getSignature())
      .isEqualTo("<a class=\"anchor\" href=\"int.html\">int</a> MockClassE.get()");
}

// Container return types (NestedSet, Tuple, immutable/mutable/Starlark lists) map to
// their Starlark doc names (depset, tuple, list, sequence) without explicit annotations.
@Test
public void testStarlarkContainerReturnTypesWithoutAnnotations() throws Exception {
  Map<String, StarlarkBuiltinDoc> objects = collect(MockClassWithContainerReturnValues.class);
  assertThat(objects).containsKey("MockClassWithContainerReturnValues");

  Collection<? extends StarlarkMethodDoc> methods =
      objects.get("MockClassWithContainerReturnValues").getMethods();

  List<String> signatures =
      methods.stream().map(m -> m.getSignature()).collect(Collectors.toList());
  assertThat(signatures).hasSize(5);
  assertThat(signatures)
      .contains(
          "<a class=\"anchor\" href=\"depset.html\">depset</a> "
              + "MockClassWithContainerReturnValues.depset()");
  assertThat(signatures)
      .contains(
          "<a class=\"anchor\" href=\"tuple.html\">tuple</a> "
              + "MockClassWithContainerReturnValues.tuple()");
  assertThat(signatures)
      .contains(
          "<a class=\"anchor\" href=\"list.html\">list</a> "
              + "MockClassWithContainerReturnValues.immutable()");
  assertThat(signatures)
      .contains(
          "<a class=\"anchor\" href=\"list.html\">list</a> "
              + "MockClassWithContainerReturnValues.mutable()");
  assertThat(signatures)
      .contains(
          "<a class=\"anchor\" href=\"list.html\">sequence</a> "
              + "MockClassWithContainerReturnValues.starlark()");
}

// When a documented and an undocumented module share a name, the documented one wins.
@Test
public void testDocumentedModuleTakesPrecedence() throws Exception {
  Map<String, StarlarkBuiltinDoc> objects =
      collect(
          ImmutableList.of(
              PointsToCommonNameAndUndocumentedModule.class,
              MockClassCommonNameOne.class,
              MockClassCommonNameUndocumented.class));
  Collection<? extends StarlarkMethodDoc> methods =
      objects.get("MockClassCommonName").getMethods();

  List<String> methodNames =
      methods.stream().map(m -> m.getName()).collect(Collectors.toList());
  assertThat(methodNames).containsExactly("one");
}

// A subclass's extra methods are merged into the documentation of the shared module name.
@Test
public void testDocumentModuleSubclass() {
  Map<String, StarlarkBuiltinDoc> objects =
      collect(
          ImmutableList.of(
              PointsToCommonNameOneWithSubclass.class,
              MockClassCommonNameOne.class,
              SubclassOfMockClassCommonNameOne.class));
  Collection<? extends StarlarkMethodDoc> methods =
      objects.get("MockClassCommonName").getMethods();

  List<String> methodNames =
      methods.stream().map(m -> m.getName()).collect(Collectors.toList());
  assertThat(methodNames).containsExactly("one", "two");
}

// A selfCall constructor is documented as a constructor on the constructed type
// (MockClassA) and sorts first among that module's methods.
@Test
public void testDocumentSelfcallConstructor() {
  Map<String, StarlarkBuiltinDoc> objects =
      collect(ImmutableList.of(MockClassA.class, MockClassWithSelfCallConstructor.class));
  Collection<? extends StarlarkMethodDoc> methods = objects.get("MockClassA").getMethods();
  StarlarkMethodDoc firstMethod = methods.iterator().next();
  assertThat(firstMethod).isInstanceOf(StarlarkConstructorMethodDoc.class);

  List<String> methodNames =
      methods.stream().map(m -> m.getName()).collect(Collectors.toList());
  assertThat(methodNames).containsExactly("MockClassA", "get");
}

// Collects module documentation for the given annotated classes using the shared expander.
private Map<String, StarlarkBuiltinDoc> collect(Iterable<Class<?>> classObjects) {
  return StarlarkDocumentationCollector.collectModules(classObjects, expander);
}

// Single-class convenience overload of collect(Iterable).
private Map<String, StarlarkBuiltinDoc> collect(Class<?> classObject) {
  return collect(ImmutableList.of(classObject));
}
}
/*
 * Copyright 2014 NAVER Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.navercorp.pinpoint.profiler;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.Random;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicInteger;

import com.navercorp.pinpoint.bootstrap.context.ServerMetaData;
import com.navercorp.pinpoint.bootstrap.context.ServerMetaDataHolder;
import com.navercorp.pinpoint.bootstrap.context.ServiceInfo;
import com.navercorp.pinpoint.common.ServiceType;

import org.apache.thrift.TException;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.navercorp.pinpoint.profiler.context.DefaultServerMetaData;
import com.navercorp.pinpoint.profiler.context.DefaultServerMetaDataHolder;
import com.navercorp.pinpoint.profiler.sender.TcpDataSender;
import com.navercorp.pinpoint.rpc.PinpointSocketException;
import com.navercorp.pinpoint.rpc.client.PinpointSocket;
import com.navercorp.pinpoint.rpc.client.PinpointSocketFactory;
import com.navercorp.pinpoint.rpc.packet.HandshakeResponseCode;
import com.navercorp.pinpoint.rpc.packet.HandshakeResponseType;
import com.navercorp.pinpoint.rpc.packet.RequestPacket;
import com.navercorp.pinpoint.rpc.packet.SendPacket;
import com.navercorp.pinpoint.rpc.server.PinpointServerAcceptor;
import com.navercorp.pinpoint.rpc.server.ServerMessageListener;
import com.navercorp.pinpoint.rpc.server.PinpointServer;
import com.navercorp.pinpoint.thrift.dto.TResult;
import com.navercorp.pinpoint.thrift.io.HeaderTBaseSerializer;
import com.navercorp.pinpoint.thrift.io.HeaderTBaseSerializerFactory;

/**
 * Integration-style tests for {@link AgentInfoSender}: each test boots a real
 * {@link PinpointServerAcceptor} on localhost and asserts how many agent-info
 * requests arrive and how many are acknowledged. Timing is driven by real
 * {@code Thread.sleep} calls, so these tests are wall-clock sensitive.
 */
public class AgentInfoSenderTest {

    private final Logger logger = LoggerFactory.getLogger(this.getClass());

    // Fixed local endpoint shared by all tests; tests must not run concurrently
    // against the same port.
    public static final int PORT = 10050;
    public static final String HOST = "127.0.0.1";

    // Happy path: with a responsive server, exactly one request is sent and succeeds.
    @Test
    public void agentInfoShouldBeSent() throws InterruptedException {
        final AtomicInteger requestCount = new AtomicInteger();
        final AtomicInteger successCount = new AtomicInteger();
        final long agentInfoSendRetryIntervalMs = 1000L;

        ResponseServerMessageListener serverListener = new ResponseServerMessageListener(requestCount, successCount);

        PinpointServerAcceptor serverAcceptor = createServerAcceptor(serverListener);

        PinpointSocketFactory socketFactory = createPinpointSocketFactory();
        PinpointSocket socket = createPinpointSocket(HOST, PORT, socketFactory);

        TcpDataSender sender = new TcpDataSender(socket);
        AgentInfoSender agentInfoSender = new AgentInfoSender(sender, agentInfoSendRetryIntervalMs, getAgentInfo());

        try {
            agentInfoSender.start();
            // Generous wait so the single send/ack round trip completes.
            Thread.sleep(10000L);
        } finally {
            closeAll(serverAcceptor, agentInfoSender, socket, socketFactory);
        }
        assertEquals(1, requestCount.get());
        assertEquals(1, successCount.get());
    }

    // Server refuses the first N-1 requests; sender must retry each interval until
    // the Nth request succeeds, then stop.
    @Test
    public void agentInfoShouldRetryUntilSuccess() throws InterruptedException {
        final AtomicInteger requestCount = new AtomicInteger();
        final AtomicInteger successCount = new AtomicInteger();
        final long agentInfoSendRetryIntervalMs = 1000L;
        final int expectedTriesUntilSuccess = 5;

        ResponseServerMessageListener serverListener = new ResponseServerMessageListener(requestCount, successCount, expectedTriesUntilSuccess);

        PinpointServerAcceptor serverAcceptor = createServerAcceptor(serverListener);

        PinpointSocketFactory socketFactory = createPinpointSocketFactory();
        PinpointSocket socket = createPinpointSocket(HOST, PORT, socketFactory);

        TcpDataSender dataSender = new TcpDataSender(socket);
        AgentInfoSender agentInfoSender = new AgentInfoSender(dataSender, agentInfoSendRetryIntervalMs, getAgentInfo());

        try {
            agentInfoSender.start();
            Thread.sleep(agentInfoSendRetryIntervalMs * expectedTriesUntilSuccess);
        } finally {
            closeAll(serverAcceptor, agentInfoSender, socket, socketFactory);
        }
        assertEquals(expectedTriesUntilSuccess, requestCount.get());
        assertEquals(1, successCount.get());
    }

    // After a successful send, reconnects (server restarts) must NOT trigger a re-send.
    @Test
    public void agentInfoShouldBeSentOnlyOnceEvenAfterReconnect() throws InterruptedException {
        final AtomicInteger requestCount = new AtomicInteger();
        final AtomicInteger successCount = new AtomicInteger();
        final long agentInfoSendRetryIntervalMs = 1000L;

        ResponseServerMessageListener serverListener = new ResponseServerMessageListener(requestCount, successCount);

        PinpointSocketFactory socketFactory = createPinpointSocketFactory();
        PinpointSocket socket = createPinpointSocket(HOST, PORT, socketFactory);

        TcpDataSender dataSender = new TcpDataSender(socket);
        AgentInfoSender agentInfoSender = new AgentInfoSender(dataSender, agentInfoSendRetryIntervalMs, getAgentInfo());

        try {
            agentInfoSender.start();
            // Three server lifetimes with gaps in between: the client reconnects each time.
            createAndDeleteServer(serverListener, 5000L);
            Thread.sleep(1000L);
            createAndDeleteServer(serverListener, 5000L);
            Thread.sleep(1000L);
            createAndDeleteServer(serverListener, 5000L);
        } finally {
            closeAll(null, agentInfoSender, socket, socketFactory);
        }
        assertEquals(1, requestCount.get());
        assertEquals(1, successCount.get());
    }

    // Server never acknowledges (success condition = Integer.MAX_VALUE); the sender
    // must keep retrying indefinitely with zero successes.
    @Test
    public void agentInfoShouldKeepRetrying() throws InterruptedException {
        final AtomicInteger requestCount = new AtomicInteger();
        final AtomicInteger successCount = new AtomicInteger();
        final long agentInfoSendRetryIntervalMs = 1000L;
        final long minimumAgentInfoSendRetryCount = 10;

        ResponseServerMessageListener serverListener = new ResponseServerMessageListener(requestCount, successCount, Integer.MAX_VALUE);

        PinpointServerAcceptor serverAcceptor = createServerAcceptor(serverListener);

        PinpointSocketFactory socketFactory = createPinpointSocketFactory();
        PinpointSocket socket = createPinpointSocket(HOST, PORT, socketFactory);

        TcpDataSender dataSender = new TcpDataSender(socket);
        AgentInfoSender agentInfoSender = new AgentInfoSender(dataSender, agentInfoSendRetryIntervalMs, getAgentInfo());

        try {
            agentInfoSender.start();
            Thread.sleep(agentInfoSendRetryIntervalMs * minimumAgentInfoSendRetryCount);
        } finally {
            closeAll(serverAcceptor, agentInfoSender, socket, socketFactory);
        }
        // >= rather than == because scheduling jitter may allow extra retries.
        assertTrue(requestCount.get() >= minimumAgentInfoSendRetryCount);
        assertEquals(0, successCount.get());
    }

    // Each published ServerMetaData results in exactly one request/ack pair.
    @Test
    public void serverMetaDataShouldBeSentOnPublish() throws InterruptedException {
        // Given
        final AtomicInteger requestCount = new AtomicInteger();
        final AtomicInteger successCount = new AtomicInteger();
        final long agentInfoSendRetryIntervalMs = 1000L;

        ResponseServerMessageListener serverListener = new ResponseServerMessageListener(requestCount, successCount);

        PinpointServerAcceptor serverAcceptor = createServerAcceptor(serverListener);

        PinpointSocketFactory socketFactory = createPinpointSocketFactory();
        PinpointSocket socket = createPinpointSocket(HOST, PORT, socketFactory);

        TcpDataSender sender = new TcpDataSender(socket);
        AgentInfoSender agentInfoSender = new AgentInfoSender(sender, agentInfoSendRetryIntervalMs, getAgentInfo());

        final List<ServerMetaData> serverMetaDataObjects = new ArrayList<ServerMetaData>();
        serverMetaDataObjects.add(new DefaultServerMetaData("server1", Collections.<String>emptyList(), Collections.<Integer, String>emptyMap(), Collections.<ServiceInfo>emptyList()));
        serverMetaDataObjects.add(new DefaultServerMetaData("server2", Collections.<String>emptyList(), Collections.<Integer, String>emptyMap(), Collections.<ServiceInfo>emptyList()));
        serverMetaDataObjects.add(new DefaultServerMetaData("server3", Collections.<String>emptyList(), Collections.<Integer, String>emptyMap(), Collections.<ServiceInfo>emptyList()));
        serverMetaDataObjects.add(new DefaultServerMetaData("server4", Collections.<String>emptyList(), Collections.<Integer, String>emptyMap(), Collections.<ServiceInfo>emptyList()));
        serverMetaDataObjects.add(new DefaultServerMetaData("server5", Collections.<String>emptyList(), Collections.<Integer, String>emptyMap(), Collections.<ServiceInfo>emptyList()));

        // When
        try {
            for (ServerMetaData serverMetaData : serverMetaDataObjects) {
                agentInfoSender.publishServerMetaData(serverMetaData);
            }
            Thread.sleep(10000L);
        } finally {
            closeAll(serverAcceptor, agentInfoSender, socket, socketFactory);
        }

        // Then
        assertEquals(5, requestCount.get());
        assertEquals(5, successCount.get());
    }

    // Thread-safety: 50 threads publish concurrently (released together by a latch);
    // every publish must produce exactly one successful request and no exceptions.
    @Test
    public void serverMetaDataCouldBePublishedFromMultipleThreads() throws InterruptedException {
        // Given
        final AtomicInteger requestCount = new AtomicInteger();
        final AtomicInteger successCount = new AtomicInteger();
        final long agentInfoSendRetryIntervalMs = 1000L;
        final int threadCount = 50;
        final CountDownLatch initLatch = new CountDownLatch(threadCount);
        final CountDownLatch startLatch = new CountDownLatch(1);
        final CountDownLatch endLatch = new CountDownLatch(threadCount);
        final ExecutorService executorService = Executors.newFixedThreadPool(threadCount);
        final Queue<Throwable> exceptions = new ConcurrentLinkedQueue<Throwable>();

        ResponseServerMessageListener delayedServerListener = new ResponseServerMessageListener(requestCount, successCount);

        PinpointServerAcceptor serverAcceptor = createServerAcceptor(delayedServerListener);

        PinpointSocketFactory socketFactory = createPinpointSocketFactory();
        PinpointSocket socket = createPinpointSocket(HOST, PORT, socketFactory);

        TcpDataSender sender = new TcpDataSender(socket);
        AgentInfoSender agentInfoSender = new AgentInfoSender(sender, agentInfoSendRetryIntervalMs, getAgentInfo());
        final ServerMetaDataHolder metaDataContext = new DefaultServerMetaDataHolder(Collections.<String>emptyList());
        metaDataContext.addListener(agentInfoSender);

        // When
        for (int i = 0; i < threadCount; ++i) {
            executorService.submit(new Callable<Void>() {
                @Override
                public Void call() throws Exception {
                    initLatch.countDown();
                    try {
                        // Block until every worker is ready, then publish simultaneously.
                        startLatch.await();
                        metaDataContext.publishServerMetaData();
                    } catch (final Throwable t) {
                        exceptions.add(t);
                    } finally {
                        endLatch.countDown();
                    }
                    return null;
                }
            });
        }
        initLatch.await();
        startLatch.countDown();
        endLatch.await();
        executorService.shutdown();
        try {
            Thread.sleep(10000L);
        } finally {
            closeAll(serverAcceptor, agentInfoSender, socket, socketFactory);
        }

        // Then
        assertTrue("Failed with exceptions : " + exceptions, exceptions.isEmpty());
        assertEquals(threadCount, requestCount.get());
        assertEquals(threadCount, successCount.get());
    }

    // NOTE: deliberately not annotated with @Test — this is a long-running (60s) manual
    // stress test that repeatedly bounces the server at random intervals.
    public void reconnectionStressTest() throws InterruptedException {
        final AtomicInteger requestCount = new AtomicInteger();
        final AtomicInteger successCount = new AtomicInteger();
        final long stressTestTime = 60 * 1000L;
        final int randomMaxTime = 3000;
        final long agentInfoSendRetryIntervalMs = 1000L;
        final int expectedTriesUntilSuccess = (int)stressTestTime / (randomMaxTime * 2);

        ResponseServerMessageListener serverListener = new ResponseServerMessageListener(requestCount, successCount, expectedTriesUntilSuccess);

        PinpointSocketFactory socketFactory = createPinpointSocketFactory();
        PinpointSocket socket = createPinpointSocket(HOST, PORT, socketFactory);

        TcpDataSender dataSender = new TcpDataSender(socket);
        AgentInfoSender agentInfoSender = new AgentInfoSender(dataSender, agentInfoSendRetryIntervalMs, getAgentInfo());

        long startTime = System.currentTimeMillis();

        try {
            agentInfoSender.start();

            Random random = new Random(System.currentTimeMillis());

            while (System.currentTimeMillis() < startTime + stressTestTime) {
                createAndDeleteServer(serverListener, Math.abs(random.nextInt(randomMaxTime)));
                Thread.sleep(Math.abs(random.nextInt(1000)));
            }
        } finally {
            closeAll(null, agentInfoSender, socket, socketFactory);
        }
        assertEquals(1, successCount.get());
        assertEquals(expectedTriesUntilSuccess, requestCount.get());
    }

    // Binds a fresh acceptor on HOST:PORT wired to the given listener.
    private PinpointServerAcceptor createServerAcceptor(ServerMessageListener listener) {
        PinpointServerAcceptor serverAcceptor = new PinpointServerAcceptor();
        // server.setMessageListener(new
        // NoResponseServerMessageListener(requestCount));
        serverAcceptor.setMessageListener(listener);
        serverAcceptor.bind(HOST, PORT);

        return serverAcceptor;
    }

    // Starts a server, keeps it alive for waitTimeMillis, then tears it down —
    // used to simulate server restarts.
    private void createAndDeleteServer(ServerMessageListener listner, long waitTimeMillis) throws InterruptedException {
        PinpointServerAcceptor server = null;
        try {
            server = createServerAcceptor(listner);
            Thread.sleep(waitTimeMillis);
        } finally {
            if (server != null) {
                server.close();
            }
        }
    }

    // Best-effort teardown in dependency order; any argument may be null.
    private void closeAll(PinpointServerAcceptor serverAcceptor, AgentInfoSender agentInfoSender, PinpointSocket socket, PinpointSocketFactory factory) {
        if (serverAcceptor != null) {
            serverAcceptor.close();
        }

        if (agentInfoSender != null) {
            agentInfoSender.stop();
        }

        if (socket != null) {
            socket.close();
        }

        if (factory != null) {
            factory.release();
        }
    }

    // Minimal fixed agent identity used by every test.
    private AgentInformation getAgentInfo() {
        AgentInformation agentInfo = new AgentInformation("agentId", "appName", System.currentTimeMillis(), 1111, "hostname", "127.0.0.1", ServiceType.USER, "1");
        return agentInfo;
    }

    /**
     * Test-side server listener that starts acknowledging requests only after
     * {@code successCondition} requests have arrived; earlier requests are silently
     * dropped so the client is forced to retry.
     */
    class ResponseServerMessageListener implements ServerMessageListener {
        private final AtomicInteger requestCount;
        private final AtomicInteger successCount;

        // Number of the request on which the listener starts responding (1 = respond
        // immediately; Integer.MAX_VALUE = never respond).
        private final int successCondition;

        public ResponseServerMessageListener(AtomicInteger requestCount, AtomicInteger successCount) {
            this(requestCount, successCount, 1);
        }

        public ResponseServerMessageListener(AtomicInteger requestCount, AtomicInteger successCount, int successCondition) {
            this.requestCount = requestCount;
            this.successCount = successCount;
            this.successCondition = successCondition;
        }

        @Override
        public void handleSend(SendPacket sendPacket, PinpointServer pinpointServer) {
            logger.info("handleSend:{}", sendPacket);
        }

        @Override
        public void handleRequest(RequestPacket requestPacket, PinpointServer pinpointServer) {
            int requestCount = this.requestCount.incrementAndGet();
            // Drop (do not respond to) requests until the success condition is met,
            // which makes the client treat them as failures and retry.
            if (requestCount < successCondition) {
                return;
            }

            logger.info("handleRequest~~~:{}", requestPacket);

            try {
                HeaderTBaseSerializer serializer = HeaderTBaseSerializerFactory.DEFAULT_FACTORY.createSerializer();

                TResult result = new TResult(true);
                byte[] resultBytes = serializer.serialize(result);

                this.successCount.incrementAndGet();

                pinpointServer.response(requestPacket, resultBytes);
            } catch (TException e) {
                // Serialization failure is unexpected in tests; the missing response
                // will surface as a failed assertion in the caller.
                e.printStackTrace();
            }
        }

        @Override
        public HandshakeResponseCode handleHandshake(@SuppressWarnings("rawtypes") Map arg0) {
            return HandshakeResponseType.Success.DUPLEX_COMMUNICATION;
        }
    }

    // Client socket factory with a 5s timeout and empty handshake properties.
    private PinpointSocketFactory createPinpointSocketFactory() {
        PinpointSocketFactory pinpointSocketFactory = new PinpointSocketFactory();
        pinpointSocketFactory.setTimeoutMillis(1000 * 5);
        pinpointSocketFactory.setProperties(Collections.<String, Object>emptyMap());

        return pinpointSocketFactory;
    }

    // Tries to connect up to 3 times, then falls back to background (scheduled)
    // reconnect mode so tests can start before the server is up.
    private PinpointSocket createPinpointSocket(String host, int port, PinpointSocketFactory factory) {
        PinpointSocket socket = null;
        for (int i = 0; i < 3; i++) {
            try {
                socket = factory.connect(host, port);
                logger.info("tcp connect success:{}/{}", host, port);
                return socket;
            } catch (PinpointSocketException e) {
                logger.warn("tcp connect fail:{}/{} try reconnect, retryCount:{}", host, port, i);
            }
        }
        logger.warn("change background tcp connect mode  {}/{} ", host, port);
        socket = factory.scheduledConnect(host, port);

        return socket;
    }
}
// Copyright (C) 2012 The Android Open Source Project // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.gerrit.server.change; import static com.google.common.base.Preconditions.checkState; import static com.google.gerrit.extensions.client.ListChangesOption.ALL_COMMITS; import static com.google.gerrit.extensions.client.ListChangesOption.ALL_FILES; import static com.google.gerrit.extensions.client.ListChangesOption.ALL_REVISIONS; import static com.google.gerrit.extensions.client.ListChangesOption.CHANGE_ACTIONS; import static com.google.gerrit.extensions.client.ListChangesOption.CHECK; import static com.google.gerrit.extensions.client.ListChangesOption.COMMIT_FOOTERS; import static com.google.gerrit.extensions.client.ListChangesOption.CURRENT_ACTIONS; import static com.google.gerrit.extensions.client.ListChangesOption.CURRENT_COMMIT; import static com.google.gerrit.extensions.client.ListChangesOption.CURRENT_FILES; import static com.google.gerrit.extensions.client.ListChangesOption.CURRENT_REVISION; import static com.google.gerrit.extensions.client.ListChangesOption.DETAILED_ACCOUNTS; import static com.google.gerrit.extensions.client.ListChangesOption.DETAILED_LABELS; import static com.google.gerrit.extensions.client.ListChangesOption.DOWNLOAD_COMMANDS; import static com.google.gerrit.extensions.client.ListChangesOption.LABELS; import static com.google.gerrit.extensions.client.ListChangesOption.MESSAGES; import static 
com.google.gerrit.extensions.client.ListChangesOption.PUSH_CERTIFICATES; import static com.google.gerrit.extensions.client.ListChangesOption.REVIEWED; import static com.google.gerrit.extensions.client.ListChangesOption.REVIEWER_UPDATES; import static com.google.gerrit.extensions.client.ListChangesOption.SUBMITTABLE; import static com.google.gerrit.extensions.client.ListChangesOption.TRACKING_IDS; import static com.google.gerrit.extensions.client.ListChangesOption.WEB_LINKS; import static com.google.gerrit.server.CommonConverters.toGitPerson; import static java.util.stream.Collectors.toList; import com.google.auto.value.AutoValue; import com.google.common.base.Joiner; import com.google.common.base.MoreObjects; import com.google.common.base.Throwables; import com.google.common.collect.FluentIterable; import com.google.common.collect.HashBasedTable; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.collect.LinkedHashMultimap; import com.google.common.collect.ListMultimap; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.MultimapBuilder; import com.google.common.collect.SetMultimap; import com.google.common.collect.Sets; import com.google.common.collect.Table; import com.google.common.primitives.Ints; import com.google.gerrit.common.Nullable; import com.google.gerrit.common.data.LabelType; import com.google.gerrit.common.data.LabelTypes; import com.google.gerrit.common.data.LabelValue; import com.google.gerrit.common.data.SubmitRecord; import com.google.gerrit.common.data.SubmitTypeRecord; import com.google.gerrit.extensions.api.changes.FixInput; import com.google.gerrit.extensions.client.ListChangesOption; import com.google.gerrit.extensions.client.ReviewerState; import com.google.gerrit.extensions.common.AccountInfo; import com.google.gerrit.extensions.common.ApprovalInfo; import 
com.google.gerrit.extensions.common.ChangeInfo; import com.google.gerrit.extensions.common.ChangeMessageInfo; import com.google.gerrit.extensions.common.CommitInfo; import com.google.gerrit.extensions.common.FetchInfo; import com.google.gerrit.extensions.common.LabelInfo; import com.google.gerrit.extensions.common.ProblemInfo; import com.google.gerrit.extensions.common.PushCertificateInfo; import com.google.gerrit.extensions.common.ReviewerUpdateInfo; import com.google.gerrit.extensions.common.RevisionInfo; import com.google.gerrit.extensions.common.TrackingIdInfo; import com.google.gerrit.extensions.common.VotingRangeInfo; import com.google.gerrit.extensions.common.WebLinkInfo; import com.google.gerrit.extensions.config.DownloadCommand; import com.google.gerrit.extensions.config.DownloadScheme; import com.google.gerrit.extensions.registration.DynamicMap; import com.google.gerrit.extensions.restapi.AuthException; import com.google.gerrit.extensions.restapi.Url; import com.google.gerrit.index.query.QueryResult; import com.google.gerrit.reviewdb.client.Account; import com.google.gerrit.reviewdb.client.Change; import com.google.gerrit.reviewdb.client.ChangeMessage; import com.google.gerrit.reviewdb.client.Patch; import com.google.gerrit.reviewdb.client.PatchSet; import com.google.gerrit.reviewdb.client.PatchSetApproval; import com.google.gerrit.reviewdb.client.Project; import com.google.gerrit.reviewdb.server.ReviewDb; import com.google.gerrit.server.AnonymousUser; import com.google.gerrit.server.ApprovalsUtil; import com.google.gerrit.server.ChangeMessagesUtil; import com.google.gerrit.server.CurrentUser; import com.google.gerrit.server.GpgException; import com.google.gerrit.server.IdentifiedUser; import com.google.gerrit.server.ReviewerByEmailSet; import com.google.gerrit.server.ReviewerSet; import com.google.gerrit.server.ReviewerStatusUpdate; import com.google.gerrit.server.StarredChangesUtil; import com.google.gerrit.server.WebLinks; import 
com.google.gerrit.server.account.AccountLoader; import com.google.gerrit.server.api.accounts.AccountInfoComparator; import com.google.gerrit.server.api.accounts.GpgApiAdapter; import com.google.gerrit.server.config.TrackingFooters; import com.google.gerrit.server.git.GitRepositoryManager; import com.google.gerrit.server.git.MergeUtil; import com.google.gerrit.server.index.change.ChangeField; import com.google.gerrit.server.index.change.ChangeIndexCollection; import com.google.gerrit.server.mail.Address; import com.google.gerrit.server.notedb.ChangeNotes; import com.google.gerrit.server.notedb.ReviewerStateInternal; import com.google.gerrit.server.patch.PatchListNotAvailableException; import com.google.gerrit.server.permissions.ChangePermission; import com.google.gerrit.server.permissions.LabelPermission; import com.google.gerrit.server.permissions.PermissionBackend; import com.google.gerrit.server.permissions.PermissionBackendException; import com.google.gerrit.server.project.NoSuchChangeException; import com.google.gerrit.server.project.NoSuchProjectException; import com.google.gerrit.server.project.ProjectCache; import com.google.gerrit.server.project.RemoveReviewerControl; import com.google.gerrit.server.project.SubmitRuleOptions; import com.google.gerrit.server.query.change.ChangeData; import com.google.gerrit.server.query.change.ChangeData.ChangedLines; import com.google.gerrit.server.query.change.PluginDefinedAttributesFactory; import com.google.gwtorm.server.OrmException; import com.google.inject.Inject; import com.google.inject.Provider; import com.google.inject.Singleton; import com.google.inject.assistedinject.Assisted; import java.io.IOException; import java.sql.Timestamp; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import 
java.util.TreeMap; import org.eclipse.jgit.lib.ObjectId; import org.eclipse.jgit.lib.Ref; import org.eclipse.jgit.lib.Repository; import org.eclipse.jgit.revwalk.RevCommit; import org.eclipse.jgit.revwalk.RevWalk; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class ChangeJson { private static final Logger log = LoggerFactory.getLogger(ChangeJson.class); public static final SubmitRuleOptions SUBMIT_RULE_OPTIONS_LENIENT = ChangeField.SUBMIT_RULE_OPTIONS_LENIENT.toBuilder().build(); public static final SubmitRuleOptions SUBMIT_RULE_OPTIONS_STRICT = ChangeField.SUBMIT_RULE_OPTIONS_STRICT.toBuilder().build(); public static final ImmutableSet<ListChangesOption> REQUIRE_LAZY_LOAD = ImmutableSet.of( ALL_COMMITS, ALL_REVISIONS, CHANGE_ACTIONS, CHECK, COMMIT_FOOTERS, CURRENT_ACTIONS, CURRENT_COMMIT, MESSAGES); @Singleton public static class Factory { private final AssistedFactory factory; @Inject Factory(AssistedFactory factory) { this.factory = factory; } public ChangeJson noOptions() { return create(ImmutableSet.of()); } public ChangeJson create(Iterable<ListChangesOption> options) { return factory.create(options); } public ChangeJson create(ListChangesOption first, ListChangesOption... 
rest) { return create(Sets.immutableEnumSet(first, rest)); } } public interface AssistedFactory { ChangeJson create(Iterable<ListChangesOption> options); } private final Provider<ReviewDb> db; private final Provider<CurrentUser> userProvider; private final AnonymousUser anonymous; private final PermissionBackend permissionBackend; private final GitRepositoryManager repoManager; private final ProjectCache projectCache; private final MergeUtil.Factory mergeUtilFactory; private final IdentifiedUser.GenericFactory userFactory; private final ChangeData.Factory changeDataFactory; private final FileInfoJson fileInfoJson; private final AccountLoader.Factory accountLoaderFactory; private final DynamicMap<DownloadScheme> downloadSchemes; private final DynamicMap<DownloadCommand> downloadCommands; private final WebLinks webLinks; private final ImmutableSet<ListChangesOption> options; private final ChangeMessagesUtil cmUtil; private final Provider<ConsistencyChecker> checkerProvider; private final ActionJson actionJson; private final GpgApiAdapter gpgApi; private final ChangeNotes.Factory notesFactory; private final ChangeResource.Factory changeResourceFactory; private final ChangeKindCache changeKindCache; private final ChangeIndexCollection indexes; private final ApprovalsUtil approvalsUtil; private final RemoveReviewerControl removeReviewerControl; private final TrackingFooters trackingFooters; private boolean lazyLoad = true; private AccountLoader accountLoader; private FixInput fix; private PluginDefinedAttributesFactory pluginDefinedAttributesFactory; @Inject ChangeJson( Provider<ReviewDb> db, Provider<CurrentUser> user, AnonymousUser au, PermissionBackend permissionBackend, GitRepositoryManager repoManager, ProjectCache projectCache, MergeUtil.Factory mergeUtilFactory, IdentifiedUser.GenericFactory uf, ChangeData.Factory cdf, FileInfoJson fileInfoJson, AccountLoader.Factory ailf, DynamicMap<DownloadScheme> downloadSchemes, DynamicMap<DownloadCommand> downloadCommands, 
WebLinks webLinks, ChangeMessagesUtil cmUtil, Provider<ConsistencyChecker> checkerProvider, ActionJson actionJson, GpgApiAdapter gpgApi, ChangeNotes.Factory notesFactory, ChangeResource.Factory changeResourceFactory, ChangeKindCache changeKindCache, ChangeIndexCollection indexes, ApprovalsUtil approvalsUtil, RemoveReviewerControl removeReviewerControl, TrackingFooters trackingFooters, @Assisted Iterable<ListChangesOption> options) { this.db = db; this.userProvider = user; this.anonymous = au; this.changeDataFactory = cdf; this.permissionBackend = permissionBackend; this.repoManager = repoManager; this.userFactory = uf; this.projectCache = projectCache; this.mergeUtilFactory = mergeUtilFactory; this.fileInfoJson = fileInfoJson; this.accountLoaderFactory = ailf; this.downloadSchemes = downloadSchemes; this.downloadCommands = downloadCommands; this.webLinks = webLinks; this.cmUtil = cmUtil; this.checkerProvider = checkerProvider; this.actionJson = actionJson; this.gpgApi = gpgApi; this.notesFactory = notesFactory; this.changeResourceFactory = changeResourceFactory; this.changeKindCache = changeKindCache; this.indexes = indexes; this.approvalsUtil = approvalsUtil; this.removeReviewerControl = removeReviewerControl; this.options = Sets.immutableEnumSet(options); this.trackingFooters = trackingFooters; } public ChangeJson lazyLoad(boolean load) { lazyLoad = load; return this; } public ChangeJson fix(FixInput fix) { this.fix = fix; return this; } public void setPluginDefinedAttributesFactory(PluginDefinedAttributesFactory pluginsFactory) { this.pluginDefinedAttributesFactory = pluginsFactory; } public ChangeInfo format(ChangeResource rsrc) throws OrmException { return format(changeDataFactory.create(db.get(), rsrc.getNotes())); } public ChangeInfo format(Change change) throws OrmException { return format(changeDataFactory.create(db.get(), change)); } public ChangeInfo format(Project.NameKey project, Change.Id id) throws OrmException { ChangeNotes notes; try { notes = 
notesFactory.createChecked(db.get(), project, id); } catch (OrmException e) { if (!has(CHECK)) { throw e; } return checkOnly(changeDataFactory.create(db.get(), project, id)); } return format(changeDataFactory.create(db.get(), notes)); } public ChangeInfo format(ChangeData cd) throws OrmException { return format(cd, Optional.empty(), true); } private ChangeInfo format( ChangeData cd, Optional<PatchSet.Id> limitToPsId, boolean fillAccountLoader) throws OrmException { try { if (fillAccountLoader) { accountLoader = accountLoaderFactory.create(has(DETAILED_ACCOUNTS)); ChangeInfo res = toChangeInfo(cd, limitToPsId); accountLoader.fill(); return res; } return toChangeInfo(cd, limitToPsId); } catch (PatchListNotAvailableException | GpgException | OrmException | IOException | PermissionBackendException | NoSuchProjectException | RuntimeException e) { if (!has(CHECK)) { Throwables.throwIfInstanceOf(e, OrmException.class); throw new OrmException(e); } return checkOnly(cd); } } public ChangeInfo format(RevisionResource rsrc) throws OrmException { ChangeData cd = changeDataFactory.create(db.get(), rsrc.getNotes()); return format(cd, Optional.of(rsrc.getPatchSet().getId()), true); } public List<List<ChangeInfo>> formatQueryResults(List<QueryResult<ChangeData>> in) throws OrmException { accountLoader = accountLoaderFactory.create(has(DETAILED_ACCOUNTS)); ensureLoaded(FluentIterable.from(in).transformAndConcat(QueryResult::entities)); List<List<ChangeInfo>> res = Lists.newArrayListWithCapacity(in.size()); Map<Change.Id, ChangeInfo> out = new HashMap<>(); for (QueryResult<ChangeData> r : in) { List<ChangeInfo> infos = toChangeInfo(out, r.entities()); if (!infos.isEmpty() && r.more()) { infos.get(infos.size() - 1)._moreChanges = true; } res.add(infos); } accountLoader.fill(); return res; } public List<ChangeInfo> formatChangeDatas(Collection<ChangeData> in) throws OrmException { accountLoader = accountLoaderFactory.create(has(DETAILED_ACCOUNTS)); ensureLoaded(in); List<ChangeInfo> 
out = new ArrayList<>(in.size()); for (ChangeData cd : in) { out.add(format(cd)); } accountLoader.fill(); return out; } private void ensureLoaded(Iterable<ChangeData> all) throws OrmException { if (lazyLoad) { ChangeData.ensureChangeLoaded(all); if (has(ALL_REVISIONS)) { ChangeData.ensureAllPatchSetsLoaded(all); } else if (has(CURRENT_REVISION) || has(MESSAGES)) { ChangeData.ensureCurrentPatchSetLoaded(all); } if (has(REVIEWED) && userProvider.get().isIdentifiedUser()) { ChangeData.ensureReviewedByLoadedForOpenChanges(all); } ChangeData.ensureCurrentApprovalsLoaded(all); } else { for (ChangeData cd : all) { cd.setLazyLoad(false); } } } private boolean has(ListChangesOption option) { return options.contains(option); } private List<ChangeInfo> toChangeInfo(Map<Change.Id, ChangeInfo> out, List<ChangeData> changes) { List<ChangeInfo> info = Lists.newArrayListWithCapacity(changes.size()); for (ChangeData cd : changes) { ChangeInfo i = out.get(cd.getId()); if (i == null) { try { i = toChangeInfo(cd, Optional.empty()); } catch (PatchListNotAvailableException | GpgException | OrmException | IOException | PermissionBackendException | NoSuchProjectException | RuntimeException e) { if (has(CHECK)) { i = checkOnly(cd); } else if (e instanceof NoSuchChangeException) { log.info( "NoSuchChangeException: Omitting corrupt change " + cd.getId() + " from results. 
Seems to be stale in the index."); continue; } else { log.warn("Omitting corrupt change " + cd.getId() + " from results", e); continue; } } out.put(cd.getId(), i); } info.add(i); } return info; } private ChangeInfo checkOnly(ChangeData cd) { ChangeNotes notes; try { notes = cd.notes(); } catch (OrmException e) { String msg = "Error loading change"; log.warn(msg + " " + cd.getId(), e); ChangeInfo info = new ChangeInfo(); info._number = cd.getId().get(); ProblemInfo p = new ProblemInfo(); p.message = msg; info.problems = Lists.newArrayList(p); return info; } ConsistencyChecker.Result result = checkerProvider.get().check(notes, fix); ChangeInfo info; Change c = result.change(); if (c != null) { info = new ChangeInfo(); info.project = c.getProject().get(); info.branch = c.getDest().getShortName(); info.topic = c.getTopic(); info.changeId = c.getKey().get(); info.subject = c.getSubject(); info.status = c.getStatus().asChangeStatus(); info.owner = new AccountInfo(c.getOwner().get()); info.created = c.getCreatedOn(); info.updated = c.getLastUpdatedOn(); info._number = c.getId().get(); info.problems = result.problems(); info.isPrivate = c.isPrivate() ? true : null; info.workInProgress = c.isWorkInProgress() ? true : null; info.hasReviewStarted = c.hasReviewStarted(); finish(info); } else { info = new ChangeInfo(); info._number = result.id().get(); info.problems = result.problems(); } return info; } private ChangeInfo toChangeInfo(ChangeData cd, Optional<PatchSet.Id> limitToPsId) throws PatchListNotAvailableException, GpgException, OrmException, IOException, PermissionBackendException, NoSuchProjectException { ChangeInfo out = new ChangeInfo(); CurrentUser user = userProvider.get(); if (has(CHECK)) { out.problems = checkerProvider.get().check(cd.notes(), fix).problems(); // If any problems were fixed, the ChangeData needs to be reloaded. 
for (ProblemInfo p : out.problems) { if (p.status == ProblemInfo.Status.FIXED) { cd = changeDataFactory.create(cd.db(), cd.project(), cd.getId()); break; } } } PermissionBackend.ForChange perm = permissionBackendForChange(user, cd); Change in = cd.change(); out.project = in.getProject().get(); out.branch = in.getDest().getShortName(); out.topic = in.getTopic(); if (indexes.getSearchIndex().getSchema().hasField(ChangeField.ASSIGNEE)) { if (in.getAssignee() != null) { out.assignee = accountLoader.get(in.getAssignee()); } } out.hashtags = cd.hashtags(); out.changeId = in.getKey().get(); if (in.getStatus().isOpen()) { SubmitTypeRecord str = cd.submitTypeRecord(); if (str.isOk()) { out.submitType = str.type; } out.mergeable = cd.isMergeable(); if (has(SUBMITTABLE)) { out.submittable = submittable(cd); } } Optional<ChangedLines> changedLines = cd.changedLines(); if (changedLines.isPresent()) { out.insertions = changedLines.get().insertions; out.deletions = changedLines.get().deletions; } out.isPrivate = in.isPrivate() ? true : null; out.workInProgress = in.isWorkInProgress() ? true : null; out.hasReviewStarted = in.hasReviewStarted(); out.subject = in.getSubject(); out.status = in.getStatus().asChangeStatus(); out.owner = accountLoader.get(in.getOwner()); out.created = in.getCreatedOn(); out.updated = in.getLastUpdatedOn(); out._number = in.getId().get(); out.unresolvedCommentCount = cd.unresolvedCommentCount(); if (user.isIdentifiedUser()) { Collection<String> stars = cd.stars(user.getAccountId()); out.starred = stars.contains(StarredChangesUtil.DEFAULT_LABEL) ? true : null; if (!stars.isEmpty()) { out.stars = stars; } } if (in.getStatus().isOpen() && has(REVIEWED) && user.isIdentifiedUser()) { out.reviewed = cd.isReviewedBy(user.getAccountId()) ? 
true : null; } out.labels = labelsFor(perm, cd, has(LABELS), has(DETAILED_LABELS)); if (out.labels != null && has(DETAILED_LABELS)) { // If limited to specific patch sets but not the current patch set, don't // list permitted labels, since users can't vote on those patch sets. if (user.isIdentifiedUser() && (!limitToPsId.isPresent() || limitToPsId.get().equals(in.currentPatchSetId()))) { out.permittedLabels = cd.change().getStatus() != Change.Status.ABANDONED ? permittedLabels(perm, cd) : ImmutableMap.of(); } out.reviewers = reviewerMap(cd.reviewers(), cd.reviewersByEmail(), false); out.pendingReviewers = reviewerMap(cd.pendingReviewers(), cd.pendingReviewersByEmail(), true); out.removableReviewers = removableReviewers(cd, out); } setSubmitter(cd, out); out.plugins = pluginDefinedAttributesFactory != null ? pluginDefinedAttributesFactory.create(cd) : null; out.revertOf = cd.change().getRevertOf() != null ? cd.change().getRevertOf().get() : null; if (has(REVIEWER_UPDATES)) { out.reviewerUpdates = reviewerUpdates(cd); } boolean needMessages = has(MESSAGES); boolean needRevisions = has(ALL_REVISIONS) || has(CURRENT_REVISION) || limitToPsId.isPresent(); Map<PatchSet.Id, PatchSet> src; if (needMessages || needRevisions) { src = loadPatchSets(cd, limitToPsId); } else { src = null; } if (needMessages) { out.messages = messages(cd); } finish(out); // This block must come after the ChangeInfo is mostly populated, since // it will be passed to ActionVisitors as-is. 
if (needRevisions) { out.revisions = revisions(cd, src, limitToPsId, out); if (out.revisions != null) { for (Map.Entry<String, RevisionInfo> entry : out.revisions.entrySet()) { if (entry.getValue().isCurrent) { out.currentRevision = entry.getKey(); break; } } } } if (has(CURRENT_ACTIONS) || has(CHANGE_ACTIONS)) { actionJson.addChangeActions(out, cd.notes()); } if (has(TRACKING_IDS)) { ListMultimap<String, String> set = trackingFooters.extract(cd.commitFooters()); out.trackingIds = set.entries() .stream() .map(e -> new TrackingIdInfo(e.getKey(), e.getValue())) .collect(toList()); } return out; } private Map<ReviewerState, Collection<AccountInfo>> reviewerMap( ReviewerSet reviewers, ReviewerByEmailSet reviewersByEmail, boolean includeRemoved) { Map<ReviewerState, Collection<AccountInfo>> reviewerMap = new HashMap<>(); for (ReviewerStateInternal state : ReviewerStateInternal.values()) { if (!includeRemoved && state == ReviewerStateInternal.REMOVED) { continue; } Collection<AccountInfo> reviewersByState = toAccountInfo(reviewers.byState(state)); reviewersByState.addAll(toAccountInfoByEmail(reviewersByEmail.byState(state))); if (!reviewersByState.isEmpty()) { reviewerMap.put(state.asReviewerState(), reviewersByState); } } return reviewerMap; } private Collection<ReviewerUpdateInfo> reviewerUpdates(ChangeData cd) throws OrmException { List<ReviewerStatusUpdate> reviewerUpdates = cd.reviewerUpdates(); List<ReviewerUpdateInfo> result = new ArrayList<>(reviewerUpdates.size()); for (ReviewerStatusUpdate c : reviewerUpdates) { ReviewerUpdateInfo change = new ReviewerUpdateInfo(); change.updated = c.date(); change.state = c.state().asReviewerState(); change.updatedBy = accountLoader.get(c.updatedBy()); change.reviewer = accountLoader.get(c.reviewer()); result.add(change); } return result; } private boolean submittable(ChangeData cd) throws OrmException { return SubmitRecord.findOkRecord(cd.submitRecords(SUBMIT_RULE_OPTIONS_STRICT)).isPresent(); } private List<SubmitRecord> 
submitRecords(ChangeData cd) throws OrmException { return cd.submitRecords(SUBMIT_RULE_OPTIONS_LENIENT); } private Map<String, LabelInfo> labelsFor( PermissionBackend.ForChange perm, ChangeData cd, boolean standard, boolean detailed) throws OrmException, PermissionBackendException { if (!standard && !detailed) { return null; } LabelTypes labelTypes = cd.getLabelTypes(); Map<String, LabelWithStatus> withStatus = cd.change().getStatus() == Change.Status.MERGED ? labelsForSubmittedChange(perm, cd, labelTypes, standard, detailed) : labelsForUnsubmittedChange(perm, cd, labelTypes, standard, detailed); return ImmutableMap.copyOf(Maps.transformValues(withStatus, LabelWithStatus::label)); } private Map<String, LabelWithStatus> labelsForUnsubmittedChange( PermissionBackend.ForChange perm, ChangeData cd, LabelTypes labelTypes, boolean standard, boolean detailed) throws OrmException, PermissionBackendException { Map<String, LabelWithStatus> labels = initLabels(cd, labelTypes, standard); if (detailed) { setAllApprovals(perm, cd, labels); } for (Map.Entry<String, LabelWithStatus> e : labels.entrySet()) { LabelType type = labelTypes.byLabel(e.getKey()); if (type == null) { continue; } if (standard) { for (PatchSetApproval psa : cd.currentApprovals()) { if (type.matches(psa)) { short val = psa.getValue(); Account.Id accountId = psa.getAccountId(); setLabelScores(type, e.getValue(), val, accountId); } } } if (detailed) { setLabelValues(type, e.getValue()); } } return labels; } private Map<String, LabelWithStatus> initLabels( ChangeData cd, LabelTypes labelTypes, boolean standard) throws OrmException { Map<String, LabelWithStatus> labels = new TreeMap<>(labelTypes.nameComparator()); for (SubmitRecord rec : submitRecords(cd)) { if (rec.labels == null) { continue; } for (SubmitRecord.Label r : rec.labels) { LabelWithStatus p = labels.get(r.label); if (p == null || p.status().compareTo(r.status) < 0) { LabelInfo n = new LabelInfo(); if (standard) { switch (r.status) { case OK: 
n.approved = accountLoader.get(r.appliedBy); break; case REJECT: n.rejected = accountLoader.get(r.appliedBy); n.blocking = true; break; case IMPOSSIBLE: case MAY: case NEED: default: break; } } n.optional = r.status == SubmitRecord.Label.Status.MAY ? true : null; labels.put(r.label, LabelWithStatus.create(n, r.status)); } } } return labels; } private void setLabelScores( LabelType type, LabelWithStatus l, short score, Account.Id accountId) { if (l.label().approved != null || l.label().rejected != null) { return; } if (type.getMin() == null || type.getMax() == null) { // Can't set score for unknown or misconfigured type. return; } if (score != 0) { if (score == type.getMin().getValue()) { l.label().rejected = accountLoader.get(accountId); } else if (score == type.getMax().getValue()) { l.label().approved = accountLoader.get(accountId); } else if (score < 0) { l.label().disliked = accountLoader.get(accountId); l.label().value = score; } else if (score > 0 && l.label().disliked == null) { l.label().recommended = accountLoader.get(accountId); l.label().value = score; } } } private void setAllApprovals( PermissionBackend.ForChange basePerm, ChangeData cd, Map<String, LabelWithStatus> labels) throws OrmException, PermissionBackendException { Change.Status status = cd.change().getStatus(); checkState( status != Change.Status.MERGED, "should not call setAllApprovals on %s change", status); // Include a user in the output for this label if either: // - They are an explicit reviewer. // - They ever voted on this change. 
Set<Account.Id> allUsers = new HashSet<>(); allUsers.addAll(cd.reviewers().byState(ReviewerStateInternal.REVIEWER)); for (PatchSetApproval psa : cd.approvals().values()) { allUsers.add(psa.getAccountId()); } Table<Account.Id, String, PatchSetApproval> current = HashBasedTable.create(allUsers.size(), cd.getLabelTypes().getLabelTypes().size()); for (PatchSetApproval psa : cd.currentApprovals()) { current.put(psa.getAccountId(), psa.getLabel(), psa); } LabelTypes labelTypes = cd.getLabelTypes(); for (Account.Id accountId : allUsers) { PermissionBackend.ForChange perm = basePerm.user(userFactory.create(accountId)); Map<String, VotingRangeInfo> pvr = getPermittedVotingRanges(permittedLabels(perm, cd)); for (Map.Entry<String, LabelWithStatus> e : labels.entrySet()) { LabelType lt = labelTypes.byLabel(e.getKey()); if (lt == null) { // Ignore submit record for undefined label; likely the submit rule // author didn't intend for the label to show up in the table. continue; } Integer value; VotingRangeInfo permittedVotingRange = pvr.getOrDefault(lt.getName(), null); String tag = null; Timestamp date = null; PatchSetApproval psa = current.get(accountId, lt.getName()); if (psa != null) { value = Integer.valueOf(psa.getValue()); if (value == 0) { // This may be a dummy approval that was inserted when the reviewer // was added. Explicitly check whether the user can vote on this // label. value = perm.test(new LabelPermission(lt)) ? 0 : null; } tag = psa.getTag(); date = psa.getGranted(); if (psa.isPostSubmit()) { log.warn("unexpected post-submit approval on open change: {}", psa); } } else { // Either the user cannot vote on this label, or they were added as a // reviewer but have not responded yet. Explicitly check whether the // user can vote on this label. value = perm.test(new LabelPermission(lt)) ? 
0 : null; } addApproval( e.getValue().label(), approvalInfo(accountId, value, permittedVotingRange, tag, date)); } } } private Map<String, VotingRangeInfo> getPermittedVotingRanges( Map<String, Collection<String>> permittedLabels) { Map<String, VotingRangeInfo> permittedVotingRanges = Maps.newHashMapWithExpectedSize(permittedLabels.size()); for (String label : permittedLabels.keySet()) { List<Integer> permittedVotingRange = permittedLabels .get(label) .stream() .map(this::parseRangeValue) .filter(java.util.Objects::nonNull) .sorted() .collect(toList()); if (permittedVotingRange.isEmpty()) { permittedVotingRanges.put(label, null); } else { int minPermittedValue = permittedVotingRange.get(0); int maxPermittedValue = Iterables.getLast(permittedVotingRange); permittedVotingRanges.put(label, new VotingRangeInfo(minPermittedValue, maxPermittedValue)); } } return permittedVotingRanges; } private Integer parseRangeValue(String value) { if (value.startsWith("+")) { value = value.substring(1); } else if (value.startsWith(" ")) { value = value.trim(); } return Ints.tryParse(value); } private void setSubmitter(ChangeData cd, ChangeInfo out) throws OrmException { Optional<PatchSetApproval> s = cd.getSubmitApproval(); if (!s.isPresent()) { return; } out.submitted = s.get().getGranted(); out.submitter = accountLoader.get(s.get().getAccountId()); } private Map<String, LabelWithStatus> labelsForSubmittedChange( PermissionBackend.ForChange basePerm, ChangeData cd, LabelTypes labelTypes, boolean standard, boolean detailed) throws OrmException, PermissionBackendException { Set<Account.Id> allUsers = new HashSet<>(); if (detailed) { // Users expect to see all reviewers on closed changes, even if they // didn't vote on the latest patch set. If we don't need detailed labels, // we aren't including 0 votes for all users below, so we can just look at // the latest patch set (in the next loop). 
for (PatchSetApproval psa : cd.approvals().values()) { allUsers.add(psa.getAccountId()); } } Set<String> labelNames = new HashSet<>(); SetMultimap<Account.Id, PatchSetApproval> current = MultimapBuilder.hashKeys().hashSetValues().build(); for (PatchSetApproval a : cd.currentApprovals()) { allUsers.add(a.getAccountId()); LabelType type = labelTypes.byLabel(a.getLabelId()); if (type != null) { labelNames.add(type.getName()); // Not worth the effort to distinguish between votable/non-votable for 0 // values on closed changes, since they can't vote anyway. current.put(a.getAccountId(), a); } } // Since voting on merged changes is allowed all labels which apply to // the change must be returned. All applying labels can be retrieved from // the submit records, which is what initLabels does. // It's not possible to only compute the labels based on the approvals // since merged changes may not have approvals for all labels (e.g. if not // all labels are required for submit or if the change was auto-closed due // to direct push or if new labels were defined after the change was // merged). Map<String, LabelWithStatus> labels; labels = initLabels(cd, labelTypes, standard); // Also include all labels for which approvals exists. E.g. there can be // approvals for labels that are ignored by a Prolog submit rule and hence // it wouldn't be included in the submit records. 
for (String name : labelNames) { if (!labels.containsKey(name)) { labels.put(name, LabelWithStatus.create(new LabelInfo(), null)); } } if (detailed) { labels .entrySet() .stream() .filter(e -> labelTypes.byLabel(e.getKey()) != null) .forEach(e -> setLabelValues(labelTypes.byLabel(e.getKey()), e.getValue())); } for (Account.Id accountId : allUsers) { Map<String, ApprovalInfo> byLabel = Maps.newHashMapWithExpectedSize(labels.size()); Map<String, VotingRangeInfo> pvr = Collections.emptyMap(); if (detailed) { PermissionBackend.ForChange perm = basePerm.user(userFactory.create(accountId)); pvr = getPermittedVotingRanges(permittedLabels(perm, cd)); for (Map.Entry<String, LabelWithStatus> entry : labels.entrySet()) { ApprovalInfo ai = approvalInfo(accountId, 0, null, null, null); byLabel.put(entry.getKey(), ai); addApproval(entry.getValue().label(), ai); } } for (PatchSetApproval psa : current.get(accountId)) { LabelType type = labelTypes.byLabel(psa.getLabelId()); if (type == null) { continue; } short val = psa.getValue(); ApprovalInfo info = byLabel.get(type.getName()); if (info != null) { info.value = Integer.valueOf(val); info.permittedVotingRange = pvr.getOrDefault(type.getName(), null); info.date = psa.getGranted(); info.tag = psa.getTag(); if (psa.isPostSubmit()) { info.postSubmit = true; } } if (!standard) { continue; } setLabelScores(type, labels.get(type.getName()), val, accountId); } } return labels; } private ApprovalInfo approvalInfo( Account.Id id, Integer value, VotingRangeInfo permittedVotingRange, String tag, Timestamp date) { ApprovalInfo ai = getApprovalInfo(id, value, permittedVotingRange, tag, date); accountLoader.put(ai); return ai; } public static ApprovalInfo getApprovalInfo( Account.Id id, Integer value, VotingRangeInfo permittedVotingRange, String tag, Timestamp date) { ApprovalInfo ai = new ApprovalInfo(id.get()); ai.value = value; ai.permittedVotingRange = permittedVotingRange; ai.date = date; ai.tag = tag; return ai; } private static boolean 
isOnlyZero(Collection<String> values) { return values.isEmpty() || (values.size() == 1 && values.contains(" 0")); } private void setLabelValues(LabelType type, LabelWithStatus l) { l.label().defaultValue = type.getDefaultValue(); l.label().values = new LinkedHashMap<>(); for (LabelValue v : type.getValues()) { l.label().values.put(v.formatValue(), v.getText()); } if (isOnlyZero(l.label().values.keySet())) { l.label().values = null; } } private Map<String, Collection<String>> permittedLabels( PermissionBackend.ForChange perm, ChangeData cd) throws OrmException, PermissionBackendException { boolean isMerged = cd.change().getStatus() == Change.Status.MERGED; LabelTypes labelTypes = cd.getLabelTypes(); Map<String, LabelType> toCheck = new HashMap<>(); for (SubmitRecord rec : submitRecords(cd)) { if (rec.labels != null) { for (SubmitRecord.Label r : rec.labels) { LabelType type = labelTypes.byLabel(r.label); if (type != null && (!isMerged || type.allowPostSubmit())) { toCheck.put(type.getName(), type); } } } } Map<String, Short> labels = null; Set<LabelPermission.WithValue> can = perm.testLabels(toCheck.values()); SetMultimap<String, String> permitted = LinkedHashMultimap.create(); for (SubmitRecord rec : submitRecords(cd)) { if (rec.labels == null) { continue; } for (SubmitRecord.Label r : rec.labels) { LabelType type = labelTypes.byLabel(r.label); if (type == null || (isMerged && !type.allowPostSubmit())) { continue; } for (LabelValue v : type.getValues()) { boolean ok = can.contains(new LabelPermission.WithValue(type, v)); if (isMerged) { if (labels == null) { labels = currentLabels(perm, cd); } short prev = labels.getOrDefault(type.getName(), (short) 0); ok &= v.getValue() >= prev; } if (ok) { permitted.put(r.label, v.formatValue()); } } } } List<String> toClear = Lists.newArrayListWithCapacity(permitted.keySet().size()); for (Map.Entry<String, Collection<String>> e : permitted.asMap().entrySet()) { if (isOnlyZero(e.getValue())) { toClear.add(e.getKey()); } } for 
(String label : toClear) { permitted.removeAll(label); } return permitted.asMap(); } private Map<String, Short> currentLabels(PermissionBackend.ForChange perm, ChangeData cd) throws OrmException { IdentifiedUser user = perm.user().asIdentifiedUser(); Map<String, Short> result = new HashMap<>(); for (PatchSetApproval psa : approvalsUtil.byPatchSetUser( db.get(), lazyLoad ? cd.notes() : notesFactory.createFromIndexedChange(cd.change()), user, cd.change().currentPatchSetId(), user.getAccountId(), null, null)) { result.put(psa.getLabel(), psa.getValue()); } return result; } private Collection<ChangeMessageInfo> messages(ChangeData cd) throws OrmException { List<ChangeMessage> messages = cmUtil.byChange(db.get(), cd.notes()); if (messages.isEmpty()) { return Collections.emptyList(); } List<ChangeMessageInfo> result = Lists.newArrayListWithCapacity(messages.size()); for (ChangeMessage message : messages) { PatchSet.Id patchNum = message.getPatchSetId(); ChangeMessageInfo cmi = new ChangeMessageInfo(); cmi.id = message.getKey().get(); cmi.author = accountLoader.get(message.getAuthor()); cmi.date = message.getWrittenOn(); cmi.message = message.getMessage(); cmi.tag = message.getTag(); cmi._revisionNumber = patchNum != null ? patchNum.get() : null; Account.Id realAuthor = message.getRealAuthor(); if (realAuthor != null) { cmi.realAuthor = accountLoader.get(realAuthor); } result.add(cmi); } return result; } private Collection<AccountInfo> removableReviewers(ChangeData cd, ChangeInfo out) throws PermissionBackendException, NoSuchProjectException, OrmException, IOException { // Although this is called removableReviewers, this method also determines // which CCs are removable. // // For reviewers, we need to look at each approval, because the reviewer // should only be considered removable if *all* of their approvals can be // removed. First, add all reviewers with *any* removable approval to the // "removable" set. 
Along the way, if we encounter a non-removable approval, // add the reviewer to the "fixed" set. Before we return, remove all members // of "fixed" from "removable", because not all of their approvals can be // removed. Collection<LabelInfo> labels = out.labels.values(); Set<Account.Id> fixed = Sets.newHashSetWithExpectedSize(labels.size()); Set<Account.Id> removable = Sets.newHashSetWithExpectedSize(labels.size()); for (LabelInfo label : labels) { if (label.all == null) { continue; } for (ApprovalInfo ai : label.all) { Account.Id id = new Account.Id(ai._accountId); if (removeReviewerControl.testRemoveReviewer( cd, userProvider.get(), id, MoreObjects.firstNonNull(ai.value, 0))) { removable.add(id); } else { fixed.add(id); } } } // CCs are simpler than reviewers. They are removable if the ChangeControl // would permit a non-negative approval by that account to be removed, in // which case add them to removable. We don't need to add unremovable CCs to // "fixed" because we only visit each CC once here. Collection<AccountInfo> ccs = out.reviewers.get(ReviewerState.CC); if (ccs != null) { for (AccountInfo ai : ccs) { if (ai._accountId != null) { Account.Id id = new Account.Id(ai._accountId); if (removeReviewerControl.testRemoveReviewer(cd, userProvider.get(), id, 0)) { removable.add(id); } } } } // Subtract any reviewers with non-removable approvals from the "removable" // set. This also subtracts any CCs that for some reason also hold // unremovable approvals. 
removable.removeAll(fixed); List<AccountInfo> result = Lists.newArrayListWithCapacity(removable.size()); for (Account.Id id : removable) { result.add(accountLoader.get(id)); } // Reviewers added by email are always removable for (Collection<AccountInfo> infos : out.reviewers.values()) { for (AccountInfo info : infos) { if (info._accountId == null) { result.add(info); } } } return result; } private Collection<AccountInfo> toAccountInfo(Collection<Account.Id> accounts) { return accounts .stream() .map(accountLoader::get) .sorted(AccountInfoComparator.ORDER_NULLS_FIRST) .collect(toList()); } private Collection<AccountInfo> toAccountInfoByEmail(Collection<Address> addresses) { return addresses .stream() .map(a -> new AccountInfo(a.getName(), a.getEmail())) .sorted(AccountInfoComparator.ORDER_NULLS_FIRST) .collect(toList()); } @Nullable private Repository openRepoIfNecessary(Project.NameKey project) throws IOException { if (has(ALL_COMMITS) || has(CURRENT_COMMIT) || has(COMMIT_FOOTERS)) { return repoManager.openRepository(project); } return null; } @Nullable private RevWalk newRevWalk(@Nullable Repository repo) { return repo != null ? 
new RevWalk(repo) : null; } private Map<String, RevisionInfo> revisions( ChangeData cd, Map<PatchSet.Id, PatchSet> map, Optional<PatchSet.Id> limitToPsId, ChangeInfo changeInfo) throws PatchListNotAvailableException, GpgException, OrmException, IOException, PermissionBackendException { Map<String, RevisionInfo> res = new LinkedHashMap<>(); Boolean isWorldReadable = null; try (Repository repo = openRepoIfNecessary(cd.project()); RevWalk rw = newRevWalk(repo)) { for (PatchSet in : map.values()) { PatchSet.Id id = in.getId(); boolean want = false; if (has(ALL_REVISIONS)) { want = true; } else if (limitToPsId.isPresent()) { want = id.equals(limitToPsId.get()); } else { want = id.equals(cd.change().currentPatchSetId()); } if (want) { if (isWorldReadable == null) { isWorldReadable = isWorldReadable(cd); } res.put( in.getRevision().get(), toRevisionInfo(cd, in, repo, rw, false, changeInfo, isWorldReadable)); } } return res; } } private Map<PatchSet.Id, PatchSet> loadPatchSets(ChangeData cd, Optional<PatchSet.Id> limitToPsId) throws OrmException { Collection<PatchSet> src; if (has(ALL_REVISIONS) || has(MESSAGES)) { src = cd.patchSets(); } else { PatchSet ps; if (limitToPsId.isPresent()) { ps = cd.patchSet(limitToPsId.get()); if (ps == null) { throw new OrmException("missing patch set " + limitToPsId.get()); } } else { ps = cd.currentPatchSet(); if (ps == null) { throw new OrmException("missing current patch set for change " + cd.getId()); } } src = Collections.singletonList(ps); } Map<PatchSet.Id, PatchSet> map = Maps.newHashMapWithExpectedSize(src.size()); for (PatchSet patchSet : src) { map.put(patchSet.getId(), patchSet); } return map; } public RevisionInfo getRevisionInfo(ChangeData cd, PatchSet in) throws PatchListNotAvailableException, GpgException, OrmException, IOException, PermissionBackendException { accountLoader = accountLoaderFactory.create(has(DETAILED_ACCOUNTS)); try (Repository repo = openRepoIfNecessary(cd.project()); RevWalk rw = newRevWalk(repo)) { 
RevisionInfo rev = toRevisionInfo(cd, in, repo, rw, true, null, isWorldReadable(cd)); accountLoader.fill(); return rev; } } private RevisionInfo toRevisionInfo( ChangeData cd, PatchSet in, @Nullable Repository repo, @Nullable RevWalk rw, boolean fillCommit, @Nullable ChangeInfo changeInfo, boolean isWorldReadable) throws PatchListNotAvailableException, GpgException, OrmException, IOException { Change c = cd.change(); RevisionInfo out = new RevisionInfo(); out.isCurrent = in.getId().equals(c.currentPatchSetId()); out._number = in.getId().get(); out.ref = in.getRefName(); out.created = in.getCreatedOn(); out.uploader = accountLoader.get(in.getUploader()); out.fetch = makeFetchMap(cd, in, isWorldReadable); out.kind = changeKindCache.getChangeKind(rw, repo != null ? repo.getConfig() : null, cd, in); out.description = in.getDescription(); boolean setCommit = has(ALL_COMMITS) || (out.isCurrent && has(CURRENT_COMMIT)); boolean addFooters = out.isCurrent && has(COMMIT_FOOTERS); if (setCommit || addFooters) { checkState(rw != null); checkState(repo != null); Project.NameKey project = c.getProject(); String rev = in.getRevision().get(); RevCommit commit = rw.parseCommit(ObjectId.fromString(rev)); rw.parseBody(commit); if (setCommit) { out.commit = toCommit(project, rw, commit, has(WEB_LINKS), fillCommit); } if (addFooters) { Ref ref = repo.exactRef(cd.change().getDest().get()); RevCommit mergeTip = null; if (ref != null) { mergeTip = rw.parseCommit(ref.getObjectId()); rw.parseBody(mergeTip); } out.commitWithFooters = mergeUtilFactory .create(projectCache.get(project)) .createCommitMessageOnSubmit( commit, mergeTip, cd.notes(), userProvider.get(), in.getId()); } } if (has(ALL_FILES) || (out.isCurrent && has(CURRENT_FILES))) { out.files = fileInfoJson.toFileInfoMap(c, in); out.files.remove(Patch.COMMIT_MSG); out.files.remove(Patch.MERGE_LIST); } if (out.isCurrent && has(CURRENT_ACTIONS) && userProvider.get().isIdentifiedUser()) { actionJson.addRevisionActions( changeInfo, out, 
new RevisionResource(changeResourceFactory.create(cd.notes(), userProvider.get()), in)); } if (gpgApi.isEnabled() && has(PUSH_CERTIFICATES)) { if (in.getPushCertificate() != null) { out.pushCertificate = gpgApi.checkPushCertificate( in.getPushCertificate(), userFactory.create(in.getUploader())); } else { out.pushCertificate = new PushCertificateInfo(); } } return out; } CommitInfo toCommit( Project.NameKey project, RevWalk rw, RevCommit commit, boolean addLinks, boolean fillCommit) throws IOException { CommitInfo info = new CommitInfo(); if (fillCommit) { info.commit = commit.name(); } info.parents = new ArrayList<>(commit.getParentCount()); info.author = toGitPerson(commit.getAuthorIdent()); info.committer = toGitPerson(commit.getCommitterIdent()); info.subject = commit.getShortMessage(); info.message = commit.getFullMessage(); if (addLinks) { List<WebLinkInfo> links = webLinks.getPatchSetLinks(project, commit.name()); info.webLinks = links.isEmpty() ? null : links; } for (RevCommit parent : commit.getParents()) { rw.parseBody(parent); CommitInfo i = new CommitInfo(); i.commit = parent.name(); i.subject = parent.getShortMessage(); if (addLinks) { List<WebLinkInfo> parentLinks = webLinks.getParentLinks(project, parent.name()); i.webLinks = parentLinks.isEmpty() ? 
null : parentLinks; } info.parents.add(i); } return info; } private Map<String, FetchInfo> makeFetchMap(ChangeData cd, PatchSet in, boolean isWorldReadable) { Map<String, FetchInfo> r = new LinkedHashMap<>(); for (DynamicMap.Entry<DownloadScheme> e : downloadSchemes) { String schemeName = e.getExportName(); DownloadScheme scheme = e.getProvider().get(); if (!scheme.isEnabled() || (scheme.isAuthRequired() && !userProvider.get().isIdentifiedUser())) { continue; } if (!scheme.isAuthSupported() && !isWorldReadable) { continue; } String projectName = cd.project().get(); String url = scheme.getUrl(projectName); String refName = in.getRefName(); FetchInfo fetchInfo = new FetchInfo(url, refName); r.put(schemeName, fetchInfo); if (has(DOWNLOAD_COMMANDS)) { populateFetchMap(scheme, downloadCommands, projectName, refName, fetchInfo); } } return r; } public static void populateFetchMap( DownloadScheme scheme, DynamicMap<DownloadCommand> commands, String projectName, String refName, FetchInfo fetchInfo) { for (DynamicMap.Entry<DownloadCommand> e2 : commands) { String commandName = e2.getExportName(); DownloadCommand command = e2.getProvider().get(); String c = command.getCommand(scheme, projectName, refName); if (c != null) { addCommand(fetchInfo, commandName, c); } } } private static void addCommand(FetchInfo fetchInfo, String commandName, String c) { if (fetchInfo.commands == null) { fetchInfo.commands = new TreeMap<>(); } fetchInfo.commands.put(commandName, c); } static void finish(ChangeInfo info) { info.id = Joiner.on('~') .join(Url.encode(info.project), Url.encode(info.branch), Url.encode(info.changeId)); } private static void addApproval(LabelInfo label, ApprovalInfo approval) { if (label.all == null) { label.all = new ArrayList<>(); } label.all.add(approval); } /** * @return {@link com.google.gerrit.server.permissions.PermissionBackend.ForChange} constructed * from either an index-backed or a database-backed {@link ChangeData} depending on {@code * lazyload}. 
*/ private PermissionBackend.ForChange permissionBackendForChange(CurrentUser user, ChangeData cd) throws OrmException { PermissionBackend.WithUser withUser = permissionBackend.user(user).database(db); return lazyLoad ? withUser.change(cd) : withUser.indexedChange(cd, notesFactory.createFromIndexedChange(cd.change())); } private boolean isWorldReadable(ChangeData cd) throws OrmException, PermissionBackendException { try { permissionBackendForChange(anonymous, cd).check(ChangePermission.READ); return true; } catch (AuthException ae) { return false; } } @AutoValue abstract static class LabelWithStatus { private static LabelWithStatus create(LabelInfo label, SubmitRecord.Label.Status status) { return new AutoValue_ChangeJson_LabelWithStatus(label, status); } abstract LabelInfo label(); @Nullable abstract SubmitRecord.Label.Status status(); } }
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.vcs.changes.committed;

import com.intellij.concurrency.JobScheduler;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.Application;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.components.PersistentStateComponent;
import com.intellij.openapi.components.State;
import com.intellij.openapi.components.Storage;
import com.intellij.openapi.components.StoragePathMacros;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.MessageType;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.vcs.*;
import com.intellij.openapi.vcs.changes.Change;
import com.intellij.openapi.vcs.impl.ProjectLevelVcsManagerImpl;
import com.intellij.openapi.vcs.impl.VcsInitObject;
import com.intellij.openapi.vcs.ui.VcsBalloonProblemNotifier;
import com.intellij.openapi.vcs.update.UpdatedFiles;
import com.intellij.openapi.vcs.versionBrowser.ChangeBrowserSettings;
import com.intellij.openapi.vcs.versionBrowser.CommittedChangeList;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.Consumer;
import com.intellij.util.MessageBusUtil;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.MultiMap;
import com.intellij.util.messages.MessageBus;
import com.intellij.util.messages.MessageBusConnection;
import com.intellij.util.messages.Topic;
import com.intellij.vcs.ProgressManagerQueue;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;

import java.io.File;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;

/**
 * Project-level cache of committed changelists loaded from VCS providers.
 * All cache work is serialized through {@code myTaskQueue}; interested parties
 * subscribe to {@link #COMMITTED_TOPIC} on the project message bus.
 *
 * @author yole
 */
@State(
  name = "CommittedChangesCache",
  storages = {@Storage(StoragePathMacros.WORKSPACE_FILE)}
)
public class CommittedChangesCache implements PersistentStateComponent<CommittedChangesCache.State> {
  private static final Logger LOG = Logger.getInstance("#com.intellij.openapi.vcs.changes.committed.CommittedChangesCache");

  private final Project myProject;
  private final MessageBus myBus;
  // Serializes all cache reads/refreshes off the EDT.
  private final ProgressManagerQueue myTaskQueue;
  private final MessageBusConnection myConnection;
  private boolean myRefreshingIncomingChanges = false;
  // Counts caches whose "process updated files" pass is still pending.
  private int myPendingUpdateCount = 0;
  private State myState = new State();
  // Handle of the periodic refresh scheduled in updateRefreshTimer().
  private ScheduledFuture myFuture;
  private List<CommittedChangeList> myCachedIncomingChangeLists;
  private final Set<CommittedChangeList> myNewIncomingChanges = new LinkedHashSet<>();
  private final ProjectLevelVcsManager myVcsManager;
  // (sic) field name carries a historical typo; kept to avoid churn.
  private MyRefreshRunnable myRefresnRunnable;
  // location key -> (latest changelist number, lists loaded by external callers).
  private final Map<String, Pair<Long, List<CommittedChangeList>>> myExternallyLoadedChangeLists;
  private final CachesHolder myCachesHolder;
  private final RepositoryLocationCache myLocationCache;

  /** Persisted (workspace-level) settings for the cache; see the @State annotation. */
  public static class State {
    private int myInitialCount = 500;
    private int myInitialDays = 90;
    // Minutes between automatic refreshes when enabled.
    private int myRefreshInterval = 30;
    private boolean myRefreshEnabled = false;

    public int getInitialCount() { return myInitialCount; }
    public void setInitialCount(final int initialCount) { myInitialCount = initialCount; }
    public int getInitialDays() { return myInitialDays; }
    public void setInitialDays(final int initialDays) { myInitialDays = initialDays; }
    public int getRefreshInterval() { return myRefreshInterval; }
    public void setRefreshInterval(final int refreshInterval) { myRefreshInterval = refreshInterval; }
    public boolean isRefreshEnabled() { return myRefreshEnabled; }
    public void setRefreshEnabled(final boolean refreshEnabled) { myRefreshEnabled = refreshEnabled; }
  }

  public static final Topic<CommittedChangesListener> COMMITTED_TOPIC =
    new Topic<>("committed changes updates", CommittedChangesListener.class);

  public static CommittedChangesCache getInstance(Project project) {
    return project.getComponent(CommittedChangesCache.class);
  }

  public CommittedChangesCache(final Project project, final MessageBus bus, final ProjectLevelVcsManager vcsManager) {
    myProject = project;
    myBus = bus;
    myConnection = myBus.connect();
    // On VCS mapping changes: drop the location cache, refresh everything and
    // tell listeners to reload (empty lists force a UI reset).
    final VcsListener vcsListener = new VcsListener() {
      @Override
      public void directoryMappingChanged() {
        myLocationCache.reset();
        refreshAllCachesAsync(false, true);
        refreshIncomingChangesAsync();
        myTaskQueue.run(() -> {
          for (ChangesCacheFile file : myCachesHolder.getAllCaches()) {
            final RepositoryLocation location = file.getLocation();
            fireChangesLoaded(location, Collections.emptyList());
          }
          fireIncomingReloaded();
        });
      }
    };
    myLocationCache = new RepositoryLocationCache(project);
    myCachesHolder = new CachesHolder(project, myLocationCache);
    myTaskQueue = new ProgressManagerQueue(project, VcsBundle.message("committed.changes.refresh.progress"));
    // Defer queue start and topic subscription until the VCS manager reaches
    // the COMMITTED_CHANGES_CACHE initialization stage.
    ((ProjectLevelVcsManagerImpl) vcsManager).addInitializationRequest(VcsInitObject.COMMITTED_CHANGES_CACHE, () ->
      ApplicationManager.getApplication().runReadAction(() -> {
        if (myProject.isDisposed()) return;
        myTaskQueue.start();
        myConnection.subscribe(ProjectLevelVcsManager.VCS_CONFIGURATION_CHANGED, vcsListener);
        myConnection.subscribe(ProjectLevelVcsManager.VCS_CONFIGURATION_CHANGED_IN_PLUGIN, vcsListener);
      }));
    myVcsManager = vcsManager;
    Disposer.register(project, new Disposable() {
      @Override
      public void dispose() {
        cancelRefreshTimer();
        myConnection.disconnect();
      }
    });
    myExternallyLoadedChangeLists = ContainerUtil.newConcurrentMap();
  }

  public MessageBus getMessageBus() { return myBus; }

  @Override
  public State getState() { return myState; }

  @Override
  public void
loadState(@NotNull State state) {
    myState = state;
    // Re-arm (or cancel) the periodic refresh according to the loaded state.
    updateRefreshTimer();
  }

  // Returns the committed-changes provider for the project: null if no active
  // VCS has one, the single provider if exactly one does, otherwise a
  // composite over all of them.
  @Nullable
  public CommittedChangesProvider getProviderForProject() {
    final AbstractVcs[] vcss = myVcsManager.getAllActiveVcss();
    List<AbstractVcs> vcsWithProviders = new ArrayList<>();
    for(AbstractVcs vcs: vcss) {
      if (vcs.getCommittedChangesProvider() != null) {
        vcsWithProviders.add(vcs);
      }
    }
    if (vcsWithProviders.isEmpty()) {
      return null;
    }
    if (vcsWithProviders.size() == 1) {
      return vcsWithProviders.get(0).getCommittedChangesProvider();
    }
    return new CompositeCommittedChangesProvider(myProject, vcsWithProviders.toArray(new AbstractVcs[0]));
  }

  // True unless some active caching provider explicitly reports that it does
  // not support a "max count" limit.
  public boolean isMaxCountSupportedForProject() {
    for(AbstractVcs vcs: myVcsManager.getAllActiveVcss()) {
      final CommittedChangesProvider provider = vcs.getCommittedChangesProvider();
      if (provider instanceof CachingCommittedChangesProvider) {
        final CachingCommittedChangesProvider cachingProvider = (CachingCommittedChangesProvider)provider;
        if (!cachingProvider.isMaxCountSupported()) {
          return false;
        }
      }
    }
    return true;
  }

  // Queue task that loads committed changes for every root of every active
  // VCS, optionally zipping per-location lists via the provider's zipper, and
  // reports the merged result (or accumulated errors) on the EDT.
  private class MyProjectChangesLoader implements Runnable {
    private final ChangeBrowserSettings mySettings;
    private final int myMaxCount;
    private final boolean myCacheOnly;
    private final Consumer<? super List<CommittedChangeList>> myConsumer;
    private final Consumer<? super List<VcsException>> myErrorConsumer;

    private final LinkedHashSet<CommittedChangeList> myResult = new LinkedHashSet<>();
    private final List<VcsException> myExceptions = new ArrayList<>();
    private boolean myDisposed = false;

    private MyProjectChangesLoader(ChangeBrowserSettings settings, int maxCount, boolean cacheOnly,
                                   Consumer<? super List<CommittedChangeList>> consumer,
                                   Consumer<? super List<VcsException>> errorConsumer) {
      mySettings = settings;
      myMaxCount = maxCount;
      myCacheOnly = cacheOnly;
      myConsumer = consumer;
      myErrorConsumer = errorConsumer;
    }

    @Override
    public void run() {
      for(AbstractVcs vcs: myVcsManager.getAllActiveVcss()) {
        final CommittedChangesProvider provider = vcs.getCommittedChangesProvider();
        if (provider == null) continue;

        final VcsCommittedListsZipper vcsZipper = provider.getZipper();
        CommittedListsSequencesZipper zipper = null;
        if (vcsZipper != null) {
          zipper = new CommittedListsSequencesZipper(vcsZipper);
        }
        boolean zipSupported = zipper != null;

        final Map<VirtualFile, RepositoryLocation> map = myCachesHolder.getAllRootsUnderVcs(vcs);

        for (VirtualFile root : map.keySet()) {
          if (myProject.isDisposed()) return;

          final RepositoryLocation location = map.get(root);

          try {
            final List<CommittedChangeList> lists = getChanges(mySettings, root, vcs, myMaxCount, myCacheOnly, provider, location);
            if (lists != null) {
              if (zipSupported) {
                zipper.add(location, lists);
              }
              else {
                myResult.addAll(lists);
              }
            }
          }
          catch (VcsException e) {
            myExceptions.add(e);
          }
          catch(ProcessCanceledException e) {
            // Cancellation: remember it so we do not deliver a partial result.
            myDisposed = true;
          }
        }

        if (zipSupported) {
          myResult.addAll(zipper.execute());
        }
      }

      // Deliver either errors or the result back on the EDT.
      ApplicationManager.getApplication().invokeLater(() -> {
        LOG.info("FINISHED CommittedChangesCache.getProjectChangesAsync - execution in queue");
        if (myProject.isDisposed()) {
          return;
        }
        if (myExceptions.size() > 0) {
          myErrorConsumer.consume(myExceptions);
        }
        else if (!myDisposed) {
          myConsumer.consume(new ArrayList<>(myResult));
        }
      }, ModalityState.NON_MODAL);
    }
  }

  // Asynchronous entry point: enqueues a MyProjectChangesLoader on the task queue.
  public void getProjectChangesAsync(final ChangeBrowserSettings settings,
                                     final int maxCount,
                                     final boolean cacheOnly,
                                     final Consumer<? super List<CommittedChangeList>> consumer,
                                     final Consumer<? super List<VcsException>> errorConsumer) {
    final MyProjectChangesLoader loader = new MyProjectChangesLoader(settings, maxCount, cacheOnly, consumer, errorConsumer);
    myTaskQueue.run(loader);
  }

  // Loads changes for one root: from the cache file when allowed (cacheOnly or
  // canGetFromCache), otherwise straight from the provider.
  @Nullable
  public List<CommittedChangeList> getChanges(ChangeBrowserSettings settings, final VirtualFile file,
                                              @NotNull final AbstractVcs vcs, final int maxCount, final boolean cacheOnly,
                                              final CommittedChangesProvider provider,
                                              final RepositoryLocation location) throws VcsException {
    if (settings instanceof CompositeCommittedChangesProvider.CompositeChangeBrowserSettings) {
      settings = ((CompositeCommittedChangesProvider.CompositeChangeBrowserSettings) settings).get(vcs);
    }
    if (provider instanceof CachingCommittedChangesProvider) {
      try {
        if (cacheOnly) {
          ChangesCacheFile cacheFile = myCachesHolder.getCacheFile(vcs, file, location);
          if (!cacheFile.isEmpty()) {
            final RepositoryLocation fileLocation = cacheFile.getLocation();
            fileLocation.onBeforeBatch();
            final List<CommittedChangeList> committedChangeLists = cacheFile.readChanges(settings, maxCount);
            fileLocation.onAfterBatch();
            return committedChangeLists;
          }
          return null;
        }
        else {
          if (canGetFromCache(vcs, settings, file, location, maxCount)) {
            return getChangesWithCaching(vcs, settings, file, location, maxCount);
          }
        }
      }
      catch (IOException e) {
        // Cache I/O failure: fall through to a direct provider query.
        LOG.info(e);
      }
    }
    //noinspection unchecked
    return provider.getCommittedChanges(settings, location, maxCount);
  }

  // Decides whether the cache file can satisfy the given filter without a
  // provider round-trip (generally: yes, unless the filter reaches further
  // back than the cached history and the history is incomplete).
  private boolean canGetFromCache(final AbstractVcs vcs, final ChangeBrowserSettings settings,
                                  final VirtualFile root, final RepositoryLocation location, final int maxCount) throws IOException {
    ChangesCacheFile cacheFile = myCachesHolder.getCacheFile(vcs, root, location);
    if (cacheFile.isEmpty()) {
      return true;   // we'll initialize the cache and check again after that
    }
    if (settings.USE_DATE_BEFORE_FILTER && !settings.USE_DATE_AFTER_FILTER) {
      return cacheFile.hasCompleteHistory();
    }
    if (settings.USE_CHANGE_BEFORE_FILTER && !settings.USE_CHANGE_AFTER_FILTER) {
      return
cacheFile.hasCompleteHistory();
    }

    boolean hasDateFilter = settings.USE_DATE_AFTER_FILTER || settings.USE_DATE_BEFORE_FILTER || settings.USE_CHANGE_AFTER_FILTER || settings.USE_CHANGE_BEFORE_FILTER;
    boolean hasNonDateFilter = settings.isNonDateFilterSpecified();
    if (!hasDateFilter && hasNonDateFilter) {
      return cacheFile.hasCompleteHistory();
    }
    if (settings.USE_DATE_AFTER_FILTER && settings.getDateAfter().getTime() < cacheFile.getFirstCachedDate().getTime()) {
      return cacheFile.hasCompleteHistory();
    }
    if (settings.USE_CHANGE_AFTER_FILTER && settings.getChangeAfterFilter().longValue() < cacheFile.getFirstCachedChangelist()) {
      return cacheFile.hasCompleteHistory();
    }
    return true;
  }

  // Asynchronously answers whether any root has a non-empty cache; the answer
  // is delivered to the continuation on the EDT. Cancellation is reported as
  // "true" (optimistic).
  public void hasCachesForAnyRoot(@Nullable final Consumer<? super Boolean> continuation) {
    myTaskQueue.run(() -> {
      final Ref<Boolean> success = new Ref<>();
      try {
        success.set(hasCachesWithEmptiness(false));
      }
      catch (ProcessCanceledException e) {
        success.set(true);
      }
      ApplicationManager.getApplication().invokeLater(() -> continuation.consume(success.get()), myProject.getDisposed());
    });
  }

  public boolean hasEmptyCaches() {
    try {
      return hasCachesWithEmptiness(true);
    }
    catch (ProcessCanceledException e) {
      return false;
    }
  }

  // True if at least one cache file matches the requested emptiness; iteration
  // stops at the first match.
  private boolean hasCachesWithEmptiness(final boolean emptiness) {
    final Ref<Boolean> resultRef = new Ref<>(Boolean.FALSE);
    myCachesHolder.iterateAllCaches(changesCacheFile -> {
      try {
        if (changesCacheFile.isEmpty() == emptiness) {
          resultRef.set(true);
          return false;
        }
      }
      catch (IOException e) {
        LOG.info(e);
      }
      return true;
    });
    return resultRef.get();
  }

  // Iterator over cached changes in reverse bunches, or null when the cache is
  // empty or unreadable.
  @Nullable
  public Iterator<ChangesBunch> getBackBunchedIterator(final AbstractVcs vcs, final VirtualFile root, final RepositoryLocation location, final int bunchSize) {
    final ChangesCacheFile cacheFile = myCachesHolder.getCacheFile(vcs, root, location);
    try {
      if (! cacheFile.isEmpty()) {
        return cacheFile.getBackBunchedIterator(bunchSize);
      }
    }
    catch (IOException e) {
      LOG.error(e);
    }
    return null;
  }

  // Reads changes through the cache: initializes it if empty, otherwise reads
  // cached lists and appends freshly refreshed ones, trimming to maxCount.
  private List<CommittedChangeList> getChangesWithCaching(final AbstractVcs vcs,
                                                          final ChangeBrowserSettings settings,
                                                          final VirtualFile root,
                                                          final RepositoryLocation location,
                                                          final int maxCount) throws VcsException, IOException {
    ChangesCacheFile cacheFile = myCachesHolder.getCacheFile(vcs, root, location);
    if (cacheFile.isEmpty()) {
      List<CommittedChangeList> changes = initCache(cacheFile);
      if (canGetFromCache(vcs, settings, root, location, maxCount)) {
        settings.filterChanges(changes);
        return trimToSize(changes, maxCount);
      }
      //noinspection unchecked
      return cacheFile.getProvider().getCommittedChanges(settings, location, maxCount);
    }
    else {
      // we take location instance that would be used for deserialization
      final RepositoryLocation fileLocation = cacheFile.getLocation();
      fileLocation.onBeforeBatch();
      final List<CommittedChangeList> changes = cacheFile.readChanges(settings, maxCount);
      fileLocation.onAfterBatch();
      List<CommittedChangeList> newChanges = refreshCache(cacheFile);
      settings.filterChanges(newChanges);
      changes.addAll(newChanges);
      return trimToSize(changes, maxCount);
    }
  }

  // Synchronous full refresh; used by tests only.
  @TestOnly
  public void refreshAllCaches() throws IOException, VcsException {
    debug("Start refreshing all caches");
    final Collection<ChangesCacheFile> files = myCachesHolder.getAllCaches();
    debug(files.size() + " caches found");
    for(ChangesCacheFile file: files) {
      if (file.isEmpty()) {
        initCache(file);
      }
      else {
        refreshCache(file);
      }
    }
    debug("Finished refreshing all caches");
  }

  // First population of a cache file: limited either by count (when supported)
  // or by an "initial days" date window.
  private List<CommittedChangeList> initCache(final ChangesCacheFile cacheFile) throws VcsException, IOException {
    debug("Initializing cache for " + cacheFile.getLocation());
    final CachingCommittedChangesProvider provider = cacheFile.getProvider();
    final RepositoryLocation location = cacheFile.getLocation();
    final ChangeBrowserSettings settings = provider.createDefaultSettings();
    int maxCount = 0;
    if (isMaxCountSupportedForProject()) {
      maxCount = myState.getInitialCount();
    }
    else {
      settings.USE_DATE_AFTER_FILTER = true;
      Calendar calendar = Calendar.getInstance();
      calendar.add(Calendar.DAY_OF_YEAR, -myState.getInitialDays());
      settings.setDateAfter(calendar.getTime());
    }
    //noinspection unchecked
    final List<CommittedChangeList> changes = provider.getCommittedChanges(settings, location, maxCount);
    // when initially initializing cache, assume all changelists are locally available
    writeChangesInReadAction(cacheFile, changes); // this sorts changes in chronological order
    // Fewer changes than requested => we reached the beginning of history.
    if (maxCount > 0 && changes.size() < myState.getInitialCount()) {
      cacheFile.setHaveCompleteHistory(true);
    }
    if (changes.size() > 0) {
      fireChangesLoaded(location, changes);
    }
    return changes;
  }

  private void fireChangesLoaded(final RepositoryLocation location, final List<CommittedChangeList> changes) {
    MessageBusUtil.invokeLaterIfNeededOnSyncPublisher(myProject, COMMITTED_TOPIC, listener -> listener.changesLoaded(location, changes));
  }

  private void fireIncomingReloaded() {
    MessageBusUtil.invokeLaterIfNeededOnSyncPublisher(myProject, COMMITTED_TOPIC, listener -> listener.incomingChangesUpdated(Collections.emptyList()));
  }

  // todo: fix - would externally loaded necessarily for file? i.e. just not efficient now
  // Incremental refresh: first merges externally submitted lists (when their
  // anchor matches the latest cached changelist), then queries the provider
  // for everything strictly after the last cached number/date.
  private List<CommittedChangeList> refreshCache(final ChangesCacheFile cacheFile) throws VcsException, IOException {
    debug("Refreshing cache for " + cacheFile.getLocation());
    final List<CommittedChangeList> newLists = new ArrayList<>();

    final CachingCommittedChangesProvider provider = cacheFile.getProvider();
    final RepositoryLocation location = cacheFile.getLocation();

    final Pair<Long, List<CommittedChangeList>> externalLists = myExternallyLoadedChangeLists.get(location.getKey());
    final long latestChangeList = getLatestListForFile(cacheFile);
    if ((externalLists != null) && (latestChangeList == externalLists.first.longValue())) {
      newLists.addAll(appendLoadedChanges(cacheFile, location, externalLists.second));
      myExternallyLoadedChangeLists.clear();
    }

    final ChangeBrowserSettings defaultSettings = provider.createDefaultSettings();
    int maxCount = 0;
    if (provider.refreshCacheByNumber()) {
      final long number = cacheFile.getLastCachedChangelist();
      debug("Refreshing cache for " + location + " since #" + number);
      if (number >= 0) {
        defaultSettings.CHANGE_AFTER = Long.toString(number);
        defaultSettings.USE_CHANGE_AFTER_FILTER = true;
      }
      else {
        maxCount = myState.getInitialCount();
      }
    }
    else {
      final Date date = cacheFile.getLastCachedDate();
      debug("Refreshing cache for " + location + " since " + date);
      defaultSettings.setDateAfter(date);
      defaultSettings.USE_DATE_AFTER_FILTER = true;
    }
    defaultSettings.STRICTLY_AFTER = true;
    final List<CommittedChangeList> newChanges = provider.getCommittedChanges(defaultSettings, location, maxCount);
    debug("Loaded " + newChanges.size() + " new changelists");
    newLists.addAll(appendLoadedChanges(cacheFile, location, newChanges));

    return newLists;
  }

  private static void debug(@NonNls String message) {
    LOG.debug(message);
  }

  // Persists freshly loaded changes and notifies listeners about those that
  // were actually saved (duplicates are skipped by the cache file).
  private List<CommittedChangeList> appendLoadedChanges(final ChangesCacheFile cacheFile, final RepositoryLocation location,
                                                        final List<?
extends CommittedChangeList> newChanges) throws IOException {
    final List<CommittedChangeList> savedChanges = writeChangesInReadAction(cacheFile, newChanges);
    if (savedChanges.size() > 0) {
      fireChangesLoaded(location, savedChanges);
    }
    return savedChanges;
  }

  // Writes changes to the cache file under a read action; an IOException from
  // inside the action is re-thrown to the caller via a Ref.
  private static List<CommittedChangeList> writeChangesInReadAction(final ChangesCacheFile cacheFile,
                                                                    final List<? extends CommittedChangeList> newChanges) throws IOException {
    // ensure that changes are loaded before taking read action, to avoid stalling UI
    for(CommittedChangeList changeList: newChanges) {
      changeList.getChanges();
    }
    final Ref<IOException> ref = new Ref<>();
    final List<CommittedChangeList> savedChanges = ReadAction.compute(() -> {
      try {
        return cacheFile.writeChanges(newChanges);    // skip duplicates;
      }
      catch (IOException e) {
        ref.set(e);
        return null;
      }
    });
    if (!ref.isNull()) {
      throw ref.get();
    }
    return savedChanges;
  }

  // Trims the OLDEST entries (head of the list) until the size limit is met;
  // a non-positive maxCount means "unlimited".
  private static List<CommittedChangeList> trimToSize(final List<CommittedChangeList> changes, final int maxCount) {
    if (maxCount > 0) {
      while(changes.size() > maxCount) {
        changes.remove(0);
      }
    }
    return changes;
  }

  // Loads incoming changes from every non-empty cache, groups them per VCS,
  // zips per-location lists when the VCS supplies a zipper, caches the merged
  // result and notifies listeners. Background-restricted roots are skipped
  // when inBackground is set.
  public List<CommittedChangeList> loadIncomingChanges(boolean inBackground) {
    final List<CommittedChangeList> result = new ArrayList<>();
    final Collection<ChangesCacheFile> caches = myCachesHolder.getAllCaches();
    debug(caches.size() + " caches found");
    final MultiMap<AbstractVcs, Pair<RepositoryLocation, List<CommittedChangeList>>> byVcs = new MultiMap<>();

    for(ChangesCacheFile cache: caches) {
      try {
        if (inBackground && (! cache.getVcs().isVcsBackgroundOperationsAllowed(cache.getRootPath().getVirtualFile()))) continue;
        if (!cache.isEmpty()) {
          debug("Loading incoming changes for " + cache.getLocation());
          final List<CommittedChangeList> incomingChanges = cache.loadIncomingChanges();
          byVcs.putValue(cache.getVcs(), Pair.create(cache.getLocation(), incomingChanges));
        }
        else {
          debug("Empty cache found for " + cache.getLocation());
        }
      }
      catch (IOException e) {
        LOG.error(e);
      }
    }

    for (AbstractVcs vcs : byVcs.keySet()) {
      final CommittedChangesProvider committedChangesProvider = vcs.getCommittedChangesProvider();
      VcsCommittedListsZipper vcsZipper = committedChangesProvider.getZipper();
      if (vcsZipper != null) {
        final VcsCommittedListsZipper incomingZipper = new IncomingListsZipper(vcsZipper);
        final CommittedListsSequencesZipper zipper = new CommittedListsSequencesZipper(incomingZipper);
        for (Pair<RepositoryLocation, List<CommittedChangeList>> pair : byVcs.get(vcs)) {
          zipper.add(pair.getFirst(), pair.getSecond());
        }
        result.addAll(zipper.execute());
      }
      else {
        for (Pair<RepositoryLocation, List<CommittedChangeList>> pair : byVcs.get(vcs)) {
          result.addAll(pair.getSecond());
        }
      }
    }
    myCachedIncomingChangeLists = result;
    debug("Incoming changes loaded");
    notifyIncomingChangesUpdated(result);
    return result;
  }

  // Zipper that merges several ReceivedChangeList instances describing the
  // same changelist from different locations; grouping and numbering are
  // delegated to the VCS-supplied zipper.
  private static class IncomingListsZipper extends VcsCommittedListsZipperAdapter {
    private final VcsCommittedListsZipper myVcsZipper;

    private IncomingListsZipper(final VcsCommittedListsZipper vcsZipper) {
      super(null);
      myVcsZipper = vcsZipper;
    }

    @Override
    public Pair<List<RepositoryLocationGroup>, List<RepositoryLocation>> groupLocations(final List<RepositoryLocation> in) {
      return myVcsZipper.groupLocations(in);
    }

    @Override
    public CommittedChangeList zip(final RepositoryLocationGroup group, final List<? extends CommittedChangeList> lists) {
      if (lists.size() == 1) {
        return lists.get(0);
      }
      final CommittedChangeList victim = ReceivedChangeList.unwrap(lists.get(0));
      final ReceivedChangeList result = new ReceivedChangeList(victim);
      result.setForcePartial(false);
      final Set<Change> baseChanges = new HashSet<>();

      for (CommittedChangeList list : lists) {
        baseChanges.addAll(ReceivedChangeList.unwrap(list).getChanges());

        final Collection<Change> changes = list.getChanges();
        for (Change change : changes) {
          if (! result.getChanges().contains(change)) {
            result.addChange(change);
          }
        }
      }
      // Partial iff the union of underlying changes is larger than what the
      // merged list actually received.
      result.setForcePartial(baseChanges.size() != result.getChanges().size());
      return result;
    }

    @Override
    public long getNumber(final CommittedChangeList list) {
      return myVcsZipper.getNumber(list);
    }
  }

  // Rewrites the commit message of a cached changelist and re-notifies
  // listeners; failures surface as a balloon over the changes view.
  public void commitMessageChanged(final AbstractVcs vcs, final RepositoryLocation location, final long number, final String newMessage) {
    myTaskQueue.run(() -> {
      final ChangesCacheFile file = myCachesHolder.haveCache(location);
      if (file != null) {
        try {
          if (file.isEmpty()) return;
          file.editChangelist(number, newMessage);
          loadIncomingChanges(true);
          fireChangesLoaded(location, Collections.emptyList());
        }
        catch (IOException e) {
          VcsBalloonProblemNotifier.showOverChangesView(myProject, "Didn't update Repository changes with new message due to error: " + e.getMessage(), MessageType.ERROR);
        }
      }
    });
  }

  public void loadIncomingChangesAsync(@Nullable final Consumer<?
super List<CommittedChangeList>> consumer, final boolean inBackground) {
    debug("Loading incoming changes");
    final Runnable task = () -> {
      final List<CommittedChangeList> list = loadIncomingChanges(inBackground);
      if (consumer != null) {
        consumer.consume(new ArrayList<>(list));
      }
    };
    myTaskQueue.run(task);
  }

  // Clears every cache file, then runs the continuation and broadcasts
  // changesCleared() to listeners.
  public void clearCaches(final Runnable continuation) {
    myTaskQueue.run(() -> {
      myCachesHolder.clearAllCaches();
      myCachedIncomingChangeLists = null;
      continuation.run();
      MessageBusUtil.invokeLaterIfNeededOnSyncPublisher(myProject, COMMITTED_TOPIC, listener -> listener.changesCleared());
    });
  }

  @Nullable
  public List<CommittedChangeList> getCachedIncomingChanges() {
    return myCachedIncomingChangeLists;
  }

  public void processUpdatedFiles(final UpdatedFiles updatedFiles) {
    processUpdatedFiles(updatedFiles, null);
  }

  // Matches files from a VCS update against each cache; caches that cannot
  // account for all files trigger an asynchronous refresh-and-retry.
  public void processUpdatedFiles(final UpdatedFiles updatedFiles,
                                  @Nullable final Consumer<? super List<CommittedChangeList>> incomingChangesConsumer) {
    final Runnable task = () -> {
      debug("Processing updated files");
      final Collection<ChangesCacheFile> caches = myCachesHolder.getAllCaches();
      myPendingUpdateCount += caches.size();
      for(final ChangesCacheFile cache: caches) {
        try {
          if (cache.isEmpty()) {
            pendingUpdateProcessed(incomingChangesConsumer);
            continue;
          }
          debug("Processing updated files in " + cache.getLocation());
          boolean needRefresh = cache.processUpdatedFiles(updatedFiles, myNewIncomingChanges);
          if (needRefresh) {
            debug("Found unaccounted files, requesting refresh");
            // todo do we need double-queueing here???
            processUpdatedFilesAfterRefresh(cache, updatedFiles, incomingChangesConsumer);
          }
          else {
            debug("Clearing cached incoming changelists");
            myCachedIncomingChangeLists = null;
            pendingUpdateProcessed(incomingChangesConsumer);
          }
        }
        catch (IOException e) {
          LOG.error(e);
        }
      }
    };
    myTaskQueue.run(task);
  }

  // Decrements the pending-cache counter; when the last cache reports in, the
  // accumulated new incoming changes are published and the set is cleared.
  private void pendingUpdateProcessed(@Nullable Consumer<? super List<CommittedChangeList>> incomingChangesConsumer) {
    myPendingUpdateCount--;
    if (myPendingUpdateCount == 0) {
      notifyIncomingChangesUpdated(myNewIncomingChanges);
      if (incomingChangesConsumer != null) {
        incomingChangesConsumer.consume(ContainerUtil.newArrayList(myNewIncomingChanges));
      }
      myNewIncomingChanges.clear();
    }
  }

  // Refreshes one cache and re-runs the updated-files matching; if files are
  // still unaccounted, falls back to a full incoming-changes re-check.
  private void processUpdatedFilesAfterRefresh(final ChangesCacheFile cache,
                                               final UpdatedFiles updatedFiles,
                                               @Nullable final Consumer<? super List<CommittedChangeList>> incomingChangesConsumer) {
    refreshCacheAsync(cache, false, new RefreshResultConsumer() {
      @Override
      public void receivedChanges(final List<CommittedChangeList> committedChangeLists) {
        try {
          debug("Processing updated files after refresh in " + cache.getLocation());
          boolean result = true;
          if (committedChangeLists.size() > 0) {
            // received some new changelists, try to process updated files again
            result = cache.processUpdatedFiles(updatedFiles, myNewIncomingChanges);
          }
          debug(result ? "Still have unaccounted files" : "No more unaccounted files");
          // for svn, we won't get exact revision numbers in updatedFiles, so we have to double-check by
          // checking revisions we have locally
          if (result) {
            cache.refreshIncomingChanges();
            debug("Clearing cached incoming changelists");
            myCachedIncomingChangeLists = null;
          }
          pendingUpdateProcessed(incomingChangesConsumer);
        }
        catch (IOException e) {
          LOG.error(e);
        }
        catch(VcsException e) {
          notifyRefreshError(e);
        }
      }

      @Override
      public void receivedError(VcsException ex) {
        notifyRefreshError(ex);
      }
    });
  }

  private void fireIncomingChangesUpdated(final List<? extends CommittedChangeList> lists) {
    MessageBusUtil.invokeLaterIfNeededOnSyncPublisher(myProject, COMMITTED_TOPIC, listener -> listener.incomingChangesUpdated(new ArrayList<>(lists)));
  }

  // Publishes the given changes (or the cached ones when null). When neither
  // is available, schedules a reload; note the dispatch-thread branch defers
  // to the task queue while background threads load synchronously.
  private void notifyIncomingChangesUpdated(@Nullable final Collection<CommittedChangeList> receivedChanges) {
    final Collection<CommittedChangeList> changes = receivedChanges == null ? myCachedIncomingChangeLists : receivedChanges;
    if (changes == null) {
      final Application application = ApplicationManager.getApplication();
      final Runnable runnable = () -> {
        final List<CommittedChangeList> lists = loadIncomingChanges(true);
        fireIncomingChangesUpdated(lists);
      };
      if (application.isDispatchThread()) {
        myTaskQueue.run(runnable);
      }
      else {
        runnable.run();
      }
      return;
    }
    final ArrayList<CommittedChangeList> listCopy = new ArrayList<>(changes);
    fireIncomingChangesUpdated(listCopy);
  }

  private void notifyRefreshError(final VcsException e) {
    MessageBusUtil.invokeLaterIfNeededOnSyncPublisher(myProject, COMMITTED_TOPIC, listener -> listener.refreshErrorStatusChanged(e));
  }

  public boolean isRefreshingIncomingChanges() {
    return myRefreshingIncomingChanges;
  }

  // Synchronous incoming-changes refresh over all non-empty caches; returns
  // whether any cache reported new data.
  public boolean refreshIncomingChanges() {
    boolean hasChanges = false;
    final Collection<ChangesCacheFile> caches = myCachesHolder.getAllCaches();
    for(ChangesCacheFile file: caches) {
      try {
        if (file.isEmpty()) {
          continue;
        }
        debug("Refreshing incoming changes for " + file.getLocation());
        boolean changesForCache = file.refreshIncomingChanges();
        hasChanges |= changesForCache;
      }
      catch (IOException e) {
        LOG.error(e);
      }
      catch(VcsException e) {
        notifyRefreshError(e);
      }
    }
    return hasChanges;
  }

  public void refreshIncomingChangesAsync() {
    debug("Refreshing incoming changes in background");
    myRefreshingIncomingChanges = true;
    final Runnable task = () -> {
      refreshIncomingChanges();
      refreshIncomingUi();
    };
    myTaskQueue.run(task);
  }

  private void refreshIncomingUi() {
    ApplicationManager.getApplication().invokeLater(() -> {
      myRefreshingIncomingChanges = false;
      debug("Incoming changes refresh complete, clearing cached incoming changes");
      notifyReloadIncomingChanges();
    }, ModalityState.NON_MODAL, myProject.getDisposed());
  }

  // Refreshes every cache (optionally initializing empty ones); a shared
  // consumer counts completions and fires a single notification at the end.
  public void refreshAllCachesAsync(final boolean initIfEmpty, final boolean inBackground) {
    final Runnable task = () -> {
      Collection<ChangesCacheFile> files = myCachesHolder.getAllCaches();
      final
RefreshResultConsumer notifyConsumer = new RefreshResultConsumer() {
        private VcsException myError = null;
        private int myCount = 0;
        private int totalChangesCount = 0;

        @Override
        public void receivedChanges(List<CommittedChangeList> changes) {
          totalChangesCount += changes.size();
          checkDone();
        }

        @Override
        public void receivedError(VcsException ex) {
          myError = ex;
          checkDone();
        }

        // Fires listener updates once every cache file has reported back.
        private void checkDone() {
          myCount++;
          if (myCount == files.size()) {
            myTaskQueue.run(() -> {
              if (totalChangesCount > 0) {
                notifyReloadIncomingChanges();
              }
              else {
                myProject.getMessageBus().syncPublisher(CommittedChangesTreeBrowser.ITEMS_RELOADED).emptyRefresh();
              }
            });
            // NOTE(review): only the last error received is propagated here.
            notifyRefreshError(myError);
          }
        }
      };
      for(ChangesCacheFile file: files) {
        if ((! inBackground) || file.getVcs().isVcsBackgroundOperationsAllowed(file.getRootPath().getVirtualFile())) {
          refreshCacheAsync(file, initIfEmpty, notifyConsumer, false);
        }
      }
    };
    myTaskQueue.run(task);
  }

  private void notifyReloadIncomingChanges() {
    myCachedIncomingChangeLists = null;
    notifyIncomingChangesUpdated(null);
  }

  private void refreshCacheAsync(final ChangesCacheFile cache, final boolean initIfEmpty, @Nullable final RefreshResultConsumer consumer) {
    refreshCacheAsync(cache, initIfEmpty, consumer, true);
  }

  // Refreshes (or initializes) one cache file, reporting the result or the
  // error to the consumer; runs on the task queue unless asynch is false
  // (i.e. the caller is already inside a queued task).
  private void refreshCacheAsync(final ChangesCacheFile cache, final boolean initIfEmpty,
                                 @Nullable final RefreshResultConsumer consumer, final boolean asynch) {
    try {
      if (!initIfEmpty && cache.isEmpty()) {
        return;
      }
    }
    catch (IOException e) {
      LOG.error(e);
      return;
    }
    final Runnable task = () -> {
      try {
        final List<CommittedChangeList> list;
        if (initIfEmpty && cache.isEmpty()) {
          list = initCache(cache);
        }
        else {
          list = refreshCache(cache);
        }
        if (consumer != null) {
          consumer.receivedChanges(list);
        }
      }
      catch(ProcessCanceledException ex) {
        // ignore
      }
      catch (IOException e) {
        LOG.error(e);
      }
      catch (VcsException e) {
        if (consumer != null) {
          consumer.receivedError(e);
        }
      }
    };
    if (asynch) {
      myTaskQueue.run(task);
    }
    else {
      task.run();
    }
  }

  // (Re)schedules the periodic refresh according to the persisted settings.
  private void updateRefreshTimer() {
    cancelRefreshTimer();
    if (myState.isRefreshEnabled()) {
      myRefresnRunnable = new MyRefreshRunnable(this);
      // if "schedule with fixed rate" is used, then after waking up from stand-by mode, events are generated for inactive period
      // it does not make sense
      myFuture = JobScheduler.getScheduler().scheduleWithFixedDelay(myRefresnRunnable,
                                                                    myState.getRefreshInterval() * 60L,
                                                                    myState.getRefreshInterval() * 60L, TimeUnit.SECONDS);
    }
  }

  private void cancelRefreshTimer() {
    if (myRefresnRunnable != null) {
      myRefresnRunnable.cancel();
      myRefresnRunnable = null;
    }
    if (myFuture != null) {
      myFuture.cancel(false);
      myFuture = null;
    }
  }

  // Finds the cached incoming changelist (and the specific change) affecting
  // the given file, or null when none is cached.
  @Nullable
  public Pair<CommittedChangeList, Change> getIncomingChangeList(final VirtualFile file) {
    if (myCachedIncomingChangeLists != null) {
      File ioFile = new File(file.getPath());
      for(CommittedChangeList changeList: myCachedIncomingChangeLists) {
        for(Change change: changeList.getChanges()) {
          if (change.affectsFile(ioFile)) {
            return Pair.create(changeList, change);
          }
        }
      }
    }
    return null;
  }

  // Latest cached changelist number, or -1 for a null/empty/unreadable cache.
  private long getLatestListForFile(final ChangesCacheFile file) {
    try {
      if ((file == null) || (file.isEmpty())) {
        return -1;
      }
      return file.getLastCachedChangelist();
    }
    catch (IOException e) {
      return -1;
    }
  }

  public CachesHolder getCachesHolder() {
    return myCachesHolder;
  }

  // Registers lists loaded outside this cache, anchored to the changelist
  // number they follow; consumed (and cleared) by refreshCache().
  public void submitExternallyLoaded(final RepositoryLocation location, final long myLastCl, final List<CommittedChangeList> lists) {
    myExternallyLoadedChangeLists.put(location.getKey(), new Pair<>(myLastCl, lists));
  }

  private interface RefreshResultConsumer {
    void receivedChanges(List<CommittedChangeList> changes);
    void receivedError(VcsException ex);
  }

  // Periodic refresh task; holds the cache through a clearable reference so
  // that cancel() detaches it from the scheduler without racing run().
  private static class MyRefreshRunnable implements Runnable {
    private CommittedChangesCache myCache;

    private MyRefreshRunnable(final CommittedChangesCache cache) {
      myCache = cache;
    }

    private void cancel() {
      myCache = null;
    }

    @Override
    public void run() {
      final CommittedChangesCache cache = myCache;
      if (cache == null) return;
      cache.refreshAllCachesAsync(false, true);
      for(ChangesCacheFile file: cache.getCachesHolder().getAllCaches()) {
        if (file.getVcs().isVcsBackgroundOperationsAllowed(file.getRootPath().getVirtualFile())) {
          if (file.getProvider().refreshIncomingWithCommitted()) {
            cache.refreshIncomingChangesAsync();
            break;
          }
        }
      }
    }
  }

  public RepositoryLocationCache getLocationCache() {
    return myLocationCache;
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.runtime.resourcemanager.slotmanager;

import org.apache.flink.api.common.JobID;
import org.apache.flink.runtime.clusterframework.types.AllocationID;
import org.apache.flink.runtime.clusterframework.types.ResourceProfile;
import org.apache.flink.runtime.instance.InstanceID;
import org.apache.flink.runtime.resourcemanager.registration.TaskExecutorConnection;
import org.apache.flink.runtime.slots.ResourceCounter;
import org.apache.flink.util.Preconditions;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;

/** Implementation of {@link TaskManagerTracker} supporting fine-grained resource management. */
public class FineGrainedTaskManagerTracker implements TaskManagerTracker {

    private static final Logger LOG =
            LoggerFactory.getLogger(FineGrainedTaskManagerTracker.class);

    /** Map for allocated and pending slots, keyed by allocation id. */
    private final Map<AllocationID, FineGrainedTaskManagerSlot> slots = new HashMap<>();

    /** All currently registered task managers, keyed by instance id. */
    private final Map<InstanceID, FineGrainedTaskManagerRegistration> taskManagerRegistrations =
            new HashMap<>();

    /** Task managers that have been requested but are not yet registered. */
    private final Map<PendingTaskManagerId, PendingTaskManager> pendingTaskManagers =
            new HashMap<>();

    /** Per-job slot allocations recorded against each pending task manager. */
    private final Map<PendingTaskManagerId, Map<JobID, ResourceCounter>>
            pendingSlotAllocationRecords = new HashMap<>();

    @Override
    public void replaceAllPendingAllocations(
            Map<PendingTaskManagerId, Map<JobID, ResourceCounter>> pendingSlotAllocations) {
        Preconditions.checkNotNull(pendingSlotAllocations);
        LOG.trace("Record the pending allocations {}.", pendingSlotAllocations);
        // Wholesale replacement: drop every previous record, then copy the new ones in.
        pendingSlotAllocationRecords.clear();
        pendingSlotAllocationRecords.putAll(pendingSlotAllocations);
    }

    @Override
    public void addTaskManager(
            TaskExecutorConnection taskExecutorConnection,
            ResourceProfile totalResourceProfile,
            ResourceProfile defaultSlotResourceProfile) {
        Preconditions.checkNotNull(taskExecutorConnection);
        Preconditions.checkNotNull(totalResourceProfile);
        Preconditions.checkNotNull(defaultSlotResourceProfile);
        LOG.debug(
                "Add task manager {} with total resource {} and default slot resource {}.",
                taskExecutorConnection.getInstanceID(),
                totalResourceProfile,
                defaultSlotResourceProfile);
        final FineGrainedTaskManagerRegistration registration =
                new FineGrainedTaskManagerRegistration(
                        taskExecutorConnection, totalResourceProfile, defaultSlotResourceProfile);
        taskManagerRegistrations.put(taskExecutorConnection.getInstanceID(), registration);
    }

    @Override
    public void removeTaskManager(InstanceID instanceId) {
        Preconditions.checkNotNull(instanceId);
        final FineGrainedTaskManagerRegistration registration =
                Preconditions.checkNotNull(taskManagerRegistrations.remove(instanceId));
        LOG.debug("Remove task manager {}.", instanceId);
        // Also drop every slot that was living on the removed task manager.
        registration.getAllocatedSlots().keySet().forEach(slots::remove);
    }

    @Override
    public void addPendingTaskManager(PendingTaskManager pendingTaskManager) {
        Preconditions.checkNotNull(pendingTaskManager);
        LOG.debug("Add pending task manager {}.", pendingTaskManager);
        pendingTaskManagers.put(pendingTaskManager.getPendingTaskManagerId(), pendingTaskManager);
    }

    @Override
    public Map<JobID, ResourceCounter> removePendingTaskManager(
            PendingTaskManagerId pendingTaskManagerId) {
        Preconditions.checkNotNull(pendingTaskManagerId);
        Preconditions.checkNotNull(pendingTaskManagers.remove(pendingTaskManagerId));
        LOG.debug("Remove pending task manager {}.", pendingTaskManagerId);
        // Hand the recorded allocations back to the caller; absent means "no allocations".
        final Map<JobID, ResourceCounter> recordedAllocations =
                pendingSlotAllocationRecords.remove(pendingTaskManagerId);
        return recordedAllocations == null ? Collections.emptyMap() : recordedAllocations;
    }

    // ---------------------------------------------------------------------------------------------
    // Core state transitions
    // ---------------------------------------------------------------------------------------------

    @Override
    public void notifySlotStatus(
            AllocationID allocationId,
            JobID jobId,
            InstanceID instanceId,
            ResourceProfile resourceProfile,
            SlotState slotState) {
        Preconditions.checkNotNull(allocationId);
        Preconditions.checkNotNull(jobId);
        Preconditions.checkNotNull(instanceId);
        Preconditions.checkNotNull(resourceProfile);
        Preconditions.checkNotNull(slotState);
        // Dispatch on the reported slot state.
        if (slotState == SlotState.FREE) {
            freeSlot(instanceId, allocationId);
        } else if (slotState == SlotState.ALLOCATED) {
            addAllocatedSlot(allocationId, jobId, instanceId, resourceProfile);
        } else if (slotState == SlotState.PENDING) {
            addPendingSlot(allocationId, jobId, instanceId, resourceProfile);
        }
    }

    /** Releases a previously tracked slot on the given task manager. */
    private void freeSlot(InstanceID instanceId, AllocationID allocationId) {
        final FineGrainedTaskManagerRegistration registration =
                Preconditions.checkNotNull(taskManagerRegistrations.get(instanceId));
        Preconditions.checkNotNull(slots.remove(allocationId));
        LOG.debug("Free allocated slot with allocationId {}.", allocationId);
        registration.freeSlot(allocationId);
    }

    /** Records a slot as allocated, either completing a pending one or registering a new one. */
    private void addAllocatedSlot(
            AllocationID allocationId,
            JobID jobId,
            InstanceID instanceId,
            ResourceProfile resourceProfile) {
        final FineGrainedTaskManagerRegistration registration =
                Preconditions.checkNotNull(taskManagerRegistrations.get(instanceId));
        if (!slots.containsKey(allocationId)) {
            // New allocated slot
            LOG.debug("Register new allocated slot with allocationId {}.", allocationId);
            final FineGrainedTaskManagerSlot newSlot =
                    new FineGrainedTaskManagerSlot(
                            allocationId,
                            jobId,
                            resourceProfile,
                            registration.getTaskExecutorConnection(),
                            SlotState.ALLOCATED);
            slots.put(allocationId, newSlot);
            registration.notifyAllocation(allocationId, newSlot);
        } else {
            // Complete allocation of pending slot
            LOG.debug("Complete slot allocation with allocationId {}.", allocationId);
            registration.notifyAllocationComplete(allocationId);
        }
    }

    /** Records a slot as pending; the allocation id must not be tracked yet. */
    private void addPendingSlot(
            AllocationID allocationId,
            JobID jobId,
            InstanceID instanceId,
            ResourceProfile resourceProfile) {
        Preconditions.checkState(!slots.containsKey(allocationId));
        final FineGrainedTaskManagerRegistration registration =
                Preconditions.checkNotNull(taskManagerRegistrations.get(instanceId));
        LOG.debug("Add pending slot with allocationId {}.", allocationId);
        final FineGrainedTaskManagerSlot pendingSlot =
                new FineGrainedTaskManagerSlot(
                        allocationId,
                        jobId,
                        resourceProfile,
                        registration.getTaskExecutorConnection(),
                        SlotState.PENDING);
        registration.notifyAllocation(allocationId, pendingSlot);
        slots.put(allocationId, pendingSlot);
    }

    // ---------------------------------------------------------------------------------------------
    // Getters of internal state
    // ---------------------------------------------------------------------------------------------

    @Override
    public Map<JobID, ResourceCounter> getPendingAllocationsOfPendingTaskManager(
            PendingTaskManagerId pendingTaskManagerId) {
        final Map<JobID, ResourceCounter> recorded =
                pendingSlotAllocationRecords.getOrDefault(
                        pendingTaskManagerId, Collections.emptyMap());
        return Collections.unmodifiableMap(recorded);
    }

    @Override
    public Collection<? extends TaskManagerInfo> getRegisteredTaskManagers() {
        return Collections.unmodifiableCollection(taskManagerRegistrations.values());
    }

    @Override
    public Optional<TaskManagerInfo> getRegisteredTaskManager(InstanceID instanceId) {
        return Optional.ofNullable(taskManagerRegistrations.get(instanceId));
    }

    @Override
    public Optional<TaskManagerSlotInformation> getAllocatedOrPendingSlot(
            AllocationID allocationId) {
        return Optional.ofNullable(slots.get(allocationId));
    }

    @Override
    public Collection<PendingTaskManager> getPendingTaskManagers() {
        return Collections.unmodifiableCollection(pendingTaskManagers.values());
    }

    @Override
    public ClusterResourceOverview getClusterResourceOverview() {
        return new ClusterResourceOverview(taskManagerRegistrations);
    }

    @Override
    public void clear() {
        slots.clear();
        taskManagerRegistrations.clear();
        pendingTaskManagers.clear();
        pendingSlotAllocationRecords.clear();
    }
}
/*
 * Copyright (C) 2015 University of Oregon
 *
 * You may distribute under the terms of either the GNU General Public
 * License or the Apache License, as specified in the LICENSE file.
 *
 * For more information, see the LICENSE file.
 */
package vnmr.jgl;

import javax.media.opengl.*;
import javax.media.opengl.glu.*;
import vnmr.util.DebugOutput;
import java.io.*;
import java.nio.IntBuffer;
import java.nio.ByteBuffer;
import java.util.Hashtable;
import java.util.StringTokenizer;

/**
 * GLSL implementation of {@link JGLShaderProgram}.
 *
 * <p>Wraps the ARB shader-object API (glCreateProgramObjectARB /
 * glShaderSourceARB / glCompileShaderARB / glLinkProgramARB ...) to compile,
 * link, bind and feed uniforms to a vertex + fragment shader pair. The shader
 * sources themselves are kept here as Java string constants; which pair gets
 * compiled is selected by the mode bits passed to {@link #loadShader(int)}.
 */
public class GLSLProgram extends JGLShaderProgram {
    // NOTE(review): appears unused in this class; the ARB program handle is
    // kept in the inherited "program" field (set in the constructor). Confirm
    // before removing.
    int program_obj;
    // Preprocessor prefix prepended to shader sources by loadShader(int)
    // (set to "#define LIGHTING\n" when the SLGT mode bit is present).
    static public String Defs="";
    // ------------------------------------
    // GLSL vertex shader
    // ------------------------------------
    // Shared vertex shader: passes through position/texcoord/color and, when
    // LIGHTING is defined, eye-space normal and eye direction.
    static public String VShader=new String(""
        +"varying vec3 P;\n"
        +"varying vec4 EyeDirection;\n"
        +"varying vec4 Normal;\n"
        +"varying vec4 Color;\n"
        +"void main (void)\n"
        +"{\n"
        +"    gl_Position = ftransform();\n"
        +"    gl_TexCoord[0] = gl_MultiTexCoord0;\n"
        +"#ifdef LIGHTING\n"
        +"    Normal.xyz = gl_NormalMatrix * gl_Normal;\n"
        +"    EyeDirection=(gl_ModelViewMatrix * gl_Vertex);\n"
        +"#endif\n"
        +"    Color=gl_Color;\n"
        +"    P = gl_Vertex.xyz;\n"
        +"}\n"
        );
    //
    //  GLSL fragment shader
    // ------------------------------------
    // inputs
    // ------------------------------------
    // [voltex]     volume data        2D[3D]-texture
    // colormap     color palette      1D-texture
    // functionmap  contrast LUT       1D-texture
    // params[0].x  color index
    // params[0].y  intensity
    // params[0].z  threshold
    // 1D fragment shader: palette/contrast lookup plus optional grid lines,
    // contour lines and (when LIGHTING is defined) simple Blinn-style shading.
    static public String FShader1D=new String(""
        +"varying vec3 P;\n"
        +"varying vec4 Color;\n"
        +"uniform vec4 params;\n"
        +"uniform vec4 gcol;\n"
        +"uniform vec4 bcol;\n"
        +"uniform int cmode;\n"
        +"uniform sampler1D functionmap;\n"
        +"uniform sampler1D colormap;\n"
        +"#define grid params.w\n"
        +"vec4 addGrid(vec4 color,vec4 col){ \n"
        +"    vec2 f  = abs(fract (P.xz * grid)-0.5);\n"
        +"    vec2 df = fwidth(P.xz * grid);\n"
        +"    vec2 g2 = smoothstep(-df,df , f);\n"
        +"    float g = g2.x * g2.y; \n"
        +"    g *=1.0-g; \n"
        +"    g = min(2.0*g,1.0); \n"
        +"    vec3 c=mix(color.rgb,col.rgb,g);\n"
        +"    return vec4(c,color.a);\n"
        +"}\n"
        +"#define spacing params.z\n"
        +"vec4 addContours(vec4 color, vec4 col){ \n"
        //+"  if(spacing==0.0) return color;\n"
        //+"  float y = P.y;\n"
        +"    float y = pow(abs(P.y),0.5);\n"
        +"    float f = abs(fract (y * abs(spacing))-0.5);\n"
        +"    float dy = fwidth(y * spacing);\n"
        +"    float g = smoothstep(-dy,dy , f);\n"
        +"    g *=1.0-g; \n"
        +"    g = min(3.0*g,1.0); \n"
        //+"  g *= 1.0-smoothstep(0.2,0.22,abs(y)); \n"
        +"    vec3 c=mix(color.rgb,col.rgb,g);\n"
        +"    return vec4(c,g);\n"
        +"}\n"
        +"#ifdef LIGHTING\n"
        +"varying vec4 EyeDirection;\n"
        +"varying vec4 Normal;\n"
        +"uniform vec4 lpars;\n"
        +"uniform vec4 lpos;\n"
        +"vec4 addLighting(vec4 color){ \n"
        +"    vec3 normal = normalize(Normal.xyz);\n"
        +"    vec3 eye = normalize(EyeDirection.xyz);\n"
        +"    float ldn = dot(normalize(lpos.xyz),normal);\n"
        +"    float ambient = lpars.x;\n"
        +"    float diffuse = lpars.y*max(ldn,0.0);\n"
        +"    vec3 H = normalize((lpos.xyz + eye)*0.5);\n"
        +"    float sdp = max(0.0, dot(normal,H));\n"
        +"    float specular = lpars.z*pow(sdp, lpars.w);\n"
        +"    vec3 col= color.rgb*(ambient+diffuse)+specular;\n"
        +"    return vec4(col,color.a);\n"
        +"}\n"
        +"#endif\n"
        +"void main (void)\n"
        +"{\n"
        +"    float f = gl_TexCoord[0].x;\n"
        +"    vec4 fmap = texture1D(functionmap,f);\n"
        +"    float b = (params.x>=0.0)?params.x:fmap.r;\n"
        +"    float a = (params.y>0.5)?fmap.y:1.0;\n"
        +"    vec4 tcol;\n"
        +"    if(cmode<1)\n"
        +"        tcol=texture1D(colormap, b);\n"
        +"    else\n"
        +"        tcol=Color;\n"
        +"    vec4 bg = params.z>=0.0?tcol:bcol;\n"
        +"    vec4 fg = params.z>=0.0?bcol:tcol;\n"
        +"    vec4 color=bg;\n"
        +"#ifdef LIGHTING\n"
        +"    color= addLighting(bg);\n"
        +"#endif\n"
        +"    float alpha=color.a;\n"
        +"    color=addContours(color,fg);\n"
        +"    color=addGrid(color,gcol);\n"
        +"    gl_FragColor = color;\n"
        +"    gl_FragColor.a = a*alpha;\n"
        +"}\n"
        );
    // 3D Fragment Shaders
    // Volume rendering, 2D texture variant: alpha-weighted LUT lookup.
    static public String volumeFShader2D=new String(""
        +"uniform vec4 params;\n"
        +"uniform sampler1D functionmap;\n"
        +"uniform sampler1D colormap;\n"
        +"uniform sampler2D voltex;\n"
        +"void main (void)\n"
        +"{\n"
        +"    vec4 v = texture2D(voltex, gl_TexCoord[0].xy);\n"
        +"    vec4 fmap = texture1D( functionmap, v.a*params.y);\n"
        +"    float b = (params.x>=0.0)?params.x:fmap.r;\n"
        +"    gl_FragColor = texture1D( colormap, b);\n"
        +"    gl_FragColor.a = fmap.a;\n"
        +"}\n"
        );
    // Volume rendering, 3D texture variant.
    static public String volumeFShader3D=new String(""
        +"uniform vec4 params;\n"
        +"uniform sampler1D functionmap;\n"
        +"uniform sampler1D colormap;\n"
        +"uniform sampler3D voltex;\n"
        +"void main (void)\n"
        +"{\n"
        +"    vec4 v = texture3D( voltex, gl_TexCoord[0].xyz );\n"
        +"    vec4 fmap = texture1D( functionmap, v.a*params.y);\n"
        +"    float b = (params.x>=0.0)?params.x:fmap.r;\n"
        +"    gl_FragColor = texture1D( colormap, b);\n"
        +"    gl_FragColor.a = fmap.a;\n"
        +"}\n"
        );
    // Maximum-intensity-projection shader, 2D texture variant. gl_FragDepth is
    // driven from the LUT alpha; fragments with fmap.a==0 write depth 2.0
    // (presumably to push them behind everything — confirm against depth-range
    // clamping on the target drivers).
    static public String mipFShader2D=new String(""
        +"uniform vec4 params;\n"
        +"uniform sampler1D functionmap;\n"
        +"uniform sampler1D colormap;\n"
        +"uniform sampler2D voltex;\n"
        +"void main (void)\n"
        +"{\n"
        +"    vec4 v = texture2D(voltex, gl_TexCoord[0].xy);\n"
        +"    vec4 fmap = texture1D( functionmap, v.a*params.y);\n"
        +"    float b = (params.x>=0.0)?params.x:fmap.r;\n"
        +"    if(fmap.a>0.0){\n"
        +"        gl_FragColor = texture1D( colormap, b);\n"
        +"        gl_FragDepth = 1.0-fmap.a;\n"
        + "}\n"
        +"    else {\n"
        +"        gl_FragDepth = 2.0;\n"
        + "}\n"
        +"}\n"
        );
    // Maximum-intensity-projection shader, 3D texture variant.
    static public String mipFShader3D=new String(""
        +"uniform vec4 params;\n"
        +"uniform sampler1D functionmap;\n"
        +"uniform sampler1D colormap;\n"
        +"uniform sampler3D voltex;\n"
        +"void main (void)\n"
        +"{\n"
        +"    vec4 v = texture3D( voltex, gl_TexCoord[0].xyz );\n"
        +"    vec4 fmap = texture1D( functionmap, v.a*params.y);\n"
        +"    float b = (params.x>=0.0)?params.x:fmap.r;\n"
        +"    if(fmap.a>0.0){\n"
        +"        gl_FragColor = texture1D( colormap, b);\n"
        +"        gl_FragDepth = 1.0-fmap.a;\n"
        + "}\n"
        +"    else {\n"
        +"        gl_FragDepth = 2.0;\n"
        + "}\n"
        +"}\n"
        );

    /** Creates the underlying ARB program object; requires a current GL context. */
    GLSLProgram(){
        super();
        program = gl.glCreateProgramObjectARB();
    }

    /**
     * Looks up a uniform's location in the linked program.
     * Returns a negative value when the symbol is not found (callers treat
     * that as "silently skip").
     */
    private int getUniformLocation(String name) {
        int loc=0;
        loc=gl.glGetUniformLocationARB(program, name);
        //if ( loc < 0 )
        //    System.err.println(name()
        //        +" WARNING - symbol not found in shader program: "+name);
        return loc;
    }

    /** Sets a single float uniform; returns false if the name is not found. */
    private boolean setUniformFloat (String name, float value ){
        int loc = getUniformLocation(name );
        if ( loc < 0 )
            return false;
        gl.glUniform1fARB ( loc, value );
        return true;
    }

    /** Sets a single int uniform; returns false if the name is not found. */
    private boolean setUniformInt (String name, int value ){
        int loc = getUniformLocation(name );
        if ( loc < 0 )
            return false;
        gl.glUniform1iARB ( loc, value );
        return true;
    }

    /**
     * Sets a float vector uniform (1-4 components, chosen by array length).
     * Returns false when the name is unknown or the length is unsupported.
     */
    private boolean setUniformfVector (String name, float value[] ){
        int loc = getUniformLocation(name );
        if ( loc < 0 )
            return false;
        switch(value.length){
        case 1:
            gl.glUniform1fARB ( loc, value[0] );
            return true;
        case 2:
            gl.glUniform2fARB ( loc, value[0],value[1] );
            return true;
        case 3:
            gl.glUniform3fARB ( loc, value[0],value[1],value[2] );
            return true;
        case 4:
            gl.glUniform4fARB ( loc, value[0],value[1],value[2],value[3] );
            return true;
        }
        return false;
    }

    /**
     * Sets an int vector uniform (1-4 components, chosen by array length).
     * Returns false when the name is unknown or the length is unsupported.
     */
    private boolean setUniformiVector (String name, int value[] ){
        int loc = getUniformLocation(name );
        if ( loc < 0 )
            return false;
        switch(value.length){
        case 1:
            gl.glUniform1iARB ( loc, value[0] );
            return true;
        case 2:
            gl.glUniform2iARB ( loc, value[0],value[1] );
            return true;
        case 3:
            gl.glUniform3iARB ( loc, value[0],value[1],value[2] );
            return true;
        case 4:
            gl.glUniform4iARB ( loc, value[0],value[1],value[2],value[3] );
            return true;
        }
        return false;
    }

    /** Links and validates the program, printing the info log on error. */
    private void linkShaderProgram(){
        gl.glLinkProgramARB(program);
        gl.glValidateProgramARB(program);
        checkError(program);
        if(DebugOutput.isSetFor("glsl"))
            System.out.println(name()+" linkShaderProgram()");
    }

    // functions overridden from base class

    /**
     * Compiles {@code source} into {@code shader}'s shader object and attaches
     * it to this program, replacing (detach + delete) any previous object.
     *
     * @return true on successful compile, false if the compiler reported errors
     */
    protected boolean loadShader(String source, JGLShader shader){
        int obj=shader.getShaderObject();
        if(obj!=0){
            // Drop the previously attached object before recompiling.
            gl.glDetachObjectARB(program, obj);
            gl.glDeleteShader(obj);
        }
        shader.setActive();
        obj=shader.createShaderObject();
        gl.glShaderSourceARB(obj, 1, new String[] { source }, (int[])null, 0);
        gl.glCompileShaderARB(obj);
        if(checkError(obj))
            return false;
        gl.glAttachObjectARB(program, obj);
        if(DebugOutput.isSetFor("glsl"))
            System.out.println(name()+" "+shader.name());
        return true;
    }

    /**
     * Reads the ARB info log for {@code obj}.
     *
     * @return true when the log is non-trivial (i.e. an error/warning was
     *         reported and printed), false when the log is empty
     */
    protected boolean checkError(int obj) {
        IntBuffer iVal = IntBuffer.allocate(1);
        gl.glGetObjectParameterivARB(obj, GL2.GL_OBJECT_INFO_LOG_LENGTH_ARB, iVal);
        int length = iVal.get();
        if(length <= 1) {
            // Length 0/1 means no message (just the terminator).
            return false;
        }
        ByteBuffer infoLog = ByteBuffer.allocate(length);
        iVal.flip();
        gl.glGetInfoLogARB(obj, length, iVal, infoLog);
        byte[] infoBytes = new byte[length];
        infoLog.get(infoBytes);
        System.out.println("GPU shader error >> " + new String(infoBytes));
        return true;
    }

    /** Short identifier used in debug messages. */
    public static String name(){
        return "GLSL";
    }

    /** True when the driver exposes all four ARB shading extensions used here. */
    public static boolean isSupported() {
        if(GLUtil.isExtensionSupported("GL_ARB_shading_language_100")
                && GLUtil.isExtensionSupported("GL_ARB_shader_objects")
                && GLUtil.isExtensionSupported("GL_ARB_vertex_shader")
                && GLUtil.isExtensionSupported("GL_ARB_fragment_shader"))
            return true;
        return false;
    }

    /**
     * Detaches both shader objects, deletes the program object and delegates
     * remaining cleanup to the base class.
     * NOTE(review): fetches the GL2 from the current context into a local
     * that shadows the inherited "gl" — presumably so destroy() works from a
     * different context callback; confirm.
     */
    public void destroy(){
        GL2 gl=GLU.getCurrentGL().getGL2();
        if(DebugOutput.isSetFor("gpu"))
            System.out.println(name()+" destroy("+program+")");
        if(program>0){
            int obj;
            obj=vshader.getShaderObject();
            if(obj>0)
                gl.glDetachObjectARB(program, obj);
            obj=fshader.getShaderObject();
            if(obj>0)
                gl.glDetachObjectARB(program, obj);
            gl.glDeleteObjectARB(program);
        }
        program=0;
        super.destroy();
    }

    /** Makes this program current (no-op when the program was never created). */
    public void bind() {
        if(program !=0)
            gl.glUseProgramObjectARB(program);
    }

    /** Reverts to the fixed-function pipeline. */
    public void unbind() {
        gl.glUseProgramObjectARB(0);
    }

    public boolean setFloatValue (String name, float value ){
        return setUniformFloat(name,value);
    }

    public boolean setIntValue (String name, int value ){
        return setUniformInt(name,value);
    }

    public boolean setFloatVector (String name, float value[] ){
        return setUniformfVector(name,value);
    }

    public boolean setIntVector (String name, int value[] ){
        return setUniformiVector(name,value);
    }

    /** Binds a sampler uniform to the given texture unit; logs and fails on unknown names. */
    public boolean setTexture(String name, int texUnit) {
        int loc=gl.glGetUniformLocation(program,name);
        if(loc==-1){
            System.out.println("ERROR setTexture("+name+","+texUnit+")");
            return false;
        }
        gl.glUniform1iARB(loc, texUnit);
        if(DebugOutput.isSetFor("glsl"))
            System.out.println(name()+" setTexture("+name+","+texUnit+")");
        return true;
    }

    /**
     * Selects, compiles and links the shader pair for the given mode bits
     * (S2D/S3D choose the volume texture dimensionality, SMIP selects the
     * maximum-intensity-projection variant, SLGT enables LIGHTING for the 1D
     * shader; the bit constants come from the base class).
     */
    public void loadShader(int mode){
        if((mode & S2D)>0){
            loadVertexShader(VShader);
            if((mode & SMIP)>0)
                loadFragmentShader(mipFShader2D);
            else
                loadFragmentShader(volumeFShader2D);
        }
        else if((mode & S3D)>0){
            loadVertexShader(VShader);
            if((mode & SMIP)>0)
                loadFragmentShader(mipFShader3D);
            else
                loadFragmentShader(volumeFShader3D);
        }
        else{ // 1D
            if((mode & SLGT)>0){
                Defs="#define LIGHTING\n";
                //loadVertexShader(lgtVShader);
                //loadFragmentShader(lgtFShader1D);
            }
            else{
                Defs="";
            }
            String vs=Defs+VShader;
            String fs=Defs+FShader1D;
            loadVertexShader(vs);
            loadFragmentShader(fs);
        }
        linkShaderProgram();
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.calrissian.accumulorecipes.commons.iterators.support;

import static org.calrissian.accumulorecipes.commons.support.Constants.NULL_BYTE;

import java.io.IOException;
import java.util.Collection;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.Iterator;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.TreeNode;

import org.apache.accumulo.core.data.ByteSequence;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Range;
import org.apache.accumulo.core.iterators.SortedKeyValueIterator;
import org.apache.commons.jexl2.parser.ParserTreeConstants;
import org.apache.hadoop.io.Text;
import org.apache.log4j.Logger;
import org.calrissian.accumulorecipes.commons.iterators.AndIterator;
import org.calrissian.accumulorecipes.commons.iterators.FieldIndexIterator;
import org.calrissian.accumulorecipes.commons.iterators.OrIterator;

/**
 * A node in the boolean-logic evaluation tree built from a parsed JEXL query.
 *
 * <p>Each node carries a JEXL parser node type (see {@link ParserTreeConstants})
 * and, via the inherited {@code userObject}, the {@link SortedKeyValueIterator}
 * that produces candidate keys for that sub-expression. {@link #seek},
 * {@link #next} and {@link #jump} drive that iterator and cache its current
 * position in {@code myTopKey} (normalized through {@link #buildKey}), flipping
 * {@code done} once the iterator is exhausted.
 */
public class BooleanLogicTreeNode extends DefaultMutableTreeNode {

    protected static final Logger log = Logger.getLogger(BooleanLogicTreeNode.class);
    private static final long serialVersionUID = 1L;
    // Current normalized top key of this node's iterator (null = no current key).
    private Key myTopKey = null;
    // Key this node should be advanced to (used by the evaluating iterator).
    private Key advanceKey = null;
    // Field value and field name of the term this node represents (leaf nodes).
    private Text fValue = null;
    private Text fName = null;
    // True when this node's term is negated in the query.
    private boolean negated = false;
    // JEXL parser node type (ParserTreeConstants.JJT* constant).
    private int type;
    // True once the underlying iterator has been exhausted.
    private boolean done = false;
    private boolean valid = false;
    private boolean rollUp = false;
    // Operator string derived from the type (see setOperator()).
    private String fOperator = null;
    private boolean childrenAllNegated = false;
    // Keys collected for set-based evaluation (used for logical OR roll-up).
    private HashSet<Key> uids;
    // Range bounds for range nodes.
    private Text upperBound;
    private Text lowerBound;
    private boolean rangeNode;

    /** Creates a node of the given JEXL type with no field name/value. */
    public BooleanLogicTreeNode(int type) {
        super();
        this.type = type;
        uids = new HashSet<Key>();
        setOperator();
    }

    /** Creates a leaf node for {@code fieldName op fieldValue}. */
    public BooleanLogicTreeNode(int type, String fieldName, String fieldValue) {
        super();
        this.type = type;
        if (fieldValue != null) {
            this.fValue = new Text(fieldValue);
        }
        if (fieldName != null) {
            // Field names arrive in the JEXL-mangled form; revert to the original key.
            this.fName = new Text(NodeToJexl.revertToOriginalkey(fieldName));
        }
        uids = new HashSet<Key>();
        setOperator();
    }

    /** Creates a leaf node, additionally marking it as negated. */
    public BooleanLogicTreeNode(int type, String fieldName, String fieldValue, boolean negated) {
        super();
        this.type = type;
        if (fieldValue != null) {
            this.fValue = new Text(fieldValue);
        }
        if (fieldName != null) {
            this.fName = new Text(NodeToJexl.revertToOriginalkey(fieldName));
        }
        uids = new HashSet<Key>();
        this.negated = negated;
        setOperator();
    }

    /**
     * Normalizes an iterator key to (row, uid) form so keys from different
     * sub-iterators compare against each other. The uid is taken from the
     * column qualifier, which is expected to be {@code datatype\0uid}; when no
     * null byte is present the whole qualifier is used.
     */
    public static Key buildKey(Key key) {
        if (key == null) {
            log.error("Problem in BooleanLogicTreeNodeJexl.buildKey");
            return null;
        }
        // Build Key(Text row, Text colfam) where colFam is dataype\0uid
        String[] cq = key.getColumnQualifier().toString().split(NULL_BYTE);
        Text uuid = new Text(cq.length > 1 ? cq[1] : cq[0]);
        Text row = key.getRow();
        log.debug("Key-> r:" + row + " fam:" + uuid);
        Key k = new Key(row, uuid);
        return k;
    }

    public boolean isValid() {
        return this.valid;
    }

    public void setValid(boolean b) {
        this.valid = b;
    }

    public int getType() {
        return this.type;
    }

    public void setType(int t) {
        this.type = t;
    }

    public boolean isChildrenAllNegated() {
        return childrenAllNegated;
    }

    public void setChildrenAllNegated(boolean childrenAllNegated) {
        this.childrenAllNegated = childrenAllNegated;
    }

    public Key getAdvanceKey() {
        return advanceKey;
    }

    public void setAdvanceKey(Key advanceKey) {
        this.advanceKey = advanceKey;
    }

    public boolean isNegated() {
        return negated;
    }

    public Key getTopKey() {
        return myTopKey;
    }

    public void setTopKey(Key id) {
        this.myTopKey = id;
    }

    public boolean isDone() {
        return done;
    }

    public void setDone(boolean done) {
        this.done = done;
    }

    public void setRollUp(boolean rollUp) {
        this.rollUp = rollUp;
    }

    public Text getFieldValue() {
        return fValue;
    }

    public void setFieldValue(Text term) {
        this.fValue = term;
    }

    public Text getFieldName() {
        return fName;
    }

    public void setFieldName(Text dataLocation) {
        this.fName = dataLocation;
    }

    public String getFieldOperator() {
        return fOperator;
    }

    /**
     * Derives the operator string from the node type. A negated "!=" collapses
     * to "==" (double negation), so the stored operator always reflects the
     * effective comparison.
     */
    private void setOperator() {
        this.fOperator = JexlOperatorConstants.getOperator(type);
        if (negated && this.fOperator.equals("!=")) {
            this.fOperator = JexlOperatorConstants.getOperator(JexlOperatorConstants.JJTEQNODE);
        }
    }

    public Text getLowerBound() {
        return lowerBound;
    }

    public void setLowerBound(Text lowerBound) {
        this.lowerBound = lowerBound;
    }

    public Text getUpperBound() {
        return upperBound;
    }

    public void setUpperBound(Text upperBound) {
        this.upperBound = upperBound;
    }

    public boolean isRangeNode() {
        return rangeNode;
    }

    public void setRangeNode(boolean rangeNode) {
        this.rangeNode = rangeNode;
    }

    /** Recursively renders this node and its children as "[node,child,...]". */
    public String getContents() {
        StringBuilder s = new StringBuilder("[");
        s.append(toString());

        if (children != null) {
            Enumeration<?> e = this.children();
            while (e.hasMoreElements()) {
                BooleanLogicTreeNode n = (BooleanLogicTreeNode) e.nextElement();
                s.append(",");
                s.append(n.getContents());
            }
        }
        s.append("]");
        return s.toString();
    }

    /** Renders only this node's field name/value (placeholders when absent). */
    public String printNode() {
        StringBuilder s = new StringBuilder("[");
        s.append("Full Location & Term = ");
        if (this.fName != null) {
            s.append(this.fName.toString());
        } else {
            s.append("BlankDataLocation");
        }
        s.append("  ");
        if (this.fValue != null) {
            s.append(this.fValue.toString());
        } else {
            s.append("BlankTerm");
        }
        s.append("]");
        return s.toString();
    }

    /** Debug rendering keyed on the node type; includes the current top key's uid. */
    @Override
    public String toString() {
        String uidStr = "none";
        if (myTopKey != null) {
            String cf = myTopKey.getColumnFamily().toString();
            uidStr = cf;
        }
        switch (type) {
            case ParserTreeConstants.JJTEQNODE:
                return fName.toString() + ":" + fValue.toString() + ", uid=" + uidStr + " , negation=" + this.isNegated();
            case ParserTreeConstants.JJTNENODE:
                return fName.toString() + ":" + fValue.toString() + ", uid=" + uidStr + " , negation=" + this.isNegated();
            case ParserTreeConstants.JJTERNODE:
                return fName.toString() + ":" + fValue.toString() + ", uid=" + uidStr + " , negation=" + this.isNegated();
            case ParserTreeConstants.JJTNRNODE:
                return fName.toString() + ":" + fValue.toString() + ", uid=" + uidStr + " , negation=" + this.isNegated();
            case ParserTreeConstants.JJTLENODE:
                return "<=:" + fName.toString() + ":" + fValue.toString() + ", uid=" + uidStr + " , negation=" + this.isNegated();
            case ParserTreeConstants.JJTLTNODE:
                return "<:" + fName.toString() + ":" + fValue.toString() + ", uid=" + uidStr + " , negation=" + this.isNegated();
            case ParserTreeConstants.JJTGENODE:
                return ">=:" + fName.toString() + ":" + fValue.toString() + ", uid=" + uidStr + " , negation=" + this.isNegated();
            case ParserTreeConstants.JJTGTNODE:
                return ">:" + fName.toString() + ":" + fValue.toString() + ", uid=" + uidStr + " , negation=" + this.isNegated();
            case ParserTreeConstants.JJTJEXLSCRIPT:
                return "HEAD" + ":" + uidStr + ":" + isValid();
            case ParserTreeConstants.JJTANDNODE:
                return "AND" + ":" + uidStr + ":" + isValid();
            case ParserTreeConstants.JJTNOTNODE:
                return "NOT";
            case ParserTreeConstants.JJTORNODE:
                return "OR" + ":" + uidStr + ":" + isValid();
            default:
                System.out.println("Problem in BLTNODE.toString()");
                return null;
        }
    }

    /**
     * Seeks this node's underlying iterator and caches its (normalized) top
     * key. Resets {@code topKey}/{@code done} first; marks the node done when
     * the iterator has no top after the seek. A null user-object iterator is
     * tolerated and leaves the node with no top key.
     */
    public void seek(Range range, Collection<ByteSequence> columnFamilies, boolean inclusive) throws IOException {
        // always start fresh
        this.setTopKey(null);
        this.setDone(false);

        // get my user object which should be an iterator
        SortedKeyValueIterator<?, ?> iter = (SortedKeyValueIterator<?, ?>) this.getUserObject();
        if (iter != null) {
            iter.seek(range, columnFamilies, inclusive);

            if (iter.hasTop()) {
                Key key = (Key) iter.getTopKey();
                key = buildKey(key);

                this.setTopKey(key);
                if (log.isDebugEnabled()) {
                    log.debug("BLTNODE.seek() -> found: " + this.getTopKey());
                }
            } else {
                if (log.isDebugEnabled()) {
                    log.debug("BLTNODE.seek() -> hasTop::false");
                }
                this.setDone(true);
            }
        } else {
            if (log.isDebugEnabled()) {
                log.debug("BLTNODE.seek(), The iterator was null!");
            }
            this.setTopKey(null);
        }
    }

    /** Joins the toString() of each node on a tree path into "[a,b,...]". */
    public String buildTreePathString(TreeNode[] path) {
        StringBuilder s = new StringBuilder("[");
        for (TreeNode p : path) {
            s.append(p.toString());
            s.append(",");
        }
        s.deleteCharAt(s.length() - 1);
        s.append("]");
        return s.toString();
    }

    /**
     * Advances the underlying iterator one step and re-caches the normalized
     * top key. No-op once the node is marked done; marks done when the
     * iterator runs out.
     */
    public void next() throws IOException {
        // always start fresh
        this.setTopKey(null);

        if (log.isDebugEnabled()) {
            TreeNode[] path = this.getPath();
            log.debug("BLTNODE.next() path-> " + this.buildTreePathString(path));
        }

        // have I been marked as done?
        if (this.isDone()) {
            if (log.isDebugEnabled()) {
                log.debug("I've been marked as done, returning");
            }
            return;
        }

        SortedKeyValueIterator<?, ?> iter = (SortedKeyValueIterator<?, ?>) this.getUserObject();
        iter.next();

        if (iter.hasTop()) {
            Key key = (Key) iter.getTopKey();

            // I have a valid topKey, pull out the piece I want
            key = buildKey(key);
            this.setTopKey(key);
            if (log.isDebugEnabled()) {
                log.debug("BLTNODE.next() -> found: " + this.getTopKey());
            }
        } else {
            // no top value has been returned, I'm done.
            if (log.isDebugEnabled()) {
                log.debug("BLTNODE.next() -> Nothing found");
            }
            this.setTopKey(null);
            this.setDone(true);
        }
    }

    /**
     * Asks the underlying iterator to jump forward to {@code jumpKey}. Only
     * EQ, AND and OR node types support jumping (each with its concrete
     * iterator class); other types are left untouched and return true.
     *
     * @return the jump result reported by the iterator (true for unsupported types)
     */
    public boolean jump(Key jumpKey) throws IOException {
        boolean ok = true;
        if (this.getType() == ParserTreeConstants.JJTEQNODE) {
            FieldIndexIterator iter = (FieldIndexIterator) this.getUserObject();
            ok = iter.jump(jumpKey);
            if (iter.hasTop()) {
                Key key = (Key) iter.getTopKey();
                key = buildKey(key);
                this.setTopKey(key);
                if (log.isDebugEnabled()) {
                    log.debug("BLTNODE.jump() -> found: " + this.getTopKey());
                }
            } else {
                if (log.isDebugEnabled()) {
                    log.debug("FieldIndexIteratorJexl does not have top after jump, marking done.");
                }
                this.setTopKey(null);
                this.setDone(true);
            }
        } else if (this.getType() == ParserTreeConstants.JJTANDNODE) {
            AndIterator iter = (AndIterator) this.getUserObject();
            ok = iter.jump(jumpKey);
            if (iter.hasTop()) {
                Key key = iter.getTopKey();
                key = buildKey(key);
                this.setTopKey(key);
                if (log.isDebugEnabled()) {
                    log.debug("BLTNODE.jump() -> found: " + this.getTopKey());
                }
            } else {
                if (log.isDebugEnabled()) {
                    log.debug("IntersectingIteratorJexl does not have top after jump, marking done.");
                }
                this.setTopKey(null);
                this.setDone(true);
            }
        } else if (this.getType() == ParserTreeConstants.JJTORNODE) {
            OrIterator iter = (OrIterator) this.getUserObject();
            ok = iter.jump(jumpKey);
            if (iter.hasTop()) {
                Key key = (Key) iter.getTopKey();
                key = buildKey(key);
                this.setTopKey(key);
                if (log.isDebugEnabled()) {
                    log.debug("BLTNODE.jump() -> found: " + this.getTopKey());
                }
            } else {
                if (log.isDebugEnabled()) {
                    log.debug("OrIteratorJexl does not have top after jump, marking done.");
                }
                this.setTopKey(null);
                this.setDone(true);
            }
        }
        return ok;
    }

    /** Adds a key to this node's evaluation set (used for OR roll-up). */
    public void addToSet(Key i) {
        uids.add(i);
    }

    /** Clears the evaluation set. */
    public void reSet() {
        uids = new HashSet<Key>();
    }

    public Iterator<Key> getSetIterator() {
        return uids.iterator();
    }

    /** Intersects {@code h} with this node's set in place and returns it. */
    public HashSet<Key> getIntersection(HashSet<Key> h) {
        h.retainAll(uids);
        return h;
    }

    /** Returns the smallest (by Key ordering) non-null key in the set, or null. */
    public Key getMinUniqueID() {
        Iterator<Key> iter = uids.iterator();
        Key min = null;
        while (iter.hasNext()) {
            Key t = (Key) iter.next();
            if (log.isDebugEnabled()) {
                log.debug("OR set member: " + t);
            }
            if (t != null) {
                if (min == null) {
                    min = t;
                } else if (t.compareTo(min) < 0) {
                    min = t;
                }
            }
        }
        return min;
    }

    /**
     * Whether this node currently has a candidate key. For a logical OR
     * (non-leaf OR node) the answer comes from the collected uid set; for
     * every other case it is whether a top key is cached.
     */
    public boolean hasTop() {
        // This part really needs to be cleaned up.
        // It was created before I knew what was being passed back.
        if (this.getType() == ParserTreeConstants.JJTORNODE) {
            // Are you a Logical OR or an OR Iterator
            if (!this.isLeaf()) { // logical construct
                // I have a set of keys
                return this.uids.size() > 0;
            } else { // or iterator, you only have possible key
                if (this.getTopKey() == null) {
                    return false;
                } else {
                    return true;
                }
            }
        } else {
            return this.getTopKey() != null;
        }
    }
}
/************************************************************************** * Developed by Language Technologies Institute, Carnegie Mellon University * Written by Richard Wang (rcwang#cs,cmu,edu) **************************************************************************/ package com.rcwang.seal.expand; import java.io.File; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.log4j.Logger; import com.rcwang.seal.fetch.Document; import com.rcwang.seal.util.Helper; public class CategoryPredictor { public static class MetaInfo { public String title, desc; public Set<String> keywords; public double weight; public MetaInfo() { weight = 0; keywords = new HashSet<String>(); } public String toString() { StringBuffer buf = new StringBuffer(); buf.append("Title: ").append(title).append("\n"); buf.append("Desc: ").append(desc).append("\n"); buf.append("Keywords: "); for (String keyword : keywords) buf.append(keyword).append(", "); buf.append("\n"); return buf.toString(); } } public static class Category implements Comparable<Category> { public String name; public int keywordFreq = 0; public int docFreq = 0; public double score = 0; public int isCorrect = -1; public int compareTo(Category category) { return Double.compare(category.score, score); } public boolean wasEvaluated() { return isCorrect > -1; } public String toString() { return keywordFreq + " " + docFreq + " " + (isCorrect == -1 ? "" : (isCorrect == 0 ? 
"-" : "+")) + name + ":\t" + score; } } public static Logger log = Logger.getLogger(CategoryPredictor.class); public static final int MAX_RANK_TO_EVALUATE = 10; public static final double MAX_KEYWORD_RATIO = 0.5; public static final Pattern TITLE_PATTERN = Pattern.compile("(?i)<title>([^<>]+?)</title>"); public static final Pattern DESC_PATTERN = Pattern.compile("(?i)<meta\\s+name=(?:\"|')description(?:\"|')\\s+content=(?:\"|')([^<>]+?)(?:\"|')"); public static final Pattern KEYWORD_PATTERN = Pattern.compile("(?i)<meta\\s+name=(?:\"|')keywords(?:\"|')\\s+content=(?:\"|')([^<>]+?)(?:\"|')"); private Map<String, Category> categoryMap; private Set<MetaInfo> metaInfos; private int maxKeywordFreq; public CategoryPredictor() { maxKeywordFreq = 0; metaInfos = new HashSet<MetaInfo>(); categoryMap = new HashMap<String, Category>(); } public void clear() { categoryMap.clear(); metaInfos.clear(); maxKeywordFreq = 0; } public static void main(String args[]) { File htmlFile = new File("/www.cache/321/321.847938881.html"); String content = Helper.readFile(htmlFile); Document docURL = new Document(content, null); docURL.setWeight(1.0); List<Document> docURLs = new ArrayList<Document>(); docURLs.add(docURL); // CategoryPredictor.predict(docURLs); } public List<Category> predict() { for (MetaInfo metaInfo : metaInfos) weightNames(metaInfo); List<Category> categories = new ArrayList<Category>(categoryMap.values()); Collections.sort(categories); return categories; } public Set<String> parse(Document docURL, Collection<String> badNames) { MetaInfo metaInfo = extractMetaInfo(docURL); if (metaInfo == null) return null; metaInfos.add(metaInfo); return updateCategories(metaInfo, badNames); } public static MetaInfo extractMetaInfo(Document document) { MetaInfo metaInfo = new MetaInfo(); metaInfo.weight = document.getWeight(); if (document.getText() == null) return null; Matcher m = TITLE_PATTERN.matcher(document.getText()); if (m.find()) metaInfo.title = tidy(m.group(1)); m = 
DESC_PATTERN.matcher(document.getText()); if (m.find()) metaInfo.desc = tidy(m.group(1)); m = KEYWORD_PATTERN.matcher(document.getText()); if (m.find()) { String[] keywords = m.group(1).split("(\\-|:|,|\\|)"); for (String keyword : keywords) { String s = tidy(keyword); if (s != null) metaInfo.keywords.add(s.toLowerCase()); } } if (metaInfo.title == null && metaInfo.desc == null && metaInfo.keywords.isEmpty()) return null; return metaInfo; } private Set<String> updateCategories(MetaInfo metaInfo, Collection<String> badTerms) { Set<String> categories = new HashSet<String>(); if (metaInfo == null || metaInfo.keywords.isEmpty()) return categories; for (String keyword : metaInfo.keywords) { boolean isGood = true; // A keyword is *not* good if... // 1) the keyword is a substring of any badTerm or // 2) any badTerm is a substring of the keyword, for (String badTerm : badTerms) { if (keyword.contains(badTerm) || badTerm.contains(keyword)) { isGood = false; break; } } if (!isGood) continue; Category category = categoryMap.get(keyword); if (category == null) { category = new Category(); category.name = keyword; categoryMap.put(category.name, category); categories.add(category.name); } category.keywordFreq++; category.score += metaInfo.weight; maxKeywordFreq = Math.max(category.keywordFreq, maxKeywordFreq); } return categories; } private static String tidy(String s) { s = s.replaceAll("\\s+", " ").trim(); return (s.length() == 0) ? 
null : s; } private void weightNames(MetaInfo metaInfo) { StringBuffer buf = new StringBuffer(); if (metaInfo.title != null) buf.append(metaInfo.title).append(" "); if (metaInfo.desc != null) buf.append(metaInfo.desc).append(" "); if (!metaInfo.keywords.isEmpty()) { for (String keyword : metaInfo.keywords) buf.append(keyword).append(" "); } if (buf.length() == 0) return; String strings = buf.toString().toLowerCase(); for (Iterator<String> i = categoryMap.keySet().iterator(); i.hasNext();) { String name = i.next(); Category category = categoryMap.get(name); // remove keywords that are less than MAX_KEYWORD_RATIO frequent as the most frequent one if (category.keywordFreq < maxKeywordFreq * MAX_KEYWORD_RATIO) { i.remove(); continue; } // A term is considered *not* useful when it: // 1) has only 1 char (in any language) // 2) has less than 3 chars and starts with a letter (i.e. English) // 3) is not found in the pool of words if (name.length() < 2 || name.length() < 3 && name.matches("[A-Za-z].*") || strings.indexOf(name) == -1) continue; category.score += metaInfo.weight; category.docFreq++; } } public Set<String> getCategoryNames() { return categoryMap.keySet(); } public Collection<Category> getCategories() { return categoryMap.values(); } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to you under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.calcite.test; import org.apache.calcite.adapter.java.ReflectiveSchema; import org.apache.calcite.avatica.util.DateTimeUtils; import org.apache.calcite.config.Lex; import org.apache.calcite.jdbc.CalciteConnection; import org.apache.calcite.jdbc.Driver; import org.apache.calcite.linq4j.Enumerable; import org.apache.calcite.linq4j.Linq4j; import org.apache.calcite.linq4j.QueryProvider; import org.apache.calcite.linq4j.function.Function1; import org.apache.calcite.linq4j.tree.Expressions; import org.apache.calcite.linq4j.tree.ParameterExpression; import org.apache.calcite.linq4j.tree.Primitive; import org.apache.calcite.linq4j.tree.Types; import org.apache.calcite.schema.SchemaPlus; import org.apache.calcite.schema.impl.AbstractSchema; import org.apache.calcite.schema.impl.TableMacroImpl; import org.apache.calcite.schema.impl.ViewTable; import org.apache.calcite.util.Smalls; import org.apache.calcite.util.Util; import com.google.common.collect.ImmutableList; import org.junit.Assert; import org.junit.Ignore; import org.junit.Test; import java.lang.reflect.Field; import java.lang.reflect.Method; import java.sql.Connection; import java.sql.DriverManager; import java.sql.ResultSet; import 
java.sql.ResultSetMetaData; import java.sql.SQLException; import java.sql.Statement; import java.sql.Time; import java.sql.Timestamp; import java.util.Arrays; import java.util.BitSet; import java.util.Date; import java.util.List; import java.util.Properties; import static org.apache.calcite.test.JdbcTest.Employee; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.is; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; /** * Unit tests for {@link ReflectiveSchema}. */ public class ReflectiveSchemaTest { public static final Method LINQ4J_AS_ENUMERABLE_METHOD = Types.lookupMethod( Linq4j.class, "asEnumerable", Object[].class); private static final ReflectiveSchema CATCHALL = new ReflectiveSchema(new CatchallSchema()); /** * Test that uses a JDBC connection as a linq4j * {@link org.apache.calcite.linq4j.QueryProvider}. * * @throws Exception on error */ @Test public void testQueryProvider() throws Exception { Connection connection = CalciteAssert .that(CalciteAssert.Config.REGULAR).connect(); QueryProvider queryProvider = connection.unwrap(QueryProvider.class); ParameterExpression e = Expressions.parameter(Employee.class, "e"); // "Enumerable<T> asEnumerable(final T[] ts)" List<Object[]> list = queryProvider.createQuery( Expressions.call( Expressions.call( Types.of(Enumerable.class, Employee.class), null, LINQ4J_AS_ENUMERABLE_METHOD, Expressions.constant( new JdbcTest.HrSchema().emps)), "asQueryable"), Employee.class) .where( Expressions.lambda( Expressions.lessThan( Expressions.field( e, "empid"), Expressions.constant(160)), e)) .where( Expressions.lambda( Expressions.greaterThan( Expressions.field( e, "empid"), Expressions.constant(140)), e)) .select( Expressions.<Function1<Employee, Object[]>>lambda( Expressions.new_( Object[].class, Expressions.field( e, "empid"), Expressions.call( Expressions.field( e, "name"), "toUpperCase")), e)) .toList(); 
assertEquals(1, list.size()); assertEquals(2, list.get(0).length); assertEquals(150, list.get(0)[0]); assertEquals("SEBASTIAN", list.get(0)[1]); } @Test public void testQueryProviderSingleColumn() throws Exception { Connection connection = CalciteAssert .that(CalciteAssert.Config.REGULAR).connect(); QueryProvider queryProvider = connection.unwrap(QueryProvider.class); ParameterExpression e = Expressions.parameter(Employee.class, "e"); // "Enumerable<T> asEnumerable(final T[] ts)" List<Integer> list = queryProvider.createQuery( Expressions.call( Expressions.call( Types.of(Enumerable.class, Employee.class), null, LINQ4J_AS_ENUMERABLE_METHOD, Expressions.constant(new JdbcTest.HrSchema().emps)), "asQueryable"), Employee.class) .select( Expressions.<Function1<Employee, Integer>>lambda( Expressions.field(e, "empid"), e)) .toList(); assertEquals(Arrays.asList(100, 200, 150, 110), list); } /** * Tests a relation that is accessed via method syntax. * The function returns a {@link org.apache.calcite.linq4j.Queryable}. */ @Ignore @Test public void testOperator() throws SQLException, ClassNotFoundException { Connection connection = DriverManager.getConnection("jdbc:calcite:"); CalciteConnection calciteConnection = connection.unwrap(CalciteConnection.class); SchemaPlus rootSchema = calciteConnection.getRootSchema(); SchemaPlus schema = rootSchema.add("s", new AbstractSchema()); schema.add("GenerateStrings", TableMacroImpl.create(Smalls.GENERATE_STRINGS_METHOD)); schema.add("StringUnion", TableMacroImpl.create(Smalls.STRING_UNION_METHOD)); rootSchema.add("hr", new ReflectiveSchema(new JdbcTest.HrSchema())); ResultSet resultSet = connection.createStatement().executeQuery( "select *\n" + "from table(s.StringUnion(\n" + " GenerateStrings(5),\n" + " cursor (select name from emps)))\n" + "where char_length(s) > 3"); assertTrue(resultSet.next()); } /** * Tests a view. 
*/ @Test public void testView() throws SQLException, ClassNotFoundException { Connection connection = DriverManager.getConnection("jdbc:calcite:"); CalciteConnection calciteConnection = connection.unwrap(CalciteConnection.class); SchemaPlus rootSchema = calciteConnection.getRootSchema(); SchemaPlus schema = rootSchema.add("s", new AbstractSchema()); schema.add("emps_view", ViewTable.viewMacro(schema, "select * from \"hr\".\"emps\" where \"deptno\" = 10", null, Arrays.asList("s", "emps_view"), null)); rootSchema.add("hr", new ReflectiveSchema(new JdbcTest.HrSchema())); ResultSet resultSet = connection.createStatement().executeQuery( "select *\n" + "from \"s\".\"emps_view\"\n" + "where \"empid\" < 120"); assertEquals( "empid=100; deptno=10; name=Bill; salary=10000.0; commission=1000\n" + "empid=110; deptno=10; name=Theodore; salary=11500.0; commission=250\n", CalciteAssert.toString(resultSet)); } /** * Tests a view with a path. */ @Test public void testViewPath() throws SQLException, ClassNotFoundException { Connection connection = DriverManager.getConnection("jdbc:calcite:"); CalciteConnection calciteConnection = connection.unwrap(CalciteConnection.class); SchemaPlus rootSchema = calciteConnection.getRootSchema(); SchemaPlus schema = rootSchema.add("s", new AbstractSchema()); // create a view s.emps based on hr.emps. uses explicit schema path "hr". 
schema.add("emps", ViewTable.viewMacro(schema, "select * from \"emps\" where \"deptno\" = 10", ImmutableList.of("hr"), ImmutableList.of("s", "emps"), null)); schema.add("hr_emps", ViewTable.viewMacro(schema, "select * from \"emps\"", ImmutableList.of("hr"), ImmutableList.of("s", "hr_emps"), null)); schema.add("s_emps", ViewTable.viewMacro(schema, "select * from \"emps\"", ImmutableList.of("s"), ImmutableList.of("s", "s_emps"), null)); schema.add("null_emps", ViewTable.viewMacro(schema, "select * from \"emps\"", null, ImmutableList.of("s", "null_emps"), null)); rootSchema.add("hr", new ReflectiveSchema(new JdbcTest.HrSchema())); final Statement statement = connection.createStatement(); ResultSet resultSet; resultSet = statement.executeQuery( "select * from \"s\".\"hr_emps\""); assertEquals(4, count(resultSet)); // "hr_emps" -> "hr"."emps", 4 rows resultSet = statement.executeQuery( "select * from \"s\".\"s_emps\""); // "s_emps" -> "s"."emps", 3 rows assertEquals(3, count(resultSet)); resultSet = statement.executeQuery( "select * from \"s\".\"null_emps\""); // "null_emps" -> "s"."emps", 3 assertEquals(3, count(resultSet)); statement.close(); } private int count(ResultSet resultSet) throws SQLException { int i = 0; while (resultSet.next()) { ++i; } resultSet.close(); return i; } /** Tests column based on java.sql.Date field. */ @Test public void testDateColumn() throws Exception { CalciteAssert.that() .withSchema("s", new ReflectiveSchema(new DateColumnSchema())) .query("select * from \"s\".\"emps\"") .returns("" + "hireDate=1970-01-01; empid=10; deptno=20; name=fred; salary=0.0; commission=null\n" + "hireDate=1970-04-11; empid=10; deptno=20; name=bill; salary=0.0; commission=null\n"); } /** Tests querying an object that has no public fields. 
*/ @Test public void testNoPublicFields() throws Exception { final CalciteAssert.AssertThat with = CalciteAssert.that().withSchema("s", CATCHALL); with.query("select 1 from \"s\".\"allPrivates\"") .returns("EXPR$0=1\n"); with.query("select \"x\" from \"s\".\"allPrivates\"") .throws_("Column 'x' not found in any table"); } /** Tests columns based on types such as java.sql.Date and java.util.Date. * * @see CatchallSchema#everyTypes */ @Test public void testColumnTypes() throws Exception { final CalciteAssert.AssertThat with = CalciteAssert.that().withSchema("s", CATCHALL); with.query("select \"primitiveBoolean\" from \"s\".\"everyTypes\"") .returns("primitiveBoolean=false\n" + "primitiveBoolean=true\n"); with.query("select * from \"s\".\"everyTypes\"") .returns("" + "primitiveBoolean=false; primitiveByte=0; primitiveChar=\u0000; primitiveShort=0; primitiveInt=0; primitiveLong=0; primitiveFloat=0.0; primitiveDouble=0.0; wrapperBoolean=false; wrapperByte=0; wrapperCharacter=\u0000; wrapperShort=0; wrapperInteger=0; wrapperLong=0; wrapperFloat=0.0; wrapperDouble=0.0; sqlDate=1970-01-01; sqlTime=00:00:00; sqlTimestamp=1970-01-01 00:00:00; utilDate=1970-01-01 00:00:00; string=1\n" + "primitiveBoolean=true; primitiveByte=127; primitiveChar=\uffff; primitiveShort=32767; primitiveInt=2147483647; primitiveLong=9223372036854775807; primitiveFloat=3.4028235E38; primitiveDouble=1.7976931348623157E308; wrapperBoolean=null; wrapperByte=null; wrapperCharacter=null; wrapperShort=null; wrapperInteger=null; wrapperLong=null; wrapperFloat=null; wrapperDouble=null; sqlDate=null; sqlTime=null; sqlTimestamp=null; utilDate=null; string=null\n"); } /** * Tests NOT for nullable columns * @see CatchallSchema#everyTypes */ @Test public void testWhereNOT() throws Exception { final CalciteAssert.AssertThat with = CalciteAssert.that().withSchema("s", CATCHALL); with.query( "select \"wrapperByte\" from \"s\".\"everyTypes\" where NOT (\"wrapperByte\" is null)") .returnsUnordered("wrapperByte=0"); } 
/** * Tests NOT for nullable columns * @see CatchallSchema#everyTypes */ @Test public void testSelectNOT() throws Exception { final CalciteAssert.AssertThat with = CalciteAssert.that().withSchema("s", CATCHALL); with.query( "select NOT \"wrapperBoolean\" \"value\" from \"s\".\"everyTypes\"") .returnsUnordered( "value=null", "value=true"); } /** Tests columns based on types such as java.sql.Date and java.util.Date. * * @see CatchallSchema#everyTypes */ @Test public void testAggregateFunctions() throws Exception { final CalciteAssert.AssertThat with = CalciteAssert.that().withSchema("s", CATCHALL); checkAgg(with, "min"); checkAgg(with, "max"); checkAgg(with, "avg"); checkAgg(with, "count"); } private void checkAgg(CalciteAssert.AssertThat with, String fn) { for (final Field field : fn.equals("avg") ? EveryType.numericFields() : EveryType.fields()) { with.query( "select " + fn + "(\"" + field.getName() + "\") as c\n" + "from \"s\".\"everyTypes\"") .returns( input -> { int n = 0; try { while (input.next()) { final Object o = get(input); Util.discard(o); ++n; } } catch (SQLException e) { throw new RuntimeException(e); } assertThat(n, equalTo(1)); }); } } private Object get(ResultSet input) throws SQLException { final int type = input.getMetaData().getColumnType(1); switch (type) { case java.sql.Types.BOOLEAN: return input.getBoolean(1); case java.sql.Types.TINYINT: return input.getByte(1); case java.sql.Types.SMALLINT: return input.getShort(1); case java.sql.Types.INTEGER: return input.getInt(1); case java.sql.Types.BIGINT: return input.getLong(1); case java.sql.Types.REAL: return input.getFloat(1); case java.sql.Types.DOUBLE: return input.getDouble(1); case java.sql.Types.CHAR: case java.sql.Types.VARCHAR: return input.getString(1); case java.sql.Types.DATE: return input.getDate(1); case java.sql.Types.TIME: return input.getTime(1); case java.sql.Types.TIMESTAMP: return input.getTimestamp(1); default: throw new AssertionError(type); } } @Test public void 
testClassNames() throws Exception { CalciteAssert.that() .withSchema("s", CATCHALL).query("select * from \"s\".\"everyTypes\"") .returns( resultSet -> { try { final ResultSetMetaData metaData = resultSet.getMetaData(); check(metaData, "primitiveBoolean", Boolean.class); check(metaData, "primitiveByte", Byte.class); check(metaData, "primitiveChar", String.class); check(metaData, "primitiveShort", Short.class); check(metaData, "primitiveInt", Integer.class); check(metaData, "primitiveLong", Long.class); check(metaData, "primitiveFloat", Float.class); check(metaData, "primitiveDouble", Double.class); check(metaData, "wrapperBoolean", Boolean.class); check(metaData, "wrapperByte", Byte.class); check(metaData, "wrapperCharacter", String.class); check(metaData, "wrapperShort", Short.class); check(metaData, "wrapperInteger", Integer.class); check(metaData, "wrapperLong", Long.class); check(metaData, "wrapperFloat", Float.class); check(metaData, "wrapperDouble", Double.class); check(metaData, "sqlDate", java.sql.Date.class); check(metaData, "sqlTime", Time.class); check(metaData, "sqlTimestamp", Timestamp.class); check(metaData, "utilDate", Timestamp.class); check(metaData, "string", String.class); } catch (SQLException e) { throw new RuntimeException(e); } }); } private void check(ResultSetMetaData metaData, String columnName, Class expectedType) throws SQLException { for (int i = 1; i <= metaData.getColumnCount(); i++) { if (metaData.getColumnName(i).equals(columnName)) { assertThat(metaData.getColumnClassName(i), equalTo(expectedType.getName())); return; } } Assert.fail("column not found: " + columnName); } @Test public void testJavaBoolean() throws Exception { final CalciteAssert.AssertThat with = CalciteAssert.that().withSchema("s", CATCHALL); with.query("select count(*) as c from \"s\".\"everyTypes\"\n" + "where \"primitiveBoolean\"") .returns("C=1\n"); with.query("select count(*) as c from \"s\".\"everyTypes\"\n" + "where \"wrapperBoolean\"") .returns("C=0\n"); 
with.query("select count(*) as c from \"s\".\"everyTypes\"\n" + "where \"wrapperBoolean\" is true") .returns("C=0\n"); with.query("select count(*) as c from \"s\".\"everyTypes\"\n" + "where \"wrapperBoolean\" is not true") .returns("C=2\n"); with.query("select count(*) as c from \"s\".\"everyTypes\"\n" + "where \"wrapperBoolean\" is false") .returns("C=1\n"); with.query("select count(*) as c from \"s\".\"everyTypes\"\n" + "where \"wrapperBoolean\" is not false") .returns("C=1\n"); with.query("select count(*) as c from \"s\".\"everyTypes\"\n" + "where \"wrapperBoolean\" is null") .returns("C=1\n"); with.query("select count(*) as c from \"s\".\"everyTypes\"\n" + "where \"wrapperBoolean\" is not null") .returns("C=1\n"); with.query("select count(*) as c from \"s\".\"everyTypes\"\n" + "where \"primitiveInt\" > 0") .returns("C=1\n"); } /** Test case for * <a href="https://issues.apache.org/jira/browse/CALCITE-119">[CALCITE-119] * Comparing a Java type long with a SQL type INTEGER gives wrong * answer</a>. */ @Test public void testCompareJavaAndSqlTypes() throws Exception { final CalciteAssert.AssertThat with = CalciteAssert.that().withSchema("s", CATCHALL); // With CALCITE-119, returned 0 rows. The problem was that when comparing // a Java type (long) and a SQL type (INTEGER), the SQL type was deemed // "less restrictive". So, the long value got truncated to an int value. 
with.query("select \"primitiveLong\" as c from \"s\".\"everyTypes\"\n" + "where \"primitiveLong\" > 0") .returns("C=9223372036854775807\n"); // count(nullif(b, false)) counts how many times b is true with.query("select count(\"primitiveBoolean\") as p,\n" + " count(\"wrapperBoolean\") as w,\n" + " count(nullif(\"primitiveShort\" >= 0, false)) as sp,\n" + " count(nullif(\"wrapperShort\" >= 0, false)) as sw,\n" + " count(nullif(\"primitiveInt\" >= 0, false)) as ip,\n" + " count(nullif(\"wrapperInteger\" >= 0, false)) as iw,\n" + " count(nullif(\"primitiveLong\" >= 0, false)) as lp,\n" + " count(nullif(\"wrapperLong\" >= 0, false)) as lw\n" + "from \"s\".\"everyTypes\"") .returns("P=2; W=1; SP=2; SW=1; IP=2; IW=1; LP=2; LW=1\n"); } @Test public void testDivideWraperPrimitive() throws Exception { final CalciteAssert.AssertThat with = CalciteAssert.that().withSchema("s", CATCHALL); with.query("select \"wrapperLong\" / \"primitiveLong\" as c\n" + " from \"s\".\"everyTypes\" where \"primitiveLong\" <> 0") .planContains( "final Long inp13_ = current.wrapperLong;") .planContains( "return inp13_ == null ? (Long) null : Long.valueOf(inp13_.longValue() / current.primitiveLong);") .returns("C=null\n"); } @Test public void testDivideWraperWrapper() throws Exception { final CalciteAssert.AssertThat with = CalciteAssert.that().withSchema("s", CATCHALL); with.query("select \"wrapperLong\" / \"wrapperLong\" as c\n" + " from \"s\".\"everyTypes\" where \"primitiveLong\" <> 0") .planContains( "final Long inp13_ = ((org.apache.calcite.test.ReflectiveSchemaTest.EveryType) inputEnumerator.current()).wrapperLong;") .planContains( "return inp13_ == null ? 
(Long) null : Long.valueOf(inp13_.longValue() / inp13_.longValue());") .returns("C=null\n"); } @Test public void testDivideWraperWrapperMultipleTimes() throws Exception { final CalciteAssert.AssertThat with = CalciteAssert.that().withSchema("s", CATCHALL); with.query("select \"wrapperLong\" / \"wrapperLong\"\n" + "+ \"wrapperLong\" / \"wrapperLong\" as c\n" + " from \"s\".\"everyTypes\" where \"primitiveLong\" <> 0") .planContains( "final Long inp13_ = ((org.apache.calcite.test.ReflectiveSchemaTest.EveryType) inputEnumerator.current()).wrapperLong;") .planContains( "return inp13_ == null ? (Long) null : Long.valueOf(inp13_.longValue() / inp13_.longValue() + inp13_.longValue() / inp13_.longValue());") .returns("C=null\n"); } @Test public void testOp() throws Exception { final CalciteAssert.AssertThat with = CalciteAssert.that() .withSchema("s", CATCHALL); checkOp(with, "+"); checkOp(with, "-"); checkOp(with, "*"); checkOp(with, "/"); } private void checkOp(CalciteAssert.AssertThat with, String fn) { for (Field field : EveryType.numericFields()) { for (Field field2 : EveryType.numericFields()) { final String name = "\"" + field.getName() + "\""; final String name2 = "\"" + field2.getName() + "\""; with.query("select " + name + "\n" + " " + fn + " " + name2 + " as c\n" + "from \"s\".\"everyTypes\"\n" + "where " + name + " <> 0") .returns(resultSet -> { }); } } } @Test public void testCastFromString() { CalciteAssert.that().withSchema("s", CATCHALL) .query("select cast(\"string\" as int) as c from \"s\".\"everyTypes\"") .returns("C=1\n" + "C=null\n"); } /** Test case for * <a href="https://issues.apache.org/jira/browse/CALCITE-580">[CALCITE-580] * Average aggregation on an Integer column throws ClassCastException</a>. 
*/ @Test public void testAvgInt() throws Exception { CalciteAssert.that().withSchema("s", CATCHALL).with(Lex.JAVA) .query("select primitiveLong, avg(primitiveInt)\n" + "from s.everyTypes\n" + "group by primitiveLong order by primitiveLong") .returns(input -> { StringBuilder buf = new StringBuilder(); try { while (input.next()) { buf.append(input.getInt(2)).append("\n"); } } catch (SQLException e) { throw new RuntimeException(e); } assertThat(buf.toString(), equalTo("0\n2147483647\n")); }); } private static boolean isNumeric(Class type) { switch (Primitive.flavor(type)) { case BOX: return Primitive.ofBox(type).isNumeric(); case PRIMITIVE: return Primitive.of(type).isNumeric(); default: return Number.class.isAssignableFrom(type); // e.g. BigDecimal } } /** Tests that if a field of a relation has an unrecognized type (in this * case a {@link BitSet}) then it is treated as an object. * * @see CatchallSchema#badTypes */ @Test public void testTableFieldHasBadType() throws Exception { CalciteAssert.that() .withSchema("s", CATCHALL) .query("select * from \"s\".\"badTypes\"") .returns("integer=0; bitSet={}\n"); } /** Tests that a schema with a field whose type cannot be recognized * throws an informative exception. * * @see CatchallSchema#enumerable * @see CatchallSchema#list */ @Test public void testSchemaFieldHasBadType() throws Exception { final CalciteAssert.AssertThat with = CalciteAssert.that().withSchema("s", CATCHALL); // BitSet is not a valid relation type. It's as if "bitSet" field does // not exist. 
with.query("select * from \"s\".\"bitSet\"") .throws_("Object 'bitSet' not found within 's'"); // Enumerable field returns 3 records with 0 fields with.query("select * from \"s\".\"enumerable\"") .returns("\n" + "\n" + "\n" + "\n"); // List is implicitly converted to Enumerable with.query("select * from \"s\".\"list\"") .returns("\n" + "\n" + "\n" + "\n"); } /** Test case for a bug where a Java string 'Abc' compared to a char 'Ab' * would be truncated to the char precision and falsely match. */ @Test public void testPrefix() throws Exception { CalciteAssert.that() .withSchema("s", CATCHALL) .query( "select * from \"s\".\"prefixEmps\" where \"name\" in ('Ab', 'Abd')") .returns("empid=2; deptno=10; name=Ab; salary=0.0; commission=null\n" + "empid=4; deptno=10; name=Abd; salary=0.0; commission=null\n"); } /** If a method returns a * {@link ViewTable}.{@code ViewTableMacro}, then it * should be expanded. */ @Ignore @Test public void testTableMacroIsView() throws Exception { CalciteAssert.that() .withSchema("s", new ReflectiveSchema(new JdbcTest.HrSchema())) .query("select * from table(\"s\".\"view\"('abc'))") .returns( "empid=2; deptno=10; name=Ab; salary=0.0; commission=null\n" + "empid=4; deptno=10; name=Abd; salary=0.0; commission=null\n"); } /** Finds a table-macro using reflection. 
*/ @Ignore @Test public void testTableMacro() throws Exception { CalciteAssert.that() .withSchema("s", new ReflectiveSchema(new JdbcTest.HrSchema())) .query("select * from table(\"s\".\"foo\"(3))") .returns( "empid=2; deptno=10; name=Ab; salary=0.0; commission=null\n" + "empid=4; deptno=10; name=Abd; salary=0.0; commission=null\n"); } /** Table with single field as Integer[] */ @Ignore( "java.lang.AssertionError RelDataTypeImpl.getFieldList(RelDataTypeImpl.java:99)") @Test public void testArrayOfBoxedPrimitives() { CalciteAssert.that() .withSchema("s", CATCHALL) .query("select * from \"s\".\"primesBoxed\"") .returnsUnordered("value=1", "value=3", "value=7"); } /** Table with single field as int[] */ @Ignore( "java.lang.AssertionError RelDataTypeImpl.getFieldList(RelDataTypeImpl.java:99)") @Test public void testArrayOfPrimitives() { CalciteAssert.that() .withSchema("s", CATCHALL) .query("select * from \"s\".\"primes\"") .returnsUnordered("value=1", "value=3", "value=7"); } @Test public void testCustomBoxedScalar() { CalciteAssert.that() .withSchema("s", CATCHALL) .query("select \"value\" from \"s\".\"primesCustomBoxed\"") .returnsUnordered("value=1", "value=3", "value=5"); } @Test public void testCustomBoxedSalarCalc() { CalciteAssert.that() .withSchema("s", CATCHALL) .query("select \"value\"*2 \"value\" from \"s\".\"primesCustomBoxed\"") .returnsUnordered("value=2", "value=6", "value=10"); } /** Test case for * <a href="https://issues.apache.org/jira/browse/CALCITE-1569">[CALCITE-1569] * Date condition can generates Integer == Integer, which is always * false</a>. 
*/ @Test public void testDateCanCompare() { final String sql = "select a.v\n" + "from (select \"sqlDate\" v\n" + " from \"s\".\"everyTypes\" " + " group by \"sqlDate\") a," + " (select \"sqlDate\" v\n" + " from \"s\".\"everyTypes\"\n" + " group by \"sqlDate\") b\n" + "where a.v >= b.v\n" + "group by a.v"; CalciteAssert.that() .withSchema("s", CATCHALL) .query(sql) .returnsUnordered("V=1970-01-01"); } /** Test case for * <a href="https://issues.apache.org/jira/browse/CALCITE-281">[CALCITE-1919] * NPE when target in ReflectiveSchema belongs to the unnamed package</a>. */ @Test public void testReflectiveSchemaInUnnamedPackage() throws Exception { final Driver driver = new Driver(); try (CalciteConnection connection = (CalciteConnection) driver.connect("jdbc:calcite:", new Properties())) { SchemaPlus rootSchema = connection.getRootSchema(); final Class<?> c = Class.forName("RootHr"); final Object o = c.getDeclaredConstructor().newInstance(); rootSchema.add("hr", new ReflectiveSchema(o)); connection.setSchema("hr"); final Statement statement = connection.createStatement(); final String sql = "select * from \"emps\""; final ResultSet resultSet = statement.executeQuery(sql); final String expected = "empid=100; name=Bill\n" + "empid=200; name=Eric\n" + "empid=150; name=Sebastian\n"; assertThat(CalciteAssert.toString(resultSet), is(expected)); } } /** Extension to {@link Employee} with a {@code hireDate} column. */ public static class EmployeeWithHireDate extends Employee { public final java.sql.Date hireDate; public EmployeeWithHireDate( int empid, int deptno, String name, float salary, Integer commission, java.sql.Date hireDate) { super(empid, deptno, name, salary, commission); this.hireDate = hireDate; } } /** Record that has a field of every interesting type. 
 */
  public static class EveryType {
    // One public final field per interesting Java type; ReflectiveSchema turns
    // each public field into a column, so field names ARE the column names.
    public final boolean primitiveBoolean;
    public final byte primitiveByte;
    public final char primitiveChar;
    public final short primitiveShort;
    public final int primitiveInt;
    public final long primitiveLong;
    public final float primitiveFloat;
    public final double primitiveDouble;
    public final Boolean wrapperBoolean;
    public final Byte wrapperByte;
    public final Character wrapperCharacter;
    public final Short wrapperShort;
    public final Integer wrapperInteger;
    public final Long wrapperLong;
    public final Float wrapperFloat;
    public final Double wrapperDouble;
    public final java.sql.Date sqlDate;
    public final Time sqlTime;
    public final Timestamp sqlTimestamp;
    public final Date utilDate;
    public final String string;

    /** Populates every field; parameter order matches field declaration order. */
    public EveryType(
        boolean primitiveBoolean,
        byte primitiveByte,
        char primitiveChar,
        short primitiveShort,
        int primitiveInt,
        long primitiveLong,
        float primitiveFloat,
        double primitiveDouble,
        Boolean wrapperBoolean,
        Byte wrapperByte,
        Character wrapperCharacter,
        Short wrapperShort,
        Integer wrapperInteger,
        Long wrapperLong,
        Float wrapperFloat,
        Double wrapperDouble,
        java.sql.Date sqlDate,
        Time sqlTime,
        Timestamp sqlTimestamp,
        Date utilDate,
        String string) {
      this.primitiveBoolean = primitiveBoolean;
      this.primitiveByte = primitiveByte;
      this.primitiveChar = primitiveChar;
      this.primitiveShort = primitiveShort;
      this.primitiveInt = primitiveInt;
      this.primitiveLong = primitiveLong;
      this.primitiveFloat = primitiveFloat;
      this.primitiveDouble = primitiveDouble;
      this.wrapperBoolean = wrapperBoolean;
      this.wrapperByte = wrapperByte;
      this.wrapperCharacter = wrapperCharacter;
      this.wrapperShort = wrapperShort;
      this.wrapperInteger = wrapperInteger;
      this.wrapperLong = wrapperLong;
      this.wrapperFloat = wrapperFloat;
      this.wrapperDouble = wrapperDouble;
      this.sqlDate = sqlDate;
      this.sqlTime = sqlTime;
      this.sqlTimestamp = sqlTimestamp;
      this.utilDate = utilDate;
      this.string = string;
    }

    /** Enumerates all public fields of this record via reflection. */
    static Enumerable<Field> fields() {
      return Linq4j.asEnumerable(EveryType.class.getFields());
    }

    /** Subset of {@link #fields()} whose type is numeric (per {@code isNumeric},
     * defined elsewhere in this file). */
    static Enumerable<Field> numericFields() {
      return fields()
          .where(v1 -> isNumeric(v1.getType()));
    }
  }

  /** All field are private, therefore the resulting record has no fields. */
  public static class AllPrivate {
    private final int x = 0;
  }

  /** Table that has a field that cannot be recognized as a SQL type. */
  public static class BadType {
    public final int integer = 0;
    // BitSet has no SQL mapping; used to test graceful handling of bad columns.
    public final BitSet bitSet = new BitSet(0);
  }

  /** Table that has integer and string fields */
  public static class IntAndString {
    public final int id;
    public final String value;

    public IntAndString(int id, String value) {
      this.id = id;
      this.value = value;
    }
  }

  /** Object whose fields are relations. Called "catch-all" because it's OK
   * if tests add new fields. */
  public static class CatchallSchema {
    // Same employee data exposed two ways: as an Enumerable and as a List.
    public final Enumerable<Employee> enumerable =
        Linq4j.asEnumerable(
            Arrays.asList(new JdbcTest.HrSchema().emps));

    public final List<Employee> list =
        Arrays.asList(new JdbcTest.HrSchema().emps);

    // Not a relation; tests that non-relational fields are tolerated.
    public final BitSet bitSet = new BitSet(1);

    // Row 0: epoch/zero values; row 1: max values with all-null wrappers.
    public final EveryType[] everyTypes = {
        new EveryType(
            false, (byte) 0, (char) 0, (short) 0, 0, 0L, 0F, 0D,
            false, (byte) 0, (char) 0, (short) 0, 0, 0L, 0F, 0D,
            new java.sql.Date(0), new Time(0), new Timestamp(0),
            new Date(0), "1"),
        new EveryType(
            true, Byte.MAX_VALUE, Character.MAX_VALUE, Short.MAX_VALUE,
            Integer.MAX_VALUE, Long.MAX_VALUE, Float.MAX_VALUE,
            Double.MAX_VALUE,
            null, null, null, null, null, null, null, null, null, null,
            null, null, null),
    };

    public final AllPrivate[] allPrivates = { new AllPrivate() };

    public final BadType[] badTypes = { new BadType() };

    // Names share prefixes ("A", "Ab", ...) to exercise LIKE-style predicates.
    public final Employee[] prefixEmps = {
        new Employee(1, 10, "A", 0f, null),
        new Employee(2, 10, "Ab", 0f, null),
        new Employee(3, 10, "Abc", 0f, null),
        new Employee(4, 10, "Abd", 0f, null),
    };

    public final Integer[] primesBoxed = new Integer[]{1, 3, 5};

    public final int[] primes = new int[]{1, 3, 5};

    public final IntHolder[] primesCustomBoxed =
        new IntHolder[]{new IntHolder(1), new IntHolder(3),
            new IntHolder(5)};

    // Note the duplicate id 2 and the null value in id 3.
    public final IntAndString[] nullables = new IntAndString[] {
        new IntAndString(1, "A"), new IntAndString(2, "B"),
        new IntAndString(2, "C"), new IntAndString(3, null)};

    public final IntAndString[] bools = new IntAndString[] {
        new IntAndString(1, "T"), new IntAndString(2, "F"),
        new IntAndString(3, null)};
  }

  /**
   * Custom java class that holds just a single field.
   */
  public static class IntHolder {
    public final int value;

    public IntHolder(int value) {
      this.value = value;
    }
  }

  /** Schema that contains a table with a date column. */
  public static class DateColumnSchema {
    public final EmployeeWithHireDate[] emps = {
        new EmployeeWithHireDate(
            10, 20, "fred", 0f, null, new java.sql.Date(0)), // 1970-1-1
        new EmployeeWithHireDate(
            10, 20, "bill", 0f, null,
            new java.sql.Date(100 * DateTimeUtils.MILLIS_PER_DAY))
        // 1970-04-11
    };
  }
}

// End ReflectiveSchemaTest.java
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.search.aggregations.bucket.significant; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.search.aggregations.AggregatorFactories.Builder; import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregation.Type; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.JLHScore; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator.BucketCountThresholds; import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude; import org.elasticsearch.search.aggregations.support.AggregationContext; import 
org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;

import java.io.IOException;
import java.util.Objects;

/**
 * Builder for the {@code significant_terms} aggregation: configures bucket
 * thresholds, an optional background filter, include/exclude term filtering
 * and the significance heuristic used for scoring.
 */
public class SignificantTermsAggregationBuilder extends ValuesSourceAggregationBuilder<ValuesSource, SignificantTermsAggregationBuilder> {
    public static final String NAME = "significant_terms";
    public static final InternalAggregation.Type TYPE = new Type(NAME);

    static final ParseField BACKGROUND_FILTER = new ParseField("background_filter");
    static final ParseField HEURISTIC = new ParseField("significance_heuristic");

    // Defaults applied when the request specifies no thresholds.
    // NOTE(review): assumed ctor order is (minDocCount, shardMinDocCount,
    // requiredSize, shardSize) — confirm against BucketCountThresholds.
    static final TermsAggregator.BucketCountThresholds DEFAULT_BUCKET_COUNT_THRESHOLDS = new TermsAggregator.BucketCountThresholds(
            3, 0, 10, -1);
    static final SignificanceHeuristic DEFAULT_SIGNIFICANCE_HEURISTIC = new JLHScore();

    private IncludeExclude includeExclude = null;
    private String executionHint = null;
    private QueryBuilder filterBuilder = null;
    // Defensive copy so mutating setters never touch the shared default.
    private TermsAggregator.BucketCountThresholds bucketCountThresholds = new BucketCountThresholds(
            DEFAULT_BUCKET_COUNT_THRESHOLDS);
    private SignificanceHeuristic significanceHeuristic = DEFAULT_SIGNIFICANCE_HEURISTIC;

    public SignificantTermsAggregationBuilder(String name, ValueType valueType) {
        super(name, TYPE, ValuesSourceType.ANY, valueType);
    }

    /**
     * Read from a Stream.
     */
    public SignificantTermsAggregationBuilder(StreamInput in) throws IOException {
        super(in, TYPE, ValuesSourceType.ANY);
        // Read order must mirror the write order in innerWriteTo exactly.
        bucketCountThresholds = new BucketCountThresholds(in);
        executionHint = in.readOptionalString();
        filterBuilder = in.readOptionalNamedWriteable(QueryBuilder.class);
        includeExclude = in.readOptionalWriteable(IncludeExclude::new);
        significanceHeuristic = in.readNamedWriteable(SignificanceHeuristic.class);
    }

    /** Serializes this builder's own state; order must match the stream ctor. */
    @Override
    protected void innerWriteTo(StreamOutput out) throws IOException {
        bucketCountThresholds.writeTo(out);
        out.writeOptionalString(executionHint);
        out.writeOptionalNamedWriteable(filterBuilder);
        out.writeOptionalWriteable(includeExclude);
        out.writeNamedWriteable(significanceHeuristic);
    }

    @Override
    protected boolean serializeTargetValueType() {
        return true;
    }

    /** Returns a copy, so callers cannot mutate this builder's thresholds. */
    protected TermsAggregator.BucketCountThresholds getBucketCountThresholds() {
        return new TermsAggregator.BucketCountThresholds(bucketCountThresholds);
    }

    /** Returns the live thresholds object (not a copy). */
    public TermsAggregator.BucketCountThresholds bucketCountThresholds() {
        return bucketCountThresholds;
    }

    public SignificantTermsAggregationBuilder bucketCountThresholds(TermsAggregator.BucketCountThresholds bucketCountThresholds) {
        if (bucketCountThresholds == null) {
            throw new IllegalArgumentException("[bucketCountThresholds] must not be null: [" + name + "]");
        }
        this.bucketCountThresholds = bucketCountThresholds;
        return this;
    }

    /**
     * Sets the size - indicating how many term buckets should be returned
     * (defaults to 10)
     */
    public SignificantTermsAggregationBuilder size(int size) {
        if (size <= 0) {
            throw new IllegalArgumentException("[size] must be greater than 0. Found [" + size + "] in [" + name + "]");
        }
        bucketCountThresholds.setRequiredSize(size);
        return this;
    }

    /**
     * Sets the shard_size - indicating the number of term buckets each shard
     * will return to the coordinating node (the node that coordinates the
     * search execution). The higher the shard size is, the more accurate the
     * results are.
*/ public SignificantTermsAggregationBuilder shardSize(int shardSize) { if (shardSize <= 0) { throw new IllegalArgumentException( "[shardSize] must be greater than 0. Found [" + shardSize + "] in [" + name + "]"); } bucketCountThresholds.setShardSize(shardSize); return this; } /** * Set the minimum document count terms should have in order to appear in * the response. */ public SignificantTermsAggregationBuilder minDocCount(long minDocCount) { if (minDocCount < 0) { throw new IllegalArgumentException( "[minDocCount] must be greater than or equal to 0. Found [" + minDocCount + "] in [" + name + "]"); } bucketCountThresholds.setMinDocCount(minDocCount); return this; } /** * Set the minimum document count terms should have on the shard in order to * appear in the response. */ public SignificantTermsAggregationBuilder shardMinDocCount(long shardMinDocCount) { if (shardMinDocCount < 0) { throw new IllegalArgumentException( "[shardMinDocCount] must be greater than or equal to 0. Found [" + shardMinDocCount + "] in [" + name + "]"); } bucketCountThresholds.setShardMinDocCount(shardMinDocCount); return this; } /** * Expert: sets an execution hint to the aggregation. */ public SignificantTermsAggregationBuilder executionHint(String executionHint) { this.executionHint = executionHint; return this; } /** * Expert: gets an execution hint to the aggregation. 
     */
    public String executionHint() {
        return executionHint;
    }

    /** Sets the background filter used as the frequency baseline; must be non-null. */
    public SignificantTermsAggregationBuilder backgroundFilter(QueryBuilder backgroundFilter) {
        if (backgroundFilter == null) {
            throw new IllegalArgumentException("[backgroundFilter] must not be null: [" + name + "]");
        }
        this.filterBuilder = backgroundFilter;
        return this;
    }

    public QueryBuilder backgroundFilter() {
        return filterBuilder;
    }

    /**
     * Set terms to include and exclude from the aggregation results
     */
    public SignificantTermsAggregationBuilder includeExclude(IncludeExclude includeExclude) {
        // Note: unlike the other setters, null is accepted here (clears the filter).
        this.includeExclude = includeExclude;
        return this;
    }

    /**
     * Get terms to include and exclude from the aggregation results
     */
    public IncludeExclude includeExclude() {
        return includeExclude;
    }

    /** Sets the scoring heuristic; must be non-null. */
    public SignificantTermsAggregationBuilder significanceHeuristic(SignificanceHeuristic significanceHeuristic) {
        if (significanceHeuristic == null) {
            throw new IllegalArgumentException("[significanceHeuristic] must not be null: [" + name + "]");
        }
        this.significanceHeuristic = significanceHeuristic;
        return this;
    }

    public SignificanceHeuristic significanceHeuristic() {
        return significanceHeuristic;
    }

    /** Builds the aggregator factory, rewriting the heuristic for this search context first. */
    @Override
    protected ValuesSourceAggregatorFactory<ValuesSource, ?> innerBuild(AggregationContext context,
            ValuesSourceConfig<ValuesSource> config, AggregatorFactory<?> parent, Builder subFactoriesBuilder)
            throws IOException {
        SignificanceHeuristic executionHeuristic = this.significanceHeuristic.rewrite(context.searchContext());
        return new SignificantTermsAggregatorFactory(name, type, config, includeExclude, executionHint, filterBuilder,
                bucketCountThresholds, executionHeuristic, context, parent, subFactoriesBuilder, metaData);
    }

    /** Renders this builder's settings as XContent; optional parts are emitted only when set. */
    @Override
    protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
        bucketCountThresholds.toXContent(builder, params);
        if (executionHint != null) {
            builder.field(TermsAggregationBuilder.EXECUTION_HINT_FIELD_NAME.getPreferredName(), executionHint);
        }
        if (filterBuilder != null) {
            builder.field(BACKGROUND_FILTER.getPreferredName(), filterBuilder);
        }
        if (includeExclude != null) {
            includeExclude.toXContent(builder, params);
        }
        significanceHeuristic.toXContent(builder, params);
        return builder;
    }

    @Override
    protected int innerHashCode() {
        return Objects.hash(bucketCountThresholds, executionHint, filterBuilder, includeExclude, significanceHeuristic);
    }

    // NOTE(review): no null/class check here — presumably the superclass's
    // equals() guarantees obj is a non-null SignificantTermsAggregationBuilder
    // before delegating; confirm against ValuesSourceAggregationBuilder.
    @Override
    protected boolean innerEquals(Object obj) {
        SignificantTermsAggregationBuilder other = (SignificantTermsAggregationBuilder) obj;
        return Objects.equals(bucketCountThresholds, other.bucketCountThresholds)
                && Objects.equals(executionHint, other.executionHint)
                && Objects.equals(filterBuilder, other.filterBuilder)
                && Objects.equals(includeExclude, other.includeExclude)
                && Objects.equals(significanceHeuristic, other.significanceHeuristic);
    }

    @Override
    public String getWriteableName() {
        return NAME;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.builder.endpoint.dsl;

import javax.annotation.Generated;
import org.apache.camel.builder.EndpointConsumerBuilder;
import org.apache.camel.builder.EndpointProducerBuilder;
import org.apache.camel.builder.endpoint.AbstractEndpointBuilder;
import org.apache.camel.spi.HeaderFilterStrategy;

/**
 * Send messages to an AWS Simple Notification Topic using AWS SDK version 2.x.
 * 
 * Generated by camel build tools - do NOT edit this file!
 */
// NOTE(review): generated source — every option has a typed overload and a
// String overload (the latter is converted at runtime); keep them paired.
@Generated("org.apache.camel.maven.packaging.EndpointDslMojo")
public interface Sns2EndpointBuilderFactory {

    /**
     * Builder for endpoint for the AWS 2 Simple Notification System (SNS)
     * component.
     */
    public interface Sns2EndpointBuilder extends EndpointProducerBuilder {
        default AdvancedSns2EndpointBuilder advanced() {
            return (AdvancedSns2EndpointBuilder) this;
        }
        /**
         * To use the AmazonSNS as the client.
         * 
         * The option is a:
         * <code>software.amazon.awssdk.services.sns.SnsClient</code> type.
         * 
         * Group: producer
         */
        default Sns2EndpointBuilder amazonSNSClient(Object amazonSNSClient) {
            doSetProperty("amazonSNSClient", amazonSNSClient);
            return this;
        }
        /**
         * To use the AmazonSNS as the client.
         * 
         * The option will be converted to a
         * <code>software.amazon.awssdk.services.sns.SnsClient</code> type.
         * 
         * Group: producer
         */
        default Sns2EndpointBuilder amazonSNSClient(String amazonSNSClient) {
            doSetProperty("amazonSNSClient", amazonSNSClient);
            return this;
        }
        /**
         * Setting the autocreation of the topic.
         * 
         * The option is a: <code>boolean</code> type.
         * 
         * Default: true
         * Group: producer
         */
        default Sns2EndpointBuilder autoCreateTopic(boolean autoCreateTopic) {
            doSetProperty("autoCreateTopic", autoCreateTopic);
            return this;
        }
        /**
         * Setting the autocreation of the topic.
         * 
         * The option will be converted to a <code>boolean</code> type.
         * 
         * Default: true
         * Group: producer
         */
        default Sns2EndpointBuilder autoCreateTopic(String autoCreateTopic) {
            doSetProperty("autoCreateTopic", autoCreateTopic);
            return this;
        }
        /**
         * Setting the autoDiscoverClient mechanism, if true, the component will
         * look for a client instance in the registry automatically otherwise it
         * will skip that checking.
         * 
         * The option is a: <code>boolean</code> type.
         * 
         * Default: true
         * Group: common
         */
        default Sns2EndpointBuilder autoDiscoverClient(
                boolean autoDiscoverClient) {
            doSetProperty("autoDiscoverClient", autoDiscoverClient);
            return this;
        }
        /**
         * Setting the autoDiscoverClient mechanism, if true, the component will
         * look for a client instance in the registry automatically otherwise it
         * will skip that checking.
         * 
         * The option will be converted to a <code>boolean</code> type.
         * 
         * Default: true
         * Group: common
         */
        default Sns2EndpointBuilder autoDiscoverClient(String autoDiscoverClient) {
            doSetProperty("autoDiscoverClient", autoDiscoverClient);
            return this;
        }
        /**
         * To use a custom HeaderFilterStrategy to map headers to/from Camel.
         * 
         * The option is a:
         * <code>org.apache.camel.spi.HeaderFilterStrategy</code> type.
         * 
         * Group: producer
         */
        default Sns2EndpointBuilder headerFilterStrategy(
                HeaderFilterStrategy headerFilterStrategy) {
            doSetProperty("headerFilterStrategy", headerFilterStrategy);
            return this;
        }
        /**
         * To use a custom HeaderFilterStrategy to map headers to/from Camel.
         * 
         * The option will be converted to a
         * <code>org.apache.camel.spi.HeaderFilterStrategy</code> type.
         * 
         * Group: producer
         */
        default Sns2EndpointBuilder headerFilterStrategy(
                String headerFilterStrategy) {
            doSetProperty("headerFilterStrategy", headerFilterStrategy);
            return this;
        }
        /**
         * The ID of an AWS-managed customer master key (CMK) for Amazon SNS or
         * a custom CMK.
         * 
         * The option is a: <code>java.lang.String</code> type.
         * 
         * Group: producer
         */
        default Sns2EndpointBuilder kmsMasterKeyId(String kmsMasterKeyId) {
            doSetProperty("kmsMasterKeyId", kmsMasterKeyId);
            return this;
        }
        /**
         * Whether the producer should be started lazy (on the first message).
         * By starting lazy you can use this to allow CamelContext and routes to
         * startup in situations where a producer may otherwise fail during
         * starting and cause the route to fail being started. By deferring this
         * startup to be lazy then the startup failure can be handled during
         * routing messages via Camel's routing error handlers. Beware that when
         * the first message is processed then creating and starting the
         * producer may take a little time and prolong the total processing time
         * of the processing.
         * 
         * The option is a: <code>boolean</code> type.
         * 
         * Default: false
         * Group: producer
         */
        default Sns2EndpointBuilder lazyStartProducer(boolean lazyStartProducer) {
            doSetProperty("lazyStartProducer", lazyStartProducer);
            return this;
        }
        /**
         * Whether the producer should be started lazy (on the first message).
         * By starting lazy you can use this to allow CamelContext and routes to
         * startup in situations where a producer may otherwise fail during
         * starting and cause the route to fail being started. By deferring this
         * startup to be lazy then the startup failure can be handled during
         * routing messages via Camel's routing error handlers. Beware that when
         * the first message is processed then creating and starting the
         * producer may take a little time and prolong the total processing time
         * of the processing.
         * 
         * The option will be converted to a <code>boolean</code> type.
         * 
         * Default: false
         * Group: producer
         */
        default Sns2EndpointBuilder lazyStartProducer(String lazyStartProducer) {
            doSetProperty("lazyStartProducer", lazyStartProducer);
            return this;
        }
        /**
         * The message structure to use such as json.
         * 
         * The option is a: <code>java.lang.String</code> type.
         * 
         * Group: producer
         */
        default Sns2EndpointBuilder messageStructure(String messageStructure) {
            doSetProperty("messageStructure", messageStructure);
            return this;
        }
        /**
         * The policy for this queue.
         * 
         * The option is a: <code>java.lang.String</code> type.
         * 
         * Group: producer
         */
        default Sns2EndpointBuilder policy(String policy) {
            doSetProperty("policy", policy);
            return this;
        }
        /**
         * To define a proxy host when instantiating the SNS client.
         * 
         * The option is a: <code>java.lang.String</code> type.
         * 
         * Group: producer
         */
        default Sns2EndpointBuilder proxyHost(String proxyHost) {
            doSetProperty("proxyHost", proxyHost);
            return this;
        }
        /**
         * To define a proxy port when instantiating the SNS client.
         * 
         * The option is a: <code>java.lang.Integer</code> type.
         * 
         * Group: producer
         */
        default Sns2EndpointBuilder proxyPort(Integer proxyPort) {
            doSetProperty("proxyPort", proxyPort);
            return this;
        }
        /**
         * To define a proxy port when instantiating the SNS client.
         * 
         * The option will be converted to a <code>java.lang.Integer</code>
         * type.
         * 
         * Group: producer
         */
        default Sns2EndpointBuilder proxyPort(String proxyPort) {
            doSetProperty("proxyPort", proxyPort);
            return this;
        }
        /**
         * To define a proxy protocol when instantiating the SNS client.
         * 
         * The option is a: <code>software.amazon.awssdk.core.Protocol</code>
         * type.
         * 
         * Default: HTTPS
         * Group: producer
         */
        default Sns2EndpointBuilder proxyProtocol(Protocol proxyProtocol) {
            doSetProperty("proxyProtocol", proxyProtocol);
            return this;
        }
        /**
         * To define a proxy protocol when instantiating the SNS client.
         * 
         * The option will be converted to a
         * <code>software.amazon.awssdk.core.Protocol</code> type.
         * 
         * Default: HTTPS
         * Group: producer
         */
        default Sns2EndpointBuilder proxyProtocol(String proxyProtocol) {
            doSetProperty("proxyProtocol", proxyProtocol);
            return this;
        }
        /**
         * The queueUrl to subscribe to.
         * 
         * The option is a: <code>java.lang.String</code> type.
         * 
         * Group: producer
         */
        default Sns2EndpointBuilder queueUrl(String queueUrl) {
            doSetProperty("queueUrl", queueUrl);
            return this;
        }
        /**
         * The region in which SNS client needs to work. When using this
         * parameter, the configuration will expect the lowercase name of the
         * region (for example ap-east-1) You'll need to use the name
         * Region.EU_WEST_1.id().
         * 
         * The option is a: <code>java.lang.String</code> type.
         * 
         * Group: producer
         */
        default Sns2EndpointBuilder region(String region) {
            doSetProperty("region", region);
            return this;
        }
        /**
         * Define if Server Side Encryption is enabled or not on the topic.
         * 
         * The option is a: <code>boolean</code> type.
         * 
         * Default: false
         * Group: producer
         */
        default Sns2EndpointBuilder serverSideEncryptionEnabled(
                boolean serverSideEncryptionEnabled) {
            doSetProperty("serverSideEncryptionEnabled", serverSideEncryptionEnabled);
            return this;
        }
        /**
         * Define if Server Side Encryption is enabled or not on the topic.
         * 
         * The option will be converted to a <code>boolean</code> type.
         * 
         * Default: false
         * Group: producer
         */
        default Sns2EndpointBuilder serverSideEncryptionEnabled(
                String serverSideEncryptionEnabled) {
            doSetProperty("serverSideEncryptionEnabled", serverSideEncryptionEnabled);
            return this;
        }
        /**
         * The subject which is used if the message header 'CamelAwsSnsSubject'
         * is not present.
         * 
         * The option is a: <code>java.lang.String</code> type.
         * 
         * Group: producer
         */
        default Sns2EndpointBuilder subject(String subject) {
            doSetProperty("subject", subject);
            return this;
        }
        /**
         * Define if the subscription between SNS Topic and SQS must be done or
         * not.
         * 
         * The option is a: <code>boolean</code> type.
         * 
         * Default: false
         * Group: producer
         */
        default Sns2EndpointBuilder subscribeSNStoSQS(boolean subscribeSNStoSQS) {
            doSetProperty("subscribeSNStoSQS", subscribeSNStoSQS);
            return this;
        }
        /**
         * Define if the subscription between SNS Topic and SQS must be done or
         * not.
         * 
         * The option will be converted to a <code>boolean</code> type.
         * 
         * Default: false
         * Group: producer
         */
        default Sns2EndpointBuilder subscribeSNStoSQS(String subscribeSNStoSQS) {
            doSetProperty("subscribeSNStoSQS", subscribeSNStoSQS);
            return this;
        }
        /**
         * If we want to trust all certificates in case of overriding the
         * endpoint.
         * 
         * The option is a: <code>boolean</code> type.
         * 
         * Default: false
         * Group: producer
         */
        default Sns2EndpointBuilder trustAllCertificates(
                boolean trustAllCertificates) {
            doSetProperty("trustAllCertificates", trustAllCertificates);
            return this;
        }
        /**
         * If we want to trust all certificates in case of overriding the
         * endpoint.
         * 
         * The option will be converted to a <code>boolean</code> type.
         * 
         * Default: false
         * Group: producer
         */
        default Sns2EndpointBuilder trustAllCertificates(
                String trustAllCertificates) {
            doSetProperty("trustAllCertificates", trustAllCertificates);
            return this;
        }
        /**
         * Amazon AWS Access Key.
         * 
         * The option is a: <code>java.lang.String</code> type.
         * 
         * Group: security
         */
        default Sns2EndpointBuilder accessKey(String accessKey) {
            doSetProperty("accessKey", accessKey);
            return this;
        }
        /**
         * Amazon AWS Secret Key.
         * 
         * The option is a: <code>java.lang.String</code> type.
         * 
         * Group: security
         */
        default Sns2EndpointBuilder secretKey(String secretKey) {
            doSetProperty("secretKey", secretKey);
            return this;
        }
    }

    /**
     * Advanced builder for endpoint for the AWS 2 Simple Notification System
     * (SNS) component.
     */
    public interface AdvancedSns2EndpointBuilder
            extends
                EndpointProducerBuilder {
        default Sns2EndpointBuilder basic() {
            return (Sns2EndpointBuilder) this;
        }
        /**
         * Whether the endpoint should use basic property binding (Camel 2.x) or
         * the newer property binding with additional capabilities.
         * 
         * The option is a: <code>boolean</code> type.
         * 
         * Default: false
         * Group: advanced
         */
        default AdvancedSns2EndpointBuilder basicPropertyBinding(
                boolean basicPropertyBinding) {
            doSetProperty("basicPropertyBinding", basicPropertyBinding);
            return this;
        }
        /**
         * Whether the endpoint should use basic property binding (Camel 2.x) or
         * the newer property binding with additional capabilities.
         * 
         * The option will be converted to a <code>boolean</code> type.
         * 
         * Default: false
         * Group: advanced
         */
        default AdvancedSns2EndpointBuilder basicPropertyBinding(
                String basicPropertyBinding) {
            doSetProperty("basicPropertyBinding", basicPropertyBinding);
            return this;
        }
        /**
         * Sets whether synchronous processing should be strictly used, or Camel
         * is allowed to use asynchronous processing (if supported).
         * 
         * The option is a: <code>boolean</code> type.
         * 
         * Default: false
         * Group: advanced
         */
        default AdvancedSns2EndpointBuilder synchronous(boolean synchronous) {
            doSetProperty("synchronous", synchronous);
            return this;
        }
        /**
         * Sets whether synchronous processing should be strictly used, or Camel
         * is allowed to use asynchronous processing (if supported).
         * 
         * The option will be converted to a <code>boolean</code> type.
         * 
         * Default: false
         * Group: advanced
         */
        default AdvancedSns2EndpointBuilder synchronous(String synchronous) {
            doSetProperty("synchronous", synchronous);
            return this;
        }
    }

    /**
     * Proxy enum for <code>software.amazon.awssdk.core.Protocol</code> enum.
     */
    // Mirrors the AWS SDK enum so the DSL needn't depend on the SDK at compile time.
    enum Protocol {
        HTTP,
        HTTPS;
    }

    public interface Sns2Builders {
        /**
         * AWS 2 Simple Notification System (SNS) (camel-aws2-sns)
         * Send messages to an AWS Simple Notification Topic using AWS SDK
         * version 2.x.
         * 
         * Category: cloud,messaging,mobile
         * Since: 3.1
         * Maven coordinates: org.apache.camel:camel-aws2-sns
         * 
         * Syntax: <code>aws2-sns:topicNameOrArn</code>
         * 
         * Path parameter: topicNameOrArn (required)
         * Topic name or ARN
         * 
         * @param path topicNameOrArn
         */
        default Sns2EndpointBuilder aws2Sns(String path) {
            return Sns2EndpointBuilderFactory.endpointBuilder("aws2-sns", path);
        }
        /**
         * AWS 2 Simple Notification System (SNS) (camel-aws2-sns)
         * Send messages to an AWS Simple Notification Topic using AWS SDK
         * version 2.x.
         * 
         * Category: cloud,messaging,mobile
         * Since: 3.1
         * Maven coordinates: org.apache.camel:camel-aws2-sns
         * 
         * Syntax: <code>aws2-sns:topicNameOrArn</code>
         * 
         * Path parameter: topicNameOrArn (required)
         * Topic name or ARN
         * 
         * @param componentName to use a custom component name for the endpoint
         * instead of the default name
         * @param path topicNameOrArn
         */
        default Sns2EndpointBuilder aws2Sns(String componentName, String path) {
            return Sns2EndpointBuilderFactory.endpointBuilder(componentName, path);
        }
    }

    // Creates the concrete builder; the local class implements both the basic
    // and the advanced builder interfaces so advanced()/basic() casts are safe.
    static Sns2EndpointBuilder endpointBuilder(String componentName, String path) {
        class Sns2EndpointBuilderImpl extends AbstractEndpointBuilder implements Sns2EndpointBuilder, AdvancedSns2EndpointBuilder {
            public Sns2EndpointBuilderImpl(String path) {
                super(componentName, path);
            }
        }
        return new Sns2EndpointBuilderImpl(path);
    }
}
package tools.snapshot.nodes;

import com.oracle.truffle.api.Truffle;
import com.oracle.truffle.api.dsl.GenerateNodeFactory;
import com.oracle.truffle.api.dsl.Specialization;
import com.oracle.truffle.api.frame.FrameDescriptor;
import com.oracle.truffle.api.frame.FrameSlot;
import com.oracle.truffle.api.frame.MaterializedFrame;

import som.compiler.Variable.Internal;
import som.interpreter.FrameOnStackMarker;
import som.interpreter.Method;
import som.interpreter.Types;
import som.interpreter.objectstorage.ClassFactory;
import som.vm.constants.Classes;
import som.vmobjects.SAbstractObject;
import som.vmobjects.SBlock;
import som.vmobjects.SInvokable;
import tools.snapshot.SnapshotBackend;
import tools.snapshot.SnapshotBuffer;
import tools.snapshot.SnapshotRecord;
import tools.snapshot.deserialization.DeserializationBuffer;
import tools.snapshot.deserialization.FixupInformation;


/**
 * Serializes/deserializes {@link SBlock} values for snapshotting.
 *
 * Wire layout (established by the offset arithmetic and the closing assert in
 * {@link #serialize}):
 *   short  invokable symbol id
 *   byte   number of frame arguments
 *   long[] one object pointer per argument
 *   byte   number of frame slots
 *   long[] one object pointer per slot value
 * For a block without a context, a single zero short follows the symbol id;
 * read back byte-wise that yields numArgs == 0 and numSlots == 0.
 */
@GenerateNodeFactory
public abstract class BlockSerializationNode extends AbstractSerializationNode {
  // Bytes used by the invokable's symbol id at the start of the record.
  private static final int SINVOKABLE_SIZE = Short.BYTES;

  public BlockSerializationNode(final ClassFactory classFact) {
    super(classFact);
  }

  // TODO specialize on different blocks
  @Specialization
  public void serialize(final SBlock block, final SnapshotBuffer sb) {
    MaterializedFrame mf = block.getContextOrNull();
    if (mf == null) {
      // No captured context: record is just the symbol id plus a zero short
      // (two zero bytes, deserialized as numArgs = 0 and numSlots = 0).
      int base = sb.addObject(block, classFact, SINVOKABLE_SIZE + 2);
      SInvokable meth = block.getMethod();
      sb.putShortAt(base, meth.getIdentifier().getSymbolId());
      sb.putShortAt(base + 2, (short) 0);
    } else {
      FrameDescriptor fd = mf.getFrameDescriptor();
      Object[] args = mf.getArguments();
      // Reserve space up front: symbol id + one pointer per argument and per
      // slot + the two count bytes.
      int start = sb.addObject(block, classFact,
          SINVOKABLE_SIZE + ((args.length + fd.getSlots().size()) * Long.BYTES) + 2);
      int base = start;
      SInvokable meth = block.getMethod();
      sb.putShortAt(base, meth.getIdentifier().getSymbolId());
      sb.putByteAt(base + 2, (byte) args.length);
      base += 3;

      SnapshotRecord record = sb.getRecord();
      for (int i = 0; i < args.length; i++) {
        // TODO optimization: cache argument serialization
        // Serialize the argument value itself, then store a pointer to it.
        Types.getClassOf(args[i]).serialize(args[i], sb);
        sb.putLongAt(base + (i * Long.BYTES), record.getObjectPointer(args[i]));
      }
      base += (args.length * Long.BYTES);

      int j = 0;
      sb.putByteAt(base, (byte) fd.getSlots().size());
      base++;

      for (FrameSlot slot : fd.getSlots()) {
        // assume this is ordered by index
        // TODO optimization: MaterializedFrameSerialization Nodes that are associated with the
        // Invokables Frame Descriptor. Possibly use Local Var Read Nodes.
        Object value = mf.getValue(slot);

        switch (fd.getFrameSlotKind(slot)) {
          case Boolean:
            Classes.booleanClass.serialize(value, sb);
            break;
          case Double:
            Classes.doubleClass.serialize(value, sb);
            break;
          case Long:
            Classes.integerClass.serialize(value, sb);
            break;
          case Object:
            // We are going to represent this as a boolean, the slot will handled in replay
            if (value instanceof FrameOnStackMarker) {
              value = ((FrameOnStackMarker) value).isOnStack();
              Classes.booleanClass.serialize(value, sb);
            } else {
              assert value instanceof SAbstractObject;
              Types.getClassOf(value).serialize(value, sb);
            }
            break;
          case Illegal:
            // Uninitialized variables
            Types.getClassOf(fd.getDefaultValue()).serialize(fd.getDefaultValue(), sb);
            break;
          default:
            throw new IllegalArgumentException("Unexpected SlotKind");
        }
        sb.putLongAt(base + (j * Long.BYTES), sb.getRecord().getObjectPointer(value));
        j++;
        // dont redo frame!
        // just serialize locals and arguments ordered by their slotnumber
        // we can get the frame from the invokables root node
      }
      base += j * Long.BYTES;
      // Sanity check: we wrote exactly the number of bytes reserved above.
      assert base == start
          + SINVOKABLE_SIZE + ((args.length + fd.getSlots().size()) * Long.BYTES) + 2;
    }
  }

  /**
   * Reads a block back in the layout written by {@link #serialize}. References
   * that are not yet available are patched later via fixup objects.
   *
   * NOTE(review): a block serialized without a context is recreated here with
   * an empty materialized frame rather than a null context — confirm that
   * this round-trip difference is intended.
   */
  @Override
  public Object deserialize(final DeserializationBuffer bb) {
    short sinv = bb.getShort();
    SInvokable invokable = SnapshotBackend.lookupInvokable(sinv);
    // Frame descriptor comes from the block's enclosing (outer) method.
    FrameDescriptor fd =
        ((Method) invokable.getInvokable()).getLexicalScope().getOuterMethod()
                                           .getMethod().getFrameDescriptor();

    // read num args
    int numArgs = bb.get();
    Object[] args = new Object[numArgs];

    // read args
    for (int i = 0; i < numArgs; i++) {
      Object arg = bb.getReference();
      if (DeserializationBuffer.needsFixup(arg)) {
        // Target object not materialized yet; patch args[i] later.
        bb.installFixup(new BlockArgumentFixup(args, i));
      } else {
        args[i] = arg;
      }
    }

    MaterializedFrame frame = Truffle.getRuntime().createMaterializedFrame(args, fd);

    int numSlots = bb.get();
    if (numSlots > 0) {
      assert numSlots == fd.getSlots().size();
      for (int i = 0; i < numSlots; i++) {
        FrameSlot slot = fd.getSlots().get(i);
        Object o = bb.getReference();
        if (DeserializationBuffer.needsFixup(o)) {
          bb.installFixup(new FrameSlotFixup(frame, slot));
        } else {
          switch (fd.getFrameSlotKind(slot)) {
            case Boolean:
              frame.setBoolean(slot, (boolean) o);
              break;
            case Double:
              frame.setDouble(slot, (double) o);
              break;
            case Long:
              frame.setLong(slot, (long) o);
              break;
            case Object:
              // Internal slots were serialized as the on-stack boolean;
              // rebuild the FrameOnStackMarker from it.
              if (slot.getIdentifier() instanceof Internal) {
                FrameOnStackMarker fosm = new FrameOnStackMarker();
                if (!(boolean) o) {
                  fosm.frameNoLongerOnStack();
                }
                o = fosm;
              }
              frame.setObject(slot, o);
              break;
            case Illegal:
              // uninitialized variable, uses default
              frame.setObject(slot, o);
              break;
            default:
              throw new IllegalArgumentException("Unexpected SlotKind");
          }
        }
      }
    }
    return new SBlock(invokable, frame);
  }

  /** Fixup that stores a late-arriving reference into an argument array slot. */
  public static class BlockArgumentFixup extends FixupInformation {
    Object[] args;
    int      idx;

    public BlockArgumentFixup(final Object[] args, final int idx) {
      this.args = args;
      this.idx = idx;
    }

    @Override
    public void fixUp(final Object o) {
      args[idx] = o;
    }
  }

  /** Fixup that stores a late-arriving reference into a frame slot. */
  public static class FrameSlotFixup extends FixupInformation {
    FrameSlot         slot;
    MaterializedFrame frame;

    public FrameSlotFixup(final MaterializedFrame frame, final FrameSlot slot) {
      this.frame = frame;
      this.slot = slot;
    }

    @Override
    public void fixUp(final Object o) {
      frame.setObject(slot, o);
    }
  }
}
/** * Copyright 2005-2014 Red Hat, Inc. * * Red Hat licenses this file to you under the Apache License, version * 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. */ package io.fabric8.maven.proxy.impl; import java.io.File; import java.util.Collections; import java.util.Dictionary; import java.util.HashMap; import java.util.HashSet; import java.util.Hashtable; import java.util.Map; import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; import io.fabric8.api.RuntimeProperties; import io.fabric8.api.jcip.GuardedBy; import io.fabric8.api.jcip.ThreadSafe; import io.fabric8.api.scr.AbstractComponent; import io.fabric8.api.scr.Configurer; import io.fabric8.api.scr.ValidatingReference; import io.fabric8.deployer.ProjectDeployer; import io.fabric8.maven.MavenResolver; import io.fabric8.maven.proxy.MavenProxy; import io.fabric8.zookeeper.ZkPath; import org.apache.curator.framework.CuratorFramework; import org.apache.curator.framework.state.ConnectionState; import org.apache.curator.framework.state.ConnectionStateListener; import org.apache.felix.scr.annotations.Activate; import org.apache.felix.scr.annotations.Component; import org.apache.felix.scr.annotations.ConfigurationPolicy; import org.apache.felix.scr.annotations.Deactivate; import org.apache.felix.scr.annotations.Property; import org.apache.felix.scr.annotations.Reference; import org.apache.felix.scr.annotations.Service; import org.apache.zookeeper.CreateMode; import org.osgi.service.http.HttpContext; import org.osgi.service.http.HttpService; import 
org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import static io.fabric8.zookeeper.utils.ZooKeeperUtils.create;
import static io.fabric8.zookeeper.utils.ZooKeeperUtils.deleteSafe;

/**
 * SCR component that exposes the fabric8 maven proxy servlets
 * ({@code /maven/download} and {@code /maven/upload}) through the OSGi
 * {@link HttpService} and publishes their URLs in ZooKeeper so other
 * containers can discover them.
 * <p>
 * ZooKeeper registration is driven by the Curator connection state: proxies
 * are (re-)registered on CONNECTED/RECONNECTED. The nodes are
 * ephemeral-sequential, so they disappear automatically when this container's
 * session ends.
 */
@ThreadSafe
@Component(name = "io.fabric8.maven.proxy", label = "Fabric8 Maven Proxy Registration Handler", policy = ConfigurationPolicy.OPTIONAL, immediate = true, metatype = true)
@Service(ConnectionStateListener.class)
public final class MavenProxyRegistrationHandler extends AbstractComponent implements ConnectionStateListener {

    private static final Logger LOGGER = LoggerFactory.getLogger(MavenProxyRegistrationHandler.class);

    private static final String DEFAULT_ROLE = "admin";
    private static final String DEFAULT_REALM = "karaf";
    // NOTE(review): not referenced within this class; presumably consumed by
    // the servlets or kept for documentation -- confirm before removing.
    private static final String DEFAULT_LOCAL_REPOSITORY = System.getProperty("karaf.data") + File.separator + "maven" + File.separator + "proxy" + File.separator + "downloads";

    @Reference
    private Configurer configurer;
    @Reference(referenceInterface = HttpService.class)
    private final ValidatingReference<HttpService> httpService = new ValidatingReference<HttpService>();
    @Reference(referenceInterface = CuratorFramework.class)
    private final ValidatingReference<CuratorFramework> curator = new ValidatingReference<CuratorFramework>();
    @Reference(referenceInterface = RuntimeProperties.class)
    private final ValidatingReference<RuntimeProperties> runtimeProperties = new ValidatingReference<RuntimeProperties>();
    @Reference(referenceInterface = ProjectDeployer.class)
    private final ValidatingReference<ProjectDeployer> projectDeployer = new ValidatingReference<ProjectDeployer>();
    @Reference(referenceInterface = MavenResolver.class)
    private final ValidatingReference<MavenResolver> mavenResolver = new ValidatingReference<>();

    // proxy type (download/upload) -> ZooKeeper node paths created by this container
    private final Map<String, Set<String>> registeredProxies;

    @GuardedBy("volatile")
    private volatile MavenDownloadProxyServlet mavenDownloadProxyServlet;
    @GuardedBy("volatile")
    private volatile MavenUploadProxyServlet mavenUploadProxyServlet;
    @GuardedBy("volatile")
    @Property(name = "realm", label = "Jaas Realm", description = "The Jaas Realm to use for uploads", value = DEFAULT_REALM)
    private volatile String realm;
    @GuardedBy("volatile")
    @Property(name = "role", label = "Jaas Role", description = "The Jaas Role to use for uploads", value = DEFAULT_ROLE)
    private volatile String role;
    @Property(name = "name", label = "Container Name", description = "The name of the container", value = "${runtime.id}")
    private String name;
    @Property(name = "threadMaximumPoolSize", label = "Thread pool maximum size", description = "Maximum number of concurrent threads used for the DownloadMavenProxy servlet", intValue = 5)
    private int threadMaximumPoolSize;
    @GuardedBy("AtomicBoolean")
    private final AtomicBoolean connected = new AtomicBoolean(false);

    public MavenProxyRegistrationHandler() {
        // Outer map is immutable; the per-type sets stay mutable and collect
        // the ZooKeeper node paths registered for each proxy type.
        Map<String, Set<String>> proxies = new HashMap<String, Set<String>>();
        proxies.put(MavenProxy.DOWNLOAD_TYPE, new HashSet<String>());
        proxies.put(MavenProxy.UPLOAD_TYPE, new HashSet<String>());
        registeredProxies = Collections.unmodifiableMap(proxies);
    }

    /**
     * Applies the component configuration, starts both proxy servlets and
     * registers them with the HTTP service. Uploads are guarded by the
     * configured JAAS realm/role; downloads use the default HTTP context.
     *
     * @param configuration SCR-provided component configuration
     * @throws Exception if configuration or servlet startup fails
     */
    @Activate
    void init(Map<String, ?> configuration) throws Exception {
        configurer.configure(configuration, this);
        this.mavenDownloadProxyServlet = new MavenDownloadProxyServlet(mavenResolver.get(), runtimeProperties.get(), projectDeployer.get(), threadMaximumPoolSize);
        this.mavenDownloadProxyServlet.start();
        this.mavenUploadProxyServlet = new MavenUploadProxyServlet(mavenResolver.get(), runtimeProperties.get(), projectDeployer.get());
        this.mavenUploadProxyServlet.start();
        try {
            HttpContext base = httpService.get().createDefaultHttpContext();
            HttpContext secure = new MavenSecureHttpContext(base, realm, role);
            httpService.get().registerServlet("/maven/download", mavenDownloadProxyServlet, createParams("maven-download"), base);
            httpService.get().registerServlet("/maven/upload", mavenUploadProxyServlet, createParams("maven-upload"), secure);
        } catch (Throwable t) {
            // Pass the throwable to the logger so the stack trace is preserved;
            // logging only getMessage() made registration failures undiagnosable.
            LOGGER.warn("Failed to register fabric maven proxy servlets, due to: " + t.getMessage(), t);
        }
        activateComponent();
    }

    /**
     * Stops the servlets, unregisters them from the HTTP service and, if the
     * ZooKeeper connection is still up, removes this container's proxy nodes.
     */
    @Deactivate
    void destroy() {
        deactivateComponent();
        if (mavenDownloadProxyServlet != null) {
            mavenDownloadProxyServlet.stop();
        }
        if (mavenUploadProxyServlet != null) {
            mavenUploadProxyServlet.stop();
        }
        try {
            httpService.get().unregister("/maven/download");
            httpService.get().unregister("/maven/upload");
        } catch (Exception ex) {
            // include the cause so "already stopped" can be told apart from real failures
            LOGGER.warn("Http service returned error on servlet unregister. Possibly the service has already been stopped", ex);
        }
        if (connected.get()) {
            unregister(MavenProxy.DOWNLOAD_TYPE);
            unregister(MavenProxy.UPLOAD_TYPE);
        }
    }

    /** Builds servlet init parameters carrying the servlet name. */
    private Dictionary<String, String> createParams(String name) {
        Dictionary<String, String> d = new Hashtable<String, String>();
        d.put("servlet-name", name);
        return d;
    }

    /**
     * Publishes the proxy URL for the given type under an ephemeral-sequential
     * ZooKeeper node. Any previous registration of that type is removed first
     * so reconnects do not accumulate stale nodes.
     */
    private void register(String type) {
        unregister(type);
        try {
            String mavenProxyUrl = "${zk:" + name + "/http}/maven/" + type + "/";
            String parentPath = ZkPath.MAVEN_PROXY.getPath(type);
            String path = parentPath + "/p_";
            registeredProxies.get(type).add(create(curator.get(), path, mavenProxyUrl, CreateMode.EPHEMERAL_SEQUENTIAL));
        } catch (Exception e) {
            // keep the exception: the bare message hid why registration failed
            LOGGER.warn("Failed to register maven proxy.", e);
        }
    }

    /** Deletes all ZooKeeper nodes registered for the given proxy type. */
    private void unregister(String type) {
        Set<String> proxyNodes = registeredProxies.get(type);
        if (proxyNodes != null) {
            try {
                for (String entry : registeredProxies.get(type)) {
                    deleteSafe(curator.get(), entry);
                }
            } catch (Exception e) {
                LOGGER.warn("Failed to remove maven proxy from registry.", e);
            }
            registeredProxies.get(type).clear();
        }
    }

    /** Returns the string value of {@code key}, or {@code defaultValue} when absent. */
    private String readProperty(Map<String, ?> properties, String key, String defaultValue) {
        return properties != null && properties.containsKey(key) ? properties.get(key).toString() : defaultValue;
    }

    /**
     * Curator connection listener: (re-)registers the proxies once connected
     * and records the connection state for {@link #destroy()}.
     */
    @Override
    public void stateChanged(CuratorFramework client, ConnectionState newState) {
        switch (newState) {
            case CONNECTED:
            case RECONNECTED:
                connected.set(true);
                if (isValid()) {
                    register(MavenProxy.DOWNLOAD_TYPE);
                    register(MavenProxy.UPLOAD_TYPE);
                }
                break;
            default:
                connected.set(false);
                break;
        }
    }

    // --- SCR reference binding methods ---

    void bindCurator(CuratorFramework curator) {
        this.curator.bind(curator);
    }

    void unbindCurator(CuratorFramework curator) {
        this.curator.unbind(curator);
    }

    void bindHttpService(HttpService service) {
        this.httpService.bind(service);
    }

    void unbindHttpService(HttpService service) {
        this.httpService.unbind(service);
    }

    void bindRuntimeProperties(RuntimeProperties service) {
        this.runtimeProperties.bind(service);
    }

    void unbindRuntimeProperties(RuntimeProperties service) {
        this.runtimeProperties.unbind(service);
    }

    void bindProjectDeployer(ProjectDeployer projectDeployer) {
        this.projectDeployer.bind(projectDeployer);
    }

    void unbindProjectDeployer(ProjectDeployer projectDeployer) {
        this.projectDeployer.unbind(projectDeployer);
    }

    void bindMavenResolver(MavenResolver mavenResolver) {
        this.mavenResolver.bind(mavenResolver);
    }

    void unbindMavenResolver(MavenResolver mavenResolver) {
        this.mavenResolver.unbind(mavenResolver);
    }
}
/*
 * Copyright (c) 2012, 2013, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

/* @test
 * @bug 6206780
 * @summary Test that all public unsynchronized methods of StringBuffer are
 * either directly or indirectly synchronized
 */

import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

/**
 * TestSynchronization tests whether synchronized method calls on an object
 * result in synchronized calls. Note that this may not test all cases desired.
 * It only tests whether some synchronization has occurred on the object during
 * the call chain, and can't tell whether the object was locked across all
 * operations that have been performed on the object.
 */
public class TestSynchronization {

    /**
     * Define parameters used in methods of StringBuffer - admittedly a bit of
     * hack but 'purpose-built' for StringBuffer. Something more general could
     * probably be developed if the test needs to be more widely adopted.
     * <p/>
     * boolean char char[] int double float long Object CharSequence String
     * StringBuffer StringBuilder
     * <p/>
     */
    private static final boolean BOOLEAN_VAL = true;
    private static final char CHAR_VAL = 'x';
    private static final char[] CHAR_ARRAY_VAL = {'c', 'h', 'a', 'r', 'a', 'r', 'r', 'a', 'y'};
    private static final int INT_VAL = 1;
    private static final double DOUBLE_VAL = 1.0d;
    private static final float FLOAT_VAL = 1.0f;
    private static final long LONG_VAL = 1L;
    private static final Object OBJECT_VAL = new Object();
    private static final String STRING_VAL = "String value";
    private static final StringBuilder STRING_BUILDER_VAL =
            new StringBuilder("StringBuilder value");
    private static final StringBuffer STRING_BUFFER_VAL =
            new StringBuffer("StringBuffer value");
    // Each CharSequence-typed parameter is exercised once per concrete type.
    private static final CharSequence[] CHAR_SEQUENCE_VAL =
            {STRING_VAL, STRING_BUILDER_VAL, STRING_BUFFER_VAL};

    public static void main(String... args) throws Exception {
        // First, test the tester
        testClass(MyTestClass.class, /* self-test */ true);
        // Finally, test StringBuffer
        testClass(StringBuffer.class, /* self-test */ false);
    }

    /**
     * Test all the public, unsynchronized methods of the given class. If
     * isSelfTest is true, this is a self-test to ensure that the test program
     * itself is working correctly. Should help ensure correctness of this
     * program if it changes.
     * <p/>
     * @param aClass - the class to test
     * @param isSelfTest - true if this is the special self-test class
     * @throws SecurityException
     */
    private static void testClass(Class<?> aClass, boolean isSelfTest) throws
            Exception {
        // Get all unsynchronized public methods via reflection. We don't need
        // to test synchronized methods. By definition, they are already doing
        // the right thing.
        List<Method> methods = Arrays.asList(aClass.getDeclaredMethods());
        for (Method m : methods) {
            // skip synthetic methods, like default interface methods and lambdas
            if (m.isSynthetic()) {
                continue;
            }
            int modifiers = m.getModifiers();
            if (Modifier.isPublic(modifiers)
                    && !Modifier.isSynchronized(modifiers)) {
                try {
                    testMethod(aClass, m);
                } catch (TestFailedException e) {
                    if (isSelfTest) {
                        String methodName = e.getMethod().getName();
                        switch (methodName) {
                            case "should_pass":
                                throw new RuntimeException(
                                        "Test failed: self-test failed. The 'should_pass' method did not pass the synchronization test. Check the test code.");
                            case "should_fail":
                                break;
                            default:
                                throw new RuntimeException(
                                        "Test failed: something is amiss with the test. A TestFailedException was generated on a call to "
                                        + methodName + " which we didn't expect to test in the first place.");
                        }
                    } else {
                        throw new RuntimeException("Test failed: the method "
                                + e.getMethod().toString()
                                + " should be synchronized, but isn't.");
                    }
                }
            }
        }
    }

    /**
     * Invokes the method on a freshly constructed receiver and fails if the
     * invocation did not synchronize on that receiver.
     */
    private static void invokeMethod(Class<?> aClass, final Method m, final Object[] args) throws
            TestFailedException, Exception {
        //System.out.println( "Invoking " + m.toString() + " with parameters " + Arrays.toString(args));
        // a fresh receiver per invocation keeps earlier calls from leaking state
        final Constructor<?> objConstructor = aClass.getConstructor(String.class);
        final Object obj = objConstructor.newInstance("LeftPalindrome-emordnilaP-thgiR");
        // test method m for synchronization
        if (!isSynchronized(m, obj, args)) {
            throw new TestFailedException(m);
        }
    }

    private static void testMethod(Class<?> aClass, Method m) throws
            Exception {
        /*
         * Construct call with arguments of the correct type. Note that the
         * values are somewhat irrelevant. If the call actually succeeds, it
         * means we aren't synchronized and the test has failed.
         */
        Class<?>[] pTypes = m.getParameterTypes();
        // indices of CharSequence parameters, handled separately below
        List<Integer> charSequenceArgs = new ArrayList<>();
        Object[] args = new Object[pTypes.length];
        for (int i = 0; i < pTypes.length; i++) {
            // determine the type and create the corresponding actual argument
            Class<?> pType = pTypes[i];
            if (pType.equals(boolean.class)) {
                args[i] = BOOLEAN_VAL;
            } else if (pType.equals(char.class)) {
                args[i] = CHAR_VAL;
            } else if (pType.equals(int.class)) {
                args[i] = INT_VAL;
            } else if (pType.equals(double.class)) {
                args[i] = DOUBLE_VAL;
            } else if (pType.equals(float.class)) {
                args[i] = FLOAT_VAL;
            } else if (pType.equals(long.class)) {
                args[i] = LONG_VAL;
            } else if (pType.equals(Object.class)) {
                args[i] = OBJECT_VAL;
            } else if (pType.equals(StringBuilder.class)) {
                args[i] = STRING_BUILDER_VAL;
            } else if (pType.equals(StringBuffer.class)) {
                args[i] = STRING_BUFFER_VAL;
            } else if (pType.equals(String.class)) {
                args[i] = STRING_VAL;
            } else if (pType.isArray() && pType.getComponentType().equals(char.class)) {
                args[i] = CHAR_ARRAY_VAL;
            } else if (pType.equals(CharSequence.class)) {
                // autoboxing instead of the deprecated new Integer(int) constructor
                charSequenceArgs.add(i);
            } else {
                throw new RuntimeException("Test Failed: not accounting for method call with parameter type of " + pType.getName() + " You must update the test.");
            }
        }
        /*
         * If there are no CharSequence args, we can simply invoke our method
         * and test it
         */
        if (charSequenceArgs.isEmpty()) {
            invokeMethod(aClass, m, args);
        } else {
            /*
             * Iterate through the different CharSequence types and invoke the
             * method for each type.
             */
            if (charSequenceArgs.size() > 1) {
                throw new RuntimeException("Test Failed: the test cannot handle a method with multiple CharSequence arguments. You must update the test to handle the method "
                        + m.toString());
            }
            for (int j = 0; j < CHAR_SEQUENCE_VAL.length; j++) {
                args[charSequenceArgs.get(0)] = CHAR_SEQUENCE_VAL[j];
                invokeMethod(aClass, m, args);
            }
        }
    }

    /** Carries the offending method when a synchronization check fails. */
    @SuppressWarnings("serial")
    private static class TestFailedException extends Exception {

        final Method m;

        public Method getMethod() {
            return m;
        }

        public TestFailedException(Method m) {
            this.m = m;
        }
    }

    /** Runnable that reflectively invokes the method under test. */
    static class InvokeTask implements Runnable {

        private final Method m;
        private final Object target;
        private final Object[] args;

        InvokeTask(Method m, Object target, Object... args) {
            this.m = m;
            this.target = target;
            this.args = args;
        }

        @Override
        public void run() {
            try {
                m.invoke(target, args);
            } catch (IllegalAccessException | IllegalArgumentException |
                    InvocationTargetException e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * isSynchronized tests whether the given method is synchronized or not by
     * invoking it in a thread and testing the thread state after starting the
     * thread
     * <p/>
     * @param m the method to test
     * @param target the object the method is executed on
     * @param args the arguments passed to the method
     * @return true iff the method is synchronized
     */
    private static boolean isSynchronized(Method m, Object target,
            Object... args) {
        Thread t = new Thread(new InvokeTask(m, target, args));
        Boolean isSynchronized = null;
        // Hold the target's monitor; a synchronized call from the spawned
        // thread must then park in BLOCKED, an unsynchronized one TERMINATEs.
        synchronized (target) {
            t.start();
            while (isSynchronized == null) {
                switch (t.getState()) {
                    case NEW:
                    case RUNNABLE:
                    case WAITING:
                    case TIMED_WAITING:
                        Thread.yield();
                        break;
                    case BLOCKED:
                        isSynchronized = true;
                        break;
                    case TERMINATED:
                        isSynchronized = false;
                        break;
                }
            }
        }
        try {
            t.join();
        } catch (InterruptedException ex) {
            ex.printStackTrace();
        }
        return isSynchronized;
    }

    /*
     * This class is used to test the synchronization tester above. It has a
     * method, should_pass, that is unsynchronized but calls a synchronized
     * method. It has another method, should_fail, which isn't synchronized and
     * doesn't call a synchronized method. The former should pass and the latter
     * should fail.
     */
    private static class MyTestClass {

        @SuppressWarnings("unused")
        public MyTestClass(String s) {
        }

        @SuppressWarnings("unused")
        public void should_pass() {
            // call sync method
            sync_shouldnt_be_tested();
        }

        @SuppressWarnings("unused")
        public void should_fail() {
        }

        public synchronized void sync_shouldnt_be_tested() {
        }
    }
}
/*
 * Copyright (C) 2004-2009 Jive Software. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package org.jivesoftware.openfire.user;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import org.jivesoftware.util.ClassUtils;
import org.jivesoftware.util.JiveGlobals;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Delegate UserProvider operations among up to three configurable provider implementation classes.
 *
 * @author Marc Seeger
 * @author Chris Neasbitt
 * @author Tom Evans
 */
public class HybridUserProvider implements UserProvider {

    private static final Logger Log = LoggerFactory.getLogger(HybridUserProvider.class);

    // Ordered provider chain: primary, then optional secondary and tertiary.
    private List<UserProvider> userproviders = null;

    public HybridUserProvider() {
        // Migrate user provider properties
        JiveGlobals.migrateProperty("hybridUserProvider.primaryProvider.className");
        JiveGlobals.migrateProperty("hybridUserProvider.secondaryProvider.className");
        JiveGlobals.migrateProperty("hybridUserProvider.tertiaryProvider.className");

        userproviders = new ArrayList<>();

        // Load primary, secondary, and tertiary user providers.
        String primaryClass = JiveGlobals.getProperty("hybridUserProvider.primaryProvider.className");
        if (primaryClass == null) {
            Log.error("A primary UserProvider must be specified via openfire.xml or the system properties");
            return;
        }
        // The primary provider is mandatory: give up entirely if it cannot be loaded.
        if (!addProvider("Primary", "primary", primaryClass, ". Users in this provider will be disabled.")) {
            return;
        }

        String secondaryClass = JiveGlobals.getProperty("hybridUserProvider.secondaryProvider.className");
        if (secondaryClass != null) {
            addProvider("Secondary", "secondary", secondaryClass, "");
        }

        String tertiaryClass = JiveGlobals.getProperty("hybridUserProvider.tertiaryProvider.className");
        if (tertiaryClass != null) {
            addProvider("Tertiary", "tertiary", tertiaryClass, "");
        }
    }

    /**
     * Instantiates the given UserProvider class and appends it to the chain.
     *
     * @param debugLabel capitalized tier name used in the debug log message
     * @param errorLabel lower-case tier name used in the error log message
     * @param className fully qualified class name of the provider
     * @param failureSuffix extra text appended to the error message on failure
     * @return true if the provider was loaded and added
     */
    private boolean addProvider(String debugLabel, String errorLabel, String className, String failureSuffix) {
        try {
            Class<?> c = ClassUtils.forName(className);
            UserProvider provider = (UserProvider) c.newInstance();
            userproviders.add(provider);
            Log.debug(debugLabel + " user provider: " + className);
            return true;
        } catch (Exception e) {
            Log.error("Unable to load " + errorLabel + " user provider: " + className + failureSuffix, e);
            return false;
        }
    }

    /**
     * Creates the user in the first writable provider; throws
     * UnsupportedOperationException if no writable provider accepted it.
     */
    @Override
    public User createUser(String username, String password, String name, String email) throws UserAlreadyExistsException {
        User returnvalue = null;
        // create the user (first writable provider wins)
        for (UserProvider provider : userproviders) {
            if (provider.isReadOnly()) {
                continue;
            }
            returnvalue = provider.createUser(username, password, name, email);
            if (returnvalue != null) {
                break;
            }
        }
        if (returnvalue == null) {
            throw new UnsupportedOperationException();
        }
        return returnvalue;
    }

    /**
     * Deletes the user from every writable provider; throws
     * UnsupportedOperationException when all providers are read-only.
     */
    @Override
    public void deleteUser(String username) {
        boolean isDeleted = false;
        for (UserProvider provider : userproviders) {
            if (provider.isReadOnly()) {
                continue;
            }
            provider.deleteUser(username);
            isDeleted = true;
        }
        // all providers are read-only
        if (!isDeleted) {
            throw new UnsupportedOperationException();
        }
    }

    /**
     * Searches every provider and merges the results. Providers that do not
     * support searching are skipped; if none supported it and nothing was
     * found, an UnsupportedOperationException is thrown.
     */
    @Override
    public Collection<User> findUsers(Set<String> fields, String query) throws UnsupportedOperationException {
        List<User> userList = new ArrayList<>();
        boolean isUnsupported = false;
        // NOTE: the original per-provider "validate search fields" loop was a
        // no-op (it only continued its own iteration) and has been removed;
        // each provider still rejects unknown fields itself.
        for (UserProvider provider : userproviders) {
            try {
                userList.addAll(provider.findUsers(fields, query));
            } catch (UnsupportedOperationException uoe) {
                Log.warn("UserProvider.findUsers is not supported by this UserProvider: " + provider.getClass().getName());
                isUnsupported = true;
            }
        }
        if (isUnsupported && userList.size() == 0) {
            throw new UnsupportedOperationException();
        }
        return userList;
    }

    /**
     * Paged search across all providers. The requested page
     * [startIndex, startIndex + numResults) is taken from the concatenation of
     * each provider's full result list, in provider order.
     */
    @Override
    public Collection<User> findUsers(Set<String> fields, String query, int startIndex, int numResults) throws UnsupportedOperationException {
        List<User> userList = new ArrayList<>();
        boolean isUnsupported = false;
        int totalMatchedUserCount = 0;
        for (UserProvider provider : userproviders) {
            try {
                Collection<User> providerResults = provider.findUsers(fields, query);
                // Bug fix: the offset into this provider's results must be
                // computed against the count *before* adding this provider's
                // matches; the old code subtracted the updated total, which
                // always produced 0 and broke paging across providers.
                int matchedBefore = totalMatchedUserCount;
                totalMatchedUserCount += providerResults.size();
                if (startIndex >= totalMatchedUserCount) {
                    // the requested page starts beyond this provider's results
                    continue;
                }
                int providerStartIndex = Math.max(0, startIndex - matchedBefore);
                List<User> providerList = providerResults instanceof List<?> ?
                        (List<User>) providerResults : new ArrayList<>(providerResults);
                // Bug fix: subList takes an exclusive *end index*, not a count,
                // and must be clamped to the list size to avoid
                // IndexOutOfBoundsException on short result sets.
                int providerEndIndex = Math.min(providerList.size(),
                        providerStartIndex + (numResults - userList.size()));
                userList.addAll(providerList.subList(providerStartIndex, providerEndIndex));
                if (userList.size() >= numResults) {
                    break;
                }
            } catch (UnsupportedOperationException uoe) {
                Log.warn("UserProvider.findUsers is not supported by this UserProvider: " + provider.getClass().getName());
                isUnsupported = true;
            }
        }
        if (isUnsupported && userList.size() == 0) {
            throw new UnsupportedOperationException();
        }
        return userList;
    }

    /** Union of the search fields supported by all providers. */
    @Override
    public Set<String> getSearchFields() throws UnsupportedOperationException {
        Set<String> returnvalue = new HashSet<>();
        for (UserProvider provider : userproviders) {
            returnvalue.addAll(provider.getSearchFields());
        }
        // no search fields were returned
        if (returnvalue.size() == 0) {
            throw new UnsupportedOperationException();
        }
        return returnvalue;
    }

    /** Sum of the user counts of all providers. */
    @Override
    public int getUserCount() {
        int count = 0;
        for (UserProvider provider : userproviders) {
            count += provider.getUserCount();
        }
        return count;
    }

    /** Concatenation of the usernames of all providers, in provider order. */
    @Override
    public Collection<String> getUsernames() {
        List<String> returnvalue = new ArrayList<>();
        for (UserProvider provider : userproviders) {
            returnvalue.addAll(provider.getUsernames());
        }
        return returnvalue;
    }

    /** Concatenation of the users of all providers, in provider order. */
    @Override
    public Collection<User> getUsers() {
        List<User> returnvalue = new ArrayList<>();
        for (UserProvider provider : userproviders) {
            returnvalue.addAll(provider.getUsers());
        }
        return returnvalue;
    }

    /**
     * Paged listing across all providers: the page is taken from the
     * concatenation of each provider's users, delegating the per-provider
     * offset/limit to the provider itself.
     */
    @Override
    public Collection<User> getUsers(int startIndex, int numResults) {
        List<User> userList = new ArrayList<>();
        int totalUserCount = 0;
        for (UserProvider provider : userproviders) {
            int providerStartIndex = Math.max((startIndex - totalUserCount), 0);
            totalUserCount += provider.getUserCount();
            if (startIndex >= totalUserCount) {
                continue;
            }
            int providerResultMax = numResults - userList.size();
            userList.addAll(provider.getUsers(providerStartIndex, providerResultMax));
            if (userList.size() >= numResults) {
                break;
            }
        }
        return userList;
    }

    /** The hybrid provider is writable as long as any member provider is. */
    @Override
    public boolean isReadOnly() {
        return false;
    }

    @Override
    public boolean isNameRequired() {
        return false;
    }

    @Override
    public boolean isEmailRequired() {
        return false;
    }

    /** Returns the user from the first provider that knows the username. */
    @Override
    public User loadUser(String username) throws UserNotFoundException {
        for (UserProvider provider : userproviders) {
            try {
                return provider.loadUser(username);
            } catch (UserNotFoundException unfe) {
                if (Log.isDebugEnabled()) {
                    Log.debug("User " + username + " not found by UserProvider " + provider.getClass().getName());
                }
            }
        }
        //if we get this far, no provider was able to load the user
        throw new UserNotFoundException();
    }

    /** Sets the creation date via the first provider that both supports it and knows the user. */
    @Override
    public void setCreationDate(String username, Date creationDate) throws UserNotFoundException {
        boolean isUnsupported = false;
        for (UserProvider provider : userproviders) {
            try {
                provider.setCreationDate(username, creationDate);
                return;
            } catch (UnsupportedOperationException uoe) {
                Log.warn("UserProvider.setCreationDate is not supported by this UserProvider: " + provider.getClass().getName());
                isUnsupported = true;
            } catch (UserNotFoundException unfe) {
                if (Log.isDebugEnabled()) {
                    Log.debug("User " + username + " not found by UserProvider " + provider.getClass().getName());
                }
            }
        }
        if (isUnsupported) {
            throw new UnsupportedOperationException();
        } else {
            throw new UserNotFoundException();
        }
    }

    /** Sets the email via the first provider that both supports it and knows the user. */
    @Override
    public void setEmail(String username, String email) throws UserNotFoundException {
        boolean isUnsupported = false;
        for (UserProvider provider : userproviders) {
            try {
                provider.setEmail(username, email);
                return;
            } catch (UnsupportedOperationException uoe) {
                Log.warn("UserProvider.setEmail is not supported by this UserProvider: " + provider.getClass().getName());
                isUnsupported = true;
            } catch (UserNotFoundException unfe) {
                if (Log.isDebugEnabled()) {
                    Log.debug("User " + username + " not found by UserProvider " + provider.getClass().getName());
                }
            }
        }
        if (isUnsupported) {
            throw new UnsupportedOperationException();
        } else {
            throw new UserNotFoundException();
        }
    }

    /** Sets the modification date via the first provider that both supports it and knows the user. */
    @Override
    public void setModificationDate(String username, Date modificationDate) throws UserNotFoundException {
        boolean isUnsupported = false;
        for (UserProvider provider : userproviders) {
            try {
                provider.setModificationDate(username, modificationDate);
                return;
            } catch (UnsupportedOperationException uoe) {
                Log.warn("UserProvider.setModificationDate is not supported by this UserProvider: " + provider.getClass().getName());
                isUnsupported = true;
            } catch (UserNotFoundException unfe) {
                if (Log.isDebugEnabled()) {
                    Log.debug("User " + username + " not found by UserProvider " + provider.getClass().getName());
                }
            }
        }
        if (isUnsupported) {
            throw new UnsupportedOperationException();
        } else {
            throw new UserNotFoundException();
        }
    }

    /** Sets the display name via the first provider that both supports it and knows the user. */
    @Override
    public void setName(String username, String name) throws UserNotFoundException {
        boolean isUnsupported = false;
        for (UserProvider provider : userproviders) {
            try {
                provider.setName(username, name);
                return;
            } catch (UnsupportedOperationException uoe) {
                Log.warn("UserProvider.setName is not supported by this UserProvider: " + provider.getClass().getName());
                isUnsupported = true;
            } catch (UserNotFoundException unfe) {
                if (Log.isDebugEnabled()) {
                    Log.debug("User " + username + " not found by UserProvider " + provider.getClass().getName());
                }
            }
        }
        if (isUnsupported) {
            throw new UnsupportedOperationException();
        } else {
            throw new UserNotFoundException();
        }
    }
}
/******************************************************************************* * Copyright (c) 2013, SAP AG * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * - Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * - Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * - Neither the name of the SAP AG nor the names of its contributors may * be used to endorse or promote products derived from this software * without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF * THE POSSIBILITY OF SUCH DAMAGE. 
******************************************************************************/ package com.sap.research.primelife.ds.pdp.evaluation; import java.io.FileNotFoundException; import java.util.ArrayList; import java.util.List; import javax.xml.bind.JAXBException; import org.herasaf.xacml.core.ProcessingException; import org.herasaf.xacml.core.SyntaxException; import org.herasaf.xacml.core.api.PDP; import org.herasaf.xacml.core.api.PolicyRepository; import org.herasaf.xacml.core.context.RequestCtx; import org.herasaf.xacml.core.context.RequestInformation; import org.herasaf.xacml.core.context.ResponseCtx; import org.herasaf.xacml.core.context.impl.DecisionType; import org.herasaf.xacml.core.context.impl.RequestType; import org.herasaf.xacml.core.policy.Evaluatable; import org.herasaf.xacml.core.policy.MissingAttributeException; import org.herasaf.xacml.core.simplePDP.SimplePDPFactory; import org.herasaf.xacml.core.targetMatcher.TargetMatcher; import org.herasaf.xacml.core.targetMatcher.impl.TargetMatcherImpl; import com.sap.research.primelife.exceptions.WritingException; import com.sap.research.primelife.utils.ConverterFunctions; import eu.primelife.ppl.policy.impl.PolicySetType; import eu.primelife.ppl.policy.impl.PolicyType; import eu.primelife.ppl.policy.impl.RuleType; import eu.primelife.ppl.policy.xacml.impl.PolicySetTypePolicySetOrPolicyOrPolicySetIdReferenceItem; import eu.primelife.ppl.policy.xacml.impl.PolicyTypeCombinerParametersOrRuleCombinerParametersOrVariableDefinitionItem; import eu.primelife.ppl.policy.xacml.impl.TargetType; /** * The AccessControlUtils contain methods which use HERAS. * * */ public class AccessControlUtils { private static TargetMatcherImpl targetMatcher = new TargetMatcherImpl(); /** * Access control: * Check the target elements of the policySetOrPolicy with HERAS against the request. 
* * * @param policySetOrPolicy - a list of PolicyType or PolicySetType objects from PPL * @param request - the XACML request * @return the decision (PERMIT, DENY, INDETERMINATE, NOT_APPLICABLE) * @throws WritingException * @throws SyntaxException * @throws com.sap.research.primelife.exceptions.SyntaxException * @throws JAXBException */ public DecisionType checkAccess(List<Object> policySetOrPolicy, RequestType request) throws WritingException, SyntaxException, com.sap.research.primelife.exceptions.SyntaxException, JAXBException{ // we evaluate request against policy repository associated with the PII SimplePDPFactory.useDefaultInitializers(); PDP simplePDP = SimplePDPFactory.getSimplePDP(); PolicyRepository repo = simplePDP.getPolicyRepository(); // initialize policy repository for (Object obj : policySetOrPolicy) { Evaluatable evaluatable = null; if (obj instanceof PolicySetType) { evaluatable = ConverterFunctions.convertToHerasPolicySet( (PolicySetType) obj); } else if (obj instanceof PolicyType) { evaluatable = ConverterFunctions.convertToHerasPolicy( (PolicyType) obj); } else if (obj instanceof eu.primelife.ppl.policy.xacml.impl.PolicyType) { evaluatable = ConverterFunctions.convertToHerasPolicy((eu.primelife.ppl.policy.xacml.impl.PolicyType) obj); } repo.deploy(evaluatable); } ResponseCtx responseCtx = simplePDP.evaluate(new RequestCtx(request)); DecisionType decision = responseCtx.getResponse().getResults().get(0).getDecision(); return decision; } /** * Finds the applicable Rule element; for each PolicySet, Policy and Rule * element it will match its Target against the request which is composed by: * subject, resource and action. 
* * @param policyList list with PolicySet or Policy element * @return applicable Rule element * @throws SyntaxException * @throws com.sap.research.primelife.exceptions.SyntaxException * @throws WritingException * @throws FileNotFoundException * @throws MissingAttributeException * @throws ProcessingException * @throws JAXBException */ public RuleType findApplicableRule(List<Object> policyList, RequestType request) throws FileNotFoundException, WritingException, com.sap.research.primelife.exceptions.SyntaxException, SyntaxException, ProcessingException, MissingAttributeException, JAXBException { RuleType rule = null; for (Object obj : policyList) { if (obj instanceof PolicySetType) { PolicySetType policySet = (PolicySetType) obj; if (matchTarget(request, policySet.getTarget())) { List<Object> childrenPolicyList = new ArrayList<Object>(); for (PolicySetTypePolicySetOrPolicyOrPolicySetIdReferenceItem item : policySet.getPolicySetOrPolicyOrPolicySetIdReferenceItems()) { if (item.getItemPolicySet() instanceof PolicySetType) { PolicySetType policySetType = (PolicySetType) item.getItemPolicySet(); childrenPolicyList.add(policySetType); } else if (item.getItemPolicy() instanceof PolicyType) { PolicyType policyType = (PolicyType) item.getItemPolicy(); childrenPolicyList.add(policyType); } RuleType ruleTemp = findApplicableRule(childrenPolicyList, request); if (ruleTemp != null) rule = ruleTemp; } } } else if (obj instanceof PolicyType) { PolicyType policyType = (PolicyType) obj; if (matchTarget(request, policyType.getTarget())) { for (PolicyTypeCombinerParametersOrRuleCombinerParametersOrVariableDefinitionItem item : policyType.getCombinerParametersOrRuleCombinerParametersOrVariableDefinitionItems()) { if (item.getItemRule() instanceof RuleType) { RuleType ruleType = (RuleType) item.getItemRule(); if (matchTarget(request, ruleType.getTarget())) rule = ruleType; } } } }/*else if (obj instanceof eu.primelife.ppl.policy.xacml.impl.PolicyType) { 
eu.primelife.ppl.policy.xacml.impl.PolicyType policyType = (eu.primelife.ppl.policy.xacml.impl.PolicyType) obj; if(matchTarget(request, policyType.getTarget())){ for (PolicyTypeCombinerParametersOrRuleCombinerParametersOrVariableDefinitionItem item : policyType.getCombinerParametersOrRuleCombinerParametersOrVariableDefinitionItems()) { if (item.getItemRule() instanceof eu.primelife.ppl.policy.xacml.impl.RuleType) { eu.primelife.ppl.policy.xacml.impl.RuleType ruleType = (eu.primelife.ppl.policy.xacml.impl.RuleType) item.getItemRule(); if (matchTarget(request, ruleType.getTarget())) rule = ruleType; } } } }*/ } return rule; } /** * Converts Target element from PPL schema to HERAS schema and matches * it against the request using HERAS {@link TargetMatcher}. * * @param request HERAS request * @param target Target element in PPL schema format * * @return result of matching (true if the target is applicable) * @throws JAXBException */ private static boolean matchTarget(RequestType request, TargetType target) throws WritingException, com.sap.research.primelife.exceptions.SyntaxException, FileNotFoundException, SyntaxException, ProcessingException, MissingAttributeException, JAXBException { //converting the target from PPL TargetType to HERAS TargetType org.herasaf.xacml.core.policy.impl.TargetType herasTarget = ConverterFunctions.fromPPLTargetToHerasTarget(target); return targetMatcher.match(request, herasTarget, new RequestInformation(null)); } }
/* * Copyright (C) 2007 The Guava Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.common.collect; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.collect.Maps.immutableEntry; import static com.google.common.collect.Sets.newHashSet; import static com.google.common.collect.testing.Helpers.nefariousMapEntry; import static com.google.common.collect.testing.IteratorFeature.MODIFIABLE; import static com.google.common.truth.Truth.assertThat; import static java.util.Arrays.asList; import com.google.common.annotations.GwtCompatible; import com.google.common.annotations.GwtIncompatible; import com.google.common.base.Function; import com.google.common.base.Functions; import com.google.common.base.Predicates; import com.google.common.base.Supplier; import com.google.common.collect.Maps.EntryTransformer; import com.google.common.collect.testing.IteratorTester; import com.google.common.collect.testing.google.UnmodifiableCollectionTests; import com.google.common.testing.NullPointerTester; import com.google.common.testing.SerializableTester; import junit.framework.TestCase; import java.io.Serializable; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Queue; 
import java.util.RandomAccess; import java.util.Set; import java.util.SortedMap; import java.util.SortedSet; import java.util.TreeSet; import javax.annotation.Nullable; /** * Unit test for {@code Multimaps}. * * @author Jared Levy */ @GwtCompatible(emulated = true) public class MultimapsTest extends TestCase { private static final Comparator<Integer> INT_COMPARATOR = Ordering.<Integer>natural().reverse().nullsFirst(); private static final EntryTransformer<Object, Object, Object> ALWAYS_NULL = new EntryTransformer<Object, Object, Object>() { @Override public Object transformEntry(Object k, Object v1) { return null; } }; @SuppressWarnings("deprecation") public void testUnmodifiableListMultimapShortCircuit() { ListMultimap<String, Integer> mod = ArrayListMultimap.create(); ListMultimap<String, Integer> unmod = Multimaps.unmodifiableListMultimap(mod); assertNotSame(mod, unmod); assertSame(unmod, Multimaps.unmodifiableListMultimap(unmod)); ImmutableListMultimap<String, Integer> immutable = ImmutableListMultimap.of("a", 1, "b", 2, "a", 3); assertSame(immutable, Multimaps.unmodifiableListMultimap(immutable)); assertSame( immutable, Multimaps.unmodifiableListMultimap((ListMultimap<String, Integer>) immutable)); } @SuppressWarnings("deprecation") public void testUnmodifiableSetMultimapShortCircuit() { SetMultimap<String, Integer> mod = HashMultimap.create(); SetMultimap<String, Integer> unmod = Multimaps.unmodifiableSetMultimap(mod); assertNotSame(mod, unmod); assertSame(unmod, Multimaps.unmodifiableSetMultimap(unmod)); ImmutableSetMultimap<String, Integer> immutable = ImmutableSetMultimap.of("a", 1, "b", 2, "a", 3); assertSame(immutable, Multimaps.unmodifiableSetMultimap(immutable)); assertSame( immutable, Multimaps.unmodifiableSetMultimap((SetMultimap<String, Integer>) immutable)); } @SuppressWarnings("deprecation") public void testUnmodifiableMultimapShortCircuit() { Multimap<String, Integer> mod = HashMultimap.create(); Multimap<String, Integer> unmod = 
Multimaps.unmodifiableMultimap(mod); assertNotSame(mod, unmod); assertSame(unmod, Multimaps.unmodifiableMultimap(unmod)); ImmutableMultimap<String, Integer> immutable = ImmutableMultimap.of("a", 1, "b", 2, "a", 3); assertSame(immutable, Multimaps.unmodifiableMultimap(immutable)); assertSame(immutable, Multimaps.unmodifiableMultimap((Multimap<String, Integer>) immutable)); } @GwtIncompatible("slow (~10s)") public void testUnmodifiableArrayListMultimap() { checkUnmodifiableMultimap( ArrayListMultimap.<String, Integer>create(), true); } @GwtIncompatible("SerializableTester") public void testSerializingUnmodifiableArrayListMultimap() { Multimap<String, Integer> unmodifiable = prepareUnmodifiableTests(ArrayListMultimap.<String, Integer>create(), true, null, null); SerializableTester.reserializeAndAssert(unmodifiable); } public void testUnmodifiableArrayListMultimapRandomAccess() { ListMultimap<String, Integer> delegate = ArrayListMultimap.create(); delegate.put("foo", 1); delegate.put("foo", 3); ListMultimap<String, Integer> multimap = Multimaps.unmodifiableListMultimap(delegate); assertTrue(multimap.get("foo") instanceof RandomAccess); assertTrue(multimap.get("bar") instanceof RandomAccess); } public void testUnmodifiableLinkedListMultimapRandomAccess() { ListMultimap<String, Integer> delegate = LinkedListMultimap.create(); delegate.put("foo", 1); delegate.put("foo", 3); ListMultimap<String, Integer> multimap = Multimaps.unmodifiableListMultimap(delegate); assertFalse(multimap.get("foo") instanceof RandomAccess); assertFalse(multimap.get("bar") instanceof RandomAccess); } @GwtIncompatible("slow (~10s)") public void testUnmodifiableHashMultimap() { checkUnmodifiableMultimap(HashMultimap.<String, Integer>create(), false); } @GwtIncompatible("SerializableTester") public void testSerializingUnmodifiableHashMultimap() { Multimap<String, Integer> unmodifiable = prepareUnmodifiableTests(HashMultimap.<String, Integer>create(), false, null, null); 
SerializableTester.reserializeAndAssert(unmodifiable); } @GwtIncompatible("slow (~10s)") public void testUnmodifiableTreeMultimap() { checkUnmodifiableMultimap( TreeMultimap.<String, Integer>create(), false, "null", 42); } @GwtIncompatible("SerializableTester") public void testSerializingUnmodifiableTreeMultimap() { Multimap<String, Integer> unmodifiable = prepareUnmodifiableTests(TreeMultimap.<String, Integer>create(), false, "null", 42); SerializableTester.reserializeAndAssert(unmodifiable); } @GwtIncompatible("slow (~10s)") public void testUnmodifiableSynchronizedArrayListMultimap() { checkUnmodifiableMultimap(Multimaps.synchronizedListMultimap( ArrayListMultimap.<String, Integer>create()), true); } @GwtIncompatible("SerializableTester") public void testSerializingUnmodifiableSynchronizedArrayListMultimap() { Multimap<String, Integer> unmodifiable = prepareUnmodifiableTests(Multimaps.synchronizedListMultimap( ArrayListMultimap.<String, Integer>create()), true, null, null); SerializableTester.reserializeAndAssert(unmodifiable); } @GwtIncompatible("slow (~10s)") public void testUnmodifiableSynchronizedHashMultimap() { checkUnmodifiableMultimap(Multimaps.synchronizedSetMultimap( HashMultimap.<String, Integer>create()), false); } @GwtIncompatible("SerializableTester") public void testSerializingUnmodifiableSynchronizedHashMultimap() { Multimap<String, Integer> unmodifiable = prepareUnmodifiableTests(Multimaps.synchronizedSetMultimap( HashMultimap.<String, Integer>create()), false, null, null); SerializableTester.reserializeAndAssert(unmodifiable); } @GwtIncompatible("slow (~10s)") public void testUnmodifiableSynchronizedTreeMultimap() { TreeMultimap<String, Integer> delegate = TreeMultimap.create(Ordering.<String>natural(), INT_COMPARATOR); SortedSetMultimap<String, Integer> multimap = Multimaps.synchronizedSortedSetMultimap(delegate); checkUnmodifiableMultimap(multimap, false, "null", 42); assertSame(INT_COMPARATOR, multimap.valueComparator()); } 
@GwtIncompatible("SerializableTester") public void testSerializingUnmodifiableSynchronizedTreeMultimap() { TreeMultimap<String, Integer> delegate = TreeMultimap.create(Ordering.<String>natural(), INT_COMPARATOR); SortedSetMultimap<String, Integer> multimap = Multimaps.synchronizedSortedSetMultimap(delegate); Multimap<String, Integer> unmodifiable = prepareUnmodifiableTests(multimap, false, "null", 42); SerializableTester.reserializeAndAssert(unmodifiable); assertSame(INT_COMPARATOR, multimap.valueComparator()); } public void testUnmodifiableMultimapIsView() { Multimap<String, Integer> mod = HashMultimap.create(); Multimap<String, Integer> unmod = Multimaps.unmodifiableMultimap(mod); assertEquals(mod, unmod); mod.put("foo", 1); assertTrue(unmod.containsEntry("foo", 1)); assertEquals(mod, unmod); } @SuppressWarnings("unchecked") public void testUnmodifiableMultimapEntries() { Multimap<String, Integer> mod = HashMultimap.create(); Multimap<String, Integer> unmod = Multimaps.unmodifiableMultimap(mod); mod.put("foo", 1); Entry<String, Integer> entry = unmod.entries().iterator().next(); try { entry.setValue(2); fail("UnsupportedOperationException expected"); } catch (UnsupportedOperationException expected) {} entry = (Entry<String, Integer>) unmod.entries().toArray()[0]; try { entry.setValue(2); fail("UnsupportedOperationException expected"); } catch (UnsupportedOperationException expected) {} Entry<String, Integer>[] array = (Entry<String, Integer>[]) new Entry<?, ?>[2]; assertSame(array, unmod.entries().toArray(array)); try { array[0].setValue(2); fail("UnsupportedOperationException expected"); } catch (UnsupportedOperationException expected) {} assertFalse(unmod.entries().contains(nefariousMapEntry("pwnd", 2))); assertFalse(unmod.keys().contains("pwnd")); } /** * The supplied multimap will be mutated and an unmodifiable instance used * in its stead. The multimap must support null keys and values. 
*/ private static void checkUnmodifiableMultimap( Multimap<String, Integer> multimap, boolean permitsDuplicates) { checkUnmodifiableMultimap(multimap, permitsDuplicates, null, null); } /** * The supplied multimap will be mutated and an unmodifiable instance used * in its stead. If the multimap does not support null keys or values, * alternatives may be specified for tests involving nulls. */ private static void checkUnmodifiableMultimap( Multimap<String, Integer> multimap, boolean permitsDuplicates, @Nullable String nullKey, @Nullable Integer nullValue) { Multimap<String, Integer> unmodifiable = prepareUnmodifiableTests(multimap, permitsDuplicates, nullKey, nullValue); UnmodifiableCollectionTests.assertMultimapIsUnmodifiable( unmodifiable, "test", 123); assertUnmodifiableIterableInTandem( unmodifiable.keys(), multimap.keys()); assertUnmodifiableIterableInTandem( unmodifiable.keySet(), multimap.keySet()); assertUnmodifiableIterableInTandem( unmodifiable.entries(), multimap.entries()); assertUnmodifiableIterableInTandem( unmodifiable.asMap().entrySet(), multimap.asMap().entrySet()); assertEquals(multimap.toString(), unmodifiable.toString()); assertEquals(multimap.hashCode(), unmodifiable.hashCode()); assertEquals(multimap, unmodifiable); assertThat(unmodifiable.asMap().get("bar")).has().exactly(5, -1); assertNull(unmodifiable.asMap().get("missing")); assertFalse(unmodifiable.entries() instanceof Serializable); } /** * Prepares the multimap for unmodifiable tests, returning an unmodifiable view * of the map. 
*/ private static Multimap<String, Integer> prepareUnmodifiableTests( Multimap<String, Integer> multimap, boolean permitsDuplicates, @Nullable String nullKey, @Nullable Integer nullValue) { multimap.clear(); multimap.put("foo", 1); multimap.put("foo", 2); multimap.put("foo", 3); multimap.put("bar", 5); multimap.put("bar", -1); multimap.put(nullKey, nullValue); multimap.put("foo", nullValue); multimap.put(nullKey, 5); multimap.put("foo", 2); if (permitsDuplicates) { assertEquals(9, multimap.size()); } else { assertEquals(8, multimap.size()); } Multimap<String, Integer> unmodifiable; if (multimap instanceof SortedSetMultimap) { unmodifiable = Multimaps.unmodifiableSortedSetMultimap( (SortedSetMultimap<String, Integer>) multimap); } else if (multimap instanceof SetMultimap) { unmodifiable = Multimaps.unmodifiableSetMultimap( (SetMultimap<String, Integer>) multimap); } else if (multimap instanceof ListMultimap) { unmodifiable = Multimaps.unmodifiableListMultimap( (ListMultimap<String, Integer>) multimap); } else { unmodifiable = Multimaps.unmodifiableMultimap(multimap); } return unmodifiable; } private static <T> void assertUnmodifiableIterableInTandem( Iterable<T> unmodifiable, Iterable<T> modifiable) { UnmodifiableCollectionTests.assertIteratorIsUnmodifiable( unmodifiable.iterator()); UnmodifiableCollectionTests.assertIteratorsInOrder( unmodifiable.iterator(), modifiable.iterator()); } public void testInvertFrom() { ImmutableMultimap<Integer, String> empty = ImmutableMultimap.of(); // typical usage example - sad that ArrayListMultimap.create() won't work Multimap<String, Integer> multimap = Multimaps.invertFrom(empty, ArrayListMultimap.<String, Integer>create()); assertTrue(multimap.isEmpty()); ImmutableMultimap<Integer, String> single = new ImmutableMultimap.Builder<Integer, String>() .put(1, "one") .put(2, "two") .build(); // copy into existing multimap assertSame(multimap, Multimaps.invertFrom(single, multimap)); ImmutableMultimap<String, Integer> expected = new 
ImmutableMultimap.Builder<String, Integer>() .put("one", 1) .put("two", 2) .build(); assertEquals(expected, multimap); } public void testAsMap_multimap() { Multimap<String, Integer> multimap = Multimaps.newMultimap( new HashMap<String, Collection<Integer>>(), new QueueSupplier()); Map<String, Collection<Integer>> map = Multimaps.asMap(multimap); assertSame(multimap.asMap(), map); } public void testAsMap_listMultimap() { ListMultimap<String, Integer> listMultimap = ArrayListMultimap.create(); Map<String, List<Integer>> map = Multimaps.asMap(listMultimap); assertSame(listMultimap.asMap(), map); } public void testAsMap_setMultimap() { SetMultimap<String, Integer> setMultimap = LinkedHashMultimap.create(); Map<String, Set<Integer>> map = Multimaps.asMap(setMultimap); assertSame(setMultimap.asMap(), map); } public void testAsMap_sortedSetMultimap() { SortedSetMultimap<String, Integer> sortedSetMultimap = TreeMultimap.create(); Map<String, SortedSet<Integer>> map = Multimaps.asMap(sortedSetMultimap); assertSame(sortedSetMultimap.asMap(), map); } public void testForMap() { Map<String, Integer> map = Maps.newHashMap(); map.put("foo", 1); map.put("bar", 2); Multimap<String, Integer> multimap = HashMultimap.create(); multimap.put("foo", 1); multimap.put("bar", 2); Multimap<String, Integer> multimapView = Multimaps.forMap(map); assertTrue(multimap.equals(multimapView)); assertTrue(multimapView.equals(multimap)); assertTrue(multimapView.equals(multimapView)); assertFalse(multimapView.equals(map)); Multimap<String, Integer> multimap2 = HashMultimap.create(); multimap2.put("foo", 1); assertFalse(multimapView.equals(multimap2)); multimap2.put("bar", 1); assertFalse(multimapView.equals(multimap2)); ListMultimap<String, Integer> listMultimap = new ImmutableListMultimap.Builder<String, Integer>() .put("foo", 1).put("bar", 2).build(); assertFalse("SetMultimap equals ListMultimap", multimapView.equals(listMultimap)); assertEquals(multimap.toString(), multimapView.toString()); 
assertEquals(multimap.hashCode(), multimapView.hashCode()); assertEquals(multimap.size(), multimapView.size()); assertTrue(multimapView.containsKey("foo")); assertTrue(multimapView.containsValue(1)); assertTrue(multimapView.containsEntry("bar", 2)); assertEquals(Collections.singleton(1), multimapView.get("foo")); assertEquals(Collections.singleton(2), multimapView.get("bar")); try { multimapView.put("baz", 3); fail("UnsupportedOperationException expected"); } catch (UnsupportedOperationException expected) {} try { multimapView.putAll("baz", Collections.singleton(3)); fail("UnsupportedOperationException expected"); } catch (UnsupportedOperationException expected) {} try { multimapView.putAll(multimap); fail("UnsupportedOperationException expected"); } catch (UnsupportedOperationException expected) {} try { multimapView.replaceValues("foo", Collections.<Integer>emptySet()); fail("UnsupportedOperationException expected"); } catch (UnsupportedOperationException expected) {} multimapView.remove("bar", 2); assertFalse(multimapView.containsKey("bar")); assertFalse(map.containsKey("bar")); assertEquals(map.keySet(), multimapView.keySet()); assertEquals(map.keySet(), multimapView.keys().elementSet()); assertThat(multimapView.keys()).has().item("foo"); assertThat(multimapView.values()).has().item(1); assertThat(multimapView.entries()).has().item( Maps.immutableEntry("foo", 1)); assertThat(multimapView.asMap().entrySet()).has().item( Maps.immutableEntry( "foo", (Collection<Integer>) Collections.singleton(1))); multimapView.clear(); assertFalse(multimapView.containsKey("foo")); assertFalse(map.containsKey("foo")); assertTrue(map.isEmpty()); assertTrue(multimapView.isEmpty()); multimap.clear(); assertEquals(multimap.toString(), multimapView.toString()); assertEquals(multimap.hashCode(), multimapView.hashCode()); assertEquals(multimap.size(), multimapView.size()); assertEquals(multimapView, ArrayListMultimap.create()); } @GwtIncompatible("SerializableTester") public void 
testForMapSerialization() { Map<String, Integer> map = Maps.newHashMap(); map.put("foo", 1); map.put("bar", 2); Multimap<String, Integer> multimapView = Multimaps.forMap(map); SerializableTester.reserializeAndAssert(multimapView); } public void testForMapRemoveAll() { Map<String, Integer> map = Maps.newHashMap(); map.put("foo", 1); map.put("bar", 2); map.put("cow", 3); Multimap<String, Integer> multimap = Multimaps.forMap(map); assertEquals(3, multimap.size()); assertEquals(Collections.emptySet(), multimap.removeAll("dog")); assertEquals(3, multimap.size()); assertTrue(multimap.containsKey("bar")); assertEquals(Collections.singleton(2), multimap.removeAll("bar")); assertEquals(2, multimap.size()); assertFalse(multimap.containsKey("bar")); } public void testForMapAsMap() { Map<String, Integer> map = Maps.newHashMap(); map.put("foo", 1); map.put("bar", 2); Map<String, Collection<Integer>> asMap = Multimaps.forMap(map).asMap(); assertEquals(Collections.singleton(1), asMap.get("foo")); assertNull(asMap.get("cow")); assertTrue(asMap.containsKey("foo")); assertFalse(asMap.containsKey("cow")); Set<Entry<String, Collection<Integer>>> entries = asMap.entrySet(); assertFalse(entries.contains(4.5)); assertFalse(entries.remove(4.5)); assertFalse(entries.contains(Maps.immutableEntry("foo", Collections.singletonList(1)))); assertFalse(entries.remove(Maps.immutableEntry("foo", Collections.singletonList(1)))); assertFalse(entries.contains(Maps.immutableEntry("foo", Sets.newLinkedHashSet(asList(1, 2))))); assertFalse(entries.remove(Maps.immutableEntry("foo", Sets.newLinkedHashSet(asList(1, 2))))); assertFalse(entries.contains(Maps.immutableEntry("foo", Collections.singleton(2)))); assertFalse(entries.remove(Maps.immutableEntry("foo", Collections.singleton(2)))); assertTrue(map.containsKey("foo")); assertTrue(entries.contains(Maps.immutableEntry("foo", Collections.singleton(1)))); assertTrue(entries.remove(Maps.immutableEntry("foo", Collections.singleton(1)))); 
assertFalse(map.containsKey("foo")); } public void testForMapGetIteration() { IteratorTester<Integer> tester = new IteratorTester<Integer>(4, MODIFIABLE, newHashSet(1), IteratorTester.KnownOrder.KNOWN_ORDER) { private Multimap<String, Integer> multimap; @Override protected Iterator<Integer> newTargetIterator() { Map<String, Integer> map = Maps.newHashMap(); map.put("foo", 1); map.put("bar", 2); multimap = Multimaps.forMap(map); return multimap.get("foo").iterator(); } @Override protected void verify(List<Integer> elements) { assertEquals(newHashSet(elements), multimap.get("foo")); } }; tester.test(); } private enum Color {BLUE, RED, YELLOW, GREEN} private abstract static class CountingSupplier<E> implements Supplier<E>, Serializable { int count; abstract E getImpl(); @Override public E get() { count++; return getImpl(); } } private static class QueueSupplier extends CountingSupplier<Queue<Integer>> { @Override public Queue<Integer> getImpl() { return new LinkedList<Integer>(); } private static final long serialVersionUID = 0; } public void testNewMultimapWithCollectionRejectingNegativeElements() { CountingSupplier<Set<Integer>> factory = new SetSupplier() { @Override public Set<Integer> getImpl() { final Set<Integer> backing = super.getImpl(); return new ForwardingSet<Integer>() { @Override protected Set<Integer> delegate() { return backing; } @Override public boolean add(Integer element) { checkArgument(element >= 0); return super.add(element); } @Override public boolean addAll(Collection<? 
extends Integer> collection) { return standardAddAll(collection); } }; } }; Map<Color, Collection<Integer>> map = Maps.newEnumMap(Color.class); Multimap<Color, Integer> multimap = Multimaps.newMultimap(map, factory); try { multimap.put(Color.BLUE, -1); fail("Expected IllegalArgumentException"); } catch (IllegalArgumentException expected) { // expected } multimap.put(Color.RED, 1); multimap.put(Color.BLUE, 2); try { multimap.put(Color.GREEN, -1); fail("Expected IllegalArgumentException"); } catch (IllegalArgumentException expected) { // expected } assertThat(multimap.entries()).has().exactly( Maps.immutableEntry(Color.RED, 1), Maps.immutableEntry(Color.BLUE, 2)); } public void testNewMultimap() { // The ubiquitous EnumArrayBlockingQueueMultimap CountingSupplier<Queue<Integer>> factory = new QueueSupplier(); Map<Color, Collection<Integer>> map = Maps.newEnumMap(Color.class); Multimap<Color, Integer> multimap = Multimaps.newMultimap(map, factory); assertEquals(0, factory.count); multimap.putAll(Color.BLUE, asList(3, 1, 4)); assertEquals(1, factory.count); multimap.putAll(Color.RED, asList(2, 7, 1, 8)); assertEquals(2, factory.count); assertEquals("[3, 1, 4]", multimap.get(Color.BLUE).toString()); Multimap<Color, Integer> ummodifiable = Multimaps.unmodifiableMultimap(multimap); assertEquals("[3, 1, 4]", ummodifiable.get(Color.BLUE).toString()); Collection<Integer> collection = multimap.get(Color.BLUE); assertEquals(collection, collection); assertFalse(multimap.keySet() instanceof SortedSet); assertFalse(multimap.asMap() instanceof SortedMap); } @GwtIncompatible("SerializableTester") public void testNewMultimapSerialization() { CountingSupplier<Queue<Integer>> factory = new QueueSupplier(); Map<Color, Collection<Integer>> map = Maps.newEnumMap(Color.class); Multimap<Color, Integer> multimap = Multimaps.newMultimap(map, factory); multimap.putAll(Color.BLUE, asList(3, 1, 4)); multimap.putAll(Color.RED, asList(2, 7, 1, 8)); 
SerializableTester.reserializeAndAssert(multimap); } private static class ListSupplier extends CountingSupplier<LinkedList<Integer>> { @Override public LinkedList<Integer> getImpl() { return new LinkedList<Integer>(); } private static final long serialVersionUID = 0; } public void testNewListMultimap() { CountingSupplier<LinkedList<Integer>> factory = new ListSupplier(); Map<Color, Collection<Integer>> map = Maps.newTreeMap(); ListMultimap<Color, Integer> multimap = Multimaps.newListMultimap(map, factory); assertEquals(0, factory.count); multimap.putAll(Color.BLUE, asList(3, 1, 4, 1)); assertEquals(1, factory.count); multimap.putAll(Color.RED, asList(2, 7, 1, 8)); assertEquals(2, factory.count); assertEquals("{BLUE=[3, 1, 4, 1], RED=[2, 7, 1, 8]}", multimap.toString()); assertFalse(multimap.get(Color.BLUE) instanceof RandomAccess); assertTrue(multimap.keySet() instanceof SortedSet); assertTrue(multimap.asMap() instanceof SortedMap); } @GwtIncompatible("SerializableTester") public void testNewListMultimapSerialization() { CountingSupplier<LinkedList<Integer>> factory = new ListSupplier(); Map<Color, Collection<Integer>> map = Maps.newTreeMap(); ListMultimap<Color, Integer> multimap = Multimaps.newListMultimap(map, factory); multimap.putAll(Color.BLUE, asList(3, 1, 4, 1)); multimap.putAll(Color.RED, asList(2, 7, 1, 8)); SerializableTester.reserializeAndAssert(multimap); } private static class SetSupplier extends CountingSupplier<Set<Integer>> { @Override public Set<Integer> getImpl() { return new HashSet<Integer>(4); } private static final long serialVersionUID = 0; } public void testNewSetMultimap() { CountingSupplier<Set<Integer>> factory = new SetSupplier(); Map<Color, Collection<Integer>> map = Maps.newHashMap(); SetMultimap<Color, Integer> multimap = Multimaps.newSetMultimap(map, factory); assertEquals(0, factory.count); multimap.putAll(Color.BLUE, asList(3, 1, 4)); assertEquals(1, factory.count); multimap.putAll(Color.RED, asList(2, 7, 1, 8)); assertEquals(2, 
factory.count); assertEquals(Sets.newHashSet(4, 3, 1), multimap.get(Color.BLUE)); } @GwtIncompatible("SerializableTester") public void testNewSetMultimapSerialization() { CountingSupplier<Set<Integer>> factory = new SetSupplier(); Map<Color, Collection<Integer>> map = Maps.newHashMap(); SetMultimap<Color, Integer> multimap = Multimaps.newSetMultimap(map, factory); multimap.putAll(Color.BLUE, asList(3, 1, 4)); multimap.putAll(Color.RED, asList(2, 7, 1, 8)); SerializableTester.reserializeAndAssert(multimap); } private static class SortedSetSupplier extends CountingSupplier<TreeSet<Integer>> { @Override public TreeSet<Integer> getImpl() { return Sets.newTreeSet(INT_COMPARATOR); } private static final long serialVersionUID = 0; } public void testNewSortedSetMultimap() { CountingSupplier<TreeSet<Integer>> factory = new SortedSetSupplier(); Map<Color, Collection<Integer>> map = Maps.newEnumMap(Color.class); SortedSetMultimap<Color, Integer> multimap = Multimaps.newSortedSetMultimap(map, factory); // newSortedSetMultimap calls the factory once to determine the comparator. 
assertEquals(1, factory.count); multimap.putAll(Color.BLUE, asList(3, 1, 4)); assertEquals(2, factory.count); multimap.putAll(Color.RED, asList(2, 7, 1, 8)); assertEquals(3, factory.count); assertEquals("[4, 3, 1]", multimap.get(Color.BLUE).toString()); assertEquals(INT_COMPARATOR, multimap.valueComparator()); } @GwtIncompatible("SerializableTester") public void testNewSortedSetMultimapSerialization() { CountingSupplier<TreeSet<Integer>> factory = new SortedSetSupplier(); Map<Color, Collection<Integer>> map = Maps.newEnumMap(Color.class); SortedSetMultimap<Color, Integer> multimap = Multimaps.newSortedSetMultimap(map, factory); multimap.putAll(Color.BLUE, asList(3, 1, 4)); multimap.putAll(Color.RED, asList(2, 7, 1, 8)); SerializableTester.reserializeAndAssert(multimap); assertEquals(INT_COMPARATOR, multimap.valueComparator()); } public void testIndex() { final Multimap<String, Object> stringToObject = new ImmutableMultimap.Builder<String, Object>() .put("1", 1) .put("1", 1L) .put("1", "1") .put("2", 2) .put("2", 2L) .build(); ImmutableMultimap<String, Object> outputMap = Multimaps.index(stringToObject.values(), Functions.toStringFunction()); assertEquals(stringToObject, outputMap); } public void testIndexIterator() { final Multimap<String, Object> stringToObject = new ImmutableMultimap.Builder<String, Object>() .put("1", 1) .put("1", 1L) .put("1", "1") .put("2", 2) .put("2", 2L) .build(); ImmutableMultimap<String, Object> outputMap = Multimaps.index(stringToObject.values().iterator(), Functions.toStringFunction()); assertEquals(stringToObject, outputMap); } public void testIndex_ordering() { final Multimap<Integer, String> expectedIndex = new ImmutableListMultimap.Builder<Integer, String>() .put(4, "Inky") .put(6, "Blinky") .put(5, "Pinky") .put(5, "Pinky") .put(5, "Clyde") .build(); final List<String> badGuys = Arrays.asList("Inky", "Blinky", "Pinky", "Pinky", "Clyde"); final Function<String, Integer> stringLengthFunction = new Function<String, Integer>() { 
@Override public Integer apply(String input) { return input.length(); } }; Multimap<Integer, String> index = Multimaps.index(badGuys, stringLengthFunction); assertEquals(expectedIndex, index); } public void testIndex_nullValue() { List<Integer> values = Arrays.asList(1, null); try { Multimaps.index(values, Functions.identity()); fail(); } catch (NullPointerException e) {} } public void testIndex_nullKey() { List<Integer> values = Arrays.asList(1, 2); try { Multimaps.index(values, Functions.constant(null)); fail(); } catch (NullPointerException e) {} } @GwtIncompatible(value = "untested") public void testTransformValues() { SetMultimap<String, Integer> multimap = ImmutableSetMultimap.of("a", 2, "b", -3, "b", 3, "a", 4, "c", 6); Function<Integer, Integer> square = new Function<Integer, Integer>() { @Override public Integer apply(Integer in) { return in * in; } }; Multimap<String, Integer> transformed = Multimaps.transformValues(multimap, square); assertThat(transformed.entries()).has().exactly(immutableEntry("a", 4), immutableEntry("a", 16), immutableEntry("b", 9), immutableEntry("b", 9), immutableEntry("c", 36)).inOrder(); } @GwtIncompatible(value = "untested") public void testTransformValuesIsView() { Multimap<String, String> multimap = LinkedListMultimap.create(); multimap.put("a", "a"); Multimap<String, Integer> transformed = Multimaps.transformValues(multimap, new Function<String, Integer>() { @Override public Integer apply(String str) { return str.length(); } }); Entry<String, String> entry = multimap.entries().iterator().next(); entry.setValue("bbb"); assertThat(transformed.entries()).has().exactly(immutableEntry("a", 3)).inOrder(); } @GwtIncompatible(value = "untested") public void testTransformListValues() { ListMultimap<String, Integer> multimap = ImmutableListMultimap.of("a", 2, "b", -3, "b", 3, "a", 4, "c", 6); Function<Integer, Integer> square = new Function<Integer, Integer>() { @Override public Integer apply(Integer in) { return in * in; } }; 
ListMultimap<String, Integer> transformed = Multimaps.transformValues(multimap, square); assertThat(transformed.entries()).has().exactly(immutableEntry("a", 4), immutableEntry("a", 16), immutableEntry("b", 9), immutableEntry("b", 9), immutableEntry("c", 36)).inOrder(); } @GwtIncompatible(value = "untested") public void testTransformEntries() { SetMultimap<String, Integer> multimap = ImmutableSetMultimap.of("a", 1, "a", 4, "b", -6); EntryTransformer<String, Integer, String> transformer = new EntryTransformer<String, Integer, String>() { @Override public String transformEntry(String key, Integer value) { return (value >= 0) ? key : "no" + key; } }; Multimap<String, String> transformed = Multimaps.transformEntries(multimap, transformer); assertThat(transformed.entries()).has().exactly(immutableEntry("a", "a"), immutableEntry("a", "a"), immutableEntry("b", "nob")).inOrder(); } @GwtIncompatible(value = "untested") public void testTransformListEntries() { ListMultimap<String, Integer> multimap = ImmutableListMultimap.of("a", 1, "a", 4, "b", 6, "a", 4); EntryTransformer<String, Integer, String> transformer = new EntryTransformer<String, Integer, String>() { @Override public String transformEntry(String key, Integer value) { return key + value; } }; ListMultimap<String, String> transformed = Multimaps.transformEntries(multimap, transformer); assertEquals( ImmutableListMultimap.of("a", "a1", "a", "a4", "a", "a4", "b", "b6"), transformed); assertEquals("{a=[a1, a4, a4], b=[b6]}", transformed.toString()); } public <K, V> void testSynchronizedMultimapSampleCodeCompilation() { K key = null; Multimap<K, V> multimap = Multimaps.synchronizedMultimap( HashMultimap.<K, V>create()); Collection<V> values = multimap.get(key); // Needn't be in synchronized block synchronized (multimap) { // Synchronizing on multimap, not values! 
Iterator<V> i = values.iterator(); // Must be in synchronized block while (i.hasNext()) { foo(i.next()); } } } private static void foo(Object o) {} public void testFilteredKeysSetMultimapReplaceValues() { SetMultimap<String, Integer> multimap = LinkedHashMultimap.create(); multimap.put("foo", 1); multimap.put("bar", 2); multimap.put("baz", 3); multimap.put("bar", 4); SetMultimap<String, Integer> filtered = Multimaps.filterKeys( multimap, Predicates.in(ImmutableSet.of("foo", "bar"))); assertEquals( ImmutableSet.of(), filtered.replaceValues("baz", ImmutableSet.<Integer>of())); try { filtered.replaceValues("baz", ImmutableSet.of(5)); fail("Expected IllegalArgumentException"); } catch (IllegalArgumentException expected) { } } public void testFilteredKeysSetMultimapGetBadValue() { SetMultimap<String, Integer> multimap = LinkedHashMultimap.create(); multimap.put("foo", 1); multimap.put("bar", 2); multimap.put("baz", 3); multimap.put("bar", 4); SetMultimap<String, Integer> filtered = Multimaps.filterKeys( multimap, Predicates.in(ImmutableSet.of("foo", "bar"))); Set<Integer> bazSet = filtered.get("baz"); assertThat(bazSet).isEmpty(); try { bazSet.add(5); fail("Expected IllegalArgumentException"); } catch (IllegalArgumentException expected) { } try { bazSet.addAll(ImmutableSet.of(6, 7)); fail("Expected IllegalArgumentException"); } catch (IllegalArgumentException expected) { } } public void testFilteredKeysListMultimapGetBadValue() { ListMultimap<String, Integer> multimap = ArrayListMultimap.create(); multimap.put("foo", 1); multimap.put("bar", 2); multimap.put("baz", 3); multimap.put("bar", 4); ListMultimap<String, Integer> filtered = Multimaps.filterKeys( multimap, Predicates.in(ImmutableSet.of("foo", "bar"))); List<Integer> bazList = filtered.get("baz"); assertThat(bazList).isEmpty(); try { bazList.add(5); fail("Expected IllegalArgumentException"); } catch (IllegalArgumentException expected) { } try { bazList.add(0, 6); fail("Expected IllegalArgumentException"); } catch 
(IllegalArgumentException expected) { } try { bazList.addAll(ImmutableList.of(7, 8)); fail("Expected IllegalArgumentException"); } catch (IllegalArgumentException expected) { } try { bazList.addAll(0, ImmutableList.of(9, 10)); fail("Expected IllegalArgumentException"); } catch (IllegalArgumentException expected) { } } @GwtIncompatible("NullPointerTester") public void testNullPointers() { new NullPointerTester().testAllPublicStaticMethods(Multimaps.class); } }
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.druid.query.metadata;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import io.druid.jackson.DefaultObjectMapper;
import io.druid.java.util.common.Intervals;
import io.druid.query.CacheStrategy;
import io.druid.query.Druids;
import io.druid.query.TableDataSource;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.DoubleMaxAggregatorFactory;
import io.druid.query.aggregation.DoubleSumAggregatorFactory;
import io.druid.query.aggregation.LongMaxAggregatorFactory;
import io.druid.query.aggregation.LongSumAggregatorFactory;
import io.druid.query.metadata.metadata.ColumnAnalysis;
import io.druid.query.metadata.metadata.SegmentAnalysis;
import io.druid.query.metadata.metadata.SegmentMetadataQuery;
import io.druid.query.spec.LegacySegmentSpec;
import io.druid.segment.column.ValueType;
import io.druid.timeline.LogicalSegment;
import org.joda.time.Period;
import org.junit.Assert;
import org.junit.Test;

import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

/**
 * Unit tests for SegmentMetadataQueryQueryToolChest: cache-key generation and
 * cache round-tripping, merging of per-segment analyses (strict vs. lenient
 * aggregator merging, rollup flag merging), and segment interval filtering.
 */
public class SegmentMetadataQueryQueryToolChestTest
{
  /**
   * Verifies the cache strategy: the computed cache key matches the expected
   * byte sequence, and a SegmentAnalysis survives a Jackson
   * serialize/deserialize round trip through the segment-level cache.
   */
  @Test
  public void testCacheStrategy() throws Exception
  {
    SegmentMetadataQuery query = new SegmentMetadataQuery(
        new TableDataSource("dummy"),
        new LegacySegmentSpec("2015-01-01/2015-01-02"),
        null,
        null,
        null,
        null,
        false,
        false
    );

    CacheStrategy<SegmentAnalysis, SegmentAnalysis, SegmentMetadataQuery> strategy =
        new SegmentMetadataQueryQueryToolChest(new SegmentMetadataQueryConfig()).getCacheStrategy(query);

    // Test cache key generation
    byte[] expectedKey = {0x04, 0x01, (byte) 0xFF, 0x00, 0x02, 0x04};
    byte[] actualKey = strategy.computeCacheKey(query);
    Assert.assertArrayEquals(expectedKey, actualKey);

    SegmentAnalysis result = new SegmentAnalysis(
        "testSegment",
        ImmutableList.of(Intervals.of("2011-01-12T00:00:00.000Z/2011-04-15T00:00:00.001Z")),
        ImmutableMap.of(
            "placement",
            new ColumnAnalysis(
                ValueType.STRING.toString(),
                true,
                10881,
                1,
                "preferred",
                "preferred",
                null
            )
        ),
        71982,
        100,
        null,
        null,
        null,
        null
    );

    // Round-trip through the cache representation: prepare -> bytes -> pull.
    Object preparedValue = strategy.prepareForSegmentLevelCache().apply(result);
    ObjectMapper objectMapper = new DefaultObjectMapper();
    SegmentAnalysis fromCacheValue = objectMapper.readValue(
        objectMapper.writeValueAsBytes(preparedValue),
        strategy.getCacheObjectClazz()
    );
    SegmentAnalysis fromCacheResult = strategy.pullFromSegmentLevelCache().apply(fromCacheValue);
    Assert.assertEquals(result, fromCacheResult);
  }

  /**
   * Merging two analyses with disjoint-but-compatible aggregator maps yields
   * the union of the aggregators, in both strict and lenient modes.
   */
  @Test
  public void testMergeAggregators()
  {
    final SegmentAnalysis analysis1 = new SegmentAnalysis(
        "id",
        null,
        Maps.<String, ColumnAnalysis>newHashMap(),
        0,
        0,
        ImmutableMap.of(
            "foo", new LongSumAggregatorFactory("foo", "foo"),
            "baz", new DoubleSumAggregatorFactory("baz", "baz")
        ),
        null,
        null,
        null
    );
    final SegmentAnalysis analysis2 = new SegmentAnalysis(
        "id",
        null,
        Maps.<String, ColumnAnalysis>newHashMap(),
        0,
        0,
        ImmutableMap.of(
            "foo", new LongSumAggregatorFactory("foo", "foo"),
            "bar", new DoubleSumAggregatorFactory("bar", "bar")
        ),
        null,
        null,
        null
    );

    Assert.assertEquals(
        ImmutableMap.of(
            "foo", new LongSumAggregatorFactory("foo", "foo"),
            "bar", new DoubleSumAggregatorFactory("bar", "bar"),
            "baz", new DoubleSumAggregatorFactory("baz", "baz")
        ),
        mergeStrict(analysis1, analysis2).getAggregators()
    );
    Assert.assertEquals(
        ImmutableMap.of(
            "foo", new LongSumAggregatorFactory("foo", "foo"),
            "bar", new DoubleSumAggregatorFactory("bar", "bar"),
            "baz", new DoubleSumAggregatorFactory("baz", "baz")
        ),
        mergeLenient(analysis1, analysis2).getAggregators()
    );
  }

  /**
   * When one side has null aggregators: strict merge yields null, lenient
   * merge keeps the non-null side.
   */
  @Test
  public void testMergeAggregatorsOneNull()
  {
    final SegmentAnalysis analysis1 = new SegmentAnalysis(
        "id",
        null,
        Maps.<String, ColumnAnalysis>newHashMap(),
        0,
        0,
        null,
        null,
        null,
        null
    );
    final SegmentAnalysis analysis2 = new SegmentAnalysis(
        "id",
        null,
        Maps.<String, ColumnAnalysis>newHashMap(),
        0,
        0,
        ImmutableMap.of(
            "foo", new LongSumAggregatorFactory("foo", "foo"),
            "bar", new DoubleSumAggregatorFactory("bar", "bar")
        ),
        null,
        null,
        null
    );

    Assert.assertNull(mergeStrict(analysis1, analysis2).getAggregators());
    Assert.assertEquals(
        ImmutableMap.of(
            "foo", new LongSumAggregatorFactory("foo", "foo"),
            "bar", new DoubleSumAggregatorFactory("bar", "bar")
        ),
        mergeLenient(analysis1, analysis2).getAggregators()
    );
  }

  // Both sides null: merged aggregators are null in both modes.
  @Test
  public void testMergeAggregatorsAllNull()
  {
    final SegmentAnalysis analysis1 = new SegmentAnalysis(
        "id",
        null,
        Maps.<String, ColumnAnalysis>newHashMap(),
        0,
        0,
        null,
        null,
        null,
        null
    );
    final SegmentAnalysis analysis2 = new SegmentAnalysis(
        "id",
        null,
        Maps.<String, ColumnAnalysis>newHashMap(),
        0,
        0,
        null,
        null,
        null,
        null
    );

    Assert.assertNull(mergeStrict(analysis1, analysis2).getAggregators());
    Assert.assertNull(mergeLenient(analysis1, analysis2).getAggregators());
  }

  /**
   * Conflicting aggregator definitions for the same name ("bar"): strict merge
   * nulls the whole map; lenient merge keeps agreeing entries, maps the
   * conflicting name to null, and is stable under multi-level merging.
   */
  @Test
  public void testMergeAggregatorsConflict()
  {
    final SegmentAnalysis analysis1 = new SegmentAnalysis(
        "id",
        null,
        Maps.<String, ColumnAnalysis>newHashMap(),
        0,
        0,
        ImmutableMap.of(
            "foo", new LongSumAggregatorFactory("foo", "foo"),
            "bar", new DoubleSumAggregatorFactory("bar", "bar")
        ),
        null,
        null,
        null
    );
    final SegmentAnalysis analysis2 = new SegmentAnalysis(
        "id",
        null,
        Maps.<String, ColumnAnalysis>newHashMap(),
        0,
        0,
        ImmutableMap.of(
            "foo", new LongSumAggregatorFactory("foo", "foo"),
            "bar", new DoubleMaxAggregatorFactory("bar", "bar"),
            "baz", new LongMaxAggregatorFactory("baz", "baz")
        ),
        null,
        null,
        null
    );

    // "bar" conflicts (sum vs. max), so the lenient result maps it to null.
    final Map<String, AggregatorFactory> expectedLenient = Maps.newHashMap();
    expectedLenient.put("foo", new LongSumAggregatorFactory("foo", "foo"));
    expectedLenient.put("bar", null);
    expectedLenient.put("baz", new LongMaxAggregatorFactory("baz", "baz"));
    Assert.assertNull(mergeStrict(analysis1, analysis2).getAggregators());
    Assert.assertEquals(expectedLenient, mergeLenient(analysis1, analysis2).getAggregators());

    // Simulate multi-level merge
    Assert.assertEquals(
        expectedLenient,
        mergeLenient(
            mergeLenient(analysis1, analysis2),
            mergeLenient(analysis1, analysis2)
        ).getAggregators()
    );
  }

  /**
   * filterSegments with merge=true keeps only segments within the default
   * history window (one week) counted back from the latest segment.
   */
  @Test
  public void testFilterSegments()
  {
    final SegmentMetadataQueryConfig config = new SegmentMetadataQueryConfig();
    final SegmentMetadataQueryQueryToolChest toolChest = new SegmentMetadataQueryQueryToolChest(config);

    final List<LogicalSegment> filteredSegments = toolChest.filterSegments(
        Druids.newSegmentMetadataQueryBuilder().dataSource("foo").merge(true).build(),
        ImmutableList
            .of(
                "2000-01-01/P1D",
                "2000-01-04/P1D",
                "2000-01-09/P1D",
                "2000-01-09/P1D"
            )
            .stream()
            .map(interval -> (LogicalSegment) () -> Intervals.of(interval))
            .collect(Collectors.toList())
    );

    Assert.assertEquals(Period.weeks(1), config.getDefaultHistory());
    // 2000-01-01 falls outside the one-week window ending at 2000-01-09.
    Assert.assertEquals(
        ImmutableList.of(
            Intervals.of("2000-01-04/P1D"),
            Intervals.of("2000-01-09/P1D"),
            Intervals.of("2000-01-09/P1D")
        ),
        filteredSegments.stream().map(LogicalSegment::getInterval).collect(Collectors.toList())
    );
  }

  /**
   * Rollup-flag merging: null + anything -> null, false + false -> false,
   * true + true -> true.
   */
  @SuppressWarnings("ArgumentParameterSwap")
  @Test
  public void testMergeRollup()
  {
    final SegmentAnalysis analysis1 = new SegmentAnalysis(
        "id",
        null,
        Maps.<String, ColumnAnalysis>newHashMap(),
        0,
        0,
        null,
        null,
        null,
        null
    );
    final SegmentAnalysis analysis2 = new SegmentAnalysis(
        "id",
        null,
        Maps.<String, ColumnAnalysis>newHashMap(),
        0,
        0,
        null,
        null,
        null,
        false
    );
    final SegmentAnalysis analysis3 = new SegmentAnalysis(
        "id",
        null,
        Maps.<String, ColumnAnalysis>newHashMap(),
        0,
        0,
        null,
        null,
        null,
        false
    );
    final SegmentAnalysis analysis4 = new SegmentAnalysis(
        "id",
        null,
        Maps.<String, ColumnAnalysis>newHashMap(),
        0,
        0,
        null,
        null,
        null,
        true
    );
    final SegmentAnalysis analysis5 = new SegmentAnalysis(
        "id",
        null,
        Maps.<String, ColumnAnalysis>newHashMap(),
        0,
        0,
        null,
        null,
        null,
        true
    );

    Assert.assertNull(mergeStrict(analysis1, analysis2).isRollup());
    Assert.assertNull(mergeStrict(analysis1, analysis4).isRollup());
    Assert.assertNull(mergeStrict(analysis2, analysis4).isRollup());
    Assert.assertFalse(mergeStrict(analysis2, analysis3).isRollup());
    Assert.assertTrue(mergeStrict(analysis4, analysis5).isRollup());
  }

  // Strict merge (lenientAggregatorMerge = false), then finalize.
  private static SegmentAnalysis mergeStrict(SegmentAnalysis analysis1, SegmentAnalysis analysis2)
  {
    return SegmentMetadataQueryQueryToolChest.finalizeAnalysis(
        SegmentMetadataQueryQueryToolChest.mergeAnalyses(
            analysis1,
            analysis2,
            false
        )
    );
  }

  // Lenient merge (lenientAggregatorMerge = true), then finalize.
  private static SegmentAnalysis mergeLenient(SegmentAnalysis analysis1, SegmentAnalysis analysis2)
  {
    return SegmentMetadataQueryQueryToolChest.finalizeAnalysis(
        SegmentMetadataQueryQueryToolChest.mergeAnalyses(
            analysis1,
            analysis2,
            true
        )
    );
  }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with this
 * work for additional information regarding copyright ownership. The ASF
 * licenses this file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package org.apache.hadoop.io.file.tfile;

import java.io.DataOutputStream;
import java.io.EOFException;
import java.io.IOException;
import java.util.Random;

import static org.junit.Assert.fail;
import static org.junit.Assert.assertTrue;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.WritableUtils;
import org.apache.hadoop.io.file.tfile.TFile.Reader;
import org.apache.hadoop.io.file.tfile.TFile.Writer;
import org.apache.hadoop.io.file.tfile.TFile.Reader.Scanner;
import org.apache.hadoop.test.GenericTestUtils;

/**
 *
 * Streaming interfaces test case class using GZ compression codec, base class
 * of none and LZO compression classes.
 *
 */
public class TestTFileStreams {
  // Test-scratch directory on the local filesystem.
  private static String ROOT = GenericTestUtils.getTestDir().getAbsolutePath();

  private final static int BLOCK_SIZE = 512;
  private final static int K = 1024;
  private final static int M = K * K;
  // Subclasses set this to true to skip all tests (e.g. codec unavailable).
  protected boolean skip = false;
  private FileSystem fs;
  private Configuration conf;
  private Path path;
  private FSDataOutputStream out;
  Writer writer;

  // Defaults; subclasses may override via init() before setUp() runs.
  private String compression = Compression.Algorithm.GZ.getName();
  private String comparator = "memcmp";
  private final String outputFile = getClass().getSimpleName();

  /**
   * Allows subclasses to select a different compression codec and comparator
   * before the per-test setup creates the writer.
   */
  public void init(String compression, String comparator) {
    this.compression = compression;
    this.comparator = comparator;
  }

  // Creates a fresh TFile writer over a new output file for each test.
  @Before
  public void setUp() throws IOException {
    conf = new Configuration();
    path = new Path(ROOT, outputFile);
    fs = path.getFileSystem(conf);
    out = fs.create(path);
    writer = new Writer(out, BLOCK_SIZE, compression, comparator, conf);
  }

  // Best-effort close and delete of the test file; close errors are ignored
  // because several tests deliberately leave the writer in a failed state.
  @After
  public void tearDown() throws IOException {
    if (!skip) {
      try {
        closeOutput();
      } catch (Exception e) {
        // no-op
      }
      fs.delete(path, true);
    }
  }

  // An empty TFile must still be readable (zero records).
  @Test
  public void testNoEntry() throws IOException {
    if (skip)
      return;
    closeOutput();
    TestTFileByteArrays.readRecords(fs, path, 0, conf);
  }

  @Test
  public void testOneEntryKnownLength() throws IOException {
    if (skip)
      return;
    writeRecords(1, true, true);
    TestTFileByteArrays.readRecords(fs, path, 1, conf);
  }

  @Test
  public void testOneEntryUnknownLength() throws IOException {
    if (skip)
      return;
    writeRecords(1, false, false);

    // TODO: will throw exception at getValueLength, it's inconsistent though;
    // getKeyLength returns a value correctly, though initial length is -1
    TestTFileByteArrays.readRecords(fs, path, 1, conf);
  }

  // known key length, unknown value length
  @Test
  public void testOneEntryMixedLengths1() throws IOException {
    if (skip)
      return;
    writeRecords(1, true, false);
    TestTFileByteArrays.readRecords(fs, path, 1, conf);
  }

  // unknown key length, known value length
  @Test
  public void testOneEntryMixedLengths2() throws IOException {
    if (skip)
      return;
    writeRecords(1, false, true);
    TestTFileByteArrays.readRecords(fs, path, 1, conf);
  }

  @Test
  public void testTwoEntriesKnownLength() throws IOException {
    if (skip)
      return;
    writeRecords(2, true, true);
    TestTFileByteArrays.readRecords(fs, path, 2, conf);
  }

  // Negative test
  @Test
  public void testFailureAddKeyWithoutValue() throws IOException {
    if (skip)
      return;
    DataOutputStream dos = writer.prepareAppendKey(-1);
    dos.write("key0".getBytes());
    try {
      closeOutput();
      fail("Cannot add only a key without a value. ");
    } catch (IllegalStateException e) {
      // noop, expecting an exception
    }
  }

  @Test
  public void testFailureAddValueWithoutKey() throws IOException {
    if (skip)
      return;
    DataOutputStream outValue = null;
    try {
      outValue = writer.prepareAppendValue(6);
      outValue.write("value0".getBytes());
      fail("Cannot add a value without adding key first. ");
    } catch (Exception e) {
      // noop, expecting an exception
    } finally {
      if (outValue != null) {
        outValue.close();
      }
    }
  }

  // Declared key length of 2 does not match the 4-byte key actually written.
  @Test
  public void testFailureOneEntryKnownLength() throws IOException {
    if (skip)
      return;
    DataOutputStream outKey = writer.prepareAppendKey(2);
    try {
      outKey.write("key0".getBytes());
      fail("Specified key length mismatched the actual key length.");
    } catch (IOException e) {
      // noop, expecting an exception
    }
    DataOutputStream outValue = null;
    try {
      outValue = writer.prepareAppendValue(6);
      outValue.write("value0".getBytes());
    } catch (Exception e) {
      // noop, expecting an exception
    }
  }

  @Test
  public void testFailureKeyTooLong() throws IOException {
    if (skip)
      return;
    DataOutputStream outKey = writer.prepareAppendKey(2);
    try {
      outKey.write("key0".getBytes());
      outKey.close();
      fail("Key is longer than requested.");
    } catch (Exception e) {
      // noop, expecting an exception
    } finally {
    }
  }

  @Test
  public void testFailureKeyTooShort() throws IOException {
    if (skip)
      return;
    DataOutputStream outKey = writer.prepareAppendKey(4);
    outKey.write("key0".getBytes());
    outKey.close();
    DataOutputStream outValue = writer.prepareAppendValue(15);
    try {
      outValue.write("value0".getBytes());
      outValue.close();
      fail("Value is shorter than expected.");
    } catch (Exception e) {
      // noop, expecting an exception
    } finally {
    }
  }

  @Test
  public void testFailureValueTooLong() throws IOException {
    if (skip)
      return;
    DataOutputStream outKey = writer.prepareAppendKey(4);
    outKey.write("key0".getBytes());
    outKey.close();
    DataOutputStream outValue = writer.prepareAppendValue(3);
    try {
      outValue.write("value0".getBytes());
      outValue.close();
      fail("Value is longer than expected.");
    } catch (Exception e) {
      // noop, expecting an exception
    }

    // Repeated close() on an already-closed stream must be a no-op.
    try {
      outKey.close();
      outKey.close();
    } catch (Exception e) {
      fail("Second or more close() should have no effect.");
    }
  }

  @Test
  public void testFailureValueTooShort() throws IOException {
    if (skip)
      return;
    DataOutputStream outKey = writer.prepareAppendKey(8);
    try {
      outKey.write("key0".getBytes());
      outKey.close();
      fail("Key is shorter than expected.");
    } catch (Exception e) {
      // noop, expecting an exception
    } finally {
    }
  }

  @Test
  public void testFailureCloseKeyStreamManyTimesInWriter() throws IOException {
    if (skip)
      return;
    DataOutputStream outKey = writer.prepareAppendKey(4);
    try {
      outKey.write("key0".getBytes());
      outKey.close();
    } catch (Exception e) {
      // noop, expecting an exception
    } finally {
      try {
        outKey.close();
      } catch (Exception e) {
        // no-op
      }
    }
    outKey.close();
    outKey.close();
    assertTrue("Multiple close should have no effect.", true);
  }

  // Known key lengths above 64K must be rejected up front by prepareAppendKey.
  @Test
  public void testFailureKeyLongerThan64K() throws IOException {
    if (skip)
      return;
    try {
      DataOutputStream outKey = writer.prepareAppendKey(64 * K + 1);
      fail("Failed to handle key longer than 64K.");
    } catch (IndexOutOfBoundsException e) {
      // noop, expecting exceptions
    }
    closeOutput();
  }

  // With an unknown key length (-1), overflowing 64K is detected while
  // streaming the key bytes.
  @Test
  public void testFailureKeyLongerThan64K_2() throws IOException {
    if (skip)
      return;
    DataOutputStream outKey = writer.prepareAppendKey(-1);
    try {
      byte[] buf = new byte[K];
      Random rand = new Random();
      for (int nx = 0; nx < K + 2; nx++) {
        rand.nextBytes(buf);
        outKey.write(buf);
      }
      outKey.close();
      fail("Failed to handle key longer than 64K.");
    } catch (EOFException e) {
      // noop, expecting exceptions
    } finally {
      try {
        closeOutput();
      } catch (Exception e) {
        // no-op
      }
    }
  }

  @Test
  public void testFailureNegativeOffset() throws IOException {
    if (skip)
      return;
    writeRecords(2, true, true);
    Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
    Scanner scanner = reader.createScanner();
    byte[] buf = new byte[K];
    try {
      scanner.entry().getKey(buf, -1);
      fail("Failed to handle key negative offset.");
    } catch (Exception e) {
      // noop, expecting exceptions
    } finally {
    }
    scanner.close();
    reader.close();
  }

  /**
   * Verify that the compressed data size is less than raw data size.
   *
   * @throws IOException
   */
  @Test
  public void testFailureCompressionNotWorking() throws IOException {
    if (skip)
      return;
    long rawDataSize = writeRecords(10000, false, false, false);
    if (!compression.equalsIgnoreCase(Compression.Algorithm.NONE.getName())) {
      assertTrue(out.getPos() < rawDataSize);
    }
    closeOutput();
  }

  @Test
  public void testFailureCompressionNotWorking2() throws IOException {
    if (skip)
      return;
    long rawDataSize = writeRecords(10000, true, true, false);
    if (!compression.equalsIgnoreCase(Compression.Algorithm.NONE.getName())) {
      assertTrue(out.getPos() < rawDataSize);
    }
    closeOutput();
  }

  /**
   * Appends {@code count} sorted key/value records, optionally declaring the
   * key/value lengths to the writer (-1 means unknown/streamed), and returns
   * the raw (uncompressed) size as it would be laid out with VInt length
   * prefixes.
   */
  private long writeRecords(int count, boolean knownKeyLength,
      boolean knownValueLength, boolean close) throws IOException {
    long rawDataSize = 0;
    for (int nx = 0; nx < count; nx++) {
      String key = TestTFileByteArrays.composeSortedKey("key", nx);
      DataOutputStream outKey =
          writer.prepareAppendKey(knownKeyLength ? key.length() : -1);
      outKey.write(key.getBytes());
      outKey.close();
      String value = "value" + nx;
      DataOutputStream outValue =
          writer.prepareAppendValue(knownValueLength ? value.length() : -1);
      outValue.write(value.getBytes());
      outValue.close();
      // Raw layout: VInt(keyLen) + key + VInt(valueLen) + value.
      rawDataSize +=
          WritableUtils.getVIntSize(key.getBytes().length)
              + key.getBytes().length
              + WritableUtils.getVIntSize(value.getBytes().length)
              + value.getBytes().length;
    }
    if (close) {
      closeOutput();
    }
    return rawDataSize;
  }

  // Convenience overload that always closes the output after writing.
  private long writeRecords(int count, boolean knownKeyLength,
      boolean knownValueLength) throws IOException {
    return writeRecords(count, knownKeyLength, knownValueLength, true);
  }

  // Closes writer then the underlying stream, nulling both so tearDown()
  // and repeated calls are safe.
  private void closeOutput() throws IOException {
    if (writer != null) {
      writer.close();
      writer = null;
    }
    if (out != null) {
      out.close();
      out = null;
    }
  }
}
package ch.unifr.pai.twice.mousecontrol.client;

/*
 * Copyright 2013 Oliver Schmid
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.util.Date;

import com.google.gwt.dom.client.Style.Unit;
import com.google.gwt.dom.client.Touch;
import com.google.gwt.event.dom.client.TouchEndEvent;
import com.google.gwt.event.dom.client.TouchEndHandler;
import com.google.gwt.event.dom.client.TouchMoveEvent;
import com.google.gwt.event.dom.client.TouchMoveHandler;
import com.google.gwt.event.dom.client.TouchStartEvent;
import com.google.gwt.event.dom.client.TouchStartHandler;
import com.google.gwt.user.client.Timer;
import com.google.gwt.user.client.ui.DockLayoutPanel;
import com.google.gwt.user.client.ui.HTML;

/**
 * The touchpad for mobile devices (based on scrolling for better performance).
 *
 * Tracks relative finger movement and maps it onto absolute cursor
 * coordinates; a long press (no movement before the timer fires) switches
 * into dragging mode. Constants such as MOVEFACTOR, MOVEMENTTHRESHOLD,
 * MOUSEDOWNTHRESHOLD and helpers like down()/up()/addToLog() are inherited
 * from TouchPadWidget (defined elsewhere).
 *
 * @author oli
 *
 */
public class TouchPadMobileWidget extends TouchPadWidget implements TouchStartHandler, TouchEndHandler, TouchMoveHandler {

	// Current absolute cursor position on the shared screen.
	private int x;
	private int y;
	// True once the finger has moved beyond MOVEMENTTHRESHOLD in this touch.
	private boolean move;
	// True while the widget is in dragging mode (long press engaged).
	private boolean dragging = false;

	/**
	 * Timer for recognizing a switch to drag mode
	 */
	private final Timer mouseDownTimer = new Timer() {

		@Override
		public void run() {
			// Long press detected: send mouse-down and enter dragging mode.
			down(true);
			downSent = true;
			dragging = true;
			if (isDoLog())
				addToLog("startDrag", "cursorX=\"" + x + "\" cursorY=\"" + y + "\"", null);
			widget.getElement().setInnerHTML("<p>Your device is in dragging mode.</p><p> Tap on the screen to release.</p>");
		}
	};

	/**
	 * Transform relative movements to absolute x- and y-coordinates
	 *
	 * @param dX
	 * @param dY
	 */
	private void updatePos(int dX, int dY) {
		// Scale the delta by MOVEFACTOR and clamp to the screen bounds.
		if (dX != 0) {
			int changeX = (int) Math.floor((dX * MOVEFACTOR));
			x = Math.max(Math.min(x + changeX, screenWidth), 0);
		}
		if (dY != 0) {
			int changeY = (int) Math.floor((dY * MOVEFACTOR));
			y = Math.max(Math.min(y + changeY, screenHeight), 0);
		}
	}

	// Last observed touch position, used to compute movement deltas.
	private int lastX;
	private int lastY;
	// Timestamp (ms) of the current touch start; -1 when no finger is down.
	private long fingerDownAt;
	// Whether a mouse-down event has already been sent for this gesture.
	private boolean downSent;
	DockLayoutPanel p = new DockLayoutPanel(Unit.PCT);
	// private MobileKeyboard keyboardButton = new MobileKeyboard("Keyboard", "Done");
	HTML widget = new HTML();

	public TouchPadMobileWidget() {
		super(false);
		// Disable text selection so touch gestures are not hijacked.
		widget.getElement().getStyle().setProperty("userSelect", "none");
		widget.addTouchStartHandler(this);
		widget.addTouchEndHandler(this);
		widget.addTouchMoveHandler(this);
		widget.getElement().getStyle().setFontSize(20, Unit.PX);
		add(p);
		widget.setHeight("100%");
		// p.addSouth(keyboardButton, 10);
		p.add(widget);
		// setWidgetTopBottom(widget, 0, Unit.PX, 0, Unit.PX);
		// setWidgetLeftRight(widget, 0, Unit.PX, 0, Unit.PX);
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see ch.unifr.pai.twice.mousecontrol.client.TouchPadWidget#updateScreenDimensions()
	 */
	@Override
	protected void updateScreenDimensions() {
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see ch.unifr.pai.twice.mousecontrol.client.TouchPadWidget#getX()
	 */
	@Override
	protected int getX() {
		return x;
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see ch.unifr.pai.twice.mousecontrol.client.TouchPadWidget#getY()
	 */
	@Override
	protected int getY() {
		return y;
	}

	/**
	 * On movement, calculate the new position of the mouse pointer on the shared screen
	 *
	 * @see com.google.gwt.event.dom.client.TouchMoveHandler#onTouchMove(com.google.gwt.event.dom.client.TouchMoveEvent)
	 */
	@Override
	public void onTouchMove(TouchMoveEvent event) {
		event.preventDefault();
		event.stopPropagation();
		if (event.getTouches().length() > 0) {
			Touch t = event.getTouches().get(0);
			int x = t.getClientX();
			int y = t.getClientY();
			int dX = x - lastX;
			int dY = y - lastY;
			// Only treat it as movement once the delta exceeds the threshold;
			// this also cancels a pending switch to drag mode.
			if (Math.abs(dX) > MOVEMENTTHRESHOLD || Math.abs(dY) > MOVEMENTTHRESHOLD) {
				mouseDownTimer.cancel();
				lastX = x;
				lastY = y;
				updatePos(dX, dY);
				move = true;
			}
		}
	}

	/**
	 * On touch end - stop dragging
	 *
	 * @see com.google.gwt.event.dom.client.TouchEndHandler#onTouchEnd(com.google.gwt.event.dom.client.TouchEndEvent)
	 */
	@Override
	public void onTouchEnd(TouchEndEvent event) {
		event.preventDefault();
		event.stopPropagation();
		if (isDoLog())
			addToLog("endTouch", "cursorX=\"" + x + "\" cursorY=\"" + y + "\"", null);
		// A touch that ended without movement releases dragging mode (tap).
		if (fingerDownAt != -1) {
			if (!move) {
				stopDragging();
			}
		}
		fingerDownAt = -1;
		move = false;
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see ch.unifr.pai.twice.mousecontrol.client.TouchPadWidget#stopDragging()
	 */
	@Override
	protected void stopDragging() {
		// If the long-press timer never fired, send the pending mouse-down
		// first so the following mouse-up forms a complete click.
		if (!downSent) {
			down(true);
			mouseDownTimer.cancel();
		}
		up(true);
		if (isDoLog())
			addToLog("stopDrag", "cursorX=\"" + x + "\" cursorY=\"" + y + "\"", null);
		super.stopDragging();
		// Clear the "dragging mode" hint text.
		widget.getElement().setInnerHTML("");
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see ch.unifr.pai.twice.mousecontrol.client.TouchPadWidget#stop()
	 */
	@Override
	public void stop() {
		if (dragging) {
			stopDragging();
		}
		super.stop();
	}

	/**
	 * On touch start, check if it is the begin of a drag.
	 *
	 * @see com.google.gwt.event.dom.client.TouchStartHandler#onTouchStart(com.google.gwt.event.dom.client.TouchStartEvent)
	 */
	@Override
	public void onTouchStart(TouchStartEvent event) {
		event.preventDefault();
		event.stopPropagation();
		if (event.getTouches().length() > 0) {
			Touch t = event.getTouches().get(0);
			lastX = t.getClientX();
			lastY = t.getClientY();
			fingerDownAt = new Date().getTime();
			if (isDoLog())
				addToLog("startTouch", "cursorX=\"" + x + "\" cursorY=\"" + y + "\"", null);
			// Arm the long-press timer: if no movement occurs before it
			// fires, the gesture becomes a drag (when drag mode is enabled).
			if (!move) {
				downSent = false;
				if (dragModeEnabled)
					mouseDownTimer.schedule(MOUSEDOWNTHRESHOLD);
			}
		}
	}
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.kafka.common.requests; import org.apache.kafka.common.TopicPartition; import org.apache.kafka.common.errors.UnsupportedVersionException; import org.apache.kafka.common.message.TxnOffsetCommitRequestData; import org.apache.kafka.common.message.TxnOffsetCommitRequestData.TxnOffsetCommitRequestPartition; import org.apache.kafka.common.message.TxnOffsetCommitRequestData.TxnOffsetCommitRequestTopic; import org.apache.kafka.common.message.TxnOffsetCommitResponseData; import org.apache.kafka.common.message.TxnOffsetCommitResponseData.TxnOffsetCommitResponsePartition; import org.apache.kafka.common.message.TxnOffsetCommitResponseData.TxnOffsetCommitResponseTopic; import org.apache.kafka.common.protocol.ApiKeys; import org.apache.kafka.common.protocol.ByteBufferAccessor; import org.apache.kafka.common.protocol.Errors; import org.apache.kafka.common.record.RecordBatch; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.stream.Collectors; public class TxnOffsetCommitRequest extends AbstractRequest { private final TxnOffsetCommitRequestData data; 
public static class Builder extends AbstractRequest.Builder<TxnOffsetCommitRequest> { public final TxnOffsetCommitRequestData data; public Builder(final String transactionalId, final String consumerGroupId, final long producerId, final short producerEpoch, final Map<TopicPartition, CommittedOffset> pendingTxnOffsetCommits) { this(transactionalId, consumerGroupId, producerId, producerEpoch, pendingTxnOffsetCommits, JoinGroupRequest.UNKNOWN_MEMBER_ID, JoinGroupRequest.UNKNOWN_GENERATION_ID, Optional.empty()); } public Builder(final String transactionalId, final String consumerGroupId, final long producerId, final short producerEpoch, final Map<TopicPartition, CommittedOffset> pendingTxnOffsetCommits, final String memberId, final int generationId, final Optional<String> groupInstanceId) { super(ApiKeys.TXN_OFFSET_COMMIT); this.data = new TxnOffsetCommitRequestData() .setTransactionalId(transactionalId) .setGroupId(consumerGroupId) .setProducerId(producerId) .setProducerEpoch(producerEpoch) .setTopics(getTopics(pendingTxnOffsetCommits)) .setMemberId(memberId) .setGenerationId(generationId) .setGroupInstanceId(groupInstanceId.orElse(null)); } @Override public TxnOffsetCommitRequest build(short version) { if (version < 3 && groupMetadataSet()) { throw new UnsupportedVersionException("Broker doesn't support group metadata commit API on version " + version + ", minimum supported request version is 3 which requires brokers to be on version 2.5 or above."); } return new TxnOffsetCommitRequest(data, version); } private boolean groupMetadataSet() { return !data.memberId().equals(JoinGroupRequest.UNKNOWN_MEMBER_ID) || data.generationId() != JoinGroupRequest.UNKNOWN_GENERATION_ID || data.groupInstanceId() != null; } @Override public String toString() { return data.toString(); } } public TxnOffsetCommitRequest(TxnOffsetCommitRequestData data, short version) { super(ApiKeys.TXN_OFFSET_COMMIT, version); this.data = data; } public Map<TopicPartition, CommittedOffset> offsets() { 
List<TxnOffsetCommitRequestTopic> topics = data.topics(); Map<TopicPartition, CommittedOffset> offsetMap = new HashMap<>(); for (TxnOffsetCommitRequestTopic topic : topics) { for (TxnOffsetCommitRequestPartition partition : topic.partitions()) { offsetMap.put(new TopicPartition(topic.name(), partition.partitionIndex()), new CommittedOffset(partition.committedOffset(), partition.committedMetadata(), RequestUtils.getLeaderEpoch(partition.committedLeaderEpoch())) ); } } return offsetMap; } static List<TxnOffsetCommitRequestTopic> getTopics(Map<TopicPartition, CommittedOffset> pendingTxnOffsetCommits) { Map<String, List<TxnOffsetCommitRequestPartition>> topicPartitionMap = new HashMap<>(); for (Map.Entry<TopicPartition, CommittedOffset> entry : pendingTxnOffsetCommits.entrySet()) { TopicPartition topicPartition = entry.getKey(); CommittedOffset offset = entry.getValue(); List<TxnOffsetCommitRequestPartition> partitions = topicPartitionMap.getOrDefault(topicPartition.topic(), new ArrayList<>()); partitions.add(new TxnOffsetCommitRequestPartition() .setPartitionIndex(topicPartition.partition()) .setCommittedOffset(offset.offset) .setCommittedLeaderEpoch(offset.leaderEpoch.orElse(RecordBatch.NO_PARTITION_LEADER_EPOCH)) .setCommittedMetadata(offset.metadata) ); topicPartitionMap.put(topicPartition.topic(), partitions); } return topicPartitionMap.entrySet().stream() .map(entry -> new TxnOffsetCommitRequestTopic() .setName(entry.getKey()) .setPartitions(entry.getValue())) .collect(Collectors.toList()); } @Override public TxnOffsetCommitRequestData data() { return data; } static List<TxnOffsetCommitResponseTopic> getErrorResponseTopics(List<TxnOffsetCommitRequestTopic> requestTopics, Errors e) { List<TxnOffsetCommitResponseTopic> responseTopicData = new ArrayList<>(); for (TxnOffsetCommitRequestTopic entry : requestTopics) { List<TxnOffsetCommitResponsePartition> responsePartitions = new ArrayList<>(); for (TxnOffsetCommitRequestPartition requestPartition : 
entry.partitions()) { responsePartitions.add(new TxnOffsetCommitResponsePartition() .setPartitionIndex(requestPartition.partitionIndex()) .setErrorCode(e.code())); } responseTopicData.add(new TxnOffsetCommitResponseTopic() .setName(entry.name()) .setPartitions(responsePartitions) ); } return responseTopicData; } @Override public TxnOffsetCommitResponse getErrorResponse(int throttleTimeMs, Throwable e) { List<TxnOffsetCommitResponseTopic> responseTopicData = getErrorResponseTopics(data.topics(), Errors.forException(e)); return new TxnOffsetCommitResponse(new TxnOffsetCommitResponseData() .setThrottleTimeMs(throttleTimeMs) .setTopics(responseTopicData)); } public static TxnOffsetCommitRequest parse(ByteBuffer buffer, short version) { return new TxnOffsetCommitRequest(new TxnOffsetCommitRequestData( new ByteBufferAccessor(buffer), version), version); } public static class CommittedOffset { public final long offset; public final String metadata; public final Optional<Integer> leaderEpoch; public CommittedOffset(long offset, String metadata, Optional<Integer> leaderEpoch) { this.offset = offset; this.metadata = metadata; this.leaderEpoch = leaderEpoch; } @Override public String toString() { return "CommittedOffset(" + "offset=" + offset + ", leaderEpoch=" + leaderEpoch + ", metadata='" + metadata + "')"; } @Override public boolean equals(Object other) { if (!(other instanceof CommittedOffset)) { return false; } CommittedOffset otherOffset = (CommittedOffset) other; return this.offset == otherOffset.offset && this.leaderEpoch.equals(otherOffset.leaderEpoch) && Objects.equals(this.metadata, otherOffset.metadata); } @Override public int hashCode() { return Objects.hash(offset, leaderEpoch, metadata); } } }
/**
 * Page-object for the Mail tab of the Zimbra mobile-client test harness.
 */
package com.zimbra.qa.selenium.projects.mobile.ui;

import java.util.ArrayList;
import java.util.List;

import com.zimbra.qa.selenium.framework.items.ConversationItem;
import com.zimbra.qa.selenium.framework.items.MailItem;
import com.zimbra.qa.selenium.framework.ui.AbsApplication;
import com.zimbra.qa.selenium.framework.ui.AbsPage;
import com.zimbra.qa.selenium.framework.ui.AbsTab;
import com.zimbra.qa.selenium.framework.ui.Action;
import com.zimbra.qa.selenium.framework.ui.Button;
import com.zimbra.qa.selenium.framework.util.HarnessException;
import com.zimbra.qa.selenium.framework.util.SleepUtil;

/**
 * Selenium page-object wrapping the mail view of the mobile web client:
 * activation/navigation plus scraping of the conversation list.
 *
 * @author Matt Rhoades
 */
public class PageMail extends AbsTab {

	// Selenium locators for elements this page interacts with.
	public static class Locators {
		// TODO: Need better locator that doesn't have content text
		public static final String zMailIsActive = "xpath=//a[contains(.,'Folders')]";
		public static final String zDList_View = "xpath=//div[@id='dlist-view']";
		public static final String zDList_View_2 = "//div[@id='dlist-view']/div";
	}

	public PageMail(AbsApplication application) {
		super(application);
		logger.info("new " + PageMail.class.getCanonicalName());
	}

	/* (non-Javadoc)
	 * @see projects.admin.ui.AbsPage#isActive()
	 */
	@Override
	public boolean zIsActive() throws HarnessException {

		// Make sure the main page is active before probing for our locator.
		if ( !((AppMobileClient)MyApplication).zPageMain.zIsActive() ) {
			((AppMobileClient)MyApplication).zPageMain.zNavigateTo();
		}

		// Presence of the "Folders" link is used as the activity signal.
		boolean active = this.sIsElementPresent(Locators.zMailIsActive);

		return (active);
	}

	/* (non-Javadoc)
	 * @see projects.admin.ui.AbsPage#myPageName()
	 */
	@Override
	public String myPageName() {
		return (this.getClass().getName());
	}

	/* (non-Javadoc)
	 * @see projects.admin.ui.AbsPage#navigateTo()
	 */
	@Override
	public void zNavigateTo() throws HarnessException {

		// Check if this page is already active.
		if ( zIsActive() ) {
			return;
		}

		// Make sure we are logged into the Mobile app
		if ( !((AppMobileClient)MyApplication).zPageMain.zIsActive() ) {
			((AppMobileClient)MyApplication).zPageMain.zNavigateTo();
		}

		// Click on Mail icon
		sClick(PageMain.Locators.zAppbarMail);

		zWaitForActive();

	}

	/**
	 * Return a list of all messages in the current view
	 * @return the messages currently displayed
	 * @throws HarnessException always, until implemented
	 */
	public List<MailItem> zListGetMessages() throws HarnessException {
		throw new HarnessException("implement me!");
	}

	/**
	 * Return a list of all conversations in the current view.
	 * Each item is populated from the from/subject/fragment spans of the
	 * conversation rows in the dlist view.
	 * @return the conversations currently displayed
	 * @throws HarnessException if the message list cannot be found
	 */
	public List<ConversationItem> zListGetConversations() throws HarnessException {

		List<ConversationItem> items = new ArrayList<ConversationItem>();

		if (!sIsElementPresent(Locators.zDList_View))
			throw new HarnessException("Unable to find the message list!");

		int count = sGetXpathCount("//div[contains(@id, 'conv')]");
		logger.info(count + " conversations found");

		// Get each conversation's data from the table list
		// NOTE: xpath indexing is 1-based.
		for (int i = 1; i <= count; i++) {

			final String convLocator = "//div[contains(@id, 'conv')]["+ i +"]";

			if ( !this.sIsElementPresent(convLocator) ) {
				throw new HarnessException("Can't find conversation row from locator "+ convLocator);
			}

			String locator;
			ConversationItem item = new ConversationItem();

			// TODO: Is it checked?
			// TODO: Converstation icon

			// From:
			locator = convLocator + "//div[@class='from-span']";
			if ( this.sIsElementPresent(locator) ) {
				item.gFrom = this.sGetText(locator);
			} else {
				item.gFrom = "";
			}

			// Subject:
			locator = convLocator + "//div[@class='sub-span']";
			if ( this.sIsElementPresent(locator) ) {
				item.gSubject = this.sGetText(locator);
			} else {
				item.gSubject = "";
			}

			// Fragment:
			locator = convLocator + "//div[@class='fragment-span']";
			if ( this.sIsElementPresent(locator) ) {
				item.gFragment = this.sGetText(locator);
			} else {
				item.gFragment = "";
			}

			// Add the new item to the list
			items.add(item);
			logger.info(item.prettyPrint());
		}

		return (items);
	}

	/**
	 * Refresh the inbox list by clicking "Get Mail".
	 * NOTE(review): implemented by navigating to Contacts and back to Mail —
	 * presumably because the mobile client has no dedicated refresh control;
	 * confirm this is still the intended workaround.
	 * @throws HarnessException on any harness failure during the clicks
	 */
	public void zRefresh() throws HarnessException {
		this.sClick(PageMain.Locators.zAppbarContact);
		SleepUtil.sleepMedium();
		this.sClick(PageMain.Locators.zAppbarMail);
		SleepUtil.sleepMedium();
	}

	@Override
	public AbsPage zListItem(Action action, String item)
			throws HarnessException {
		// TODO Auto-generated method stub
		return null;
	}

	@Override
	public AbsPage zListItem(Action action, Button option, String item)
			throws HarnessException {
		// TODO Auto-generated method stub
		return null;
	}

	@Override
	public AbsPage zListItem(Action action, Button option, Button subOption ,String item)
			throws HarnessException {
		// Context menus are a desktop-client concept; not supported here.
		throw new HarnessException("Mobile page does not have context menu");
	}

	@Override
	public AbsPage zToolbarPressButton(Button button) throws HarnessException {
		// TODO Auto-generated method stub
		return null;
	}

	@Override
	public AbsPage zToolbarPressPulldown(Button pulldown, Button option)
			throws HarnessException {
		// TODO Auto-generated method stub
		return null;
	}

}
/*
 * Copyright 2002-2019 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.beans.factory;

import java.lang.annotation.Annotation;
import java.lang.reflect.AnnotatedElement;
import java.lang.reflect.Field;
import java.lang.reflect.Member;

import org.springframework.core.MethodParameter;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
import org.springframework.util.ObjectUtils;

/**
 * Describes a single injection point: either a method/constructor parameter
 * or a field. Exposed by {@link UnsatisfiedDependencyException} and usable as
 * a factory-method argument so that a bean instance can be customized for the
 * point that requested it. Exactly one of the two wrapped members is set.
 *
 * @author Juergen Hoeller
 * @since 4.3
 * @see UnsatisfiedDependencyException#getInjectionPoint()
 * @see org.springframework.beans.factory.config.DependencyDescriptor
 */
public class InjectionPoint {

	@Nullable
	protected MethodParameter methodParameter;

	@Nullable
	protected Field field;

	// Lazily resolved field annotations; volatile so racing readers see a
	// fully-constructed array (benign recomputation is acceptable).
	@Nullable
	private volatile Annotation[] fieldAnnotations;


	/**
	 * Create a descriptor for a method or constructor parameter.
	 * @param methodParameter the MethodParameter to wrap
	 */
	public InjectionPoint(MethodParameter methodParameter) {
		Assert.notNull(methodParameter, "MethodParameter must not be null");
		this.methodParameter = methodParameter;
	}

	/**
	 * Create a descriptor for a field.
	 * @param field the field to wrap
	 */
	public InjectionPoint(Field field) {
		Assert.notNull(field, "Field must not be null");
		this.field = field;
	}

	/**
	 * Copy constructor.
	 * @param original the descriptor to copy
	 */
	protected InjectionPoint(InjectionPoint original) {
		if (original.methodParameter != null) {
			this.methodParameter = new MethodParameter(original.methodParameter);
		}
		else {
			this.methodParameter = null;
		}
		this.field = original.field;
		this.fieldAnnotations = original.fieldAnnotations;
	}

	/**
	 * Just available for serialization purposes in subclasses.
	 */
	protected InjectionPoint() {
	}


	/**
	 * Return the wrapped MethodParameter, or {@code null} if this point
	 * describes a field instead.
	 */
	@Nullable
	public MethodParameter getMethodParameter() {
		return this.methodParameter;
	}

	/**
	 * Return the wrapped Field, or {@code null} if this point describes a
	 * method/constructor parameter instead.
	 */
	@Nullable
	public Field getField() {
		return this.field;
	}

	/**
	 * Return the wrapped MethodParameter, asserting that it is present.
	 * @return the MethodParameter (never {@code null})
	 * @throws IllegalStateException if no MethodParameter is available
	 * @since 5.0
	 */
	protected final MethodParameter obtainMethodParameter() {
		Assert.state(this.methodParameter != null, "Neither Field nor MethodParameter");
		return this.methodParameter;
	}

	/**
	 * Obtain the annotations of the wrapped field, or of the wrapped
	 * method/constructor parameter. Field annotations are cached.
	 */
	public Annotation[] getAnnotations() {
		Field wrappedField = this.field;
		if (wrappedField == null) {
			return obtainMethodParameter().getParameterAnnotations();
		}
		Annotation[] cached = this.fieldAnnotations;
		if (cached == null) {
			cached = wrappedField.getAnnotations();
			this.fieldAnnotations = cached;
		}
		return cached;
	}

	/**
	 * Retrieve a field/parameter annotation of the given type, if present.
	 * @param annotationType the annotation type to look up
	 * @return the annotation instance, or {@code null} if none found
	 * @since 4.3.9
	 */
	@Nullable
	public <A extends Annotation> A getAnnotation(Class<A> annotationType) {
		if (this.field != null) {
			return this.field.getAnnotation(annotationType);
		}
		return obtainMethodParameter().getParameterAnnotation(annotationType);
	}

	/**
	 * Return the declared type of the underlying field or parameter,
	 * i.e. the type to inject.
	 */
	public Class<?> getDeclaredType() {
		if (this.field != null) {
			return this.field.getType();
		}
		return obtainMethodParameter().getParameterType();
	}

	/**
	 * Return the member containing the injection point: the Field itself,
	 * or the Method/Constructor owning the parameter.
	 */
	public Member getMember() {
		if (this.field != null) {
			return this.field;
		}
		return obtainMethodParameter().getMember();
	}

	/**
	 * Return the wrapped annotated element.
	 * <p>Note: for a method/constructor parameter this exposes annotations
	 * declared on the method or constructor itself, not on the parameter.
	 * Use {@link #getAnnotations()} for parameter-level annotations.
	 */
	public AnnotatedElement getAnnotatedElement() {
		if (this.field != null) {
			return this.field;
		}
		return obtainMethodParameter().getAnnotatedElement();
	}


	@Override
	public boolean equals(@Nullable Object other) {
		if (this == other) {
			return true;
		}
		if (other == null || getClass() != other.getClass()) {
			return false;
		}
		InjectionPoint that = (InjectionPoint) other;
		return (ObjectUtils.nullSafeEquals(this.field, that.field) &&
				ObjectUtils.nullSafeEquals(this.methodParameter, that.methodParameter));
	}

	@Override
	public int hashCode() {
		if (this.field != null) {
			return this.field.hashCode();
		}
		return ObjectUtils.nullSafeHashCode(this.methodParameter);
	}

	@Override
	public String toString() {
		if (this.field != null) {
			return "field '" + this.field.getName() + "'";
		}
		return String.valueOf(this.methodParameter);
	}

}
/* * Copyright 2013-2014 Richard M. Hightower * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * __________ _____ __ .__ * \______ \ ____ ____ ____ /\ / \ _____ | | _|__| ____ ____ * | | _// _ \ / _ \ / \ \/ / \ / \\__ \ | |/ / |/ \ / ___\ * | | ( <_> | <_> ) | \ /\ / Y \/ __ \| <| | | \/ /_/ > * |______ /\____/ \____/|___| / \/ \____|__ (____ /__|_ \__|___| /\___ / * \/ \/ \/ \/ \/ \//_____/ * ____. ___________ _____ ______________.___. * | |____ ___ _______ \_ _____/ / _ \ / _____/\__ | | * | \__ \\ \/ /\__ \ | __)_ / /_\ \ \_____ \ / | | * /\__| |/ __ \\ / / __ \_ | \/ | \/ \ \____ | * \________(____ /\_/ (____ / /_______ /\____|__ /_______ / / ______| * \/ \/ \/ \/ \/ \/ */ package io.advantageous.boon.primitive; import io.advantageous.boon.core.Exceptions; import io.advantageous.boon.core.reflection.Invoker; import java.nio.charset.StandardCharsets; import static io.advantageous.boon.core.Exceptions.die; public class Byt { public static byte[] grow( byte[] array, final int size ) { byte[] newArray = new byte[ array.length + size ]; System.arraycopy( array, 0, newArray, 0, array.length ); return newArray; } public static byte[] grow( byte[] array ) { byte[] newArray = new byte[ array.length * 2 ]; System.arraycopy( array, 0, newArray, 0, array.length ); return newArray; } public static byte[] shrink( byte[] array, int size ) { byte[] newArray = new byte[ array.length - size ]; System.arraycopy( array, 0, newArray, 0, array.length - size ); return newArray; } public static 
byte[] compact( byte[] array ) { int nullCount = 0; for ( byte ch : array ) { if ( ch == '\0' ) { nullCount++; } } byte[] newArray = new byte[ array.length - nullCount ]; int j = 0; for ( byte ch : array ) { if ( ch == '\0' ) { continue; } newArray[ j ] = ch; j++; } return newArray; } /** * Creates an array of bytes * * @param size size of the array you want to make * @return array of bytes */ public static byte[] arrayOfByte( final int size ) { return new byte[ size ]; } /** * @param array array * @return array */ public static byte[] array( final byte... array ) { return array; } /** * @param array array * @return array */ public static byte[] bytes( final byte... array ) { return array; } /** * @param str string * @return array */ public static byte[] bytes( String str ) { return str.getBytes( StandardCharsets.UTF_8 ); } public static int len( byte[] array ) { return array.length; } public static int lengthOf( byte[] array ) { return array.length; } public static byte atIndex( final byte[] array, final int index ) { return idx(array, index); } public static byte idx( final byte[] array, final int index ) { final int i = calculateIndex( array, index ); return array[ i ]; } public static void atIndex( final byte[] array, int index, byte value ) { idx(array, index, value); } public static void idx( final byte[] array, int index, byte value ) { final int i = calculateIndex( array, index ); array[ i ] = value; } public static byte[] sliceOf( byte[] array, int startIndex, int endIndex ) { return slc (array, startIndex, endIndex); } public static byte[] slc( byte[] array, int startIndex, int endIndex ) { final int start = calculateIndex( array, startIndex ); final int end = calculateEndIndex(array, endIndex); final int newLength = end - start; if ( newLength < 0 ) { throw new ArrayIndexOutOfBoundsException( String.format( "start index %d, end index %d, length %d", startIndex, endIndex, array.length ) ); } byte[] newArray = new byte[ newLength ]; System.arraycopy( 
array, start, newArray, 0, newLength ); return newArray; } public static byte[] sliceOf( byte[] array, int startIndex ) { return slc(array, startIndex); } public static byte[] slc( byte[] array, int startIndex ) { final int start = calculateIndex( array, startIndex ); final int newLength = array.length - start; if ( newLength < 0 ) { throw new ArrayIndexOutOfBoundsException( String.format( "start index %d, length %d", startIndex, array.length ) ); } byte[] newArray = new byte[ newLength ]; System.arraycopy( array, start, newArray, 0, newLength ); return newArray; } public static byte[] endSliceOf( byte[] array, int endIndex ) { return slcEnd(array, endIndex); } public static byte[] slcEnd( byte[] array, int endIndex ) { final int end = calculateEndIndex( array, endIndex ); final int newLength = end; // + (endIndex < 0 ? 1 : 0); if ( newLength < 0 ) { throw new ArrayIndexOutOfBoundsException( String.format( "start index %d, length %d", endIndex, array.length ) ); } byte[] newArray = new byte[ newLength ]; System.arraycopy( array, 0, newArray, 0, newLength ); return newArray; } public static boolean in( int value, byte... 
array ) { for ( int currentValue : array ) { if ( currentValue == value ) { return true; } } return false; } public static boolean inIntArray( byte value, int[] array ) { for ( int currentValue : array ) { if ( currentValue == value ) { return true; } } return false; } public static boolean in( int value, int offset, byte[] array ) { for ( int index = offset; index < array.length; index++ ) { int currentValue = array[ index ]; if ( currentValue == value ) { return true; } } return false; } public static boolean in( int value, int offset, int end, byte[] array ) { for ( int index = offset; index < end; index++ ) { int currentValue = array[ index ]; if ( currentValue == value ) { return true; } } return false; } public static byte[] copy( byte[] array ) { Exceptions.requireNonNull(array); byte[] newArray = new byte[ array.length ]; System.arraycopy( array, 0, newArray, 0, array.length ); return newArray; } public static byte[] copy( byte[] array, int offset, int length ) { Exceptions.requireNonNull( array ); byte[] newArray = new byte[ length ]; System.arraycopy( array, offset, newArray, 0, length ); return newArray; } public static byte[] add( byte[] array, byte v ) { byte[] newArray = new byte[ array.length + 1 ]; System.arraycopy( array, 0, newArray, 0, array.length ); newArray[ array.length ] = v; return newArray; } public static byte[] add( byte[] array, byte[] array2 ) { byte[] newArray = new byte[ array.length + array2.length ]; System.arraycopy( array, 0, newArray, 0, array.length ); System.arraycopy( array2, 0, newArray, array.length, array2.length ); return newArray; } public static byte[] insert( final byte[] array, final int idx, final byte v ) { if ( idx >= array.length ) { return add( array, v ); } final int index = calculateIndex( array, idx ); //Object newArray = Array.newInstance(array.getClass().getComponentType(), array.length+1); byte[] newArray = new byte[ array.length + 1 ]; if ( index != 0 ) { /* Copy up to the length in the array before the 
index. */ /* src sbegin dst dbegin length of copy */ System.arraycopy( array, 0, newArray, 0, index ); } boolean lastIndex = index == array.length - 1; int remainingIndex = array.length - index; if ( lastIndex ) { /* Copy the area after the insert. Make sure we don't write over the end. */ /* src sbegin dst dbegin length of copy */ System.arraycopy( array, index, newArray, index + 1, remainingIndex ); } else { /* Copy the area after the insert. */ /* src sbegin dst dbegin length of copy */ System.arraycopy( array, index, newArray, index + 1, remainingIndex ); } newArray[ index ] = v; return newArray; } public static byte[] insert( final byte[] array, final int fromIndex, final byte[] values ) { if ( fromIndex >= array.length ) { return add( array, values ); } final int index = calculateIndex( array, fromIndex ); //Object newArray = Array.newInstance(array.getClass().getComponentType(), array.length+1); byte[] newArray = new byte[ array.length + values.length ]; if ( index != 0 ) { /* Copy up to the length in the array before the index. */ /* src sbegin dst dbegin length of copy */ System.arraycopy( array, 0, newArray, 0, index ); } boolean lastIndex = index == array.length - 1; int toIndex = index + values.length; int remainingIndex = newArray.length - toIndex; if ( lastIndex ) { /* Copy the area after the insert. Make sure we don't write over the end. */ /* src sbegin dst dbegin length of copy */ System.arraycopy( array, index, newArray, index + values.length, remainingIndex ); } else { /* Copy the area after the insert. */ /* src sbegin dst dbegin length of copy */ System.arraycopy( array, index, newArray, index + values.length, remainingIndex ); } for ( int i = index, j = 0; i < toIndex; i++, j++ ) { newArray[ i ] = values[ j ]; } return newArray; } /* End universal methods. 
*/

    /**
     * Normalize a (possibly negative) element index and clamp it into the
     * valid range [0, array.length - 1]. Negative indexes count from the
     * right, Python-style (-1 is the last element). Out-of-range values are
     * silently clamped rather than raising.
     */
    private static int calculateIndex( byte[] array, int originalIndex ) {
        final int length = array.length;

        int index = originalIndex;

        /* Adjust for reading from the right as in
        -1 reads the 4th element if the length is 5
         */
        if ( index < 0 ) {
            index = length + index;
        }

        /* Bounds check
            if it is still less than 0, then they
            have an negative index that is greater than length
         */
        if ( index < 0 ) {
            index = 0;
        }
        if ( index >= length ) {
            index = length - 1;
        }
        return index;
    }

    /* End universal methods. */

    /**
     * Like {@link #calculateIndex(byte[], int)} but for an exclusive end
     * index: the clamp range is [0, array.length] so a slice may extend to
     * the end of the array.
     */
    private static int calculateEndIndex( byte[] array, int originalIndex ) {
        final int length = array.length;

        int index = originalIndex;

        /* Adjust for reading from the right as in
        -1 reads the 4th element if the length is 5
         */
        if ( index < 0 ) {
            index = length + index;
        }

        /* Bounds check
            if it is still less than 0, then they
            have an negative index that is greater than length
         */
        if ( index < 0 ) {
            index = 0;
        }
        if ( index > length ) {
            index = length;
        }
        return index;
    }

    /** Read a big-endian 32-bit int starting at {@code off}. */
    public static int idxInt( byte[] bytes, int off ) {
        return ( ( bytes[ off + 3 ] & 0xFF ) ) +
                ( ( bytes[ off + 2 ] & 0xFF ) << 8 ) +
                ( ( bytes[ off + 1 ] & 0xFF ) << 16 ) +
                ( ( bytes[ off ] ) << 24 );
    }

    /** Append {@code v} as four big-endian bytes, returning a new array. */
    public static byte[] addInt( byte[] array, int v ) {
        byte[] arrayToHoldInt = new byte[ 4 ];
        intTo( arrayToHoldInt, 0, v );
        return add( array, arrayToHoldInt );
    }

    /** Insert {@code v} as four big-endian bytes at {@code index}, returning a new array. */
    public static byte[] insertIntInto( byte[] array, int index, int v ) {
        byte[] arrayToHoldInt = new byte[ 4 ];
        intTo( arrayToHoldInt, 0, v );
        return insert( array, index, arrayToHoldInt );
    }

    /** Write {@code val} big-endian into {@code b} at {@code off} (4 bytes). */
    public static void intTo( byte[] b, int off, int val ) {
        b[ off + 3 ] = ( byte ) ( val );
        b[ off + 2 ] = ( byte ) ( val >>> 8 );
        b[ off + 1 ] = ( byte ) ( val >>> 16 );
        b[ off ] = ( byte ) ( val >>> 24 );
    }

    /** Write {@code val} big-endian into {@code b} at {@code off} (8 bytes). */
    public static void longTo( byte[] b, int off, long val
) { b[ off + 7 ] = ( byte ) ( val ); b[ off + 6 ] = ( byte ) ( val >>> 8 ); b[ off + 5 ] = ( byte ) ( val >>> 16 ); b[ off + 4 ] = ( byte ) ( val >>> 24 ); b[ off + 3 ] = ( byte ) ( val >>> 32 ); b[ off + 2 ] = ( byte ) ( val >>> 40 ); b[ off + 1 ] = ( byte ) ( val >>> 48 ); b[ off ] = ( byte ) ( val >>> 56 ); } public static byte[] addLong( byte[] array, long value ) { byte[] holder = new byte[ 8 ]; longTo( holder, 0, value ); return add( array, holder ); } public static long idxUnsignedInt( byte[] bytes, int off ) { return ( ( bytes[ off + 3 ] & 0xFFL ) ) + ( ( bytes[ off + 2 ] & 0xFFL ) << 8L ) + ( ( bytes[ off + 1 ] & 0xFFL ) << 16L ) + ( ( bytes[ off ] & 0xFFL ) << 24L ); } public static long idxLong( byte[] b, int off ) { return ( ( b[ off + 7 ] & 0xFFL ) ) + ( ( b[ off + 6 ] & 0xFFL ) << 8 ) + ( ( b[ off + 5 ] & 0xFFL ) << 16 ) + ( ( b[ off + 4 ] & 0xFFL ) << 24 ) + ( ( b[ off + 3 ] & 0xFFL ) << 32 ) + ( ( b[ off + 2 ] & 0xFFL ) << 40 ) + ( ( b[ off + 1 ] & 0xFFL ) << 48 ) + ( ( ( long ) b[ off ] ) << 56 ); } public static short idxShort( byte[] b, int off ) { return ( short ) ( ( b[ off + 1 ] & 0xFF ) + ( b[ off ] << 8 ) ); } public static byte[] addShort( byte[] array, short value ) { byte[] holder = new byte[ 2 ]; shortTo( holder, 0, value ); return add( array, holder ); } public static byte[] insertShortInto( byte[] array, int index, short value ) { byte[] holder = new byte[ 2 ]; shortTo( holder, 0, value ); return insert( array, index, holder ); } public static void shortTo( byte[] b, int off, short val ) { b[ off + 1 ] = ( byte ) ( val ); b[ off ] = ( byte ) ( val >>> 8 ); } public static char idxChar( byte[] b, int off ) { return ( char ) ( ( b[ off + 1 ] & 0xFF ) + ( b[ off ] << 8 ) ); } public static byte[] addChar( byte[] array, char value ) { byte[] holder = new byte[ 2 ]; charTo( holder, 0, value ); return add( array, holder ); } public static byte[] insertCharInto( byte[] array, int index, char value ) { byte[] holder = new byte[ 2 ]; charTo( 
holder, 0, value );
        return insert( array, index, holder );
    }

    /** Write {@code val} big-endian into {@code b} at {@code off} (2 bytes). */
    public static void charTo( byte[] b, int off, char val ) {
        b[ off + 1 ] = ( byte ) ( val );
        b[ off ] = ( byte ) ( val >>> 8 );
    }

    /** Write {@code val} big-endian into the first two bytes of {@code b}. */
    public static void charTo( byte[] b, char val ) {
        b[ 1 ] = ( byte ) ( val );
        b[ 0 ] = ( byte ) ( val >>> 8 );
    }

    /** Read a big-endian float (via its int bits) starting at {@code off}. */
    public static float idxFloat( byte[] array, int off ) {
        return Float.intBitsToFloat( idxInt( array, off ) );
    }

    /** Append {@code value} as four big-endian bytes, returning a new array. */
    public static byte[] addFloat( byte[] array, float value ) {
        byte[] holder = new byte[ 4 ];
        floatTo( holder, 0, value );
        return add( array, holder );
    }

    /** Insert {@code value} as four big-endian bytes at {@code index}, returning a new array. */
    public static byte[] insertFloatInto( byte[] array, int index, float value ) {
        byte[] holder = new byte[ 4 ];
        floatTo( holder, 0, value );
        return insert( array, index, holder );
    }

    /** Write {@code val} big-endian into {@code array} at {@code off} (4 bytes). */
    public static void floatTo( byte[] array, int off, float val ) {
        intTo( array, off, Float.floatToIntBits( val ) );
    }

    /**
     * Append {@code value} as eight big-endian bytes, returning a new array.
     * BUGFIX: the holder was previously sized 4, but doubleTo delegates to
     * longTo which writes 8 bytes — that always threw
     * ArrayIndexOutOfBoundsException.
     */
    public static byte[] addDouble( byte[] array, double value ) {
        Exceptions.requireNonNull( array );
        byte[] holder = new byte[ 8 ];
        doubleTo( holder, 0, value );
        return add( array, holder );
    }

    /**
     * Insert {@code value} as eight big-endian bytes at {@code index},
     * returning a new array.
     * BUGFIX: holder was sized 4 (see {@link #addDouble(byte[], double)}).
     */
    public static byte[] insertDoubleInto( byte[] array, int index, double value ) {
        Exceptions.requireNonNull( array );
        byte[] holder = new byte[ 8 ];
        doubleTo( holder, 0, value );
        return insert( array, index, holder );
    }

    /** Write {@code val} big-endian into {@code b} at {@code off} (8 bytes). */
    public static void doubleTo( byte[] b, int off, double val ) {
        longTo( b, off, Double.doubleToLongBits( val ) );
    }

    /** Read a big-endian double (via its long bits) starting at {@code off}. */
    public static double idxDouble( byte[] b, int off ) {
        return Double.longBitsToDouble( idxLong( b, off ) );
    }

//
//
//    public static boolean booleanAt(byte[] b, int off) {
//        return b[off] != 0;
//    }
//
//
//    public static boolean booleanInBytePos1(int val) {
//        val = val & 0x01;
//        return val != 0;
//    }
//
//    public static boolean booleanInBytePos2(int val) {
//        val = val & 0x02;
//        return val != 0;
//    }
//
//
//    public static boolean booleanInBytePos3(int val) {
//        val = val & 0x04;
//        return val != 0;
//    }
//
//    public static boolean booleanInBytePos4(int val) {
//        val = val & 0x08;
//        return val != 0;
// (commented-out bit-probe/nibble helpers removed — dead code)

    /**
     * Copies all of {@code input} into {@code array} starting at {@code startIndex}.
     * Any failure (null array, bad index, target too small) is routed through
     * {@link Exceptions#handle} with a diagnostic message.
     */
    public static void _idx( final byte[] array, int startIndex, byte[] input ) {
        try {
            System.arraycopy( input, 0, array, startIndex, input.length );
        } catch ( Exception ex ) {
            Exceptions.handle( String.format( "array size %d, startIndex %d, input length %d",
                    array.length, startIndex, input.length ), ex );
        }
    }

    /**
     * Copies the first {@code length} bytes of {@code input} into {@code array}
     * starting at {@code startIndex}.
     */
    public static void _idx( final byte[] array, int startIndex, byte[] input, int length ) {
        try {
            System.arraycopy( input, 0, array, startIndex, length );
        } catch ( Exception ex ) {
            // FIX: the old message reported only input.length even though just
            // `length` bytes were being copied, which made failures misleading.
            Exceptions.handle( String.format(
                    "array size %d, startIndex %d, input length %d, copy length %d",
                    array.length, startIndex, input.length, length ), ex );
        }
    }

    /**
     * Copies {@code length} bytes from {@code input} (starting at {@code inputOffset})
     * into {@code output} (starting at {@code outputStartIndex}).
     */
    public static void _idx( final byte[] output, int outputStartIndex, byte[] input,
                             int inputOffset, int length ) {
        // FIX: parameter typo "ouputStartIndex" corrected; diagnostic now includes
        // the offset and length actually used in the copy.
        try {
            System.arraycopy( input, inputOffset, output, outputStartIndex, length );
        } catch ( Exception ex ) {
            Exceptions.handle( String.format(
                    "output size %d, outputStartIndex %d, input length %d, inputOffset %d, copy length %d",
                    output.length, outputStartIndex, input.length, inputOffset, length ), ex );
        }
    }

    /** Reads two bytes at {@code off} (high byte first) as an unsigned 16-bit value. */
    public static int idxUnsignedShort( byte[] buffer, int off ) {
        int ch1 = buffer[ off ] & 0xFF;
        int ch2 = buffer[ off + 1 ] & 0xFF;
        return ( ch1 << 8 ) + ( ch2 );
    }

    /** Reads the byte at {@code location} as an unsigned value (0..255). */
    public static short idxUnsignedByte( byte[] array, int location ) {
        return ( short ) ( array[ location ] & 0xFF );
    }

    /** Writes the low 32 bits of {@code val} at {@code off}, high byte first. */
    public static void unsignedIntTo( byte[] b, int off, long val ) {
        b[ off + 3 ] = ( byte ) ( val );
        b[ off + 2 ] = ( byte ) ( val >>> 8 );
        b[ off + 1 ] = ( byte ) ( val >>> 16 );
        b[ off ] = ( byte ) ( val >>> 24 );
    }

    /** Writes the low 16 bits of {@code value} at {@code off}, high byte first. */
    public static void unsignedShortTo( byte[] buffer, int off, int value ) {
        buffer[ off + 1 ] = ( byte ) ( value );
        buffer[ off ] = ( byte ) ( value >>> 8 );
    }

    /** Writes the low 8 bits of {@code value} at {@code off}. */
    public static void unsignedByteTo( byte[] buffer, int off, short value ) {
        buffer[ off ] = ( byte ) ( value );
    }

    /** Decodes {@code bytes} as a UTF-8 string. */
    public static String utfString( byte[] bytes ) {
        return new String (bytes, StandardCharsets.UTF_8);
    }

    /**
     * Folds the array through the reducer {@code object} via
     * {@link Invoker#invokeReducer}, starting from 0.
     * NOTE(review): the reducer's contract (arguments, return type) lives in
     * Invoker — assumed to accept (state, byte) and return a number; confirm there.
     */
    public static int reduceBy( final byte[] array, Object object ) {
        int sum = 0;
        for ( byte v : array ) {
            sum = (int) Invoker.invokeReducer(object, sum, v);
        }
        return sum;
    }

    /**
     * Checks to see if two arrays are equal.
     * @param expected expected array
     * @param got got array
     * @return true if equal, or dies (via {@link Exceptions#die}) if not.
     */
    public static boolean equalsOrDie(byte[] expected, byte[] got) {
        if (expected.length != got.length) {
            Exceptions.die("Lengths did not match, expected length", expected.length,
                    "but got", got.length);
        }
        for (int index = 0; index < expected.length; index++) {
            if (expected[index] != got[index]) {
                Exceptions.die("value at index did not match index", index,
                        "expected value", expected[index], "but got", got[index]);
            }
        }
        return true;
    }

    /**
     * Checks to see if two arrays are equal.
     * @param expected expected array (must not be null)
     * @param got got array (must not be null)
     * @return true if equal, false if not
     */
    public static boolean equals(byte[] expected, byte[] got) {
        if (expected.length != got.length) {
            return false;
        }
        for (int index = 0; index < expected.length; index++) {
            if (expected[index] != got[index]) {
                return false;
            }
        }
        return true;
    }

}
/* * Copyright (c) 2014 Oculus Info Inc. * http://www.oculusinfo.com/ * * Released under the MIT License. * * Permission is hereby granted, free of charge, to any person obtaining a copy of * this software and associated documentation files (the "Software"), to deal in * the Software without restriction, including without limitation the rights to * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies * of the Software, and to permit persons to whom the Software is furnished to do * so, subject to the following conditions: * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
*/
package com.oculusinfo.binning.io.impl;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Properties;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.*;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;

import com.oculusinfo.binning.TileData;
import com.oculusinfo.binning.TileIndex;
import com.oculusinfo.binning.io.PyramidIO;
import com.oculusinfo.binning.io.serialization.TileSerializer;

//import org.apache.hadoop.hbase.TableName;

/**
 * A {@link PyramidIO} implementation that stores tile pyramids in HBase.
 * Tiles live in the "tileData" column family and pyramid metadata in the
 * "metaData" family, keyed by a zero-padded "level,x,y" row id.
 */
public class HBasePyramidIO implements PyramidIO {
	// Row id under which pyramid-wide metadata is stored.
	private static final String META_DATA_INDEX = "metadata";

	/** A (family, qualifier) pair identifying a single HBase column. */
	public static class HBaseColumn {
		byte[] family;
		byte[] qualifier;

		HBaseColumn (byte[] family, byte[] qualifier) {
			this.family = family;
			this.qualifier = qualifier;
		}

		public byte[] getFamily () {return family;}
		public byte[] getQualifier () {return qualifier;}
	}

	private static final byte[] EMPTY_BYTES = new byte[0];
	// NOTE(review): getBytes() uses the platform charset; family names are ASCII
	// so this is stable in practice, but UTF-8 would be explicit.
	private static final byte[] TILE_FAMILY_NAME = "tileData".getBytes();
	public static final HBaseColumn TILE_COLUMN = new HBaseColumn(TILE_FAMILY_NAME, EMPTY_BYTES);
	private static final byte[] METADATA_FAMILY_NAME = "metaData".getBytes();
	public static final HBaseColumn METADATA_COLUMN = new HBaseColumn(METADATA_FAMILY_NAME, EMPTY_BYTES);

	private Configuration _config;
	private HBaseAdmin    _admin;
	private HConnection   _connection;

	/**
	 * Connects to HBase via the given zookeeper quorum/port and master address.
	 */
	public HBasePyramidIO (String zookeeperQuorum, String zookeeperPort, String hbaseMaster)
		throws IOException {
		// Quiet the very chatty zookeeper/hadoop loggers.
		Logger.getLogger("org.apache.zookeeper").setLevel(Level.WARN);
		Logger.getLogger("org.apache.hadoop").setLevel(Level.WARN);

		_config = HBaseConfiguration.create();
		_config.set("hbase.zookeeper.quorum", zookeeperQuorum);
		_config.set("hbase.zookeeper.property.clientPort", zookeeperPort);
		_config.set("hbase.master", hbaseMaster);
		_admin = new HBaseAdmin(_config);
		_connection = HConnectionManager.createConnection(_config);
	}

	/**
	 * Determine the row ID we use in HBase for a given tile index.
	 */
	public static String rowIdFromTileIndex (TileIndex tile) {
		// Use the minimum possible number of digits for the tile key
		int digits = (int) Math.floor(Math.log10(1 << tile.getLevel()))+1;
		return String.format("%02d,%0"+digits+"d,%0"+digits+"d",
		                     tile.getLevel(), tile.getX(), tile.getY());
	}

	/**
	 * Determine tile index given a row id (the inverse of
	 * {@link #rowIdFromTileIndex}).
	 */
	public static TileIndex tileIndexFromRowId (String rowId) {
		String[] fields = rowId.split(",");
		return new TileIndex(Integer.parseInt(fields[0]),
		                     Integer.parseInt(fields[1]),
		                     Integer.parseInt(fields[2]));
	}

	/**
	 * Get the configuration used to connect to HBase.
	 */
	public Configuration getConfiguration () {
		return _config;
	}

	/*
	 * Gets an existing table (without creating it)
	 */
	private HTableInterface getTable (String tableName) throws IOException {
		return _connection.getTable( tableName );
	}

	/*
	 * Given a put request (a request to put data into a table), add a single
	 * entry into the request
	 *
	 * @param existingPut
	 *            The existing request. If null, a request will be created for
	 *            the given row. If non-null, no check will be performed to make
	 *            sure the put request is for the right row - this is the
	 *            responsibility of the caller.
	 * @param rowId
	 *            The id of the row to put. This is only used if the existingPut
	 *            is null.
	 * @param column
	 *            The column defining the entry in this row into which to put
	 *            the data
	 * @param data
	 *            the data to put into the described entry.
	 * @return The put request - the same as is passed in, or a new request if
	 *         none was passed in.
	 */
	private Put addToPut (Put existingPut, String rowId, HBaseColumn column, byte[] data) {
		if (null == existingPut) {
			existingPut = new Put(rowId.getBytes());
		}
		existingPut.add(column.family, column.qualifier, data);
		return existingPut;
	}

	/*
	 * Write a series of rows out to the given table
	 *
	 * @param tableName
	 *            The table to which to write
	 * @param rows
	 *            The rows to write
	 */
	private void writeRows (String tableName, List<Row> rows)
		throws InterruptedException, IOException {
		HTableInterface table = getTable(tableName);
		// FIX: close the table in finally so it is released even when batch()
		// or flushCommits() throws (previously the handle leaked on error).
		try {
			table.batch(rows);
			table.flushCommits();
		} finally {
			table.close();
		}
	}

	/* Extracts the requested columns of one result row into a map; null if the
	 * row held none of them. */
	private Map<HBaseColumn, byte[]> decodeRawResult (Result row, HBaseColumn[] columns) {
		Map<HBaseColumn, byte[]> results = null;
		for (HBaseColumn column: columns) {
			if (row.containsColumn(column.family, column.qualifier)) {
				if (null == results) results = new HashMap<HBaseColumn, byte[]>();
				results.put(column, row.getValue(column.family, column.qualifier));
			}
		}
		return results;
	}

	/*
	 * Read several rows of data.
	 *
	 * @param tableName
	 *            The table to read
	 * @param rows
	 *            The rows to read
	 * @param columns
	 *            The columns to read
	 * @return A list, in the same order as the input rows of maps from column
	 *         id to value. Columns missing from the data are also missing from
	 *         the map. Rows which returned no data have a null instead of a
	 *         map.
	 */
	private List<Map<HBaseColumn, byte[]>> readRows (String tableName, List<String> rows,
	                                                 HBaseColumn... columns) throws IOException {
		HTableInterface table = getTable(tableName);
		// FIX: close the table in finally so it is released even when get() throws.
		try {
			List<Get> gets = new ArrayList<Get>(rows.size());
			for (String rowId: rows) {
				Get get = new Get(rowId.getBytes());
				for (HBaseColumn column: columns) {
					get.addColumn(column.family, column.qualifier);
				}
				gets.add(get);
			}

			Result[] results = table.get(gets);

			List<Map<HBaseColumn, byte[]>> allResults =
				new ArrayList<Map<HBaseColumn,byte[]>>(rows.size());
			for (Result result: results) {
				allResults.add(decodeRawResult(result, columns));
			}
			return allResults;
		} finally {
			table.close();
		}
	}

	@Override
	public void initializeForWrite (String tableName) throws IOException {
		if ( !_admin.tableExists(tableName) ) {
			try {
				HTableDescriptor tableDesc = new HTableDescriptor(tableName);
				HColumnDescriptor metadataFamily = new HColumnDescriptor(METADATA_FAMILY_NAME);
				tableDesc.addFamily(metadataFamily);
				HColumnDescriptor tileFamily = new HColumnDescriptor(TILE_FAMILY_NAME);
				tableDesc.addFamily(tileFamily);
				_admin.createTable(tableDesc);
			} catch (Exception e) {
				// Deliberately best-effort (original behavior): table creation
				// races/failures are reported but not propagated.
				e.printStackTrace();
			}
		}
	}

	@Override
	public <T> void writeTiles (String tableName, TileSerializer<T> serializer,
	                            Iterable<TileData<T>> data) throws IOException {
		List<Row> rows = new ArrayList<Row>();
		for (TileData<T> tile: data) {
			ByteArrayOutputStream baos = new ByteArrayOutputStream();
			serializer.serialize(tile, baos);
			rows.add(addToPut(null, rowIdFromTileIndex(tile.getDefinition()),
			                  TILE_COLUMN, baos.toByteArray()));
		}
		try {
			writeRows(tableName, rows);
		} catch (InterruptedException e) {
			throw new IOException("Error writing tiles to HBase", e);
		}
	}

	@Override
	public void writeMetaData (String tableName, String metaData) throws IOException {
		try {
			List<Row> rows = new ArrayList<Row>();
			rows.add(addToPut(null, META_DATA_INDEX, METADATA_COLUMN, metaData.getBytes()));
			// FIX: a second, never-used Put for the same row/family was built here
			// and silently discarded; removed as dead code.
			writeRows(tableName, rows);
		} catch (InterruptedException e) {
			throw new IOException("Error writing metadata to HBase", e);
		}
	}

	@Override
	public void initializeForRead(String pyramidId, int width, int height,
	                              Properties dataDescription) {
		// Reading needs the table to exist; reuse the write-side initialization.
		try {
			initializeForWrite( pyramidId );
		} catch (Exception e) {
			// Deliberately best-effort (original behavior).
			e.printStackTrace();
		}
	}

	@Override
	public <T> List<TileData<T>> readTiles (String tableName,
	                                        TileSerializer<T> serializer,
	                                        Iterable<TileIndex> tiles) throws IOException {
		List<String> rowIds = new ArrayList<String>();
		for (TileIndex tile: tiles) {
			rowIds.add(rowIdFromTileIndex(tile));
		}

		List<Map<HBaseColumn, byte[]>> rawResults = readRows(tableName, rowIds, TILE_COLUMN);
		List<TileData<T>> results = new LinkedList<TileData<T>>();

		// Walk raw results and requested indices in parallel; rows with no data
		// (null map) are skipped, so results may be shorter than the request.
		Iterator<Map<HBaseColumn, byte[]>> iData = rawResults.iterator();
		Iterator<TileIndex> indexIterator = tiles.iterator();
		while (iData.hasNext()) {
			Map<HBaseColumn, byte[]> rawResult = iData.next();
			TileIndex index = indexIterator.next();
			if (null != rawResult) {
				byte[] rawData = rawResult.get(TILE_COLUMN);
				ByteArrayInputStream bais = new ByteArrayInputStream(rawData);
				TileData<T> data = serializer.deserialize(index, bais);
				results.add(data);
			}
		}
		return results;
	}

	@Override
	public <T> InputStream getTileStream (String tableName, TileSerializer<T> serializer,
	                                      TileIndex tile) throws IOException {
		List<String> rowIds = new ArrayList<String>();
		rowIds.add(rowIdFromTileIndex(tile));

		List<Map<HBaseColumn, byte[]>> rawResults = readRows(tableName, rowIds, TILE_COLUMN);
		Iterator<Map<HBaseColumn, byte[]>> iData = rawResults.iterator();
		if (iData.hasNext()) {
			Map<HBaseColumn, byte[]> rawResult = iData.next();
			if (null != rawResult) {
				byte[] rawData = rawResult.get(TILE_COLUMN);
				return new ByteArrayInputStream(rawData);
			}
		}
		return null;
	}

	@Override
	public String readMetaData (String tableName) throws IOException {
		List<Map<HBaseColumn, byte[]>> rawData =
			readRows(tableName, Collections.singletonList(META_DATA_INDEX), METADATA_COLUMN);
		if (null == rawData) return null;
		if (rawData.isEmpty()) return null;
		if (null == rawData.get(0)) return null;
		if (!rawData.get(0).containsKey(METADATA_COLUMN)) return null;
		return new String(rawData.get(0).get(METADATA_COLUMN));
	}

	@Override
	public void removeTiles (String tableName, Iterable<TileIndex> tiles) throws IOException {
		List<String> rowIds = new ArrayList<>();
		for (TileIndex tile: tiles) {
			rowIds.add( rowIdFromTileIndex( tile ) );
		}
		deleteRows(tableName, rowIds, TILE_COLUMN);
	}

	/* Deletes whole rows; the columns parameter is accepted for symmetry with
	 * readRows but is not used to narrow the delete. */
	private void deleteRows (String tableName, List<String> rows, HBaseColumn... columns)
		throws IOException {
		HTableInterface table = getTable(tableName);
		// FIX: close the table in finally so it is released even when delete() throws.
		try {
			List<Delete> deletes = new LinkedList<Delete>();
			for (String rowId: rows) {
				Delete delete = new Delete(rowId.getBytes());
				deletes.add(delete);
			}
			table.delete(deletes);
		} finally {
			table.close();
		}
	}

	/** Disables and deletes the given table. Best-effort: failures are ignored. */
	public void dropTable( String tableName ) {
		try {
			_admin.disableTable( /*TableName.valueOf(*/ tableName /*)*/ );
			_admin.deleteTable( /*TableName.valueOf(*/ tableName /*)*/ );
		} catch (Exception ignored) {
			// Deliberately swallowed (original behavior): dropping a missing or
			// already-disabled table is not an error for callers.
		}
	}
}
/* Generated By:JavaCC: Do not edit this line. MyParser.java */
// NOTE: This is JavaCC-generated code (STATIC=true). Do not hand-edit the
// parsing logic; regenerate from the .jj grammar instead. Comments below are
// review annotations only.
class MyParser implements MyParserConstants {

  // Entry point: parses standard input against the Start production.
  public static void main(String args[]) throws ParseException{
    new MyParser(System.in).Start();
    // NOTE(review): message likely meant "Parse successful" (missing space in
    // the grammar file); left unchanged because it is a runtime string.
    System.out.println("Parsesuccessful");
  }

  // Start production: consumes zero or more IF/ID/NUM/REAL tokens.
  static final public void Start() throws ParseException {
    trace_call("Start");
    try {
      label_1:
      while (true) {
        // First switch: lookahead — decide whether to stay in the loop.
        switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
        case IF:
        case ID:
        case NUM:
        case REAL:
          ;
          break;
        default:
          jj_la1[0] = jj_gen;
          break label_1;
        }
        // Second switch: actually consume the matched token kind.
        switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
        case IF:
          jj_consume_token(IF);
          break;
        case ID:
          jj_consume_token(ID);
          break;
        case NUM:
          jj_consume_token(NUM);
          break;
        case REAL:
          jj_consume_token(REAL);
          break;
        default:
          jj_la1[1] = jj_gen;
          jj_consume_token(-1);
          throw new ParseException();
        }
      }
    } finally {
      trace_return("Start");
    }
  }

  // Guards against constructing the static parser twice.
  static private boolean jj_initialized_once = false;
  static public MyParserTokenManager token_source;
  static SimpleCharStream jj_input_stream;
  // Current token and one-token lookahead.
  static public Token token, jj_nt;
  // Kind of the next token, or -1 if not yet fetched.
  static private int jj_ntk;
  // Generation counter used for error-reporting bookkeeping.
  static private int jj_gen;
  static final private int[] jj_la1 = new int[2];
  static private int[] jj_la1_0;
  static {
    jj_la1_0();
  }
  private static void jj_la1_0() {
    // Bitmask of token kinds acceptable at each choice point (IF/ID/NUM/REAL).
    jj_la1_0 = new int[] {0x3a,0x3a,};
  }

  public MyParser(java.io.InputStream stream) {
    if (jj_initialized_once) {
      System.out.println("ERROR: Second call to constructor of static parser. You must");
      System.out.println(" either use ReInit() or set the JavaCC option STATIC to false");
      System.out.println(" during parser generation.");
      throw new Error();
    }
    jj_initialized_once = true;
    jj_input_stream = new SimpleCharStream(stream, 1, 1);
    token_source = new MyParserTokenManager(jj_input_stream);
    token = new Token();
    jj_ntk = -1;
    jj_gen = 0;
    for (int i = 0; i < 2; i++) jj_la1[i] = -1;
  }

  // Re-initialize the (static) parser to read from a new input stream.
  static public void ReInit(java.io.InputStream stream) {
    jj_input_stream.ReInit(stream, 1, 1);
    token_source.ReInit(jj_input_stream);
    token = new Token();
    jj_ntk = -1;
    jj_gen = 0;
    for (int i = 0; i < 2; i++) jj_la1[i] = -1;
  }

  public MyParser(java.io.Reader stream) {
    if (jj_initialized_once) {
      System.out.println("ERROR: Second call to constructor of static parser. You must");
      System.out.println(" either use ReInit() or set the JavaCC option STATIC to false");
      System.out.println(" during parser generation.");
      throw new Error();
    }
    jj_initialized_once = true;
    jj_input_stream = new SimpleCharStream(stream, 1, 1);
    token_source = new MyParserTokenManager(jj_input_stream);
    token = new Token();
    jj_ntk = -1;
    jj_gen = 0;
    for (int i = 0; i < 2; i++) jj_la1[i] = -1;
  }

  // Re-initialize the (static) parser to read from a new Reader.
  static public void ReInit(java.io.Reader stream) {
    jj_input_stream.ReInit(stream, 1, 1);
    token_source.ReInit(jj_input_stream);
    token = new Token();
    jj_ntk = -1;
    jj_gen = 0;
    for (int i = 0; i < 2; i++) jj_la1[i] = -1;
  }

  public MyParser(MyParserTokenManager tm) {
    if (jj_initialized_once) {
      System.out.println("ERROR: Second call to constructor of static parser. You must");
      System.out.println(" either use ReInit() or set the JavaCC option STATIC to false");
      System.out.println(" during parser generation.");
      throw new Error();
    }
    jj_initialized_once = true;
    token_source = tm;
    token = new Token();
    jj_ntk = -1;
    jj_gen = 0;
    for (int i = 0; i < 2; i++) jj_la1[i] = -1;
  }

  // Re-initialize with an externally-constructed token manager.
  public void ReInit(MyParserTokenManager tm) {
    token_source = tm;
    token = new Token();
    jj_ntk = -1;
    jj_gen = 0;
    for (int i = 0; i < 2; i++) jj_la1[i] = -1;
  }

  // Consume the next token if it has the expected kind; otherwise restore the
  // previous token and raise a ParseException describing what was expected.
  static final private Token jj_consume_token(int kind) throws ParseException {
    Token oldToken;
    if ((oldToken = token).next != null) token = token.next;
    else token = token.next = token_source.getNextToken();
    jj_ntk = -1;
    if (token.kind == kind) {
      jj_gen++;
      trace_token(token, "");
      return token;
    }
    token = oldToken;
    jj_kind = kind;
    throw generateParseException();
  }

  static final public Token getNextToken() {
    if (token.next != null) token = token.next;
    else token = token.next = token_source.getNextToken();
    jj_ntk = -1;
    jj_gen++;
    trace_token(token, " (in getNextToken)");
    return token;
  }

  // Returns the token `index` positions ahead, fetching lazily as needed.
  static final public Token getToken(int index) {
    Token t = token;
    for (int i = 0; i < index; i++) {
      if (t.next != null) t = t.next;
      else t = t.next = token_source.getNextToken();
    }
    return t;
  }

  // Returns (and caches) the kind of the next token.
  static final private int jj_ntk() {
    if ((jj_nt=token.next) == null)
      return (jj_ntk = (token.next=token_source.getNextToken()).kind);
    else
      return (jj_ntk = jj_nt.kind);
  }

  static private java.util.Vector jj_expentries = new java.util.Vector();
  static private int[] jj_expentry;
  static private int jj_kind = -1;

  // Builds a ParseException listing every token kind that would have been
  // acceptable at the point of failure (from jj_la1/jj_la1_0 bookkeeping).
  static public ParseException generateParseException() {
    jj_expentries.removeAllElements();
    boolean[] la1tokens = new boolean[11];
    for (int i = 0; i < 11; i++) {
      la1tokens[i] = false;
    }
    if (jj_kind >= 0) {
      la1tokens[jj_kind] = true;
      jj_kind = -1;
    }
    for (int i = 0; i < 2; i++) {
      if (jj_la1[i] == jj_gen) {
        for (int j = 0; j < 32; j++) {
          if ((jj_la1_0[i] & (1<<j)) != 0) {
            la1tokens[j] = true;
          }
        }
      }
    }
    for (int i = 0; i < 11; i++) {
      if (la1tokens[i]) {
        jj_expentry = new int[1];
        jj_expentry[0] = i;
        jj_expentries.addElement(jj_expentry);
      }
    }
    int[][] exptokseq = new int[jj_expentries.size()][];
    for (int i = 0; i < jj_expentries.size(); i++) {
      exptokseq[i] = (int[])jj_expentries.elementAt(i);
    }
    return new ParseException(token, exptokseq, tokenImage);
  }

  // --- Debug tracing (enabled by the DEBUG_PARSER option) ---
  static private int trace_indent = 0;
  static private boolean trace_enabled = true;

  static final public void enable_tracing() {
    trace_enabled = true;
  }

  static final public void disable_tracing() {
    trace_enabled = false;
  }

  static final private void trace_call(String s) {
    if (trace_enabled) {
      for (int i = 0; i < trace_indent; i++) { System.out.print(" "); }
      System.out.println("Call:   " + s);
    }
    trace_indent = trace_indent + 2;
  }

  static final private void trace_return(String s) {
    trace_indent = trace_indent - 2;
    if (trace_enabled) {
      for (int i = 0; i < trace_indent; i++) { System.out.print(" "); }
      System.out.println("Return: " + s);
    }
  }

  static final private void trace_token(Token t, String where) {
    if (trace_enabled) {
      for (int i = 0; i < trace_indent; i++) { System.out.print(" "); }
      System.out.print("Consumed token: <" + tokenImage[t.kind]);
      if (t.kind != 0 && !tokenImage[t.kind].equals("\"" + t.image + "\"")) {
        System.out.print(": \"" + t.image + "\"");
      }
      System.out.println(">" + where);
    }
  }

  // Generated for lookahead tracing; may be unused for this grammar.
  static final private void trace_scan(Token t1, int t2) {
    if (trace_enabled) {
      for (int i = 0; i < trace_indent; i++) { System.out.print(" "); }
      System.out.print("Visited token: <" + tokenImage[t1.kind]);
      if (t1.kind != 0 && !tokenImage[t1.kind].equals("\"" + t1.image + "\"")) {
        System.out.print(": \"" + t1.image + "\"");
      }
      System.out.println(">; Expected token: <" + tokenImage[t2] + ">");
    }
  }

}
/*******************************************************************************
 * Copyright (c) 2009 IBM Corporation and others.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Public License v1.0
 * which accompanies this distribution, and is available at
 * http://www.eclipse.org/legal/epl-v10.html
 *
 * Contributors:
 *     IBM Corporation - initial API and implementation
 *     Zend Technologies
 *******************************************************************************/
package org.eclipse.php.internal.debug.core.zend.model;

import static org.eclipse.php.internal.debug.core.model.IPHPDataType.DataType.*;
import static org.eclipse.php.internal.debug.core.model.IVariableFacet.Facet.*;

import java.text.MessageFormat;

import org.eclipse.debug.core.DebugEvent;
import org.eclipse.debug.core.DebugException;
import org.eclipse.debug.core.model.IValue;
import org.eclipse.debug.core.model.IVariable;
import org.eclipse.debug.ui.actions.IWatchExpressionFactoryAdapter;
import org.eclipse.php.internal.debug.core.Logger;
import org.eclipse.php.internal.debug.core.model.IPHPDataType;
import org.eclipse.php.internal.debug.core.model.IVariableFacet;
import org.eclipse.php.internal.debug.core.model.PHPDebugElement;
import org.eclipse.php.internal.debug.core.zend.debugger.Expression;
import org.eclipse.php.internal.debug.core.zend.debugger.ExpressionsManager;

/**
 * A variable in a PHP stack frame
 */
public class PHPVariable extends PHPDebugElement implements IVariable, IPHPDataType {

	// The debugger-side expression this variable wraps.
	private Expression fExpression;
	// Lazily-maintained value wrapper; rebuilt in update() when the type changes.
	private PHPValue fValue;
	// Set by update() when the value string differs from the previous one.
	private boolean fHasChanged = false;
	// Cached display name; computed on first getName() call, reset by update().
	private String fName = null;

	/**
	 * Constructs a variable contained in the given stack frame with the given
	 * name.
	 */
	public PHPVariable(PHPDebugTarget target, Expression expression) {
		super(target);
		this.fExpression = expression;
		this.fValue = new PHPValue(target, expression);
	}

	@Override
	public DataType getDataType() {
		return fExpression.getValue().getDataType();
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.eclipse.debug.core.model.IVariable#getValue()
	 */
	public IValue getValue() throws DebugException {
		return fValue;
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.eclipse.debug.core.model.IVariable#getName()
	 */
	public String getName() throws DebugException {
		if (fName == null) {
			String endName = fExpression.getLastName();
			if (fExpression.hasFacet(KIND_OBJECT_MEMBER)) {
				// Object members arrive as "class::member" style names; show
				// only the part after the last ':'.
				int idx = endName.lastIndexOf(':');
				if (idx != -1)
					endName = endName.substring(idx + 1);
			} else if (fExpression.hasFacet(KIND_ARRAY_MEMBER)) {
				// Array members are displayed with bracket notation.
				endName = '[' + endName + ']';
			}
			fName = endName;
		}
		return fName;
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.eclipse.debug.core.model.IVariable#getReferenceTypeName()
	 */
	public String getReferenceTypeName() throws DebugException {
		return fValue.getReferenceTypeName();
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.eclipse.debug.core.model.IVariable#hasValueChanged()
	 */
	public boolean hasValueChanged() throws DebugException {
		return fHasChanged;
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see
	 * org.eclipse.debug.core.model.IValueModification#setValue(java.lang.String
	 * )
	 */
	public void setValue(String expression) throws DebugException {
		PHPDebugTarget debugTarget = (PHPDebugTarget) getDebugTarget();
		ExpressionsManager expressionManager = debugTarget.getExpressionManager();
		Expression changeVar = fExpression;
		if (fValue.getExpression().getValue().getDataType() == PHP_STRING) {
			// String values must be quoted before being sent to the debugger.
			expression = MessageFormat.format("\"{0}\"", expression); //$NON-NLS-1$
		}
		boolean status = expressionManager.assignValue(changeVar, expression, 1);
		if (!status) {
			Logger.debugMSG("[" + this //$NON-NLS-1$
					+ "] PHPValue: Problem changing variable value"); //$NON-NLS-1$
		}
		// Re-fetch the (possibly changed) value and notify listeners.
		expressionManager.update(changeVar, 1);
		fValue.updateValue(changeVar.getValue());
		fireChangeEvent(DebugEvent.CONTENT);
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see
	 * org.eclipse.debug.core.model.IValueModification#setValue(org.eclipse.
	 * debug.core.model.IValue)
	 */
	public void setValue(IValue value) throws DebugException {
		// Intentionally a no-op: modification via IValue is not supported.
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see
	 * org.eclipse.debug.core.model.IValueModification#supportsValueModification
	 * ()
	 */
	public boolean supportsValueModification() {
		// Not supported yet
		if (fExpression.hasFacet(MOD_STATIC) || fExpression.hasFacet(VIRTUAL_CLASS))
			return false;
		return true;
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see
	 * org.eclipse.debug.core.model.IValueModification#verifyValue(java.lang
	 * .String)
	 */
	public boolean verifyValue(String value) throws DebugException {
		switch (fExpression.getValue().getDataType()) {
		case PHP_BOOL: {
			// Only the literals "true"/"false" (case-insensitive) are accepted.
			if (!value.equalsIgnoreCase(String.valueOf(false))
					&& !value.equalsIgnoreCase(String.valueOf(true))) {
				return false;
			}
			break;
		}
		case PHP_FLOAT:
		case PHP_INT: {
			// NOTE(review): parseDouble also accepts e.g. "1.5" for PHP_INT;
			// numeric validation is deliberately loose here.
			try {
				Double.parseDouble(value);
				return true;
			} catch (NumberFormatException nfe) {
				return false;
			}
		}
		default:
			break;
		}
		return true;
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see
	 * org.eclipse.debug.core.model.IValueModification#verifyValue(org.eclipse
	 * .debug.core.model.IValue)
	 */
	public boolean verifyValue(IValue value) throws DebugException {
		return true;
	}

	@SuppressWarnings("unchecked")
	public Object getAdapter(@SuppressWarnings("rawtypes") Class adapter) {
		if (adapter == IWatchExpressionFactoryAdapter.class) {
			return new WatchExpressionFactoryAdapter();
		}
		if (adapter == Expression.class || adapter == IVariableFacet.class) {
			return fExpression;
		}
		return super.getAdapter(adapter);
	}

	// Internal accessor for the wrapped debugger expression.
	protected Expression getExpression() {
		return fExpression;
	}

	// Fully-qualified expression name as known to the debugger.
	protected String getFullName() {
		return fExpression.getFullName();
	}

	// Rebinds this variable to a freshly-fetched expression, reusing the PHPValue
	// when the data type is unchanged, and records whether the value changed.
	protected void update(Expression expression) {
		// Get previous data type
		DataType previousDataType = getDataType();
		// Catch previous value string if there is any
		String previousValueString = null;
		if (fValue != null) {
			previousValueString = fExpression.getValue().getValueAsString();
		}
		// Bind new expression
		fExpression = expression;
		// Reset name
		fName = null;
		// Update value
		if (fValue != null && fValue.getDataType() == previousDataType) {
			fValue.update(expression);
		} else {
			fValue = new PHPValue((PHPDebugTarget) getDebugTarget(), fExpression);
		}
		// Check if value has changed
		if (previousValueString != null) {
			fHasChanged = !previousValueString.equals(fExpression.getValue().getValueAsString());
		}
	}

}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hdfs.server.namenode;

import java.io.DataOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.protocol.LayoutFlags;
import org.apache.hadoop.io.IOUtils;

import com.google.common.annotations.VisibleForTesting;

/**
 * An implementation of the abstract class {@link EditLogOutputStream}, which
 * stores edits in a local file.
 */
@InterfaceAudience.Private
public class EditLogFileOutputStream extends EditLogOutputStream {
  private static final Logger LOG =
      LoggerFactory.getLogger(EditLogFileOutputStream.class);

  // Minimum number of bytes preallocated at the end of the file per batch.
  public static final int MIN_PREALLOCATION_LENGTH = 1024 * 1024;

  private File file;
  private FileOutputStream fp; // file stream for storing edit logs
  private FileChannel fc; // channel of the file stream for sync
  private EditsDoubleBuffer doubleBuf;
  // Shared direct buffer of OP_INVALID opcodes used to pad preallocated space;
  // filled once in the static initializer below.
  static final ByteBuffer fill =
      ByteBuffer.allocateDirect(MIN_PREALLOCATION_LENGTH);
  private boolean shouldSyncWritesAndSkipFsync = false;

  private static boolean shouldSkipFsyncForTests = false;

  static {
    fill.position(0);
    for (int i = 0; i < fill.capacity(); i++) {
      fill.put(FSEditLogOpCodes.OP_INVALID.getOpCode());
    }
  }

  /**
   * Creates output buffers and file object.
   *
   * @param conf
   *          Configuration object
   * @param name
   *          File name to store edit log
   * @param size
   *          Size of flush buffer
   * @throws IOException
   */
  public EditLogFileOutputStream(Configuration conf, File name, int size)
      throws IOException {
    super();
    shouldSyncWritesAndSkipFsync = conf.getBoolean(
        DFSConfigKeys.DFS_NAMENODE_EDITS_NOEDITLOGCHANNELFLUSH,
        DFSConfigKeys.DFS_NAMENODE_EDITS_NOEDITLOGCHANNELFLUSH_DEFAULT);

    file = name;
    doubleBuf = new EditsDoubleBuffer(size);
    RandomAccessFile rp;
    // "rws" makes the OS sync each write, so explicit force() is skipped later.
    if (shouldSyncWritesAndSkipFsync) {
      rp = new RandomAccessFile(name, "rws");
    } else {
      rp = new RandomAccessFile(name, "rw");
    }
    fp = new FileOutputStream(rp.getFD()); // open for append
    fc = rp.getChannel();
    fc.position(fc.size());
  }

  @Override
  public void write(FSEditLogOp op) throws IOException {
    doubleBuf.writeOp(op);
  }

  /**
   * Write a transaction to the stream. The serialization format is:
   * <ul>
   * <li>the opcode (byte)</li>
   * <li>the transaction id (long)</li>
   * <li>the actual Writables for the transaction</li>
   * </ul>
   * */
  @Override
  public void writeRaw(byte[] bytes, int offset, int length) throws IOException {
    doubleBuf.writeRaw(bytes, offset, length);
  }

  /**
   * Create empty edits logs file.
   */
  @Override
  public void create(int layoutVersion) throws IOException {
    // Truncate any previous content, then write the header and flush it out.
    fc.truncate(0);
    fc.position(0);
    writeHeader(layoutVersion, doubleBuf.getCurrentBuf());
    setReadyToFlush();
    flush();
  }

  /**
   * Write header information for this EditLogFileOutputStream to the provided
   * DataOutputStream.
   *
   * @param layoutVersion the LayoutVersion of the EditLog
   * @param out the output stream to write the header to.
   * @throws IOException in the event of error writing to the stream.
   */
  @VisibleForTesting
  public static void writeHeader(int layoutVersion, DataOutputStream out)
      throws IOException {
    out.writeInt(layoutVersion);
    LayoutFlags.write(out);
  }

  @Override
  public void close() throws IOException {
    if (fp == null) {
      throw new IOException("Trying to use aborted output stream");
    }

    try {
      // close should have been called after all pending transactions
      // have been flushed & synced.
      // if already closed, just skip
      if (doubleBuf != null) {
        doubleBuf.close();
        doubleBuf = null;
      }

      // remove any preallocated padding bytes from the transaction log.
      if (fc != null && fc.isOpen()) {
        fc.truncate(fc.position());
        fc.close();
        fc = null;
      }
      fp.close();
      fp = null;
    } finally {
      // Best-effort cleanup of whatever the try block did not get to close;
      // fields already closed above are null here and are skipped.
      IOUtils.cleanupWithLogger(LOG, fc, fp);
      doubleBuf = null;
      fc = null;
      fp = null;
    }
    // NOTE(review): redundant — fp is already nulled in the finally block above.
    fp = null;
  }

  @Override
  public void abort() throws IOException {
    // Abandon the stream without flushing buffered edits.
    if (fp == null) {
      return;
    }
    IOUtils.cleanupWithLogger(LOG, fp);
    fp = null;
  }

  /**
   * All data that has been written to the stream so far will be flushed. New
   * data can be still written to the stream while flushing is performed.
   */
  @Override
  public void setReadyToFlush() throws IOException {
    doubleBuf.setReadyToFlush();
  }

  /**
   * Flush ready buffer to persistent store. currentBuffer is not flushed as it
   * accumulates new log records while readyBuffer will be flushed and synced.
   */
  @Override
  public void flushAndSync(boolean durable) throws IOException {
    if (fp == null) {
      throw new IOException("Trying to use aborted output stream");
    }
    if (doubleBuf.isFlushed()) {
      LOG.info("Nothing to flush");
      return;
    }
    preallocate(); // preallocate file if necessary
    doubleBuf.flushTo(fp);
    // force() is skipped when tests opt out, or when the file was opened "rws"
    // (the OS already syncs each write in that mode).
    if (durable && !shouldSkipFsyncForTests && !shouldSyncWritesAndSkipFsync) {
      fc.force(false); // metadata updates not needed
    }
  }

  /**
   * @return true if the number of buffered data exceeds the intial buffer size
   */
  @Override
  public boolean shouldForceSync() {
    return doubleBuf.shouldForceSync();
  }

  // Extends the file with OP_INVALID padding when the ready buffer would not
  // fit in the space remaining past the current write position.
  private void preallocate() throws IOException {
    long position = fc.position();
    long size = fc.size();
    int bufSize = doubleBuf.getReadyBuf().getLength();
    long need = bufSize - (size - position);
    if (need <= 0) {
      return;
    }
    long oldSize = size;
    long total = 0;
    long fillCapacity = fill.capacity();
    // Pad in MIN_PREALLOCATION_LENGTH-sized chunks until enough space exists.
    while (need > 0) {
      fill.position(0);
      IOUtils.writeFully(fc, fill, size);
      need -= fillCapacity;
      size += fillCapacity;
      total += fillCapacity;
    }
    if(LOG.isDebugEnabled()) {
      LOG.debug("Preallocated " + total + " bytes at the end of " +
          "the edit log (offset " + oldSize + ")");
    }
  }

  /**
   * Returns the file associated with this stream.
   */
  File getFile() {
    return file;
  }

  @Override
  public String toString() {
    return "EditLogFileOutputStream(" + file + ")";
  }

  /**
   * @return true if this stream is currently open.
   */
  public boolean isOpen() {
    return fp != null;
  }

  @VisibleForTesting
  public void setFileChannelForTesting(FileChannel fc) {
    this.fc = fc;
  }

  @VisibleForTesting
  public FileChannel getFileChannelForTesting() {
    return fc;
  }

  /**
   * For the purposes of unit tests, we don't need to actually
   * write durably to disk. So, we can skip the fsync() calls
   * for a speed improvement.
   * @param skip true if fsync should <em>not</em> be called
   */
  @VisibleForTesting
  public static void setShouldSkipFsyncForTesting(boolean skip) {
    shouldSkipFsyncForTests = skip;
  }
}
/* * Copyright (c) 2003, 2012, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package sun.security.pkcs11; import java.io.*; import java.lang.ref.*; import java.math.BigInteger; import java.util.*; import java.security.*; import java.security.interfaces.*; import java.security.spec.*; import javax.crypto.*; import javax.crypto.interfaces.*; import javax.crypto.spec.*; import sun.security.rsa.RSAPublicKeyImpl; import sun.security.internal.interfaces.TlsMasterSecret; import sun.security.pkcs11.wrapper.*; import static sun.security.pkcs11.wrapper.PKCS11Constants.*; import sun.security.util.DerValue; import sun.security.util.Length; /** * Key implementation classes. * * In PKCS#11, the components of private and secret keys may or may not * be accessible. If they are, we use the algorithm specific key classes * (e.g. DSAPrivateKey) for compatibility with existing applications. 
* If the components are not accessible, we use a generic class that * only implements PrivateKey (or SecretKey). Whether the components of a * key are extractable is automatically determined when the key object is * created. * * @author Andreas Sterbenz * @since 1.5 */ abstract class P11Key implements Key, Length { private final static String PUBLIC = "public"; private final static String PRIVATE = "private"; private final static String SECRET = "secret"; // type of key, one of (PUBLIC, PRIVATE, SECRET) final String type; // token instance final Token token; // algorithm name, returned by getAlgorithm(), etc. final String algorithm; // key id final long keyID; // effective key length of the key, e.g. 56 for a DES key final int keyLength; // flags indicating whether the key is a token object, sensitive, extractable final boolean tokenObject, sensitive, extractable; // phantom reference notification clean up for session keys private final SessionKeyRef sessionKeyRef; P11Key(String type, Session session, long keyID, String algorithm, int keyLength, CK_ATTRIBUTE[] attributes) { this.type = type; this.token = session.token; this.keyID = keyID; this.algorithm = algorithm; this.keyLength = keyLength; boolean tokenObject = false; boolean sensitive = false; boolean extractable = true; int n = (attributes == null) ? 
0 : attributes.length; for (int i = 0; i < n; i++) { CK_ATTRIBUTE attr = attributes[i]; if (attr.type == CKA_TOKEN) { tokenObject = attr.getBoolean(); } else if (attr.type == CKA_SENSITIVE) { sensitive = attr.getBoolean(); } else if (attr.type == CKA_EXTRACTABLE) { extractable = attr.getBoolean(); } } this.tokenObject = tokenObject; this.sensitive = sensitive; this.extractable = extractable; if (tokenObject == false) { sessionKeyRef = new SessionKeyRef(this, keyID, session); } else { sessionKeyRef = null; } } // see JCA spec public final String getAlgorithm() { token.ensureValid(); return algorithm; } // see JCA spec public final byte[] getEncoded() { byte[] b = getEncodedInternal(); return (b == null) ? null : (byte[])b.clone(); } abstract byte[] getEncodedInternal(); public boolean equals(Object obj) { if (this == obj) { return true; } // equals() should never throw exceptions if (token.isValid() == false) { return false; } if (obj instanceof Key == false) { return false; } String thisFormat = getFormat(); if (thisFormat == null) { // no encoding, key only equal to itself // XXX getEncoded() for unextractable keys will change that return false; } Key other = (Key)obj; if (thisFormat.equals(other.getFormat()) == false) { return false; } byte[] thisEnc = this.getEncodedInternal(); byte[] otherEnc; if (obj instanceof P11Key) { otherEnc = ((P11Key)other).getEncodedInternal(); } else { otherEnc = other.getEncoded(); } return Arrays.equals(thisEnc, otherEnc); } public int hashCode() { // hashCode() should never throw exceptions if (token.isValid() == false) { return 0; } byte[] b1 = getEncodedInternal(); if (b1 == null) { return 0; } int r = b1.length; for (int i = 0; i < b1.length; i++) { r += (b1[i] & 0xff) * 37; } return r; } protected Object writeReplace() throws ObjectStreamException { KeyRep.Type type; String format = getFormat(); if (isPrivate() && "PKCS#8".equals(format)) { type = KeyRep.Type.PRIVATE; } else if (isPublic() && "X.509".equals(format)) { type = 
KeyRep.Type.PUBLIC; } else if (isSecret() && "RAW".equals(format)) { type = KeyRep.Type.SECRET; } else { // XXX short term serialization for unextractable keys throw new NotSerializableException ("Cannot serialize sensitive and unextractable keys"); } return new KeyRep(type, getAlgorithm(), format, getEncoded()); } public String toString() { token.ensureValid(); String s1 = token.provider.getName() + " " + algorithm + " " + type + " key, " + keyLength + " bits"; s1 += " (id " + keyID + ", " + (tokenObject ? "token" : "session") + " object"; if (isPublic()) { s1 += ")"; } else { s1 += ", " + (sensitive ? "" : "not ") + "sensitive"; s1 += ", " + (extractable ? "" : "un") + "extractable)"; } return s1; } /** * Return bit length of the key. */ @Override public int length() { return keyLength; } boolean isPublic() { return type == PUBLIC; } boolean isPrivate() { return type == PRIVATE; } boolean isSecret() { return type == SECRET; } void fetchAttributes(CK_ATTRIBUTE[] attributes) { Session tempSession = null; try { tempSession = token.getOpSession(); token.p11.C_GetAttributeValue(tempSession.id(), keyID, attributes); } catch (PKCS11Exception e) { throw new ProviderException(e); } finally { token.releaseSession(tempSession); } } private final static CK_ATTRIBUTE[] A0 = new CK_ATTRIBUTE[0]; private static CK_ATTRIBUTE[] getAttributes(Session session, long keyID, CK_ATTRIBUTE[] knownAttributes, CK_ATTRIBUTE[] desiredAttributes) { if (knownAttributes == null) { knownAttributes = A0; } for (int i = 0; i < desiredAttributes.length; i++) { // For each desired attribute, check to see if we have the value // available already. If everything is here, we save a native call. 
CK_ATTRIBUTE attr = desiredAttributes[i]; for (CK_ATTRIBUTE known : knownAttributes) { if ((attr.type == known.type) && (known.pValue != null)) { attr.pValue = known.pValue; break; // break inner for loop } } if (attr.pValue == null) { // nothing found, need to call C_GetAttributeValue() for (int j = 0; j < i; j++) { // clear values copied from knownAttributes desiredAttributes[j].pValue = null; } try { session.token.p11.C_GetAttributeValue (session.id(), keyID, desiredAttributes); } catch (PKCS11Exception e) { throw new ProviderException(e); } break; // break loop, goto return } } return desiredAttributes; } static SecretKey secretKey(Session session, long keyID, String algorithm, int keyLength, CK_ATTRIBUTE[] attributes) { attributes = getAttributes(session, keyID, attributes, new CK_ATTRIBUTE[] { new CK_ATTRIBUTE(CKA_TOKEN), new CK_ATTRIBUTE(CKA_SENSITIVE), new CK_ATTRIBUTE(CKA_EXTRACTABLE), }); return new P11SecretKey(session, keyID, algorithm, keyLength, attributes); } static SecretKey masterSecretKey(Session session, long keyID, String algorithm, int keyLength, CK_ATTRIBUTE[] attributes, int major, int minor) { attributes = getAttributes(session, keyID, attributes, new CK_ATTRIBUTE[] { new CK_ATTRIBUTE(CKA_TOKEN), new CK_ATTRIBUTE(CKA_SENSITIVE), new CK_ATTRIBUTE(CKA_EXTRACTABLE), }); return new P11TlsMasterSecretKey (session, keyID, algorithm, keyLength, attributes, major, minor); } // we assume that all components of public keys are always accessible static PublicKey publicKey(Session session, long keyID, String algorithm, int keyLength, CK_ATTRIBUTE[] attributes) { if (algorithm.equals("RSA")) { return new P11RSAPublicKey (session, keyID, algorithm, keyLength, attributes); } else if (algorithm.equals("DSA")) { return new P11DSAPublicKey (session, keyID, algorithm, keyLength, attributes); } else if (algorithm.equals("DH")) { return new P11DHPublicKey (session, keyID, algorithm, keyLength, attributes); } else if (algorithm.equals("EC")) { return new 
P11ECPublicKey (session, keyID, algorithm, keyLength, attributes); } else { throw new ProviderException ("Unknown public key algorithm " + algorithm); } } static PrivateKey privateKey(Session session, long keyID, String algorithm, int keyLength, CK_ATTRIBUTE[] attributes) { attributes = getAttributes(session, keyID, attributes, new CK_ATTRIBUTE[] { new CK_ATTRIBUTE(CKA_TOKEN), new CK_ATTRIBUTE(CKA_SENSITIVE), new CK_ATTRIBUTE(CKA_EXTRACTABLE), }); if (attributes[1].getBoolean() || (attributes[2].getBoolean() == false)) { return new P11PrivateKey (session, keyID, algorithm, keyLength, attributes); } else { if (algorithm.equals("RSA")) { // XXX better test for RSA CRT keys (single getAttributes() call) // we need to determine whether this is a CRT key // see if we can obtain the public exponent // this should also be readable for sensitive/extractable keys CK_ATTRIBUTE[] attrs2 = new CK_ATTRIBUTE[] { new CK_ATTRIBUTE(CKA_PUBLIC_EXPONENT), }; boolean crtKey; try { session.token.p11.C_GetAttributeValue (session.id(), keyID, attrs2); crtKey = (attrs2[0].pValue instanceof byte[]); } catch (PKCS11Exception e) { // ignore, assume not available crtKey = false; } if (crtKey) { return new P11RSAPrivateKey (session, keyID, algorithm, keyLength, attributes); } else { return new P11RSAPrivateNonCRTKey (session, keyID, algorithm, keyLength, attributes); } } else if (algorithm.equals("DSA")) { return new P11DSAPrivateKey (session, keyID, algorithm, keyLength, attributes); } else if (algorithm.equals("DH")) { return new P11DHPrivateKey (session, keyID, algorithm, keyLength, attributes); } else if (algorithm.equals("EC")) { return new P11ECPrivateKey (session, keyID, algorithm, keyLength, attributes); } else { throw new ProviderException ("Unknown private key algorithm " + algorithm); } } } // class for sensitive and unextractable private keys private static final class P11PrivateKey extends P11Key implements PrivateKey { P11PrivateKey(Session session, long keyID, String algorithm, 
int keyLength, CK_ATTRIBUTE[] attributes) { super(PRIVATE, session, keyID, algorithm, keyLength, attributes); } // XXX temporary encoding for serialization purposes public String getFormat() { token.ensureValid(); return null; } byte[] getEncodedInternal() { token.ensureValid(); return null; } } private static class P11SecretKey extends P11Key implements SecretKey { private volatile byte[] encoded; P11SecretKey(Session session, long keyID, String algorithm, int keyLength, CK_ATTRIBUTE[] attributes) { super(SECRET, session, keyID, algorithm, keyLength, attributes); } public String getFormat() { token.ensureValid(); if (sensitive || (extractable == false)) { return null; } else { return "RAW"; } } byte[] getEncodedInternal() { token.ensureValid(); if (getFormat() == null) { return null; } byte[] b = encoded; if (b == null) { synchronized (this) { b = encoded; if (b == null) { Session tempSession = null; try { tempSession = token.getOpSession(); CK_ATTRIBUTE[] attributes = new CK_ATTRIBUTE[] { new CK_ATTRIBUTE(CKA_VALUE), }; token.p11.C_GetAttributeValue (tempSession.id(), keyID, attributes); b = attributes[0].getByteArray(); } catch (PKCS11Exception e) { throw new ProviderException(e); } finally { token.releaseSession(tempSession); } encoded = b; } } } return b; } } private static class P11TlsMasterSecretKey extends P11SecretKey implements TlsMasterSecret { private final int majorVersion, minorVersion; P11TlsMasterSecretKey(Session session, long keyID, String algorithm, int keyLength, CK_ATTRIBUTE[] attributes, int major, int minor) { super(session, keyID, algorithm, keyLength, attributes); this.majorVersion = major; this.minorVersion = minor; } public int getMajorVersion() { return majorVersion; } public int getMinorVersion() { return minorVersion; } } // RSA CRT private key private static final class P11RSAPrivateKey extends P11Key implements RSAPrivateCrtKey { private BigInteger n, e, d, p, q, pe, qe, coeff; private byte[] encoded; P11RSAPrivateKey(Session 
session, long keyID, String algorithm, int keyLength, CK_ATTRIBUTE[] attributes) { super(PRIVATE, session, keyID, algorithm, keyLength, attributes); } private synchronized void fetchValues() { token.ensureValid(); if (n != null) { return; } CK_ATTRIBUTE[] attributes = new CK_ATTRIBUTE[] { new CK_ATTRIBUTE(CKA_MODULUS), new CK_ATTRIBUTE(CKA_PUBLIC_EXPONENT), new CK_ATTRIBUTE(CKA_PRIVATE_EXPONENT), new CK_ATTRIBUTE(CKA_PRIME_1), new CK_ATTRIBUTE(CKA_PRIME_2), new CK_ATTRIBUTE(CKA_EXPONENT_1), new CK_ATTRIBUTE(CKA_EXPONENT_2), new CK_ATTRIBUTE(CKA_COEFFICIENT), }; fetchAttributes(attributes); n = attributes[0].getBigInteger(); e = attributes[1].getBigInteger(); d = attributes[2].getBigInteger(); p = attributes[3].getBigInteger(); q = attributes[4].getBigInteger(); pe = attributes[5].getBigInteger(); qe = attributes[6].getBigInteger(); coeff = attributes[7].getBigInteger(); } public String getFormat() { token.ensureValid(); return "PKCS#8"; } synchronized byte[] getEncodedInternal() { token.ensureValid(); if (encoded == null) { fetchValues(); try { // XXX make constructor in SunRsaSign provider public // and call it directly KeyFactory factory = KeyFactory.getInstance ("RSA", P11Util.getSunRsaSignProvider()); Key newKey = factory.translateKey(this); encoded = newKey.getEncoded(); } catch (GeneralSecurityException e) { throw new ProviderException(e); } } return encoded; } public BigInteger getModulus() { fetchValues(); return n; } public BigInteger getPublicExponent() { fetchValues(); return e; } public BigInteger getPrivateExponent() { fetchValues(); return d; } public BigInteger getPrimeP() { fetchValues(); return p; } public BigInteger getPrimeQ() { fetchValues(); return q; } public BigInteger getPrimeExponentP() { fetchValues(); return pe; } public BigInteger getPrimeExponentQ() { fetchValues(); return qe; } public BigInteger getCrtCoefficient() { fetchValues(); return coeff; } public String toString() { fetchValues(); StringBuilder sb = new 
StringBuilder(super.toString()); sb.append("\n modulus: "); sb.append(n); sb.append("\n public exponent: "); sb.append(e); sb.append("\n private exponent: "); sb.append(d); sb.append("\n prime p: "); sb.append(p); sb.append("\n prime q: "); sb.append(q); sb.append("\n prime exponent p: "); sb.append(pe); sb.append("\n prime exponent q: "); sb.append(qe); sb.append("\n crt coefficient: "); sb.append(coeff); return sb.toString(); } } // RSA non-CRT private key private static final class P11RSAPrivateNonCRTKey extends P11Key implements RSAPrivateKey { private BigInteger n, d; private byte[] encoded; P11RSAPrivateNonCRTKey(Session session, long keyID, String algorithm, int keyLength, CK_ATTRIBUTE[] attributes) { super(PRIVATE, session, keyID, algorithm, keyLength, attributes); } private synchronized void fetchValues() { token.ensureValid(); if (n != null) { return; } CK_ATTRIBUTE[] attributes = new CK_ATTRIBUTE[] { new CK_ATTRIBUTE(CKA_MODULUS), new CK_ATTRIBUTE(CKA_PRIVATE_EXPONENT), }; fetchAttributes(attributes); n = attributes[0].getBigInteger(); d = attributes[1].getBigInteger(); } public String getFormat() { token.ensureValid(); return "PKCS#8"; } synchronized byte[] getEncodedInternal() { token.ensureValid(); if (encoded == null) { fetchValues(); try { // XXX make constructor in SunRsaSign provider public // and call it directly KeyFactory factory = KeyFactory.getInstance ("RSA", P11Util.getSunRsaSignProvider()); Key newKey = factory.translateKey(this); encoded = newKey.getEncoded(); } catch (GeneralSecurityException e) { throw new ProviderException(e); } } return encoded; } public BigInteger getModulus() { fetchValues(); return n; } public BigInteger getPrivateExponent() { fetchValues(); return d; } public String toString() { fetchValues(); StringBuilder sb = new StringBuilder(super.toString()); sb.append("\n modulus: "); sb.append(n); sb.append("\n private exponent: "); sb.append(d); return sb.toString(); } } private static final class P11RSAPublicKey extends 
P11Key implements RSAPublicKey { private BigInteger n, e; private byte[] encoded; P11RSAPublicKey(Session session, long keyID, String algorithm, int keyLength, CK_ATTRIBUTE[] attributes) { super(PUBLIC, session, keyID, algorithm, keyLength, attributes); } private synchronized void fetchValues() { token.ensureValid(); if (n != null) { return; } CK_ATTRIBUTE[] attributes = new CK_ATTRIBUTE[] { new CK_ATTRIBUTE(CKA_MODULUS), new CK_ATTRIBUTE(CKA_PUBLIC_EXPONENT), }; fetchAttributes(attributes); n = attributes[0].getBigInteger(); e = attributes[1].getBigInteger(); } public String getFormat() { token.ensureValid(); return "X.509"; } synchronized byte[] getEncodedInternal() { token.ensureValid(); if (encoded == null) { fetchValues(); try { encoded = new RSAPublicKeyImpl(n, e).getEncoded(); } catch (InvalidKeyException e) { throw new ProviderException(e); } } return encoded; } public BigInteger getModulus() { fetchValues(); return n; } public BigInteger getPublicExponent() { fetchValues(); return e; } public String toString() { fetchValues(); return super.toString() + "\n modulus: " + n + "\n public exponent: " + e; } } private static final class P11DSAPublicKey extends P11Key implements DSAPublicKey { private BigInteger y; private DSAParams params; private byte[] encoded; P11DSAPublicKey(Session session, long keyID, String algorithm, int keyLength, CK_ATTRIBUTE[] attributes) { super(PUBLIC, session, keyID, algorithm, keyLength, attributes); } private synchronized void fetchValues() { token.ensureValid(); if (y != null) { return; } CK_ATTRIBUTE[] attributes = new CK_ATTRIBUTE[] { new CK_ATTRIBUTE(CKA_VALUE), new CK_ATTRIBUTE(CKA_PRIME), new CK_ATTRIBUTE(CKA_SUBPRIME), new CK_ATTRIBUTE(CKA_BASE), }; fetchAttributes(attributes); y = attributes[0].getBigInteger(); params = new DSAParameterSpec( attributes[1].getBigInteger(), attributes[2].getBigInteger(), attributes[3].getBigInteger() ); } public String getFormat() { token.ensureValid(); return "X.509"; } synchronized byte[] 
getEncodedInternal() { token.ensureValid(); if (encoded == null) { fetchValues(); try { Key key = new sun.security.provider.DSAPublicKey (y, params.getP(), params.getQ(), params.getG()); encoded = key.getEncoded(); } catch (InvalidKeyException e) { throw new ProviderException(e); } } return encoded; } public BigInteger getY() { fetchValues(); return y; } public DSAParams getParams() { fetchValues(); return params; } public String toString() { fetchValues(); return super.toString() + "\n y: " + y + "\n p: " + params.getP() + "\n q: " + params.getQ() + "\n g: " + params.getG(); } } private static final class P11DSAPrivateKey extends P11Key implements DSAPrivateKey { private BigInteger x; private DSAParams params; private byte[] encoded; P11DSAPrivateKey(Session session, long keyID, String algorithm, int keyLength, CK_ATTRIBUTE[] attributes) { super(PRIVATE, session, keyID, algorithm, keyLength, attributes); } private synchronized void fetchValues() { token.ensureValid(); if (x != null) { return; } CK_ATTRIBUTE[] attributes = new CK_ATTRIBUTE[] { new CK_ATTRIBUTE(CKA_VALUE), new CK_ATTRIBUTE(CKA_PRIME), new CK_ATTRIBUTE(CKA_SUBPRIME), new CK_ATTRIBUTE(CKA_BASE), }; fetchAttributes(attributes); x = attributes[0].getBigInteger(); params = new DSAParameterSpec( attributes[1].getBigInteger(), attributes[2].getBigInteger(), attributes[3].getBigInteger() ); } public String getFormat() { token.ensureValid(); return "PKCS#8"; } synchronized byte[] getEncodedInternal() { token.ensureValid(); if (encoded == null) { fetchValues(); try { Key key = new sun.security.provider.DSAPrivateKey (x, params.getP(), params.getQ(), params.getG()); encoded = key.getEncoded(); } catch (InvalidKeyException e) { throw new ProviderException(e); } } return encoded; } public BigInteger getX() { fetchValues(); return x; } public DSAParams getParams() { fetchValues(); return params; } public String toString() { fetchValues(); return super.toString() + "\n x: " + x + "\n p: " + params.getP() + "\n q: 
" + params.getQ() + "\n g: " + params.getG(); } } private static final class P11DHPrivateKey extends P11Key implements DHPrivateKey { private BigInteger x; private DHParameterSpec params; private byte[] encoded; P11DHPrivateKey(Session session, long keyID, String algorithm, int keyLength, CK_ATTRIBUTE[] attributes) { super(PRIVATE, session, keyID, algorithm, keyLength, attributes); } private synchronized void fetchValues() { token.ensureValid(); if (x != null) { return; } CK_ATTRIBUTE[] attributes = new CK_ATTRIBUTE[] { new CK_ATTRIBUTE(CKA_VALUE), new CK_ATTRIBUTE(CKA_PRIME), new CK_ATTRIBUTE(CKA_BASE), }; fetchAttributes(attributes); x = attributes[0].getBigInteger(); params = new DHParameterSpec( attributes[1].getBigInteger(), attributes[2].getBigInteger() ); } public String getFormat() { token.ensureValid(); return "PKCS#8"; } synchronized byte[] getEncodedInternal() { token.ensureValid(); if (encoded == null) { fetchValues(); try { DHPrivateKeySpec spec = new DHPrivateKeySpec (x, params.getP(), params.getG()); KeyFactory kf = KeyFactory.getInstance ("DH", P11Util.getSunJceProvider()); Key key = kf.generatePrivate(spec); encoded = key.getEncoded(); } catch (GeneralSecurityException e) { throw new ProviderException(e); } } return encoded; } public BigInteger getX() { fetchValues(); return x; } public DHParameterSpec getParams() { fetchValues(); return params; } public String toString() { fetchValues(); return super.toString() + "\n x: " + x + "\n p: " + params.getP() + "\n g: " + params.getG(); } } private static final class P11DHPublicKey extends P11Key implements DHPublicKey { private BigInteger y; private DHParameterSpec params; private byte[] encoded; P11DHPublicKey(Session session, long keyID, String algorithm, int keyLength, CK_ATTRIBUTE[] attributes) { super(PUBLIC, session, keyID, algorithm, keyLength, attributes); } private synchronized void fetchValues() { token.ensureValid(); if (y != null) { return; } CK_ATTRIBUTE[] attributes = new CK_ATTRIBUTE[] { 
new CK_ATTRIBUTE(CKA_VALUE), new CK_ATTRIBUTE(CKA_PRIME), new CK_ATTRIBUTE(CKA_BASE), }; fetchAttributes(attributes); y = attributes[0].getBigInteger(); params = new DHParameterSpec( attributes[1].getBigInteger(), attributes[2].getBigInteger() ); } public String getFormat() { token.ensureValid(); return "X.509"; } synchronized byte[] getEncodedInternal() { token.ensureValid(); if (encoded == null) { fetchValues(); try { DHPublicKeySpec spec = new DHPublicKeySpec (y, params.getP(), params.getG()); KeyFactory kf = KeyFactory.getInstance ("DH", P11Util.getSunJceProvider()); Key key = kf.generatePublic(spec); encoded = key.getEncoded(); } catch (GeneralSecurityException e) { throw new ProviderException(e); } } return encoded; } public BigInteger getY() { fetchValues(); return y; } public DHParameterSpec getParams() { fetchValues(); return params; } public String toString() { fetchValues(); return super.toString() + "\n y: " + y + "\n p: " + params.getP() + "\n g: " + params.getG(); } } private static final class P11ECPrivateKey extends P11Key implements ECPrivateKey { private BigInteger s; private ECParameterSpec params; private byte[] encoded; P11ECPrivateKey(Session session, long keyID, String algorithm, int keyLength, CK_ATTRIBUTE[] attributes) { super(PRIVATE, session, keyID, algorithm, keyLength, attributes); } private synchronized void fetchValues() { token.ensureValid(); if (s != null) { return; } CK_ATTRIBUTE[] attributes = new CK_ATTRIBUTE[] { new CK_ATTRIBUTE(CKA_VALUE), new CK_ATTRIBUTE(CKA_EC_PARAMS, params), }; fetchAttributes(attributes); s = attributes[0].getBigInteger(); try { params = P11ECKeyFactory.decodeParameters (attributes[1].getByteArray()); } catch (Exception e) { throw new RuntimeException("Could not parse key values", e); } } public String getFormat() { token.ensureValid(); return "PKCS#8"; } synchronized byte[] getEncodedInternal() { token.ensureValid(); if (encoded == null) { fetchValues(); try { Key key = new 
sun.security.ec.ECPrivateKeyImpl(s, params); encoded = key.getEncoded(); } catch (InvalidKeyException e) { throw new ProviderException(e); } } return encoded; } public BigInteger getS() { fetchValues(); return s; } public ECParameterSpec getParams() { fetchValues(); return params; } public String toString() { fetchValues(); return super.toString() + "\n private value: " + s + "\n parameters: " + params; } } private static final class P11ECPublicKey extends P11Key implements ECPublicKey { private ECPoint w; private ECParameterSpec params; private byte[] encoded; P11ECPublicKey(Session session, long keyID, String algorithm, int keyLength, CK_ATTRIBUTE[] attributes) { super(PUBLIC, session, keyID, algorithm, keyLength, attributes); } private synchronized void fetchValues() { token.ensureValid(); if (w != null) { return; } CK_ATTRIBUTE[] attributes = new CK_ATTRIBUTE[] { new CK_ATTRIBUTE(CKA_EC_POINT), new CK_ATTRIBUTE(CKA_EC_PARAMS), }; fetchAttributes(attributes); try { params = P11ECKeyFactory.decodeParameters (attributes[1].getByteArray()); DerValue wECPoint = new DerValue(attributes[0].getByteArray()); if (wECPoint.getTag() != DerValue.tag_OctetString) throw new IOException("Unexpected tag: " + wECPoint.getTag()); params = P11ECKeyFactory.decodeParameters (attributes[1].getByteArray()); w = P11ECKeyFactory.decodePoint (wECPoint.getDataBytes(), params.getCurve()); } catch (Exception e) { throw new RuntimeException("Could not parse key values", e); } } public String getFormat() { token.ensureValid(); return "X.509"; } synchronized byte[] getEncodedInternal() { token.ensureValid(); if (encoded == null) { fetchValues(); try { Key key = new sun.security.ec.ECPublicKeyImpl(w, params); encoded = key.getEncoded(); } catch (InvalidKeyException e) { throw new ProviderException(e); } } return encoded; } public ECPoint getW() { fetchValues(); return w; } public ECParameterSpec getParams() { fetchValues(); return params; } public String toString() { fetchValues(); return 
super.toString() + "\n public x coord: " + w.getAffineX() + "\n public y coord: " + w.getAffineY() + "\n parameters: " + params; } } } /* * NOTE: Must use PhantomReference here and not WeakReference * otherwise the key maybe cleared before other objects which * still use these keys during finalization such as SSLSocket. */ final class SessionKeyRef extends PhantomReference<P11Key> implements Comparable<SessionKeyRef> { private static ReferenceQueue<P11Key> refQueue = new ReferenceQueue<P11Key>(); private static Set<SessionKeyRef> refList = Collections.synchronizedSortedSet(new TreeSet<SessionKeyRef>()); static ReferenceQueue<P11Key> referenceQueue() { return refQueue; } private static void drainRefQueueBounded() { while (true) { SessionKeyRef next = (SessionKeyRef) refQueue.poll(); if (next == null) break; next.dispose(); } } // handle to the native key private long keyID; private Session session; SessionKeyRef(P11Key key , long keyID, Session session) { super(key, refQueue); this.keyID = keyID; this.session = session; this.session.addObject(); refList.add(this); // TBD: run at some interval and not every time? drainRefQueueBounded(); } private void dispose() { refList.remove(this); if (session.token.isValid()) { Session newSession = null; try { newSession = session.token.getOpSession(); session.token.p11.C_DestroyObject(newSession.id(), keyID); } catch (PKCS11Exception e) { // ignore } finally { this.clear(); session.token.releaseSession(newSession); session.removeObject(); } } } public int compareTo(SessionKeyRef other) { if (this.keyID == other.keyID) { return 0; } else { return (this.keyID < other.keyID) ? -1 : 1; } } }
package cyclops.typeclasses; import com.oath.cyclops.hkt.Higher; import cyclops.arrow.Kleisli; import cyclops.arrow.MonoidK; import cyclops.control.Eval; import cyclops.control.Maybe; import cyclops.control.Option; import cyclops.control.State; import cyclops.data.LazySeq; import cyclops.data.Seq; import cyclops.data.tuple.Tuple2; import cyclops.function.*; import cyclops.instances.control.StateInstances; import cyclops.reactive.ReactiveSeq; import cyclops.typeclasses.foldable.Foldable; import cyclops.typeclasses.foldable.Unfoldable; import cyclops.typeclasses.functor.Compose; import cyclops.typeclasses.functor.Functor; import cyclops.typeclasses.monad.Applicative; import cyclops.typeclasses.monad.Monad; import cyclops.typeclasses.monad.MonadPlus; import cyclops.typeclasses.monad.MonadZero; import cyclops.typeclasses.monad.Traverse; import lombok.AllArgsConstructor; import java.util.function.BiFunction; import java.util.function.BiPredicate; import java.util.function.BinaryOperator; import java.util.function.Consumer; import java.util.function.Function; import java.util.function.Predicate; import java.util.function.Supplier;
/**
 * Fluent builder for monadic "do-notation" comprehensions over a
 * higher-kinded witness type {@code W}.
 *
 * <p>A {@code Do} is obtained from a {@link Monad} instance via
 * {@link #forEach(Monad)}. Successive {@code __} / {@code _of} /
 * {@code _flatten} calls bind up to eight values (inner classes
 * {@code Do1}..{@code Do8}, each holding a function of the previously
 * bound values), and {@code yield(..)} runs the accumulated chain of
 * {@code flatMap_} calls to produce the final {@code Do1<R>}.
 * {@code guard(..)} filters via a {@link MonadZero}; {@code fold(..)}
 * exposes the underlying {@code Higher<W,_>} directly.
 *
 * <p>Companion entry points {@link #folds(Foldable)},
 * {@link #traverse(Traverse)} and {@link #sequence(Traverse)} expose
 * Foldable/Traverse operations in the same fluent style, and
 * {@code DoNested} handles a value of one witness type nested inside
 * another ({@code Higher<W, Higher<W2, T1>>}).
 *
 * <p>NOTE(review): several intermediate {@code show}/{@code _show}
 * overloads fold only a prefix of the bound chain — e.g. {@code Do6.show}
 * never applies {@code fh}, and the deeper {@code _show} variants stop at
 * {@code d.apply(..)}. This looks like copy-paste drift from the shallower
 * levels; confirm whether showing only a prefix is intended.
 */
public class Do<W> { private final Monad<W> monad; private Do(Monad<W> monad) { this.monad = monad; } public <T1> Do1<T1> __(Higher<W, T1> a) { return new Do1<>(()->a); } public <T1> Do1<T1> _of(T1 a) { return new Do1<>(()->monad.unit(a)); } public <T1> Do1<T1> __(Supplier<Higher<W, T1>> a) { return new Do1<>(a); } public <T1> Do1<T1> _flatten(Higher<W, Higher<W, T1>> nested){ return new Do1<>(()->monad.flatten(nested)); } public <T1,R> Kleisli<W,T1,R> kliesli( Function<? super T1, ? extends R> fn){ return Kleisli.arrow(monad,fn); } public <T1,R> Kleisli<W,T1,R> kliesliK( Function<? super T1, ?
extends Higher<W,R>> fn){ return Kleisli.of(monad,fn); } public DoUnfolds expand(Unfoldable<W> unfolds){ return new DoUnfolds(unfolds); } public DoUnfolds expand(Supplier<Unfoldable<W>> unfolds){ return new DoUnfolds(unfolds.get()); } @AllArgsConstructor public class DoUnfolds{ private final Unfoldable<W> unfolds; public <R, T> Do1<R> unfold(T b, Function<? super T, Option<Tuple2<R, T>>> fn){ return __(unfolds.unfold(b, fn)); } public <T> Do1<T> replicate(long n, T value) { return __(unfolds.replicate(n,value)); } public <R> Do1<R> none() { return __(unfolds.none()); } public <T> Do1<T> one(T a) { return __(unfolds.one(a)); } } public <W2,T1> DoNested<W2,T1> __(Functor<W2> f, Higher<W, Higher<W2, T1>> nested){ return new DoNested<>(nested,f); } public <W2,T1> DoNested<W2,T1> __(Supplier<Functor<W2>> f, Higher<W, Higher<W2, T1>> nested){ return new DoNested<>(nested,f.get()); } @AllArgsConstructor public class DoNested<W2,T1>{ private final Higher<W, Higher<W2, T1>> nested; private final Compose<W,W2> f; public DoNested(Higher<W, Higher<W2, T1>> nested, Functor<W2> f2){ this.nested =nested; this.f= Compose.compose(monad,f2); } public Do<W2>.Do1<T1> foldK(Foldable<W> folds,Monad<W2> m2,MonoidK<W2> monoid) { return Do.forEach(m2).__(()->folds.foldK(monoid, nested)); } public Do<W2>.Do1<T1> foldK(Supplier<Foldable<W>> folds,Supplier<Monad<W2>> m2,MonoidK<W2> monoid) { return foldK(folds.get(),m2.get(),monoid); } public Do<W>.Do1<T1> foldLeft(Foldable<W2> folds,Monoid<T1> monoid) { return __(()->monad.map_(nested,i->folds.foldLeft(monoid, i))); } public Do<W>.Do1<T1> foldLeft(Supplier<Foldable<W2>> folds,Monoid<T1> monoid) { return foldLeft(folds.get(),monoid); } public <R> Do<W>.Do1<R> map(Function<? super Higher<W2,T1>, ? extends R> fn) { return __(()->monad.map_(nested,fn)); } public Do<W2>.DoNested<W,T1> sequence(Traverse<W> traverse,Monad<W2> monad){ return Do.forEach(monad).__(f.outer(),traverse.sequenceA(monad,nested)); } public <R1,R2> R2 fold(Function<?
super Higher<W, R1>, ? extends R2 > fn1, Function<? super Higher<W2,T1>, ? extends R1> fn2) { return fn1.apply(monad.map_(nested, fn2)); } public <R> R fold(Function<? super Higher<W, Higher<W2, T1>>, ? extends R> fn){ return fn.apply(nested); } }
// Do1 holds the first bound value as a Supplier; each nested DoN below adds
// one more binding as a function of all previously bound values.
@AllArgsConstructor public class Do1<T1> { private final Supplier<Higher<W, T1>> a; public <T2> Do2<T2> __(Higher<W, T2> b) { return new Do2<>(Function1.constant(b)); } public <T2> Do2<T2> __(Function<T1,Higher<W, T2>> b) { return new Do2<>(b); } public <T2> Do2<T2> _of(T2 b) { return new Do2<T2>(Function1.constant(monad.unit(b))); } public <T2> Do2<T2> _flatten(Higher<W, Higher<W, T2>> nested){ return new Do2<>(in->monad.flatten(nested)); } public <R> Do1<R> map(Function<? super T1, ? extends R> mapper){ return new Do1<R>(()->monad.map_(a.get(),mapper)); } public <R> Do1<R> ap(Higher<W,Function<T1,R>> applicative){ return new Do1<R>(()->monad.ap(applicative,a.get())); } public Do1<T1> peek(Consumer<? super T1> mapper){ return new Do1<>(()->monad.peek(mapper,a.get())); } public <T2,R> Do1<R> zip(Higher<W, T2> fb, BiFunction<? super T1,? super T2,? extends R> f){ return new Do1<>(()->monad.zip(a.get(),fb,f)); } public Do1<T1> plus(MonadPlus<W> mp,Higher<W,T1> b){ return new Do1<>(()->mp.plus(a.get(),b)); } public Do1<T1> plus(Supplier<MonadPlus<W>> mp,Higher<W,T1> b){ return plus(mp.get(),b); } public Do1<Tuple2<T1,Long>> zipWithIndex(Traverse<W> traverse){ return Do.forEach(monad).__(()->traverse.zipWithIndex(a.get())); } public Do1<Tuple2<T1,Long>> zipWithIndex(Supplier<Traverse<W>>traverse){ return zipWithIndex(traverse.get()); } public Do1<T1> guard(MonadZero<W> monadZero,Predicate<? super T1> fn) { return new Do1<>(()->monadZero.filter(fn, a.get())); } public <R> Do1<R> yield(Function<? super T1, ? extends R> fn) { return Do.forEach(monad).__(()->monad.map_(a.get(), fn)); } public Higher<W,T1> unwrap(){ return a.get(); } public <R> R fold(Function<? super Higher<W,T1>,?
extends R> fn){ return fn.apply(a.get()); } public <R> Eval<R> eval(Function<? super Higher<W,T1>,? extends R> fn){ return Eval.later(()->fn.apply(a.get())); } public <R> Do2<R> __fold(Foldable<W> folds,Function<? super Fold1.DoFoldable, ? extends R> fn){ return __(in->monad.unit(fn.apply(Do.folds(folds).__(a)))); } public <R> Do2<R> __fold(Supplier<Foldable<W>> folds,Function<? super Fold1.DoFoldable, ? extends R> fn){ return __fold(folds.get(),fn); } public Do1<T1> reverse(Traverse<W> traverse){ return Do.forEach(monad).__(()->traverse.reverse(a.get())); } public Do1<T1> reverse(Supplier<Traverse<W>> traverse){ return reverse(traverse.get()); } public Do1<String> show(Show<W> show){ return new Do1<>(()->monad.unit(show.show(a.get()))); } public Do2<String> _show(Show<W> show){ return __(i->monad.unit(show.show(a.get()))); } @AllArgsConstructor public class Do2<T2> { private final Function<T1,Higher<W, T2>> b; public <R> Do2<R> map(Function<? super T2, ? extends R> mapper){ return new Do2<>(in->monad.map_(b.apply(in),mapper)); } public <R> Do2<R> ap(Higher<W,Function<T2,R>> applicative){ return new Do2<R>(in->monad.ap(applicative,b.apply(in))); } public Do2<T2> peek(Consumer<? super T2> mapper){ return map(t->{ mapper.accept(t); return t; }); } public <T3,R> Do2<R> zip(Higher<W, T3> fb, BiFunction<? super T2,? super T3,?
extends R> f){ return new Do2<R>(in->monad.zip(b.apply(in),fb,f)); } public Do2<String> show(Show<W> show){ return new Do2<String>(in->monad.unit(show.show(monad.flatMap_(a.get(), t -> b.apply(t))))); } public Do3<String> _show(Show<W> show){ return new Do3<>((x1,x2)->monad.unit(show.show(monad.flatMap_(a.get(), in -> b.apply(in))))); } public <T3> Do3<T3> __(Higher<W, T3> c) { return new Do3<>(Function2.constant(c)); } public <T3> Do3<T3> __(Supplier<Higher<W, T3>> c) { return new Do3<>(Function2._0(c)); } public <T3> Do3<T3> __(BiFunction<T1,T2,Higher<W, T3>> c) { return new Do3<>(c); } public <T3> Do3<T3> _flatten(Higher<W, Higher<W, T3>> nested){ return new Do3<T3>(Function2.constant(monad.flatten(nested))); } public <T3> Do3<T3> _of(T3 c) { return new Do3<>(Function2.constant(monad.unit(c))); } public Do2<T2> guard(MonadZero<W> monadZero, BiPredicate<? super T1,? super T2> fn) { return new Do2<>(t1->monadZero.filter(p->fn.test(t1,p), b.apply(t1))); } public <R> Do1<R> yield(BiFunction<? super T1, ? super T2, ? extends R> fn) { return Do.forEach(monad).__(()->monad.flatMap_(a.get(), in -> { return monad.map_(b.apply(in), in2 -> fn.apply(in, in2)); })); } public <R> R fold(Function<? super Higher<W,T2>,? extends R> fn){ return fn.apply(monad.flatMap_(a.get(),b)); } @AllArgsConstructor public class Do3<T3> { private final BiFunction<T1,T2,Higher<W, T3>> c; public <R> Do3<R> map(Function<? super T3, ? extends R> mapper){ return new Do3<>((a,b)->monad.map_(c.apply(a,b),mapper)); } public <R> Do3<R> ap(Higher<W,Function<T3,R>> applicative){ return new Do3<R>((a,b)->monad.ap(applicative,c.apply(a,b))); } public Do3<T3> peek(Consumer<? super T3> mapper){ return map(t->{ mapper.accept(t); return t; }); } public <T4,R> Do3<R> zip(Higher<W, T4> fb, BiFunction<? super T3,? super T4,?
extends R> f){ return new Do3<R>((a,b)->monad.zip(c.apply(a,b),fb,f)); } public Do3<String> show(Show<W> show){ return new Do3<String>((a1,b1)->monad.unit(show.show(monad.flatMap_(a.get(), t1 -> monad.flatMap_(b.apply(t1),t2->c.apply(t1,t2)))))); } public Do4<String> _show(Show<W> show){ return new Do4<>((x1,x2,x3)->monad.unit(show.show(monad.flatMap_(a.get(), t1 -> monad.flatMap_(b.apply(t1),t2->c.apply(t1,t2)))))); } public <T4> Do4<T4> __(Higher<W, T4> d) { return new Do4<>(Function3.constant(d)); } public <T4> Do4<T4> __(Supplier<Higher<W, T4>> d) { return new Do4<>(Function3.lazyConstant(d)); } public <T4> Do4<T4> __(Function3<T1,T2,T3,Higher<W, T4>> c) { return new Do4<>(c); } public <T4> Do4<T4> _flatten(Higher<W, Higher<W, T4>> nested){ return new Do4<T4>(Function3.constant(monad.flatten(nested))); } public <T4> Do4<T4> _of(T4 d) { return new Do4<>(Function3.constant(monad.unit(d))); } public Do3<T3> guard(MonadZero<W> monadZero, Predicate3<? super T1,? super T2, ? super T3> fn) { return new Do3<>((t1, t2) -> monadZero.filter(p -> fn.test(t1, t2, p), c.apply(t1, t2))); } public <R> Do1<R> yield(Function3<? super T1, ? super T2, ? super T3, ? extends R> fn) { return Do.forEach(monad).__(()->monad.flatMap_(a.get(), in -> { Higher<W, R> hk2 = monad.flatMap_(b.apply(in), in2 -> { Higher<W, R> hk3 = monad.map_(c.apply(in,in2), in3 -> fn.apply(in, in2, in3)); return hk3; }); return hk2; })); } public <R> R fold(Function<? super Higher<W,T3>,? extends R> fn){ return fn.apply(monad.flatMap_(a.get(),t1->{ return monad.flatMap_(b.apply(t1),t2->{ return c.apply(t1,t2); } ); })); } @AllArgsConstructor public class Do4<T4> { private final Function3<T1,T2,T3,Higher<W, T4>> d; public <R> Do4<R> map(Function<? super T4, ? extends R> mapper){ return new Do4<>((a,b,c)->monad.map_(d.apply(a,b,c),mapper)); } public <R> Do4<R> ap(Higher<W,Function<T4,R>> applicative){ return new Do4<R>((a,b,c)->monad.ap(applicative,d.apply(a,b,c))); } public Do4<T4> peek(Consumer<?
super T4> mapper){ return map(t->{ mapper.accept(t); return t; }); } public <T5,R> Do4<R> zip(Higher<W, T5> fb, BiFunction<? super T4,? super T5,? extends R> f){ return new Do4<R>((a,b,c)->monad.zip(d.apply(a,b,c),fb,f)); } public Do4<String> show(Show<W> show){ return new Do4<String>((a1,b1,c1)->monad.unit(show.show(monad.flatMap_(a.get(), t1 -> monad.flatMap_(b.apply(t1),t2->monad.flatMap_(c.apply(t1,t2),t3->d.apply(t1,t2,t3))))))); } public Do5<String> _show(Show<W> show){ return new Do5<>((x1,x2,x3,x4)->monad.unit(show.show(monad.flatMap_(a.get(), t1 -> monad.flatMap_(b.apply(t1),t2->monad.flatMap_(c.apply(t1,t2),t3->d.apply(t1,t2,t3))))))); } public <T5> Do5<T5> __(Higher<W, T5> e) { return new Do5<>(Function4.constant(e)); } public<T5> Do5<T5> __(Supplier<Higher<W, T5>> e) { return new Do5<>(Function4.lazyConstant(e)); } public <T5> Do5<T5> __(Function4<T1,T2,T3,T4,Higher<W, T5>> e) { return new Do5<>(e); } public <T5> Do5<T5> _of(T5 e) { return new Do5<>(Function4.constant(monad.unit(e))); } public <T5> Do5<T5> _flatten(Higher<W, Higher<W, T5>> nested){ return new Do5<T5>(Function4.constant(monad.flatten(nested))); } public Do4<T4> guard(MonadZero<W> monadZero, Predicate4<? super T1,? super T2, ? super T3, ? super T4> fn) { return new Do4<>((t1,t2,t3)->monadZero.filter(p->fn.test(t1,t2,t3,p), d.apply(t1,t2,t3))); } public <R> Do1<R> yield(Function4<? super T1, ? super T2, ? super T3, ? super T4, ? extends R> fn) { return Do.forEach(monad).__(()->monad.flatMap_(a.get(), in -> { Higher<W, R> hk2 = monad.flatMap_(b.apply(in), in2 -> { Higher<W, R> hk3 = monad.flatMap_(c.apply(in,in2), in3 -> { Higher<W, R> hk4 = monad.map_(d.apply(in,in2,in3), in4 -> fn.apply(in, in2, in3, in4)); return hk4; }); return hk3; }); return hk2; })); } public <R> R fold(Function<? super Higher<W,T4>,?
extends R> fn){ return fn.apply(monad.flatMap_(a.get(),t1->{ return monad.flatMap_(b.apply(t1),t2->{ return monad.flatMap_(c.apply(t1,t2),t3->{ return d.apply(t1,t2,t3); }); } ); })); } @AllArgsConstructor public class Do5<T5> { private final Function4<T1,T2,T3,T4,Higher<W, T5>> e; public <R> Do5<R> map(Function<? super T5, ? extends R> mapper){ return new Do5<>((a,b,c,d)->monad.map_(e.apply(a,b,c,d),mapper)); } public <R> Do5<R> ap(Higher<W,Function<T5,R>> applicative){ return new Do5<R>((a,b,c,d)->monad.ap(applicative,e.apply(a,b,c,d))); } public Do5<T5> peek(Consumer<? super T5> mapper){ return map(t->{ mapper.accept(t); return t; }); } public <T6,R> Do5<R> zip(Higher<W, T6> fb, BiFunction<? super T5,? super T6,? extends R> f){ return new Do5<R>((a,b,c,d)->monad.zip(e.apply(a,b,c,d),fb,f)); } public Do5<String> show(Show<W> show){ return new Do5<String>((a1,b1,c1,d1)->monad.unit(show.show(monad.flatMap_(a.get(), t1 -> monad.flatMap_(b.apply(t1),t2->monad.flatMap_(c.apply(t1,t2), t3->monad.flatMap_(d.apply(t1,t2,t3),t4->e.apply(t1,t2,t3,t4)))))))); } public Do6<String> _show(Show<W> show){ return new Do6<>((x1,x2,x3,x4,x5)->monad.unit(show.show(monad.flatMap_(a.get(), t1 -> monad.flatMap_(b.apply(t1),t2->monad.flatMap_(c.apply(t1,t2),t3->d.apply(t1,t2,t3))))))); } public <T6> Do6<T6> __(Higher<W, T6> e) { return new Do6<>(Function5.constant(e)); } public <T6> Do6<T6> __(Supplier<Higher<W, T6>> e) { return new Do6<>(Function5.lazyConstant(e)); } public <T6> Do6<T6> __(Function5<T1,T2,T3,T4,T5,Higher<W, T6>> e) { return new Do6<>(e); } public <T6> Do6<T6> _of(T6 e) { return new Do6<>(Function5.constant(monad.unit(e))); } public <T6> Do6<T6> _flatten(Higher<W, Higher<W, T6>> nested){ return new Do6<T6>(Function5.constant(monad.flatten(nested))); } public Do5<T5> guard(MonadZero<W> monadZero,Predicate5<? super T1,? super T2, ? super T3, ? super T4, ?
super T5> fn) { return new Do5<>((t1,t2,t3,t4)->monadZero.filter(p->fn.test(t1,t2,t3,t4,p), e.apply(t1,t2,t3,t4))); } public <R> Do1<R> yield(Function5<? super T1, ? super T2, ? super T3, ? super T4, ? super T5,? extends R> fn) { return Do.forEach(monad).__(()-> monad.flatMap_(a.get(), in -> { Higher<W, R> hk2 = monad.flatMap_(b.apply(in), in2 -> { Higher<W, R> hk3 = monad.flatMap_(c.apply(in,in2), in3 -> { Higher<W, R> hk4 = monad.flatMap_(d.apply(in,in2,in3), in4 -> { Higher<W,R> hk5 = monad.map_(e.apply(in,in2,in3,in4),in5->fn.apply(in, in2, in3, in4,in5)); return hk5; }); return hk4; }); return hk3; }); return hk2; })); } public <R> R fold(Function<? super Higher<W,T5>,? extends R> fn){ return fn.apply(monad.flatMap_(a.get(),t1->{ return monad.flatMap_(b.apply(t1),t2->{ return monad.flatMap_(c.apply(t1,t2),t3->{ return monad.flatMap_(d.apply(t1,t2,t3),t4->{ return e.apply(t1,t2,t3,t4); }); }); } ); })); } @AllArgsConstructor public class Do6<T6> { private final Function5<T1,T2,T3,T4,T5, Higher<W, T6>> fh; public <R> Do6<R> map(Function<? super T6, ? extends R> mapper){ return new Do6<>((a,b,c,d,e)->monad.map_(fh.apply(a,b,c,d,e),mapper)); } public <R> Do6<R> ap(Higher<W,Function<T6,R>> applicative){ return new Do6<R>((a,b,c,d,e)->monad.ap(applicative,fh.apply(a,b,c,d,e))); } public Do6<T6> peek(Consumer<? super T6> mapper){ return map(t->{ mapper.accept(t); return t; }); } public <T7,R> Do6<R> zip(Higher<W, T7> fb, BiFunction<? super T6,? super T7,?
extends R> fa){ return new Do6<R>((a,b,c,d,e)->monad.zip(fh.apply(a,b,c,d,e),fb,fa)); } public Do6<String> show(Show<W> show){ return new Do6<String>((a1,b1,c1,d1,e1)->monad.unit(show.show(monad.flatMap_(a.get(), t1 -> monad.flatMap_(b.apply(t1),t2->monad.flatMap_(c.apply(t1,t2), t3->monad.flatMap_(d.apply(t1,t2,t3),t4->e.apply(t1,t2,t3,t4)))))))); } public Do7<String> _show(Show<W> show){ return new Do7<>((x1,x2,x3,x4,x5,x6)->monad.unit(show.show(monad.flatMap_(a.get(), t1 -> monad.flatMap_(b.apply(t1),t2->monad.flatMap_(c.apply(t1,t2),t3->d.apply(t1,t2,t3))))))); } public <T7> Do7<T7> __(Higher<W, T7> e) { return new Do7<>(Function6.constant(e)); } public <T7> Do7<T7> __(Supplier<Higher<W, T7>> e) { return new Do7<>(Function6.lazyConstant(e)); } public <T7> Do7<T7> __(Function6<T1,T2,T3,T4,T5,T6,Higher<W, T7>> e) { return new Do7<>(e); } public <T7> Do7<T7> _of(T7 e) { return new Do7<>(Function6.constant(monad.unit(e))); } public <T7> Do7<T7> _flatten(Higher<W, Higher<W, T7>> nested){ return new Do7<T7>(Function6.constant(monad.flatten(nested))); } public Do6<T6> guard(MonadZero<W> monadZero, Predicate6<? super T1,? super T2, ? super T3, ? super T4, ? super T5, ? super T6> fn) { return new Do6<>((t1,t2,t3,t4,t5)->monadZero.filter(p->fn.test(t1,t2,t3,t4,t5,p), fh.apply(t1,t2,t3,t4,t5))); } public <R> Do1<R> yield(Function6<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? extends R> fn) { return Do.forEach(monad).__(()-> monad.flatMap_(a.get(), in -> { Higher<W, R> hk2 = monad.flatMap_(b.apply(in), in2 -> { Higher<W, R> hk3 = monad.flatMap_(c.apply(in,in2), in3 -> { Higher<W, R> hk4 = monad.flatMap_(d.apply(in,in2,in3), in4 -> { Higher<W,R> hk5 = monad.flatMap_(e.apply(in,in2,in3,in4), in5 -> { Higher<W,R> hk6 = monad.map_(fh.apply(in,in2,in3,in4,in5),in6->fn.apply(in, in2, in3, in4,in5,in6)); return hk6; }); return hk5; }); return hk4; }); return hk3; }); return hk2; })); } public <R> R fold(Function<? super Higher<W,T6>,?
extends R> fn){ return fn.apply(monad.flatMap_(a.get(),t1->{ return monad.flatMap_(b.apply(t1),t2->{ return monad.flatMap_(c.apply(t1,t2),t3->{ return monad.flatMap_(d.apply(t1,t2,t3),t4->{ return monad.flatMap_(e.apply(t1,t2,t3,t4),t5 -> { return fh.apply(t1,t2,t3,t4,t5); }); }); }); } ); })); } @AllArgsConstructor public class Do7<T7> { private final Function6<T1,T2,T3,T4,T5,T6, Higher<W, T7>> gh; public <R> Do7<R> map(Function<? super T7, ? extends R> mapper){ return new Do7<>((a,b,c,d,e,f)->monad.map_(gh.apply(a,b,c,d,e,f),mapper)); } public <R> Do7<R> ap(Higher<W,Function<T7,R>> applicative){ return new Do7<R>((a,b,c,d,e,f)->monad.ap(applicative,gh.apply(a,b,c,d,e,f))); } public Do7<T7> peek(Consumer<? super T7> mapper){ return map(t->{ mapper.accept(t); return t; }); } public <T8,R> Do7<R> zip(Higher<W, T8> fb, BiFunction<? super T7,? super T8,? extends R> fa){ return new Do7<R>((a,b,c,d,e,f)->monad.zip(gh.apply(a,b,c,d,e,f),fb,fa)); } public Do7<String> show(Show<W> show){ return new Do7<String>((a1,b1,c1,d1,e1,f1)->monad.unit(show.show(monad.flatMap_(a.get(), t1 -> monad.flatMap_(b.apply(t1),t2->monad.flatMap_(c.apply(t1,t2), t3->monad.flatMap_(d.apply(t1,t2,t3),t4->e.apply(t1,t2,t3,t4)))))))); } public Do8<String> _show(Show<W> show){ return new Do8<>((x1,x2,x3,x4,x5,x6,x7)->monad.unit(show.show(monad.flatMap_(a.get(), t1 -> monad.flatMap_(b.apply(t1),t2->monad.flatMap_(c.apply(t1,t2),t3->d.apply(t1,t2,t3))))))); } public <T8> Do8<T8> __(Higher<W, T8> e) { return new Do8<>(Function7.constant(e)); } public <T8> Do8<T8> __(Supplier<Higher<W, T8>> e) { return new Do8<>(Function7.lazyConstant(e)); } public <T8> Do8<T8> __(Function7<T1,T2,T3,T4,T5,T6,T7,Higher<W, T8>> e) { return new Do8<>(e); } public <T8> Do8<T8> _of(T8 e) { return new Do8<>(Function7.constant(monad.unit(e))); } public <T8> Do8<T8> _flatten(Higher<W, Higher<W, T8>> nested){ return new Do8<>(Function7.constant(monad.flatten(nested))); } public Do7<T7> guard(MonadZero<W> monadZero, Predicate7<?
super T1,? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7> fn) { return new Do7<>((t1,t2,t3,t4,t5,t6)->monadZero.filter(p->fn.test(t1,t2,t3,t4,t5,t6,p), gh.apply(t1,t2,t3,t4,t5,t6))); } public <R> Do1<R> yield(Function7<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? extends R> fn) { return Do.forEach(monad).__(()-> monad.flatMap_(a.get(), in -> { Higher<W, R> hk2 = monad.flatMap_(b.apply(in), in2 -> { Higher<W, R> hk3 = monad.flatMap_(c.apply(in,in2), in3 -> { Higher<W, R> hk4 = monad.flatMap_(d.apply(in,in2,in3), in4 -> { Higher<W,R> hk5 = monad.flatMap_(e.apply(in,in2,in3,in4), in5 -> { Higher<W,R> hk6 = monad.flatMap_(fh.apply(in,in2,in3,in4,in5),in6->{ Higher<W,R> hk7 = monad.map_(gh.apply(in,in2,in3,in4,in5,in6),in7->fn.apply(in, in2, in3, in4,in5,in6,in7)); return hk7; }); return hk6; }); return hk5; }); return hk4; }); return hk3; }); return hk2; })); } public <R> R fold(Function<? super Higher<W,T7>,? extends R> fn){ return fn.apply(monad.flatMap_(a.get(),t1->{ return monad.flatMap_(b.apply(t1),t2->{ return monad.flatMap_(c.apply(t1,t2),t3->{ return monad.flatMap_(d.apply(t1,t2,t3),t4->{ return monad.flatMap_(e.apply(t1,t2,t3,t4),t5 -> { return monad.flatMap_(fh.apply(t1,t2,t3,t4,t5), t6 -> { return gh.apply(t1,t2,t3,t4,t5,t6); }); }); }); }); } ); })); } @AllArgsConstructor public class Do8<T8> { private final Function7<T1,T2,T3,T4,T5,T6,T7, Higher<W, T8>> eh; public <R> Do8<R> map(Function<? super T8, ? extends R> mapper){ return new Do8<>((a,b,c,d,e,f,g)->monad.map_(eh.apply(a,b,c,d,e,f,g),mapper)); } public <R> Do8<R> ap(Higher<W,Function<T8,R>> applicative){ return new Do8<R>((a,b,c,d,e,f,g)->monad.ap(applicative,eh.apply(a,b,c,d,e,f,g))); } public Do8<T8> peek(Consumer<? super T8> mapper){ return map(t->{ mapper.accept(t); return t; }); } public <T9,R> Do8<R> zip(Higher<W, T9> fb, BiFunction<? super T8,? super T9,?
extends R> fa){ return new Do8<R>((a,b,c,d,e,f,g)->monad.zip(eh.apply(a,b,c,d,e,f,g),fb,fa)); } public Do8<String> show(Show<W> show){ return new Do8<String>((a1,b1,c1,d1,e1,f1,e2)->monad.unit(show.show(monad.flatMap_(a.get(), t1 -> monad.flatMap_(b.apply(t1),t2->monad.flatMap_(c.apply(t1,t2), t3->monad.flatMap_(d.apply(t1,t2,t3),t4->e.apply(t1,t2,t3,t4)))))))); } public Do8<T8> guard(MonadZero<W> monadZero, Predicate8<? super T1,? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8> fn) { return new Do8<>((t1,t2,t3,t4,t5,t6,t7)->monadZero.filter(p->fn.test(t1,t2,t3,t4,t5,t6,t7,p), eh.apply(t1,t2,t3,t4,t5,t6,t7))); } public <R> Do1<R> yield(Function8<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8, ? extends R> fn) { return Do.forEach(monad).__(()-> monad.flatMap_(a.get(), in -> { Higher<W, R> hk2 = monad.flatMap_(b.apply(in), in2 -> { Higher<W, R> hk3 = monad.flatMap_(c.apply(in,in2), in3 -> { Higher<W, R> hk4 = monad.flatMap_(d.apply(in,in2,in3), in4 -> { Higher<W,R> hk5 = monad.flatMap_(e.apply(in,in2,in3,in4), in5 -> { Higher<W,R> hk6 = monad.flatMap_(fh.apply(in,in2,in3,in4,in5),in6->{ Higher<W,R> hk7 = monad.flatMap_(gh.apply(in,in2,in3,in4,in5,in6),in7->{ Higher<W,R> hk8 = monad.map_(eh.apply(in,in2,in3,in4,in5,in6,in7),in8->fn.apply(in, in2, in3, in4,in5,in6,in7,in8)); return hk8; }); return hk7; }); return hk6; }); return hk5; }); return hk4; }); return hk3; }); return hk2; })); } public <R> R fold(Function<? super Higher<W,T8>,?
extends R> fn){ return fn.apply(monad.flatMap_(a.get(),t1->{ return monad.flatMap_(b.apply(t1),t2->{ return monad.flatMap_(c.apply(t1,t2),t3->{ return monad.flatMap_(d.apply(t1,t2,t3),t4->{ return monad.flatMap_(e.apply(t1,t2,t3,t4),t5 -> { return monad.flatMap_(fh.apply(t1,t2,t3,t4,t5), t6 -> { return monad.flatMap_(gh.apply(t1,t2,t3,t4,t5,t6), t7 -> { return eh.apply(t1,t2,t3,t4,t5,t6,t7); }); }); }); }); }); } ); })); } } } } } } } } }
// --- static entry points: lift typeclass instances into the fluent DSL ---
public static <W> Do<W> forEach(Monad<W> a){ return new Do(a); } public static <W> Do<W> forEach(Supplier<Monad<W>> a){ return forEach(a.get()); } public static <W> Fold1<W> folds(Foldable<W> foldable){ return new Fold1<>(foldable); } public static <W> Fold1<W> folds(Supplier<Foldable<W>> foldable){ return new Fold1<>(foldable.get()); } public static <W> Traverse1<W> traverse(Traverse<W> traverse){ return new Traverse1<>(traverse); } public static <W> Traverse1<W> traverse(Supplier<Traverse<W>> traverse){ return new Traverse1<>(traverse.get()); } public static <W> Sequence1<W> sequence(Traverse<W> traverse){ return new Sequence1<>(traverse); } public static <W> Sequence1<W> sequence(Supplier<Traverse<W>> traverse){ return new Sequence1<>(traverse.get()); } @AllArgsConstructor public static class Sequence1<W>{ private final Traverse<W> traverse; public <W2,T1> DoSequence<W2,T1> __(Higher<W, Higher<W2,T1>> a) { return new DoSequence<>(a); } public <W2,T1> DoSequence<W2,T1> __(Supplier<Higher<W, Higher<W2,T1>>> a) { return new DoSequence<>(a.get()); } @AllArgsConstructor public class DoSequence<W2,T1>{ private final Higher<W, Higher<W2,T1>> a; private Higher<W2, Higher<W, T1>> sequenceA(Applicative<W2> applicative){ return traverse.sequenceA(applicative,a); } public Do<W2>.DoNested<W,T1> traverse(Monad<W2> m1, Applicative<W2> applicative){ return Do.forEach(m1).__(traverse, sequenceA(applicative)); } } } @AllArgsConstructor public static class Traverse1<W>{ private final Traverse<W> traverse; public <T1> DoTraverse<T1> __(Higher<W,
T1> a) { return new DoTraverse<>(a); } public <T1> DoTraverse<T1> __(Supplier<Higher<W, T1>> a) { return new DoTraverse<>(a.get()); } @AllArgsConstructor public class DoTraverse<T1>{ private final Higher<W, T1> a; private <W2,R> Higher<W2, Higher<W, R>> traverse(Applicative<W2> applicative, Function<? super T1, ? extends Higher<W2, R>> fn){ return traverse.traverseA(applicative,fn,a); } public <W2,R> Do<W2>.DoNested<W,R> traverse(Monad<W2> m1, Function<? super T1, ? extends Higher<W2, R>> fn){ return Do.forEach(m1).__(traverse, traverse.traverseA(m1, fn, a)); } public <S,R,R2> State<S,R2> traverseS(Function<? super T1, ? extends State<S,R>> fn,Function<Higher<W,R>,R2> foldFn){ return State.narrowK(traverse(StateInstances.applicative(), fn)).map(foldFn); } public <S,R> Tuple2<S, Do<W>.Do1<R>> runTraverseS(Monad<W> monad,Function<? super T1, ? extends State<S,R>> fn, S val) { return traverse.runTraverseS(fn,a,val).map2(i -> Do.forEach(monad).__(i)); } public Do<W>.Do1<T1> reverse(Monad<W> monad){ return Do.forEach(monad).__(traverse.reverse(a)); } public <S,R> Tuple2<S, Do<W>.Do1<R>> mapAccumL (Monad<W> monad,BiFunction<? super S, ? super T1, ? extends Tuple2<S,R>> f,S z) { return traverse.mapAccumL(f, a, z) .map2(i -> Do.forEach(monad).__(i)); } public <R> R foldMap(Monoid<R> mb, final Function<? super T1,? extends R> fn) { return traverse.foldMap(mb,fn,a); } public <R> Do<W>.Do1<R> mapWithIndex(Monad<W> monad,BiFunction<? super T1,Long,? extends R> f) { return Do.forEach(monad) .__(traverse.mapWithIndex(f,a)); } public <W2,T2,R> Do<W>.Do1<R> zipWith(Monad<W> monad,Foldable<W2> foldable, BiFunction<? super T1,? super Maybe<T2>,?
extends R> f, Higher<W2, T2> ds2) { return Do.forEach(monad) .__(traverse.zipWith(foldable,f,a,ds2)); } public <R> Do<W>.Do1<Tuple2<T1,Long>> zipWithIndex(Monad<W> monad) { return Do.forEach(monad) .__(traverse.zipWithIndex(a)); } } } @AllArgsConstructor public static class Fold1<W> { private final Foldable<W> folds; public <T1> DoFoldable<T1> __(Higher<W, T1> a) { return new DoFoldable<>(a); } public <T1> DoFoldable<T1> __(Supplier<Higher<W, T1>> a) { return new DoFoldable<>(a.get()); } @AllArgsConstructor public class DoFoldable<T1>{ private final Higher<W, T1> a; public <R> R foldMap(final Monoid<R> mb, final Function<? super T1,? extends R> fn){ return folds.foldMap(mb,fn,a); } public <R> R foldr(final Function< T1, Function< R, R>> fn, R r){ return folds.foldr(fn,r,a); } public T1 foldRight(Monoid<T1> monoid){ return folds.foldRight(monoid,a); } public T1 foldRight(T1 identity, BinaryOperator<T1> semigroup){ return folds.foldRight(identity,semigroup,a); } public T1 foldLeft(Monoid<T1> monoid){ return folds.foldLeft(monoid,a); } public T1 foldLeft(T1 identity, BinaryOperator<T1> semigroup){ return folds.foldLeft(identity,semigroup,a); } public long size() { return folds.size(a); } public Seq<T1> seq(){ return folds.seq(a); } public LazySeq<T1> lazySeq(){ return folds.lazySeq(a); } public ReactiveSeq<T1> stream(){ return folds.stream(a); } public T1 intercalate(Monoid<T1> monoid, T1 value ){ return seq().intersperse(value).foldLeft(monoid); } public Option<T1> getAt(int index){ return seq().get(index); } public boolean anyMatch(Predicate<? super T1> pred){ return folds.anyMatch(pred,a); } public boolean allMatch(Predicate<? super T1> pred){ return folds.allMatch(pred,a); } } } }
/* * Copyright 2000-2013 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.codeEditor.printing; import com.intellij.ide.highlighter.HighlighterFactory; import com.intellij.openapi.actionSystem.DataContext; import com.intellij.openapi.actionSystem.LangDataKeys; import com.intellij.openapi.actionSystem.PlatformDataKeys; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.editor.Editor; import com.intellij.openapi.editor.ex.DocumentEx; import com.intellij.openapi.editor.highlighter.EditorHighlighter; import com.intellij.openapi.fileTypes.FileTypes; import com.intellij.openapi.progress.*; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.Computable; import com.intellij.openapi.util.Pair; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.psi.PsiDirectory; import com.intellij.psi.PsiDocumentManager; import com.intellij.psi.PsiElement; import com.intellij.psi.PsiFile; import com.intellij.psi.util.PsiUtilBase; import com.intellij.util.containers.ContainerUtil; import org.jetbrains.annotations.NotNull; import java.awt.print.*; import java.util.List;
// Drives the "Print" action: resolves the print target (selection, file,
// console text, or a whole directory) from the DataContext, shows the print
// dialog, then renders via a BasePainter on a background task.
class PrintManager { private static final Logger LOG = Logger.getInstance("#com.intellij.codeEditor.printing.PrintManager"); public static void executePrint(DataContext dataContext) { final Project project = PlatformDataKeys.PROJECT.getData(dataContext); if (project == null) return;
PsiDirectory[] psiDirectory = new PsiDirectory[1]; PsiElement psiElement = LangDataKeys.PSI_ELEMENT.getData(dataContext); if (psiElement instanceof PsiDirectory) { psiDirectory[0] = (PsiDirectory)psiElement; } PsiFile psiFile = LangDataKeys.PSI_FILE.getData(dataContext); String[] shortFileName = new String[1]; String[] directoryName = new String[1]; if (psiFile != null || psiDirectory[0] != null) { if (psiFile != null) { shortFileName[0] = psiFile.getName(); if (psiDirectory[0] == null) { psiDirectory[0] = psiFile.getContainingDirectory(); } } if (psiDirectory[0] != null) { directoryName[0] = psiDirectory[0].getVirtualFile().getPresentableUrl(); } } Editor editor = PlatformDataKeys.EDITOR.getData(dataContext); String text = null; if (editor != null) { if (editor.getSelectionModel().hasSelection()) { text = CodeEditorBundle.message("print.selected.text.radio"); } else { text = psiFile == null ? "Console text" : null; } } PrintDialog printDialog = new PrintDialog(shortFileName[0], directoryName[0], text, project); printDialog.reset(); printDialog.show(); if (!printDialog.isOK()) { return; } printDialog.apply(); final PageFormat pageFormat = createPageFormat(); final BasePainter painter; PrintSettings printSettings = PrintSettings.getInstance(); if (printSettings.getPrintScope() != PrintSettings.PRINT_DIRECTORY) { if (psiFile == null && editor == null) return; TextPainter textPainter = psiFile != null ?
initTextPainter(psiFile, editor) : initTextPainter((DocumentEx)editor.getDocument(), project); if (textPainter == null) return; if (printSettings.getPrintScope() == PrintSettings.PRINT_SELECTED_TEXT && editor != null && editor.getSelectionModel().hasSelection()) { int firstLine = editor.getDocument().getLineNumber(editor.getSelectionModel().getSelectionStart()); textPainter.setSegment(editor.getSelectionModel().getSelectionStart(), editor.getSelectionModel().getSelectionEnd(), firstLine + 1); } painter = textPainter; } else { List<Pair<PsiFile, Editor>> filesList = ContainerUtil.newArrayList(); boolean isRecursive = printSettings.isIncludeSubdirectories(); addToPsiFileList(psiDirectory[0], filesList, isRecursive); painter = new MultiFilePainter(filesList); } Pageable document = new Pageable() { @Override public int getNumberOfPages() { return Pageable.UNKNOWN_NUMBER_OF_PAGES; } @Override public PageFormat getPageFormat(int pageIndex) throws IndexOutOfBoundsException { return pageFormat; } @Override public Printable getPrintable(int pageIndex) throws IndexOutOfBoundsException { return painter; } };
// NOTE(review): both setPageable and setPrintable are invoked on the same
// job below; per PrinterJob's contract the later call determines what is
// printed — confirm the double call is intentional.
final PrinterJob printerJob = PrinterJob.getPrinterJob(); try { printerJob.setPageable(document); printerJob.setPrintable(painter, pageFormat); if (!printerJob.printDialog()) { return; } } catch (Exception e) { LOG.warn(e); } PsiDocumentManager.getInstance(project).commitAllDocuments(); ProgressManager.getInstance() .run(new Task.Backgroundable(project, CodeEditorBundle.message("print.progress"), true, PerformInBackgroundOption.ALWAYS_BACKGROUND) { @Override public void run(@NotNull ProgressIndicator indicator) { try { painter.setProgress(indicator); printerJob.print(); } catch (ProcessCanceledException e) { LOG.info("Cancelled"); printerJob.cancel(); } catch (Exception e) { LOG.error(e); } } }); } private static void addToPsiFileList(PsiDirectory psiDirectory, List<Pair<PsiFile, Editor>> filesList, boolean isRecursive) { PsiFile[] files = psiDirectory.getFiles(); for
(PsiFile file : files) { filesList.add(Pair.create(file, PsiUtilBase.findEditor(file))); } if (isRecursive) { for (PsiDirectory directory : psiDirectory.getSubdirectories()) { if (!Project.DIRECTORY_STORE_FOLDER.equals(directory.getName())) { addToPsiFileList(directory, filesList, isRecursive); } } } } private static PageFormat createPageFormat() { PrintSettings printSettings = PrintSettings.getInstance(); PageFormat pageFormat = new PageFormat(); Paper paper = new Paper(); String paperSize = printSettings.PAPER_SIZE; double paperWidth = PageSizes.getWidth(paperSize) * 72; double paperHeight = PageSizes.getHeight(paperSize) * 72; double leftMargin = printSettings.LEFT_MARGIN * 72; double rightMargin = printSettings.RIGHT_MARGIN * 72; double topMargin = printSettings.TOP_MARGIN * 72; double bottomMargin = printSettings.BOTTOM_MARGIN * 72; paper.setSize(paperWidth, paperHeight); if (printSettings.PORTRAIT_LAYOUT) { pageFormat.setOrientation(PageFormat.PORTRAIT); paperWidth -= leftMargin + rightMargin; paperHeight -= topMargin + bottomMargin; paper.setImageableArea(leftMargin, topMargin, paperWidth, paperHeight); } else { pageFormat.setOrientation(PageFormat.LANDSCAPE); paperWidth -= topMargin + bottomMargin; paperHeight -= leftMargin + rightMargin; paper.setImageableArea(rightMargin, topMargin, paperWidth, paperHeight); } pageFormat.setPaper(paper); return pageFormat; } static TextPainter initTextPainter(final PsiFile psiFile, final Editor editor) { return ApplicationManager.getApplication().runReadAction(new Computable<TextPainter>() { @Override public TextPainter compute() { return doInitTextPainter(psiFile, editor); } }); } private static TextPainter doInitTextPainter(final PsiFile psiFile, final Editor editor) { VirtualFile virtualFile = psiFile.getVirtualFile(); if (virtualFile == null) return null; DocumentEx doc = (DocumentEx)PsiDocumentManager.getInstance(psiFile.getProject()).getDocument(psiFile); if (doc == null) return null; EditorHighlighter highlighter =
HighlighterFactory.createHighlighter(psiFile.getProject(), virtualFile); highlighter.setText(doc.getCharsSequence()); return new TextPainter(doc, highlighter, virtualFile.getPresentableUrl(), psiFile, psiFile.getFileType(), editor); } private static TextPainter initTextPainter(@NotNull final DocumentEx doc, final Project project) { final TextPainter[] res = new TextPainter[1]; ApplicationManager.getApplication().runReadAction( new Runnable() { @Override public void run() { res[0] = doInitTextPainter(doc, project); } } ); return res[0]; } private static TextPainter doInitTextPainter(@NotNull final DocumentEx doc, Project project) { EditorHighlighter highlighter = HighlighterFactory.createHighlighter(project, "unknown"); highlighter.setText(doc.getCharsSequence()); return new TextPainter(doc, highlighter, "unknown", project, FileTypes.PLAIN_TEXT, null); } }
/*
 * Copyright 2010 DTO Labs, Inc. (http://dtolabs.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/*
 * ServerService.java
 *
 * User: Greg Schueler <a href="mailto:greg@dtosolutions.com">greg@dtosolutions.com</a>
 * Created: Feb 17, 2010 3:49:04 PM
 * $Id$
 */
package com.dtolabs.client.services;

import com.dtolabs.client.utils.HttpClientException;
import com.dtolabs.client.utils.WebserviceHttpClient;
import com.dtolabs.client.utils.WebserviceHttpClientFactory;
import com.dtolabs.client.utils.WebserviceResponse;
import com.dtolabs.rundeck.core.CoreException;
import com.dtolabs.rundeck.core.common.Framework;

import java.io.File;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Map;

/**
 * ServerService provides the ability to make webservice requests to the Web server.
 * All {@code makeRundeckRequest} overloads funnel into the single full-argument
 * variant, which builds an authenticated {@link WebserviceHttpClient} from the
 * configured {@link WebConnectionParameters} and executes the request.
 *
 * @author Greg Schueler <a href="mailto:greg@dtosolutions.com">greg@dtosolutions.com</a>
 * @version $Revision$
 */
public class ServerService {
    /** Connection URL/credentials used for every request; may be replaced via the setter. */
    private WebConnectionParameters connParams;

    /**
     * Create ServerService from explicit connection parameters, wrapped in an
     * anonymous {@link WebConnectionParameters}.
     *
     * @param url      connection url
     * @param username connection username
     * @param password connection password
     */
    public ServerService(final String url, final String username, final String password) {
        this(new WebConnectionParameters() {
            public String getPassword() {
                return password;
            }

            public String getUsername() {
                return username;
            }

            public String getServerUrl() {
                return url;
            }
        });
    }

    /**
     * Create ServerService using the connection parameters
     *
     * @param connParams the connection info
     */
    public ServerService(final WebConnectionParameters connParams) {
        this.connParams = connParams;
    }

    /**
     * Make the request to the ItNav workbench.
     * Convenience overload with no expected content type and no form data.
     *
     * @param urlPath     the path for the request
     * @param queryParams any query parameters
     * @param uploadFile  a file to upload with the request.
     * @param method      HTTP connection method, e.g. "get","post","put","delete".
     *
     * @param uploadFileParam name of the uploaded file param
     * @return parsed XML document, or null
     *
     * @throws com.dtolabs.rundeck.core.CoreException
     *          if an error occurs
     * @throws java.net.MalformedURLException if connection URL or urlPath params are malformed.
     */
    public WebserviceResponse makeRundeckRequest(final String urlPath, final Map queryParams, final File uploadFile,
                                                final String method, final String uploadFileParam) throws
        CoreException, MalformedURLException {
        return makeRundeckRequest(urlPath, queryParams, uploadFile, method, null, uploadFileParam);
    }

    /**
     * Make the request to the ItNav workbench.
     * Convenience overload that posts form data instead of a file upload.
     *
     * @param urlPath     the path for the request
     * @param queryParams any query parameters
     * @param formData    form data
     *
     * @return parsed XML document, or null
     *
     * @throws com.dtolabs.rundeck.core.CoreException
     *          if an error occurs
     * @throws java.net.MalformedURLException if connection URL or urlPath params are malformed.
     */
    public WebserviceResponse makeRundeckRequest(final String urlPath, final Map queryParams,
                                                final Map<String, ? extends Object> formData) throws
        CoreException, MalformedURLException {
        return makeRundeckRequest(urlPath, queryParams, null, null, null, formData, null);
    }

    /**
     * Make the request to the ItNav workbench.
     * Convenience overload with an expected content type but no form data.
     *
     * @param uploadFileParam     name of the uploaded file param
     * @param urlPath             the path for the request
     * @param queryParams         any query parameters
     * @param uploadFile          a file to upload with the request.
     * @param method              HTTP connection method, e.g. "get","post","put","delete".
     * @param expectedContentType content type
     *
     * @return parsed XML document, or null
     *
     * @throws com.dtolabs.rundeck.core.CoreException
     *          if an error occurs
     * @throws java.net.MalformedURLException if connection URL or urlPath params are malformed.
     */
    public WebserviceResponse makeRundeckRequest(final String urlPath, final Map queryParams, final File uploadFile,
                                                final String method, final String expectedContentType,
                                                final String uploadFileParam) throws
        CoreException, MalformedURLException {
        return makeRundeckRequest(urlPath, queryParams, uploadFile, method, expectedContentType, null, uploadFileParam);
    }

    /**
     * Make the request to the ItNav workbench. This is the full-argument variant
     * that all other overloads delegate to.
     *
     * @param uploadFileParam     name of the uploaded file param
     * @param urlPath             the path for the request
     * @param queryParams         any query parameters
     * @param uploadFile          a file to upload with the request.
     * @param method              HTTP connection method, e.g. "get","post","put","delete".
     * @param expectedContentType expected content type
     * @param formData            data
     *
     * @return parsed XML document, or null
     *
     * @throws com.dtolabs.rundeck.core.CoreException
     *          if an error occurs
     * @throws java.net.MalformedURLException if connection URL or urlPath params are malformed.
     */
    public WebserviceResponse makeRundeckRequest(final String urlPath, final Map queryParams, final File uploadFile,
                                                final String method, final String expectedContentType,
                                                final Map<String, ? extends Object> formData,
                                                final String uploadFileParam) throws
        CoreException, MalformedURLException {
        if (null == connParams) {
            throw new IllegalArgumentException("WebConnectionParameters must be specified");
        }
        final URL jcUrl = new URL(connParams.getServerUrl());
        final String jcBasePath = jcUrl.getPath();
        final WebserviceHttpClient hc;
        // Non-empty form data selects the form-post client; otherwise the
        // upload-file client is used (uploadFile may itself be null).
        if (null == formData || formData.size() < 1) {
            hc = WebserviceHttpClientFactory.getInstance().getWebserviceHttpClient(jcUrl + urlPath, jcBasePath,
                connParams.getUsername(),
                connParams.getPassword(),
                queryParams,
                uploadFile, uploadFileParam, null, expectedContentType);
        } else {
            hc = WebserviceHttpClientFactory.getInstance().getWebserviceHttpClient(jcUrl + urlPath, jcBasePath,
                connParams.getUsername(),
                connParams.getPassword(),
                queryParams,
                formData);
        }
        // Ask the server to wrap XML responses in the API envelope.
        hc.setRequestHeader("X-Rundeck-API-XML-Response-Wrapper", "true");
        if (null != method) {
            hc.setMethodType(method);
        }

        try {
            hc.makeRequest();
        } catch (IOException e) {
            throw new CoreException("Error making server request to " + jcUrl + ": " + e.getMessage(), e);
        } catch (HttpClientException e) {
            throw new CoreException("Error making server request to " + jcUrl + ": " + e.getMessage(), e);
        }
        return hc;
    }

    /**
     * Return the connection params configured for this ServerService
     * @return connection params
     */
    public WebConnectionParameters getConnParams() {
        return connParams;
    }

    /**
     * Set the connection params.
     * @param connParams connection params
     */
    public void setConnParams(final WebConnectionParameters connParams) {
        this.connParams = connParams;
    }

    /**
     * An interface for providing connection parameters for the web app
     */
    public static interface WebConnectionParameters {
        /**
         * Return the password
         *
         * @return password
         */
        public String getPassword();

        /**
         * Return the user name
         *
         * @return username
         */
        public String getUsername();

        /**
         * Return the URL
         *
         * @return connection URL
         */
        public String getServerUrl();
    }
}
package org.apache.lucene.search;

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.io.IOException;
import java.util.Random;

import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.DoubleField;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.document.FloatField;
import org.apache.lucene.document.IntField;
import org.apache.lucene.document.LongField;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.document.SortedDocValuesField;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;

/**
 * Base class to test range queries. Builds two shared indexes in
 * {@code @BeforeClass} — one whose "rand" values may be negative and one whose
 * values are non-negative — that subclasses query against.
 */
public class BaseTestRangeFilter extends LuceneTestCase {

  public static final boolean F = false;
  public static final boolean T = true;

  /**
   * Collation interacts badly with hyphens -- collation produces different
   * ordering than Unicode code-point ordering -- so two indexes are created:
   * one which can't have negative random integers, for testing collated ranges,
   * and the other which can have negative random integers, for all other tests.
   */
  static class TestIndex {
    // Running max/min of the random "rand" values actually written; updated by build().
    int maxR;
    int minR;
    boolean allowNegativeRandomInts;
    Directory index;

    TestIndex(Random random, int minR, int maxR, boolean allowNegativeRandomInts) {
      this.minR = minR;
      this.maxR = maxR;
      this.allowNegativeRandomInts = allowNegativeRandomInts;
      index = newDirectory(random);
    }
  }

  static IndexReader signedIndexReader;
  static IndexReader unsignedIndexReader;

  static TestIndex signedIndexDir;
  static TestIndex unsignedIndexDir;

  static int minId = 0;
  static int maxId;

  // Width (in digits) of Integer.MAX_VALUE; pad() produces fixed-width strings of this size.
  static final int intLength = Integer.toString(Integer.MAX_VALUE).length();

  /**
   * a simple padding function that should work with any int.
   * Produces fixed-width strings whose lexicographic order matches numeric
   * order: negatives are prefixed with "-" (which sorts before "0") and mapped
   * via two's-complement offset (Integer.MAX_VALUE + n + 1) so that a more
   * negative n yields a smaller string.
   */
  public static String pad(int n) {
    StringBuilder b = new StringBuilder(40);
    String p = "0";
    if (n < 0) {
      p = "-";
      // Shift negatives into [0, Integer.MAX_VALUE] while preserving order.
      n = Integer.MAX_VALUE + n + 1;
    }
    b.append(p);
    String s = Integer.toString(n);
    // Left-pad with zeros up to the fixed width.
    for (int i = s.length(); i <= intLength; i++) {
      b.append("0");
    }
    b.append(s);

    return b.toString();
  }

  @BeforeClass
  public static void beforeClassBaseTestRangeFilter() throws Exception {
    maxId = atLeast(500);
    // minR/maxR start at MAX_VALUE/MIN_VALUE sentinels so the first random
    // value written by build() becomes both the running min and max.
    signedIndexDir = new TestIndex(random(), Integer.MAX_VALUE, Integer.MIN_VALUE, true);
    unsignedIndexDir = new TestIndex(random(), Integer.MAX_VALUE, 0, false);
    signedIndexReader = build(random(), signedIndexDir);
    unsignedIndexReader = build(random(), unsignedIndexDir);
  }

  @AfterClass
  public static void afterClassBaseTestRangeFilter() throws Exception {
    signedIndexReader.close();
    unsignedIndexReader.close();
    signedIndexDir.index.close();
    unsignedIndexDir.index.close();
    // Null out statics so the test framework can garbage-collect them.
    signedIndexReader = null;
    unsignedIndexReader = null;
    signedIndexDir = null;
    unsignedIndexDir = null;
  }

  /**
   * Builds the index for the given {@link TestIndex}: one document per id in
   * [minId, maxId], each carrying the id in several numeric field types plus a
   * random "rand" value. Retries from scratch until exactly one document holds
   * the minimum and exactly one holds the maximum random value.
   */
  private static IndexReader build(Random random, TestIndex index) throws IOException {
    /* build an index */

    // A single reusable Document whose field values are overwritten per id.
    Document doc = new Document();
    Field idField = newStringField(random, "id", "", Field.Store.YES);
    Field idDVField = new SortedDocValuesField("id", new BytesRef());
    Field intIdField = new IntField("id_int", 0, Store.YES);
    Field intDVField = new NumericDocValuesField("id_int", 0);
    Field floatIdField = new FloatField("id_float", 0, Store.YES);
    Field floatDVField = new NumericDocValuesField("id_float", 0);
    Field longIdField = new LongField("id_long", 0, Store.YES);
    Field longDVField = new NumericDocValuesField("id_long", 0);
    Field doubleIdField = new DoubleField("id_double", 0, Store.YES);
    Field doubleDVField = new NumericDocValuesField("id_double", 0);
    Field randField = newStringField(random, "rand", "", Field.Store.YES);
    Field randDVField = new SortedDocValuesField("rand", new BytesRef());
    Field bodyField = newStringField(random, "body", "", Field.Store.NO);
    Field bodyDVField = new SortedDocValuesField("body", new BytesRef());
    doc.add(idField);
    doc.add(idDVField);
    doc.add(intIdField);
    doc.add(intDVField);
    doc.add(floatIdField);
    doc.add(floatDVField);
    doc.add(longIdField);
    doc.add(longDVField);
    doc.add(doubleIdField);
    doc.add(doubleDVField);
    doc.add(randField);
    doc.add(randDVField);
    doc.add(bodyField);
    doc.add(bodyDVField);

    RandomIndexWriter writer = new RandomIndexWriter(random, index.index, newIndexWriterConfig(random, new MockAnalyzer(random))
        .setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(TestUtil.nextInt(random, 50, 1000)).setMergePolicy(newLogMergePolicy()));
    TestUtil.reduceOpenFiles(writer.w);

    while(true) {
      // Counts of how many docs hold the current min/max random value.
      int minCount = 0;
      int maxCount = 0;

      for (int d = minId; d <= maxId; d++) {
        idField.setStringValue(pad(d));
        idDVField.setBytesValue(new BytesRef(pad(d)));
        intIdField.setIntValue(d);
        intDVField.setLongValue(d);
        floatIdField.setFloatValue(d);
        floatDVField.setLongValue(Float.floatToRawIntBits(d));
        longIdField.setLongValue(d);
        longDVField.setLongValue(d);
        doubleIdField.setDoubleValue(d);
        doubleDVField.setLongValue(Double.doubleToRawLongBits(d));
        int r = index.allowNegativeRandomInts ? random.nextInt() : random
            .nextInt(Integer.MAX_VALUE);
        // Track the running max and how many docs share it.
        if (index.maxR < r) {
          index.maxR = r;
          maxCount = 1;
        } else if (index.maxR == r) {
          maxCount++;
        }

        // Track the running min and how many docs share it.
        if (r < index.minR) {
          index.minR = r;
          minCount = 1;
        } else if (r == index.minR) {
          minCount++;
        }
        randField.setStringValue(pad(r));
        randDVField.setBytesValue(new BytesRef(pad(r)));
        bodyField.setStringValue("body");
        bodyDVField.setBytesValue(new BytesRef("body"));
        writer.addDocument(doc);
      }

      if (minCount == 1 && maxCount == 1) {
        // our subclasses rely on only 1 doc having the min or
        // max, so, we loop until we satisfy that. it should be
        // exceedingly rare (Yonik calculates 1 in ~429,000
        // times) that this loop requires more than one try:
        IndexReader ir = writer.getReader();
        writer.close();
        return ir;
      }

      // try again
      writer.deleteAll();
    }
  }

  @Test
  public void testPad() {
    // Adjacent pairs must keep equal string length and numeric ordering.
    int[] tests = new int[] {-9999999, -99560, -100, -3, -1, 0, 3, 9, 10,
        1000, 999999999};
    for (int i = 0; i < tests.length - 1; i++) {
      int a = tests[i];
      int b = tests[i + 1];
      String aa = pad(a);
      String bb = pad(b);
      String label = a + ":" + aa + " vs " + b + ":" + bb;
      assertEquals("length of " + label, aa.length(), bb.length());
      assertTrue("compare less than " + label, aa.compareTo(bb) < 0);
    }
  }
}
/*
 * Copyright (C) 2007 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.android.ddmlib;

import java.io.IOException;
import java.nio.ByteBuffer;

/**
 * Handle thread status updates. Parses the DDM thread-related chunk types
 * (THCR/THDE/THST/THNM/STKL) arriving from a client VM and pushes the decoded
 * data into the client's {@link ClientData}; also provides senders for the
 * outgoing THEN/STKL/THST requests.
 */
final class HandleThread extends ChunkHandler {

    public static final int CHUNK_THEN = type("THEN");
    public static final int CHUNK_THCR = type("THCR");
    public static final int CHUNK_THDE = type("THDE");
    public static final int CHUNK_THST = type("THST");
    public static final int CHUNK_THNM = type("THNM");
    public static final int CHUNK_STKL = type("STKL");

    private static final HandleThread mInst = new HandleThread();

    // only read/written by requestThreadUpdates()
    private static volatile boolean sThreadStatusReqRunning = false;
    private static volatile boolean sThreadStackTraceReqRunning = false;

    private HandleThread() {}

    /**
     * Register for the packets we expect to get from the client.
     */
    public static void register(MonitorThread mt) {
        mt.registerChunkHandler(CHUNK_THCR, mInst);
        mt.registerChunkHandler(CHUNK_THDE, mInst);
        mt.registerChunkHandler(CHUNK_THST, mInst);
        mt.registerChunkHandler(CHUNK_THNM, mInst);
        mt.registerChunkHandler(CHUNK_STKL, mInst);
    }

    /**
     * Client is ready. Enables thread-creation notifications if the client
     * wants thread updates.
     */
    @Override
    public void clientReady(Client client) throws IOException {
        Log.d("ddm-thread", "Now ready: " + client);
        if (client.isThreadUpdateEnabled())
            sendTHEN(client, true);
    }

    /**
     * Client went away. Nothing to clean up.
     */
    @Override
    public void clientDisconnected(Client client) {}

    /**
     * Chunk handler entry point. Dispatches on the chunk type to the matching
     * handleXXXX method.
     */
    @Override
    public void handleChunk(Client client, int type, ByteBuffer data,
        boolean isReply, int msgId) {

        Log.d("ddm-thread", "handling " + ChunkHandler.name(type));

        if (type == CHUNK_THCR) {
            handleTHCR(client, data);
        } else if (type == CHUNK_THDE) {
            handleTHDE(client, data);
        } else if (type == CHUNK_THST) {
            handleTHST(client, data);
        } else if (type == CHUNK_THNM) {
            handleTHNM(client, data);
        } else if (type == CHUNK_STKL) {
            handleSTKL(client, data);
        } else {
            handleUnknownChunk(client, type, data, isReply, msgId);
        }
    }

    /*
     * Handle a thread creation message.
     *
     * Payload: (4b) threadId, (4b) name length, then the name.
     *
     * We should be tolerant of receiving a duplicate create message.  (It
     * shouldn't happen with the current implementation.)
     */
    private void handleTHCR(Client client, ByteBuffer data) {
        int threadId, nameLen;
        String name;

        threadId = data.getInt();
        nameLen = data.getInt();
        name = ByteBufferUtil.getString(data, nameLen);

        Log.v("ddm-thread", "THCR: " + threadId + " '" + name + "'");

        client.getClientData().addThread(threadId, name);
        client.update(Client.CHANGE_THREAD_DATA);
    }

    /*
     * Handle a thread death message. Payload: (4b) threadId.
     */
    private void handleTHDE(Client client, ByteBuffer data) {
        int threadId;

        threadId = data.getInt();
        Log.v("ddm-thread", "THDE: " + threadId);

        client.getClientData().removeThread(threadId);
        client.update(Client.CHANGE_THREAD_DATA);
    }

    /*
     * Handle a thread status update message.
     *
     * Response has:
     *  (1b) header len
     *  (1b) bytes per entry
     *  (2b) thread count
     * Then, for each thread:
     *  (4b) threadId (matches value from THCR)
     *  (1b) thread status
     *  (4b) tid
     *  (4b) utime
     *  (4b) stime
     */
    private void handleTHST(Client client, ByteBuffer data) {
        int headerLen, bytesPerEntry, extraPerEntry;
        int threadCount;

        headerLen = (data.get() & 0xff);
        bytesPerEntry = (data.get() & 0xff);
        threadCount = data.getShort();

        // Skip any header bytes beyond the 4 we just consumed (forward compat).
        headerLen -= 4;     // we've read 4 bytes
        while (headerLen-- > 0)
            data.get();

        // Per-entry bytes beyond the 18 we understand must be skipped per thread.
        extraPerEntry = bytesPerEntry - 18;     // we want 18 bytes

        Log.v("ddm-thread", "THST: threadCount=" + threadCount);

        /*
         * For each thread, extract the data, find the appropriate
         * client, and add it to the ClientData.
         */
        for (int i = 0; i < threadCount; i++) {
            int threadId, status, tid, utime, stime;
            boolean isDaemon = false;

            threadId = data.getInt();
            status = data.get();
            tid = data.getInt();
            utime = data.getInt();
            stime = data.getInt();
            // The daemon flag (18th byte) only exists in newer, longer entries.
            if (bytesPerEntry >= 18)
                isDaemon = (data.get() != 0);

            Log.v("ddm-thread", "  id=" + threadId
                + ", status=" + status + ", tid=" + tid
                + ", utime=" + utime + ", stime=" + stime);

            ClientData cd = client.getClientData();
            ThreadInfo threadInfo = cd.getThread(threadId);
            if (threadInfo != null)
                threadInfo.updateThread(status, tid, utime, stime, isDaemon);
            else
                Log.d("ddms", "Thread with id=" + threadId + " not found");

            // slurp up any extra
            for (int slurp = extraPerEntry; slurp > 0; slurp--)
                data.get();
        }

        client.update(Client.CHANGE_THREAD_DATA);
    }

    /*
     * Handle a THNM (THread NaMe) message.  We get one of these after
     * somebody calls Thread.setName() on a running thread.
     *
     * Payload: (4b) threadId, (4b) name length, then the name.
     */
    private void handleTHNM(Client client, ByteBuffer data) {
        int threadId, nameLen;
        String name;

        threadId = data.getInt();
        nameLen = data.getInt();
        name = ByteBufferUtil.getString(data, nameLen);

        Log.v("ddm-thread", "THNM: " + threadId + " '" + name + "'");

        ThreadInfo threadInfo = client.getClientData().getThread(threadId);
        if (threadInfo != null) {
            threadInfo.setThreadName(name);
            client.update(Client.CHANGE_THREAD_DATA);
        } else {
            Log.d("ddms", "Thread with id=" + threadId + " not found");
        }
    }

    /**
     * Parse an incoming STKL: deserializes a StackTraceElement[] for one
     * thread and attaches it to the matching ThreadInfo.
     */
    private void handleSTKL(Client client, ByteBuffer data) {
        StackTraceElement[] trace;
        int i, threadId, stackDepth;
        @SuppressWarnings("unused")
        int future;

        // Reserved field in the protocol; currently unused.
        future = data.getInt();
        threadId = data.getInt();

        Log.v("ddms", "STKL: " + threadId);

        /* un-serialize the StackTraceElement[] */
        stackDepth = data.getInt();
        trace = new StackTraceElement[stackDepth];
        for (i = 0; i < stackDepth; i++) {
            String className, methodName, fileName;
            int len, lineNumber;

            len = data.getInt();
            className = ByteBufferUtil.getString(data, len);
            len = data.getInt();
            methodName = ByteBufferUtil.getString(data, len);
            len = data.getInt();
            // A zero length means no source file is known for this frame.
            if (len == 0) {
                fileName = null;
            } else {
                fileName = ByteBufferUtil.getString(data, len);
            }
            lineNumber = data.getInt();

            trace[i] = new StackTraceElement(className, methodName, fileName,
                    lineNumber);
        }

        ThreadInfo threadInfo = client.getClientData().getThread(threadId);
        if (threadInfo != null) {
            threadInfo.setStackCall(trace);
            client.update(Client.CHANGE_THREAD_STACKTRACE);
        } else {
            Log.d("STKL", String.format(
                    "Got stackcall for thread %1$d, which does not exists (anymore?).", //$NON-NLS-1$
                    threadId));
        }
    }

    /**
     * Send a THEN (THread notification ENable) request to the client.
     */
    public static void sendTHEN(Client client, boolean enable)
        throws IOException {

        ByteBuffer rawBuf = allocBuffer(1);
        JdwpPacket packet = new JdwpPacket(rawBuf);
        ByteBuffer buf = getChunkDataBuf(rawBuf);

        if (enable)
            buf.put((byte)1);
        else
            buf.put((byte)0);

        finishChunkPacket(packet, CHUNK_THEN, buf.position());
        Log.d("ddm-thread", "Sending " + name(CHUNK_THEN) + ": " + enable);
        client.sendAndConsume(packet, mInst);
    }

    /**
     * Send a STKL (STacK List) request to the client.  The VM will suspend
     * the target thread, obtain its stack, and return it.  If the thread
     * is no longer running, a failure result will be returned.
     */
    public static void sendSTKL(Client client, int threadId)
        throws IOException {

        // Dead-code debug switch: flip to true to log instead of sending.
        if (false) {
            Log.d("ddm-thread", "would send STKL " + threadId);
            return;
        }

        ByteBuffer rawBuf = allocBuffer(4);
        JdwpPacket packet = new JdwpPacket(rawBuf);
        ByteBuffer buf = getChunkDataBuf(rawBuf);

        buf.putInt(threadId);

        finishChunkPacket(packet, CHUNK_STKL, buf.position());
        Log.d("ddm-thread", "Sending " + name(CHUNK_STKL) + ": " + threadId);
        client.sendAndConsume(packet, mInst);
    }

    /**
     * This is called periodically from the UI thread.  To avoid locking
     * the UI while we request the updates, we create a new thread.
     * The volatile sThreadStatusReqRunning flag drops overlapping requests.
     */
    static void requestThreadUpdate(final Client client) {
        if (client.isDdmAware() && client.isThreadUpdateEnabled()) {
            if (sThreadStatusReqRunning) {
                Log.w("ddms", "Waiting for previous thread update req to finish");
                return;
            }

            new Thread("Thread Status Req") {
                @Override
                public void run() {
                    sThreadStatusReqRunning = true;
                    try {
                        sendTHST(client);
                    } catch (IOException ioe) {
                        Log.d("ddms", "Unable to request thread updates from "
                                + client + ": " + ioe.getMessage());
                    } finally {
                        sThreadStatusReqRunning = false;
                    }
                }
            }.start();
        }
    }

    /**
     * Requests a stack trace refresh for one thread on a background thread,
     * guarded by sThreadStackTraceReqRunning to drop overlapping requests.
     */
    static void requestThreadStackCallRefresh(final Client client, final int threadId) {
        if (client.isDdmAware() && client.isThreadUpdateEnabled()) {
            if (sThreadStackTraceReqRunning) {
                Log.w("ddms", "Waiting for previous thread stack call req to finish");
                return;
            }

            new Thread("Thread Status Req") {
                @Override
                public void run() {
                    sThreadStackTraceReqRunning = true;
                    try {
                        sendSTKL(client, threadId);
                    } catch (IOException ioe) {
                        Log.d("ddms", "Unable to request thread stack call updates from "
                                + client + ": " + ioe.getMessage());
                    } finally {
                        sThreadStackTraceReqRunning = false;
                    }
                }
            }.start();
        }
    }

    /*
     * Send a THST request to the specified client. The request carries no
     * payload; the reply is handled by handleTHST().
     */
    private static void sendTHST(Client client) throws IOException {
        ByteBuffer rawBuf = allocBuffer(0);
        JdwpPacket packet = new JdwpPacket(rawBuf);
        ByteBuffer buf = getChunkDataBuf(rawBuf);

        // nothing much to say

        finishChunkPacket(packet, CHUNK_THST, buf.position());
        Log.d("ddm-thread", "Sending " + name(CHUNK_THST));
        client.sendAndConsume(packet, mInst);
    }
}
/* * Copyright (c) 2010-2014 William Bittle http://www.dyn4j.org/ * All rights reserved. * * Redistribution and use in source and binary forms, with or without modification, are permitted * provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright notice, this list of conditions * and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, this list of conditions * and the following disclaimer in the documentation and/or other materials provided with the * distribution. * * Neither the name of dyn4j nor the names of its contributors may be used to endorse or * promote products derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER * IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package org.dyn4j.geometry; import org.dyn4j.resources.Messages; /** * Represents an axis aligned bounding box. * @author William Bittle * @version 3.1.5 * @since 3.0.0 */ public class AABB { /** The minimum extent */ protected Vector2 min; /** The maximum extent */ protected Vector2 max; /** * Full constructor. 
* @param minX the minimum x extent * @param minY the minimum y extent * @param maxX the maximum x extent * @param maxY the maximum y extent */ public AABB(double minX, double minY, double maxX, double maxY) { this(new Vector2(minX, minY), new Vector2(maxX, maxY)); } /** * Full constructor. * @param min the minimum extent * @param max the maximum extent */ public AABB(Vector2 min, Vector2 max) { // check the min and max if (min.x > max.x || min.y > max.y) throw new IllegalArgumentException(Messages.getString("geometry.aabb.invalidMinMax")); this.min = min; this.max = max; } /** * Full constructor. * @param radius the radius of a circle fitting inside an AABB * @since 3.1.5 */ public AABB(double radius) { this(null, radius); } /** * Full constructor. * <p> * Creates an AABB for a circle with the given center and radius. * @param center the center of the circle * @param radius the radius of the circle * @since 3.1.5 */ public AABB(Vector2 center, double radius) { if (radius < 0) throw new IllegalArgumentException(Messages.getString("geometry.aabb.invalidRadius")); if (center == null) { this.min = new Vector2(-radius, -radius); this.max = new Vector2( radius, radius); } else { this.min = new Vector2(center.x - radius, center.y - radius); this.max = new Vector2(center.x + radius, center.y + radius); } } /** * Copy constructor. * @param aabb the {@link AABB} to copy * @since 3.1.1 */ public AABB(AABB aabb) { this.min = aabb.min.copy(); this.max = aabb.max.copy(); } /* (non-Javadoc) * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("AABB[Min=").append(this.min) .append("|Max=").append(this.max) .append("]"); return sb.toString(); } /** * Translates the AABB by the given translation. 
* @param translation the translation * @since 3.1.0 */ public void translate(Vector2 translation) { this.max.add(translation); this.min.add(translation); } /** * Returns a new AABB of this AABB translated by the * given translation amount. * @param translation the translation * @return AABB * @since 3.1.1 */ public AABB getTranslated(Vector2 translation) { return new AABB( this.min.sum(translation), this.max.sum(translation)); } /** * Returns the width of this {@link AABB}. * @return double * @since 3.0.1 */ public double getWidth() { return this.max.x - this.min.x; } /** * Returns the height of this {@link AABB}. * @return double * @since 3.0.1 */ public double getHeight() { return this.max.y - this.min.y; } /** * Returns the perimeter of this {@link AABB}. * @return double */ public double getPerimeter() { return 2 * (this.max.x - this.min.x + this.max.y - this.min.y); } /** * Returns the area of this {@link AABB};. * @return double */ public double getArea() { return (this.max.x - this.min.x) * (this.max.y - this.min.y); } /** * Performs a union of this {@link AABB} and the given {@link AABB} placing * the result of the union into this {@link AABB}. * @param aabb the {@link AABB} to union */ public void union(AABB aabb) { this.min.x = Math.min(this.min.x, aabb.min.x); this.min.y = Math.min(this.min.y, aabb.min.y); this.max.x = Math.max(this.max.x, aabb.max.x); this.max.y = Math.max(this.max.y, aabb.max.y); } /** * Performs a union of this {@link AABB} and the given {@link AABB} returning * a new {@link AABB} containing the result. 
* @param aabb the {@link AABB} to union * @return {@link AABB} the resulting union */ public AABB getUnion(AABB aabb) { Vector2 min = new Vector2(); Vector2 max = new Vector2(); min.x = Math.min(this.min.x, aabb.min.x); min.y = Math.min(this.min.y, aabb.min.y); max.x = Math.max(this.max.x, aabb.max.x); max.y = Math.max(this.max.y, aabb.max.y); return new AABB(min, max); } /** * Performs the intersection of this {@link AABB} and the given {@link AABB} placing * the result into this {@link AABB}. * <p> * If the given {@link AABB} does not overlap this {@link AABB}, this {@link AABB} is * set to a zero {@link AABB}. * @param aabb the {@link AABB} to intersect * @since 3.1.1 */ public void intersection(AABB aabb) { this.min.x = Math.max(this.min.x, aabb.min.x); this.min.y = Math.max(this.min.y, aabb.min.y); this.max.x = Math.min(this.max.x, aabb.max.x); this.max.y = Math.min(this.max.y, aabb.max.y); // check for a bad AABB if (this.min.x > this.max.x || this.min.y > this.max.y) { // the two AABBs were not overlapping // set this AABB to a degenerate one this.min.x = 0.0; this.min.y = 0.0; this.max.x = 0.0; this.max.y = 0.0; } } /** * Performs the intersection of this {@link AABB} and the given {@link AABB} returning * the result in a new {@link AABB}. * <p> * If the given {@link AABB} does not overlap this {@link AABB}, a zero {@link AABB} is * returned. 
* @param aabb the {@link AABB} to intersect * @return {@link AABB} * @since 3.1.1 */ public AABB getIntersection(AABB aabb) { Vector2 min = new Vector2(); Vector2 max = new Vector2(); min.x = Math.max(this.min.x, aabb.min.x); min.y = Math.max(this.min.y, aabb.min.y); max.x = Math.min(this.max.x, aabb.max.x); max.y = Math.min(this.max.y, aabb.max.y); // check for a bad AABB if (min.x > max.x || min.y > max.y) { // the two AABBs were not overlapping // return a degenerate one return new AABB(new Vector2(), new Vector2()); } return new AABB(min, max); } /** * Expands this {@link AABB} by half the given expansion in each direction. * <p> * The expansion can be negative to shrink the {@link AABB}. However, if the expansion is * greater than the current width/height, the {@link AABB} can become invalid. In this * case, the AABB will become a degenerate AABB at the mid point of the min and max for * the respective coordinates. * @param expansion the expansion amount */ public void expand(double expansion) { double e = expansion * 0.5; this.min.x -= e; this.min.y -= e; this.max.x += e; this.max.y += e; // we only need to verify the new aabb if the expansion // was inwardly if (expansion < 0.0) { // if the aabb is invalid then set the min/max(es) to // the middle value of their current values if (this.min.x > this.max.x) { double mid = (this.min.x + this.max.x) * 0.5; this.min.x = mid; this.max.x = mid; } if (this.min.y > this.max.y) { double mid = (this.min.y + this.max.y) * 0.5; this.min.y = mid; this.max.y = mid; } } } /** * Returns a new {@link AABB} of this AABB expanded by half the given expansion * in both the x and y directions. * <p> * The expansion can be negative to shrink the {@link AABB}. However, if the expansion is * greater than the current width/height, the {@link AABB} can become invalid. In this * case, the AABB will become a degenerate AABB at the mid point of the min and max for * the respective coordinates. 
* @param expansion the expansion amount * @return {@link AABB} * @since 3.1.1 */ public AABB getExpanded(double expansion) { double e = expansion * 0.5; double minx = this.min.x - e; double miny = this.min.y - e; double maxx = this.max.x + e; double maxy = this.max.y + e; // we only need to verify the new aabb if the expansion // was inwardly if (expansion < 0.0) { // if the aabb is invalid then set the min/max(es) to // the middle value of their current values if (minx > maxx) { double mid = (minx + maxx) * 0.5; minx = mid; maxx = mid; } if (miny > maxy) { double mid = (miny + maxy) * 0.5; miny = mid; maxy = mid; } } return new AABB( new Vector2(minx, miny), new Vector2(maxx, maxy)); } /** * Returns true if the given {@link AABB} and this {@link AABB} overlap. * @param aabb the {@link AABB} to test * @return boolean true if the {@link AABB}s overlap */ public boolean overlaps(AABB aabb) { // check for overlap along the x-axis if (this.min.x > aabb.max.x || this.max.x < aabb.min.x) { // the aabbs do not overlap along the x-axis return false; } else { // check for overlap along the y-axis if (this.min.y > aabb.max.y || this.max.y < aabb.min.y) { // the aabbs do not overlap along the y-axis return false; } else { return true; } } } /** * Returns true if the given {@link AABB} is contained within this {@link AABB}. * @param aabb the {@link AABB} to test * @return boolean */ public boolean contains(AABB aabb) { if (this.min.x <= aabb.min.x && this.max.x >= aabb.max.x) { if (this.min.y <= aabb.min.y && this.max.y >= aabb.max.y) { return true; } } return false; } /** * Returns true if the given point is contained within this {@link AABB}. * @param point the point to test * @return boolean * @since 3.1.1 */ public boolean contains(Vector2 point) { return this.contains(point.x, point.y); } /** * Returns true if the given point's coordinates are contained within this {@link AABB}. 
* @param x the x coordinate of the point * @param y the y coordinate of the point * @return boolean * @since 3.1.1 */ public boolean contains(double x, double y) { if (this.min.x <= x && this.max.x >= x) { if (this.min.y <= y && this.max.y >= y) { return true; } } return false; } /** * Returns true if this {@link AABB} is degenerate. * <p> * A degenerate {@link AABB} is one where its min and max x or y * coordinates are equal. * @return boolean * @since 3.1.1 */ public boolean isDegenerate() { return this.min.x == this.max.x || this.min.y == this.max.y; } /** * Returns true if this {@link AABB} is degenerate given * the specified error. * <p> * An {@link AABB} is degenerate given some error if * max - min <= error for either the x or y coordinate. * @param error the allowed error * @return boolean * @since 3.1.1 * @see #isDegenerate() */ public boolean isDegenerate(double error) { return Math.abs(this.max.x - this.min.x) <= error || Math.abs(this.max.y - this.min.y) <= error; } /** * Returns the minimum x extent. * @return double */ public double getMinX() { return this.min.x; } /** * Returns the maximum x extent. * @return double */ public double getMaxX() { return this.max.x; } /** * Returns the maximum y extent. * @return double */ public double getMaxY() { return this.max.y; } /** * Returns the minimum y extent. * @return double */ public double getMinY() { return this.min.y; } }
/*
 * Copyright 2000-2009 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * created at Sep 24, 2001
 * @author Jeka
 */
package com.intellij.refactoring.move.moveInner;

import com.intellij.codeInsight.ChangeContextUtil;
import com.intellij.codeInsight.CodeInsightUtilBase;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.fileEditor.FileEditorManager;
import com.intellij.openapi.fileEditor.OpenFileDescriptor;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Ref;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.CodeStyleManager;
import com.intellij.psi.codeStyle.JavaCodeStyleManager;
import com.intellij.psi.codeStyle.VariableKind;
import com.intellij.psi.javadoc.PsiDocComment;
import com.intellij.psi.search.LocalSearchScope;
import com.intellij.psi.search.searches.ReferencesSearch;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtil;
import com.intellij.refactoring.BaseRefactoringProcessor;
import com.intellij.refactoring.RefactoringBundle;
import com.intellij.refactoring.listeners.RefactoringElementListener;
import com.intellij.refactoring.move.MoveCallback;
import com.intellij.refactoring.move.moveClassesOrPackages.MoveClassesOrPackagesUtil;
import com.intellij.refactoring.rename.RenameUtil;
import com.intellij.refactoring.util.ConflictsUtil;
import com.intellij.refactoring.util.NonCodeUsageInfo;
import com.intellij.refactoring.util.RefactoringUIUtil;
import com.intellij.refactoring.util.RefactoringUtil;
import com.intellij.usageView.UsageInfo;
import com.intellij.usageView.UsageViewDescriptor;
import com.intellij.usageView.UsageViewUtil;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.VisibilityUtil;
import com.intellij.util.containers.HashMap;
import com.intellij.util.containers.MultiMap;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;

/**
 * Refactoring processor for the "Move Inner Class" refactoring: moves an inner
 * (nested) class to a new top-level location — either a target package directory
 * or another class. Optionally passes the outer-class instance into the moved
 * class as a constructor parameter backed by a new field, so references to outer
 * members keep working after the move.
 */
public class MoveInnerProcessor extends BaseRefactoringProcessor {
  private static final Logger LOG = Logger.getInstance("#com.intellij.refactoring.move.moveInner.MoveInnerProcessor");

  private MoveCallback myMoveCallback;
  // the class being moved; re-pointed several times during the refactoring as PSI is rebuilt
  private PsiClass myInnerClass;
  private PsiClass myOuterClass;
  // either a PsiDirectory (move to package) or a PsiClass (move into another class)
  private PsiElement myTargetContainer;
  // non-null iff the outer instance must be passed into the moved class's constructors
  private String myParameterNameOuterClass;
  // name of the generated field that stores the outer instance (derived from the parameter name)
  private String myFieldNameOuterClass;
  private String myDescriptiveName = "";
  private String myNewClassName;
  private boolean mySearchInComments;
  private boolean mySearchInNonJavaFiles;
  // non-code (comment/string/non-Java) usages, renamed later in performPsiSpoilingRefactoring
  private NonCodeUsageInfo[] myNonCodeUsages;

  public MoveInnerProcessor(Project project, MoveCallback moveCallback) {
    super(project);
    myMoveCallback = moveCallback;
  }

  public MoveInnerProcessor(Project project,
                            PsiClass innerClass,
                            String name,
                            boolean passOuterClass,
                            String parameterName,
                            final PsiElement targetContainer) {
    super(project);
    setup(innerClass, name, passOuterClass, parameterName, true, true, targetContainer);
  }

  protected String getCommandName() {
    return RefactoringBundle.message("move.inner.class.command", myDescriptiveName);
  }

  protected UsageViewDescriptor createUsageViewDescriptor(UsageInfo[] usages) {
    return new MoveInnerViewDescriptor(myInnerClass);
  }

  /**
   * Collects all references to the inner class outside its own body, plus
   * non-code usages (comments/strings/non-Java files) of its future qualified name.
   */
  @NotNull
  protected UsageInfo[] findUsages() {
    LOG.assertTrue(myTargetContainer != null);

    Collection<PsiReference> innerClassRefs = ReferencesSearch.search(myInnerClass).findAll();
    ArrayList<UsageInfo> usageInfos = new ArrayList<UsageInfo>(innerClassRefs.size());
    for (PsiReference innerClassRef : innerClassRefs) {
      PsiElement ref = innerClassRef.getElement();
      if (!PsiTreeUtil.isAncestor(myInnerClass, ref, true)) { // do not show self-references
        usageInfos.add(new UsageInfo(ref));
      }
    }

    // compute the qualified name the class will have after the move,
    // to find textual (non-code) occurrences that should be updated
    final String newQName;
    if (myTargetContainer instanceof PsiDirectory) {
      final PsiDirectory targetDirectory = (PsiDirectory)myTargetContainer;
      final PsiPackage aPackage = JavaDirectoryService.getInstance().getPackage(targetDirectory);
      LOG.assertTrue(aPackage != null);
      newQName = aPackage.getQualifiedName() + "." + myNewClassName;
    }
    else if (myTargetContainer instanceof PsiClass) {
      final String qName = ((PsiClass)myTargetContainer).getQualifiedName();
      if (qName != null) {
        newQName = qName + "." + myNewClassName;
      }
      else {
        newQName = myNewClassName;
      }
    }
    else {
      newQName = myNewClassName;
    }
    MoveClassesOrPackagesUtil.findNonCodeUsages(mySearchInComments, mySearchInNonJavaFiles, myInnerClass, newQName, usageInfos);
    return usageInfos.toArray(new UsageInfo[usageInfos.size()]);
  }

  protected void refreshElements(PsiElement[] elements) {
    boolean condition = elements.length == 1 && elements[0] instanceof PsiClass;
    LOG.assertTrue(condition);
    myInnerClass = (PsiClass)elements[0];
  }

  public boolean isSearchInComments() {
    return mySearchInComments;
  }

  public void setSearchInComments(boolean searchInComments) {
    mySearchInComments = searchInComments;
  }

  public boolean isSearchInNonJavaFiles() {
    return mySearchInNonJavaFiles;
  }

  public void setSearchInNonJavaFiles(boolean searchInNonJavaFiles) {
    mySearchInNonJavaFiles = searchInNonJavaFiles;
  }

  /**
   * Performs the actual move. The sequence is order-sensitive:
   * 1. optionally add the outer-instance field and initialize it in all constructors;
   * 2. encode context info so outer-member references can be re-qualified after the move;
   * 3. create the class at the target (directory or host class) and fix its modifiers;
   * 4. rebind references, delete the original inner class, decode context info;
   * 5. open the moved class in the editor and record non-code usages for later renaming.
   */
  protected void performRefactoring(final UsageInfo[] usages) {
    PsiManager manager = PsiManager.getInstance(myProject);
    final PsiElementFactory factory = JavaPsiFacade.getInstance(manager.getProject()).getElementFactory();

    final RefactoringElementListener elementListener = getTransaction().getElementListener(myInnerClass);

    String newClassName = myNewClassName;

    try {
      PsiField field = null;
      if (myParameterNameOuterClass != null) {
        // pass outer as a parameter
        field = factory.createField(myFieldNameOuterClass, factory.createType(myOuterClass));
        field = (PsiField)myInnerClass.add(field);
        // re-point at the (possibly replaced) PSI after the add
        myInnerClass = field.getContainingClass();
        addFieldInitializationToConstructors(myInnerClass, field, myParameterNameOuterClass);
      }

      ChangeContextUtil.encodeContextInfo(myInnerClass, false);

      PsiClass newClass;
      if (myTargetContainer instanceof PsiDirectory) {
        myInnerClass = CodeInsightUtilBase.forcePsiPostprocessAndRestoreElement(myInnerClass);
        newClass = JavaDirectoryService.getInstance().createClass((PsiDirectory)myTargetContainer, newClassName);
        // preserve the file-template doc comment if the inner class has none of its own
        PsiDocComment defaultDocComment = newClass.getDocComment();
        if (defaultDocComment != null && myInnerClass.getDocComment() == null) {
          myInnerClass = (PsiClass)myInnerClass.addAfter(defaultDocComment, null).getParent();
        }

        newClass = (PsiClass)newClass.replace(myInnerClass);
        // a top-level class cannot be static/private/protected
        PsiUtil.setModifierProperty(newClass, PsiModifier.STATIC, false);
        PsiUtil.setModifierProperty(newClass, PsiModifier.PRIVATE, false);
        PsiUtil.setModifierProperty(newClass, PsiModifier.PROTECTED, false);
        final boolean makePublic = needPublicAccess();
        if (makePublic) {
          PsiUtil.setModifierProperty(newClass, PsiModifier.PUBLIC, true);
        }
        // widen constructor visibility to match the class so callers can still instantiate it
        final PsiMethod[] constructors = newClass.getConstructors();
        for (PsiMethod constructor : constructors) {
          final PsiModifierList modifierList = constructor.getModifierList();
          modifierList.setModifierProperty(PsiModifier.PRIVATE, false);
          modifierList.setModifierProperty(PsiModifier.PROTECTED, false);
          if (makePublic) {
            modifierList.setModifierProperty(PsiModifier.PUBLIC, true);
          }
        }
      }
      else {
        newClass = (PsiClass)myTargetContainer.add(myInnerClass);
      }

      newClass.setName(newClassName);

      // replace references in a new class to old inner class with references to itself
      for (PsiReference ref : ReferencesSearch.search(myInnerClass, new LocalSearchScope(newClass), true)) {
        PsiElement element = ref.getElement();
        if (element.getParent() instanceof PsiJavaCodeReferenceElement) {
          PsiJavaCodeReferenceElement parentRef = (PsiJavaCodeReferenceElement)element.getParent();
          PsiElement parentRefElement = parentRef.resolve();
          if (parentRefElement instanceof PsiClass) { // reference to inner class inside our inner
            parentRef.getQualifier().delete();
            continue;
          }
        }
        ref.bindToElement(newClass);
      }

      // remember outside references now; they are rebound only after the original class is deleted
      List<PsiReference> referencesToRebind = new ArrayList<PsiReference>();
      for (UsageInfo usage : usages) {
        if (usage.isNonCodeUsage) continue;
        PsiElement refElement = usage.getElement();
        PsiReference[] references = refElement.getReferences();
        for (PsiReference reference : references) {
          if (reference.isReferenceTo(myInnerClass)) {
            referencesToRebind.add(reference);
          }
        }
      }

      myInnerClass.delete();

      // correct references in usages
      for (UsageInfo usage : usages) {
        if (usage.isNonCodeUsage) continue;
        PsiElement refElement = usage.getElement();
        if (myParameterNameOuterClass != null) { // should pass outer as parameter
          PsiElement refParent = refElement.getParent();
          if (refParent instanceof PsiNewExpression || refParent instanceof PsiAnonymousClass) {
            PsiNewExpression newExpr = refParent instanceof PsiNewExpression
                                       ? (PsiNewExpression)refParent
                                       : (PsiNewExpression)refParent.getParent();

            PsiExpressionList argList = newExpr.getArgumentList();

            if (argList != null) { // can happen in incomplete code
              if (newExpr.getQualifier() == null) {
                // unqualified `new Inner(...)` -> prepend `this` (or `Outer.this` from deeper scopes)
                PsiThisExpression thisExpr;
                PsiClass parentClass = RefactoringUtil.getThisClass(newExpr);
                if (myOuterClass.equals(parentClass)) {
                  thisExpr = RefactoringUtil.createThisExpression(manager, null);
                }
                else {
                  thisExpr = RefactoringUtil.createThisExpression(manager, myOuterClass);
                }
                argList.addAfter(thisExpr, null);
              }
              else {
                // qualified `outer.new Inner(...)` -> move the qualifier into the argument list
                argList.addAfter(newExpr.getQualifier(), null);
                newExpr.getQualifier().delete();
              }
            }
          }
        }
      }

      for (PsiReference reference : referencesToRebind) {
        reference.bindToElement(newClass);
      }

      if (field != null) {
        // decode outer-member references: before a this()/super() call use the constructor
        // parameter (the field is not assigned yet); elsewhere use the generated field
        final PsiExpression paramAccessExpression = factory.createExpressionFromText(myParameterNameOuterClass, null);
        for (final PsiMethod constructor : newClass.getConstructors()) {
          final PsiStatement[] statements = constructor.getBody().getStatements();
          if (statements.length > 0) {
            if (statements[0] instanceof PsiExpressionStatement) {
              PsiExpression expression = ((PsiExpressionStatement)statements[0]).getExpression();
              if (expression instanceof PsiMethodCallExpression) {
                @NonNls String text = ((PsiMethodCallExpression)expression).getMethodExpression().getText();
                if ("this".equals(text) || "super".equals(text)) {
                  ChangeContextUtil.decodeContextInfo(expression, myOuterClass, paramAccessExpression);
                }
              }
            }
          }
        }

        PsiExpression accessExpression = factory.createExpressionFromText(myFieldNameOuterClass, null);
        ChangeContextUtil.decodeContextInfo(newClass, myOuterClass, accessExpression);
      }
      else {
        ChangeContextUtil.decodeContextInfo(newClass, null, null);
      }

      PsiFile targetFile = newClass.getContainingFile();
      OpenFileDescriptor descriptor = new OpenFileDescriptor(myProject, targetFile.getVirtualFile(), newClass.getTextOffset());
      FileEditorManager.getInstance(myProject).openTextEditor(descriptor, true);

      if (myMoveCallback != null) {
        myMoveCallback.refactoringCompleted();
      }
      elementListener.elementMoved(newClass);

      List<NonCodeUsageInfo> nonCodeUsages = new ArrayList<NonCodeUsageInfo>();
      for (UsageInfo usage : usages) {
        if (usage instanceof NonCodeUsageInfo) {
          nonCodeUsages.add((NonCodeUsageInfo)usage);
        }
      }
      myNonCodeUsages = nonCodeUsages.toArray(new NonCodeUsageInfo[nonCodeUsages.size()]);
    }
    catch (IncorrectOperationException e) {
      LOG.error(e);
    }
  }

  /**
   * Returns true when the moved class must be made public: when the outer class is
   * an interface (its members were implicitly public), or when it moves to a
   * different package than the outer class's.
   */
  private boolean needPublicAccess() {
    if (myOuterClass.isInterface()) {
      return true;
    }
    if (myTargetContainer instanceof PsiDirectory) {
      PsiPackage targetPackage = JavaDirectoryService.getInstance().getPackage((PsiDirectory)myTargetContainer);
      if (targetPackage != null && !isInPackage(myOuterClass.getContainingFile(), targetPackage)) {
        return true;
      }
    }
    return false;
  }

  // second phase: rename the recorded non-code usages (comments/strings/non-Java files)
  protected void performPsiSpoilingRefactoring() {
    if (myNonCodeUsages != null) {
      RenameUtil.renameNonCodeUsages(myProject, myNonCodeUsages);
    }
  }

  /**
   * Detects visibility conflicts in both directions: members of the inner class
   * that become inaccessible to the outer class, and members of the outer class
   * that become inaccessible to the moved inner class. Shows them to the user.
   */
  protected boolean preprocessUsages(Ref<UsageInfo[]> refUsages) {
    final MultiMap<PsiElement, String> conflicts = new MultiMap<PsiElement, String>();
    // resolved member -> set of containers already reported, to avoid duplicate messages
    final HashMap<PsiElement,HashSet<PsiElement>> reported = new HashMap<PsiElement, HashSet<PsiElement>>();
    class Visitor extends JavaRecursiveElementWalkingVisitor {
      @Override public void visitReferenceElement(PsiJavaCodeReferenceElement reference) {
        PsiElement resolved = reference.resolve();
        if (resolved instanceof PsiMember &&
            PsiTreeUtil.isAncestor(myInnerClass, resolved, true) &&
            becomesInaccessible((PsiMember)resolved)) {
          registerConflict(reference, resolved, reported, conflicts);
        }
      }

      @Override
      public void visitClass(PsiClass aClass) {
        // skip the inner class itself; its internals move along with it
        if (aClass == myInnerClass) return;
        super.visitClass(aClass);
      }
    }

//    if (myInnerClass.hasModifierProperty(PsiModifier.)) {
    myOuterClass.accept(new Visitor());

    myInnerClass.accept(new JavaRecursiveElementWalkingVisitor() {
      @Override
      public void visitReferenceElement(PsiJavaCodeReferenceElement reference) {
        super.visitReferenceElement(reference);
        final PsiElement resolve = reference.resolve();
        if (resolve instanceof PsiMember) {
          // outer-class members (but not the inner class's own) referenced from the inner class
          if (PsiTreeUtil.isAncestor(myOuterClass, resolve, true) &&
              !PsiTreeUtil.isAncestor(myInnerClass, resolve, true)) {
            if (becomesInaccessible((PsiMember)resolve)) {
              registerConflict(reference, resolve, reported, conflicts);
            }
          }
        }
      }
    });

    return showConflicts(conflicts, refUsages.get());
  }

  // records one "X will become inaccessible from Y" conflict, deduplicated per (member, container)
  private static void registerConflict(PsiJavaCodeReferenceElement reference,
                                       PsiElement resolved,
                                       HashMap<PsiElement, HashSet<PsiElement>> reported,
                                       MultiMap<PsiElement, String> conflicts) {
    final PsiElement container = ConflictsUtil.getContainer(reference);
    HashSet<PsiElement> containerSet = reported.get(resolved);
    if (containerSet == null) {
      containerSet = new HashSet<PsiElement>();
      reported.put(resolved, containerSet);
    }
    if (!containerSet.contains(container)) {
      containerSet.add(container);
      String message = RefactoringBundle.message("0.will.become.inaccessible.from.1",
                                                 RefactoringUIUtil.getDescription(resolved, true),
                                                 RefactoringUIUtil.getDescription(container, true));
      conflicts.putValue(resolved, message);
    }
  }

  /**
   * Whether the given member will no longer be accessible after the move.
   * Private members always become inaccessible; public ones never do;
   * package-private/protected depend on whether source and target share a package.
   */
  private boolean becomesInaccessible(PsiMember element) {
    final String visibilityModifier = VisibilityUtil.getVisibilityModifier(element.getModifierList());
    if (PsiModifier.PRIVATE.equals(visibilityModifier)) return true;
    if (PsiModifier.PUBLIC.equals(visibilityModifier)) return false;
    final PsiFile containingFile = myOuterClass.getContainingFile();
    if (myTargetContainer instanceof PsiDirectory) {
      final PsiPackage aPackage = JavaDirectoryService.getInstance().getPackage((PsiDirectory)myTargetContainer);
      // NOTE(review): aPackage may be null here (unlike needPublicAccess, there is no
      // null check before isInPackage dereferences it) — verify against callers
      return !isInPackage(containingFile, aPackage);
    }
    // target container is a class
    PsiFile targetFile = myTargetContainer.getContainingFile();
    if (targetFile != null) {
      final PsiDirectory containingDirectory = targetFile.getContainingDirectory();
      if (containingDirectory != null) {
        final PsiPackage targetPackage = JavaDirectoryService.getInstance().getPackage(containingDirectory);
        // NOTE(review): the directory branch above negates isInPackage but this branch
        // does not — looks inconsistent (missing '!'?); confirm intended semantics
        return isInPackage(containingFile, targetPackage);
      }
    }
    return false;
  }

  // true unless the file's package is known and differs from aPackage
  // (unknown package/directory is treated as "same package")
  private static boolean isInPackage(final PsiFile containingFile, PsiPackage aPackage) {
    if (containingFile != null) {
      final PsiDirectory containingDirectory = containingFile.getContainingDirectory();
      if (containingDirectory != null) {
        PsiPackage filePackage = JavaDirectoryService.getInstance().getPackage(containingDirectory);
        if (filePackage != null && !filePackage.getQualifiedName().equals(aPackage.getQualifiedName())) {
          return false;
        }
      }
    }
    return true;
  }

  /**
   * Configures the processor before the refactoring runs.
   * When passOuterClass is true, derives the outer-instance field name from the
   * given parameter name via the project's code style (parameter -> property -> field).
   */
  public void setup(final PsiClass innerClass,
                    final String className,
                    final boolean passOuterClass,
                    final String parameterName,
                    boolean searchInComments,
                    boolean searchInNonJava,
                    @NotNull final PsiElement targetContainer) {
    myNewClassName = className;
    myInnerClass = innerClass;
    myDescriptiveName = UsageViewUtil.getDescriptiveName(myInnerClass);
    myOuterClass = myInnerClass.getContainingClass();
    myTargetContainer = targetContainer;
    JavaCodeStyleManager codeStyleManager = JavaCodeStyleManager.getInstance(myProject);
    myParameterNameOuterClass = passOuterClass ? parameterName : null;
    if (myParameterNameOuterClass != null) {
      myFieldNameOuterClass = codeStyleManager.variableNameToPropertyName(myParameterNameOuterClass, VariableKind.PARAMETER);
      myFieldNameOuterClass = codeStyleManager.propertyNameToVariableName(myFieldNameOuterClass, VariableKind.FIELD);
    }
    mySearchInComments = searchInComments;
    mySearchInNonJavaFiles = searchInNonJava;
  }

  /**
   * Adds the outer-instance parameter and `field = parameter` assignment to every
   * constructor (skipping those delegating via this(...)); creates a default
   * constructor if the class has none.
   */
  private void addFieldInitializationToConstructors(PsiClass aClass, PsiField field, String parameterName)
    throws IncorrectOperationException {

    PsiMethod[] constructors = aClass.getConstructors();
    PsiElementFactory factory = JavaPsiFacade.getInstance(myProject).getElementFactory();
    if (constructors.length > 0) {
      for (PsiMethod constructor : constructors) {
        if (parameterName != null) {
          PsiParameterList parameterList = constructor.getParameterList();
          PsiParameter parameter = factory.createParameter(parameterName, field.getType());
          parameterList.addAfter(parameter, null);
        }
        PsiCodeBlock body = constructor.getBody();
        if (body == null) continue;
        PsiStatement[] statements = body.getStatements();
        if (statements.length > 0) {
          PsiStatement first = statements[0];
          if (first instanceof PsiExpressionStatement) {
            PsiExpression expression = ((PsiExpressionStatement)first).getExpression();
            if (expression instanceof PsiMethodCallExpression) {
              @NonNls String text = ((PsiMethodCallExpression)expression).getMethodExpression().getText();
              if ("this".equals(text)) {
                // delegating constructor: the this(...) target will do the assignment
                continue;
              }
            }
          }
        }
        createAssignmentStatement(constructor, field.getName(), parameterName);
      }
    }
    else {
      PsiMethod constructor = factory.createConstructor();
      if (parameterName != null) {
        PsiParameterList parameterList = constructor.getParameterList();
        PsiParameter parameter = factory.createParameter(parameterName, field.getType());
        parameterList.add(parameter);
      }
      createAssignmentStatement(constructor, field.getName(), parameterName);
      aClass.add(constructor);
    }
  }

  /**
   * Inserts `field = parameter;` (qualified with `this.` when the names collide)
   * into the constructor body, after a leading super(...) call if present.
   * The RHS is built as a placeholder identifier `a` and then replaced, so the
   * parameter name never has to be parseable inside the text pattern.
   */
  private PsiStatement createAssignmentStatement(PsiMethod constructor, String fieldname, String parameterName)
    throws IncorrectOperationException {

    PsiElementFactory factory = JavaPsiFacade.getInstance(myProject).getElementFactory();
    @NonNls String pattern = fieldname + "=a;";
    if (fieldname.equals(parameterName)) {
      // qualify to avoid the parameter shadowing the field
      pattern = "this." + pattern;
    }

    PsiExpressionStatement statement = (PsiExpressionStatement)factory.createStatementFromText(pattern, null);
    statement = (PsiExpressionStatement)CodeStyleManager.getInstance(myProject).reformat(statement);

    PsiCodeBlock body = constructor.getBody();
    statement = (PsiExpressionStatement)body.addAfter(statement, getAnchorElement(body));

    PsiAssignmentExpression assignment = (PsiAssignmentExpression)statement.getExpression();
    PsiReferenceExpression rExpr = (PsiReferenceExpression)assignment.getRExpression();
    PsiIdentifier identifier = (PsiIdentifier)rExpr.getReferenceNameElement();
    identifier.replace(factory.createIdentifier(parameterName));
    return statement;
  }

  // returns the leading super(...) statement to insert after, or null to insert at the top
  @Nullable
  private static PsiElement getAnchorElement(PsiCodeBlock body) {
    PsiStatement[] statements = body.getStatements();
    if (statements.length > 0) {
      PsiStatement first = statements[0];
      if (first instanceof PsiExpressionStatement) {
        PsiExpression expression = ((PsiExpressionStatement)first).getExpression();
        if (expression instanceof PsiMethodCallExpression) {
          PsiReferenceExpression methodCall = ((PsiMethodCallExpression)expression).getMethodExpression();
          @NonNls String text = methodCall.getText();
          if ("super".equals(text)) {
            return first;
          }
        }
      }
    }
    return null;
  }

  public PsiClass getInnerClass() {
    return myInnerClass;
  }

  public String getNewClassName() {
    return myNewClassName;
  }

  public boolean shouldPassParameter() {
    return myParameterNameOuterClass != null;
  }

  public String getParameterName() {
    return myParameterNameOuterClass;
  }
}
/* * JBoss, Home of Professional Open Source * Copyright 2011 Red Hat Inc. and/or its affiliates and other contributors * as indicated by the @authors tag. All rights reserved. * See the copyright.txt in the distribution for a * full listing of individual contributors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jboss.arquillian.container.test.impl.client.container; import java.util.List; import java.util.Map; import java.util.logging.Logger; import org.jboss.arquillian.container.spi.Container; import org.jboss.arquillian.container.spi.ContainerRegistry; import org.jboss.arquillian.container.spi.client.deployment.Deployment; import org.jboss.arquillian.container.spi.client.deployment.DeploymentScenario; import org.jboss.arquillian.container.spi.client.deployment.TargetDescription; import org.jboss.arquillian.container.spi.event.ContainerControlEvent; import org.jboss.arquillian.container.spi.event.DeployDeployment; import org.jboss.arquillian.container.spi.event.KillContainer; import org.jboss.arquillian.container.spi.event.SetupContainer; import org.jboss.arquillian.container.spi.event.StartContainer; import org.jboss.arquillian.container.spi.event.StopContainer; import org.jboss.arquillian.container.spi.event.UnDeployDeployment; import org.jboss.arquillian.container.test.api.ContainerController; import org.jboss.arquillian.core.api.Event; import org.jboss.arquillian.core.api.Instance; import org.jboss.arquillian.core.api.annotation.Inject; /** * 
ClientContainerController * * @author <a href="mailto:mgencur@redhat.com">Martin Gencur</a> * @version $Revision: $ * */ public class ClientContainerController implements ContainerController { private final Logger log = Logger.getLogger(ClientContainerController.class.getName()); @Inject private Event<ContainerControlEvent> event; @Inject private Instance<ContainerRegistry> containerRegistry; @Inject private Instance<DeploymentScenario> deploymentScenario; @Override public void start(String containerQualifier) { DeploymentScenario scenario = deploymentScenario.get(); if(scenario == null) { throw new IllegalArgumentException("No deployment scenario in context"); } ContainerRegistry registry = containerRegistry.get(); if (registry == null) { throw new IllegalArgumentException("No container registry in context"); } if (!containerExists(registry.getContainers(), containerQualifier)) { throw new IllegalArgumentException("No container found in registry with name " + containerQualifier); } if (!isControllableContainer(registry.getContainers(), containerQualifier)) { throw new IllegalArgumentException("Could not start " + containerQualifier + " container. 
The container life cycle is controlled by Arquillian"); } List<Deployment> managedDeployments = scenario.startupDeploymentsFor(new TargetDescription(containerQualifier)); Container container = registry.getContainer(new TargetDescription(containerQualifier)); log.info("Manual starting of a server instance"); event.fire(new StartContainer(container)); for (Deployment d : managedDeployments) { if(d.getDescription().managed() && "custom".equalsIgnoreCase(container.getContainerConfiguration().getMode())) { throw new IllegalStateException( "Trying to deploy managed deployment " + d.getDescription().getName() + " to custom mode container " + container.getName()); } if (!d.isDeployed()) { log.info("Automatic deploying of the managed deployment with name " + d.getDescription().getName() + " for the container with name " + container.getName()); event.fire(new DeployDeployment(container, d)); } } } @Override public void start(String containerQualifier, Map<String, String> config) { DeploymentScenario scenario = deploymentScenario.get(); if(scenario == null) { throw new IllegalArgumentException("No deployment scenario in context"); } ContainerRegistry registry = containerRegistry.get(); if (registry == null) { throw new IllegalArgumentException("No container registry in context"); } if (!containerExists(registry.getContainers(), containerQualifier)) { throw new IllegalArgumentException("No container with the specified name exists"); } if (!isControllableContainer(registry.getContainers(), containerQualifier)) { throw new IllegalArgumentException("Could not start " + containerQualifier + " container. 
The container life cycle is controlled by Arquillian"); } List<Deployment> managedDeployments = scenario.startupDeploymentsFor(new TargetDescription(containerQualifier)); Container container = registry.getContainer(new TargetDescription(containerQualifier)); for (String name : config.keySet()) { container.getContainerConfiguration().overrideProperty(name, config.get(name)); } log.info("Manual starting of a server instance with overridden configuration. New configuration: " + container.getContainerConfiguration().getContainerProperties()); event.fire(new SetupContainer(container)); event.fire(new StartContainer(container)); for (Deployment d : managedDeployments) { if (!d.isDeployed()) { log.info("Automatic deploying of the managed deployment with name " + d.getDescription().getName() + " for the container with name " + container.getName()); event.fire(new DeployDeployment(container, d)); } } } @Override public void stop(String containerQualifier) { DeploymentScenario scenario = deploymentScenario.get(); if(scenario == null) { throw new IllegalArgumentException("No deployment scenario in context"); } ContainerRegistry registry = containerRegistry.get(); if (registry == null) { throw new IllegalArgumentException("No container registry in context"); } if (!containerExists(registry.getContainers(), containerQualifier)) { throw new IllegalArgumentException("No container with the specified name exists"); } if (!isControllableContainer(registry.getContainers(), containerQualifier)) { throw new IllegalArgumentException("Could not stop " + containerQualifier + " container. 
The container life cycle is controlled by Arquillian"); } Container container = registry.getContainer(new TargetDescription(containerQualifier)); List<Deployment> managedDeployments = scenario.startupDeploymentsFor(new TargetDescription(containerQualifier)); for (Deployment d : managedDeployments) { if (d.isDeployed()) { log.info("Automatic undeploying of the managed deployment with name " + d.getDescription().getName() + " from the container with name " + container.getName()); event.fire(new UnDeployDeployment(container, d)); } } log.info("Manual stopping of a server instance"); event.fire(new StopContainer(container)); } @Override public void kill(String containerQualifier) { ContainerRegistry registry = containerRegistry.get(); if (registry == null) { throw new IllegalArgumentException("No container registry in context"); } if (!containerExists(registry.getContainers(), containerQualifier)) { throw new IllegalArgumentException("No container with the specified name exists"); } if (!isControllableContainer(registry.getContainers(), containerQualifier)) { throw new IllegalArgumentException("Could not start " + containerQualifier + " container. 
The container life cycle is controlled by Arquillian"); } Container container = registry.getContainer(new TargetDescription(containerQualifier)); log.info("Hard killing of a server instance"); event.fire(new KillContainer(container)); } @Override public boolean isStarted(String containerQualifier) { ContainerRegistry registry = containerRegistry.get(); if (registry == null) { throw new IllegalArgumentException("No container registry in context"); } if (!containerExists(registry.getContainers(), containerQualifier)) { throw new IllegalArgumentException("No container found in registry with name " + containerQualifier); } Container container = registry.getContainer(new TargetDescription(containerQualifier)); return container.getState() == Container.State.STARTED; } protected boolean containerExists(List<Container> containers, String name) { for (Container container : containers) { if (container.getName().equals(name)) { return true; } } return false; } protected boolean isControllableContainer(List<Container> containers, String containerQualifier) { for (Container container : containers) { String contianerMode = container.getContainerConfiguration().getMode(); if (container.getName().equals(containerQualifier) && ("manual".equalsIgnoreCase(contianerMode) || "custom".equalsIgnoreCase(contianerMode))) { return true; } } return false; } protected Event<ContainerControlEvent> getContainerControllerEvent() { return event; } protected Instance<ContainerRegistry> getContainerRegistry() { return containerRegistry; } protected Instance<DeploymentScenario> getDeploymentScenario() { return deploymentScenario; } }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.mapreduce; import java.io.IOException; import java.util.ArrayList; import java.util.List; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.mapreduce.InputFormat; import org.apache.hadoop.mapreduce.InputSplit; import org.apache.hadoop.mapreduce.JobContext; import org.apache.hadoop.mapreduce.RecordReader; import org.apache.hadoop.mapreduce.TaskAttemptContext; /** * A base for {@link MultiTableInputFormat}s. Receives a list of * {@link Scan} instances that define the input tables and * filters etc. Subclasses may use other TableRecordReader implementations. 
*/ @InterfaceAudience.Public @InterfaceStability.Evolving public abstract class MultiTableInputFormatBase extends InputFormat<ImmutableBytesWritable, Result> { final Log LOG = LogFactory.getLog(MultiTableInputFormatBase.class); /** Holds the set of scans used to define the input. */ private List<Scan> scans; /** The reader scanning the table, can be a custom one. */ private TableRecordReader tableRecordReader = null; /** * Builds a TableRecordReader. If no TableRecordReader was provided, uses the * default. * * @param split The split to work with. * @param context The current context. * @return The newly created record reader. * @throws IOException When creating the reader fails. * @throws InterruptedException when record reader initialization fails * @see org.apache.hadoop.mapreduce.InputFormat#createRecordReader( * org.apache.hadoop.mapreduce.InputSplit, * org.apache.hadoop.mapreduce.TaskAttemptContext) */ @Override public RecordReader<ImmutableBytesWritable, Result> createRecordReader( InputSplit split, TaskAttemptContext context) throws IOException, InterruptedException { TableSplit tSplit = (TableSplit) split; if (tSplit.getTableName() == null) { throw new IOException("Cannot create a record reader because of a" + " previous error. Please look at the previous logs lines from" + " the task's full log for more details."); } HTable table = new HTable(context.getConfiguration(), tSplit.getTableName()); TableRecordReader trr = this.tableRecordReader; // if no table record reader was provided use default if (trr == null) { trr = new TableRecordReader(); } Scan sc = tSplit.getScan(); sc.setStartRow(tSplit.getStartRow()); sc.setStopRow(tSplit.getEndRow()); trr.setScan(sc); trr.setHTable(table); trr.initialize(split, context); return trr; } /** * Calculates the splits that will serve as input for the map tasks. The * number of splits matches the number of regions in a table. * * @param context The current job context. * @return The list of input splits. 
* @throws IOException When creating the list of splits fails. * @see org.apache.hadoop.mapreduce.InputFormat#getSplits(org.apache.hadoop.mapreduce.JobContext) */ @Override public List<InputSplit> getSplits(JobContext context) throws IOException { if (scans.isEmpty()) { throw new IOException("No scans were provided."); } List<InputSplit> splits = new ArrayList<InputSplit>(); for (Scan scan : scans) { byte[] tableName = scan.getAttribute(Scan.SCAN_ATTRIBUTES_TABLE_NAME); if (tableName == null) throw new IOException("A scan object did not have a table name"); HTable table = null; try { table = new HTable(context.getConfiguration(), tableName); Pair<byte[][], byte[][]> keys = table.getStartEndKeys(); if (keys == null || keys.getFirst() == null || keys.getFirst().length == 0) { throw new IOException("Expecting at least one region for table : " + Bytes.toString(tableName)); } int count = 0; byte[] startRow = scan.getStartRow(); byte[] stopRow = scan.getStopRow(); for (int i = 0; i < keys.getFirst().length; i++) { if (!includeRegionInSplit(keys.getFirst()[i], keys.getSecond()[i])) { continue; } String regionLocation = table.getRegionLocation(keys.getFirst()[i], false).getHostname(); // determine if the given start and stop keys fall into the range if ((startRow.length == 0 || keys.getSecond()[i].length == 0 || Bytes.compareTo(startRow, keys.getSecond()[i]) < 0) && (stopRow.length == 0 || Bytes.compareTo(stopRow, keys.getFirst()[i]) > 0)) { byte[] splitStart = startRow.length == 0 || Bytes.compareTo(keys.getFirst()[i], startRow) >= 0 ? keys .getFirst()[i] : startRow; byte[] splitStop = (stopRow.length == 0 || Bytes.compareTo(keys.getSecond()[i], stopRow) <= 0) && keys.getSecond()[i].length > 0 ? 
keys .getSecond()[i] : stopRow; InputSplit split = new TableSplit(table.getFullyQualifiedTableName(), scan, splitStart, splitStop, regionLocation); splits.add(split); if (LOG.isDebugEnabled()) LOG.debug("getSplits: split -> " + (count++) + " -> " + split); } } } finally { if (null != table) table.close(); } } return splits; } /** * Test if the given region is to be included in the InputSplit while * splitting the regions of a table. * <p> * This optimization is effective when there is a specific reasoning to * exclude an entire region from the M-R job, (and hence, not contributing to * the InputSplit), given the start and end keys of the same. <br> * Useful when we need to remember the last-processed top record and revisit * the [last, current) interval for M-R processing, continuously. In addition * to reducing InputSplits, reduces the load on the region server as well, due * to the ordering of the keys. <br> * <br> * Note: It is possible that <code>endKey.length() == 0 </code> , for the last * (recent) region. <br> * Override this method, if you want to bulk exclude regions altogether from * M-R. By default, no region is excluded( i.e. all regions are included). * * @param startKey Start key of the region * @param endKey End key of the region * @return true, if this region needs to be included as part of the input * (default). */ protected boolean includeRegionInSplit(final byte[] startKey, final byte[] endKey) { return true; } /** * Allows subclasses to get the list of {@link Scan} objects. */ protected List<Scan> getScans() { return this.scans; } /** * Allows subclasses to set the list of {@link Scan} objects. * * @param scans The list of {@link Scan} used to define the input */ protected void setScans(List<Scan> scans) { this.scans = scans; } /** * Allows subclasses to set the {@link TableRecordReader}. * * @param tableRecordReader A different {@link TableRecordReader} * implementation. 
*/ protected void setTableRecordReader(TableRecordReader tableRecordReader) { this.tableRecordReader = tableRecordReader; } }
/* * Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.dynamodbv2.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.protocol.StructuredPojo; import com.amazonaws.protocol.ProtocolMarshaller; /** * <p> * Represents the properties of a global secondary index. * </p> * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/dynamodb-2012-08-10/GlobalSecondaryIndex" target="_top">AWS API * Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class GlobalSecondaryIndex implements Serializable, Cloneable, StructuredPojo { /** * <p> * The name of the global secondary index. The name must be unique among all other indexes on this table. * </p> */ private String indexName; /** * <p> * The complete key schema for a global secondary index, which consists of one or more pairs of attribute names and * key types: * </p> * <ul> * <li> * <p> * <code>HASH</code> - partition key * </p> * </li> * <li> * <p> * <code>RANGE</code> - sort key * </p> * </li> * </ul> * <note> * <p> * The partition key of an item is also known as its <i>hash attribute</i>. The term "hash attribute" derives from * DynamoDB' usage of an internal hash function to evenly distribute data items across partitions, based on their * partition key values. * </p> * <p> * The sort key of an item is also known as its <i>range attribute</i>. 
The term "range attribute" derives from the * way DynamoDB stores items with the same partition key physically close together, in sorted order by the sort key * value. * </p> * </note> */ private java.util.List<KeySchemaElement> keySchema; /** * <p> * Represents attributes that are copied (projected) from the table into the global secondary index. These are in * addition to the primary key attributes and index key attributes, which are automatically projected. * </p> */ private Projection projection; /** * <p> * Represents the provisioned throughput settings for the specified global secondary index. * </p> * <p> * For current minimum and maximum provisioned throughput values, see <a * href="http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Limits.html">Limits</a> in the <i>Amazon * DynamoDB Developer Guide</i>. * </p> */ private ProvisionedThroughput provisionedThroughput; /** * <p> * The name of the global secondary index. The name must be unique among all other indexes on this table. * </p> * * @param indexName * The name of the global secondary index. The name must be unique among all other indexes on this table. */ public void setIndexName(String indexName) { this.indexName = indexName; } /** * <p> * The name of the global secondary index. The name must be unique among all other indexes on this table. * </p> * * @return The name of the global secondary index. The name must be unique among all other indexes on this table. */ public String getIndexName() { return this.indexName; } /** * <p> * The name of the global secondary index. The name must be unique among all other indexes on this table. * </p> * * @param indexName * The name of the global secondary index. The name must be unique among all other indexes on this table. * @return Returns a reference to this object so that method calls can be chained together. 
*/ public GlobalSecondaryIndex withIndexName(String indexName) { setIndexName(indexName); return this; } /** * <p> * The complete key schema for a global secondary index, which consists of one or more pairs of attribute names and * key types: * </p> * <ul> * <li> * <p> * <code>HASH</code> - partition key * </p> * </li> * <li> * <p> * <code>RANGE</code> - sort key * </p> * </li> * </ul> * <note> * <p> * The partition key of an item is also known as its <i>hash attribute</i>. The term "hash attribute" derives from * DynamoDB' usage of an internal hash function to evenly distribute data items across partitions, based on their * partition key values. * </p> * <p> * The sort key of an item is also known as its <i>range attribute</i>. The term "range attribute" derives from the * way DynamoDB stores items with the same partition key physically close together, in sorted order by the sort key * value. * </p> * </note> * * @return The complete key schema for a global secondary index, which consists of one or more pairs of attribute * names and key types:</p> * <ul> * <li> * <p> * <code>HASH</code> - partition key * </p> * </li> * <li> * <p> * <code>RANGE</code> - sort key * </p> * </li> * </ul> * <note> * <p> * The partition key of an item is also known as its <i>hash attribute</i>. The term "hash attribute" * derives from DynamoDB' usage of an internal hash function to evenly distribute data items across * partitions, based on their partition key values. * </p> * <p> * The sort key of an item is also known as its <i>range attribute</i>. The term "range attribute" derives * from the way DynamoDB stores items with the same partition key physically close together, in sorted order * by the sort key value. 
* </p> */ public java.util.List<KeySchemaElement> getKeySchema() { return keySchema; } /** * <p> * The complete key schema for a global secondary index, which consists of one or more pairs of attribute names and * key types: * </p> * <ul> * <li> * <p> * <code>HASH</code> - partition key * </p> * </li> * <li> * <p> * <code>RANGE</code> - sort key * </p> * </li> * </ul> * <note> * <p> * The partition key of an item is also known as its <i>hash attribute</i>. The term "hash attribute" derives from * DynamoDB' usage of an internal hash function to evenly distribute data items across partitions, based on their * partition key values. * </p> * <p> * The sort key of an item is also known as its <i>range attribute</i>. The term "range attribute" derives from the * way DynamoDB stores items with the same partition key physically close together, in sorted order by the sort key * value. * </p> * </note> * * @param keySchema * The complete key schema for a global secondary index, which consists of one or more pairs of attribute * names and key types:</p> * <ul> * <li> * <p> * <code>HASH</code> - partition key * </p> * </li> * <li> * <p> * <code>RANGE</code> - sort key * </p> * </li> * </ul> * <note> * <p> * The partition key of an item is also known as its <i>hash attribute</i>. The term "hash attribute" derives * from DynamoDB' usage of an internal hash function to evenly distribute data items across partitions, based * on their partition key values. * </p> * <p> * The sort key of an item is also known as its <i>range attribute</i>. The term "range attribute" derives * from the way DynamoDB stores items with the same partition key physically close together, in sorted order * by the sort key value. 
* </p> */ public void setKeySchema(java.util.Collection<KeySchemaElement> keySchema) { if (keySchema == null) { this.keySchema = null; return; } this.keySchema = new java.util.ArrayList<KeySchemaElement>(keySchema); } /** * <p> * The complete key schema for a global secondary index, which consists of one or more pairs of attribute names and * key types: * </p> * <ul> * <li> * <p> * <code>HASH</code> - partition key * </p> * </li> * <li> * <p> * <code>RANGE</code> - sort key * </p> * </li> * </ul> * <note> * <p> * The partition key of an item is also known as its <i>hash attribute</i>. The term "hash attribute" derives from * DynamoDB' usage of an internal hash function to evenly distribute data items across partitions, based on their * partition key values. * </p> * <p> * The sort key of an item is also known as its <i>range attribute</i>. The term "range attribute" derives from the * way DynamoDB stores items with the same partition key physically close together, in sorted order by the sort key * value. * </p> * </note> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if any). Use * {@link #setKeySchema(java.util.Collection)} or {@link #withKeySchema(java.util.Collection)} if you want to * override the existing values. * </p> * * @param keySchema * The complete key schema for a global secondary index, which consists of one or more pairs of attribute * names and key types:</p> * <ul> * <li> * <p> * <code>HASH</code> - partition key * </p> * </li> * <li> * <p> * <code>RANGE</code> - sort key * </p> * </li> * </ul> * <note> * <p> * The partition key of an item is also known as its <i>hash attribute</i>. The term "hash attribute" derives * from DynamoDB' usage of an internal hash function to evenly distribute data items across partitions, based * on their partition key values. * </p> * <p> * The sort key of an item is also known as its <i>range attribute</i>. 
The term "range attribute" derives * from the way DynamoDB stores items with the same partition key physically close together, in sorted order * by the sort key value. * </p> * @return Returns a reference to this object so that method calls can be chained together. */ public GlobalSecondaryIndex withKeySchema(KeySchemaElement... keySchema) { if (this.keySchema == null) { setKeySchema(new java.util.ArrayList<KeySchemaElement>(keySchema.length)); } for (KeySchemaElement ele : keySchema) { this.keySchema.add(ele); } return this; } /** * <p> * The complete key schema for a global secondary index, which consists of one or more pairs of attribute names and * key types: * </p> * <ul> * <li> * <p> * <code>HASH</code> - partition key * </p> * </li> * <li> * <p> * <code>RANGE</code> - sort key * </p> * </li> * </ul> * <note> * <p> * The partition key of an item is also known as its <i>hash attribute</i>. The term "hash attribute" derives from * DynamoDB' usage of an internal hash function to evenly distribute data items across partitions, based on their * partition key values. * </p> * <p> * The sort key of an item is also known as its <i>range attribute</i>. The term "range attribute" derives from the * way DynamoDB stores items with the same partition key physically close together, in sorted order by the sort key * value. * </p> * </note> * * @param keySchema * The complete key schema for a global secondary index, which consists of one or more pairs of attribute * names and key types:</p> * <ul> * <li> * <p> * <code>HASH</code> - partition key * </p> * </li> * <li> * <p> * <code>RANGE</code> - sort key * </p> * </li> * </ul> * <note> * <p> * The partition key of an item is also known as its <i>hash attribute</i>. The term "hash attribute" derives * from DynamoDB' usage of an internal hash function to evenly distribute data items across partitions, based * on their partition key values. * </p> * <p> * The sort key of an item is also known as its <i>range attribute</i>. 
The term "range attribute" derives * from the way DynamoDB stores items with the same partition key physically close together, in sorted order * by the sort key value. * </p> * @return Returns a reference to this object so that method calls can be chained together. */ public GlobalSecondaryIndex withKeySchema(java.util.Collection<KeySchemaElement> keySchema) { setKeySchema(keySchema); return this; } /** * <p> * Represents attributes that are copied (projected) from the table into the global secondary index. These are in * addition to the primary key attributes and index key attributes, which are automatically projected. * </p> * * @param projection * Represents attributes that are copied (projected) from the table into the global secondary index. These * are in addition to the primary key attributes and index key attributes, which are automatically projected. */ public void setProjection(Projection projection) { this.projection = projection; } /** * <p> * Represents attributes that are copied (projected) from the table into the global secondary index. These are in * addition to the primary key attributes and index key attributes, which are automatically projected. * </p> * * @return Represents attributes that are copied (projected) from the table into the global secondary index. These * are in addition to the primary key attributes and index key attributes, which are automatically * projected. */ public Projection getProjection() { return this.projection; } /** * <p> * Represents attributes that are copied (projected) from the table into the global secondary index. These are in * addition to the primary key attributes and index key attributes, which are automatically projected. * </p> * * @param projection * Represents attributes that are copied (projected) from the table into the global secondary index. These * are in addition to the primary key attributes and index key attributes, which are automatically projected. 
* @return Returns a reference to this object so that method calls can be chained together. */ public GlobalSecondaryIndex withProjection(Projection projection) { setProjection(projection); return this; } /** * <p> * Represents the provisioned throughput settings for the specified global secondary index. * </p> * <p> * For current minimum and maximum provisioned throughput values, see <a * href="http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Limits.html">Limits</a> in the <i>Amazon * DynamoDB Developer Guide</i>. * </p> * * @param provisionedThroughput * Represents the provisioned throughput settings for the specified global secondary index.</p> * <p> * For current minimum and maximum provisioned throughput values, see <a * href="http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Limits.html">Limits</a> in the * <i>Amazon DynamoDB Developer Guide</i>. */ public void setProvisionedThroughput(ProvisionedThroughput provisionedThroughput) { this.provisionedThroughput = provisionedThroughput; } /** * <p> * Represents the provisioned throughput settings for the specified global secondary index. * </p> * <p> * For current minimum and maximum provisioned throughput values, see <a * href="http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Limits.html">Limits</a> in the <i>Amazon * DynamoDB Developer Guide</i>. * </p> * * @return Represents the provisioned throughput settings for the specified global secondary index.</p> * <p> * For current minimum and maximum provisioned throughput values, see <a * href="http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Limits.html">Limits</a> in the * <i>Amazon DynamoDB Developer Guide</i>. */ public ProvisionedThroughput getProvisionedThroughput() { return this.provisionedThroughput; } /** * <p> * Represents the provisioned throughput settings for the specified global secondary index. 
* </p> * <p> * For current minimum and maximum provisioned throughput values, see <a * href="http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Limits.html">Limits</a> in the <i>Amazon * DynamoDB Developer Guide</i>. * </p> * * @param provisionedThroughput * Represents the provisioned throughput settings for the specified global secondary index.</p> * <p> * For current minimum and maximum provisioned throughput values, see <a * href="http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Limits.html">Limits</a> in the * <i>Amazon DynamoDB Developer Guide</i>. * @return Returns a reference to this object so that method calls can be chained together. */ public GlobalSecondaryIndex withProvisionedThroughput(ProvisionedThroughput provisionedThroughput) { setProvisionedThroughput(provisionedThroughput); return this; } /** * Returns a string representation of this object; useful for testing and debugging. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getIndexName() != null) sb.append("IndexName: ").append(getIndexName()).append(","); if (getKeySchema() != null) sb.append("KeySchema: ").append(getKeySchema()).append(","); if (getProjection() != null) sb.append("Projection: ").append(getProjection()).append(","); if (getProvisionedThroughput() != null) sb.append("ProvisionedThroughput: ").append(getProvisionedThroughput()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof GlobalSecondaryIndex == false) return false; GlobalSecondaryIndex other = (GlobalSecondaryIndex) obj; if (other.getIndexName() == null ^ this.getIndexName() == null) return false; if (other.getIndexName() != null && other.getIndexName().equals(this.getIndexName()) == false) return false; if (other.getKeySchema() == null ^ 
this.getKeySchema() == null) return false; if (other.getKeySchema() != null && other.getKeySchema().equals(this.getKeySchema()) == false) return false; if (other.getProjection() == null ^ this.getProjection() == null) return false; if (other.getProjection() != null && other.getProjection().equals(this.getProjection()) == false) return false; if (other.getProvisionedThroughput() == null ^ this.getProvisionedThroughput() == null) return false; if (other.getProvisionedThroughput() != null && other.getProvisionedThroughput().equals(this.getProvisionedThroughput()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getIndexName() == null) ? 0 : getIndexName().hashCode()); hashCode = prime * hashCode + ((getKeySchema() == null) ? 0 : getKeySchema().hashCode()); hashCode = prime * hashCode + ((getProjection() == null) ? 0 : getProjection().hashCode()); hashCode = prime * hashCode + ((getProvisionedThroughput() == null) ? 0 : getProvisionedThroughput().hashCode()); return hashCode; } @Override public GlobalSecondaryIndex clone() { try { return (GlobalSecondaryIndex) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } @com.amazonaws.annotation.SdkInternalApi @Override public void marshall(ProtocolMarshaller protocolMarshaller) { com.amazonaws.services.dynamodbv2.model.transform.GlobalSecondaryIndexMarshaller.getInstance().marshall(this, protocolMarshaller); } }
package nl.peterbloem.kit.data.classification; import java.awt.AlphaComposite; import java.awt.Color; import java.awt.Graphics2D; import java.awt.image.BufferedImage; import java.awt.image.BufferedImageOp; import java.io.IOException; import java.util.AbstractList; import java.util.ArrayList; import java.util.Collection; import java.util.Iterator; import java.util.List; import java.util.ListIterator; import org.apache.commons.math3.linear.ArrayRealVector; import org.apache.commons.math3.linear.RealVector; import nl.peterbloem.kit.data.Point; public class Classifiers { public static ArrayList<Color> colors = new ArrayList<Color>(); public static ArrayList<Color> componentColors = new ArrayList<Color>(); public static Color errorColor = Color.RED; static { colors.add(Color.BLACK); colors.add(Color.WHITE); colors.add(Color.BLUE); colors.add(Color.GREEN); colors.add(Color.YELLOW); colors.add(Color.PINK); componentColors.add(Color.GREEN); componentColors.add(Color.RED); componentColors.add(Color.YELLOW); componentColors.add(Color.ORANGE); componentColors.add(Color.WHITE); componentColors.add(Color.CYAN); componentColors.add(Color.MAGENTA); componentColors.add(Color.PINK); componentColors.add(Color.LIGHT_GRAY); componentColors.add(Color.DARK_GRAY); } // // /** // * Draws a classifier of dimension two. // * // * @param res The resolution of the smallest side of the image. // */ // public static BufferedImage draw(Classifier classifier, int res) // { // return draw( // classifier, // new double[]{-1.0, 1.0}, // new double[]{-1.0, 1.0}, // res); // } // /** // * Draws a classifier of dimension two. // * // * @param res The resolution of the smallest side of the image. 
// */ // public static BufferedImage drawClassifier(IFSClassifier classifier, int res, int samples, boolean log) // throws IOException // { // return drawClassifier(classifier, null, res, samples, log); // } // // public static BufferedImage drawClassifier( // IFSClassifier classifier, Dataset<Integer> dataset, // int res, int samples, boolean log) // throws IOException // { // int depth = (int)classifier.getDepth(); // int width = (dataset == null) ? 2 : 3; // // BufferedImage result = new BufferedImage( // width * res, res, BufferedImage.TYPE_INT_ARGB); // Graphics2D graphics = result.createGraphics(); // // graphics.setBackground(Color.black); // graphics.clearRect(0, 0, result.getWidth(), result.getHeight()); // // BufferedImage current = drawClassifier( // classifier, // new double[]{-1.0, 1.0}, // new double[]{-1.0, 1.0}, // res); // graphics.drawImage(current, 0, 0, null); // // graphics.setComposite(MiscComposite.getInstance(MiscComposite.ADD, 1.0f)); // // List<Point> points = new ArrayList<Point>(); // List<IFSDensityModel> models = classifier.models(); // // for(int i = 0; i < models.size(); i++) // { // BufferedImageOp op = new LookupFilter(new LinearColormap( // Color.BLACK.getRGB(), // componentColors.get(i).getRGB())); // // points.clear(); // // models.get(i).endPoints(depth, points, null); // // current = Datasets.drawDataset(points, res, log); // // graphics.drawImage(current, op, res, 0); // } // // if(dataset != null) // { // BufferedImage dataImage = Datasets.drawDataset(dataset, res, log); // graphics.drawImage(dataImage, res*2, 0, null); // } // // graphics.dispose(); // // return result; // } // /** // * Draw a classifier with its representation. 
// * // * @param classifier // * @param res // * @param samples The suggested number of points fo rthe representation // * @param log // * @return // * @throws IOException // */ // public static BufferedImage drawClassifier( // RepresentingClassifier classifier, int res, int samples, boolean log) // throws IOException // { // int width = 2; // // BufferedImage result = new BufferedImage( // width * res, res, BufferedImage.TYPE_INT_ARGB); // Graphics2D graphics = result.createGraphics(); // // graphics.setBackground(Color.black); // graphics.clearRect(0, 0, result.getWidth(), result.getHeight()); // // BufferedImage current = drawClassifier( // classifier, // new double[]{-1.0, 1.0}, // new double[]{-1.0, 1.0}, // res); // graphics.drawImage(current, 0, 0, null); // // graphics.setComposite(MiscComposite.getInstance(MiscComposite.ADD, 1.0f)); // // Dataset<Integer> rep = classifier.representation(samples); // Pointset points; // // for(int target : rep.targetSet()) // { // BufferedImageOp op = new LookupFilter(new LinearColormap( // Color.BLACK.getRGB(), // componentColors.get(target).getRGB())); // // points = rep.pointsByTargetUniform(target); // current = Datasets.drawDataset(points, res, log); // // graphics.drawImage(current, op, res, 0); // } // // graphics.dispose(); // // return result; // } // /** // * Draws a classifier whose dimensionality is two. // * // * @param res The resolution of the smallest side of the image. 
// */ // public static BufferedImage draw(Classifier classifier, // double[] xrange, // double[] yrange, // int res) // { // if(classifier.dimension() != 2) // throw new IllegalArgumentException("Classifier must have dimensionality two (has "+classifier.dimension()+")"); // // double xDelta = xrange[1] - xrange[0], // yDelta = yrange[1] - yrange[0]; // // double maxDelta = Math.max(xDelta, yDelta); // double minDelta = Math.min(xDelta, yDelta); // // double step = minDelta/(double) res; // // int xRes = (int) (xDelta / step); // int yRes = (int) (yDelta / step); // // BufferedImage image = // new BufferedImage(xRes, yRes, BufferedImage.TYPE_INT_RGB); // // double x, y; // int classInt; // Point p; // Color color; // // // for(int i = 0; i < xRes; i++) // { // x = xrange[0] + step*0.5 + step * i; // for(int j = 0; j < yRes; j++) // { // y = yrange[0] + step*0.5 + step * j; // p = new Point(x, y); // // classInt = classifier.classify(p); // if(classInt >= 0 && classInt < colors.size()) // color = colors.get(classInt); // else // color = errorColor; // // image.setRGB(i, j, color.getRGB()); // } // } // // return image; // } // // // public static BufferedImage draw(Classified<Point> data, // int res, // boolean log) // { // double[] range = new double[]{-1.0, 1.0}; // return draw(data, range, range, res, log); // } // // public static BufferedImage draw( // Classified<Point> data, // double[] xrange, // double[] yrange, // int res, // boolean log) // { // BufferedImage image = new BufferedImage(res, res, BufferedImage.TYPE_INT_ARGB); // Graphics2D graphics = image.createGraphics(); // // graphics.setBackground(Color.black); // graphics.clearRect(0, 0, image.getWidth(), image.getHeight()); // // BufferedImage current = null; // // graphics.setComposite(AlphaComposite.SrcAtop); // // for(int i = 0; i < data.numClasses(); i++) // { // System.out.println(i + ":" + data.points(i).size()); // current = Draw.draw(data.points(i), xrange, yrange, res, res, log, true); // // 
// * Colorize // current = Draw.colorize(componentColors.get(i)).filter(current, null); // // graphics.drawImage(current, 0, 0, null); // } // // graphics.dispose(); // // return image; // } /** * Returns a classifier that classifies 2d points according to the * mandelbrot set. The classifier does not learn, and is only used to define * a learning task for other classifiers. * * @return */ public static Classifier mandelbrot() { return new MandelbrotClassifier(); } private static class MandelbrotClassifier extends AbstractClassifier { // private AffineMap map = new AffineMap(Arrays.asList(3.0, -0.5, 0.0, 0.0), AffineMap.Mode.SIMILITUDE); public MandelbrotClassifier() { super(2, 2); } public int classify(Point point) { double range = 10.0, topRange = 100000.0; int steps = 1000; double x0 = point.get(0) - 0.5, y0 = point.get(1); double x = x0, y = y0; double xp, yp; for(int i = 0; i < steps; i ++) { xp = x*x - y*y + x0; yp = 2*x*y + y0; x = xp; y = yp; if(x*x + y*y > topRange*topRange) break; } if(x*x + y*y > range * range) return 1; return 0; } } /** * The Newton fractal as a classifier. * * The Newton fractal is a coloring of points in the complex plane by which * of the possible roots of a complex polynomial Netwon's method converges to. 
* @return */ public static Classifier newton() { return new NewtonClassifier(); } private static class NewtonClassifier extends AbstractClassifier { private static DistanceClassifier base; static { base = new DistanceClassifier(new Point(-0.5, -0.9)); base.addPoint(new Point(-0.5, 0.9)); base.addPoint(new Point(1.0, 0.0)); } public NewtonClassifier() { super(2, 3); } public int classify(Point point) { int steps = 2000; double x = point.get(0), y = point.get(1); double xp, yp; for(int i = 0; i < steps; i ++) { double poly = x*x*x*x + 2*x*x*y*y + y*y*y*y; xp = (2.0/3.0) * x + (1.0/3.0) * (x*x - y*y)/poly; yp = (2.0/3.0) * y + (1.0/3.0) * (-2*x*y)/poly; x = xp; y = yp; } // System.out.printf("%.2f\t %.2f\n", x, y); return base.classify(new Point(x,y)); } } /** * A classifier for the magnet fractal. * * The magnet fractal is a simulation of a simple physical system consisting * of a metal pendulum suspended over three magnets. The initial states * (points in the plane) are colored by which magnet the pendulum halts * over, when released from that state. 
* * @return */ public static Classifier magnet() { return new MagnetClassifier(); } private static class MagnetClassifier extends AbstractClassifier { private static ArrayList<RealVector> sources = new ArrayList<RealVector>(); private int maxSteps = 20000; private int minSteps = 500; private double stopDist = 0.01; private double stopVelocity = 0.01; private double frictConst = 0.00125; private double magnConst = 0.000002; private double pendheight = 0.02; private double dt = 1.0; static { // sets the three sources at the vertices of an equilateral triangle double a = 0.25, y = a * Math.sin((30.0/360.0) * (2 * Math.PI)), x = a * Math.cos((30.0/360.0) * (2 * Math.PI)); for(int i = 0; i < 3; i++) sources.add(new ArrayRealVector(2)); sources.get(0).setEntry(0, 0); sources.get(0).setEntry(1, a); sources.get(1).setEntry(0, x); sources.get(1).setEntry(1,-y); sources.get(2).setEntry(0,-x); sources.get(2).setEntry(1,-y); } public MagnetClassifier() { super(2, 3); } public int classify(Point point) { RealVector velocity = new ArrayRealVector(2); RealVector acceleration = new ArrayRealVector(2); RealVector accelerationNew = new ArrayRealVector(2); RealVector accelerationPrev = new ArrayRealVector(2); RealVector dist = new ArrayRealVector(2); RealVector position = new ArrayRealVector(2); position.setEntry(0, point.get(0)); position.setEntry(1, point.get(1)); for(int i = 0; i < maxSteps; i++) { // System.out.println(position + " " + velocity); position = position.add(velocity.mapMultiply(dt)); position = position.add(acceleration.mapMultiply(sq(dt) * (2.0/3.0))); position = position.add(accelerationPrev.mapMultiply(-sq(dt) * (1.0/6.0))); for(int s = 0; s < sources.size(); s++) { RealVector source = sources.get(s); // dist = position - source dist.setSubVector(0, position); dist = dist.add(source.mapMultiply(-1.0)); double sourceDist = dist.getNorm(); double norm = 0.0; for(int k = 0; k < dist.getDimension(); k++) norm += sq(dist.getEntry(k)); norm = Math.sqrt(norm + 
sq(pendheight)); dist.mapMultiplyToSelf(1.0/(norm*norm*norm)); accelerationNew = accelerationNew.add(dist.mapMultiply(-magnConst)); // System.out.println(velocity.getNorm() + "\t" + sourceDist + "\t" + source); // System.out.println(i); if(i > minSteps && sourceDist < stopDist && velocity.getNorm() < stopVelocity) { return s; } } accelerationNew = accelerationNew.add(velocity.mapMultiply(-frictConst)); velocity = velocity.add(accelerationNew .mapMultiply( dt*(1.0/3.0))); velocity = velocity.add(acceleration .mapMultiply( dt*(5.0/6.0))); velocity = velocity.add(accelerationPrev .mapMultiply(-dt*(1.0/6.0))); RealVector tmp = accelerationPrev; accelerationPrev = acceleration; acceleration = accelerationNew; accelerationNew = tmp; accelerationNew.set(0.0); } System.out.println("?"); return -1; } private static double sq(double in) { return in*in; } @Override public int dimension() { return 2; } @Override public int size() { return 3; } } /** * A classifier for the game of nim * * @param numHeaps * @param max * @return */ public static Classifier nim(int numHeaps, int max) { return new NimClassifier(numHeaps, max); } private static class NimClassifier extends AbstractClassifier { private double max = 200; public NimClassifier(int numHeaps, int max) { super(numHeaps, 2); this.max = max; } public int classify(Point point) { List<Integer> ints = new ArrayList<Integer>(); for(double d : point) ints.add( (int)Math.floor(((d+1.0)/2.0) * max) ); int nimSum = 0; for(int heapSize : ints) nimSum = nimSum ^ heapSize; if(nimSum == 0) return 0; return 1; } } public static Classifier wythoff(int max) { return new WythoffClassifier(max); } private static class WythoffClassifier extends AbstractClassifier { private double max = 200; private double phi = 1.6180339887498948482; public WythoffClassifier(int max) { super(2, 2); this.max = max; } public int classify(Point point) { int n = (int)Math.floor(((point.get(0) + 1.0)/2.0) * max); int m = (int)Math.floor(((point.get(1) + 1.0)/2.0) * 
max); int k = m - n; if((int)Math.floor(phi * phi * k) == m && (int)Math.floor(phi * k) == n) return 0; return 1; } } /** * Returns a classifier that classifies points according to the bi-unit * square or high dimensional analog * * @return */ public static Classifier square(int n, double r) { return new SquareClassifier(n , r); } private static class SquareClassifier extends AbstractClassifier { private double r = 0.5; public SquareClassifier(int n) { super(n, 2); } public SquareClassifier(int n, double r) { super(n, 2); this.r = r; } public int classify(Point point) { for(double x : point) if(x > r || x < -r) return 0; return 1; } public void learn(List<? extends List<Double>> data, List<Integer> classes) { throw new UnsupportedOperationException("The Square classifier doesn't learn"); } } public static Classifier line(int dim) { return new LineClassifier(dim); } private static class LineClassifier extends AbstractClassifier { public LineClassifier(int n) { super(n, 2); } public int classify(Point point) { if(point.get(0) > 0 ) return 0; return 1; } public void learn(List<? extends List<Double>> data, List<Integer> classes) { throw new UnsupportedOperationException("The Square classifier doesn't learn"); } } public static Classifier sine() { return new SineClassifier(); } private static class SineClassifier extends AbstractClassifier { public SineClassifier() { super(2, 2); } public int classify(Point point) { double x = point.get(0); if(point.get(1) > Math.sin(x*6.0)/2.0) return 0; return 1; } public void learn(List<? 
extends List<Double>> data, List<Integer> classes) { throw new UnsupportedOperationException("The Square classifier doesn't learn"); } } // // public static Classifier ifs(int depth) // { // return new IFSExampleClassifier(depth); // } // // private static class IFSExampleClassifier extends AbstractClassifier // { // int depth; // IFS<Similitude> model = IFSs.square(); // // public IFSExampleClassifier(int depth) // { // super(2, 2); // this.depth = depth; // } // // public int classify(Point point) // { // List<Integer> code = IFS.code(model, point, depth); // // int sum = 0; // for(int i : code) // sum += i; // // return code.contains(0) ? 1 : 0; // } // } }
/*
 * Copyright 2016-present Facebook, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License. You may obtain
 * a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package com.facebook.buck.log;

import static com.facebook.buck.util.MoreThrowables.getInitialCause;
import static com.facebook.buck.util.MoreThrowables.getThrowableOrigin;

import com.facebook.buck.util.immutables.BuckStyleImmutable;
import com.facebook.buck.util.network.hostname.HostnameFetching;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;

import org.immutables.value.Value;

import java.io.IOException;
import java.util.Optional;
import java.util.concurrent.TimeUnit;
import java.util.logging.LogRecord;

/**
 * An immutable view over a {@link LogRecord} (plus the captured log lines)
 * that derives the fields needed for error-log upload: traits, a human-readable
 * message, a category key for de-duplication, and assorted per-command context
 * looked up through {@link GlobalStateManager}.
 */
@Value.Immutable
@BuckStyleImmutable
abstract class AbstractErrorLogRecord {

  private static final ThreadIdToCommandIdMapper MAPPER = GlobalStateManager
      .singleton()
      .getThreadIdToCommandIdMapper();
  private static final CommandIdToIsDaemonMapper IS_DAEMON_MAPPER = GlobalStateManager
      .singleton()
      .getCommandIdToIsDaemonMapper();
  private static final CommandIdToIsSuperConsoleEnabledMapper IS_SUPERCONSOLE_ENABLED_MAPPER =
      GlobalStateManager
          .singleton()
          .getCommandIdToIsSuperConsoleEnabledMapper();
  private static final Logger LOG = Logger.get(AbstractErrorLogRecord.class);

  public abstract LogRecord getRecord();

  public abstract ImmutableList<String> getLogs();

  /**
   * Environment/build metadata attached to every uploaded record. Unknown
   * values are reported as the literal string "unknown" rather than omitted.
   */
  @Value.Derived
  public ImmutableMap<String, String> getTraits() {
    String logger = getRecord().getLoggerName();
    String hostname = "unknown";
    try {
      hostname = HostnameFetching.getHostname();
    } catch (IOException e) {
      // Best effort only; fall back to "unknown".
      LOG.debug(e, "Unable to fetch hostname");
    }
    ImmutableMap<String, String> traits = ImmutableMap.<String, String>builder()
        .put("severity", getRecord().getLevel().toString())
        .put("logger", logger != null ? logger : "unknown")
        .put("buckGitCommit", System.getProperty("buck.git_commit", "unknown"))
        .put("javaVersion", System.getProperty("java.version", "unknown"))
        .put("os", System.getProperty("os.name", "unknown"))
        .put("osVersion", System.getProperty("os.version", "unknown"))
        .put("user", System.getProperty("user.name", "unknown"))
        .put("hostname", hostname)
        .build();
    return traits;
  }

  /**
   * Builds a display message of the form
   * "initialCauseClass: initialCauseMessage: recordMessage: loggerName",
   * where each absent piece (and its trailing ": ") is dropped.
   */
  @Value.Derived
  public String getMessage() {
    Optional<String> initialErr = Optional.empty();
    Optional<String> initialErrorMsg = Optional.empty();
    Optional<String> errorMsg = Optional.empty();
    Throwable throwable = getRecord().getThrown();
    if (throwable != null) {
      initialErr = Optional.ofNullable(getInitialCause(throwable).getClass().getName());
      // NOTE(review): this guards on the outer throwable's message but reads
      // the initial cause's localized message — possibly intentional, but
      // worth confirming against the upload consumer.
      if (throwable.getMessage() != null) {
        initialErrorMsg = Optional.ofNullable(getInitialCause(throwable).getLocalizedMessage());
      }
    }
    errorMsg = Optional.ofNullable(getRecord().getMessage());
    StringBuilder sb = new StringBuilder();
    for (Optional<String> field : ImmutableList.of(initialErr, initialErrorMsg, errorMsg)) {
      sb.append(field.orElse(""));
      if (field.isPresent()) {
        sb.append(": ");
      }
    }
    sb.append(getRecord().getLoggerName());
    return sb.toString();
  }

  /**
   * Computes a category key based on relevant LogRecord information. If an exception is
   * present, categorizes on the class + method that threw it. If no exception
   * is found, categorizes on the logger name and the beginning of the message.
   */
  @Value.Derived
  public String getCategory() {
    String logger = "";
    if (getRecord().getLoggerName() != null) {
      logger = getRecord().getLoggerName();
    }
    StringBuilder sb = new StringBuilder(logger).append(":");
    Throwable throwable = getRecord().getThrown();
    if (throwable != null) {
      sb.append(extractClassMethod(getThrowableOrigin(getInitialCause(throwable))));
    } else {
      sb.append(truncateMessage(getRecord().getMessage()));
    }
    return sb.toString();
  }

  /** Record timestamp in whole seconds since the epoch. */
  @Value.Derived
  public long getTime() {
    return TimeUnit.MILLISECONDS.toSeconds(getRecord().getMillis());
  }

  @Value.Derived
  public Optional<String> getLogger() {
    return Optional.ofNullable(getRecord().getLoggerName());
  }

  /** Command id of the build that logged this record, if the thread is mapped. */
  @Value.Derived
  public Optional<String> getBuildUuid() {
    String buildUuid = MAPPER.threadIdToCommandId(getRecord().getThreadID());
    return Optional.ofNullable(buildUuid);
  }

  @Value.Derived
  public Optional<Boolean> getIsSuperConsoleEnabled() {
    String buildUuid = MAPPER.threadIdToCommandId(getRecord().getThreadID());
    if (buildUuid == null) {
      return Optional.empty();
    }
    return Optional.ofNullable(
        IS_SUPERCONSOLE_ENABLED_MAPPER
            .commandIdToIsSuperConsoleEnabled(buildUuid));
  }

  @Value.Derived
  public Optional<Boolean> getIsDaemon() {
    String buildUuid = MAPPER.threadIdToCommandId(getRecord().getThreadID());
    if (buildUuid == null) {
      return Optional.empty();
    }
    return Optional.ofNullable(IS_DAEMON_MAPPER.commandIdToIsRunningAsDaemon(buildUuid));
  }

  @Value.Derived
  public Optional<StackTraceElement[]> getStack() {
    Throwable throwable = getRecord().getThrown();
    if (throwable != null) {
      return Optional.ofNullable(throwable.getStackTrace());
    }
    return Optional.empty();
  }

  @Value.Derived
  public Optional<String> getErrorMessage() {
    Throwable throwable = getRecord().getThrown();
    if (throwable != null && throwable.getMessage() != null) {
      return Optional.ofNullable(throwable.getMessage());
    }
    return Optional.empty();
  }

  /** Class name of the root cause of the record's throwable, if any. */
  @Value.Derived
  public Optional<String> getInitialError() {
    Throwable throwable = getRecord().getThrown();
    if (throwable != null) {
      return Optional.ofNullable(getInitialCause(throwable).getClass().getName());
    }
    return Optional.empty();
  }

  /** Localized message of the root cause of the record's throwable, if any. */
  @Value.Derived
  public Optional<String> getInitialErrorMsg() {
    Throwable throwable = getRecord().getThrown();
    if (throwable != null) {
      return Optional.ofNullable(getInitialCause(throwable).getLocalizedMessage());
    }
    return Optional.empty();
  }

  @Value.Derived
  public Optional<String> getOrigin() {
    Throwable throwable = getRecord().getThrown();
    if (throwable != null) {
      return Optional.ofNullable(getThrowableOrigin(throwable));
    }
    return Optional.empty();
  }

  /**
   * We expect uploaded log records to contain a stack trace, but if they don't
   * the logged message is important. To address the issue that these records
   * often contain parametrized values, only first word (1 & 2 if first has 2 or
   * less chars) of message is taken into account.
   */
  private String truncateMessage(String name) {
    String[] words = name.split("\\s+");
    if (words.length > 1 && words[0].length() < 3) {
      return words[0] + " " + words[1];
    }
    return words[0];
  }

  /**
   * Extracts minimum valuable information set from lines in the following format:
   * package.classname.method(filename:line_number)
   */
  private String extractClassMethod(String name) {
    if (name != null) {
      // Split on the first '(' and keep what precedes it. The limit must be 2:
      // String.split with limit 1 returns the whole input unchanged, so the
      // previous code never stripped the "(filename:line_number)" suffix.
      return name.split("\\(", 2)[0];
    }
    return "";
  }
}
// Copyright (C) 2018 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.gerrit.server.schema;

import static com.google.gerrit.server.notedb.NoteDbTable.GROUPS;
import static com.google.gerrit.server.notedb.NotesMigration.DISABLE_REVIEW_DB;
import static com.google.gerrit.server.notedb.NotesMigration.SECTION_NOTE_DB;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.common.flogger.FluentLogger;
import com.google.gerrit.common.Nullable;
import com.google.gerrit.common.data.GroupDescription;
import com.google.gerrit.common.data.GroupReference;
import com.google.gerrit.reviewdb.client.Account;
import com.google.gerrit.reviewdb.client.AccountGroup;
import com.google.gerrit.reviewdb.client.RefNames;
import com.google.gerrit.reviewdb.server.ReviewDb;
import com.google.gerrit.reviewdb.server.ReviewDbWrapper;
import com.google.gerrit.server.GerritPersonIdent;
import com.google.gerrit.server.account.AccountConfig;
import com.google.gerrit.server.config.AllUsersName;
import com.google.gerrit.server.config.GerritServerConfig;
import com.google.gerrit.server.config.GerritServerIdProvider;
import com.google.gerrit.server.config.SitePaths;
import com.google.gerrit.server.git.GitRepositoryManager;
import com.google.gerrit.server.group.SystemGroupBackend;
import com.google.gerrit.server.group.db.AuditLogFormatter;
import com.google.gerrit.server.group.db.GroupNameNotes;
import com.google.gerrit.server.update.RefUpdateUtil;
import com.google.gwtorm.server.OrmException;
import com.google.inject.Inject;
import com.google.inject.Provider;
import java.io.IOException;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import org.eclipse.jgit.errors.ConfigInvalidException;
import org.eclipse.jgit.lib.BatchRefUpdate;
import org.eclipse.jgit.lib.Config;
import org.eclipse.jgit.lib.ObjectInserter;
import org.eclipse.jgit.lib.PersonIdent;
import org.eclipse.jgit.lib.Repository;

/** Migrate groups from ReviewDb to NoteDb. */
public class Schema_167 extends SchemaVersion {
  private static final FluentLogger logger = FluentLogger.forEnclosingClass();

  private final GitRepositoryManager repoManager;
  private final AllUsersName allUsersName;
  private final Config gerritConfig;
  private final SitePaths sitePaths;
  private final PersonIdent serverIdent;
  private final SystemGroupBackend systemGroupBackend;

  @Inject
  protected Schema_167(
      Provider<Schema_166> prior,
      GitRepositoryManager repoManager,
      AllUsersName allUsersName,
      @GerritServerConfig Config gerritConfig,
      SitePaths sitePaths,
      @GerritPersonIdent PersonIdent serverIdent,
      SystemGroupBackend systemGroupBackend) {
    super(prior);
    this.repoManager = repoManager;
    this.allUsersName = allUsersName;
    this.gerritConfig = gerritConfig;
    this.sitePaths = sitePaths;
    this.serverIdent = serverIdent;
    this.systemGroupBackend = systemGroupBackend;
  }

  // Reads every group from ReviewDb, writes the name notes for all of them,
  // then rebuilds each group's NoteDb ref. All ref updates are collected into
  // a single BatchRefUpdate and executed at the end, so the migration is
  // applied to the All-Users repository atomically.
  @Override
  protected void migrateData(ReviewDb db, UpdateUI ui) throws OrmException, SQLException {
    if (gerritConfig.getBoolean(SECTION_NOTE_DB, GROUPS.key(), DISABLE_REVIEW_DB, false)) {
      // Groups in ReviewDb have already been disabled, nothing to do.
      return;
    }

    try (Repository allUsersRepo = repoManager.openRepository(allUsersName)) {
      List<GroupReference> allGroupReferences = readGroupReferencesFromReviewDb(db);

      BatchRefUpdate batchRefUpdate = allUsersRepo.getRefDatabase().newBatchUpdate();
      writeAllGroupNamesToNoteDb(allUsersRepo, allGroupReferences, batchRefUpdate);

      GroupRebuilder groupRebuilder = createGroupRebuilder(db, allUsersRepo);
      for (GroupReference groupReference : allGroupReferences) {
        migrateOneGroupToNoteDb(
            db, allUsersRepo, groupRebuilder, groupReference.getUUID(), batchRefUpdate);
      }

      RefUpdateUtil.executeChecked(batchRefUpdate, allUsersRepo);
    } catch (IOException | ConfigInvalidException e) {
      throw new OrmException(
          String.format("Failed to migrate groups to NoteDb for %s", allUsersName.get()), e);
    }
  }

  // Loads (uuid, name) for every group directly via JDBC; the gwtorm layer is
  // bypassed because the schema classes for groups are being removed.
  private List<GroupReference> readGroupReferencesFromReviewDb(ReviewDb db) throws SQLException {
    try (Statement stmt = ReviewDbWrapper.unwrapJbdcSchema(db).getConnection().createStatement();
        ResultSet rs = stmt.executeQuery("SELECT group_uuid, name FROM account_groups")) {
      List<GroupReference> allGroupReferences = new ArrayList<>();
      while (rs.next()) {
        AccountGroup.UUID groupUuid = new AccountGroup.UUID(rs.getString(1));
        String groupName = rs.getString(2);
        allGroupReferences.add(new GroupReference(groupUuid, groupName));
      }
      return allGroupReferences;
    }
  }

  // Stages the refs/meta/group-names note updates on the shared batch; the
  // inserter is flushed here but the refs only move when the batch executes.
  private void writeAllGroupNamesToNoteDb(
      Repository allUsersRepo,
      List<GroupReference> allGroupReferences,
      BatchRefUpdate batchRefUpdate)
      throws IOException {
    try (ObjectInserter inserter = allUsersRepo.newObjectInserter()) {
      GroupNameNotes.updateAllGroups(
          allUsersRepo, inserter, batchRefUpdate, allGroupReferences, serverIdent);
      inserter.flush();
    }
  }

  private GroupRebuilder createGroupRebuilder(ReviewDb db, Repository allUsersRepo)
      throws IOException, ConfigInvalidException {
    AuditLogFormatter auditLogFormatter =
        createAuditLogFormatter(db, allUsersRepo, gerritConfig, sitePaths);
    return new GroupRebuilder(serverIdent, allUsersName, auditLogFormatter);
  }

  // Builds an AuditLogFormatter backed by the simple init-time caches below;
  // system groups are resolved through SystemGroupBackend, everything else
  // through the in-memory group cache.
  private AuditLogFormatter createAuditLogFormatter(
      ReviewDb db, Repository allUsersRepo, Config gerritConfig, SitePaths sitePaths)
      throws IOException, ConfigInvalidException {
    String serverId = new GerritServerIdProvider(gerritConfig, sitePaths).get();
    SimpleInMemoryAccountCache accountCache =
        new SimpleInMemoryAccountCache(allUsersName, allUsersRepo);
    SimpleInMemoryGroupCache groupCache = new SimpleInMemoryGroupCache(db);
    return AuditLogFormatter.create(
        accountCache::get,
        uuid -> {
          if (systemGroupBackend.handles(uuid)) {
            return Optional.ofNullable(systemGroupBackend.get(uuid));
          }
          return groupCache.get(uuid);
        },
        serverId);
  }

  // Deletes any stale refs/groups/<uuid> ref first so the rebuild starts from
  // a clean slate, then stages the rebuilt ref on the shared batch.
  private static void migrateOneGroupToNoteDb(
      ReviewDb db,
      Repository allUsersRepo,
      GroupRebuilder rebuilder,
      AccountGroup.UUID uuid,
      BatchRefUpdate batchRefUpdate)
      throws ConfigInvalidException, IOException, OrmException {
    GroupBundle reviewDbBundle = GroupBundle.Factory.fromReviewDb(db, uuid);
    RefUpdateUtil.deleteChecked(allUsersRepo, RefNames.refsGroups(uuid));
    rebuilder.rebuild(allUsersRepo, reviewDbBundle, batchRefUpdate);
  }

  // The regular account cache isn't available during init. -> Use a simple replacement which tries
  // to load every account only once from disk.
private static class SimpleInMemoryAccountCache { private final AllUsersName allUsersName; private final Repository allUsersRepo; private Map<Account.Id, Optional<Account>> accounts = new HashMap<>(); public SimpleInMemoryAccountCache(AllUsersName allUsersName, Repository allUsersRepo) { this.allUsersName = allUsersName; this.allUsersRepo = allUsersRepo; } public Optional<Account> get(Account.Id accountId) { accounts.computeIfAbsent(accountId, this::load); return accounts.get(accountId); } private Optional<Account> load(Account.Id accountId) { try { AccountConfig accountConfig = new AccountConfig(accountId, allUsersName, allUsersRepo).load(); return accountConfig.getLoadedAccount(); } catch (IOException | ConfigInvalidException ignored) { logger.atWarning().withCause(ignored).log( "Failed to load account %s." + " Cannot get account name for group audit log commit messages.", accountId.get()); return Optional.empty(); } } } // The regular GroupBackends (especially external GroupBackends) and our internal group cache // aren't available during init. -> Use a simple replacement which tries to look up only internal // groups and which loads every internal group only once from disc. (There's no way we can look up // external groups during init. As we need those groups only for cosmetic aspects in // AuditLogFormatter, it's safe to exclude them.) 
private static class SimpleInMemoryGroupCache {
    private final ReviewDb db;
    // Memoized lookups; unresolvable groups are cached as Optional.empty().
    private Map<AccountGroup.UUID, Optional<GroupDescription.Basic>> groups = new HashMap<>();

    public SimpleInMemoryGroupCache(ReviewDb db) {
      this.db = db;
    }

    /** Returns the internal group for {@code groupUuid}, querying ReviewDb at most once per UUID. */
    public Optional<GroupDescription.Basic> get(AccountGroup.UUID groupUuid) {
      groups.computeIfAbsent(groupUuid, this::load);
      return groups.get(groupUuid);
    }

    /** Resolves only internal groups; external groups cannot be looked up during init. */
    private Optional<GroupDescription.Basic> load(AccountGroup.UUID groupUuid) {
      if (!AccountGroup.isInternalGroup(groupUuid)) {
        return Optional.empty();
      }

      List<GroupDescription.Basic> groupDescriptions = getGroupDescriptions(groupUuid);
      // Require exactly one match; zero or several means the name is unusable.
      if (groupDescriptions.size() == 1) {
        return Optional.of(Iterables.getOnlyElement(groupDescriptions));
      }
      return Optional.empty();
    }

    private List<GroupDescription.Basic> getGroupDescriptions(AccountGroup.UUID groupUuid) {
      // Use a parameterized query instead of string concatenation: guards against SQL injection
      // and against quoting bugs should a UUID ever contain a single quote. The bound value is
      // the raw UUID string, which is what the previous concatenation rendered as well.
      try (java.sql.PreparedStatement stmt =
          ReviewDbWrapper.unwrapJbdcSchema(db)
              .getConnection()
              .prepareStatement("SELECT name FROM account_groups where group_uuid = ?")) {
        stmt.setString(1, groupUuid.get());
        try (ResultSet rs = stmt.executeQuery()) {
          List<GroupDescription.Basic> groupDescriptions = new ArrayList<>();
          while (rs.next()) {
            String groupName = rs.getString(1);
            groupDescriptions.add(toGroupDescription(groupUuid, groupName));
          }
          return groupDescriptions;
        }
      } catch (SQLException ignored) {
        logger.atWarning().withCause(ignored).log(
            "Failed to load group %s."
                + " Cannot get group name for group audit log commit messages.",
            groupUuid.get());
        return ImmutableList.of();
      }
    }

    /** Wraps a UUID/name pair as a minimal {@link GroupDescription.Basic} (no email, no URL). */
    private static GroupDescription.Basic toGroupDescription(
        AccountGroup.UUID groupUuid, String groupName) {
      return new GroupDescription.Basic() {
        @Override
        public AccountGroup.UUID getGroupUUID() {
          return groupUuid;
        }

        @Override
        public String getName() {
          return groupName;
        }

        @Nullable
        @Override
        public String getEmailAddress() {
          return null;
        }

        @Nullable
        @Override
        public String getUrl() {
          return null;
        }
      };
    }
  }
}
/* * Copyright 2000-2015 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.util.ui.tree; import com.intellij.ide.util.treeView.AbstractTreeBuilder; import com.intellij.openapi.application.Application; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.ActionCallback; import com.intellij.openapi.util.Comparing; import com.intellij.openapi.wm.IdeFocusManager; import com.intellij.ui.ScrollingUtil; import com.intellij.ui.SimpleColoredComponent; import com.intellij.ui.awt.RelativePoint; import com.intellij.ui.treeStructure.Tree; import com.intellij.util.Range; import com.intellij.util.ui.UIUtil; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import javax.swing.plaf.basic.BasicTreeUI; import javax.swing.tree.*; import java.awt.*; import java.awt.event.ActionEvent; import java.awt.event.KeyEvent; import java.util.*; import java.util.List; public final class TreeUtil { private static final Logger LOG = Logger.getInstance("#com.intellij.util.ui.tree.TreeUtil"); @NonNls @NotNull private static final String TREE_UTIL_SCROLL_TIME_STAMP = "TreeUtil.scrollTimeStamp"; private TreeUtil() {} /** * @param tree JTree to collect expanded paths from. * @param paths output parameter. 
   */
  public static void collectExpandedPaths(@NotNull final JTree tree, @NotNull final List<TreePath> paths){
    final TreeModel model = tree.getModel();
    final Object root = model.getRoot();
    LOG.assertTrue(root != null);
    collectExpandedPathsImpl(tree, paths, new TreePath(root));
  }

  /** Collects the expanded-path frontier starting at the model root. */
  @NotNull
  public static List<TreePath> collectExpandedPaths(@NotNull final JTree tree){
    final ArrayList<TreePath> result = new ArrayList<>();
    final Object root = tree.getModel().getRoot();
    final TreePath rootPath = new TreePath(root);
    result.addAll(collectExpandedPaths(tree, rootPath));
    return result;
  }

  /** Returns the user objects of the selected nodes that are instances of {@code clazz}. */
  @NotNull
  public static <T> List<T> collectSelectedObjectsOfType(@NotNull JTree tree, @NotNull Class<T> clazz) {
    final TreePath[] selections = tree.getSelectionPaths();
    if (selections != null) {
      final ArrayList<T> result = new ArrayList<>();
      for (TreePath selection : selections) {
        final DefaultMutableTreeNode node = (DefaultMutableTreeNode)selection.getLastPathComponent();
        final Object userObject = node.getUserObject();
        if (clazz.isInstance(userObject)) {
          //noinspection unchecked
          result.add((T)userObject);
        }
      }
      return result;
    }
    return Collections.emptyList();
  }

  /**
   * Collects the deepest expanded paths under {@code path}: a path is reported once when it has
   * leaf or collapsed children; expanded children are descended into recursively.
   */
  @NotNull
  public static List<TreePath> collectExpandedPaths(@NotNull final JTree tree, @NotNull TreePath path){
    final ArrayList<TreePath> result = new ArrayList<>();
    if (!tree.isExpanded(path)) return result;
    final Object lastPathComponent = path.getLastPathComponent();
    final TreeModel model = tree.getModel();
    if (model.isLeaf(lastPathComponent)) {
      result.add(path);
    }
    else {
      boolean pathWasAdded = false;
      for(int i = model.getChildCount(lastPathComponent) - 1; i >= 0 ; i--){
        final TreePath childPath = path.pathByAddingChild(model.getChild(lastPathComponent, i));
        // NOTE(review): this re-tests the PARENT, not the child, so the condition is always false
        // here (the enclosing else-branch already established the parent is not a leaf). Leaf
        // children therefore fall through to the final "else", which adds `path` once - the same
        // observable result. Probably childPath.getLastPathComponent() was intended.
        if (model.isLeaf(lastPathComponent)) {
          if (!pathWasAdded) {
            result.add(path);
            pathWasAdded= true;
          }
        }
        else if (tree.isExpanded(childPath)) {
          result.addAll(collectExpandedPaths(tree, childPath));
        }
        else {
          if (!pathWasAdded) {
            result.add(path);
            pathWasAdded= true;
          }
        }
      }
    }
    return result;
  }

  // Adds `path` to `paths` and returns true iff it is expanded but has no expanded children
  // (i.e. it is a frontier node of the expansion).
  private static boolean collectExpandedPathsImpl(@NotNull final JTree tree, @NotNull final Collection<TreePath> paths, @NotNull final TreePath path){
    final TreeModel model = tree.getModel();
    final Object lastPathComponent = path.getLastPathComponent();
    if(model.isLeaf(lastPathComponent)){
      return false;
    }
    boolean hasExpandedChildren = false;
    for(int i = model.getChildCount(lastPathComponent) - 1; i >= 0 ; i--){
      hasExpandedChildren |= collectExpandedPathsImpl(tree, paths, path.pathByAddingChild(model.getChild(lastPathComponent, i)));
    }
    if(!hasExpandedChildren){
      paths.add(path);
      return true;
    }
    else{
      return false;
    }
  }

  /**
   * Expands specified paths.
   * @param tree JTree to apply expansion status to
   * @param paths to expand. See {@link #collectExpandedPaths(javax.swing.JTree, java.util.List)}
   */
  public static void restoreExpandedPaths(@NotNull final JTree tree, @NotNull final List<TreePath> paths){
    for(int i = paths.size() - 1; i >= 0; i--){
      tree.expandPath(paths.get(i));
    }
  }

  /** Returns the path from {@code aRootNode} (inclusive) down to {@code aNode}. */
  @NotNull
  public static TreePath getPath(@NotNull TreeNode aRootNode, @NotNull TreeNode aNode) {
    TreeNode[] nodes = getPathFromRootTo(aRootNode, aNode, true);
    return new TreePath(nodes);
  }

  // True if `ancestor` is `node` itself or one of its parents (identity comparison).
  public static boolean isAncestor(@NotNull TreeNode ancestor, @NotNull TreeNode node) {
    TreeNode parent = node;
    while (parent != null) {
      if (parent == ancestor) return true;
      parent = parent.getParent();
    }
    return false;
  }

  // True if `ancestor` is a (possibly equal) prefix of `path`, compared component-wise via equals.
  private static boolean isAncestor(@NotNull final TreePath ancestor, @NotNull final TreePath path) {
    if (path.getPathCount() < ancestor.getPathCount()) return false;
    for (int i = 0; i < ancestor.getPathCount(); i++)
      if (!path.getPathComponent(i).equals(ancestor.getPathComponent(i))) return false;
    return true;
  }

  // True if `path` is a descendant of (or equal to) any of `paths`.
  private static boolean isDescendants(@NotNull final TreePath path, @NotNull final TreePath[] paths) {
    for (final TreePath ancestor : paths) {
      if (isAncestor(ancestor, path)) return true;
    }
    return false;
  }

  /** Returns the full path from the top of {@code node}'s tree down to {@code node}. */
  @NotNull
  public static TreePath getPathFromRoot(@NotNull TreeNode node) {
    TreeNode[] path
                      = getPathFromRootTo(null, node, false);
    return new TreePath(path);
  }

  // Builds the node array for a path; root == null means "walk to the very top of the tree"
  // (the loop's n != root sentinel then stops past the real root, which is still included
  // because includeRoot is false but height already counted every ancestor).
  @NotNull
  private static TreeNode[] getPathFromRootTo(@Nullable TreeNode root, @NotNull TreeNode node, boolean includeRoot) {
    int height = 0;
    for (TreeNode n = node; n != root; n = n.getParent()) {
      height++;
    }
    TreeNode[] path = new TreeNode[includeRoot ? height+1 : height];
    int i = path.length-1;
    for (TreeNode n = node; i>=0; n = n.getParent()) {
      path[i--] = n;
    }
    return path;
  }

  /** Returns the direct child of {@code parent} whose user object equals {@code object}, or null. */
  @Nullable
  public static TreeNode findNodeWithObject(final Object object, @NotNull final TreeModel model, final Object parent) {
    for (int i = 0; i < model.getChildCount(parent); i++) {
      final DefaultMutableTreeNode childNode = (DefaultMutableTreeNode) model.getChild(parent, i);
      if (childNode.getUserObject().equals(object)) return childNode;
    }
    return null;
  }

  /**
   * Removes last component in the current selection path.
   * @param tree to remove selected node from.
   */
  public static void removeSelected(@NotNull final JTree tree) {
    TreePath[] paths = tree.getSelectionPaths();
    if (paths == null) {
      return;
    }
    for (TreePath path : paths) {
      removeLastPathComponent((DefaultTreeModel) tree.getModel(), path).restoreSelection(tree);
    }
  }

  public static void removeLastPathComponent(@NotNull final JTree tree, @NotNull final TreePath pathToBeRemoved){
    removeLastPathComponent((DefaultTreeModel)tree.getModel(), pathToBeRemoved).restoreSelection(tree);
  }

  /** Depth-first search for a node (root included) whose user object equals {@code aObject}. */
  @Nullable
  public static DefaultMutableTreeNode findNodeWithObject(@NotNull final DefaultMutableTreeNode aRoot, final Object aObject) {
    if (Comparing.equal(aRoot.getUserObject(), aObject)) {
      return aRoot;
    }
    else {
      for (int i = 0; i < aRoot.getChildCount(); i++) {
        final DefaultMutableTreeNode candidate = findNodeWithObject((DefaultMutableTreeNode)aRoot.getChildAt(i), aObject);
        if (null != candidate) {
          return candidate;
        }
      }
      return null;
    }
  }

  /** Returns the longest common prefix of the given paths; they must at least share a root. */
  @NotNull
  public static TreePath findCommonPath(@NotNull final TreePath[] treePaths) {
    LOG.assertTrue(areComponentsEqual(treePaths, 0));
    TreePath result = new TreePath(treePaths[0].getPathComponent(0));
    int pathIndex = 1;
    while (areComponentsEqual(treePaths, pathIndex)) {
      result = result.pathByAddingChild(treePaths[0].getPathComponent(pathIndex));
      pathIndex++;
    }
    return result;
  }

  @NotNull
  public static ActionCallback selectFirstNode(@NotNull JTree tree) {
    TreePath selectionPath = getFirstNodePath(tree);
    return selectPath(tree, selectionPath);
  }

  // First visible path: the root, or its first child when the root is hidden.
  @NotNull
  public static TreePath getFirstNodePath(@NotNull JTree tree) {
    final TreeModel model = tree.getModel();
    final Object root = model.getRoot();
    TreePath selectionPath = new TreePath(root);
    if (!tree.isRootVisible() && model.getChildCount(root) > 0) {
      selectionPath = selectionPath.pathByAddingChild(model.getChild(root, 0));
    }
    return selectionPath;
  }

  // Follows first children from the root down to the first leaf.
  @NotNull
  public static TreePath getFirstLeafNodePath(@NotNull JTree tree) {
    final TreeModel model = tree.getModel();
    Object root = model.getRoot();
    TreePath selectionPath = new TreePath(root);
    while (model.getChildCount(root) > 0) {
      final Object child = model.getChild(root, 0);
      selectionPath = selectionPath.pathByAddingChild(child);
      root = child;
    }
    return selectionPath;
  }

  // Captures the selection state, removes the path's last node (no-op for a root), and returns
  // the state so the caller can restore a sensible selection afterwards.
  @NotNull
  private static IndexTreePathState removeLastPathComponent(@NotNull final DefaultTreeModel model, @NotNull final TreePath pathToBeRemoved) {
    final IndexTreePathState selectionState = new IndexTreePathState(pathToBeRemoved);
    if (((MutableTreeNode) pathToBeRemoved.getLastPathComponent()).getParent() == null) return selectionState;
    model.removeNodeFromParent((MutableTreeNode)pathToBeRemoved.getLastPathComponent());
    return selectionState;
  }

  // True if every path has a component at `componentIndex` and all those components are equal.
  private static boolean areComponentsEqual(@NotNull final TreePath[] paths, final int componentIndex) {
    if (paths[0].getPathCount() <= componentIndex) return false;
    final Object pathComponent = paths[0].getPathComponent(componentIndex);
    for (final TreePath treePath : paths) {
      if (treePath.getPathCount() <= componentIndex) return false;
      if (!pathComponent.equals(treePath.getPathComponent(componentIndex))) return false;
    }
return true; } @NotNull private static TreePath[] removeDuplicates(@NotNull final TreePath[] paths) { final ArrayList<TreePath> result = new ArrayList<>(); for (final TreePath path : paths) { if (!result.contains(path)) result.add(path); } return result.toArray(new TreePath[result.size()]); } @NotNull public static TreePath[] selectMaximals(@Nullable final TreePath[] paths) { if (paths == null) return new TreePath[0]; final TreePath[] noDuplicates = removeDuplicates(paths); final ArrayList<TreePath> result = new ArrayList<>(); for (final TreePath path : noDuplicates) { final ArrayList<TreePath> otherPaths = new ArrayList<>(Arrays.asList(noDuplicates)); otherPaths.remove(path); if (!isDescendants(path, otherPaths.toArray(new TreePath[otherPaths.size()]))) result.add(path); } return result.toArray(new TreePath[result.size()]); } public static void sort(@NotNull final DefaultTreeModel model, @Nullable Comparator comparator) { sort((DefaultMutableTreeNode) model.getRoot(), comparator); } public static void sort(@NotNull final DefaultMutableTreeNode node, @Nullable Comparator comparator) { final List<TreeNode> children = childrenToArray(node); Collections.sort(children, comparator); node.removeAllChildren(); addChildrenTo(node, children); for (int i = 0; i < node.getChildCount(); i++) { sort((DefaultMutableTreeNode) node.getChildAt(i), comparator); } } public static void addChildrenTo(@NotNull final MutableTreeNode node, @NotNull final List<TreeNode> children) { for (final Object aChildren : children) { final MutableTreeNode child = (MutableTreeNode)aChildren; node.insert(child, node.getChildCount()); } } public static boolean traverse(@NotNull final TreeNode node, @NotNull final Traverse traverse) { final int childCount = node.getChildCount(); for (int i = 0; i < childCount; i++){ if (!traverse(node.getChildAt(i), traverse)) return false; } return traverse.accept(node); } public static boolean traverseDepth(@NotNull final TreeNode node, @NotNull final Traverse 
traverse) { if (!traverse.accept(node)) return false; final int childCount = node.getChildCount(); for (int i = 0; i < childCount; i++) if (!traverseDepth(node.getChildAt(i), traverse)) return false; return true; } @NotNull public static ActionCallback selectPath(@NotNull final JTree tree, final TreePath path) { return selectPath(tree, path, true); } @NotNull public static ActionCallback selectPath(@NotNull final JTree tree, final TreePath path, boolean center) { tree.makeVisible(path); if (center) { return showRowCentred(tree, tree.getRowForPath(path)); } else { final int row = tree.getRowForPath(path); return showAndSelect(tree, row - ScrollingUtil.ROW_PADDING, row + ScrollingUtil.ROW_PADDING, row, -1); } } @NotNull public static ActionCallback moveDown(@NotNull final JTree tree) { final int size = tree.getRowCount(); int row = tree.getLeadSelectionRow(); if (row < size - 1) { row++; return showAndSelect(tree, row, row + 2, row, getSelectedRow(tree), false, true, true); } else { return ActionCallback.DONE; } } @NotNull public static ActionCallback moveUp(@NotNull final JTree tree) { int row = tree.getLeadSelectionRow(); if (row > 0) { row--; return showAndSelect(tree, row - 2, row, row, getSelectedRow(tree), false, true, true); } else { return ActionCallback.DONE; } } @NotNull public static ActionCallback movePageUp(@NotNull final JTree tree) { final int visible = getVisibleRowCount(tree); if (visible <= 0){ return moveHome(tree); } final int decrement = visible - 1; final int row = Math.max(getSelectedRow(tree) - decrement, 0); final int top = getFirstVisibleRow(tree) - decrement; final int bottom = top + visible - 1; return showAndSelect(tree, top, bottom, row, getSelectedRow(tree)); } @NotNull public static ActionCallback movePageDown(@NotNull final JTree tree) { final int visible = getVisibleRowCount(tree); if (visible <= 0){ return moveEnd(tree); } final int size = tree.getRowCount(); final int increment = visible - 1; final int index = 
                      Math.min(getSelectedRow(tree) + increment, size - 1);
    final int top = getFirstVisibleRow(tree) + increment;
    final int bottom = top + visible - 1;
    return showAndSelect(tree, top, bottom, index, getSelectedRow(tree));
  }

  @NotNull
  private static ActionCallback moveHome(@NotNull final JTree tree) {
    return showRowCentred(tree, 0);
  }

  @NotNull
  private static ActionCallback moveEnd(@NotNull final JTree tree) {
    return showRowCentred(tree, tree.getRowCount() - 1);
  }

  @NotNull
  private static ActionCallback showRowCentred(@NotNull final JTree tree, final int row) {
    return showRowCentered(tree, row, true);
  }

  @NotNull
  public static ActionCallback showRowCentered(@NotNull final JTree tree, final int row, final boolean centerHorizontally) {
    return showRowCentered(tree, row, centerHorizontally, true);
  }

  // Computes a window of rows vertically centered on `row` and delegates to showAndSelect.
  @NotNull
  public static ActionCallback showRowCentered(@NotNull final JTree tree, final int row, final boolean centerHorizontally, boolean scroll) {
    final int visible = getVisibleRowCount(tree);
    final int top = visible > 0 ? row - (visible - 1)/ 2 : row;
    final int bottom = visible > 0 ? top + visible - 1 : row;
    return showAndSelect(tree, top, bottom, row, -1, false, scroll, false);
  }

  @NotNull
  public static ActionCallback showAndSelect(@NotNull final JTree tree, int top, int bottom, final int row, final int previous) {
    return showAndSelect(tree, top, bottom, row, previous, false);
  }

  @NotNull
  public static ActionCallback showAndSelect(@NotNull final JTree tree, int top, int bottom, final int row, final int previous, boolean addToSelection) {
    return showAndSelect(tree, top, bottom, row, previous, addToSelection, true, false);
  }

  @NotNull
  public static ActionCallback showAndSelect(@NotNull final JTree tree, int top, int bottom, final int row, final int previous, final boolean addToSelection, final boolean scroll) {
    return showAndSelect(tree, top, bottom, row, previous, addToSelection, scroll, false);
  }

  // Selects `row` and (when possible) scrolls rows [top, bottom] into view. `previous` is unused
  // here beyond the signature; scrolling is skipped in unit-test mode for non-showing trees.
  @NotNull
  public static ActionCallback showAndSelect(@NotNull final JTree tree, int top, int bottom, final int row, final int previous, final boolean addToSelection, final boolean scroll, final boolean resetSelection) {
    final TreePath path = tree.getPathForRow(row);
    if (path == null) return ActionCallback.DONE;
    final int size = tree.getRowCount();
    if (size == 0) {
      tree.clearSelection();
      return ActionCallback.DONE;
    }
    // Clamp the requested window to the actual row range.
    if (top < 0){
      top = 0;
    }
    if (bottom >= size){
      bottom = size - 1;
    }
    if (row >= tree.getRowCount()) return ActionCallback.DONE;
    boolean okToScroll = true;
    if (tree.isShowing()) {
      if (!tree.isValid()) {
        tree.validate();
      }
    } else {
      Application app = ApplicationManager.getApplication();
      if (app != null && app.isUnitTestMode()) {
        okToScroll = false;
      }
    }
    Runnable selectRunnable = () -> {
      if (!tree.isRowSelected(row)) {
        if (addToSelection) {
          tree.getSelectionModel().addSelectionPath(tree.getPathForRow(row));
        } else {
          tree.setSelectionRow(row);
        }
      } else if (resetSelection) {
        if (!addToSelection) {
          tree.setSelectionRow(row);
        }
      }
    };
    if (!okToScroll) {
      selectRunnable.run();
      return ActionCallback.DONE;
    }
    final Rectangle rowBounds = tree.getRowBounds(row);
    if (rowBounds == null) return ActionCallback.DONE;
    Rectangle topBounds = tree.getRowBounds(top);
    if (topBounds == null) {
      topBounds = rowBounds;
    }
    Rectangle bottomBounds = tree.getRowBounds(bottom);
    if (bottomBounds == null) {
      bottomBounds = rowBounds;
    }
    // Union of the requested window, constrained horizontally to the target row.
    Rectangle bounds = topBounds.union(bottomBounds);
    bounds.x = rowBounds.x;
    bounds.width = rowBounds.width;
    final Rectangle visible = tree.getVisibleRect();
    if (visible.contains(bounds)) {
      bounds = null; // already fully visible - nothing to scroll
    }
    else {
      final Component comp =
        tree.getCellRenderer().getTreeCellRendererComponent(tree, path.getLastPathComponent(), true, true, false, row, false);
      if (comp instanceof SimpleColoredComponent) {
        final SimpleColoredComponent renderer = (SimpleColoredComponent)comp;
        final Dimension scrollableSize = renderer.computePreferredSize(true);
        bounds.width = scrollableSize.width;
      }
    }
    final ActionCallback callback = new ActionCallback();
    selectRunnable.run();
    if (bounds != null) {
      // Shift the target rect so the expand control stays visible.
      final Range<Integer> range = getExpandControlRange(tree, path);
      if (range != null) {
        int delta = bounds.x - range.getFrom().intValue();
        bounds.x -= delta;
        bounds.width -= delta;
      }
      if (visible.width < bounds.width) {
        bounds.width = visible.width;
      }
      if (tree instanceof Tree && !((Tree)tree).isHorizontalAutoScrollingEnabled()) {
        bounds.x = 0;
      }
      final Rectangle b1 = bounds;
      final Runnable runnable = () -> {
        if (scroll) {
          AbstractTreeBuilder builder = AbstractTreeBuilder.getBuilderFor(tree);
          if (builder != null) {
            builder.getReady(TreeUtil.class).doWhenDone(() -> tree.scrollRectToVisible(b1));
            callback.setDone();
          } else {
            tree.scrollRectToVisible(b1);
            // Monotonic timestamp stored on the tree: a later scroll request invalidates any
            // earlier invokeLater re-scroll still in flight.
            Long ts = (Long)tree.getClientProperty(TREE_UTIL_SCROLL_TIME_STAMP);
            if (ts == null) {
              ts = 0L;
            }
            ts = ts.longValue() + 1;
            tree.putClientProperty(TREE_UTIL_SCROLL_TIME_STAMP, ts);
            final long targetValue = ts.longValue();
            SwingUtilities.invokeLater(() -> {
              Long actual = (Long)tree.getClientProperty(TREE_UTIL_SCROLL_TIME_STAMP);
              if (actual == null || targetValue < actual.longValue()) return;
              if (!tree.getVisibleRect().contains(b1)) {
                tree.scrollRectToVisible(b1);
              }
              callback.setDone();
            });
          }
        }
        callback.setDone();
      };
      runnable.run();
    }
    else {
      callback.setDone();
    }
    return callback;
  }

  // this method returns FIRST selected row but not LEAD
  private static int getSelectedRow(@NotNull final JTree tree) {
    return tree.getRowForPath(tree.getSelectionPath());
  }

  // Index of the first row lying fully inside the visible rect, or -1 if none does.
  private static int getFirstVisibleRow(@NotNull final JTree tree) {
    final Rectangle visible = tree.getVisibleRect();
    int row = -1;
    for (int i=0; i < tree.getRowCount(); i++) {
      final Rectangle bounds = tree.getRowBounds(i);
      if (visible.y <= bounds.y && visible.y + visible.height >= bounds.y + bounds.height) {
        row = i;
        break;
      }
    }
    return row;
  }

  // Number of rows lying fully inside the visible rect.
  public static int getVisibleRowCount(@NotNull final JTree tree) {
    final Rectangle visible = tree.getVisibleRect();
    if (visible == null) return 0;
    int count = 0;
    for (int i=0; i < tree.getRowCount(); i++) {
      final Rectangle bounds = tree.getRowBounds(i);
      if (bounds == null) continue;
      if (visible.y <= bounds.y && visible.y + visible.height >= bounds.y + bounds.height) {
        count++;
      }
    }
    return count;
  }

  /**
   * works correctly for trees with fixed row height only.
   * For variable height trees (e.g. trees with custom tree node renderer) use the {@link #getVisibleRowCount(JTree)} which is slower
   */
  public static int getVisibleRowCountForFixedRowHeight(@NotNull final JTree tree) {
    // myTree.getVisibleRowCount returns 20
    Rectangle bounds = tree.getRowBounds(0);
    int rowHeight = bounds == null ? 0 : bounds.height;
    return rowHeight == 0 ?
           tree.getVisibleRowCount() : tree.getVisibleRect().height / rowHeight;
  }

  // Wires page/arrow/home/end keyboard navigation to the selection-moving helpers above.
  @SuppressWarnings({"HardCodedStringLiteral"})
  public static void installActions(@NotNull final JTree tree) {
    tree.getActionMap().put("scrollUpChangeSelection", new AbstractAction() {
      @Override
      public void actionPerformed(final ActionEvent e) {
        movePageUp(tree);
      }
    });
    tree.getActionMap().put("scrollDownChangeSelection", new AbstractAction() {
      @Override
      public void actionPerformed(final ActionEvent e) {
        movePageDown(tree);
      }
    });
    tree.getActionMap().put("selectPrevious", new AbstractAction() {
      @Override
      public void actionPerformed(final ActionEvent e) {
        moveUp(tree);
      }
    });
    tree.getActionMap().put("selectNext", new AbstractAction() {
      @Override
      public void actionPerformed(final ActionEvent e) {
        moveDown(tree);
      }
    });
    copyAction(tree, "selectLast", "selectLastChangeLead");
    copyAction(tree, "selectFirst", "selectFirstChangeLead");
    InputMap inputMap = tree.getInputMap(JComponent.WHEN_FOCUSED);
    UIUtil.maybeInstall(inputMap, "scrollUpChangeSelection", KeyStroke.getKeyStroke(KeyEvent.VK_PAGE_UP, 0));
    UIUtil.maybeInstall(inputMap, "scrollDownChangeSelection", KeyStroke.getKeyStroke(KeyEvent.VK_PAGE_DOWN, 0));
    UIUtil.maybeInstall(inputMap, "selectNext", KeyStroke.getKeyStroke(KeyEvent.VK_DOWN, 0));
    UIUtil.maybeInstall(inputMap, "selectPrevious", KeyStroke.getKeyStroke(KeyEvent.VK_UP, 0));
    UIUtil.maybeInstall(inputMap, "selectLast", KeyStroke.getKeyStroke(KeyEvent.VK_END, 0));
    UIUtil.maybeInstall(inputMap, "selectFirst", KeyStroke.getKeyStroke(KeyEvent.VK_HOME, 0));
  }

  // Aliases an existing action-map entry under a second key (no-op when the original is absent).
  private static void copyAction(@NotNull final JTree tree, String original, String copyTo) {
    final Action action = tree.getActionMap().get(original);
    if (action != null) {
      tree.getActionMap().put(copyTo, action);
    }
  }

  // Collapses every row, re-expands the root, then re-selects the previous lead path truncated
  // to `keepSelectionLevel` components (negative level keeps the full path).
  public static void collapseAll(@NotNull final JTree tree, final int keepSelectionLevel) {
    final TreePath leadSelectionPath = tree.getLeadSelectionPath();
    // Collapse all
    int row = tree.getRowCount() - 1;
    while (row >= 0) {
      tree.collapseRow(row);
      row--;
    }
    final DefaultMutableTreeNode root = (DefaultMutableTreeNode)tree.getModel().getRoot();
    tree.expandPath(new TreePath(root));
    if (leadSelectionPath != null) {
      final Object[] path = leadSelectionPath.getPath();
      final Object[] pathToSelect = new Object[path.length > keepSelectionLevel && keepSelectionLevel >= 0 ? keepSelectionLevel : path.length];
      System.arraycopy(path, 0, pathToSelect, 0, pathToSelect.length);
      if (pathToSelect.length == 0) return;
      selectPath(tree, new TreePath(pathToSelect));
    }
  }

  public static void selectNode(@NotNull final JTree tree, final TreeNode node) {
    selectPath(tree, getPathFromRoot(node));
  }

  // Moves the selected node among its siblings; `direction` is typically -1 (up) or +1 (down).
  public static void moveSelectedRow(@NotNull final JTree tree, final int direction){
    final TreePath selectionPath = tree.getSelectionPath();
    final DefaultMutableTreeNode treeNode = (DefaultMutableTreeNode)selectionPath.getLastPathComponent();
    final DefaultMutableTreeNode parent = (DefaultMutableTreeNode)treeNode.getParent();
    final int idx = parent.getIndex(treeNode);
    ((DefaultTreeModel)tree.getModel()).removeNodeFromParent(treeNode);
    ((DefaultTreeModel)tree.getModel()).insertNodeInto(treeNode, parent, idx + direction);
    selectNode(tree, treeNode);
  }

  /** Snapshot of {@code node}'s direct children as a list. */
  @NotNull
  public static ArrayList<TreeNode> childrenToArray(@NotNull final TreeNode node) {
    //ApplicationManager.getApplication().assertIsDispatchThread();
    final int size = node.getChildCount();
    final ArrayList<TreeNode> result = new ArrayList<>(size);
    for(int i = 0; i < size; i++){
      TreeNode child = node.getChildAt(i);
      LOG.assertTrue(child != null);
      result.add(child);
    }
    return result;
  }

  // Expands the root, and when the root has exactly one child expands that child too
  // (runs on the EDT via invokeLaterIfNeeded).
  public static void expandRootChildIfOnlyOne(@Nullable final JTree tree) {
    if (tree == null) return;
    final Runnable runnable = () -> {
      final DefaultMutableTreeNode root = (DefaultMutableTreeNode)tree.getModel().getRoot();
      tree.expandPath(new TreePath(new Object[]{root}));
      if (root.getChildCount() == 1) {
        TreeNode firstChild = root.getFirstChild();
        tree.expandPath(new TreePath(new Object[]{root,
          firstChild}));
      }
    };
    UIUtil.invokeLaterIfNeeded(runnable);
  }

  // Repeatedly expands every row until the row count stops growing (expanding reveals new rows).
  public static void expandAll(@NotNull final JTree tree) {
    tree.expandPath(new TreePath(tree.getModel().getRoot()));
    int oldRowCount = 0;
    do {
      int rowCount = tree.getRowCount();
      if (rowCount == oldRowCount) break;
      oldRowCount = rowCount;
      for (int i = 0; i < rowCount; i++) {
        tree.expandRow(i);
      }
    }
    while (true);
  }

  /**
   * Expands n levels of the tree counting from the root
   * @param tree to expand nodes of
   * @param levels depths of the expantion
   */
  public static void expand(@NotNull JTree tree, int levels) {
    expand(tree, new TreePath(tree.getModel().getRoot()), levels);
  }

  private static void expand(@NotNull JTree tree, @NotNull TreePath path, int levels) {
    if (levels == 0) return;
    tree.expandPath(path);
    TreeNode node = (TreeNode)path.getLastPathComponent();
    Enumeration children = node.children();
    while (children.hasMoreElements()) {
      expand(tree, path.pathByAddingChild(children.nextElement()) , levels - 1);
    }
  }

  @NotNull
  public static ActionCallback selectInTree(DefaultMutableTreeNode node, boolean requestFocus, @NotNull JTree tree) {
    return selectInTree(node, requestFocus, tree, true);
  }

  /** Expands the path to {@code node}, optionally focuses the tree, then selects the node. */
  @NotNull
  public static ActionCallback selectInTree(@Nullable DefaultMutableTreeNode node, boolean requestFocus, @NotNull JTree tree, boolean center) {
    if (node == null) return ActionCallback.DONE;
    final TreePath treePath = new TreePath(node.getPath());
    tree.expandPath(treePath);
    if (requestFocus) {
      tree.requestFocus();
    }
    return selectPath(tree, treePath, center);
  }

  // Variant that requests focus through IdeFocusManager; the returned callback completes only
  // after BOTH the focus request and the selection are done (countdown of 2).
  @NotNull
  public static ActionCallback selectInTree(Project project, @Nullable DefaultMutableTreeNode node, boolean requestFocus, @NotNull JTree tree, boolean center) {
    if (node == null) return ActionCallback.DONE;
    final TreePath treePath = new TreePath(node.getPath());
    tree.expandPath(treePath);
    if (requestFocus) {
      ActionCallback result = new ActionCallback(2);
      IdeFocusManager.getInstance(project).requestFocus(tree, true).notifyWhenDone(result);
      selectPath(tree, treePath, center).notifyWhenDone(result);
      return result;
    }
    return selectPath(tree, treePath, center);
  }

  /** Returns the currently selected paths that are descendants of {@code treePath}. */
  @NotNull
  public static List<TreePath> collectSelectedPaths(@NotNull final JTree tree, @NotNull final TreePath treePath) {
    final ArrayList<TreePath> result = new ArrayList<>();
    final TreePath[] selections = tree.getSelectionPaths();
    if (selections != null) {
      for (TreePath selection : selections) {
        if (treePath.isDescendant(selection)) {
          result.add(selection);
        }
      }
    }
    return result;
  }

  // Deselects every path strictly below `node` (the node's own path stays selected).
  public static void unselect(@NotNull JTree tree, @NotNull final DefaultMutableTreeNode node) {
    final TreePath rootPath = new TreePath(node.getPath());
    final TreePath[] selectionPaths = tree.getSelectionPaths();
    if (selectionPaths != null) {
      for (TreePath selectionPath : selectionPaths) {
        if (selectionPath.getPathCount() > rootPath.getPathCount() && rootPath.isDescendant(selectionPath)) {
          tree.removeSelectionPath(selectionPath);
        }
      }
    }
  }

  // Horizontal pixel range of the expand/collapse handle for `path`, or null for leaves/null path.
  @Nullable
  public static Range<Integer> getExpandControlRange(@NotNull final JTree aTree, @Nullable final TreePath path) {
    TreeModel treeModel = aTree.getModel();
    final BasicTreeUI basicTreeUI = (BasicTreeUI)aTree.getUI();
    Icon expandedIcon = basicTreeUI.getExpandedIcon();
    Range<Integer> box = null;
    if (path != null && !treeModel.isLeaf(path.getLastPathComponent())) {
      int boxWidth;
      Insets i = aTree.getInsets();
      if (expandedIcon != null) {
        boxWidth = expandedIcon.getIconWidth();
      }
      else {
        boxWidth = 8; // fallback width when the L&F provides no expanded icon
      }
      int boxLeftX = i != null ?
i.left : 0; boolean leftToRight = aTree.getComponentOrientation().isLeftToRight(); int depthOffset = getDepthOffset(aTree); int totalChildIndent = basicTreeUI.getLeftChildIndent() + basicTreeUI.getRightChildIndent(); if (leftToRight) { boxLeftX += (path.getPathCount() + depthOffset - 2) * totalChildIndent + basicTreeUI.getLeftChildIndent() - boxWidth / 2; } int boxRightX = boxLeftX + boxWidth; box = new Range<>(boxLeftX, boxRightX); } return box; } public static int getDepthOffset(@NotNull JTree aTree) { if (aTree.isRootVisible()) { return aTree.getShowsRootHandles() ? 1 : 0; } else { return aTree.getShowsRootHandles() ? 0 : -1; } } @NotNull public static RelativePoint getPointForSelection(@NotNull JTree aTree) { final int[] rows = aTree.getSelectionRows(); if (rows == null || rows.length == 0) { return RelativePoint.getCenterOf(aTree); } return getPointForRow(aTree, rows[rows.length - 1]); } @NotNull public static RelativePoint getPointForRow(@NotNull JTree aTree, int aRow) { return getPointForPath(aTree, aTree.getPathForRow(aRow)); } @NotNull public static RelativePoint getPointForPath(@NotNull JTree aTree, TreePath path) { final Rectangle rowBounds = aTree.getPathBounds(path); rowBounds.x += 20; return getPointForBounds(aTree, rowBounds); } @NotNull public static RelativePoint getPointForBounds(JComponent aComponent, @NotNull final Rectangle aBounds) { return new RelativePoint(aComponent, new Point(aBounds.x, (int)aBounds.getMaxY())); } public static boolean isOverSelection(@NotNull final JTree tree, @NotNull final Point point) { TreePath path = tree.getPathForLocation(point.x, point.y); return path != null && tree.getSelectionModel().isPathSelected(path); } public static void dropSelectionButUnderPoint(@NotNull JTree tree, @NotNull Point treePoint) { final TreePath toRetain = tree.getPathForLocation(treePoint.x, treePoint.y); if (toRetain == null) return; TreePath[] selection = tree.getSelectionModel().getSelectionPaths(); selection = selection == null ? 
new TreePath[0] : selection; for (TreePath each : selection) { if (toRetain.equals(each)) continue; tree.getSelectionModel().removeSelectionPath(each); } } public interface Traverse{ boolean accept(Object node); } public static void ensureSelection(@NotNull JTree tree) { final TreePath[] paths = tree.getSelectionPaths(); if (paths != null) { for (TreePath each : paths) { if (tree.getRowForPath(each) >= 0 && tree.isVisible(each)) { return; } } } for (int eachRow = 0; eachRow < tree.getRowCount(); eachRow++) { TreePath eachPath = tree.getPathForRow(eachRow); if (eachPath != null && tree.isVisible(eachPath)) { tree.setSelectionPath(eachPath); break; } } } public static int indexedBinarySearch(@NotNull TreeNode parent, @NotNull TreeNode key, Comparator comparator) { int low = 0; int high = parent.getChildCount() - 1; while (low <= high) { int mid = (low + high) / 2; TreeNode treeNode = parent.getChildAt(mid); int cmp = comparator.compare(treeNode, key); if (cmp < 0) { low = mid + 1; } else if (cmp > 0) { high = mid - 1; } else { return mid; // key found } } return -(low + 1); // key not found } @NotNull public static Comparator<TreePath> getDisplayOrderComparator(@NotNull final JTree tree) { return (path1, path2) -> tree.getRowForPath(path1) - tree.getRowForPath(path2); } }
/*
 * Copyright (c) 2013, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.axis2.transport.rabbitmq;

import com.rabbitmq.client.AMQP;
import com.rabbitmq.client.Channel;
import com.rabbitmq.client.Connection;
import com.rabbitmq.client.QueueingConsumer;
import com.rabbitmq.client.ShutdownSignalException;
import org.apache.axis2.transport.base.threads.WorkerPool;
import org.apache.axis2.transport.rabbitmq.utils.RabbitMQUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Hashtable;
import java.util.List;
import java.util.Map;

/**
 * Each service will have one ServiceTaskManager instance that will create, manage and also destroy
 * idle tasks created for it, for message receipt. It uses the MessageListenerTask to poll for the
 * RabbitMQ AMQP Listening destination and consume messages. The consumed messages are built and
 * sent to the axis2 engine for processing.
 */
public class ServiceTaskManager {

    private static final Log log = LogFactory.getLog(ServiceTaskManager.class);

    // Lifecycle states shared by the manager itself and its polling tasks.
    private static final int STATE_STOPPED = 0;
    private static final int STATE_STARTED = 1;
    private static final int STATE_PAUSED = 2;
    private static final int STATE_SHUTTING_DOWN = 3;
    private static final int STATE_FAILURE = 4;

    // NOTE(review): incremented/decremented with ++/-- from worker threads; volatile gives
    // visibility but not atomicity — confirm a lost update here is acceptable.
    private volatile int activeTaskCount = 0;

    // Worker pool that executes MessageListenerTask instances; injected via setWorkerPool().
    private WorkerPool workerPool = null;
    private String serviceName;
    // Transport-level configuration (queue name, exchange, ack mode, ...) keyed by
    // RabbitMQConstants entries.
    private Hashtable<String, String> rabbitMQProperties = new Hashtable<String, String>();
    private final ConnectionFactory connectionFactory;
    // All live polling tasks; guarded by synchronizing on the list itself for iteration.
    private final List<MessageListenerTask> pollingTasks =
            Collections.synchronizedList(new ArrayList<MessageListenerTask>());
    private RabbitMQMessageReceiver rabbitMQMessageReceiver;
    private int serviceTaskManagerState = STATE_STOPPED;

    public ServiceTaskManager(ConnectionFactory connectionFactory) {
        this.connectionFactory = connectionFactory;
    }

    /**
     * Start the Task Manager by adding a new MessageListenerTask to the worker pool.
     * NOTE(review): assumes setWorkerPool() has been called first — no null check here.
     */
    public synchronized void start() {
        workerPool.execute(new MessageListenerTask());
        serviceTaskManagerState = STATE_STARTED;
    }

    /**
     * Request shutdown of every polling task and mark the manager stopped,
     * unless a failure state has already been recorded.
     */
    public synchronized void stop() {
        if (serviceTaskManagerState != STATE_FAILURE) {
            serviceTaskManagerState = STATE_SHUTTING_DOWN;
        }
        // Iterating a synchronizedList requires manual synchronization on the list.
        synchronized (pollingTasks) {
            for (MessageListenerTask lstTask : pollingTasks) {
                lstTask.requestShutdown();
            }
        }
        if (serviceTaskManagerState != STATE_FAILURE) {
            serviceTaskManagerState = STATE_STOPPED;
        }
    }

    public synchronized void pause() {
        //TODO implement me ..
    }

    public synchronized void resume() {
        //TODO implement me ..
    }

    public void setWorkerPool(WorkerPool workerPool) {
        this.workerPool = workerPool;
    }

    public void setRabbitMQMessageReceiver(RabbitMQMessageReceiver rabbitMQMessageReceiver) {
        this.rabbitMQMessageReceiver = rabbitMQMessageReceiver;
    }

    public Hashtable<String, String> getRabbitMQProperties() {
        return rabbitMQProperties;
    }

    /** Merges the given transport properties into this manager's configuration. */
    public void addRabbitMQProperties(Map<String, String> rabbitMQProperties) {
        this.rabbitMQProperties.putAll(rabbitMQProperties);
    }

    public void removeAMQPProperties(String key) {
        this.rabbitMQProperties.remove(key);
    }

    /**
     * The actual threads/tasks that perform message polling.
     * Each task owns one connection/channel pair and consumes in a transacted loop.
     */
    private class MessageListenerTask implements Runnable {

        private Connection connection = null;
        private Channel channel = null;
        // Whether the broker auto-acknowledges deliveries (RabbitMQConstants.QUEUE_AUTO_ACK).
        private boolean autoAck = false;

        private volatile int workerState = STATE_STOPPED;
        private volatile boolean idle = false;
        private volatile boolean connected = false;

        /**
         * As soon as we create a new polling task, add it to the STM for control later
         */
        MessageListenerTask() {
            synchronized (pollingTasks) {
                pollingTasks.add(this);
            }
        }

        public void pause() {
            //TODO implement me
        }

        public void resume() {
            //TODO implement me
        }

        /**
         * Execute the polling worker task: open a connection/channel, create a consumer,
         * then loop — begin a channel transaction, block for one delivery, hand it to the
         * message receiver, and commit+ack on success or roll back on failure.
         */
        public void run() {
            workerState = STATE_STARTED;
            activeTaskCount++;
            try {
                connection = getConnection();
                if (channel == null) {
                    channel = connection.createChannel();
                }
                QueueingConsumer queueingConsumer = createQueueConsumer(channel);
                while (isActive()) {
                    try {
                        // If our channel died, fall back to the channel the consumer holds.
                        if (!channel.isOpen()) {
                            channel = queueingConsumer.getChannel();
                        }
                        // txSelect is issued on every iteration; the channel stays in
                        // transactional mode once selected.
                        channel.txSelect();
                    } catch (IOException e) {
                        log.error("Error while starting transaction", e);
                        continue;
                    }

                    boolean successful = false;

                    RabbitMQMessage message = null;
                    try {
                        // Blocks until a delivery arrives (or the consumer is shut down).
                        message = getConsumerDelivery(queueingConsumer);
                    } catch (InterruptedException e) {
                        log.error("Error while consuming message", e);
                        continue;
                    }

                    if (message != null) {
                        idle = false;
                        try {
                            successful = handleMessage(message);
                        } finally {
                            if (successful) {
                                try {
                                    // NOTE(review): basicAck is sent even when autoAck was
                                    // enabled at basicConsume time — confirm this is intended,
                                    // as acking an auto-acked delivery is a channel error.
                                    channel.basicAck(message.getDeliveryTag(), false);
                                    channel.txCommit();
                                } catch (IOException e) {
                                    log.error("Error while commiting transaction", e);
                                }
                            } else {
                                try {
                                    channel.txRollback();
                                } catch (IOException e) {
                                    log.error("Error while trying to roll back transaction", e);
                                }
                            }
                        }
                    } else {
                        idle = true;
                    }
                }
            } catch (IOException e) {
                handleException("Error while reciving message from queue", e);
            } finally {
                // Always release the connection and deregister this task.
                closeConnection();
                workerState = STATE_STOPPED;
                activeTaskCount--;
                synchronized (pollingTasks) {
                    pollingTasks.remove(this);
                }
            }
        }

        /**
         * Create a queue consumer using the properties form transport listener configuration
         *
         * @return the queue consumer
         * @throws IOException on error
         */
        private QueueingConsumer createQueueConsumer(Channel channel) throws IOException {
            QueueingConsumer consumer = null;
            try {
                String queueName = rabbitMQProperties.get(RabbitMQConstants.QUEUE_NAME);
                String exchangeName = rabbitMQProperties.get(RabbitMQConstants.EXCHANGE_NAME);

                String autoAckStringValue = rabbitMQProperties.get(RabbitMQConstants.QUEUE_AUTO_ACK);
                if (autoAckStringValue != null) {
                    autoAck = Boolean.parseBoolean(autoAckStringValue);
                }

                //If no queue name is specified then service name will be used as queue name
                if (queueName == null || queueName.equals("")) {
                    queueName = serviceName;
                    log.warn("No queue name is specified for " + serviceName + ". " +
                            "Service name will be used as queue name");
                }

                // Declare the queue with durability/exclusivity/auto-delete taken from config.
                channel.queueDeclare(queueName, RabbitMQUtils.isDurableQueue(rabbitMQProperties),
                        RabbitMQUtils.isExclusiveQueue(rabbitMQProperties),
                        RabbitMQUtils.isAutoDeleteQueue(rabbitMQProperties), null);
                consumer = new QueueingConsumer(channel);

                // Optionally declare an exchange and bind the queue to it, using the queue
                // name as the routing key. Defaults: type "direct", durable true.
                if (exchangeName != null && !exchangeName.equals("")) {
                    String exchangerType = rabbitMQProperties.get(RabbitMQConstants.EXCHANGE_TYPE);
                    if (exchangerType != null) {
                        String durable = rabbitMQProperties.get(RabbitMQConstants.EXCHANGE_DURABLE);
                        if (durable != null) {
                            channel.exchangeDeclare(exchangeName, exchangerType,
                                    Boolean.parseBoolean(durable));
                        } else {
                            channel.exchangeDeclare(exchangeName, exchangerType, true);
                        }
                    } else {
                        channel.exchangeDeclare(exchangeName, "direct", true);
                    }
                    channel.queueBind(queueName, exchangeName, queueName);
                }

                String consumerTagString = rabbitMQProperties.get(RabbitMQConstants.CONSUMER_TAG);
                if (consumerTagString != null) {
                    channel.basicConsume(queueName, autoAck, consumerTagString, consumer);
                } else {
                    channel.basicConsume(queueName, autoAck, consumer);
                }
            } catch (IOException e) {
                handleException("Error while creating consumer", e);
            }
            return consumer;
        }

        /**
         * Returns the delivery from the consumer
         *
         * @param consumer the consumer to get the delivery
         * @return RabbitMQMessage consumed by the consumer, or null if the consumer shut down
         * @throws InterruptedException on error
         */
        private RabbitMQMessage getConsumerDelivery(QueueingConsumer consumer)
                throws InterruptedException {
            RabbitMQMessage message = new RabbitMQMessage();
            QueueingConsumer.Delivery delivery = null;
            try {
                delivery = consumer.nextDelivery();
            } catch (ShutdownSignalException e) {
                //ignore — the connection/channel is going down; signal "no message"
                return null;
            }
            if (delivery != null) {
                // Copy the AMQP envelope/properties onto the transport-level message object.
                AMQP.BasicProperties properties = delivery.getProperties();
                Map<String, Object> headers = properties.getHeaders();
                message.setBody(delivery.getBody());
                message.setDeliveryTag(delivery.getEnvelope().getDeliveryTag());
                message.setReplyTo(properties.getReplyTo());
                message.setMessageId(properties.getMessageId());
                message.setContentType(properties.getContentType());
                message.setContentEncoding(properties.getContentEncoding());
                message.setCorrelationId(properties.getCorrelationId());
                if (headers != null) {
                    message.setHeaders(headers);
                    if (headers.get(RabbitMQConstants.SOAP_ACTION) != null) {
                        message.setSoapAction(headers.get(
                                RabbitMQConstants.SOAP_ACTION).toString());
                    }
                }
            }
            return message;
        }

        /**
         * Invoke message receiver on received messages
         *
         * @param message the AMQP message received
         * @return true if the receiver processed the message successfully
         */
        private boolean handleMessage(RabbitMQMessage message) {
            boolean successful;
            successful = rabbitMQMessageReceiver.onMessage(message);
            return successful;
        }

        // Asks the polling loop to exit and closes the connection immediately.
        protected void requestShutdown() {
            workerState = STATE_SHUTTING_DOWN;
            closeConnection();
        }

        private boolean isActive() {
            return workerState == STATE_STARTED;
        }

        protected boolean isTaskIdle() {
            return idle;
        }

        public boolean isConnected() {
            return connected;
        }

        public void setConnected(boolean connected) {
            this.connected = connected;
        }

        // Lazily creates the connection on first use.
        // NOTE(review): 'connected' is set true here but never reset to false in
        // closeConnection() — confirm whether isConnected() is meant to reflect liveness.
        private Connection getConnection() {
            if (connection == null) {
                connection = createConnection();
                setConnected(true);
            }
            return connection;
        }

        private void closeConnection() {
            if (connection != null && connection.isOpen()) {
                try {
                    connection.close();
                } catch (IOException e) {
                    log.error("Error while closing connection ", e);
                } finally {
                    connection = null;
                }
            }
        }

        // Creates a broker connection via the transport's ConnectionFactory;
        // returns null on failure (callers must tolerate a null connection).
        private Connection createConnection() {
            Connection connection = null;
            try {
                connection = connectionFactory.createConnection();
            } catch (Exception e) {
                log.error("Error while creating AMQP Connection...", e);
            }
            return connection;
        }
    }

    public String getServiceName() {
        return serviceName;
    }

    public void setServiceName(String serviceName) {
        this.serviceName = serviceName;
    }

    // Logs the error and rethrows as the transport's runtime exception type.
    private void handleException(String msg, Exception e) {
        log.error(msg, e);
        throw new AxisRabbitMQException(msg, e);
    }
}
/*
 * The MIT License
 *
 * Copyright (c) 2009-, Sun Microsystems, Inc., CloudBees, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
package com.sun.akuma;

import com.sun.jna.Memory;
import com.sun.jna.Native;
import com.sun.jna.NativeLong;
import com.sun.jna.StringArray;
import static com.sun.akuma.CLibrary.LIBC;
import java.io.FileWriter;
import java.io.IOException;
import java.io.File;
import java.lang.reflect.Method;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * Forks a copy of the current process into the background.
 *
 * <p>
 * Because of the fork/exec involved in doing this, your code has to call Daemonizer in a certain sequence.
 * Specifically, from your main method:
 * <pre>
 * public static void main(String[] args) {
 *     Daemon d = new Daemon();
 *     if(d.isDaemonized()) {
 *         // perform initialization as a daemon
 *         // this involves in closing file descriptors, recording PIDs, etc.
 *         d.{@linkplain #init() init}();
 *     } else {
 *         // if you are already daemonized, no point in daemonizing yourself again,
 *         // so do this only when you aren't daemonizing.
 *         if(you decide to launch a copy into background) {
 *             d.daemonize(...);
 *             System.exit(0);
 *         }
 *     }
 *
 *     // your normal main code follows
 *     // this part can be executed in two ways
 *     // 1) the user runs your process in the foreground
 *     // 2) you decided to daemonize yourself, in which case the newly forked daemon will execute this code,
 *     //    while the originally executed foreground Java process exits before it gets here.
 *     ...
 * }
 * </pre>
 *
 * <p>
 * Alternatively, your main class can extend from Daemon, so that you can customize some of the behaviors.
 *
 * @author Kohsuke Kawaguchi
 */
public class Daemon {
    /**
     * Do all the necessary steps in one go.
     *
     * @param daemonize
     *      Parse the command line arguments and if the application should be
     *      daemonized, pass in true.
     */
    public void all(boolean daemonize) throws Exception {
        if(isDaemonized())
            init();
        else {
            if(daemonize) {
                daemonize();
                System.exit(0);
            }
        }
    }

    /**
     * Returns true if the current process is already launched as a daemon
     * via {@link #daemonize()}.
     * The marker is a system property set on the child JVM's command line.
     */
    public boolean isDaemonized() {
        return System.getProperty(Daemon.class.getName())!=null;
    }

    /**
     * Relaunches the JVM with the exact same arguments into the daemon.
     */
    public void daemonize() throws IOException {
        daemonize(JavaVMArguments.current());
    }

    /**
     * Relaunches the JVM with the given arguments into the daemon.
     * Forks; the child execs a fresh JVM with the daemon marker property set,
     * while the parent simply returns (the caller is expected to exit).
     */
    public void daemonize(JavaVMArguments args) {
        if(isDaemonized())
            throw new IllegalStateException("Already running as a daemon");

        if (System.getProperty("com.sun.management.jmxremote.port") != null) {
            // Stop the remote JMX agent so the child can rebind the same port
            // (works around JENKINS-14529); done reflectively since the class is internal.
            try {
                Method m = Class.forName("sun.management.Agent").getDeclaredMethod("stopRemoteManagementAgent");
                m.setAccessible(true);
                m.invoke(null);
            } catch (Exception x) {
                LOGGER.log(Level.SEVERE, "could not simulate jcmd $$ ManagementAgent.stop (JENKINS-14529)", x);
            }
        }

        // let the child process know that it's a daemon
        args.setSystemProperty(Daemon.class.getName(),"daemonized");

        // prepare for a fork
        String exe = getCurrentExecutable();
        StringArray sa = args.toStringArray();

        int i = LIBC.fork();
        if(i<0) {
            LIBC.perror("initial fork failed");
            System.exit(-1);
        }
        if(i==0) {
            // with fork, we lose all the other critical threads, to exec to Java again
            LIBC.execv(exe,sa);
            // execv only returns on failure
            System.err.println("exec failed");
            LIBC.perror("initial exec failed");
            System.exit(-1);
        }

        // parent exits
    }

    /**
     * Overwrites the current process with a new Java VM with the given JVM arguments.
     * Does not return on success (the process image is replaced).
     */
    public static void selfExec(JavaVMArguments args) {
        LIBC.execv(getCurrentExecutable(), args.toStringArray());
    }

    /**
     * Prepares the current process to act as a daemon.
     * The daemon's PID is written to the file <code>/var/run/daemon.pid</code>.
     */
    public void init() throws Exception {
        init("/var/run/daemon.pid");
    }

    /**
     * Prepares the current process to act as a daemon: starts a new session,
     * detaches stdio, and changes directory to '/'.
     *
     * @param pidFile the filename to which the daemon's PID is written;
     * or, <code>null</code> to skip writing a PID file.
     */
    @SuppressWarnings({"OctalInteger"})
    public void init(String pidFile) throws Exception {
        // start a new process session
        LIBC.setsid();

        closeDescriptors();

        chdirToRoot();
        if (pidFile != null)
            writePidFile(pidFile);
    }

    /**
     * Closes inherited file descriptors.
     *
     * <p>
     * This method can be overridden to no-op in a subtype. Useful for debugging daemon processes
     * when they don't work correctly.
     */
    protected void closeDescriptors() throws IOException {
        // The system property acts as a debugging escape hatch to keep stdio open.
        if(!Boolean.getBoolean(Daemon.class.getName()+".keepDescriptors")) {
            System.out.close();
            System.err.close();
            System.in.close();
        }

        // ideally we'd like to close all other descriptors, but that would close
        // jar files used as classpath, and break JVM.
    }

    /**
     * change directory to '/' to avoid locking directories.
     */
    protected void chdirToRoot() {
        LIBC.chdir("/");
        System.setProperty("user.dir","/");
    }

    /**
     * Writes out the PID of the current process to the specified file.
     * Failures are deliberately swallowed (e.g. when not running as root).
     *
     * @param pidFile the filename to write the PID to.
     */
    protected void writePidFile(String pidFile) throws IOException {
        try {
            FileWriter fw = new FileWriter(pidFile);
            fw.write(String.valueOf(LIBC.getpid()));
            fw.close();
        } catch (IOException e) {
            // if failed to write, keep going because maybe we are run from non-root
        }
    }

    /**
     * Gets the current executable name.
     * Tries the Linux <code>/proc/PID/exe</code> symlink first; falls back to
     * <code>$JAVA_HOME/bin/java</code> on other platforms.
     */
    public static String getCurrentExecutable() {
        int pid = LIBC.getpid();
        String name = "/proc/" + pid + "/exe";
        File exe = new File(name);
        if(exe.exists()) {
            try {
                String path = resolveSymlink(exe);
                if (path!=null)     return path;
            } catch (IOException e) {
                LOGGER.log(Level.FINE,"Failed to resolve symlink "+exe,e);
            }
            return name;
        }

        // cross-platform fallback
        return System.getProperty("java.home")+"/bin/java";
    }

    // Reads a symlink target via native readlink(2), growing the buffer
    // (512 bytes up to 64KB, doubling) until the target fits.
    private static String resolveSymlink(File link) throws IOException {
        String filename = link.getAbsolutePath();
        for (int sz=512; sz < 65536; sz*=2) {
            Memory m = new Memory(sz);
            int r = LIBC.readlink(filename,m,new NativeLong(sz));
            if (r<0) {
                int err = Native.getLastError();
                if (err==22/*EINVAL --- but is this really portable?*/)
                    return null;    // this means it's not a symlink
                throw new IOException("Failed to readlink "+link+" error="+ err+" "+ LIBC.strerror(err));
            }

            if (r==sz)  continue;   // buffer too small

            byte[] buf = new byte[r];
            m.read(0,buf,0,r);
            return new String(buf);
        }
        throw new IOException("Failed to readlink "+link);
    }

    /**
     * Flavor of {@link Daemon} that doesn't change the current directory.
     *
     * <p>
     * This turns out to be often more useful as JavaVM can take lot of arguments and system properties
     * that use paths, and when we CD they won't work.
     */
    public static class WithoutChdir extends Daemon {
        @Override
        protected void chdirToRoot() {
            // noop
        }
    }

    private static final Logger LOGGER = Logger.getLogger(Daemon.class.getName());
}
/*
 * Copyright (c) 2014-present, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the BSD-style license found in the
 * LICENSE file in the root directory of this source tree. An additional grant
 * of patent rights can be found in the PATENTS file in the same directory.
 */

package com.taobao.weex.devtools.json;

import com.taobao.weex.devtools.common.ExceptionUtil;
import com.taobao.weex.devtools.json.annotation.JsonProperty;
import com.taobao.weex.devtools.json.annotation.JsonValue;

import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

import java.lang.annotation.Annotation;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.IdentityHashMap;
import java.util.List;
import java.util.Map;

import javax.annotation.Nullable;
import javax.annotation.concurrent.GuardedBy;

/**
 * This class is a lightweight version of Jackson's ObjectMapper. It is designed to have a minimal
 * subset of the functionality required for stetho.
 * <p>
 * It would be awesome if there were a lightweight library that supported converting between
 * arbitrary {@link Object} and {@link JSONObject} representations.
 * <p>
 * Admittedly the other approach would be to use an Annotation Processor to create static conversion
 * functions that discover something like a {@link JsonProperty} and create a function at compile
 * time however since this is just being used for a simple debug utility and Kit-Kat caches the
 * results of reflection this class is sufficient for stethos needs.
 */
public class ObjectMapper {
  /**
   * Cache of the first {@link JsonValue}-annotated method per class (value may be null,
   * hence containsKey is used to distinguish "not cached" from "none found").
   */
  @GuardedBy("mJsonValueMethodCache")
  private final Map<Class<?>, Method> mJsonValueMethodCache = new IdentityHashMap<>();

  /**
   * Support mapping between arbitrary classes and {@link JSONObject}.
   * <note>
   *   It is possible for a {@link Throwable} to be propagated out of this class if there is an
   *   {@link InvocationTargetException}.
   * </note>
   * @param fromValue the value to convert (returned as-is when already assignable to toValueType)
   * @param toValueType the target class
   * @param <T> target type
   * @return the converted value, or null if fromValue is null
   * @throws IllegalArgumentException when there is an error converting. One of either
   *     {@code fromValue.getClass()} or {@code toValueType} must be {@link JSONObject}.
   */
  public <T> T convertValue(Object fromValue, Class<T> toValueType)
      throws IllegalArgumentException {
    if (fromValue == null) {
      return null;
    }

    // Fast path: no conversion needed if the value is already of the requested type.
    if (toValueType != Object.class &&
        toValueType.isAssignableFrom(fromValue.getClass())) {
      return (T) fromValue;
    }

    try {
      if (fromValue instanceof JSONObject) {
        return _convertFromJSONObject((JSONObject) fromValue, toValueType);
      } else if (toValueType == JSONObject.class) {
        return (T) _convertToJSONObject(fromValue);
      } else {
        throw new IllegalArgumentException(
            "Expecting either fromValue or toValueType to be a JSONObject");
      }
    } catch (NoSuchMethodException e) {
      throw new IllegalArgumentException(e);
    } catch (IllegalAccessException e) {
      throw new IllegalArgumentException(e);
    } catch (InstantiationException e) {
      throw new IllegalArgumentException(e);
    } catch (JSONException e) {
      throw new IllegalArgumentException(e);
    } catch (InvocationTargetException e) {
      // Unwrap and rethrow whatever the reflective call actually threw.
      throw ExceptionUtil.propagate(e.getCause());
    }
  }

  /**
   * Deserializes a {@link JSONObject} into a new instance of {@code type} by
   * matching JSON keys to the type's public field names.
   */
  private <T> T _convertFromJSONObject(JSONObject jsonObject, Class<T> type)
      throws
      NoSuchMethodException,
      IllegalAccessException,
      InvocationTargetException,
      InstantiationException,
      JSONException {
    // Requires a no-arg constructor (made accessible even if non-public).
    Constructor<T> constructor = type.getDeclaredConstructor((Class[]) null);
    constructor.setAccessible(true);
    T instance = constructor.newInstance();
    Field[] fields = type.getFields();
    for (int i = 0; i < fields.length; ++i) {
      Field field = fields[i];
      Object value = jsonObject.opt(field.getName());
      Object setValue = getValueForField(field, value);
      try {
        field.set(instance, setValue);
      } catch (IllegalArgumentException e) {
        // NOTE(review): setValue can be null here (e.g. JSONObject.NULL input), which
        // would make setValue.getClass() throw NPE instead of this message — verify.
        throw new IllegalArgumentException(
            "Class: " + type.getSimpleName() + " " +
            "Field: " + field.getName() + " type " + setValue.getClass().getName(),
            e);
      }
    }
    return instance;
  }

  /**
   * Coerces a raw JSON value to the declared type of {@code field}: handles the
   * JSON null sentinel, nested objects, enums, lists, and numeric widening/narrowing.
   */
  private Object getValueForField(Field field, Object value) throws JSONException {
    try {
      if (value != null) {
        if (value == JSONObject.NULL) {
          return null;
        }
        if (value.getClass() == field.getType()) {
          return value;
        }
        if (value instanceof JSONObject) {
          return convertValue(value, field.getType());
        } else {
          if (field.getType().isEnum()) {
            return getEnumValue((String) value, field.getType().asSubclass(Enum.class));
          } else if (value instanceof JSONArray) {
            return convertArrayToList(field, (JSONArray) value);
          } else if (value instanceof Number) {
            // Need to convert value to Number This happens because json treats 1 as an Integer even
            // if the field is supposed to be a Long
            Number numberValue = (Number) value;
            Class<?> clazz = field.getType();
            if (clazz == Integer.class || clazz == int.class) {
              return numberValue.intValue();
            } else if (clazz == Long.class || clazz == long.class) {
              return numberValue.longValue();
            } else if (clazz == Double.class || clazz == double.class) {
              return numberValue.doubleValue();
            } else if (clazz == Float.class || clazz == float.class) {
              return numberValue.floatValue();
            } else if (clazz == Byte.class || clazz == byte.class) {
              return numberValue.byteValue();
            } else if (clazz == Short.class || clazz == short.class) {
              return numberValue.shortValue();
            } else {
              throw new IllegalArgumentException("Not setup to handle class " + clazz.getName());
            }
          }
        }
      }
    } catch (IllegalAccessException e) {
      throw new IllegalArgumentException("Unable to set value for field " + field.getName(), e);
    }
    // Fall through: hand back the value unchanged (includes the null case).
    return value;
  }

  /**
   * Resolves an enum constant from its string form, honoring a {@link JsonValue}
   * method on the enum class when one exists, otherwise plain {@link Enum#valueOf}.
   */
  private Enum getEnumValue(String value, Class<? extends Enum> clazz) {
    Method method = getJsonValueMethod(clazz);
    if (method != null) {
      return getEnumByMethod(value, clazz, method);
    } else {
      return Enum.valueOf(clazz, value);
    }
  }

  /**
   * In this case we know that there is an {@link Enum} decorated with {@link JsonValue}. This means
   * that we need to iterate through all of the values of the {@link Enum} returned by the given
   * {@link Method} to check the given value.
   * @param value the serialized form to look up
   * @param clazz the enum class
   * @param method the {@link JsonValue}-annotated method producing each constant's serialized form
   * @return the matching enum constant
   */
  private Enum getEnumByMethod(String value, Class<? extends Enum> clazz, Method method) {
    Enum[] enumValues = clazz.getEnumConstants();

    // Start at the front to ensure first always wins
    for (int i = 0; i < enumValues.length; ++i) {
      Enum enumValue = enumValues[i];
      try {
        Object o = method.invoke(enumValue);
        if (o != null) {
          if (o.toString().equals(value)) {
            return enumValue;
          }
        }
      } catch (Exception ex) {
        throw new IllegalArgumentException(ex);
      }
    }

    throw new IllegalArgumentException("No enum constant " + clazz.getName() + "." + value);
  }

  /**
   * Converts a {@link JSONArray} into a {@code List} whose element type is taken
   * from the field's single generic type argument.
   */
  private List<Object> convertArrayToList(Field field, JSONArray array)
      throws IllegalAccessException, JSONException {
    if (List.class.isAssignableFrom(field.getType())) {
      ParameterizedType parameterizedType = (ParameterizedType) field.getGenericType();
      Type[] types = parameterizedType.getActualTypeArguments();
      if (types.length != 1) {
        throw new IllegalArgumentException("Only able to handle a single type in a list " +
            field.getName());
      }
      Class arrayClass = (Class)types[0];

      List<Object> objectList = new ArrayList<Object>();
      for (int i = 0; i < array.length(); ++i) {
        if (arrayClass.isEnum()) {
          objectList.add(getEnumValue(array.getString(i), arrayClass));
        } else if (canDirectlySerializeClass(arrayClass)) {
          objectList.add(array.get(i));
        } else {
          // Element is itself an object: recurse through convertValue.
          JSONObject jsonObject = array.getJSONObject(i);
          if (jsonObject == null) {
            objectList.add(null);
          } else {
            objectList.add(convertValue(jsonObject, arrayClass));
          }
        }
      }
      return objectList;
    } else {
      throw new IllegalArgumentException("only know how to deserialize List<?> on field "
          + field.getName());
    }
  }

  /**
   * Serializes an object into a {@link JSONObject}, emitting only public fields
   * annotated with {@link JsonProperty}.
   */
  private JSONObject _convertToJSONObject(Object fromValue)
      throws JSONException, InvocationTargetException, IllegalAccessException {
    JSONObject jsonObject = new JSONObject();
    Field[] fields = fromValue.getClass().getFields();
    for (int i = 0; i < fields.length; ++i) {
      JsonProperty property = fields[i].getAnnotation(JsonProperty.class);
      if (property != null) {
        // AutoBox here ...
        Object value = fields[i].get(fromValue);
        // Prefer the runtime class (handles fields declared as a supertype).
        Class clazz = fields[i].getType();
        if (value != null) {
          clazz = value.getClass();
        }
        String name = fields[i].getName();
        if (property.required() && value == null) {
          // Required-but-absent fields are emitted as explicit JSON null.
          value = JSONObject.NULL;
        } else if (value == JSONObject.NULL) {
          // Leave it as null in this case.
        } else {
          value = getJsonValue(value, clazz, fields[i]);
        }
        jsonObject.put(name, value);
      }
    }
    return jsonObject;
  }

  /**
   * Converts a single field value into its JSON representation: lists become
   * {@link JSONArray}s, {@link JsonValue} methods are honored, nested objects
   * recurse, and non-finite floats are emitted as strings.
   */
  private Object getJsonValue(Object value, Class<?> clazz, Field field)
      throws InvocationTargetException, IllegalAccessException {
    if (value == null) {
      // Now technically we /could/ return JsonNode.NULL here but Chrome's webkit inspector croaks
      // if you pass a null "id"
      return null;
    }
    if (List.class.isAssignableFrom(clazz)) {
      return convertListToJsonArray(value);
    }
    // Finally check to see if there is a JsonValue present
    Method m = getJsonValueMethod(clazz);
    if (m != null) {
      return m.invoke(value);
    }
    if (!canDirectlySerializeClass(clazz)) {
      return convertValue(value, JSONObject.class);
    }
    // JSON has no support for NaN, Infinity or -Infinity, so we serialize
    // then as strings. Google Chrome's inspector will accept them just fine.
    if (clazz.equals(Double.class) || clazz.equals(Float.class)) {
      double doubleValue = ((Number) value).doubleValue();
      if (Double.isNaN(doubleValue)) {
        return "NaN";
      } else if (doubleValue == Double.POSITIVE_INFINITY) {
        return "Infinity";
      } else if (doubleValue == Double.NEGATIVE_INFINITY) {
        return "-Infinity";
      }
    }

    // hmm we should be able to directly serialize here...
    return value;
  }

  /** Serializes a {@code List} into a {@link JSONArray}, converting each element. */
  public JSONArray convertListToJsonArray(Object value)
      throws InvocationTargetException, IllegalAccessException {
    JSONArray array = new JSONArray();
    List<Object> list = (List<Object>) value;
    for(Object obj : list) {
      // Send null, if this is an array of arrays we are screwed
      array.put(obj != null
          ? getJsonValue(obj, obj.getClass(), null /* field */)
          : null);
    }
    return array;
  }

  /**
   * Looks up (and caches) the {@link JsonValue}-annotated method for a class.
   *
   * @param clazz the class to inspect
   * @return the first method annotated with {@link JsonValue} or null if one does not exist.
   */
  @Nullable
  private Method getJsonValueMethod(Class<?> clazz) {
    synchronized (mJsonValueMethodCache) {
      Method method = mJsonValueMethodCache.get(clazz);
      // containsKey distinguishes a cached "no method" (null value) from a cache miss.
      if (method == null && !mJsonValueMethodCache.containsKey(clazz)) {
        method = getJsonValueMethodImpl(clazz);
        mJsonValueMethodCache.put(clazz, method);
      }
      return method;
    }
  }

  // Uncached scan of all public methods for the @JsonValue annotation.
  @Nullable
  private static Method getJsonValueMethodImpl(Class<?> clazz) {
    Method[] methods = clazz.getMethods();
    for(int i = 0; i < methods.length; ++i) {
      Annotation jsonValue = methods[i].getAnnotation(JsonValue.class);
      if (jsonValue != null) {
        return methods[i];
      }
    }
    return null;
  }

  // True for types json.org can store directly without recursion.
  private static boolean canDirectlySerializeClass(Class clazz) {
    return isWrapperOrPrimitiveType(clazz) ||
        clazz.equals(String.class);
  }

  // Primitive types and their boxed wrappers.
  private static boolean isWrapperOrPrimitiveType(Class<?> clazz) {
    return clazz.isPrimitive() ||
        clazz.equals(Boolean.class) ||
        clazz.equals(Integer.class) ||
        clazz.equals(Character.class) ||
        clazz.equals(Byte.class) ||
        clazz.equals(Short.class) ||
        clazz.equals(Double.class) ||
        clazz.equals(Long.class) ||
        clazz.equals(Float.class);
  }
}
/** */
package com.specmate.model.testspecification.impl;

import com.specmate.model.base.BasePackage;
import com.specmate.model.base.IDescribed;
import com.specmate.model.base.INamed;
import com.specmate.model.testspecification.ParameterAssignment;
import com.specmate.model.testspecification.ParameterType;
import com.specmate.model.testspecification.TestParameter;
import com.specmate.model.testspecification.TestspecificationPackage;
import java.util.Collection;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.util.InternalEList;
import org.eclipse.emf.internal.cdo.CDOObjectImpl;

/**
 * <!-- begin-user-doc -->
 * An implementation of the model object '<em><b>Test Parameter</b></em>'.
 * <!-- end-user-doc -->
 * <p>
 * The following features are implemented:
 * </p>
 * <ul>
 *   <li>{@link com.specmate.model.testspecification.impl.TestParameterImpl#getId <em>Id</em>}</li>
 *   <li>{@link com.specmate.model.testspecification.impl.TestParameterImpl#getName <em>Name</em>}</li>
 *   <li>{@link com.specmate.model.testspecification.impl.TestParameterImpl#getDescription <em>Description</em>}</li>
 *   <li>{@link com.specmate.model.testspecification.impl.TestParameterImpl#getType <em>Type</em>}</li>
 *   <li>{@link com.specmate.model.testspecification.impl.TestParameterImpl#getAssignments <em>Assignments</em>}</li>
 * </ul>
 *
 * @generated
 */
// NOTE(review): EMF/CDO-generated class. Do not hand-edit members carrying an
// @generated tag — the EMF code generator will overwrite such changes on the
// next regeneration of the model. All feature state is kept in the CDO store
// (see eStaticFeatureCount() == 0), so accessors delegate to eDynamicGet/Set.
public class TestParameterImpl extends CDOObjectImpl implements TestParameter {
	/**
	 * The default value of the '{@link #getId() <em>Id</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getId()
	 * @generated
	 * @ordered
	 */
	protected static final String ID_EDEFAULT = null;

	/**
	 * The default value of the '{@link #getName() <em>Name</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getName()
	 * @generated
	 * @ordered
	 */
	protected static final String NAME_EDEFAULT = null;

	/**
	 * The default value of the '{@link #getDescription() <em>Description</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getDescription()
	 * @generated
	 * @ordered
	 */
	protected static final String DESCRIPTION_EDEFAULT = null;

	/**
	 * The default value of the '{@link #getType() <em>Type</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getType()
	 * @generated
	 * @ordered
	 */
	protected static final ParameterType TYPE_EDEFAULT = ParameterType.INPUT;

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected TestParameterImpl() {
		super();
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	protected EClass eStaticClass() {
		return TestspecificationPackage.Literals.TEST_PARAMETER;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	// Zero static features: every feature is resolved dynamically via the CDO object store.
	@Override
	protected int eStaticFeatureCount() {
		return 0;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public String getId() {
		return (String)eDynamicGet(TestspecificationPackage.TEST_PARAMETER__ID, BasePackage.Literals.IID__ID, true, true);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setId(String newId) {
		eDynamicSet(TestspecificationPackage.TEST_PARAMETER__ID, BasePackage.Literals.IID__ID, newId);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public String getName() {
		return (String)eDynamicGet(TestspecificationPackage.TEST_PARAMETER__NAME, BasePackage.Literals.INAMED__NAME, true, true);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setName(String newName) {
		eDynamicSet(TestspecificationPackage.TEST_PARAMETER__NAME, BasePackage.Literals.INAMED__NAME, newName);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public String getDescription() {
		return (String)eDynamicGet(TestspecificationPackage.TEST_PARAMETER__DESCRIPTION, BasePackage.Literals.IDESCRIBED__DESCRIPTION, true, true);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setDescription(String newDescription) {
		eDynamicSet(TestspecificationPackage.TEST_PARAMETER__DESCRIPTION, BasePackage.Literals.IDESCRIBED__DESCRIPTION, newDescription);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public ParameterType getType() {
		return (ParameterType)eDynamicGet(TestspecificationPackage.TEST_PARAMETER__TYPE, TestspecificationPackage.Literals.TEST_PARAMETER__TYPE, true, true);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setType(ParameterType newType) {
		eDynamicSet(TestspecificationPackage.TEST_PARAMETER__TYPE, TestspecificationPackage.Literals.TEST_PARAMETER__TYPE, newType);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@SuppressWarnings("unchecked")
	public EList<ParameterAssignment> getAssignments() {
		return (EList<ParameterAssignment>)eDynamicGet(TestspecificationPackage.TEST_PARAMETER__ASSIGNMENTS, TestspecificationPackage.Literals.TEST_PARAMETER__ASSIGNMENTS, true, true);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	// Maintains the inverse side of the bidirectional 'assignments' reference.
	@SuppressWarnings("unchecked")
	@Override
	public NotificationChain eInverseAdd(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
		switch (featureID) {
		case TestspecificationPackage.TEST_PARAMETER__ASSIGNMENTS:
			return ((InternalEList<InternalEObject>)(InternalEList<?>)getAssignments()).basicAdd(otherEnd, msgs);
		}
		return super.eInverseAdd(otherEnd, featureID, msgs);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
		switch (featureID) {
		case TestspecificationPackage.TEST_PARAMETER__ASSIGNMENTS:
			return ((InternalEList<?>)getAssignments()).basicRemove(otherEnd, msgs);
		}
		return super.eInverseRemove(otherEnd, featureID, msgs);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	// Reflective feature access: dispatches the numeric feature ID to the typed getter.
	@Override
	public Object eGet(int featureID, boolean resolve, boolean coreType) {
		switch (featureID) {
		case TestspecificationPackage.TEST_PARAMETER__ID:
			return getId();
		case TestspecificationPackage.TEST_PARAMETER__NAME:
			return getName();
		case TestspecificationPackage.TEST_PARAMETER__DESCRIPTION:
			return getDescription();
		case TestspecificationPackage.TEST_PARAMETER__TYPE:
			return getType();
		case TestspecificationPackage.TEST_PARAMETER__ASSIGNMENTS:
			return getAssignments();
		}
		return super.eGet(featureID, resolve, coreType);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	// Reflective feature mutation; list features are replaced wholesale (clear + addAll).
	@SuppressWarnings("unchecked")
	@Override
	public void eSet(int featureID, Object newValue) {
		switch (featureID) {
		case TestspecificationPackage.TEST_PARAMETER__ID:
			setId((String)newValue);
			return;
		case TestspecificationPackage.TEST_PARAMETER__NAME:
			setName((String)newValue);
			return;
		case TestspecificationPackage.TEST_PARAMETER__DESCRIPTION:
			setDescription((String)newValue);
			return;
		case TestspecificationPackage.TEST_PARAMETER__TYPE:
			setType((ParameterType)newValue);
			return;
		case TestspecificationPackage.TEST_PARAMETER__ASSIGNMENTS:
			getAssignments().clear();
			getAssignments().addAll((Collection<? extends ParameterAssignment>)newValue);
			return;
		}
		super.eSet(featureID, newValue);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	// Resets each feature to its generated default (…_EDEFAULT; lists are emptied).
	@Override
	public void eUnset(int featureID) {
		switch (featureID) {
		case TestspecificationPackage.TEST_PARAMETER__ID:
			setId(ID_EDEFAULT);
			return;
		case TestspecificationPackage.TEST_PARAMETER__NAME:
			setName(NAME_EDEFAULT);
			return;
		case TestspecificationPackage.TEST_PARAMETER__DESCRIPTION:
			setDescription(DESCRIPTION_EDEFAULT);
			return;
		case TestspecificationPackage.TEST_PARAMETER__TYPE:
			setType(TYPE_EDEFAULT);
			return;
		case TestspecificationPackage.TEST_PARAMETER__ASSIGNMENTS:
			getAssignments().clear();
			return;
		}
		super.eUnset(featureID);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	// A feature is "set" when its current value differs from its generated default.
	@Override
	public boolean eIsSet(int featureID) {
		switch (featureID) {
		case TestspecificationPackage.TEST_PARAMETER__ID:
			return ID_EDEFAULT == null ? getId() != null : !ID_EDEFAULT.equals(getId());
		case TestspecificationPackage.TEST_PARAMETER__NAME:
			return NAME_EDEFAULT == null ? getName() != null : !NAME_EDEFAULT.equals(getName());
		case TestspecificationPackage.TEST_PARAMETER__DESCRIPTION:
			return DESCRIPTION_EDEFAULT == null ? getDescription() != null : !DESCRIPTION_EDEFAULT.equals(getDescription());
		case TestspecificationPackage.TEST_PARAMETER__TYPE:
			return getType() != TYPE_EDEFAULT;
		case TestspecificationPackage.TEST_PARAMETER__ASSIGNMENTS:
			return !getAssignments().isEmpty();
		}
		return super.eIsSet(featureID);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	// Maps this class's feature IDs back onto the mixin interfaces INamed/IDescribed.
	@Override
	public int eBaseStructuralFeatureID(int derivedFeatureID, Class<?> baseClass) {
		if (baseClass == INamed.class) {
			switch (derivedFeatureID) {
			case TestspecificationPackage.TEST_PARAMETER__NAME:
				return BasePackage.INAMED__NAME;
			default:
				return -1;
			}
		}
		if (baseClass == IDescribed.class) {
			switch (derivedFeatureID) {
			case TestspecificationPackage.TEST_PARAMETER__DESCRIPTION:
				return BasePackage.IDESCRIBED__DESCRIPTION;
			default:
				return -1;
			}
		}
		return super.eBaseStructuralFeatureID(derivedFeatureID, baseClass);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	// Inverse of eBaseStructuralFeatureID: mixin-interface feature IDs -> this class's IDs.
	@Override
	public int eDerivedStructuralFeatureID(int baseFeatureID, Class<?> baseClass) {
		if (baseClass == INamed.class) {
			switch (baseFeatureID) {
			case BasePackage.INAMED__NAME:
				return TestspecificationPackage.TEST_PARAMETER__NAME;
			default:
				return -1;
			}
		}
		if (baseClass == IDescribed.class) {
			switch (baseFeatureID) {
			case BasePackage.IDESCRIBED__DESCRIPTION:
				return TestspecificationPackage.TEST_PARAMETER__DESCRIPTION;
			default:
				return -1;
			}
		}
		return super.eDerivedStructuralFeatureID(baseFeatureID, baseClass);
	}

} //TestParameterImpl
/* * (c) Copyright 2018 Palantir Technologies Inc. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.palantir.atlasdb.transaction.impl; import com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSortedMap; import com.google.common.collect.Iterables; import com.google.common.collect.Maps; import com.google.common.primitives.UnsignedBytes; import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.MoreExecutors; import com.palantir.atlasdb.encoding.PtBytes; import com.palantir.atlasdb.keyvalue.api.Cell; import com.palantir.atlasdb.keyvalue.api.ColumnSelection; import com.palantir.atlasdb.keyvalue.api.RowResult; import com.palantir.atlasdb.keyvalue.api.TableReference; import com.palantir.atlasdb.keyvalue.impl.Cells; import com.palantir.atlasdb.transaction.api.Transaction; import com.palantir.atlasdb.transaction.api.TransactionFailedException; import com.palantir.atlasdb.transaction.service.TransactionService; import com.palantir.common.base.Throwables; import com.palantir.util.Pair; import java.util.Collection; import java.util.HashSet; import java.util.Map; import java.util.NavigableMap; import java.util.Set; import java.util.SortedMap; import java.util.concurrent.ExecutionException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public 
class CachingTransaction extends ForwardingTransaction { private static final Logger log = LoggerFactory.getLogger(CachingTransaction.class); private static final long DEFAULT_MAX_CACHED_CELLS = 10_000_000; private final Transaction delegate; private final Cache<Pair<String, Cell>, byte[]> cellCache; public CachingTransaction(Transaction delegate) { this(delegate, DEFAULT_MAX_CACHED_CELLS); } public CachingTransaction(Transaction delegate, long maxCachedCells) { this.delegate = delegate; cellCache = CacheBuilder.newBuilder() .maximumSize(maxCachedCells) .softValues() .recordStats() .build(); } @Override public Transaction delegate() { return delegate; } @Override public NavigableMap<byte[], RowResult<byte[]>> getRows( TableReference tableRef, Iterable<byte[]> rows, ColumnSelection columnSelection) { if (Iterables.isEmpty(rows)) { return AbstractTransaction.EMPTY_SORTED_ROWS; } if (columnSelection.allColumnsSelected()) { NavigableMap<byte[], RowResult<byte[]>> loaded = super.getRows(tableRef, rows, columnSelection); cacheLoadedRows(tableRef, loaded.values()); return loaded; } else { Set<byte[]> toLoad = new HashSet<>(); ImmutableSortedMap.Builder<byte[], RowResult<byte[]>> inCache = ImmutableSortedMap.orderedBy(UnsignedBytes.lexicographicalComparator()); for (byte[] row : rows) { ImmutableSortedMap.Builder<byte[], byte[]> matches = ImmutableSortedMap.orderedBy(UnsignedBytes.lexicographicalComparator()); boolean nonEmpty = false; boolean shouldLoad = false; for (byte[] col : columnSelection.getSelectedColumns()) { byte[] val = getCachedCellIfPresent(tableRef, Cell.create(row, col)); if (val == null) { shouldLoad = true; break; } else if (val.length != 0) { matches.put(col, val); nonEmpty = true; } } if (shouldLoad) { toLoad.add(row); } else if (nonEmpty) { inCache.put(row, RowResult.create(row, matches.build())); } } SortedMap<byte[], RowResult<byte[]>> results = super.getRows(tableRef, toLoad, columnSelection); cacheLoadedRows(tableRef, toLoad, 
columnSelection.getSelectedColumns(), results); inCache.putAll(results); return inCache.build(); } } @Override public Map<Cell, byte[]> get(TableReference tableRef, Set<Cell> cells) { try { return getWithLoader( tableRef, cells, (tableReference, toRead) -> Futures.immediateFuture(super.get(tableReference, toRead))) .get(); } catch (InterruptedException | ExecutionException e) { throw Throwables.rewrapAndThrowUncheckedException(e.getCause()); } } @Override public ListenableFuture<Map<Cell, byte[]>> getAsync(TableReference tableRef, Set<Cell> cells) { return getWithLoader(tableRef, cells, super::getAsync); } private ListenableFuture<Map<Cell, byte[]>> getWithLoader( TableReference tableRef, Set<Cell> cells, CellLoader cellLoader) { if (cells.isEmpty()) { return Futures.immediateFuture(ImmutableMap.of()); } Set<Cell> toLoad = new HashSet<>(); Map<Cell, byte[]> cacheHit = Maps.newHashMapWithExpectedSize(cells.size()); for (Cell cell : cells) { byte[] val = getCachedCellIfPresent(tableRef, cell); if (val != null) { if (val.length > 0) { cacheHit.put(cell, val); } } else { toLoad.add(cell); } } return Futures.transform( cellLoader.load(tableRef, toLoad), loadedCells -> { cacheLoadedCells(tableRef, toLoad, loadedCells); cacheHit.putAll(loadedCells); return cacheHit; }, MoreExecutors.directExecutor()); } @Override public final void delete(TableReference tableRef, Set<Cell> cells) { super.delete(tableRef, cells); addToCache(tableRef, Cells.constantValueMap(cells, PtBytes.EMPTY_BYTE_ARRAY)); } @Override public void put(TableReference tableRef, Map<Cell, byte[]> values) { super.put(tableRef, values); addToCache(tableRef, values); } private void addToCache(TableReference tableRef, Map<Cell, byte[]> values) { for (Map.Entry<Cell, byte[]> e : values.entrySet()) { byte[] value = e.getValue(); if (value == null) { value = PtBytes.EMPTY_BYTE_ARRAY; } cacheLoadedCell(tableRef, e.getKey(), value); } } private void cacheLoadedRows(TableReference tableRef, Iterable<RowResult<byte[]>> 
rowView) { for (RowResult<byte[]> loadedRow : rowView) { for (Map.Entry<Cell, byte[]> e : loadedRow.getCells()) { cacheLoadedCell(tableRef, e.getKey(), e.getValue()); } } } private void cacheLoadedRows( TableReference tableRef, Iterable<byte[]> toLoad, Collection<byte[]> columnNames, SortedMap<byte[], RowResult<byte[]>> toCache) { for (byte[] row : toLoad) { SortedMap<byte[], byte[]> columnValues = toCache.get(row) != null ? toCache.get(row).getColumns() : ImmutableSortedMap.of(); for (byte[] columnName : columnNames) { byte[] value = columnValues.get(columnName); if (value == null) { value = PtBytes.EMPTY_BYTE_ARRAY; } cacheLoadedCell(tableRef, Cell.create(row, columnName), value); } } } private void cacheLoadedCells(TableReference tableRef, Set<Cell> toLoad, Map<Cell, byte[]> toCache) { for (Cell key : toLoad) { byte[] value = toCache.get(key); if (value == null) { value = PtBytes.EMPTY_BYTE_ARRAY; } cacheLoadedCell(tableRef, key, value); } } private byte[] getCachedCellIfPresent(TableReference tableRef, Cell cell) { return cellCache.getIfPresent(Pair.create(tableRef.getQualifiedName(), cell)); } private void cacheLoadedCell(TableReference tableRef, Cell cell, byte[] value) { cellCache.put(Pair.create(tableRef.getQualifiedName(), cell), value); } // Log cache stats on commit or abort. 
// Note we check for logging enabled because actually getting stats is not necessarily trivial // (it must aggregate stats from all cache segments) @Override public void commit() throws TransactionFailedException { try { super.commit(); } finally { if (log.isDebugEnabled()) { log.debug("CachingTransaction cache stats on commit: {}", cellCache.stats()); } } } @Override public void commit(TransactionService txService) throws TransactionFailedException { try { super.commit(txService); } finally { if (log.isDebugEnabled()) { log.debug("CachingTransaction cache stats on commit(txService): {}", cellCache.stats()); } } } @Override public void abort() { try { super.abort(); } finally { if (log.isDebugEnabled()) { log.debug("CachingTransaction cache stats on abort: {}", cellCache.stats()); } } } @FunctionalInterface private interface CellLoader { ListenableFuture<Map<Cell, byte[]>> load(TableReference tableReference, Set<Cell> toRead); } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.yarn.cli; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLineParser; import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.Option; import org.apache.commons.cli.Options; import org.apache.commons.cli.PosixParser; import org.apache.commons.lang3.StringUtils; import org.apache.flink.client.cli.CliFrontendParser; import org.apache.flink.client.cli.CustomCommandLine; import org.apache.flink.configuration.ConfigConstants; import org.apache.flink.configuration.Configuration; import org.apache.flink.configuration.GlobalConfiguration; import org.apache.flink.configuration.HighAvailabilityOptions; import org.apache.flink.configuration.IllegalConfigurationException; import org.apache.flink.runtime.clusterframework.ApplicationStatus; import org.apache.flink.runtime.clusterframework.messages.GetClusterStatusResponse; import org.apache.flink.runtime.security.SecurityUtils; import org.apache.flink.util.Preconditions; import org.apache.flink.yarn.AbstractYarnClusterDescriptor; import org.apache.flink.yarn.YarnClusterClient; import org.apache.flink.yarn.YarnClusterDescriptor; import org.apache.hadoop.fs.Path; import 
org.apache.hadoop.yarn.util.ConverterUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStream; import java.io.UnsupportedEncodingException; import java.net.URL; import java.net.URLDecoder; import java.nio.charset.Charset; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.concurrent.Callable; import static org.apache.flink.client.cli.CliFrontendParser.ADDRESS_OPTION; import static org.apache.flink.configuration.ConfigConstants.HA_ZOOKEEPER_NAMESPACE_KEY; /** * Class handling the command line interface to the YARN session. */ public class FlinkYarnSessionCli implements CustomCommandLine<YarnClusterClient> { private static final Logger LOG = LoggerFactory.getLogger(FlinkYarnSessionCli.class); //------------------------------------ Constants ------------------------- public static final String CONFIG_FILE_LOGBACK_NAME = "logback.xml"; public static final String CONFIG_FILE_LOG4J_NAME = "log4j.properties"; private static final int CLIENT_POLLING_INTERVALL = 3; /** The id for the CommandLine interface */ private static final String ID = "yarn-cluster"; // YARN-session related constants private static final String YARN_PROPERTIES_FILE = ".yarn-properties-"; static final String YARN_APPLICATION_ID_KEY = "applicationID"; private static final String YARN_PROPERTIES_PARALLELISM = "parallelism"; private static final String YARN_PROPERTIES_DYNAMIC_PROPERTIES_STRING = "dynamicPropertiesString"; private static final String YARN_DYNAMIC_PROPERTIES_SEPARATOR = "@@"; // this has to be a regex for String.split() //------------------------------------ Command Line argument options ------------------------- // the prefix 
transformation is used by the CliFrontend static constructor. private final Option QUERY; // --- or --- private final Option APPLICATION_ID; // --- or --- private final Option QUEUE; private final Option SHIP_PATH; private final Option FLINK_JAR; private final Option JM_MEMORY; private final Option TM_MEMORY; private final Option CONTAINER; private final Option SLOTS; private final Option DETACHED; private final Option ZOOKEEPER_NAMESPACE; /** * @deprecated Streaming mode has been deprecated without replacement. Set the * {@link ConfigConstants#TASK_MANAGER_MEMORY_PRE_ALLOCATE_KEY} configuration * key to true to get the previous batch mode behaviour. */ @Deprecated private final Option STREAMING; private final Option NAME; private final Options ALL_OPTIONS; /** * Dynamic properties allow the user to specify additional configuration values with -D, such as * <tt> -Dfs.overwrite-files=true -Dtaskmanager.network.memory.min=536346624</tt> */ private final Option DYNAMIC_PROPERTIES; private final boolean acceptInteractiveInput; //------------------------------------ Internal fields ------------------------- private YarnClusterClient yarnCluster; private boolean detachedMode = false; public FlinkYarnSessionCli(String shortPrefix, String longPrefix) { this(shortPrefix, longPrefix, true); } public FlinkYarnSessionCli(String shortPrefix, String longPrefix, boolean acceptInteractiveInput) { this.acceptInteractiveInput = acceptInteractiveInput; QUERY = new Option(shortPrefix + "q", longPrefix + "query", false, "Display available YARN resources (memory, cores)"); APPLICATION_ID = new Option(shortPrefix + "id", longPrefix + "applicationId", true, "Attach to running YARN session"); QUEUE = new Option(shortPrefix + "qu", longPrefix + "queue", true, "Specify YARN queue."); SHIP_PATH = new Option(shortPrefix + "t", longPrefix + "ship", true, "Ship files in the specified directory (t for transfer)"); FLINK_JAR = new Option(shortPrefix + "j", longPrefix + "jar", true, "Path to Flink 
jar file"); JM_MEMORY = new Option(shortPrefix + "jm", longPrefix + "jobManagerMemory", true, "Memory for JobManager Container [in MB]"); TM_MEMORY = new Option(shortPrefix + "tm", longPrefix + "taskManagerMemory", true, "Memory per TaskManager Container [in MB]"); CONTAINER = new Option(shortPrefix + "n", longPrefix + "container", true, "Number of YARN container to allocate (=Number of Task Managers)"); SLOTS = new Option(shortPrefix + "s", longPrefix + "slots", true, "Number of slots per TaskManager"); DYNAMIC_PROPERTIES = new Option(shortPrefix + "D", true, "Dynamic properties"); DETACHED = new Option(shortPrefix + "d", longPrefix + "detached", false, "Start detached"); STREAMING = new Option(shortPrefix + "st", longPrefix + "streaming", false, "Start Flink in streaming mode"); NAME = new Option(shortPrefix + "nm", longPrefix + "name", true, "Set a custom name for the application on YARN"); ZOOKEEPER_NAMESPACE = new Option(shortPrefix + "z", longPrefix + "zookeeperNamespace", true, "Namespace to create the Zookeeper sub-paths for high availability mode"); ALL_OPTIONS = new Options(); ALL_OPTIONS.addOption(FLINK_JAR); ALL_OPTIONS.addOption(JM_MEMORY); ALL_OPTIONS.addOption(TM_MEMORY); ALL_OPTIONS.addOption(CONTAINER); ALL_OPTIONS.addOption(QUEUE); ALL_OPTIONS.addOption(QUERY); ALL_OPTIONS.addOption(SHIP_PATH); ALL_OPTIONS.addOption(SLOTS); ALL_OPTIONS.addOption(DYNAMIC_PROPERTIES); ALL_OPTIONS.addOption(DETACHED); ALL_OPTIONS.addOption(STREAMING); ALL_OPTIONS.addOption(NAME); ALL_OPTIONS.addOption(APPLICATION_ID); ALL_OPTIONS.addOption(ZOOKEEPER_NAMESPACE); } /** * Tries to load a Flink Yarn properties file and returns the Yarn application id if successful * @param cmdLine The command-line parameters * @param flinkConfiguration The flink configuration * @return Yarn application id or null if none could be retrieved */ private String loadYarnPropertiesFile(CommandLine cmdLine, Configuration flinkConfiguration) { String jobManagerOption = 
cmdLine.getOptionValue(ADDRESS_OPTION.getOpt(), null); if (jobManagerOption != null) { // don't resume from properties file if a JobManager has been specified return null; } for (Option option : cmdLine.getOptions()) { if (ALL_OPTIONS.hasOption(option.getOpt())) { if (!option.getOpt().equals(DETACHED.getOpt())) { // don't resume from properties file if yarn options have been specified return null; } } } // load the YARN properties File propertiesFile = getYarnPropertiesLocation(flinkConfiguration); if (!propertiesFile.exists()) { return null; } logAndSysout("Found YARN properties file " + propertiesFile.getAbsolutePath()); Properties yarnProperties = new Properties(); try { try (InputStream is = new FileInputStream(propertiesFile)) { yarnProperties.load(is); } } catch (IOException e) { throw new RuntimeException("Cannot read the YARN properties file", e); } // get the Yarn application id from the properties file String applicationID = yarnProperties.getProperty(YARN_APPLICATION_ID_KEY); if (applicationID == null) { throw new IllegalConfigurationException("Yarn properties file found but doesn't contain a " + "Yarn application id. 
Please delete the file at " + propertiesFile.getAbsolutePath()); } try { // try converting id to ApplicationId ConverterUtils.toApplicationId(applicationID); } catch (Exception e) { throw new RuntimeException("YARN properties contains an invalid entry for " + "application id: " + applicationID, e); } logAndSysout("Using Yarn application id from YARN properties " + applicationID); // configure the default parallelism from YARN String propParallelism = yarnProperties.getProperty(YARN_PROPERTIES_PARALLELISM); if (propParallelism != null) { // maybe the property is not set try { int parallelism = Integer.parseInt(propParallelism); flinkConfiguration.setInteger(ConfigConstants.DEFAULT_PARALLELISM_KEY, parallelism); logAndSysout("YARN properties set default parallelism to " + parallelism); } catch (NumberFormatException e) { throw new RuntimeException("Error while parsing the YARN properties: " + "Property " + YARN_PROPERTIES_PARALLELISM + " is not an integer."); } } // handle the YARN client's dynamic properties String dynamicPropertiesEncoded = yarnProperties.getProperty(YARN_PROPERTIES_DYNAMIC_PROPERTIES_STRING); Map<String, String> dynamicProperties = getDynamicProperties(dynamicPropertiesEncoded); for (Map.Entry<String, String> dynamicProperty : dynamicProperties.entrySet()) { flinkConfiguration.setString(dynamicProperty.getKey(), dynamicProperty.getValue()); } return applicationID; } public AbstractYarnClusterDescriptor createDescriptor(String defaultApplicationName, CommandLine cmd) { AbstractYarnClusterDescriptor yarnClusterDescriptor = getClusterDescriptor(); if (!cmd.hasOption(CONTAINER.getOpt())) { // number of containers is required option! 
LOG.error("Missing required argument {}", CONTAINER.getOpt()); printUsage(); throw new IllegalArgumentException("Missing required argument " + CONTAINER.getOpt()); } yarnClusterDescriptor.setTaskManagerCount(Integer.valueOf(cmd.getOptionValue(CONTAINER.getOpt()))); // Jar Path Path localJarPath; if (cmd.hasOption(FLINK_JAR.getOpt())) { String userPath = cmd.getOptionValue(FLINK_JAR.getOpt()); if (!userPath.startsWith("file://")) { userPath = "file://" + userPath; } localJarPath = new Path(userPath); } else { LOG.info("No path for the flink jar passed. Using the location of " + yarnClusterDescriptor.getClass() + " to locate the jar"); String encodedJarPath = yarnClusterDescriptor.getClass().getProtectionDomain().getCodeSource().getLocation().getPath(); try { // we have to decode the url encoded parts of the path String decodedPath = URLDecoder.decode(encodedJarPath, Charset.defaultCharset().name()); localJarPath = new Path(new File(decodedPath).toURI()); } catch (UnsupportedEncodingException e) { throw new RuntimeException("Couldn't decode the encoded Flink dist jar path: " + encodedJarPath + " Please supply a path manually via the -" + FLINK_JAR.getOpt() + " option."); } } yarnClusterDescriptor.setLocalJarPath(localJarPath); List<File> shipFiles = new ArrayList<>(); // path to directory to ship if (cmd.hasOption(SHIP_PATH.getOpt())) { String shipPath = cmd.getOptionValue(SHIP_PATH.getOpt()); File shipDir = new File(shipPath); if (shipDir.isDirectory()) { shipFiles.add(shipDir); } else { LOG.warn("Ship directory is not a directory. 
Ignoring it."); } } yarnClusterDescriptor.addShipFiles(shipFiles); // queue if (cmd.hasOption(QUEUE.getOpt())) { yarnClusterDescriptor.setQueue(cmd.getOptionValue(QUEUE.getOpt())); } // JobManager Memory if (cmd.hasOption(JM_MEMORY.getOpt())) { int jmMemory = Integer.valueOf(cmd.getOptionValue(JM_MEMORY.getOpt())); yarnClusterDescriptor.setJobManagerMemory(jmMemory); } // Task Managers memory if (cmd.hasOption(TM_MEMORY.getOpt())) { int tmMemory = Integer.valueOf(cmd.getOptionValue(TM_MEMORY.getOpt())); yarnClusterDescriptor.setTaskManagerMemory(tmMemory); } if (cmd.hasOption(SLOTS.getOpt())) { int slots = Integer.valueOf(cmd.getOptionValue(SLOTS.getOpt())); yarnClusterDescriptor.setTaskManagerSlots(slots); } String[] dynamicProperties = null; if (cmd.hasOption(DYNAMIC_PROPERTIES.getOpt())) { dynamicProperties = cmd.getOptionValues(DYNAMIC_PROPERTIES.getOpt()); } String dynamicPropertiesEncoded = StringUtils.join(dynamicProperties, YARN_DYNAMIC_PROPERTIES_SEPARATOR); yarnClusterDescriptor.setDynamicPropertiesEncoded(dynamicPropertiesEncoded); if (cmd.hasOption(DETACHED.getOpt()) || cmd.hasOption(CliFrontendParser.DETACHED_OPTION.getOpt())) { this.detachedMode = true; yarnClusterDescriptor.setDetachedMode(true); } if(cmd.hasOption(NAME.getOpt())) { yarnClusterDescriptor.setName(cmd.getOptionValue(NAME.getOpt())); } else { // set the default application name, if none is specified if(defaultApplicationName != null) { yarnClusterDescriptor.setName(defaultApplicationName); } } if (cmd.hasOption(ZOOKEEPER_NAMESPACE.getOpt())) { String zookeeperNamespace = cmd.getOptionValue(ZOOKEEPER_NAMESPACE.getOpt()); yarnClusterDescriptor.setZookeeperNamespace(zookeeperNamespace); } // ----- Convenience ----- // the number of slots available from YARN: int yarnTmSlots = yarnClusterDescriptor.getTaskManagerSlots(); if (yarnTmSlots == -1) { yarnTmSlots = 1; yarnClusterDescriptor.setTaskManagerSlots(yarnTmSlots); } int maxSlots = yarnTmSlots * 
yarnClusterDescriptor.getTaskManagerCount(); int userParallelism = Integer.valueOf(cmd.getOptionValue(CliFrontendParser.PARALLELISM_OPTION.getOpt(), "-1")); if (userParallelism != -1) { int slotsPerTM = (int) Math.ceil((double) userParallelism / yarnClusterDescriptor.getTaskManagerCount()); String message = "The YARN cluster has " + maxSlots + " slots available, " + "but the user requested a parallelism of " + userParallelism + " on YARN. " + "Each of the " + yarnClusterDescriptor.getTaskManagerCount() + " TaskManagers " + "will get "+slotsPerTM+" slots."; logAndSysout(message); yarnClusterDescriptor.setTaskManagerSlots(slotsPerTM); } return yarnClusterDescriptor; } private void printUsage() { System.out.println("Usage:"); HelpFormatter formatter = new HelpFormatter(); formatter.setWidth(200); formatter.setLeftPadding(5); formatter.setSyntaxPrefix(" Required"); Options req = new Options(); req.addOption(CONTAINER); formatter.printHelp(" ", req); formatter.setSyntaxPrefix(" Optional"); Options options = new Options(); addGeneralOptions(options); addRunOptions(options); formatter.printHelp(" ", options); } private static void writeYarnProperties(Properties properties, File propertiesFile) { try (final OutputStream out = new FileOutputStream(propertiesFile)) { properties.store(out, "Generated YARN properties file"); } catch (IOException e) { throw new RuntimeException("Error writing the properties file", e); } propertiesFile.setReadable(true, false); // readable for all. 
} public static void runInteractiveCli(YarnClusterClient yarnCluster, boolean readConsoleInput) { final String HELP = "Available commands:\n" + "help - show these commands\n" + "stop - stop the YARN session"; int numTaskmanagers = 0; try { BufferedReader in = new BufferedReader(new InputStreamReader(System.in)); label: while (true) { // ------------------ check if there are updates by the cluster ----------- try { GetClusterStatusResponse status = yarnCluster.getClusterStatus(); LOG.debug("Received status message: {}", status); if (status != null && numTaskmanagers != status.numRegisteredTaskManagers()) { System.err.println("Number of connected TaskManagers changed to " + status.numRegisteredTaskManagers() + ". " + "Slots available: " + status.totalNumberOfSlots()); numTaskmanagers = status.numRegisteredTaskManagers(); } } catch (Exception e) { LOG.warn("Could not retrieve the current cluster status. Skipping current retrieval attempt ...", e); } List<String> messages = yarnCluster.getNewMessages(); if (messages != null && messages.size() > 0) { System.err.println("New messages from the YARN cluster: "); for (String msg : messages) { System.err.println(msg); } } if (yarnCluster.getApplicationStatus() != ApplicationStatus.SUCCEEDED) { System.err.println("The YARN cluster has failed"); yarnCluster.shutdown(); } // wait until CLIENT_POLLING_INTERVAL is over or the user entered something. long startTime = System.currentTimeMillis(); while ((System.currentTimeMillis() - startTime) < CLIENT_POLLING_INTERVALL * 1000 && (!readConsoleInput || !in.ready())) { Thread.sleep(200); } //------------- handle interactive command by user. ---------------------- if (readConsoleInput && in.ready()) { String command = in.readLine(); switch (command) { case "quit": case "stop": yarnCluster.shutdownCluster(); break label; case "help": System.err.println(HELP); break; default: System.err.println("Unknown command '" + command + "'. 
Showing help: \n" + HELP); break; } } if (yarnCluster.hasBeenShutdown()) { LOG.info("Stopping interactive command line interface, YARN cluster has been stopped."); break; } } } catch(Exception e) { LOG.warn("Exception while running the interactive command line interface", e); } } public static void main(final String[] args) throws Exception { final FlinkYarnSessionCli cli = new FlinkYarnSessionCli("", ""); // no prefix for the YARN session Configuration flinkConfiguration = GlobalConfiguration.loadConfiguration(); SecurityUtils.install(new SecurityUtils.SecurityConfiguration(flinkConfiguration)); int retCode = SecurityUtils.getInstalledContext().runSecured(new Callable<Integer>() { @Override public Integer call() { return cli.run(args); } }); System.exit(retCode); } @Override public boolean isActive(CommandLine commandLine, Configuration configuration) { String jobManagerOption = commandLine.getOptionValue(ADDRESS_OPTION.getOpt(), null); boolean yarnJobManager = ID.equals(jobManagerOption); boolean yarnAppId = commandLine.hasOption(APPLICATION_ID.getOpt()); return yarnJobManager || yarnAppId || loadYarnPropertiesFile(commandLine, configuration) != null; } @Override public String getId() { return ID; } @Override public void addRunOptions(Options baseOptions) { for (Object option : ALL_OPTIONS.getOptions()) { baseOptions.addOption((Option) option); } } @Override public void addGeneralOptions(Options baseOptions) { baseOptions.addOption(APPLICATION_ID); } @Override public YarnClusterClient retrieveCluster( CommandLine cmdLine, Configuration config) throws UnsupportedOperationException { // first check for an application id, then try to load from yarn properties String applicationID = cmdLine.hasOption(APPLICATION_ID.getOpt()) ? cmdLine.getOptionValue(APPLICATION_ID.getOpt()) : loadYarnPropertiesFile(cmdLine, config); if(null != applicationID) { String zkNamespace = cmdLine.hasOption(ZOOKEEPER_NAMESPACE.getOpt()) ? 
cmdLine.getOptionValue(ZOOKEEPER_NAMESPACE.getOpt()) : config.getString(HighAvailabilityOptions.HA_CLUSTER_ID, applicationID); config.setString(HighAvailabilityOptions.HA_CLUSTER_ID, zkNamespace); AbstractYarnClusterDescriptor yarnDescriptor = getClusterDescriptor(); yarnDescriptor.setFlinkConfiguration(config); return yarnDescriptor.retrieve(applicationID); } else { throw new UnsupportedOperationException("Could not resume a Yarn cluster."); } } @Override public YarnClusterClient createCluster( String applicationName, CommandLine cmdLine, Configuration config, List<URL> userJarFiles) { Preconditions.checkNotNull(userJarFiles, "User jar files should not be null."); AbstractYarnClusterDescriptor yarnClusterDescriptor = createDescriptor(applicationName, cmdLine); yarnClusterDescriptor.setFlinkConfiguration(config); yarnClusterDescriptor.setProvidedUserJarFiles(userJarFiles); try { return yarnClusterDescriptor.deploy(); } catch (Exception e) { throw new RuntimeException("Error deploying the YARN cluster", e); } } public int run(String[] args) { // // Command Line Options // Options options = new Options(); addGeneralOptions(options); addRunOptions(options); CommandLineParser parser = new PosixParser(); CommandLine cmd; try { cmd = parser.parse(options, args); } catch(Exception e) { System.out.println(e.getMessage()); printUsage(); return 1; } // Query cluster for metrics if (cmd.hasOption(QUERY.getOpt())) { AbstractYarnClusterDescriptor yarnDescriptor = getClusterDescriptor(); String description; try { description = yarnDescriptor.getClusterDescription(); } catch (Exception e) { System.err.println("Error while querying the YARN cluster for available resources: "+e.getMessage()); e.printStackTrace(System.err); return 1; } System.out.println(description); return 0; } else if (cmd.hasOption(APPLICATION_ID.getOpt())) { AbstractYarnClusterDescriptor yarnDescriptor = getClusterDescriptor(); //configure ZK namespace depending on the value passed String zkNamespace = 
cmd.hasOption(ZOOKEEPER_NAMESPACE.getOpt()) ? cmd.getOptionValue(ZOOKEEPER_NAMESPACE.getOpt()) :yarnDescriptor.getFlinkConfiguration() .getString(HA_ZOOKEEPER_NAMESPACE_KEY, cmd.getOptionValue(APPLICATION_ID.getOpt())); LOG.info("Going to use the ZK namespace: {}", zkNamespace); yarnDescriptor.getFlinkConfiguration().setString(HA_ZOOKEEPER_NAMESPACE_KEY, zkNamespace); try { yarnCluster = yarnDescriptor.retrieve(cmd.getOptionValue(APPLICATION_ID.getOpt())); } catch (Exception e) { throw new RuntimeException("Could not retrieve existing Yarn application", e); } if (detachedMode) { LOG.info("The Flink YARN client has been started in detached mode. In order to stop " + "Flink on YARN, use the following command or a YARN web interface to stop it:\n" + "yarn application -kill " + APPLICATION_ID.getOpt()); yarnCluster.disconnect(); } else { runInteractiveCli(yarnCluster, true); } } else { AbstractYarnClusterDescriptor yarnDescriptor; try { yarnDescriptor = createDescriptor(null, cmd); } catch (Exception e) { System.err.println("Error while starting the YARN Client: " + e.getMessage()); e.printStackTrace(System.err); return 1; } try { yarnCluster = yarnDescriptor.deploy(); } catch (Exception e) { System.err.println("Error while deploying YARN cluster: "+e.getMessage()); e.printStackTrace(System.err); return 1; } //------------------ ClusterClient deployed, handle connection details String jobManagerAddress = yarnCluster.getJobManagerAddress().getAddress().getHostName() + ":" + yarnCluster.getJobManagerAddress().getPort(); System.out.println("Flink JobManager is now running on " + jobManagerAddress); System.out.println("JobManager Web Interface: " + yarnCluster.getWebInterfaceURL()); // file that we write into the conf/ dir containing the jobManager address and the dop. 
File yarnPropertiesFile = getYarnPropertiesLocation(yarnCluster.getFlinkConfiguration()); Properties yarnProps = new Properties(); yarnProps.setProperty(YARN_APPLICATION_ID_KEY, yarnCluster.getApplicationId().toString()); if (yarnDescriptor.getTaskManagerSlots() != -1) { String parallelism = Integer.toString(yarnDescriptor.getTaskManagerSlots() * yarnDescriptor.getTaskManagerCount()); yarnProps.setProperty(YARN_PROPERTIES_PARALLELISM, parallelism); } // add dynamic properties if (yarnDescriptor.getDynamicPropertiesEncoded() != null) { yarnProps.setProperty(YARN_PROPERTIES_DYNAMIC_PROPERTIES_STRING, yarnDescriptor.getDynamicPropertiesEncoded()); } writeYarnProperties(yarnProps, yarnPropertiesFile); //------------------ ClusterClient running, let user control it ------------ if (detachedMode) { // print info and quit: LOG.info("The Flink YARN client has been started in detached mode. In order to stop " + "Flink on YARN, use the following command or a YARN web interface to stop it:\n" + "yarn application -kill " + yarnCluster.getApplicationId()); yarnCluster.waitForClusterToBeReady(); yarnCluster.disconnect(); } else { runInteractiveCli(yarnCluster, acceptInteractiveInput); } } return 0; } /** * Utility method for tests. 
*/ public void stop() { if (yarnCluster != null) { LOG.info("Command line interface is shutting down the yarnCluster"); try { yarnCluster.shutdown(); } catch (Throwable t) { LOG.warn("Could not properly shutdown the yarn cluster.", t); } } } private void logAndSysout(String message) { LOG.info(message); System.out.println(message); } public static Map<String, String> getDynamicProperties(String dynamicPropertiesEncoded) { if (dynamicPropertiesEncoded != null && dynamicPropertiesEncoded.length() > 0) { Map<String, String> properties = new HashMap<>(); String[] propertyLines = dynamicPropertiesEncoded.split(YARN_DYNAMIC_PROPERTIES_SEPARATOR); for (String propLine : propertyLines) { if (propLine == null) { continue; } int firstEquals = propLine.indexOf("="); if (firstEquals >= 0) { String key = propLine.substring(0, firstEquals).trim(); String value = propLine.substring(firstEquals + 1, propLine.length()).trim(); if (!key.isEmpty()) { properties.put(key, value); } } } return properties; } else { return Collections.emptyMap(); } } public static File getYarnPropertiesLocation(Configuration conf) { String defaultPropertiesFileLocation = System.getProperty("java.io.tmpdir"); String currentUser = System.getProperty("user.name"); String propertiesFileLocation = conf.getString(ConfigConstants.YARN_PROPERTIES_FILE_LOCATION, defaultPropertiesFileLocation); return new File(propertiesFileLocation, YARN_PROPERTIES_FILE + currentUser); } protected AbstractYarnClusterDescriptor getClusterDescriptor() { return new YarnClusterDescriptor(); } }
/* * Copyright (C) 2015 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package com.google.cloud.dataflow.sdk.util; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import com.google.cloud.dataflow.sdk.coders.AtomicCoder; import com.google.cloud.dataflow.sdk.coders.ByteArrayCoder; import com.google.cloud.dataflow.sdk.coders.Coder; import com.google.cloud.dataflow.sdk.coders.CoderException; import com.google.common.base.MoreObjects; import com.google.common.io.ByteStreams; import com.google.common.primitives.UnsignedBytes; import com.fasterxml.jackson.annotation.JsonCreator; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.util.Arrays; import java.util.Comparator; import javax.annotation.concurrent.NotThreadSafe; /** * An elastic-sized byte array which allows you to manipulate it as a stream, or access * it directly. This allows for a quick succession of moving bytes from an {@link InputStream} * to this wrapper to be used as an {@link OutputStream} and vice versa. This wrapper * also provides random access to bytes stored within. This wrapper allows users to finely * control the number of byte copies that occur. * * Anything stored within the in-memory buffer from offset {@link #size()} is considered temporary * unused storage. 
*/ @NotThreadSafe public class RandomAccessData { /** * A {@link Coder} which encodes the valid parts of this stream. * This follows the same encoding scheme as {@link ByteArrayCoder}. * This coder is deterministic and consistent with equals. * * This coder does not support encoding positive infinity. */ public static class RandomAccessDataCoder extends AtomicCoder<RandomAccessData> { private static final RandomAccessDataCoder INSTANCE = new RandomAccessDataCoder(); @JsonCreator public static RandomAccessDataCoder of() { return INSTANCE; } @Override public void encode(RandomAccessData value, OutputStream outStream, Coder.Context context) throws CoderException, IOException { if (value == POSITIVE_INFINITY) { throw new CoderException("Positive infinity can not be encoded."); } if (!context.isWholeStream) { VarInt.encode(value.size, outStream); } value.writeTo(outStream, 0, value.size); } @Override public RandomAccessData decode(InputStream inStream, Coder.Context context) throws CoderException, IOException { RandomAccessData rval = new RandomAccessData(); if (!context.isWholeStream) { int length = VarInt.decodeInt(inStream); rval.readFrom(inStream, 0, length); } else { ByteStreams.copy(inStream, rval.asOutputStream()); } return rval; } @Override public boolean consistentWithEquals() { return true; } @Override public boolean isRegisterByteSizeObserverCheap( RandomAccessData value, Coder.Context context) { return true; } @Override protected long getEncodedElementByteSize(RandomAccessData value, Coder.Context context) throws Exception { if (value == null) { throw new CoderException("cannot encode a null in memory stream"); } long size = 0; if (!context.isWholeStream) { size += VarInt.getLength(value.size); } return size + value.size; } } public static final UnsignedLexicographicalComparator UNSIGNED_LEXICOGRAPHICAL_COMPARATOR = new UnsignedLexicographicalComparator(); /** * A {@link Comparator} that compares two byte arrays lexicographically. 
It compares * values as a list of unsigned bytes. The first pair of values that follow any common prefix, * or when one array is a prefix of the other, treats the shorter array as the lesser. * For example, [] < [0x01] < [0x01, 0x7F] < [0x01, 0x80] < [0x02] < POSITIVE INFINITY. * * <p>Note that a token type of positive infinity is supported and is greater than * all other {@link RandomAccessData}. */ public static final class UnsignedLexicographicalComparator implements Comparator<RandomAccessData> { // Do not instantiate private UnsignedLexicographicalComparator() { } @Override public int compare(RandomAccessData o1, RandomAccessData o2) { return compare(o1, o2, 0 /* start from the beginning */); } /** * Compare the two sets of bytes starting at the given offset. */ public int compare(RandomAccessData o1, RandomAccessData o2, int startOffset) { if (o1 == o2) { return 0; } if (o1 == POSITIVE_INFINITY) { return 1; } if (o2 == POSITIVE_INFINITY) { return -1; } int minBytesLen = Math.min(o1.size, o2.size); for (int i = startOffset; i < minBytesLen; i++) { // unsigned comparison int b1 = o1.buffer[i] & 0xFF; int b2 = o2.buffer[i] & 0xFF; if (b1 == b2) { continue; } // Return the stream with the smaller byte as the smaller value. return b1 - b2; } // If one is a prefix of the other, return the shorter one as the smaller one. // If both lengths are equal, then both streams are equal. return o1.size - o2.size; } /** * Compute the length of the common prefix of the two provided sets of bytes. */ public int commonPrefixLength(RandomAccessData o1, RandomAccessData o2) { int minBytesLen = Math.min(o1.size, o2.size); for (int i = 0; i < minBytesLen; i++) { // unsigned comparison int b1 = o1.buffer[i] & 0xFF; int b2 = o2.buffer[i] & 0xFF; if (b1 != b2) { return i; } } return minBytesLen; } } /** A token type representing positive infinity. 
*/ static final RandomAccessData POSITIVE_INFINITY = new RandomAccessData(0); /** * Returns a RandomAccessData that is the smallest value of same length which * is strictly greater than this. Note that if this is empty or is all 0xFF then * a token value of positive infinity is returned. * * The {@link UnsignedLexicographicalComparator} supports comparing {@link RandomAccessData} * with support for positive infinitiy. */ public RandomAccessData increment() throws IOException { RandomAccessData copy = copy(); for (int i = copy.size - 1; i >= 0; --i) { if (copy.buffer[i] != UnsignedBytes.MAX_VALUE) { copy.buffer[i] = UnsignedBytes.checkedCast(UnsignedBytes.toInt(copy.buffer[i]) + 1); return copy; } } return POSITIVE_INFINITY; } private static final int DEFAULT_INITIAL_BUFFER_SIZE = 128; /** Constructs a RandomAccessData with a default buffer size. */ public RandomAccessData() { this(DEFAULT_INITIAL_BUFFER_SIZE); } /** Constructs a RandomAccessData with the initial buffer. */ public RandomAccessData(byte[] initialBuffer) { checkNotNull(initialBuffer); this.buffer = initialBuffer; this.size = initialBuffer.length; } /** Constructs a RandomAccessData with the given buffer size. */ public RandomAccessData(int initialBufferSize) { checkArgument(initialBufferSize >= 0, "Expected initial buffer size to be greater than zero."); this.buffer = new byte[initialBufferSize]; } private byte[] buffer; private int size; /** Returns the backing array. */ public byte[] array() { return buffer; } /** Returns the number of bytes in the backing array that are valid. */ public int size() { return size; } /** Resets the end of the stream to the specified position. 
*/ public void resetTo(int position) { ensureCapacity(position); size = position; } private final OutputStream outputStream = new OutputStream() { @Override public void write(int b) throws IOException { ensureCapacity(size + 1); buffer[size] = (byte) b; size += 1; } @Override public void write(byte[] b, int offset, int length) throws IOException { ensureCapacity(size + length); System.arraycopy(b, offset, buffer, size, length); size += length; } }; /** * Returns an output stream which writes to the backing buffer from the current position. * Note that the internal buffer will grow as required to accomodate all data written. */ public OutputStream asOutputStream() { return outputStream; } /** * Returns an {@link InputStream} wrapper which supplies the portion of this backing byte buffer * starting at {@code offset} and up to {@code length} bytes. Note that the returned * {@link InputStream} is only a wrapper and any modifications to the underlying * {@link RandomAccessData} will be visible by the {@link InputStream}. */ public InputStream asInputStream(final int offset, final int length) { return new ByteArrayInputStream(buffer, offset, length); } /** * Writes {@code length} bytes starting at {@code offset} from the backing data store to the * specified output stream. */ public void writeTo(OutputStream out, int offset, int length) throws IOException { out.write(buffer, offset, length); } /** * Reads {@code length} bytes from the specified input stream writing them into the backing * data store starting at {@code offset}. * * <p>Note that the in memory stream will be grown to ensure there is enough capacity. */ public void readFrom(InputStream inStream, int offset, int length) throws IOException { ensureCapacity(offset + length); ByteStreams.readFully(inStream, buffer, offset, length); size = offset + length; } /** Returns a copy of this RandomAccessData. 
*/ public RandomAccessData copy() throws IOException { RandomAccessData copy = new RandomAccessData(size); writeTo(copy.asOutputStream(), 0, size); return copy; } @Override public boolean equals(Object other) { if (other == this) { return true; } if (!(other instanceof RandomAccessData)) { return false; } return UNSIGNED_LEXICOGRAPHICAL_COMPARATOR.compare(this, (RandomAccessData) other) == 0; } @Override public int hashCode() { int result = 1; for (int i = 0; i < size; ++i) { result = 31 * result + buffer[i]; } return result; } @Override public String toString() { return MoreObjects.toStringHelper(this) .add("buffer", Arrays.copyOf(buffer, size)) .add("size", size) .toString(); } private void ensureCapacity(int minCapacity) { // If we have enough space, don't grow the buffer. if (minCapacity <= buffer.length) { return; } // Try to double the size of the buffer, if thats not enough, just use the new capacity. // Note that we use Math.min(long, long) to not cause overflow on the multiplication. int newCapacity = (int) Math.min(Integer.MAX_VALUE, buffer.length * 2L); if (newCapacity < minCapacity) { newCapacity = minCapacity; } buffer = Arrays.copyOf(buffer, newCapacity); } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.internal.cache; import static org.junit.Assert.*; import java.io.IOException; import java.util.HashMap; import java.util.Map; import java.util.concurrent.ConcurrentMap; import org.junit.Test; import org.junit.experimental.categories.Category; import org.apache.geode.cache.AttributesFactory; import org.apache.geode.cache.Cache; import org.apache.geode.cache.CacheException; import org.apache.geode.cache.PartitionAttributesFactory; import org.apache.geode.cache.Region; import org.apache.geode.cache.RegionFactory; import org.apache.geode.cache.RegionShortcut; import org.apache.geode.cache.Scope; import org.apache.geode.cache.server.CacheServer; import org.apache.geode.cache30.CacheSerializableRunnable; import org.apache.geode.cache30.ClientServerTestCase; import org.apache.geode.distributed.internal.DistributionConfig; import org.apache.geode.internal.cache.EventTracker.BulkOpHolder; import org.apache.geode.internal.cache.ha.ThreadIdentifier; import org.apache.geode.test.dunit.Assert; import org.apache.geode.test.dunit.Host; import org.apache.geode.test.dunit.NetworkUtils; import org.apache.geode.test.dunit.SerializableRunnable; import org.apache.geode.test.dunit.VM; import 
org.apache.geode.test.dunit.Wait;
import org.apache.geode.test.dunit.cache.internal.JUnit4CacheTestCase;
import org.apache.geode.test.junit.categories.DistributedTest;

/**
 * Tests <code>EventTracker</code> management.
 *
 * @since GemFire 6.5
 */
@Category(DistributedTest.class)
public class EventTrackerDUnitTest extends JUnit4CacheTestCase {

  /** The port on which the <code>CacheServer</code> was started in this VM */
  private static int cacheServerPort;

  /** The <code>Cache</code>'s <code>ExpiryTask</code>'s ping interval */
  private static final String MESSAGE_TRACKING_TIMEOUT = "5000";

  @Override
  public final void postTearDownCacheTestCase() throws Exception {
    disconnectAllFromDS();
  }

  /**
   * Tests <code>EventTracker</code> is created and destroyed when a <code>Region</code> is created
   * and destroyed.
   */
  @Test
  public void testEventTrackerCreateDestroy() throws CacheException {
    // Verify the Cache's ExpiryTask contains no EventTrackers
    GemFireCacheImpl cache = (GemFireCacheImpl) getCache();
    EventTracker.ExpiryTask expiryTask = cache.getEventTrackerTask();
    assertNotNull(expiryTask);

    // We start with 3 event trackers:
    // one for the PDX registry region
    // one for ManagementConstants.MONITORING_REGION
    // one for ManagementConstants.NOTIFICATION_REGION
    final int EXPECTED_TRACKERS = 3;
    assertEquals(EXPECTED_TRACKERS, expiryTask.getNumberOfTrackers());

    // Create a distributed Region
    AttributesFactory factory = new AttributesFactory();
    factory.setScope(Scope.DISTRIBUTED_ACK);
    LocalRegion region = (LocalRegion) createRegion(getName(), factory.create());

    // Verify an EventTracker is created and is empty
    EventTracker eventTracker = region.getEventTracker();
    assertNotNull(eventTracker);
    Map eventState = region.getEventState();
    assertNotNull(eventState);
    assertEquals(0, eventState.size());

    // Verify it and the root region's EventTracker are added to the Cache's ExpiryTask's trackers
    assertEquals(EXPECTED_TRACKERS + 2, expiryTask.getNumberOfTrackers());

    // Destroy the Region
    region.destroyRegion();

    // Verify the EventTracker is removed from the Cache's ExpiryTask's trackers
    // (the root region's tracker remains)
    assertEquals(EXPECTED_TRACKERS + 1, expiryTask.getNumberOfTrackers());
  }

  /**
   * Tests adding threads to an <code>EventTracker</code>.
   */
  @Test
  public void testEventTrackerAddThreadIdentifier() throws CacheException {
    Host host = Host.getHost(0);
    VM serverVM = host.getVM(0);
    VM clientVM = host.getVM(1);
    final String regionName = getName();

    // Create Region in the server and verify tracker is created
    serverVM.invoke(new CacheSerializableRunnable("Create server") {
      @Override
      public void run2() throws CacheException {
        // Create a distributed Region
        AttributesFactory factory = new AttributesFactory();
        factory.setScope(Scope.DISTRIBUTED_ACK);
        LocalRegion region = (LocalRegion) createRegion(regionName, factory.create());

        // Verify an EventTracker is created
        EventTracker eventTracker = region.getEventTracker();
        assertNotNull(eventTracker);

        try {
          startCacheServer();
        } catch (Exception ex) {
          Assert.fail("While starting CacheServer", ex);
        }
      }
    });

    // Verify tracker in server contains no entries
    serverVM.invoke(new CacheSerializableRunnable("Do puts") {
      @Override
      public void run2() throws CacheException {
        LocalRegion region = (LocalRegion) getRootRegion().getSubregion(regionName);
        Map eventState = region.getEventState();
        assertEquals(0, eventState.size());
      }
    });

    // Create Region in the client
    final int port = serverVM.invoke(() -> EventTrackerDUnitTest.getCacheServerPort());
    final String hostName = NetworkUtils.getServerHostName(host);
    clientVM.invoke(new CacheSerializableRunnable("Create client") {
      @Override
      public void run2() throws CacheException {
        getCache();
        AttributesFactory factory = new AttributesFactory();
        factory.setScope(Scope.LOCAL);
        ClientServerTestCase.configureConnectionPool(factory, hostName, port, -1, false, -1, -1,
            null);
        createRegion(regionName, factory.create());
      }
    });

    // Do puts in the client
    clientVM.invoke(new CacheSerializableRunnable("Do puts") {
      @Override
      public void run2() throws CacheException {
        Region region = getRootRegion().getSubregion(regionName);
        for (int i = 0; i < 10; i++) {
          region.put(i, i);
        }
      }
    });

    // Verify tracker in server contains an entry for client thread
    serverVM.invoke(new CacheSerializableRunnable("Do puts") {
      @Override
      public void run2() throws CacheException {
        LocalRegion region = (LocalRegion) getRootRegion().getSubregion(regionName);
        Map eventState = region.getEventState();
        assertEquals(1, eventState.size());
      }
    });
  }

  /**
   * Tests adding events to and removing events from an <code>EventTracker</code>.
   */
  @Test
  public void testEventTrackerAddRemoveThreadIdentifier() throws CacheException {
    Host host = Host.getHost(0);
    VM serverVM = host.getVM(0);
    VM clientVM = host.getVM(1);
    final String regionName = getName();

    // Create Region in the server and verify tracker is created
    serverVM.invoke(new CacheSerializableRunnable("Create server") {
      @Override
      public void run2() throws CacheException {
        // Set the message tracking timeout so tracked events expire quickly
        System.setProperty(DistributionConfig.GEMFIRE_PREFIX + "messageTrackingTimeout",
            MESSAGE_TRACKING_TIMEOUT);

        // Create a distributed Region
        AttributesFactory factory = new AttributesFactory();
        factory.setScope(Scope.DISTRIBUTED_ACK);
        LocalRegion region = (LocalRegion) createRegion(regionName, factory.create());

        // Verify an EventTracker is created
        EventTracker eventTracker = region.getEventTracker();
        assertNotNull(eventTracker);

        try {
          startCacheServer();
        } catch (Exception ex) {
          Assert.fail("While starting CacheServer", ex);
        }
      }
    });

    // Verify tracker in server contains no entries
    serverVM.invoke(new CacheSerializableRunnable("Do puts") {
      @Override
      public void run2() throws CacheException {
        LocalRegion region = (LocalRegion) getRootRegion().getSubregion(regionName);
        Map eventState = region.getEventState();
        assertEquals(0, eventState.size());
      }
    });

    // Create Region in the client
    final int port = serverVM.invoke(() -> EventTrackerDUnitTest.getCacheServerPort());
    final String hostName = NetworkUtils.getServerHostName(host);
    clientVM.invoke(new CacheSerializableRunnable("Create client") {
      @Override
      public void run2() throws CacheException {
        getCache();
        AttributesFactory factory = new AttributesFactory();
        factory.setScope(Scope.LOCAL);
        ClientServerTestCase.configureConnectionPool(factory, hostName, port, -1, false, -1, -1,
            null);
        createRegion(regionName, factory.create());
      }
    });

    // Do puts in the client
    clientVM.invoke(new CacheSerializableRunnable("Do puts") {
      @Override
      public void run2() throws CacheException {
        Region region = getRootRegion().getSubregion(regionName);
        for (int i = 0; i < 10; i++) {
          region.put(i, i);
        }
      }
    });

    // Verify tracker in server
    serverVM.invoke(new CacheSerializableRunnable("Do puts") {
      @Override
      public void run2() throws CacheException {
        // First verify it contains an entry
        LocalRegion region = (LocalRegion) getRootRegion().getSubregion(regionName);
        Map eventState = region.getEventState();
        assertEquals(1, eventState.size());

        // Pause for the message tracking timeout
        int waitTime = Integer.parseInt(MESSAGE_TRACKING_TIMEOUT) * 3;
        Wait.pause(waitTime);

        // Verify the server no longer contains an entry
        eventState = region.getEventState();
        assertEquals(0, eventState.size());
      }
    });
  }

  /**
   * Test to make sure we don't leak put all events in the event tracker after multiple putAlls
   */
  @Test
  public void testPutAllHoldersInEventTracker() {
    Host host = Host.getHost(0);
    VM vm0 = host.getVM(0);
    VM vm1 = host.getVM(1);
    VM vm2 = host.getVM(2);

    SerializableRunnable createRegion = new SerializableRunnable("createRegion") {
      @Override
      public void run() {
        Cache cache = getCache();
        RegionFactory<Object, Object> rf =
            cache.createRegionFactory(RegionShortcut.PARTITION_REDUNDANT);
        PartitionAttributesFactory paf = new PartitionAttributesFactory();
        paf.setRedundantCopies(1);
        paf.setTotalNumBuckets(3);
        rf.setPartitionAttributes(paf.create());
        rf.setConcurrencyChecksEnabled(true);
        rf.create("partitioned");

        rf = cache.createRegionFactory(RegionShortcut.REPLICATE);
        rf.setConcurrencyChecksEnabled(true);
        rf.create("replicate");
        try {
          startCacheServer();
        } catch (Exception ex) {
          Assert.fail("While starting CacheServer", ex);
        }
      }
    };
    vm0.invoke(createRegion);
    vm1.invoke(createRegion);

    // Create Region in the client
    final int port = vm0.invoke(() -> EventTrackerDUnitTest.getCacheServerPort());
    final String hostName = NetworkUtils.getServerHostName(host);
    vm2.invoke(new CacheSerializableRunnable("Create client") {
      @Override
      public void run2() throws CacheException {
        getCache();
        AttributesFactory factory = new AttributesFactory();
        factory.setScope(Scope.LOCAL);
        ClientServerTestCase.configureConnectionPool(factory, hostName, port, -1, false, -1, -1,
            null);
        createRootRegion("partitioned", factory.create());
        createRootRegion("replicate", factory.create());
      }
    });

    doTwoPutAlls(vm2, "partitioned");
    doTwoPutAlls(vm2, "replicate");

    // Make sure that the event tracker for each bucket only records the last
    // event.
    checkBucketEventTracker(vm0, 0, 3);
    checkBucketEventTracker(vm1, 0, 3);
    checkBucketEventTracker(vm0, 1, 3);
    checkBucketEventTracker(vm1, 1, 3);
    checkBucketEventTracker(vm0, 2, 3);
    checkBucketEventTracker(vm1, 2, 3);

    checkReplicateEventTracker(vm0, 9);
    checkReplicateEventTracker(vm1, 9);
  }

  /** Performs two successive putAlls of 9 entries each on the named region in the given VM. */
  private void doTwoPutAlls(VM vm, final String regionName) {
    SerializableRunnable createData = new SerializableRunnable("putAlls") {
      @Override
      public void run() {
        Cache cache = getCache();
        Region region = cache.getRegion(regionName);

        Map putAllMap = new HashMap();
        for (int i = 0; i < 9; i++) {
          putAllMap.put(i, i);
        }
        region.putAll(putAllMap);

        putAllMap.clear();
        for (int i = 10; i < 19; i++) {
          putAllMap.put(i, i);
        }
        region.putAll(putAllMap);
      }
    };
    vm.invoke(createData);
  }

  /**
   * Asserts that the "replicate" region's event tracker in the given VM retains exactly
   * {@code expectedEntryCount} bulk-op version tags.
   *
   * <p>Previously declared to return the runnable; no caller ever used the value, so the
   * misleading return type was removed.
   */
  private void checkReplicateEventTracker(VM vm, final int expectedEntryCount) {
    SerializableRunnable checkEventTracker = new SerializableRunnable("checkEventTracker") {
      @Override
      public void run() {
        Cache cache = getCache();
        DistributedRegion region = (DistributedRegion) cache.getRegion("replicate");
        checkEventTracker(region, expectedEntryCount);
      }
    };
    vm.invoke(checkEventTracker);
  }

  /**
   * Asserts that the given bucket of the "partitioned" region in the given VM retains exactly
   * {@code expectedEntryCount} bulk-op version tags.
   *
   * <p>Previously declared to return the runnable; no caller ever used the value, so the
   * misleading return type was removed.
   */
  private void checkBucketEventTracker(VM vm, final int bucketNumber,
      final int expectedEntryCount) {
    SerializableRunnable checkEventTracker = new SerializableRunnable("checkEventTracker") {
      @Override
      public void run() {
        Cache cache = getCache();
        PartitionedRegion region = (PartitionedRegion) cache.getRegion("partitioned");
        BucketRegion br = region.getBucketRegion(bucketNumber);
        checkEventTracker(br, expectedEntryCount);
      }
    };
    vm.invoke(checkEventTracker);
  }

  /**
   * Asserts the region's tracker recorded bulk-op tags from exactly one member and that the
   * holder retains only the last putAll's {@code numberOfEvents} version tags.
   */
  private void checkEventTracker(LocalRegion region, int numberOfEvents) {
    EventTracker tracker = region.getEventTracker();
    ConcurrentMap<ThreadIdentifier, BulkOpHolder> memberToTags =
        tracker.getRecordedBulkOpVersionTags();
    assertEquals("memberToTags=" + memberToTags, 1, memberToTags.size());
    BulkOpHolder holder = memberToTags.values().iterator().next();
    // We expect the holder to retain only the last putAll that was performed.
    assertEquals("entryToVersionTags=" + holder.entryVersionTags, numberOfEvents,
        holder.entryVersionTags.size());
  }

  /** Starts a CacheServer on an ephemeral port and records the chosen port. */
  protected void startCacheServer() throws IOException {
    CacheServer cacheServer = getCache().addCacheServer();
    cacheServer.setPort(0);
    cacheServer.start();
    cacheServerPort = cacheServer.getPort();
  }

  /** Returns the port recorded by {@link #startCacheServer()} in this VM. */
  protected static int getCacheServerPort() {
    return cacheServerPort;
  }
}
/*
 * Copyright 2000-2015 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.jetbrains.commandInterface.commandLine.psi;

import com.intellij.openapi.util.Pair;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiWhiteSpace;
import com.intellij.util.containers.hash.HashMap;
import com.jetbrains.commandInterface.commandLine.ValidationResult;
import com.jetbrains.commandInterface.command.Argument;
import com.jetbrains.commandInterface.command.Command;
import com.jetbrains.commandInterface.command.Option;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Map;

/**
 * Validation result provider and holder implemented as a PSI visitor.
 * <p>
 * The visitor walks the children of a {@link CommandLineFile} once (see {@link #create(CommandLineFile)})
 * and records, per PSI element, whether it is a bad value, an excess argument, an option argument,
 * or a positional argument. Query methods then answer from the recorded maps/collections.
 *
 * @author Ilya.Kazakevich
 */
final class ValidationResultImpl extends CommandLineVisitor implements ValidationResult {
  /**
   * All options [name -> option]. An option with several names (e.g. short and long form)
   * appears once per name.
   */
  @NotNull
  private final Map<String, Option> myOptions = new HashMap<String, Option>();
  /**
   * Available, but unused options [name -> option]. Starts as a copy of {@link #myOptions};
   * names are removed as the corresponding options are seen on the command line.
   */
  @NotNull
  private final Map<String, Option> myUnusedOptions = new HashMap<String, Option>();
  /**
   * We always need command to validate args
   */
  @NotNull
  private final Command myCommand;
  /**
   * Number of next positional argument. I.e. will be 3 for "my_arg arg_1 arg_2"
   */
  private int myCurrentPositionArgument;
  /**
   * If next arg is supposed to be option arg, then option and number of expected args stored here.
   * Null stored otherwise.
   */
  @Nullable
  private Pair<Option, Integer> myCurrentOptionAndArgsLeft;
  /**
   * List of elements whose values are known to be bad
   */
  @NotNull
  private final Collection<PsiElement> myBadValues = new ArrayList<PsiElement>();
  /**
   * List of elements which are known to be excess
   */
  @NotNull
  private final Collection<CommandLineArgument> myExcessArguments = new ArrayList<CommandLineArgument>();
  /**
   * Map of arguments known to be option arguments [PSI argument -> option]
   */
  @NotNull
  private final Map<CommandLineArgument, Option> myOptionArguments = new HashMap<CommandLineArgument, Option>();
  /**
   * PSI argument -> argument map
   */
  @NotNull
  private final Map<CommandLineArgument, Argument> myArguments = new HashMap<CommandLineArgument, Argument>();

  private ValidationResultImpl(@NotNull final Command command) {
    // Register every option under each of its names so lookups by any alias succeed.
    for (final Option option : command.getOptions()) {
      for (final String optionName : option.getAllNames()) {
        myOptions.put(optionName, option);
      }
    }
    // Initially no option has been used yet.
    // (Fixed: removed a stray empty statement ";;" after this call.)
    myUnusedOptions.putAll(myOptions);
    myCommand = command;
  }

  @Override
  public boolean isBadValue(@NotNull final PsiElement element) {
    return myBadValues.contains(element);
  }

  @Override
  public boolean isExcessArgument(@NotNull final CommandLineArgument argument) {
    return myExcessArguments.contains(argument);
  }

  @Override
  @NotNull
  public Collection<Option> getUnusedOptions() {
    // NOTE(review): an option registered under several names appears once per remaining name here;
    // callers seem to tolerate that — confirm before de-duplicating.
    return myUnusedOptions.values();
  }

  @Override
  @Nullable
  public Option getOptionForOptionArgument(@NotNull final CommandLineArgument argument) {
    return myOptionArguments.get(argument);
  }

  @Nullable
  @Override
  public Argument getArgument(final @NotNull CommandLineArgument commandLineArgument) {
    return myArguments.get(commandLineArgument);
  }

  @Override
  @Nullable
  public Option getOption(final @NotNull CommandLineOption option) {
    return myOptions.get(option.getOptionName());
  }

  /**
   * Creates validation result by file
   *
   * @param file file to validate
   * @return validation result or null if file has no command or command is unknown
   */
  @Nullable
  static ValidationResult create(final CommandLineFile file) {
    final Command command = file.findRealCommand();
    if (command == null) {
      return null;
    }
    final ValidationResultImpl validationLayout = new ValidationResultImpl(command);
    file.acceptChildren(validationLayout);
    return validationLayout;
  }

  @Override
  public void visitArgument(@NotNull final CommandLineArgument o) {
    super.visitArgument(o);
    if (myCurrentOptionAndArgsLeft != null) {
      // The previous option still expects arguments: this argument belongs to it.
      processOptionArgument(o);
      return;
    }
    // Process as positional
    processPositionalArgument(o);
  }

  /**
   * Records a positional argument: either validates it against the command's argument info
   * or marks it as excess when the command accepts no more positionals.
   */
  private void processPositionalArgument(@NotNull final CommandLineArgument o) {
    final Pair<Boolean, Argument> argumentPair = myCommand.getArgumentsInfo().getArgument(myCurrentPositionArgument++);
    if (argumentPair == null) {
      myExcessArguments.add(o);
    }
    else {
      processArgument(o, argumentPair.second);
    }
  }

  /**
   * Records an argument that belongs to the most recently seen option, decrementing the
   * number of arguments that option still expects. An argument arriving after the option is
   * already satisfied is marked excess.
   */
  private void processOptionArgument(@NotNull final CommandLineArgument o) {
    assert myCurrentOptionAndArgsLeft != null : "Method can't be called if no current option exist";
    if (myCurrentOptionAndArgsLeft.second > 0) {
      myCurrentOptionAndArgsLeft = Pair.create(myCurrentOptionAndArgsLeft.first, myCurrentOptionAndArgsLeft.second - 1);
      final Pair<Integer, Argument> argumentAndQuantity = myCurrentOptionAndArgsLeft.first.getArgumentAndQuantity();
      // TODO: Use class instead of pair to prevent such a stupid checks
      assert argumentAndQuantity != null : "Option has arguments left but no argument info";
      final Argument argumentInfo = argumentAndQuantity.getSecond();
      processArgument(o, argumentInfo);
      myOptionArguments.put(o, myCurrentOptionAndArgsLeft.first);
    }
    else if (myCurrentOptionAndArgsLeft.second == 0) {
      myCurrentOptionAndArgsLeft = null;
      myExcessArguments.add(o);
    }
  }

  /**
   * Validates a single argument value and records its metadata; bad values are collected
   * for later {@link #isBadValue(PsiElement)} queries.
   */
  private void processArgument(@NotNull final CommandLineArgument o, final Argument argumentInfo) {
    myArguments.put(o, argumentInfo);
    if (!argumentInfo.isValid(o.getText())) {
      myBadValues.add(o);
    }
  }

  @Override
  public void visitWhiteSpace(final PsiWhiteSpace space) {
    super.visitWhiteSpace(space);
    // -aSHORT_OPT_ARGUMENT, but -a NEW_POSITION_ARGUMENT, so whitespace makes sense
    if (myCurrentOptionAndArgsLeft != null && myCurrentOptionAndArgsLeft.second == 0) {
      myCurrentOptionAndArgsLeft = null;
    }
  }

  @Override
  public void visitOption(@NotNull final CommandLineOption o) {
    super.visitOption(o);
    if (myUnusedOptions.containsKey(o.getOptionName())) {
      // Remove from list of available options (under all of its names/aliases)
      final Option option = myUnusedOptions.remove(o.getOptionName());
      for (final String optionName : option.getAllNames()) {
        myUnusedOptions.remove(optionName);
      }
      final Pair<Integer, Argument> argumentAndQuantity = option.getArgumentAndQuantity();
      if (argumentAndQuantity != null) {
        myCurrentOptionAndArgsLeft = Pair.create(option, argumentAndQuantity.first);
      }
      else {
        // Consistency fix: use Pair.create like everywhere else instead of "new Pair<...>".
        myCurrentOptionAndArgsLeft = Pair.create(option, 0);
      }
    }
    else {
      myBadValues.add(o); //No such option available
    }
  }

  @Nullable
  @Override
  public Pair<Boolean, Argument> getNextArg() {
    if (myCurrentOptionAndArgsLeft != null && myCurrentOptionAndArgsLeft.second > 0) {
      // Next arg is option arg
      final Pair<Integer, Argument> argumentAndQuantity = myCurrentOptionAndArgsLeft.first.getArgumentAndQuantity();
      if (argumentAndQuantity != null) {
        // Option argument is always mandatory: https://docs.python.org/2/library/optparse.html#terminology
        return Pair.create(true, argumentAndQuantity.second);
      }
    }
    return myCommand.getArgumentsInfo().getArgument(myCurrentPositionArgument);
  }
}
package life.catalogue.es.name;

import com.fasterxml.jackson.core.JsonProcessingException;
import life.catalogue.api.jackson.ApiModule;
import life.catalogue.api.model.DSID;
import life.catalogue.api.model.EditorialDecision;
import life.catalogue.api.model.EditorialDecision.Mode;
import life.catalogue.api.model.SimpleName;
import life.catalogue.api.model.Taxon;
import life.catalogue.api.search.NameUsageSearchParameter;
import life.catalogue.api.search.NameUsageSearchRequest;
import life.catalogue.api.search.NameUsageSearchResponse;
import life.catalogue.api.search.NameUsageWrapper;
import life.catalogue.common.tax.AuthorshipNormalizer;
import life.catalogue.dao.DecisionDao;
import life.catalogue.dao.NameDao;
import life.catalogue.dao.TaxonDao;
import life.catalogue.es.EsModule;
import life.catalogue.es.EsReadWriteTestBase;
import life.catalogue.es.EsSetupRule;
import life.catalogue.es.NameUsageIndexService;
import life.catalogue.es.model.NameUsageDocument;
import life.catalogue.es.query.TermQuery;
import life.catalogue.es.query.TermsQuery;
import org.gbif.nameparser.api.Rank;
import org.junit.Ignore;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.io.InputStream;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;

import static java.util.stream.Collectors.toList;
import static life.catalogue.db.PgSetupRule.getSqlSessionFactory;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;

/*
 * Full round-trips into Postgres via DAOs, out of Postgres via the NameUsageWrapperMapper, into Elasticsearch via the NameUsageIndexService
 * and finally out of Elasticsearch via the NameUsageSearchService. We have to massage the in-going and out-going name usages slightly to
 * allow them to be compared, but not much. (For example the recursive query we execute in Postgres, and the resulting sort order, cannot be
 * emulated with Elasticsearch.)
 */
// @Ignore
public class NameUsageIndexServiceIT extends EsReadWriteTestBase {

  private static final Logger LOG = LoggerFactory.getLogger(NameUsageIndexServiceIT.class);

  // Round-trips taxa Postgres -> Elasticsearch and compares the two representations field by field.
  @Test
  public void indexDatasetTaxaOnly() throws IOException {
    // Create, insert (into postgres) and return 7 taxa belonging to EsSetupRule.DATASET_KEY
    List<Taxon> pgTaxa = createPgTaxa(7);
    createIndexService().indexDataset(EsSetupRule.DATASET_KEY);
    List<String> ids = pgTaxa.stream().map(Taxon::getId).collect(toList());
    NameUsageSearchResponse res = query(new TermsQuery("usageId", ids));
    List<Taxon> esTaxa = res.getResult().stream().map(nuw -> (Taxon) nuw.getUsage()).collect(toList());
    // Null out volatile fields and normalize ordering on both sides before comparing.
    massageTaxa(pgTaxa);
    massageTaxa(esTaxa);
    assertEquals(pgTaxa, esTaxa);
  }

  // Verifies that creating a decision through the DAO syncs it into the search index.
  @Test
  public void createEditorialDecision() {
    // Insert 3 taxa into postgres
    NameUsageIndexService svc = createIndexService();
    List<Taxon> pgTaxa = createPgTaxa(3);
    // Pump them over to Elasticsearch
    svc.indexDataset(EsSetupRule.DATASET_KEY);
    // Make 1st taxon the "subject" of an editorial decision
    Taxon edited = pgTaxa.get(0);
    EditorialDecision decision = new EditorialDecision();
    decision.setSubject(SimpleName.of(edited));
    decision.setMode(Mode.UPDATE);
    decision.setDatasetKey(edited.getDatasetKey());
    decision.setSubjectDatasetKey(edited.getDatasetKey());
    decision.setCreatedBy(edited.getCreatedBy());
    decision.setModifiedBy(edited.getCreatedBy());
    // Save the decision to postgres: triggers sync() on the index service
    DecisionDao dao = new DecisionDao(getSqlSessionFactory(), svc);
    dao.create(decision, 0);
    // Searching by decision mode should find exactly the one edited taxon.
    NameUsageSearchRequest request = new NameUsageSearchRequest();
    request.addFilter(NameUsageSearchParameter.DECISION_MODE, Mode.UPDATE);
    NameUsageSearchResponse res = search(request);
    assertEquals(1, res.getResult().size());
    assertEquals(edited.getId(), res.getResult().get(0).getUsage().getId());
  }

  // Regression test for issue #407; currently @Ignore'd.
  @Test
  @Ignore
  public void issue407() throws IOException {
    int USER_ID = 10;
    int DATASET_KEY = 11;
    // Extract a taxon from the JSON pasted by thomas into #407. That JSON doesn't have a JSON key (that was the issue), but
    // that suits us fine now.
    InputStream is = getClass().getResourceAsStream("/elastic/Issue407_document.json");
    NameUsageDocument doc = EsModule.readDocument(is);
    NameUsageWrapper nuw = NameUsageWrapperConverter.inflate(doc.getPayload());
    NameUsageWrapperConverter.enrichPayload(nuw, doc);
    Taxon taxon = (Taxon) nuw.getUsage();
    // Insert that taxon into Postgres (name first, then the usage itself)
    NameDao ndao = new NameDao(getSqlSessionFactory(), new AuthorshipNormalizer(Collections.emptyMap()));
    DSID<String> dsid = ndao.create(taxon.getName(), USER_ID);
    LOG.info(">>>>>>> Name inserted into database. ID: {}\n", dsid.getId());
    TaxonDao tdao = new TaxonDao(getSqlSessionFactory(), NameUsageIndexService.passThru());
    dsid = tdao.create(taxon, USER_ID);
    LOG.info(">>>>>>> Taxon inserted into database. ID: {}\n", EsModule.writeDebug(taxon));
    // Index the dataset containing the taxon
    NameUsageIndexService svc = createIndexService();
    svc.indexDataset(DATASET_KEY);
    // make sure the decision is empty
    NameUsageSearchResponse res = query(new TermQuery("usageId", dsid.getId())); // Query ES for the usage
    assertEquals(1, res.getResult().size()); // Yes, it's there!
    assertNull(res.getResult().get(0).getDecisions()); // and no decision key yet
    // Now create the decision
    is = getClass().getResourceAsStream("/elastic/Issue407_decision.json");
    EditorialDecision decision = ApiModule.MAPPER.readValue(is, EditorialDecision.class);
    // the taxon has been assigned a new id, use it for the decision
    decision.getSubject().setId(taxon.getId());
    DecisionDao ddao = new DecisionDao(getSqlSessionFactory(), svc);
    int key = ddao.create(decision, USER_ID);
    LOG.info(">>>>>>> Decision inserted into database: {}\n", EsModule.writeDebug(decision));
    res = query(new TermQuery("decisionKey", key)); // Query ES for the decision key
    assertEquals(1, res.getResult().size()); // Yes, it's there!
    assertEquals(taxon.getId(), res.getResult().get(0).getUsage().getId()); // And it belongs to the taxon we just inserted
    res = query(new TermQuery("usageId", dsid.getId())); // Query ES for the usage
    assertEquals(1, res.getResult().size()); // Yes, it's there!
    assertEquals(key, (int) res.getResult().get(0).getDecisions().get(0).getKey()); // make sure it has the decision key
  }

  // Verifies that re-pointing a decision at another taxon re-indexes both the old and new subject.
  @Test
  public void updateEditorialDecision() {
    // Insert 3 taxa into postgres
    NameUsageIndexService svc = createIndexService();
    List<Taxon> pgTaxa = createPgTaxa(3);
    // Pump them over to Elasticsearch
    svc.indexDataset(EsSetupRule.DATASET_KEY);
    // Make 1st taxon the "subject" of an editorial decision
    Taxon edited = pgTaxa.get(0);
    EditorialDecision decision = new EditorialDecision();
    decision.setSubject(SimpleName.of(edited));
    decision.setMode(Mode.UPDATE);
    decision.setDatasetKey(edited.getDatasetKey());
    decision.setSubjectDatasetKey(edited.getDatasetKey());
    decision.setCreatedBy(edited.getCreatedBy());
    decision.setModifiedBy(edited.getCreatedBy());
    // Save the decision to postgres: triggers sync() on the index service
    DecisionDao dao = new DecisionDao(getSqlSessionFactory(), svc);
    int key = dao.create(decision, edited.getCreatedBy());
    NameUsageSearchRequest request = new NameUsageSearchRequest();
    request.addFilter(NameUsageSearchParameter.DECISION_MODE, Mode.UPDATE);
    NameUsageSearchResponse res = search(request);
    assertEquals(pgTaxa.get(0).getId(), res.getResult().get(0).getUsage().getId());
    decision.setKey(key);
    // Change subject of the decision so now 2 taxa should be deleted first and then re-indexed.
    decision.setSubject(SimpleName.of(pgTaxa.get(1)));
    dao.update(decision, edited.getCreatedBy());
    res = search(request);
    assertEquals(1, res.getResult().size()); // Still only 1 document with this decision key
    assertEquals(pgTaxa.get(1).getId(), res.getResult().get(0).getUsage().getId()); // But it's another document now
  }

  // Verifies that deleting a decision removes it from the indexed document as well.
  @Test
  public void deleteEditorialDecision() throws IOException {
    NameUsageIndexService svc = createIndexService();
    List<Taxon> pgTaxa = createPgTaxa(4);
    // Pump them over to Elasticsearch
    svc.indexDataset(EsSetupRule.DATASET_KEY);
    // Make 3rd taxon the "subject" of an editorial decision
    Taxon edited = pgTaxa.get(2);
    EditorialDecision decision = new EditorialDecision();
    decision.setSubject(SimpleName.of(edited));
    decision.setMode(Mode.UPDATE);
    decision.setDatasetKey(edited.getDatasetKey());
    decision.setSubjectDatasetKey(edited.getDatasetKey());
    decision.setCreatedBy(edited.getCreatedBy());
    decision.setModifiedBy(edited.getCreatedBy());
    // Save the decision to postgres: triggers sync() on the index service
    DecisionDao dao = new DecisionDao(getSqlSessionFactory(), svc);
    int key = dao.create(decision, edited.getCreatedBy());
    NameUsageSearchRequest request = new NameUsageSearchRequest();
    request.addFilter(NameUsageSearchParameter.DECISION_MODE, Mode.UPDATE);
    NameUsageSearchResponse res = search(request);
    assertEquals(pgTaxa.get(2).getId(), res.getResult().get(0).getUsage().getId());
    // Deleting the decision must strip it from the indexed usage document.
    dao.delete(key, 0);
    res = query(new TermQuery("usageId", pgTaxa.get(2).getId()));
    assertNull(res.getResult().get(0).getDecisions());
  }

  // Some JSON to send using the REST API; dev helper, not a test.
  void printDecision() throws JsonProcessingException {
    SimpleName sn = new SimpleName("s1", "Larus Fuscus", Rank.SPECIES);
    EditorialDecision decision = new EditorialDecision();
    decision.setSubject(sn);
    decision.setMode(Mode.UPDATE);
    decision.setDatasetKey(11);
    decision.setCreatedBy(0);
    decision.setModifiedBy(0);
    System.out.println(EsModule.writeDebug(decision));
  }

  // Normalizes a taxon list for comparison: clears timestamps and sorts by id.
  private static void massageTaxa(List<Taxon> taxa) {
    // Cannot compare created and modified fields (probably current time when null)
    taxa.forEach(t -> {
      t.setCreated(null);
      t.setModified(null);
      t.getName().setCreated(null);
      t.getName().setModified(null);
    });
    // The order in which taxa flow from Postgres to Elasticsearch is impossible to reproduce with an es query, so just
    // re-order by id
    taxa.sort(Comparator.comparing(Taxon::getId));
  }
}
package com.xthena.common.domain; // default package import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.GeneratedValue; import static javax.persistence.GenerationType.IDENTITY; import javax.persistence.Id; import javax.persistence.Table; /** * CommCfilds entity. @author MyEclipse Persistence Tools */ @Entity @Table(name="t_comm_cfileds" ,catalog="xhf" ) public class CommCfilds implements java.io.Serializable { // Fields private Long fid; private String fcode; private String fname; private Long ftype; private String fvalues; private String fdvalue; private Integer fisshow; private String forderstr; private Integer flength; private String fcname; private Integer freqired; private Integer fissys; private Integer fcolwidth; private Integer fheight; private Integer fisgridshow; private Integer fgridwidth; private Integer fminlength; // Constructors /** default constructor */ public CommCfilds() { } /** full constructor */ public CommCfilds(String fcode, String fname, Long ftype, String fvalues, String fdvalue, Integer fisshow, String forderstr, Integer flength, String fcname, Integer freqired, Integer fissys, Integer fcolwidth, Integer fheight, Integer fisgridshow, Integer fgridwidth, Integer fminlength) { this.fcode = fcode; this.fname = fname; this.ftype = ftype; this.fvalues = fvalues; this.fdvalue = fdvalue; this.fisshow = fisshow; this.forderstr = forderstr; this.flength = flength; this.fcname = fcname; this.freqired = freqired; this.fissys = fissys; this.fcolwidth = fcolwidth; this.fheight = fheight; this.fisgridshow = fisgridshow; this.fgridwidth = fgridwidth; this.fminlength = fminlength; } // Property accessors @Id @GeneratedValue(strategy=IDENTITY) @Column(name="fid", unique=true, nullable=false) public Long getFid() { return this.fid; } public void setFid(Long fid) { this.fid = fid; } @Column(name="fcode", length=64) public String getFcode() { return this.fcode; } public void setFcode(String fcode) { this.fcode = fcode; 
} @Column(name="fname", length=64) public String getFname() { return this.fname; } public void setFname(String fname) { this.fname = fname; } @Column(name="ftype") public Long getFtype() { return this.ftype; } public void setFtype(Long ftype) { this.ftype = ftype; } @Column(name="fvalues", length=1024) public String getFvalues() { return this.fvalues; } public void setFvalues(String fvalues) { this.fvalues = fvalues; } @Column(name="fdvalue") public String getFdvalue() { return this.fdvalue; } public void setFdvalue(String fdvalue) { this.fdvalue = fdvalue; } @Column(name="fisshow") public Integer getFisshow() { return this.fisshow; } public void setFisshow(Integer fisshow) { this.fisshow = fisshow; } @Column(name="forderstr", length=64) public String getForderstr() { return this.forderstr; } public void setForderstr(String forderstr) { this.forderstr = forderstr; } @Column(name="flength") public Integer getFlength() { return this.flength; } public void setFlength(Integer flength) { this.flength = flength; } @Column(name="fcname", length=64) public String getFcname() { return this.fcname; } public void setFcname(String fcname) { this.fcname = fcname; } @Column(name="freqired") public Integer getFreqired() { return this.freqired; } public void setFreqired(Integer freqired) { this.freqired = freqired; } @Column(name="fissys") public Integer getFissys() { return this.fissys; } public void setFissys(Integer fissys) { this.fissys = fissys; } @Column(name="fcolwidth") public Integer getFcolwidth() { return this.fcolwidth; } public void setFcolwidth(Integer fcolwidth) { this.fcolwidth = fcolwidth; } @Column(name="fheight") public Integer getFheight() { return this.fheight; } public void setFheight(Integer fheight) { this.fheight = fheight; } @Column(name="fisgridshow") public Integer getFisgridshow() { return this.fisgridshow; } public void setFisgridshow(Integer fisgridshow) { this.fisgridshow = fisgridshow; } @Column(name="fgridwidth") public Integer getFgridwidth() { 
return this.fgridwidth; } public void setFgridwidth(Integer fgridwidth) { this.fgridwidth = fgridwidth; } @Column(name="fminlength") public Integer getFminlength() { return this.fminlength; } public void setFminlength(Integer fminlength) { this.fminlength = fminlength; } }
package com.admarvel.android.ads.internal.network.async;

import android.content.Context;
import android.content.SharedPreferences;
import android.os.AsyncTask;
import android.util.Log;
import com.admarvel.android.ads.AdMarvelAd;
import com.admarvel.android.ads.AdMarvelAd.AdType;
import com.admarvel.android.ads.AdMarvelUtils;
import com.admarvel.android.ads.AdMarvelUtils.ErrorReason;
import com.admarvel.android.ads.AdMarvelUtils.SDKAdNetwork;
import com.admarvel.android.ads.AdMarvelView;
import com.admarvel.android.ads.AdMarvelView.C0372a;
import com.admarvel.android.ads.internal.AdMarvelViewPrivate;
import com.admarvel.android.ads.internal.AdMarvelXMLElement;
import com.admarvel.android.ads.internal.AdMarvelXMLReader;
import com.admarvel.android.ads.internal.Utils;
import com.admarvel.android.ads.internal.mediation.AdMarvelAdapterInstances;
import com.admarvel.android.ads.internal.mediation.AdMarvelAnalyticsAdapterInstances;
import com.admarvel.android.ads.internal.network.AdFetcher;
import com.admarvel.android.ads.internal.util.Logging;
import com.admarvel.android.ads.nativeads.AdMarvelNativeAd;
import com.admarvel.android.ads.nativeads.AdMarvelNativeAd.RequestParameters.Builder;
import com.facebook.ads.BuildConfig;
import java.lang.ref.WeakReference;
import java.util.HashMap;
import java.util.Map;
import javax.xml.parsers.ParserConfigurationException;

/* renamed from: com.admarvel.android.ads.internal.network.async.c */
// NOTE(review): this class is decompiled, obfuscated output (identifiers like f971a/m441a are
// decompiler-generated). Comments below describe what the visible code does; intent is inferred
// and hedged where the decompilation is ambiguous. The code is documented as-is and NOT refactored,
// because the statement order and duplicated catch blocks may carry behavior the decompiler mangled.
public class AdMarvelViewAsyncTask extends AsyncTask<Object, Object, Object> {
    // Target/extra parameters accumulated for the ad request (presumably key/value targeting data).
    private Map<String, Object> f971a;
    // Weak reference to the banner view that initiated the request (set in doInBackground from params[6]).
    private WeakReference<AdMarvelView> f972b;
    // Weak reference to the view's internal/private companion object (set from params[13]).
    private WeakReference<AdMarvelViewPrivate> f973c;
    // Weak reference to the Android Context the task was created with.
    private final WeakReference<Context> f974d;

    public AdMarvelViewAsyncTask(Context context) {
        this.f971a = new HashMap();
        this.f974d = new WeakReference(context);
    }

    // Returns true when the server response XML describes a native ad: either <ad type="native">
    // or <ad type="sdkcall" native="1">. Any parse failure is swallowed and treated as "not native".
    private boolean m441a(String str) {
        if (str == null) {
            return false;
        }
        try {
            AdMarvelXMLReader adMarvelXMLReader = new AdMarvelXMLReader();
            adMarvelXMLReader.parseXMLString(str);
            AdMarvelXMLElement parsedXMLData = adMarvelXMLReader.getParsedXMLData();
            if (parsedXMLData == null) {
                return false;
            }
            if (parsedXMLData.getName().equals("ad")) {
                String str2 = (String) parsedXMLData.getAttributes().get("type");
                if ("native".equals(str2)) {
                    return true;
                }
                if ("sdkcall".equals(str2) && parsedXMLData.getAttributes().containsKey("native")) {
                    if ("1".equals((String) parsedXMLData.getAttributes().get("native"))) {
                        return true;
                    }
                }
            }
            return false;
        } catch (ParserConfigurationException e) {
            e.printStackTrace();
            return false;
        } catch (Exception e2) {
            e2.printStackTrace();
            return false;
        }
    }

    // Fetches and parses a banner ad off the UI thread.
    // params layout (positional, untyped — decompiled varargs): [0] extra target params map,
    // [1] partner id, [2] site id, [3..5] request strings/int, [6] AdMarvelView, [7] int,
    // [8] previous/initial response string, [9..12] Boolean flags, [13] AdMarvelViewPrivate.
    // Returns an AdMarvelNativeAd, an AdMarvelAd (possibly with AdType.ERROR / code 303), or null.
    protected Object doInBackground(Object... params) {
        Map map = (Map) params[0];
        String str = (String) params[1];
        String str2 = (String) params[2];
        String str3 = (String) params[3];
        int intValue = ((Integer) params[4]).intValue();
        String str4 = (String) params[5];
        this.f972b = new WeakReference((AdMarvelView) params[6]);
        int intValue2 = ((Integer) params[7]).intValue();
        String str5 = (String) params[8];
        Boolean bool = (Boolean) params[9];
        Boolean bool2 = (Boolean) params[10];
        Boolean bool3 = (Boolean) params[11];
        Boolean bool4 = (Boolean) params[12];
        this.f973c = new WeakReference((AdMarvelViewPrivate) params[13]);
        AdFetcher adFetcher = new AdFetcher();
        Context context = (Context) this.f974d.get();
        if (context == null) {
            // Context already collected: nothing to do.
            return null;
        }
        AdMarvelViewPrivate adMarvelViewPrivate;
        // Merge caller-supplied target params into our map; on any failure the map is dropped entirely.
        if (map != null) {
            try {
                synchronized (map) {
                    this.f971a.putAll(map);
                }
            } catch (Exception e) {
                this.f971a = null;
            }
        }
        // Ask the (reflectively loaded) Mologiq analytics adapter for enhanced targeting params;
        // best-effort — a missing adapter simply yields null.
        try {
            map = AdMarvelAnalyticsAdapterInstances.m341a("com.admarvel.android.admarvelmologiqadapter.AdMarvelMologiqAdapter", context).getEnhancedTargetParams(str2, this.f971a);
        } catch (Exception e2) {
            map = null;
        }
        if (map != null) {
            try {
                if (this.f971a != null) {
                    // Merge both ways so f971a ends up with the union (our values win via the second putAll).
                    map.putAll(this.f971a);
                    this.f971a.putAll(map);
                } else {
                    this.f971a = map;
                }
            } catch (Throwable e3) {
                Logging.log(Log.getStackTraceString(e3));
            }
        }
        boolean z = false;
        int i = 0;
        int i2 = -2;
        AdMarvelView adMarvelView = this.f972b != null ? (AdMarvelView) this.f972b.get() : null;
        if (adMarvelView != null) {
            adMarvelViewPrivate = this.f973c != null ? (AdMarvelViewPrivate) this.f973c.get() : null;
            if (adMarvelViewPrivate != null) {
                z = adMarvelViewPrivate.f669z;
                i2 = adMarvelViewPrivate.f657n;
            }
            // Probe AdColony availability; the status feeds into the fetch request below.
            try {
                int adAvailablityStatus = AdMarvelAdapterInstances.m336a("ADMARVELGUID", "com.admarvel.android.admarveladcolonyadapter.AdMarvelAdColonyAdapter").getAdAvailablityStatus(str2, context);
                if (adAvailablityStatus == 0) {
                    try {
                        Utils.m489a(SDKAdNetwork.ADCOLONY, context, adAvailablityStatus);
                    } catch (Exception e4) {
                        i = adAvailablityStatus;
                    }
                } else if (adAvailablityStatus == 2) {
                    if (Utils.m496a(SDKAdNetwork.ADCOLONY, context)) {
                        adAvailablityStatus = 1;
                    }
                }
                i = adAvailablityStatus;
            } catch (Exception e5) {
                // adapter missing/failed — keep defaults
            }
        }
        // Perform the actual fetch; the offline SDK path takes fewer parameters.
        str5 = AdMarvelView.enableOfflineSDK ? adFetcher.m431a(AdFetcher.AdFetcher.BANNER, context, str3, intValue, str4, this.f971a, str, str2, intValue2, str5, bool.booleanValue(), bool2.booleanValue()) : adFetcher.m432a(AdFetcher.AdFetcher.BANNER, context, str3, intValue, str4, this.f971a, str, str2, intValue2, str5, bool.booleanValue(), bool2.booleanValue(), z, null, null, null, false, i, 0, 0, 0, 0, null, bool3.booleanValue(), bool4.booleanValue(), i2);
        // Native-ad response: build and return an AdMarvelNativeAd instead of a banner ad.
        if (m441a(str5)) {
            try {
                AdMarvelNativeAd adMarvelNativeAd = new AdMarvelNativeAd();
                Builder builder = new Builder();
                builder.context(context);
                builder.partnerId(str);
                builder.siteId(str2);
                builder.targetParams(this.f971a);
                if (adMarvelView != null) {
                    adMarvelNativeAd.setListener(adMarvelView.nativeAdListener);
                    adMarvelNativeAd.setAdMarvelNativeVideoAdListener(adMarvelView.nativeVideoAdListener);
                }
                adMarvelNativeAd.adMarvelNativeAdPrivate.m729a(builder.build(), str5);
                return adMarvelNativeAd;
            } catch (Exception e6) {
                e6.printStackTrace();
                return null;
            }
        }
        // NOTE(review): decompiler artifact — declared as Object yet AdMarvelAd methods are invoked on
        // it below; the original source presumably typed this as AdMarvelAd. Kept byte-identical.
        Object adMarvelAd = new AdMarvelAd(str5, this.f971a, str, str2, str3, intValue, str4, context.getPackageName());
        if (AdMarvelUtils.isLogDumpEnabled()) {
            adMarvelAd.setRequestJson(adFetcher.m433a());
        }
        if (AdMarvelView.enableOfflineSDK) {
            // Point the ad at locally cached creative assets.
            SharedPreferences sharedPreferences = context.getSharedPreferences("admarvel_preferences", 0);
            String string = sharedPreferences.getString("banner_folder", "NULL");
            adMarvelAd.setOfflineBaseUrl("file://" + sharedPreferences.getString("childDirectory", "NULL") + "/" + string);
            adMarvelAd.setOfflinekeyUrl(sharedPreferences.getString("childDirectory", "NULL") + "/" + string);
        }
        if (str5 != null) {
            try {
                AdMarvelXMLReader loadAd = adMarvelAd.loadAd(context);
                // NOTE(review): null-check after the loadAd() dereference above — decompiler artifact.
                if (!(adMarvelAd == null || adMarvelAd.getAllowAutoExpand() == null || !adMarvelAd.getAllowAutoExpand().equals(C0372a.AdmarvelAd_AutoExpand_Or_Redirection_Behavior_NotSpecified))) {
                    adMarvelAd.setAllowAutoExpand(adMarvelView.getClientSettingOfAutoExpandOrRedirection());
                }
                if (loadAd == null) {
                    // Parse failed: flag as error 303.
                    adMarvelAd.setAdType(AdType.ERROR);
                    adMarvelAd.setErrorCode(303);
                    return adMarvelAd;
                } else if (adMarvelAd.getSdkNetwork() == null || adMarvelAd.getSdkNetwork().length() <= 0) {
                    // Plain AdMarvel ad, no mediation — done.
                    return adMarvelAd;
                } else {
                    // Mediated ad: hand off to the named SDK network adapter.
                    try {
                        adMarvelViewPrivate = this.f973c != null ? (AdMarvelViewPrivate) this.f973c.get() : null;
                        if (adMarvelViewPrivate.f645a == null) {
                            adMarvelViewPrivate.f645a = new AdMarvelAdapterInstances();
                        }
                        adMarvelViewPrivate.f646b = adMarvelViewPrivate.f645a.m339b(adMarvelAd.getSdkAdNetwork().name());
                        adMarvelViewPrivate.f647c = true;
                        return (((AdMarvelView) this.f972b.get()) == null || adMarvelViewPrivate.f646b == null) ? adMarvelAd : adMarvelViewPrivate.f646b.loadAd(adMarvelAd, loadAd);
                    } catch (Throwable e7) {
                        Logging.log(Log.getStackTraceString(e7));
                        adMarvelAd.setAdType(AdType.ERROR);
                        adMarvelAd.setErrorCode(303);
                        return adMarvelAd;
                    }
                }
            } catch (Throwable e72) {
                try {
                    Logging.log(Log.getStackTraceString(e72));
                    adMarvelAd.setAdType(AdType.ERROR);
                    adMarvelAd.setErrorCode(303);
                    return adMarvelAd;
                } catch (Exception e8) {
                    adMarvelAd.setAdType(AdType.ERROR);
                    adMarvelAd.setErrorCode(303);
                    return adMarvelAd;
                }
            }
        }
        // No response at all: error 303.
        adMarvelAd.setAdType(AdType.ERROR);
        adMarvelAd.setErrorCode(303);
        return adMarvelAd;
    }

    // Dispatches the background result on the UI thread:
    //  - AdMarvelNativeAd results are ignored here (handled via listeners set in doInBackground);
    //  - AdMarvelAd with AdType.ERROR -> failure callback on the view's listener;
    //  - SDKCALL ads are routed to the private view for mediation/disable handling;
    //  - anything else is rendered via adMarvelViewPrivate.m297b;
    //  - a null result reports error 303 to the listener.
    protected void onPostExecute(Object object) {
        ErrorReason a;
        super.onPostExecute(object);
        if (!(object instanceof AdMarvelNativeAd)) {
            AdMarvelViewPrivate adMarvelViewPrivate;
            AdMarvelView adMarvelView;
            if (object instanceof AdMarvelAd) {
                try {
                    adMarvelViewPrivate = (AdMarvelViewPrivate) this.f973c.get();
                    AdMarvelAd adMarvelAd = (AdMarvelAd) object;
                    if (adMarvelAd.getAdType() == AdType.ERROR) {
                        // Error result: notify the failure listener and bail.
                        adMarvelView = (AdMarvelView) this.f972b.get();
                        if (adMarvelView != null) {
                            adMarvelViewPrivate.f647c = false;
                            a = Utils.m480a(adMarvelAd.getErrorCode());
                            adMarvelView.getListenerImpl().m313a(adMarvelView.getContext(), adMarvelView, Utils.m478a(a), a, adMarvelAd.getSiteId(), adMarvelAd.getId(), adMarvelAd.getTargetParams(), adMarvelAd.getIpAddress());
                            return;
                        }
                        return;
                    }
                    // "Ad fetched" model: deliver the raw ad to the listener instead of rendering.
                    if (this.f972b.get() != null) {
                        AdMarvelView adMarvelView2 = (AdMarvelView) this.f972b.get();
                        if (adMarvelView2 != null && adMarvelView2.isAdFetchedModel()) {
                            if (adMarvelView2.getListenerImpl() != null) {
                                adMarvelView2.getListenerImpl().m319a(adMarvelView2, adMarvelAd);
                                return;
                            }
                            return;
                        }
                    }
                    if (adMarvelAd.getAdType() == AdType.SDKCALL) {
                        Context context;
                        if (adMarvelAd.getSdkAdNetwork() == SDKAdNetwork.GENERIC) {
                            AdMarvelView adMarvelView3 = (AdMarvelView) this.f972b.get();
                            context = (Context) this.f974d.get();
                            if (adMarvelView3 != null && context != null && adMarvelView3.mAdMarvelViewPrivate != null) {
                                adMarvelView3.mAdMarvelViewPrivate.m289a(adMarvelAd, context);
                                return;
                            }
                            return;
                        } else if (adMarvelAd.getSdkNetwork() != null) {
                            // Named mediation network: delegate with the accumulated target params.
                            context = (Context) this.f974d.get();
                            if (adMarvelViewPrivate != null && context != null) {
                                adMarvelViewPrivate.m293a(this.f971a, adMarvelAd, adMarvelAd.getSdkNetwork(), context);
                                return;
                            }
                            return;
                        } else if (adMarvelAd.isDisableAdrequest()) {
                            // Server asked to suspend ad requests for a duration.
                            String disableAdDuration = adMarvelAd.getDisableAdDuration();
                            if (disableAdDuration != null) {
                                context = (Context) this.f974d.get();
                                if (adMarvelViewPrivate != null && context != null) {
                                    adMarvelViewPrivate.m291a(disableAdDuration, adMarvelAd, context);
                                    return;
                                }
                                return;
                            }
                        }
                    }
                    // Default: render/process the ad.
                    if (adMarvelViewPrivate != null) {
                        adMarvelViewPrivate.m297b(adMarvelAd);
                    }
                } catch (Throwable e) {
                    // Any dispatch failure is reported as error 303 to the listener.
                    Logging.log(Log.getStackTraceString(e));
                    adMarvelViewPrivate = this.f973c != null ? (AdMarvelViewPrivate) this.f973c.get() : null;
                    if (adMarvelViewPrivate != null) {
                        adMarvelViewPrivate.f647c = false;
                    }
                    a = Utils.m480a(303);
                    int a2 = Utils.m478a(a);
                    adMarvelView = (AdMarvelView) this.f972b.get();
                    AdMarvelAd adMarvelAd2 = (AdMarvelAd) object;
                    if (adMarvelView != null && adMarvelAd2 != null) {
                        adMarvelView.getListenerImpl().m313a(adMarvelView.getContext(), adMarvelView, a2, a, adMarvelAd2.getSiteId(), adMarvelAd2.getId(), adMarvelAd2.getTargetParams(), adMarvelAd2.getIpAddress());
                    }
                }
            } else if (object == null) {
                // Background task produced nothing (e.g. context collected): report error 303.
                try {
                    adMarvelView = (AdMarvelView) this.f972b.get();
                    if (adMarvelView != null) {
                        a = Utils.m480a(303);
                        adMarvelView.getListenerImpl().m313a(adMarvelView.getContext(), adMarvelView, Utils.m478a(a), a, null, 0, null, BuildConfig.FLAVOR);
                    }
                    adMarvelViewPrivate = this.f973c != null ? (AdMarvelViewPrivate) this.f973c.get() : null;
                    if (adMarvelViewPrivate != null) {
                        adMarvelViewPrivate.f647c = false;
                    }
                } catch (Exception e2) {
                    e2.printStackTrace();
                }
            }
        }
    }
}
/**
 * Copyright 2015-2018 The OpenZipkin Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package zipkin.internal;

import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import zipkin.Annotation;
import zipkin.BinaryAnnotation;
import zipkin.Constants;
import zipkin2.DependencyLink;
import zipkin2.Endpoint;
import zipkin2.Span;
import zipkin2.Span.Kind;

import static zipkin.BinaryAnnotation.Type.BOOL;
import static zipkin.Constants.CLIENT_ADDR;
import static zipkin.Constants.LOCAL_COMPONENT;
import static zipkin.Constants.SERVER_ADDR;
import static zipkin.internal.Util.lowerHexToUnsignedLong;
import static zipkin.internal.Util.writeBase64Url;

/** This converts {@link zipkin.Span} instances to {@link Span} and vice versa. */
public final class V2SpanConverter {

  /**
   * Converts the input, parsing RPC annotations into {@link Span#kind()}.
   *
   * <p>A single v1 span can carry annotations from multiple hosts (e.g. a shared client/server
   * span), so the result may contain more than one v2 span.
   *
   * @return a span for each unique {@link Annotation#endpoint annotation endpoint} service name.
   */
  public static List<Span> fromSpan(zipkin.Span source) {
    Builders builders = new Builders(source);
    // add annotations unless they are "core"
    builders.processAnnotations(source);
    // convert binary annotations to tags and addresses
    builders.processBinaryAnnotations(source);
    return builders.build();
  }

  /**
   * Stateful helper that accumulates one {@link Span.Builder} per distinct local endpoint seen in
   * the v1 span, splitting a shared v1 span into separate client/server (or producer/consumer)
   * v2 spans.
   */
  static final class Builders {
    // One builder per local endpoint; index 0 is the "default" span for endpoint-less data.
    final List<Span.Builder> spans = new ArrayList<>();
    // Core annotations captured while scanning, later used to infer kind, timestamp and duration:
    // cs/cr = client send/recv, sr/ss = server recv/send, ms/mr = message send/recv,
    // ws/wr = wire send/recv.
    Annotation cs = null, sr = null, ss = null, cr = null, ms = null, mr = null, ws = null, wr = null;

    Builders(zipkin.Span source) {
      this.spans.add(newBuilder(source));
    }

    /**
     * First pass: copies non-core annotations onto the builder matching their endpoint, records
     * core annotations into the fields above, then reconciles kind, timestamp and duration.
     */
    void processAnnotations(zipkin.Span source) {
      for (int i = 0, length = source.annotations.size(); i < length; i++) {
        Annotation a = source.annotations.get(i);
        Span.Builder currentSpan = forEndpoint(source, a.endpoint);
        // core annotations require an endpoint. Don't give special treatment when that's missing
        if (a.value.length() == 2 && a.endpoint != null) {
          if (a.value.equals(Constants.CLIENT_SEND)) {
            currentSpan.kind(Kind.CLIENT);
            cs = a;
          } else if (a.value.equals(Constants.SERVER_RECV)) {
            currentSpan.kind(Kind.SERVER);
            sr = a;
          } else if (a.value.equals(Constants.SERVER_SEND)) {
            currentSpan.kind(Kind.SERVER);
            ss = a;
          } else if (a.value.equals(Constants.CLIENT_RECV)) {
            currentSpan.kind(Kind.CLIENT);
            cr = a;
          } else if (a.value.equals(Constants.MESSAGE_SEND)) {
            currentSpan.kind(Kind.PRODUCER);
            ms = a;
          } else if (a.value.equals(Constants.MESSAGE_RECV)) {
            currentSpan.kind(Kind.CONSUMER);
            mr = a;
          } else if (a.value.equals(Constants.WIRE_SEND)) {
            ws = a;
          } else if (a.value.equals(Constants.WIRE_RECV)) {
            wr = a;
          } else {
            currentSpan.addAnnotation(a.timestamp, a.value);
          }
        } else {
          currentSpan.addAnnotation(a.timestamp, a.value);
        }
      }

      // When bridging between event and span model, you can end up missing a start annotation
      // (we can safely synthesize one when span.timestamp + span.duration lands on the end event).
      if (cs == null && endTimestampReflectsSpanDuration(cr, source)) {
        cs = Annotation.create(source.timestamp, "cs", cr.endpoint);
      }
      if (sr == null && endTimestampReflectsSpanDuration(ss, source)) {
        sr = Annotation.create(source.timestamp, "sr", ss.endpoint);
      }

      if (cs != null && sr != null) {
        // in a shared span, the client side owns span duration by annotations or explicit timestamp
        maybeTimestampDuration(source, cs, cr);

        // special-case loopback: We need to make sure on loopback there are two span2s
        Span.Builder client = forEndpoint(source, cs.endpoint);
        Span.Builder server;
        if (closeEnough(cs.endpoint, sr.endpoint)) {
          client.kind(Kind.CLIENT);
          // fork a new span for the server side
          server = newSpanBuilder(source, sr.endpoint.toV2()).kind(Kind.SERVER);
        } else {
          server = forEndpoint(source, sr.endpoint);
        }

        // the server side is smaller than that, we have to read annotations to find out
        server.shared(true).timestamp(sr.timestamp);
        if (ss != null) server.duration(ss.timestamp - sr.timestamp);
        if (cr == null && source.duration == null) client.duration(null); // one-way has no duration
      } else if (cs != null && cr != null) {
        maybeTimestampDuration(source, cs, cr);
      } else if (sr != null && ss != null) {
        maybeTimestampDuration(source, sr, ss);
      } else { // otherwise, the span is incomplete. revert special-casing
        for (Span.Builder next : spans) {
          if (Kind.CLIENT.equals(next.kind())) {
            if (cs != null) next.timestamp(cs.timestamp);
            if (cr != null) next.addAnnotation(cr.timestamp, cr.value);
          } else if (Kind.SERVER.equals(next.kind())) {
            if (sr != null) next.timestamp(sr.timestamp);
            if (ss != null) next.addAnnotation(ss.timestamp, ss.value);
          }
        }
        if (source.timestamp != null) {
          spans.get(0).timestamp(source.timestamp).duration(source.duration);
        }
      }

      // Span v1 format did not have a shared flag. By convention, span.timestamp being absent
      // implied shared. When we only see the server-side, carry this signal over.
      if (cs == null && (sr != null && source.timestamp == null)) {
        forEndpoint(source, sr.endpoint).shared(true);
      }

      // ms and mr are not supposed to be in the same span, but in case they are..
      if (ms != null && mr != null) {
        // special-case loopback: We need to make sure on loopback there are two span2s
        Span.Builder producer = forEndpoint(source, ms.endpoint);
        Span.Builder consumer;
        if (closeEnough(ms.endpoint, mr.endpoint)) {
          producer.kind(Kind.PRODUCER);
          // fork a new span for the consumer side
          consumer = newSpanBuilder(source, mr.endpoint.toV2()).kind(Kind.CONSUMER);
        } else {
          consumer = forEndpoint(source, mr.endpoint);
        }

        consumer.shared(true);
        if (wr != null) {
          consumer.timestamp(wr.timestamp).duration(mr.timestamp - wr.timestamp);
        } else {
          consumer.timestamp(mr.timestamp);
        }

        producer.timestamp(ms.timestamp).duration(ws != null ? ws.timestamp - ms.timestamp : null);
      } else if (ms != null) {
        maybeTimestampDuration(source, ms, ws);
      } else if (mr != null) {
        if (wr != null) {
          maybeTimestampDuration(source, wr, mr);
        } else {
          maybeTimestampDuration(source, mr, null);
        }
      } else {
        // wire annotations without message annotations: pass them through as plain annotations
        if (ws != null) forEndpoint(source, ws.endpoint).addAnnotation(ws.timestamp, ws.value);
        if (wr != null) forEndpoint(source, wr.endpoint).addAnnotation(wr.timestamp, wr.value);
      }
    }

    /** True when the span's explicit timestamp + duration lands exactly on the end annotation. */
    static boolean endTimestampReflectsSpanDuration(Annotation end, zipkin.Span source) {
      return end != null
          && source.timestamp != null
          && source.duration != null
          && source.timestamp + source.duration == end.timestamp;
    }

    /**
     * Sets timestamp/duration on the builder owning {@code begin}'s endpoint, preferring the v1
     * span's explicit values over the annotation-derived ones.
     */
    void maybeTimestampDuration(zipkin.Span source, Annotation begin, @Nullable Annotation end) {
      Span.Builder span2 = forEndpoint(source, begin.endpoint);
      if (source.timestamp != null && source.duration != null) {
        span2.timestamp(source.timestamp).duration(source.duration);
      } else {
        span2.timestamp(begin.timestamp);
        if (end != null) span2.duration(end.timestamp - begin.timestamp);
      }
    }

    /**
     * Second pass: converts binary annotations into v2 tags, and address annotations
     * (ca/sa/ma, type BOOL) into {@link Span.Builder#remoteEndpoint}.
     */
    void processBinaryAnnotations(zipkin.Span source) {
      zipkin.Endpoint ca = null, sa = null, ma = null;
      for (int i = 0, length = source.binaryAnnotations.size(); i < length; i++) {
        BinaryAnnotation b = source.binaryAnnotations.get(i);
        // BOOL-typed entries with the well-known address keys identify the remote side
        if (b.type == BOOL) {
          if (Constants.CLIENT_ADDR.equals(b.key)) {
            ca = b.endpoint;
          } else if (Constants.SERVER_ADDR.equals(b.key)) {
            sa = b.endpoint;
          } else if (Constants.MESSAGE_ADDR.equals(b.key)) {
            ma = b.endpoint;
          } else {
            forEndpoint(source, b.endpoint).putTag(b.key, b.value[0] == 1 ? "true" : "false");
          }
          continue;
        }
        Span.Builder currentSpan = forEndpoint(source, b.endpoint);
        switch (b.type) {
          case BOOL:
            break; // already handled
          case STRING:
            // don't add marker "lc" tags
            if (Constants.LOCAL_COMPONENT.equals(b.key) && b.value.length == 0) continue;
            currentSpan.putTag(b.key, new String(b.value, Util.UTF_8));
            break;
          case BYTES:
            currentSpan.putTag(b.key, writeBase64Url(b.value));
            break;
          case I16:
            currentSpan.putTag(b.key, Short.toString(ByteBuffer.wrap(b.value).getShort()));
            break;
          case I32:
            currentSpan.putTag(b.key, Integer.toString(ByteBuffer.wrap(b.value).getInt()));
            break;
          case I64:
            currentSpan.putTag(b.key, Long.toString(ByteBuffer.wrap(b.value).getLong()));
            break;
          case DOUBLE:
            double wrapped = Double.longBitsToDouble(ByteBuffer.wrap(b.value).getLong());
            currentSpan.putTag(b.key, Double.toString(wrapped));
            break;
        }
      }

      // special-case when we are missing core annotations, but we have both address annotations
      if ((cs == null && sr == null) && (ca != null && sa != null)) {
        forEndpoint(source, ca).remoteEndpoint(sa.toV2());
        return;
      }

      // server address (sa) is the remote of the client side
      if (sa != null) {
        if (cs != null && !closeEnough(sa, cs.endpoint)) {
          forEndpoint(source, cs.endpoint).remoteEndpoint(sa.toV2());
        } else if (cr != null && !closeEnough(sa, cr.endpoint)) {
          forEndpoint(source, cr.endpoint).remoteEndpoint(sa.toV2());
        } else if (cs == null && cr == null && sr == null && ss == null) { // no core annotations
          forEndpoint(source, null)
              .kind(Kind.CLIENT)
              .remoteEndpoint(sa.toV2());
        }
      }

      // client address (ca) is the remote of the server side
      if (ca != null) {
        if (sr != null && !closeEnough(ca, sr.endpoint)) {
          forEndpoint(source, sr.endpoint).remoteEndpoint(ca.toV2());
        }
        if (ss != null && !closeEnough(ca, ss.endpoint)) {
          forEndpoint(source, ss.endpoint).remoteEndpoint(ca.toV2());
        } else if (cs == null && cr == null && sr == null && ss == null) { // no core annotations
          forEndpoint(source, null)
              .kind(Kind.SERVER)
              .remoteEndpoint(ca.toV2());
        }
      }

      // message address (ma) is the remote of producer/consumer sides
      if (ma != null) {
        if (ms != null && !closeEnough(ma, ms.endpoint)) {
          forEndpoint(source, ms.endpoint).remoteEndpoint(ma.toV2());
        }
        if (mr != null && !closeEnough(ma, mr.endpoint)) {
          forEndpoint(source, mr.endpoint).remoteEndpoint(ma.toV2());
        }
      }
    }

    /**
     * Returns the builder whose local endpoint matches {@code e} by service name, claiming an
     * endpoint-less builder or forking a new one when none matches.
     */
    Span.Builder forEndpoint(zipkin.Span source, @Nullable zipkin.Endpoint e) {
      if (e == null) return spans.get(0); // allocate missing endpoint data to first span
      Endpoint converted = e.toV2();
      for (int i = 0, length = spans.size(); i < length; i++) {
        Span.Builder next = spans.get(i);
        Endpoint nextLocalEndpoint = next.localEndpoint();
        if (nextLocalEndpoint == null) {
          next.localEndpoint(converted);
          return next;
        } else if (closeEnough(toEndpoint(nextLocalEndpoint), e)) {
          return next;
        }
      }
      return newSpanBuilder(source, converted);
    }

    /** Forks a new builder (same ids as {@code source}) localized to endpoint {@code e}. */
    Span.Builder newSpanBuilder(zipkin.Span source, Endpoint e) {
      Span.Builder result = newBuilder(source).localEndpoint(e);
      spans.add(result);
      return result;
    }

    /** Materializes every accumulated builder into an immutable {@link Span}. */
    List<Span> build() {
      int length = spans.size();
      if (length == 1) return Collections.singletonList(spans.get(0).build());
      List<Span> result = new ArrayList<>(length);
      for (int i = 0; i < length; i++) {
        result.add(spans.get(i).build());
      }
      return result;
    }
  }

  /** Endpoints are considered the same side of a span when their service names match. */
  static boolean closeEnough(zipkin.Endpoint left, zipkin.Endpoint right) {
    return left.serviceName.equals(right.serviceName);
  }

  /** Copies the identifiers (trace/parent/span id, name, debug) of a v1 span into a v2 builder. */
  static Span.Builder newBuilder(zipkin.Span source) {
    return Span.newBuilder()
        .traceId(source.traceIdString())
        .parentId(source.parentId != null ? Util.toLowerHex(source.parentId) : null)
        .id(Util.toLowerHex(source.id))
        .name(source.name)
        .debug(source.debug);
  }

  /** Converts the input, parsing {@link Span#kind()} into RPC annotations. */
  public static zipkin.Span toSpan(Span in) {
    String traceId = in.traceId();
    zipkin.Span.Builder result = zipkin.Span.builder()
        .traceId(lowerHexToUnsignedLong(traceId))
        .parentId(in.parentId() != null ? lowerHexToUnsignedLong(in.parentId()) : null)
        .id(lowerHexToUnsignedLong(in.id()))
        .debug(in.debug())
        .name(in.name() != null ? in.name() : ""); // avoid a NPE

    // a 32-char trace id carries the high 64 bits in its first 16 chars
    if (traceId.length() == 32) {
      result.traceIdHigh(lowerHexToUnsignedLong(traceId, 0));
    }

    long startTs = in.timestampAsLong(), duration = in.durationAsLong();
    long endTs = startTs != 0L && duration != 0L ? startTs + duration : 0L;
    if (startTs != 0L) {
      result.timestamp(startTs);
      result.duration(duration);
    }

    zipkin.Endpoint local = in.localEndpoint() != null ? toEndpoint(in.localEndpoint()) : null;
    zipkin.Endpoint remote = in.remoteEndpoint() != null ? toEndpoint(in.remoteEndpoint()) : null;
    Kind kind = in.kind();
    // core annotations reconstructed from kind + timestamps (same naming as in Builders)
    Annotation cs = null, sr = null, ss = null, cr = null, ms = null, mr = null, ws = null, wr = null;

    // which address binary annotation (sa/ca/ma) represents the remote endpoint, if any
    String remoteEndpointType = null;

    // tracks whether the local endpoint was written out at least once; if not, a marker "lc"
    // binary annotation is added at the end so the endpoint isn't lost in the v1 format
    boolean wroteEndpoint = false;

    for (int i = 0, length = in.annotations().size(); i < length; i++) {
      zipkin2.Annotation input = in.annotations().get(i);
      Annotation a = Annotation.create(input.timestamp(), input.value(), local);
      if (a.value.length() == 2) {
        if (a.value.equals(Constants.CLIENT_SEND)) {
          kind = Kind.CLIENT;
          cs = a;
          remoteEndpointType = SERVER_ADDR;
        } else if (a.value.equals(Constants.SERVER_RECV)) {
          kind = Kind.SERVER;
          sr = a;
          remoteEndpointType = CLIENT_ADDR;
        } else if (a.value.equals(Constants.SERVER_SEND)) {
          kind = Kind.SERVER;
          ss = a;
        } else if (a.value.equals(Constants.CLIENT_RECV)) {
          kind = Kind.CLIENT;
          cr = a;
        } else if (a.value.equals(Constants.MESSAGE_SEND)) {
          kind = Kind.PRODUCER;
          ms = a;
        } else if (a.value.equals(Constants.MESSAGE_RECV)) {
          kind = Kind.CONSUMER;
          mr = a;
        } else if (a.value.equals(Constants.WIRE_SEND)) {
          ws = a;
        } else if (a.value.equals(Constants.WIRE_RECV)) {
          wr = a;
        } else {
          wroteEndpoint = true;
          result.addAnnotation(a);
        }
      } else {
        wroteEndpoint = true;
        result.addAnnotation(a);
      }
    }

    // synthesize the core annotations implied by the span kind and its timestamps
    if (kind != null) {
      switch (kind) {
        case CLIENT:
          remoteEndpointType = Constants.SERVER_ADDR;
          if (startTs != 0L) cs = Annotation.create(startTs, Constants.CLIENT_SEND, local);
          if (endTs != 0L) cr = Annotation.create(endTs, Constants.CLIENT_RECV, local);
          break;
        case SERVER:
          remoteEndpointType = Constants.CLIENT_ADDR;
          if (startTs != 0L) sr = Annotation.create(startTs, Constants.SERVER_RECV, local);
          if (endTs != 0L) ss = Annotation.create(endTs, Constants.SERVER_SEND, local);
          break;
        case PRODUCER:
          remoteEndpointType = Constants.MESSAGE_ADDR;
          if (startTs != 0L) ms = Annotation.create(startTs, Constants.MESSAGE_SEND, local);
          if (endTs != 0L) ws = Annotation.create(endTs, Constants.WIRE_SEND, local);
          break;
        case CONSUMER:
          remoteEndpointType = Constants.MESSAGE_ADDR;
          if (startTs != 0L && endTs != 0L) {
            wr = Annotation.create(startTs, Constants.WIRE_RECV, local);
            mr = Annotation.create(endTs, Constants.MESSAGE_RECV, local);
          } else if (startTs != 0L) {
            mr = Annotation.create(startTs, Constants.MESSAGE_RECV, local);
          }
          break;
        default:
          throw new AssertionError("update kind mapping");
      }
    }

    for (Map.Entry<String, String> tag : in.tags().entrySet()) {
      wroteEndpoint = true;
      result.addBinaryAnnotation(BinaryAnnotation.create(tag.getKey(), tag.getValue(), local));
    }

    if (cs != null
        || sr != null
        || ss != null
        || cr != null
        || ws != null
        || wr != null
        || ms != null
        || mr != null) {
      if (cs != null) result.addAnnotation(cs);
      if (sr != null) result.addAnnotation(sr);
      if (ss != null) result.addAnnotation(ss);
      if (cr != null) result.addAnnotation(cr);
      if (ws != null) result.addAnnotation(ws);
      if (wr != null) result.addAnnotation(wr);
      if (ms != null) result.addAnnotation(ms);
      if (mr != null) result.addAnnotation(mr);
      wroteEndpoint = true;
    } else if (local != null && remote != null) {
      // special-case when we are missing core annotations, but we have both address annotations
      result.addBinaryAnnotation(BinaryAnnotation.address(CLIENT_ADDR, local));
      wroteEndpoint = true;
      remoteEndpointType = SERVER_ADDR;
    }

    if (remoteEndpointType != null && remote != null) {
      result.addBinaryAnnotation(BinaryAnnotation.address(remoteEndpointType, remote));
    }

    // don't report server-side timestamp on shared or incomplete spans
    if (Boolean.TRUE.equals(in.shared()) && sr != null) {
      result.timestamp(null).duration(null);
    }

    if (local != null && !wroteEndpoint) { // create a dummy annotation
      result.addBinaryAnnotation(BinaryAnnotation.create(LOCAL_COMPONENT, "", local));
    }

    return result.build();
  }

  /** Converts a v2 endpoint to v1, defaulting an absent service name to empty. */
  public static zipkin.Endpoint toEndpoint(Endpoint input) {
    zipkin.Endpoint.Builder result = zipkin.Endpoint.builder()
        .serviceName(input.serviceName() != null ? input.serviceName() : "")
        .port(input.portAsInt());
    if (input.ipv6() != null) {
      result.parseIp(input.ipv6()); // parse first in case there's a mapped IP
    }
    if (input.ipv4() != null) {
      result.parseIp(input.ipv4());
    }
    return result.build();
  }

  // NOTE(review): this duplicates Builders.endTimestampReflectsSpanDuration and appears unused
  // within this file — candidate for removal if no other class in the package calls it.
  static boolean endTimestampReflectsSpanDuration(Annotation end, zipkin.Span source) {
    return end != null
        && source.timestamp != null
        && source.duration != null
        && source.timestamp + source.duration == end.timestamp;
  }

  /** Converts each v2 span to v1 via {@link #toSpan(Span)}. */
  static List<zipkin.Span> toSpans(List<Span> spans) {
    if (spans.isEmpty()) return Collections.emptyList();
    int length = spans.size();
    List<zipkin.Span> span1s = new ArrayList<>(length);
    for (int i = 0; i < length; i++) {
      span1s.add(V2SpanConverter.toSpan(spans.get(i)));
    }
    return span1s;
  }

  /** Converts a v1 dependency link to v2, field by field. */
  public static DependencyLink fromLink(zipkin.DependencyLink link) {
    return DependencyLink.newBuilder()
        .parent(link.parent)
        .child(link.child)
        .callCount(link.callCount)
        .errorCount(link.errorCount).build();
  }

  /** Converts a v2 dependency link to v1, field by field. */
  public static zipkin.DependencyLink toLink(DependencyLink link) {
    return zipkin.DependencyLink.builder()
        .parent(link.parent())
        .child(link.child())
        .callCount(link.callCount())
        .errorCount(link.errorCount()).build();
  }

  /** Bulk version of {@link #toLink(DependencyLink)}. */
  public static List<zipkin.DependencyLink> toLinks(List<DependencyLink> links) {
    if (links.isEmpty()) return Collections.emptyList();
    int length = links.size();
    List<zipkin.DependencyLink> result = new ArrayList<>(length);
    for (int i = 0; i < length; i++) {
      DependencyLink link2 = links.get(i);
      result.add(zipkin.DependencyLink.builder()
          .parent(link2.parent())
          .child(link2.child())
          .callCount(link2.callCount())
          .errorCount(link2.errorCount()).build());
    }
    return result;
  }

  /** Bulk version of {@link #fromLink(zipkin.DependencyLink)}. */
  public static List<DependencyLink> fromLinks(Iterable<zipkin.DependencyLink> links) {
    List<DependencyLink> result = new ArrayList<>();
    for (zipkin.DependencyLink link1 : links) {
      result.add(DependencyLink.newBuilder()
          .parent(link1.parent)
          .child(link1.child)
          .callCount(link1.callCount)
          .errorCount(link1.errorCount).build());
    }
    return result;
  }

  /**
   * Converts each v1 span via {@link #fromSpan(zipkin.Span)}; note the result may be longer than
   * the input because shared spans fork into client and server halves.
   */
  public static List<Span> fromSpans(Iterable<zipkin.Span> spans) {
    List<Span> result = new ArrayList<>();
    for (zipkin.Span span1 : spans) {
      result.addAll(fromSpan(span1));
    }
    return result;
  }
}
/** * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for * license information. * * Code generated by Microsoft (R) AutoRest Code Generator. */ package com.microsoft.azure.management.graphrbac.implementation; import retrofit2.Retrofit; import com.google.common.reflect.TypeToken; import com.microsoft.azure.AzureServiceCall; import com.microsoft.azure.ListOperationCallback; import com.microsoft.azure.management.graphrbac.GraphErrorException; import com.microsoft.azure.management.graphrbac.UserGetMemberGroupsParameters; import com.microsoft.azure.Page; import com.microsoft.azure.PagedList; import com.microsoft.rest.ServiceCall; import com.microsoft.rest.ServiceCallback; import com.microsoft.rest.ServiceResponse; import com.microsoft.rest.Validator; import java.io.IOException; import java.util.List; import okhttp3.ResponseBody; import retrofit2.http.Body; import retrofit2.http.GET; import retrofit2.http.Header; import retrofit2.http.Headers; import retrofit2.http.HTTP; import retrofit2.http.PATCH; import retrofit2.http.Path; import retrofit2.http.POST; import retrofit2.http.Query; import retrofit2.http.Url; import retrofit2.Response; import rx.functions.Func1; import rx.Observable; /** * An instance of this class provides access to all the operations defined * in Users. */ public final class UsersInner { /** The Retrofit service to perform REST calls. */ private UsersService service; /** The service client containing this operation class. */ private GraphRbacManagementClientImpl client; /** * Initializes an instance of UsersInner. * * @param retrofit the Retrofit instance built from a Retrofit Builder. * @param client the instance of the service client containing this operation class. 
*/ public UsersInner(Retrofit retrofit, GraphRbacManagementClientImpl client) { this.service = retrofit.create(UsersService.class); this.client = client; } /** * The interface defining all the services for Users to be * used by Retrofit to perform actually REST calls. */ interface UsersService { @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.graphrbac.Users create" }) @POST("{tenantID}/users") Observable<Response<ResponseBody>> create(@Path("tenantID") String tenantID, @Body UserCreateParametersInner parameters, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent); @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.graphrbac.Users list" }) @GET("{tenantID}/users") Observable<Response<ResponseBody>> list(@Path("tenantID") String tenantID, @Query("$filter") String filter, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent); @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.graphrbac.Users get" }) @GET("{tenantID}/users/{upnOrObjectId}") Observable<Response<ResponseBody>> get(@Path(value = "upnOrObjectId", encoded = true) String upnOrObjectId, @Path("tenantID") String tenantID, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent); @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.graphrbac.Users update" }) @PATCH("{tenantID}/users/{upnOrObjectId}") Observable<Response<ResponseBody>> update(@Path(value = "upnOrObjectId", encoded = true) String upnOrObjectId, @Path("tenantID") String tenantID, @Body UserUpdateParametersInner parameters, @Query("api-version") String apiVersion, 
@Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent); @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.graphrbac.Users delete" }) @HTTP(path = "{tenantID}/users/{upnOrObjectId}", method = "DELETE", hasBody = true) Observable<Response<ResponseBody>> delete(@Path(value = "upnOrObjectId", encoded = true) String upnOrObjectId, @Path("tenantID") String tenantID, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent); @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.graphrbac.Users getMemberGroups" }) @POST("{tenantID}/users/{objectId}/getMemberGroups") Observable<Response<ResponseBody>> getMemberGroups(@Path(value = "objectId", encoded = true) String objectId, @Path("tenantID") String tenantID, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Body UserGetMemberGroupsParameters parameters, @Header("User-Agent") String userAgent); @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.graphrbac.Users listNext" }) @GET Observable<Response<ResponseBody>> listNext(@Url String nextUrl, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent); } /** * Create a new user. * * @param parameters Parameters to create a user. * @return the UserInner object if successful. */ public UserInner create(UserCreateParametersInner parameters) { return createWithServiceResponseAsync(parameters).toBlocking().single().body(); } /** * Create a new user. * * @param parameters Parameters to create a user. * @param serviceCallback the async ServiceCallback to handle successful and failed responses. 
* @return the {@link ServiceCall} object */ public ServiceCall<UserInner> createAsync(UserCreateParametersInner parameters, final ServiceCallback<UserInner> serviceCallback) { return ServiceCall.fromResponse(createWithServiceResponseAsync(parameters), serviceCallback); } /** * Create a new user. * * @param parameters Parameters to create a user. * @return the observable to the UserInner object */ public Observable<UserInner> createAsync(UserCreateParametersInner parameters) { return createWithServiceResponseAsync(parameters).map(new Func1<ServiceResponse<UserInner>, UserInner>() { @Override public UserInner call(ServiceResponse<UserInner> response) { return response.body(); } }); } /** * Create a new user. * * @param parameters Parameters to create a user. * @return the observable to the UserInner object */ public Observable<ServiceResponse<UserInner>> createWithServiceResponseAsync(UserCreateParametersInner parameters) { if (this.client.tenantID() == null) { throw new IllegalArgumentException("Parameter this.client.tenantID() is required and cannot be null."); } if (parameters == null) { throw new IllegalArgumentException("Parameter parameters is required and cannot be null."); } if (this.client.apiVersion() == null) { throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null."); } Validator.validate(parameters); return service.create(this.client.tenantID(), parameters, this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent()) .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<UserInner>>>() { @Override public Observable<ServiceResponse<UserInner>> call(Response<ResponseBody> response) { try { ServiceResponse<UserInner> clientResponse = createDelegate(response); return Observable.just(clientResponse); } catch (Throwable t) { return Observable.error(t); } } }); } private ServiceResponse<UserInner> createDelegate(Response<ResponseBody> response) throws GraphErrorException, 
IOException, IllegalArgumentException { return this.client.restClient().responseBuilderFactory().<UserInner, GraphErrorException>newInstance(this.client.serializerAdapter()) .register(201, new TypeToken<UserInner>() { }.getType()) .registerError(GraphErrorException.class) .build(response); } /** * Gets list of users for the current tenant. * * @return the PagedList&lt;UserInner&gt; object if successful. */ public PagedList<UserInner> list() { ServiceResponse<Page<UserInner>> response = listSinglePageAsync().toBlocking().single(); return new PagedList<UserInner>(response.body()) { @Override public Page<UserInner> nextPage(String nextLink) { return listNextSinglePageAsync(nextLink).toBlocking().single().body(); } }; } /** * Gets list of users for the current tenant. * * @param serviceCallback the async ServiceCallback to handle successful and failed responses. * @return the {@link ServiceCall} object */ public ServiceCall<List<UserInner>> listAsync(final ListOperationCallback<UserInner> serviceCallback) { return AzureServiceCall.fromPageResponse( listSinglePageAsync(), new Func1<String, Observable<ServiceResponse<Page<UserInner>>>>() { @Override public Observable<ServiceResponse<Page<UserInner>>> call(String nextLink) { return listNextSinglePageAsync(nextLink); } }, serviceCallback); } /** * Gets list of users for the current tenant. * * @return the observable to the PagedList&lt;UserInner&gt; object */ public Observable<Page<UserInner>> listAsync() { return listWithServiceResponseAsync() .map(new Func1<ServiceResponse<Page<UserInner>>, Page<UserInner>>() { @Override public Page<UserInner> call(ServiceResponse<Page<UserInner>> response) { return response.body(); } }); } /** * Gets list of users for the current tenant. 
* * @return the observable to the PagedList&lt;UserInner&gt; object */ public Observable<ServiceResponse<Page<UserInner>>> listWithServiceResponseAsync() { return listSinglePageAsync() .concatMap(new Func1<ServiceResponse<Page<UserInner>>, Observable<ServiceResponse<Page<UserInner>>>>() { @Override public Observable<ServiceResponse<Page<UserInner>>> call(ServiceResponse<Page<UserInner>> page) { String nextLink = page.body().nextPageLink(); if (nextLink == null) { return Observable.just(page); } return Observable.just(page).concatWith(listNextWithServiceResponseAsync(nextLink)); } }); } /** * Gets list of users for the current tenant. * * @return the PagedList&lt;UserInner&gt; object wrapped in {@link ServiceResponse} if successful. */ public Observable<ServiceResponse<Page<UserInner>>> listSinglePageAsync() { if (this.client.tenantID() == null) { throw new IllegalArgumentException("Parameter this.client.tenantID() is required and cannot be null."); } if (this.client.apiVersion() == null) { throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null."); } final String filter = null; return service.list(this.client.tenantID(), filter, this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent()) .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<UserInner>>>>() { @Override public Observable<ServiceResponse<Page<UserInner>>> call(Response<ResponseBody> response) { try { ServiceResponse<PageImpl<UserInner>> result = listDelegate(response); return Observable.just(new ServiceResponse<Page<UserInner>>(result.body(), result.response())); } catch (Throwable t) { return Observable.error(t); } } }); } /** * Gets list of users for the current tenant. * * @param filter The filter to apply to the operation. * @return the PagedList&lt;UserInner&gt; object if successful. 
     */
    public PagedList<UserInner> list(final String filter) {
        // Blocks on the first page; subsequent pages are fetched lazily as the
        // PagedList is iterated.
        ServiceResponse<Page<UserInner>> response = listSinglePageAsync(filter).toBlocking().single();
        return new PagedList<UserInner>(response.body()) {
            @Override
            public Page<UserInner> nextPage(String nextLink) {
                return listNextSinglePageAsync(nextLink).toBlocking().single().body();
            }
        };
    }

    /**
     * Gets list of users for the current tenant.
     *
     * @param filter The filter to apply to the operation.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @return the {@link ServiceCall} object
     */
    public ServiceCall<List<UserInner>> listAsync(final String filter, final ListOperationCallback<UserInner> serviceCallback) {
        // AzureServiceCall drives the paging loop: the Func1 supplies each
        // follow-up page from its nextLink until exhausted.
        return AzureServiceCall.fromPageResponse(
            listSinglePageAsync(filter),
            new Func1<String, Observable<ServiceResponse<Page<UserInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<UserInner>>> call(String nextLink) {
                    return listNextSinglePageAsync(nextLink);
                }
            },
            serviceCallback);
    }

    /**
     * Gets list of users for the current tenant.
     *
     * @param filter The filter to apply to the operation.
     * @return the observable to the PagedList&lt;UserInner&gt; object
     */
    public Observable<Page<UserInner>> listAsync(final String filter) {
        // Strips the ServiceResponse envelope, emitting one Page per HTTP page.
        return listWithServiceResponseAsync(filter)
            .map(new Func1<ServiceResponse<Page<UserInner>>, Page<UserInner>>() {
                @Override
                public Page<UserInner> call(ServiceResponse<Page<UserInner>> response) {
                    return response.body();
                }
            });
    }

    /**
     * Gets list of users for the current tenant.
     *
     * @param filter The filter to apply to the operation.
     * @return the observable to the PagedList&lt;UserInner&gt; object
     */
    public Observable<ServiceResponse<Page<UserInner>>> listWithServiceResponseAsync(final String filter) {
        // Recursively concatenates follow-up pages until nextPageLink is null,
        // so subscribers see every page in order.
        return listSinglePageAsync(filter)
            .concatMap(new Func1<ServiceResponse<Page<UserInner>>, Observable<ServiceResponse<Page<UserInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<UserInner>>> call(ServiceResponse<Page<UserInner>> page) {
                    String nextLink = page.body().nextPageLink();
                    if (nextLink == null) {
                        return Observable.just(page);
                    }
                    return Observable.just(page).concatWith(listNextWithServiceResponseAsync(nextLink));
                }
            });
    }

    /**
     * Gets list of users for the current tenant (a single page only).
     *
     * @param filter The filter to apply to the operation.
     * @return the PagedList&lt;UserInner&gt; object wrapped in {@link ServiceResponse} if successful.
     */
    public Observable<ServiceResponse<Page<UserInner>>> listSinglePageAsync(final String filter) {
        if (this.client.tenantID() == null) {
            throw new IllegalArgumentException("Parameter this.client.tenantID() is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        return service.list(this.client.tenantID(), filter, this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<UserInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<UserInner>>> call(Response<ResponseBody> response) {
                    try {
                        // Re-wrap the concrete PageImpl as the Page interface
                        // expected by callers.
                        ServiceResponse<PageImpl<UserInner>> result = listDelegate(response);
                        return Observable.just(new ServiceResponse<Page<UserInner>>(result.body(), result.response()));
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }

    // Deserializes a raw HTTP response into one page of users; only HTTP 200 is
    // registered as success, anything else maps to GraphErrorException.
    private ServiceResponse<PageImpl<UserInner>> listDelegate(Response<ResponseBody> response) throws GraphErrorException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<PageImpl<UserInner>, GraphErrorException>newInstance(this.client.serializerAdapter())
            .register(200, new TypeToken<PageImpl<UserInner>>() { }.getType())
            .registerError(GraphErrorException.class)
            .build(response);
    }

    /**
     * Gets user information from the directory.
     *
     * @param upnOrObjectId The object ID or principal name of the user for which to get information.
     * @return the UserInner object if successful.
     */
    public UserInner get(String upnOrObjectId) {
        // Synchronous variant: blocks on the async pipeline.
        return getWithServiceResponseAsync(upnOrObjectId).toBlocking().single().body();
    }

    /**
     * Gets user information from the directory.
     *
     * @param upnOrObjectId The object ID or principal name of the user for which to get information.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @return the {@link ServiceCall} object
     */
    public ServiceCall<UserInner> getAsync(String upnOrObjectId, final ServiceCallback<UserInner> serviceCallback) {
        return ServiceCall.fromResponse(getWithServiceResponseAsync(upnOrObjectId), serviceCallback);
    }

    /**
     * Gets user information from the directory.
     *
     * @param upnOrObjectId The object ID or principal name of the user for which to get information.
     * @return the observable to the UserInner object
     */
    public Observable<UserInner> getAsync(String upnOrObjectId) {
        return getWithServiceResponseAsync(upnOrObjectId).map(new Func1<ServiceResponse<UserInner>, UserInner>() {
            @Override
            public UserInner call(ServiceResponse<UserInner> response) {
                return response.body();
            }
        });
    }

    /**
     * Gets user information from the directory.
     *
     * @param upnOrObjectId The object ID or principal name of the user for which to get information.
     * @return the observable to the UserInner object
     */
    public Observable<ServiceResponse<UserInner>> getWithServiceResponseAsync(String upnOrObjectId) {
        if (upnOrObjectId == null) {
            throw new IllegalArgumentException("Parameter upnOrObjectId is required and cannot be null.");
        }
        if (this.client.tenantID() == null) {
            throw new IllegalArgumentException("Parameter this.client.tenantID() is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        return service.get(upnOrObjectId, this.client.tenantID(), this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<UserInner>>>() {
                @Override
                public Observable<ServiceResponse<UserInner>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<UserInner> clientResponse = getDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }

    // Deserializes the HTTP response; only HTTP 200 carries a UserInner body.
    private ServiceResponse<UserInner> getDelegate(Response<ResponseBody> response) throws GraphErrorException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<UserInner, GraphErrorException>newInstance(this.client.serializerAdapter())
            .register(200, new TypeToken<UserInner>() { }.getType())
            .registerError(GraphErrorException.class)
            .build(response);
    }

    /**
     * Updates a user.
     *
     * @param upnOrObjectId The object ID or principal name of the user to update.
     * @param parameters Parameters to update an existing user.
     */
    public void update(String upnOrObjectId, UserUpdateParametersInner parameters) {
        // Synchronous variant; body() is Void, called only to surface errors.
        updateWithServiceResponseAsync(upnOrObjectId, parameters).toBlocking().single().body();
    }

    /**
     * Updates a user.
     *
     * @param upnOrObjectId The object ID or principal name of the user to update.
     * @param parameters Parameters to update an existing user.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @return the {@link ServiceCall} object
     */
    public ServiceCall<Void> updateAsync(String upnOrObjectId, UserUpdateParametersInner parameters, final ServiceCallback<Void> serviceCallback) {
        return ServiceCall.fromResponse(updateWithServiceResponseAsync(upnOrObjectId, parameters), serviceCallback);
    }

    /**
     * Updates a user.
     *
     * @param upnOrObjectId The object ID or principal name of the user to update.
     * @param parameters Parameters to update an existing user.
     * @return the {@link ServiceResponse} object if successful.
     */
    public Observable<Void> updateAsync(String upnOrObjectId, UserUpdateParametersInner parameters) {
        return updateWithServiceResponseAsync(upnOrObjectId, parameters).map(new Func1<ServiceResponse<Void>, Void>() {
            @Override
            public Void call(ServiceResponse<Void> response) {
                return response.body();
            }
        });
    }

    /**
     * Updates a user.
     *
     * @param upnOrObjectId The object ID or principal name of the user to update.
     * @param parameters Parameters to update an existing user.
     * @return the {@link ServiceResponse} object if successful.
     */
    public Observable<ServiceResponse<Void>> updateWithServiceResponseAsync(String upnOrObjectId, UserUpdateParametersInner parameters) {
        if (upnOrObjectId == null) {
            throw new IllegalArgumentException("Parameter upnOrObjectId is required and cannot be null.");
        }
        if (this.client.tenantID() == null) {
            throw new IllegalArgumentException("Parameter this.client.tenantID() is required and cannot be null.");
        }
        if (parameters == null) {
            throw new IllegalArgumentException("Parameter parameters is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        // Client-side validation of the payload before hitting the wire.
        Validator.validate(parameters);
        return service.update(upnOrObjectId, this.client.tenantID(), parameters, this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Void>>>() {
                @Override
                public Observable<ServiceResponse<Void>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<Void> clientResponse = updateDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }

    // Only HTTP 204 (No Content) is a successful update.
    private ServiceResponse<Void> updateDelegate(Response<ResponseBody> response) throws GraphErrorException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<Void, GraphErrorException>newInstance(this.client.serializerAdapter())
            .register(204, new TypeToken<Void>() { }.getType())
            .registerError(GraphErrorException.class)
            .build(response);
    }

    /**
     * Delete a user.
     *
     * @param upnOrObjectId The object ID or principal name of the user to delete.
     */
    public void delete(String upnOrObjectId) {
        // Synchronous variant; body() is Void, called only to surface errors.
        deleteWithServiceResponseAsync(upnOrObjectId).toBlocking().single().body();
    }

    /**
     * Delete a user.
     *
     * @param upnOrObjectId The object ID or principal name of the user to delete.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @return the {@link ServiceCall} object
     */
    public ServiceCall<Void> deleteAsync(String upnOrObjectId, final ServiceCallback<Void> serviceCallback) {
        return ServiceCall.fromResponse(deleteWithServiceResponseAsync(upnOrObjectId), serviceCallback);
    }

    /**
     * Delete a user.
     *
     * @param upnOrObjectId The object ID or principal name of the user to delete.
     * @return the {@link ServiceResponse} object if successful.
     */
    public Observable<Void> deleteAsync(String upnOrObjectId) {
        return deleteWithServiceResponseAsync(upnOrObjectId).map(new Func1<ServiceResponse<Void>, Void>() {
            @Override
            public Void call(ServiceResponse<Void> response) {
                return response.body();
            }
        });
    }

    /**
     * Delete a user.
     *
     * @param upnOrObjectId The object ID or principal name of the user to delete.
     * @return the {@link ServiceResponse} object if successful.
     */
    public Observable<ServiceResponse<Void>> deleteWithServiceResponseAsync(String upnOrObjectId) {
        if (upnOrObjectId == null) {
            throw new IllegalArgumentException("Parameter upnOrObjectId is required and cannot be null.");
        }
        if (this.client.tenantID() == null) {
            throw new IllegalArgumentException("Parameter this.client.tenantID() is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        return service.delete(upnOrObjectId, this.client.tenantID(), this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Void>>>() {
                @Override
                public Observable<ServiceResponse<Void>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<Void> clientResponse = deleteDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }

    // Only HTTP 204 (No Content) is a successful delete.
    private ServiceResponse<Void> deleteDelegate(Response<ResponseBody> response) throws GraphErrorException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<Void, GraphErrorException>newInstance(this.client.serializerAdapter())
            .register(204, new TypeToken<Void>() { }.getType())
            .registerError(GraphErrorException.class)
            .build(response);
    }

    /**
     * Gets a collection that contains the object IDs of the groups of which the user is a member.
     *
     * @param objectId The object ID of the user for which to get group membership.
     * @param securityEnabledOnly If true, only membership in security-enabled groups should be checked. Otherwise, membership in all groups should be checked.
     * @return the List&lt;String&gt; object if successful.
     */
    public List<String> getMemberGroups(String objectId, boolean securityEnabledOnly) {
        return getMemberGroupsWithServiceResponseAsync(objectId, securityEnabledOnly).toBlocking().single().body();
    }

    /**
     * Gets a collection that contains the object IDs of the groups of which the user is a member.
     *
     * @param objectId The object ID of the user for which to get group membership.
     * @param securityEnabledOnly If true, only membership in security-enabled groups should be checked. Otherwise, membership in all groups should be checked.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @return the {@link ServiceCall} object
     */
    public ServiceCall<List<String>> getMemberGroupsAsync(String objectId, boolean securityEnabledOnly, final ServiceCallback<List<String>> serviceCallback) {
        return ServiceCall.fromResponse(getMemberGroupsWithServiceResponseAsync(objectId, securityEnabledOnly), serviceCallback);
    }

    /**
     * Gets a collection that contains the object IDs of the groups of which the user is a member.
     *
     * @param objectId The object ID of the user for which to get group membership.
     * @param securityEnabledOnly If true, only membership in security-enabled groups should be checked. Otherwise, membership in all groups should be checked.
     * @return the observable to the List&lt;String&gt; object
     */
    public Observable<List<String>> getMemberGroupsAsync(String objectId, boolean securityEnabledOnly) {
        return getMemberGroupsWithServiceResponseAsync(objectId, securityEnabledOnly).map(new Func1<ServiceResponse<List<String>>, List<String>>() {
            @Override
            public List<String> call(ServiceResponse<List<String>> response) {
                return response.body();
            }
        });
    }

    /**
     * Gets a collection that contains the object IDs of the groups of which the user is a member.
     *
     * @param objectId The object ID of the user for which to get group membership.
     * @param securityEnabledOnly If true, only membership in security-enabled groups should be checked. Otherwise, membership in all groups should be checked.
     * @return the observable to the List&lt;String&gt; object
     */
    public Observable<ServiceResponse<List<String>>> getMemberGroupsWithServiceResponseAsync(String objectId, boolean securityEnabledOnly) {
        if (objectId == null) {
            throw new IllegalArgumentException("Parameter objectId is required and cannot be null.");
        }
        if (this.client.tenantID() == null) {
            throw new IllegalArgumentException("Parameter this.client.tenantID() is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        // The boolean flag is sent in the request body, not the query string.
        UserGetMemberGroupsParameters parameters = new UserGetMemberGroupsParameters();
        parameters.withSecurityEnabledOnly(securityEnabledOnly);
        return service.getMemberGroups(objectId, this.client.tenantID(), this.client.apiVersion(), this.client.acceptLanguage(), parameters, this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<List<String>>>>() {
                @Override
                public Observable<ServiceResponse<List<String>>> call(Response<ResponseBody> response) {
                    try {
                        // Flatten the single-page envelope into a plain List.
                        ServiceResponse<PageImpl1<String>> result = getMemberGroupsDelegate(response);
                        ServiceResponse<List<String>> clientResponse = new ServiceResponse<List<String>>(result.body().items(), result.response());
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }

    // Deserializes the group-ID collection; only HTTP 200 is a success.
    private ServiceResponse<PageImpl1<String>> getMemberGroupsDelegate(Response<ResponseBody> response) throws GraphErrorException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<PageImpl1<String>, GraphErrorException>newInstance(this.client.serializerAdapter())
            .register(200, new TypeToken<PageImpl1<String>>() { }.getType())
            .registerError(GraphErrorException.class)
            .build(response);
    }

    /**
     * Gets a list of users for the current tenant.
     *
     * @param nextLink Next link for the list operation.
     * @return the PagedList&lt;UserInner&gt; object if successful.
     */
    public PagedList<UserInner> listNext(final String nextLink) {
        // Blocks on the requested page; further pages are fetched lazily.
        ServiceResponse<Page<UserInner>> response = listNextSinglePageAsync(nextLink).toBlocking().single();
        return new PagedList<UserInner>(response.body()) {
            @Override
            public Page<UserInner> nextPage(String nextLink) {
                return listNextSinglePageAsync(nextLink).toBlocking().single().body();
            }
        };
    }

    /**
     * Gets a list of users for the current tenant.
     *
     * @param nextLink Next link for the list operation.
     * @param serviceCall the ServiceCall object tracking the Retrofit calls
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @return the {@link ServiceCall} object
     */
    public ServiceCall<List<UserInner>> listNextAsync(final String nextLink, final ServiceCall<List<UserInner>> serviceCall, final ListOperationCallback<UserInner> serviceCallback) {
        // NOTE(review): the serviceCall parameter is not used by this body;
        // it appears to be retained only for signature compatibility.
        return AzureServiceCall.fromPageResponse(
            listNextSinglePageAsync(nextLink),
            new Func1<String, Observable<ServiceResponse<Page<UserInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<UserInner>>> call(String nextLink) {
                    return listNextSinglePageAsync(nextLink);
                }
            },
            serviceCallback);
    }

    /**
     * Gets a list of users for the current tenant.
     *
     * @param nextLink Next link for the list operation.
     * @return the observable to the PagedList&lt;UserInner&gt; object
     */
    public Observable<Page<UserInner>> listNextAsync(final String nextLink) {
        return listNextWithServiceResponseAsync(nextLink)
            .map(new Func1<ServiceResponse<Page<UserInner>>, Page<UserInner>>() {
                @Override
                public Page<UserInner> call(ServiceResponse<Page<UserInner>> response) {
                    return response.body();
                }
            });
    }

    /**
     * Gets a list of users for the current tenant.
     *
     * @param nextLink Next link for the list operation.
     * @return the observable to the PagedList&lt;UserInner&gt; object
     */
    public Observable<ServiceResponse<Page<UserInner>>> listNextWithServiceResponseAsync(final String nextLink) {
        // Recursively concatenates follow-up pages until nextPageLink is null.
        return listNextSinglePageAsync(nextLink)
            .concatMap(new Func1<ServiceResponse<Page<UserInner>>, Observable<ServiceResponse<Page<UserInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<UserInner>>> call(ServiceResponse<Page<UserInner>> page) {
                    String nextLink = page.body().nextPageLink();
                    if (nextLink == null) {
                        return Observable.just(page);
                    }
                    return Observable.just(page).concatWith(listNextWithServiceResponseAsync(nextLink));
                }
            });
    }

    /**
     * Gets a list of users for the current tenant (a single page only).
     *
     * @param nextLink Next link for the list operation.
     * @return the PagedList&lt;UserInner&gt; object wrapped in {@link ServiceResponse} if successful.
     */
    public Observable<ServiceResponse<Page<UserInner>>> listNextSinglePageAsync(final String nextLink) {
        if (nextLink == null) {
            throw new IllegalArgumentException("Parameter nextLink is required and cannot be null.");
        }
        if (this.client.tenantID() == null) {
            throw new IllegalArgumentException("Parameter this.client.tenantID() is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        // The service returns tenant-relative next links; prefix the tenant ID.
        String nextUrl = String.format("%s/%s", this.client.tenantID(), nextLink);
        return service.listNext(nextUrl, this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<UserInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<UserInner>>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<PageImpl<UserInner>> result = listNextDelegate(response);
                        return Observable.just(new ServiceResponse<Page<UserInner>>(result.body(), result.response()));
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }

    // Deserializes a follow-up page of users; only HTTP 200 is a success.
    private ServiceResponse<PageImpl<UserInner>> listNextDelegate(Response<ResponseBody> response) throws GraphErrorException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<PageImpl<UserInner>, GraphErrorException>newInstance(this.client.serializerAdapter())
            .register(200, new TypeToken<PageImpl<UserInner>>() { }.getType())
            .registerError(GraphErrorException.class)
            .build(response);
    }

}
// Copyright 2017 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.packages; import com.google.common.base.Joiner; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableCollection; import com.google.common.collect.ImmutableList; import com.google.devtools.build.lib.collect.nestedset.Depset; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Map; import javax.annotation.Nullable; import net.starlark.java.eval.ClassObject; import net.starlark.java.eval.EvalException; import net.starlark.java.eval.HasBinary; import net.starlark.java.eval.Starlark; import net.starlark.java.syntax.Location; import net.starlark.java.syntax.TokenKind; /** An Info (provider instance) for providers defined in Starlark. */ public final class StarlarkInfo extends StructImpl implements HasBinary, ClassObject { public static final Depset.ElementType TYPE = Depset.ElementType.of(StarlarkInfo.class); // For a n-element info, the table contains n key strings, sorted, // followed by the n corresponding legal Starlark values. private final Object[] table; // A format string with one %s placeholder for the missing field name. // If null, uses the default format specified by the provider. 
// TODO(adonovan): make the provider determine the error message // (but: this has implications for struct+struct, the equivalence // relation, and other observable behaviors). @Nullable private final String unknownFieldError; private StarlarkInfo( Provider provider, Object[] table, @Nullable Location loc, @Nullable String unknownFieldError) { super(provider, loc); this.table = table; this.unknownFieldError = unknownFieldError; } // Converts a map to a table of sorted keys followed by corresponding values. private static Object[] toTable(Map<String, Object> values) { int n = values.size(); Object[] table = new Object[n + n]; int i = 0; for (Map.Entry<String, Object> e : values.entrySet()) { table[i] = e.getKey(); table[n + i] = Starlark.checkValid(e.getValue()); i++; } // Sort keys, permuting values in parallel. if (n > 1) { sortPairs(table, 0, n - 1); } return table; } /** * Constructs a StarlarkInfo from an array of alternating key/value pairs as provided by * Starlark.fastcall. Checks that each key is provided at most once, and is defined by the * optional schema, which must be sorted. This optimized zero-allocation function exists solely * for the StarlarkProvider constructor. */ static StarlarkInfo createFromNamedArgs( Provider provider, Object[] table, @Nullable ImmutableList<String> schema, Location loc) throws EvalException { // Permute fastcall form (k, v, ..., k, v) into table form (k, k, ..., v, v). permute(table); int n = table.length >> 1; // number of K/V pairs // Sort keys, permuting values in parallel. if (n > 1) { sortPairs(table, 0, n - 1); } // Check for duplicate keys, which are now adjacent. for (int i = 0; i < n - 1; i++) { if (table[i].equals(table[i + 1])) { throw Starlark.errorf( "got multiple values for parameter %s in call to instantiate provider %s", table[i], provider.getPrintableName()); } } // Check that schema is a superset of the table's keys. 
if (schema != null) { List<String> unexpected = unexpectedKeys(schema, table, n); if (unexpected != null) { throw Starlark.errorf( "unexpected keyword%s %s in call to instantiate provider %s", unexpected.size() > 1 ? "s" : "", Joiner.on(", ").join(unexpected), provider.getPrintableName()); } } return new StarlarkInfo(provider, table, loc, /*unknownFieldError=*/ null); } // Permutes array elements from alternating keys/values form, // (as used by fastcall's named array) into keys-then-corresponding-values form, // as used by StarlarkInfo.table. // The permutation preserves the key/value association but not the order of keys. static void permute(Object[] named) { int n = named.length >> 1; // number of K/V pairs // Thanks to Murali Ganapathy for the algorithm. // See https://play.golang.org/p/QOKnrj_bIwk. // // i and j are the indices bracketing successive pairs of cells, // working from the outside to the middle. // // i j // [KV]KVKVKVKVKVKV[KV] // i j // KK[KV]KVKVKVKV[KV]VV // i j // KKKK[KV]KVKV[KV]VVVV // etc... for (int i = 0; i < n - 1; i += 2) { int j = named.length - i; // rotate two pairs [KV]...[kv] -> [Kk]...[vV] Object tmp = named[i + 1]; named[i + 1] = named[j - 2]; named[j - 2] = named[j - 1]; named[j - 1] = tmp; } // reverse lower half containing keys: [KkvV] -> [kKvV] for (int i = 0; i < n >> 1; i++) { Object tmp = named[n - 1 - i]; named[n - 1 - i] = named[i]; named[i] = tmp; } } // Sorts non-empty slice a[lo:hi] (inclusive) in place. // Elements a[n:2n) are permuted the same way as a[0:n), // where n = a.length / 2. The lower half must be strings. // Precondition: 0 <= lo <= hi < n. 
static void sortPairs(Object[] a, int lo, int hi) { String pivot = (String) a[lo + (hi - lo) / 2]; int i = lo; int j = hi; while (i <= j) { while (((String) a[i]).compareTo(pivot) < 0) { i++; } while (((String) a[j]).compareTo(pivot) > 0) { j--; } if (i <= j) { int n = a.length >> 1; swap(a, i, j); swap(a, i + n, j + n); i++; j--; } } if (lo < j) { sortPairs(a, lo, j); } if (i < hi) { sortPairs(a, i, hi); } } private static void swap(Object[] a, int i, int j) { Object tmp = a[i]; a[i] = a[j]; a[j] = tmp; } // Returns the list of keys in table[0:n) not defined by the schema, // or null on success. // Allocates no memory on success. // Both table[0:n) and schema are sorted lists of strings. @Nullable private static List<String> unexpectedKeys(ImmutableList<String> schema, Object[] table, int n) { int si = 0; List<String> unexpected = null; table: for (int ti = 0; ti < n; ti++) { String t = (String) table[ti]; while (si < schema.size()) { String s = schema.get(si++); int cmp = s.compareTo(t); if (cmp == 0) { // table key matches schema continue table; } else if (cmp > 0) { // table contains unexpected key if (unexpected == null) { unexpected = new ArrayList<>(); } unexpected.add(t); } else { // skip over schema key not provided by table } } if (unexpected == null) { unexpected = new ArrayList<>(); } unexpected.add(t); } return unexpected; } @Override public ImmutableCollection<String> getFieldNames() { // TODO(adonovan): opt: can we avoid allocating three objects? @SuppressWarnings("unchecked") List<String> keys = (List<String>) (List<?>) Arrays.asList(table).subList(0, table.length / 2); return ImmutableList.copyOf(keys); } /** * Returns the custom (i.e. per-instance, as opposed to per-provider-type) error message string * format used by this provider instance, or null if not set. */ @Nullable @Override protected String getErrorMessageFormatForUnknownField() { return unknownFieldError != null ? 
unknownFieldError : super.getErrorMessageFormatForUnknownField(); } @Override public boolean isImmutable() { // If the provider is not yet exported, the hash code of the object is subject to change. // TODO(adonovan): implement isHashable? if (!getProvider().isExported()) { return false; } // TODO(bazel-team): If we export at the end of a full module's evaluation, instead of at the // end of every top-level statement, then we can assume that exported implies frozen, and just // return true here without a traversal. for (int i = table.length / 2; i < table.length; i++) { if (!Starlark.isImmutable(table[i])) { return false; } } return true; } @Override public Object getValue(String name) { int n = table.length / 2; int i = Arrays.binarySearch(table, 0, n, name); if (i < 0) { return null; } return table[n + i]; } /** * Creates a schemaless provider instance with the given provider type and field values. * * <p>{@code loc} is the creation location for this instance. Built-in provider instances may use * {@link Location#BUILTIN}, which is the default if null. */ public static StarlarkInfo create( Provider provider, Map<String, Object> values, @Nullable Location loc) { return new StarlarkInfo(provider, toTable(values), loc, /*unknownFieldError=*/ null); } /** * Creates a schemaless provider instance with the given provider type, field values, and * unknown-field error message. * * <p>This is used to create structs for special purposes, such as {@code ctx.attr} and the {@code * native} module. The creation location will be {@link Location#BUILTIN}. * * <p>{@code unknownFieldError} is a string format, as for {@link * Provider#getErrorMessageFormatForUnknownField}. * * @deprecated Do not use this method. Instead, create a new subclass of {@link NativeProvider} * with the desired error message format, and create a corresponding {@link NativeInfo} * subclass. 
*/ // TODO(bazel-team): Make the special structs that need a custom error message use a different // provider (subclassing NativeProvider) and a different StructImpl implementation. Then remove // this functionality, thereby saving a string pointer field for the majority of providers that // don't need it. @Deprecated public static StarlarkInfo createWithCustomMessage( Provider provider, Map<String, Object> values, String unknownFieldError) { Preconditions.checkNotNull(unknownFieldError); return new StarlarkInfo(provider, toTable(values), Location.BUILTIN, unknownFieldError); } @Override public StarlarkInfo binaryOp(TokenKind op, Object that, boolean thisLeft) throws EvalException { if (op == TokenKind.PLUS && that instanceof StarlarkInfo) { return thisLeft ? plus(this, (StarlarkInfo) that) // : plus((StarlarkInfo) that, this); } return null; } private static StarlarkInfo plus(StarlarkInfo x, StarlarkInfo y) throws EvalException { Provider xprov = x.getProvider(); Provider yprov = y.getProvider(); if (!xprov.equals(yprov)) { throw Starlark.errorf( "Cannot use '+' operator on instances of different providers (%s and %s)", xprov.getPrintableName(), yprov.getPrintableName()); } // ztable = merge(x.table, y.table) int xsize = x.table.length / 2; int ysize = y.table.length / 2; int zsize = xsize + ysize; Object[] ztable = new Object[zsize + zsize]; int xi = 0; int yi = 0; int zi = 0; while (xi < xsize && yi < ysize) { String xk = (String) x.table[xi]; String yk = (String) y.table[yi]; int cmp = xk.compareTo(yk); if (cmp < 0) { ztable[zi] = xk; ztable[zi + zsize] = x.table[xi + xsize]; xi++; } else if (cmp > 0) { ztable[zi] = yk; ztable[zi + zsize] = y.table[yi + ysize]; yi++; } else { throw Starlark.errorf("cannot add struct instances with common field '%s'", xk); } zi++; } while (xi < xsize) { ztable[zi] = x.table[xi]; ztable[zi + zsize] = x.table[xi + xsize]; xi++; zi++; } while (yi < ysize) { ztable[zi] = y.table[yi]; ztable[zi + zsize] = y.table[yi + ysize]; yi++; 
zi++; } return new StarlarkInfo(xprov, ztable, Location.BUILTIN, x.unknownFieldError); } }
package org.json;

import static java.lang.String.format;

import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.net.URLEncoder;
import java.util.*;

/*
Copyright (c) 2002 JSON.org

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

The Software shall be used for Good, not Evil.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/

/**
 * A JSON Pointer is a simple query language defined for JSON documents by
 * <a href="https://tools.ietf.org/html/rfc6901">RFC 6901</a>.
 *
 * In a nutshell, JSONPointer allows the user to navigate into a JSON document
 * using strings, and retrieve targeted objects, like a simple form of XPATH.
 * Path segments are separated by the '/' char, which signifies the root of
 * the document when it appears as the first char of the string. Array
 * elements are navigated using ordinals, counting from 0. JSONPointer strings
 * may be extended to any arbitrary number of segments. If the navigation
 * is successful, the matched item is returned. A matched item may be a
 * JSONObject, a JSONArray, or a JSON value. If the JSONPointer string building
 * fails, an appropriate exception is thrown. If the navigation fails to find
 * a match, a JSONPointerException is thrown.
 *
 * @author JSON.org
 * @version 2016-05-14
 */
public class JSONPointer {

    // used for URL encoding and decoding
    private static final String ENCODING = "utf-8";

    /**
     * This class allows the user to build a JSONPointer in steps, using
     * exactly one segment in each step.
     */
    public static class Builder {

        // Segments for the eventual JSONPointer string
        private final List<String> refTokens = new ArrayList<String>();

        /**
         * Creates a {@code JSONPointer} instance using the tokens previously set using the
         * {@link #append(String)} method calls.
         */
        public JSONPointer build() {
            return new JSONPointer(refTokens);
        }

        /**
         * Adds an arbitrary token to the list of reference tokens. It can be any non-null value.
         *
         * Unlike in the case of JSON string or URI fragment representation of JSON pointers, the
         * argument of this method MUST NOT be escaped. If you want to query the property called
         * {@code "a~b"} then you should simply pass the {@code "a~b"} string as-is, there is no
         * need to escape it as {@code "a~0b"}.
         *
         * @param token the new token to be appended to the list
         * @return {@code this}
         * @throws NullPointerException if {@code token} is null
         */
        public Builder append(String token) {
            if (token == null) {
                throw new NullPointerException("token cannot be null");
            }
            refTokens.add(token);
            return this;
        }

        /**
         * Adds an integer to the reference token list. Although not necessarily, mostly this token will
         * denote an array index.
         *
         * @param arrayIndex the array index to be added to the token list
         * @return {@code this}
         */
        public Builder append(int arrayIndex) {
            refTokens.add(String.valueOf(arrayIndex));
            return this;
        }
    }

    /**
     * Static factory method for {@link Builder}. Example usage:
     *
     * <pre><code>
     * JSONPointer pointer = JSONPointer.builder()
     *       .append("obj")
     *       .append("other~key").append("another/key")
     *       .append("\"")
     *       .append(0)
     *       .build();
     * </code></pre>
     *
     * @return a builder instance which can be used to construct a {@code JSONPointer} instance by chained
     * {@link Builder#append(String)} calls.
     */
    public static Builder builder() {
        return new Builder();
    }

    // Segments for the JSONPointer string. Tokens are stored UNESCAPED:
    // the String constructor unescapes each parsed segment, and the Builder
    // collects raw (never-escaped) tokens.
    private final List<String> refTokens;

    /**
     * Pre-parses and initializes a new {@code JSONPointer} instance. If you want to
     * evaluate the same JSON Pointer on different JSON documents then it is recommended
     * to keep the {@code JSONPointer} instances due to performance considerations.
     *
     * @param pointer the JSON String or URI Fragment representation of the JSON pointer.
     * @throws IllegalArgumentException if {@code pointer} is not a valid JSON pointer
     */
    public JSONPointer(String pointer) {
        if (pointer == null) {
            throw new NullPointerException("pointer cannot be null");
        }
        if (pointer.isEmpty()) {
            refTokens = Collections.emptyList();
            return;
        }
        if (pointer.startsWith("#/")) {
            // URI fragment form: strip "#/" and percent-decode before splitting.
            pointer = pointer.substring(2);
            try {
                pointer = URLDecoder.decode(pointer, ENCODING);
            } catch (UnsupportedEncodingException e) {
                throw new RuntimeException(e);
            }
        } else if (pointer.startsWith("/")) {
            pointer = pointer.substring(1);
        } else {
            throw new IllegalArgumentException("a JSON pointer should start with '/' or '#/'");
        }
        refTokens = new ArrayList<String>();
        for (String token : pointer.split("/")) {
            refTokens.add(unescape(token));
        }
    }

    public JSONPointer(List<String> refTokens) {
        this.refTokens = new ArrayList<String>(refTokens);
    }

    /**
     * Reverses RFC 6901 escaping on a single reference token: {@code ~1} -> '/'
     * and {@code ~0} -> '~' (in that order, so {@code ~01} correctly yields
     * {@code ~1}), plus the backslash/quote escapes this implementation accepts.
     */
    private String unescape(String token) {
        return token.replace("~1", "/").replace("~0", "~")
                .replace("\\\"", "\"")
                .replace("\\\\", "\\");
    }

    /**
     * Evaluates this JSON Pointer on the given {@code document}. The {@code document}
     * is usually a {@link JSONObject} or a {@link JSONArray} instance, but the empty
     * JSON Pointer ({@code ""}) can be evaluated on any JSON values and in such case the
     * returned value will be {@code document} itself.
     *
     * @param document the JSON document which should be the subject of querying.
     * @return the result of the evaluation
     * @throws JSONPointerException if an error occurs during evaluation
     */
    public Object queryFrom(Object document) {
        if (refTokens.isEmpty()) {
            return document;
        }
        Object current = document;
        for (String token : refTokens) {
            if (current instanceof JSONObject) {
                // BUGFIX: tokens are already unescaped (see constructor/Builder),
                // so they must be used as-is. The previous unescape(token) call
                // here unescaped a second time, corrupting keys that contain
                // literal "~0"/"~1" sequences (e.g. key "a~0b" became "a~b").
                current = ((JSONObject) current).opt(token);
            } else if (current instanceof JSONArray) {
                current = readByIndexToken(current, token);
            } else {
                throw new JSONPointerException(format(
                        "value [%s] is not an array or object therefore its key %s cannot be resolved",
                        current, token));
            }
        }
        return current;
    }

    /**
     * Matches a JSONArray element by ordinal position
     * @param current the JSONArray to be evaluated
     * @param indexToken the array index in string form
     * @return the matched object. If no matching item is found a
     * JSONPointerException is thrown
     */
    private Object readByIndexToken(Object current, String indexToken) {
        try {
            int index = Integer.parseInt(indexToken);
            JSONArray currentArr = (JSONArray) current;
            if (index >= currentArr.length()) {
                throw new JSONPointerException(format("index %d is out of bounds - the array has %d elements", index,
                        currentArr.length()));
            }
            return currentArr.get(index);
        } catch (NumberFormatException e) {
            throw new JSONPointerException(format("%s is not an array index", indexToken), e);
        }
    }

    /**
     * Returns a string representing the JSONPointer path value using string
     * representation
     */
    @Override
    public String toString() {
        StringBuilder rval = new StringBuilder("");
        for (String token: refTokens) {
            rval.append('/').append(escape(token));
        }
        return rval.toString();
    }

    /**
     * Escapes path segment values to an unambiguous form.
     * The escape char to be inserted is '~'. The chars to be escaped
     * are ~, which maps to ~0, and /, which maps to ~1. Backslashes
     * and double quote chars are also escaped.
     * @param token the JSONPointer segment value to be escaped
     * @return the escaped value for the token
     */
    private String escape(String token) {
        return token.replace("~", "~0")
                .replace("/", "~1")
                .replace("\\", "\\\\")
                .replace("\"", "\\\"");
    }

    /**
     * Returns a string representing the JSONPointer path value using URI
     * fragment identifier representation
     */
    public String toURIFragment() {
        try {
            StringBuilder rval = new StringBuilder("#");
            for (String token : refTokens) {
                rval.append('/').append(URLEncoder.encode(token, ENCODING));
            }
            return rval.toString();
        } catch (UnsupportedEncodingException e) {
            throw new RuntimeException(e);
        }
    }
}
/** * Copyright (c) 2013, impossibl.com * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of impossibl.com nor the names of its contributors may * be used to endorse or promote products derived from this software * without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. 
*/ /*------------------------------------------------------------------------- * * Copyright (c) 2004-2011, PostgreSQL Global Development Group * * *------------------------------------------------------------------------- */ package com.impossibl.postgres.jdbc; import com.impossibl.postgres.utils.guava.Joiner; import static com.impossibl.postgres.utils.guava.Preconditions.checkArgument; import java.sql.Connection; import java.sql.DriverManager; import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; import java.sql.Statement; import java.util.HashMap; import java.util.Map; import java.util.Properties; import javax.sql.XAConnection; public class TestUtil { public static String getURL(Object... urlParams) { String query = ""; if (urlParams != null && urlParams.length > 0) { query = "?" + Joiner.on("&").withKeyValueSeparator("=").join(params(urlParams)); } if (!"5432".equals(getPort())) { return "jdbc:pgsql://" + getServer() + ":" + getPort() + "/" + getDatabase() + query; } else { return "jdbc:pgsql://" + getServer() + "/" + getDatabase() + query; } } public static String getServer() { return System.getProperty("pgjdbc.test.server", "localhost"); } public static String getPort() { return System.getProperty("pgjdbc.test.port", "5432"); } public static String getDatabase() { return System.getProperty("pgjdbc.test.db", "test"); } public static Properties getProperties() { Properties props = new Properties(); props.setProperty("user", getUser()); props.setProperty("password", getPassword()); return props; } public static String getUser() { return System.getProperty("pgjdbc.test.user", "pgjdbc"); } public static String getPassword() { return System.getProperty("pgjdbc.test.password", "test"); } private static Map<String, Object> params(Object... 
objs) { checkArgument(objs.length % 2 == 0); Map<String, Object> map = new HashMap<>(); for (int c = 0; c < objs.length; c += 2) map.put((String) objs[c], objs[c + 1]); return map; } /* * Helper - opens a connection. */ public static Connection openDB() throws Exception { return openDB(new Properties()); } /* * Helper - opens a connection with the allowance for passing additional * parameters, like "compatible". */ public static Connection openDB(Properties props) throws Exception { props.setProperty("user", getUser()); props.setProperty("password", getPassword()); return DriverManager.getConnection(getURL(), props); } /* * Helper - closes an open connection. */ public static void closeDB(Connection con) throws SQLException { if (con != null) con.close(); } /* * Helper - closes an open connection. */ public static void closeDB(XAConnection con) throws SQLException { if (con != null) con.close(); } /* * Helper - creates a test table for use by a test */ public static void createTable(Connection con, String table, String columns) throws SQLException { // by default we don't request oids. 
createTable(con, table, columns, false); } /* * Helper - creates a test table for use by a test */ public static void createTable(Connection con, String table, String columns, boolean withOids) throws SQLException { Statement st = con.createStatement(); try { // Drop the table dropTable(con, table); // Now create the table String sql = "CREATE TABLE " + table + " (" + columns + ") "; if (withOids) { sql += " WITH OIDS"; } st.executeUpdate(sql); } finally { st.close(); } } /* * Helper - creates a test type for use by a test */ public static void createType(Connection con, String type, String attrs) throws SQLException { Statement st = con.createStatement(); try { // Drop the table dropType(con, type); // Now create the table String sql = "CREATE TYPE " + type + " AS (" + attrs + ") "; st.executeUpdate(sql); } finally { st.close(); } } /** * Helper creates a temporary table * * @param con * Connection * @param table * String * @param columns * String * @throws SQLException */ public static void createTempTable(Connection con, String table, String columns) throws SQLException { Statement st = con.createStatement(); try { // Drop the table dropTable(con, table); // Now create the table st.executeUpdate("create temp table " + table + " (" + columns + ")"); } finally { st.close(); } } /* * drop a sequence because older versions don't have dependency information * for serials */ public static void dropSequence(Connection con, String sequence) throws SQLException { Statement stmt = con.createStatement(); try { String sql = "DROP SEQUENCE " + sequence; stmt.executeUpdate(sql); } catch (SQLException sqle) { if (!con.getAutoCommit()) throw sqle; } finally { stmt.close(); } } /* * Helper - drops a table */ public static void dropTable(Connection con, String table) throws SQLException { Statement stmt = con.createStatement(); try { String sql = "DROP TABLE " + table + " CASCADE "; stmt.executeUpdate(sql); } catch (SQLException ex) { // Since every create table issues a drop 
table // it's easy to get a table doesn't exist error. // we want to ignore these, but if we're in a // transaction then we've got trouble if (!con.getAutoCommit()) throw ex; } finally { stmt.close(); } } /* * Helper - drops a type */ public static void dropType(Connection con, String type) throws SQLException { Statement stmt = con.createStatement(); try { String sql = "DROP TYPE " + type + " CASCADE "; stmt.executeUpdate(sql); } catch (SQLException ex) { // Since every create table issues a drop table // it's easy to get a table doesn't exist error. // we want to ignore these, but if we're in a // transaction then we've got trouble if (!con.getAutoCommit()) throw ex; } finally { stmt.close(); } } /* * Helper - drops a text search configuration */ public static void dropTextSearchConfiguration(Connection con, String configuration) throws SQLException { Statement stmt = con.createStatement(); try { String sql = "DROP TEXT SEARCH CONFIGURATION " + configuration + " CASCADE "; stmt.executeUpdate(sql); } catch (SQLException ex) { // Since every create table issues a drop table // it's easy to get a table doesn't exist error. 
// we want to ignore these, but if we're in a // transaction then we've got trouble if (!con.getAutoCommit()) throw ex; } finally { stmt.close(); } } /* * Helper - generates INSERT SQL - very simple */ public static String insertSQL(String table, String values) { return insertSQL(table, null, values); } public static String insertSQL(String table, String columns, String values) { String s = "INSERT INTO " + table; if (columns != null) s = s + " (" + columns + ")"; return s + " VALUES (" + values + ")"; } /* * Helper - generates SELECT SQL - very simple */ public static String selectSQL(String table, String columns) { return selectSQL(table, columns, null, null); } public static String selectSQL(String table, String columns, String where) { return selectSQL(table, columns, where, null); } public static String selectSQL(String table, String columns, String where, String other) { String s = "SELECT " + columns + " FROM " + table; if (where != null) s = s + " WHERE " + where; if (other != null) s = s + " " + other; return s; } /** * Print a ResultSet to System.out. This is useful for debugging tests. 
*/ public static void printResultSet(ResultSet rs) throws SQLException { ResultSetMetaData rsmd = rs.getMetaData(); for (int i = 1; i <= rsmd.getColumnCount(); i++) { if (i != 1) { System.out.print(", "); } System.out.print(rsmd.getColumnName(i)); } System.out.println(); while (rs.next()) { for (int i = 1; i <= rsmd.getColumnCount(); i++) { if (i != 1) { System.out.print(", "); } System.out.print(rs.getString(i)); } System.out.println(); } } public static boolean getStandardConformingStrings(Connection con) throws SQLException { Statement stmt = con.createStatement(); stmt.closeOnCompletion(); ResultSet rs = stmt.executeQuery("SHOW standard_conforming_strings"); if (rs.next()) { return rs.getBoolean(1); } return false; } public static String fix(int v, int l) { String s = "0000000000".substring(0, l) + Integer.toString(v); return s.substring(s.length() - l); } public static boolean isExtensionInstalled(Connection conn, String extensionName) throws SQLException { try (Statement stmt = conn.createStatement()) { try (ResultSet rs = stmt.executeQuery("SELECT * FROM pg_extension WHERE extname = '" + extensionName + "'")) { return rs.next(); } } } }
/* * TouchImageView.java * By: Michael Ortiz * Updated By: Patrick Lackemacher * Updated By: Babay88 * ------------------- * Extends Android ImageView to include pinch zooming and panning. */ package com.bereacollege.map; import android.content.Context; import android.graphics.Matrix; import android.graphics.PointF; import android.graphics.drawable.Drawable; import android.util.AttributeSet; import android.util.Log; import android.view.MotionEvent; import android.view.ScaleGestureDetector; import android.view.View; import android.widget.ImageView; public class TouchImageView extends ImageView { Matrix matrix; // We can be in one of these 3 states static final int NONE = 0; static final int DRAG = 1; static final int ZOOM = 2; int mode = NONE; // Remember some things for zooming PointF last = new PointF(); PointF start = new PointF(); float minScale = 1f; float maxScale = 3f; float[] m; int viewWidth, viewHeight; static final int CLICK = 3; float saveScale = 1f; protected float origWidth, origHeight; int oldMeasuredWidth, oldMeasuredHeight; ScaleGestureDetector mScaleDetector; Context context; public TouchImageView(Context context) { super(context); sharedConstructing(context); } public TouchImageView(Context context, AttributeSet attrs) { super(context, attrs); sharedConstructing(context); } private void sharedConstructing(Context context) { super.setClickable(true); this.context = context; mScaleDetector = new ScaleGestureDetector(context, new ScaleListener()); matrix = new Matrix(); m = new float[9]; setImageMatrix(matrix); setScaleType(ScaleType.MATRIX); setOnTouchListener(new OnTouchListener() { @Override public boolean onTouch(View v, MotionEvent event) { mScaleDetector.onTouchEvent(event); PointF curr = new PointF(event.getX(), event.getY()); switch (event.getAction()) { case MotionEvent.ACTION_DOWN: last.set(curr); start.set(last); mode = DRAG; break; case MotionEvent.ACTION_MOVE: if (mode == DRAG) { float deltaX = curr.x - last.x; float deltaY = curr.y - 
last.y; float fixTransX = getFixDragTrans(deltaX, viewWidth, origWidth * saveScale); float fixTransY = getFixDragTrans(deltaY, viewHeight, origHeight * saveScale); matrix.postTranslate(fixTransX, fixTransY); fixTrans(); last.set(curr.x, curr.y); } break; case MotionEvent.ACTION_UP: mode = NONE; int xDiff = (int) Math.abs(curr.x - start.x); int yDiff = (int) Math.abs(curr.y - start.y); if (xDiff < CLICK && yDiff < CLICK) performClick(); break; case MotionEvent.ACTION_POINTER_UP: mode = NONE; break; } setImageMatrix(matrix); invalidate(); return true; // indicate event was handled } }); } public void setMaxZoom(float x) { maxScale = x; } private class ScaleListener extends ScaleGestureDetector.SimpleOnScaleGestureListener { @Override public boolean onScaleBegin(ScaleGestureDetector detector) { mode = ZOOM; return true; } @Override public boolean onScale(ScaleGestureDetector detector) { float mScaleFactor = detector.getScaleFactor(); float origScale = saveScale; saveScale *= mScaleFactor; if (saveScale > maxScale) { saveScale = maxScale; mScaleFactor = maxScale / origScale; } else if (saveScale < minScale) { saveScale = minScale; mScaleFactor = minScale / origScale; } if (origWidth * saveScale <= viewWidth || origHeight * saveScale <= viewHeight) matrix.postScale(mScaleFactor, mScaleFactor, viewWidth / 2, viewHeight / 2); else matrix.postScale(mScaleFactor, mScaleFactor, detector.getFocusX(), detector.getFocusY()); fixTrans(); return true; } } void fixTrans() { matrix.getValues(m); float transX = m[Matrix.MTRANS_X]; float transY = m[Matrix.MTRANS_Y]; float fixTransX = getFixTrans(transX, viewWidth, origWidth * saveScale); float fixTransY = getFixTrans(transY, viewHeight, origHeight * saveScale); if (fixTransX != 0 || fixTransY != 0) matrix.postTranslate(fixTransX, fixTransY); } float getFixTrans(float trans, float viewSize, float contentSize) { float minTrans, maxTrans; if (contentSize <= viewSize) { minTrans = 0; maxTrans = viewSize - contentSize; } else { minTrans = 
viewSize - contentSize; maxTrans = 0; } if (trans < minTrans) return -trans + minTrans; if (trans > maxTrans) return -trans + maxTrans; return 0; } float getFixDragTrans(float delta, float viewSize, float contentSize) { if (contentSize <= viewSize) { return 0; } return delta; } @Override protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { super.onMeasure(widthMeasureSpec, heightMeasureSpec); viewWidth = MeasureSpec.getSize(widthMeasureSpec); viewHeight = MeasureSpec.getSize(heightMeasureSpec); // // Rescales image on rotation // if (oldMeasuredHeight == viewWidth && oldMeasuredHeight == viewHeight || viewWidth == 0 || viewHeight == 0) return; oldMeasuredHeight = viewHeight; oldMeasuredWidth = viewWidth; if (saveScale == 1) { //Fit to screen. float scale; Drawable drawable = getDrawable(); if (drawable == null || drawable.getIntrinsicWidth() == 0 || drawable.getIntrinsicHeight() == 0) return; int bmWidth = drawable.getIntrinsicWidth(); int bmHeight = drawable.getIntrinsicHeight(); Log.d("bmSize", "bmWidth: " + bmWidth + " bmHeight : " + bmHeight); float scaleX = (float) viewWidth / (float) bmWidth; float scaleY = (float) viewHeight / (float) bmHeight; scale = Math.min(scaleX, scaleY); matrix.setScale(scale, scale); // Center the image float redundantYSpace = (float) viewHeight - (scale * (float) bmHeight); float redundantXSpace = (float) viewWidth - (scale * (float) bmWidth); redundantYSpace /= (float) 2; redundantXSpace /= (float) 2; matrix.postTranslate(redundantXSpace, redundantYSpace); origWidth = viewWidth - 2 * redundantXSpace; origHeight = viewHeight - 2 * redundantYSpace; setImageMatrix(matrix); } fixTrans(); } }
package com.github.takumalee.simplefacebook.entities;

import android.location.Location;
import android.os.Bundle;

import com.github.takumalee.simplefacebook.Permission;
import com.github.takumalee.simplefacebook.utils.Attributes;
import com.github.takumalee.simplefacebook.utils.Utils;

import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * Read-only holder for a Facebook user profile. Every accessor simply returns
 * the corresponding field; nothing in the visible code populates the fields,
 * so they remain null until set elsewhere (presumably by a Graph API response
 * parser — TODO confirm against the rest of this package).
 */
public class Profile implements User {

    private String mId;
    private String mName;
    private String mFirstName;
    private String mMiddleName;
    private String mLastName;
    private String mGender;
    private String mLocale;
    private List<Language> mLanguages;
    private String mLink;
    private AgeRange mAgeRange;
    private String mThirdPartyId;
    private Boolean mIsInstalled;
    private Integer mTimeZone;
    private String mUpdatedTime;
    private Boolean mVerified;
    private String mBio;
    private String mBirthday;
    private Photo mCover;
    private String mCurrency;
    private List<Education> mEducation;
    private String mEmail;
    private String mHometown;
    private Location mLocation;
    private String mPolitical;
    private List<String> mFavoriteAthletess;
    private List<String> mFavoriteTeams;
    private String mPicture;
    private String mQuotes;
    private String mRelationshipStatus;
    private String mReligion;
    private String mWebsite;
    private List<Work> mWorks;

    private Profile() {
        // NOTE(review): this constructor previously contained ~100 lines of
        // commented-out legacy parsing code that populated every field from a
        // Facebook SDK GraphObject (Utils.getPropertyString(...) etc.). That
        // dead code has been removed for readability; the constructor is
        // intentionally empty and all fields stay null until assigned by
        // other code in this package.
    }

    public static Profile create() {
        return new Profile();
    }

    /**
     * Returns the ID of the user. <br>
     * <br>
     * <b> Permissions:</b><br>
     * {@link Permission#PUBLIC_PROFILE}
     *
     * @return the ID of the user
     */
    public String getId() {
        return mId;
    }

    /**
     * Returns the name of the user. <br>
     * <br>
     * <b> Permissions:</b><br>
     * {@link Permission#PUBLIC_PROFILE}
     *
     * @return the name of the user
     */
    public String getName() {
        return mName;
    }

    /**
     * Returns the first name of the user. <br>
     * <br>
     * <b> Permissions:</b><br>
     * {@link Permission#PUBLIC_PROFILE}
     *
     * @return the first name of the user
     */
    public String getFirstName() {
        return mFirstName;
    }

    /**
     * Returns the middle name of the user. <br>
     * <br>
     * <b> Permissions:</b><br>
     * {@link Permission#PUBLIC_PROFILE}
     *
     * @return the middle name of the user
     */
    public String getMiddleName() {
        return mMiddleName;
    }

    /**
     * Returns the last name of the user. <br>
     * <br>
     * <b> Permissions:</b><br>
     * {@link Permission#PUBLIC_PROFILE}
     *
     * @return the last name of the user
     */
    public String getLastName() {
        return mLastName;
    }

    /**
     * Returns the gender of the user. <br>
     * <br>
     * <b> Permissions:</b><br>
     * {@link Permission#PUBLIC_PROFILE}
     *
     * @return the gender of the user
     */
    public String getGender() {
        return mGender;
    }

    /**
     * Return the ISO language code and ISO country code of the user. <br>
     * <br>
     * <b> Permissions:</b><br>
     * {@link Permission#PUBLIC_PROFILE}
     *
     * @return the ISO language code and ISO country code of the user
     */
    public String getLocale() {
        return mLocale;
    }

    /**
     * Return the languages of the user.<br>
     * <br>
     * <b> Permissions:</b><br>
     * {@link Permission#USER_LIKES}
     *
     * @return the languages of the user
     */
    public List<Language> getLanguages() {
        return mLanguages;
    }

    /**
     * Returns the Facebook URL of the user. <br>
     * <br>
     * <b> Permissions:</b><br>
     * {@link Permission#PUBLIC_PROFILE}
     *
     * @return the Facebook URL of the user
     */
    public String getLink() {
        return mLink;
    }

    /**
     * The user's age range. <br>
     * <br>
     * <b> Permissions:</b><br>
     * {@link Permission#PUBLIC_PROFILE}
     *
     * @return the user's age range
     */
    public AgeRange getAgeRange() {
        return mAgeRange;
    }

    /**
     * An anonymous, but unique identifier for the user. <br>
     * <br>
     * <b> Permissions:</b><br>
     * {@link Permission#PUBLIC_PROFILE}
     *
     * @return the an anonymous, but unique identifier for the user
     */
    public String getThirdPartyId() {
        return mThirdPartyId;
    }

    /**
     * Specifies whether the user has installed the application associated with
     * the app access token that is used to make the request. <br>
     * <br>
     * <b> Permissions:</b><br>
     * {@link Permission#PUBLIC_PROFILE}
     *
     * @return <code>True</code> if installed, otherwise <code>False</code>
     *         (a null field is reported as <code>False</code>)
     */
    public Boolean getInstalled() {
        return mIsInstalled == null ? false : mIsInstalled;
    }

    /**
     * Return the timezone of the user.<br>
     * <br>
     * <b> Permissions:</b><br>
     * {@link Permission#PUBLIC_PROFILE}
     *
     * <br>
     * <br>
     * <b>Note:</b> <br>
     * Available only for my profile
     *
     * @return the timezone of the user
     */
    public Integer getTimeZone() {
        return mTimeZone;
    }

    /**
     * The last time the user's profile was updated; changes to the languages,
     * link, timezone, verified, interested_in, favorite_athletes,
     * favorite_teams, and video_upload_limits are not reflected in this
     * value.<br>
     * <br>
     * <b> Permissions:</b><br>
     * {@link Permission#PUBLIC_PROFILE}
     *
     * <br>
     * <br>
     *
     * @return string containing an ISO-8601 datetime
     */
    public String getUpdatedTime() {
        return mUpdatedTime;
    }

    /**
     * The user's account verification status.<br>
     * <br>
     * <b> Permissions:</b><br>
     * {@link Permission#PUBLIC_PROFILE}
     *
     * <br>
     * <br>
     * <b>Note:</b> <br>
     * A user is considered verified if she takes any of the following actions:
     * <li>Registers for mobile</li> <li>Confirms her account via SMS</li> <li>
     * Enters a valid credit card</li> <br>
     * <br>
     *
     * @return The user's account verification status
     */
    public Boolean getVerified() {
        return mVerified;
    }

    /**
     * Return the biography of the user.<br>
     * <br>
     * <b> Permissions:</b><br>
     * {@link Permission#USER_ABOUT_ME}<br>
     *
     * @return the biography of the user
     */
    public String getBio() {
        return mBio;
    }

    /**
     * Returns the birthday of the user. <b>MM/DD/YYYY</b> format <br>
     * <br>
     * <b> Permissions:</b><br>
     * {@link Permission#USER_BIRTHDAY} <br>
     *
     * @return the birthday of the user
     */
    public String getBirthday() {
        return mBirthday;
    }

    /**
     * The user's cover photo. The url of cover will be under
     * {@link Photo#getSource()} <br>
     * <br>
     * <b> Permissions:</b><br>
     * {@link Permission#PUBLIC_PROFILE}
     *
     * @return The user's cover photo
     */
    public Photo getCover() {
        return mCover;
    }

    /**
     * The user's currency settings <br>
     * <br>
     * <b> Permissions:</b><br>
     * {@link Permission#PUBLIC_PROFILE}
     *
     * @return The user's currency settings
     */
    public String getCurrency() {
        return mCurrency;
    }

    /**
     * The user's education history <br>
     * <br>
     * <b> Permissions:</b><br>
     * {@link Permission#USER_EDUCATION_HISTORY}<br>
     *
     * @return The user's education history
     */
    public List<Education> getEducation() {
        return mEducation;
    }

    /**
     * Return the email of the user.<br>
     * <br>
     * <b> Permissions:</b> <br>
     * {@link Permission#EMAIL}
     *
     * @return the email of the user
     */
    public String getEmail() {
        return mEmail;
    }

    /**
     * The user's hometown <br>
     * <br>
     * <b> Permissions:</b><br>
     * {@link Permission#USER_HOMETOWN}<br>
     *
     * @return The user's hometown
     */
    public String getHometown() {
        return mHometown;
    }

    /**
     * Returns the current city of the user. <br>
     * <br>
     * <b> Permissions:</b><br>
     * {@link Permission#USER_LOCATION}<br>
     *
     * @return the current city of the user
     */
    public Location getLocation() {
        return mLocation;
    }

    /**
     * The user's political view <br>
     * <br>
     * <b> Permissions:</b><br>
     * {@link Permission#USER_RELIGION_POLITICS}<br>
     *
     * @return The user's political view
     */
    public String getPolitical() {
        return mPolitical;
    }

    /**
     * The user's favorite athletes <br>
     * <br>
     * <b> Permissions:</b><br>
     * {@link Permission#USER_LIKES}<br>
     *
     * @return The user's favorite athletes
     */
    public List<String> getFavoriteAthletes() {
        return mFavoriteAthletess;
    }

    /**
     * The user's favorite teams <br>
     * <br>
     * <b> Permissions:</b><br>
     * {@link Permission#USER_LIKES}<br>
     *
     * @return The user's favorite teams
     */
    public List<String> getFavoriteTeams() {
        return mFavoriteTeams;
    }

    /**
     * The user's profile pic <br>
     * <br>
     * <b> Permissions:</b><br>
     * {@link Permission#PUBLIC_PROFILE}
     *
     * @return The user's profile pic
     */
    public String getPicture() {
        return mPicture;
    }

    /**
     * The user's favorite quotes <br>
     * <br>
     * <b> Permissions:</b><br>
     * {@link Permission#USER_ABOUT_ME}<br>
     *
     * @return The user's favorite quotes
     */
    public String getQuotes() {
        return mQuotes;
    }

    /**
     * The user's relationship status: <br>
     * <li>Single</li> <li>In a relationship</li> <li>Engaged</li> <li>Married</li>
     * <li>It's complicated</li> <li>In an open relationship</li> <li>Widowed</li>
     * <li>Separated</li> <li>Divorced</li> <li>In a civil union</li> <li>In a
     * domestic partnership</li> <br>
     * <br>
     * <b> Permissions:</b><br>
     * {@link Permission#USER_RELATIONSHIPS}<br>
     *
     * @return The user's relationship status
     */
    public String getRelationshipStatus() {
        return mRelationshipStatus;
    }

    /**
     * The user's religion <br>
     * <br>
     * <b> Permissions:</b><br>
     * {@link Permission#USER_RELIGION_POLITICS}<br>
     *
     * @return The user's religion
     */
    public String getReligion() {
        return mReligion;
    }

    /**
     * The URL of the user's personal website <br>
     * <br>
     * <b>
Permissions:</b><br> * {@link Permission#USER_WEBSITE}<br> * * @return The URL of the user's personal website */ public String getWebsite() { return mWebsite; } /** * The user's work history <br> * <br> * <b> Permissions:</b><br> * {@link Permission#USER_WORK_HISTORY}<br> * * @return The user's work history */ public List<Work> getWork() { return mWorks; } public static class Properties { private final Bundle mBundle; private Properties(Builder builder) { mBundle = new Bundle(); Iterator<String> iterator = builder.properties.iterator(); String fields = Utils.join(iterator, ","); mBundle.putString("fields", fields); } public Bundle getBundle() { return mBundle; } /** * <b>Description:</b><br> * The user's Facebook ID<br> * <br> * * <b>Permissions:</b><br> * {@link Permission#PUBLIC_PROFILE} * */ public static final String ID = "id"; /** * <b>Description:</b><br> * The user's full name<br> * <br> * * <b>Permissions:</b><br> * {@link Permission#PUBLIC_PROFILE} * */ public static final String NAME = "name"; /** * <b>Description:</b><br> * The user's first name<br> * <br> * * <b>Permissions:</b><br> * {@link Permission#PUBLIC_PROFILE} * */ public static final String FIRST_NAME = "first_name"; /** * <b>Description:</b><br> * The user's middle name<br> * <br> * * <b>Permissions:</b><br> * {@link Permission#PUBLIC_PROFILE} * */ public static final String MIDDLE_NAME = "middle_name"; /** * <b>Description:</b><br> * The user's last name<br> * <br> * * <b>Permissions:</b><br> * {@link Permission#PUBLIC_PROFILE} * */ public static final String LAST_NAME = "last_name"; /** * <b>Description:</b><br> * The user's gender: female or male<br> * <br> * * <b>Permissions:</b><br> * {@link Permission#PUBLIC_PROFILE} * */ public static final String GENDER = "gender"; /** * <b>Description:</b><br> * The user's locale<br> * <br> * * <b>Permissions:</b><br> * {@link Permission#PUBLIC_PROFILE} * */ public static final String LOCALE = "locale"; /** * <b>Description:</b><br> * The user's 
languages<br> * <br> * * <b>Permissions:</b><br> * {@link Permission#USER_LIKES} * */ public static final String LANGUAGE = "languages"; /** * <b>Description:</b><br> * The URL of the profile for the user on Facebook<br> * <br> * * <b>Permissions:</b><br> * {@link Permission#PUBLIC_PROFILE} * */ public static final String LINK = "link"; /** * <b>Description:</b><br> * The user's age range<br> * <br> * * <b>Permissions:</b><br> * {@link Permission#PUBLIC_PROFILE} * */ public static final String AGE_RANGE = "age_range"; /** * <b>Description:</b><br> * An anonymous, but unique identifier for the user<br> * <br> * * <b>Permissions:</b><br> * {@link Permission#PUBLIC_PROFILE} * */ public static final String THIRD_PARTY_ID = "third_party_id"; /** * <b>Description:</b><br> * Specifies whether the user has installed the application associated * with the app access token that is used to make the request<br> * <br> * * <b>Permissions:</b><br> * {@link Permission#PUBLIC_PROFILE} * */ public static final String INSTALLED = "installed"; /** * <b>Description:</b><br> * The user's timezone offset from UTC<br> * <br> * * <b>Permissions:</b><br> * {@link Permission#PUBLIC_PROFILE} * */ public static final String TIMEZONE = "timezone"; /** * <b>Description:</b><br> * The last time the user's profile was updated; changes to the * languages, link, timezone, verified, interested_in, * favorite_athletes, favorite_teams, and video_upload_limits are not * not reflected in this value<br> * <br> * * <b>Permissions:</b><br> * {@link Permission#PUBLIC_PROFILE} * */ public static final String UPDATED_TIME = "updated_time"; /** * <b>Description:</b><br> * The user's account verification status, either true or false<br> * <br> * * <b>Permissions:</b><br> * {@link Permission#PUBLIC_PROFILE} * */ public static final String VERIFIED = "verified"; /** * <b>Description:</b><br> * The user's biography<br> * <br> * * <b>Permissions:</b><br> * {@link Permission#USER_ABOUT_ME}<br> * */ public static 
final String BIO = "bio"; /** * <b>Description:</b><br> * The user's birthday<br> * <br> * * <b>Permissions:</b><br> * {@link Permission#USER_BIRTHDAY}<br> * */ public static final String BIRTHDAY = "birthday"; /** * <b>Description:</b><br> * The user's cover photo<br> * <br> * * <b>Permissions:</b><br> * {@link Permission#PUBLIC_PROFILE} * */ public static final String COVER = "cover"; /** * <b>Description:</b><br> * The user's currency settings <br> * <br> * * <b>Permissions:</b><br> * {@link Permission#PUBLIC_PROFILE} * */ public static final String CURRENCY = "currency"; /** * <b>Description:</b><br> * A list of the user's devices beyond desktop<br> * <br> * * <b>Permissions:</b><br> * {@link Permission#PUBLIC_PROFILE} * */ public static final String DEVICES = "devices"; /** * <b>Description:</b><br> * A list of the user's education history<br> * <br> * * <b>Permissions:</b><br> * {@link Permission#USER_EDUCATION_HISTORY}<br> * */ public static final String EDUCATION = "education"; /** * <b>Description:</b><br> * The email address granted by the user<br> * <br> * * <b>Note:</b> There is no way for apps to obtain email addresses for a * user's friends.<br> * <br> * * <b>Permissions:</b><br> * {@link Permission#EMAIL} */ public static final String EMAIL = "email"; /** * <b>Description:</b><br> * The user's hometown<br> * <br> * * <b>Permissions:</b><br> * {@link Permission#USER_HOMETOWN}<br> * */ public static final String HOMETOWN = "hometown"; /** * <b>Description:</b><br> * The genders the user is interested in<br> * <br> * * <b>Permissions:</b><br> * {@link Permission#USER_RELATIONSHIP_DETAILS}<br> * */ public static final String INTERESTED_IN = "interested_in"; /** * <b>Description:</b><br> * The user's current city<br> * <br> * * <b>Permissions:</b><br> * {@link Permission#USER_LOCATION}<br> * */ public static final String LOCATION = "location"; /** * <b>Description:</b><br> * The user's political view<br> * <br> * * <b>Permissions:</b><br> * {@link 
Permission#USER_RELIGION_POLITICS}<br> * */ public static final String POLITICAL = "political"; /** * <b>Description:</b><br> * The mobile payment price-points available for that user, for use when * processing payments using Facebook Credits<br> * <br> * * <b>Permissions:</b><br> * {@link Permission#PUBLIC_PROFILE} * */ public static final String PAYMENT_PRICEPOINTS = "payment_pricepoints"; /** * <b>Description:</b><br> * The mobile payment price-points available for that user, for use when * processing payments using Local Currency<br> * <br> * * <b>Permissions:</b><br> * {@link Permission#PUBLIC_PROFILE} * */ public static final String PAYMENT_MOBILE_PRICEPOINTS = "payment_mobile_pricepoints"; /** * <b>Description:</b><br> * The user's favorite athletes<br> * <br> * * <b>Permissions:</b><br> * {@link Permission#USER_LIKES}<br> * */ public static final String FAVORITE_ATHLETES = "favorite_athletes"; /** * <b>Description:</b><br> * The user's favorite teams<br> * <br> * * <b>Permissions:</b><br> * {@link Permission#USER_LIKES}<br> * */ public static final String FAVORITE_TEAMS = "favorite_teams"; /** * <b>Description:</b><br> * The user's profile pic<br> * <br> * * <b>Permissions:</b><br> * {@link Permission#PUBLIC_PROFILE} * */ public static final String PICTURE = "picture"; /** * <b>Description:</b><br> * The user's favorite quotes<br> * <br> * * <b>Permissions:</b><br> * {@link Permission#USER_ABOUT_ME}<br> * */ public static final String QUOTES = "quotes"; /** * <b>Description:</b><br> * The user's relationship status: Single, In a relationship, Engaged, * Married, It's complicated, In an open relationship, Widowed, * Separated, Divorced, In a civil union, In a domestic partnership<br> * <br> * * <b>Permissions:</b><br> * {@link Permission#USER_RELATIONSHIPS}<br> * */ public static final String RELATIONSHIP_STATUS = "relationship_status"; /** * <b>Description:</b><br> * The user's religion<br> * <br> * * <b>Permissions:</b><br> * {@link 
Permission#USER_RELIGION_POLITICS}<br> * */ public static final String RELIGION = "religion"; /** * <b>Description:</b><br> * Information about security settings enabled on the user's account<br> * <br> * * <b>Permissions:</b><br> * {@link Permission#PUBLIC_PROFILE} * */ public static final String SECURITY_SETTINGS = "security_settings"; /** * <b>Description:</b><br> * The user's significant other<br> * <br> * * <b>Permissions:</b><br> * {@link Permission#USER_RELATIONSHIPS}<br> * */ public static final String SINGNIFICANT_OTHER = "significant_other"; /** * <b>Description:</b><br> * The size of the video file and the length of the video that a user * can upload<br> * <br> * * <b>Permissions:</b><br> * {@link Permission#PUBLIC_PROFILE} * */ public static final String VIDEO_UPLOAD_LIMITS = "video_upload_limits"; /** * <b>Description:</b><br> * The URL of the user's personal website<br> * <br> * * <b>Permissions:</b><br> * {@link Permission#USER_WEBSITE}<br> * */ public static final String WEBSITE = "website"; /** * <b>Description:</b><br> * A list of the user's work history<br> * <br> * * <b>Permissions:</b><br> * {@link Permission#USER_WORK_HISTORY}<br> * */ public static final String WORK = "work"; public static class Builder { Set<String> properties; public Builder() { properties = new HashSet<String>(); } /** * Add property you need * * @param property * The property of the user profile<br> * For example: {@link Properties#FIRST_NAME} * @return {@link Builder} */ public Builder add(String property) { properties.add(property); return this; } /** * Add property and attribute you need * * @param property * The property of the user profile<br> * For example: {@link Properties#PICTURE} * @param attributes * For example: picture can have type,width and height<br> * * @return {@link Builder} */ public Builder add(String property, Attributes attributes) { Map<String, String> map = attributes.getAttributes(); StringBuilder stringBuilder = new StringBuilder(); 
stringBuilder.append(property); stringBuilder.append('.'); stringBuilder.append(Utils.join(map, '.', '(', ')')); properties.add(stringBuilder.toString()); return this; } public Properties build() { return new Properties(this); } } } }
/*L * Copyright Northwestern University. * * Distributed under the OSI-approved BSD 3-Clause License. * See http://ncip.github.io/psc/LICENSE.txt for details. */ package edu.northwestern.bioinformatics.studycalendar.web.template; import edu.northwestern.bioinformatics.studycalendar.StudyCalendarSystemException; import edu.northwestern.bioinformatics.studycalendar.dao.DaoFinder; import edu.northwestern.bioinformatics.studycalendar.domain.*; import edu.northwestern.bioinformatics.studycalendar.domain.delta.Change; import edu.northwestern.bioinformatics.studycalendar.security.authorization.PscRole; import edu.northwestern.bioinformatics.studycalendar.service.DeltaService; import edu.northwestern.bioinformatics.studycalendar.service.StudyService; import edu.northwestern.bioinformatics.studycalendar.service.TemplateService; import edu.northwestern.bioinformatics.studycalendar.web.accesscontrol.ResourceAuthorization; import edu.northwestern.bioinformatics.studycalendar.web.delta.RevisionChanges; import gov.nih.nci.cabig.ctms.domain.DomainObject; import org.springframework.beans.factory.annotation.Required; import org.springframework.validation.Errors; import java.util.*; /** * Base class for commands invoked from the main display template page. 
 *
 * @author Rhett Sutphin
 */
public abstract class EditTemplateCommand implements EditCommand {
    // Lazily selected handler for the bound domain object (study / epoch / study segment).
    private Mode mode;

    // Injected collaborators
    private DeltaService deltaService;
    private StudyService studyService;
    private DaoFinder daoFinder;
    private TemplateService templateService;

    // directly bound (from request parameters)
    private Study study;
    private Epoch epoch;
    private StudySegment studySegment;

    // revised (the bound objects as they appear after applying the in-development amendment)
    private Study revisedStudy;
    private Epoch revisedEpoch;
    private StudySegment revisedStudySegment;

    /**
     * Authorizes template managers (builders and study creators) for the bound study.
     *
     * @param bindErrors binding errors from the controller (not consulted here)
     * @return the authorizations required to execute this command
     */
    public Collection<ResourceAuthorization> authorizations(Errors bindErrors) {
        return ResourceAuthorization.createTemplateManagementAuthorizations(
            getStudy(), PscRole.STUDY_CALENDAR_TEMPLATE_BUILDER, PscRole.STUDY_CREATOR);
    }

    /**
     * Performs the edit against the bound study if the mode considers the action valid.
     * The study must be in development (see {@link #verifyEditable}).
     *
     * @return true if the edit was performed and saved, false if the mode rejected it
     */
    public boolean apply() {
        Study target = getStudy();
        verifyEditable(target);
        if (validAction()) {
            performEdit();
            studyService.save(target);
            // Force the revised views to be recomputed against the just-saved state.
            cleanUpdateRevised();
            return true;
        }
        return false;
    }

    /** Delegates the actual edit to the selected mode. */
    public void performEdit() {
        getMode().performEdit();
    }

    /** Asks the selected mode whether the requested action is valid. */
    public boolean validAction() {
        return getMode().validAction();
    }

    /**
     * Guards against editing a released template.
     *
     * @throws StudyCalendarSystemException if the study is not in development
     */
    private void verifyEditable(Study target) {
        if (!target.isInDevelopment()) {
            // StudyCalendarSystemException applies the remaining args as format args — TODO confirm
            throw new StudyCalendarSystemException(
                "The study %s is not in development and so may not be edited.", target.getName());
        }
    }

    /**
     * Builds the reference-data model for the template display page: the study, its
     * development amendment, the computed revision changes, the revised epochs, and
     * (for the "rename" view) a human-readable label for the renamed node type.
     */
    public Map<String, Object> getModel() {
        Map<String, Object> model = new HashMap<String, Object>();
        Map<String, Object> modeModel = getMode().getModel();
        model.put("study", getStudy());
        model.put("developmentRevision", getStudy().getDevelopmentAmendment());
        model.put("revisionChanges",
            new RevisionChanges(daoFinder, getStudy().getDevelopmentAmendment(), getStudy()));
        if (modeModel != null) {
            model.putAll(modeModel);
        }
        // Re-revise rather than reuse revisedStudy so the epochs reflect the latest deltas.
        Study theRevisedStudy = deltaService.revise(getStudy(), getStudy().getDevelopmentAmendment());
        List<Epoch> epochs = theRevisedStudy.getPlannedCalendar().getEpochs();
        model.put("epochs", epochs);
        if (getRelativeViewName() != null && getRelativeViewName().equals("rename")) {
            // Keyed by the view name ("rename"): tells the view which kind of node is renamed.
            // NOTE(review): relies on Mode.toString() containing "studysegment"/"epoch" — fragile.
            if (getMode().toString().toLowerCase().contains("studysegment")){
                model.put(getRelativeViewName(), "Study Segment");
            }
            if (getMode().toString().toLowerCase().contains("epoch")) {
                model.put(getRelativeViewName(), "Epoch");
            }
        }
        if (getStudy().getDevelopmentAmendment() != null) {
            model.put("canEdit", "true");
        }
        return model;
    }

    /**
     * Records a change against the development amendment for the given node and
     * refreshes the revised views.
     */
    protected void updateRevision(PlanTreeNode<?> node, Change change) {
        deltaService.updateRevision(getStudy().getDevelopmentAmendment(), node, change);
        cleanUpdateRevised();
    }

    ////// MODES

    // Subclasses should provide a mode for handling each type of bound domain object
    // that makes sense

    /** The view name (relative to the template page) that the selected mode renders. */
    public String getRelativeViewName() {
        return getMode().getRelativeViewName();
    }

    // Lazily selects and caches the mode; selection depends on which objects were bound.
    private Mode getMode() {
        if (mode == null) mode = selectMode();
        return mode;
    }

    protected Mode studyMode() {
        throw new UnsupportedOperationException("No study mode for " + getClass().getSimpleName());
    }

    protected Mode epochMode() {
        throw new UnsupportedOperationException("No epoch mode for " + getClass().getSimpleName());
    }

    protected Mode studySegmentMode() {
        throw new UnsupportedOperationException("No studySegment mode for " + getClass().getSimpleName());
    }

    // Most specific bound object wins: study segment, then epoch, then study.
    protected Mode selectMode() {
        Mode newMode;
        if (getStudySegment() != null) {
            newMode = studySegmentMode();
        } else if (getEpoch() != null) {
            newMode = epochMode();
        } else {
            newMode = studyMode();
        }
        return newMode;
    }

    /** Strategy for one kind of bound domain object (study, epoch, or study segment). */
    protected abstract static class Mode {
        abstract String getRelativeViewName();
        abstract Map<String, Object> getModel();
        abstract void performEdit();
        public boolean validAction() { return true; }
    }

    ////// REVISED-TO-CURRENT versions of bound props

    // NOTE(review): only revisedStudy is nulled here; revisedEpoch/revisedStudySegment are
    // overwritten (not cleared) by updateRevised — confirm stale values cannot leak.
    private void cleanUpdateRevised() {
        revisedStudy = null; // reset
        updateRevised();
    }

    /**
     * Recomputes the revised study (study + development amendment) and locates the
     * revised counterparts of the bound epoch/study segment by ID within it.
     */
    private void updateRevised() {
        if (getStudy() != null && revisedStudy == null) {
            revisedStudy = deltaService.revise(getStudy(), getStudy().getDevelopmentAmendment());
        }
        if (revisedStudy != null && (getEpoch() != null || getStudySegment() != null)) {
            for (Epoch e : revisedStudy.getPlannedCalendar().getEpochs()) {
                if (getEpoch() != null && e.getId().equals(getEpoch().getId())) {
                    revisedEpoch = e;
                }
                for (StudySegment a : e.getStudySegments()) {
                    if (getStudySegment() != null && a.getId().equals(getStudySegment().getId())) {
                        revisedStudySegment = a;
                    }
                }
            }
        }
    }

    public Study getRevisedStudy() {
        return revisedStudy;
    }

    public Epoch getRevisedEpoch() {
        return revisedEpoch;
    }

    public StudySegment getRevisedStudySegment() {
        return revisedStudySegment;
    }

    /** Parent calendar to use when creating a delta for the bound epoch. */
    public PlannedCalendar getSafeEpochParent() {
        return getSafeParent(getEpoch(), getRevisedEpoch());
    }

    /** Parent epoch to use when creating a delta for the bound study segment. */
    public Epoch getSafeStudySegmentParent() {
        return getSafeParent(getStudySegment(), getRevisedStudySegment());
    }

    private <P extends DomainObject> P getSafeParent(PlanTreeNode<P> bound, PlanTreeNode<P> revised) {
        // these casts are safe because this method is only used with Study Segments or Epochs
        if (bound.getParent() == null) {
            // If the thing targeted is newly added, its parent will be null
            // In order to update the parent's delta, we need to find the parent in the revised tree
            return revised.getParent();
        } else {
            // However, if it isn't newly added, it might not have any other changes
            // in order to create the delta properly, we need to use the persistent one
            return bound.getParent();
        }
    }

    ////// BOUND PROPERTIES

    public Study getStudy() {
        return study;
    }

    // Rejects non-development studies at bind time, then refreshes the revised views.
    public void setStudy(Study study) {
        verifyEditable(study);
        this.study = study;
        updateRevised();
    }

    public Epoch getEpoch() {
        return epoch;
    }

    public void setEpoch(Epoch epoch) {
        this.epoch = epoch;
        updateRevised();
    }

    public StudySegment getStudySegment() {
        return studySegment;
    }

    public void setStudySegment(StudySegment studySegment) {
        this.studySegment = studySegment;
        updateRevised();
    }

    ////// CONFIGURATION

    @Required
    public void setStudyService(StudyService studyService) {
        this.studyService = studyService;
    }

    public StudyService getStudyService() {
        return studyService;
    }

    public TemplateService getTemplateService() {
        return templateService;
    }

    public void setTemplateService(TemplateService templateService) {
        this.templateService = templateService;
    }

    @Required
    public void setDeltaService(DeltaService deltaService) {
        this.deltaService = deltaService;
    }

    public void setDaoFinder(DaoFinder daoFinder) {
        this.daoFinder = daoFinder;
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.cassandra.cql3.functions; import java.nio.ByteBuffer; import java.util.Collection; import java.util.List; import com.google.common.collect.ImmutableList; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.cassandra.db.marshal.*; import org.apache.cassandra.transport.ProtocolVersion; import org.apache.cassandra.utils.ByteBufferUtil; import org.apache.cassandra.utils.UUIDGen; public abstract class TimeFcts { public static Logger logger = LoggerFactory.getLogger(TimeFcts.class); public static Collection<Function> all() { return ImmutableList.of(now("now", TimeUUIDType.instance), now("currenttimeuuid", TimeUUIDType.instance), now("currenttimestamp", TimestampType.instance), now("currentdate", SimpleDateType.instance), now("currenttime", TimeType.instance), minTimeuuidFct, maxTimeuuidFct, dateOfFct, unixTimestampOfFct, toDate(TimeUUIDType.instance), toTimestamp(TimeUUIDType.instance), toUnixTimestamp(TimeUUIDType.instance), toUnixTimestamp(TimestampType.instance), toDate(TimestampType.instance), toUnixTimestamp(SimpleDateType.instance), toTimestamp(SimpleDateType.instance)); } public static final Function now(final String name, final TemporalType<?> type) { 
return new NativeScalarFunction(name, type) { @Override public ByteBuffer execute(ProtocolVersion protocolVersion, List<ByteBuffer> parameters) { return type.now(); } }; }; public static final Function minTimeuuidFct = new NativeScalarFunction("mintimeuuid", TimeUUIDType.instance, TimestampType.instance) { public ByteBuffer execute(ProtocolVersion protocolVersion, List<ByteBuffer> parameters) { ByteBuffer bb = parameters.get(0); if (bb == null) return null; return UUIDGen.toByteBuffer(UUIDGen.minTimeUUID(TimestampType.instance.compose(bb).getTime())); } }; public static final Function maxTimeuuidFct = new NativeScalarFunction("maxtimeuuid", TimeUUIDType.instance, TimestampType.instance) { public ByteBuffer execute(ProtocolVersion protocolVersion, List<ByteBuffer> parameters) { ByteBuffer bb = parameters.get(0); if (bb == null) return null; return UUIDGen.toByteBuffer(UUIDGen.maxTimeUUID(TimestampType.instance.compose(bb).getTime())); } }; /** * Function that convert a value of <code>TIMEUUID</code> into a value of type <code>TIMESTAMP</code>. * @deprecated Replaced by the {@link #timeUuidToTimestamp} function */ public static final NativeScalarFunction dateOfFct = new NativeScalarFunction("dateof", TimestampType.instance, TimeUUIDType.instance) { private volatile boolean hasLoggedDeprecationWarning; public ByteBuffer execute(ProtocolVersion protocolVersion, List<ByteBuffer> parameters) { if (!hasLoggedDeprecationWarning) { hasLoggedDeprecationWarning = true; logger.warn("The function 'dateof' is deprecated." + " Use the function 'toTimestamp' instead."); } ByteBuffer bb = parameters.get(0); if (bb == null) return null; long timeInMillis = UUIDGen.unixTimestamp(UUIDGen.getUUID(bb)); return ByteBufferUtil.bytes(timeInMillis); } }; /** * Function that convert a value of type <code>TIMEUUID</code> into an UNIX timestamp. 
* @deprecated Replaced by the {@link #timeUuidToUnixTimestamp} function */ public static final NativeScalarFunction unixTimestampOfFct = new NativeScalarFunction("unixtimestampof", LongType.instance, TimeUUIDType.instance) { private volatile boolean hasLoggedDeprecationWarning; public ByteBuffer execute(ProtocolVersion protocolVersion, List<ByteBuffer> parameters) { if (!hasLoggedDeprecationWarning) { hasLoggedDeprecationWarning = true; logger.warn("The function 'unixtimestampof' is deprecated." + " Use the function 'toUnixTimestamp' instead."); } ByteBuffer bb = parameters.get(0); if (bb == null) return null; return ByteBufferUtil.bytes(UUIDGen.unixTimestamp(UUIDGen.getUUID(bb))); } }; /** * Creates a function that convert a value of the specified type into a <code>DATE</code>. * @param type the temporal type * @return a function that convert a value of the specified type into a <code>DATE</code>. */ public static final NativeScalarFunction toDate(final TemporalType<?> type) { return new NativeScalarFunction("todate", SimpleDateType.instance, type) { public ByteBuffer execute(ProtocolVersion protocolVersion, List<ByteBuffer> parameters) { ByteBuffer bb = parameters.get(0); if (bb == null || !bb.hasRemaining()) return null; long millis = type.toTimeInMillis(bb); return SimpleDateType.instance.fromTimeInMillis(millis); } }; } /** * Creates a function that convert a value of the specified type into a <code>TIMESTAMP</code>. * @param type the temporal type * @return a function that convert a value of the specified type into a <code>TIMESTAMP</code>. 
*/ public static final NativeScalarFunction toTimestamp(final TemporalType<?> type) { return new NativeScalarFunction("totimestamp", TimestampType.instance, type) { public ByteBuffer execute(ProtocolVersion protocolVersion, List<ByteBuffer> parameters) { ByteBuffer bb = parameters.get(0); if (bb == null || !bb.hasRemaining()) return null; long millis = type.toTimeInMillis(bb); return TimestampType.instance.fromTimeInMillis(millis); } }; } /** * Creates a function that convert a value of the specified type into an UNIX timestamp. * @param type the temporal type * @return a function that convert a value of the specified type into an UNIX timestamp. */ public static final NativeScalarFunction toUnixTimestamp(final TemporalType<?> type) { return new NativeScalarFunction("tounixtimestamp", LongType.instance, type) { public ByteBuffer execute(ProtocolVersion protocolVersion, List<ByteBuffer> parameters) { ByteBuffer bb = parameters.get(0); if (bb == null || !bb.hasRemaining()) return null; return ByteBufferUtil.bytes(type.toTimeInMillis(bb)); } }; } }
/** * Copyright 2008-2017 Qualogy Solutions B.V. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package test.com.qualogy.qafe.business.integration.adapter; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import com.qualogy.qafe.bind.commons.type.Out; import com.qualogy.qafe.business.integration.adapter.Adapter; import com.qualogy.qafe.core.datastore.DataIdentifier; import com.qualogy.qafe.core.datastore.DataStore; import junit.framework.TestCase; public class AdapterTest extends TestCase { public void testEmptyList(){ DataIdentifier dataId = DataStore.register(); List<Object> myList = new ArrayList<Object>(); List outputMapping = new ArrayList(); Out out = new Out(); out.setName("result"); outputMapping.add(out); Adapter.adaptOut(dataId,myList, outputMapping); Object result = DataStore.findValue(dataId, out.getName()); System.err.println("testEmptyList"); System.err.println(DataStore.toLogString(dataId)); DataStore.unregister(dataId); } public void testListOfString(){ DataIdentifier dataId = DataStore.register(); List<String> myList = new ArrayList<String>(); myList.add("String 1"); myList.add("String 2"); myList.add("String 3"); List outputMapping = new ArrayList(); Out out = new Out(); out.setName("result"); outputMapping.add(out); Adapter.adaptOut(dataId,myList, outputMapping); Object result = DataStore.findValue(dataId, out.getName()); System.err.println("testListOfString"); 
System.err.println(DataStore.toLogString(dataId)); DataStore.unregister(dataId); } public void testListSingleElementSimpleType(){ DataIdentifier dataId = DataStore.register(); List<Object> myList = new ArrayList<Object>(); myList.add(new Integer(1)); List outputMapping = new ArrayList(); Out out = new Out(); out.setName("result"); outputMapping.add(out); Adapter.adaptOut(dataId,myList, outputMapping); Object result = DataStore.findValue(dataId, out.getName()); System.err.println("testListSingleElementSimpleType"); System.err.println(DataStore.toLogString(dataId)); DataStore.unregister(dataId); } public void testListSingleElementOnePerson(){ DataIdentifier dataId = DataStore.register(); List<Object> myList = new ArrayList<Object>(); myList.add(new DummyPerson("Hallo","Khaznadar")); List outputMapping = new ArrayList(); Out out = new Out(); out.setName("result"); outputMapping.add(out); Adapter.adaptOut(dataId,myList, outputMapping); Object result = DataStore.findValue(dataId, out.getName()); System.err.println("testListSingleElementOnePerson"); System.err.println(DataStore.toLogString(dataId)); DataStore.unregister(dataId); } public void testListMultipleElementPerson(){ DataIdentifier dataId = DataStore.register(); List<Object> myList = new ArrayList<Object>(); myList.add(new DummyPerson("Hallo","Khaznadar")); myList.add(new DummyPerson("Ravi","Nair")); List outputMapping = new ArrayList(); Out out = new Out(); out.setName("result"); outputMapping.add(out); Adapter.adaptOut(dataId,myList, outputMapping); Object result = DataStore.findValue(dataId, out.getName()); System.err.println("testListMultipleElementPerson"); System.err.println(DataStore.toLogString(dataId)); DataStore.unregister(dataId); } public void testHashMapWithPerson(){ DataIdentifier dataId = DataStore.register(); Map data = new HashMap<String, DummyPerson>(); data.put("person1" , new DummyPerson("Hallo","Khaznadar")); List outputMapping = new ArrayList(); Out out = new Out(); out.setName("result"); 
outputMapping.add(out); Adapter.adaptOut(dataId,data, outputMapping); Object result = DataStore.findValue(dataId, out.getName()); System.err.println("testHashMapWithPerson"); System.err.println(DataStore.toLogString(dataId)); DataStore.unregister(dataId); } // not working- TO DO public void testSetWithPerson(){ DataIdentifier dataId = DataStore.register(); Set data = new HashSet<String>(); data.add("Hallo"); List outputMapping = new ArrayList(); Out out = new Out(); out.setName("result"); outputMapping.add(out); Adapter.adaptOut(dataId,data, outputMapping); Object result = DataStore.findValue(dataId, out.getName()); System.err.println("testSetWithPerson"); System.err.println(DataStore.toLogString(dataId)); DataStore.unregister(dataId); } public void testListMultipleElementComplexPerson(){ DataIdentifier dataId = DataStore.register(); List<Object> myList = new ArrayList<Object>(); DummyPersonMoreComplex tp1 = new DummyPersonMoreComplex("Hallo","Khaznadar"); DummyPersonMoreComplex tp2 = new DummyPersonMoreComplex("Ravi","Nair"); tp1.add("house", "nearby"); myList.add(tp1); myList.add(tp2); List outputMapping = new ArrayList(); Out out = new Out(); out.setName("result"); outputMapping.add(out); Adapter.adaptOut(dataId,myList, outputMapping); Object result = DataStore.findValue(dataId, out.getName()); System.err.println("testListMultipleElementComplexPerson"); System.err.println(DataStore.toLogString(dataId)); DataStore.unregister(dataId); } public void testListMultipleElementComplexPersonObject(){ DataIdentifier dataId = DataStore.register(); List<Object> myList = new ArrayList<Object>(); DummyPersonMoreComplexObject tp1 = new DummyPersonMoreComplexObject("Hallo","Khaznadar"); DummyPersonMoreComplexObject tp2 = new DummyPersonMoreComplexObject("Ravi","Nair"); tp1.add("colleague", tp2); myList.add(tp1); List outputMapping = new ArrayList(); Out out = new Out(); out.setName("result"); outputMapping.add(out); Adapter.adaptOut(dataId,myList, outputMapping); Object result 
= DataStore.findValue(dataId, out.getName()); System.err.println("testListMultipleElementComplexPersonObject"); System.err.println(DataStore.toLogString(dataId)); DataStore.unregister(dataId); } public void testListMultipleElementComplexPersonList(){ DataIdentifier dataId = DataStore.register(); List<Object> myList = new ArrayList<Object>(); DummyPersonMoreComplexObjectList tp1 = new DummyPersonMoreComplexObjectList("Hallo","Khaznadar"); DummyPersonMoreComplexObjectList tp2 = new DummyPersonMoreComplexObjectList("Ravi","Nair"); tp1.add(tp2); myList.add(tp1); List outputMapping = new ArrayList(); Out out = new Out(); out.setName("result"); outputMapping.add(out); Adapter.adaptOut(dataId,myList, outputMapping); Object result = DataStore.findValue(dataId, out.getName()); System.err.println("testListMultipleElementComplexPersonList"); System.err.println(DataStore.toLogString(dataId)); DataStore.unregister(dataId); } public void testListMultipleElementComplexPersonListRecursive(){ DataIdentifier dataId = DataStore.register(); List<Object> myList = new ArrayList<Object>(); DummyPersonMoreComplexObjectList tp1 = new DummyPersonMoreComplexObjectList("Hallo","Khaznadar"); DummyPersonMoreComplexObjectList tp2 = new DummyPersonMoreComplexObjectList("Ravi","Nair"); tp1.add(tp2); myList.add(tp1); tp2.add(tp1); List outputMapping = new ArrayList(); Out out = new Out(); out.setName("result"); outputMapping.add(out); Adapter.adaptOut(dataId,myList, outputMapping); Object result = DataStore.findValue(dataId, out.getName()); System.err.println("testListMultipleElementComplexPersonListRecursive"); System.err.println(DataStore.toLogString(dataId)); DataStore.unregister(dataId); } public void testListMultipleElementComplexListOfList(){ DataIdentifier dataId = DataStore.register(); List<DummyPersonMoreComplexObjectList> myList = new ArrayList<DummyPersonMoreComplexObjectList>(); DummyPersonMoreComplexObjectList tp1 = new DummyPersonMoreComplexObjectList("Hallo","Khaznadar"); 
DummyPersonMoreComplexObjectList tp2 = new DummyPersonMoreComplexObjectList("Ravi","Nair"); myList.add(tp1); myList.add(tp2); List<List<DummyPersonMoreComplexObjectList>> listOfList = new ArrayList<List<DummyPersonMoreComplexObjectList>>(); listOfList.add(myList); List outputMapping = new ArrayList(); Out out = new Out(); out.setName("result"); outputMapping.add(out); Adapter.adaptOut(dataId,listOfList, outputMapping); Object result = DataStore.findValue(dataId, out.getName()); System.err.println("testListMultipleElementComplexListOfList"); System.err.println(DataStore.toLogString(dataId)); DataStore.unregister(dataId); } }
/* * JBoss, Home of Professional Open Source. * Copyright 2012 Red Hat, Inc., and individual contributors * as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jboss.web; import org.jboss.logging.Cause; import org.jboss.logging.Message; import org.jboss.logging.MessageBundle; import org.jboss.logging.Messages; /** * Logging IDs 8500-8850 * @author Remy Maucherat */ @MessageBundle(projectCode = "JBWEB") public interface WebsocketsMessages { /** * The messages */ WebsocketsMessages MESSAGES = Messages.getBundle(WebsocketsMessages.class); @Message(id = 8500, value = "Concurrent read operations are not permitted") IllegalStateException invalidConcurrentRead(); @Message(id = 8501, value = "Concurrent write operations are not permitted") IllegalStateException invalidConcurrentWrite(); @Message(id = 8502, value = "Unexpected Status of SSLEngineResult after a wrap() operation") IllegalStateException unexpectedStatusAfterWrap(); @Message(id = 8503, value = "Flag that indicates a write is in progress was found to be false (it should have been true) when trying to complete a write operation") IllegalStateException invalidWriteState(); @Message(id = 8504, value = "Unexpected end of stream") String unexpectedEndOfStream(); @Message(id = 8505, value = "Flag that indicates a read is in progress was found to be false (it should have been true) when trying to complete a read operation") IllegalStateException invalidReadState(); @Message(id = 8506, value 
= "Unexpected Status of SSLEngineResult after an unwrap() operation") IllegalStateException unexpectedStatusAfterUnwrap(); @Message(id = 8507, value = "The result [%s] is too big to be expressed as an Integer") String notAnInteger(long value); @Message(id = 8508, value = "Unable to coerce value [%s] to type [%s]. That type is not supported.") IllegalArgumentException invalidType(String value, String type); @Message(id = 8509, value = "The specified decoder of type [%s] could not be instantiated") String cannotInstatiateDecoder(String className); @Message(id = 8510, value = "Unable to add the message handler [%s] as it was for the unrecognised type [%s]") IllegalArgumentException unknownHandler(Object listener, Object target); @Message(id = 8511, value = "The message handler provided does not have an onMessage(Object) method") IllegalArgumentException invalidMessageHandler(@Cause Throwable t); @Message(id = 8512, value = "The Decoder type [%s] is not recognized") IllegalArgumentException unknownDecoderType(String className); @Message(id = 8513, value = "New frame received after a close control frame") String receivedFrameAfterClose(); @Message(id = 8514, value = "The client frame set the reserved bits to [%s] which was not supported by this endpoint") String unsupportedReservedBitsSet(int bit); @Message(id = 8515, value = "A fragmented control frame was received but control frames may not be fragmented") String invalidFragmentedControlFrame(); @Message(id = 8516, value = "A WebSocket frame was sent with an unrecognised opCode of [%s]") String invalidFrameOpcode(int code); @Message(id = 8517, value = "A new message was started when a continuation frame was expected") String noContinuationFrame(); @Message(id = 8518, value = "The client data can not be processed because the session has already been closed") String sessionClosed(); @Message(id = 8519, value = "The client frame was not masked but all client frames must be masked") String frameWithoutMask(); @Message(id 
= 8520, value = "A control frame was sent with a payload of size [%s] which is larger than the maximum permitted of 125 bytes") String controlFramePayloadTooLarge(long size); @Message(id = 8521, value = "A control frame was sent that did not have the fin bit set. Control frames are not permitted to use continuation frames.") String controlFrameWithoutFin(); @Message(id = 8522, value = "The client sent a close frame with a single byte payload which is not valid") String invalidOneByteClose(); @Message(id = 8523, value = "A WebSocket close frame was received with a close reason that contained invalid UTF-8 byte sequences") String invalidUtf8Close(); @Message(id = 8524, value = "The message was [%s] bytes long but the MessageHandler has a limit of [%s] bytes") String messageTooLarge(long size, long limit); @Message(id = 8525, value = "A WebSocket text frame was received that could not be decoded to UTF-8 because it contained invalid byte sequences") String invalidUtf8(); @Message(id = 8526, value = "The decoded text message was too big for the output buffer and the endpoint does not support partial messages") String textMessageTooLarge(); @Message(id = 8527, value = "No async message support and buffer too small. 
Buffer size: [%s], Message size: [%s]") String bufferTooSmall(int capacity, long payload); @Message(id = 8528, value = "Too many bytes ([%s]) were provided to be converted into a long") String invalidLong(long length); @Message(id = 8529, value = "The remote endpoint was in state [%s] which is an invalid state for called method") IllegalStateException wrongState(String state); @Message(id = 8530, value = "Message will not be sent because the WebSocket session has been closed") IllegalStateException messageSessionClosed(); @Message(id = 8531, value = "When sending a fragmented message, all fragments bust be of the same type") IllegalStateException messageFragmentTypeChange(); @Message(id = 8532, value = "No encoder specified for object of class [%s]") String noEncoderForClass(String className); @Message(id = 8533, value = "The specified encoder of type [%s] could not be instantiated") String cannotInstatiateEncoder(String className); @Message(id = 8534, value = "This method may not be called as the OutputStream has been closed") IllegalStateException closedOutputStream(); @Message(id = 8535, value = "This method may not be called as the Writer has been closed") IllegalStateException closedWriter(); @Message(id = 8536, value = "A text message handler has already been configured") IllegalStateException duplicateHandlerText(); @Message(id = 8537, value = "A binary message handler has already been configured") IllegalStateException duplicateHandlerBinary(); @Message(id = 8538, value = "A pong message handler has already been configured") IllegalStateException duplicateHandlerPong(); @Message(id = 8539, value = "A pong message handler must implement MessageHandler.Basic") IllegalStateException invalidHandlerPong(); @Message(id = 8540, value = "Unable to add the message handler [%s] as it was wrapped as the unrecognised type [%s]") IllegalArgumentException invalidMessageHandler(Object listener, Object type); @Message(id = 8541, value = "Unable to remove the handler [%s] 
as it was not registered with this session") IllegalStateException cannotRemoveHandler(Object listener); @Message(id = 8542, value = "Unable to write the complete message as the WebSocket connection has been closed") String messageFailed(); @Message(id = 8543, value = "The WebSocket session timeout expired") String sessionTimeout(); @Message(id = 8544, value = "The WebSocket session has been closed and no method (apart from close()) may be called on a closed session") IllegalStateException sessionAlreadyClosed(); @Message(id = 8545, value = "Unable to create dedicated AsynchronousChannelGroup for WebSocket clients which is required to prevent memory leaks in complex class loader environments like J2EE containers") IllegalStateException asyncGroupFail(); @Message(id = 8546, value = "Cannot use POJO class [%s] as it is not annotated with @ClientEndpoint") String missingClientEndpointAnnotation(String className); @Message(id = 8547, value = "Failed to create the default configurator") String defaultConfiguratorFailed(); @Message(id = 8548, value = "Failed to create a local endpoint of type [%s]") String endpointCreateFailed(String className); @Message(id = 8549, value = "The scheme [%s] is not supported") String pathWrongScheme(String scheme); @Message(id = 8550, value = "No host was specified in URI") String pathNoHost(); @Message(id = 8551, value = "The requested scheme, [%s], is not supported. 
The supported schemes are ws and wss") String invalidScheme(String scheme); @Message(id = 8552, value = "Unable to open a connection to the server") String connectionFailed(); @Message(id = 8553, value = "The HTTP request to initiate the WebSocket connection failed") String httpRequestFailed(); @Message(id = 8554, value = "Invalid websockets protocol header") String invalidProtocolHeader(); @Message(id = 8555, value = "The HTTP response from the server [%s] did not permit the HTTP upgrade to WebSocket") String invalidHttpStatus(String line); @Message(id = 8556, value = "Unable to create SSLEngine to support SSL/TLS connections") String sslEngineFail(); @Message(id = 8557, value = "The web application is stopping") String webappStopping(); @Message(id = 8558, value = "Failed to create instance of POJO of type [%s]") IllegalArgumentException pojoInstanceFailed(String className, @Cause Throwable t); @Message(id = 8559, value = "IO error while decoding message") String errorDecodingMessage(); @Message(id = 8560, value = "Duplicate annotations [%s] present on class [%s]") String duplicateAnnotations(Class<?> annotation, Class<?> clazz); @Message(id = 8561, value = "The annotated method [%s] is not public") String methodNotPublic(String method); @Message(id = 8562, value = "Parameters annotated with @PathParam may only be Strings, Java primitives or a boxed version thereof") String invalidPathParamType(); @Message(id = 8563, value = "A parameter of type [%s] was found on method[%s] of class [%s] that did not have a @PathParam annotation") String pathParamWithoutAnnotation(Class<?> clazz, String method, String className); @Message(id = 8564, value = "No Throwable parameter was present on the method [%s] of class [%s] that was annotated with OnError") String onErrorWithoutThrowable(String method, String className); @Message(id = 8565, value = "Failed to decode path parameter value [%s] to expected type [%s]") String errorDecodingPathParam(String value, Class<?> clazz); 
@Message(id = 8566, value = "Multiple message parameters present on the method [%s] of class [%s] that was annotated with OnMessage") IllegalArgumentException duplicateMessageParameter(String method, String className); @Message(id = 8567, value = "Multiple boolean (last) parameters present on the method [%s] of class [%s] that was annotated with OnMessage") IllegalArgumentException duplicateLastMessageParameter(String method, String className); @Message(id = 8568, value = "Multiple session parameters present on the method [%s] of class [%s] that was annotated with OnMessage") IllegalArgumentException duplicateSessionParameter(String method, String className); @Message(id = 8569, value = "Multiple PongMessage parameters present on the method [%s] of class [%s] that was annotated with OnMessage") IllegalArgumentException duplicatePongMessageParameter(String method, String className); @Message(id = 8570, value = "Invalid PongMessgae and Message parameters present on the method [%s] of class [%s] that was annotated with OnMessage") IllegalArgumentException invalidPongWithPayload(String method, String className); @Message(id = 8571, value = "No payload parameter present on the method [%s] of class [%s] that was annotated with OnMessage") IllegalArgumentException missingPayload(String method, String className); @Message(id = 8572, value = "Invalid PongMesssge and boolean parameters present on the method [%s] of class [%s] that was annotated with OnMessage") IllegalArgumentException partialPong(String method, String className); @Message(id = 8573, value = "Invalid Reader and boolean parameters present on the method [%s] of class [%s] that was annotated with OnMessage") IllegalArgumentException partialReader(String method, String className); @Message(id = 8574, value = "Invalid InputStream and boolean parameters present on the method [%s] of class [%s] that was annotated with OnMessage") IllegalArgumentException partialInputStream(String method, String className); 
@Message(id = 8575, value = "Invalid Object and boolean parameters present on the method [%s] of class [%s] that was annotated with OnMessage") IllegalArgumentException partialObject(String method, String className); @Message(id = 8576, value = "The path [%s] is not valid.") String invalidPath(String path); @Message(id = 8577, value = "The path [%s] contains one or more empty segments which are is not permitted") IllegalArgumentException invalidEmptySegment(String path); @Message(id = 8578, value = "The parameter [%s] appears more than once in the path which is not permitted") IllegalArgumentException duplicateParameter(String path); @Message(id = 8579, value = "The segment [%s] is not valid in the provided path [%s]") IllegalArgumentException invalidPathSegment(String segment, String path); @Message(id = 8580, value = "The preInit() method must be called to configure the WebSocket HttpUpgradeHandler before the container calls init(). Usually, this means the Servlet that created the WsHttpUpgradeHandler instance should also call preInit()") IllegalStateException noPreInit(); @Message(id = 8581, value = "No further Endpoints may be registered once an attempt has been made to use one of the previously registered endpoints") String addNotAllowed(); @Message(id = 8582, value = "No ServletContext was specified") String missingServletContext(); @Message(id = 8583, value = "Multiple Endpoints may not be deployed to using the same path [%s]") String duplicatePaths(String path); @Message(id = 8584, value = "Cannot deploy POJO class [%s] as it is not annotated with @ServerEndpoint") String cannotDeployPojo(String className); @Message(id = 8585, value = "Failed to create configurator of type [%s] for POJO of type [%s]") String configuratorFailed(String configurator, String className); @Message(id = 8586, value = "Upgrade failed") String upgradeFailed(); @Message(id = 8587, value = "This connection was established under an authenticated HTTP session that has ended") String 
expiredHttpSession(); @Message(id = 8588, value = "The remainder of the message will not be sent because the WebSocket session has been closed") String messageRemainderSessionClosed(); @Message(id = 8589, value = "The maximum supported message size for this implementation is Integer.MAX_VALUE") IllegalArgumentException messageTooLarge(); @Message(id = 8590, value = "Closing session following IO error") String closeAfterError(); }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.streaming.runtime.partitioner;

import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.JobID;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.java.tuple.Tuple;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.JobException;
import org.apache.flink.runtime.akka.AkkaUtils;
import org.apache.flink.runtime.blob.BlobKey;
import org.apache.flink.runtime.executiongraph.ExecutionEdge;
import org.apache.flink.runtime.executiongraph.ExecutionGraph;
import org.apache.flink.runtime.executiongraph.ExecutionJobVertex;
import org.apache.flink.runtime.executiongraph.ExecutionVertex;
import org.apache.flink.runtime.executiongraph.restart.NoRestartStrategy;
import org.apache.flink.runtime.jobgraph.JobGraph;
import org.apache.flink.runtime.jobgraph.JobVertex;
import org.apache.flink.runtime.jobmanager.scheduler.Scheduler;
import org.apache.flink.runtime.plugable.SerializationDelegate;
import org.apache.flink.runtime.testingUtils.TestingUtils;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.source.ParallelSourceFunction;
import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
import org.apache.flink.util.Collector;
import org.apache.flink.util.SerializedValue;
import org.apache.flink.util.TestLogger;

import org.junit.Before;
import org.junit.Test;

import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import static org.junit.Assert.*;

/**
 * Tests for {@link RescalePartitioner}: channel-selection behavior and the
 * point-wise (rescale) connection pattern it produces in the ExecutionGraph.
 */
@SuppressWarnings("serial")
public class RescalePartitionerTest extends TestLogger {

	// Partitioner under test; recreated before each test so its internal
	// round-robin counter starts fresh.
	private RescalePartitioner<Tuple> distributePartitioner;
	// Record payload is irrelevant to rescale partitioning, so null is fine here.
	private StreamRecord<Tuple> streamRecord = new StreamRecord<Tuple>(null);
	private SerializationDelegate<StreamRecord<Tuple>> sd = new SerializationDelegate<StreamRecord<Tuple>>(
		null);

	@Before
	public void setPartitioner() {
		distributePartitioner = new RescalePartitioner<Tuple>();
	}

	/** Rescale always targets exactly one channel, regardless of channel count. */
	@Test
	public void testSelectChannelsLength() {
		sd.setInstance(streamRecord);
		assertEquals(1, distributePartitioner.selectChannels(sd, 1).length);
		assertEquals(1, distributePartitioner.selectChannels(sd, 2).length);
		assertEquals(1, distributePartitioner.selectChannels(sd, 1024).length);
	}

	/**
	 * Successive selections cycle round-robin over the channels: 0, 1, 2, 0, ...
	 * NOTE(review): this relies on the partitioner's internal counter advancing
	 * by exactly one per call — the four asserts are order-dependent.
	 */
	@Test
	public void testSelectChannelsInterval() {
		sd.setInstance(streamRecord);
		assertEquals(0, distributePartitioner.selectChannels(sd, 3)[0]);
		assertEquals(1, distributePartitioner.selectChannels(sd, 3)[0]);
		assertEquals(2, distributePartitioner.selectChannels(sd, 3)[0]);
		assertEquals(0, distributePartitioner.selectChannels(sd, 3)[0]);
	}

	/**
	 * Builds a source(2) -> rescale -> map(4) -> rescale -> sink(2) topology,
	 * attaches it to an ExecutionGraph, and verifies the point-wise wiring:
	 * each source partition feeds exactly two mappers, and each sink consumes
	 * exactly two distinct map subpartitions.
	 */
	@Test
	public void testExecutionGraphGeneration() throws Exception {
		final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

		env.setParallelism(4);

		// get input data — sources are no-ops; only the graph shape matters.
		DataStream<String> text = env.addSource(new ParallelSourceFunction<String>() {
			private static final long serialVersionUID = 7772338606389180774L;

			@Override
			public void run(SourceContext<String> ctx) throws Exception {
			}

			@Override
			public void cancel() {
			}
		}).setParallelism(2);

		DataStream<Tuple2<String, Integer>> counts = text
			.rescale()
			.flatMap(new FlatMapFunction<String, Tuple2<String, Integer>>() {
				private static final long serialVersionUID = -5255930322161596829L;

				@Override
				public void flatMap(String value,
						Collector<Tuple2<String, Integer>> out) throws Exception {
				}
			});

		counts.rescale().print().setParallelism(2);

		JobGraph jobGraph = env.getStreamGraph().getJobGraph();

		final JobID jobId = new JobID();
		final String jobName = "Semi-Rebalance Test Job";
		final Configuration cfg = new Configuration();

		List<JobVertex> jobVertices = jobGraph.getVerticesSortedTopologicallyFromSources();

		// Topological order guarantees source, map, sink appear in this order.
		JobVertex sourceVertex = jobVertices.get(0);
		JobVertex mapVertex = jobVertices.get(1);
		JobVertex sinkVertex = jobVertices.get(2);

		assertEquals(2, sourceVertex.getParallelism());
		assertEquals(4, mapVertex.getParallelism());
		assertEquals(2, sinkVertex.getParallelism());

		ExecutionGraph eg = new ExecutionGraph(
			TestingUtils.defaultExecutor(),
			TestingUtils.defaultExecutor(),
			jobId,
			jobName,
			cfg,
			new SerializedValue<>(new ExecutionConfig()),
			AkkaUtils.getDefaultTimeout(),
			new NoRestartStrategy(),
			new ArrayList<BlobKey>(),
			new ArrayList<URL>(),
			new Scheduler(TestingUtils.defaultExecutionContext()),
			ExecutionGraph.class.getClassLoader());
		try {
			eg.attachJobGraph(jobVertices);
		} catch (JobException e) {
			e.printStackTrace();
			fail("Building ExecutionGraph failed: " + e.getMessage());
		}

		ExecutionJobVertex execSourceVertex = eg.getJobVertex(sourceVertex.getID());
		ExecutionJobVertex execMapVertex= eg.getJobVertex(mapVertex.getID());
		ExecutionJobVertex execSinkVertex= eg.getJobVertex(sinkVertex.getID());

		assertEquals(0, execSourceVertex.getInputs().size());

		assertEquals(1, execMapVertex.getInputs().size());
		assertEquals(4, execMapVertex.getParallelism());
		ExecutionVertex[] mapTaskVertices = execMapVertex.getTaskVertices();

		// verify that we have each parallel input partition exactly twice, i.e. that one source
		// sends to two unique mappers
		Map<Integer, Integer> mapInputPartitionCounts = new HashMap<>();
		for (ExecutionVertex mapTaskVertex: mapTaskVertices) {
			assertEquals(1, mapTaskVertex.getNumberOfInputs());
			assertEquals(1, mapTaskVertex.getInputEdges(0).length);
			ExecutionEdge inputEdge = mapTaskVertex.getInputEdges(0)[0];
			assertEquals(sourceVertex.getID(), inputEdge.getSource().getProducer().getJobvertexId());
			int inputPartition = inputEdge.getSource().getPartitionNumber();
			if (!mapInputPartitionCounts.containsKey(inputPartition)) {
				mapInputPartitionCounts.put(inputPartition, 1);
			} else {
				mapInputPartitionCounts.put(inputPartition, mapInputPartitionCounts.get(inputPartition) + 1);
			}
		}

		// 2 source partitions, each consumed by exactly 2 of the 4 mappers.
		assertEquals(2, mapInputPartitionCounts.size());
		for (int count: mapInputPartitionCounts.values()) {
			assertEquals(2, count);
		}

		assertEquals(1, execSinkVertex.getInputs().size());
		assertEquals(2, execSinkVertex.getParallelism());
		ExecutionVertex[] sinkTaskVertices = execSinkVertex.getTaskVertices();

		// verify each sink instance has two inputs from the map and that each map subpartition
		// only occurs in one unique input edge
		Set<Integer> mapSubpartitions = new HashSet<>();
		for (ExecutionVertex sinkTaskVertex: sinkTaskVertices) {
			assertEquals(1, sinkTaskVertex.getNumberOfInputs());
			assertEquals(2, sinkTaskVertex.getInputEdges(0).length);
			ExecutionEdge inputEdge1 = sinkTaskVertex.getInputEdges(0)[0];
			ExecutionEdge inputEdge2 = sinkTaskVertex.getInputEdges(0)[1];
			assertEquals(mapVertex.getID(), inputEdge1.getSource().getProducer().getJobvertexId());
			assertEquals(mapVertex.getID(), inputEdge2.getSource().getProducer().getJobvertexId());

			int inputPartition1 = inputEdge1.getSource().getPartitionNumber();
			assertFalse(mapSubpartitions.contains(inputPartition1));
			mapSubpartitions.add(inputPartition1);
			int inputPartition2 = inputEdge2.getSource().getPartitionNumber();
			assertFalse(mapSubpartitions.contains(inputPartition2));
			mapSubpartitions.add(inputPartition2);
		}

		// All 4 map subpartitions are consumed, each by exactly one sink edge.
		assertEquals(4, mapSubpartitions.size());
	}
}
/*
 * Copyright 2011 yingxinwu.g@gmail.com
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package xink.vpn;

import java.io.EOFException;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.List;

import xink.crypto.StreamCrypto;
import xink.vpn.stats.VpnConnectivityStats;
import xink.vpn.wrapper.InvalidProfileException;
import xink.vpn.wrapper.VpnProfile;
import xink.vpn.wrapper.VpnState;
import xink.vpn.wrapper.VpnType;
import android.content.Context;
import android.content.SharedPreferences;
import android.preference.PreferenceManager;
import android.text.TextUtils;
import android.util.Log;

/**
 * Repository of VPN profiles.
 *
 * <p>Profiles and the active-profile id are persisted as Java-serialized data
 * in the app's private file storage; backup/restore copies encrypted versions
 * of those files to/from external storage via {@link StreamCrypto}.</p>
 *
 * @author ywu
 */
public final class VpnProfileRepository {

    private static final String TAG = "xink";
    // Private-storage file holding the serialized profile list.
    private static final String FILE_PROFILES = "profiles";
    // Private-storage file holding the serialized active-profile id.
    private static final String FILE_ACT_ID = "active_profile_id";

    private static VpnProfileRepository instance;

    private Context context;
    private String activeProfileId;
    private List<VpnProfile> profiles;
    private VpnState activeVpnState;
    private VpnConnectivityStats connStats;

    private VpnProfileRepository(final Context ctx) {
        this.context = ctx;
        profiles = new ArrayList<VpnProfile>();
        connStats = new VpnConnectivityStats(ctx);
    }

    /**
     * Retrieves the single instance of repository.
     *
     * <p>Synchronized so concurrent first calls cannot create two instances or
     * publish a partially-loaded one (the original lazy init was unguarded).</p>
     *
     * @param ctx
     *            Context
     * @return singleton
     */
    public static synchronized VpnProfileRepository getInstance(final Context ctx) {
        if (instance == null) {
            instance = new VpnProfileRepository(ctx);
            instance.load();

            StreamCrypto.init(ctx);
        }

        return instance;
    }

    /**
     * Get state of the active vpn. Lazily read from shared preferences on
     * first access; falls back to the default declared in resources.
     */
    public VpnState getActiveVpnState() {
        if (activeVpnState == null) {
            SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
            String v = prefs.getString(context.getString(R.string.active_vpn_state_key),
                    context.getString(R.string.active_vpn_state_default));
            activeVpnState = VpnState.valueOf(v);
        }
        return activeVpnState;
    }

    /**
     * Update state of the active vpn. Transient (unstable) states are ignored;
     * only stable states are cached and persisted.
     */
    public void setActiveVpnState(final VpnState state) {
        if (!state.isStable())
            return;

        this.activeVpnState = state;
        SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
        prefs.edit().putString(context.getString(R.string.active_vpn_state_key), state.toString()).commit();
    }

    /**
     * Retrieves the connectivity stats instance
     */
    public VpnConnectivityStats getConnectivityStats() {
        return this.connStats;
    }

    /** Persists the active-profile id and the profile list; errors are logged, not thrown. */
    public void save() {
        Log.d(TAG, "save, activeId=" + activeProfileId + ", profiles=" + profiles);

        try {
            saveActiveProfileId();
            saveProfiles();
        } catch (IOException e) {
            Log.e(TAG, "save profiles failed", e);
        }
    }

    private void saveActiveProfileId() throws IOException {
        ObjectOutputStream os = null;
        try {
            os = new ObjectOutputStream(openPrivateFileOutput(FILE_ACT_ID));
            os.writeObject(activeProfileId);
        } finally {
            if (os != null) {
                os.close();
            }
        }
    }

    private void saveProfiles() throws IOException {
        ObjectOutputStream os = null;
        try {
            os = new ObjectOutputStream(openPrivateFileOutput(FILE_PROFILES));
            // Each profile writes its own type tag + payload; see loadProfilesFrom.
            for (VpnProfile p : profiles) {
                p.write(os);
            }
        } finally {
            if (os != null) {
                os.close();
            }
        }
    }

    private FileOutputStream openPrivateFileOutput(final String fileName) throws FileNotFoundException {
        return context.openFileOutput(fileName, Context.MODE_PRIVATE);
    }

    /** Loads persisted state; a missing/corrupt store is logged and leaves the repository empty. */
    private void load() {
        try {
            loadActiveProfileId();
            loadProfiles();

            Log.d(TAG, "loaded, activeId=" + activeProfileId + ", profiles=" + profiles);
        } catch (Exception e) {
            Log.e(TAG, "load profiles failed", e);
        }
    }

    private void loadActiveProfileId() throws IOException, ClassNotFoundException {
        ObjectInputStream is = null;
        try {
            is = new ObjectInputStream(context.openFileInput(FILE_ACT_ID));
            activeProfileId = (String) is.readObject();
        } finally {
            if (is != null) {
                is.close();
            }
        }
    }

    private void loadProfiles() throws Exception {
        ObjectInputStream is = null;
        try {
            is = new ObjectInputStream(context.openFileInput(FILE_PROFILES));
            loadProfilesFrom(is);
        } finally {
            if (is != null) {
                is.close();
            }
        }
    }

    private void loadProfilesFrom(final ObjectInputStream is) throws Exception {
        Object obj = null;

        try {
            // The stream has no length prefix; read (type, payload) pairs until EOF.
            while (true) {
                VpnType type = (VpnType) is.readObject();
                obj = is.readObject();
                loadProfileObject(type, obj, is);
            }
        } catch (EOFException eof) {
            Log.d(TAG, "reach the end of profiles file");
        }
    }

    private void loadProfileObject(final VpnType type, final Object obj, final ObjectInputStream is) throws Exception {
        if (obj == null)
            return;

        VpnProfile p = VpnProfile.newInstance(type, context);
        if (p.isCompatible(obj)) {
            p.read(obj, is);
            profiles.add(p);
        } else {
            // Schema drift: the stored object no longer matches this profile type.
            Log.e(TAG, "saved profile '" + obj + "' is NOT compatible with " + type);
        }
    }

    public void setActiveProfile(final VpnProfile profile) {
        Log.i(TAG, "active vpn set to: " + profile);
        activeProfileId = profile.getId();
    }

    public String getActiveProfileId() {
        return activeProfileId;
    }

    public VpnProfile getActiveProfile() {
        if (activeProfileId == null)
            return null;

        return getProfileById(activeProfileId);
    }

    private VpnProfile getProfileById(final String id) {
        for (VpnProfile p : profiles) {
            if (p.getId().equals(id))
                return p;
        }
        return null;
    }

    public VpnProfile getProfileByName(final String name) {
        for (VpnProfile p : profiles) {
            if (p.getName().equals(name))
                return p;
        }
        return null;
    }

    /**
     * @return a read-only view of the VpnProfile list.
     */
    public List<VpnProfile> getAllVpnProfiles() {
        return Collections.unmodifiableList(profiles);
    }

    public synchronized void addVpnProfile(final VpnProfile p) {
        p.postConstruct();
        profiles.add(p);
    }

    /**
     * Validates a profile before it is stored: non-empty, unique name, plus
     * the profile's own validation.
     *
     * @throws InvalidProfileException when the profile is not acceptable
     */
    public void checkProfile(final VpnProfile newProfile) {
        String newName = newProfile.getName();

        if (TextUtils.isEmpty(newName))
            throw new InvalidProfileException("profile name is empty.", R.string.err_empty_name);

        for (VpnProfile p : profiles) {
            if (newProfile != p && newName.equals(p.getName()))
                throw new InvalidProfileException("duplicated profile name '" + newName + "'.",
                        R.string.err_duplicated_profile_name, newName);
        }

        newProfile.validate();
    }

    public synchronized void deleteVpnProfile(final VpnProfile profile) {
        String id = profile.getId();
        boolean removed = profiles.remove(profile);
        Log.d(TAG, "delete vpn: " + profile + ", removed=" + removed);

        // Deleting the active profile also deactivates it.
        if (id.equals(activeProfileId)) {
            activeProfileId = null;
            Log.d(TAG, "deactivate vpn: " + profile);
        }
    }

    /**
     * Exports the (freshly saved) private data files, encrypted, into the
     * given external directory. A no-op when there are no profiles.
     */
    public void backup(final String path) {
        if (profiles.isEmpty()) {
            Log.i(TAG, "profile list is empty, will not export");
            return;
        }

        save();
        File dir = ensureDir(path);

        try {
            doBackup(dir, FILE_ACT_ID);
            doBackup(dir, FILE_PROFILES);
        } catch (Exception e) {
            throw new AppException("backup failed", e, R.string.err_exp_failed);
        }
    }

    private File ensureDir(final String path) {
        File dir = new File(path);
        Utils.ensureDir(dir);
        return dir;
    }

    private void doBackup(final File dir, final String name) throws Exception {
        // Streams are closed in finally — the original leaked both on every call.
        InputStream is = null;
        OutputStream os = null;
        try {
            is = context.openFileInput(name);
            os = new FileOutputStream(new File(dir, name));
            StreamCrypto.encrypt(is, os);
        } finally {
            if (is != null) {
                is.close();
            }
            if (os != null) {
                os.close();
            }
        }
    }

    /**
     * Imports encrypted data files from the given external directory, then
     * reloads the repository from the restored private files.
     */
    public void restore(final String dir) {
        checkExternalData(dir);

        try {
            doRestore(dir, FILE_ACT_ID);
            doRestore(dir, FILE_PROFILES);

            clean();
            load();
        } catch (Exception e) {
            throw new AppException("restore failed", e, R.string.err_imp_failed);
        }
    }

    private void clean() {
        activeProfileId = null;
        profiles.clear();
    }

    private void doRestore(final String dir, final String name) throws Exception {
        // Streams are closed in finally — the original leaked both on every call.
        InputStream is = null;
        OutputStream os = null;
        try {
            is = new FileInputStream(new File(dir, name));
            os = openPrivateFileOutput(name);
            StreamCrypto.decrypt(is, os);
        } finally {
            if (is != null) {
                is.close();
            }
            if (os != null) {
                os.close();
            }
        }
    }

    /*
     * verify data files in external storage.
     */
    private void checkExternalData(final String path) {
        File id = new File(path, FILE_ACT_ID);
        File profiles = new File(path, FILE_PROFILES);

        if (!(verifyDataFile(id) && verifyDataFile(profiles)))
            throw new AppException("no valid data found in: " + path, R.string.err_imp_nodata);
    }

    private boolean verifyDataFile(final File file) {
        return file.exists() && file.isFile() && file.length() > 0;
    }

    /**
     * Check last backup time.
     *
     * @return timestamp of last backup, null for no backup.
     */
    public Date checkLastBackup(final String path) {
        File id = new File(path, FILE_ACT_ID);
        if (!verifyDataFile(id))
            return null;

        return new Date(id.lastModified());
    }
}
package softmodelling;

import java.util.ArrayList;
import java.util.List;

import toxi.geom.Vec3D;
import wblut.hemesh.HE_Edge;
import wblut.hemesh.HE_Vertex;

/**
 * Spring-mass surface mirroring the HE_Mesh held by the sketch: one Particle
 * per mesh vertex and one Spring per mesh edge, both registered with the
 * external physics engine (p5.physics). Particles/springs are matched back to
 * mesh elements via their integer keys.
 */
public class Surface {

    SoftModelling p5; // owning sketch; provides mesh, physics, println, UI values
    float strength = 0.1f; // spring stiffness used for all created springs
    ArrayList<Particle> particles = new ArrayList<Particle>();
    ArrayList<Particle> particlesSelected = new ArrayList<Particle>();
    ArrayList<Spring> springs = new ArrayList<Spring>();
    ArrayList<Spring> springsSelected = new ArrayList<Spring>();

    // ////////////////CONSTRUCTOR
    Surface(SoftModelling _p5) {
        p5 = _p5;
        initSurface();
        lockCorners();
    }

    // /////////////////////////
    /** Per-frame update: step all particles, then all springs. */
    void run() {
        runParticles();
        runSprings();
    }

    void runParticles() {
        for (int i = 0; i < particles.size(); i++) {
            Particle p = (Particle) particles.get(i);
            p.run();
        }
    }

    void runSprings() {
        for (int i = 0; i < springs.size(); i++) {
            Spring s = (Spring) springs.get(i);
            s.run();
        }
    }

    /** Build particles from every mesh vertex, then springs from every mesh edge. */
    void initSurface() {
        ArrayList<HE_Vertex> emptyList = new ArrayList<HE_Vertex>();
        createNewParticlesFromMesh(p5.mesh.getVerticesAsList(), emptyList);
        List<HE_Edge> listCheck = (List<HE_Edge>) p5.mesh.getEdgesAsList();
        createSpringsFromMesh(listCheck);
    }

    /**
     * Create a Particle for each vertex in verticesToCheck that is not listed in
     * verticesToAvoid, register it with the physics engine, and tag it with the
     * vertex key so it can be found again later.
     * NOTE(review): the weight loop re-assigns the weight of EVERY physics
     * particle once per processed vertex (O(V^2) setWeight calls); hoisting it
     * out of the outer loop looks intended — confirm before changing.
     */
    void createNewParticlesFromMesh(List<HE_Vertex> verticesToCheck, List<HE_Vertex> verticesToAvoid) {
        for (int i = 0; i < verticesToCheck.size(); i++) {
            HE_Vertex vv = (HE_Vertex) verticesToCheck.get(i);
            if (!verticesToAvoid.contains(vv)) {
                Vec3D v1 = new Vec3D((float) vv.xf(), (float) vv.yf(), (float) vv.zf());
                Particle p = (Particle) new Particle(p5, v1, vv.key());
                if (!particles.contains(p)) {
                    particles.add(p);
                    p5.physics.addParticle(p);
                    p.key = vv.key();
                }
                // total weight of 50 shared equally across all checked vertices
                float weight = 50.0f / (float) verticesToCheck.size();
                for (int j = 0; j < p5.physics.particles.size(); j++)
                    p5.physics.particles.get(j).setWeight(weight);
            }
        }
    }

    /**
     * Linear scan for the particle whose key matches; keeps the LAST match.
     * NOTE(review): if no particle matches, this silently returns the element
     * at index 0 rather than failing — verify callers always pass valid keys.
     */
    Particle getParticleswithKey(List<Particle> particleList, int key) {
        int index = 0;
        for (int i = 0; i < particleList.size(); i++) {
            Particle p = (Particle) particleList.get(i);
            if (p.key == key) {
                index = i;
            }
        }
        Particle p2 = (Particle) particleList.get(index);
        return p2;
    }

    /** Same last-match linear scan as getParticleswithKey, for springs. */
    Spring getSpringswithKey(List<Spring> springsList, int key) {
        int index = 0;
        for (int i = 0; i < springsList.size(); i++) {
            Spring s = (Spring) springsList.get(i);
            if (s.key == key) {
                index = i;
            }
        }
        Spring s2 = (Spring) springsList.get(index);
        return s2;
    }

    /**
     * Create one spring per mesh edge, with rest length 80% of the edge length.
     * NOTE(review): the two println calls run once per edge and each one walks
     * the full edge list — debug leftovers, O(E^2); `size() / 1` also looks
     * like a leftover divisor.
     */
    void createSpringsFromMesh(List listToCheck) {
        for (int i = 0; i < listToCheck.size() / 1; i++) {
            HE_Edge e = (HE_Edge) listToCheck.get(i);
            HE_Vertex va = e.getStartVertex();
            HE_Vertex vb = e.getEndVertex();
            Particle a, b;
            a = (Particle) getParticleswithKey(particles, va.key());
            b = (Particle) getParticleswithKey(particles, vb.key());
            Spring s = (Spring) new Spring(p5, a, b, (float) (e.getLength() * .8f), strength, e.key());
            p5.physics.addSpring(s);
            springs.add(s);
            p5.println("mesh.getEdgesAsList().size() = " + p5.mesh.getEdgesAsList().size());
            p5.println("surface.springs.size() = " + springs.size());
            // }
        }
    }

    /**
     * Pin corner vertices (those with an edge star of 2 or fewer edges) so the
     * surface keeps its outline while the rest of the mesh relaxes.
     */
    void lockCorners() {
        for (int i = 0; i < p5.mesh.getVerticesAsList().size(); i++) {
            HE_Vertex v = p5.mesh.getVerticesAsList().get(i);
            if (v.getEdgeStar().size() <= 2) {
                Particle p = this.getParticleswithKey(particles, v.key());
                p.lock();
                p.isCorner = true;
                p.lockSelected = true;
                p.keepLocked = true;
            }
        }
    }

    /**
     * Scale the rest length of the selected springs (or of all springs when the
     * mesh edge selection is empty) by the UI slider value, clamped to (1, 300).
     */
    void resizeSprings() {
        for (int i = 0; i < springs.size(); i++) {
            Spring s = (Spring) springs.get(i);
            if ((s.isSelected) || (p5.mesh.selection.getEdgesAsList().size() == 0)) {
                float initlength = s.initlen;
                // float initlength = s.getRestLength();
                p5.println("initlength = " + initlength);
                float newlength = initlength * ((p5.springlengthScale / 100));
                if ((newlength < 300) && (newlength > 1)) {
                    s.setRestLength(newlength);
                    s.initlen = newlength;
                }
            }
        }
        // for (int i = 0; i < springs.size(); i++) {
        // BoxClass b = (BoxClass) p5.mesh.boxArrayEdges.get(i);
        // Spring s = (Spring) springs.get(i);
        // if (b.isSelected) {
        // float initlength = s.initlen;
        // float newlength = initlength * ((p5.springlengthScale / 100));
        // //if (newlength < 200)
        // s.setRestLength(newlength);
        // }
        //
    }

    /** Clear the selected flag on all selected particles and empty the selection list. */
    void deselectParticles() {
        for (int i = 0; i < this.particlesSelected.size(); i++) {
            Particle p = (Particle) particlesSelected.get(i);
            // if (p.isLocked())
            // p.keepLocked = true;
            // if (!p.lockSelected) {
            // p.unlock();
            // p.keepLocked = false;
            // }
            // p.hasBeenDragged = false;
            p.isSelected = false;
            // particlesSelected.remove(p);
        }
        particlesSelected.clear();
    }

    /** Clear the selected flag on all selected springs. */
    void deselectSprings() {
        for (int i = 0; i < this.springsSelected.size(); i++) {
            Spring s = (Spring) springsSelected.get(i);
            s.isSelected = false;
        }
        // NOTE(review): clears particlesSelected, leaving springsSelected
        // populated — looks like a copy-paste bug; springsSelected.clear()
        // was probably intended. Confirm against callers before fixing.
        particlesSelected.clear();
    }

    /** Lock every currently selected particle and mark it to stay locked. */
    void lockSelectParticles() {
        for (int i = 0; i < particles.size(); i++) {
            Particle p = (Particle) particles.get(i);
            if (p.isSelected) {
                p.lock();
                p.keepLocked = true;
                p.lockSelected = true;
                p.checkNeighborstoRemovePhysics();
            }
        }
    }

    /**
     * Unlock selected particles.
     * NOTE(review): unlock() runs only when keepLocked is still true — if a
     * particle was selected but not keep-locked it stays locked; verify this
     * asymmetry with lockSelectParticles is intentional.
     */
    void unlockSelectParticles() {
        for (int i = 0; i < this.particlesSelected.size(); i++) {
            Particle p = (Particle) particlesSelected.get(i);
            if (p.isSelected) {
                if (p.keepLocked)
                    p.unlock();
                p.keepLocked = false;
                p.lockSelected = false;
                p.checkNeighborstoAddPhysics();
                // addSpringsPhysicsUnlocking();
            }
        }
    }

    /** Re-register with physics any spring that touches a locked endpoint. */
    void addSpringsPhysicsUnlocking() {
        for (int i = 0; i < p5.surface.springs.size(); i++) {
            Spring s = p5.surface.springs.get(i);
            if (!p5.physics.springs.contains(s)) {
                if ((s.a.isLocked()) || (s.b.isLocked()))
                    p5.physics.addSpring(s);
            }
        }
    }

    /** Select every particle and mirror the selection into the mesh's vertex selection. */
    void selectAllParticles() {
        p5.mesh.selection.addVertices(p5.mesh.getVerticesAsList());
        for (int i = 0; i < particles.size(); i++) {
            Particle p = (Particle) particles.get(i);
            p.isSelected = true;
            if (!particlesSelected.contains(p))
                particlesSelected.add(p);
        }
    }

    /**
     * Remove springs that duplicate another spring (same key and same endpoint
     * pair in either orientation), both locally and from the physics engine.
     * NOTE(review): springs.remove(s2) inside an indexed loop shifts the list
     * and skips the following element — a later duplicate can survive one pass.
     */
    void removeDuplicatesSprings() {
        for (int i = 0; i < springs.size(); i++) {
            Spring s1 = (Spring) springs.get(i);
            for (int j = 0; j < springs.size(); j++) {
                Spring s2 = (Spring) springs.get(j);
                if (s1 != s2) {
                    if ((s1.key == s2.key)) {
                        if ((s1.a.key == s2.a.key) && (s1.b.key == s2.b.key)) {
                            springs.remove(s2);
                            p5.physics.removeSpring(s2);
                        }
                        if ((s1.a.key == s2.b.key) && (s1.b.key == s2.a.key)) {
                            springs.remove(s2);
                            p5.physics.removeSpring(s2);
                        }
                    }
                }
            }
        }
    }

    /**
     * Drop local springs that the physics engine no longer tracks.
     * NOTE(review): same remove-while-indexing pattern as above — consecutive
     * stale springs can be skipped in a single pass.
     */
    void removeSpringsifNotInPhysics() {
        for (int j = 0; j < springs.size(); j++) {
            Spring s = (Spring) springs.get(j);
            if (!p5.physics.springs.contains(s)) {
                springs.remove(s);
            }
        }
    }

    /**
     * Remove springs whose key matches a mesh edge but whose endpoints no
     * longer agree with that edge's vertices (in either orientation).
     */
    void removeSpringsWithoutBoxes() {
        // // check if exist////
        for (int i = 0; i < p5.mesh.getEdgesAsList().size(); i++) {
            HE_Edge e = (HE_Edge) p5.mesh.getEdgesAsList().get(i);
            for (int j = 0; j < springs.size(); j++) {
                Spring s = (Spring) springs.get(j);
                if ((s.key == e.key())) {
                    if ((s.a.key == e.getStartVertex().key()) && (s.b.key != e.getEndVertex().key())) {
                        springs.remove(s);
                        p5.physics.removeSpring(s);
                    }
                    if ((s.b.key == e.getStartVertex().key()) && (s.a.key != e.getEndVertex().key())) {
                        springs.remove(s);
                        p5.physics.removeSpring(s);
                    }
                    if ((s.a.key == e.getEndVertex().key()) && (s.b.key != e.getStartVertex().key())) {
                        springs.remove(s);
                        p5.physics.removeSpring(s);
                    }
                    if ((s.b.key == e.getEndVertex().key()) && (s.a.key != e.getStartVertex().key())) {
                        springs.remove(s);
                        p5.physics.removeSpring(s);
                    }
                }
            }
        }
    }

    /** Rebuild the physics engine's spring list from the local spring list. */
    void recomputeAllSpringsToPhysics() {
        p5.physics.springs.clear();
        for (int i = 0; i < springs.size(); i++) {
            Spring s = (Spring) springs.get(i);
            p5.physics.addSpring(s);
        }
    }

    /** Re-sync spring keys from mesh edge keys by matching endpoint key pairs. */
    void recomputeSpringsKeys() {
        for (int h = 0; h < p5.mesh.getEdgesAsList().size(); h++) {
            HE_Edge e = (HE_Edge) p5.mesh.getEdgesAsList().get(h);
            HE_Vertex va = e.getStartVertex();
            HE_Vertex vb = e.getEndVertex();
            for (int i = 0; i < p5.surface.springs.size(); i++) {
                Spring s = (Spring) p5.surface.springs.get(i);
                if (((s.a.key == va.key()) && (s.b.key == vb.key()))
                        || ((s.a.key == vb.key()) && (s.b.key == va.key()))) {
                    s.key = e.key();
                }
            }
        }
    }

    // -----------------------------------------------------------------------tut014
    /**
     * Delete the LAST selected particle: its springs, its mesh edges, its mesh
     * vertex, and finally the particle itself.
     * NOTE(review): if nothing is selected (somethingSelected stays false) the
     * particle at index 0 is deleted anyway; `possitive`/`somethingSelected`
     * are written but never read; springs.remove(s) inside the indexed loop
     * can skip a following spring; and the particle is removed twice
     * (remove(indexSelected) then remove(p) — the second is a no-op here).
     */
    void killSelectParticles() {
        int indexSelected = 0;
        boolean possitive = false;
        boolean somethingSelected = false;
        Particle p;
        for (int i = 0; i < particles.size(); i++) {
            p = (Particle) particles.get(i);
            if (p.isSelected) {
                indexSelected = i;
                somethingSelected = true;
            }
        }
        // /---clear Springs---///
        p5.println("indexSelected = " + indexSelected);
        p = (Particle) particles.get(indexSelected);
        Spring s = null;
        for (int k = 0; k < springs.size(); k++) {
            s = (Spring) springs.get(k);
            if ((s.a == p) || (s.b == p)) {
                p5.physics.removeSpring(s);
                springs.remove(s);
                possitive = true;
            }
        }
        HE_Vertex v = p5.mesh.getVertexByKey(p.key);
        // // /---clear Faces---///
        // List facesP = (List) v.getFaceStar();
        // for (int i = 0; i < facesP.size(); i++) {
        // HE_Face f = (HE_Face) facesP.get(i);
        // mesh.deleteFace(f);
        // }
        // /---clear Edges---///
        List edgesP = (List) v.getEdgeStar();
        for (int i = 0; i < edgesP.size(); i++) {
            HE_Edge e = (HE_Edge) edgesP.get(i);
            p5.mesh.deleteEdge(e);
        }
        p5.mesh.remove(v);
        // /---clear Particles---///
        this.particles.remove(indexSelected);
        p5.physics.removeParticle(p);
        particles.remove(p);
    }
    // -----------------------------------------------------------------------tut014
}// endClass
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.nifi.remote;

import org.apache.nifi.groups.ProcessGroup;
import org.apache.nifi.remote.protocol.FlowFileTransaction;
import org.apache.nifi.remote.protocol.HandshakeProperties;
import org.apache.nifi.remote.protocol.http.HttpFlowFileServerProtocol;
import org.apache.nifi.util.FormatUtils;
import org.apache.nifi.util.NiFiProperties;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;

import static org.apache.nifi.util.NiFiProperties.DEFAULT_SITE_TO_SITE_HTTP_TRANSACTION_TTL;
import static org.apache.nifi.util.NiFiProperties.SITE_TO_SITE_HTTP_TRANSACTION_TTL;

/**
 * Tracks HTTP Site-to-Site transactions on the server side. Transactions are
 * created, held (bound to a FlowFile session), extended, finalized or
 * cancelled; a single-threaded maintenance task periodically expires
 * transactions that have been idle longer than the configured TTL and rolls
 * back their sessions.
 */
public class HttpRemoteSiteListener implements RemoteSiteListener {

    private static final Logger logger = LoggerFactory.getLogger(HttpRemoteSiteListener.class);

    /** Idle timeout (seconds) after which a transaction is expired. */
    private final int transactionTtlSec;

    // volatile is required for safe double-checked locking: without it another
    // thread may observe a partially-constructed instance (JLS §17, JMM).
    private static volatile HttpRemoteSiteListener instance;

    private final Map<String, TransactionWrapper> transactions = new ConcurrentHashMap<>();
    private final ScheduledExecutorService taskExecutor;
    private ProcessGroup rootGroup;
    private ScheduledFuture<?> transactionMaintenanceTask;

    private HttpRemoteSiteListener(final NiFiProperties nifiProperties) {
        super();
        // Daemon thread so a lingering maintenance task never blocks JVM shutdown.
        taskExecutor = Executors.newScheduledThreadPool(1, new ThreadFactory() {
            private final ThreadFactory defaultFactory = Executors.defaultThreadFactory();

            @Override
            public Thread newThread(final Runnable r) {
                final Thread thread = defaultFactory.newThread(r);
                thread.setName("Http Site-to-Site Transaction Maintenance");
                thread.setDaemon(true);
                return thread;
            }
        });

        // Parse the TTL property; fall back to the default on any parse failure.
        int txTtlSec;
        try {
            final String snapshotFrequency = nifiProperties.getProperty(SITE_TO_SITE_HTTP_TRANSACTION_TTL,
                    DEFAULT_SITE_TO_SITE_HTTP_TRANSACTION_TTL);
            txTtlSec = (int) FormatUtils.getTimeDuration(snapshotFrequency, TimeUnit.SECONDS);
        } catch (final Exception e) {
            txTtlSec = (int) FormatUtils.getTimeDuration(DEFAULT_SITE_TO_SITE_HTTP_TRANSACTION_TTL, TimeUnit.SECONDS);
            logger.warn("Failed to parse {} due to {}, use default as {} secs.",
                    SITE_TO_SITE_HTTP_TRANSACTION_TTL, e.getMessage(), txTtlSec);
        }
        transactionTtlSec = txTtlSec;
    }

    /**
     * Lazily creates the singleton (double-checked locking over the volatile
     * {@code instance} field).
     */
    public static HttpRemoteSiteListener getInstance(final NiFiProperties nifiProperties) {
        if (instance == null) {
            synchronized (HttpRemoteSiteListener.class) {
                if (instance == null) {
                    instance = new HttpRemoteSiteListener(nifiProperties);
                }
            }
        }
        return instance;
    }

    /**
     * Pairs a transaction with its handshake properties and records the last
     * client communication time, from which expiration is computed.
     */
    private class TransactionWrapper {

        private final FlowFileTransaction transaction;
        private final HandshakeProperties handshakeProperties;
        private long lastCommunicationAt;

        private TransactionWrapper(final FlowFileTransaction transaction, final HandshakeProperties handshakeProperties) {
            this.transaction = transaction;
            this.handshakeProperties = handshakeProperties;
            this.lastCommunicationAt = System.currentTimeMillis();
        }

        /** True when no client communication happened within the TTL window. */
        private boolean isExpired() {
            long elapsedMillis = System.currentTimeMillis() - lastCommunicationAt;
            long elapsedSec = TimeUnit.SECONDS.convert(elapsedMillis, TimeUnit.MILLISECONDS);
            return elapsedSec > transactionTtlSec;
        }

        /** Restart the TTL countdown. */
        private void extend() {
            lastCommunicationAt = System.currentTimeMillis();
        }
    }

    @Override
    public void setRootGroup(ProcessGroup rootGroup) {
        this.rootGroup = rootGroup;
    }

    public void setupServerProtocol(HttpFlowFileServerProtocol serverProtocol) {
        serverProtocol.setRootProcessGroup(rootGroup);
    }

    /**
     * Starts the periodic maintenance task that cancels expired transactions.
     */
    @Override
    public void start() throws IOException {
        // scheduleWithFixedDelay requires delay > 0; with a 1-second TTL the
        // original transactionTtlSec / 2 evaluated to 0 and threw
        // IllegalArgumentException, so clamp to at least 1 second.
        final long maintenanceDelaySec = Math.max(1, transactionTtlSec / 2);
        transactionMaintenanceTask = taskExecutor.scheduleWithFixedDelay(() -> {

            int originalSize = transactions.size();
            logger.trace("Transaction maintenance task started.");

            try {
                // Snapshot the key set so cancellation doesn't race the iteration.
                Set<String> transactionIds = transactions.keySet().stream().collect(Collectors.toSet());
                transactionIds.stream().filter(tid -> !isTransactionActive(tid))
                        .forEach(tid -> cancelTransaction(tid));
            } catch (Exception e) {
                // Swallow exception so that this thread can keep working.
                logger.error("An exception occurred while maintaining transactions", e);
            }

            logger.debug("Transaction maintenance task finished. originalSize={}, currentSize={}",
                    originalSize, transactions.size());

        }, 0, maintenanceDelaySec, TimeUnit.SECONDS);
    }

    /**
     * Removes the transaction and rolls back its session, if any.
     */
    public void cancelTransaction(String transactionId) {
        TransactionWrapper wrapper = transactions.remove(transactionId);
        if (wrapper == null) {
            logger.debug("The transaction was not found. transactionId={}", transactionId);
        } else {
            logger.debug("Cancel a transaction. transactionId={}", transactionId);
            FlowFileTransaction t = wrapper.transaction;
            if (t != null && t.getSession() != null) {
                logger.info("Cancel a transaction, rollback its session. transactionId={}", transactionId);
                try {
                    t.getSession().rollback();
                } catch (Exception e) {
                    // Swallow exception so that it can keep expiring other transactions.
                    logger.error("Failed to rollback. transactionId={}", transactionId, e);
                }
            }
        }
    }

    @Override
    public void stop() {
        if (transactionMaintenanceTask != null) {
            logger.debug("Stopping transactionMaintenanceTask...");
            transactionMaintenanceTask.cancel(true);
        }
    }

    /**
     * Registers a new empty transaction and returns its id. The TTL countdown
     * starts immediately.
     */
    public String createTransaction() {
        final String transactionId = UUID.randomUUID().toString();
        transactions.put(transactionId, new TransactionWrapper(null, null));
        logger.debug("Created a new transaction: {}", transactionId);
        return transactionId;
    }

    public boolean isTransactionActive(final String transactionId) {
        TransactionWrapper transaction = transactions.get(transactionId);
        return isTransactionActive(transaction);
    }

    /** Active means: known and not yet past its TTL. */
    private boolean isTransactionActive(TransactionWrapper transaction) {
        if (transaction == null) {
            return false;
        }
        if (transaction.isExpired()) {
            return false;
        }
        return true;
    }

    /**
     * @param transactionId transactionId to check
     * @return Returns a HandshakeProperties instance which is created when this
     *         transaction is started, only if the transaction is active, and it holds a
     *         HandshakeProperties, otherwise return null
     */
    public HandshakeProperties getHandshakenProperties(final String transactionId) {
        TransactionWrapper transaction = transactions.get(transactionId);
        if (isTransactionActive(transaction)) {
            return transaction.handshakeProperties;
        }
        return null;
    }

    /**
     * Binds a session-backed FlowFileTransaction to an existing transaction id.
     * The TTL countdown restarts when the transaction is held.
     *
     * @throws IllegalStateException if the transaction was already held, or the
     *         passed transaction has no session
     */
    public void holdTransaction(final String transactionId, final FlowFileTransaction transaction,
                                final HandshakeProperties handshakenProperties) throws IllegalStateException {
        // We don't check expiration of the transaction here, to support large file transport or slow network.
        // The availability of current transaction is already checked when the HTTP request was received at SiteToSiteResource.
        TransactionWrapper currentTransaction = transactions.remove(transactionId);
        if (currentTransaction == null) {
            logger.debug("The transaction was not found, it looks it took longer than transaction TTL.");
        } else if (currentTransaction.transaction != null) {
            throw new IllegalStateException("Transaction has already been processed. It can only be finalized. transactionId=" + transactionId);
        }
        if (transaction.getSession() == null) {
            throw new IllegalStateException("Passed transaction is not associated any session yet, can not hold. transactionId=" + transactionId);
        }
        logger.debug("Holding a transaction: {}", transactionId);
        // Server has received or sent all data, and transaction TTL count down starts here.
        // However, if the client doesn't consume data fast enough, server might expire and rollback the transaction.
        transactions.put(transactionId, new TransactionWrapper(transaction, handshakenProperties));
    }

    /**
     * Removes and returns the held FlowFileTransaction for a commit/confirm.
     *
     * @throws IllegalStateException if the transaction is unknown, expired, or
     *         was never held
     */
    public FlowFileTransaction finalizeTransaction(final String transactionId) throws IllegalStateException {
        if (!isTransactionActive(transactionId)) {
            throw new IllegalStateException("Transaction was not found or not active anymore. transactionId=" + transactionId);
        }
        TransactionWrapper transaction = transactions.remove(transactionId);
        if (transaction == null) {
            throw new IllegalStateException("Transaction was not found anymore. It's already finalized or expired. transactionId=" + transactionId);
        }
        if (transaction.transaction == null) {
            throw new IllegalStateException("Transaction has not started yet.");
        }
        logger.debug("Finalized a transaction: {}", transactionId);
        return transaction.transaction;
    }

    /**
     * Restarts the TTL countdown for an active transaction.
     *
     * @throws IllegalStateException if the transaction is unknown or expired
     */
    public void extendTransaction(final String transactionId) throws IllegalStateException {
        if (!isTransactionActive(transactionId)) {
            throw new IllegalStateException("Transaction was not found or not active anymore. transactionId=" + transactionId);
        }
        TransactionWrapper transaction = transactions.get(transactionId);
        if (transaction != null) {
            logger.debug("Extending transaction TTL, transactionId={}", transactionId);
            transaction.extend();
        }
    }

    public int getTransactionTtlSec() {
        return transactionTtlSec;
    }
}
package com.alorma.github.ui.fragment.pullrequest;

import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;

import com.alorma.github.R;
import com.alorma.gitskarios.core.client.BaseClient;
import com.alorma.github.sdk.bean.dto.response.Commit;
import com.alorma.github.sdk.bean.dto.response.ReviewComment;
import com.alorma.github.sdk.bean.info.IssueInfo;
import com.alorma.github.sdk.bean.issue.IssueStoryDetail;
import com.alorma.github.sdk.bean.issue.IssueStoryReviewComment;
import com.alorma.github.sdk.bean.issue.PullRequestStoryCommit;
import com.alorma.github.sdk.services.pullrequest.GetPullRequestCommits;
import com.alorma.github.sdk.services.pullrequest.PullRequestReviewCommentsClient;
import com.alorma.github.ui.adapter.commit.PullRequestCommitsReviewCommentsAdapter;
import com.alorma.github.ui.fragment.base.PaginatedListFragment;
import com.alorma.github.ui.fragment.detail.repo.BackManager;
import com.alorma.github.ui.fragment.detail.repo.PermissionsManager;
import com.mikepenz.octicons_typeface_library.Octicons;

import org.joda.time.DateTime;
import org.joda.time.Days;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import retrofit.RetrofitError;
import retrofit.client.Response;

/**
 * Created by Bernat on 07/09/2014.
 *
 * Shows the commits of a pull request, interleaved with their review comments.
 * Commits are loaded first; review comments are fetched asynchronously and
 * merged into the adapter keyed by commit SHA.
 */
public class PullRequestCommitsListFragment
        extends PaginatedListFragment<List<Commit>, PullRequestCommitsReviewCommentsAdapter>
        implements PermissionsManager, BackManager {

    private static final String ISSUE_INFO = "ISSUE_INFO";

    // accumulated commits across pages/refreshes; rebuilt on refresh
    private List<Commit> commits;
    // which PR to load; read from fragment arguments in loadArguments()
    private IssueInfo issueInfo;

    /** Factory: packs the IssueInfo into the fragment's arguments bundle. */
    public static PullRequestCommitsListFragment newInstance(IssueInfo issueInfo) {
        Bundle bundle = new Bundle();
        bundle.putParcelable(ISSUE_INFO, issueInfo);

        PullRequestCommitsListFragment fragment = new PullRequestCommitsListFragment();
        fragment.setArguments(bundle);
        return fragment;
    }

    /**
     * Called with a page of commits: accumulates them, kicks off the async
     * review-comment fetch, and populates (or appends to) the adapter.
     */
    @Override
    protected void onResponse(final List<Commit> commits, boolean refreshing) {
        if (this.commits == null || refreshing) {
            this.commits = new ArrayList<>();
        }
        if (commits != null && commits.size() > 0) {
            orderCommits(commits);

            // NOTE(review): fired before the adapter may exist below; relies on
            // the async callback arriving after setAdapter() — confirm ordering.
            getReviewComments();

            List<IssueStoryDetail> issueStoryDetails = new ArrayList<>();
            for (Commit commit : this.commits) {
                issueStoryDetails.add(new PullRequestStoryCommit(commit));
            }

            if (getAdapter() == null) {
                PullRequestCommitsReviewCommentsAdapter commitsAdapter =
                        new PullRequestCommitsReviewCommentsAdapter(LayoutInflater.from(getActivity()), false, issueInfo.repoInfo);
                commitsAdapter.addAll(issueStoryDetails);
                setAdapter(commitsAdapter);
            } else {
                getAdapter().addAll(issueStoryDetails);
            }
        } else if (getAdapter() == null || getAdapter().getItemCount() == 0) {
            setEmpty(false);
        }
    }

    /**
     * Fetches the PR's review comments asynchronously and, on success,
     * rebuilds the adapter list so each comment appears under the commit it
     * was made on (matched by original commit SHA).
     */
    private void getReviewComments() {
        PullRequestReviewCommentsClient pullRequestReviewComments =
                new PullRequestReviewCommentsClient(getActivity(), issueInfo);
        pullRequestReviewComments.setOnResultCallback(new BaseClient.OnResultCallback<List<ReviewComment>>() {
            @Override
            public void onResponseOk(List<ReviewComment> reviewComments, Response r) {
                if (reviewComments != null) {
                    List<IssueStoryDetail> items = getAdapter().getItems();
                    if (items != null) {
                        // collect the commit entries currently shown
                        List<PullRequestStoryCommit> commits = new ArrayList<PullRequestStoryCommit>();
                        for (IssueStoryDetail item : items) {
                            if (item instanceof PullRequestStoryCommit) {
                                commits.add((PullRequestStoryCommit) item);
                            }
                        }

                        // group review comments by the commit they were made on
                        Map<String, List<ReviewComment>> mapComments = new HashMap<>();
                        for (ReviewComment reviewComment : reviewComments) {
                            if (mapComments.get(reviewComment.original_commit_id) == null) {
                                mapComments.put(reviewComment.original_commit_id, new ArrayList<ReviewComment>());
                            }
                            mapComments.get(reviewComment.original_commit_id).add(reviewComment);
                        }

                        // rebuild: each commit followed by its comments
                        items = new ArrayList<>();
                        for (PullRequestStoryCommit commit : commits) {
                            items.add(commit);
                            if (mapComments.get(commit.commit.sha) != null) {
                                for (ReviewComment reviewComment : mapComments.get(commit.commit.sha)) {
                                    IssueStoryReviewComment issueStoryReviewComment = new IssueStoryReviewComment(reviewComment);
                                    issueStoryReviewComment.created_at = getMilisFromDateClearHour(reviewComment.created_at);
                                    items.add(issueStoryReviewComment);
                                }
                            }
                        }
                        getAdapter().clear();
                        getAdapter().addAll(items);
                    }
                }
            }

            /** Parse an ISO timestamp and round down to the start of its hour (millis). */
            private long getMilisFromDateClearHour(String createdAt) {
                DateTimeFormatter formatter = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss'Z'");
                DateTime dt = formatter.parseDateTime(createdAt);
                return dt.hourOfDay().roundFloorCopy().getMillis();
            }

            @Override
            public void onFail(RetrofitError error) {
                // review comments are optional decoration; commits still display
            }
        });
        pullRequestReviewComments.execute();
    }

    @Override
    public void onFail(RetrofitError error) {
        super.onFail(error);
        if (getAdapter() == null || getAdapter().getItemCount() == 0) {
            if (error != null && error.getResponse() != null) {
                setEmpty(true, error.getResponse().getStatus());
            }
        }
    }

    @Override
    public void setEmpty(boolean withError, int statusCode) {
        super.setEmpty(withError, statusCode);
        if (fab != null) {
            fab.setVisibility(View.INVISIBLE);
        }
    }

    @Override
    public void hideEmpty() {
        super.hideEmpty();
        if (fab != null) {
            fab.setVisibility(View.VISIBLE);
        }
    }

    @Override
    public void setPermissions(boolean admin, boolean push, boolean pull) {
        // permissions do not affect this read-only list
    }

    @Override
    public boolean onBackPressed() {
        return true;
    }

    /**
     * Computes, per commit, the age in whole days and appends it to the
     * accumulated list.
     * NOTE(review): the guard checks commit.commit.author.date but the value
     * parsed is commit.commit.committer.date — mismatch can NPE or use the
     * wrong date when author and committer differ; confirm which was intended.
     */
    private void orderCommits(List<Commit> commits) {
        for (Commit commit : commits) {
            if (commit.commit.author.date != null) {
                DateTimeFormatter formatter = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss'Z'");
                DateTime dt = formatter.parseDateTime(commit.commit.committer.date);

                Days days = Days.daysBetween(dt.withTimeAtStartOfDay(),
                        new DateTime(System.currentTimeMillis()).withTimeAtStartOfDay());

                commit.days = days.getDays();

                this.commits.add(commit);
            }
        }
    }

    @Override
    protected void executeRequest() {
        super.executeRequest();
        GetPullRequestCommits getPullRequestCommits = new GetPullRequestCommits(getActivity(), issueInfo);
        getPullRequestCommits.setOnResultCallback(this);
        getPullRequestCommits.execute();
    }

    /**
     * NOTE(review): identical to executeRequest() — the page argument is
     * ignored, so "pagination" refetches the first page; verify whether
     * GetPullRequestCommits should receive the page number.
     */
    @Override
    protected void executePaginatedRequest(int page) {
        super.executePaginatedRequest(page);
        GetPullRequestCommits getPullRequestCommits = new GetPullRequestCommits(getActivity(), issueInfo);
        getPullRequestCommits.setOnResultCallback(this);
        getPullRequestCommits.execute();
    }

    @Override
    protected void loadArguments() {
        if (getArguments() != null) {
            issueInfo = getArguments().getParcelable(ISSUE_INFO);
        }
    }

    @Override
    protected Octicons.Icon getNoDataIcon() {
        return Octicons.Icon.oct_diff;
    }

    @Override
    protected int getNoDataText() {
        return R.string.no_commits;
    }

    @Override
    protected boolean useFAB() {
        return false;
    }

    // TODO
    /*@Override
    public void onListItemClick(final ListView l, final View v, final int position, final long id) {
        Commit item = commitsAdapter.getItem(position);

        CommitInfo info = new CommitInfo();
        info.repoInfo = issueInfo.repoInfo;
        info.sha = item.sha;

        Intent intent = CommitDetailActivity.newInstance(getActivity(), info);
        startActivity(intent);
    }*/
}
package org.feldspaten.hyperion.persistence; import java.sql.DriverManager; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; import java.sql.Statement; import java.sql.Timestamp; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; public class MySQL { /* Connection parameters */ /** Hostname of the database */ private String db_hostname; /** Port of the database */ private int db_port; /** Database name for the connection */ private String db_database; /** Username for the database */ private String db_username; /** Password for the connection */ private String db_password; /** Default port for MySQL connections */ public static final int DEFAULT_PORT = 3306; /** Default names encoding used for a statement */ private String defaultEncoding = "utf8"; /** Default timezone for a statement. Adapt to your needs */ private String timezone = "+01:00"; /** Offset in seconds for the timezone in seconds */ private long timezoneOffset = 60L * 60L; /** JDBC connection */ private java.sql.Connection conn = null; /** {@link SimpleDateFormat} for formatting dates to MySQL date instances */ static final SimpleDateFormat sqlDateTimeFormatter = new SimpleDateFormat( "yyyy-MM-dd HH:mm:ss"); /** {@link SimpleDateFormat} for formatting dates to MySQL date instances */ static final SimpleDateFormat sqlDateFormatter = new SimpleDateFormat( "yyyy-MM-dd"); /** Closed flag. 
After closing no further connections are established */ private static boolean closed = false; /** Implementation of Map to provide easier access to the values map */ public static class ValuesMap extends HashMap<String, String> implements Map<String, String> { /** Serialisation ID */ private static final long serialVersionUID = -3241025828184612599L; @Override /** * Escape string and insert it */ public String put(String key, String value) { if (value == null) value = "''"; else value = "'" + MySQL.sqlSafeString(value) + "'"; return super.put(key, value); } /** * Insert into the values map * * @param key * Key of the value * @param value * Value * @param noEscape * If true, the given string will not be escaped * @return key of the inserted String */ public String put(String key, String value, boolean noEscape) { if (value == null) value = (noEscape ? "" : "''"); else { if (!noEscape) value = "'" + MySQL.sqlSafeString(value) + "'"; } return super.put(key, value); } public String put(String key, long value) { return super.put(key, Long.toString(value)); } public String put(String key, float value) { return super.put(key, Float.toString(value)); } public String put(String key, double value) { return super.put(key, Double.toString(value)); } } /** * Helper class for creating queries * * @author phoenix * */ public static class Query { /** Underlying statement */ private final Statement stmt; /** {@link ResultSet} when quering data */ protected ResultSet rs = null; /** Set offset for this query */ private long offset = 0; /** Limit for the resulting rows */ private long limit = 100; /** Name of the table the query operates on */ private String tablename; /** Ordering */ private String order; /** True if ascending order, false if descending order */ private boolean orderAscending; /** Created query string */ private String query = null; /** Enclose query in transaction */ private boolean transaction = false; /** Where clauses */ private final List<String> whereClauses = 
new ArrayList<>(10);

		/** Connector placed between where clauses; either "AND" or "OR". */
		private String whereClauseConnector = "AND";

		/** Optional ON DUPLICATE KEY statement appended to insert queries. */
		private String insertOnDuplicateKeyStatement = null;

		public Query(final MySQL mysql) throws SQLException {
			this.stmt = mysql.createStatement();
		}

		/** @return the SQL text of the most recently executed query */
		public String getLastQuery() {
			return query;
		}

		public String getOrder() {
			return order;
		}

		public void setOrder(String order) {
			this.order = order;
		}

		public boolean isOrderAscending() {
			return orderAscending;
		}

		public void setOrderAscending(boolean orderAscending) {
			this.orderAscending = orderAscending;
		}

		public String getTablename() {
			return tablename;
		}

		public void setTablename(String tablename) {
			this.tablename = tablename;
		}

		public long getOffset() {
			return offset;
		}

		public void setOffset(long offset) {
			this.offset = offset;
		}

		public long getLimit() {
			return limit;
		}

		public void setLimit(long limit) {
			this.limit = limit;
		}

		/**
		 * Build the WHERE expression from all registered clauses.
		 *
		 * @return all clauses, each wrapped in parentheses and joined by the
		 *         configured connector, or the empty string if none are set
		 */
		public String getWhereClause() {
			synchronized (whereClauses) {
				if (whereClauses.isEmpty())
					return "";

				final StringBuffer buffer = new StringBuffer();
				boolean first = true;
				final String connector = " " + this.whereClauseConnector + " ";
				for (final String clause : whereClauses) {
					if (first)
						first = false;
					else
						buffer.append(connector);
					buffer.append('(');
					buffer.append(clause);
					buffer.append(')');
				}
				return buffer.toString();
			}
		}

		/**
		 * Set the connector between where clauses. Acceptable connectors are
		 * <b>AND</b> or <b>OR</b>.
		 *
		 * @param connector
		 *            Connector between where clauses
		 * @throws IllegalArgumentException
		 *             if the connector is neither AND nor OR
		 */
		public void setWhereClauseConnector(String connector) {
			connector = connector.trim().toUpperCase();
			if (connector.equals("AND"))
				this.whereClauseConnector = "AND";
			else if (connector.equals("OR"))
				this.whereClauseConnector = "OR";
			else
				throw new IllegalArgumentException(
						"Illegal where clause connector: " + connector);
		}

		/**
		 * Replaces ALL where clauses by this one clause.
		 *
		 * @param clause
		 *            to be set; null or empty clears all clauses
		 */
		public void setWhereClause(String clause) {
			synchronized (whereClauses) {
				this.whereClauses.clear();
				if (clause == null || clause.isEmpty())
					return;
				else
					this.whereClauses.add(clause);
			}
		}

		/** Add a single where clause; null or empty clauses are ignored. */
		public void addWhereClause(final String clause) {
			if (clause == null || clause.isEmpty())
				return;
			synchronized (whereClauses) {
				this.whereClauses.add(clause);
			}
		}

		/** Remove all where clauses. */
		public void clearWhereClauses() {
			synchronized (whereClauses) {
				this.whereClauses.clear();
			}
		}

		/** Close and release the current result set, if any. */
		public synchronized void cleanup() throws SQLException {
			if (rs != null)
				rs.close();
			rs = null;
		}

		/**
		 * Execute the given SQL and keep the resulting {@link ResultSet}.
		 * Any previous result set is closed first.
		 */
		protected synchronized ResultSet executeQuery(final String sql)
				throws SQLException {
			cleanup();
			this.query = sql;
			this.rs = stmt.executeQuery(sql);
			return this.rs;
		}

		/** Run a SELECT * on the configured table. */
		protected synchronized ResultSet selectQuery() throws SQLException {
			return this.selectQuery("*");
		}

		/**
		 * Run a SELECT on the configured table honouring the configured where
		 * clauses, ordering, limit and offset.
		 *
		 * @param rows
		 *            column list to select (inserted verbatim; caller supplies
		 *            valid SQL)
		 */
		protected synchronized ResultSet selectQuery(final String rows)
				throws SQLException {
			// Generate SELECT query
			final StringBuffer query = new StringBuffer();
			query.append("SELECT " + rows + " FROM `");
			query.append(tablename);
			final String whereClause = getWhereClause();
			if (whereClause != null && whereClause.length() > 0) {
				query.append("` WHERE ");
				query.append(whereClause);
			} else
				query.append('`');
			if (order != null && order.length() > 0) {
				query.append(" ORDER BY ");
				query.append(MySQL.sqlSafeString(order));
				if (isOrderAscending())
					query.append(" ASC ");
				else
					query.append(" DESC ");
			}
			if (limit > 0) {
				query.append(" LIMIT ");
				query.append(limit);
				if (offset > 0) {
					query.append(" OFFSET ");
					query.append(offset);
				}
			}
			query.append(';');
			return executeQuery(query.toString());
		}

		public synchronized ResultSet getResultSet() {
			return this.rs;
		}

		/**
		 * Close the statement and any open result set. A pending transaction
		 * is ended (committed) first.
		 * NOTE(review): committing on close may be surprising; a ROLLBACK
		 * could be the safer default -- confirm intended semantics.
		 */
		public synchronized void close() {
			if (transaction)
				try {
					endTransaction();
				} catch (SQLException e1) {
				}
			try {
				cleanup();
			} catch (SQLException e) {
				e.printStackTrace();
			}
			try {
				stmt.close();
			} catch (SQLException e) {
				e.printStackTrace();
			}
		}

		/** Advance the current result set; false if no result set is open. */
		public synchronized boolean next() throws SQLException {
			if (rs == null)
				return false;
			return rs.next();
		}

		public synchronized boolean execute(String sql) throws SQLException {
			cleanup();
			this.query = sql;
			return stmt.execute(sql);
		}

		public synchronized int executeUpdate(String sql) throws SQLException {
			cleanup();
			this.query = sql;
			return stmt.executeUpdate(sql);
		}

		public synchronized ResultSetMetaData getResultSetMetaData()
				throws SQLException {
			if (rs == null)
				throw new IllegalStateException(
						"Cannot get ResultSetMetaData when no ResultSet is available");
			return rs.getMetaData();
		}

		public synchronized int insert(Map<String, String> values)
				throws SQLException {
			return this.insertStatement(this.tablename, values, false);
		}

		public synchronized int insert(String tablename,
				Map<String, String> values) throws SQLException {
			return this.insertStatement(tablename, values, false);
		}

		public synchronized int insertIgnore(Map<String, String> values)
				throws SQLException {
			return this.insertStatement(this.tablename, values, true);
		}

		public synchronized int insertIgnore(String tablename,
				Map<String, String> values) throws SQLException {
			// FIX: previously ignored the tablename argument and always used
			// the configured table, making this overload a no-op duplicate.
			return this.insertStatement(tablename, values, true);
		}

		/**
		 * Build and run an INSERT statement.
		 *
		 * @param tablename
		 *            target table
		 * @param values
		 *            column -> SQL value text; values are inserted verbatim,
		 *            so the caller must quote/escape them (see
		 *            {@link MySQL#escapeString(String)})
		 * @param ignore
		 *            use INSERT IGNORE when true
		 * @return number of affected rows, 0 if table name or values are empty
		 */
		protected synchronized int insertStatement(String tablename,
				Map<String, String> values, final boolean ignore)
				throws SQLException {
			if (tablename.isEmpty() || values.isEmpty())
				return 0;

			final StringBuffer buffer = new StringBuffer();
			if (ignore)
				buffer.append("INSERT IGNORE INTO `");
			else
				buffer.append("INSERT INTO `");
			buffer.append(tablename);
			buffer.append("` (");
			{
				final StringBuffer valuesString = new StringBuffer();
				boolean first = true;
				for (final String key : values.keySet()) {
					if (first)
						first = false;
					else {
						buffer.append(", ");
						valuesString.append(", ");
					}
					buffer.append('`');
					buffer.append(key);
					buffer.append('`');
					valuesString.append(values.get(key));
				}
				buffer.append(") VALUES (");
				buffer.append(valuesString.toString());
			}
			// FIX: close the VALUES list before the optional ON DUPLICATE KEY
			// clause. Previously the clause was emitted inside the value list
			// (and without a leading space), producing malformed SQL such as
			// "VALUES (a, bON DUPLICATE KEY ...)".
			buffer.append(')');
			if (this.insertOnDuplicateKeyStatement != null) {
				buffer.append(" ON DUPLICATE KEY ");
				buffer.append(this.insertOnDuplicateKeyStatement);
			}
			buffer.append(';');
			return this.executeUpdate(buffer.toString());
		}

		public void setOnDuplicateKeyStatement(final String statement) {
			this.insertOnDuplicateKeyStatement = statement;
		}

		public synchronized int update(Map<String, String> values)
				throws SQLException {
			return this.update(this.tablename, values);
		}

		/**
		 * Build and run an UPDATE statement on the given table. Values are
		 * inserted verbatim; callers must quote/escape them.
		 *
		 * @return number of affected rows, 0 if table name or values are empty
		 */
		public synchronized int update(String tablename,
				Map<String, String> values) throws SQLException {
			if (tablename.isEmpty() || values.isEmpty())
				return 0;
			final StringBuffer buffer = new StringBuffer();
			buffer.append("UPDATE `");
			buffer.append(tablename);
			buffer.append("` SET ");
			boolean first = true;
			for (final String key : values.keySet()) {
				if (first)
					first = false;
				else {
					buffer.append(", ");
				}
				buffer.append('`');
				buffer.append(key);
				buffer.append("` = ");
				buffer.append(values.get(key));
			}
			// FIX: only emit WHERE when clauses exist; an empty clause used to
			// produce the malformed statement "... WHERE ;". Like delete(),
			// an update without where clauses now affects all rows.
			final String whereClause = getWhereClause();
			if (whereClause.length() > 0) {
				buffer.append(" WHERE ");
				buffer.append(whereClause);
			}
			buffer.append(";");
			return this.executeUpdate(buffer.toString());
		}

		public synchronized void insertOrUpdate(ValuesMap values)
				throws SQLException {
			this.insertOrUpdate(this.tablename, values);
		}

		/**
		 * INSERT ... ON DUPLICATE KEY UPDATE for the given values. Values are
		 * single-quoted here but NOT escaped -- callers must pre-escape them.
		 */
		public synchronized void insertOrUpdate(String tableName,
				ValuesMap values) throws SQLException {
			// FIX: honour the tableName argument; the method previously used
			// the configured table field regardless of the parameter.
			if (tableName.isEmpty() || values.isEmpty())
				return;

			final StringBuffer buffer = new StringBuffer();
			buffer.append("INSERT INTO `");
			buffer.append(tableName);
			buffer.append("` (");
			{
				final StringBuffer valuesString = new StringBuffer();
				boolean first = true;
				for (final String key : values.keySet()) {
					if (first)
						first = false;
					else {
						buffer.append(", ");
						valuesString.append(", ");
					}
					buffer.append('`');
					buffer.append(key);
					buffer.append('`');
					valuesString.append("'");
					valuesString.append(values.get(key));
					valuesString.append("'");
				}
				buffer.append(") VALUES (");
				buffer.append(valuesString.toString());
			}
			buffer.append(") ON DUPLICATE KEY UPDATE ");
			{
				boolean first = true;
				for (final String key : values.keySet()) {
					if (first)
						first = false;
					else
						buffer.append(", ");
					buffer.append('`');
					buffer.append(key);
					buffer.append("` = '");
					buffer.append(values.get(key));
					buffer.append("'");
				}
			}
			this.executeUpdate(buffer.toString());
		}

		/** Begin a transaction unless one is already open. */
		public synchronized void startTransaction() throws SQLException {
			if (transaction)
				return;
			else {
				transaction = true;
				this.execute("START TRANSACTION;");
			}
		}

		/** Commit and end the current transaction, if one is open. */
		public synchronized void endTransaction() throws SQLException {
			if (!transaction)
				return;
			else {
				this.execute("COMMIT;");
				transaction = false;
			}
		}

		/**
		 * Delete from the table with the given where clause. Without where
		 * clauses ALL rows are deleted.
		 */
		public synchronized void delete() throws SQLException {
			final StringBuffer buffer = new StringBuffer();
			buffer.append("DELETE FROM `");
			buffer.append(tablename);
			buffer.append("`");
			final String whereClause = getWhereClause();
			if (whereClause.length() > 0) {
				buffer.append(" WHERE ");
				buffer.append(whereClause);
			}
			buffer.append(';');
			execute(buffer.toString());
		}

		/**
		 * Clear the whole table.
		 */
		public synchronized void clear() throws SQLException {
			final StringBuffer buffer = new StringBuffer();
			buffer.append("DELETE FROM `");
			buffer.append(tablename);
			buffer.append("`");
			buffer.append(';');
			execute(buffer.toString());
		}
	}

	/**
	 * Static class constructor.
	 *
	 * This constructor must initialise the JDBC driver.
	 */
	static {
		try {
			// newInstance() fixes some buggy implementations of the JDBC
			// driver
			Class.forName("com.mysql.jdbc.Driver").newInstance();
		} catch (ClassNotFoundException e) {
			System.err
					.println("Could not find jdbc driver. Try to download a valid jdbc driver from http://www.oracle.com!");
			e.printStackTrace(System.err);
		} catch (InstantiationException e) {
			System.err.println("Could not find jdbc driver");
			e.printStackTrace(System.err);
		} catch (IllegalAccessException e) {
			System.err
					.println("Access to JDBC driver denied. Try to download a valid jdbc driver from http://www.oracle.com!");
			// FIX (consistency): also print the stack trace like the other
			// failure branches.
			e.printStackTrace(System.err);
		}
	}

	public MySQL(String hostname, int port, String database, String username,
			String password) {
		super();
		this.db_hostname = hostname;
		this.db_port = port;
		this.db_database = database;
		this.db_username = username;
		this.db_password = password;
	}

	public MySQL(String hostname, String database, String username,
			String password) {
		this(hostname, DEFAULT_PORT, database, username, password);
	}

	public String getDefaultEncoding() {
		return defaultEncoding;
	}

	public void setDefaultEncoding(String defaultEncoding) {
		this.defaultEncoding = defaultEncoding;
	}

	public String getTimezone() {
		return timezone;
	}

	public void setTimezone(String timezone) {
		this.timezone = timezone;
	}

	public long getTimezoneOffset() {
		return timezoneOffset;
	}

	public void setTimezoneOffset(long timezoneOffset) {
		this.timezoneOffset = timezoneOffset;
	}

	/**
	 * Initialize the connection with the given parameters.
	 *
	 * @throws SQLException
	 *             Thrown if the initial connection fails
	 */
	public void initialize() throws SQLException {
		this.reconnectThrowsException();
	}

	/** Execute a single SQL statement on a fresh statement object. */
	public void execSql(final String sql) throws SQLException {
		final Statement stmt = createStatement();
		try {
			stmt.execute(sql);
		} finally {
			stmt.close();
		}
	}

	/**
	 * Execute the given SQL statements. The statements are executed
	 * sequentially without a transaction.
	 *
	 * @param sqls
	 *            to be executed
	 * @throws SQLException
	 *             Thrown if occurring on database
	 */
	public void execSql(final String[] sqls) throws SQLException {
		execSql(sqls, false);
	}

	/**
	 * Execute the given SQL statements. If transaction is set to true, the
	 * whole sql statements are enclosed within a TRANSACTION.
	 *
	 * @param sqls
	 *            to be executed
	 * @param transaction
	 *            True if the statements should be enclosed within a
	 *            transaction
	 * @throws SQLException
	 *             Thrown if occurring on database
	 */
	public void execSql(final String[] sqls, final boolean transaction)
			throws SQLException {
		final Statement stmt = createStatement();
		try {
			if (transaction)
				stmt.execute("START TRANSACTION;");
			for (String sql : sqls)
				stmt.execute(sql);
			// FIX: commit only after every statement succeeded. Previously the
			// COMMIT was issued unconditionally in the finally block, which
			// committed a partial transaction even after a failure.
			if (transaction)
				stmt.execute("COMMIT;");
		} catch (SQLException e) {
			if (transaction)
				try {
					stmt.execute("ROLLBACK;");
				} catch (SQLException ignored) {
					// best effort; the original failure is rethrown below
				}
			throw e;
		} finally {
			stmt.close();
		}
	}

	public void executeSql(final String sql) throws SQLException {
		execSql(sql);
	}

	public void executeUpdate(final String sql) throws SQLException {
		final Statement stmt = createStatement();
		try {
			stmt.executeUpdate(sql);
		} finally {
			stmt.close();
		}
	}

	/**
	 * Connects or reconnects the database. Useful, if you changed the database
	 * connection parameters, i.e. hostname, port, database, username and
	 * password.
	 */
	public synchronized void connect() {
		try {
			if (conn != null)
				conn.close();
			checkConnection();
		} catch (SQLException e) {
			// Ignore
		}
	}

	/**
	 * Checks current connection and re-connects if the connection is broken.
	 *
	 * @return true if the connection is alive after the call
	 */
	public synchronized boolean checkConnection() {
		if (closed)
			return false;
		final int timeout = 5000;
		try {
			if (conn != null && conn.isValid(timeout))
				return true;
		} catch (SQLException e) {
			// Consider as a broken connection.
		}

		final String url = createJDBCAddress();
		java.sql.Connection con = conn;
		try {
			con = DriverManager.getConnection(url);
			try {
				if (conn != null)
					conn.close();
			} catch (SQLException e) {
				System.err.println("Error closing existing JDBC: "
						+ e.getLocalizedMessage());
				con.close();
				return false;
			}
			// Set new connection
			conn = con;
			return true;
		} catch (SQLException e) {
			System.err.println("New JDBC Connection failed: "
					+ e.getLocalizedMessage());
			return false;
		}
	}

	/**
	 * @return created JDBC address string
	 */
	protected String createJDBCAddress() {
		String url_ = "jdbc:mysql://" + db_hostname + ":" + db_port + "/"
				+ db_database + "?user=" + db_username
				+ "&zeroDateTimeBehavior=convertToNull&characterEncoding=utf8";
		if (db_password != null && db_password.length() > 0)
			url_ += "&password=" + db_password;
		return url_;
	}

	/**
	 * (Re)connects the database and throws an {@link SQLException} if failed.
	 * NOTE(review): unlike checkConnection(), this builds a URL without the
	 * zeroDateTimeBehavior/characterEncoding options -- confirm whether that
	 * difference is intended.
	 */
	protected synchronized void reconnectThrowsException() throws SQLException {
		try {
			if (conn != null)
				conn.close();
		} catch (SQLException e) {
			// Ignore
		}
		final String url;
		{
			String url_ = "jdbc:mysql://" + db_hostname + ":" + db_port + "/"
					+ db_database + "?user=" + db_username;
			if (db_password != null && db_password.length() > 0)
				url_ += "&password=" + db_password;
			url = url_;
		}
		// FIX (idiom): removed a catch block that immediately rethrew the
		// exception unchanged; on failure conn keeps its previous (closed)
		// value, exactly as before.
		conn = DriverManager.getConnection(url);
	}

	/**
	 * Make a string SQL-Safe, i.e. replace all <i>'</i> with <i>\'</i>.
	 * Quotes already preceded by a backslash are left untouched.
	 *
	 * @param string
	 *            to be handled
	 * @return SQL-Safe string
	 */
	public static String sqlSafeString(final String string) {
		final StringBuffer buffer = new StringBuffer();
		boolean escaped = false;
		for (final char ch : string.toCharArray()) {
			if (ch == '\\')
				escaped = true;
			else {
				if (ch == '\'') {
					if (!escaped)
						buffer.append('\\');
				}
				escaped = false;
			}
			buffer.append(ch);
		}
		return buffer.toString();
	}

	/**
	 * Create SQL date-time text out of a {@link Date} instance.
	 *
	 * @param date
	 *            to be converted
	 * @return SQL date string, or "NULL" for null input
	 */
	public static String sqlDateTime(final Date date) {
		if (date == null)
			return "NULL";
		return sqlDateTimeFormatter.format(date);
	}

	/**
	 * Create SQL date text out of a {@link Date} instance.
	 *
	 * @param date
	 *            to be converted
	 * @return SQL date string, or "NULL" for null input
	 */
	public static String sqlDate(final Date date) {
		if (date == null)
			return "NULL";
		return sqlDateFormatter.format(date);
	}

	/**
	 * Checks the connection and creates a new statement with the configured
	 * session encoding and time zone applied.
	 *
	 * @return {@link Statement} for the connection
	 * @throws SQLException
	 *             Packet {@link SQLException} if occurring
	 */
	public synchronized Statement createStatement() throws SQLException {
		checkConnection();
		if (conn == null)
			throw new SQLException("Error setting up SQL connection");
		final Statement statement = conn.createStatement();
		statement.execute("SET NAMES " + defaultEncoding + ";");
		statement.execute("SET time_zone = '" + timezone + "';");
		return statement;
	}

	/**
	 * Checks the connection and creates a new prepared statement with the
	 * configured session encoding and time zone applied.
	 *
	 * @return {@link PreparedStatement} for the connection
	 * @throws SQLException
	 *             Packet {@link SQLException} if occurring
	 */
	public PreparedStatement createPreparedStatement(final String sql)
			throws SQLException {
		checkConnection();
		if (conn == null)
			throw new SQLException("Error setting up SQL connection");
		// FIX: JDBC forbids calling Statement.execute(String) on a
		// PreparedStatement (it throws SQLException), so the session setup
		// commands are issued on a plain statement instead.
		final Statement setup = conn.createStatement();
		try {
			setup.execute("SET NAMES " + defaultEncoding + ";");
			setup.execute("SET time_zone = '" + timezone + "';");
		} finally {
			setup.close();
		}
		return conn.prepareStatement(sql);
	}

	/**
	 * Close SQL connection. After this call checkConnection() always returns
	 * false.
	 */
	public void close() {
		closed = true;
		try {
			if (conn != null)
				conn.close();
		} catch (SQLException e) {
			System.err.println("Error closing SQL connection: "
					+ e.getMessage());
			e.printStackTrace();
		}
	}

	/**
	 * Get the Version of the database management system, i.e. the version of
	 * the MySQL server instance.
	 *
	 * @return MySQL server instance version, or "" if no row was returned
	 * @throws SQLException
	 *             Thrown if occurring while querying
	 */
	public String getDBMSVersion() throws SQLException {
		final java.sql.Statement stmt = createStatement();
		try {
			stmt.executeQuery("SELECT version();");
			ResultSet rs = stmt.getResultSet();
			try {
				if (!rs.next())
					return "";
				return rs.getString(1);
			} finally {
				rs.close();
			}
		} finally {
			stmt.close();
		}
	}

	/**
	 * Read a timestamp column as a {@link Date}, applying the configured
	 * timezone offset.
	 * NOTE(review): a NULL column yields the current time -- confirm this
	 * fallback is intended rather than returning null.
	 */
	public Date getSqlDate(final ResultSet rs, final String columnName)
			throws SQLException {
		final Timestamp timestamp = rs.getTimestamp(columnName);
		if (timestamp == null)
			return new Date();
		else {
			final long time = timestamp.getTime() + timezoneOffset;
			return new Date(time);
		}
	}

	/**
	 * Escaped (including surrounding "'" characters) safe SQL string.
	 *
	 * @param string
	 *            to be processed
	 * @return quoted and escaped SQL string literal
	 */
	public static String escapeString(String string) {
		return "'" + sqlSafeString(string) + "'";
	}
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package backtype.storm.spout;

import backtype.storm.task.ICollectorCallback;
import backtype.storm.task.OutputCollector;
import backtype.storm.utils.Utils;

import java.util.List;

/**
 * This output collector exposes the API for emitting tuples from an {@link backtype.storm.topology.IRichSpout}. The main difference between this output
 * collector and {@link OutputCollector} for {@link backtype.storm.topology.IRichBolt} is that spouts can tag messages with ids so that they can be acked or
 * failed later on. This is the Spout portion of Storm's API to guarantee that each message is fully processed at least once.
 */
public class SpoutOutputCollector extends SpoutOutputCollectorCb {
    // Collector that every call on this instance is forwarded to.
    SpoutOutputCollectorCb _delegate;

    /**
     * Wrap a plain {@link ISpoutOutputCollector}. The plain interface has no
     * callback or control-message operations, so those overrides throw if
     * invoked on the adapter.
     */
    public SpoutOutputCollector(ISpoutOutputCollector delegate) {
        _delegate = new SpoutOutputCollectorCb(delegate) {
            @Override
            public List<Integer> emit(String streamId, List<Object> tuple, Object messageId) {
                return delegate.emit(streamId, tuple, messageId);
            }

            @Override
            public void emitDirect(int taskId, String streamId, List<Object> tuple, Object messageId) {
                delegate.emitDirect(taskId, streamId, tuple, messageId);
            }

            @Override
            public void reportError(Throwable error) {
                delegate.reportError(error);
            }

            // Callback and control-message variants are not supported by the
            // plain ISpoutOutputCollector interface.
            @Override
            public List<Integer> emit(String streamId, List<Object> tuple, Object messageId, ICollectorCallback callback) {
                throw new RuntimeException("This method should not be called!");
            }

            @Override
            public void emitDirect(int taskId, String streamId, List<Object> tuple, Object messageId, ICollectorCallback callback) {
                throw new RuntimeException("This method should not be called!");
            }

            @Override
            public List<Integer> emitCtrl(String streamId, List<Object> tuple, Object messageId) {
                throw new RuntimeException("This method should not be called!");
            }

            @Override
            public void emitDirectCtrl(int taskId, String streamId, List<Object> tuple, Object messageId) {
                throw new RuntimeException("This method should not be called!");
            }
        };
    }

    /** Wrap a full-featured collector; all calls are forwarded as-is. */
    public SpoutOutputCollector(SpoutOutputCollectorCb delegate) {
        _delegate = delegate;
    }

    /**
     * Emits a new tuple to the specified output stream with the given message
     * ID. When Storm detects that this tuple has been fully processed, or has
     * failed to be fully processed, the spout will receive an ack or fail
     * callback respectively with the messageId as long as the messageId was not
     * null. If the messageId was null, Storm will not track the tuple and no
     * callback will be received. The emitted values must be immutable.
     *
     * @return the list of task ids that this tuple was sent to
     */
    public List<Integer> emit(String streamId, List<Object> tuple, Object messageId) {
        return _delegate.emit(streamId, tuple, messageId);
    }

    /** Same as {@link #emit(String, List, Object)} with a completion callback. */
    public List<Integer> emit(String streamId, List<Object> tuple, Object messageId, ICollectorCallback callback) {
        return _delegate.emit(streamId, tuple, messageId, callback);
    }

    /**
     * Emits a new tuple to the default output stream with the given message ID.
     * When Storm detects that this tuple has been fully processed, or has
     * failed to be fully processed, the spout will receive an ack or fail
     * callback respectively with the messageId as long as the messageId was not
     * null. If the messageId was null, Storm will not track the tuple and no
     * callback will be received. The emitted values must be immutable.
     *
     * @return the list of task ids that this tuple was sent to
     */
    public List<Integer> emit(List<Object> tuple, Object messageId) {
        return emit(Utils.DEFAULT_STREAM_ID, tuple, messageId);
    }

    /** Same as {@link #emit(List, Object)} with a completion callback. */
    public List<Integer> emit(List<Object> tuple, Object messageId, ICollectorCallback callback) {
        return emit(Utils.DEFAULT_STREAM_ID, tuple, messageId, callback);
    }

    /**
     * Emits a tuple to the default output stream with a null message id. Storm
     * will not track this message so ack and fail will never be called for this
     * tuple. The emitted values must be immutable.
     */
    public List<Integer> emit(List<Object> tuple) {
        return emit(tuple, null);
    }

    /** Same as {@link #emit(List)} with a completion callback. */
    public List<Integer> emit(List<Object> tuple, ICollectorCallback callback) {
        return emit(tuple, null, callback);
    }

    /**
     * Emits a tuple to the specified output stream with a null message id.
     * Storm will not track this message so ack and fail will never be called
     * for this tuple. The emitted values must be immutable.
     */
    public List<Integer> emit(String streamId, List<Object> tuple) {
        return emit(streamId, tuple, null);
    }

    /** Same as {@link #emit(String, List)} with a completion callback. */
    public List<Integer> emit(String streamId, List<Object> tuple, ICollectorCallback callback) {
        return emit(streamId, tuple, null, callback);
    }

    /**
     * Emits a tuple to the specified task on the specified output stream. This
     * output stream must have been declared as a direct stream, and the
     * specified task must use a direct grouping on this stream to receive the
     * message. The emitted values must be immutable.
     */
    public void emitDirect(int taskId, String streamId, List<Object> tuple, Object messageId) {
        _delegate.emitDirect(taskId, streamId, tuple, messageId);
    }

    /** Same as {@link #emitDirect(int, String, List, Object)} with a callback. */
    public void emitDirect(int taskId, String streamId, List<Object> tuple, Object messageId, ICollectorCallback callback) {
        _delegate.emitDirect(taskId, streamId, tuple, messageId, callback);
    }

    /**
     * Emits a tuple to the specified task on the default output stream. This
     * output stream must have been declared as a direct stream, and the
     * specified task must use a direct grouping on this stream to receive the
     * message. The emitted values must be immutable.
     */
    public void emitDirect(int taskId, List<Object> tuple, Object messageId) {
        emitDirect(taskId, Utils.DEFAULT_STREAM_ID, tuple, messageId);
    }

    /** Same as {@link #emitDirect(int, List, Object)} with a callback. */
    public void emitDirect(int taskId, List<Object> tuple, Object messageId, ICollectorCallback callback) {
        emitDirect(taskId, Utils.DEFAULT_STREAM_ID, tuple, messageId, callback);
    }

    /**
     * Emits a tuple to the specified task on the specified output stream. This
     * output stream must have been declared as a direct stream, and the
     * specified task must use a direct grouping on this stream to receive the
     * message. The emitted values must be immutable.
     *
     * <p>
     * Because no message id is specified, Storm will not track this message so
     * ack and fail will never be called for this tuple.
     * </p>
     */
    public void emitDirect(int taskId, String streamId, List<Object> tuple) {
        emitDirect(taskId, streamId, tuple, null);
    }

    /** Same as {@link #emitDirect(int, String, List)} with a callback. */
    public void emitDirect(int taskId, String streamId, List<Object> tuple, ICollectorCallback callback) {
        emitDirect(taskId, streamId, tuple, null, callback);
    }

    /**
     * Emits a tuple to the specified task on the default output stream. This
     * output stream must have been declared as a direct stream, and the
     * specified task must use a direct grouping on this stream to receive the
     * message. The emitted values must be immutable.
     *
     * <p>
     * Because no message id is specified, Storm will not track this message so
     * ack and fail will never be called for this tuple.
     * </p>
     */
    public void emitDirect(int taskId, List<Object> tuple) {
        emitDirect(taskId, tuple, null);
    }

    /** Same as {@link #emitDirect(int, List)} with a callback. */
    public void emitDirect(int taskId, List<Object> tuple, ICollectorCallback callback) {
        emitDirect(taskId, tuple, null, callback);
    }

    @Override
    public void reportError(Throwable error) {
        _delegate.reportError(error);
    }

    @Override
    public void flush(){
        _delegate.flush();
    }

    @Override
    public void setBatchId(long batchId) {
        _delegate.setBatchId(batchId);
    }

    /** @return the wrapped collector all calls are forwarded to */
    public SpoutOutputCollectorCb getDelegate() {
        return _delegate;
    }

    // Control-message (ctrl) variants: forwarded to the delegate; they throw
    // when this collector was built from a plain ISpoutOutputCollector.
    public void emitDirectCtrl(int taskId, List<Object> tuple) {
        _delegate.emitDirectCtrl(taskId, Utils.DEFAULT_STREAM_ID, tuple, null);
    }

    public void emitDirectCtrl(int taskId, List<Object> tuple, Object messageId) {
        _delegate.emitDirectCtrl(taskId, Utils.DEFAULT_STREAM_ID, tuple, messageId);
    }

    public void emitDirectCtrl(int taskId, String streamId, List<Object> tuple) {
        _delegate.emitDirectCtrl(taskId, streamId, tuple, null);
    }

    @Override
    public void emitDirectCtrl(int taskId, String streamId, List<Object> tuple, Object messageId) {
        _delegate.emitDirectCtrl(taskId, streamId, tuple, messageId);
    }

    public List<Integer> emitCtrl(List<Object> tuple) {
        return _delegate.emitCtrl(Utils.DEFAULT_STREAM_ID, tuple, null);
    }

    public List<Integer> emitCtrl(List<Object> tuple, Object messageId) {
        return _delegate.emitCtrl(Utils.DEFAULT_STREAM_ID, tuple, messageId);
    }

    public List<Integer> emitCtrl(String streamId, List<Object> tuple) {
        return _delegate.emitCtrl(streamId, tuple, null);
    }

    @Override
    public List<Integer> emitCtrl(String streamId, List<Object> tuple, Object messageId) {
        return _delegate.emitCtrl(streamId, tuple, messageId);
    }

    @Override
    public void emitBarrier() {
        _delegate.emitBarrier();
    }
}
package com.subitolabs.cordova.wifiapapi; import org.apache.cordova.*; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import java.lang.reflect.Method; import java.util.ArrayList; import android.content.Context; import android.net.wifi.WifiConfiguration; import android.net.wifi.WifiManager; import android.util.Log; import android.os.Build; import android.annotation.TargetApi; public class WifiApAPI extends CordovaPlugin { private static int constant = 0; private static final int WIFI_AP_STATE_UNKNOWN = -1; private static int WIFI_AP_STATE_DISABLING = 0; private static int WIFI_AP_STATE_DISABLED = 1; public int WIFI_AP_STATE_ENABLING = 2; public int WIFI_AP_STATE_ENABLED = 3; private static int WIFI_AP_STATE_FAILED = 4; private final String[] WIFI_STATE_TEXTSTATE = new String[] { "DISABLING","DISABLED","ENABLING","ENABLED","FAILED" }; private WifiManager wifi; private String TAG = "WifiAP"; private String SSID = ""; private int stateWifiWasIn = -1; private boolean alwaysEnableWifi = true; public static final String ACTION_ENABLE_AP = "setApEnabled"; public static final String ACTION_DISABLE_AP = "setApDisabled"; @Override public boolean execute(String action, JSONArray args, CallbackContext callbackContext) throws JSONException { try { if (ACTION_ENABLE_AP.equals(action)) { SSID = args.getString(0); setWifiApEnabled(true); callbackContext.success(new JSONArray()); return true; } else if (ACTION_DISABLE_AP.equals(action)) { setWifiApEnabled(false); callbackContext.success(new JSONArray()); return true; } callbackContext.error("Invalid action"); return false; } catch(Exception e) { e.printStackTrace(); callbackContext.error(e.getMessage()); return false; } } /** * Enable/disable wifi * @param true or false */ private void setWifiApEnabled(boolean enabled) { wifi = (WifiManager) getContext().getSystemService(Context.WIFI_SERVICE); WifiConfiguration config = new WifiConfiguration(); config.SSID = SSID; 
config.allowedAuthAlgorithms.set(WifiConfiguration.AuthAlgorithm.OPEN); //remember wirelesses current state if (enabled && stateWifiWasIn==-1){ stateWifiWasIn=wifi.getWifiState(); } //disable wireless if (enabled && wifi.getConnectionInfo() !=null) { wifi.setWifiEnabled(false); int loopMax = 10; while(loopMax>0 && wifi.getWifiState()!=WifiManager.WIFI_STATE_DISABLED){ try { Thread.sleep(500); loopMax--; } catch (Exception e) { } } } //enable/disable wifi ap int state = WIFI_AP_STATE_UNKNOWN; try { wifi.setWifiEnabled(false); Method method1 = wifi.getClass().getMethod("setWifiApEnabled", WifiConfiguration.class, boolean.class); method1.invoke(wifi, config, enabled); Method method2 = wifi.getClass().getMethod("getWifiApState"); state = (Integer) method2.invoke(wifi); } catch (Exception e) { Log.e(TAG, e.getMessage()); } //hold thread up while processing occurs if (!enabled) { int loopMax = 10; while (loopMax>0 && (getWifiAPState()==WIFI_AP_STATE_DISABLING || getWifiAPState()==WIFI_AP_STATE_ENABLED || getWifiAPState()==WIFI_AP_STATE_FAILED)) { try { Thread.sleep(500); loopMax--; } catch (Exception e) { } } //enable wifi if it was enabled beforehand //this is somewhat unreliable and app gets confused and doesn't turn it back on sometimes so added toggle to always enable if you desire if(stateWifiWasIn==WifiManager.WIFI_STATE_ENABLED || stateWifiWasIn==WifiManager.WIFI_STATE_ENABLING || stateWifiWasIn==WifiManager.WIFI_STATE_UNKNOWN || alwaysEnableWifi){ Log.d(TAG, "enable wifi: calling"); wifi.setWifiEnabled(true); //don't hold things up and wait for it to get enabled } stateWifiWasIn = -1; } else if (enabled) { int loopMax = 10; while (loopMax>0 && (getWifiAPState()==WIFI_AP_STATE_ENABLING || getWifiAPState()==WIFI_AP_STATE_DISABLED || getWifiAPState()==WIFI_AP_STATE_FAILED)) { Log.d(TAG, (enabled?"enabling":"disabling") +" wifi ap: waiting, pass: " + (10-loopMax)); try { Thread.sleep(500); loopMax--; } catch (Exception e) { } } Log.d(TAG, 
(enabled?"enabling":"disabling") +" wifi ap: done, pass: " + (10-loopMax)); } } /** * Get the wifi AP state * @return WifiAP state * @author http://stackoverflow.com/a/7049074/1233435 */ public int getWifiAPState() { int state = WIFI_AP_STATE_UNKNOWN; try { Method method2 = wifi.getClass().getMethod("getWifiApState"); state = (Integer) method2.invoke(wifi); } catch (Exception e) { } if(state>=10){ //using Android 4.0+ (or maybe 3+, haven't had a 3 device to test it on) so use states that are +10 constant=10; } //reset these in case was newer device WIFI_AP_STATE_DISABLING = 0+constant; WIFI_AP_STATE_DISABLED = 1+constant; WIFI_AP_STATE_ENABLING = 2+constant; WIFI_AP_STATE_ENABLED = 3+constant; WIFI_AP_STATE_FAILED = 4+constant; Log.d(TAG, "getWifiAPState.state " + (state==-1?"UNKNOWN":WIFI_STATE_TEXTSTATE[state-constant])); sendJavascript("WifiApAPI.__plugin_async('status','"+ (state==-1?"UNKNOWN":WIFI_STATE_TEXTSTATE[state-constant])+"')"); return state; } private Context getContext() { return this.cordova.getActivity().getApplicationContext(); } @TargetApi(Build.VERSION_CODES.KITKAT) private void sendJavascript(final String javascript) { webView.post(new Runnable() { @Override public void run() { // See: https://github.com/GoogleChrome/chromium-webview-samples/blob/master/jsinterface-example/src/com/google/chrome/android/example/jsinterface/MainActivity.java if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) { webView.evaluateJavascript(javascript, null); } else { webView.loadUrl("javascript:" + javascript); } } }); } }
/*
 * Copyright 2020 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/compute/v1/compute.proto
// NOTE(review): generated file — any change must be made in the .proto and the
// code regenerated, never by hand-editing this class.

package com.google.cloud.compute.v1;

/**
 *
 *
 * <pre>
 * A request message for FirewallPolicies.ListAssociations. See the method description for details.
 * </pre>
 *
 * Protobuf type {@code google.cloud.compute.v1.ListAssociationsFirewallPolicyRequest}
 */
public final class ListAssociationsFirewallPolicyRequest
    extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.compute.v1.ListAssociationsFirewallPolicyRequest)
    ListAssociationsFirewallPolicyRequestOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use ListAssociationsFirewallPolicyRequest.newBuilder() to construct.
  private ListAssociationsFirewallPolicyRequest(
      com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private ListAssociationsFirewallPolicyRequest() {
    targetResource_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListAssociationsFirewallPolicyRequest();
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }

  // Wire-format parsing constructor; unrecognized fields are preserved in
  // unknownFields so they round-trip through serialization.
  private ListAssociationsFirewallPolicyRequest(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    int mutable_bitField0_ = 0;
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          // Tag for field 467318524 (target_resource), wire type 2:
          // (467318524 << 3) | 2 overflows a 32-bit int to this negative value.
          case -556419102:
            {
              java.lang.String s = input.readStringRequireUtf8();
              bitField0_ |= 0x00000001;
              targetResource_ = s;
              break;
            }
          default:
            {
              if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.compute.v1.Compute
        .internal_static_google_cloud_compute_v1_ListAssociationsFirewallPolicyRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.compute.v1.Compute
        .internal_static_google_cloud_compute_v1_ListAssociationsFirewallPolicyRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.compute.v1.ListAssociationsFirewallPolicyRequest.class,
            com.google.cloud.compute.v1.ListAssociationsFirewallPolicyRequest.Builder.class);
  }

  // Bit 0 tracks presence of the optional target_resource field.
  private int bitField0_;
  public static final int TARGET_RESOURCE_FIELD_NUMBER = 467318524;
  // Holds either a String or a ByteString; lazily converted on access.
  private volatile java.lang.Object targetResource_;
  /**
   *
   *
   * <pre>
   * The target resource to list associations. It is an organization, or a folder.
   * </pre>
   *
   * <code>optional string target_resource = 467318524;</code>
   *
   * @return Whether the targetResource field is set.
   */
  @java.lang.Override
  public boolean hasTargetResource() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * The target resource to list associations. It is an organization, or a folder.
   * </pre>
   *
   * <code>optional string target_resource = 467318524;</code>
   *
   * @return The targetResource.
   */
  @java.lang.Override
  public java.lang.String getTargetResource() {
    java.lang.Object ref = targetResource_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // First access after parsing: decode the ByteString and cache the String.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      targetResource_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * The target resource to list associations. It is an organization, or a folder.
   * </pre>
   *
   * <code>optional string target_resource = 467318524;</code>
   *
   * @return The bytes for targetResource.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getTargetResourceBytes() {
    java.lang.Object ref = targetResource_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      targetResource_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    // No required fields in this message, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (((bitField0_ & 0x00000001) != 0)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 467318524, targetResource_);
    }
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(467318524, targetResource_);
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.compute.v1.ListAssociationsFirewallPolicyRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.compute.v1.ListAssociationsFirewallPolicyRequest other =
        (com.google.cloud.compute.v1.ListAssociationsFirewallPolicyRequest) obj;

    if (hasTargetResource() != other.hasTargetResource()) return false;
    if (hasTargetResource()) {
      if (!getTargetResource().equals(other.getTargetResource())) return false;
    }
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasTargetResource()) {
      hash = (37 * hash) + TARGET_RESOURCE_FIELD_NUMBER;
      hash = (53 * hash) + getTargetResource().hashCode();
    }
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.cloud.compute.v1.ListAssociationsFirewallPolicyRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.compute.v1.ListAssociationsFirewallPolicyRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.compute.v1.ListAssociationsFirewallPolicyRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.compute.v1.ListAssociationsFirewallPolicyRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.compute.v1.ListAssociationsFirewallPolicyRequest parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.compute.v1.ListAssociationsFirewallPolicyRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.compute.v1.ListAssociationsFirewallPolicyRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.compute.v1.ListAssociationsFirewallPolicyRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.compute.v1.ListAssociationsFirewallPolicyRequest
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.compute.v1.ListAssociationsFirewallPolicyRequest
      parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.compute.v1.ListAssociationsFirewallPolicyRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.compute.v1.ListAssociationsFirewallPolicyRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.compute.v1.ListAssociationsFirewallPolicyRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   *
   *
   * <pre>
   * A request message for FirewallPolicies.ListAssociations. See the method description for details.
   * </pre>
   *
   * Protobuf type {@code google.cloud.compute.v1.ListAssociationsFirewallPolicyRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.compute.v1.ListAssociationsFirewallPolicyRequest)
      com.google.cloud.compute.v1.ListAssociationsFirewallPolicyRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.compute.v1.Compute
          .internal_static_google_cloud_compute_v1_ListAssociationsFirewallPolicyRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.compute.v1.Compute
          .internal_static_google_cloud_compute_v1_ListAssociationsFirewallPolicyRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.compute.v1.ListAssociationsFirewallPolicyRequest.class,
              com.google.cloud.compute.v1.ListAssociationsFirewallPolicyRequest.Builder.class);
    }

    // Construct using
    // com.google.cloud.compute.v1.ListAssociationsFirewallPolicyRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      targetResource_ = "";
      bitField0_ = (bitField0_ & ~0x00000001);
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.compute.v1.Compute
          .internal_static_google_cloud_compute_v1_ListAssociationsFirewallPolicyRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.compute.v1.ListAssociationsFirewallPolicyRequest
        getDefaultInstanceForType() {
      return com.google.cloud.compute.v1.ListAssociationsFirewallPolicyRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.compute.v1.ListAssociationsFirewallPolicyRequest build() {
      com.google.cloud.compute.v1.ListAssociationsFirewallPolicyRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.compute.v1.ListAssociationsFirewallPolicyRequest buildPartial() {
      com.google.cloud.compute.v1.ListAssociationsFirewallPolicyRequest result =
          new com.google.cloud.compute.v1.ListAssociationsFirewallPolicyRequest(this);
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        to_bitField0_ |= 0x00000001;
      }
      result.targetResource_ = targetResource_;
      result.bitField0_ = to_bitField0_;
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.compute.v1.ListAssociationsFirewallPolicyRequest) {
        return mergeFrom((com.google.cloud.compute.v1.ListAssociationsFirewallPolicyRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(
        com.google.cloud.compute.v1.ListAssociationsFirewallPolicyRequest other) {
      if (other
          == com.google.cloud.compute.v1.ListAssociationsFirewallPolicyRequest.getDefaultInstance())
        return this;
      if (other.hasTargetResource()) {
        bitField0_ |= 0x00000001;
        targetResource_ = other.targetResource_;
        onChanged();
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.cloud.compute.v1.ListAssociationsFirewallPolicyRequest parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage =
            (com.google.cloud.compute.v1.ListAssociationsFirewallPolicyRequest)
                e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        // Merge whatever was successfully parsed even when parsing failed.
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    private int bitField0_;

    private java.lang.Object targetResource_ = "";
    /**
     *
     *
     * <pre>
     * The target resource to list associations. It is an organization, or a folder.
     * </pre>
     *
     * <code>optional string target_resource = 467318524;</code>
     *
     * @return Whether the targetResource field is set.
     */
    public boolean hasTargetResource() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     *
     *
     * <pre>
     * The target resource to list associations. It is an organization, or a folder.
     * </pre>
     *
     * <code>optional string target_resource = 467318524;</code>
     *
     * @return The targetResource.
     */
    public java.lang.String getTargetResource() {
      java.lang.Object ref = targetResource_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        targetResource_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The target resource to list associations. It is an organization, or a folder.
     * </pre>
     *
     * <code>optional string target_resource = 467318524;</code>
     *
     * @return The bytes for targetResource.
     */
    public com.google.protobuf.ByteString getTargetResourceBytes() {
      java.lang.Object ref = targetResource_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        targetResource_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The target resource to list associations. It is an organization, or a folder.
     * </pre>
     *
     * <code>optional string target_resource = 467318524;</code>
     *
     * @param value The targetResource to set.
     * @return This builder for chaining.
     */
    public Builder setTargetResource(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000001;
      targetResource_ = value;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The target resource to list associations. It is an organization, or a folder.
     * </pre>
     *
     * <code>optional string target_resource = 467318524;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearTargetResource() {
      bitField0_ = (bitField0_ & ~0x00000001);
      targetResource_ = getDefaultInstance().getTargetResource();
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The target resource to list associations. It is an organization, or a folder.
     * </pre>
     *
     * <code>optional string target_resource = 467318524;</code>
     *
     * @param value The bytes for targetResource to set.
     * @return This builder for chaining.
     */
    public Builder setTargetResourceBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      bitField0_ |= 0x00000001;
      targetResource_ = value;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.compute.v1.ListAssociationsFirewallPolicyRequest)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.compute.v1.ListAssociationsFirewallPolicyRequest)
  private static final com.google.cloud.compute.v1.ListAssociationsFirewallPolicyRequest
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.compute.v1.ListAssociationsFirewallPolicyRequest();
  }

  public static com.google.cloud.compute.v1.ListAssociationsFirewallPolicyRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<ListAssociationsFirewallPolicyRequest> PARSER =
      new com.google.protobuf.AbstractParser<ListAssociationsFirewallPolicyRequest>() {
        @java.lang.Override
        public ListAssociationsFirewallPolicyRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new ListAssociationsFirewallPolicyRequest(input, extensionRegistry);
        }
      };

  public static com.google.protobuf.Parser<ListAssociationsFirewallPolicyRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ListAssociationsFirewallPolicyRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.compute.v1.ListAssociationsFirewallPolicyRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
package org.cloudfoundry.community.servicebroker.datalifecycle.service;

import static org.cloudfoundry.community.servicebroker.datalifecycle.config.LCCatalogConfig.COPY;
import static org.cloudfoundry.community.servicebroker.datalifecycle.config.LCCatalogConfig.PRODUCTION;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.stream.IntStream;

import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.Pair;
import org.cloudfoundry.community.servicebroker.datalifecycle.config.LCCatalogConfig;
import org.cloudfoundry.community.servicebroker.datalifecycle.dto.InstancePair;
import org.cloudfoundry.community.servicebroker.datalifecycle.provider.CopyProvider;
import org.cloudfoundry.community.servicebroker.datalifecycle.provider.DataProvider;
import org.cloudfoundry.community.servicebroker.datalifecycle.repo.BrokerActionRepository;
import org.cloudfoundry.community.servicebroker.exception.ServiceBrokerAsyncRequiredException;
import org.cloudfoundry.community.servicebroker.exception.ServiceBrokerException;
import org.cloudfoundry.community.servicebroker.exception.ServiceInstanceExistsException;
import org.cloudfoundry.community.servicebroker.exception.ServiceInstanceUpdateNotSupportedException;
import org.cloudfoundry.community.servicebroker.model.CreateServiceInstanceRequest;
import org.cloudfoundry.community.servicebroker.model.DeleteServiceInstanceRequest;
import org.cloudfoundry.community.servicebroker.model.ServiceDefinition;
import org.cloudfoundry.community.servicebroker.model.ServiceInstance;
import org.cloudfoundry.community.servicebroker.model.UpdateServiceInstanceRequest;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.springframework.core.task.SyncTaskExecutor;

/**
 * Unit tests for {@link LCServiceInstanceService} behavior on the COPY plan:
 * provisioning creates and sanitizes a copy of the source instance,
 * deprovisioning deletes the copy, synchronous (non-async) requests are
 * rejected, and failures during deprovision mark the instance as failed.
 *
 * Collaborators ({@link CopyProvider}, {@link DataProvider},
 * {@link LCServiceInstanceManager}, etc.) are Mockito mocks; a
 * {@link SyncTaskExecutor} makes the service's async work run inline so the
 * tests can assert on the final state directly.
 */
public class LCServiceInstanceServiceCopyTest {

	private LCServiceInstanceService service;
	private ServiceInstance instance;

	@Mock
	private CopyProvider copyProvider;

	@Mock
	private DataProvider dataProvider;

	@Mock
	private BrokerActionRepository brokerRepo;

	// First (and only) service definition from the real catalog config.
	private ServiceDefinition serviceDef = new LCCatalogConfig().catalog()
			.getServiceDefinitions().get(0);

	@Mock
	LCServiceInstanceManager instanceManager;

	@Mock
	private DataProviderService dataProviderService;

	@Before
	public void setUp() throws ServiceInstanceExistsException,
			ServiceBrokerException {
		MockitoAnnotations.initMocks(this);
		service = new LCServiceInstanceService(copyProvider, dataProvider,
				"source_instance_id", brokerRepo, instanceManager,
				new SyncTaskExecutor(), dataProviderService);
	}

	/** Provisions a COPY-plan instance and stashes it in {@link #instance}. */
	private void createServiceInstance() throws ServiceInstanceExistsException,
			ServiceBrokerException, ServiceBrokerAsyncRequiredException {
		when(copyProvider.createCopy("source_instance_id")).thenReturn(
				"copy_instance");
		CreateServiceInstanceRequest createServiceInstanceRequest = newCreateServiceInstanceRequest();
		instance = service.createServiceInstance(createServiceInstanceRequest);
	}

	/** Builds an async COPY-plan create request for the catalog's service. */
	private CreateServiceInstanceRequest newCreateServiceInstanceRequest() {
		return new CreateServiceInstanceRequest(serviceDef.getId(), COPY,
				"org_guid", "space_guid", true)
				.withServiceInstanceId("service_instance_id").and()
				.withServiceDefinition(serviceDef).withAsyncClient(true);
	}

	@Test
	public void itShouldStoreWhatItCreates()
			throws ServiceInstanceExistsException, ServiceBrokerException,
			ServiceBrokerAsyncRequiredException {
		createServiceInstance();
		assertThat(instance.getServiceInstanceLastOperation().getState(),
				is(equalTo("succeeded")));
		verify(instanceManager).saveInstance(instance, "copy_instance");
	}

	@Test
	public void itShouldCreateACopyWhenProvisionedWithACopyPlan()
			throws Exception {
		createServiceInstance();
		verify(copyProvider).createCopy("source_instance_id");
	}

	@Test
	public void itShouldSanitizeACopy() throws Exception {
		createServiceInstance();
		verify(dataProvider).sanitize(anyString(), any());
	}

	@Test
	public void itDeletesWhatItShould() throws Exception {
		createServiceInstance();
		String id = instance.getServiceInstanceId();
		when(instanceManager.getInstance(id)).thenReturn(instance);
		when(instanceManager.removeInstance(id)).thenReturn(instance);
		when(instanceManager.getCopyIdForInstance(id)).thenReturn(
				"copy_instance");
		assertThat(
				service.deleteServiceInstance(new DeleteServiceInstanceRequest(
						id, instance.getServiceDefinitionId(), instance
								.getPlanId(), true)), is(equalTo(instance)));
		verify(copyProvider).deleteCopy("copy_instance");
		verify(instanceManager).removeInstance(instance.getServiceInstanceId());
	}

	@Test
	public void itReturnsTheCopyInstanceIdForServiceInstanceId()
			throws Exception {
		createServiceInstance();
		Collection<Pair<String, ServiceInstance>> instances = Collections
				.singletonList(new ImmutablePair<>("copy_instance", instance));
		when(instanceManager.getInstances()).thenReturn(instances);
		assertThat(service.getInstanceIdForServiceInstance(instance
				.getServiceInstanceId()), is(equalTo("copy_instance")));
	}

	@Test
	public void itReturnsTheCorrectListOfServices()
			throws ServiceBrokerException, ServiceInstanceExistsException {
		Collection<Pair<String, ServiceInstance>> instances = createInstances();
		when(instanceManager.getInstances()).thenReturn(instances);
		List<InstancePair> provisionedInstances = service
				.getProvisionedInstances();
		// 4 generated copies + the source instance itself.
		assertThat(provisionedInstances, hasSize(5));
		assertTrue(provisionedInstances.contains(new InstancePair(
				"source_instance_id", "copy_instance2")));
		assertTrue(provisionedInstances.contains(new InstancePair(
				"source_instance_id", "source_instance_id")));
	}

	/** Builds four copy instances plus the source instance entry. */
	private Collection<Pair<String, ServiceInstance>> createInstances()
			throws ServiceInstanceExistsException, ServiceBrokerException {
		Collection<Pair<String, ServiceInstance>> instances = new ArrayList<>();
		IntStream.range(0, 4).forEach(
				i -> instances.add(new ImmutablePair<>("copy_instance" + i,
						(ServiceInstance) null)));
		instances.add(new ImmutablePair<>("source_instance_id",
				(ServiceInstance) null));
		return instances;
	}

	@Test(expected = ServiceInstanceExistsException.class)
	public void itShouldThrowIfInstanceAlreadyExists() throws Exception {
		when(instanceManager.getInstance(any())).thenReturn(
				new ServiceInstance(new CreateServiceInstanceRequest(null,
						null, null, null, true)));
		createServiceInstance();
	}

	@Test(expected = ServiceInstanceUpdateNotSupportedException.class)
	public void itShouldThrowForUpdateService() throws Exception {
		createServiceInstance();
		service.updateServiceInstance(new UpdateServiceInstanceRequest(
				PRODUCTION, true).withInstanceId(instance
				.getServiceInstanceId()));
	}

	@Test(expected = ServiceBrokerAsyncRequiredException.class)
	public void itShouldThrowForSyncServiceDeletion() throws Exception {
		service.deleteServiceInstance(new DeleteServiceInstanceRequest(null,
				null, null, false));
	}

	@Test(expected = ServiceBrokerAsyncRequiredException.class)
	public void itShouldThrowForSyncServiceCreation() throws Exception {
		service.createServiceInstance(new CreateServiceInstanceRequest()
				.withAsyncClient(false));
	}

	// Renamed from itShouldSaveTheInstnaceAsFailedIfDeprovisionFails (typo).
	@Test
	public void itShouldSaveTheInstanceAsFailedIfDeprovisionFails()
			throws Exception {
		ServiceInstance theInstance = new ServiceInstance(
				newCreateServiceInstanceRequest());
		doThrow(new ServiceBrokerException("Problem!")).when(copyProvider)
				.deleteCopy(anyString());
		when(instanceManager.getInstance(anyString())).thenReturn(theInstance);
		when(instanceManager.getCopyIdForInstance(anyString())).thenReturn(
				"copy_id");
		ServiceInstance failedInstance = service
				.deleteServiceInstance(new DeleteServiceInstanceRequest(
						theInstance.getServiceInstanceId(), theInstance
								.getServiceDefinitionId(), COPY, true));
		assertThat(failedInstance.getServiceInstanceLastOperation().getState(),
				is(equalTo("failed")));
		// Once for in progress, once for failed.
		verify(instanceManager, times(2)).saveInstance(any(), anyString());
		assertTrue(failedInstance.isAsync());
	}
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.ranger.services.yarn.client; import java.security.PrivilegedAction; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.Callable; import java.util.concurrent.TimeUnit; import javax.security.auth.Subject; import org.apache.log4j.Logger; import org.apache.ranger.plugin.client.BaseClient; import org.apache.ranger.plugin.client.HadoopException; import org.apache.ranger.services.yarn.client.json.model.YarnSchedulerResponse; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import com.sun.jersey.api.client.Client; import com.sun.jersey.api.client.ClientResponse; import com.sun.jersey.api.client.WebResource; public class YarnClient extends BaseClient { private static final Logger LOG = Logger.getLogger(YarnClient.class); private static final String EXPECTED_MIME_TYPE = "application/json"; private static final String YARN_LIST_API_ENDPOINT = "/ws/v1/cluster/scheduler"; private static final String errMessage = " You can still save the repository and start creating " + "policies, but you would not be able to use autocomplete for " + "resource names. 
Check ranger_admin.log for more info.";

	// Connection settings read from the service config map in the constructor.
	// NOTE(review): visibility is package-private in the original; they are also
	// captured by the anonymous classes inside getQueueList() below.
	String yarnQUrl;
	String userName;
	String password;

	/**
	 * Builds a YARN lookup client from the service configuration map.
	 * Reads "yarn.url", "username" and "password"; missing values are only
	 * logged here (lookups will fail later), construction never throws.
	 *
	 * @param serviceName Ranger service name this client belongs to
	 * @param configs     service connection configuration
	 */
	public YarnClient(String serviceName, Map<String, String> configs) {
		super(serviceName,configs,"yarn-client");
		this.yarnQUrl = configs.get("yarn.url");
		this.userName = configs.get("username");
		this.password = configs.get("password");
		if (this.yarnQUrl == null || this.yarnQUrl.isEmpty()) {
			LOG.error("No value found for configuration 'yarn.url'. YARN resource lookup will fail");
		}
		if (this.userName == null || this.userName.isEmpty()) {
			LOG.error("No value found for configuration 'username'. YARN resource lookup will fail");
		}
		if (this.password == null || this.password.isEmpty()) {
			LOG.error("No value found for configuration 'password'. YARN resource lookup will fail");
		}
		if (LOG.isDebugEnabled()) {
			// Password is deliberately masked in the debug log.
			LOG.debug("Yarn Client is build with url [" + this.yarnQUrl + "] user: ["
					+ this.userName + "], password: [" + "*********" + "]");
		}
	}

	/**
	 * Fetches the YARN scheduler queue names whose name starts with
	 * {@code queueNameMatching}, excluding any already in
	 * {@code existingQueueList}.
	 *
	 * The HTTP call runs inside a {@link Callable} executed as the login
	 * {@link Subject} (Kerberos-aware lookup). "yarn.url" may hold several
	 * URLs separated by ',' or ';'; the first one returning HTTP 200 wins.
	 *
	 * @param queueNameMatching prefix filter; null/empty matches every queue
	 * @param existingQueueList queue names to skip (may be null)
	 * @return matching queue names; null when no login subject or no usable URL
	 * @throws HadoopException (unchecked) when no URL yields a valid response
	 *                         or the response cannot be parsed
	 */
	public List<String> getQueueList(final String queueNameMatching, final List<String> existingQueueList) {
		if (LOG.isDebugEnabled()) {
			LOG.debug("Getting Yarn queue list for queueNameMatching : " + queueNameMatching);
		}
		final String errMsg = errMessage;
		List<String> ret = null;
		Callable<List<String>> callableYarnQListGetter = new Callable<List<String>>() {
			@Override
			public List<String> call() {
				List<String> yarnQueueListGetter = null;
				// No subject -> no lookup; returns null rather than failing.
				Subject subj = getLoginSubject();
				if (subj != null) {
					yarnQueueListGetter = Subject.doAs(subj, new PrivilegedAction<List<String>>() {
						@Override
						public List<String> run() {
							if (yarnQUrl == null || yarnQUrl.trim().isEmpty()) {
								return null;
							}
							// Multiple RM endpoints may be configured (HA); try each in turn.
							String[] yarnQUrls = yarnQUrl.trim().split("[,;]");
							if(yarnQUrls == null || yarnQUrls.length == 0) {
								return null;
							}
							Client client = Client.create();
							ClientResponse response = null;
							for(String currentUrl : yarnQUrls) {
								if(currentUrl == null || currentUrl.trim().isEmpty()) {
									continue;
								}
								String url = currentUrl.trim() + YARN_LIST_API_ENDPOINT;
								try {
									response = getQueueResponse(url, client);
									if (response != null) {
										if(response.getStatus() == 200) {
											// First healthy endpoint wins; keep this response open
											// for parsing below (closed in the finally block).
											break;
										} else{
											// Non-200 responses are closed immediately so the
											// connection is released before trying the next URL.
											response.close();
										}
									}
								} catch (Throwable t) {
									// Per-URL failures are logged and the next URL is tried.
									String msgDesc = "Exception while getting Yarn Queue List."
											+ " URL : " + url;
									LOG.error(msgDesc, t);
								}
							}
							List<String> lret = new ArrayList<String>();
							try {
								if (response != null && response.getStatus() == 200) {
									String jsonString = response.getEntity(String.class);
									Gson gson = new GsonBuilder().setPrettyPrinting().create();
									YarnSchedulerResponse yarnQResponse = gson.fromJson(jsonString, YarnSchedulerResponse.class);
									if (yarnQResponse != null) {
										List<String> yarnQueueList = yarnQResponse.getQueueNames();
										if (yarnQueueList != null) {
											for ( String yarnQueueName : yarnQueueList) {
												// Skip queues the caller already knows about.
												if ( existingQueueList != null && existingQueueList.contains(yarnQueueName)) {
													continue;
												}
												// Prefix filter; null/empty prefix matches everything.
												if (queueNameMatching == null || queueNameMatching.isEmpty()
														|| yarnQueueName.startsWith(queueNameMatching)) {
													if (LOG.isDebugEnabled()) {
														LOG.debug("getQueueList():Adding yarnQueue " + yarnQueueName);
													}
													lret.add(yarnQueueName);
												}
											}
										}
									}
								} else {
									// NOTE(review): this branch is also reached for a non-null,
									// non-200 response, so "got null response" can be misleading.
									String msgDesc = "Unable to get a valid response for "
											+ "expected mime type : [" + EXPECTED_MIME_TYPE
											+ "] URL : " + yarnQUrl + " - got null response.";
									LOG.error(msgDesc);
									HadoopException hdpException = new HadoopException(msgDesc);
									hdpException.generateResponseDataMap(false, msgDesc, msgDesc + errMsg, null, null);
									throw hdpException;
								}
							} catch (HadoopException he) {
								// Already wrapped with response data; rethrow untouched.
								throw he;
							} catch (Throwable t) {
								String msgDesc = "Exception while getting Yarn Queue List."
										+ " URL : " + yarnQUrl;
								HadoopException hdpException = new HadoopException(msgDesc, t);
								LOG.error(msgDesc, t);
								hdpException.generateResponseDataMap(false, BaseClient.getMessage(t), msgDesc + errMsg, null, null);
								throw hdpException;
							} finally {
								// Always release the HTTP response and tear down the Jersey client.
								if (response != null) {
									response.close();
								}
								if (client != null) {
									client.destroy();
								}
							}
							return lret;
						}

						/**
						 * Issues the GET against a single scheduler URL; logs the body
						 * of non-200 responses for diagnostics. Never throws for a bad
						 * status — the caller inspects the returned response.
						 */
						private ClientResponse getQueueResponse(String url, Client client) {
							if (LOG.isDebugEnabled()) {
								LOG.debug("getQueueResponse():calling " + url);
							}
							WebResource webResource = client.resource(url);
							ClientResponse response = webResource.accept(EXPECTED_MIME_TYPE)
									.get(ClientResponse.class);
							if (response != null) {
								if (LOG.isDebugEnabled()) {
									LOG.debug("getQueueResponse():response.getStatus()= " + response.getStatus());
								}
								if (response.getStatus() != 200) {
									LOG.info("getQueueResponse():response.getStatus()= " + response.getStatus()
											+ " for URL " + url + ", failed to get queue list");
									String jsonString = response.getEntity(String.class);
									LOG.info(jsonString);
								}
							}
							return response;
						}
					} );
				}
				return yarnQueueListGetter;
			}
		};
		try {
			// NOTE(review): despite the 5-second argument, timedTask() below runs
			// the callable inline and enforces no timeout — confirm if intended.
			ret = timedTask(callableYarnQListGetter, 5, TimeUnit.SECONDS);
		} catch ( Throwable t) {
			LOG.error("Unable to get Yarn Queue list from [" + yarnQUrl + "]", t);
			String msgDesc = "Unable to get a valid response for "
					+ "expected mime type : [" + EXPECTED_MIME_TYPE + "] URL : " + yarnQUrl;
			HadoopException hdpException = new HadoopException(msgDesc, t);
			LOG.error(msgDesc, t);
			hdpException.generateResponseDataMap(false, BaseClient.getMessage(t), msgDesc + errMsg, null, null);
			throw hdpException;
		}
		return ret;
	}

	/**
	 * Connection test used by the Ranger admin UI: succeeds iff at least one
	 * queue can be listed with the given configuration.
	 *
	 * @return response map populated by BaseClient.generateResponseDataMap
	 *         with success/failure status and messages
	 */
	public static Map<String, Object> connectionTest(String serviceName,
			Map<String, String> configs) {
		String errMsg = errMessage;
		boolean connectivityStatus = false;
		Map<String, Object> responseData = new HashMap<String, Object>();
		YarnClient yarnClient = getYarnClient(serviceName, configs);
		// Empty prefix + null exclusion list -> list every visible queue.
		List<String> strList = getYarnResource(yarnClient, "",null);
		if (strList != null && strList.size() > 0 ) {
			if (LOG.isDebugEnabled()) {
				LOG.debug("TESTING list size" + strList.size() + " Yarn Queues");
			}
			connectivityStatus = true;
		}
		if (connectivityStatus) {
			String successMsg = "ConnectionTest Successful";
			BaseClient.generateResponseDataMap(connectivityStatus, successMsg, successMsg, null, null, responseData);
		} else {
			String failureMsg = "Unable to retrieve any Yarn Queues using given parameters.";
			BaseClient.generateResponseDataMap(connectivityStatus, failureMsg, failureMsg + errMsg, null, null, responseData);
		}
		return responseData;
	}

	/**
	 * Factory for YarnClient instances.
	 *
	 * @throws HadoopException (unchecked) when the config map is null/empty
	 */
	public static YarnClient getYarnClient(String serviceName,
			Map<String, String> configs) {
		YarnClient yarnClient = null;
		if (LOG.isDebugEnabled()) {
			LOG.debug("Getting YarnClient for datasource: " + serviceName);
		}
		String errMsg = errMessage;
		if (configs == null || configs.isEmpty()) {
			String msgDesc = "Could not connect as Connection ConfigMap is empty.";
			LOG.error(msgDesc);
			HadoopException hdpException = new HadoopException(msgDesc);
			hdpException.generateResponseDataMap(false, msgDesc, msgDesc + errMsg, null, null);
			throw hdpException;
		} else {
			yarnClient = new YarnClient (serviceName, configs);
		}
		return yarnClient;
	}

	/**
	 * Convenience wrapper around {@link #getQueueList}: trims the queue-name
	 * prefix and wraps unexpected failures in a HadoopException.
	 *
	 * @param yarnClient        client to query; must not be null
	 * @param yarnQname         queue-name prefix; a null prefix skips the lookup
	 *                          and returns an empty list
	 * @param existingQueueName queue names to exclude (may be null)
	 */
	public static List<String> getYarnResource (final YarnClient yarnClient,
			String yarnQname, List<String> existingQueueName) {
		List<String> resultList = new ArrayList<String>();
		String errMsg = errMessage;
		try {
			if (yarnClient == null) {
				String msgDesc = "Unable to get Yarn Queue : YarnClient is null.";
				LOG.error(msgDesc);
				HadoopException hdpException = new HadoopException(msgDesc);
				hdpException.generateResponseDataMap(false, msgDesc, msgDesc + errMsg, null, null);
				throw hdpException;
			}
			if (yarnQname != null) {
				String finalyarnQueueName = yarnQname.trim();
				resultList = yarnClient.getQueueList(finalyarnQueueName,existingQueueName);
				if (resultList != null) {
					if (LOG.isDebugEnabled()) {
						LOG.debug("Returning list of " + resultList.size() + " Yarn Queues");
					}
				}
			}
		} catch (HadoopException he) {
			throw he;
		} catch (Throwable t) {
			String msgDesc = "getYarnResource: Unable to get Yarn resources.";
			LOG.error(msgDesc, t);
			HadoopException hdpException = new HadoopException(msgDesc);
			hdpException.generateResponseDataMap(false, BaseClient.getMessage(t), msgDesc + errMsg, null, null);
			throw hdpException;
		}
		return resultList;
	}

	/**
	 * Runs the callable on the calling thread.
	 *
	 * NOTE(review): the {@code timeout}/{@code timeUnit} parameters are
	 * accepted but ignored — no time limit is enforced. Confirm whether a
	 * real timed execution (e.g. via an executor) was intended.
	 */
	public static <T> T timedTask(Callable<T> callableObj, long timeout,
			TimeUnit timeUnit) throws Exception {
		return callableObj.call();
	}
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.qpid.jms.integration; import static org.hamcrest.Matchers.equalTo; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.util.ArrayList; import java.util.List; import java.util.Random; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; import javax.jms.Connection; import javax.jms.JMSException; import javax.jms.Message; import javax.jms.MessageConsumer; import javax.jms.MessageListener; import javax.jms.Queue; import javax.jms.Session; import org.apache.qpid.jms.message.JmsMessageSupport; import org.apache.qpid.jms.test.QpidJmsTestCase; import org.apache.qpid.jms.test.testpeer.TestAmqpPeer; import org.apache.qpid.jms.test.testpeer.describedtypes.sections.AmqpValueDescribedType; import org.apache.qpid.jms.test.testpeer.matchers.AcceptedMatcher; import org.apache.qpid.jms.test.testpeer.matchers.ModifiedMatcher; import org.apache.qpid.jms.test.testpeer.matchers.RejectedMatcher; import org.apache.qpid.jms.test.testpeer.matchers.ReleasedMatcher; import 
org.apache.qpid.proton.amqp.UnsignedInteger;
import org.hamcrest.Matcher;
import org.hamcrest.Matchers;
import org.junit.Test;

/**
 * Integration tests for message acknowledgement against a scripted AMQP test
 * peer. Verifies which AMQP delivery outcome (accepted / rejected / released /
 * modified) the client emits for each value of the JMS_AMQP_ACK_TYPE message
 * property, in both synchronous-receive and MessageListener modes, plus
 * individual-acknowledge sessions.
 */
public class AmqpAcknowledgementsIntegrationTest extends QpidJmsTestCase {

    // Provider-specific session mode for individual message acknowledgement.
    private static final int INDIVIDUAL_ACK = 101;
    // Sentinel: do not set JMS_AMQP_ACK_TYPE at all (default outcome expected).
    private static final int SKIP = -1;
    // A JMS_AMQP_ACK_TYPE value no ack type maps to; must cause acknowledge() to throw.
    private static final int INVALID = 99;

    private final IntegrationTestFixture testFixture = new IntegrationTestFixture();

    /**
     * acknowledge() on a message whose session is closed must throw; the
     * peer still expects the modified(failed) disposition sent on close.
     */
    @Test(timeout = 20000)
    public void testAcknowledgeFailsAfterSessionIsClosed() throws Exception {
        try (TestAmqpPeer testPeer = new TestAmqpPeer();) {
            Connection connection = testFixture.establishConnecton(testPeer);
            connection.start();

            testPeer.expectBegin();
            Session session = connection.createSession(false, Session.CLIENT_ACKNOWLEDGE);
            Queue queue = session.createQueue("myQueue");

            testPeer.expectReceiverAttach();
            testPeer.expectLinkFlowRespondWithTransfer(null, null, null, null, new AmqpValueDescribedType(null), 1);

            MessageConsumer messageConsumer = session.createConsumer(queue);
            Message receivedMessage = messageConsumer.receive(6000);
            assertNotNull("Message was not recieved", receivedMessage);

            // Closing the session releases the undelivered credit; the client
            // settles the unacknowledged delivery as modified(delivery-failed).
            testPeer.expectDisposition(true, new ModifiedMatcher().withDeliveryFailed(equalTo(true)), 1, 1);
            testPeer.expectEnd();
            session.close();

            try {
                receivedMessage.acknowledge();
                fail("Should not be able to acknowledge the message after session closed");
            } catch (JMSException jmsex) {}

            testPeer.expectClose();
            connection.close();

            testPeer.waitForAllHandlersToComplete(3000);
        }
    }

    /** No ack type set: messages must be accepted by default. */
    @Test(timeout = 20000)
    public void testDefaultAcceptMessages() throws Exception {
        doTestAmqpAcknowledgementTestImpl(SKIP, new AcceptedMatcher(), false);
    }

    @Test(timeout = 20000)
    public void testRequestAcceptMessages() throws Exception {
        doTestAmqpAcknowledgementTestImpl(JmsMessageSupport.ACCEPTED, new AcceptedMatcher(), false);
    }

    @Test(timeout = 20000)
    public void testRequestRejectMessages() throws Exception {
        doTestAmqpAcknowledgementTestImpl(JmsMessageSupport.REJECTED, new RejectedMatcher(), false);
    }

    @Test(timeout = 20000)
    public void testRequestReleaseMessages() throws Exception {
        doTestAmqpAcknowledgementTestImpl(JmsMessageSupport.RELEASED, new ReleasedMatcher(), false);
    }

    /** clearProperties() before setting the ack type must not affect the outcome. */
    @Test(timeout = 20000)
    public void testRequestReleaseMessagesClearPropsFirst() throws Exception {
        doTestAmqpAcknowledgementTestImpl(JmsMessageSupport.RELEASED, new ReleasedMatcher(), true);
    }

    @Test(timeout = 20000)
    public void testRequestModifiedFailedMessages() throws Exception {
        doTestAmqpAcknowledgementTestImpl(JmsMessageSupport.MODIFIED_FAILED, new ModifiedMatcher().withDeliveryFailed(equalTo(true)), false);
    }

    @Test(timeout = 20000)
    public void testRequestModifiedFailedUndeliverableHereMessages() throws Exception {
        doTestAmqpAcknowledgementTestImpl(JmsMessageSupport.MODIFIED_FAILED_UNDELIVERABLE, new ModifiedMatcher().withDeliveryFailed(equalTo(true)).withUndeliverableHere(equalTo(true)), false);
    }

    /** An invalid ack type throws; messages are then accepted after correcting it. */
    @Test(timeout = 20000)
    public void testRequestAcknowledgeMessagesWithInvalidDisposition() throws Exception {
        doTestAmqpAcknowledgementTestImpl(INVALID, new AcceptedMatcher(), false);
    }

    /**
     * Synchronous-receive variant: receives msgCount messages, sets the given
     * JMS_AMQP_ACK_TYPE on the last one (unless SKIP), acknowledges, and lets
     * the peer verify one disposition per message matches descriptorMatcher.
     *
     * @param disposition       JMS_AMQP_ACK_TYPE value, SKIP, or INVALID
     * @param descriptorMatcher expected AMQP delivery-state descriptor
     * @param clearPropsFirst   whether to clearProperties() before setting the ack type
     */
    private void doTestAmqpAcknowledgementTestImpl(int disposition, Matcher<?> descriptorMatcher, boolean clearPropsFirst) throws Exception {
        try (TestAmqpPeer testPeer = new TestAmqpPeer();) {
            Connection connection = testFixture.establishConnecton(testPeer);
            connection.start();

            testPeer.expectBegin();
            Session session = connection.createSession(false, Session.CLIENT_ACKNOWLEDGE);
            Queue queue = session.createQueue("myQueue");

            int msgCount = 3;

            testPeer.expectReceiverAttach();
            testPeer.expectLinkFlowRespondWithTransfer(null, null, null, null, new AmqpValueDescribedType(null), msgCount);

            // CLIENT_ACKNOWLEDGE acks every delivered message, so one
            // disposition with the same outcome is expected per message.
            for (int i = 1; i <= msgCount; i++) {
                testPeer.expectDisposition(true, descriptorMatcher);
            }

            MessageConsumer messageConsumer = session.createConsumer(queue);
            Message lastReceivedMessage = null;
            for (int i = 1; i <= msgCount; i++) {
                lastReceivedMessage = messageConsumer.receive(6000);
                assertNotNull("Message " + i + " was not recieved", lastReceivedMessage);
            }

            if (disposition != SKIP) {
                if (clearPropsFirst) {
                    lastReceivedMessage.clearProperties();
                }

                lastReceivedMessage.setIntProperty(JmsMessageSupport.JMS_AMQP_ACK_TYPE, disposition);
            }

            if (disposition == INVALID) {
                // Invalid ack type must be rejected up front, before any
                // disposition is sent; then fix it and ack normally.
                try {
                    lastReceivedMessage.acknowledge();
                    fail("Should throw exception due to invalid ack type");
                } catch (JMSException jmsex) {}

                lastReceivedMessage.setIntProperty(JmsMessageSupport.JMS_AMQP_ACK_TYPE, JmsMessageSupport.ACCEPTED);
                lastReceivedMessage.acknowledge();
            } else {
                lastReceivedMessage.acknowledge();
            }

            testPeer.expectClose();
            connection.close();

            testPeer.waitForAllHandlersToComplete(3000);
        }
    }

    /** No ack type set: messages must be accepted by default (listener variant). */
    @Test(timeout = 20000)
    public void testDefaultAcceptMessagesWithMessageListener() throws Exception {
        doTestAmqpAcknowledgementAsyncTestImpl(SKIP, new AcceptedMatcher(), false);
    }

    @Test(timeout = 20000)
    public void testRequestAcceptMessagesWithMessageListener() throws Exception {
        doTestAmqpAcknowledgementAsyncTestImpl(JmsMessageSupport.ACCEPTED, new AcceptedMatcher(), false);
    }

    @Test(timeout = 20000)
    public void testRequestRejectMessagesWithMessageListener() throws Exception {
        doTestAmqpAcknowledgementAsyncTestImpl(JmsMessageSupport.REJECTED, new RejectedMatcher(), false);
    }

    @Test(timeout = 20000)
    public void testRequestReleaseMessagesWithMessageListener() throws Exception {
        doTestAmqpAcknowledgementAsyncTestImpl(JmsMessageSupport.RELEASED, new ReleasedMatcher(), false);
    }

    @Test(timeout = 20000)
    public void testRequestReleaseMessagesClearPropsFirstWithMessageListener() throws Exception {
        doTestAmqpAcknowledgementAsyncTestImpl(JmsMessageSupport.RELEASED, new ReleasedMatcher(), true);
    }

    @Test(timeout = 20000)
    public void testRequestModifiedFailedMessagesWithMessageListener() throws Exception {
        doTestAmqpAcknowledgementAsyncTestImpl(JmsMessageSupport.MODIFIED_FAILED, new ModifiedMatcher().withDeliveryFailed(equalTo(true)), false);
    }

    @Test(timeout = 20000)
    public void testRequestModifiedFailedUndeliverableHereMessagesWithMessageListener() throws Exception {
        doTestAmqpAcknowledgementAsyncTestImpl(JmsMessageSupport.MODIFIED_FAILED_UNDELIVERABLE, new ModifiedMatcher().withDeliveryFailed(equalTo(true)).withUndeliverableHere(equalTo(true)), false);
    }

    @Test(timeout = 20000)
    public void testRequestAcknowledgeMessagesWithInvalidDispositionWithMessageListener() throws Exception {
        doTestAmqpAcknowledgementAsyncTestImpl(INVALID, new AcceptedMatcher(), false);
    }

    /**
     * MessageListener variant of {@link #doTestAmqpAcknowledgementTestImpl}:
     * messages arrive asynchronously and are counted down via a latch; the
     * ack type is applied to the last one received before acknowledge().
     */
    private void doTestAmqpAcknowledgementAsyncTestImpl(int disposition, Matcher<?> descriptorMatcher, boolean clearPropsFirst) throws Exception {
        try (TestAmqpPeer testPeer = new TestAmqpPeer();) {
            Connection connection = testFixture.establishConnecton(testPeer);
            connection.start();

            testPeer.expectBegin();
            Session session = connection.createSession(false, Session.CLIENT_ACKNOWLEDGE);
            Queue queue = session.createQueue("myQueue");

            final int msgCount = 3;

            testPeer.expectReceiverAttach();
            testPeer.expectLinkFlowRespondWithTransfer(null, null, null, null, new AmqpValueDescribedType(null), msgCount);

            for (int i = 1; i <= msgCount; i++) {
                testPeer.expectDisposition(true, descriptorMatcher);
            }

            final CountDownLatch receiveCountDown = new CountDownLatch(msgCount);
            final AtomicReference<Message> lastReceivedMessage = new AtomicReference<Message>();
            MessageConsumer messageConsumer = session.createConsumer(queue);
            messageConsumer.setMessageListener(new MessageListener() {

                @Override
                public void onMessage(Message message) {
                    lastReceivedMessage.set(message);
                    receiveCountDown.countDown();
                }
            });

            assertTrue("Did not get all messages", receiveCountDown.await(10, TimeUnit.SECONDS));
            assertNotNull("Message was not received", lastReceivedMessage.get());

            if (disposition != SKIP) {
                if (clearPropsFirst) {
                    lastReceivedMessage.get().clearProperties();
                }

                lastReceivedMessage.get().setIntProperty(JmsMessageSupport.JMS_AMQP_ACK_TYPE, disposition);
            }

            if (disposition == INVALID) {
                try {
                    lastReceivedMessage.get().acknowledge();
                    fail("Should throw exception due to invalid ack type");
                } catch (JMSException jmsex) {}

                lastReceivedMessage.get().setIntProperty(JmsMessageSupport.JMS_AMQP_ACK_TYPE, JmsMessageSupport.ACCEPTED);
                lastReceivedMessage.get().acknowledge();
            } else {
                lastReceivedMessage.get().acknowledge();
            }

            testPeer.expectClose();
            connection.close();

            testPeer.waitForAllHandlersToComplete(3000);
        }
    }

    /**
     * Individual-acknowledge session: each message is acknowledged on its own,
     * in random order, with a random ack type, and the peer verifies each
     * delivery gets exactly the delivery state its ack type maps to.
     */
    @Test(timeout = 20000)
    public void testAcknowledgeIndividualMessages()  throws Exception {
        try (TestAmqpPeer testPeer = new TestAmqpPeer();) {
            Connection connection = testFixture.establishConnecton(testPeer);
            connection.start();

            testPeer.expectBegin();
            Session session = connection.createSession(INDIVIDUAL_ACK);
            Queue queue = session.createQueue("myQueue");

            int msgCount = 6;
            testPeer.expectReceiverAttach();
            testPeer.expectLinkFlowRespondWithTransfer(null, null, null, null, new AmqpValueDescribedType(null), msgCount, false, false,
                    Matchers.greaterThanOrEqualTo(UnsignedInteger.valueOf(msgCount)), 1, false, true);

            MessageConsumer messageConsumer = session.createConsumer(queue);

            List<Message> messages = new ArrayList<>();
            Message lastReceivedMessage = null;
            for (int i = 0; i < msgCount; i++) {
                lastReceivedMessage = messageConsumer.receive(3000);
                assertNotNull("Message " + i + " was not received", lastReceivedMessage);
                messages.add(lastReceivedMessage);

                assertEquals("unexpected message number property", i, lastReceivedMessage.getIntProperty(TestAmqpPeer.MESSAGE_NUMBER));
            }

            List<Integer> ackTypes = new ArrayList<>();
            ackTypes.add(SKIP);
            ackTypes.add(JmsMessageSupport.ACCEPTED);
            ackTypes.add(JmsMessageSupport.REJECTED);
            ackTypes.add(JmsMessageSupport.RELEASED);
            ackTypes.add(JmsMessageSupport.MODIFIED_FAILED);
            ackTypes.add(JmsMessageSupport.MODIFIED_FAILED_UNDELIVERABLE);

            // Indexed by ack-type constant (values 1..5); slot 0 is unused.
            Matcher<?>[] dispositionMatchers = new Matcher<?>[msgCount];
            dispositionMatchers[0] = null;
            dispositionMatchers[JmsMessageSupport.ACCEPTED] = new AcceptedMatcher();
            dispositionMatchers[JmsMessageSupport.REJECTED] = new RejectedMatcher();
            dispositionMatchers[JmsMessageSupport.RELEASED] = new ReleasedMatcher();
            dispositionMatchers[JmsMessageSupport.MODIFIED_FAILED] = new ModifiedMatcher().withDeliveryFailed(equalTo(true));
            dispositionMatchers[JmsMessageSupport.MODIFIED_FAILED_UNDELIVERABLE] = new ModifiedMatcher().withDeliveryFailed(equalTo(true)).withUndeliverableHere(equalTo(true));

            // Acknowledge the messages in a random order with random amqp ack type set (leaving one message without
            // any specific set, to check it accepts), verify the individual dispositions have expected delivery state.
            Random rand = new Random();
            for (int i = 0; i < msgCount; i++) {
                Message msg = messages.remove(rand.nextInt(msgCount - i));

                // Delivery ids are 1-based while MESSAGE_NUMBER is 0-based.
                int deliveryNumber =  msg.getIntProperty(TestAmqpPeer.MESSAGE_NUMBER) + 1;
                int ackType = ackTypes.remove(rand.nextInt(msgCount - i));

                if(ackType != SKIP) {
                    msg.setIntProperty(JmsMessageSupport.JMS_AMQP_ACK_TYPE, ackType);
                    testPeer.expectDisposition(true, dispositionMatchers[ackType], deliveryNumber, deliveryNumber);
                } else {
                    testPeer.expectDisposition(true, new AcceptedMatcher(), deliveryNumber, deliveryNumber);
                }

                msg.acknowledge();

                testPeer.waitForAllHandlersToComplete(3000);
            }

            testPeer.expectClose();
            connection.close();

            testPeer.waitForAllHandlersToComplete(3000);
        }
    }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.ingest.common;

import org.elasticsearch.ingest.IngestDocument;
import org.elasticsearch.ingest.Processor;
import org.elasticsearch.ingest.RandomDocumentPicks;
import org.elasticsearch.ingest.common.SortProcessor.SortOrder;
import org.elasticsearch.test.ESTestCase;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.function.Supplier;

import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;

/**
 * Tests for {@link SortProcessor}: sorting of list fields of various element
 * types in ascending/descending order, the target-field option, and the error
 * cases (non-list field, missing field, null value).
 */
public class SortProcessorTests extends ESTestCase {

    public void testSortStrings() throws Exception {
        doSortTest(() -> randomAlphaOfLengthBetween(1, 10));
    }

    /** Fixed (non-random) integers to pin the ascending order explicitly. */
    public void testSortIntegersNonRandom() throws Exception {
        IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());

        Integer[] expectedResult = new Integer[]{1,2,3,4,5,10,20,21,22,50,100};
        List<Integer> fieldValue = new ArrayList<>(Arrays.asList(expectedResult));
        Collections.shuffle(fieldValue, random());

        String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, fieldValue);
        Processor processor = new SortProcessor(randomAlphaOfLength(10), fieldName, SortOrder.ASCENDING, fieldName);
        processor.execute(ingestDocument);
        assertThat(ingestDocument.getFieldValue(fieldName, List.class).toArray(), equalTo(expectedResult));
    }

    public void testSortIntegers() throws Exception {
        doSortTest(() -> randomIntBetween(1, 100));
    }

    public void testSortShorts() throws Exception {
        doSortTest(this::randomShortValue);
    }

    public void testSortDoubles() throws Exception {
        doSortTest(() -> randomDoubleBetween(0.0, 100.0, true));
    }

    public void testSortFloats() throws Exception {
        doSortTest(ESTestCase::randomFloat);
    }

    public void testSortBytes() throws Exception {
        doSortTest(ESTestCase::randomByte);
    }

    public void testSortBooleans() throws Exception {
        doSortTest(ESTestCase::randomBoolean);
    }

    /** Mixed numeric-looking and alphabetic strings still sort lexicographically. */
    public void testSortMixedStrings() throws Exception {
        doSortTest(() -> randomBoolean()
                ? String.valueOf(randomIntBetween(0, 100))
                : randomAlphaOfLengthBetween(1, 10));
    }

    /** A scalar (non-list) field must be rejected with a clear message. */
    public void testSortNonListField() throws Exception {
        IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>());
        String fieldName = RandomDocumentPicks.randomFieldName(random());
        ingestDocument.setFieldValue(fieldName, randomAlphaOfLengthBetween(1, 10));
        SortOrder order = randomBoolean() ? SortOrder.ASCENDING : SortOrder.DESCENDING;
        Processor processor = new SortProcessor(randomAlphaOfLength(10), fieldName, order, fieldName);
        // expectThrows (rather than try/catch) ensures the test FAILS when no
        // exception is thrown; the previous try/catch passed silently in that case.
        Exception e = expectThrows(IllegalArgumentException.class, () -> processor.execute(ingestDocument));
        assertThat(e.getMessage(), equalTo("field [" + fieldName + "] of type [java.lang.String] cannot be cast to [java.util.List]"));
    }

    /** A missing field must be rejected with a path-not-present message. */
    public void testSortNonExistingField() throws Exception {
        IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>());
        String fieldName = RandomDocumentPicks.randomFieldName(random());
        SortOrder order = randomBoolean() ? SortOrder.ASCENDING : SortOrder.DESCENDING;
        Processor processor = new SortProcessor(randomAlphaOfLength(10), fieldName, order, fieldName);
        Exception e = expectThrows(IllegalArgumentException.class, () -> processor.execute(ingestDocument));
        assertThat(e.getMessage(), containsString("not present as part of path [" + fieldName + "]"));
    }

    /** A null field value must be rejected rather than NPE-ing. */
    public void testSortNullValue() throws Exception {
        IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.singletonMap("field", null));
        SortOrder order = randomBoolean() ? SortOrder.ASCENDING : SortOrder.DESCENDING;
        Processor processor = new SortProcessor(randomAlphaOfLength(10), "field", order, "field");
        Exception e = expectThrows(IllegalArgumentException.class, () -> processor.execute(ingestDocument));
        assertThat(e.getMessage(), equalTo("field [field] is null, cannot sort."));
    }

    /** Sorting into a separate target field leaves the source field untouched. */
    public void testSortWithTargetField() throws Exception {
        doSortTest(() -> randomAlphaOfLengthBetween(1, 10), true);
    }

    private Short randomShortValue() {
        return randomShort();
    }

    /** Runs a sort test writing the result back onto the source field. */
    private <T extends Comparable<? super T>> void doSortTest(Supplier<T> valueSupplier) throws Exception {
        doSortTest(valueSupplier, false);
    }

    /**
     * Shared scenario for the random-value sort tests: builds a random-length
     * list from {@code valueSupplier}, sorts a copy in a random order for the
     * expectation, runs the processor, and asserts the (target) field holds
     * the expected ordering.
     *
     * @param valueSupplier  produces one random element per call
     * @param useTargetField when true, write the sorted list to a separate
     *                       random target field instead of in place
     */
    private <T extends Comparable<? super T>> void doSortTest(Supplier<T> valueSupplier, boolean useTargetField) throws Exception {
        IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
        int numItems = randomIntBetween(1, 10);
        List<T> fieldValue = new ArrayList<>(numItems);
        List<T> expectedResult = new ArrayList<>(numItems);
        for (int j = 0; j < numItems; j++) {
            T value = valueSupplier.get();
            fieldValue.add(value);
            expectedResult.add(value);
        }
        Collections.sort(expectedResult);

        SortOrder order = randomBoolean() ? SortOrder.ASCENDING : SortOrder.DESCENDING;
        if (order.equals(SortOrder.DESCENDING)) {
            Collections.reverse(expectedResult);
        }

        String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, fieldValue);
        String targetFieldName = useTargetField ? RandomDocumentPicks.randomFieldName(random()) : fieldName;
        Processor processor = new SortProcessor(randomAlphaOfLength(10), fieldName, order, targetFieldName);
        processor.execute(ingestDocument);
        assertEquals(ingestDocument.getFieldValue(targetFieldName, List.class), expectedResult);
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.druid.server;

import com.google.common.base.Function;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.common.collect.Maps;
import com.google.inject.Inject;
import com.sun.jersey.spi.container.ResourceFilters;
import org.apache.druid.client.DruidDataSource;
import org.apache.druid.client.FilteredServerInventoryView;
import org.apache.druid.client.ServerViewUtil;
import org.apache.druid.client.TimelineServerView;
import org.apache.druid.client.selector.ServerSelector;
import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.java.util.common.JodaUtils;
import org.apache.druid.java.util.common.logger.Logger;
import org.apache.druid.query.LocatedSegmentDescriptor;
import org.apache.druid.query.TableDataSource;
import org.apache.druid.query.metadata.SegmentMetadataQueryConfig;
import org.apache.druid.server.http.security.DatasourceResourceFilter;
import org.apache.druid.server.security.AuthConfig;
import org.apache.druid.server.security.AuthorizationUtils;
import org.apache.druid.server.security.AuthorizerMapper;
import org.apache.druid.server.security.ResourceAction;
import org.apache.druid.timeline.DataSegment;
import org.apache.druid.timeline.TimelineLookup;
import org.apache.druid.timeline.TimelineObjectHolder;
import org.apache.druid.timeline.partition.PartitionHolder;
import org.joda.time.DateTime;
import org.joda.time.Interval;

import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.stream.Collectors;
import java.util.stream.Stream;

/**
 * HTTP resource exposing datasource metadata derived from the broker's view of
 * the cluster: datasource names, dimensions/metrics per interval, and candidate
 * server locations for query intervals. Rooted at {@code /druid/v2/datasources}.
 */
@Path("/druid/v2/datasources")
public class ClientInfoResource
{
  private static final Logger log = new Logger(ClientInfoResource.class);

  private static final String KEY_DIMENSIONS = "dimensions";
  private static final String KEY_METRICS = "metrics";

  private FilteredServerInventoryView serverInventoryView;
  private TimelineServerView timelineServerView;
  private SegmentMetadataQueryConfig segmentMetadataQueryConfig;
  private final AuthConfig authConfig;
  private final AuthorizerMapper authorizerMapper;

  @Inject
  public ClientInfoResource(
      FilteredServerInventoryView serverInventoryView,
      TimelineServerView timelineServerView,
      SegmentMetadataQueryConfig segmentMetadataQueryConfig,
      AuthConfig authConfig,
      AuthorizerMapper authorizerMapper
  )
  {
    this.serverInventoryView = serverInventoryView;
    this.timelineServerView = timelineServerView;
    // Fall back to the defaults when no explicit config is bound.
    this.segmentMetadataQueryConfig = (segmentMetadataQueryConfig == null)
                                      ? new SegmentMetadataQueryConfig()
                                      : segmentMetadataQueryConfig;
    this.authConfig = authConfig;
    this.authorizerMapper = authorizerMapper;
  }

  /**
   * Lists all datasource names visible in the server inventory, filtered down
   * to those the requesting user is authorized to read.
   */
  @GET
  @Produces(MediaType.APPLICATION_JSON)
  public Iterable<String> getDataSources(@Context final HttpServletRequest request)
  {
    Function<String, Iterable<ResourceAction>> raGenerator = datasourceName -> {
      return Collections.singletonList(AuthorizationUtils.DATASOURCE_READ_RA_GENERATOR.apply(datasourceName));
    };

    return AuthorizationUtils.filterAuthorizedResources(
        request,
        getAllDataSources(),
        raGenerator,
        authorizerMapper
    );
  }

  /** Collects the distinct datasource names served by any server in the inventory. */
  private Set<String> getAllDataSources()
  {
    return serverInventoryView
        .getInventory()
        .stream()
        .flatMap(server -> server.getDataSources().stream().map(DruidDataSource::getName))
        .collect(Collectors.toSet());
  }

  /**
   * Returns dimensions and metrics for a datasource. Without {@code full}, a
   * single map over the whole (or given) interval; with {@code full}, a map of
   * condensed served intervals to their dimension/metric sets.
   *
   * @param dataSourceName datasource to describe
   * @param interval       optional ISO interval; defaults to the configured default history ending now
   * @param full           any non-null value requests the per-interval breakdown
   */
  @GET
  @Path("/{dataSourceName}")
  @Produces(MediaType.APPLICATION_JSON)
  @ResourceFilters(DatasourceResourceFilter.class)
  public Map<String, Object> getDatasource(
      @PathParam("dataSourceName") String dataSourceName,
      @QueryParam("interval") String interval,
      @QueryParam("full") String full
  )
  {
    if (full == null) {
      return ImmutableMap.of(
          KEY_DIMENSIONS, getDataSourceDimensions(dataSourceName, interval),
          KEY_METRICS, getDataSourceMetrics(dataSourceName, interval)
      );
    }

    Interval theInterval;
    if (interval == null || interval.isEmpty()) {
      DateTime now = getCurrentTime();
      theInterval = new Interval(segmentMetadataQueryConfig.getDefaultHistory(), now);
    } else {
      theInterval = Intervals.of(interval);
    }

    TimelineLookup<String, ServerSelector> timeline = timelineServerView.getTimeline(new TableDataSource(dataSourceName));
    Iterable<TimelineObjectHolder<String, ServerSelector>> serversLookup = timeline != null ? timeline.lookup(
        theInterval
    ) : null;
    if (serversLookup == null || Iterables.isEmpty(serversLookup)) {
      // Typed empty map instead of the raw Collections.EMPTY_MAP constant.
      return Collections.emptyMap();
    }
    // NOTE: this comparator deliberately treats overlapping intervals as equal so
    // they collapse to a single TreeMap entry; it is not a total order in the
    // Comparator-contract sense, but timeline holders do not overlap in practice.
    Map<Interval, Object> servedIntervals = new TreeMap<>(
        (o1, o2) -> {
          if (o1.equals(o2) || o1.overlaps(o2)) {
            return 0;
          } else {
            return o1.isBefore(o2) ? -1 : 1;
          }
        }
    );

    for (TimelineObjectHolder<String, ServerSelector> holder : serversLookup) {
      final Set<Object> dimensions = new HashSet<>();
      final Set<Object> metrics = new HashSet<>();
      final PartitionHolder<ServerSelector> partitionHolder = holder.getObject();
      // Only report intervals whose partition set is fully available.
      if (partitionHolder.isComplete()) {
        for (ServerSelector server : partitionHolder.payloads()) {
          final DataSegment segment = server.getSegment();
          dimensions.addAll(segment.getDimensions());
          metrics.addAll(segment.getMetrics());
        }
      }
      servedIntervals.put(
          holder.getInterval(),
          ImmutableMap.of(KEY_DIMENSIONS, dimensions, KEY_METRICS, metrics)
      );
    }

    // Collapse intervals if they abut and have same set of columns.
    Map<String, Object> result = Maps.newLinkedHashMap();
    Interval curr = null;
    Map<String, Set<String>> cols = null;
    for (Map.Entry<Interval, Object> e : servedIntervals.entrySet()) {
      Interval ival = e.getKey();
      if (curr != null && curr.abuts(ival) && cols.equals(e.getValue())) {
        // Same columns and contiguous: extend the current run.
        curr = curr.withEnd(ival.getEnd());
      } else {
        if (curr != null) {
          result.put(curr.toString(), cols);
        }
        curr = ival;
        // Erased cast: values were built above as ImmutableMap<String, Set<...>>.
        cols = (Map<String, Set<String>>) e.getValue();
      }
    }
    // Add the last one in.
    if (curr != null) {
      result.put(curr.toString(), cols);
    }
    return result;
  }

  /**
   * Returns the union of dimension names across segments of the datasource that
   * overlap the given (or default) interval.
   *
   * @deprecated use {@link #getDatasource} instead
   */
  @Deprecated
  @GET
  @Path("/{dataSourceName}/dimensions")
  @Produces(MediaType.APPLICATION_JSON)
  @ResourceFilters(DatasourceResourceFilter.class)
  public Iterable<String> getDataSourceDimensions(
      @PathParam("dataSourceName") String dataSourceName,
      @QueryParam("interval") String interval
  )
  {
    final Set<DataSegment> segments = getAllSegmentsForDataSource(dataSourceName);

    final Interval theInterval;
    if (interval == null || interval.isEmpty()) {
      DateTime now = getCurrentTime();
      theInterval = new Interval(segmentMetadataQueryConfig.getDefaultHistory(), now);
    } else {
      theInterval = Intervals.of(interval);
    }

    final Set<String> dims = new HashSet<>();
    for (DataSegment segment : segments) {
      if (theInterval.overlaps(segment.getInterval())) {
        dims.addAll(segment.getDimensions());
      }
    }

    return dims;
  }

  /**
   * Returns the union of metric names across segments of the datasource that
   * overlap the given (or default) interval.
   *
   * @deprecated use {@link #getDatasource} instead
   */
  @Deprecated
  @GET
  @Path("/{dataSourceName}/metrics")
  @Produces(MediaType.APPLICATION_JSON)
  @ResourceFilters(DatasourceResourceFilter.class)
  public Iterable<String> getDataSourceMetrics(
      @PathParam("dataSourceName") String dataSourceName,
      @QueryParam("interval") String interval
  )
  {
    final Set<DataSegment> segments = getAllSegmentsForDataSource(dataSourceName);

    final Interval theInterval;
    if (interval == null || interval.isEmpty()) {
      DateTime now = getCurrentTime();
      theInterval = new Interval(segmentMetadataQueryConfig.getDefaultHistory(), now);
    } else {
      theInterval = Intervals.of(interval);
    }

    final Set<String> metrics = new HashSet<>();
    for (DataSegment segment : segments) {
      if (theInterval.overlaps(segment.getInterval())) {
        metrics.addAll(segment.getMetrics());
      }
    }

    return metrics;
  }

  /** Collects every segment of the named datasource across all inventory servers. */
  private Set<DataSegment> getAllSegmentsForDataSource(String dataSourceName)
  {
    return serverInventoryView
        .getInventory()
        .stream()
        .flatMap(server -> {
          DruidDataSource dataSource = server.getDataSource(dataSourceName);
          if (dataSource == null) {
            return Stream.empty();
          }
          return dataSource.getSegments().stream();
        })
        .collect(Collectors.toSet());
  }

  /**
   * Returns candidate server locations for querying the given comma-separated
   * intervals of a datasource, condensed and capped at {@code numCandidates}
   * per segment (-1 for unlimited).
   */
  @GET
  @Path("/{dataSourceName}/candidates")
  @Produces(MediaType.APPLICATION_JSON)
  @ResourceFilters(DatasourceResourceFilter.class)
  public Iterable<LocatedSegmentDescriptor> getQueryTargets(
      @PathParam("dataSourceName") String datasource,
      @QueryParam("intervals") String intervals,
      @QueryParam("numCandidates") @DefaultValue("-1") int numCandidates,
      @Context final HttpServletRequest req
  )
  {
    List<Interval> intervalList = new ArrayList<>();
    for (String interval : intervals.split(",")) {
      intervalList.add(Intervals.of(interval.trim()));
    }
    List<Interval> condensed = JodaUtils.condenseIntervals(intervalList);
    return ServerViewUtil.getTargetLocations(timelineServerView, datasource, condensed, numCandidates);
  }

  // Overridable for tests that need a fixed clock.
  protected DateTime getCurrentTime()
  {
    return DateTimes.nowUtc();
  }
}
/**
 * Copyright 2007-2016, Kaazing Corporation. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.kaazing.gateway.management.gateway;

import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.mina.core.buffer.IoBuffer;
import org.apache.mina.core.write.WriteRequest;
import org.apache.mina.util.CopyOnWriteMap;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.kaazing.gateway.management.AbstractManagementBean;
import org.kaazing.gateway.management.ClusterManagementListener;
import org.kaazing.gateway.management.ManagementService;
import org.kaazing.gateway.management.Utils.ManagementSessionType;
import org.kaazing.gateway.management.context.ManagementContext;
import org.kaazing.gateway.management.update.check.ManagementUpdateCheck;
import org.kaazing.gateway.management.update.check.ManagementUpdateCheckFactory;
import org.kaazing.gateway.server.context.GatewayContext;
import org.kaazing.gateway.server.impl.VersionUtils;
import org.kaazing.gateway.service.cluster.ClusterContext;
import org.kaazing.gateway.service.cluster.MemberId;
import org.kaazing.gateway.service.cluster.MembershipEventListener;
import org.kaazing.gateway.service.collections.CollectionsFactory;
import org.kaazing.gateway.service.http.balancer.HttpBalancerService;
import org.kaazing.mina.netty.util.threadlocal.VicariousThreadLocal;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.hazelcast.core.EntryEvent;
import com.hazelcast.core.EntryListener;
import com.hazelcast.core.MapEvent;

/**
 * Implementation of the management 'data' bean for a session. This just contains the data. Wrappers for different management
 * protocols define the use of those data.
 */
public class GatewayManagementBeanImpl extends AbstractManagementBean
        implements GatewayManagementBean, MembershipEventListener, EntryListener<MemberId, Collection<String>> {

    private static final Logger logger = LoggerFactory.getLogger(GatewayManagementBeanImpl.class);

    // Each IO worker thread gets a ThreadServiceStats object via get().
    private final ThreadLocal<ThreadGatewayStats> gatewayStats =
            new VicariousThreadLocal<ThreadGatewayStats>() {
                @Override
                protected ThreadGatewayStats initialValue() {
                    ThreadGatewayStats stats = new ThreadGatewayStats();
                    gatewayStatsMap.put(Thread.currentThread(), stats);
                    return stats;
                }
            };

    // Map of the per-thread thread-local stats objects. Keyed on thread ID.
    private final CopyOnWriteMap<Thread, ThreadGatewayStats> gatewayStatsMap = new CopyOnWriteMap<>();

    // A dynamic Gateway "id". For customer usefulness, we'll make this
    // the string <hostname>:<pid>, where <hostname> is the hostname of the
    // processor this gateway process is running on, and <pid> is the process ID
    // of the gateway process. These can both be determined here.
    private final String hostAndPid;

    private GatewayContext gatewayContext;

    // fields from VersionInfo
    private String productTitle;
    private String productBuild;
    private String productEdition;

    private final long startTime;

    private ClusterContext clusterContext;
    private final List<ClusterManagementListener> clusterManagementListeners;

    // Keep a unique index number for each gateway instance, as we can use
    // it in SNMP for an OID, and it might be useful elsewhere if we decide
    // we want to use it in place of some map key or something. The SNMP
    // support for sessions also depends on knowing this value.
    private static final AtomicInteger maxGatewayIndex = new AtomicInteger(1);

    private final int id;

    private final ManagementUpdateCheck updateChecker;

    public GatewayManagementBeanImpl(ManagementContext managementContext, GatewayContext gatewayContext, String hostAndPid) {
        super(managementContext, managementContext.getGatewaySummaryDataNotificationInterval(), SUMMARY_DATA_FIELD_LIST);

        // FIXME: every gateway ends up with id = 1 based on the next line of code, instead id should be from some cluster
        // info...
        this.id = maxGatewayIndex.getAndIncrement(); // may use in various wrappers
        this.hostAndPid = hostAndPid;
        this.startTime = System.currentTimeMillis();
        this.clusterManagementListeners = new ArrayList<>();
        this.gatewayContext = gatewayContext;
        this.productTitle = VersionUtils.getGatewayProductTitle();
        this.productBuild = VersionUtils.getGatewayProductVersionBuild();
        this.productEdition = VersionUtils.getGatewayProductEdition();
        ManagementUpdateCheck updateCheckerLookup;
        try {
            // TODO, force update check should really only be available from the service bean and then with fallback with console
            // going directly to the source itself. Before we do that we should refactor the SNMP tables to allow for services
            // to offer componentized services
            updateCheckerLookup = ManagementUpdateCheckFactory.newManagementUpdateCheckFactory().newUpdateCheck("http");
        } catch (IllegalArgumentException e) {
            // No update-check provider available; update-version queries return "".
            updateCheckerLookup = null;
        }
        this.updateChecker = updateCheckerLookup;
    }

    @Override
    public int getId() {
        return id;
    }

    @Override
    public String getHostAndPid() {
        return hostAndPid;
    }

    @Override
    public String getProductTitle() {
        return productTitle;
    }

    @Override
    public String getProductBuild() {
        return productBuild;
    }

    @Override
    public String getProductEdition() {
        return productEdition;
    }

    // The following totals sum the per-IO-thread counters; they run off the IO threads.

    @Override
    public long getTotalCurrentSessions() {
        long total = 0;
        for (ThreadGatewayStats stats : gatewayStatsMap.values()) {
            total += stats.getTotalCurrentSessions();
        }
        return total;
    }

    @Override
    public long getTotalBytesReceived() {
        long total = 0;
        for (ThreadGatewayStats stats : gatewayStatsMap.values()) {
            total += stats.getTotalBytesReceived();
        }
        return total;
    }

    @Override
    public long getTotalBytesSent() {
        long total = 0;
        for (ThreadGatewayStats stats : gatewayStatsMap.values()) {
            total += stats.getTotalBytesSent();
        }
        return total;
    }

    @Override
    public long getTotalExceptions() {
        long total = 0;
        for (ThreadGatewayStats stats : gatewayStatsMap.values()) {
            total += stats.getTotalExceptions();
        }
        return total;
    }

    @Override
    public long getUptime() {
        return System.currentTimeMillis() - startTime;
    }

    @Override
    public long getStartTime() {
        return startTime;
    }

    @Override
    public String getInstanceKey() {
        // NOTE: the ClusterContext pointed to here is always present, even if we're a singleton
        // instead of defining a <cluster> element in the config. The clusterContext that's a
        // member variable of this bean is set from outside, and is not set unless we have a
        // real cluster config.
        ClusterContext context = gatewayContext.getCluster();
        String instanceKey = context.getInstanceKey(context.getLocalMember());
        return instanceKey;
    }

    @Override
    public void setClusterContext(ClusterContext clusterContext) {
        this.clusterContext = clusterContext;
        if (clusterContext != null) {
            clusterContext.addMembershipEventListener(this);
        }
    }

    /**
     * Returns a JSON object mapping each cluster member's instance key to its
     * balancer-URI -> balancee-URIs map, or "" when not clustered.
     */
    @Override
    public String getClusterMembers() {
        if (clusterContext == null) {
            return "";
        }
        CollectionsFactory factory = clusterContext.getCollectionsFactory();
        Collection<MemberId> memberIds = clusterContext.getMemberIds();
        Map<MemberId, Map<String, List<String>>> memberIdBalancerMap = factory
                .getMap(HttpBalancerService.MEMBERID_BALANCER_MAP_NAME);
        JSONObject jsonObj = new JSONObject();
        try {
            for (MemberId memberId : memberIds) {
                String instanceKey = clusterContext.getInstanceKey(memberId);
                Map<String, List<String>> balancerURIMap = memberIdBalancerMap.get(memberId);
                if (balancerURIMap != null) {
                    JSONObject uriMap = new JSONObject();
                    for (String balancerURI : balancerURIMap.keySet()) {
                        List<String> balanceeURIs = balancerURIMap.get(balancerURI);
                        JSONArray jsonArray = new JSONArray();
                        for (String balanceeURI : balanceeURIs) {
                            jsonArray.put(balanceeURI);
                        }
                        uriMap.put(balancerURI, jsonArray);
                    }
                    jsonObj.put(instanceKey, uriMap);
                } else {
                    jsonObj.put(instanceKey, JSONObject.NULL);
                }
            }
        } catch (JSONException ex) {
            // We know the values are valid, we should not be able to get to here.
            // Preserve the cause so an impossible failure is still diagnosable.
            throw new RuntimeException("Error inserting balancer URIs for cluster members into JSON object", ex);
        }
        return jsonObj.toString();
    }

    /**
     * Returns a JSON object mapping each member's instance key to its management
     * service accept URIs, or "" when not clustered or the map is empty.
     */
    @Override
    public String getManagementServiceMap() {
        if (clusterContext == null) {
            return "";
        }
        CollectionsFactory factory = clusterContext.getCollectionsFactory();
        Map<MemberId, Collection<String>> managementServices = factory.getMap(ManagementService.MANAGEMENT_SERVICE_MAP_NAME);
        if ((managementServices == null) || managementServices.isEmpty()) {
            return "";
        }
        JSONObject jsonObj = new JSONObject();
        try {
            for (MemberId member : managementServices.keySet()) {
                String instanceKey = clusterContext.getInstanceKey(member);
                JSONArray jsonArray = new JSONArray();
                Collection<String> acceptURIs = managementServices.get(member);
                if (acceptURIs != null) {
                    for (String acceptURI : acceptURIs) {
                        jsonArray.put(acceptURI);
                    }
                }
                jsonObj.put(instanceKey, jsonArray);
            }
        } catch (JSONException ex) {
            // We know the values are valid, we should not be able to get to here.
            // Preserve the cause so an impossible failure is still diagnosable.
            throw new RuntimeException("Error inserting acceptURIs for management services into JSON array", ex);
        }
        return jsonObj.toString();
    }

    /**
     * Returns a JSON object mapping each cluster balancer URI to its balancee
     * URIs, or "" when not clustered or the balancer map is empty.
     */
    @Override
    public String getClusterBalancerMap() {
        if (clusterContext == null) {
            return "";
        }
        CollectionsFactory factory = clusterContext.getCollectionsFactory();
        Map<String, Collection<String>> balancers = factory.getMap(HttpBalancerService.BALANCER_MAP_NAME);
        if ((balancers == null) || balancers.isEmpty()) {
            return "";
        }
        JSONObject jsonObj = new JSONObject();
        try {
            for (String uri : balancers.keySet()) {
                Collection<String> balancees = balancers.get(uri);
                if (balancees != null && !balancees.isEmpty()) {
                    JSONArray jsonArray = new JSONArray();
                    for (String balanceeURI : balancees) {
                        jsonArray.put(balanceeURI);
                    }
                    jsonObj.put(uri, jsonArray);
                } else {
                    jsonObj.put(uri, JSONObject.NULL);
                }
            }
        } catch (JSONException ex) {
            // We know the values are valid, we should not be able to get to here.
            // Preserve the cause so an impossible failure is still diagnosable.
            throw new RuntimeException("Error inserting balanceeURIs for balancerURIs into JSON array", ex);
        }
        return jsonObj.toString();
    }

    @Override
    public void addClusterManagementListener(ClusterManagementListener listener) {
        listener.setGatewayBean(this);
        clusterManagementListeners.add(listener);
    }

    /**
     * Returns the gateway summary counters (sessions, bytes in/out, exceptions,
     * available update version) as a JSON array string.
     */
    @Override
    public String getSummaryData() {
        JSONArray jsonArray = null;
        try {
            Object[] vals = new Object[SUMMARY_DATA_FIELD_LIST.length];

            vals[SUMMARY_DATA_TOTAL_CURRENT_SESSIONS_INDEX] = 0L;
            vals[SUMMARY_DATA_TOTAL_BYTES_RECEIVED_INDEX] = 0L;
            vals[SUMMARY_DATA_TOTAL_BYTES_SENT_INDEX] = 0L;
            vals[SUMMARY_DATA_TOTAL_EXCEPTIONS_INDEX] = 0L;

            for (ThreadGatewayStats stats : gatewayStatsMap.values()) {
                stats.collectSummaryValues(vals);
            }

            jsonArray = new JSONArray(vals);
        } catch (JSONException ex) {
            // We should never be able to get here, as the summary data values are all legal
        }
        // Guard the (theoretically unreachable) exception path so we never NPE
        // on jsonArray.toString().
        return jsonArray == null ? "" : jsonArray.toString();
    }

    private List<GatewayManagementListener> getManagementListeners() {
        return managementContext.getGatewayManagementListeners();
    }

    @Override
    public void memberAdded(MemberId newMember) {
        // Removed the listener 'membershipChanged' notification to instanceKeyAdded
    }

    @Override
    public void memberRemoved(MemberId removedMember) {
        CollectionsFactory factory = clusterContext.getCollectionsFactory();
        Map<MemberId, Collection<String>> managementServiceUriMap = factory.
                getMap(ManagementService.MANAGEMENT_SERVICE_MAP_NAME);
        managementServiceUriMap.remove(removedMember);
    }

    @Override
    public void entryAdded(EntryEvent<MemberId, Collection<String>> event) {
        MemberId memberId = event.getKey();
        String instanceKey = clusterContext.getInstanceKey(memberId);
        for (ClusterManagementListener listener : clusterManagementListeners) {
            listener.managementServicesChanged("add", instanceKey, event.getValue());
        }
    }

    @Override
    public void entryEvicted(EntryEvent<MemberId, Collection<String>> event) {
        // this listener is here to track when new management services are added, so we can ignore this
    }

    @Override
    public void entryRemoved(EntryEvent<MemberId, Collection<String>> event) {
        // this listener is here to track when new management services are added, so we can ignore this
    }

    @Override
    public void entryUpdated(EntryEvent<MemberId, Collection<String>> event) {
        // this listener is here to track when new management services are added, so we can ignore this
    }

    @Override
    public void mapCleared(MapEvent event) {
        // this listener is here to track when new management services are added, so we can ignore this
    }

    @Override
    public void mapEvicted(MapEvent event) {
        // this listener is here to track when new management services are added, so we can ignore this
    }

    // Implement various methods needed by the strategy objects.

    // This must run ON the IO thread
    @Override
    public void doSessionCreated(final long sessionId, final ManagementSessionType managementSessionType) throws Exception {
        ThreadGatewayStats stats = gatewayStats.get();
        stats.doSessionCreated();
    }

    /**
     * Notify the management listeners on a sessionCreated.
     * <p/>
     * NOTE: this starts on the IO thread, but runs a task OFF the thread.
     */
    @Override
    public void doSessionCreatedListeners(final long sessionId, final ManagementSessionType managementSessionType) {
        runManagementTask(new Runnable() {
            @Override
            public void run() {
                try {
                    // The particular management listeners change on strategy, so get them here.
                    for (final GatewayManagementListener listener : getManagementListeners()) {
                        listener.doSessionCreated(GatewayManagementBeanImpl.this, sessionId);
                    }

                    markChanged();  // mark ourselves as changed, possibly tell listeners
                } catch (Exception ex) {
                    logger.warn("Error during sessionCreated gateway listener notifications:", ex);
                }
            }
        });
    }

    // This must run ON the IO thread
    @Override
    public void doSessionClosed(final long sessionId, final ManagementSessionType managementSessionType) throws Exception {
        ThreadGatewayStats stats = gatewayStats.get();
        stats.doSessionClosed();
    }

    /**
     * Notify the management listeners on a sessionClosed.
     * <p/>
     * NOTE: this starts on the IO thread, but runs a task OFF the thread.
     */
    @Override
    public void doSessionClosedListeners(final long sessionId, final ManagementSessionType managementSessionType) {
        runManagementTask(new Runnable() {
            @Override
            public void run() {
                try {
                    // The particular management listeners change on strategy, so get them here.
                    for (final GatewayManagementListener listener : getManagementListeners()) {
                        listener.doSessionClosed(GatewayManagementBeanImpl.this, sessionId);
                    }

                    markChanged();  // mark ourselves as changed, possibly tell listeners
                } catch (Exception ex) {
                    logger.warn("Error during sessionClosed gateway listener notifications:", ex);
                }
            }
        });
    }

    // This must run ON the IO thread
    @Override
    public void doMessageReceived(final long sessionId, final long sessionReadBytes, final Object message) throws Exception {
        // BUGFIX: was 'message instanceof ByteBuffer' followed by a cast to
        // IoBuffer, which are unrelated types: the cast would always throw
        // ClassCastException when the branch was taken. Test the type actually
        // cast to (matching doFilterWrite below).
        if (message instanceof IoBuffer) {
            ThreadGatewayStats stats = gatewayStats.get();
            stats.doMessageReceived((IoBuffer) message);
        }
    }

    /**
     * Notify the management listeners on a messageReceived.
     * <p/>
     * NOTE: this starts on the IO thread, but runs a task OFF the thread.
     */
    @Override
    public void doMessageReceivedListeners(final long sessionId, final long sessionReadBytes, final Object message) {
        runManagementTask(new Runnable() {
            @Override
            public void run() {
                try {
                    // The particular management listeners change on strategy, so get them here.
                    for (final GatewayManagementListener listener : getManagementListeners()) {
                        listener.doMessageReceived(GatewayManagementBeanImpl.this, sessionId);
                    }

                    markChanged();  // mark ourselves as changed, possibly tell listeners
                } catch (Exception ex) {
                    logger.warn("Error during messageReceived gateway listener notifications:", ex);
                }
            }
        });
    }

    // This must run ON the IO thread
    @Override
    public void doFilterWrite(final long sessionId, final long sessionWrittenBytes, final WriteRequest writeRequest)
            throws Exception {
        Object message = writeRequest.getMessage();
        if (message instanceof IoBuffer) {
            ThreadGatewayStats stats = gatewayStats.get();
            stats.doFilterWrite((IoBuffer) message);
        }
    }

    /**
     * Notify the management listeners on a filterWrite.
     * <p/>
     * NOTE: this starts on the IO thread, but runs a task OFF the thread.
     */
    @Override
    public void doFilterWriteListeners(final long sessionId, final long sessionWrittenBytes, final WriteRequest writeRequest) {
        runManagementTask(new Runnable() {
            @Override
            public void run() {
                try {
                    // The particular management listeners change on strategy, so get them here.
                    for (final GatewayManagementListener listener : getManagementListeners()) {
                        listener.doFilterWrite(GatewayManagementBeanImpl.this, sessionId);
                    }

                    markChanged();  // mark ourselves as changed, possibly tell listeners
                } catch (Exception ex) {
                    logger.warn("Error during filterWrite gateway listener notifications:", ex);
                }
            }
        });
    }

    // This must run ON the IO thread
    @Override
    public void doExceptionCaught(final long sessionId, final Throwable cause) throws Exception {
        ThreadGatewayStats stats = gatewayStats.get();
        stats.doExceptionCaught();
    }

    /**
     * Notify the management listeners on an exceptionCaught.
     * <p/>
     * NOTE: this starts on the IO thread, but runs a task OFF the thread.
     */
    @Override
    public void doExceptionCaughtListeners(final long sessionId, final Throwable cause) {
        runManagementTask(new Runnable() {
            @Override
            public void run() {
                try {
                    // The particular management listeners change on strategy, so get them here.
                    for (final GatewayManagementListener listener : getManagementListeners()) {
                        listener.doExceptionCaught(GatewayManagementBeanImpl.this, sessionId);
                    }

                    markChanged();  // mark ourselves as changed, possibly tell listeners
                } catch (Exception ex) {
                    logger.warn("Error during exceptionCaught gateway listener notifications:", ex);
                }
            }
        });
    }

    /**
     * Ongoing service statistics. There is an instance of this class per worker thread, stored as a ThreadLocal on the thread,
     * with a reference to it in a CopyOnWriteMap stored here in ServiceManagementBeanImpl so we can do insertions of stats
     * objects into the map without locks.
     * <p/>
     * Non-static on purpose: collectSummaryValues() reads the enclosing bean's
     * getAvailableUpdateVersion().
     */
    private class ThreadGatewayStats {
        private long totalCurrentSessions;
        private long totalBytesReceived;
        private long totalBytesSent;
        private long totalExceptions;

        // The do* mutators below must only be called ON an IO thread (the one
        // this structure belongs to); each returns the updated counter.

        public long doSessionCreated() {
            totalCurrentSessions++;
            return totalCurrentSessions;
        }

        public long doSessionClosed() {
            totalCurrentSessions--;
            return totalCurrentSessions;
        }

        public long doMessageReceived(IoBuffer buf) {
            totalBytesReceived += buf.remaining();
            return totalBytesReceived;
        }

        public long doFilterWrite(IoBuffer buf) {
            totalBytesSent += buf.remaining();
            return totalBytesSent;
        }

        public long doExceptionCaught() {
            totalExceptions++;
            return totalExceptions;
        }

        // For use by the 'summation' methods. All of these try to create a future to run on
        // the IO worker thread associated with the map.

        // This runs OFF any IO worker thread
        public long getTotalCurrentSessions() {
            return totalCurrentSessions;
        }

        // This runs OFF any IO worker thread
        public long getTotalBytesReceived() {
            return totalBytesReceived;
        }

        // This runs OFF any IO worker thread
        public long getTotalBytesSent() {
            return totalBytesSent;
        }

        // This runs OFF any IO worker thread
        public long getTotalExceptions() {
            return totalExceptions;
        }

        // This runs OFF any IO worker thread. The final list of summary values from a service
        // actually includes data we do not keep in the thread-specific area. See ServiceManagementBeanImpl for those.
        public void collectSummaryValues(Object[] vals) {
            vals[GatewayManagementBean.SUMMARY_DATA_TOTAL_CURRENT_SESSIONS_INDEX] =
                    ((Long) vals[GatewayManagementBean.SUMMARY_DATA_TOTAL_CURRENT_SESSIONS_INDEX]) + totalCurrentSessions;
            vals[GatewayManagementBean.SUMMARY_DATA_TOTAL_BYTES_RECEIVED_INDEX] =
                    ((Long) vals[GatewayManagementBean.SUMMARY_DATA_TOTAL_BYTES_RECEIVED_INDEX]) + totalBytesReceived;
            vals[GatewayManagementBean.SUMMARY_DATA_TOTAL_BYTES_SENT_INDEX] =
                    ((Long) vals[GatewayManagementBean.SUMMARY_DATA_TOTAL_BYTES_SENT_INDEX]) + totalBytesSent;
            vals[GatewayManagementBean.SUMMARY_DATA_TOTAL_EXCEPTIONS_INDEX] =
                    ((Long) vals[GatewayManagementBean.SUMMARY_DATA_TOTAL_EXCEPTIONS_INDEX]) + totalExceptions;
            vals[GatewayManagementBean.SUMMARY_DATA_LATEST_UPDATEABLE_GATEWAY_VERSION_INDEX] = getAvailableUpdateVersion();
        }
    }

    @Override
    public String getAvailableUpdateVersion() {
        String version = "";
        if (updateChecker != null) {
            version = updateChecker.getAvailableUpdateVersion();
        }
        return version;
    }

    @Override
    public ManagementUpdateCheck getUpdateCheck() {
        return updateChecker;
    }

    @Override
    public void forceUpdateVersionCheck() {
        if (updateChecker != null) {
            updateChecker.checkForUpdate();
        }
    }
}
package com.ir.hw6; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileWriter; import java.io.IOException; import java.io.InputStreamReader; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.Date; import java.util.LinkedHashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import de.bwaldvogel.liblinear.InvalidInputDataException; import de.bwaldvogel.liblinear.Predict; import de.bwaldvogel.liblinear.Train; /* Removing Coding redundancies etc. * Changed Read/write and output folders. Complete Codes*/ public class ML8 { static Map<Integer, Map<String, Integer>> qRelMap = new LinkedHashMap<Integer, Map<String, Integer>>(); static Map<Integer, List<Integer>> setsMap = new LinkedHashMap<Integer, List<Integer>>(); static Map<Integer, Map<String, Double>> okapiMap = new LinkedHashMap<Integer, Map<String, Double>>(); static Map<Integer, Map<String, Double>> okapiIdfMap = new LinkedHashMap<Integer, Map<String, Double>>(); static Map<Integer, Map<String, Double>> bm25Map = new LinkedHashMap<Integer, Map<String, Double>>(); static Map<Integer, Map<String, Double>> laplaceMap = new LinkedHashMap<Integer, Map<String, Double>>(); static Map<Integer, Map<String, Double>> jmMap = new LinkedHashMap<Integer, Map<String, Double>>(); static Map<Integer, Map<String, Double>> proxMap = new LinkedHashMap<Integer, Map<String, Double>>(); static Map<String, Double[]> docFeatureMatrix = new LinkedHashMap<String, Double[]>(); static Map<String, Double[]> testFeatureMatrix = new LinkedHashMap<String, Double[]>(); public static void main(String[] args) throws InvalidInputDataException { try { // read the qrelFile and Create the data Map. 
readQrelFile("qrels.adhoc.51-100.AP89"); System.out.println("Qrel Map Size::: " + qRelMap.size()); // create training and test query sets readQueryFiles("query_desc.51-100.short"); System.out.println("Sets Size:: " + setsMap.size()); System.out.println("Training Set:: " + setsMap.get(0).size()); System.out.println("Test Set:: " + setsMap.get(1).size()); // read the result files and create a matrix okapiMap = readResultsMap(okapiMap, "okapi-FINAL-1"); okapiIdfMap = readResultsMap(okapiIdfMap, "okapiIDF-Final-1"); bm25Map = readResultsMap(bm25Map, "BM-FINAL-1"); laplaceMap = readResultsMap(laplaceMap, "Laplace-Final-1"); jmMap = readResultsMap(jmMap, "JM-Final-1"); proxMap = readResultsMap(proxMap, "proximity-1"); System.out.println("Okpai Map Size:: " + okapiMap.size()); System.out.println("Okpai IDF Map Size:: " + okapiIdfMap.size()); System.out.println("BM25 Map Size:: " + bm25Map.size()); System.out.println("Laplace Map Size:: " + laplaceMap.size()); System.out.println("JM Map Size:: " + jmMap.size()); System.out.println("Prox Map Size:: " + proxMap.size()); // Add features to the matrix createMatrix(okapiMap); updateMatrix(okapiIdfMap, "tf-idf"); updateMatrix(bm25Map, "bm25"); updateMatrix(laplaceMap, "laplace"); updateMatrix(jmMap, "jm"); updateMatrix(proxMap, "prox"); System.out.println("Training Matrix Size:: " + docFeatureMatrix.size()); System.out .println("Test Matrix Size:: " + testFeatureMatrix.size()); writeMatrixToFile(docFeatureMatrix, "trainingMatrix", "trainCatalog"); writeMatrixToFile(testFeatureMatrix, "testMatrix", "testCatalog"); // Train the model String[] args1 = { "-s", "0", "C:/Users/Nitin/Assign6/output/trainingMatrix.txt", "C:/Users/Nitin/Assign6/output/modelTrain.txt" }; Train.main(args1); System.out.println("Done with training. 
Started Predicting.."); // predict for Testing predictLabels("testMatrix.txt", "modelTrain.txt", "outputTest.txt"); // generate Ranked List for Testing generateRankedLists("outputTest", "testCatalog", "testingPerformance"); // Predict for Training predictLabels("trainingMatrix.txt", "modelTrain.txt", "outputTrain.txt"); // generate Ranked List for Training generateRankedLists("outputTrain", "trainCatalog", "trainingPerformance"); } catch (IOException e) { e.printStackTrace(); } } private static void generateRankedLists(String outputFile, String catalogFile, String rankedFile) throws NumberFormatException, IOException { // Generate a Ranked List file. // Step 1: load the ModelOutput. List<Double> scores = readModelFile(outputFile); // Step2: Load the Catalog file List<String> docIds = readCatalog(catalogFile); System.out.println("Model Size::" + scores.size()); System.out.println("Catalog Size::" + docIds.size()); // create a Map Map<String, Double> rankedMap = new LinkedHashMap<String, Double>(); for (int i = 0; i < scores.size(); i++) { rankedMap.put(docIds.get(i), scores.get(i)); } rankedMap = getSortedMap(rankedMap); // Print a ranked List. writeRankedFile(rankedFile, sortQids(rankedMap)); } private static void predictLabels(String testFile, String model, String output) throws IOException { String path = "C:/Users/Nitin/Assign6/output/"; try { Thread.sleep(2000); } catch (InterruptedException e) { // TODO Auto-generated catch block e.printStackTrace(); } // Predict String[] args2 = { "-b", "1", path + testFile, path + model, path + output }; Predict.main(args2); } public static <K, V extends Comparable<? super V>> Map<K, V> sortQids( Map<K, V> rankTerm) { System.out.println("Started Sorting..." 
+ "@ " + new Date()); List<Map.Entry<K, V>> list = new LinkedList<Map.Entry<K, V>>( rankTerm.entrySet()); Collections.sort(list, new Comparator<Map.Entry<K, V>>() { public int compare(Map.Entry<K, V> o1, Map.Entry<K, V> o2) { // return (o1.getValue()).compareTo(o2.getValue()); return Integer.parseInt(o1.getKey().toString().split(":")[0]) < Integer .parseInt(o2.getKey().toString().split(":")[0]) ? -1 : Integer .parseInt(o1.getKey().toString().split(":")[0]) == Integer .parseInt(o2.getKey().toString().split(":")[0]) ? 0 : 1; } }); Map<K, V> result = new LinkedHashMap<K, V>(); for (Map.Entry<K, V> entry : list) { result.put(entry.getKey(), entry.getValue()); } System.out.println("Stopped Sorting..." + "@ " + new Date()); return result; } private static void writeRankedFile(String fileName, Map<String, Double> rankedMap) { File file = new File("C:/Users/Nitin/Assign6/" + fileName + ".txt"); try { BufferedWriter out = new BufferedWriter(new FileWriter(file)); int in = 1; for (Map.Entry<String, Double> m : rankedMap.entrySet()) { String ids[] = m.getKey().split(":"); String qId = ids[0]; String docId = ids[1]; String finalString = qId + " " + "Q0" + " " + docId + " " + in + " " + m.getValue() + " " + "EXP"; in++; out.write(finalString); out.newLine(); } out.close(); } catch (IOException e) { e.printStackTrace(); } } private static List<String> readCatalog(String fileName) throws NumberFormatException, IOException { String path = "C:/Users/Nitin/Assign6/output/"; File file = new File(path + fileName + ".txt"); List<String> docIds = new ArrayList<String>(); try { BufferedReader br = new BufferedReader(new InputStreamReader( new FileInputStream(file))); String str = ""; while ((str = br.readLine()) != null) { String[] line = str.split("\t"); String docId = line[1]; docIds.add(docId); } } catch (FileNotFoundException e) { e.printStackTrace(); } return docIds; } private static List<Double> readModelFile(String fileName) throws NumberFormatException, IOException { String path 
= "C:/Users/Nitin/Assign6/output/"; File file = new File(path + fileName + ".txt"); List<Double> scores = new ArrayList<Double>(); try { BufferedReader br = new BufferedReader(new InputStreamReader( new FileInputStream(file))); String str = ""; str = br.readLine(); // disregarding the first line while ((str = br.readLine()) != null) { String[] line = str.split(" "); double score = Double.parseDouble(line[1]); scores.add(score); } } catch (FileNotFoundException e) { e.printStackTrace(); } return scores; } private static void updateMatrix(Map<Integer, Map<String, Double>> map, String type) { // create the doc-feature Matrix for training set int i = 0; if (type.equals("tf-idf")) { i = 1; } if (type.equals("bm25")) { i = 2; } if (type.equals("laplace")) { i = 3; } if (type.equals("jm")) { i = 4; } if (type.equals("prox")) { i = 5; } for (Map.Entry<Integer, Map<String, Double>> q : map.entrySet()) { int qid = q.getKey(); Map<String, Double> docsmap = q.getValue(); if (setsMap.get(0).contains(qid)) { for (Map.Entry<String, Double> d : docsmap.entrySet()) { String docId = d.getKey(); if (docFeatureMatrix.containsKey(qid + ":" + docId)) { Double[] features = docFeatureMatrix.get(qid + ":" + docId); features[i] = d.getValue(); docFeatureMatrix.put(qid + ":" + docId, features); } else { Double[] features = new Double[7]; for (int j = 0; j < 6; j++) { if (j != i) { features[j] = -1.0; } else features[i] = d.getValue(); } if (qRelMap.get(qid).get(docId) != null) { features[6] = (double) qRelMap.get(qid).get(docId); docFeatureMatrix.put(qid + ":" + docId, features); } } } } else if (setsMap.get(1).contains(qid)) { for (Map.Entry<String, Double> d : docsmap.entrySet()) { String docId = d.getKey(); if (testFeatureMatrix.containsKey(qid + ":" + docId)) { Double[] features = testFeatureMatrix.get(qid + ":" + docId); features[i] = d.getValue(); testFeatureMatrix.put(qid + ":" + docId, features); } else { Double[] features = new Double[7]; for (int j = 0; j < 6; j++) { if (j != i) { 
features[j] = -1.0; } else features[i] = d.getValue(); } if (qRelMap.get(qid).get(docId) != null) { features[6] = (double) qRelMap.get(qid).get(docId); testFeatureMatrix.put(qid + ":" + docId, features); } } } } } } private static void writeMatrixToFile(Map<String, Double[]> matrix, String fileName, String fileName1) { String path = "C:/Users/Nitin/Assign6/output/"; File file = new File(path + fileName + ".txt"); File file1 = new File(path + fileName1 + ".txt"); try { BufferedWriter out = new BufferedWriter(new FileWriter(file)); BufferedWriter out1 = new BufferedWriter(new FileWriter(file1)); int in = 1; for (Map.Entry<String, Double[]> m : matrix.entrySet()) { String qid = m.getKey(); String line = in + "\t" + qid; out1.write(line); out1.newLine(); Double[] features = m.getValue(); String fea = ""; for (int i = 0; i < features.length - 1; i++) { if (features[i] != -1.0) fea += (i + 1) + ":" + features[i] + "\t"; } String finalString = features[6] + "\t" + fea; in++; out.write(finalString); out.newLine(); } out.close(); out1.close(); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } } private static void createMatrix(Map<Integer, Map<String, Double>> map) { // create the doc-feature Matrix for training set for (Map.Entry<Integer, Map<String, Double>> q : map.entrySet()) { int qid = q.getKey(); Map<String, Double> docsmap = q.getValue(); if (setsMap.get(0).contains(qid)) { for (Map.Entry<String, Double> d : docsmap.entrySet()) { String docId = d.getKey(); Double[] features = new Double[7]; features[0] = d.getValue(); features[1] = -1.0; features[2] = -1.0; features[3] = -1.0; features[4] = -1.0; features[5] = -1.0; if (qRelMap.get(qid).get(docId) != null) { features[6] = (double) qRelMap.get(qid).get(docId); docFeatureMatrix.put(qid + ":" + docId, features); } } } else if (setsMap.get(1).contains(qid)) { for (Map.Entry<String, Double> d : docsmap.entrySet()) { String docId = d.getKey(); Double[] features = new Double[7]; 
features[0] = d.getValue(); features[1] = -1.0; features[2] = -1.0; features[3] = -1.0; features[4] = -1.0; features[5] = -1.0; if (qRelMap.get(qid).get(docId) != null) { features[6] = (double) qRelMap.get(qid).get(docId); testFeatureMatrix.put(qid + ":" + docId, features); } } } } } private static void readQueryFiles(String fileName) throws NumberFormatException, IOException { String path = "C:/Users/Nitin/Assign6/data/"; File file = new File(path + fileName + ".txt"); try { BufferedReader br = new BufferedReader(new InputStreamReader( new FileInputStream(file))); String str = ""; List<Integer> trainMap = new ArrayList<Integer>(); List<Integer> testMap = new ArrayList<Integer>(); while ((str = br.readLine()) != null) { String[] line = str.split(" "); String queryNo = line[0]; int docno = Integer.parseInt(queryNo.substring(0, queryNo.length() - 1)); if (docno == 58 || docno == 93 || docno == 85 || docno == 61 || docno == 100) { if (setsMap.get(1) != null) { testMap = setsMap.get(1); testMap.add(docno); setsMap.put(1, testMap); } else { testMap.add(docno); setsMap.put(1, testMap); } } /* * textMap.add(58); textMap.add(93); textMap.add(85); * textMap.add(99); textMap.add(100); setsMap.put(1,textMap); */ /* * if (docno == 56 || docno == 71 || docno == 91 || docno == 97 * || docno == 59 || docno == 64 || docno == 62 || docno == 77 * || docno == 54 || docno == 87 || docno == 94 || docno == 89 * || docno == 61 || docno == 95 || docno == 68 || docno == 57 * || docno == 98 || docno == 60 || docno == 80 || docno == 63) */ if (!testMap.contains(docno)) { if (setsMap.get(0) != null) { trainMap = setsMap.get(0); trainMap.add(docno); setsMap.put(0, trainMap); } else { trainMap.add(docno); setsMap.put(0, trainMap); } } // trainMap.add(docno); /* * trainMap.add(56); trainMap.add(71); trainMap.add(97); * trainMap.add(59); trainMap.add(64); trainMap.add(62); * trainMap.add(77); trainMap.add(54); trainMap.add(87); * trainMap.add(94); trainMap.add(89); trainMap.add(61); * 
trainMap.add(95); trainMap.add(68); trainMap.add(57); * trainMap.add(98); trainMap.add(60); trainMap.add(80); * trainMap.add(63); trainMap.add(91); setsMap.put(0,trainMap); */ } } catch (FileNotFoundException e) { e.printStackTrace(); } } private static Map<Integer, Map<String, Double>> readResultsMap( Map<Integer, Map<String, Double>> map, String fileName) throws NumberFormatException, IOException { String path = "C:/Users/Nitin/Assign6/data/"; File file = new File(path + fileName + ".txt"); try { BufferedReader br = new BufferedReader(new InputStreamReader( new FileInputStream(file))); String str = ""; while ((str = br.readLine()) != null) { String[] line = str.split(" "); int queryNo = Integer.parseInt(line[0]); String docno = line[2]; double score = Double.parseDouble(line[4]); if (map.containsKey(queryNo)) { Map<String, Double> tempMap = new LinkedHashMap<String, Double>(); tempMap = map.get(queryNo); tempMap.put(docno, score); map.put(queryNo, tempMap); } else { Map<String, Double> tempMap = new LinkedHashMap<String, Double>(); tempMap.put(docno, score); map.put(queryNo, tempMap); } } } catch (FileNotFoundException e) { e.printStackTrace(); } return map; } private static void readQrelFile(String fileName) throws IOException { String path = "C:/Users/Nitin/Assign6/data/"; File file = new File(path + fileName + ".txt"); try { BufferedReader br = new BufferedReader(new InputStreamReader( new FileInputStream(file))); String str = ""; while ((str = br.readLine()) != null) { String[] line = str.split(" "); int queryNo = Integer.parseInt(line[0]); String docno = line[2]; int rel = Integer.parseInt(line[3]); if (qRelMap.containsKey(queryNo)) { Map<String, Integer> tempMap = new LinkedHashMap<String, Integer>(); tempMap = qRelMap.get(queryNo); tempMap.put(docno, rel); qRelMap.put(queryNo, tempMap); } else { Map<String, Integer> tempMap = new LinkedHashMap<String, Integer>(); tempMap.put(docno, rel); qRelMap.put(queryNo, tempMap); } } } catch (FileNotFoundException e) 
{ e.printStackTrace(); } } public static <K, V extends Comparable<? super V>> Map<K, V> getSortedMap( Map<K, V> rankTerm) { System.out.println("Started Sorting..." + "@ " + new Date()); List<Map.Entry<K, V>> list = new LinkedList<Map.Entry<K, V>>( rankTerm.entrySet()); Collections.sort(list, new Comparator<Map.Entry<K, V>>() { public int compare(Map.Entry<K, V> o1, Map.Entry<K, V> o2) { // return (o1.getValue()).compareTo(o2.getValue()); return Double.parseDouble(o1.getValue().toString()) > Double .parseDouble(o2.getValue().toString()) ? -1 : Double .parseDouble(o1.getValue().toString()) == Double .parseDouble(o2.getValue().toString()) ? 0 : 1; } }); Map<K, V> result = new LinkedHashMap<K, V>(); for (Map.Entry<K, V> entry : list) { result.put(entry.getKey(), entry.getValue()); } System.out.println("Stopped Sorting..." + "@ " + new Date()); return result; } }
package com.gerken.frontend.se.xform.compiled; import org.eclipse.jet.JET2Context; import org.eclipse.jet.JET2Template; import org.eclipse.jet.JET2Writer; import org.eclipse.jet.taglib.RuntimeTagElement; import org.eclipse.jet.taglib.TagInfo; public class _jet_m2msnippets implements JET2Template { private static final String _jetns_c = "org.eclipse.jet.controlTags"; //$NON-NLS-1$ public _jet_m2msnippets() { super(); } private static final String NL = System.getProperty("line.separator"); //$NON-NLS-1$ private static final TagInfo _td_c_setVariable_1_1 = new TagInfo("c:setVariable", //$NON-NLS-1$ 1, 1, new String[] { "select", //$NON-NLS-1$ "var", //$NON-NLS-1$ }, new String[] { " ':' ", //$NON-NLS-1$ "colon", //$NON-NLS-1$ } ); private static final TagInfo _td_c_iterate_3_1 = new TagInfo("c:iterate", //$NON-NLS-1$ 3, 1, new String[] { "select", //$NON-NLS-1$ "var", //$NON-NLS-1$ }, new String[] { "$editor/type[not(@parent)]", //$NON-NLS-1$ "type", //$NON-NLS-1$ } ); private static final TagInfo _td_c_get_6_13 = new TagInfo("c:get", //$NON-NLS-1$ 6, 13, new String[] { "select", //$NON-NLS-1$ }, new String[] { "$type/@name", //$NON-NLS-1$ } ); private static final TagInfo _td_c_iterate_9_1 = new TagInfo("c:iterate", //$NON-NLS-1$ 9, 1, new String[] { "select", //$NON-NLS-1$ "var", //$NON-NLS-1$ }, new String[] { "$type/field", //$NON-NLS-1$ "field", //$NON-NLS-1$ } ); private static final TagInfo _td_c_get_10_3 = new TagInfo("c:get", //$NON-NLS-1$ 10, 3, new String[] { "select", //$NON-NLS-1$ }, new String[] { "$colon", //$NON-NLS-1$ } ); private static final TagInfo _td_c_get_10_38 = new TagInfo("c:get", //$NON-NLS-1$ 10, 38, new String[] { "select", //$NON-NLS-1$ }, new String[] { "$type/@name", //$NON-NLS-1$ } ); private static final TagInfo _td_c_get_10_69 = new TagInfo("c:get", //$NON-NLS-1$ 10, 69, new String[] { "select", //$NON-NLS-1$ }, new String[] { "$field/@name", //$NON-NLS-1$ } ); private static final TagInfo _td_c_get_10_101 = new TagInfo("c:get", 
//$NON-NLS-1$ 10, 101, new String[] { "select", //$NON-NLS-1$ }, new String[] { "$field/@name", //$NON-NLS-1$ } ); private static final TagInfo _td_c_get_10_135 = new TagInfo("c:get", //$NON-NLS-1$ 10, 135, new String[] { "select", //$NON-NLS-1$ }, new String[] { "$colon", //$NON-NLS-1$ } ); private static final TagInfo _td_c_get_10_173 = new TagInfo("c:get", //$NON-NLS-1$ 10, 173, new String[] { "select", //$NON-NLS-1$ }, new String[] { "$type/@name", //$NON-NLS-1$ } ); private static final TagInfo _td_c_get_10_204 = new TagInfo("c:get", //$NON-NLS-1$ 10, 204, new String[] { "select", //$NON-NLS-1$ }, new String[] { "$field/@name", //$NON-NLS-1$ } ); private static final TagInfo _td_c_get_10_241 = new TagInfo("c:get", //$NON-NLS-1$ 10, 241, new String[] { "select", //$NON-NLS-1$ }, new String[] { "$colon", //$NON-NLS-1$ } ); private static final TagInfo _td_c_iterate_16_1 = new TagInfo("c:iterate", //$NON-NLS-1$ 16, 1, new String[] { "select", //$NON-NLS-1$ "var", //$NON-NLS-1$ }, new String[] { "$editor/type[@parent]", //$NON-NLS-1$ "type", //$NON-NLS-1$ } ); private static final TagInfo _td_c_get_19_13 = new TagInfo("c:get", //$NON-NLS-1$ 19, 13, new String[] { "select", //$NON-NLS-1$ }, new String[] { "$type/@name", //$NON-NLS-1$ } ); private static final TagInfo _td_c_get_22_3 = new TagInfo("c:get", //$NON-NLS-1$ 22, 3, new String[] { "select", //$NON-NLS-1$ }, new String[] { "$colon", //$NON-NLS-1$ } ); private static final TagInfo _td_c_get_22_45 = new TagInfo("c:get", //$NON-NLS-1$ 22, 45, new String[] { "select", //$NON-NLS-1$ }, new String[] { "$type/@parent", //$NON-NLS-1$ } ); private static final TagInfo _td_c_get_22_77 = new TagInfo("c:get", //$NON-NLS-1$ 22, 77, new String[] { "select", //$NON-NLS-1$ }, new String[] { "$type/@name", //$NON-NLS-1$ } ); private static final TagInfo _td_c_get_22_113 = new TagInfo("c:get", //$NON-NLS-1$ 22, 113, new String[] { "select", //$NON-NLS-1$ }, new String[] { "$type/@name", //$NON-NLS-1$ } ); private static 
final TagInfo _td_c_iterate_24_1 = new TagInfo("c:iterate", //$NON-NLS-1$ 24, 1, new String[] { "select", //$NON-NLS-1$ "var", //$NON-NLS-1$ }, new String[] { "$type/field", //$NON-NLS-1$ "field", //$NON-NLS-1$ } ); private static final TagInfo _td_c_get_25_3 = new TagInfo("c:get", //$NON-NLS-1$ 25, 3, new String[] { "select", //$NON-NLS-1$ }, new String[] { "$colon", //$NON-NLS-1$ } ); private static final TagInfo _td_c_get_25_38 = new TagInfo("c:get", //$NON-NLS-1$ 25, 38, new String[] { "select", //$NON-NLS-1$ }, new String[] { "$type/@name", //$NON-NLS-1$ } ); private static final TagInfo _td_c_get_25_69 = new TagInfo("c:get", //$NON-NLS-1$ 25, 69, new String[] { "select", //$NON-NLS-1$ }, new String[] { "$field/@name", //$NON-NLS-1$ } ); private static final TagInfo _td_c_get_25_101 = new TagInfo("c:get", //$NON-NLS-1$ 25, 101, new String[] { "select", //$NON-NLS-1$ }, new String[] { "$field/@name", //$NON-NLS-1$ } ); private static final TagInfo _td_c_get_25_135 = new TagInfo("c:get", //$NON-NLS-1$ 25, 135, new String[] { "select", //$NON-NLS-1$ }, new String[] { "$colon", //$NON-NLS-1$ } ); private static final TagInfo _td_c_get_25_173 = new TagInfo("c:get", //$NON-NLS-1$ 25, 173, new String[] { "select", //$NON-NLS-1$ }, new String[] { "$type/@name", //$NON-NLS-1$ } ); private static final TagInfo _td_c_get_25_204 = new TagInfo("c:get", //$NON-NLS-1$ 25, 204, new String[] { "select", //$NON-NLS-1$ }, new String[] { "$field/@name", //$NON-NLS-1$ } ); private static final TagInfo _td_c_get_25_241 = new TagInfo("c:get", //$NON-NLS-1$ 25, 241, new String[] { "select", //$NON-NLS-1$ }, new String[] { "$colon", //$NON-NLS-1$ } ); private static final TagInfo _td_c_get_28_4 = new TagInfo("c:get", //$NON-NLS-1$ 28, 4, new String[] { "select", //$NON-NLS-1$ }, new String[] { "$colon", //$NON-NLS-1$ } ); public void generate(final JET2Context context, final JET2Writer __out) { JET2Writer out = __out; RuntimeTagElement _jettag_c_setVariable_1_1 = 
context.getTagFactory().createRuntimeTag(_jetns_c, "setVariable", "c:setVariable", _td_c_setVariable_1_1); //$NON-NLS-1$ //$NON-NLS-2$ _jettag_c_setVariable_1_1.setRuntimeParent(null); _jettag_c_setVariable_1_1.setTagInfo(_td_c_setVariable_1_1); _jettag_c_setVariable_1_1.doStart(context, out); _jettag_c_setVariable_1_1.doEnd(); out.write(NL); RuntimeTagElement _jettag_c_iterate_3_1 = context.getTagFactory().createRuntimeTag(_jetns_c, "iterate", "c:iterate", _td_c_iterate_3_1); //$NON-NLS-1$ //$NON-NLS-2$ _jettag_c_iterate_3_1.setRuntimeParent(null); _jettag_c_iterate_3_1.setTagInfo(_td_c_iterate_3_1); _jettag_c_iterate_3_1.doStart(context, out); while (_jettag_c_iterate_3_1.okToProcessBody()) { out.write(NL); out.write("|"); //$NON-NLS-1$ out.write(NL); out.write("| Element: "); //$NON-NLS-1$ RuntimeTagElement _jettag_c_get_6_13 = context.getTagFactory().createRuntimeTag(_jetns_c, "get", "c:get", _td_c_get_6_13); //$NON-NLS-1$ //$NON-NLS-2$ _jettag_c_get_6_13.setRuntimeParent(_jettag_c_iterate_3_1); _jettag_c_get_6_13.setTagInfo(_td_c_get_6_13); _jettag_c_get_6_13.doStart(context, out); _jettag_c_get_6_13.doEnd(); out.write(NL); out.write("|"); //$NON-NLS-1$ out.write(NL); out.write(NL); RuntimeTagElement _jettag_c_iterate_9_1 = context.getTagFactory().createRuntimeTag(_jetns_c, "iterate", "c:iterate", _td_c_iterate_9_1); //$NON-NLS-1$ //$NON-NLS-2$ _jettag_c_iterate_9_1.setRuntimeParent(_jettag_c_iterate_3_1); _jettag_c_iterate_9_1.setTagInfo(_td_c_iterate_9_1); _jettag_c_iterate_9_1.doStart(context, out); while (_jettag_c_iterate_9_1.okToProcessBody()) { out.write("<c"); //$NON-NLS-1$ RuntimeTagElement _jettag_c_get_10_3 = context.getTagFactory().createRuntimeTag(_jetns_c, "get", "c:get", _td_c_get_10_3); //$NON-NLS-1$ //$NON-NLS-2$ _jettag_c_get_10_3.setRuntimeParent(_jettag_c_iterate_9_1); _jettag_c_get_10_3.setTagInfo(_td_c_get_10_3); _jettag_c_get_10_3.doStart(context, out); _jettag_c_get_10_3.doEnd(); out.write("if test=\"$"); //$NON-NLS-1$ RuntimeTagElement 
_jettag_c_get_10_38 = context.getTagFactory().createRuntimeTag(_jetns_c, "get", "c:get", _td_c_get_10_38); //$NON-NLS-1$ //$NON-NLS-2$ _jettag_c_get_10_38.setRuntimeParent(_jettag_c_iterate_9_1); _jettag_c_get_10_38.setTagInfo(_td_c_get_10_38); _jettag_c_get_10_38.doStart(context, out); _jettag_c_get_10_38.doEnd(); out.write("/@"); //$NON-NLS-1$ RuntimeTagElement _jettag_c_get_10_69 = context.getTagFactory().createRuntimeTag(_jetns_c, "get", "c:get", _td_c_get_10_69); //$NON-NLS-1$ //$NON-NLS-2$ _jettag_c_get_10_69.setRuntimeParent(_jettag_c_iterate_9_1); _jettag_c_get_10_69.setTagInfo(_td_c_get_10_69); _jettag_c_get_10_69.doStart(context, out); _jettag_c_get_10_69.doEnd(); out.write("\">"); //$NON-NLS-1$ RuntimeTagElement _jettag_c_get_10_101 = context.getTagFactory().createRuntimeTag(_jetns_c, "get", "c:get", _td_c_get_10_101); //$NON-NLS-1$ //$NON-NLS-2$ _jettag_c_get_10_101.setRuntimeParent(_jettag_c_iterate_9_1); _jettag_c_get_10_101.setTagInfo(_td_c_get_10_101); _jettag_c_get_10_101.doStart(context, out); _jettag_c_get_10_101.doEnd(); out.write("=\"<c"); //$NON-NLS-1$ RuntimeTagElement _jettag_c_get_10_135 = context.getTagFactory().createRuntimeTag(_jetns_c, "get", "c:get", _td_c_get_10_135); //$NON-NLS-1$ //$NON-NLS-2$ _jettag_c_get_10_135.setRuntimeParent(_jettag_c_iterate_9_1); _jettag_c_get_10_135.setTagInfo(_td_c_get_10_135); _jettag_c_get_10_135.doStart(context, out); _jettag_c_get_10_135.doEnd(); out.write("get select=\"$"); //$NON-NLS-1$ RuntimeTagElement _jettag_c_get_10_173 = context.getTagFactory().createRuntimeTag(_jetns_c, "get", "c:get", _td_c_get_10_173); //$NON-NLS-1$ //$NON-NLS-2$ _jettag_c_get_10_173.setRuntimeParent(_jettag_c_iterate_9_1); _jettag_c_get_10_173.setTagInfo(_td_c_get_10_173); _jettag_c_get_10_173.doStart(context, out); _jettag_c_get_10_173.doEnd(); out.write("/@"); //$NON-NLS-1$ RuntimeTagElement _jettag_c_get_10_204 = context.getTagFactory().createRuntimeTag(_jetns_c, "get", "c:get", _td_c_get_10_204); //$NON-NLS-1$ 
//$NON-NLS-2$ _jettag_c_get_10_204.setRuntimeParent(_jettag_c_iterate_9_1); _jettag_c_get_10_204.setTagInfo(_td_c_get_10_204); _jettag_c_get_10_204.doStart(context, out); _jettag_c_get_10_204.doEnd(); out.write("\"/>\"</c"); //$NON-NLS-1$ RuntimeTagElement _jettag_c_get_10_241 = context.getTagFactory().createRuntimeTag(_jetns_c, "get", "c:get", _td_c_get_10_241); //$NON-NLS-1$ //$NON-NLS-2$ _jettag_c_get_10_241.setRuntimeParent(_jettag_c_iterate_9_1); _jettag_c_get_10_241.setTagInfo(_td_c_get_10_241); _jettag_c_get_10_241.doStart(context, out); _jettag_c_get_10_241.doEnd(); out.write("if>"); //$NON-NLS-1$ out.write(NL); out.write(" "); //$NON-NLS-1$ out.write(NL); _jettag_c_iterate_9_1.handleBodyContent(out); } _jettag_c_iterate_9_1.doEnd(); out.write(" "); //$NON-NLS-1$ out.write(NL); _jettag_c_iterate_3_1.handleBodyContent(out); } _jettag_c_iterate_3_1.doEnd(); out.write(NL); RuntimeTagElement _jettag_c_iterate_16_1 = context.getTagFactory().createRuntimeTag(_jetns_c, "iterate", "c:iterate", _td_c_iterate_16_1); //$NON-NLS-1$ //$NON-NLS-2$ _jettag_c_iterate_16_1.setRuntimeParent(null); _jettag_c_iterate_16_1.setTagInfo(_td_c_iterate_16_1); _jettag_c_iterate_16_1.doStart(context, out); while (_jettag_c_iterate_16_1.okToProcessBody()) { out.write(NL); out.write("|"); //$NON-NLS-1$ out.write(NL); out.write("| Element: "); //$NON-NLS-1$ RuntimeTagElement _jettag_c_get_19_13 = context.getTagFactory().createRuntimeTag(_jetns_c, "get", "c:get", _td_c_get_19_13); //$NON-NLS-1$ //$NON-NLS-2$ _jettag_c_get_19_13.setRuntimeParent(_jettag_c_iterate_16_1); _jettag_c_get_19_13.setTagInfo(_td_c_get_19_13); _jettag_c_get_19_13.doStart(context, out); _jettag_c_get_19_13.doEnd(); out.write(NL); out.write("|"); //$NON-NLS-1$ out.write(NL); out.write(NL); out.write("<c"); //$NON-NLS-1$ RuntimeTagElement _jettag_c_get_22_3 = context.getTagFactory().createRuntimeTag(_jetns_c, "get", "c:get", _td_c_get_22_3); //$NON-NLS-1$ //$NON-NLS-2$ 
_jettag_c_get_22_3.setRuntimeParent(_jettag_c_iterate_16_1); _jettag_c_get_22_3.setTagInfo(_td_c_get_22_3); _jettag_c_get_22_3.doStart(context, out); _jettag_c_get_22_3.doEnd(); out.write("iterate select=\"$"); //$NON-NLS-1$ RuntimeTagElement _jettag_c_get_22_45 = context.getTagFactory().createRuntimeTag(_jetns_c, "get", "c:get", _td_c_get_22_45); //$NON-NLS-1$ //$NON-NLS-2$ _jettag_c_get_22_45.setRuntimeParent(_jettag_c_iterate_16_1); _jettag_c_get_22_45.setTagInfo(_td_c_get_22_45); _jettag_c_get_22_45.doStart(context, out); _jettag_c_get_22_45.doEnd(); out.write("/"); //$NON-NLS-1$ RuntimeTagElement _jettag_c_get_22_77 = context.getTagFactory().createRuntimeTag(_jetns_c, "get", "c:get", _td_c_get_22_77); //$NON-NLS-1$ //$NON-NLS-2$ _jettag_c_get_22_77.setRuntimeParent(_jettag_c_iterate_16_1); _jettag_c_get_22_77.setTagInfo(_td_c_get_22_77); _jettag_c_get_22_77.doStart(context, out); _jettag_c_get_22_77.doEnd(); out.write("\" var=\""); //$NON-NLS-1$ RuntimeTagElement _jettag_c_get_22_113 = context.getTagFactory().createRuntimeTag(_jetns_c, "get", "c:get", _td_c_get_22_113); //$NON-NLS-1$ //$NON-NLS-2$ _jettag_c_get_22_113.setRuntimeParent(_jettag_c_iterate_16_1); _jettag_c_get_22_113.setTagInfo(_td_c_get_22_113); _jettag_c_get_22_113.doStart(context, out); _jettag_c_get_22_113.doEnd(); out.write("\" >"); //$NON-NLS-1$ out.write(NL); out.write(NL); RuntimeTagElement _jettag_c_iterate_24_1 = context.getTagFactory().createRuntimeTag(_jetns_c, "iterate", "c:iterate", _td_c_iterate_24_1); //$NON-NLS-1$ //$NON-NLS-2$ _jettag_c_iterate_24_1.setRuntimeParent(_jettag_c_iterate_16_1); _jettag_c_iterate_24_1.setTagInfo(_td_c_iterate_24_1); _jettag_c_iterate_24_1.doStart(context, out); while (_jettag_c_iterate_24_1.okToProcessBody()) { out.write("<c"); //$NON-NLS-1$ RuntimeTagElement _jettag_c_get_25_3 = context.getTagFactory().createRuntimeTag(_jetns_c, "get", "c:get", _td_c_get_25_3); //$NON-NLS-1$ //$NON-NLS-2$ _jettag_c_get_25_3.setRuntimeParent(_jettag_c_iterate_24_1); 
_jettag_c_get_25_3.setTagInfo(_td_c_get_25_3); _jettag_c_get_25_3.doStart(context, out); _jettag_c_get_25_3.doEnd(); out.write("if test=\"$"); //$NON-NLS-1$ RuntimeTagElement _jettag_c_get_25_38 = context.getTagFactory().createRuntimeTag(_jetns_c, "get", "c:get", _td_c_get_25_38); //$NON-NLS-1$ //$NON-NLS-2$ _jettag_c_get_25_38.setRuntimeParent(_jettag_c_iterate_24_1); _jettag_c_get_25_38.setTagInfo(_td_c_get_25_38); _jettag_c_get_25_38.doStart(context, out); _jettag_c_get_25_38.doEnd(); out.write("/@"); //$NON-NLS-1$ RuntimeTagElement _jettag_c_get_25_69 = context.getTagFactory().createRuntimeTag(_jetns_c, "get", "c:get", _td_c_get_25_69); //$NON-NLS-1$ //$NON-NLS-2$ _jettag_c_get_25_69.setRuntimeParent(_jettag_c_iterate_24_1); _jettag_c_get_25_69.setTagInfo(_td_c_get_25_69); _jettag_c_get_25_69.doStart(context, out); _jettag_c_get_25_69.doEnd(); out.write("\">"); //$NON-NLS-1$ RuntimeTagElement _jettag_c_get_25_101 = context.getTagFactory().createRuntimeTag(_jetns_c, "get", "c:get", _td_c_get_25_101); //$NON-NLS-1$ //$NON-NLS-2$ _jettag_c_get_25_101.setRuntimeParent(_jettag_c_iterate_24_1); _jettag_c_get_25_101.setTagInfo(_td_c_get_25_101); _jettag_c_get_25_101.doStart(context, out); _jettag_c_get_25_101.doEnd(); out.write("=\"<c"); //$NON-NLS-1$ RuntimeTagElement _jettag_c_get_25_135 = context.getTagFactory().createRuntimeTag(_jetns_c, "get", "c:get", _td_c_get_25_135); //$NON-NLS-1$ //$NON-NLS-2$ _jettag_c_get_25_135.setRuntimeParent(_jettag_c_iterate_24_1); _jettag_c_get_25_135.setTagInfo(_td_c_get_25_135); _jettag_c_get_25_135.doStart(context, out); _jettag_c_get_25_135.doEnd(); out.write("get select=\"$"); //$NON-NLS-1$ RuntimeTagElement _jettag_c_get_25_173 = context.getTagFactory().createRuntimeTag(_jetns_c, "get", "c:get", _td_c_get_25_173); //$NON-NLS-1$ //$NON-NLS-2$ _jettag_c_get_25_173.setRuntimeParent(_jettag_c_iterate_24_1); _jettag_c_get_25_173.setTagInfo(_td_c_get_25_173); _jettag_c_get_25_173.doStart(context, out); _jettag_c_get_25_173.doEnd(); 
out.write("/@"); //$NON-NLS-1$ RuntimeTagElement _jettag_c_get_25_204 = context.getTagFactory().createRuntimeTag(_jetns_c, "get", "c:get", _td_c_get_25_204); //$NON-NLS-1$ //$NON-NLS-2$ _jettag_c_get_25_204.setRuntimeParent(_jettag_c_iterate_24_1); _jettag_c_get_25_204.setTagInfo(_td_c_get_25_204); _jettag_c_get_25_204.doStart(context, out); _jettag_c_get_25_204.doEnd(); out.write("\"/>\"</c"); //$NON-NLS-1$ RuntimeTagElement _jettag_c_get_25_241 = context.getTagFactory().createRuntimeTag(_jetns_c, "get", "c:get", _td_c_get_25_241); //$NON-NLS-1$ //$NON-NLS-2$ _jettag_c_get_25_241.setRuntimeParent(_jettag_c_iterate_24_1); _jettag_c_get_25_241.setTagInfo(_td_c_get_25_241); _jettag_c_get_25_241.doStart(context, out); _jettag_c_get_25_241.doEnd(); out.write("if>"); //$NON-NLS-1$ out.write(NL); out.write(" "); //$NON-NLS-1$ out.write(NL); _jettag_c_iterate_24_1.handleBodyContent(out); } _jettag_c_iterate_24_1.doEnd(); out.write("</c"); //$NON-NLS-1$ RuntimeTagElement _jettag_c_get_28_4 = context.getTagFactory().createRuntimeTag(_jetns_c, "get", "c:get", _td_c_get_28_4); //$NON-NLS-1$ //$NON-NLS-2$ _jettag_c_get_28_4.setRuntimeParent(_jettag_c_iterate_16_1); _jettag_c_get_28_4.setTagInfo(_td_c_get_28_4); _jettag_c_get_28_4.doStart(context, out); _jettag_c_get_28_4.doEnd(); out.write("iterate>"); //$NON-NLS-1$ out.write(NL); out.write(" "); //$NON-NLS-1$ out.write(NL); _jettag_c_iterate_16_1.handleBodyContent(out); } _jettag_c_iterate_16_1.doEnd(); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.sentry.tests.e2e.hive;

import static org.apache.sentry.provider.common.ProviderConstants.AUTHORIZABLE_SPLITTER;
import static org.apache.sentry.provider.common.ProviderConstants.PRIVILEGE_PREFIX;
import static org.apache.sentry.provider.common.ProviderConstants.ROLE_SPLITTER;
import static org.junit.Assert.assertTrue;

import java.io.File;
import java.io.IOException;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Map;

import junit.framework.Assert;

import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.sentry.binding.hive.v2.impl.SentryAuthorizationTaskFactoryImplV2;
import org.apache.sentry.binding.metastore.SentryMetastorePostEventListener;
import org.apache.sentry.core.model.db.DBModelAction;
import org.apache.sentry.core.model.db.DBModelAuthorizable;
import org.apache.sentry.policy.db.DBModelAuthorizables;
import org.apache.sentry.provider.db.SimpleDBProviderBackend;
import org.apache.sentry.provider.db.service.thrift.SentryPolicyServiceClient;
import org.apache.sentry.provider.file.PolicyFile;
import org.apache.sentry.service.thrift.SentryServiceClientFactory;
import org.apache.sentry.service.thrift.ServiceConstants.ClientConfig;
import org.apache.sentry.service.thrift.ServiceConstants.ServerConfig;
import org.apache.sentry.tests.e2e.hive.fs.DFS;
import org.apache.sentry.tests.e2e.hive.fs.DFSFactory;
import org.apache.sentry.tests.e2e.hive.hiveserver.HiveServer;
import org.apache.sentry.tests.e2e.hive.hiveserver.HiveServerFactory;
import org.apache.sentry.tests.e2e.minisentry.SentrySrv;
import org.apache.sentry.tests.e2e.minisentry.SentrySrvFactory;
import org.apache.sentry.tests.e2e.minisentry.SentrySrvFactory.SentrySrvType;
import org.apache.tools.ant.util.StringUtils;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.collect.Maps;
import com.google.common.io.Files;

/**
 * Base class for Sentry/Hive end-to-end tests that share one statically
 * configured environment per test class: a HiveServer2 instance, a (possibly
 * mini) DFS, a policy file, and — optionally — an in-process Sentry service.
 *
 * <p>The environment is built once in {@link #setupTestStaticConfiguration()}
 * (JUnit {@code @BeforeClass}) and torn down in
 * {@link #tearDownTestStaticConfiguration()} ({@code @AfterClass}). Subclasses
 * customize behavior by flipping the protected static flags (e.g.
 * {@code useSentryService}, {@code policyOnHdfs}, {@code clearDbPerTest})
 * <em>before</em> the {@code @BeforeClass} hook runs, typically in their own
 * static initializers or {@code @BeforeClass} methods.
 *
 * <p>NOTE(review): all state here is static and shared; test classes deriving
 * from this must not run their lifecycle hooks concurrently.
 */
public abstract class AbstractTestWithStaticConfiguration {
  private static final Logger LOGGER = LoggerFactory
      .getLogger(AbstractTestWithStaticConfiguration.class);

  // Name of the data file loaded into test tables by createTable().
  protected static final String SINGLE_TYPE_DATA_FILE_NAME = "kv1.dat";

  // Commonly used privilege strings (policy-file syntax), test principals
  // (from StaticUserGroup), and database/table/view names shared by tests.
  protected static final String ALL_DB1 = "server=server1->db=db_1",
      ALL_DB2 = "server=server1->db=db_2",
      SELECT_DB1_TBL1 = "server=server1->db=db_1->table=tb_1->action=select",
      SELECT_DB1_TBL2 = "server=server1->db=db_1->table=tb_2->action=select",
      SELECT_DB1_NONTABLE = "server=server1->db=db_1->table=blahblah->action=select",
      INSERT_DB1_TBL1 = "server=server1->db=db_1->table=tb_1->action=insert",
      SELECT_DB2_TBL2 = "server=server1->db=db_2->table=tb_2->action=select",
      INSERT_DB2_TBL1 = "server=server1->db=db_2->table=tb_1->action=insert",
      SELECT_DB1_VIEW1 = "server=server1->db=db_1->table=view_1->action=select",
      ADMIN1 = StaticUserGroup.ADMIN1,
      ADMINGROUP = StaticUserGroup.ADMINGROUP,
      USER1_1 = StaticUserGroup.USER1_1,
      USER1_2 = StaticUserGroup.USER1_2,
      USER2_1 = StaticUserGroup.USER2_1,
      USER3_1 = StaticUserGroup.USER3_1,
      USER4_1 = StaticUserGroup.USER4_1,
      USERGROUP1 = StaticUserGroup.USERGROUP1,
      USERGROUP2 = StaticUserGroup.USERGROUP2,
      USERGROUP3 = StaticUserGroup.USERGROUP3,
      USERGROUP4 = StaticUserGroup.USERGROUP4,
      GROUP1_ROLE = "group1_role",
      DB1 = "db_1",
      DB2 = "db_2",
      DB3 = "db_3",
      TBL1 = "tb_1",
      TBL2 = "tb_2",
      TBL3 = "tb_3",
      VIEW1 = "view_1",
      VIEW2 = "view_2",
      VIEW3 = "view_3",
      INDEX1 = "index_1";

  protected static final String SERVER_HOST = "localhost";
  // System property: when "true", an externally started Sentry service is
  // used instead of spinning one up in-process.
  private static final String EXTERNAL_SENTRY_SERVICE = "sentry.e2etest.external.sentry";
  protected static final String EXTERNAL_HIVE_LIB = "sentry.e2etest.hive.lib";
  // System property: when "true", Sentry service HA is enabled.
  private static final String ENABLE_SENTRY_HA = "sentry.e2etest.enable.service.ha";

  // Configuration switches — subclasses set these before @BeforeClass runs.
  protected static boolean policyOnHdfs = false;
  protected static boolean useSentryService = false;
  protected static boolean setMetastoreListener = true;
  protected static String testServerType = null;
  protected static boolean enableHiveConcurrency = false;
  // Indicates whether the databases/roles need to be cleared before and after
  // every test case in one test class (see setup()/clearAfterPerTest()).
  protected static boolean clearDbPerTest = true;

  // Shared environment, created in setupTestStaticConfiguration().
  protected static File baseDir;
  protected static File logDir;
  protected static File confDir;
  protected static File dataDir;
  protected static File policyFileLocation;
  protected static HiveServer hiveServer;
  protected static FileSystem fileSystem;
  protected static HiveServerFactory.HiveServer2Type hiveServer2Type;
  protected static DFS dfs;
  protected static Map<String, String> properties;
  protected static SentrySrv sentryServer;
  protected static Configuration sentryConf;
  protected static boolean enableSentryHA = false;
  protected static Context context;

  // Error-message fragments tests match against expected failures.
  protected final String semanticException = "SemanticException No valid privileges";
  protected final String SENTRY_ACCESS_CONTROLLER_EXCEPTION = "SentryAccessControlException";

  /**
   * (Re)creates the shared test {@link Context} from the already-initialized
   * static environment (hive server, file system, directories, policy file).
   */
  public static void createContext() throws Exception {
    context = new Context(hiveServer, fileSystem, baseDir, confDir, dataDir,
        policyFileLocation);
  }

  /**
   * Drops the given databases (CASCADE) as {@code user}.
   */
  protected void dropDb(String user, String...dbs) throws Exception {
    Connection connection = context.createConnection(user);
    Statement statement = connection.createStatement();
    for(String db : dbs) {
      statement.execute("DROP DATABASE IF EXISTS " + db + " CASCADE");
    }
    statement.close();
    connection.close();
  }

  /**
   * Creates the given databases as {@code user}. Only DB1/DB2/DB3 are
   * permitted so that per-test cleanup is guaranteed to cover them.
   */
  protected void createDb(String user, String...dbs) throws Exception {
    Connection connection = context.createConnection(user);
    Statement statement = connection.createStatement();
    ArrayList<String> allowedDBs = new ArrayList<String>(Arrays.asList(DB1, DB2, DB3));
    for(String db : dbs) {
      assertTrue(db + " is not part of known test dbs which will be cleaned up after the test", allowedDBs.contains(db));
      statement.execute("CREATE DATABASE if not exists " + db);
    }
    statement.close();
    connection.close();
  }

  /**
   * Creates (dropping first if present) the given two-column tables in
   * {@code db} as {@code user}; if {@code dataFile} is non-null, loads it into
   * each table and asserts the table is non-empty afterwards.
   */
  protected void createTable(String user, String db, File dataFile, String...tables)
      throws Exception {
    Connection connection = context.createConnection(user);
    Statement statement = connection.createStatement();
    statement.execute("USE " + db);
    for(String table : tables) {
      statement.execute("DROP TABLE IF EXISTS " + table);
      statement.execute("create table " + table
          + " (under_col int comment 'the under column', value string)");
      if(dataFile != null) {
        statement.execute("load data local inpath '" + dataFile.getPath()
            + "' into table " + table);
        ResultSet res = statement.executeQuery("select * from " + table);
        Assert.assertTrue("Table should have data after load", res.next());
        res.close();
      }
    }
    statement.close();
    connection.close();
  }

  /**
   * Ensures {@code dir} exists as a directory (creating it if needed) and
   * returns it; fails the test if creation is impossible.
   */
  protected static File assertCreateDir(File dir) {
    if(!dir.isDirectory()) {
      Assert.assertTrue("Failed creating " + dir,
          dir.mkdirs());
    }
    return dir;
  }

  /**
   * One-time (per test class) environment bootstrap: temp directories, DFS,
   * initial policy file, optional in-process Sentry service, optional Hive
   * concurrency settings, HiveServer2, and the shared {@link Context}.
   */
  @BeforeClass
  public static void setupTestStaticConfiguration() throws Exception {
    LOGGER.info("AbstractTestWithStaticConfiguration setupTestStaticConfiguration");
    properties = Maps.newHashMap();
    // Only consult the system property when the subclass has not already
    // forced policyOnHdfs to true.
    if(!policyOnHdfs) {
      policyOnHdfs = new Boolean(System.getProperty("sentry.e2etest.policyonhdfs", "false"));
    }
    if (testServerType != null) {
      properties.put("sentry.e2etest.hiveServer2Type", testServerType);
    }
    baseDir = Files.createTempDir();
    LOGGER.info("BaseDir = " + baseDir);
    logDir = assertCreateDir(new File(baseDir, "log"));
    confDir = assertCreateDir(new File(baseDir, "etc"));
    dataDir = assertCreateDir(new File(baseDir, "data"));
    policyFileLocation = new File(confDir, HiveServerFactory.AUTHZ_PROVIDER_FILENAME);

    String dfsType = System.getProperty(DFSFactory.FS_TYPE);
    dfs = DFSFactory.create(dfsType, baseDir, testServerType);
    fileSystem = dfs.getFileSystem();

    // Seed the policy file with the admin mapping before anything reads it.
    PolicyFile policyFile = PolicyFile.setAdminOnServer1(ADMIN1)
        .setUserGroupMapping(StaticUserGroup.getStaticMapping());
    policyFile.write(policyFileLocation);

    // Resolve the URI HiveServer2 should load the policy file from: either a
    // location on (H)DFS or the local file just written.
    String policyURI;
    if (policyOnHdfs) {
      String dfsUri = FileSystem.getDefaultUri(fileSystem.getConf()).toString();
      LOGGER.error("dfsUri " + dfsUri);
      policyURI = dfsUri + System.getProperty("sentry.e2etest.hive.policy.location",
          "/user/hive/sentry");
      policyURI += "/" + HiveServerFactory.AUTHZ_PROVIDER_FILENAME;
    } else {
      policyURI = policyFileLocation.getPath();
    }

    // startSentry == true means an EXTERNAL Sentry service is already
    // running, so we skip the in-process setup.
    boolean startSentry = new Boolean(System.getProperty(EXTERNAL_SENTRY_SERVICE, "false"));
    if ("true".equalsIgnoreCase(System.getProperty(ENABLE_SENTRY_HA, "false"))) {
      enableSentryHA = true;
    }
    if (useSentryService && (!startSentry)) {
      setupSentryService();
    }

    if (enableHiveConcurrency) {
      properties.put(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "true");
      properties.put(HiveConf.ConfVars.HIVE_TXN_MANAGER.varname,
          "org.apache.hadoop.hive.ql.lockmgr.DummyTxnManager");
      properties.put(HiveConf.ConfVars.HIVE_LOCK_MANAGER.varname,
          "org.apache.hadoop.hive.ql.lockmgr.EmbeddedLockManager");
    }

    hiveServer = create(properties, baseDir, confDir, logDir, policyURI, fileSystem);
    hiveServer.start();
    createContext();

    // Create tmp as scratch dir if it doesn't exist
    Path tmpPath = new Path("/tmp");
    if (!fileSystem.exists(tmpPath)) {
      fileSystem.mkdirs(tmpPath, new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL));
    }
  }

  /**
   * Creates a HiveServer of the configured type. The type is resolved in
   * order from: the supplied properties, the corresponding system property,
   * then the default {@code InternalHiveServer2}. Also records the resolved
   * type in the static {@link #hiveServer2Type}.
   */
  public static HiveServer create(Map<String, String> properties,
      File baseDir, File confDir, File logDir, String policyFile,
      FileSystem fileSystem) throws Exception {
    String type = properties.get(HiveServerFactory.HIVESERVER2_TYPE);
    if(type == null) {
      type = System.getProperty(HiveServerFactory.HIVESERVER2_TYPE);
    }
    if(type == null) {
      type = HiveServerFactory.HiveServer2Type.InternalHiveServer2.name();
    }
    hiveServer2Type = HiveServerFactory.HiveServer2Type.valueOf(type.trim());
    return HiveServerFactory.create(hiveServer2Type, properties,
        baseDir, confDir, logDir, policyFile, fileSystem);
  }

  /**
   * Persists the policy file into the test context and propagates it: to HDFS
   * when {@code policyOnHdfs} is set, or — when the Sentry service is in use —
   * by translating its contents into GRANT statements via
   * {@link #grantPermissions(PolicyFile)}.
   */
  protected static void writePolicyFile(PolicyFile policyFile) throws Exception {
    policyFile.write(context.getPolicyFile());
    if(policyOnHdfs) {
      LOGGER.info("use policy file on HDFS");
      dfs.writePolicyFile(context.getPolicyFile());
    } else if(useSentryService) {
      LOGGER.info("use sentry service, granting permissions");
      grantPermissions(policyFile);
    }
  }

  /**
   * Mirrors a {@link PolicyFile} into the Sentry service as ADMIN1: drops all
   * existing roles (except admin_role), recreates the file's roles with their
   * privileges, then grants the roles to groups.
   */
  private static void grantPermissions(PolicyFile policyFile) throws Exception {
    Connection connection = context.createConnection(ADMIN1);
    Statement statement = context.createStatement(connection);

    // remove existing metadata, keeping the admin role intact
    ResultSet resultSet = statement.executeQuery("SHOW ROLES");
    while( resultSet.next()) {
      Statement statement1 = context.createStatement(connection);
      String roleName = resultSet.getString(1).trim();
      if(!roleName.equalsIgnoreCase("admin_role")) {
        LOGGER.info("Dropping role :" + roleName);
        statement1.execute("DROP ROLE " + roleName);
      }
    }

    // create roles and add privileges
    for (Map.Entry<String, Collection<String>> roleEntry : policyFile.getRolesToPermissions()
        .asMap().entrySet()) {
      String roleName = roleEntry.getKey();
      if(!roleEntry.getKey().equalsIgnoreCase("admin_role")){
        LOGGER.info("Creating role : " + roleName);
        statement.execute("CREATE ROLE " + roleName);
        for (String privilege : roleEntry.getValue()) {
          addPrivilege(roleEntry.getKey(), privilege, statement);
        }
      }
    }

    // grant roles to groups (a group entry may list several comma-separated roles)
    for (Map.Entry<String, Collection<String>> groupEntry : policyFile.getGroupsToRoles().asMap()
        .entrySet()) {
      for (String roleNames : groupEntry.getValue()) {
        for (String roleName : roleNames.split(",")) {
          String sql = "GRANT ROLE " + roleName
              + " TO GROUP " + groupEntry.getKey();
          LOGGER.info("Granting role to group: " + sql);
          statement.execute(sql);
        }
      }
    }
  }

  /**
   * Parses one policy-file privilege string (e.g.
   * {@code server=server1->db=db_1->table=tb_1->action=select}) and issues the
   * equivalent GRANT at the narrowest authorizable present (column > table >
   * db > uri > server), creating the database/table first where needed.
   *
   * @throws IOException on an unknown or unsupported authorizable in the string
   * @throws SQLException if a GRANT/DDL statement fails
   */
  private static void addPrivilege(String roleName, String privileges, Statement statement)
      throws IOException, SQLException{
    String serverName = null, dbName = null, tableName = null, uriPath = null,
        columnName = null;
    String action = "ALL";//AccessConstants.ALL;
    for (String privilege : ROLE_SPLITTER.split(privileges)) {
      for(String section : AUTHORIZABLE_SPLITTER.split(privilege)) {
        // action is not an authorizeable
        if(!section.toLowerCase().startsWith(PRIVILEGE_PREFIX)) {
          DBModelAuthorizable dbAuthorizable = DBModelAuthorizables.from(section);
          if(dbAuthorizable == null) {
            throw new IOException("Unknown Auth type " + section);
          }
          if (DBModelAuthorizable.AuthorizableType.Server.equals(dbAuthorizable.getAuthzType())) {
            serverName = dbAuthorizable.getName();
          } else if (DBModelAuthorizable.AuthorizableType.Db.equals(dbAuthorizable.getAuthzType())) {
            dbName = dbAuthorizable.getName();
          } else if (DBModelAuthorizable.AuthorizableType.Table.equals(dbAuthorizable.getAuthzType())) {
            tableName = dbAuthorizable.getName();
          } else if (DBModelAuthorizable.AuthorizableType.Column.equals(dbAuthorizable.getAuthzType())) {
            columnName = dbAuthorizable.getName();
          } else if (DBModelAuthorizable.AuthorizableType.URI.equals(dbAuthorizable.getAuthzType())) {
            uriPath = dbAuthorizable.getName();
          } else {
            throw new IOException("Unsupported auth type " + dbAuthorizable.getName()
                + " : " + dbAuthorizable.getTypeName());
          }
        } else {
          action = DBModelAction.valueOf(
              StringUtils.removePrefix(section, PRIVILEGE_PREFIX).toUpperCase())
              .toString();
        }
      }
    }

    LOGGER.info("addPrivilege");
    if (columnName != null) {
      statement.execute("CREATE DATABASE IF NOT EXISTS " + dbName);
      statement.execute("USE " + dbName);
      statement.execute("CREATE TABLE IF NOT EXISTS " + tableName + " ( " + columnName + " string) ");
      String sql = "GRANT " + action + " ( " + columnName + " ) ON TABLE " + tableName + " TO ROLE " + roleName;
      LOGGER.info("Granting column level privilege: database = " + dbName + ", sql = " + sql);
      statement.execute(sql);
    } else if (tableName != null) {
      statement.execute("CREATE DATABASE IF NOT EXISTS " + dbName);
      statement.execute("USE " + dbName);
      statement.execute("CREATE TABLE IF NOT EXISTS " + tableName + " (c1 string) ");
      String sql = "GRANT " + action + " ON TABLE " + tableName + " TO ROLE " + roleName;
      LOGGER.info("Granting table level privilege: database = " + dbName + ", sql = " + sql);
      statement.execute(sql);
    } else if (dbName != null) {
      statement.execute("CREATE DATABASE IF NOT EXISTS " + dbName);
      String sql = "GRANT " + action + " ON DATABASE " + dbName + " TO ROLE " + roleName;
      LOGGER.info("Granting db level privilege: " + sql);
      statement.execute(sql);
    } else if (uriPath != null) {
      String sql = "GRANT " + action + " ON URI '" + uriPath + "' TO ROLE " + roleName;
      LOGGER.info("Granting uri level privilege: " + sql);
      statement.execute(sql);//ALL?
    } else if (serverName != null) {
      String sql = "GRANT ALL ON SERVER " + serverName + " TO ROLE " + roleName;
      LOGGER.info("Granting server level privilege: " + sql);
      statement.execute(sql);
    }
  }

  /**
   * Configures and starts an in-process Sentry service (embedded Derby store,
   * no security, local group mapping from the policy file), then back-fills
   * the client RPC address/port into both {@link #properties} and
   * {@link #sentryConf}. Two server instances are created when HA is enabled.
   */
  private static void setupSentryService() throws Exception {
    sentryConf = new Configuration(false);

    properties.put(HiveServerFactory.AUTHZ_PROVIDER_BACKEND,
        SimpleDBProviderBackend.class.getName());
    properties.put(ConfVars.HIVE_AUTHORIZATION_TASK_FACTORY.varname,
        SentryAuthorizationTaskFactoryImplV2.class.getName());
    properties
        .put(ConfVars.HIVE_SERVER2_THRIFT_MIN_WORKER_THREADS.varname, "2");
    properties.put(ServerConfig.SECURITY_MODE, ServerConfig.SECURITY_MODE_NONE);
    properties.put(ServerConfig.ADMIN_GROUPS, ADMINGROUP);
    properties.put(ServerConfig.RPC_ADDRESS, SERVER_HOST);
    // Port 0 lets the server pick a free port; the actual port is read back below.
    properties.put(ServerConfig.RPC_PORT, String.valueOf(0));
    properties.put(ServerConfig.SENTRY_VERIFY_SCHEM_VERSION, "false");

    properties.put(ServerConfig.SENTRY_STORE_JDBC_URL,
        "jdbc:derby:;databaseName=" + baseDir.getPath()
            + "/sentrystore_db;create=true");
    properties.put(ServerConfig.SENTRY_STORE_JDBC_PASS, "dummy");
    properties.put(ServerConfig.SENTRY_STORE_GROUP_MAPPING,
        ServerConfig.SENTRY_STORE_LOCAL_GROUP_MAPPING);
    properties.put(ServerConfig.SENTRY_STORE_GROUP_MAPPING_RESOURCE,
        policyFileLocation.getPath());
    properties.put(ServerConfig.RPC_MIN_THREADS, "3");
    for (Map.Entry<String, String> entry : properties.entrySet()) {
      sentryConf.set(entry.getKey(), entry.getValue());
    }
    sentryServer = SentrySrvFactory.create(
        SentrySrvType.INTERNAL_SERVER, sentryConf, enableSentryHA ? 2 : 1);
    properties.put(ClientConfig.SERVER_RPC_ADDRESS, sentryServer.get(0)
        .getAddress()
        .getHostName());
    sentryConf.set(ClientConfig.SERVER_RPC_ADDRESS, sentryServer.get(0)
        .getAddress()
        .getHostName());
    properties.put(ClientConfig.SERVER_RPC_PORT,
        String.valueOf(sentryServer.get(0).getAddress().getPort()));
    sentryConf.set(ClientConfig.SERVER_RPC_PORT,
        String.valueOf(sentryServer.get(0).getAddress().getPort()));
    if (enableSentryHA) {
      properties.put(ClientConfig.SERVER_HA_ENABLED, "true");
      properties.put(ClientConfig.SENTRY_HA_ZOOKEEPER_QUORUM,
          sentryServer.getZKQuorum());
    }
    startSentryService();
    if (setMetastoreListener) {
      LOGGER.info("setMetastoreListener is enabled");
      properties.put(HiveConf.ConfVars.METASTORE_EVENT_LISTENERS.varname,
          SentryMetastorePostEventListener.class.getName());
    }
  }

  /** Starts all configured Sentry server instances. */
  private static void startSentryService() throws Exception {
    sentryServer.startAll();
  }

  /**
   * Returns a policy-service client for the first Sentry server instance.
   *
   * @throws IllegalAccessException if the Sentry service was never set up
   */
  public static SentryPolicyServiceClient getSentryClient() throws Exception {
    if (sentryServer == null) {
      throw new IllegalAccessException("Sentry service not initialized");
    }
    return SentryServiceClientFactory.create(sentryServer.get(0).getConf());
  }

  /**
   * Per-test setup: ensures the DFS base dir exists and, when
   * {@code clearDbPerTest} is on, wipes databases and roles before the test.
   */
  @Before
  public void setup() throws Exception{
    LOGGER.info("AbstractTestStaticConfiguration setup");
    dfs.createBaseDir();
    if (clearDbPerTest) {
      LOGGER.info("Before per test run clean up");
      clearAll(true);
    }
  }

  /**
   * Per-test teardown: when {@code clearDbPerTest} is on, wipes databases and
   * roles again so state does not leak into the next test.
   */
  @After
  public void clearAfterPerTest() throws Exception {
    LOGGER.info("AbstractTestStaticConfiguration clearAfterPerTest");
    if (clearDbPerTest) {
      LOGGER.info("After per test run clean up");
      clearAll(true);
    }
  }

  /**
   * Removes test state as ADMIN1: optionally drops every non-default database
   * and all tables in {@code default}, and — when the Sentry service is in
   * use — drops every role whose name does not contain "admin".
   *
   * @param clearDb whether databases/tables are dropped in addition to roles
   */
  protected static void clearAll(boolean clearDb) throws Exception {
    LOGGER.info("About to run clearAll");
    ResultSet resultSet;
    Connection connection = context.createConnection(ADMIN1);
    Statement statement = context.createStatement(connection);
    if (clearDb) {
      LOGGER.info("About to clear all databases and default database tables");
      resultSet = statement.executeQuery("SHOW DATABASES");
      // Collect names first: dropping while iterating the ResultSet would
      // reuse the same statement that produced it.
      ArrayList<String> dbs = new ArrayList<String>();
      while(resultSet.next()) {
        dbs.add(resultSet.getString(1));
      }
      for (String db : dbs) {
        if(!db.equalsIgnoreCase("default")) {
          statement.execute("DROP DATABASE if exists " + db + " CASCADE");
        }
      }
      statement.execute("USE default");
      resultSet = statement.executeQuery("SHOW tables");
      while (resultSet.next()) {
        Statement statement2 = context.createStatement(connection);
        statement2.execute("DROP table " + resultSet.getString(1));
        statement2.close();
      }
    }
    if(useSentryService) {
      LOGGER.info("About to clear all roles");
      resultSet = statement.executeQuery("SHOW roles");
      List<String> roles = new ArrayList<String>();
      while (resultSet.next()) {
        String roleName = resultSet.getString(1);
        if (!roleName.toLowerCase().contains("admin")) {
          roles.add(roleName);
        }
      }
      for (String role : roles) {
        statement.execute("DROP Role " + role);
      }
    }
    statement.close();
    connection.close();
  }

  /**
   * When the Sentry service is in use, creates {@code admin_role} (idempotent),
   * grants it ALL on the default server, and grants it to the admin group.
   */
  protected static void setupAdmin() throws Exception {
    if(useSentryService) {
      LOGGER.info("setupAdmin to create admin_role");
      Connection connection = context.createConnection(ADMIN1);
      Statement statement = connection.createStatement();
      try {
        statement.execute("CREATE ROLE admin_role");
      } catch ( Exception e) {
        // It is ok if admin_role already exists; the failure is deliberately ignored.
      }
      statement.execute("GRANT ALL ON SERVER "
          + HiveServerFactory.DEFAULT_AUTHZ_SERVER_NAME + " TO ROLE admin_role");
      statement.execute("GRANT ROLE admin_role TO GROUP " + ADMINGROUP);
      statement.close();
      connection.close();
    }
  }

  /**
   * Pre-creates and writes a policy file carrying the admin-group mapping and
   * the static user/group mapping; returns it so tests can extend it further.
   */
  protected PolicyFile setupPolicy() throws Exception {
    LOGGER.info("Pre create policy file with admin group mapping");
    PolicyFile policyFile = PolicyFile.setAdminOnServer1(ADMINGROUP);
    policyFile.setUserGroupMapping(StaticUserGroup.getStaticMapping());
    writePolicyFile(policyFile);
    return policyFile;
  }

  /**
   * One-time (per test class) teardown: shuts down HiveServer2 and Sentry,
   * deletes the temp base dir (unless KEEP_BASEDIR is set), tears down the
   * DFS (best-effort), and closes the shared context.
   */
  @AfterClass
  public static void tearDownTestStaticConfiguration() throws Exception {
    if(hiveServer != null) {
      hiveServer.shutdown();
      hiveServer = null;
    }
    if (sentryServer != null) {
      sentryServer.close();
      sentryServer = null;
    }
    if(baseDir != null) {
      if(System.getProperty(HiveServerFactory.KEEP_BASEDIR) == null) {
        FileUtils.deleteQuietly(baseDir);
      }
      baseDir = null;
    }
    if(dfs != null) {
      try {
        dfs.tearDown();
      } catch (Exception e) {
        LOGGER.info("Exception shutting down dfs", e);
      }
    }
    if (context != null) {
      context.close();
    }
  }

  /** Returns the shared Sentry mini-service (may be null if not in use). */
  public static SentrySrv getSentrySrv() {
    return sentryServer;
  }

  /**
   * A convenience method to validate that {@code expected} and
   * {@code returned} contain the same elements:
   * first checks that each expected item is in the returned list,
   * then checks that each returned item is in the expected list.
   */
  protected void validateReturnedResult(List<String> expected, List<String> returned) {
    for (String obj : expected) {
      assertTrue("expected " + obj + " not found in the returned list: "
          + returned.toString(), returned.contains(obj));
    }
    for (String obj : returned) {
      assertTrue("returned " + obj + " not found in the expected list: "
          + expected.toString(), expected.contains(obj));
    }
  }
}
/* * Copyright (c) 2015-2016 Jan Ivenz * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are * met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * * Neither the name of 'Jan Ivenz' nor the names of its contributors * may be used to endorse or promote products derived from this software * without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
 */
package de.perjin.shadow;

import com.jme3.asset.AssetManager;
import com.jme3.export.JmeExporter;
import com.jme3.export.JmeImporter;
import com.jme3.export.Savable;
import com.jme3.light.DirectionalLight;
import com.jme3.light.PointLight;
import com.jme3.light.SpotLight;
import com.jme3.material.Material;
import com.jme3.math.FastMath;
import com.jme3.math.Matrix4f;
import com.jme3.math.Vector2f;
import com.jme3.math.Vector3f;
import com.jme3.math.Vector4f;
import com.jme3.post.SceneProcessor;
import com.jme3.renderer.Camera;
import com.jme3.renderer.RenderManager;
import com.jme3.renderer.Renderer;
import com.jme3.renderer.ViewPort;
import com.jme3.renderer.queue.GeometryList;
import com.jme3.renderer.queue.OpaqueComparator;
import com.jme3.renderer.queue.RenderQueue;
import com.jme3.scene.Spatial;
import com.jme3.shader.VarType;
import com.jme3.shadow.CompareMode;
import com.jme3.shadow.EdgeFilteringMode;
import com.jme3.shadow.PssmShadowUtil;
import com.jme3.shadow.ShadowUtil;
import com.jme3.texture.FrameBuffer;
import com.jme3.texture.Image;
import com.jme3.texture.Texture;
import com.jme3.texture.Texture2D;
import com.jme3.texture.TextureArray;
import com.jme3.texture.image.ColorSpace;
import com.jme3.util.BufferUtils;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

/**
 * A combined shadow-map renderer for jMonkeyEngine that handles directional,
 * point and spot lights in a single {@link SceneProcessor}.
 *
 * <p>Shadow-map layout (established in {@code init()} and mirrored by the
 * shader parameters set in {@code setMaterialParameters()}):
 * <ul>
 *   <li>each directional light gets {@code shadowMapsPerDirectionalLight} (4)
 *       PSSM split maps,</li>
 *   <li>each point light gets 6 cube-face maps,</li>
 *   <li>each spot light gets 1 map.</li>
 * </ul>
 * All maps of one category share a {@link TextureArray}; the matching
 * light-view-projection matrices live in one flat
 * {@code lightViewProjectionsMatrices} array in the same order
 * (directional splits, then point faces, then spot).
 *
 * <p>NOTE(review): unlike jME's stock {@code AbstractShadowRenderer}, this
 * implementation renders all shadow maps in {@link #preFrame(float)} rather
 * than {@code postQueue()} — presumably so the maps are ready before the main
 * pass; confirm against the matching material definition/shader.
 *
 * @author Jan
 */
public class ShadowRenderer implements SceneProcessor, Savable {

    protected RenderManager renderManager;
    protected ViewPort viewPort;
    // One framebuffer per shadow map; each renders into one layer of the
    // corresponding TextureArray (see setupFrameBuffer()).
    protected FrameBuffer[] directionalFB;
    protected FrameBuffer[] pointFB;
    protected FrameBuffer[] spotFB;
    protected TextureArray directionalShadowTextures;
    protected TextureArray spotShadowTextures;
    protected TextureArray pointShadowTextures;
    protected Texture2D dummyTex;
    // Forced technique material for the depth-only pre-shadow pass.
    protected Material preshadowMat;
    // Flat array of light view-projection matrices, ordered:
    // [directional splits | point cube faces | spot]; uploaded to the shader
    // as "ShadowLightViewProjectionMatrix".
    protected Matrix4f[] lightViewProjectionsMatrices;
    protected AssetManager assetManager;
    protected boolean debug = false;
    // Stored as a 0.1-unit-scaled factor; see getEdgesThickness()/setEdgesThickness().
    protected float edgesThickness = 1.0f;
    protected EdgeFilteringMode edgeFilteringMode = EdgeFilteringMode.Bilinear;
    protected CompareMode shadowCompareMode = CompareMode.Hardware;
    // protected Picture[] dispPic;
    /**
     * list of materials for post shadow queue geometries
     */
    protected List<Material> matCache = new ArrayList<Material>();
    // Currently active (registered) shadow-casting lights, capped at the
    // *ShadowLights capacities passed to the constructor.
    protected List<DirectionalLight> directionalLights = new ArrayList<>(1);
    protected List<PointLight> pointLights = new ArrayList<>(1);
    protected List<SpotLight> spotLights = new ArrayList<>(1);
    protected GeometryList lightReceivers = new GeometryList(new OpaqueComparator());
    protected GeometryList shadowMapOccluders = new GeometryList(new OpaqueComparator());
    // private String[] lightViewStringCache;
    /**
     * fade shadows at distance
     */
    protected float zFarOverride = 0;
    // x = fade start distance, y = 1 / fade length (see setShadowZFadeLength()).
    protected Vector2f fadeInfo;
    protected float fadeLength;
    protected Camera frustumCam;
    // Single reusable cameras: the directional camera is re-aimed per light and
    // per split; the 6 point cameras are re-positioned per point light; the
    // spot camera is re-configured per spot light.
    private Camera directionalShadowCam;
    private Camera pointShadowCam[];
    private Camera spotShadowCam;
    // Per-directional-light PSSM split distances packed into a Vector4f for the shader.
    private Vector4f splits[];
    private Vector3f pointLightPosition[];
    // 5 entries: near, 3 interior split distances, far (for 4 splits).
    protected float[] splitsArray;
    // PSSM blend factor between logarithmic and uniform split schemes.
    protected float lambda = 0.65f;
    // Scratch frustum corner points reused every frame.
    protected Vector3f[] points = new Vector3f[8];
    // When true, shadow-map texel snapping is enabled in updateShadowCamera()
    // to avoid shimmering ("shadow swimming") as the camera moves.
    private final boolean stabilize = true;
    private int maxLightsPerPass;
    private int frameCounter = 0;
    // Capacities (maximum shadow-casting lights of each type), fixed at
    // construction time; they size the framebuffer/texture/matrix arrays.
    private int directionalShadowLights;
    private float directionalShadowMapSize;
    private float pointShadowMapSize;
    private int pointShadowLights;
    private float spotShadowMapSize;
    private int spotShadowLights;
    private ShadowLightFilter shadowLightFilter;
    private final int shadowMapsPerDirectionalLight = 4;

    /**
     * used for serialization
     */
    protected ShadowRenderer() {
    }

    /**
     * Creates the renderer and eagerly allocates all framebuffers, texture
     * arrays and matrices for the given capacities.
     *
     * @param shadowLightFilter         notified when lights are added/removed
     * @param assetManager              used to load the pre-shadow material
     * @param directionalShadowMapSize  resolution (px) of each directional split map
     * @param directionalShadowLights   max directional shadow lights (4 maps each)
     * @param pointShadowMapSize        resolution (px) of each point cube-face map
     * @param pointShadowLights         max point shadow lights (6 maps each)
     * @param spotShadowMapSize         resolution (px) of each spot map
     * @param spotShadowLights          max spot shadow lights (1 map each)
     */
    public ShadowRenderer(ShadowLightFilter shadowLightFilter, AssetManager assetManager, int directionalShadowMapSize, int directionalShadowLights,
            int pointShadowMapSize, int pointShadowLights, int spotShadowMapSize, int spotShadowLights) {
        this.shadowLightFilter = shadowLightFilter;
        this.assetManager = assetManager;
        this.directionalShadowMapSize = directionalShadowMapSize;
        this.directionalShadowLights = directionalShadowLights;
        this.pointShadowMapSize = pointShadowMapSize;
        this.pointShadowLights = pointShadowLights;
        this.spotShadowMapSize = spotShadowMapSize;
        this.spotShadowLights = spotShadowLights;
        init(assetManager);
    }

    // Allocates every GPU-side resource up front so no allocation happens per
    // frame: framebuffers, depth texture arrays, shadow cameras, matrices.
    private void init(AssetManager assetManager) {
        for (int i = 0; i < points.length; i++) {
            points[i] = new Vector3f();
        }
        directionalFB = new FrameBuffer[directionalShadowLights * shadowMapsPerDirectionalLight];
        pointFB = new FrameBuffer[pointShadowLights * 6];
        pointLightPosition = new Vector3f[pointShadowLights];
        for (int i = 0; i < pointShadowLights; i++) {
            pointLightPosition[i] = new Vector3f();
        }
        spotFB = new FrameBuffer[spotShadowLights];
        directionalShadowTextures = createTextureArray(directionalShadowLights * shadowMapsPerDirectionalLight, (int) directionalShadowMapSize);
        pointShadowTextures = createTextureArray(pointShadowLights * 6, (int) pointShadowMapSize);
        spotShadowTextures = createTextureArray(spotShadowLights, (int) spotShadowMapSize);
        // One matrix per shadow map, in the fixed layout documented on the field.
        lightViewProjectionsMatrices = new Matrix4f[directionalShadowLights * shadowMapsPerDirectionalLight + pointShadowLights * 6 + spotShadowLights];
        for (int i = 0; i < lightViewProjectionsMatrices.length; i++) {
            lightViewProjectionsMatrices[i] = new Matrix4f();
        }
        splits = new Vector4f[directionalShadowLights];
        splitsArray = new float[4 + 1];
        for (int i = 0; i < splits.length; i++) {
            splits[i] = new Vector4f();
        }
        directionalShadowCam = new Camera((int) directionalShadowMapSize, (int) directionalShadowMapSize);
        directionalShadowCam.setParallelProjection(true);
        pointShadowCam = new Camera[6];
        for (int i = 0; i < 6; i++) {
            pointShadowCam[i] = new Camera((int) pointShadowMapSize, (int) pointShadowMapSize);
        }
        spotShadowCam = new Camera((int) spotShadowMapSize, (int) spotShadowMapSize);

        //DO NOT COMMENT THIS (it prevents the OSX incomplete read buffer crash)
//    dummyTex = new Texture2D(shadowMapSize, shadowMapSize, Image.Format.RGBA8);

        preshadowMat = new Material(assetManager, "Common/MatDefs/Shadow/PreShadow.j3md");

        setupFrameBuffer(directionalFB, directionalShadowLights * shadowMapsPerDirectionalLight, (int) directionalShadowMapSize, directionalShadowTextures);
        setupFrameBuffer(pointFB, pointShadowLights * 6, (int) pointShadowMapSize, pointShadowTextures);
        setupFrameBuffer(spotFB, spotShadowLights, (int) spotShadowMapSize, spotShadowTextures);

        setShadowCompareMode(shadowCompareMode);
        setEdgeFilteringMode(edgeFilteringMode);
    }

    /**
     * Sets the filtering mode for shadow edges. See {@link EdgeFilteringMode} for more info.
     *
     * <p>Only takes effect immediately when hardware compare mode is active;
     * it adjusts the mag/min filters on all three shadow texture arrays.
     *
     * @param filterMode the desired filter mode (not null)
     */
    final public void setEdgeFilteringMode(EdgeFilteringMode filterMode) {
        if (filterMode == null) {
            throw new NullPointerException();
        }
        this.edgeFilteringMode = filterMode;
        if (shadowCompareMode == CompareMode.Hardware) {
            if (filterMode == EdgeFilteringMode.Bilinear || filterMode == EdgeFilteringMode.PCFPOISSON) {
                if (directionalShadowLights > 0) {
                    directionalShadowTextures.setMagFilter(Texture.MagFilter.Bilinear);
                    directionalShadowTextures.setMinFilter(Texture.MinFilter.BilinearNoMipMaps);
                }
                if (pointShadowLights > 0) {
                    pointShadowTextures.setMagFilter(Texture.MagFilter.Bilinear);
                    pointShadowTextures.setMinFilter(Texture.MinFilter.BilinearNoMipMaps);
                }
                if (spotShadowLights > 0) {
                    spotShadowTextures.setMagFilter(Texture.MagFilter.Bilinear);
                    spotShadowTextures.setMinFilter(Texture.MinFilter.BilinearNoMipMaps);
                }
            } else {
                if (directionalShadowLights > 0) {
                    directionalShadowTextures.setMagFilter(Texture.MagFilter.Nearest);
                    directionalShadowTextures.setMinFilter(Texture.MinFilter.NearestNoMipMaps);
                }
                if (pointShadowLights > 0) {
                    pointShadowTextures.setMagFilter(Texture.MagFilter.Nearest);
                    pointShadowTextures.setMinFilter(Texture.MinFilter.NearestNoMipMaps);
                }
                if (spotShadowLights > 0) {
                    spotShadowTextures.setMagFilter(Texture.MagFilter.Nearest);
                    spotShadowTextures.setMinFilter(Texture.MinFilter.NearestNoMipMaps);
                }
            }
        }
    }

    /**
     * returns the edge filtering mode
     *
     * @see EdgeFilteringMode
     * @return the current edge filtering mode
     */
    public EdgeFilteringMode getEdgeFilteringMode() {
        return edgeFilteringMode;
    }

    /**
     * Sets the shadow compare mode. See {@link CompareMode} for more info.
     *
     * <p>Hardware mode enables GPU depth comparison (LessOrEqual) on the
     * texture arrays and re-applies the filters implied by the current
     * {@link EdgeFilteringMode}; software mode turns comparison off and
     * forces nearest filtering.
     *
     * @param compareMode the desired compare mode (not null)
     */
    final public void setShadowCompareMode(CompareMode compareMode) {
        if (compareMode == null) {
            throw new IllegalArgumentException("Shadow compare mode cannot be null");
        }
        this.shadowCompareMode = compareMode;
        if (compareMode == CompareMode.Hardware) {
            if (directionalShadowLights > 0) {
                directionalShadowTextures.setShadowCompareMode(Texture.ShadowCompareMode.LessOrEqual);
            }
            if (pointShadowLights > 0) {
                pointShadowTextures.setShadowCompareMode(Texture.ShadowCompareMode.LessOrEqual);
            }
            if (spotShadowLights > 0) {
                spotShadowTextures.setShadowCompareMode(Texture.ShadowCompareMode.LessOrEqual);
            }
            if (edgeFilteringMode == EdgeFilteringMode.Bilinear) {
                if (directionalShadowLights > 0) {
                    directionalShadowTextures.setMagFilter(Texture.MagFilter.Bilinear);
                    directionalShadowTextures.setMinFilter(Texture.MinFilter.BilinearNoMipMaps);
                }
                if (pointShadowLights > 0) {
                    pointShadowTextures.setMagFilter(Texture.MagFilter.Bilinear);
                    pointShadowTextures.setMinFilter(Texture.MinFilter.BilinearNoMipMaps);
                }
                if (spotShadowLights > 0) {
                    spotShadowTextures.setMagFilter(Texture.MagFilter.Bilinear);
                    spotShadowTextures.setMinFilter(Texture.MinFilter.BilinearNoMipMaps);
                }
            } else {
                if (directionalShadowLights > 0) {
                    directionalShadowTextures.setMagFilter(Texture.MagFilter.Nearest);
                    directionalShadowTextures.setMinFilter(Texture.MinFilter.NearestNoMipMaps);
                }
                if (pointShadowLights > 0) {
                    pointShadowTextures.setMagFilter(Texture.MagFilter.Nearest);
                    pointShadowTextures.setMinFilter(Texture.MinFilter.NearestNoMipMaps);
                }
                if (spotShadowLights > 0) {
                    spotShadowTextures.setMagFilter(Texture.MagFilter.Nearest);
                    spotShadowTextures.setMinFilter(Texture.MinFilter.NearestNoMipMaps);
                }
            }
        } else {
            if (directionalShadowLights > 0) {
                directionalShadowTextures.setShadowCompareMode(Texture.ShadowCompareMode.Off);
                directionalShadowTextures.setMagFilter(Texture.MagFilter.Nearest);
                directionalShadowTextures.setMinFilter(Texture.MinFilter.NearestNoMipMaps);
            }
            if (pointShadowLights > 0) {
                pointShadowTextures.setShadowCompareMode(Texture.ShadowCompareMode.Off);
                pointShadowTextures.setMagFilter(Texture.MagFilter.Nearest);
                pointShadowTextures.setMinFilter(Texture.MinFilter.NearestNoMipMaps);
            }
            if (spotShadowLights > 0) {
                spotShadowTextures.setShadowCompareMode(Texture.ShadowCompareMode.Off);
                spotShadowTextures.setMagFilter(Texture.MagFilter.Nearest);
                spotShadowTextures.setMinFilter(Texture.MinFilter.NearestNoMipMaps);
            }
        }
    }

    /**
     * returns the shadow compare mode
     *
     * @see CompareMode
     * @return the shadowCompareMode
     */
    public CompareMode getShadowCompareMode() {
        return shadowCompareMode;
    }

    /**
     * Initialize this shadow renderer prior to its first update.
     *
     * @param rm the render manager
     * @param vp the viewport
     */
    @Override
    public void initialize(RenderManager rm, ViewPort vp) {
        renderManager = rm;
        viewPort = vp;
        // Cached so setMaterialParameters() can upload the "LIGHTS" define.
        maxLightsPerPass = rm.getSinglePassLightBatchSize();
//    if (zFarOverride > 0 && frustumCam == null) {
//      initFrustumCam();
//    }
    }

//  /**
//   * delegates the initialization of the frustum cam to child renderers
//   */
//  protected void initFrustumCam() {
//  }

    /**
     * Test whether this shadow renderer has been initialized.
     *
     * @return true if initialized, otherwise false
     */
    public boolean isInitialized() {
        return viewPort != null;
    }

    /**
     * Invoked once per frame (per directional light) to aim the shared
     * directional shadow camera along the light direction and recompute the
     * PSSM split distances for the current view frustum.
     *
     * @param viewCam     the scene cam
     * @param lightIndex  index into {@code directionalLights}
     * @param frustumNear clamped near plane of the scene camera
     * @param zFar        effective far distance (zFarOverride or camera far)
     */
    protected void updateDirectionalShadowCams(Camera viewCam, int lightIndex, float frustumNear, float zFar) {
        //shadowCam.setDirection(direction);
        directionalShadowCam.getRotation().lookAt(((DirectionalLight) directionalLights.get(lightIndex)).getDirection(), Vector3f.UNIT_Y);
        directionalShadowCam.update();
        directionalShadowCam.updateViewProjection();

        PssmShadowUtil.updateFrustumSplits(splitsArray, frustumNear, zFar, lambda);

        // in parallel projection shadow position goes from 0 to 1
        if (viewCam.isParallelProjection()) {
            for (int i = 0; i < 5; i++) {
                splitsArray[i] = splitsArray[i] / (zFar - frustumNear);
            }
        }

        // Deliberate fall-through: packs however many interior split distances
        // exist into the per-light Vector4f uploaded as "Splits".
        switch (splitsArray.length) {
            case 5:
                splits[lightIndex].w = splitsArray[4];
            case 4:
                splits[lightIndex].z = splitsArray[3];
            case 3:
                splits[lightIndex].y = splitsArray[2];
            case 2:
            case 1:
                splits[lightIndex].x = splitsArray[1];
                break;
        }
    }

    // Re-positions the six shared cube-face cameras onto the given point light
    // and sets a 90° perspective frustum reaching to the light radius.
    protected void updatePointShadowCams(Camera viewCam, int lightIndex) {
//    if (light == null) {
//      throw new IllegalStateException("The light can't be null for a " + this.getClass().getName());
//    }
        //bottom
        pointShadowCam[0].setAxes(Vector3f.UNIT_X.mult(-1f), Vector3f.UNIT_Z.mult(-1f), Vector3f.UNIT_Y.mult(-1f));
        //top
        pointShadowCam[1].setAxes(Vector3f.UNIT_X.mult(-1f), Vector3f.UNIT_Z, Vector3f.UNIT_Y);
        //forward
        pointShadowCam[2].setAxes(Vector3f.UNIT_X.mult(-1f), Vector3f.UNIT_Y, Vector3f.UNIT_Z.mult(-1f));
        //backward
        pointShadowCam[3].setAxes(Vector3f.UNIT_X, Vector3f.UNIT_Y, Vector3f.UNIT_Z);
        //left
        pointShadowCam[4].setAxes(Vector3f.UNIT_Z, Vector3f.UNIT_Y, Vector3f.UNIT_X.mult(-1f));
        //right
        pointShadowCam[5].setAxes(Vector3f.UNIT_Z.mult(-1f), Vector3f.UNIT_Y, Vector3f.UNIT_X);
        for (int i = 0; i < 6; i++) {
            pointShadowCam[i].setFrustumPerspective(90f, 1f, 0.1f, pointLights.get(lightIndex).getRadius());
            pointShadowCam[i].setLocation(pointLights.get(lightIndex).getPosition());
            pointShadowCam[i].update();
            pointShadowCam[i].updateViewProjection();
        }
    }

    // Re-configures the shared spot shadow camera to match the given spot
    // light's cone (outer angle doubled to full FOV) and range.
    protected void updateSpotShadowCams(Camera viewCam, int camIndex) {
        spotShadowCam.setFrustumPerspective(spotLights.get(camIndex).getSpotOuterAngle() * FastMath.RAD_TO_DEG * 2.0f, 1, 1f, spotLights.get(camIndex).getSpotRange());
        spotShadowCam.getRotation().lookAt(spotLights.get(camIndex).getDirection(), spotShadowCam.getUp());
        spotShadowCam.setLocation(spotLights.get(camIndex).getPosition());
        spotShadowCam.update();
        spotShadowCam.updateViewProjection();
    }

    /**
     * Returns a subclass-specific geometryList containing the occluders to be rendered in the shadow map
     *
     * @param shadowMapIndex the index of the shadow map being rendered
     * @param shadowMapOccluders reused output list (cleared first)
     * @return the (re-filled) occluder list
     */
//  protected GeometryList getOccludersToRender(int shadowMapIndex, GeometryList shadowMapOccluders, int shadocCameraIndex) {
//    return null;
//  }
    protected GeometryList getDirectionalOccludersToRender(int shadowMapIndex, GeometryList shadowMapOccluders, int shadowCameraIndex) {
        shadowMapOccluders.clear();
        // update frustum points based on current camera and split
        // (shadowMapIndex % 4 selects this light's split within splitsArray)
        ShadowUtil.updateFrustumPoints(viewPort.getCamera(), splitsArray[shadowMapIndex % 4], splitsArray[shadowMapIndex % 4 + 1], 1.0f, points);

        //Updating shadow cam with curent split frustra
        // Receivers are gathered lazily once per frame (list is cleared again
        // by getReceivers() after all maps are rendered).
        if (lightReceivers.size() == 0) {
            for (Spatial scene : viewPort.getScenes()) {
                ShadowUtil.getGeometriesInCamFrustum(scene, viewPort.getCamera(), RenderQueue.ShadowMode.Receive, lightReceivers);
            }
        }
        // Fits the directional camera around the split frustum; a non-zero map
        // size enables texel snapping (stabilization).
        ShadowUtil.updateShadowCamera(viewPort, lightReceivers, directionalShadowCam, points, shadowMapOccluders, stabilize ? directionalShadowMapSize : 0);
//    for (int i = 0; i < shadowMapOccluders.size(); i++)
//      System.out.println(shadowMapOccluders.get(i));
        return shadowMapOccluders;
    }

    // Collects all Cast-mode geometries inside the spot shadow camera frustum.
    protected GeometryList getSpotOccludersToRender(int shadowCameraIndex, GeometryList shadowMapOccluders) {
        shadowMapOccluders.clear();
        for (Spatial scene : viewPort.getScenes()) {
            // TODO: ShadowUtil.addGeometriesInCamFrustumFromNode() is probably not treating instanced meshes correctly.
            ShadowUtil.getGeometriesInCamFrustum(scene, spotShadowCam, RenderQueue.ShadowMode.Cast, shadowMapOccluders);
        }
        return shadowMapOccluders;
    }

    // Collects all Cast-mode geometries inside one point-light cube-face frustum.
    protected GeometryList getPointOccludersToRender(int shadowMapIndex, GeometryList shadowMapOccluders) {
        shadowMapOccluders.clear();
        for (Spatial scene : viewPort.getScenes()) {
            // TODO: ShadowUtil.addGeometriesInCamFrustumFromNode() is probably not treating instanced meshes correctly.
            ShadowUtil.getGeometriesInCamFrustum(scene, pointShadowCam[shadowMapIndex], RenderQueue.ShadowMode.Cast, shadowMapOccluders);
        }
        return shadowMapOccluders;
    }

    // Intentionally empty: all shadow-map rendering happens in preFrame().
    @SuppressWarnings("fallthrough")
    @Override
    public void postQueue(RenderQueue rq) {
    }

    // Renders one directional split map into its framebuffer and records the
    // camera's view-projection matrix at the same flat index.
    protected void renderDirectionalShadowMap(int shadowMapIndex, int camIndex) {
        shadowMapOccluders = getDirectionalOccludersToRender(shadowMapIndex, shadowMapOccluders, camIndex);

        //saving light view projection matrix for this split
        lightViewProjectionsMatrices[shadowMapIndex].set(directionalShadowCam.getViewProjectionMatrix());
        renderManager.setCamera(directionalShadowCam, false);

        renderManager.getRenderer().setFrameBuffer(directionalFB[shadowMapIndex]);
        renderManager.getRenderer().clearBuffers(true, true, true);

        // render shadow casters to shadow map
        viewPort.getQueue().renderShadowQueue(shadowMapOccluders, renderManager, directionalShadowCam, false);
    }

    // Renders the six cube-face maps of one point light.
    // NOTE(review): the matrix offset uses directionalLights.size() (the
    // *active* count) while the texture-array layout allocated in init() is
    // sized by directionalShadowLights (the capacity). Confirm the shader
    // indexes via "ActiveDirectionalShadows"; otherwise matrix and texture
    // indices diverge when fewer directional lights are active than allowed.
    protected void renderPointShadowMap(int lightIndex) {
        for (int i = 0; i < 6; i++) {
            shadowMapOccluders = getPointOccludersToRender(i, shadowMapOccluders);
            lightViewProjectionsMatrices[directionalLights.size() * shadowMapsPerDirectionalLight + lightIndex * 6 + i].set(pointShadowCam[i].getViewProjectionMatrix());
            renderManager.setCamera(pointShadowCam[i], false);

            renderManager.getRenderer().setFrameBuffer(pointFB[i + lightIndex * 6]);
            renderManager.getRenderer().clearBuffers(true, true, true);

            // render shadow casters to shadow map
            viewPort.getQueue().renderShadowQueue(shadowMapOccluders, renderManager, pointShadowCam[i], false);
        }
    }

    // Renders one spot light's shadow map.
    // NOTE(review): same active-count vs. capacity offset concern as in
    // renderPointShadowMap() — here for both directional and point segments.
    protected void renderSpotShadowMap(int lightIndex) {
        shadowMapOccluders = getSpotOccludersToRender(lightIndex, shadowMapOccluders);

        //saving light view projection matrix for this split
        lightViewProjectionsMatrices[directionalLights.size() * shadowMapsPerDirectionalLight + pointLights.size() * 6 + lightIndex].set(spotShadowCam.getViewProjectionMatrix());
        renderManager.setCamera(spotShadowCam, false);

        renderManager.getRenderer().setFrameBuffer(spotFB[lightIndex]);
        renderManager.getRenderer().clearBuffers(true, true, true);

        // render shadow casters to shadow map
        viewPort.getQueue().renderShadowQueue(shadowMapOccluders, renderManager, spotShadowCam, false);
    }

    // Fills lightReceivers with every Receive-mode geometry visible to the
    // scene camera; these are the materials that get the shadow parameters.
    protected void getReceivers(GeometryList lightReceivers) {
        lightReceivers.clear();
        for (Spatial scene : viewPort.getScenes()) {
            ShadowUtil.getGeometriesInCamFrustum(scene, viewPort.getCamera(), RenderQueue.ShadowMode.Receive, lightReceivers);
        }
    }

//  protected void getSpotReceivers(GeometryList lightReceivers) {
//    lightReceivers.clear();
//    for (Spatial scene : viewPort.getScenes()) {
//      ShadowUtil.getLitGeometriesInViewPort(scene, viewPort.getCamera(), spotShadowCam, RenderQueue.ShadowMode.Receive, lightReceivers);
//    }
//  }

    // No post-frame work: maps are rendered in preFrame(), parameters are set
    // on the receiver materials directly.
    public void postFrame(FrameBuffer out) {
    }

    // Removes every shadow parameter this renderer set, so that stacking or
    // swapping shadow renderers does not leave stale uniforms behind.
    private void clearMatParams() {
        for (Material mat : matCache) {
            mat.clearParam("TOTALSHADOWMAPS");
            mat.clearParam("LIGHTS");
            mat.clearParam("ShadowLightViewProjectionMatrix");
            mat.clearParam("DirectionalShadowMaps");
            mat.clearParam("DIRECTIONALSHADOWMAP_SIZE");
            mat.clearParam("MAXDIRECTIONALSHADOWLIGHTS");
            mat.clearParam("Splits");
            mat.clearParam("ActiveDirectionalShadows");
            mat.clearParam("PointShadowMaps");
            mat.clearParam("POINTSHADOWMAP_SIZE");
            mat.clearParam("MAXPOINTSHADOWLIGHTS");
            mat.clearParam("ActivePointShadows");
            mat.clearParam("PointLightPosition");
            mat.clearParam("SpotShadowMaps");
            mat.clearParam("SPOTSHADOWMAP_SIZE");
            mat.clearParam("MAXSPOTSHADOWLIGHTS");
            mat.clearParam("ActiveSpotShadows");
        }
    }

    /**
     * This method is called once per frame and is responsible for setting any material parameters that subclasses may
     * need to set on the post material.
     *
     * <p>Capacity-sized parameters (MAX*, *_SIZE, texture arrays) never change
     * after construction; the Active* counts and the matrix/split arrays are
     * refreshed every frame via the shared array references.
     *
     * @param material the material to use for the post shadow pass
     */
    protected void setMaterialParameters(Material material) {
        material.setInt("TOTALSHADOWMAPS", directionalShadowLights * shadowMapsPerDirectionalLight + pointShadowLights * 6 + spotShadowLights);
        material.setInt("LIGHTS", maxLightsPerPass);
        material.setParam("ShadowLightViewProjectionMatrix", VarType.Matrix4Array, lightViewProjectionsMatrices);
        if (directionalShadowLights > 0) {
            material.setParam("DirectionalShadowMaps", VarType.TextureArray, directionalShadowTextures);
            material.setFloat("DIRECTIONALSHADOWMAP_SIZE", directionalShadowMapSize);
            material.setInt("MAXDIRECTIONALSHADOWLIGHTS", directionalShadowLights);
            material.setParam("Splits", VarType.Vector4Array, splits);
            material.setInt("ActiveDirectionalShadows", directionalLights.size());
        }
        if (pointShadowLights > 0) {
            material.setParam("PointShadowMaps", VarType.TextureArray, pointShadowTextures);
            material.setFloat("POINTSHADOWMAP_SIZE", pointShadowMapSize);
            material.setInt("MAXPOINTSHADOWLIGHTS", pointShadowLights);
            material.setInt("ActivePointShadows", pointLights.size());
            material.setParam("PointLightPosition", VarType.Vector3Array, pointLightPosition);
        }
        if (spotShadowLights > 0) {
            material.setParam("SpotShadowMaps", VarType.TextureArray, spotShadowTextures);
            material.setFloat("SPOTSHADOWMAP_SIZE", spotShadowMapSize);
            material.setInt("MAXSPOTSHADOWLIGHTS", spotShadowLights);
            material.setInt("ActiveSpotShadows", spotLights.size());
        }
    }

//  protected void setMaterialParameters(Material material) {
//    material.setParam("ShadowLightViewProjectionMatrix", VarType.Matrix4Array, lightViewProjectionsMatrices);
//    if (directionalShadowLights > 0) {
//      material.setParam("Splits", VarType.Vector4Array, splits);
//      material.setInt("ActiveDirectionalShadows", directionalLights.size());
//    }
//    if (pointShadowLights > 0) {
//      material.setInt("ActivePointShadows", pointLights.size());
//      material.setParam("PointLightPosition", VarType.Vector3Array, pointLightPosition);
//    }
//    if (spotShadowLights > 0) {
//      material.setInt("ActiveSpotShadows", spotLights.size());
//    }
//  }
//
//  private void setMaterialParameterConstants(Material material) {
//    material.setInt("TOTALSHADOWMAPS", directionalShadowLights * shadowMapsPerDirectionalLight + pointShadowLights * 6 + spotShadowLights);
//    material.setInt("LIGHTS", maxLightsPerPass);
//    if (directionalShadowLights > 0) {
//      material.setParam("DirectionalShadowMaps", VarType.TextureArray, directionalShadowTextures);
//      material.setFloat("DIRECTIONALSHADOWMAP_SIZE", directionalShadowMapSize);
//      material.setInt("MAXDIRECTIONALSHADOWLIGHTS", directionalShadowLights);
//    }
//    if (pointShadowLights > 0) {
//      material.setParam("PointShadowMaps", VarType.TextureArray, pointShadowTextures);
//      material.setFloat("POINTSHADOWMAP_SIZE", pointShadowMapSize);
//      material.setInt("MAXPOINTSHADOWLIGHTS", pointShadowLights);
//    }
//    if (spotShadowLights > 0) {
//      material.setParam("SpotShadowMaps", VarType.TextureArray, spotShadowTextures);
//      material.setFloat("SPOTSHADOWMAP_SIZE", spotShadowMapSize);
//      material.setInt("MAXSPOTSHADOWLIGHTS", spotShadowLights);
//    }
//  }

    // Pushes the shadow parameters onto every distinct material in the given
    // receiver list (deduplicated through matCache, which clearMatParams()
    // later uses for cleanup).
    private void setMatParams(GeometryList l) {
        //iteration throught all the geometries of the list to gather the materials
        matCache.clear();
        for (int i = 0; i < l.size(); i++) {
            Material mat = l.get(i).getMaterial();
            //checking if the material has the post technique and adding it to the material cache
            if (!matCache.contains(mat)) {
                matCache.add(mat);
//        setMaterialParameterConstants(mat);
                setMaterialParameters(mat);
            }
        }

        //iterating through the mat cache and setting the parameters
//    for (Material mat : matCache) {
//      setMaterialParameters(mat);
//    }
    }

    /**
     * How far the shadows are rendered in the view
     *
     * @see #setShadowZExtend(float zFar)
     * @return shadowZExtend
     */
    public float getShadowZExtend() {
        return zFarOverride;
    }

    /**
     * Set the distance from the eye where the shadows will be rendered default value is dynamicaly computed to the shadow
     * casters/receivers union bound zFar, capped to view frustum far value.
     *
     * @param zFar the zFar values that override the computed one
     */
    public void setShadowZExtend(float zFar) {
        this.zFarOverride = zFar;
        if (zFarOverride == 0) {
            fadeInfo = null;
            frustumCam = null;
        } else if (fadeInfo != null) {
            // Keep the fade window anchored to the new far distance.
            fadeInfo.set(zFarOverride - fadeLength, 1f / fadeLength);
        }
//    if (frustumCam == null && viewPort != null) {
//      initFrustumCam();
//    }
    }

    /**
     * Define the length over which the shadow will fade out when using a shadowZextend This is useful to make dynamic
     * shadows fade into baked shadows in the distance.
     *
     * @param length the fade length in world units
     */
    public void setShadowZFadeLength(float length) {
        if (length == 0) {
            fadeInfo = null;
            fadeLength = 0;
        } else {
            if (zFarOverride == 0) {
                // No z-extend yet: remember the request with a placeholder;
                // setShadowZExtend() will fill in the real values.
                fadeInfo = new Vector2f(0, 0);
            } else {
                fadeInfo = new Vector2f(zFarOverride - length, 1.0f / length);
            }
            fadeLength = length;
        }
    }

    /**
     * get the length over which the shadow will fade out when using a shadowZextend
     *
     * @return the fade length in world units
     */
    public float getShadowZFadeLength() {
        if (fadeInfo != null) {
            return zFarOverride - fadeInfo.x;
        }
        return 0f;
    }

    /**
     * Called once per frame before the scene is queued: renders ALL shadow maps
     * (directional splits, point cube faces, spot), then pushes the shadow
     * parameters onto every visible receiver material, and finally restores the
     * render manager state (output framebuffer, camera, forced technique).
     *
     * @param tpf time per frame (unused)
     */
    public void preFrame(float tpf) {
        Camera cam = viewPort.getCamera();
        Renderer r = renderManager.getRenderer();
        //renderManager.setForcedMaterial(preshadowMat);
        renderManager.setForcedTechnique("PreShadow");
        float zFar = zFarOverride;
        if (zFar == 0) {
            zFar = cam.getFrustumFar();
        }

        //We prevent computing the frustum points and splits with zeroed or negative near clip value
        float frustumNear = Math.max(cam.getFrustumNear(), 0.001f);
        ShadowUtil.updateFrustumPoints(cam, frustumNear, zFar, 1.0f, points);
        for (int i = 0; i < directionalLights.size(); i++) {
            updateDirectionalShadowCams(cam, i, frustumNear, zFar);
            for (int shadowMapIndex = i * 4; shadowMapIndex < i * 4 + 4; shadowMapIndex++) {
                // Commented-out scheme would round-robin the farther splits
                // across frames (frameCounter) instead of redrawing all 4.
//        if (shadowMapIndex % 4 == 0 || (shadowMapIndex % 4 == 1 && frameCounter % 2 == 0) || (shadowMapIndex % 4 == 2 && frameCounter == 1) || (shadowMapIndex % 4 == 3 && frameCounter == 3)) {
                renderDirectionalShadowMap(shadowMapIndex, i);
//        }
            }
        }
        for (int i = 0; i < pointLights.size(); i++) {
            pointLightPosition[i].set(pointLights.get(i).getPosition());
            updatePointShadowCams(cam, i);
            renderPointShadowMap(i);
        }
        for (int i = 0; i < spotLights.size(); i++) {
            updateSpotShadowCams(cam, i);
            renderSpotShadowMap(i);
        }
        getReceivers(lightReceivers);
        setMatParams(lightReceivers);

        r.setFrameBuffer(viewPort.getOutputFrameBuffer());
        frameCounter = (frameCounter + 1) % 4;
        //resetting renderManager settings
        renderManager.setForcedTechnique(null);
//    renderManager.setForcedMaterial(null);
        renderManager.setCamera(cam, false);
    }

    @Override
    public void cleanup() {
        //clearing the params in case there are some other shadow renderers
        clearMatParams();
    }

    @Override
    public void reshape(ViewPort vp, int w, int h) {
    }

    /**
     * returns the edges thickness
     *
     * @see #setEdgesThickness(int edgesThickness)
     * @return edgesThickness
     */
    public int getEdgesThickness() {
        // Inverse of the 0.1 scaling applied in setEdgesThickness().
        return (int) (edgesThickness * 10);
    }

    /**
     * Sets the shadow edges thickness. default is 1, setting it to lower values can help to reduce the jagged effect of
     * the shadow edges
     *
     * @param edgesThickness thickness in the range [1, 10]; stored internally scaled by 0.1
     */
    public void setEdgesThickness(int edgesThickness) {
        this.edgesThickness = Math.max(1, Math.min(edgesThickness, 10));
        this.edgesThickness *= 0.1f;
    }

    /**
     * De-serialize this instance, for example when loading from a J3O file.
     *
     * <p>NOTE(review): currently a no-op — the capsule reads are commented out,
     * so a loaded instance is NOT re-initialized. Confirm whether Savable
     * support is actually required before relying on it.
     *
     * @param im importer (not null)
     */
    public void read(JmeImporter im) throws IOException {
//    InputCapsule ic = (InputCapsule) im.getCapsule(this);
//    assetManager = im.getAssetManager();
//    nbShadowMaps = ic.readInt("nbShadowMaps", 1);
//    shadowMapSize = ic.readFloat("shadowMapSize", 0f);
//    shadowIntensity = ic.readFloat("shadowIntensity", 0.7f);
//    edgeFilteringMode = ic.readEnum("edgeFilteringMode", EdgeFilteringMode.class, EdgeFilteringMode.Bilinear);
//    shadowCompareMode = ic.readEnum("shadowCompareMode", CompareMode.class, CompareMode.Hardware);
//    init(assetManager, nbShadowMaps, (int) shadowMapSize);
//    edgesThickness = ic.readFloat("edgesThickness", 1.0f);
    }

    /**
     * Serialize this instance, for example when saving to a J3O file.
     *
     * <p>NOTE(review): currently a no-op (writes commented out); see read().
     *
     * @param ex exporter (not null)
     */
    public void write(JmeExporter ex) throws IOException {
//    OutputCapsule oc = (OutputCapsule) ex.getCapsule(this);
//    oc.write(nbShadowMaps, "nbShadowMaps", 1);
//    oc.write(shadowMapSize, "shadowMapSize", 0);
//    oc.write(shadowIntensity, "shadowIntensity", 0.7f);
//    oc.write(edgeFilteringMode, "edgeFilteringMode", EdgeFilteringMode.Bilinear);
//    oc.write(shadowCompareMode, "shadowCompareMode", CompareMode.Hardware);
//    oc.write(edgesThickness, "edgesThickness", 1.0f);
    }

    // Builds a depth-only TextureArray with one Depth16 layer per shadow map
    // (2 bytes per texel), defaulting to bilinear filtering.
    // Returns null when no maps of this category are configured.
    private TextureArray createTextureArray(int shadowMaps, int shadowMapSize) {
        if (shadowMaps > 0) {
            List<Image> directionalShadowImages = new ArrayList<>();
            for (int i = 0; i < shadowMaps; i++) {
                directionalShadowImages.add(new Image(Image.Format.Depth16, shadowMapSize, shadowMapSize, BufferUtils.createByteBuffer(shadowMapSize * shadowMapSize * 2), ColorSpace.Linear));
            }
            TextureArray shadowTexture = new TextureArray(directionalShadowImages);
            shadowTexture.setMagFilter(Texture.MagFilter.Bilinear);
            shadowTexture.setMinFilter(Texture.MinFilter.BilinearNoMipMaps);
//      dispPic = new Picture[nbShadowMaps];
            return shadowTexture;
        }
        return null;
    }

    // Creates one framebuffer per shadow map, each writing depth into layer i
    // of the shared TextureArray.
    private void setupFrameBuffer(FrameBuffer[] frameBuffer, int shadowMaps, int shadowMapSize, TextureArray shadowTextures) {
        for (int i = 0; i < shadowMaps; i++) {
            frameBuffer[i] = new FrameBuffer(shadowMapSize, shadowMapSize, 1);
            frameBuffer[i].setDepthTexture(shadowTextures, i);

            //DO NOT COMMENT THIS (it prevents the OSX incomplete read buffer crash)
//      frameBuffer[i].setColorTexture(dummyTex);
        }
    }

    // Registers a directional shadow light; silently ignored once the
    // configured capacity is reached.
    public void addDirectionalLight(DirectionalLight dl) {
        if (directionalLights.size() < directionalShadowLights) {
            directionalLights.add(dl);
            shadowLightFilter.addDirectionalShadowLight(dl);
        }
    }

    // Registers a point shadow light; silently ignored at capacity.
    public void addPointLight(PointLight pl) {
        if (pointLights.size() < pointShadowLights) {
            pointLights.add(pl);
            shadowLightFilter.addPointShadowLight(pl);
        }
    }

    // Registers a spot shadow light; silently ignored at capacity.
    public void addSpotLight(SpotLight sl) {
        if (spotLights.size() < spotShadowLights) {
            spotLights.add(sl);
            shadowLightFilter.addSpotShadowLight(sl);
        }
    }

    public void removeDirectionalLight(DirectionalLight dl) {
        directionalLights.remove(dl);
        shadowLightFilter.removeDirectionalLight(dl);
    }

    public void removePointLight(PointLight pl) {
        pointLights.remove(pl);
        shadowLightFilter.removePointShadowLight(pl);
    }

    public void removeSpotLight(SpotLight sl) {
        spotLights.remove(sl);
        shadowLightFilter.removeSpotShadowLight(sl);
    }
}
package crazypants.enderio.machine.capbank.render;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import net.minecraft.client.renderer.Tessellator;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.util.IIcon;
import net.minecraft.world.World;
import net.minecraftforge.client.event.TextureStitchEvent;
import net.minecraftforge.common.MinecraftForge;
import net.minecraftforge.common.util.ForgeDirection;
import cpw.mods.fml.common.eventhandler.SubscribeEvent;
import crazypants.enderio.EnderIO;
import crazypants.enderio.machine.capbank.CapBankType;
import crazypants.enderio.machine.capbank.InfoDisplayType;
import crazypants.enderio.machine.capbank.TileCapBank;
import crazypants.enderio.machine.capbank.network.CapBankClientNetwork;
import crazypants.render.BoundingBox;
import crazypants.render.RenderUtil;
import crazypants.util.BlockCoord;
import crazypants.util.ForgeDirectionOffsets;
import crazypants.vecmath.Vector3d;
import crazypants.vecmath.Vector4f;
import crazypants.vecmath.Vertex;

/**
 * Renders the "level bar" info display on a capacitor bank face: a gauge
 * frame plus a fill bar whose height reflects the energy stored in the
 * bank's client network. Vertically adjacent banks showing a level bar on
 * the same face are merged into one tall gauge (TOP/MIDDLE/BOTTOM segments).
 *
 * Fix over previous revision: removed dead code — the private constants
 * HEIGHT, VERT_BORDER and WIDTH and the local heightScale were never read.
 */
public class FillGauge implements IInfoRenderer {

  /** Which segment of a (possibly multi-block-tall) gauge a face displays. */
  enum Type {
    SINGLE,
    TOP,
    BOTTOM,
    MIDDLE
  }

  private IIcon barIcon;
  private IIcon gaugeIcon;

  // V texture coordinates of the fill-bar icon, cached so partially filled
  // bars can crop their texture at the fill line.
  private float barMinV;
  private float barHeightV;

  // Pre-built vertex lists keyed by (face, segment type). Rebuilt on every
  // texture stitch because the icons — and therefore the UVs — change.
  private Map<GaugeKey, List<Vertex>> gaugeVertexCache;
  private Map<GaugeKey, List<Vertex>> levelVertexCache;

  FillGauge() {
    // Subscribe so onTextureRestitch() rebuilds the vertex caches whenever
    // block icons are (re)stitched.
    MinecraftForge.EVENT_BUS.register(this);
  }

  /**
   * Renders the gauge for one cap-bank face.
   *
   * @param cb          the tile whose face is being rendered
   * @param dir         the face being rendered (expected horizontal; the
   *                    vertex caches only hold keys for dir.offsetY == 0 —
   *                    NOTE(review): confirm callers never pass UP/DOWN)
   * @param partialTick unused; brightness is sampled per frame instead
   */
  @Override
  public void render(TileCapBank cb, ForgeDirection dir, double x, double y, double z, float partialTick) {
    CapBankClientNetwork nw = null;
    if(cb.getNetwork() != null) {
      nw = (CapBankClientNetwork) cb.getNetwork();
      // Ask the server for fresh power data (throttled to every 20 ticks).
      nw.requestPowerUpdate(cb, 20);
    }
    int brightness = cb.getWorldObj().getLightBrightnessForSkyBlocks(cb.xCoord + dir.offsetX, cb.yCoord + dir.offsetY,
        cb.zCoord + dir.offsetZ, 0);
    GaugeInfo info = getGaugeInfo(cb, dir);
    GaugeKey key = new GaugeKey(dir, info.type);
    doRender(nw, brightness, info, key);
  }

  /**
   * Draws the cached gauge frame and then the fill bar in a single
   * tessellator pass.
   *
   * @param nw         client network supplying the stored-energy ratio; may
   *                   be null (renders an empty gauge)
   * @param brightness packed sky/block light value for the face
   */
  public void doRender(CapBankClientNetwork nw, int brightness, GaugeInfo info, GaugeKey key) {
    if(gaugeVertexCache == null) {
      // Lazily built: icons may not exist before the first stitch event.
      createVertexCache();
    }
    RenderUtil.bindBlockTexture();
    Tessellator tes = Tessellator.instance;
    tes.startDrawingQuads();
    tes.setBrightness(brightness);
    tes.setColorOpaque_F(1, 1, 1);

    List<Vertex> verts = gaugeVertexCache.get(key);
    RenderUtil.addVerticesToTessellator(verts, tes);
    renderFillBar(key, nw, info);
    tes.draw();
  }

  /**
   * Draws the fill bar for this gauge segment, cropping it at the current
   * energy level when the segment is only partially filled.
   */
  private void renderFillBar(GaugeKey key, CapBankClientNetwork nw, GaugeInfo info) {
    double ratio = 0;
    if(nw != null) {
      ratio = nw.getEnergyStoredRatio();
    }
    if(ratio <= 0) {
      // No stored power (or no network): draw nothing.
      return;
    }

    // Fill height in blocks, measured from the bottom of the whole gauge.
    double maxY = ratio * info.height;
    if(maxY <= info.yPosition) {
      // The fill level lies entirely below this segment.
      return;
    }

    // Nudge the bar slightly off the face to avoid z-fighting with the frame.
    Vector3d offset = ForgeDirectionOffsets.offsetScaled(key.dir, 0.005);
    Tessellator tes = Tessellator.instance;
    tes.addTranslation((float) offset.x, (float) offset.y, (float) offset.z);

    List<Vertex> verts = levelVertexCache.get(key);
    if(maxY >= info.yPosition + 1) {
      // This segment is completely filled.
      RenderUtil.addVerticesToTessellator(verts, tes);
    } else {
      // Partial fill: clamp vertex heights (and V coords) at the fill line.
      double myMaxY = maxY - info.yPosition;
      if(info.type == Type.BOTTOM || info.type == Type.SINGLE) {
        // If we have some power and we are the bottom bit of the display,
        // always show at least a little bit in the bar.
        myMaxY = Math.max(0.2, myMaxY);
      }
      List<Vertex> newVerts = new ArrayList<Vertex>();
      for (Vertex v : verts) {
        v = new Vertex(v); // copy: cached vertices must not be mutated
        newVerts.add(v);
        if(v.y() > myMaxY) {
          v.setXYZ(v.x(), myMaxY, v.z());
          v.setUV(v.u(), barMinV + (float) (myMaxY * barHeightV));
        }
      }
      RenderUtil.addVerticesToTessellator(newVerts, tes);
    }

    // Undo the translation for whatever is rendered next.
    offset.scale(-1);
    tes.addTranslation((float) offset.x, (float) offset.y, (float) offset.z);
  }

  /**
   * Determines how tall the merged gauge is and where this block sits in it
   * by walking up and down the column of same-type banks that also show a
   * level bar on this face.
   */
  private GaugeInfo getGaugeInfo(TileCapBank cb, ForgeDirection dir) {
    if (!cb.getType().isMultiblock()) {
      // Single-block banks always render a one-block gauge.
      return new GaugeInfo(1, 0);
    }
    int height = 1;
    int yPos = 0;

    // Count matching banks above us.
    BlockCoord loc = cb.getLocation();
    boolean found = true;
    while (found) {
      loc = loc.getLocation(ForgeDirection.UP);
      if(isGaugeType(cb.getWorldObj(), loc, dir, cb.getType())) {
        height++;
      } else {
        found = false;
      }
    }

    // Count matching banks below us; each also pushes our position up.
    loc = cb.getLocation();
    found = true;
    while (found) {
      loc = loc.getLocation(ForgeDirection.DOWN);
      if(isGaugeType(cb.getWorldObj(), loc, dir, cb.getType())) {
        height++;
        yPos++;
      } else {
        found = false;
      }
    }
    return new GaugeInfo(height, yPos);
  }

  /**
   * True when the block at bc is a cap bank of the same type that also shows
   * a level bar on the given face (i.e. it should merge into our gauge).
   */
  private boolean isGaugeType(World worldObj, BlockCoord bc, ForgeDirection face, CapBankType type) {
    TileEntity te = worldObj.getTileEntity(bc.x, bc.y, bc.z);
    if(te instanceof TileCapBank) {
      TileCapBank cb = (TileCapBank) te;
      return type == cb.getType() && cb.getDisplayType(face) == InfoDisplayType.LEVEL_BAR;
    }
    return false;
  }

  /** Rebuilds the vertex caches after a texture stitch invalidates the UVs. */
  @SubscribeEvent
  public void onTextureRestitch(TextureStitchEvent.Post event) {
    createVertexCache();
  }

  /**
   * (Re)builds the cached gauge/level vertex lists for every horizontal face
   * and every segment type, using the freshly stitched icons.
   */
  private void createVertexCache() {
    barIcon = EnderIO.blockCapBank.getFillBarIcon();
    barMinV = barIcon.getMinV();
    barHeightV = barIcon.getMaxV() - barIcon.getMinV();

    gaugeIcon = EnderIO.blockCapBank.getGaugeIcon();

    gaugeVertexCache = new HashMap<FillGauge.GaugeKey, List<Vertex>>();
    levelVertexCache = new HashMap<FillGauge.GaugeKey, List<Vertex>>();
    for (ForgeDirection dir : ForgeDirection.VALID_DIRECTIONS) {
      if(dir.offsetY == 0) { // gauges only appear on horizontal faces
        for (Type type : Type.values()) {
          GaugeKey key = new GaugeKey(dir, type);
          gaugeVertexCache.put(key, createGaugeBoundForFace(key, gaugeIcon));
          levelVertexCache.put(key, createGaugeBoundForFace(key, barIcon));
        }
      }
    }
  }

  /**
   * Builds the quad vertices for one face: the unit cube is shrunk to a
   * quarter-width strip (in the axis parallel to the face) and UV-mapped to
   * the segment's slice of the icon.
   */
  protected List<Vertex> createGaugeBoundForFace(GaugeKey key, IIcon icon) {
    ForgeDirection dir = key.dir;
    double widthScale = 0.25;
    // Scale only the axes lying in the face's plane; depth stays at 1.
    double xScale = dir.offsetX == 0 ? widthScale : 1;
    double yScale = 1;
    double zScale = dir.offsetZ == 0 ? widthScale : 1;

    // Translate so the scale happens about the face, then translate back.
    BoundingBox bb = BoundingBox.UNIT_CUBE;
    Vector3d off = ForgeDirectionOffsets.forDirCopy(dir);
    off.scale(-1);
    bb = bb.translate(off);
    bb = bb.scale(xScale, yScale, zScale);
    off.scale(-1);
    bb = bb.translate(off);

    Vector4f uv = getUvForType(key.type, icon);
    List<Vertex> result = bb.getCornersWithUvForFace(dir, uv.x, uv.y, uv.z, uv.w);
    return result;
  }

  /**
   * Picks the segment's quarter of the icon along U (the icon packs the four
   * Type variants side by side, indexed by ordinal); V spans the full icon.
   *
   * @return (minU, maxU, minV, maxV) packed as (x, y, z, w)
   */
  private Vector4f getUvForType(Type type, IIcon icon) {
    double uWidth = (icon.getMaxU() - icon.getMinU()) / 4;
    Vector4f res = new Vector4f();
    res.x = (float) (icon.getMinU() + type.ordinal() * uWidth);
    res.y = (float) (res.x + uWidth);
    res.z = icon.getMinV();
    res.w = icon.getMaxV();
    return res;
  }

  /** Height of the merged gauge and this block's position within it. */
  static class GaugeInfo {
    int height;     // total gauge height in blocks
    int yPosition;  // this block's offset from the gauge's bottom
    Type type;      // derived segment type

    GaugeInfo(int height, int position) {
      this.height = height;
      yPosition = position;
      type = calcType();
    }

    /** Maps (height, yPosition) to the segment this block renders. */
    Type calcType() {
      if(height == 1) {
        return Type.SINGLE;
      }
      if(yPosition == 0) {
        return Type.BOTTOM;
      }
      if(yPosition == height - 1) {
        return Type.TOP;
      }
      return Type.MIDDLE;
    }
  }

  /** Cache key: the face being rendered plus the segment type shown on it. */
  static class GaugeKey {
    ForgeDirection dir;
    Type type;

    GaugeKey(ForgeDirection dir, Type type) {
      this.dir = dir;
      this.type = type;
    }

    @Override
    public int hashCode() {
      final int prime = 31;
      int result = 1;
      result = prime * result + ((dir == null) ? 0 : dir.hashCode());
      result = prime * result + ((type == null) ? 0 : type.hashCode());
      return result;
    }

    @Override
    public boolean equals(Object obj) {
      if(this == obj) {
        return true;
      }
      if(obj == null) {
        return false;
      }
      if(getClass() != obj.getClass()) {
        return false;
      }
      GaugeKey other = (GaugeKey) obj;
      if(dir != other.dir) {
        return false;
      }
      if(type != other.type) {
        return false;
      }
      return true;
    }
  }
}
/*
 * Copyright (C) 2013 The Guava Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.caliper.runner;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Splitter;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.google.common.reflect.ClassPath;

import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.Map;
import java.util.Set;
import java.util.jar.Attributes;
import java.util.jar.JarFile;
import java.util.jar.Manifest;
import java.util.logging.Logger;

import javax.annotation.Nullable;

/**
 * Scans the source of a {@link ClassLoader} and finds all jar files. This is a modified version
 * of {@link ClassPath} that finds jars instead of resources.
 *
 * <p>Fix over previous revision: {@code File.listFiles()} returns {@code null} on I/O error or
 * when the path is not a readable directory; {@link Scanner#scanDirectory} now guards against
 * that instead of throwing a {@code NullPointerException}.
 */
final class JarFinder {
  private static final Logger logger = Logger.getLogger(JarFinder.class.getName());

  /** Separator for the Class-Path manifest attribute value in jar files. */
  private static final Splitter CLASS_PATH_ATTRIBUTE_SEPARATOR =
      Splitter.on(' ').omitEmptyStrings();

  /**
   * Returns a list of jar files reachable from the given class loaders.
   *
   * <p>Currently only {@link URLClassLoader} and only {@code file://} urls are supported.
   *
   * @throws IOException if the attempt to read class path resources (jar files or directories)
   *         failed.
   */
  public static ImmutableSet<File> findJarFiles(ClassLoader first, ClassLoader... rest)
      throws IOException {
    Scanner scanner = new Scanner();
    // Linked map: preserve parent-first class-loader order while de-duplicating URIs.
    Map<URI, ClassLoader> map = Maps.newLinkedHashMap();
    for (ClassLoader classLoader : Lists.asList(first, rest)) {
      map.putAll(getClassPathEntries(classLoader));
    }
    for (Map.Entry<URI, ClassLoader> entry : map.entrySet()) {
      scanner.scan(entry.getKey(), entry.getValue());
    }
    return scanner.jarFiles();
  }

  /**
   * Collects the URL class-path entries of {@code classloader} and all of its ancestors,
   * parent entries first. Non-{@link URLClassLoader} loaders contribute nothing themselves.
   */
  @VisibleForTesting static ImmutableMap<URI, ClassLoader> getClassPathEntries(
      ClassLoader classloader) {
    Map<URI, ClassLoader> entries = Maps.newLinkedHashMap();
    // Search parent first, since it's the order ClassLoader#loadClass() uses.
    ClassLoader parent = classloader.getParent();
    if (parent != null) {
      entries.putAll(getClassPathEntries(parent));
    }
    if (classloader instanceof URLClassLoader) {
      URLClassLoader urlClassLoader = (URLClassLoader) classloader;
      for (URL entry : urlClassLoader.getURLs()) {
        URI uri;
        try {
          uri = entry.toURI();
        } catch (URISyntaxException e) {
          // A classpath URL that is not a valid URI indicates a broken environment.
          throw new IllegalArgumentException(e);
        }
        if (!entries.containsKey(uri)) {
          // First loader to claim a URI wins (matches loadClass() delegation).
          entries.put(uri, classloader);
        }
      }
    }
    return ImmutableMap.copyOf(entries);
  }

  /**
   * Walks class-path entries, accumulating jar files. Also follows {@code Class-Path}
   * manifest attributes of jars it finds, so transitively referenced jars are included.
   */
  @VisibleForTesting static final class Scanner {
    private final ImmutableSet.Builder<File> jarFiles = new ImmutableSet.Builder<File>();
    // Guards against cycles between jars referencing each other via Class-Path.
    private final Set<URI> scannedUris = Sets.newHashSet();

    /** Returns every jar file discovered so far. */
    ImmutableSet<File> jarFiles() {
      return jarFiles.build();
    }

    /** Scans a single class-path entry; only {@code file:} URIs are supported. */
    void scan(URI uri, ClassLoader classloader) throws IOException {
      if (uri.getScheme().equals("file") && scannedUris.add(uri)) {
        scanFrom(new File(uri), classloader);
      }
    }

    /** Dispatches to directory or jar scanning; silently skips missing paths. */
    @VisibleForTesting void scanFrom(File file, ClassLoader classloader)
        throws IOException {
      if (!file.exists()) {
        return;
      }
      if (file.isDirectory()) {
        scanDirectory(file, classloader);
      } else {
        scanJar(file, classloader);
      }
    }

    private void scanDirectory(File directory, ClassLoader classloader) {
      scanDirectory(directory, classloader, "");
    }

    /** Recurses into subdirectories; regular files are currently ignored. */
    private void scanDirectory(
        File directory, ClassLoader classloader, String packagePrefix) {
      File[] files = directory.listFiles();
      if (files == null) {
        // listFiles() returns null on I/O error or if the path is not a
        // readable directory; previously this threw a NullPointerException.
        logger.warning("Cannot read directory " + directory);
        return;
      }
      for (File file : files) {
        String name = file.getName();
        if (file.isDirectory()) {
          scanDirectory(file, classloader, packagePrefix + name + "/");
        }
        // do we need to look for jars here?
      }
    }

    /**
     * Records {@code file} as a jar (if it actually is one) and scans any
     * {@code Class-Path} entries declared in its manifest.
     */
    private void scanJar(File file, ClassLoader classloader) throws IOException {
      JarFile jarFile;
      try {
        jarFile = new JarFile(file);
      } catch (IOException e) {
        // Not a jar file
        return;
      }
      jarFiles.add(file);
      try {
        for (URI uri : getClassPathFromManifest(file, jarFile.getManifest())) {
          scan(uri, classloader);
        }
      } finally {
        try {
          jarFile.close();
        } catch (IOException ignored) {} // best-effort close; nothing to recover
      }
    }

    /**
     * Returns the class path URIs specified by the {@code Class-Path} manifest attribute, according
     * to <a
     * href="http://docs.oracle.com/javase/6/docs/technotes/guides/jar/jar.html#Main%20Attributes">
     * JAR File Specification</a>. If {@code manifest} is null, it means the jar file has no
     * manifest, and an empty set will be returned.
     */
    @VisibleForTesting static ImmutableSet<URI> getClassPathFromManifest(
        File jarFile, @Nullable Manifest manifest) {
      if (manifest == null) {
        return ImmutableSet.of();
      }
      ImmutableSet.Builder<URI> builder = ImmutableSet.builder();
      String classpathAttribute = manifest.getMainAttributes()
          .getValue(Attributes.Name.CLASS_PATH.toString());
      if (classpathAttribute != null) {
        for (String path : CLASS_PATH_ATTRIBUTE_SEPARATOR.split(classpathAttribute)) {
          URI uri;
          try {
            uri = getClassPathEntry(jarFile, path);
          } catch (URISyntaxException e) {
            // Ignore bad entry
            logger.warning("Invalid Class-Path entry: " + path);
            continue;
          }
          builder.add(uri);
        }
      }
      return builder.build();
    }

    /**
     * Returns the absolute uri of the Class-Path entry value as specified in
     * <a
     * href="http://docs.oracle.com/javase/6/docs/technotes/guides/jar/jar.html#Main%20Attributes">
     * JAR File Specification</a>. Even though the specification only talks about relative urls,
     * absolute urls are actually supported too (for example, in Maven surefire plugin).
     */
    @VisibleForTesting static URI getClassPathEntry(File jarFile, String path)
        throws URISyntaxException {
      URI uri = new URI(path);
      return uri.isAbsolute()
          ? uri
          // Relative entries resolve against the referencing jar's directory.
          : new File(jarFile.getParentFile(), path.replace('/', File.separatorChar)).toURI();
    }
  }
}
/*****************************************************************************
*                                                                           *
*  OpenNI 1.x Alpha                                                         *
*  Copyright (C) 2012 PrimeSense Ltd.                                       *
*                                                                           *
*  This file is part of OpenNI.                                             *
*                                                                           *
*  Licensed under the Apache License, Version 2.0 (the "License");          *
*  you may not use this file except in compliance with the License.         *
*  You may obtain a copy of the License at                                  *
*                                                                           *
*      http://www.apache.org/licenses/LICENSE-2.0                           *
*                                                                           *
*  Unless required by applicable law or agreed to in writing, software      *
*  distributed under the License is distributed on an "AS IS" BASIS,        *
*  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. *
*  See the License for the specific language governing permissions and      *
*  limitations under the License.                                           *
*                                                                           *
*****************************************************************************/
package org.openni;

/**
 * Locates and continuously tracks a user's hands. <BR><BR>
 *
 * HandPoint generators generally make use of the data from a depth generator to
 * locate and track a user's hands. Exact requirements will vary depending on
 * middleware implementation.
 *
 * This generator implements the following events:
 * handCreateEvent  -- Triggered when the system finds a new hand in a scene
 * handUpdateEvent  -- Triggered whenever a previously located hand changes location
 * handDestroyEvent -- Triggered whenever a previously located hand is lost
 *
 * NOTE: method names StopTracking/StartTracking/SetSmoothing are PascalCase
 * (mirroring the native xn* API) rather than Java convention; they cannot be
 * renamed without breaking callers.
 */
public class HandsGenerator extends Generator
{
    /**
     * Constructor, creates a new hands generator from the given context and wires
     * up the three native hand events. Each anonymous Observable registers a
     * native callback by the reflective method name "callback" — the position of
     * that name in xnRegisterHandCallbacks' argument list (create/update/destroy
     * slot) selects which native event it receives.
     *
     * @param context    OpenNI context for this generator
     * @param nodeHandle Native pointer to this object
     * @param addRef     Indicates whether this object should be registered
     * @throws GeneralException If underlying native code returns errors, a General Exception is thrown by this function
     */
    HandsGenerator(Context context, long nodeHandle, boolean addRef) throws GeneralException
    {
        super(context, nodeHandle, addRef);

        // Create slot: fires when a new hand is detected.
        handCreateEvent = new Observable<ActiveHandEventArgs>()
        {
            @Override
            protected int registerNative(OutArg<Long> phCallback) throws StatusException
            {
                return NativeMethods.xnRegisterHandCallbacks(toNative(), this, "callback", null, null, phCallback);
            }

            @Override
            protected void unregisterNative(long hCallback)
            {
                NativeMethods.xnUnregisterHandCallbacks(toNative(), hCallback);
            }

            // Invoked reflectively from native code; "unused" warning is spurious.
            @SuppressWarnings("unused")
            public void callback(int id, Point3D point, float time)
            {
                notify(new ActiveHandEventArgs(id, point, time));
            }
        };

        // Update slot: fires when a tracked hand moves.
        handUpdateEvent = new Observable<ActiveHandEventArgs>()
        {
            @Override
            protected int registerNative(OutArg<Long> phCallback) throws StatusException
            {
                return NativeMethods.xnRegisterHandCallbacks(toNative(), this, null, "callback", null, phCallback);
            }

            @Override
            protected void unregisterNative(long hCallback)
            {
                NativeMethods.xnUnregisterHandCallbacks(toNative(), hCallback);
            }

            // Invoked reflectively from native code; "unused" warning is spurious.
            @SuppressWarnings("unused")
            public void callback(int id, Point3D point, float time)
            {
                notify(new ActiveHandEventArgs(id, point, time));
            }
        };

        // Destroy slot: fires when a tracked hand is lost (no position data).
        handDestroyEvent = new Observable<InactiveHandEventArgs>()
        {
            @Override
            protected int registerNative(OutArg<Long> phCallback) throws StatusException
            {
                return NativeMethods.xnRegisterHandCallbacks(toNative(), this, null, null, "callback", phCallback);
            }

            @Override
            protected void unregisterNative(long hCallback)
            {
                NativeMethods.xnUnregisterHandCallbacks(toNative(), hCallback);
            }

            // Invoked reflectively from native code; "unused" warning is spurious.
            @SuppressWarnings("unused")
            public void callback(int id, float time)
            {
                notify(new InactiveHandEventArgs(id, time));
            }
        };
    }

    /**
     * Creates a hands generator in a given context, filtered by a given query object, and stores any errors in a given
     * EnumerationErrors object. The intermediate native handle is released once the
     * wrapper object holds its own reference.
     *
     * @param context OpenNI Context for this generator
     * @param query   Query object to filter results; may be null (no filtering)
     * @param errors  Place to store any errors generated; may be null (errors discarded)
     * @return Resulting HandsGenerator object
     * @throws GeneralException If underlying native code returns errors, a General Exception is thrown by this function
     */
    public static HandsGenerator create(Context context, Query query, EnumerationErrors errors) throws GeneralException
    {
        OutArg<Long> handle = new OutArg<Long>();
        int status = NativeMethods.xnCreateHandsGenerator(context.toNative(), handle,
            query == null ? 0 : query.toNative(),
            errors == null ? 0 : errors.toNative());
        WrapperUtils.throwOnError(status);
        HandsGenerator result = (HandsGenerator)context.createProductionNodeObject(handle.value, NodeType.HANDS);
        NativeMethods.xnProductionNodeRelease(handle.value);
        return result;
    }

    /**
     * Creates a hands generator in a given context, filtered by a given query object, discarding error data.
     * @param context OpenNI Context for this generator
     * @param query Query object to filter results
     * @return Resulting HandsGenerator object
     * @throws GeneralException If underlying native code returns errors, a General Exception is thrown by this function
     */
    public static HandsGenerator create(Context context, Query query) throws GeneralException
    {
        return create(context, query, null);
    }

    /**
     * Creates a hands generator in a given context, with no query filtering and discarding error data.
     * @param context OpenNI context for this generator
     * @return Resulting HandsGenerator object
     * @throws GeneralException If underlying native code returns errors, a General Exception is thrown by this function
     */
    public static HandsGenerator create(Context context) throws GeneralException
    {
        return create(context, null, null);
    }

    /**
     * Stops tracking a specific handpoint, specified by ID number.
     * @param id ID of the handpoint to stop tracking
     * @throws StatusException The underlying native function (xnStopTracking)
     * can throw status codes
     */
    public void StopTracking(int id) throws StatusException
    {
        int status = NativeMethods.xnStopTracking(toNative(), id);
        WrapperUtils.throwOnError(status);
    }

    /**
     * Turns off tracking for all active handpoints.
     * @throws StatusException The underlying native function (xnStopTrackingAll)
     * can throw status codes
     */
    public void StopTrackingAll() throws StatusException
    {
        int status = NativeMethods.xnStopTrackingAll(toNative());
        WrapperUtils.throwOnError(status);
    }

    /**
     * Starts tracking the user's hand at the specified point in space. Normally, a focus gesture
     * is used to both indicate the user's intent to start tracking, and to help locate the user's hand
     * in the scene. This function can be called manually if the user's hand location is known, but
     * if the location is not known, a focus gesture will be needed for accurate tracking.
     * @param position Point in space where user's hand is located at start of tracking.
     * @throws StatusException The underlying native function (xnStartTracking) can throw status codes.
     */
    public void StartTracking(Point3D position) throws StatusException
    {
        int status = NativeMethods.xnStartTracking(toNative(), position.getX(), position.getY(), position.getZ());
        WrapperUtils.throwOnError(status);
    }

    /**
     * Sets the number for a smoothing factor. A simple smoothing algorithm is applied to average
     * handpoint locations over several frames. This produces a much less jittery hand point value, but
     * at the expense of apparent responsiveness. For most UI tasks, some smoothing will produce a better
     * user experience.
     * @param factor Smoothing factor number. Higher values produce more smoothing.
     * @throws StatusException The underlying native function being called
     * (xnSetTrackingSmoothing) can throw status codes
     */
    public void SetSmoothing(float factor) throws StatusException
    {
        int status = NativeMethods.xnSetTrackingSmoothing(toNative(), factor);
        WrapperUtils.throwOnError(status);
    }

    /**
     * Activates the HandTouchingFOVEdge capability for this node.
     * @return The resulting capability activated by this function
     * @throws StatusException Required, since node might not be compatible with this capability
     */
    public HandTouchingFOVEdgeCapability getHandTouchingFOVEdgeCapability() throws StatusException
    {
        return new HandTouchingFOVEdgeCapability(this);
    }

    // Events

    /** Provides access to the handCreateEvent (new hand detected).
     * @return the observable for hand-create notifications */
    public IObservable<ActiveHandEventArgs> getHandCreateEvent()
    {
        return handCreateEvent;
    }
    /**
     * Provides access to the handUpdateEvent (tracked hand moved).
     * @return the observable for hand-update notifications */
    public IObservable<ActiveHandEventArgs> getHandUpdateEvent()
    {
        return handUpdateEvent;
    }
    /**
     * Provides access to the handDestroyEvent (tracked hand lost).
     * @return the observable for hand-destroy notifications */
    public IObservable<InactiveHandEventArgs> getHandDestroyEvent()
    {
        return handDestroyEvent;
    }

    // Backing observables; initialized once in the constructor.
    private Observable<ActiveHandEventArgs> handCreateEvent;
    private Observable<ActiveHandEventArgs> handUpdateEvent;
    private Observable<InactiveHandEventArgs> handDestroyEvent;
}
/*
 * Waltz - Enterprise Architecture
 * Copyright (C) 2016, 2017, 2018, 2019 Waltz open source project
 * See README.md for more information
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific
 *
 */

package com.khartec.waltz.data.datatype_decorator;

import com.khartec.waltz.common.SetUtilities;
import com.khartec.waltz.model.EntityKind;
import com.khartec.waltz.model.EntityLifecycleStatus;
import com.khartec.waltz.model.EntityReference;
import com.khartec.waltz.model.authoritativesource.AuthoritativeRatingVantagePoint;
import com.khartec.waltz.model.datatype.DataTypeDecorator;
import com.khartec.waltz.model.datatype.DataTypeUsageCharacteristics;
import com.khartec.waltz.model.datatype.ImmutableDataTypeDecorator;
import com.khartec.waltz.model.datatype.ImmutableDataTypeUsageCharacteristics;
import com.khartec.waltz.model.rating.AuthoritativenessRatingValue;
import com.khartec.waltz.schema.tables.LogicalFlowDecorator;
import com.khartec.waltz.schema.tables.records.LogicalFlowDecoratorRecord;
import org.jooq.*;
import org.jooq.impl.DSL;
import org.jooq.lambda.function.Function2;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Repository;

import java.sql.Timestamp;
import java.util.Collection;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.function.Function;

import static com.khartec.waltz.common.Checks.checkNotNull;
import static com.khartec.waltz.data.logical_flow.LogicalFlowDao.LOGICAL_NOT_REMOVED;
import static com.khartec.waltz.model.EntityKind.DATA_TYPE;
import static com.khartec.waltz.model.EntityKind.LOGICAL_DATA_FLOW;
import static com.khartec.waltz.model.EntityReference.mkRef;
import static com.khartec.waltz.schema.Tables.PHYSICAL_FLOW;
import static com.khartec.waltz.schema.tables.Application.APPLICATION;
import static com.khartec.waltz.schema.tables.EntityHierarchy.ENTITY_HIERARCHY;
import static com.khartec.waltz.schema.tables.LogicalFlow.LOGICAL_FLOW;
import static com.khartec.waltz.schema.tables.LogicalFlowDecorator.LOGICAL_FLOW_DECORATOR;
import static com.khartec.waltz.schema.tables.PhysicalSpecDataType.PHYSICAL_SPEC_DATA_TYPE;
import static java.lang.String.format;
import static java.util.stream.Collectors.toList;

/**
 * DAO for data-type decorators attached to logical flows (rows in
 * LOGICAL_FLOW_DECORATOR where the decorator entity is a DATA_TYPE).
 * Handles lookup, bulk insert/update/delete, and recalculation of
 * authoritativeness ratings from authoritative-source vantage points.
 */
@Repository
public class LogicalFlowDecoratorDao extends DataTypeDecoratorDao {

    // Maps a LOGICAL_FLOW_DECORATOR row to the immutable domain object.
    private static final RecordMapper<Record, DataTypeDecorator> TO_DECORATOR_MAPPER = r -> {
        LogicalFlowDecoratorRecord record = r.into(LOGICAL_FLOW_DECORATOR);
        return ImmutableDataTypeDecorator.builder()
                .id(record.getId())
                .entityReference(mkRef(LOGICAL_DATA_FLOW, record.getLogicalFlowId()))
                .decoratorEntity(mkRef(
                        DATA_TYPE,
                        record.getDecoratorEntityId()))
                .rating(AuthoritativenessRatingValue.ofNullable(record.getRating()))
                .provenance(record.getProvenance())
                .lastUpdatedAt(record.getLastUpdatedAt().toLocalDateTime())
                .lastUpdatedBy(record.getLastUpdatedBy())
                .isReadonly(record.getIsReadonly())
                .authSourceId(Optional.ofNullable(record.getAuthSourceId()))
                .build();
    };

    // Maps the domain object back to a jOOQ record for insert/update.
    private static final Function<DataTypeDecorator, LogicalFlowDecoratorRecord> TO_RECORD = d -> {
        LogicalFlowDecoratorRecord r = new LogicalFlowDecoratorRecord();
        r.setId(d.id().orElse(null));
        // Mark ID as unchanged so batch inserts let the DB assign it.
        r.changed(LOGICAL_FLOW_DECORATOR.ID, false);
        r.setDecoratorEntityKind(DATA_TYPE.name());
        r.setDecoratorEntityId(d.decoratorEntity().id());
        r.setLogicalFlowId(d.entityReference().id());
        r.setProvenance(d.provenance());
        d.rating().ifPresent(rating -> r.setRating(rating.value()));
        r.setLastUpdatedAt(Timestamp.valueOf(d.lastUpdatedAt()));
        r.setLastUpdatedBy(d.lastUpdatedBy());
        r.setIsReadonly(d.isReadonly());
        d.authSourceId().ifPresent(r::setAuthSourceId);
        return r;
    };


    private final DSLContext dsl;


    @Autowired
    public LogicalFlowDecoratorDao(DSLContext dsl) {
        checkNotNull(dsl, "dsl cannot be null");
        this.dsl = dsl;
    }


    // --- FINDERS ---

    /**
     * Fetches the single decorator joining the given flow to the given data
     * type, or null if no such decorator exists.
     */
    @Override
    public DataTypeDecorator getByEntityIdAndDataTypeId(long flowId, long dataTypeId) {
        return dsl
                .selectFrom(LOGICAL_FLOW_DECORATOR)
                .where(LOGICAL_FLOW_DECORATOR.LOGICAL_FLOW_ID.eq(flowId))
                .and(LOGICAL_FLOW_DECORATOR.DECORATOR_ENTITY_KIND.eq(DATA_TYPE.name()))
                .and(LOGICAL_FLOW_DECORATOR.DECORATOR_ENTITY_ID.eq(dataTypeId))
                .fetchOne(TO_DECORATOR_MAPPER);
    }

    /**
     * Finds decorators for all non-removed flows whose source or target
     * application is in the given selector.
     */
    @Override
    public List<DataTypeDecorator> findByAppIdSelector(Select<Record1<Long>> appIdSelector) {
        Condition condition = LOGICAL_FLOW.TARGET_ENTITY_ID.in(appIdSelector)
                .or(LOGICAL_FLOW.SOURCE_ENTITY_ID.in(appIdSelector));

        return dsl.select(LOGICAL_FLOW_DECORATOR.fields())
                .from(LOGICAL_FLOW_DECORATOR)
                .innerJoin(LOGICAL_FLOW)
                .on(LOGICAL_FLOW.ID.eq(LOGICAL_FLOW_DECORATOR.LOGICAL_FLOW_ID))
                .and(LOGICAL_NOT_REMOVED)
                // renderInlined: selector is embedded as literal SQL, not bind params
                .where(dsl.renderInlined(condition))
                .fetch(TO_DECORATOR_MAPPER);
    }

    /**
     * Finds decorators (on non-removed flows) whose data type is in the
     * given selector.
     */
    @Override
    public List<DataTypeDecorator> findByDataTypeIdSelector(Select<Record1<Long>> decoratorEntityIdSelector) {
        checkNotNull(decoratorEntityIdSelector, "decoratorEntityIdSelector cannot be null");
        Condition condition = LOGICAL_FLOW_DECORATOR.DECORATOR_ENTITY_KIND.eq(DATA_TYPE.name())
                .and(LOGICAL_FLOW_DECORATOR.DECORATOR_ENTITY_ID.in(decoratorEntityIdSelector));
        return dsl
                .select(LOGICAL_FLOW_DECORATOR.fields())
                .from(LOGICAL_FLOW_DECORATOR)
                .innerJoin(LOGICAL_FLOW)
                .on(LOGICAL_FLOW.ID.eq(LOGICAL_FLOW_DECORATOR.LOGICAL_FLOW_ID))
                .where(dsl.renderInlined(condition))
                .and(LOGICAL_NOT_REMOVED)
                .fetch(TO_DECORATOR_MAPPER);
    }

    /** Finds all decorators belonging to any of the given flow ids. */
    @Override
    public List<DataTypeDecorator> findByFlowIds(Collection<Long> flowIds) {
        checkNotNull(flowIds, "flowIds cannot be null");
        Condition condition = LOGICAL_FLOW_DECORATOR.LOGICAL_FLOW_ID.in(flowIds);
        return findByCondition(condition);
    }

    /** Returns every logical-flow decorator in the system (no flow filter). */
    public List<DataTypeDecorator> findAll() {
        return dsl
                .selectFrom(LOGICAL_FLOW_DECORATOR)
                .fetch(TO_DECORATOR_MAPPER);
    }

    /** Finds all decorators attached to the given logical flow. */
    @Override
    public List<DataTypeDecorator> findByEntityId(long entityId) {
        return dsl
                .selectFrom(LOGICAL_FLOW_DECORATOR)
                .where(LOGICAL_FLOW_DECORATOR.LOGICAL_FLOW_ID.eq(entityId))
                .fetch(TO_DECORATOR_MAPPER);
    }

    /**
     * Finds data-type decorators for non-removed flows whose source OR
     * target matches both the id selector and the given entity kind.
     *
     * NOTE(review): entityKind.get() is called without an isPresent() check —
     * an empty Optional would throw NoSuchElementException rather than the
     * checkNotNull message; presumably callers always pass a present value.
     */
    @Override
    public List<DataTypeDecorator> findByEntityIdSelector(Select<Record1<Long>> entityIdSelector,
                                                          Optional<EntityKind> entityKind) {
        checkNotNull(entityKind, "entityKind cannot be null");
        checkNotNull(entityIdSelector, "entityIdSelector cannot be null");
        return dsl
                .select(LOGICAL_FLOW_DECORATOR.fields())
                .from(LOGICAL_FLOW_DECORATOR)
                .innerJoin(LOGICAL_FLOW)
                .on(LOGICAL_FLOW.ID.eq(LOGICAL_FLOW_DECORATOR.LOGICAL_FLOW_ID))
                .where(LOGICAL_FLOW.SOURCE_ENTITY_ID.in(entityIdSelector)
                        .and(LOGICAL_FLOW.SOURCE_ENTITY_KIND.eq(entityKind.get().name())))
                .or(LOGICAL_FLOW.TARGET_ENTITY_ID.in(entityIdSelector)
                        .and(LOGICAL_FLOW.TARGET_ENTITY_KIND.eq(entityKind.get().name())))
                .and(LOGICAL_NOT_REMOVED)
                .and(LOGICAL_FLOW_DECORATOR.DECORATOR_ENTITY_KIND.eq(DATA_TYPE.name()))
                .fetch(TO_DECORATOR_MAPPER);
    }

    /**
     * Deletes the given data types from a flow, skipping read-only
     * decorators. Returns the number of rows removed.
     */
    @Override
    public int removeDataTypes(EntityReference associatedEntityRef, Collection<Long> dataTypeIds) {
        return dsl
                .deleteFrom(LOGICAL_FLOW_DECORATOR)
                .where(LOGICAL_FLOW_DECORATOR.LOGICAL_FLOW_ID.eq(associatedEntityRef.id()))
                .and(LOGICAL_FLOW_DECORATOR.DECORATOR_ENTITY_KIND.eq(DATA_TYPE.name()))
                .and(LOGICAL_FLOW_DECORATOR.DECORATOR_ENTITY_ID.in(dataTypeIds))
                .and(LOGICAL_FLOW_DECORATOR.IS_READONLY.isFalse())
                .execute();
    }

    /**
     * Upserts the given decorators (insert, or update on duplicate key).
     * Returns per-statement affected-row counts.
     */
    @Override
    public int[] addDecorators(Collection<DataTypeDecorator> decorators) {
        checkNotNull(decorators, "decorators cannot be null");
        List<LogicalFlowDecoratorRecord> records = decorators.stream()
                .map(TO_RECORD)
                .collect(toList());

        // One upsert statement per record, batched for a single round trip.
        Query[] queries = records.stream().map(
                record -> DSL.using(dsl.configuration())
                        .insertInto(LOGICAL_FLOW_DECORATOR)
                        .set(record)
                        .onDuplicateKeyUpdate()
                        .set(record))
                .toArray(Query[]::new);
        return dsl.batch(queries).execute();

        // todo: in jOOQ 3.10.0 this can be written as follows #2979
        // return dsl.batchInsert(records).onDuplicateKeyIgnore().execute();
    }


    /** Batch-updates the given decorators by primary key. */
    public int[] updateDecorators(Set<DataTypeDecorator> decorators) {
        Set<LogicalFlowDecoratorRecord> records = SetUtilities.map(decorators, TO_RECORD);
        return dsl.batchUpdate(records).execute();
    }

    /**
     * Re-rates flow decorators affected by one authoritative-source vantage
     * point: flows originating from the auth-source app get the vantage
     * point's rating; flows from other apps get DISCOURAGED. Only decorators
     * currently rated NO_OPINION or DISCOURAGED are touched, scoped to the
     * vantage point's org-unit and data-type hierarchies.
     *
     * @return total number of decorator rows updated
     */
    public int updateDecoratorsForAuthSource(AuthoritativeRatingVantagePoint ratingVantagePoint) {
        LogicalFlowDecorator lfd = LOGICAL_FLOW_DECORATOR.as("lfd");

        EntityReference vantagePoint = ratingVantagePoint.vantagePoint();
        Long appId = ratingVantagePoint.applicationId();
        EntityReference dataType = ratingVantagePoint.dataType();
        AuthoritativenessRatingValue rating = ratingVantagePoint.rating();

        // All org units at-or-below the vantage point in the hierarchy.
        SelectConditionStep<Record1<Long>> orgUnitSubselect = DSL
                .select(ENTITY_HIERARCHY.ID)
                .from(ENTITY_HIERARCHY)
                .where(ENTITY_HIERARCHY.KIND.eq(vantagePoint.kind().name()))
                .and(ENTITY_HIERARCHY.ANCESTOR_ID.eq(vantagePoint.id()));

        // All data types at-or-below the vantage point's data type.
        SelectConditionStep<Record1<Long>> dataTypeSubselect = DSL
                .select(ENTITY_HIERARCHY.ID)
                .from(ENTITY_HIERARCHY)
                .where(ENTITY_HIERARCHY.KIND.eq(DATA_TYPE.name()))
                .and(ENTITY_HIERARCHY.ANCESTOR_ID.eq(dataType.id()));

        Condition usingAuthSource = LOGICAL_FLOW.SOURCE_ENTITY_ID.eq(appId);
        Condition notUsingAuthSource = LOGICAL_FLOW.SOURCE_ENTITY_ID.ne(appId);

        // Shared update shape; only the app scoping and the new rating differ.
        Function2<Condition, String, Update<LogicalFlowDecoratorRecord>> mkQuery = (appScopingCondition, ratingName) -> dsl
                .update(LOGICAL_FLOW_DECORATOR)
                .set(LOGICAL_FLOW_DECORATOR.RATING, ratingName)
                .set(LOGICAL_FLOW_DECORATOR.AUTH_SOURCE_ID, ratingVantagePoint.authSourceId())
                .where(LOGICAL_FLOW_DECORATOR.ID.in(
                        DSL.select(lfd.ID)
                                .from(lfd)
                                .innerJoin(LOGICAL_FLOW).on(LOGICAL_FLOW.ID.eq(lfd.LOGICAL_FLOW_ID))
                                .innerJoin(APPLICATION)
                                .on(APPLICATION.ID.eq(LOGICAL_FLOW.TARGET_ENTITY_ID)
                                        .and(LOGICAL_FLOW.TARGET_ENTITY_KIND.eq(EntityKind.APPLICATION.name())))
                                .where(LOGICAL_FLOW.SOURCE_ENTITY_KIND.eq(EntityKind.APPLICATION.name())
                                        .and(appScopingCondition)
                                        .and(APPLICATION.ORGANISATIONAL_UNIT_ID.in(orgUnitSubselect))
                                        .and(lfd.DECORATOR_ENTITY_KIND.eq(DATA_TYPE.name()))
                                        .and(lfd.DECORATOR_ENTITY_ID.in(dataTypeSubselect)))
                                // Leave explicit (curated) ratings untouched.
                                .and(lfd.RATING.in(
                                        AuthoritativenessRatingValue.NO_OPINION.value(),
                                        AuthoritativenessRatingValue.DISCOURAGED.value()))
                ));

        Update<LogicalFlowDecoratorRecord> updateAuthSources = mkQuery.apply(
                usingAuthSource,
                rating.value());

        Update<LogicalFlowDecoratorRecord> updateNonAuthSources = mkQuery.apply(
                notUsingAuthSource,
                AuthoritativenessRatingValue.DISCOURAGED.value());

        int authSourceUpdateCount = updateAuthSources.execute();
        int nonAuthSourceUpdateCount = updateNonAuthSources.execute();

        return authSourceUpdateCount + nonAuthSourceUpdateCount;
    }

    /**
     * For each data type on the given flow, reports how many live physical
     * flows carry that data type, plus removability/warning messages derived
     * from that count (a type is removable only when no physical flow uses it).
     */
    @Override
    public List<DataTypeUsageCharacteristics> findDatatypeUsageCharacteristics(EntityReference ref) {
        Field<Integer> numberOfFlowsSharingDatatype = DSL
                .countDistinct(PHYSICAL_FLOW.ID)
                .filterWhere(PHYSICAL_SPEC_DATA_TYPE.DATA_TYPE_ID.isNotNull())
                .as("numberOfFlowsSharingDatatype");

        return dsl
                .select(
                        LOGICAL_FLOW_DECORATOR.DECORATOR_ENTITY_ID,
                        LOGICAL_FLOW_DECORATOR.IS_READONLY,
                        numberOfFlowsSharingDatatype)
                .from(LOGICAL_FLOW)
                .innerJoin(LOGICAL_FLOW_DECORATOR).on(LOGICAL_FLOW.ID.eq(LOGICAL_FLOW_DECORATOR.LOGICAL_FLOW_ID)
                        .and(LOGICAL_FLOW_DECORATOR.DECORATOR_ENTITY_KIND.eq(DATA_TYPE.name())))
                // Left joins: decorators with no physical flows still count as 0.
                .leftJoin(PHYSICAL_FLOW).on(PHYSICAL_FLOW.LOGICAL_FLOW_ID.eq(LOGICAL_FLOW.ID)
                        .and(PHYSICAL_FLOW.IS_REMOVED.isFalse())
                        .and(PHYSICAL_FLOW.ENTITY_LIFECYCLE_STATUS.ne(EntityLifecycleStatus.REMOVED.name())))
                .leftJoin(PHYSICAL_SPEC_DATA_TYPE).on(PHYSICAL_FLOW.SPECIFICATION_ID.eq(PHYSICAL_SPEC_DATA_TYPE.SPECIFICATION_ID)
                        .and(PHYSICAL_SPEC_DATA_TYPE.DATA_TYPE_ID.eq(LOGICAL_FLOW_DECORATOR.DECORATOR_ENTITY_ID)))
                .where(LOGICAL_FLOW.ID.eq(ref.id()))
                .groupBy(LOGICAL_FLOW_DECORATOR.DECORATOR_ENTITY_ID, LOGICAL_FLOW_DECORATOR.IS_READONLY)
                .fetch(r -> {
                    int usageCount = r.get(numberOfFlowsSharingDatatype);
                    return ImmutableDataTypeUsageCharacteristics.builder()
                            .dataTypeId(r.get(LOGICAL_FLOW_DECORATOR.DECORATOR_ENTITY_ID))
                            .warningMessageForViewers(usageCount == 0
                                    ? "Warning: None of the underlying physical flows reference this data type."
                                    : null)
                            .warningMessageForEditors(usageCount > 0
                                    ? format("Cannot be removed as used in %d physical flows. These must be removed first.", usageCount)
                                    : null)
                            .isRemovable(usageCount == 0)
                            .build();
                });
    }

    // --- HELPERS ---

    // Shared fetch for arbitrary decorator conditions (rendered inline).
    private List<DataTypeDecorator> findByCondition(Condition condition) {
        return dsl
                .select(LOGICAL_FLOW_DECORATOR.fields())
                .from(LOGICAL_FLOW_DECORATOR)
                .where(dsl.renderInlined(condition))
                .fetch(TO_DECORATOR_MAPPER);
    }

    /** Bulk-sets the rating on every decorator matching the condition. */
    public int updateRatingsByCondition(AuthoritativenessRatingValue rating, Condition condition) {
        return dsl
                .update(LOGICAL_FLOW_DECORATOR)
                .set(LOGICAL_FLOW_DECORATOR.RATING, rating.value())
                .where(condition)
                .execute();
    }
}
/** * Copyright (c) 2013-2020 Contributors to the Eclipse Foundation * * <p> See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.test.services.grpc; import java.io.File; import java.io.UnsupportedEncodingException; import java.net.MalformedURLException; import java.text.DateFormat; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.TimeZone; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.geowave.service.grpc.protobuf.AnalyticMapreduceGrpc; import org.locationtech.geowave.service.grpc.protobuf.AnalyticMapreduceGrpc.AnalyticMapreduceBlockingStub; import org.locationtech.geowave.service.grpc.protobuf.AnalyticSparkGrpc; import org.locationtech.geowave.service.grpc.protobuf.AnalyticSparkGrpc.AnalyticSparkBlockingStub; import org.locationtech.geowave.service.grpc.protobuf.CQLQueryParametersProtos; import org.locationtech.geowave.service.grpc.protobuf.ClearStoreCommandParametersProtos; import org.locationtech.geowave.service.grpc.protobuf.CliGeoserverGrpc; import org.locationtech.geowave.service.grpc.protobuf.CliGeoserverGrpc.CliGeoserverBlockingStub; import org.locationtech.geowave.service.grpc.protobuf.ConfigGeoServerCommandParametersProtos; import org.locationtech.geowave.service.grpc.protobuf.ConfigHDFSCommandParametersProtos; import org.locationtech.geowave.service.grpc.protobuf.CoreCliGrpc; import org.locationtech.geowave.service.grpc.protobuf.CoreCliGrpc.CoreCliBlockingStub; import 
org.locationtech.geowave.service.grpc.protobuf.CoreIngestGrpc; import org.locationtech.geowave.service.grpc.protobuf.CoreIngestGrpc.CoreIngestBlockingStub; import org.locationtech.geowave.service.grpc.protobuf.CoreMapreduceGrpc; import org.locationtech.geowave.service.grpc.protobuf.CoreMapreduceGrpc.CoreMapreduceBlockingStub; import org.locationtech.geowave.service.grpc.protobuf.CoreStoreGrpc; import org.locationtech.geowave.service.grpc.protobuf.CoreStoreGrpc.CoreStoreBlockingStub; import org.locationtech.geowave.service.grpc.protobuf.DBScanCommandParametersProtos; import org.locationtech.geowave.service.grpc.protobuf.DescribeTypeCommandParametersProtos; import org.locationtech.geowave.service.grpc.protobuf.FeatureAttributeProtos; import org.locationtech.geowave.service.grpc.protobuf.FeatureProtos; import org.locationtech.geowave.service.grpc.protobuf.GeoServerAddCoverageCommandParametersProtos; import org.locationtech.geowave.service.grpc.protobuf.GeoServerAddCoverageStoreCommandParametersProtos; import org.locationtech.geowave.service.grpc.protobuf.GeoServerAddDatastoreCommandParametersProtos; import org.locationtech.geowave.service.grpc.protobuf.GeoServerAddFeatureLayerCommandParametersProtos; import org.locationtech.geowave.service.grpc.protobuf.GeoServerAddLayerCommandParametersProtos; import org.locationtech.geowave.service.grpc.protobuf.GeoServerAddStyleCommandParametersProtos; import org.locationtech.geowave.service.grpc.protobuf.GeoServerAddWorkspaceCommandParametersProtos; import org.locationtech.geowave.service.grpc.protobuf.GeoServerGetCoverageCommandParametersProtos; import org.locationtech.geowave.service.grpc.protobuf.GeoServerGetCoverageStoreCommandParametersProtos; import org.locationtech.geowave.service.grpc.protobuf.GeoServerGetDatastoreCommandParametersProtos; import org.locationtech.geowave.service.grpc.protobuf.GeoServerGetFeatureLayerCommandParametersProtos; import 
org.locationtech.geowave.service.grpc.protobuf.GeoServerGetStoreAdapterCommandParametersProtos; import org.locationtech.geowave.service.grpc.protobuf.GeoServerGetStyleCommandParametersProtos; import org.locationtech.geowave.service.grpc.protobuf.GeoServerListCoverageStoresCommandParametersProtos; import org.locationtech.geowave.service.grpc.protobuf.GeoServerListCoveragesCommandParametersProtos; import org.locationtech.geowave.service.grpc.protobuf.GeoServerListDatastoresCommandParametersProtos; import org.locationtech.geowave.service.grpc.protobuf.GeoServerListFeatureLayersCommandParametersProtos; import org.locationtech.geowave.service.grpc.protobuf.GeoServerListStylesCommandParametersProtos; import org.locationtech.geowave.service.grpc.protobuf.GeoServerListWorkspacesCommandParametersProtos; import org.locationtech.geowave.service.grpc.protobuf.GeoServerRemoveCoverageCommandParametersProtos; import org.locationtech.geowave.service.grpc.protobuf.GeoServerRemoveCoverageStoreCommandParametersProtos; import org.locationtech.geowave.service.grpc.protobuf.GeoServerRemoveDatastoreCommandParametersProtos; import org.locationtech.geowave.service.grpc.protobuf.GeoServerRemoveFeatureLayerCommandParametersProtos; import org.locationtech.geowave.service.grpc.protobuf.GeoServerRemoveStyleCommandParametersProtos; import org.locationtech.geowave.service.grpc.protobuf.GeoServerRemoveWorkspaceCommandParametersProtos; import org.locationtech.geowave.service.grpc.protobuf.GeoServerSetLayerStyleCommandParametersProtos; import org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.MapStringStringResponseProtos; import org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.StringResponseProtos; import org.locationtech.geowave.service.grpc.protobuf.KafkaToGeoWaveCommandParametersProtos; import org.locationtech.geowave.service.grpc.protobuf.KdeCommandParametersProtos; import 
org.locationtech.geowave.service.grpc.protobuf.KmeansSparkCommandParametersProtos; import org.locationtech.geowave.service.grpc.protobuf.ListCommandParametersProtos; import org.locationtech.geowave.service.grpc.protobuf.ListIndexPluginsCommandParametersProtos; import org.locationtech.geowave.service.grpc.protobuf.ListIndicesCommandParametersProtos; import org.locationtech.geowave.service.grpc.protobuf.ListIngestPluginsCommandParametersProtos; import org.locationtech.geowave.service.grpc.protobuf.ListStatsCommandParametersProtos; import org.locationtech.geowave.service.grpc.protobuf.ListStorePluginsCommandParametersProtos; import org.locationtech.geowave.service.grpc.protobuf.ListTypesCommandParametersProtos; import org.locationtech.geowave.service.grpc.protobuf.LocalToGeoWaveCommandParametersProtos; import org.locationtech.geowave.service.grpc.protobuf.LocalToHdfsCommandParametersProtos; import org.locationtech.geowave.service.grpc.protobuf.LocalToKafkaCommandParametersProtos; import org.locationtech.geowave.service.grpc.protobuf.LocalToMapReduceToGeoWaveCommandParametersProtos; import org.locationtech.geowave.service.grpc.protobuf.MapReduceToGeoWaveCommandParametersProtos; import org.locationtech.geowave.service.grpc.protobuf.NearestNeighborCommandParametersProtos; import org.locationtech.geowave.service.grpc.protobuf.RecalculateStatsCommandParametersProtos; import org.locationtech.geowave.service.grpc.protobuf.RemoveIndexCommandParametersProtos; import org.locationtech.geowave.service.grpc.protobuf.RemoveStatCommandParametersProtos; import org.locationtech.geowave.service.grpc.protobuf.RemoveStoreCommandParametersProtos; import org.locationtech.geowave.service.grpc.protobuf.RemoveTypeCommandParametersProtos; import org.locationtech.geowave.service.grpc.protobuf.SetCommandParametersProtos; import org.locationtech.geowave.service.grpc.protobuf.SparkSqlCommandParametersProtos; import 
org.locationtech.geowave.service.grpc.protobuf.SparkToGeoWaveCommandParametersProtos; import org.locationtech.geowave.service.grpc.protobuf.SpatialJoinCommandParametersProtos; import org.locationtech.geowave.service.grpc.protobuf.SpatialQueryParametersProtos; import org.locationtech.geowave.service.grpc.protobuf.SpatialTemporalQueryParametersProtos; import org.locationtech.geowave.service.grpc.protobuf.TemporalConstraintsProtos; import org.locationtech.geowave.service.grpc.protobuf.VectorGrpc; import org.locationtech.geowave.service.grpc.protobuf.VectorGrpc.VectorBlockingStub; import org.locationtech.geowave.service.grpc.protobuf.VectorGrpc.VectorStub; import org.locationtech.geowave.service.grpc.protobuf.VectorIngestParametersProtos; import org.locationtech.geowave.service.grpc.protobuf.VectorQueryParametersProtos; import org.locationtech.geowave.service.grpc.protobuf.VectorStoreParametersProtos; import org.locationtech.geowave.service.grpc.protobuf.VersionCommandParametersProtos; import org.locationtech.geowave.test.TestUtils; import org.locationtech.geowave.test.kafka.KafkaTestEnvironment; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.io.WKBWriter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.protobuf.ByteString; import com.google.protobuf.util.Timestamps; import io.grpc.ManagedChannel; import io.grpc.internal.DnsNameResolverProvider; import io.grpc.netty.NettyChannelBuilder; import io.grpc.stub.StreamObserver; public class GeoWaveGrpcTestClient { private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveGrpcTestClient.class.getName()); public final ManagedChannel channel; public final VectorBlockingStub vectorBlockingStub; public final VectorStub vectorAsyncStub; public final CoreCliBlockingStub coreCliBlockingStub; public final CoreMapreduceBlockingStub coreMapreduceBlockingStub; public final AnalyticMapreduceBlockingStub analyticMapreduceBlockingStub; public final AnalyticSparkBlockingStub 
analyticSparkBlockingStub;
  public final CoreStoreBlockingStub coreStoreBlockingStub;
  public final CoreIngestBlockingStub coreIngestBlockingStub;
  public final CliGeoserverBlockingStub cliGeoserverBlockingStub;

  // test values
  public int numFeaturesProcessed = 0;

  /** Connects to the given host/port over plaintext using the DNS name resolver. */
  public GeoWaveGrpcTestClient(final String host, final int port) {
    this(
        NettyChannelBuilder.forAddress(host, port).nameResolverFactory(
            new DnsNameResolverProvider()).usePlaintext());
  }

  /** Builds the channel and one stub per GeoWave gRPC service. */
  public GeoWaveGrpcTestClient(final NettyChannelBuilder channelBuilder) {
    channel = channelBuilder.build();
    vectorBlockingStub = VectorGrpc.newBlockingStub(channel);
    vectorAsyncStub = VectorGrpc.newStub(channel);
    coreCliBlockingStub = CoreCliGrpc.newBlockingStub(channel);
    coreMapreduceBlockingStub = CoreMapreduceGrpc.newBlockingStub(channel);
    analyticMapreduceBlockingStub = AnalyticMapreduceGrpc.newBlockingStub(channel);
    coreStoreBlockingStub = CoreStoreGrpc.newBlockingStub(channel);
    coreIngestBlockingStub = CoreIngestGrpc.newBlockingStub(channel);
    cliGeoserverBlockingStub = CliGeoserverGrpc.newBlockingStub(channel);
    analyticSparkBlockingStub = AnalyticSparkGrpc.newBlockingStub(channel);
  }

  /** Shuts the channel down, waiting up to 5 seconds for termination. */
  public void shutdown() throws InterruptedException {
    channel.shutdown().awaitTermination(5, TimeUnit.SECONDS);
  }

  // Core CLI methods

  /** Sets a single CLI config key/value pair on the server. */
  public void setCommand(final String key, final String val) {
    final ArrayList<String> params = new ArrayList<>();
    params.add(key);
    params.add(val);
    final SetCommandParametersProtos request =
        SetCommandParametersProtos.newBuilder().addAllParameters(params).build();
    coreCliBlockingStub.setCommand(request);
  }

  /** Returns the server-side CLI configuration as a key/value map. */
  public Map<String, String> listCommand() {
    final ListCommandParametersProtos request = ListCommandParametersProtos.newBuilder().build();
    final MapStringStringResponseProtos response = coreCliBlockingStub.listCommand(request);
    return response.getResponseValueMap();
  }

  // Vector Service Methods

  /**
   * Streams a lat/lon grid of point features to the server via the async
   * vector-ingest RPC, then waits (up to 15 minutes) for the server to finish.
   * The final numeric server response is recorded in {@link #numFeaturesProcessed}.
   */
  public void vectorIngest(
      final int minLat,
      final int maxLat,
      final int minLon,
      final int maxLon,
      final int latStepDegs,
      final int lonStepDegs)
      throws InterruptedException, UnsupportedEncodingException, ParseException {
    LOGGER.info("Performing Vector Ingest...");
    final VectorStoreParametersProtos baseParams =
        VectorStoreParametersProtos.newBuilder().setStoreName(
            GeoWaveGrpcTestUtils.storeName).setTypeName(GeoWaveGrpcTestUtils.typeName).setIndexName(
                GeoWaveGrpcTestUtils.indexName).build();

    final CountDownLatch finishLatch = new CountDownLatch(1);
    final StreamObserver<StringResponseProtos> responseObserver =
        new StreamObserver<StringResponseProtos>() {

          @Override
          public void onNext(final StringResponseProtos value) {
            try {
              numFeaturesProcessed = Integer.parseInt(value.getResponseValue());
            } catch (final NumberFormatException e) {
              // Best effort: only the final server response is a numeric feature
              // count; non-numeric progress messages are simply logged below.
            }
            LOGGER.info(value.getResponseValue());
          }

          @Override
          public void onError(final Throwable t) {
            LOGGER.error("Error: Vector Ingest failed.", t);
            finishLatch.countDown();
          }

          @Override
          public void onCompleted() {
            LOGGER.info("Finished Vector Ingest...");
            finishLatch.countDown();
          }
        };

    final StreamObserver<VectorIngestParametersProtos> requestObserver =
        vectorAsyncStub.vectorIngest(responseObserver);

    // Build up and add features to the request here...
    final VectorIngestParametersProtos.Builder requestBuilder =
        VectorIngestParametersProtos.newBuilder();
    final FeatureAttributeProtos.Builder attBuilder = FeatureAttributeProtos.newBuilder();

    // Loop-invariant setup, hoisted out of the grid loops: every feature gets the
    // same timestamp (midpoint of the test temporal range), so parse the constant
    // bounds once instead of once per feature; likewise reuse one WKB writer.
    final TimeZone tz = TimeZone.getTimeZone("UTC");
    // Quoted "Z" to indicate UTC, no timezone offset
    final DateFormat df = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm'Z'");
    df.setTimeZone(tz);
    final long midpointMillis =
        (df.parse(GeoWaveGrpcTestUtils.temporalQueryStartTime).getTime()
            + df.parse(GeoWaveGrpcTestUtils.temporalQueryEndTime).getTime()) / 2;
    final WKBWriter wkbWriter = new WKBWriter();

    for (int longitude = minLon; longitude <= maxLon; longitude += lonStepDegs) {
      for (int latitude = minLat; latitude <= maxLat; latitude += latStepDegs) {
        attBuilder.setValGeometry(
            copyFrom(
                wkbWriter.write(
                    GeometryUtils.GEOMETRY_FACTORY.createPoint(
                        new Coordinate(longitude, latitude)))));
        requestBuilder.putFeature("geometry", attBuilder.build());

        attBuilder.setValDate(Timestamps.fromMillis(midpointMillis));
        requestBuilder.putFeature("TimeStamp", attBuilder.build());

        attBuilder.setValDouble(latitude);
        requestBuilder.putFeature("Latitude", attBuilder.build());

        attBuilder.setValDouble(longitude);
        requestBuilder.putFeature("Longitude", attBuilder.build());

        final VectorIngestParametersProtos params =
            requestBuilder.setBaseParams(baseParams).build();
        requestObserver.onNext(params);

        if (finishLatch.getCount() == 0) {
          // RPC completed or errored before we finished sending.
          // Sending further requests won't error, but they will just
          // be thrown away.
          return;
        }
      }
    }
    // Mark the end of requests
    requestObserver.onCompleted();

    // Receiving happens asynchronously
    if (!finishLatch.await(15, TimeUnit.MINUTES)) {
      // Message now matches the 15-minute await above (previously said 5 minutes).
      LOGGER.warn("Vector Ingest can not finish within 15 minutes");
    }
  }

  /** Runs the vector-query RPC with the test CQL spatial query and drains the stream. */
  public ArrayList<FeatureProtos> vectorQuery() throws UnsupportedEncodingException {
    LOGGER.info("Performing Vector Query...");
    final VectorQueryParametersProtos request =
        VectorQueryParametersProtos.newBuilder().setStoreName(
            GeoWaveGrpcTestUtils.storeName).setTypeName(GeoWaveGrpcTestUtils.typeName).setQuery(
                GeoWaveGrpcTestUtils.cqlSpatialQuery).build();

    final Iterator<FeatureProtos> features = vectorBlockingStub.vectorQuery(request);
    final ArrayList<FeatureProtos> featureList = new ArrayList<>();
    // drain the server-side stream (counter from the old for-loop was unused)
    while (features.hasNext()) {
      featureList.add(features.next());
    }
    return featureList;
  }

  // Wraps raw bytes in a protobuf ByteString.
  private static ByteString copyFrom(final byte[] bytes) {
    return ByteString.copyFrom(bytes);
  }

  /** Runs the CQL-query RPC with the test CQL spatial query and drains the stream. */
  public ArrayList<FeatureProtos> cqlQuery() throws UnsupportedEncodingException {
    LOGGER.info("Performing CQL Query...");
    final VectorStoreParametersProtos baseParams =
        VectorStoreParametersProtos.newBuilder().setStoreName(
            GeoWaveGrpcTestUtils.storeName).setTypeName(GeoWaveGrpcTestUtils.typeName).setIndexName(
                GeoWaveGrpcTestUtils.indexName).build();

    final CQLQueryParametersProtos request =
        CQLQueryParametersProtos.newBuilder().setBaseParams(baseParams).setCql(
            GeoWaveGrpcTestUtils.cqlSpatialQuery).build();

    final Iterator<FeatureProtos> features = vectorBlockingStub.cqlQuery(request);
    final ArrayList<FeatureProtos> featureList = new ArrayList<>();
    while (features.hasNext()) {
      featureList.add(features.next());
    }
    return featureList;
  }

  /** Runs the spatial-query RPC with the test WKB geometry and drains the stream. */
  public ArrayList<FeatureProtos> spatialQuery() throws UnsupportedEncodingException {
    LOGGER.info("Performing Spatial Query...");
    final VectorStoreParametersProtos baseParams =
        VectorStoreParametersProtos.newBuilder().setStoreName(
            GeoWaveGrpcTestUtils.storeName).setTypeName(GeoWaveGrpcTestUtils.typeName).setIndexName(
                GeoWaveGrpcTestUtils.indexName).build();

    final SpatialQueryParametersProtos request =
        SpatialQueryParametersProtos.newBuilder().setBaseParams(baseParams).setGeometry(
            copyFrom(GeoWaveGrpcTestUtils.wkbSpatialQuery)).build();

    final Iterator<FeatureProtos> features = vectorBlockingStub.spatialQuery(request);
    final ArrayList<FeatureProtos> featureList = new ArrayList<>();
    while (features.hasNext()) {
      featureList.add(features.next());
    }
    return featureList;
  }

  /**
   * Runs the spatial-temporal query RPC: the test WKB geometry constrained by the
   * test temporal range with a CONTAINS comparison; drains and returns the stream.
   */
  public ArrayList<FeatureProtos> spatialTemporalQuery() throws ParseException {
    LOGGER.info("Performing Spatial Temporal Query...");
    final VectorStoreParametersProtos baseParams =
        VectorStoreParametersProtos.newBuilder().setStoreName(
            GeoWaveGrpcTestUtils.storeName).build();

    final TimeZone tz = TimeZone.getTimeZone("UTC");
    // Quoted "Z" to indicate UTC, no timezone offset
    final DateFormat df = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm'Z'");
    df.setTimeZone(tz);

    final SpatialQueryParametersProtos spatialQuery =
        SpatialQueryParametersProtos.newBuilder().setBaseParams(baseParams).setGeometry(
            copyFrom(GeoWaveGrpcTestUtils.wkbSpatialQuery)).build();
    final TemporalConstraintsProtos t =
        TemporalConstraintsProtos.newBuilder().setStartTime(
            Timestamps.fromMillis(
                df.parse(GeoWaveGrpcTestUtils.temporalQueryStartTime).getTime())).setEndTime(
                    Timestamps.fromMillis(
                        df.parse(GeoWaveGrpcTestUtils.temporalQueryEndTime).getTime())).build();
    final SpatialTemporalQueryParametersProtos request =
        SpatialTemporalQueryParametersProtos.newBuilder().setSpatialParams(
            spatialQuery).addTemporalConstraints(0, t).setCompareOperation("CONTAINS").build();

    final Iterator<FeatureProtos> features = vectorBlockingStub.spatialTemporalQuery(request);
    final ArrayList<FeatureProtos> featureList = new ArrayList<>();
    // iterate over features
    while (features.hasNext()) {
      featureList.add(features.next());
    }
    return featureList;
  }

  // Core Mapreduce

  /** Points the server's HDFS configuration at the test environment's namenode. */
  public boolean configHDFSCommand() {
    final ConfigHDFSCommandParametersProtos request =
        ConfigHDFSCommandParametersProtos.newBuilder().addParameters(
            GeoWaveGrpcTestUtils.getMapReduceTestEnv().getHdfs()).build();
    coreMapreduceBlockingStub.configHDFSCommand(request);
    return true;
  }

  // Analytic Mapreduce

  /** Runs the DBScan mapreduce analytic against the test store with fixed tuning. */
  public boolean dbScanCommand() {
    final ArrayList<String> types = new ArrayList<>();
    types.add(GeoWaveGrpcTestUtils.typeName);
    final DBScanCommandParametersProtos request =
        DBScanCommandParametersProtos.newBuilder().addParameters(
            GeoWaveGrpcTestUtils.storeName).setClusteringMaxIterations(
                "5").setClusteringMinimumSize("10").setExtractMinInputSplit(
                    "2").setExtractMaxInputSplit("6").setPartitionMaxDistance(
                        "1000").setOutputReducerCount("4").setMapReduceHdfsHostPort(
                            GeoWaveGrpcTestUtils.getMapReduceTestEnv().getHdfs()).setMapReduceJobtrackerHostPort(
                                GeoWaveGrpcTestUtils.getMapReduceTestEnv().getJobtracker()).setMapReduceHdfsBaseDir(
                                    GeoWaveGrpcTestUtils.getMapReduceTestEnv().getHdfsBaseDirectory()).addAllTypeNames(
                                        types).build();
    analyticMapreduceBlockingStub.dBScanCommand(request);
    return true;
  }

  /** Runs the nearest-neighbor mapreduce analytic with fixed tuning. */
  public boolean nearestNeighborCommand() {
    final ArrayList<String> types = new ArrayList<>();
    types.add(GeoWaveGrpcTestUtils.typeName);
    final NearestNeighborCommandParametersProtos request =
        NearestNeighborCommandParametersProtos.newBuilder().addParameters(
            GeoWaveGrpcTestUtils.storeName).addAllTypeNames(types).setExtractQuery(
                GeoWaveGrpcTestUtils.wktSpatialQuery).setExtractMinInputSplit(
                    "2").setExtractMaxInputSplit("6").setPartitionMaxDistance(
                        "10").setOutputReducerCount("4").setMapReduceHdfsHostPort(
                            GeoWaveGrpcTestUtils.getMapReduceTestEnv().getHdfs()).setMapReduceJobtrackerHostPort(
                                GeoWaveGrpcTestUtils.getMapReduceTestEnv().getJobtracker()).setOutputHdfsOutputPath(
                                    GeoWaveGrpcTestUtils.getMapReduceTestEnv().getHdfsBaseDirectory()
                                        + "/GrpcNearestNeighbor").setMapReduceHdfsBaseDir(
                                            GeoWaveGrpcTestUtils.getMapReduceTestEnv().getHdfsBaseDirectory()).build();
    analyticMapreduceBlockingStub.nearestNeighborCommand(request);
    return true;
  }

  /** Runs the KDE mapreduce analytic from the test store into the output store. */
  public boolean kdeCommand() {
    final ArrayList<String> params = new ArrayList<>();
    params.add(GeoWaveGrpcTestUtils.storeName);
    params.add(GeoWaveGrpcTestUtils.outputStoreName);
    final KdeCommandParametersProtos request =
        KdeCommandParametersProtos.newBuilder().addAllParameters(params).setCoverageName(
            "grpc_kde").setFeatureType(GeoWaveGrpcTestUtils.typeName).setHdfsHostPort(
                GeoWaveGrpcTestUtils.getMapReduceTestEnv().getHdfs()).setJobTrackerOrResourceManHostPort(
                    GeoWaveGrpcTestUtils.getMapReduceTestEnv().getJobtracker()).setMinLevel(
                        5).setMaxLevel(26).setMinSplits(32).setMaxSplits(32).setTileSize(1).build();
    analyticMapreduceBlockingStub.kdeCommand(request);
    return true;
  }

  // Core Store
  // (PascalCase names below violate Java conventions but are part of this class's
  // public surface, so they are retained for compatibility with existing tests.)

  /** Recalculates all statistics on the test store. */
  public boolean RecalculateStatsCommand() {
    final ArrayList<String> params = new ArrayList<>();
    params.add(GeoWaveGrpcTestUtils.storeName);
    final RecalculateStatsCommandParametersProtos request =
        RecalculateStatsCommandParametersProtos.newBuilder().addAllParameters(params).setAll(
            true).build();
    coreStoreBlockingStub.recalculateStatsCommand(request);
    return true;
  }

  /** Removes the test index from the test store; returns the server's response text. */
  public String RemoveIndexCommand() {
    final ArrayList<String> params = new ArrayList<>();
    params.add(GeoWaveGrpcTestUtils.storeName);
    params.add(GeoWaveGrpcTestUtils.indexName);
    final RemoveIndexCommandParametersProtos request =
        RemoveIndexCommandParametersProtos.newBuilder().addAllParameters(params).build();
    final StringResponseProtos resp = coreStoreBlockingStub.removeIndexCommand(request);
    return resp.getResponseValue();
  }

  /** Invokes the version command for the test store. */
  public boolean VersionCommand() {
    final ArrayList<String> params = new ArrayList<>();
    params.add(GeoWaveGrpcTestUtils.storeName);
    final VersionCommandParametersProtos request =
        VersionCommandParametersProtos.newBuilder().addAllParameters(params).build();
    coreStoreBlockingStub.versionCommand(request);
    return true;
  }

  /** Lists the indices of the test store; returns the server's response text. */
  public String ListIndexCommand() {
    final ArrayList<String> params = new ArrayList<>();
    params.add(GeoWaveGrpcTestUtils.storeName);
    final ListIndicesCommandParametersProtos request =
        ListIndicesCommandParametersProtos.newBuilder().addAllParameters(params).build();
    final StringResponseProtos resp = coreStoreBlockingStub.listIndicesCommand(request);
    return resp.getResponseValue();
  }

  /** Lists the statistics of the test store in CSV form. */
  public String ListStatsCommand() {
    final ArrayList<String> params = new ArrayList<>();
    params.add(GeoWaveGrpcTestUtils.storeName);
    final ListStatsCommandParametersProtos request =
        ListStatsCommandParametersProtos.newBuilder().addAllParameters(params).setCsv(true).build();
    final StringResponseProtos resp = coreStoreBlockingStub.listStatsCommand(request);
    return resp.getResponseValue();
  }

  /** Clears all data from the test store. */
  public boolean ClearCommand() {
    final ArrayList<String> params = new ArrayList<>();
    params.add(GeoWaveGrpcTestUtils.storeName);
    final ClearStoreCommandParametersProtos request =
        ClearStoreCommandParametersProtos.newBuilder().addAllParameters(params).build();
    coreStoreBlockingStub.clearStoreCommand(request);
    return true;
  }

  /** Lists the types (adapters) registered in the test store. */
  public String ListAdapterCommand() {
    final ArrayList<String> params = new ArrayList<>();
    params.add(GeoWaveGrpcTestUtils.storeName);
    final ListTypesCommandParametersProtos request =
        ListTypesCommandParametersProtos.newBuilder().addAllParameters(params).build();
    final StringResponseProtos resp = coreStoreBlockingStub.listTypesCommand(request);
    return resp.getResponseValue();
  }

  /** Describes the test type in the test store. */
  public boolean DescribeAdapterCommand() {
    final ArrayList<String> params = new ArrayList<>();
    params.add(GeoWaveGrpcTestUtils.storeName);
    params.add(GeoWaveGrpcTestUtils.typeName);
    final DescribeTypeCommandParametersProtos request =
        DescribeTypeCommandParametersProtos.newBuilder().addAllParameters(params).build();
    coreStoreBlockingStub.describeTypeCommand(request);
    return true;
  }
/** Removes the test store configuration and returns the server's response string. */
public String RemoveStoreCommand() {
  final ArrayList<String> params = new ArrayList<>();
  params.add(GeoWaveGrpcTestUtils.storeName);
  final RemoveStoreCommandParametersProtos request =
      RemoveStoreCommandParametersProtos.newBuilder().addAllParameters(params).build();
  final StringResponseProtos resp = coreStoreBlockingStub.removeStoreCommand(request);
  return resp.getResponseValue();
}

/** Removes the test type from the store; true if the blocking call does not throw. */
public boolean RemoveAdapterCommand() {
  final ArrayList<String> params = new ArrayList<>();
  params.add(GeoWaveGrpcTestUtils.storeName);
  params.add(GeoWaveGrpcTestUtils.typeName);
  final RemoveTypeCommandParametersProtos request =
      RemoveTypeCommandParametersProtos.newBuilder().addAllParameters(params).build();
  coreStoreBlockingStub.removeTypeCommand(request);
  return true;
}

/**
 * Force-removes the BOUNDING_BOX statistic for the test type's "geometry" field;
 * true if the blocking call does not throw.
 */
public boolean RemoveStatCommand() {
  final ArrayList<String> params = new ArrayList<>();
  params.add(GeoWaveGrpcTestUtils.storeName);
  final RemoveStatCommandParametersProtos request =
      RemoveStatCommandParametersProtos.newBuilder().addAllParameters(params).setStatType(
          "BOUNDING_BOX").setTypeName(GeoWaveGrpcTestUtils.typeName).setFieldName(
              "geometry").setForce(true).build();
  coreStoreBlockingStub.removeStatCommand(request);
  return true;
}

// Cli GeoServer

/** Adds a vector layer for adapter "GeometryTest" to the "default" GeoServer workspace. */
public String GeoServerAddLayerCommand() {
  final ArrayList<String> params = new ArrayList<>();
  params.add("grpc");
  final GeoServerAddLayerCommandParametersProtos request =
      GeoServerAddLayerCommandParametersProtos.newBuilder().addAllParameters(params).setAdapterId(
          "GeometryTest").setAddOption("VECTOR").setStyle("default").setWorkspace(
              "default").build();
  return cliGeoserverBlockingStub.geoServerAddLayerCommand(request).getResponseValue();
}

/** Fetches the "grpc" datastore from the "default" workspace. */
public String GeoServerGetDatastoreCommand() {
  final ArrayList<String> params = new ArrayList<>();
  params.add("grpc");
  final GeoServerGetDatastoreCommandParametersProtos request =
      GeoServerGetDatastoreCommandParametersProtos.newBuilder().addAllParameters(
          params).setWorkspace("default").build();
  return cliGeoserverBlockingStub.geoServerGetDatastoreCommand(request).getResponseValue();
}

/** Fetches the "grpc" feature layer. */
public String GeoServerGetFeatureProtosLayerCommand() {
  final ArrayList<String> params = new ArrayList<>();
  params.add("grpc");
  final GeoServerGetFeatureLayerCommandParametersProtos request =
      GeoServerGetFeatureLayerCommandParametersProtos.newBuilder().addAllParameters(
          params).build();
  return cliGeoserverBlockingStub.geoServerGetFeatureLayerCommand(request).getResponseValue();
}

/** Lists coverage stores in the "default" workspace. */
public String GeoServerListCoverageStoresCommand() {
  final GeoServerListCoverageStoresCommandParametersProtos request =
      GeoServerListCoverageStoresCommandParametersProtos.newBuilder().setWorkspace(
          "default").build();
  return cliGeoserverBlockingStub.geoServerListCoverageStoresCommand(request).getResponseValue();
}

/** Returns the adapter list of the "grpc" store. */
public List<String> GeoServerGetStoreAdapterCommand() {
  final ArrayList<String> params = new ArrayList<>();
  params.add("grpc");
  final GeoServerGetStoreAdapterCommandParametersProtos request =
      GeoServerGetStoreAdapterCommandParametersProtos.newBuilder().addAllParameters(
          params).build();
  return cliGeoserverBlockingStub.geoServerGetStoreAdapterCommand(request).getResponseValueList();
}

/** Fetches coverage "grpc" from coverage store "test_cvg_store" in "default". */
public String GeoServerGetCoverageCommand() {
  final ArrayList<String> params = new ArrayList<>();
  params.add("grpc");
  final GeoServerGetCoverageCommandParametersProtos request =
      GeoServerGetCoverageCommandParametersProtos.newBuilder().addAllParameters(
          params).setWorkspace("default").setCvgstore("test_cvg_store").build();
  return cliGeoserverBlockingStub.geoServerGetCoverageCommand(request).getResponseValue();
}

/** Removes the "grpc" feature layer. */
public String GeoServerRemoveFeatureProtosLayerCommand() {
  final ArrayList<String> params = new ArrayList<>();
  params.add("grpc");
  final GeoServerRemoveFeatureLayerCommandParametersProtos request =
      GeoServerRemoveFeatureLayerCommandParametersProtos.newBuilder().addAllParameters(
          params).build();
  return cliGeoserverBlockingStub.geoServerRemoveFeatureLayerCommand(request).getResponseValue();
}

/** Adds coverage "grpc" to coverage store "test_cvg_store" in "default". */
public String GeoServerAddCoverageCommand() {
  final ArrayList<String> params = new ArrayList<>();
  params.add("grpc");
  final GeoServerAddCoverageCommandParametersProtos request =
      GeoServerAddCoverageCommandParametersProtos.newBuilder().addAllParameters(
          params).setWorkspace("default").setCvgstore("test_cvg_store").build();
  return cliGeoserverBlockingStub.geoServerAddCoverageCommand(request).getResponseValue();
}

/** Removes the "grpc" workspace. */
public String GeoServerRemoveWorkspaceCommand() {
  final ArrayList<String> params = new ArrayList<>();
  params.add("grpc");
  final GeoServerRemoveWorkspaceCommandParametersProtos request =
      GeoServerRemoveWorkspaceCommandParametersProtos.newBuilder().addAllParameters(
          params).build();
  return cliGeoserverBlockingStub.geoServerRemoveWorkspaceCommand(request).getResponseValue();
}

/** Lists all GeoServer workspaces. */
public List<String> GeoServerListWorkspacesCommand() {
  final GeoServerListWorkspacesCommandParametersProtos request =
      GeoServerListWorkspacesCommandParametersProtos.newBuilder().build();
  return cliGeoserverBlockingStub.geoServerListWorkspacesCommand(request).getResponseValueList();
}

/** Fetches coverage store "grpc" from the "default" workspace. */
public String GeoServerGetCoverageStoreCommand() {
  final ArrayList<String> params = new ArrayList<>();
  params.add("grpc");
  final GeoServerGetCoverageStoreCommandParametersProtos request =
      GeoServerGetCoverageStoreCommandParametersProtos.newBuilder().addAllParameters(
          params).setWorkspace("default").build();
  return cliGeoserverBlockingStub.geoServerGetCoverageStoreCommand(request).getResponseValue();
}

/** Configures GeoServer connection settings (url "grpc", workspace/user/pass). */
public String ConfigGeoServerCommand() {
  final ArrayList<String> params = new ArrayList<>();
  params.add("grpc");
  final ConfigGeoServerCommandParametersProtos request =
      ConfigGeoServerCommandParametersProtos.newBuilder().addAllParameters(params).setWorkspace(
          "default").setUsername("user").setPass("default").build();
  return cliGeoserverBlockingStub.configGeoServerCommand(request).getResponseValue();
}

/** Lists coverages of store "grpc" in the "default" workspace. */
public String GeoServerListCoveragesCommand() {
  final ArrayList<String> params = new ArrayList<>();
  params.add("grpc");
  final GeoServerListCoveragesCommandParametersProtos request =
      GeoServerListCoveragesCommandParametersProtos.newBuilder().addAllParameters(
          params).setWorkspace("default").build();
  return cliGeoserverBlockingStub.geoServerListCoveragesCommand(request).getResponseValue();
}

/** Lists all GeoServer styles. */
public String GeoServerListStylesCommand() {
  final GeoServerListStylesCommandParametersProtos request =
      GeoServerListStylesCommandParametersProtos.newBuilder().build();
  return cliGeoserverBlockingStub.geoServerListStylesCommand(request).getResponseValue();
}

/** Adds coverage store "coverage-store" to "default" with raster options disabled. */
public String GeoServerAddCoverageStoreCommand() {
  final ArrayList<String> params = new ArrayList<>();
  params.add("grpc");
  final GeoServerAddCoverageStoreCommandParametersProtos request =
      GeoServerAddCoverageStoreCommandParametersProtos.newBuilder().addAllParameters(
          params).setWorkspace("default").setCoverageStore(
              "coverage-store").setEqualizeHistogramOverride(false).setScaleTo8Bit(
                  false).setInterpolationOverride("0").build();
  return cliGeoserverBlockingStub.geoServerAddCoverageStoreCommand(request).getResponseValue();
}

/** Adds feature layer "grpc" backed by datastore "grpc" in "default". */
public String GeoServerAddFeatureProtosLayerCommand() {
  final ArrayList<String> params = new ArrayList<>();
  params.add("grpc");
  final GeoServerAddFeatureLayerCommandParametersProtos request =
      GeoServerAddFeatureLayerCommandParametersProtos.newBuilder().addAllParameters(
          params).setWorkspace("default").setDatastore("grpc").build();
  return cliGeoserverBlockingStub.geoServerAddFeatureLayerCommand(request).getResponseValue();
}

/** Adds datastore "grpc-store" (backed by the test store) to "default". */
public String GeoServerAddDatastoreCommand() {
  final ArrayList<String> params = new ArrayList<>();
  params.add(GeoWaveGrpcTestUtils.storeName);
  final GeoServerAddDatastoreCommandParametersProtos request =
      GeoServerAddDatastoreCommandParametersProtos.newBuilder().addAllParameters(
          params).setWorkspace("default").setDatastore("grpc-store").build();
  return cliGeoserverBlockingStub.geoServerAddDatastoreCommand(request).getResponseValue();
}

/** Lists datastores in the "default" workspace. */
public String GeoServerListDatastoresCommand() {
  final GeoServerListDatastoresCommandParametersProtos request =
      GeoServerListDatastoresCommandParametersProtos.newBuilder().setWorkspace("default").build();
  return cliGeoserverBlockingStub.geoServerListDatastoresCommand(request).getResponseValue();
}

/** Applies style "test-style" to layer "grpc". */
public String GeoServerSetLayerStyleCommand() {
  final ArrayList<String> params = new ArrayList<>();
  params.add("grpc");
  final GeoServerSetLayerStyleCommandParametersProtos request =
      GeoServerSetLayerStyleCommandParametersProtos.newBuilder().addAllParameters(
          params).setStyleName("test-style").build();
  return cliGeoserverBlockingStub.geoServerSetLayerStyleCommand(request).getResponseValue();
}

/** Removes coverage store "grpc" from "default". */
public String GeoServerRemoveCoverageStoreCommand() {
  final ArrayList<String> params = new ArrayList<>();
  params.add("grpc");
  final GeoServerRemoveCoverageStoreCommandParametersProtos request =
      GeoServerRemoveCoverageStoreCommandParametersProtos.newBuilder().addAllParameters(
          params).setWorkspace("default").build();
  return cliGeoserverBlockingStub.geoServerRemoveCoverageStoreCommand(request).getResponseValue();
}

/** Removes datastore "grpc" from "default". */
public String GeoServerRemoveDatastoreCommand() {
  final ArrayList<String> params = new ArrayList<>();
  params.add("grpc");
  final GeoServerRemoveDatastoreCommandParametersProtos request =
      GeoServerRemoveDatastoreCommandParametersProtos.newBuilder().addAllParameters(
          params).setWorkspace("default").build();
  return cliGeoserverBlockingStub.geoServerRemoveDatastoreCommand(request).getResponseValue();
}

/** Adds style "grpc" from SLD "styles-id". */
public String GeoServerAddStyleCommand() {
  final ArrayList<String> params = new ArrayList<>();
  params.add("grpc");
  final GeoServerAddStyleCommandParametersProtos request =
      GeoServerAddStyleCommandParametersProtos.newBuilder().addAllParameters(params).setStylesld(
          "styles-id").build();
  return cliGeoserverBlockingStub.geoServerAddStyleCommand(request).getResponseValue();
}

/** Adds workspace "grpc". */
public String GeoServerAddWorkspaceCommand() {
  final ArrayList<String> params = new ArrayList<>();
  params.add("grpc");
  final GeoServerAddWorkspaceCommandParametersProtos request =
      GeoServerAddWorkspaceCommandParametersProtos.newBuilder().addAllParameters(params).build();
  return cliGeoserverBlockingStub.geoServerAddWorkspaceCommand(request).getResponseValue();
}

/** Fetches style "grpc". */
public String GeoServerGetStyleCommand() {
  final ArrayList<String> params = new ArrayList<>();
  params.add("grpc");
  final GeoServerGetStyleCommandParametersProtos request =
      GeoServerGetStyleCommandParametersProtos.newBuilder().addAllParameters(params).build();
  return cliGeoserverBlockingStub.geoServerGetStyleCommand(request).getResponseValue();
}

/** Removes style "grpc". */
public String GeoServerRemoveStyleCommand() {
  final ArrayList<String> params = new ArrayList<>();
  params.add("grpc");
  final GeoServerRemoveStyleCommandParametersProtos request =
      GeoServerRemoveStyleCommandParametersProtos.newBuilder().addAllParameters(params).build();
  return cliGeoserverBlockingStub.geoServerRemoveStyleCommand(request).getResponseValue();
}

/** Removes coverage "grpc" from coverage store "cvg-store" in "default". */
public String GeoServerRemoveCoverageCommand() {
  final ArrayList<String> params = new ArrayList<>();
  params.add("grpc");
  final GeoServerRemoveCoverageCommandParametersProtos request =
      GeoServerRemoveCoverageCommandParametersProtos.newBuilder().addAllParameters(
          params).setWorkspace("default").setCvgstore("cvg-store").build();
  return cliGeoserverBlockingStub.geoServerRemoveCoverageCommand(request).getResponseValue();
}

/** Lists GeoWave-only feature layers of datastore "cvg-store" in "default". */
public String GeoServerListFeatureProtosLayersCommand() {
  final GeoServerListFeatureLayersCommandParametersProtos request =
      GeoServerListFeatureLayersCommandParametersProtos.newBuilder().setWorkspace(
          "default").setDatastore("cvg-store").setGeowaveOnly(true).build();
  return cliGeoserverBlockingStub.geoServerListFeatureLayersCommand(request).getResponseValue();
}

// Core Ingest

/** Stages local gpx test data into HDFS; true if the blocking call does not throw. */
public boolean LocalToHdfsCommand() {
  final ArrayList<String> params = new ArrayList<>();
  params.add(TestUtils.TEST_CASE_BASE + "osm_gpx_test_case/");
  params.add(GeoWaveGrpcTestUtils.getMapReduceTestEnv().getHdfsBaseDirectory());
  final ArrayList<String> extensions = new ArrayList<>();
  final LocalToHdfsCommandParametersProtos request =
      LocalToHdfsCommandParametersProtos.newBuilder().addAllParameters(params).addAllExtensions(
          extensions).setFormats("gpx").build();
  coreIngestBlockingStub.localToHdfsCommand(request);
  return true;
}

/** Ingests local gpx test data directly into the test store with one thread. */
public boolean LocalToGeoWaveCommand() {
  final ArrayList<String> params = new ArrayList<>();
  params.add(TestUtils.TEST_CASE_BASE + "osm_gpx_test_case/");
  params.add(GeoWaveGrpcTestUtils.storeName);
  params.add(GeoWaveGrpcTestUtils.indexName);
  final ArrayList<String> extensions = new ArrayList<>();
  final LocalToGeoWaveCommandParametersProtos request =
      LocalToGeoWaveCommandParametersProtos.newBuilder().addAllParameters(
          params).addAllExtensions(extensions).setFormats("gpx").setThreads(1).build();
  coreIngestBlockingStub.localToGeoWaveCommand(request);
  return true;
}

/** Ingests previously staged HDFS data into the test store via MapReduce. */
public boolean MapReduceToGeoWaveCommand() {
  final ArrayList<String> params = new ArrayList<>();
  params.add(GeoWaveGrpcTestUtils.getMapReduceTestEnv().getHdfsBaseDirectory());
  params.add(GeoWaveGrpcTestUtils.storeName);
  params.add(GeoWaveGrpcTestUtils.indexName);
  final ArrayList<String> extensions = new ArrayList<>();
  final MapReduceToGeoWaveCommandParametersProtos request =
      MapReduceToGeoWaveCommandParametersProtos.newBuilder().addAllParameters(
          params).addAllExtensions(extensions).setFormats("gpx").setJobTrackerHostPort(
              GeoWaveGrpcTestUtils.getMapReduceTestEnv().getJobtracker()).build();
  coreIngestBlockingStub.mapReduceToGeoWaveCommand(request);
  return true;
}

/**
 * Ingests local gpx test data via a local Spark job; returns false if the local
 * test directory cannot be converted to a URL.
 */
public boolean SparkToGeoWaveCommand() {
  final ArrayList<String> params = new ArrayList<>();
  final File tempDataDir = new File("./" + TestUtils.TEST_CASE_BASE);
  String hdfsPath = "";
  try {
    hdfsPath = tempDataDir.toURI().toURL().toString();
  } catch (final MalformedURLException e) {
    return false;
  }
  // uncomment this line and comment-out the following to test s3 vs hdfs
  // params.add("s3://geowave-test/data/gdelt");
  params.add(hdfsPath + "osm_gpx_test_case/");
  params.add(GeoWaveGrpcTestUtils.storeName);
  params.add(GeoWaveGrpcTestUtils.indexName);
  final ArrayList<String> extensions = new ArrayList<>();
  final SparkToGeoWaveCommandParametersProtos request =
      SparkToGeoWaveCommandParametersProtos.newBuilder().addAllParameters(
          params).addAllExtensions(extensions).setFormats("gpx").setAppName(
              "CoreGeoWaveSparkITs").setMaster("local[*]").setHost("localhost").setNumExecutors(
                  1).setNumCores(1).build();
  coreIngestBlockingStub.sparkToGeoWaveCommand(request);
  return true;
}

/** Stages local data to HDFS and ingests it via MapReduce in one command. */
public boolean LocalToMapReduceToGeoWaveCommand() {
  final ArrayList<String> params = new ArrayList<>();
  params.add(TestUtils.TEST_CASE_BASE + "osm_gpx_test_case/");
  params.add(GeoWaveGrpcTestUtils.getMapReduceTestEnv().getHdfsBaseDirectory());
  params.add(GeoWaveGrpcTestUtils.storeName);
  params.add(GeoWaveGrpcTestUtils.indexName);
  final ArrayList<String> extensions = new ArrayList<>();
  final LocalToMapReduceToGeoWaveCommandParametersProtos request =
      LocalToMapReduceToGeoWaveCommandParametersProtos.newBuilder().addAllParameters(
          params).addAllExtensions(extensions).setFormats("gpx").setJobTrackerHostPort(
              GeoWaveGrpcTestUtils.getMapReduceTestEnv().getJobtracker()).build();
  coreIngestBlockingStub.localToMapReduceToGeoWaveCommand(request);
  return true;
}

/** Consumes gpx data from the test Kafka cluster and ingests it into the test store. */
public boolean KafkaToGeoWaveCommand() {
  final ArrayList<String> params = new ArrayList<>();
  params.add(GeoWaveGrpcTestUtils.storeName);
  params.add(GeoWaveGrpcTestUtils.indexName);
  final ArrayList<String> extensions = new ArrayList<>();
  final KafkaToGeoWaveCommandParametersProtos request =
      KafkaToGeoWaveCommandParametersProtos.newBuilder().addAllParameters(
          params).addAllExtensions(extensions).setFormats("gpx").setGroupId(
              "testGroup").setBootstrapServers(
                  KafkaTestEnvironment.getInstance().getBootstrapServers()).setAutoOffsetReset(
                      "earliest").setMaxPartitionFetchBytes("5000000").setConsumerTimeoutMs(
                          "5000").setReconnectOnTimeout(false).setBatchSize(10000).build();
  coreIngestBlockingStub.kafkaToGeoWaveCommand(request);
  return true;
}

/** Lists available ingest format plugins. */
public String ListIngestPluginsCommand() {
  final ListIngestPluginsCommandParametersProtos request =
      ListIngestPluginsCommandParametersProtos.newBuilder().build();
  return coreIngestBlockingStub.listIngestPluginsCommand(request).getResponseValue();
}

/** Lists available index plugins. */
public String ListIndexPluginsCommand() {
  final ListIndexPluginsCommandParametersProtos request =
      ListIndexPluginsCommandParametersProtos.newBuilder().build();
  return coreStoreBlockingStub.listIndexPluginsCommand(request).getResponseValue();
}

/** Lists available store plugins. */
public String ListStorePluginsCommand() {
  final ListStorePluginsCommandParametersProtos request =
      ListStorePluginsCommandParametersProtos.newBuilder().build();
  return coreStoreBlockingStub.listStorePluginsCommand(request).getResponseValue();
}

/** Publishes local gpx test data to the test Kafka cluster. */
public boolean LocalToKafkaCommand() {
  final ArrayList<String> params = new ArrayList<>();
  params.add(TestUtils.TEST_CASE_BASE + "osm_gpx_test_case/");
  final ArrayList<String> extensions = new ArrayList<>();
  final LocalToKafkaCommandParametersProtos request =
      LocalToKafkaCommandParametersProtos.newBuilder().addAllParameters(params).addAllExtensions(
          extensions).setFormats("gpx").setBootstrapServers(
              KafkaTestEnvironment.getInstance().getBootstrapServers()).setRetryBackoffMs(
                  "1000").build();
  coreIngestBlockingStub.localToKafkaCommand(request);
  return true;
}

// Analytic Spark

/** Runs a local K-means Spark job from the test store into the output store. */
public boolean KmeansSparkCommand() {
  final ArrayList<String> params = new ArrayList<>();
  params.add(GeoWaveGrpcTestUtils.storeName);
  params.add(GeoWaveGrpcTestUtils.outputStoreName);
  final KmeansSparkCommandParametersProtos request =
      KmeansSparkCommandParametersProtos.newBuilder().addAllParameters(params).setAppName(
          "test-app") // Spark app name
          .setHost("localhost") // spark host
          .setMaster("local[*]") // spark master designation Id
          .setTypeName(GeoWaveGrpcTestUtils.typeName).setNumClusters(2) //
          .setNumIterations(2).setEpsilon(20.0).setUseTime(false).setGenerateHulls(true) // optional
          .setComputeHullData(true) // optional
          .setCqlFilter(GeoWaveGrpcTestUtils.cqlSpatialQuery).setMinSplits(1).setMaxSplits(
              4).setCentroidTypeName("poly").setHullTypeName("poly-hull").build();
  analyticSparkBlockingStub.kmeansSparkCommand(request);
  return true;
}

/** Runs a Spark SQL select over the test store, writing results to the output store. */
public boolean SparkSqlCommand() {
  final ArrayList<String> params = new ArrayList<>();
  params.add(
      "select * from %" + GeoWaveGrpcTestUtils.storeName + "|" + GeoWaveGrpcTestUtils.typeName);
  final SparkSqlCommandParametersProtos request =
      SparkSqlCommandParametersProtos.newBuilder().addAllParameters(params).setOutputStoreName(
          GeoWaveGrpcTestUtils.outputStoreName).setMaster("local[*]").setAppName(
              "sparkSqlTestApp").setHost("localhost").setOutputTypeName(
                  GeoWaveGrpcTestUtils.typeName).setShowResults(5).build();
  analyticSparkBlockingStub.sparkSqlCommand(request);
  return true;
}

/** Runs a local spatial self-join (GeomIntersects, radius 0.1) into the output store. */
public boolean SpatialJoinCommand() {
  final ArrayList<String> params = new ArrayList<>();
  params.add(GeoWaveGrpcTestUtils.storeName);
  params.add(GeoWaveGrpcTestUtils.storeName);
  params.add(GeoWaveGrpcTestUtils.outputStoreName);
  final SpatialJoinCommandParametersProtos request =
      SpatialJoinCommandParametersProtos.newBuilder().addAllParameters(params).setAppName(
          "test-app2").setMaster("local[*]").setHost("localhost").setLeftAdapterTypeName(
              GeoWaveGrpcTestUtils.typeName).setRightAdapterTypeName(
                  GeoWaveGrpcTestUtils.typeName).setOutLeftAdapterTypeName(
                      GeoWaveGrpcTestUtils.typeName + "_l").setOutRightAdapterTypeName(
                          GeoWaveGrpcTestUtils.typeName + "_r").setPredicate(
                              "GeomIntersects").setRadius(0.1).setNegativeTest(false).build();
  analyticSparkBlockingStub.spatialJoinCommand(request);
  return true;
}
}
/*
 * //******************************************************************
 * //
 * // Copyright 2016 Samsung Electronics All Rights Reserved.
 * //
 * //-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
 * //
 * // Licensed under the Apache License, Version 2.0 (the "License");
 * // you may not use this file except in compliance with the License.
 * // You may obtain a copy of the License at
 * //
 * //      http://www.apache.org/licenses/LICENSE-2.0
 * //
 * // Unless required by applicable law or agreed to in writing, software
 * // distributed under the License is distributed on an "AS IS" BASIS,
 * // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * // See the License for the specific language governing permissions and
 * // limitations under the License.
 * //
 * //-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
 */
package org.iotivity.cloud.accountserver.resources.acl.group;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;

import org.iotivity.cloud.accountserver.Constants;
import org.iotivity.cloud.accountserver.db.AccountDBManager;
import org.iotivity.cloud.accountserver.db.GroupTable;
import org.iotivity.cloud.accountserver.util.TypeCastingManager;
import org.iotivity.cloud.base.exception.ServerException.BadRequestException;
import org.iotivity.cloud.base.exception.ServerException.PreconditionFailedException;
import org.iotivity.cloud.util.Log;

/**
 * This class provides a set of APIs to manage a group: its name, owner,
 * members, masters, devices, resources and subgroups, persisted through
 * {@link AccountDBManager}.
 */
public class GroupManager {
    // Eagerly-created singleton instance.
    private static GroupManager              mGrManager          = new GroupManager();
    // Converts GroupTable objects to/from the map form stored in the DB.
    private TypeCastingManager<GroupTable>   mTypeGroup          = new TypeCastingManager<GroupTable>();
    private GroupPolicyManager               mGroupPolicyManager = new GroupPolicyManager();
    private GroupAclManager                  mGroupAclManager    = GroupAclManager
            .getInstance();

    /**
     * Function to get GroupManager as a singleton
     *
     * @return GroupManager as a singleton
     */
    public static GroupManager getInstance() {
        return mGrManager;
    }

    /**
     * API to replace group name to the group
     *
     * @param gid
     *            group id
     * @param gname
     *            group name
     */
    public void replaceGnameToGroup(String gid, String gname) {
        replaceProperties(gid, Constants.KEYFIELD_GROUP_NAME, gname);
    }

    /**
     * API to replace owner id to the group
     *
     * @param gid
     *            group id
     * @param owner
     *            owner id
     */
    public void replaceOwnerToGroup(String gid, String owner) {
        replaceProperties(gid, Constants.KEYFIELD_GROUP_OWNER, owner);
    }

    /**
     * API to add members to the group
     *
     * @param gid
     *            group id
     * @param members
     *            user uuid list
     */
    public void addMembersToGroup(String gid, ArrayList<String> members) {
        addProperties(gid, Constants.KEYFIELD_GROUP_MEMBERS, members);
    }

    /**
     * API to add masters to the group. Masters are also added to the member
     * list.
     *
     * @param gid
     *            group id
     * @param masters
     *            user uuid list
     */
    public void addMastersToGroup(String gid, ArrayList<String> masters) {
        addProperties(gid, Constants.KEYFIELD_GROUP_MASTERS, masters);
        addProperties(gid, Constants.KEYFIELD_GROUP_MEMBERS, masters);
    }

    /**
     * API to add resources to the group
     *
     * @param gid
     *            group id
     * @param resources
     *            resource list
     */
    public void addResourcesToGroup(String gid, ArrayList<Object> resources) {
        ArrayList<Object> addedResources = new ArrayList<>();
        // filter added resource list : if the device is already to the
        // group, the resource is not added to the group
        for (Object resource : resources) {
            String deviceId = getDeviceIdFromResource(
                    (HashMap<String, Object>) resource);
            ArrayList<String> devices = getGroupTable(gid).getDevices();
            if (devices == null) {
                addedResources.add(resource);
            } else {
                if (!devices.contains(deviceId)) {
                    addedResources.add(resource);
                }
            }
        }
        addProperties(gid, Constants.KEYFIELD_GROUP_RESOURCES, addedResources);
    }

    /**
     * API to add devices to the group
     *
     * @param gid
     *            group id
     * @param devices
     *            device list
     */
    public void addDevicesToGroup(String gid, ArrayList<String> devices) {
        // if resources regarding to the device is already registered, delete
        // resources in advance
        deleteResourcesOfDevices(gid, devices);
        addProperties(gid, Constants.KEYFIELD_GROUP_DEVICES, devices);
    }

    /**
     * API to delete member list from the group. Devices owned by the deleted
     * members are removed first; the members are then removed from both the
     * member and master lists.
     *
     * @param gid
     *            group id
     * @param members
     *            member uuid list
     */
    public void deleteMembersFromGroup(String gid, ArrayList<String> members) {
        GroupTable groupTable = getGroupTable(gid);
        ArrayList<String> devices = groupTable.getDevices();
        if (devices != null) {
            // delete devices owned by deleted members
            ArrayList<String> deletedDevices = new ArrayList<String>();
            for (String device : devices) {
                if (members.contains(findDeviceOwner(device))) {
                    deletedDevices.add(device);
                }
            }
            deleteDevicesFromGroup(gid, deletedDevices);
        }
        deleteProperties(gid, Constants.KEYFIELD_GROUP_MEMBERS, members);
        deleteProperties(gid, Constants.KEYFIELD_GROUP_MASTERS, members);
    }

    /**
     * API to delete device and resources of each device from all groups
     *
     * @param device
     *            device id to be deleted from all groups
     */
    public void deleteDevicesFromAllGroup(String device) {
        // groups are looked up by the device owner's membership
        ArrayList<HashMap<String, Object>> groupList = readGroupList(
                Constants.KEYFIELD_GROUP_MEMBERS, findDeviceOwner(device));
        if (groupList == null) {
            return;
        }
        ArrayList<String> devices = new ArrayList<>();
        devices.add(device);
        for (HashMap<String, Object> group : groupList) {
            // deleteProperties((String) group.get(Constants.REQ_GROUP_ID),
            // Constants.KEYFIELD_GROUP_DEVICES, devices);
            deleteDevicesFromGroup((String) group.get(Constants.REQ_GROUP_ID),
                    devices);
        }
    }

    // Selects group records whose {@code key} column equals {@code value}.
    private ArrayList<HashMap<String, Object>> readGroupList(String key,
            String value) {
        HashMap<String, Object> condition = new HashMap<>();
        condition.put(key, value);
        ArrayList<HashMap<String, Object>> records = AccountDBManager
                .getInstance().selectRecord(Constants.GROUP_TABLE, condition);
        return records;
    }

    /**
     * API to delete master list from the group
     *
     * @param gid
     *            group id
     * @param masters
     *            master uuid list
     */
    public void deleteMastersFromGroup(String gid, ArrayList<String> masters) {
        deleteProperties(gid, Constants.KEYFIELD_GROUP_MASTERS, masters);
    }

    /**
     * API to delete resource list from the group
     *
     * @param gid
     *            group id
     * @param deletedResources
     *            resource list
     */
    public void deleteResourcesFromGroup(String gid,
            ArrayList<Object> deletedResources) {
        deleteProperties(gid, Constants.KEYFIELD_GROUP_RESOURCES,
                deletedResources);
    }

    /**
     * API to delete device list from the group
     *
     * @param gid
     *            group id
     * @param devices
     *            device list
     */
    public void deleteDevicesFromGroup(String gid, ArrayList<String> devices) {
        // delete resources owned by deleted members
        deleteResourcesOfDevices(gid, devices);
        deleteProperties(gid, Constants.KEYFIELD_GROUP_DEVICES, devices);
    }

    /**
     * API to verify if the member user is eligible to add/delete/replace the
     * requested property values
     *
     * @param gid
     *            group id
     * @param mid
     *            user uuid
     * @param properties
     *            property key/value map to check
     * @param operation
     *            user operation
     * @throws PreconditionFailedException
     *             if the operation is not ADD, DELETE or REPLACE
     */
    public void verifyPostRequestAuthz(String gid, String mid,
            HashMap<String, Object> properties, UserOperation operation) {
        ArrayList<String> keySet = new ArrayList<String>();
        keySet.addAll(properties.keySet());
        mGroupPolicyManager.verifyOperationAuthorization(gid, mid, operation,
                keySet);
        switch (operation) {
            case ADD:
                verifyPostAddPolicy(gid, mid, properties);
                break;
            case DELETE:
                verifyPostDeletePolicy(gid, mid, properties);
                break;
            case REPLACE:
                verifyPostReplacePolicy(gid, mid, properties);
                break;
            default:
                throw new PreconditionFailedException(
                        operation + " is not supported");
        }
    }

    /**
     * API to get added property value list to the group among the entire value
     * list to update
     *
     * @param gid
     *            group id
     * @param property
     *            property to update
     * @param values
     *            value list to update
     * @return property value list to be added to the group
     * @throws BadRequestException
     *             if the group does not exist
     */
    public <T> ArrayList<T> getAddPropertyValues(String gid, String property,
            ArrayList<T> values) {
        GroupTable groupTable = getGroupTable(gid);
        if (groupTable == null) {
            throw new BadRequestException("group " + gid + " does not exist");
        }
        // NOTE(review): getPropertyValue may return null for a property that
        // was never set (addProperties guards for this case) — contains()
        // below would then NPE; confirm callers only use populated properties.
        ArrayList<T> propertyValues = groupTable.getPropertyValue(property);
        ArrayList<T> addedValues = new ArrayList<>();
        for (int i = 0; i < values.size(); i++) {
            if (!propertyValues.contains(values.get(i))) {
                addedValues.add(values.get(i));
            }
        }
        return addedValues;
    }

    /**
     * API to get deleted property value list from the group
     *
     * @param gid
     *            group id
     * @param property
     *            property to update
     * @param values
     *            value list to update
     * @return property value list to be deleted from the group
     * @throws BadRequestException
     *             if the group does not exist
     */
    public <T> ArrayList<T> getDeletePropertyValues(String gid,
            String property, ArrayList<T> values) {
        GroupTable groupTable = getGroupTable(gid);
        if (groupTable == null) {
            throw new BadRequestException("group " + gid + " does not exist");
        }
        // NOTE(review): same potential null from getPropertyValue as in
        // getAddPropertyValues — verify against callers.
        ArrayList<T> propertyValues = groupTable.getPropertyValue(property);
        ArrayList<T> deletedValues = new ArrayList<>();
        for (int i = 0; i < propertyValues.size(); i++) {
            if (!values.contains(propertyValues.get(i))) {
                deletedValues.add(propertyValues.get(i));
            }
        }
        return deletedValues;
    }

    /**
     * API to verify if the user is eligible to get the group information
     *
     * @param gid
     *            group id
     * @param mid
     *            user uuid
     */
    public void verifyGetRequestAuthz(String gid, String mid) {
        verifyMemberExistenceInGroup(gid, mid);
    }

    /**
     * API to verify if the user is eligible to delete the group
     *
     * @param gid
     *            group id
     * @param mid
     *            user uuid
     */
    public void verifyDeleteRequestAuthz(String gid, String mid) {
        ArrayList<String> property = new ArrayList<>();
        property.add(Constants.KEYFIELD_GROUP);
        mGroupPolicyManager.verifyOperationAuthorization(gid, mid,
                UserOperation.DELETE, property);
    }

    /**
     * API to delete a group. Unlinks the group from its parent's subgroup
     * list, removes the group record, then recursively deletes all subgroups
     * (and their group ACEs).
     *
     * @param gid
     *            An unique identifier of the group created under user entity
     *            who requested for group creation.
     */
    public void deleteGroup(String gid) {
        // NOTE(review): getGroupTable can return null for an unknown gid —
        // getParent() below would then NPE; confirm callers validate gid.
        GroupTable groupTable = getGroupTable(gid);
        String parentGid = groupTable.getParent();
        // delete subgroup ID of the parent group
        if (parentGid != null && !parentGid.isEmpty()) {
            ArrayList<Object> gidList = new ArrayList<Object>(
                    Arrays.asList(gid));
            deleteProperties(parentGid, Constants.KEYFIELD_GROUP_SUBGROUPS,
                    gidList);
        }
        HashMap<String, Object> condition = new HashMap<>();
        condition.put(Constants.KEYFIELD_GID, gid);
        // delete group from the table
        AccountDBManager.getInstance().deleteRecord(Constants.GROUP_TABLE,
                condition);
        ArrayList<String> subgroups = (ArrayList<String>) groupTable
                .getSubgroups();
        // delete subgroups
        if (subgroups != null) {
            for (String subgroup : subgroups) {
                deleteGroup(subgroup);
                mGroupAclManager.removeAceByGroup(subgroup);
            }
        }
    }

    /**
     * API to get the group information from the db
     *
     * @param gid
     *            group id
     * @return group information payload
     */
    public HashMap<String, Object> getGroupInfo(String gid) {
        GroupTable grouptable = getGroupTable(gid);
        return mTypeGroup.convertObjectToMap(grouptable);
    }

    /**
     * API to add property value list to the group. Duplicates already present
     * in the group are removed from {@code values} before the merge; the
     * updated record is persisted and the ACL list is updated.
     *
     * @param gid
     *            group id
     * @param property
     *            property
     * @param values
     *            value list (mutated: existing values are removed)
     */
    private <T> void addProperties(String gid, String property,
            ArrayList<T> values) {
        Log.d("added property name: " + property + ", values : " + values
                + " , to group : " + gid);
        if (values == null || values.isEmpty()) {
            return;
        }
        GroupTable groupTable = getGroupTable(gid);
        ArrayList<T> propertyValues = groupTable.getPropertyValue(property);
        if (propertyValues == null) {
            propertyValues = new ArrayList<T>();
        }
        values.removeAll(propertyValues);
        propertyValues.addAll(values);
        groupTable.setPropertyValue(property, propertyValues);
        AccountDBManager.getInstance().updateRecord(Constants.GROUP_TABLE,
                mTypeGroup.convertObjectToMap(groupTable));
        updateAclist(property, values, UserOperation.ADD, groupTable);
    }

    /**
     * API to delete property value list from the group. Returns silently when
     * nothing was removed; otherwise persists the record, updates the ACL
     * list, and recursively deletes the same values from all subgroups.
     *
     * @param gid
     *            group id
     * @param property
     *            property
     * @param values
     *            value list
     */
    private <T> void deleteProperties(String gid, String property,
            ArrayList<T> values) {
        Log.d("deleted property name: " + property + ", values : " + values
                + " , from group : " + gid);
        GroupTable groupTable = getGroupTable(gid);
        if (groupTable == null || values == null || values.isEmpty()) {
            return;
        }
        ArrayList<T> propertyValues = groupTable.getPropertyValue(property);
        if (propertyValues != null) {
            // removeAll == false means none of the values were present
            if (propertyValues.removeAll(values) == false) {
                return;
            }
        }
        groupTable.setPropertyValue(property, propertyValues);
        AccountDBManager.getInstance().updateRecord(Constants.GROUP_TABLE,
                mTypeGroup.convertObjectToMap(groupTable));
        ArrayList<String> subgroups = (ArrayList<String>) groupTable
                .getSubgroups();
        updateAclist(property, values, UserOperation.DELETE, groupTable);
        if (subgroups != null) {
            for (int i = 0; i < subgroups.size(); i++) {
                deleteProperties(subgroups.get(i), property, values);
            }
        }
    }

    /**
     * API to replace property value list to the group
     *
     * @param gid
     *            group id
     * @param property
     *            property
     * @param value
     *            value string (null/empty values are ignored)
     */
    private void replaceProperties(String gid, String property, String value) {
        Log.d("replaced property name: " + property + ", value : " + value
                + ", to group : " + gid);
        if (value == null || value.isEmpty()) {
            return;
        }
        GroupTable groupTable = getGroupTable(gid);
        groupTable.setPropertyValue(property, value);
        AccountDBManager.getInstance().updateRecord(Constants.GROUP_TABLE,
                mTypeGroup.convertObjectToMap(groupTable));
    }

    /**
     * API to get group table as an instance of GroupTable class
     *
     * @param gid
     *            group id
     * @return group table, or null if the group does not exist
     */
    public GroupTable getGroupTable(String gid) {
        GroupTable getGroupTable = new GroupTable();
        ArrayList<HashMap<String, Object>> groupList = AccountDBManager
                .getInstance().selectRecord(Constants.GROUP_TABLE,
                        getCondition(Constants.REQ_GROUP_ID, gid));
        if (groupList.isEmpty()) {
            return null;
        }
        getGroupTable = mTypeGroup.convertMaptoObject(groupList.get(0),
                getGroupTable);
        return getGroupTable;
    }

    // Removes from the group every resource whose href contains a
    // "/<REQ_DEVICE_ID>/<deviceId>" segment matching one of the given devices.
    private void deleteResourcesOfDevices(String gid,
            ArrayList<String> devices) {
        GroupTable groupTable = getGroupTable(gid);
        ArrayList<Object> resources = groupTable.getResources();
        if (resources == null) {
            return;
        }
        ArrayList<Object> deletedResources = new ArrayList<>();
        for (Object object : resources) {
            HashMap<String, Object> resource = (HashMap<String, Object>) object;
            String resourceHref = (String) resource
                    .get(Constants.KEYFIELD_ACE_RESOURCE_HREF);
            String splitHref[] = resourceHref.split("/");
            String deviceId = new String();
            for (int i = 0; i < splitHref.length; i++) {
                // the path segment after the device-id marker is the device id
                if (splitHref[i].equals(Constants.REQ_DEVICE_ID)
                        && (i + 1) < splitHref.length) {
                    deviceId = splitHref[i + 1];
                    break;
                }
            }
            if (devices.contains(deviceId)) {
                deletedResources.add(resource);
            }
        }
        deleteResourcesFromGroup(gid, deletedResources);
    }

    // REPLACE authorization: every replaced property value must be a String.
    private void verifyPostReplacePolicy(String gid, String mid,
            HashMap<String, Object> properties) {
        ArrayList<String> updatedKeySet = new ArrayList<String>();
        updatedKeySet.addAll(properties.keySet());
        mGroupPolicyManager.verifyOperationAuthorization(gid, mid,
                UserOperation.REPLACE, updatedKeySet);
        for (String key : properties.keySet()) {
            if (!(properties.get(key) instanceof String)) {
                throw new BadRequestException(
                        "replace property value should be an instance of String");
            }
        }
    }

    // Throws BadRequestException unless mid is a member of group gid.
    private void verifyMemberExistenceInGroup(String gid, String mid) {
        GroupTable groupTable = getGroupTable(gid);
        if (groupTable == null) {
            throw new BadRequestException("group " + gid + " does not exist");
        }
        if (groupTable.getMembers() == null) {
            throw new BadRequestException("there are no members in the group");
        }
        if (!groupTable.getMembers().contains(mid)) {
            throw new BadRequestException("uid is not a member of the group");
        }
    }

    // ADD authorization: every added property value must be a List.
    // NOTE(review): method continues past the end of this chunk; tokens kept
    // exactly as found.
    private void verifyPostAddPolicy(String gid, String mid,
            HashMap<String, Object> properties) {
        for (String key : properties.keySet()) {
            if (!(properties.get(key) instanceof List)) {
                throw new BadRequestException(
                        "add
property value should be an instance of Array"); } switch (key) { case Constants.KEYFIELD_GROUP_DEVICES: verifyDeviceOwner(mid, (ArrayList<String>) properties.get(key)); verifyExistenceInParentGroup(gid, key, (ArrayList<Object>) properties.get(key)); break; case Constants.KEYFIELD_GROUP_RESOURCES: verifyResourceFormat( Arrays.asList(Constants.KEYFIELD_RESOURCE_RT, Constants.KEYFIELD_RESOURCE_IF, Constants.KEYFIELD_ACE_RESOURCE_HREF), (ArrayList<HashMap<String, Object>>) properties .get(key)); verifyResourceOwner(mid, (ArrayList<HashMap<String, Object>>) properties .get(key)); verifyExistenceInParentGroup(gid, key, filterResourceExistenceInParentGroupDeviceProperty( gid, (ArrayList<HashMap<String, Object>>) properties .get(key))); break; case Constants.KEYFIELD_GROUP_MEMBERS: case Constants.KEYFIELD_GROUP_MASTERS: // TODO verify if members are registered to the Account user // DB verifyExistenceInParentGroup(gid, Constants.KEYFIELD_GROUP_MEMBERS, (ArrayList<Object>) properties.get(key)); break; default: throw new PreconditionFailedException( key + " is not supported"); } } } private void verifyResourceFormat(List<String> propertyList, ArrayList<HashMap<String, Object>> resources) { for (HashMap<String, Object> resource : resources) { for (String property : propertyList) { if (!resource.containsKey(property)) throw new PreconditionFailedException( property + " property is not included"); switch (property) { case Constants.KEYFIELD_RESOURCE_RT: case Constants.KEYFIELD_RESOURCE_IF: if (!(resource.get(property) instanceof List)) { throw new BadRequestException(property + " property values should be an instance of array"); } break; case Constants.KEYFIELD_ACE_RESOURCE_HREF: if (resource.get(property) == null) { throw new BadRequestException( property + " property is null"); } break; } } } } private void verifyPostDeletePolicy(String gid, String mid, HashMap<String, Object> properties) { for (String key : properties.keySet()) { if (!(properties.get(key) instanceof List)) 
{ throw new BadRequestException( "delete property value should be an instance of Array"); } switch (key) { case Constants.REQ_UUID_ID: break; case Constants.KEYFIELD_GROUP_DEVICES: case Constants.KEYFIELD_GROUP_RESOURCES: case Constants.KEYFIELD_GROUP_MEMBERS: if ((boolean) ((ArrayList<String>) properties.get(key)) .contains(getGroupTable(gid).getOwner())) { throw new BadRequestException("cannot remove owner Id"); } case Constants.KEYFIELD_GROUP_MASTERS: verifyExistenceInParentGroup(gid, key, (ArrayList<Object>) properties.get(key)); break; default: throw new BadRequestException( key + " property is not supported to "); } } } private ArrayList<HashMap<String, Object>> filterResourceExistenceInParentGroupDeviceProperty( String gid, ArrayList<HashMap<String, Object>> resources) { GroupTable parentGroupTable = getParentGroupTable(gid); if (parentGroupTable == null) { return resources; } ArrayList<String> devices = parentGroupTable.getDevices(); if (devices == null) { return resources; } for (HashMap<String, Object> resource : resources) { // if the device is registered to the parent group, filter the // resource list String deviceId = getDeviceIdFromResource(resource); if (devices.contains(deviceId)) { resources.remove(resource); } } return resources; } private GroupTable getParentGroupTable(String gid) { try { return getGroupTable(getGroupTable(gid).getParent()); } catch (Exception e) { return null; } } private <T> void verifyExistenceInParentGroup(String gid, String property, ArrayList<T> values) { GroupTable parentGroupTable = getParentGroupTable(gid); if (parentGroupTable == null) { return; } ArrayList<Object> groupValues = parentGroupTable .getPropertyValue(property); if (groupValues == null) { throw new BadRequestException( "verifying parent group Existence failed"); } if (!groupValues.containsAll(values)) { throw new BadRequestException( "verifying parent group Existence failed"); } } private void verifyDeviceOwner(String mid, ArrayList<String> values) { for 
(String deviceId : values) { if (!findDeviceOwner(deviceId).equals(mid)) { throw new BadRequestException("verifying device owner failed"); } } } private void verifyResourceOwner(String mid, ArrayList<HashMap<String, Object>> resources) { for (HashMap<String, Object> resource : resources) { String deviceId = getDeviceIdFromResource(resource); if (!findDeviceOwner(deviceId).equals(mid)) { throw new BadRequestException( "verifying resource owner failed"); } } } private String getDeviceIdFromResource(HashMap<String, Object> resource) { String resourceHref = (String) resource .get(Constants.KEYFIELD_ACE_RESOURCE_HREF); String splitHref[] = resourceHref.split("/"); for (int i = 0; i < splitHref.length; i++) { if (splitHref[i].equals(Constants.REQ_DEVICE_ID) && (i + 1) < splitHref.length) { return splitHref[i + 1]; } } return null; } private String findDeviceOwner(String deviceId) { return mGroupAclManager.getDeviceOwnerId(deviceId); } private String findResourceOwner(String resourceHref) { String splitHref[] = resourceHref.split("/"); for (int i = 0; i < splitHref.length; i++) { if (splitHref[i].equals(Constants.REQ_DEVICE_ID) && (i + 1) < splitHref.length) { return findDeviceOwner(splitHref[i + 1]); } } return null; } private HashMap<String, Object> getCondition(String property, String value) { HashMap<String, Object> condition = new HashMap<>(); condition.put(property, value); return condition; } private <T> void updateAclist(String property, ArrayList<T> values, UserOperation operation, GroupTable groupTable) { switch (operation) { case ADD: addAclist(property, values, groupTable); break; case DELETE: removeAclist(property, values, groupTable); break; default: throw new BadRequestException( operation + " is not supported operation in the group"); } } private <T> void addAclist(String property, ArrayList<T> values, GroupTable groupTable) { String gid = groupTable.getGid(); int permission = 0; for (Object gaclObject : groupTable.getGacl()) { HashMap<String, Object> gacl 
= (HashMap<String, Object>) gaclObject; permission = (int) gacl.get(Constants.KEYFIELD_ACE_PERMISSION); } switch (property) { case Constants.KEYFIELD_GROUP_MEMBERS: mGroupAclManager.addAceByMembers(gid, permission, (ArrayList<String>) values); break; case Constants.KEYFIELD_GROUP_DEVICES: mGroupAclManager.addAceByDevices(gid, permission, (ArrayList<String>) values); break; case Constants.KEYFIELD_GROUP_RESOURCES: mGroupAclManager.addAceByResources(gid, permission, (ArrayList<HashMap<String, Object>>) values); break; case Constants.KEYFIELD_GROUP_OWNER: case Constants.KEYFIELD_GROUP_MASTERS: case Constants.KEYFIELD_GROUP_SUBGROUPS: case Constants.KEYFIELD_GROUP_GACL: return; default: throw new BadRequestException( property + " is not supported property in the group"); } } private <T> void removeAclist(String property, ArrayList<T> values, GroupTable groupTable) { String gid = groupTable.getGid(); switch (property) { case Constants.KEYFIELD_GROUP_MEMBERS: mGroupAclManager.removeAceByMembers((ArrayList<String>) values, gid); break; case Constants.KEYFIELD_GROUP_DEVICES: mGroupAclManager.removeAceByDevices((ArrayList<String>) values, gid); break; case Constants.KEYFIELD_GROUP_RESOURCES: mGroupAclManager.removeAceByResources( (ArrayList<HashMap<String, Object>>) values, gid); break; case Constants.KEYFIELD_GROUP_OWNER: case Constants.KEYFIELD_GROUP_MASTERS: case Constants.KEYFIELD_GROUP_SUBGROUPS: case Constants.KEYFIELD_GROUP_GACL: return; default: throw new BadRequestException( property + " is not supported property in the group"); } } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.phoenix.compile;

import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;

import org.apache.hadoop.hbase.util.Pair;
import org.apache.http.annotation.Immutable;
import org.apache.phoenix.compile.OrderPreservingTracker.Ordering;
import org.apache.phoenix.coprocessor.BaseScannerRegionObserver;
import org.apache.phoenix.exception.SQLExceptionCode;
import org.apache.phoenix.exception.SQLExceptionInfo;
import org.apache.phoenix.execute.TupleProjector;
import org.apache.phoenix.expression.CoerceExpression;
import org.apache.phoenix.expression.Expression;
import org.apache.phoenix.parse.AliasedNode;
import org.apache.phoenix.parse.ParseNode;
import org.apache.phoenix.parse.SelectStatement;
import org.apache.phoenix.schema.AmbiguousColumnException;
import org.apache.phoenix.schema.ColumnNotFoundException;
import org.apache.phoenix.schema.types.PDataType;
import org.apache.phoenix.schema.types.PDecimal;
import org.apache.phoenix.schema.types.PVarchar;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;

/**
 *
 * Validates GROUP BY clause and builds a {@link GroupBy} instance to encapsulate the
 * group by expressions.
 *
 *
 * @since 0.1
 */
public class GroupByCompiler {
    @Immutable
    public static class GroupBy {
        // Expressions evaluated per row to form the group.
        private final List<Expression> expressions;
        // Same expressions, possibly coerced so nullable fixed-width types can
        // be encoded in the group-by row key (see compile()).
        private final List<Expression> keyExpressions;
        // Scan attribute telling the server coprocessor which aggregation
        // strategy to use (ungrouped / unordered / key-ordered).
        private final String scanAttribName;
        public static final GroupByCompiler.GroupBy EMPTY_GROUP_BY = new GroupBy(new GroupByBuilder());

        private GroupBy(GroupByBuilder builder) {
            this.expressions = ImmutableList.copyOf(builder.expressions);
            this.keyExpressions = ImmutableList.copyOf(builder.keyExpressions);
            this.scanAttribName = builder.scanAttribName;
            // keyExpressions is a positional mirror of expressions.
            assert(expressions.size() == keyExpressions.size());
        }

        public List<Expression> getExpressions() {
            return expressions;
        }

        public List<Expression> getKeyExpressions() {
            return keyExpressions;
        }

        public String getScanAttribName() {
            return scanAttribName;
        }

        public boolean isEmpty() {
            return expressions.isEmpty();
        }

        public static class GroupByBuilder {
            private String scanAttribName;
            private List<Expression> expressions = Collections.emptyList();
            private List<Expression> keyExpressions = Collections.emptyList();

            public GroupByBuilder() {
            }

            public GroupByBuilder setScanAttribName(String scanAttribName) {
                this.scanAttribName = scanAttribName;
                return this;
            }

            public GroupByBuilder setExpressions(List<Expression> expressions) {
                this.expressions = expressions;
                return this;
            }

            public GroupByBuilder setKeyExpressions(List<Expression> keyExpressions) {
                this.keyExpressions = keyExpressions;
                return this;
            }

            public GroupBy build() {
                return new GroupBy(this);
            }
        }

        // True unless the server must buffer and sort distinct groups
        // (i.e. the unordered-group-by strategy was chosen).
        public boolean isOrderPreserving() {
            return !BaseScannerRegionObserver.UNORDERED_GROUP_BY_EXPRESSIONS.equals(scanAttribName);
        }

        // Appends a human-readable description of the server-side aggregation
        // step to an EXPLAIN plan.
        public void explain(List<String> planSteps, Integer limit) {
            if (scanAttribName != null) {
                if (BaseScannerRegionObserver.UNGROUPED_AGG.equals(scanAttribName)) {
                    planSteps.add(" SERVER AGGREGATE INTO SINGLE ROW");
                } else if (BaseScannerRegionObserver.UNORDERED_GROUP_BY_EXPRESSIONS.equals(scanAttribName)) {
                    planSteps.add(" SERVER AGGREGATE INTO DISTINCT ROWS BY " + getExpressions() + (limit == null ? "" : " LIMIT " + limit + " GROUP" + (limit.intValue() == 1 ? "" : "S")));
                } else {
                    planSteps.add(" SERVER AGGREGATE INTO ORDERED DISTINCT ROWS BY " + getExpressions() + (limit == null ? "" : " LIMIT " + limit + " GROUP" + (limit.intValue() == 1 ? "" : "S")));
                }
            }
        }
    }

    /**
     * Get list of columns in the GROUP BY clause.
     * @param context query context kept between compilation of different query clauses
     * @param statement SQL statement being compiled
     * @return the {@link GroupBy} instance encapsulating the group by clause
     * @throws ColumnNotFoundException if column name could not be resolved
     * @throws AmbiguousColumnException if an unaliased column name is ambiguous across multiple tables
     */
    public static GroupBy compile(StatementContext context, SelectStatement statement, TupleProjector tupleProjector, boolean isInRowKeyOrder) throws SQLException {
        List<ParseNode> groupByNodes = statement.getGroupBy();
        /**
         * Distinct can use an aggregate plan if there's no group by.
         * Otherwise, we need to insert a step after the Merge that dedups.
         * Order by only allowed on columns in the select distinct
         */
        if (groupByNodes.isEmpty()) {
            if (statement.isAggregate()) {
                return new GroupBy.GroupByBuilder().setScanAttribName(BaseScannerRegionObserver.UNGROUPED_AGG).build();
            }
            if (!statement.isDistinct()) {
                return GroupBy.EMPTY_GROUP_BY;
            }
            // SELECT DISTINCT is compiled as a GROUP BY over all selected
            // expressions.
            groupByNodes = Lists.newArrayListWithExpectedSize(statement.getSelect().size());
            for (AliasedNode aliasedNode : statement.getSelect()) {
                groupByNodes.add(aliasedNode.getNode());
            }
        }
        // Accumulate expressions in GROUP BY
        ExpressionCompiler compiler = new ExpressionCompiler(context, GroupBy.EMPTY_GROUP_BY);
        List<Pair<Integer,Expression>> groupBys = Lists.newArrayListWithExpectedSize(groupByNodes.size());
        OrderPreservingTracker tracker = new OrderPreservingTracker(context, GroupBy.EMPTY_GROUP_BY, Ordering.UNORDERED, groupByNodes.size(), tupleProjector);
        for (int i = 0; i < groupByNodes.size(); i++) {
            ParseNode node = groupByNodes.get(i);
            Expression expression = node.accept(compiler);
            // Stateless (constant) expressions contribute nothing to grouping
            // and are dropped; aggregates are illegal inside GROUP BY.
            if (!expression.isStateless()) {
                if (compiler.isAggregate()) {
                    throw new SQLExceptionInfo.Builder(SQLExceptionCode.AGGREGATE_IN_GROUP_BY)
                        .setMessage(expression.toString()).build().buildException();
                }
                tracker.track(expression);
                // Remember the original clause position so the key-ordered
                // case can keep the user's ordering.
                groupBys.add(new Pair<Integer,Expression>(i,expression));
            }
            compiler.reset();
        }
        if (groupBys.isEmpty()) {
            return GroupBy.EMPTY_GROUP_BY;
        }
        boolean isRowKeyOrderedGrouping = isInRowKeyOrder && tracker.isOrderPreserving();
        List<Expression> expressions = Lists.newArrayListWithExpectedSize(groupBys.size());
        // keyExpressions aliases expressions until a coercion forces a copy
        // (copy-on-write, see the loop near the end).
        List<Expression> keyExpressions = expressions;
        String groupExprAttribName;
        // This is true if the GROUP BY is composed of only PK columns. We further check here that
        // there are no "gaps" in the PK columns positions used (i.e. we start with the first PK
        // column and use each subsequent one in PK order).
        if (isRowKeyOrderedGrouping) {
            groupExprAttribName = BaseScannerRegionObserver.KEY_ORDERED_GROUP_BY_EXPRESSIONS;
            for (Pair<Integer,Expression> groupBy : groupBys) {
                expressions.add(groupBy.getSecond());
            }
        } else {
            /*
             * Otherwise, our coprocessor needs to collect all distinct groups within a region, sort them, and
             * hold on to them until the scan completes.
             */
            groupExprAttribName = BaseScannerRegionObserver.UNORDERED_GROUP_BY_EXPRESSIONS;
            /*
             * Put fixed length nullables at the end, so that we can represent null by the absence of the trailing
             * value in the group by key. If there is more than one, we'll need to convert the ones not at the end
             * into a Decimal so that we can use an empty byte array as our representation for null (which correctly
             * maintains the sort order). We convert the Decimal back to the appropriate type (Integer or Long) when
             * it's retrieved from the result set.
             *
             * More specifically, order into the following buckets:
             * 1) non nullable fixed width
             * 2) variable width
             * 3) nullable fixed width
             * Within each bucket, order based on the column position in the schema. Putting the fixed width values
             * in the beginning optimizes access to subsequent values.
             */
            Collections.sort(groupBys, new Comparator<Pair<Integer,Expression>>() {
                @Override
                public int compare(Pair<Integer,Expression> gb1, Pair<Integer,Expression> gb2) {
                    Expression e1 = gb1.getSecond();
                    Expression e2 = gb2.getSecond();
                    boolean isFixed1 = e1.getDataType().isFixedWidth();
                    boolean isFixed2 = e2.getDataType().isFixedWidth();
                    boolean isFixedNullable1 = e1.isNullable() && isFixed1;
                    boolean isFixedNullable2 = e2.isNullable() && isFixed2;
                    if (isFixedNullable1 == isFixedNullable2) {
                        if (isFixed1 == isFixed2) {
                            // Not strictly necessary, but forces the order to match the schema
                            // column order (with PK columns before value columns).
                            //return o1.getColumnPosition() - o2.getColumnPosition();
                            return gb1.getFirst() - gb2.getFirst();
                        } else if (isFixed1) {
                            return -1;
                        } else {
                            return 1;
                        }
                    } else if (isFixedNullable1) {
                        return 1;
                    } else {
                        return -1;
                    }
                }
            });
            for (Pair<Integer,Expression> groupBy : groupBys) {
                expressions.add(groupBy.getSecond());
            }
            // The trailing expression may stay as-is (null = absent trailing
            // value); only the non-trailing ones may need coercion.
            for (int i = expressions.size()-2; i >= 0; i--) {
                Expression expression = expressions.get(i);
                PDataType keyType = getKeyType(expression);
                if (keyType == expression.getDataType()) {
                    continue;
                }
                // Copy expressions only when keyExpressions will be different than expressions
                if (keyExpressions == expressions) {
                    keyExpressions = new ArrayList<Expression>(expressions);
                }
                // Wrap expression in an expression that coerces the expression to the required type..
                // This is done so that we have a way of expressing null as an empty key when more
                // than one fixed and nullable types are used in a group by clause
                keyExpressions.set(i, CoerceExpression.create(expression, keyType));
            }
        }
        GroupBy groupBy = new GroupBy.GroupByBuilder().setScanAttribName(groupExprAttribName).setExpressions(expressions).setKeyExpressions(keyExpressions).build();
        return groupBy;
    }

    // Maps a nullable fixed-width type to a variable-width stand-in (Decimal
    // or Varchar) whose empty byte array can represent null in the key.
    private static PDataType getKeyType(Expression expression) {
        PDataType type = expression.getDataType();
        if (!expression.isNullable() || !type.isFixedWidth()) {
            return type;
        }
        if (type.isCastableTo(PDecimal.INSTANCE)) {
            return PDecimal.INSTANCE;
        }
        if (type.isCastableTo(PVarchar.INSTANCE)) {
            return PVarchar.INSTANCE;
        }
        // This might happen if someone tries to group by an array
        throw new IllegalStateException("Multiple occurrences of type " + type + " may not occur in a GROUP BY clause");
    }

    // Static utility holder — not instantiable.
    private GroupByCompiler() {
    }
}
/*
 * Copyright 2000-2017 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.jetbrains.python.sdk.skeletons;

import com.google.common.annotations.VisibleForTesting;
import com.intellij.openapi.application.PathManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.vfs.JarFileSystem;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.io.ZipUtil;
import com.jetbrains.python.PyNames;
import com.jetbrains.python.sdk.PySdkUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Locale;
import java.util.regex.PatternSyntaxException;

/**
 * Locates pre-generated Python skeleton archives bundled with the IDE and
 * copies/unpacks them into the SDK's skeleton directory.
 */
public class DefaultPregeneratedSkeletonsProvider implements PyPregeneratedSkeletonsProvider {
  private static final Logger LOG = Logger.getInstance(DefaultPregeneratedSkeletonsProvider.class);

  /**
   * @return the directory holding pre-generated skeleton archives, checking the
   * source layout first and then the compiled-binary layout, or {@code null}
   * if neither exists.
   */
  @Nullable
  private static File findPregeneratedSkeletonsRoot() {
    final String path = PathManager.getHomePath();
    LOG.info("Home path is " + path);
    File f = new File(path, "python/skeletons");  // from sources
    if (f.exists()) return f;
    f = new File(path, "skeletons");              // compiled binary
    if (f.exists()) return f;
    return null;
  }

  /**
   * Checks whether {@code fileName} is a zipped skeletons archive matching the
   * given base name, allowing an optional numeric suffix before ".zip".
   */
  @VisibleForTesting
  public static boolean isApplicableZippedSkeletonsFileName(@NotNull String prebuiltSkeletonsName, @NotNull String fileName) {
    try {
      // prebuiltSkeletonsName is interpolated into the regex; a malformed name
      // is treated as "no match" rather than propagating the exception.
      return fileName.matches(".*" + prebuiltSkeletonsName + "\\.?\\d*\\.zip");
    }
    catch (PatternSyntaxException e) {
      return false;
    }
  }

  /**
   * Computes the skeletons archive name for the given SDK, or {@code null} for
   * remote SDKs or SDKs with no version string.
   */
  @Nullable
  public static String getPregeneratedSkeletonsName(@NotNull Sdk sdk, int generatorVersion, boolean withMinorVersion, boolean withExtension) {
    if (PySdkUtil.isRemote(sdk)) {
      return null;
    }
    @NonNls final String versionString = sdk.getVersionString();
    if (versionString == null) {
      return null;
    }

    return getPrebuiltSkeletonsName(generatorVersion, versionString, withMinorVersion, withExtension);
  }

  /**
   * Builds the platform- and version-specific skeletons archive base name,
   * e.g. {@code skeletons-mac-<gen>-<osVersion>-<pyVersion>[.zip]}.
   */
  @NotNull
  @VisibleForTesting
  public static String getPrebuiltSkeletonsName(int generatorVersion,
                                                @NotNull @NonNls String versionString,
                                                boolean withMinorVersion,
                                                boolean withExtension) {
    // Locale.ROOT keeps the name stable regardless of the default locale
    // (e.g. Turkish dotless-i would otherwise corrupt "I" -> "ı").
    String version = versionString.toLowerCase(Locale.ROOT).replace(" ", "-");

    if (!withMinorVersion) {
      int ind = version.lastIndexOf(".");
      if (ind != -1) {
        //strip last version
        version = version.substring(0, ind);
      }
    }

    if (SystemInfo.isMac) {
      // Trim the OS version to major.minor (e.g. "10.13.6" -> "10.13").
      String osVersion = SystemInfo.OS_VERSION;
      int dot = osVersion.indexOf('.');
      if (dot >= 0) {
        int secondDot = osVersion.indexOf('.', dot + 1);
        if (secondDot >= 0) {
          osVersion = osVersion.substring(0, secondDot);
        }
      }
      return "skeletons-mac-" + generatorVersion + "-" + osVersion + "-" + version + (withExtension ? ".zip" : "");
    }
    else {
      String os = SystemInfo.isWindows ? "win" : "nix";
      return "skeletons-" + os + "-" + generatorVersion + "-" + version + (withExtension ? ".zip" : "");
    }
  }

  /**
   * Finds a matching pre-generated skeletons archive for the SDK and wraps it;
   * returns {@code null} when no applicable archive can be located.
   */
  @Override
  public PyPregeneratedSkeletons getSkeletonsForSdk(Sdk sdk, int generatorVersion) {
    final File root = findPregeneratedSkeletonsRoot();
    if (root == null || !root.exists()) {
      return null;
    }
    LOG.info("Pregenerated skeletons root is " + root);

    String prebuiltSkeletonsName = getPregeneratedSkeletonsName(sdk, generatorVersion, Registry
      .is("python.prebuilt.skeletons.minor.version.aware"), false);

    if (prebuiltSkeletonsName == null) return null;

    File f = null;
    File[] children = root.listFiles();
    if (children != null) {
      for (File file : children) {
        if (isApplicableZippedSkeletonsFileName(prebuiltSkeletonsName, file.getName())) {
          f = file;
          break;
        }
      }
    }

    if (f != null) {
      LOG.info("Found pre-generated skeletons at " + f.getPath());
      final VirtualFile virtualFile = LocalFileSystem.getInstance().refreshAndFindFileByIoFile(f);
      if (virtualFile == null) {
        LOG.info("Could not find pre-generated skeletons in VFS");
        return null;
      }
      return new ArchivedSkeletons(JarFileSystem.getInstance().getJarRootForLocalFile(virtualFile));
    }
    else {
      LOG.info("Not found pre-generated skeletons at " + root);
      return null;
    }
  }

  /**
   * Skeletons backed by a zip archive mounted through {@link JarFileSystem}.
   */
  private static class ArchivedSkeletons implements PyPregeneratedSkeletons {
    private final VirtualFile myArchiveRoot;

    ArchivedSkeletons(VirtualFile archiveRoot) {
      myArchiveRoot = archiveRoot;
    }

    /**
     * Copies a single module's skeleton out of the archive into
     * {@code skeletonDir}, handling both plain modules ({@code foo/bar.py})
     * and packages ({@code foo/bar/__init__.py}).
     *
     * @return {@code true} when the skeleton was found and copied
     */
    @Override
    public boolean copyPregeneratedSkeleton(String moduleName, String skeletonDir) {
      File targetDir;
      final String modulePath = moduleName.replace('.', '/');
      File skeletonsDir = new File(skeletonDir);
      VirtualFile pregenerated = myArchiveRoot.findFileByRelativePath(modulePath + ".py");
      if (pregenerated == null) {
        // Not a plain module; try it as a package.
        pregenerated = myArchiveRoot.findFileByRelativePath(modulePath + "/" + PyNames.INIT_DOT_PY);
        targetDir = new File(skeletonsDir, modulePath);
      }
      else {
        int pos = modulePath.lastIndexOf('/');
        if (pos < 0) {
          targetDir = skeletonsDir;
        }
        else {
          final String moduleParentPath = modulePath.substring(0, pos);
          targetDir = new File(skeletonsDir, moduleParentPath);
        }
      }
      if (pregenerated != null && (targetDir.exists() || targetDir.mkdirs())) {
        LOG.info("Pre-generated skeleton for " + moduleName);
        File target = new File(targetDir, pregenerated.getName());
        // try-with-resources closes both streams; the original leaked the
        // archive's input stream.
        try (InputStream input = pregenerated.getInputStream();
             FileOutputStream fos = new FileOutputStream(target)) {
          FileUtil.copy(input, fos);
        }
        catch (IOException e) {
          LOG.info("Error copying pre-generated skeleton", e);
          return false;
        }
        return true;
      }
      return false;
    }

    /**
     * Extracts the whole skeletons archive into {@code skeletonDir};
     * extraction failures are logged and swallowed (best effort).
     */
    @Override
    public void unpackPreGeneratedSkeletons(String skeletonDir) {
      ProgressManager.progress("Unpacking pre-generated skeletons...");
      try {
        final VirtualFile jar = JarFileSystem.getInstance().getVirtualFileForJar(myArchiveRoot);
        if (jar != null) {
          ZipUtil.extract(new File(jar.getPath()), new File(skeletonDir), null);
        }
      }
      catch (IOException e) {
        LOG.info("Error unpacking pre-generated skeletons", e);
      }
    }
  }
}