gt
stringclasses
1 value
context
stringlengths
2.05k
161k
package com.webonise.tomcat8.redisession; import com.webonise.tomcat8.redisession.redisclient.BooleanConverter; import com.webonise.tomcat8.redisession.redisclient.Redis; import org.apache.catalina.*; import org.apache.catalina.util.StandardSessionIdGenerator; import org.apache.juli.logging.Log; import org.apache.juli.logging.LogFactory; import redis.clients.jedis.ScanParams; import java.beans.PropertyChangeListener; import java.beans.PropertyChangeSupport; import java.io.IOException; import java.util.*; import java.util.concurrent.*; import java.util.concurrent.atomic.*; import java.util.function.*; import java.util.stream.*; /** * Responsible for creating sessions that persist into Redis. */ public class RedisSessionManager implements Manager { private static final Log LOG = LogFactory.getLog(RedisSessionManager.class); private final AtomicLong sessionCounter = new AtomicLong(0L); private final PropertyChangeSupport changeListeners = new PropertyChangeSupport(this); private final Redis redis = new Redis(); // TODO Allow this to be populated from XML config private volatile Context context; private volatile boolean distributable = true; private volatile int getMaxInactiveInterval = (int) TimeUnit.HOURS.toSeconds(1L); private volatile SessionIdGenerator sessionIdGenerator = new StandardSessionIdGenerator(); public Redis getRedis() { return redis; } /** * Return the Container with which this Manager is associated. * * @deprecated Use {@link #getContext()}. This method will be removed in * Tomcat 9 onwards. */ @Override @Deprecated public Container getContainer() { return getContext(); } /** * Set the Container with which this Manager is associated. * * @param container The newly associated Container * @deprecated Use {@link #setContext(Context)}. This method will be removed in * Tomcat 9 onwards. */ @Override @Deprecated public void setContainer(Container container) { setContext((Context) container); } /** * Return the Context with which this Manager is associated. 
*/ @Override public Context getContext() { return this.context; } /** * Set the Container with which this Manager is associated. * * @param context The newly associated Context */ @Override public void setContext(Context context) { Objects.requireNonNull(context, "context associated with the manager"); this.context = context; } /** * Return the distributable flag for the sessions supported by * this Manager, which is {@code true} by default. */ @Override public boolean getDistributable() { return this.distributable; } /** * Set the distributable flag for the sessions supported by this * Manager. If this flag is set, all user data objects added to * sessions associated with this manager must implement Serializable. * * @param distributable The new distributable flag */ @Override public void setDistributable(boolean distributable) { this.distributable = distributable; } /** * Return the default maximum inactive interval (in seconds) * for Sessions created by this Manager, which is 12 hours (in seconds) by default. */ @Override public int getMaxInactiveInterval() { return this.getMaxInactiveInterval; } /** * Set the default maximum inactive interval (in seconds) * for Sessions created by this Manager. * * @param interval The new default value, which must be greater than 0. 
*/ @Override public void setMaxInactiveInterval(int interval) { if (interval <= 0) { throw new IllegalArgumentException("Interval must be greater than 0; was " + interval); } this.getMaxInactiveInterval = interval; } /** * return the session id generator */ @Override public SessionIdGenerator getSessionIdGenerator() { return this.sessionIdGenerator; } /** * Sets the session id generator * * @param sessionIdGenerator The session id generator, which may not be {@code ull} */ @Override public void setSessionIdGenerator(SessionIdGenerator sessionIdGenerator) { Objects.requireNonNull(sessionIdGenerator, "session ID generator"); this.sessionIdGenerator = sessionIdGenerator; } /** * Gets the session id length (in bytes) of Sessions created by * this Manager. * * @return The session id length * @deprecated Use {@link SessionIdGenerator#getSessionIdLength()}. * This method will be removed in Tomcat 9 onwards. */ @Override @Deprecated public int getSessionIdLength() { return sessionIdGenerator.getSessionIdLength(); } /** * Sets the session id length (in bytes) for Sessions created by this * Manager. * * @param idLength The session id length * @deprecated Use {@link SessionIdGenerator#setSessionIdLength(int)}. * This method will be removed in Tomcat 9 onwards. */ @Override @Deprecated public void setSessionIdLength(int idLength) { sessionIdGenerator.setSessionIdLength(idLength); } /** * Returns the total number of sessions created by this manager. * * @return Total number of sessions created by this manager. */ @Override public long getSessionCounter() { return sessionCounter.get(); } /** * Sets the total number of sessions created by this manager. * * @param sessionCounter Total number of sessions created by this manager, which must be nonnegative. */ @Override public void setSessionCounter(long sessionCounter) { this.sessionCounter.set(sessionCounter); } /** * Gets the maximum number of sessions that have been active at the same * time. 
* * @return Maximum number of sessions that have been active at the same * time */ @Override public int getMaxActive() { return fetchIntegerKey(Convention.SESSION_MAX_ACTIVE_KEY); } /** * (Re)sets the maximum number of sessions that have been active at the * same time. * * @param maxActive Maximum number of sessions that have been active at * the same time, which must be nonnegative. */ @Override public void setMaxActive(int maxActive) { if (maxActive < 0) { throw new IllegalArgumentException("Maximum active sesions must be nonnegative"); } storeIntegerKey(Convention.SESSION_MAX_ACTIVE_KEY, maxActive); } /** * Gets the number of currently active sessions. * * @return Number of currently active sessions */ @Override public int getActiveSessions() { return fetchIntegerKey(Convention.ACTIVE_SESSIONS_COUNT_KEY); } public void setActiveSessions(int activeSessions) { redis.withRedisAsync(jedis -> { jedis.set(Convention.ACTIVE_SESSIONS_COUNT_KEY, Integer.toString(activeSessions)); }); int maxActive = getMaxActive(); if (maxActive < activeSessions) setMaxActive(activeSessions); } /** * Cleans all the sessions as per {@link #cleanSession(String)}. */ protected void cleanSessions() throws Exception { createAttributesKeyStream() .map(Convention::attributesKeyToSessionId) .forEach(sessionId -> this.cleanSession(sessionId)); } /** * Cleans up the state of the session, invalidating it if its expiration has passed or it is otherwise nonsensical. * * @param sessionid The session id to check; never {@code null} * @return {@code true} if the session is still valid after cleaning; {@code false} if the session is now invalid * @throws Exception */ protected boolean cleanSession(String sessionid) { try { Date expDate = getSessionExpirationDate(sessionid); // If the expiration date is in the past, expire it. 
if (expDate.before(new Date())) { invalidateSession(sessionid); return false; } return true; } catch (Exception e) { LOG.warn("Exception when checking for expiration; the session may not be invalidated", e); return true; } } protected void invalidateSession(String sessionid) { String metadataKey = Convention.sessionIdToMetadataKey(sessionid); try { redis.withRedis(jedis -> { // Ordering is significant: invalid requires expired time to be set, and deleting attributes requires invalid jedis.hset(metadataKey, Convention.EXPIRED_TIME_HKEY, Convention.stringFromDate(new Date())); jedis.hset(metadataKey, Convention.IS_VALID_HKEY, new BooleanConverter().convertToString(false)); jedis.del(Convention.sessionIdToAttributesKey(sessionid)); }); } catch (Exception e) { LOG.error("Could not invalidate session with id: " + sessionid, e); } } /** * Gets the number of sessions that have expired. * * @return Number of sessions that have expired */ @Override public long getExpiredSessions() { return fetchLongKey(Convention.EXPIRED_SESSIONS_COUNT_KEY); } /** * Sets the number of sessions that have expired. 
* * @param expiredSessions Number of sessions that have expired */ @Override public void setExpiredSessions(long expiredSessions) { storeLongKey(Convention.EXPIRED_SESSIONS_COUNT_KEY, expiredSessions); } protected long fetchLongKey(String key) { try { // See if we have a cached value String valueString = redis.withRedis(jedis -> { return jedis.get(key); }); if (valueString == null || valueString.isEmpty()) return 0L; return Long.parseLong(valueString); } catch (Exception e) { LOG.error("Error retrieving long key " + key + "; returning 0", e); return 0L; } } protected void storeLongKey(String key, long value) { try { redis.withRedis(jedis -> { jedis.set(key, Long.toString(value)); }); } catch (Exception e) { LOG.error("Error storing long value into key " + key + "; nothing set", e); } } /** * Gets the number of sessions that were not created because the maximum * number of active sessions was reached. * * @return Number of rejected sessions */ @Override public int getRejectedSessions() { // We don't do this return 0; } /** * Gets the longest time (in seconds) that an expired session had been * alive. * * @return Longest time (in seconds) that an expired session had been * alive. */ @Override public int getSessionMaxAliveTime() { return fetchIntegerKey(Convention.SESSION_MAX_ALIVE_TIME_KEY); } /** * Sets the longest time (in seconds) that an expired session had been * alive. * * @param sessionMaxAliveTime Longest time (in seconds) that an expired * session had been alive. */ @Override public void setSessionMaxAliveTime(int sessionMaxAliveTime) { storeIntegerKey(Convention.SESSION_MAX_ALIVE_TIME_KEY, sessionMaxAliveTime); } /** * Provides how long the session was alive for, assuming it was expired. * * @param sessionId The session to check; never {@code null} * @return How long the session was alive for (in milliseconds); may be {@code null} if it could not be determined. 
*/ protected Long getSessionAliveTime(String sessionId) { if (sessionId == null || sessionId.isEmpty()) return null; try { String metadataKey = Convention.metadataKeyToSessionId(sessionId); List<String> results = redis.withRedis(jedis -> { return jedis.hmget(metadataKey, Convention.CREATION_TIME_HKEY, Convention.EXPIRED_TIME_HKEY); }); String creationTimeString = results.get(0); String expiredTimeString = results.get(1); if (creationTimeString == null || expiredTimeString == null) return null; if (creationTimeString.isEmpty() || expiredTimeString.isEmpty()) return null; Date creationTime = Convention.dateFromString(creationTimeString); Date expiredTime = Convention.dateFromString(expiredTimeString); return expiredTime.getTime() - creationTime.getTime(); } catch (Exception e) { LOG.info("Could not determine the session alive time for " + sessionId, e); return null; } } /** * Gets the average time (in seconds) that expired sessions had been * alive. This may be based on sample data. * * @return Average time (in seconds) that expired sessions had been * alive. */ @Override public int getSessionAverageAliveTime() { return fetchIntegerKey(Convention.SESSION_AVERAGE_ALIVE_TIME_KEY); } protected int fetchIntegerKey(String key) { try { String valueString = redis.withRedis(jedis -> { return jedis.get(key); }); if (valueString == null || valueString.isEmpty()) return 0; return Integer.parseInt(valueString); } catch (Exception e) { LOG.error("Error when trying to retrieve key " + key + " from Redis; returning 0", e); return 0; } } protected void storeIntegerKey(String key, int value) { try { redis.withRedis(jedis -> { jedis.set(key, Integer.toString(value)); }); } catch (Exception e) { LOG.error("Error when trying to store the key " + key + " with value " + value + " in Redis; not updated", e); } } /** * Gets the current rate of session creation (in session per minute). This * may be based on sample data. 
* * @return The current rate (in sessions per minute) of session creation */ @Override public int getSessionCreateRate() { return fetchIntegerKey(Convention.SESSION_CREATE_RATE_KEY); } public void setSessionCreateRate(int sessionsPerMinute) { storeIntegerKey(Convention.SESSION_CREATE_RATE_KEY, sessionsPerMinute); } protected int calculateSessionCreateRate() throws Exception { return createSessionIdStream() .map(this::getSessionCreateDate).filter(it -> it != null) .map(SessionRateData::new) .reduce(SessionRateData::merge) .map(SessionRateData::getSessionsPerMinute).orElse(0); } private Date getSessionCreateDate(String sessionId) { try { String metadataKey = Convention.sessionIdToMetadataKey(sessionId); String createDateString = redis.withRedis(jedis -> { return jedis.hget(metadataKey, Convention.CREATION_TIME_HKEY); }); if (createDateString == null || createDateString.isEmpty()) return new Date(); return Convention.dateFromString(createDateString); } catch (Exception e) { LOG.warn("Could not get the create date for sessionId " + sessionId + "; defaulting to now", e); return new Date(); } } private Stream<String> createSessionIdStream() throws Exception { return createMetadataKeyStream().map(Convention::metadataKeyToSessionId); } /** * Gets the current rate of session expiration (in session per minute). This * may be based on sample data * * @return The current rate (in sessions per minute) of session expiration */ @Override public int getSessionExpireRate() { return fetchIntegerKey(Convention.SESSION_EXPIRE_RATE_KEY); } public void setSessionExpireRate(int rate) { storeIntegerKey(Convention.SESSION_EXPIRE_RATE_KEY, rate); } public int calculateSessionExpireRate() throws Exception { return createExpiredSessionIdStream() .map(this::getSessionExpirationDate).filter(it -> it != null) .map(SessionRateData::new) .reduce(SessionRateData::merge) .map(SessionRateData::getSessionsPerMinute).orElse(0); } /** * Add this Session to the set of active Sessions for this Manager. 
* * @param session Session to be added */ @Override public void add(Session session) { if (session == null || session instanceof RedisSession) return; throw new IllegalArgumentException("This manager only supports RedisSession"); // TODO Maybe clone the session into Redis? } /** * Add a property change listener to this component. * * @param listener The listener to add */ @Override public void addPropertyChangeListener(PropertyChangeListener listener) { this.changeListeners.addPropertyChangeListener(listener); } /** * Change the session ID of the current session to a new randomly generated * session ID. * * @param session The session to change the session ID for */ @Override public void changeSessionId(Session session) { Objects.requireNonNull(sessionIdGenerator, "the means of generating the session"); String newId = sessionIdGenerator.generateSessionId(); this.changeSessionId(session, newId); } /** * Change the session ID of the current session to a specified session ID. * * @param session The session to change the session ID for * @param newId new session ID */ @Override public void changeSessionId(Session session, String newId) { Objects.requireNonNull(session, "session to change the id"); Objects.requireNonNull(redis, "the Redis client"); String oldId = session.getId(); Map<String, String> keyMapping = Convention.getChangeSessionIdMapping(oldId, newId); try { redis.withRedis(jedis -> { keyMapping.entrySet().forEach(entry -> { jedis.rename(entry.getKey(), entry.getValue()); }); }); } catch (Exception e) { throw new RuntimeException("Could not change session id for " + oldId + " to " + newId, e); } } /** * Get a session from the recycled ones or create a new empty one. * The PersistentManager manager does not need to create session data * because it reads it from the Store. 
*/ @Override public RedisSession createEmptySession() { return createSession(null); } /** * Construct and return a new session object, based on the default * settings specified by this Manager's properties. The session * id specified will be used as the session id. * If a new session cannot be created for any reason, return * <code>null</code>. * * @param sessionId The session id which should be used to create the * new session; if <code>null</code>, the session * id will be assigned by this method, and available via the getId() * method of the returned session. * @throws IllegalStateException if a new session cannot be * instantiated for any reason */ @Override public RedisSession createSession(String sessionId) { if (sessionId == null) { Objects.requireNonNull(this.sessionIdGenerator, "session id generator"); sessionId = this.sessionIdGenerator.generateSessionId(); } return new RedisSession(this, sessionId); } /** * Return the active Session, associated with this Manager, with the * specified session id (if any); otherwise return <code>null</code>. * * @param id The session id for the session to be returned * @throws IllegalStateException if a new session cannot be * instantiated for any reason * @throws IOException if an input/output error occurs while * processing this request */ @Override public RedisSession findSession(String id) throws IOException { try { boolean found = redis.withRedis(jedis -> { return jedis.exists(Convention.sessionIdToMetadataKey(id)); }); if (!found) return null; RedisSession session = new RedisSession(this, id); boolean isValid = session.isValidInternal(); if (!isValid) return null; return session; } catch (Exception e) { LOG.error("Could not retrieve session for id " + id, e); return null; } } /** * Return the set of active Sessions associated with this Manager. * If this Manager has no active Sessions, a zero-length array is returned. 
*/ @Override public Session[] findSessions() { try { Function<String, Session> sessionFunction = sessionId -> { try { return this.findSession(sessionId); } catch (Exception e) { LOG.error("Could not retrieve session for id " + sessionId, e); return null; } }; return createValidSessionIdStream().map(sessionFunction).filter(it -> it != null).toArray(i -> new Session[i]); } catch (Exception e) { LOG.error("Could not retrieve sessions; returning an empty array as per the API", e); return new Session[0]; } } /** * Load any currently active sessions that were previously unloaded * to the appropriate persistence mechanism, if any. If persistence is not * supported, this method returns without doing anything. * * @throws ClassNotFoundException if a serialized class cannot be * found during the reload * @throws IOException if an input/output error occurs */ @Override public void load() throws ClassNotFoundException, IOException { // DO NOTHING } /** * Remove this Session from the active Sessions for this Manager. * * @param session Session to be removed */ @Override public void remove(Session session) { remove(session, false); } /** * Remove this Session from the active Sessions for this Manager. * * @param session Session to be removed * @param update Should the expiration statistics be updated */ @Override public void remove(Session session, boolean update) { Objects.requireNonNull(session, "session to remove"); String sessionId = session.getId(); Objects.requireNonNull(sessionId, "id of session to remove"); invalidateSession(sessionId); if (update) { try { doAllInBackground( () -> setSessionExpireRate(calculateSessionExpireRate()), () -> setExpiredSessions(countCurrentExpiredSessions()) ).forEach(ForkJoinTask::join); } catch (Exception e) { throw new RuntimeException("Could not update the expiration statistics", e); } } } /** * Remove a property change listener from this component. 
* * @param listener The listener to remove */ @Override public void removePropertyChangeListener(PropertyChangeListener listener) { this.changeListeners.removePropertyChangeListener(listener); } /** * Save any currently active sessions in the appropriate persistence * mechanism, if any. If persistence is not supported, this method * returns without doing anything. * * @throws IOException if an input/output error occurs */ @Override public void unload() throws IOException { // DO NOTHING } public List<ForkJoinTask<Void>> doAllInBackground(Procedure... procedures) { return Arrays.asList(procedures).parallelStream().map(this::doInBackground).collect(Collectors.toList()); } /** * This method will be invoked by the context/container on a periodic * basis and allows the manager to implement * a method that executes periodic tasks, such as expiring sessions etc. */ @Override public void backgroundProcess() { doAllInBackground( this::cleanSessions, () -> setActiveSessions(countCurrentActiveSessions()), () -> setExpiredSessions(countCurrentExpiredSessions()), () -> setSessionMaxAliveTime(calculateSessionMaxAliveTime()), () -> setSessionAverageAliveTime(calculateSessionAverageAliveTime()), () -> setSessionCreateRate(calculateSessionCreateRate()), () -> setSessionExpireRate(calculateSessionExpireRate()) ); } /** * Returns the session average alive time in seconds. * * @return The average time that a session has been alive, in seconds; or 0 if that cannot be calculated. 
*/ protected int calculateSessionAverageAliveTime() { try { Collector<String, ?, Double> averager = Collectors.averagingLong(this::getSessionAliveTime); Double average = createExpiredSessionIdStream().collect(averager); if (average == null) return 0; return Double.valueOf(average / 1000).intValue(); } catch (Exception e) { LOG.error("Could not calculate the average session alive time; defaulting to 0", e); return 0; } } protected int setSessionAverageAliveTime(int averageAliveTimeSeconds) { if (averageAliveTimeSeconds < 0) { throw new IllegalArgumentException("Average alive time must be nonnegative; was " + averageAliveTimeSeconds); } try { redis.withRedis(jedis -> { jedis.set(Convention.SESSION_AVERAGE_ALIVE_TIME_KEY, Integer.toString(averageAliveTimeSeconds)); }); } catch (Exception e) { LOG.error("Could not set the average session alive time in Redis", e); } return getSessionAverageAliveTime(); } protected int calculateSessionMaxAliveTime() { try { Optional<Long> result = createMetadataKeyStream() .map(this::getSessionAliveTime) .max(Comparator.naturalOrder()); Long maxAliveTime = result.orElse(null); if (maxAliveTime == null) return 0; maxAliveTime = TimeUnit.MILLISECONDS.toSeconds(maxAliveTime); return (int) Math.min(Integer.MAX_VALUE, maxAliveTime); } catch (Exception e) { LOG.error("Error determining the session max alive time; defaulting to 0", e); return 0; } } protected ForkJoinTask<Void> doInBackground(Procedure action) { return ForkJoinPool.commonPool().submit(() -> { action.apply(); return null; }); } protected <T> ForkJoinTask<T> doInBackground(Supplier<T> action) { return ForkJoinPool.commonPool().submit(action::get); } protected long countCurrentExpiredSessions() { try { return createMetadataKeyStream() .map(Convention::metadataKeyToSessionId) .filter(sessionId -> !getSessionValidity(sessionId)) .distinct().count(); } catch (Exception e) { LOG.error("Error counting the current expired sessions; returning 0", e); return 0L; } } /** * Given a session 
id, provides the expiration date for that session. * * @param sessionId The session whose expiry is desired; never {@code null}. * @return The expiration date for that session, if known; otherwise, {@code new Date(0)}. */ protected Date getSessionExpirationDate(String sessionId) { Objects.requireNonNull(sessionId, "session id whose expiration date is desired"); Date defaultDate = new Date(0L); String metadataKey = Convention.sessionIdToMetadataKey(sessionId); try { List<String> results = redis.withRedis(jedis -> { return jedis.hmget(metadataKey, Convention.LAST_ACCESS_TIME_HKEY, Convention.MAX_INACTIVE_INTERVAL_HKEY); }); String lastAccessTimeString = results.get(0); if (lastAccessTimeString == null || lastAccessTimeString.isEmpty()) { LOG.info("No last access time in Redis for " + sessionId + "; returning " + defaultDate); return defaultDate; } Date lastAccessTime = Convention.dateFromString(lastAccessTimeString); String maxInactiveIntervalString = results.get(1); if (maxInactiveIntervalString == null || maxInactiveIntervalString.isEmpty()) { LOG.info("No max inactive inveral in Redis for " + sessionId + "; returning " + defaultDate); return defaultDate; } long maxInactiveInterval = Long.parseLong(maxInactiveIntervalString); return new Date(lastAccessTime.getTime() + maxInactiveInterval); } catch (Exception e) { LOG.error("Error while determining the session expiration date for " + sessionId + "; returning " + defaultDate, e); return defaultDate; } } protected boolean getSessionValidity(String sessionId) { Objects.requireNonNull(sessionId, "session id whose expiration date is desired"); String metadataKey = Convention.sessionIdToMetadataKey(sessionId); try { String validityString = redis.withRedis(jedis -> { return jedis.hget(metadataKey, Convention.IS_VALID_HKEY); }); return new BooleanConverter().convertFromString(validityString); } catch (Exception e) { LOG.error("Error while determining session validity for " + sessionId + "; returning invalid", e ); return 
false; } } /** * Provides a stream of the metadata keys (some of which may be duplicated). * * @return The stream of metadata keys. */ protected Stream<String> createMetadataKeyStream() throws Exception { return createKeyScanStream(Convention.METADATA_KEY_PATTERN); } /** * Provides a stream of attribute keys (some of which may be duplicated). * * @return The stream of attribute keys. */ protected Stream<String> createAttributesKeyStream() throws Exception { return createKeyScanStream(Convention.ATTRIBUTES_KEY_PATTERN); } /** * Creates a key scan stream that matches keys with the given pattern. * * @param pattern The Redis pattern to match. */ protected Stream<String> createKeyScanStream(String pattern) throws Exception { return redis.fullScan(() -> { ScanParams param = new ScanParams(); param.match(pattern); return param; }).parallel(); } protected Stream<String> createValidSessionIdStream() throws Exception { cleanSessions(); return createSessionIdStreamWithValidityFilter(true); } protected Stream<String> createExpiredSessionIdStream() throws Exception { return createSessionIdStreamWithValidityFilter(false); } protected Stream<String> createSessionIdStreamWithValidityFilter(boolean valid) throws Exception { Predicate<String> validityTest = this::getSessionValidity; if (!valid) validityTest = validityTest.negate(); return createSessionIdStream().filter(validityTest); } protected int countCurrentActiveSessions() { try { long count = createValidSessionIdStream().distinct().count(); return (int) Math.min(Integer.MAX_VALUE, count); } catch (Exception e) { LOG.error("Could not calculate max active; returning 0", e); return 0; } } public void autovivifySession(String sessionId) { Objects.requireNonNull(sessionId, "session id to autovivify"); String metadataId = Convention.sessionIdToMetadataKey(sessionId); String dateString = Convention.stringFromDate(new Date()); String isValidString = new BooleanConverter().convertToString(true); // Do this work simultaneously, but 
ensure it's done before we exit the method Stream.<Redis.RedisConsumer>of( jedis -> { jedis.hsetnx(metadataId, Convention.IS_VALID_HKEY, isValidString); }, jedis -> { jedis.hsetnx(metadataId, Convention.CREATION_TIME_HKEY, dateString); }, jedis -> { jedis.hset(metadataId, Convention.LAST_ACCESS_TIME_HKEY, dateString); } ).map(consumer -> doInBackground(() -> redis.withRedis(consumer))).forEach(ForkJoinTask::join); } private interface Procedure { void apply() throws Exception; } private static class SessionRateData { private final long earliestCreateMoment; private final long sessionCount; public SessionRateData(Date earliestCreateTime) { this(earliestCreateTime.getTime(), 1L); } public SessionRateData(long earliestCreateMoment, long sessionCount) { this.earliestCreateMoment = earliestCreateMoment; this.sessionCount = sessionCount; } public static SessionRateData merge(SessionRateData left, SessionRateData right) { final long newEarliestMoment = Math.min(left.earliestCreateMoment, right.earliestCreateMoment); final long newSessionCount = left.sessionCount + right.sessionCount; return new SessionRateData(newEarliestMoment, newSessionCount); } public int getSessionsPerMinute() { long count = sessionCount; if (count == 0) return 0; long now = System.currentTimeMillis(); long earliest = earliestCreateMoment; if (earliest >= now) return 0; double rate = (count * 1.0) / (now - earliest); rate = rate / (1000 * 60); // Convert from millis to minutes return Double.valueOf(rate).intValue(); } } }
/* * Copyright 2015 MovingBlocks * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.terasology.web.servlet; import java.io.OutputStreamWriter; import java.net.URI; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.List; import java.util.Map; import java.util.Set; import javax.ws.rs.Consumes; import javax.ws.rs.GET; import javax.ws.rs.POST; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.core.Context; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import javax.ws.rs.core.Response.Status; import javax.ws.rs.core.StreamingOutput; import javax.ws.rs.core.UriInfo; import org.glassfish.jersey.server.mvc.Viewable; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.terasology.module.Module; import org.terasology.module.ModuleMetadata; import org.terasology.module.ModuleMetadataJsonAdapter; import org.terasology.module.RemoteModuleExtension; import org.terasology.naming.Name; import org.terasology.naming.Version; import org.terasology.naming.exception.VersionParseException; import org.terasology.web.version.VersionInfo; import org.terasology.web.model.ModuleListModel; import org.terasology.web.model.jenkins.Job; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Multimap; import com.google.common.collect.TreeMultimap; import 
com.google.gson.stream.JsonWriter; /** * TODO Type description */ @Path("/modules/") public class ModuleServlet { private static final Logger logger = LoggerFactory.getLogger(ModuleServlet.class); private final ModuleListModel model; private final ModuleMetadataJsonAdapter metadataWriter; /** * Sorts modules descending by version - id is ignored */ private final Comparator<Module> versionComparator = (m1, m2) -> m2.getVersion().compareTo(m1.getVersion()); public ModuleServlet(ModuleListModel model) { this.model = model; this.metadataWriter = new ModuleMetadataJsonAdapter(); for (RemoteModuleExtension ext : RemoteModuleExtension.values()) { metadataWriter.registerExtension(ext.getKey(), ext.getValueType()); } } @GET @Path("list") @Produces(MediaType.APPLICATION_JSON) public Response list() { logger.info("Requested module list as json"); StreamingOutput stream = os -> { List<Name> sortedModuleIds = new ArrayList<>(model.getModuleIds()); sortedModuleIds.sort(null); try (JsonWriter writer = new JsonWriter(new OutputStreamWriter(os, StandardCharsets.UTF_8))) { writer.beginArray(); writer.setIndent(" "); // enable pretty printing for (Name name : sortedModuleIds) { for (Module module : model.getModuleVersions(name)) { ModuleMetadata meta = module.getMetadata(); metadataWriter.write(meta, writer); } } writer.endArray(); } }; return Response.ok(stream).build(); } @GET @Path("show") @Produces(MediaType.TEXT_HTML) public Viewable show() { logger.info("Requested module list as HTML"); Set<Name> names = model.getModuleIds(); // the key needs to be string, so that FreeMarker can use it for lookups Multimap<String, Module> map = TreeMultimap.create(String.CASE_INSENSITIVE_ORDER, versionComparator); for (Name name : names) { map.putAll(name.toString(), model.getModuleVersions(name)); } ImmutableMap<Object, Object> dataModel = ImmutableMap.builder() .put("items", map.asMap()) .put("version", VersionInfo.getVersion()) .build(); return new Viewable("/module-list.ftl", dataModel); } 
@GET @Path("list/latest") @Produces(MediaType.APPLICATION_JSON) public Response listLatest() { logger.info("Requested lastest info as json"); StreamingOutput stream = os -> { List<Name> sortedModuleIds = new ArrayList<>(model.getModuleIds()); sortedModuleIds.sort(null); try (JsonWriter writer = new JsonWriter(new OutputStreamWriter(os, StandardCharsets.UTF_8))) { writer.beginArray(); writer.setIndent(" "); // enable pretty printing for (Name name : sortedModuleIds) { Module module = model.getLatestModuleVersion(name); ModuleMetadata meta = module.getMetadata(); metadataWriter.write(meta, writer); } writer.endArray(); } }; return Response.ok(stream).build(); } @GET @Path("list/{module}") @Produces(MediaType.APPLICATION_JSON) public Response listModule(@PathParam("module") String moduleName) { logger.info("Requested module versions as json"); Name name = new Name(moduleName); StreamingOutput stream = os -> { try (JsonWriter writer = new JsonWriter(new OutputStreamWriter(os, StandardCharsets.UTF_8))) { writer.beginArray(); writer.setIndent(" "); // enable pretty printing for (Module module : model.getModuleVersions(name)) { ModuleMetadata meta = module.getMetadata(); metadataWriter.write(meta, writer); } writer.endArray(); } }; return Response.ok(stream).build(); } @GET @Path("show/{module}") @Produces(MediaType.TEXT_HTML) public Viewable showModule(@PathParam("module") String module) { logger.info("Requested module versions as HTML"); Name name = new Name(module); List<Module> sortedList = new ArrayList<>(model.getModuleVersions(name)); sortedList.sort(versionComparator); Map<String, Collection<Module>> map = Collections.singletonMap(module, sortedList); ImmutableMap<Object, Object> dataModel = ImmutableMap.builder() .put("items", map) .put("moduleId", module) .put("version", VersionInfo.getVersion()) .build(); return new Viewable("/module-list.ftl", dataModel); } @GET @Path("list/{module}/latest") @Produces(MediaType.TEXT_HTML) public Response 
listModuleLatest(@Context UriInfo uriInfo, @PathParam("module") String module) { logger.info("Requested lastest module info as HTML"); int pathLen = uriInfo.getPath().length(); String path = uriInfo.getPath().substring(0, pathLen - "latest".length()); Module latest = model.getLatestModuleVersion(new Name(module)); if (latest == null) { return Response.status(Status.NOT_FOUND).build(); } String ver = latest.getVersion().toString(); URI redirect = URI.create(uriInfo.getBaseUri() + path + ver); return Response.temporaryRedirect(redirect).build(); } @GET @Path("list/{module}/{version}") @Produces(MediaType.APPLICATION_JSON) public Response listModuleVersion(@PathParam("module") String moduleName, @PathParam("version") String versionStr) { logger.info("Requested single module info as json"); try { Version version = new Version(versionStr); Module module = model.getModule(new Name(moduleName), version); if (module == null) { return Response.status(Status.NOT_FOUND).build(); } ModuleMetadata meta = module.getMetadata(); StreamingOutput stream = os -> { try (OutputStreamWriter writer = new OutputStreamWriter(os, StandardCharsets.UTF_8)) { metadataWriter.write(meta, writer); } }; return Response.ok(stream).build(); } catch (VersionParseException e) { logger.warn("Invalid version for module '{}' specified: {}", moduleName, versionStr); return Response.status(Status.NOT_FOUND).build(); } } @GET @Path("show/{module}/latest") @Produces(MediaType.TEXT_HTML) public Response showModuleLatest(@Context UriInfo uriInfo, @PathParam("module") String module) { logger.info("Requested lastest module info as HTML"); int pathLen = uriInfo.getPath().length(); String path = uriInfo.getPath().substring(0, pathLen - "latest".length()); Module latest = model.getLatestModuleVersion(new Name(module)); if (latest == null) { return Response.status(Status.NOT_FOUND).build(); } String ver = latest.getVersion().toString(); URI redirect = URI.create(uriInfo.getBaseUri() + path + ver); return 
Response.temporaryRedirect(redirect).build(); } @GET @Path("show/{module}/{version}") @Produces(MediaType.TEXT_HTML) public Response showModuleVersion(@PathParam("module") String module, @PathParam("version") String version) { logger.info("Requested module info as HTML"); try { Name moduleName = new Name(module); Version modVersion = new Version(version); Module mod = model.getModule(moduleName, modVersion); if (mod == null) { logger.warn("No entry for module '{}' found", module); return Response.status(Status.NOT_FOUND).build(); } ModuleMetadata meta = mod.getMetadata(); Set<Module> deps = model.resolve(moduleName, modVersion); ImmutableMap<Object, Object> dataModel = ImmutableMap.builder() .put("meta", meta) .put("updated", RemoteModuleExtension.getLastUpdated(meta)) .put("downloadUrl", RemoteModuleExtension.getDownloadUrl(meta)) .put("downloadSize", RemoteModuleExtension.getArtifactSize(meta) / 1024) .put("dependencies", deps) .put("version", VersionInfo.getVersion()) .build(); return Response.ok(new Viewable("/module-info.ftl", dataModel)).build(); } catch (VersionParseException e) { logger.warn("Invalid version for module '{}' specified: {}", module, version); return Response.status(Status.NOT_FOUND).build(); } } @POST @Path("update") @Consumes(MediaType.APPLICATION_JSON) public Response updateModulePost(Job jobState) { String job = jobState.getName(); logger.info("Requested module update for {}", job); model.updateModule(new Name(job)); return Response.ok().build(); } @POST @Path("update-all") @Consumes(MediaType.APPLICATION_JSON) public Response updateAllModulesPost() { logger.info("Requested complete module update"); new Thread(model::updateAllModules).start(); return Response.ok().build(); } }
/** * Copyright Pravega Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.pravega.cli.admin.segmentstore; import io.pravega.cli.admin.AdminCommandState; import io.pravega.cli.admin.serializers.ContainerKeySerializer; import io.pravega.cli.admin.serializers.ContainerMetadataSerializer; import io.pravega.cli.admin.serializers.SltsKeySerializer; import io.pravega.cli.admin.serializers.SltsMetadataSerializer; import io.pravega.cli.admin.utils.TestUtils; import io.pravega.client.ClientConfig; import io.pravega.client.EventStreamClientFactory; import io.pravega.client.stream.EventStreamWriter; import io.pravega.client.stream.EventWriterConfig; import io.pravega.client.stream.StreamConfiguration; import io.pravega.client.stream.impl.JavaSerializer; import io.pravega.controller.server.WireCommandFailedException; import io.pravega.segmentstore.contracts.Attributes; import io.pravega.shared.security.auth.DefaultCredentials; import io.pravega.test.common.AssertExtensions; import io.pravega.test.common.SecurityConfigDefaults; import io.pravega.test.integration.utils.SetupUtils; import lombok.Cleanup; import org.junit.Assert; import org.junit.Rule; import org.junit.Test; import org.junit.Before; import org.junit.After; import org.junit.rules.Timeout; import java.io.File; import java.nio.file.FileAlreadyExistsException; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.Arrays; import java.util.Properties; import 
java.util.UUID; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; import static io.pravega.cli.admin.segmentstore.tableSegment.GetTableSegmentInfoCommand.ENTRY_COUNT; import static io.pravega.cli.admin.segmentstore.tableSegment.GetTableSegmentInfoCommand.KEY_LENGTH; import static io.pravega.cli.admin.segmentstore.tableSegment.GetTableSegmentInfoCommand.LENGTH; import static io.pravega.cli.admin.segmentstore.tableSegment.GetTableSegmentInfoCommand.SEGMENT_NAME; import static io.pravega.cli.admin.segmentstore.tableSegment.GetTableSegmentInfoCommand.START_OFFSET; import static io.pravega.cli.admin.serializers.AbstractSerializer.appendField; import static io.pravega.cli.admin.serializers.ContainerMetadataSerializer.SEGMENT_ID; import static io.pravega.cli.admin.serializers.ContainerMetadataSerializer.SEGMENT_PROPERTIES_LENGTH; import static io.pravega.cli.admin.serializers.ContainerMetadataSerializer.SEGMENT_PROPERTIES_NAME; import static io.pravega.cli.admin.serializers.ContainerMetadataSerializer.SEGMENT_PROPERTIES_SEALED; import static io.pravega.cli.admin.serializers.ContainerMetadataSerializer.SEGMENT_PROPERTIES_START_OFFSET; import static io.pravega.shared.NameUtils.getMetadataSegmentName; import static io.pravega.test.integration.utils.TestUtils.pathToConfig; /** * This test is for testing the segment store cli commands. */ public abstract class AbstractSegmentStoreCommandsTest { // Setup utility. 
// Shared service harness for every test in this class and its concrete subclasses.
protected static final SetupUtils SETUP_UTILS = new SetupUtils();
// Holds the admin CLI state used by executeCommand(); reset in setup(), closed in tearDown().
protected static final AtomicReference<AdminCommandState> STATE = new AtomicReference<>();
// Single container keeps the cluster small and the flush-to-storage loop bounded.
protected static final int CONTAINER_COUNT = 1;

// Per-test wall-clock limit; integration commands against live services can hang.
@Rule
public final Timeout globalTimeout = new Timeout(60, TimeUnit.SECONDS);

private ClientConfig clientConfig;

/**
 * Starts all Pravega services and configures the admin CLI state.
 * Called from the concrete subclasses' @Before methods with the desired
 * auth/TLS combination.
 *
 * @param enableAuth whether to enable authentication (admin credentials)
 * @param enableTls  whether to enable TLS (trust store from the test config dir)
 * @throws Exception if service startup or configuration fails
 */
public void setup(boolean enableAuth, boolean enableTls) throws Exception {
    ClientConfig.ClientConfigBuilder clientConfigBuilder = ClientConfig.builder().controllerURI(SETUP_UTILS.getControllerUri());
    STATE.set(new AdminCommandState());
    SETUP_UTILS.startAllServices(enableAuth, enableTls);
    // CLI configuration mirroring the just-started services.
    Properties pravegaProperties = new Properties();
    pravegaProperties.setProperty("cli.controller.rest.uri", SETUP_UTILS.getControllerRestUri().toString());
    pravegaProperties.setProperty("cli.controller.grpc.uri", SETUP_UTILS.getControllerUri().toString());
    pravegaProperties.setProperty("pravegaservice.zk.connect.uri", SETUP_UTILS.getZkTestServer().getConnectString());
    pravegaProperties.setProperty("pravegaservice.container.count", String.valueOf(CONTAINER_COUNT));
    pravegaProperties.setProperty("pravegaservice.admin.gateway.port", String.valueOf(SETUP_UTILS.getAdminPort()));
    if (enableAuth) {
        clientConfigBuilder = clientConfigBuilder.credentials(new DefaultCredentials(SecurityConfigDefaults.AUTH_ADMIN_PASSWORD,
                SecurityConfigDefaults.AUTH_ADMIN_USERNAME));
        pravegaProperties.setProperty("cli.channel.auth", Boolean.toString(true));
        pravegaProperties.setProperty("cli.credentials.username", SecurityConfigDefaults.AUTH_ADMIN_USERNAME);
        pravegaProperties.setProperty("cli.credentials.pwd", SecurityConfigDefaults.AUTH_ADMIN_PASSWORD);
    }
    if (enableTls) {
        clientConfigBuilder = clientConfigBuilder.trustStore(pathToConfig() + SecurityConfigDefaults.TLS_CA_CERT_FILE_NAME)
                .validateHostName(false);
        pravegaProperties.setProperty("cli.channel.tls", Boolean.toString(true));
        // NOTE(review): trust-store path is built with a hard-coded "../../config/"
        // here but pathToConfig() above — confirm both resolve to the same directory.
        pravegaProperties.setProperty("cli.trustStore.location", "../../config/" + SecurityConfigDefaults.TLS_CA_CERT_FILE_NAME);
        pravegaProperties.setProperty("cli.trustStore.access.token.ttl.seconds", Integer.toString(300));
    }
    STATE.get().getConfigBuilder().include(pravegaProperties);
    clientConfig = clientConfigBuilder.build();
}

/**
 * Verifies "segmentstore get-segment-info" against a user segment, a set of
 * internal system segments, and a non-existent segment (expected to fail).
 */
@Test
public void testGetSegmentInfoCommand() throws Exception {
    TestUtils.createScopeStream(SETUP_UTILS.getController(), "segmentstore", "getinfo", StreamConfiguration.builder().build());
    String commandResult = TestUtils.executeCommand("segmentstore get-segment-info segmentstore/getinfo/0.#epoch.0 localhost", STATE.get());
    Assert.assertTrue(commandResult.contains("StreamSegmentInfo:"));
    commandResult = TestUtils.executeCommand("segmentstore get-segment-info _system/_abortStream/0.#epoch.0 localhost", STATE.get());
    Assert.assertTrue(commandResult.contains("StreamSegmentInfo:"));
    commandResult = TestUtils.executeCommand("segmentstore get-segment-info _system/_requeststream/0.#epoch.0 localhost", STATE.get());
    Assert.assertTrue(commandResult.contains("StreamSegmentInfo:"));
    commandResult = TestUtils.executeCommand("segmentstore get-segment-info _system/_RGcommitStreamReaders/0.#epoch.0 localhost", STATE.get());
    Assert.assertTrue(commandResult.contains("StreamSegmentInfo:"));
    commandResult = TestUtils.executeCommand("segmentstore get-segment-info _system/_RGscaleGroup/0.#epoch.0 localhost", STATE.get());
    Assert.assertTrue(commandResult.contains("StreamSegmentInfo:"));
    commandResult = TestUtils.executeCommand("segmentstore get-segment-info _system/_RGkvtStreamReaders/0.#epoch.0 localhost", STATE.get());
    Assert.assertTrue(commandResult.contains("StreamSegmentInfo:"));
    commandResult = TestUtils.executeCommand("segmentstore get-segment-info _system/_RGabortStreamReaders/0.#epoch.0 localhost", STATE.get());
    Assert.assertTrue(commandResult.contains("StreamSegmentInfo:"));
    commandResult = TestUtils.executeCommand("segmentstore get-segment-info _system/containers/metadata_0 localhost", STATE.get());
    Assert.assertTrue(commandResult.contains("StreamSegmentInfo:"));
    // Unknown segment must surface a wire-command failure rather than a result.
    AssertExtensions.assertThrows(WireCommandFailedException.class, () -> TestUtils.executeCommand("segmentstore get-segment-info not/exists/0 localhost", STATE.get()));
    Assert.assertNotNull(GetSegmentInfoCommand.descriptor());
}

/**
 * Verifies "segmentstore read-segment": writes events to a stream, reads a
 * byte range into a file, and checks the file-exists and failure paths.
 */
@Test
public void testReadSegmentRangeCommand() throws Exception {
    // Create a temporary directory.
    Path tempDirPath = Files.createTempDirectory("readSegmentDir");
    // Intermediate "tmp<millis>" component keeps the target unique per run.
    String filename = Paths.get(tempDirPath.toString(), "tmp" + System.currentTimeMillis(), "readSegmentTest.txt").toString();
    TestUtils.createScopeStream(SETUP_UTILS.getController(), "segmentstore", "readsegment", StreamConfiguration.builder().build());
    @Cleanup
    EventStreamClientFactory factory = EventStreamClientFactory.withScope("segmentstore", clientConfig);
    @Cleanup
    EventStreamWriter<String> writer = factory.createEventWriter("readsegment", new JavaSerializer<>(), EventWriterConfig.builder().build());
    writer.writeEvents("rk", Arrays.asList("a", "2", "3"));
    writer.flush();
    // Check to make sure that the file exists and data is written into it.
    String commandResult = TestUtils.executeCommand("segmentstore read-segment segmentstore/readsegment/0.#epoch.0 0 8 localhost " + filename, STATE.get());
    Assert.assertTrue(commandResult.contains("The segment data has been successfully written into"));
    File file = new File(filename);
    Assert.assertTrue(file.exists());
    Assert.assertNotEquals(0, file.length());
    // Re-using the same target file must be rejected.
    AssertExtensions.assertThrows(FileAlreadyExistsException.class, () -> TestUtils.executeCommand("segmentstore read-segment _system/_RGcommitStreamReaders/0.#epoch.0 0 8 localhost " + filename, STATE.get()));
    // Delete file created during the test.
    Files.deleteIfExists(Paths.get(filename));
    AssertExtensions.assertThrows(WireCommandFailedException.class, () -> TestUtils.executeCommand("segmentstore read-segment not/exists/0 0 1 localhost " + filename, STATE.get()));
    Assert.assertNotNull(ReadSegmentRangeCommand.descriptor());
    // Delete file created during the test.
    // NOTE(review): this second deleteIfExists is a no-op if the failed command
    // above did not recreate the file — confirm whether it is intentional.
    Files.deleteIfExists(Paths.get(filename));
    // Delete the temporary directory.
    // NOTE(review): deleteOnExit() only removes an empty directory at JVM exit;
    // the intermediate "tmp<millis>" directory may survive — verify cleanup.
    tempDirPath.toFile().deleteOnExit();
}

/**
 * Verifies "segmentstore get-segment-attribute" for a user segment, an
 * internal segment, and the failure path for a non-existent segment.
 */
@Test
public void testGetSegmentAttributeCommand() throws Exception {
    TestUtils.createScopeStream(SETUP_UTILS.getController(), "segmentstore", "getattribute", StreamConfiguration.builder().build());
    String commandResult = TestUtils.executeCommand("segmentstore get-segment-attribute segmentstore/getattribute/0.#epoch.0 " + new UUID(Attributes.CORE_ATTRIBUTE_ID_PREFIX, 0) + " localhost", STATE.get());
    Assert.assertTrue(commandResult.contains("GetSegmentAttribute:"));
    commandResult = TestUtils.executeCommand("segmentstore get-segment-attribute _system/_abortStream/0.#epoch.0 " + new UUID(Attributes.CORE_ATTRIBUTE_ID_PREFIX, 0) + " localhost", STATE.get());
    Assert.assertTrue(commandResult.contains("GetSegmentAttribute:"));
    AssertExtensions.assertThrows(WireCommandFailedException.class, () -> TestUtils.executeCommand("segmentstore get-segment-attribute not/exists/0 " + new UUID(Attributes.CORE_ATTRIBUTE_ID_PREFIX, 0) + " localhost", STATE.get()));
    Assert.assertNotNull(GetSegmentAttributeCommand.descriptor());
}

/**
 * Verifies "segmentstore update-segment-attribute" as a CAS cycle
 * (read current value, update to 0 using it as the expected value, re-read)
 * for both a user segment and an internal segment, plus the failure path.
 * The attribute value is parsed out of the "...=<value>)" CLI output.
 */
@Test
public void testUpdateSegmentAttributeCommand() throws Exception {
    TestUtils.createScopeStream(SETUP_UTILS.getController(), "segmentstore", "updateattribute", StreamConfiguration.builder().build());
    // First, get the existing value of that attribute for the segment.
    String commandResult = TestUtils.executeCommand("segmentstore get-segment-attribute segmentstore/updateattribute/0.#epoch.0 " + new UUID(Attributes.CORE_ATTRIBUTE_ID_PREFIX, 0) + " localhost", STATE.get());
    Assert.assertTrue(commandResult.contains("GetSegmentAttribute:"));
    long oldValue = Long.parseLong(commandResult.substring(commandResult.lastIndexOf("=") + 1, commandResult.indexOf(")")));
    Assert.assertNotEquals(0L, oldValue);
    // Update the Segment to a value of 0.
    commandResult = TestUtils.executeCommand("segmentstore update-segment-attribute segmentstore/updateattribute/0.#epoch.0 " + new UUID(Attributes.CORE_ATTRIBUTE_ID_PREFIX, 0) + " 0 " + oldValue + " localhost", STATE.get());
    Assert.assertTrue(commandResult.contains("UpdateSegmentAttribute:"));
    // Check that the value has been updated.
    commandResult = TestUtils.executeCommand("segmentstore get-segment-attribute segmentstore/updateattribute/0.#epoch.0 " + new UUID(Attributes.CORE_ATTRIBUTE_ID_PREFIX, 0) + " localhost", STATE.get());
    oldValue = Long.parseLong(commandResult.substring(commandResult.lastIndexOf("=") + 1, commandResult.indexOf(")")));
    Assert.assertEquals(0L, oldValue);
    // Do the same for an internal segment.
    commandResult = TestUtils.executeCommand("segmentstore get-segment-attribute _system/_abortStream/0.#epoch.0 " + new UUID(Attributes.CORE_ATTRIBUTE_ID_PREFIX, 0) + " localhost", STATE.get());
    Assert.assertTrue(commandResult.contains("GetSegmentAttribute:"));
    oldValue = Long.parseLong(commandResult.substring(commandResult.lastIndexOf("=") + 1, commandResult.indexOf(")")));
    Assert.assertNotEquals(0L, oldValue);
    // Update the Segment to a value of 0.
    commandResult = TestUtils.executeCommand("segmentstore update-segment-attribute _system/_abortStream/0.#epoch.0 " + new UUID(Attributes.CORE_ATTRIBUTE_ID_PREFIX, 0) + " 0 " + oldValue + " localhost", STATE.get());
    Assert.assertTrue(commandResult.contains("UpdateSegmentAttribute:"));
    // Check that the value has been updated.
    commandResult = TestUtils.executeCommand("segmentstore get-segment-attribute _system/_abortStream/0.#epoch.0 " + new UUID(Attributes.CORE_ATTRIBUTE_ID_PREFIX, 0) + " localhost", STATE.get());
    oldValue = Long.parseLong(commandResult.substring(commandResult.lastIndexOf("=") + 1, commandResult.indexOf(")")));
    Assert.assertEquals(0L, oldValue);
    AssertExtensions.assertThrows(WireCommandFailedException.class, () -> TestUtils.executeCommand("segmentstore update-segment-attribute not/exists/0 " + new UUID(Attributes.CORE_ATTRIBUTE_ID_PREFIX, 0) + " 0 0 localhost", STATE.get()));
    Assert.assertNotNull(UpdateSegmentAttributeCommand.descriptor());
}

/**
 * Verifies "container flush-to-storage all" reports success for each of the
 * CONTAINER_COUNT containers.
 */
@Test
public void testFlushToStorageCommandAllCase() throws Exception {
    String commandResult = TestUtils.executeCommand("container flush-to-storage all localhost", STATE.get());
    for (int id = 0; id < CONTAINER_COUNT; id++) {
        Assert.assertTrue(commandResult.contains("Flushed the Segment Container with containerId " + id + " to Storage."));
    }
    Assert.assertNotNull(FlushToStorageCommand.descriptor());
}

/**
 * Verifies "container flush-to-storage" for a single container id.
 */
@Test
public void testFlushToStorageCommand() throws Exception {
    String commandResult = TestUtils.executeCommand("container flush-to-storage 0 localhost", STATE.get());
    Assert.assertTrue(commandResult.contains("Flushed the Segment Container with containerId 0 to Storage."));
    Assert.assertNotNull(FlushToStorageCommand.descriptor());
}

/**
 * Verifies "table-segment set-serializer": unknown name leaves both
 * serializers unset; "slts" and "container_meta" install the matching
 * key/value serializer pairs on the CLI state.
 */
@Test
public void testSetSerializerCommand() throws Exception {
    Assert.assertNull(STATE.get().getKeySerializer());
    Assert.assertNull(STATE.get().getValueSerializer());
    String commandResult = TestUtils.executeCommand("table-segment set-serializer dummy", STATE.get());
    Assert.assertTrue(commandResult.contains("Serializers named dummy do not exist."));
    Assert.assertNull(STATE.get().getKeySerializer());
    Assert.assertNull(STATE.get().getValueSerializer());
    commandResult = TestUtils.executeCommand("table-segment set-serializer slts", STATE.get());
    Assert.assertTrue(commandResult.contains("Serializers changed to slts successfully."));
    Assert.assertTrue(STATE.get().getKeySerializer() instanceof SltsKeySerializer);
    Assert.assertTrue(STATE.get().getValueSerializer() instanceof SltsMetadataSerializer);
    commandResult = TestUtils.executeCommand("table-segment set-serializer container_meta", STATE.get());
    Assert.assertTrue(commandResult.contains("Serializers changed to container_meta successfully."));
    Assert.assertTrue(STATE.get().getKeySerializer() instanceof ContainerKeySerializer);
    Assert.assertTrue(STATE.get().getValueSerializer() instanceof ContainerMetadataSerializer);
}

/**
 * Verifies "table-segment get-info" prints all expected info fields for the
 * container-0 metadata segment.
 */
@Test
public void testGetTableSegmentInfoCommand() throws Exception {
    String tableSegmentName = getMetadataSegmentName(0);
    String commandResult = TestUtils.executeCommand("table-segment get-info " + tableSegmentName + " localhost", STATE.get());
    Assert.assertTrue(commandResult.contains(tableSegmentName));
    Assert.assertTrue(commandResult.contains(SEGMENT_NAME));
    Assert.assertTrue(commandResult.contains(START_OFFSET));
    Assert.assertTrue(commandResult.contains(LENGTH));
    Assert.assertTrue(commandResult.contains(ENTRY_COUNT));
    Assert.assertTrue(commandResult.contains(KEY_LENGTH));
}

/**
 * Verifies "table-segment list-keys" after installing the container_meta
 * serializers.
 */
@Test
public void testListTableSegmentKeysCommand() throws Exception {
    String setSerializerResult = TestUtils.executeCommand("table-segment set-serializer container_meta", STATE.get());
    Assert.assertTrue(setSerializerResult.contains("Serializers changed to container_meta successfully."));
    Assert.assertTrue(STATE.get().getKeySerializer() instanceof ContainerKeySerializer);
    Assert.assertTrue(STATE.get().getValueSerializer() instanceof ContainerMetadataSerializer);
    String tableSegmentName = getMetadataSegmentName(0);
    int keyCount = 5;
    String commandResult = TestUtils.executeCommand("table-segment list-keys " + tableSegmentName + " " + keyCount + " localhost", STATE.get());
    Assert.assertTrue(commandResult.contains("List of at most " + keyCount + " keys in " + tableSegmentName));
}

/**
 * Verifies "table-segment get" deserializes a container-metadata entry and
 * prints every segment-property field.
 */
@Test
public void testGetTableSegmentEntryCommand() throws Exception {
    String setSerializerResult = TestUtils.executeCommand("table-segment set-serializer container_meta", STATE.get());
    Assert.assertTrue(setSerializerResult.contains("Serializers changed to container_meta successfully."));
    Assert.assertTrue(STATE.get().getKeySerializer() instanceof ContainerKeySerializer);
    Assert.assertTrue(STATE.get().getValueSerializer() instanceof ContainerMetadataSerializer);
    String tableSegmentName = getMetadataSegmentName(0);
    String key = "_system/_RGkvtStreamReaders/0.#epoch.0";
    String commandResult = TestUtils.executeCommand("table-segment get " + tableSegmentName + " " + key + " localhost", STATE.get());
    Assert.assertTrue(commandResult.contains("container metadata info:"));
    Assert.assertTrue(commandResult.contains(SEGMENT_ID));
    Assert.assertTrue(commandResult.contains(SEGMENT_PROPERTIES_NAME));
    Assert.assertTrue(commandResult.contains(SEGMENT_PROPERTIES_SEALED));
    Assert.assertTrue(commandResult.contains(SEGMENT_PROPERTIES_START_OFFSET));
    Assert.assertTrue(commandResult.contains(SEGMENT_PROPERTIES_LENGTH));
}

/**
 * Verifies "table-segment put" overwrites an entry built field-by-field via
 * appendField (the last appendField is a raw core-attribute UUID with an
 * epoch-millis value).
 */
@Test
public void testPutTableSegmentEntryCommand() throws Exception {
    String setSerializerResult = TestUtils.executeCommand("table-segment set-serializer container_meta", STATE.get());
    Assert.assertTrue(setSerializerResult.contains("Serializers changed to container_meta successfully."));
    Assert.assertTrue(STATE.get().getKeySerializer() instanceof ContainerKeySerializer);
    Assert.assertTrue(STATE.get().getValueSerializer() instanceof ContainerMetadataSerializer);
    String tableSegmentName = getMetadataSegmentName(0);
    String key = "_system/_RGkvtStreamReaders/0.#epoch.0";
    StringBuilder newValueBuilder = new StringBuilder();
    appendField(newValueBuilder, SEGMENT_ID, "1");
    appendField(newValueBuilder, SEGMENT_PROPERTIES_NAME, key);
    appendField(newValueBuilder, SEGMENT_PROPERTIES_SEALED, "false");
    appendField(newValueBuilder, SEGMENT_PROPERTIES_START_OFFSET, "0");
    appendField(newValueBuilder, SEGMENT_PROPERTIES_LENGTH, "10");
    appendField(newValueBuilder, "80000000-0000-0000-0000-000000000000", "1632728432718");
    String commandResult = TestUtils.executeCommand("table-segment put " + tableSegmentName + " localhost " + key + " " + newValueBuilder.toString(), STATE.get());
    Assert.assertTrue(commandResult.contains("Successfully updated the key " + key + " in table " + tableSegmentName));
}

/**
 * Verifies "table-segment modify" with a mix of valid fields and one unknown
 * field: the unknown field is reported but the valid ones are still applied.
 */
@Test
public void testModifyTableSegmentEntryCommandValidFieldCase() throws Exception {
    String setSerializerResult = TestUtils.executeCommand("table-segment set-serializer container_meta", STATE.get());
    Assert.assertTrue(setSerializerResult.contains("Serializers changed to container_meta successfully."));
    Assert.assertTrue(STATE.get().getKeySerializer() instanceof ContainerKeySerializer);
    Assert.assertTrue(STATE.get().getValueSerializer() instanceof ContainerMetadataSerializer);
    String tableSegmentName = getMetadataSegmentName(0);
    String key = "_system/_RGkvtStreamReaders/0.#epoch.0";
    StringBuilder newFieldValueBuilder = new StringBuilder();
    appendField(newFieldValueBuilder, SEGMENT_PROPERTIES_START_OFFSET, "20");
    appendField(newFieldValueBuilder, SEGMENT_PROPERTIES_LENGTH, "30");
    appendField(newFieldValueBuilder, "80000000-0000-0000-0000-000000000000", "1632728432718");
    appendField(newFieldValueBuilder, "dummy_field", "dummy");
    String commandResult = TestUtils.executeCommand("table-segment modify " + tableSegmentName + " localhost " + key + " " + newFieldValueBuilder.toString(), STATE.get());
    Assert.assertTrue(commandResult.contains("dummy_field field does not exist."));
    Assert.assertTrue(commandResult.contains("Successfully modified the following fields in the value for key " + key + " in table " + tableSegmentName));
}

/**
 * Verifies "table-segment modify" with only an unknown field: the command
 * reports the missing field and that nothing was modified.
 */
@Test
public void testModifyTableSegmentEntryCommandInValidFieldCase() throws Exception {
    String setSerializerResult = TestUtils.executeCommand("table-segment set-serializer container_meta", STATE.get());
    Assert.assertTrue(setSerializerResult.contains("Serializers changed to container_meta successfully."));
    Assert.assertTrue(STATE.get().getKeySerializer() instanceof ContainerKeySerializer);
    Assert.assertTrue(STATE.get().getValueSerializer() instanceof ContainerMetadataSerializer);
    String tableSegmentName = getMetadataSegmentName(0);
    String key = "_system/_RGkvtStreamReaders/0.#epoch.0";
    StringBuilder newFieldValueBuilder = new StringBuilder();
    appendField(newFieldValueBuilder, "dummy_field", "dummy");
    String commandResult = TestUtils.executeCommand("table-segment modify " + tableSegmentName + " localhost " + key + " " + newFieldValueBuilder.toString(), STATE.get());
    Assert.assertTrue(commandResult.contains("dummy_field field does not exist."));
    Assert.assertTrue(commandResult.contains("No fields provided to modify."));
}

/**
 * Stops all services started in setup() and releases the CLI state.
 */
@After
public void tearDown() throws Exception {
    SETUP_UTILS.stopAllServices();
    STATE.get().close();
}

//endregion

//region Actual Test Implementations

// Runs the whole suite with auth + TLS enabled.
public static class SecureSegmentStoreCommandsTest extends AbstractSegmentStoreCommandsTest {
    @Before
    public void startUp() throws Exception {
        setup(true, true);
    }
}

// Runs the whole suite with auth and TLS disabled.
public static class SegmentStoreCommandsTest extends AbstractSegmentStoreCommandsTest {
    @Before
    public void startUp() throws Exception {
        setup(false, false);
    }
}

//endregion
}
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.6
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2016.02.17 at 03:00:54 PM CST
//

package org.shaolin.bmdp.datamodel.page;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;

/**
 * The sub-component of an object list may be a simple component or a reference
 * entity, but not a UIPanel.
 *
 * <p>Java class for UITableType complex type.
 *
 * <p>The following schema fragment specifies the expected content contained within this class.
 *
 * <pre>
 * &lt;complexType name="UITableType">
 *   &lt;complexContent>
 *     &lt;extension base="{http://bmdp.shaolin.org/datamodel/Page}UIComponentType">
 *       &lt;sequence>
 *         &lt;element name="beElement" type="{http://www.w3.org/2001/XMLSchema}string"/>
 *         &lt;element name="selectMode" type="{http://bmdp.shaolin.org/datamodel/Page}UITableSelectModeType" minOccurs="0"/>
 *         &lt;element name="selectedRowAction" type="{http://www.w3.org/2001/XMLSchema}string"/>
 *         &lt;element name="defaultRowSize" type="{http://www.w3.org/2001/XMLSchema}int"/>
 *         &lt;element name="appendRowMode" type="{http://www.w3.org/2001/XMLSchema}boolean"/>
 *         &lt;element name="refreshInterval" type="{http://www.w3.org/2001/XMLSchema}int"/>
 *         &lt;element name="editableCell" type="{http://www.w3.org/2001/XMLSchema}boolean"/>
 *         &lt;element name="showBigItem" type="{http://www.w3.org/2001/XMLSchema}boolean"/>
 *         &lt;element name="showActionBar" type="{http://www.w3.org/2001/XMLSchema}boolean"/>
 *         &lt;element name="showFilter" type="{http://www.w3.org/2001/XMLSchema}boolean"/>
 *         &lt;element name="stats" type="{http://bmdp.shaolin.org/datamodel/Page}UITableStatsType" minOccurs="0"/>
 *         &lt;element name="initQuery" type="{http://bmdp.shaolin.org/datamodel/Page}ExpressionPropertyType" minOccurs="0"/>
 *         &lt;element name="query" type="{http://bmdp.shaolin.org/datamodel/Page}ExpressionPropertyType"/>
 *         &lt;element name="totalCount" type="{http://bmdp.shaolin.org/datamodel/Page}ExpressionPropertyType"/>
 *         &lt;element name="rowFilter" type="{http://bmdp.shaolin.org/datamodel/Page}ExpressionPropertyType"/>
 *         &lt;element name="column" type="{http://bmdp.shaolin.org/datamodel/Page}UITableColumnType" maxOccurs="unbounded" minOccurs="0"/>
 *         &lt;element name="defaultActions" type="{http://bmdp.shaolin.org/datamodel/Page}UITableDefaultActionType"/>
 *         &lt;element name="actionGroup" type="{http://bmdp.shaolin.org/datamodel/Page}UITableActionGroupType" maxOccurs="unbounded" minOccurs="0"/>
 *       &lt;/sequence>
 *     &lt;/extension>
 *   &lt;/complexContent>
 * &lt;/complexType>
 * </pre>
 *
 * <p>NOTE(review): JAXB-generated class. Do not hand-edit the fields,
 * annotations or {@code propOrder} — the XML binding depends on them exactly;
 * regenerate from the schema instead (see the generation header above).
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "UITableType", propOrder = {
    "beElement",
    "selectMode",
    "selectedRowAction",
    "defaultRowSize",
    "appendRowMode",
    "refreshInterval",
    "editableCell",
    "showBigItem",
    "showActionBar",
    "showFilter",
    "stats",
    "initQuery",
    "query",
    "totalCount",
    "rowFilter",
    "columns",
    "defaultActions",
    "actionGroups"
})
public class UITableType
    extends UIComponentType
    implements Serializable
{

    private final static long serialVersionUID = 1L;
    @XmlElement(required = true)
    protected String beElement;
    protected UITableSelectModeType selectMode;
    @XmlElement(required = true)
    protected String selectedRowAction;
    protected int defaultRowSize;
    @XmlElement(defaultValue = "false")
    protected boolean appendRowMode;
    protected int refreshInterval;
    @XmlElement(defaultValue = "true")
    protected boolean editableCell;
    @XmlElement(defaultValue = "false")
    protected boolean showBigItem;
    @XmlElement(defaultValue = "true")
    protected boolean showActionBar;
    @XmlElement(defaultValue = "true")
    protected boolean showFilter;
    protected UITableStatsType stats;
    protected ExpressionPropertyType initQuery;
    @XmlElement(required = true)
    protected ExpressionPropertyType query;
    @XmlElement(required = true)
    protected ExpressionPropertyType totalCount;
    @XmlElement(required = true)
    protected ExpressionPropertyType rowFilter;
    @XmlElement(name = "column")
    protected List<UITableColumnType> columns;
    @XmlElement(required = true)
    protected UITableDefaultActionType defaultActions;
    @XmlElement(name = "actionGroup")
    protected List<UITableActionGroupType> actionGroups;

    /**
     * Gets the value of the beElement property.
     *
     * @return possible object is {@link String }
     */
    public String getBeElement() {
        return beElement;
    }

    /**
     * Sets the value of the beElement property.
     *
     * @param value allowed object is {@link String }
     */
    public void setBeElement(String value) {
        this.beElement = value;
    }

    /**
     * Gets the value of the selectMode property.
     *
     * @return possible object is {@link UITableSelectModeType }
     */
    public UITableSelectModeType getSelectMode() {
        return selectMode;
    }

    /**
     * Sets the value of the selectMode property.
     *
     * @param value allowed object is {@link UITableSelectModeType }
     */
    public void setSelectMode(UITableSelectModeType value) {
        this.selectMode = value;
    }

    /**
     * Gets the value of the selectedRowAction property.
     *
     * @return possible object is {@link String }
     */
    public String getSelectedRowAction() {
        return selectedRowAction;
    }

    /**
     * Sets the value of the selectedRowAction property.
     *
     * @param value allowed object is {@link String }
     */
    public void setSelectedRowAction(String value) {
        this.selectedRowAction = value;
    }

    /**
     * Gets the value of the defaultRowSize property.
     */
    public int getDefaultRowSize() {
        return defaultRowSize;
    }

    /**
     * Sets the value of the defaultRowSize property.
     */
    public void setDefaultRowSize(int value) {
        this.defaultRowSize = value;
    }

    /**
     * Gets the value of the appendRowMode property.
     */
    public boolean isAppendRowMode() {
        return appendRowMode;
    }

    /**
     * Sets the value of the appendRowMode property.
     */
    public void setAppendRowMode(boolean value) {
        this.appendRowMode = value;
    }

    /**
     * Gets the value of the refreshInterval property.
     */
    public int getRefreshInterval() {
        return refreshInterval;
    }

    /**
     * Sets the value of the refreshInterval property.
     */
    public void setRefreshInterval(int value) {
        this.refreshInterval = value;
    }

    /**
     * Gets the value of the editableCell property.
     */
    public boolean isEditableCell() {
        return editableCell;
    }

    /**
     * Sets the value of the editableCell property.
     */
    public void setEditableCell(boolean value) {
        this.editableCell = value;
    }

    /**
     * Gets the value of the showBigItem property.
     */
    public boolean isShowBigItem() {
        return showBigItem;
    }

    /**
     * Sets the value of the showBigItem property.
     */
    public void setShowBigItem(boolean value) {
        this.showBigItem = value;
    }

    /**
     * Gets the value of the showActionBar property.
     */
    public boolean isShowActionBar() {
        return showActionBar;
    }

    /**
     * Sets the value of the showActionBar property.
     */
    public void setShowActionBar(boolean value) {
        this.showActionBar = value;
    }

    /**
     * Gets the value of the showFilter property.
     */
    public boolean isShowFilter() {
        return showFilter;
    }

    /**
     * Sets the value of the showFilter property.
     */
    public void setShowFilter(boolean value) {
        this.showFilter = value;
    }

    /**
     * Gets the value of the stats property.
     *
     * @return possible object is {@link UITableStatsType }
     */
    public UITableStatsType getStats() {
        return stats;
    }

    /**
     * Sets the value of the stats property.
     *
     * @param value allowed object is {@link UITableStatsType }
     */
    public void setStats(UITableStatsType value) {
        this.stats = value;
    }

    /**
     * Gets the value of the initQuery property.
     *
     * @return possible object is {@link ExpressionPropertyType }
     */
    public ExpressionPropertyType getInitQuery() {
        return initQuery;
    }

    /**
     * Sets the value of the initQuery property.
     *
     * @param value allowed object is {@link ExpressionPropertyType }
     */
    public void setInitQuery(ExpressionPropertyType value) {
        this.initQuery = value;
    }

    /**
     * Gets the value of the query property.
     *
     * @return possible object is {@link ExpressionPropertyType }
     */
    public ExpressionPropertyType getQuery() {
        return query;
    }

    /**
     * Sets the value of the query property.
     *
     * @param value allowed object is {@link ExpressionPropertyType }
     */
    public void setQuery(ExpressionPropertyType value) {
        this.query = value;
    }

    /**
     * Gets the value of the totalCount property.
     *
     * @return possible object is {@link ExpressionPropertyType }
     */
    public ExpressionPropertyType getTotalCount() {
        return totalCount;
    }

    /**
     * Sets the value of the totalCount property.
     *
     * @param value allowed object is {@link ExpressionPropertyType }
     */
    public void setTotalCount(ExpressionPropertyType value) {
        this.totalCount = value;
    }

    /**
     * Gets the value of the rowFilter property.
     *
     * @return possible object is {@link ExpressionPropertyType }
     */
    public ExpressionPropertyType getRowFilter() {
        return rowFilter;
    }

    /**
     * Sets the value of the rowFilter property.
     *
     * @param value allowed object is {@link ExpressionPropertyType }
     */
    public void setRowFilter(ExpressionPropertyType value) {
        this.rowFilter = value;
    }

    /**
     * Gets the value of the columns property.
     *
     * <p>
     * This accessor method returns a reference to the live list, not a
     * snapshot. Therefore any modification you make to the returned list will
     * be present inside the JAXB object. This is why there is not a
     * <CODE>set</CODE> method for the columns property.
     *
     * <p>
     * For example, to add a new item, do as follows:
     * <pre>
     *    getColumns().add(newItem);
     * </pre>
     *
     * <p>
     * Objects of the following type(s) are allowed in the list
     * {@link UITableColumnType }
     */
    public List<UITableColumnType> getColumns() {
        // Lazily created so the JAXB unmarshaller and callers share one live list.
        if (columns == null) {
            columns = new ArrayList<UITableColumnType>();
        }
        return this.columns;
    }

    /**
     * Gets the value of the defaultActions property.
     *
     * @return possible object is {@link UITableDefaultActionType }
     */
    public UITableDefaultActionType getDefaultActions() {
        return defaultActions;
    }

    /**
     * Sets the value of the defaultActions property.
     *
     * @param value allowed object is {@link UITableDefaultActionType }
     */
    public void setDefaultActions(UITableDefaultActionType value) {
        this.defaultActions = value;
    }

    /**
     * Gets the value of the actionGroups property.
     *
     * <p>
     * This accessor method returns a reference to the live list, not a
     * snapshot. Therefore any modification you make to the returned list will
     * be present inside the JAXB object. This is why there is not a
     * <CODE>set</CODE> method for the actionGroups property.
     *
     * <p>
     * For example, to add a new item, do as follows:
     * <pre>
     *    getActionGroups().add(newItem);
     * </pre>
     *
     * <p>
     * Objects of the following type(s) are allowed in the list
     * {@link UITableActionGroupType }
     */
    public List<UITableActionGroupType> getActionGroups() {
        // Lazily created so the JAXB unmarshaller and callers share one live list.
        if (actionGroups == null) {
            actionGroups = new ArrayList<UITableActionGroupType>();
        }
        return this.actionGroups;
    }

}
package org.hl7.fhir.instance.model;

/*
  Copyright (c) 2011+, HL7, Inc.
  All rights reserved.

  Redistribution and use in source and binary forms, with or without modification,
  are permitted provided that the following conditions are met:

   * Redistributions of source code must retain the above copyright notice, this
     list of conditions and the following disclaimer.
   * Redistributions in binary form must reproduce the above copyright notice,
     this list of conditions and the following disclaimer in the documentation
     and/or other materials provided with the distribution.
   * Neither the name of HL7 nor the names of its contributors may be used to
     endorse or promote products derived from this software without specific
     prior written permission.

  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
  ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
  WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
  IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
  INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
  NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
  PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
  WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
  ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
  POSSIBILITY OF SUCH DAMAGE.
*/

// Generated on Wed, Nov 11, 2015 10:54-0500 for FHIR v1.0.2
// NOTE(review): generated FHIR model class — regenerate rather than hand-edit;
// annotation values and method conventions are relied on by the FHIR tooling.

import java.util.*;

import org.hl7.fhir.utilities.Utilities;
import org.hl7.fhir.instance.model.annotations.ResourceDef;
import org.hl7.fhir.instance.model.annotations.SearchParamDefinition;
import org.hl7.fhir.instance.model.annotations.Child;
import org.hl7.fhir.instance.model.annotations.Description;
import org.hl7.fhir.instance.model.annotations.Block;
import org.hl7.fhir.instance.model.api.*;

/**
 * A container for slot(s) of time that may be available for booking appointments.
 */
@ResourceDef(name="Schedule", profile="http://hl7.org/fhir/Profile/Schedule")
public class Schedule extends DomainResource {

    /**
     * External Ids for this item.
     */
    @Child(name = "identifier", type = {Identifier.class}, order=0, min=0, max=Child.MAX_UNLIMITED, modifier=false, summary=true)
    @Description(shortDefinition="External Ids for this item", formalDefinition="External Ids for this item." )
    protected List<Identifier> identifier;

    /**
     * The schedule type can be used for the categorization of healthcare services or other appointment types.
     */
    @Child(name = "type", type = {CodeableConcept.class}, order=1, min=0, max=Child.MAX_UNLIMITED, modifier=false, summary=false)
    @Description(shortDefinition="The schedule type can be used for the categorization of healthcare services or other appointment types", formalDefinition="The schedule type can be used for the categorization of healthcare services or other appointment types." )
    protected List<CodeableConcept> type;

    /**
     * The resource this Schedule resource is providing availability information for. These are expected to usually be one of HealthcareService, Location, Practitioner, Device, Patient or RelatedPerson.
     */
    @Child(name = "actor", type = {Patient.class, Practitioner.class, RelatedPerson.class, Device.class, HealthcareService.class, Location.class}, order=2, min=1, max=1, modifier=false, summary=true)
    @Description(shortDefinition="The resource this Schedule resource is providing availability information for. These are expected to usually be one of HealthcareService, Location, Practitioner, Device, Patient or RelatedPerson", formalDefinition="The resource this Schedule resource is providing availability information for. These are expected to usually be one of HealthcareService, Location, Practitioner, Device, Patient or RelatedPerson." )
    protected Reference actor;

    /**
     * The actual object that is the target of the reference (The resource this Schedule resource is providing availability information for. These are expected to usually be one of HealthcareService, Location, Practitioner, Device, Patient or RelatedPerson.)
     */
    protected Resource actorTarget;

    /**
     * The period of time that the slots that are attached to this Schedule resource cover (even if none exist). These cover the amount of time that an organization's planning horizon; the interval for which they are currently accepting appointments. This does not define a "template" for planning outside these dates.
     */
    @Child(name = "planningHorizon", type = {Period.class}, order=3, min=0, max=1, modifier=false, summary=true)
    @Description(shortDefinition="The period of time that the slots that are attached to this Schedule resource cover (even if none exist). These cover the amount of time that an organization's planning horizon; the interval for which they are currently accepting appointments. This does not define a \"template\" for planning outside these dates", formalDefinition="The period of time that the slots that are attached to this Schedule resource cover (even if none exist). These cover the amount of time that an organization's planning horizon; the interval for which they are currently accepting appointments. This does not define a \"template\" for planning outside these dates." )
    protected Period planningHorizon;

    /**
     * Comments on the availability to describe any extended information. Such as custom constraints on the slot(s) that may be associated.
     */
    @Child(name = "comment", type = {StringType.class}, order=4, min=0, max=1, modifier=false, summary=false)
    @Description(shortDefinition="Comments on the availability to describe any extended information. Such as custom constraints on the slot(s) that may be associated", formalDefinition="Comments on the availability to describe any extended information. Such as custom constraints on the slot(s) that may be associated." )
    protected StringType comment;

    private static final long serialVersionUID = 158030926L;

  /*
   * Constructor
   */
    public Schedule() {
      super();
    }

  /*
   * Constructor
   */
    public Schedule(Reference actor) {
      super();
      this.actor = actor;
    }

    /**
     * @return {@link #identifier} (External Ids for this item.)
     */
    public List<Identifier> getIdentifier() { 
      // Lazily created so callers can always append without a null check.
      if (this.identifier == null)
        this.identifier = new ArrayList<Identifier>();
      return this.identifier;
    }

    public boolean hasIdentifier() { 
      if (this.identifier == null)
        return false;
      // Non-empty only if at least one element carries actual content.
      for (Identifier item : this.identifier)
        if (!item.isEmpty())
          return true;
      return false;
    }

    /**
     * @return {@link #identifier} (External Ids for this item.)
     */
    // syntactic sugar
    public Identifier addIdentifier() { //3
      Identifier t = new Identifier();
      if (this.identifier == null)
        this.identifier = new ArrayList<Identifier>();
      this.identifier.add(t);
      return t;
    }

    // syntactic sugar
    public Schedule addIdentifier(Identifier t) { //3
      if (t == null)
        return this;
      if (this.identifier == null)
        this.identifier = new ArrayList<Identifier>();
      this.identifier.add(t);
      return this;
    }

    /**
     * @return {@link #type} (The schedule type can be used for the categorization of healthcare services or other appointment types.)
     */
    public List<CodeableConcept> getType() { 
      // Lazily created so callers can always append without a null check.
      if (this.type == null)
        this.type = new ArrayList<CodeableConcept>();
      return this.type;
    }

    public boolean hasType() { 
      if (this.type == null)
        return false;
      // Non-empty only if at least one element carries actual content.
      for (CodeableConcept item : this.type)
        if (!item.isEmpty())
          return true;
      return false;
    }

    /**
     * @return {@link #type} (The schedule type can be used for the categorization of healthcare services or other appointment types.)
     */
    // syntactic sugar
    public CodeableConcept addType() { //3
      CodeableConcept t = new CodeableConcept();
      if (this.type == null)
        this.type = new ArrayList<CodeableConcept>();
      this.type.add(t);
      return t;
    }

    // syntactic sugar
    public Schedule addType(CodeableConcept t) { //3
      if (t == null)
        return this;
      if (this.type == null)
        this.type = new ArrayList<CodeableConcept>();
      this.type.add(t);
      return this;
    }

    /**
     * @return {@link #actor} (The resource this Schedule resource is providing availability information for. These are expected to usually be one of HealthcareService, Location, Practitioner, Device, Patient or RelatedPerson.)
     */
    public Reference getActor() { 
      // Auto-create behavior is controlled globally by Configuration.
      if (this.actor == null)
        if (Configuration.errorOnAutoCreate())
          throw new Error("Attempt to auto-create Schedule.actor");
        else if (Configuration.doAutoCreate())
          this.actor = new Reference(); // cc
      return this.actor;
    }

    public boolean hasActor() { 
      return this.actor != null && !this.actor.isEmpty();
    }

    /**
     * @param value {@link #actor} (The resource this Schedule resource is providing availability information for. These are expected to usually be one of HealthcareService, Location, Practitioner, Device, Patient or RelatedPerson.)
     */
    public Schedule setActor(Reference value) { 
      this.actor = value;
      return this;
    }

    /**
     * @return {@link #actor} The actual object that is the target of the reference. The reference library doesn't populate this, but you can use it to hold the resource if you resolve it. (The resource this Schedule resource is providing availability information for. These are expected to usually be one of HealthcareService, Location, Practitioner, Device, Patient or RelatedPerson.)
     */
    public Resource getActorTarget() { 
      return this.actorTarget;
    }

    /**
     * @param value {@link #actor} The actual object that is the target of the reference. The reference library doesn't use these, but you can use it to hold the resource if you resolve it. (The resource this Schedule resource is providing availability information for. These are expected to usually be one of HealthcareService, Location, Practitioner, Device, Patient or RelatedPerson.)
     */
    public Schedule setActorTarget(Resource value) { 
      this.actorTarget = value;
      return this;
    }

    /**
     * @return {@link #planningHorizon} (The period of time that the slots that are attached to this Schedule resource cover (even if none exist). These cover the amount of time that an organization's planning horizon; the interval for which they are currently accepting appointments. This does not define a "template" for planning outside these dates.)
     */
    public Period getPlanningHorizon() { 
      // Auto-create behavior is controlled globally by Configuration.
      if (this.planningHorizon == null)
        if (Configuration.errorOnAutoCreate())
          throw new Error("Attempt to auto-create Schedule.planningHorizon");
        else if (Configuration.doAutoCreate())
          this.planningHorizon = new Period(); // cc
      return this.planningHorizon;
    }

    public boolean hasPlanningHorizon() { 
      return this.planningHorizon != null && !this.planningHorizon.isEmpty();
    }

    /**
     * @param value {@link #planningHorizon} (The period of time that the slots that are attached to this Schedule resource cover (even if none exist). These cover the amount of time that an organization's planning horizon; the interval for which they are currently accepting appointments. This does not define a "template" for planning outside these dates.)
     */
    public Schedule setPlanningHorizon(Period value) { 
      this.planningHorizon = value;
      return this;
    }

    /**
     * @return {@link #comment} (Comments on the availability to describe any extended information. Such as custom constraints on the slot(s) that may be associated.). This is the underlying object with id, value and extensions. The accessor "getComment" gives direct access to the value
     */
    public StringType getCommentElement() { 
      // Auto-create behavior is controlled globally by Configuration.
      if (this.comment == null)
        if (Configuration.errorOnAutoCreate())
          throw new Error("Attempt to auto-create Schedule.comment");
        else if (Configuration.doAutoCreate())
          this.comment = new StringType(); // bb
      return this.comment;
    }

    public boolean hasCommentElement() { 
      return this.comment != null && !this.comment.isEmpty();
    }

    public boolean hasComment() { 
      return this.comment != null && !this.comment.isEmpty();
    }

    /**
     * @param value {@link #comment} (Comments on the availability to describe any extended information. Such as custom constraints on the slot(s) that may be associated.). This is the underlying object with id, value and extensions. The accessor "getComment" gives direct access to the value
     */
    public Schedule setCommentElement(StringType value) { 
      this.comment = value;
      return this;
    }

    /**
     * @return Comments on the availability to describe any extended information. Such as custom constraints on the slot(s) that may be associated.
     */
    public String getComment() { 
      return this.comment == null ? null : this.comment.getValue();
    }

    /**
     * @param value Comments on the availability to describe any extended information. Such as custom constraints on the slot(s) that may be associated.
     */
    public Schedule setComment(String value) { 
      // An empty/blank string clears the element entirely rather than storing "".
      if (Utilities.noString(value))
        this.comment = null;
      else {
        if (this.comment == null)
          this.comment = new StringType();
        this.comment.setValue(value);
      }
      return this;
    }

      protected void listChildren(List<Property> childrenList) {
        super.listChildren(childrenList);
        childrenList.add(new Property("identifier", "Identifier", "External Ids for this item.", 0, java.lang.Integer.MAX_VALUE, identifier));
        childrenList.add(new Property("type", "CodeableConcept", "The schedule type can be used for the categorization of healthcare services or other appointment types.", 0, java.lang.Integer.MAX_VALUE, type));
        childrenList.add(new Property("actor", "Reference(Patient|Practitioner|RelatedPerson|Device|HealthcareService|Location)", "The resource this Schedule resource is providing availability information for. These are expected to usually be one of HealthcareService, Location, Practitioner, Device, Patient or RelatedPerson.", 0, java.lang.Integer.MAX_VALUE, actor));
        childrenList.add(new Property("planningHorizon", "Period", "The period of time that the slots that are attached to this Schedule resource cover (even if none exist). These cover the amount of time that an organization's planning horizon; the interval for which they are currently accepting appointments. This does not define a \"template\" for planning outside these dates.", 0, java.lang.Integer.MAX_VALUE, planningHorizon));
        childrenList.add(new Property("comment", "string", "Comments on the availability to describe any extended information. Such as custom constraints on the slot(s) that may be associated.", 0, java.lang.Integer.MAX_VALUE, comment));
      }

      // Deep copy: lists and child elements are copied element-by-element.
      public Schedule copy() {
        Schedule dst = new Schedule();
        copyValues(dst);
        if (identifier != null) {
          dst.identifier = new ArrayList<Identifier>();
          for (Identifier i : identifier)
            dst.identifier.add(i.copy());
        };
        if (type != null) {
          dst.type = new ArrayList<CodeableConcept>();
          for (CodeableConcept i : type)
            dst.type.add(i.copy());
        };
        dst.actor = actor == null ? null : actor.copy();
        dst.planningHorizon = planningHorizon == null ? null : planningHorizon.copy();
        dst.comment = comment == null ? null : comment.copy();
        return dst;
      }

      protected Schedule typedCopy() {
        return copy();
      }

      @Override
      public boolean equalsDeep(Base other) {
        if (!super.equalsDeep(other))
          return false;
        if (!(other instanceof Schedule))
          return false;
        Schedule o = (Schedule) other;
        return compareDeep(identifier, o.identifier, true) && compareDeep(type, o.type, true) && compareDeep(actor, o.actor, true)
           && compareDeep(planningHorizon, o.planningHorizon, true) && compareDeep(comment, o.comment, true)
          ;
      }

      @Override
      public boolean equalsShallow(Base other) {
        if (!super.equalsShallow(other))
          return false;
        if (!(other instanceof Schedule))
          return false;
        Schedule o = (Schedule) other;
        // Shallow comparison looks only at primitive-valued fields.
        return compareValues(comment, o.comment, true);
      }

      public boolean isEmpty() {
        return super.isEmpty() && (identifier == null || identifier.isEmpty()) && (type == null || type.isEmpty())
           && (actor == null || actor.isEmpty()) && (planningHorizon == null || planningHorizon.isEmpty())
           && (comment == null || comment.isEmpty());
      }

  @Override
  public ResourceType getResourceType() {
    return ResourceType.Schedule;
   }

  @SearchParamDefinition(name="actor", path="Schedule.actor", description="The individual(HealthcareService, Practitioner, Location, ...) to find a Schedule for", type="reference" )
  public static final String SP_ACTOR = "actor";
  @SearchParamDefinition(name="date", path="Schedule.planningHorizon", description="Search for Schedule resources that have a period that contains this date specified", type="date" )
  public static final String SP_DATE = "date";
  @SearchParamDefinition(name="identifier", path="Schedule.identifier", description="A Schedule Identifier", type="token" )
  public static final String SP_IDENTIFIER = "identifier";
  @SearchParamDefinition(name="type", path="Schedule.type", description="The type of appointments that can be booked into associated slot(s)", type="token" )
  public static final String SP_TYPE = "type";

}
package edu.mit.csail.sdg.alloy4viz;

import edu.mit.csail.sdg.alloy4.*;
import edu.mit.csail.sdg.alloy4.Util.IntPref;
import edu.mit.csail.sdg.alloy4.Util.StringPref;
import edu.mit.csail.sdg.alloy4graph.GraphViewer;
import static edu.mit.csail.sdg.alloy4.OurUtil.menu;
import static edu.mit.csail.sdg.alloy4.OurUtil.menuItem;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.Font;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.ComponentEvent;
import java.awt.event.ComponentListener;
import java.awt.event.FocusEvent;
import java.awt.event.FocusListener;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.prefs.Preferences;
import javax.swing.Box;
import javax.swing.Icon;
import javax.swing.JButton;
import javax.swing.JComponent;
import javax.swing.JFrame;
import javax.swing.JMenu;
import javax.swing.JMenuBar;
import javax.swing.JMenuItem;
import javax.swing.JPanel;
import javax.swing.JPopupMenu;
import javax.swing.JScrollPane;
import javax.swing.JSplitPane;
import javax.swing.JTextArea;
import javax.swing.JToolBar;
import javax.swing.plaf.basic.BasicSplitPaneUI;

/**
 * GUI visualizer for Alloy instances: renders an XML instance file as a graph,
 * tree, or text, with a theme-customization panel and an evaluator console.
 * Customized variant of the stock Alloy VizGUI; must be used from the AWT
 * event thread only.
 */
public class VizGUICustom implements ComponentListener {

   /** The background color for the toolbar. */
   private static final Color background = new Color(0.9f, 0.9f, 0.9f);

   /** The icon for a "checked" menu item. */
   private static final Icon iconYes = OurUtil.loadIcon("images/menu1.gif");

   /** The icon for an "unchecked" menu item. */
   private static final Icon iconNo = OurUtil.loadIcon("images/menu0.gif");

   /** Whether the JVM should shutdown after the last file is closed. */
   private final boolean standalone;

   /** The current display mode. */
   private VisualizerMode currentMode = VisualizerMode.get();

   /** The JFrame for the main GUI window; or null if we intend to display the graph inside a user-given JPanel instead. */
   private final JFrame frame;

   /** The toolbar. */
   private final JToolBar toolbar;

   /** The projection popup menu. */
   private final JPopupMenu projectionPopup;

   /** The buttons on the toolbar. */
   private final JButton projectionButton, openSettingsButton, closeSettingsButton, magicLayout, loadSettingsButton,
      saveSettingsButton, saveAsSettingsButton, resetSettingsButton, updateSettingsButton, openEvaluatorButton,
      closeEvaluatorButton, enumerateButton, vizButton, treeButton, txtButton/*, dotButton, xmlButton*/;

   /** This list must contain all the display mode buttons (that is, vizButton, xmlButton...) */
   private final List<JButton> solutionButtons = new ArrayList<JButton>();

   /** The "theme" menu. */
   private final JMenu thememenu;

   /** The "window" menu. */
   private final JMenu windowmenu;

   /** The "show next" menu item. */
   private final JMenuItem enumerateMenu;

   /** Current font size. */
   private int fontSize=12;

   /** 0: theme and evaluator are both invisible; 1: theme is visible; 2: evaluator is visible. */
   private int settingsOpen=0;

   /** The current instance and visualization settings; null if none is loaded. */
   private VizState myState=null;

   /** Returns the current visualization settings (and you can call getOriginalInstance() on it to get the current instance).
    * If you make changes to the state, you should call doApply() on the VizGUI object to refresh the screen. */
   public VizState getVizState() { return myState; }

   /** The customization panel to the left; null if it is not yet loaded. */
   private VizCustomizationPanel myCustomPanel=null;

   /** The evaluator panel to the left; null if it is not yet loaded. */
   private OurConsole myEvaluatorPanel=null;

   /** The graphical panel to the right; null if it is not yet loaded. */
   private VizGraphPanel myGraphPanel=null;

   /** The splitpane between the customization panel and the graph panel. */
   private final JSplitPane splitpane;

   /** The tree or graph or text being displayed on the right hand side. */
   private JComponent content=null;

   /** Returns the JSplitPane containing the customization/evaluator panel in the left and the graph on the right. */
   public JSplitPane getPanel() { return splitpane; }

   /** The last known divider position between the customization panel and the graph panel. */
   private int lastDividerPosition=0;

   /** If nonnull, you can pass in an expression to be evaluated.
    * If it throws an exception, that means an error has occurred. */
   private final Computer evaluator;

   /** If nonnull, you can pass in an XML file to find the next solution. */
   private final Computer enumerator;

   //==============================================================================================//

   /** The current theme file; "" if there is no theme file loaded. */
   private String thmFileName="";

   /** Returns the current THM filename; "" if no theme file is currently loaded. */
   public String getThemeFilename() { return thmFileName; }

   //==============================================================================================//

   /** The current XML file; "" if there is no XML file loaded. */
   private String xmlFileName="";

   /** Returns the current XML filename; "" if no file is currently loaded. */
   public String getXMLfilename() { return xmlFileName; }

   //==============================================================================================//

   /** The list of XML files loaded in this session so far. */
   private final List<String> xmlLoaded=new ArrayList<String>();

   /** Return the list of XML files loaded in this session so far. */
   public ConstList<String> getInstances() { return ConstList.make(xmlLoaded); }

   //==============================================================================================//

   /** This maps each XML filename to a descriptive title. */
   private Map<String,String> xml2title = new LinkedHashMap<String,String>();

   /** Returns a short descriptive title associated with an XML file. */
   public String getInstanceTitle(String xmlFileName) {
      // Filenames are canonicalized before lookup, so the lookup is path-format insensitive.
      String answer = xml2title.get(Util.canon(xmlFileName));
      return (answer==null) ? "(unknown)" : answer;
   }

   //==============================================================================================//

   /** Add a vertical divider to the toolbar. */
   private void addDivider() {
      JPanel divider = OurUtil.makeH(new Dimension(1, 40), Color.LIGHT_GRAY);
      divider.setAlignmentY(0.5f);
      // On non-Mac platforms the spacer panels share the toolbar's background color.
      if (!Util.onMac()) toolbar.add(OurUtil.makeH(5,background)); else toolbar.add(OurUtil.makeH(5));
      toolbar.add(divider);
      if (!Util.onMac()) toolbar.add(OurUtil.makeH(5,background)); else toolbar.add(OurUtil.makeH(5));
   }

   //======== The Preferences ======================================================================================//
   //======== Note: you must make sure each preference has a unique key ============================================//

   /** This enum defines the set of possible visualizer modes. */
   private enum VisualizerMode {
      /** Visualize using graphviz's dot. */
      Viz("graphviz"), //
      /** See the DOT content. */
      DOT("dot"), //
      /** See the XML content. */
      XML("xml"),
      /** See the instance as text. */
      TEXT("txt"),
      /** See the instance as a tree. */
      Tree("tree");

      /** This is a unique String for this value; it should be kept consistent in future versions. */
      private final String id;

      /** Constructs a new VisualizerMode value with the given id. */
      private VisualizerMode(String id) { this.id=id; }

      /** Given an id, return the enum value corresponding to it (if there's no match, then return Viz). */
      private static VisualizerMode parse(String id) {
         for(VisualizerMode vm: values()) if (vm.id.equals(id)) return vm;
         return Viz;
      }

      /** Saves this value into the Java preference object. */
      public void set() { Preferences.userNodeForPackage(Util.class).put("VisualizerMode",id); }

      /** Reads the current value of the Java preference object (if it's not set, then return Viz). */
      public static VisualizerMode get() { return parse(Preferences.userNodeForPackage(Util.class).get("VisualizerMode","")); }
   };

   /** The latest X coordinate of the Alloy Visualizer window. */
   private static final IntPref VizX = new IntPref("VizX",0,-1,65535);

   /** The latest Y coordinate of the Alloy Visualizer window. */
   private static final IntPref VizY = new IntPref("VizY",0,-1,65535);

   /** The latest width of the Alloy Visualizer window. */
   private static final IntPref VizWidth = new IntPref("VizWidth",0,-1,65535);

   /** The latest height of the Alloy Visualizer window. */
   private static final IntPref VizHeight = new IntPref("VizHeight",0,-1,65535);

   /** The first file in Alloy Visualizer's "open recent theme" list. */
   private static final StringPref Theme0 = new StringPref("Theme0");

   /** The second file in Alloy Visualizer's "open recent theme" list. */
   private static final StringPref Theme1 = new StringPref("Theme1");

   /** The third file in Alloy Visualizer's "open recent theme" list. */
   private static final StringPref Theme2 = new StringPref("Theme2");

   /** The fourth file in Alloy Visualizer's "open recent theme" list. */
   private static final StringPref Theme3 = new StringPref("Theme3");

   //==============================================================================================//

   /** If true, that means the event handlers should return a Runner encapsulating them, rather than perform the actual work. */
   private boolean wrap = false;

   /** Wraps the calling method into a Runnable whose run() will call the calling method with (false) as the only argument.
    */
   private Runner wrapMe() {
      final String name;
      // Throw-and-catch trick: capture the CALLER's method name from the stack trace
      // (frame [1] is the method that invoked wrapMe()).
      try { throw new Exception(); } catch(Exception ex) { name = ex.getStackTrace()[1].getMethodName(); }
      // Look up the caller's Method object reflectively so the Runner can re-invoke it later.
      Method[] methods = getClass().getDeclaredMethods();
      Method m=null;
      for(int i=0; i<methods.length; i++) if (methods[i].getName().equals(name)) { m=methods[i]; break; }
      final Method method=m;
      return new Runner() {
         private static final long serialVersionUID = 0;
         public void run() {
            try {
               method.setAccessible(true);
               method.invoke(VizGUICustom.this, new Object[]{});
            } catch (Throwable ex) {
               // Wrap any failure and route it through the default uncaught-exception handler.
               ex = new IllegalArgumentException("Failed call to "+name+"()", ex);
               Thread.getDefaultUncaughtExceptionHandler().uncaughtException(Thread.currentThread(), ex);
            }
         }
         public void run(Object arg) { run(); }
      };
   }

   /** Wraps the calling method into a Runnable whose run() will call the calling method with (false,argument) as the two arguments. */
   private Runner wrapMe(final Object argument) {
      final String name;
      // Same stack-trace trick as wrapMe(): identify the calling method by name.
      try { throw new Exception(); } catch(Exception ex) { name = ex.getStackTrace()[1].getMethodName(); }
      Method[] methods = getClass().getDeclaredMethods();
      Method m=null;
      for(int i=0; i<methods.length; i++) if (methods[i].getName().equals(name)) { m=methods[i]; break; }
      final Method method=m;
      return new Runner() {
         private static final long serialVersionUID = 0;
         public void run(Object arg) {
            try {
               method.setAccessible(true);
               method.invoke(VizGUICustom.this, new Object[]{arg});
            } catch (Throwable ex) {
               ex = new IllegalArgumentException("Failed call to "+name+"("+arg+")", ex);
               Thread.getDefaultUncaughtExceptionHandler().uncaughtException(Thread.currentThread(), ex);
            }
         }
         // No-arg run() falls back to the argument captured at wrap time.
         public void run() { run(argument); }
      };
   }

   /** Creates a new visualization GUI window; this method can only be called by the AWT event thread.
    * @param standalone - whether the JVM should shutdown after the last file is closed
    * @param xmlFileName - the filename of the incoming XML file; "" if there's no file to open
    * @param windowmenu - if standalone==false and windowmenu!=null, then this will be added as a menu on the menubar
    *
    * <p> Note: if standalone==false and xmlFileName.length()==0, then we will initially hide the window.
    */
   public VizGUICustom(boolean standalone, String xmlFileName, JMenu windowmenu) {
      this(standalone, xmlFileName, windowmenu, null, null);
   }

   /** Creates a new visualization GUI window; this method can only be called by the AWT event thread.
    * @param standalone - whether the JVM should shutdown after the last file is closed
    * @param xmlFileName - the filename of the incoming XML file; "" if there's no file to open
    * @param windowmenu - if standalone==false and windowmenu!=null, then this will be added as a menu on the menubar
    * @param enumerator - if it's not null, it provides solution enumeration ability
    * @param evaluator - if it's not null, it provides solution evaluation ability
    *
    * <p> Note: if standalone==false and xmlFileName.length()==0, then we will initially hide the window.
    */
   public VizGUICustom(boolean standalone, String xmlFileName, JMenu windowmenu, Computer enumerator, Computer evaluator) {
      this(standalone, xmlFileName, windowmenu, enumerator, evaluator, true);
   }

   /** Creates a new visualization GUI window; this method can only be called by the AWT event thread.
    * @param standalone - whether the JVM should shutdown after the last file is closed
    * @param xmlFileName - the filename of the incoming XML file; "" if there's no file to open
    * @param windowmenu - if standalone==false and windowmenu!=null, then this will be added as a menu on the menubar
    * @param enumerator - if it's not null, it provides solution enumeration ability
    * @param evaluator - if it's not null, it provides solution evaluation ability
    * @param makeWindow - if false, then we will only construct the JSplitPane, without making the window
    *
    * <p> Note: if standalone==false and xmlFileName.length()==0 and makeWindow==true, then we will initially hide the window.
    */
   public VizGUICustom(boolean standalone, String xmlFileName, JMenu windowmenu, Computer enumerator, Computer evaluator, boolean makeWindow) {
      this.enumerator = enumerator;
      this.standalone = standalone;
      this.evaluator = evaluator;
      this.frame = makeWindow ? new JFrame("Alloy Visualizer") : null;

      // Figure out the desired x, y, width, and height: restore persisted geometry,
      // clamped to the current screen size with sane minimums.
      int screenWidth=OurUtil.getScreenWidth(), screenHeight=OurUtil.getScreenHeight();
      int width=VizWidth.get();
      if (width<0) width=screenWidth-150; else if (width<100) width=100;
      if (width>screenWidth) width=screenWidth;
      int height=VizHeight.get();
      if (height<0) height=screenHeight-150; else if (height<100) height=100;
      if (height>screenHeight) height=screenHeight;
      int x=VizX.get(); if (x<0 || x>screenWidth-10) x=0;
      int y=VizY.get(); if (y<0 || y>screenHeight-10) y=0;

      // Create the menubar.
      // NOTE: wrap=true makes the do*() handlers return Runner wrappers instead of
      // performing the actions immediately; it MUST be reset in the finally block.
      JMenuBar mb = new JMenuBar();
      try {
         wrap = true;
         JMenu fileMenu = menu(mb, "&File", null);
         menuItem(fileMenu, "Open...", 'O', 'O', doLoad());
         JMenu exportMenu = menu(null, "&Export To", null);
         menuItem(exportMenu, "Dot...", 'D', 'D', doExportDot());
         menuItem(exportMenu, "XML...", 'X', 'X', doExportXml());
         fileMenu.add(exportMenu);
         menuItem(fileMenu, "Close", 'W', 'W', doClose());
         if (standalone) menuItem(fileMenu, "Quit", 'Q', 'Q', doCloseAll()); else menuItem(fileMenu, "Close All", 'A', doCloseAll());
         JMenu instanceMenu = menu(mb, "&Instance", null);
         enumerateMenu = menuItem(instanceMenu, "Show Next Solution", 'N', 'N', doNext());
         thememenu = menu(mb, "&Theme", doRefreshTheme());
         if (standalone || windowmenu==null) windowmenu = menu(mb, "&Window", doRefreshWindow());
         this.windowmenu = windowmenu;
      } finally {
         wrap = false;
      }
      mb.add(windowmenu);
      thememenu.setEnabled(false);
      windowmenu.setEnabled(false);
      if (frame!=null) frame.setJMenuBar(mb);

      // Create the toolbar (again under wrap=true so button actions are Runner wrappers).
      projectionPopup = new JPopupMenu();
      projectionButton = new JButton("Projection: none");
      projectionButton.addActionListener(new ActionListener() {
         public void actionPerformed(ActionEvent e) {
            repopulateProjectionPopup();
            if (projectionPopup.getComponentCount()>0) projectionPopup.show(projectionButton, 10, 10);
         }
      });
      repopulateProjectionPopup();
      toolbar = new JToolBar();
      toolbar.setVisible(false);
      toolbar.setFloatable(false);
      toolbar.setBorder(null);
      if (!Util.onMac()) toolbar.setBackground(background);
      try {
         wrap = true;
         vizButton=makeSolutionButton("Viz", "Show Visualization", "images/24_graph.gif", doShowViz());
//       dotButton=makeSolutionButton("Dot", "Show the Dot File for the Graph", "images/24_plaintext.gif", doShowDot());
//       xmlButton=makeSolutionButton("XML", "Show XML", "images/24_plaintext.gif", doShowXML());
         txtButton=makeSolutionButton("Txt", "Show the textual output for the Graph", "images/24_plaintext.gif", doShowTxt());
         treeButton=makeSolutionButton("Tree", "Show Tree", "images/24_texttree.gif", doShowTree());
         if (frame!=null) addDivider();
         toolbar.add(closeSettingsButton=OurUtil.button("Close", "Close the theme customization panel", "images/24_settings_close2.gif", doCloseThemePanel()));
         toolbar.add(updateSettingsButton=OurUtil.button("Apply", "Apply the changes to the current theme", "images/24_settings_apply2.gif", doApply()));
         toolbar.add(openSettingsButton=OurUtil.button("Theme", "Open the theme customization panel", "images/24_settings.gif", doOpenThemePanel()));
         toolbar.add(magicLayout=OurUtil.button("Magic Layout", "Automatic theme customization (will reset current theme)", "images/24_settings_apply2.gif", doMagicLayout()));
         toolbar.add(openEvaluatorButton=OurUtil.button("Evaluator", "Open the evaluator", "images/24_settings.gif", doOpenEvalPanel()));
         toolbar.add(closeEvaluatorButton=OurUtil.button("Close Evaluator", "Close the evaluator", "images/24_settings_close2.gif", doCloseEvalPanel()));
         toolbar.add(enumerateButton=OurUtil.button("Next", "Show the next solution", "images/24_history.gif", doNext()));
         toolbar.add(projectionButton);
         toolbar.add(loadSettingsButton=OurUtil.button("Load", "Load the theme customization from a theme file", "images/24_open.gif", doLoadTheme()));
         toolbar.add(saveSettingsButton=OurUtil.button("Save", "Save the current theme customization", "images/24_save.gif", doSaveTheme()));
         toolbar.add(saveAsSettingsButton=OurUtil.button("Save As", "Save the current theme customization as a new theme file", "images/24_save.gif", doSaveThemeAs()));
         toolbar.add(resetSettingsButton=OurUtil.button("Reset", "Reset the theme customization", "images/24_settings_close2.gif", doResetTheme()));
      } finally {
         wrap = false;
      }
      settingsOpen=0;

      // Create the horizontal split pane
      splitpane = new JSplitPane(JSplitPane.HORIZONTAL_SPLIT);
      splitpane.setOneTouchExpandable(false);
      splitpane.setResizeWeight(0.);
      splitpane.setContinuousLayout(true);
      splitpane.setBorder(null);
      ((BasicSplitPaneUI)(splitpane.getUI())).getDivider().setBorder(new OurBorder(false,true,false,false));

      // Display the window, then proceed to load the input file
      if (frame!=null) {
         frame.pack();
         if (!Util.onMac() && !Util.onWindows()) {
            // many Window managers do not respect ICCCM2; this should help avoid the Title Bar being shifted "off screen"
            if (x<30) { if (x<0) x=0; width=width-(30-x); x=30; }
            if (y<30) { if (y<0) y=0; height=height-(30-y); y=30; }
            if (width<100) width=100;
            if (height<100) height=100;
         }
         frame.setSize(width, height);
         frame.setLocation(x, y);
         frame.setDefaultCloseOperation(JFrame.DO_NOTHING_ON_CLOSE);
         try { wrap=true; frame.addWindowListener(doClose()); } finally { wrap=false; }
         frame.addComponentListener(this);
      }
      if (xmlFileName.length()>0) doLoadInstance(xmlFileName);
      // add nemo icon...
      //frame.setIconImage(Toolkit.getDefaultToolkit().getImage(VizGUICustom.class.getResource("/resources/icon/atom-48x48.png")));
   }

   /** Invoked when the Visualization window is resized. */
   public void componentResized(ComponentEvent e) { componentMoved(e); }

   /** Invoked when the Visualization window is moved. */
   public void componentMoved(ComponentEvent e) {
      // Persist the window geometry so the next launch can restore it.
      if (frame!=null) { VizWidth.set(frame.getWidth()); VizHeight.set(frame.getHeight()); VizX.set(frame.getX()); VizY.set(frame.getY()); }
   }

   /** Invoked when the Visualization window is shown. */
   public void componentShown(ComponentEvent e) { }

   /** Invoked when the Visualization window is hidden. */
   public void componentHidden(ComponentEvent e) { }

   /** Helper method that repopulates the Projection popup menu. */
   private void repopulateProjectionPopup() {
      int num=0;
      String label="Projection: none";
      if (myState==null) { projectionButton.setEnabled(false); return; }
      projectionButton.setEnabled(true);
      projectionPopup.removeAll();
      final Set<AlloyType> projected = myState.getProjectedTypes();
      // One checkable menu item per projectable type; clicking toggles its projection.
      for(final AlloyType t: myState.getOriginalModel().getTypes()) if (myState.canProject(t)) {
         final boolean on = projected.contains(t);
         final JMenuItem m = new JMenuItem(t.getName(), on ? OurCheckbox.ON : OurCheckbox.OFF);
         m.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
               if (on) myState.deproject(t); else myState.project(t);
               updateDisplay();
            }
         });
         projectionPopup.add(m);
         if (on) { num++; if (num==1) label="Projected over "+t.getName(); }
      }
      projectionButton.setText(num>1 ? ("Projected over "+num+" sigs") : label);
   }

   /** Helper method that refreshes the right-side visualization panel with the latest settings.
    */
   private void updateDisplay() {
      if (myState==null) return;
      // First, update the toolbar
      currentMode.set();   // persist the chosen mode into the Java preferences
      for(JButton button:solutionButtons) button.setEnabled(settingsOpen!=1);
      // Disable the button corresponding to the mode currently shown.
      switch (currentMode) {
         case Tree: treeButton.setEnabled(false); break;
         case TEXT: txtButton.setEnabled(false); break;
//       case XML: xmlButton.setEnabled(false); break;
//       case DOT: dotButton.setEnabled(false); break;
         default: vizButton.setEnabled(false);
      }
      final boolean isMeta = myState.getOriginalInstance().isMetamodel;
      vizButton.setVisible(frame!=null);
      treeButton.setVisible(frame!=null);
      txtButton.setVisible(frame!=null);
//    dotButton.setVisible(frame!=null);
//    xmlButton.setVisible(frame!=null);
      // Button visibility depends on which side panel is open (settingsOpen) and the mode.
      magicLayout.setVisible((settingsOpen==0 || settingsOpen==1) && currentMode==VisualizerMode.Viz);
      projectionButton.setVisible((settingsOpen==0 || settingsOpen==1) && currentMode==VisualizerMode.Viz);
      openSettingsButton.setVisible( settingsOpen==0 && currentMode==VisualizerMode.Viz);
      loadSettingsButton.setVisible(frame==null && settingsOpen==1 && currentMode==VisualizerMode.Viz);
      saveSettingsButton.setVisible(frame==null && settingsOpen==1 && currentMode==VisualizerMode.Viz);
      saveAsSettingsButton.setVisible(frame==null && settingsOpen==1 && currentMode==VisualizerMode.Viz);
      resetSettingsButton.setVisible(frame==null && settingsOpen==1 && currentMode==VisualizerMode.Viz);
      closeSettingsButton.setVisible(settingsOpen==1 && currentMode==VisualizerMode.Viz);
      updateSettingsButton.setVisible(settingsOpen==1 && currentMode==VisualizerMode.Viz);
      openEvaluatorButton.setVisible(!isMeta && settingsOpen==0 && evaluator!=null);
      closeEvaluatorButton.setVisible(!isMeta && settingsOpen==2 && evaluator!=null);
      enumerateMenu.setEnabled(!isMeta && settingsOpen==0 && enumerator!=null);
      enumerateButton.setVisible(!isMeta && settingsOpen==0 && enumerator!=null);
      toolbar.setVisible(true);
      // Now, generate the graph or tree or textarea that we want to display on the right
      if (frame!=null) frame.setTitle(makeVizTitle());
      switch (currentMode) {
         case Tree: {
            final VizTree t = new VizTree(myState.getOriginalInstance().originalA4, makeVizTitle(), fontSize);
            final JScrollPane scroll = OurUtil.scrollpane(t, Color.BLACK, Color.WHITE, new OurBorder(true, false, true, false));
            scroll.addFocusListener(new FocusListener() {
               public final void focusGained(FocusEvent e) { t.requestFocusInWindow(); }
               public final void focusLost(FocusEvent e) { }
            });
            content = scroll;
            break;
         }
         case TEXT: {
            String textualOutput = myState.getOriginalInstance().originalA4.toString();
            content = getTextComponent(textualOutput);
            break;
         }
//       case XML: {
//          content=getTextComponent(xmlFileName);
//          break;
//       }
         default: {
            // Graph mode: build the graph panel lazily, otherwise refresh it.
            if (myGraphPanel==null) {
               myGraphPanel=new VizGraphPanel(myState, false);
            } else {
               myGraphPanel.seeDot(false);
               myGraphPanel.remakeAll();
            }
         }
         // NOTE: this statement sits after the default group inside the switch, so it
         // only runs in the default (graph) case; Tree/TEXT break out before it.
         content=myGraphPanel;
      }
      // Now that we've re-constructed "content", let's set its font size
      if (currentMode != VisualizerMode.Tree) {
         content.setFont(OurUtil.getVizFont().deriveFont((float)fontSize));
         content.invalidate(); content.repaint(); content.validate();
      }
      // Now, display them!
      final Box instanceTopBox = Box.createHorizontalBox();
      instanceTopBox.add(toolbar);
      final JPanel instanceArea = new JPanel(new BorderLayout());
      instanceArea.add(instanceTopBox, BorderLayout.NORTH);
      instanceArea.add(content, BorderLayout.CENTER);
      instanceArea.setVisible(true);
      if (!Util.onMac()) { instanceTopBox.setBackground(background); instanceArea.setBackground(background); }
      // Build the left-hand panel: theme customizer (settingsOpen==1), evaluator (==2), or nothing.
      JComponent left = null;
      if (settingsOpen==1) {
         if (myCustomPanel==null) myCustomPanel = new VizCustomizationPanel(splitpane,myState); else myCustomPanel.remakeAll();
         left = myCustomPanel;
      } else if (settingsOpen>1) {
         if (myEvaluatorPanel==null)
            myEvaluatorPanel = new OurConsole(evaluator, true,
               "The ", true, "Alloy Evaluator ", false,
               "allows you to type\nin Alloy expressions and see their values.\nFor example, ", true, "univ", false,
               " shows the list of all atoms.\n(You can press UP and DOWN to recall old inputs).\n");
         try { evaluator.compute(new File(xmlFileName)); } catch(Exception ex) { } // exception should not happen
         left = myEvaluatorPanel;
         left.setBorder(new OurBorder(false, false, false, false));
      }
      // Remember the divider position before swapping panes, then restore/clamp it.
      if (frame!=null && frame.getContentPane()==splitpane) lastDividerPosition=splitpane.getDividerLocation();
      splitpane.setRightComponent(instanceArea);
      splitpane.setLeftComponent(left);
      if (left!=null) {
         Dimension dim = left.getPreferredSize();
         if (lastDividerPosition<50 && frame!=null) lastDividerPosition = frame.getWidth()/2;
         if (lastDividerPosition<dim.width) lastDividerPosition = dim.width;
         if (settingsOpen==2 && lastDividerPosition>400) lastDividerPosition = 400;
         splitpane.setDividerLocation(lastDividerPosition);
      }
      if (frame!=null) frame.setContentPane(splitpane);
      if (settingsOpen!=2) content.requestFocusInWindow(); else myEvaluatorPanel.requestFocusInWindow();
      repopulateProjectionPopup();
      if (frame!=null) frame.validate(); else splitpane.validate();
   }

   /** Helper method that creates a button and add it to both the "SolutionButtons" list, as well as the toolbar.
    */
   private JButton makeSolutionButton(String label, String toolTip, String image, ActionListener mode) {
      JButton button = OurUtil.button(label, toolTip, image, mode);
      solutionButtons.add(button);
      toolbar.add(button);
      return button;
   }

   /** Helper method that returns a concise description of the instance currently being displayed. */
   private String makeVizTitle() {
      String filename = (myState!=null ? myState.getOriginalInstance().filename : "");
      String commandname = (myState!=null ? myState.getOriginalInstance().commandname : "");
      // Strip any leading directory components (handles both separator styles).
      int i=filename.lastIndexOf('/');
      if (i>=0) filename=filename.substring(i+1);
      i=filename.lastIndexOf('\\');
      if (i>=0) filename=filename.substring(i+1);
      // Drop a trailing ".als" extension, case-insensitively.
      int n=filename.length();
      if (n>4 && filename.substring(n-4).equalsIgnoreCase(".als")) filename=filename.substring(0,n-4);
      if (filename.length()>0) return "("+filename+") "+commandname; else return commandname;
   }

   /** Helper method that inserts "filename" into the "recently opened THEME file list". */
   private void addThemeHistory(String filename) {
      // Shift the most-recently-used entries down one slot each, stopping as soon
      // as an existing occurrence of filename is found (avoids duplicates).
      String name0=Theme0.get(), name1=Theme1.get(), name2=Theme2.get();
      if (name0.equals(filename)) return; else {Theme0.set(filename); Theme1.set(name0);}
      if (name1.equals(filename)) return; else Theme2.set(name1);
      if (name2.equals(filename)) return; else Theme3.set(name2);
   }

   /** Helper method returns a JTextArea containing the given text. */
   private JComponent getTextComponent(String text) {
      final JTextArea ta = OurUtil.textarea(text, 10, 10, false, true);
      final JScrollPane ans = new JScrollPane(ta, JScrollPane.VERTICAL_SCROLLBAR_AS_NEEDED, JScrollPane.HORIZONTAL_SCROLLBAR_AS_NEEDED) {
         private static final long serialVersionUID = 0;
         // Forward font changes to the embedded text area rather than the scroll pane.
         @Override public void setFont(Font font) { ta.setFont(font); }
      };
      ans.setBorder(new OurBorder(true, false, true, false));
      return ans;
   }

// /** Helper method that reads a file and then return a JTextArea containing it. */
// private JComponent getTextComponentFromFile(String filename) {
//    String text = "";
//    try { text="<!-- "+filename+" -->\n"+Util.readAll(filename); } catch(IOException ex) { text="# Error reading from "+filename; }
//    return getTextComponent(text);
// }

   /** Returns the GraphViewer that contains the graph; can be null if the graph hasn't been loaded yet. */
   public GraphViewer getViewer() {
      if (null == myGraphPanel) return null;
      return myGraphPanel.alloyGetViewer();
   }

   /** Load the XML instance. */
   public void loadXML(final String fileName, boolean forcefully) {
      final String xmlFileName = Util.canon(fileName);
      File f = new File(xmlFileName);
      // Re-parse only when forced or when a different file is requested.
      if (forcefully || !xmlFileName.equals(this.xmlFileName)) {
         AlloyInstance myInstance;
         try {
            if (!f.exists()) throw new IOException("File " + xmlFileName + " does not exist.");
            myInstance = StaticInstanceReader.parseInstance(f);
         } catch (Throwable e) {
            // On any parse failure: drop the file from the history, tell the user,
            // then fall back to the most recently loaded instance (or close everything).
            xmlLoaded.remove(fileName);
            xmlLoaded.remove(xmlFileName);
            OurDialog.alert("Cannot read or parse Alloy instance: "+xmlFileName+"\n\nError: "+e.getMessage());
            if (xmlLoaded.size()>0) { loadXML(xmlLoaded.get(xmlLoaded.size()-1), false); return; }
            doCloseAll();
            return;
         }
         if (myState==null) myState=new VizState(myInstance); else myState.loadInstance(myInstance);
         repopulateProjectionPopup();
         xml2title.put(xmlFileName, makeVizTitle());
         this.xmlFileName = xmlFileName;
      }
      if (!xmlLoaded.contains(xmlFileName)) xmlLoaded.add(xmlFileName);
      toolbar.setEnabled(true);
      settingsOpen=0;
      thememenu.setEnabled(true);
      windowmenu.setEnabled(true);
      if (frame!=null) {
         frame.setVisible(true);
         frame.setTitle("Alloy Visualizer "+Version.version()+" loading... Please wait...");
         OurUtil.show(frame);
      }
      loadThemeFile(thmFileName);
      updateDisplay();
   }

   /** This method loads a specific theme file.
    */
   public boolean loadThemeFile(String filename) {
      thmFileName=filename;
      if (myState==null) return false; // Can only load if there is a VizState loaded
      filename = Util.canon(filename);
      try {
         myState.loadPaletteXML(filename);
      } catch (IOException ex) {
         OurDialog.alert("Error: " + ex.getMessage());
         return false;
      }
      // Refresh everything that depends on the theme, then record it in the MRU list.
      repopulateProjectionPopup();
      if (myCustomPanel!=null) myCustomPanel.remakeAll();
      if (myGraphPanel!=null) myGraphPanel.remakeAll();
      addThemeHistory(filename);
      thmFileName=filename;
      updateDisplay();
      return true;
   }

   /** This method saves a specific current theme (if filename==null, it asks the user); returns true if it succeeded. */
   public boolean saveThemeFile(String filename) {
      if (myState==null) return false; // Can only save if there is a VizState loaded
      if (filename==null) {
         // No name given: prompt the user, confirming before overwriting an existing file.
         File file=OurDialog.askFile(false, null, ".thm", ".thm theme files");
         if (file==null) return false;
         if (file.exists()) if (!OurDialog.askOverwrite(Util.canon(file.getPath()))) return false;
         Util.setCurrentDirectory(file.getParentFile());
         filename = file.getPath();
      }
      filename = Util.canon(filename);
      try {
         myState.savePaletteXML(filename);
         filename = Util.canon(filename); // Since the canon name may have changed
         addThemeHistory(filename);
      } catch (Throwable er) {
         OurDialog.alert("Error saving the theme.\n\nError: " + er.getMessage());
         return false;
      }
      thmFileName = filename;
      return true;
   }

   //========================================= EVENTS ============================================================================================

   /** This method changes the font size for everything (except the graph) */
   public void doSetFontSize(int fontSize) {
      this.fontSize = fontSize;
      // Graph content handles its own fonts; everything else needs a full redisplay.
      if (!(content instanceof VizGraphPanel)) updateDisplay(); else content.setFont(OurUtil.getVizFont().deriveFont((float)fontSize));
   }

   /** This method asks the user for a new XML instance file to load. */
   private Runner doLoad() {
      if (wrap) return wrapMe();
      File file=OurDialog.askFile(true, null, ".xml", ".xml instance files");
      if (file==null) return null;
      Util.setCurrentDirectory(file.getParentFile());
      loadXML(file.getPath(), true);
      return null;
   }

   /** This method loads a new XML instance file if it's not the current file. */
   private Runner doLoadInstance(String fileName) {
      // Unlike the other handlers, this one always returns a Runner (with the
      // filename captured), and loads immediately only when not wrapping.
      if (!wrap) loadXML(fileName, false);
      return wrapMe(fileName);
   }

   /** This method closes the current instance; if there are previously loaded files, we will load one of them;
    * otherwise, this window will set itself as invisible (if not in standalone mode),
    * or it will terminate the entire application (if in standalone mode). */
   private Runner doClose() {
      if (wrap) return wrapMe();
      xmlLoaded.remove(xmlFileName);
      // Fall back to the most recently loaded remaining instance, if any.
      if (xmlLoaded.size()>0) { doLoadInstance(xmlLoaded.get(xmlLoaded.size()-1)); return null; }
      if (standalone) System.exit(0); else if (frame!=null) frame.setVisible(false);
      return null;
   }

   /** This method closes every XML file.
    * If in standalone mode, the JVM will then shutdown, otherwise it will just set the window invisible. */
   private Runner doCloseAll() {
      if (wrap) return wrapMe();
      xmlLoaded.clear();
      xmlFileName="";
      if (standalone) System.exit(0); else if (frame!=null) frame.setVisible(false);
      return null;
   }

   /** This method refreshes the "theme" menu.
*/ private Runner doRefreshTheme() { if (wrap) return wrapMe(); String defaultTheme = System.getProperty("alloy.theme0"); thememenu.removeAll(); try { wrap=true; menuItem(thememenu, "Load Theme...", 'L', doLoadTheme()); if (defaultTheme!=null && defaultTheme.length()>0 && (new File(defaultTheme)).isDirectory()) menuItem(thememenu, "Load Sample Theme...", 'B', doLoadSampleTheme()); menuItem(thememenu, "Save Theme", 'S', doSaveTheme()); menuItem(thememenu, "Save Theme As...", 'A', doSaveThemeAs()); menuItem(thememenu, "Reset Theme", 'R', doResetTheme()); } finally { wrap=false; } return null; } /** This method asks the user for a new theme file to load. */ private Runner doLoadTheme() { if (wrap) return wrapMe(); String defaultTheme=System.getProperty("alloy.theme0"); if (defaultTheme==null) defaultTheme=""; if (myState==null) return null; // Can only load if there is a VizState loaded if (myState.changedSinceLastSave()) { char opt = OurDialog.askSaveDiscardCancel("The current theme"); if (opt=='c') return null; if (opt=='s' && !saveThemeFile(thmFileName.length()==0 ? null : thmFileName)) return null; } File file=OurDialog.askFile(true, null, ".thm", ".thm theme files"); if (file!=null) { Util.setCurrentDirectory(file.getParentFile()); loadThemeFile(file.getPath()); } return null; } /** This method asks the user for a new theme file (from the default Alloy4 distribution) to load. */ private Runner doLoadSampleTheme() { if (wrap) return wrapMe(); String defaultTheme=System.getProperty("alloy.theme0"); if (defaultTheme==null) defaultTheme=""; if (myState==null) return null; // Can only load if there is a VizState loaded if (myState.changedSinceLastSave()) { char opt = OurDialog.askSaveDiscardCancel("The current theme"); if (opt=='c') return null; if (opt=='s' && !saveThemeFile(thmFileName.length()==0 ? 
null : thmFileName)) return null; } File file=OurDialog.askFile(true, defaultTheme, ".thm", ".thm theme files"); if (file!=null) loadThemeFile(file.getPath()); return null; } /** This method saves the current theme. */ private Runner doSaveTheme() { if (!wrap) saveThemeFile(thmFileName.length()==0 ? null : thmFileName); return wrapMe(); } /** This method saves the current theme to a new ".thm" file. */ private Runner doSaveThemeAs() { if (wrap) return wrapMe(); File file=OurDialog.askFile(false, null, ".thm", ".thm theme files"); if (file==null) return null; if (file.exists()) if (!OurDialog.askOverwrite(Util.canon(file.getPath()))) return null; Util.setCurrentDirectory(file.getParentFile()); saveThemeFile(file.getPath()); return null; } private Runner doExportDot() { if (wrap) return wrapMe(); File file=OurDialog.askFile(false, null, ".dot", ".dot graph files"); if (file==null) return null; if (file.exists()) if (!OurDialog.askOverwrite(Util.canon(file.getPath()))) return null; Util.setCurrentDirectory(file.getParentFile()); String filename = Util.canon(file.getPath()); try { Util.writeAll(filename, myGraphPanel.toDot()); } catch (Throwable er) { OurDialog.alert("Error saving the theme.\n\nError: " + er.getMessage()); } return null; } private Runner doExportXml() { if (wrap) return wrapMe(); File file=OurDialog.askFile(false, null, ".xml", ".xml XML files"); if (file==null) return null; if (file.exists()) if (!OurDialog.askOverwrite(Util.canon(file.getPath()))) return null; Util.setCurrentDirectory(file.getParentFile()); String filename = Util.canon(file.getPath()); try { Util.writeAll(filename, Util.readAll(xmlFileName)); } catch (Throwable er) { OurDialog.alert("Error saving XML instance.\n\nError: " + er.getMessage()); } return null; } /** This method resets the current theme. 
*/
private Runner doResetTheme() {
   if (wrap) return wrapMe();
   if (myState==null) return null;
   if (!OurDialog.yesno("Are you sure you wish to clear all your customizations?", "Yes, clear them", "No, keep them")) return null;
   myState.resetTheme();
   // Rebuild all theme-dependent views after clearing.
   repopulateProjectionPopup();
   if (myCustomPanel!=null) myCustomPanel.remakeAll();
   if (myGraphPanel!=null) myGraphPanel.remakeAll();
   thmFileName=""; // no theme file is associated with the reset state
   updateDisplay();
   return null;
}

/** This method modifies the theme using a set of heuristics. */
private Runner doMagicLayout() {
   if (wrap) return wrapMe();
   if (myState==null) return null;
   if (!OurDialog.yesno("This will clear your original customizations. Are you sure?", "Yes, clear them", "No, keep them")) return null;
   myState.resetTheme();
   try {
      MagicLayout.magic(myState);
      MagicColor.magic(myState);
   } catch(Throwable ex) {
      // NOTE(review): deliberately best-effort — any failure of the magic heuristics is
      // silently ignored and the (reset) theme is kept; confirm this is intended.
   }
   repopulateProjectionPopup();
   if (myCustomPanel!=null) myCustomPanel.remakeAll();
   if (myGraphPanel!=null) myGraphPanel.remakeAll();
   updateDisplay();
   return null;
}

/** This method refreshes the "window" menu. */
private Runner doRefreshWindow() {
   if (wrap) return wrapMe();
   windowmenu.removeAll();
   try {
      // wrap=true makes doLoadInstance() return Runners to use as menu actions.
      wrap=true;
      for(final String f:getInstances()) {
         JMenuItem it = new JMenuItem("Instance: "+getInstanceTitle(f), null);
         it.setIcon(f.equals(getXMLfilename())?iconYes:iconNo); // mark the active instance
         it.addActionListener(doLoadInstance(f));
         windowmenu.add(it);
      }
   } finally {
      wrap=false;
   }
   return null;
}

/** This method inserts "Minimize" and "Maximize" entries into a JMenu. */
public void addMinMaxActions(JMenu menu) {
   try {
      wrap=true;
      menuItem(menu, "Minimize", 'M', doMinimize(), iconNo);
      menuItem(menu, "Zoom", doZoom(), iconNo);
   } finally {
      wrap=false;
   }
}

/** This method minimizes the window. */
private Runner doMinimize() {
   if (!wrap && frame!=null) OurUtil.minimize(frame);
   return wrapMe();
}

/** This method alternatingly maximizes or restores the window.
*/ private Runner doZoom() { if (!wrap && frame!=null) OurUtil.zoom(frame); return wrapMe(); } /** This method attempts to derive the next satisfying instance. */ private Runner doNext() { if (wrap) return wrapMe(); if (settingsOpen!=0) return null; if (xmlFileName.length()==0) { OurDialog.alert("Cannot display the next solution since no instance is currently loaded."); } else if (enumerator==null) { OurDialog.alert("Cannot display the next solution since the analysis engine is not loaded with the visualizer."); } else { try { enumerator.compute(xmlFileName); } catch(Throwable ex) { OurDialog.alert(ex.getMessage()); } } return null; } /** This method updates the graph with the current theme customization. */ private Runner doApply() { if (!wrap) updateDisplay(); if( (new File(thmFileName)).exists() ) (new File(thmFileName)).delete(); saveThemeFile(thmFileName); loadThemeFile(thmFileName); return wrapMe(); } /** This method opens the theme customization panel if closed. */ private Runner doOpenThemePanel() { if (!wrap) { settingsOpen=1; updateDisplay(); } return wrapMe(); } /** This method closes the theme customization panel if open. */ private Runner doCloseThemePanel() { if (!wrap) { settingsOpen=0; updateDisplay(); } return wrapMe(); } /** This method opens the evaluator panel if closed. */ private Runner doOpenEvalPanel() { if (!wrap) { settingsOpen=2; updateDisplay(); } return wrapMe(); } /** This method closes the evaluator panel if open. */ private Runner doCloseEvalPanel() { if (!wrap) { settingsOpen=0; updateDisplay(); } return wrapMe(); } /** This method changes the display mode to show the instance as a graph (the return value is always null). */ public Runner doShowViz() { if (!wrap) { currentMode=VisualizerMode.Viz; updateDisplay(); return null; } return wrapMe(); } /** This method changes the display mode to show the instance as a tree (the return value is always null). 
*/
public Runner doShowTree() {
   // In menu-construction mode we only hand back a Runner; otherwise switch modes now.
   if (wrap) return wrapMe();
   currentMode = VisualizerMode.Tree;
   updateDisplay();
   return null;
}

/**
 * Switches the display mode to the textual (dot-equivalent) rendering of the instance;
 * the return value is always null when executed directly.
 */
public Runner doShowTxt() {
   if (wrap) return wrapMe();
   currentMode = VisualizerMode.TEXT;
   updateDisplay();
   return null;
}

// Disabled display modes, retained for reference:
//
// /** This method changes the display mode to show the equivalent dot text (the return value is always null). */
// public Runner doShowDot() {
//    if (!wrap) { currentMode=VisualizerMode.DOT; updateDisplay(); return null; }
//    return wrapMe();
// }
//
// /** This method changes the display mode to show the instance as XML (the return value is always null). */
// public Runner doShowXML() {
//    if (!wrap) { currentMode=VisualizerMode.XML; updateDisplay(); return null; }
//    return wrapMe();
// }
}
// Copyright (C) 2009 The Android Open Source Project // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.gerrit.sshd.commands; import static com.google.gerrit.common.data.GlobalCapability.MAINTAIN_SERVER; import static com.google.gerrit.common.data.GlobalCapability.VIEW_CACHES; import static com.google.gerrit.sshd.CommandMetaData.Mode.MASTER_OR_SLAVE; import com.google.common.base.Strings; import com.google.gerrit.common.TimeUtil; import com.google.gerrit.common.Version; import com.google.gerrit.extensions.annotations.RequiresAnyCapability; import com.google.gerrit.extensions.events.LifecycleListener; import com.google.gerrit.server.CurrentUser; import com.google.gerrit.server.config.ConfigResource; import com.google.gerrit.server.config.GetSummary; import com.google.gerrit.server.config.GetSummary.JvmSummaryInfo; import com.google.gerrit.server.config.GetSummary.MemSummaryInfo; import com.google.gerrit.server.config.GetSummary.SummaryInfo; import com.google.gerrit.server.config.GetSummary.TaskSummaryInfo; import com.google.gerrit.server.config.GetSummary.ThreadSummaryInfo; import com.google.gerrit.server.config.ListCaches; import com.google.gerrit.server.config.ListCaches.CacheInfo; import com.google.gerrit.server.config.ListCaches.CacheType; import com.google.gerrit.sshd.CommandMetaData; import com.google.gerrit.sshd.SshCommand; import com.google.gerrit.sshd.SshDaemon; import com.google.inject.Inject; import com.google.inject.Provider; import 
org.apache.sshd.common.io.IoAcceptor;
import org.apache.sshd.common.io.IoSession;
import org.apache.sshd.common.io.mina.MinaSession;
import org.apache.sshd.server.Environment;
import org.kohsuke.args4j.Option;

import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Collection;
import java.util.Date;
import java.util.Map;
import java.util.Map.Entry;

/** Show the current cache states. */
@RequiresAnyCapability({VIEW_CACHES, MAINTAIN_SERVER})
@CommandMetaData(name = "show-caches", description = "Display current cache statistics",
  runsAt = MASTER_OR_SLAVE)
final class ShowCaches extends SshCommand {
  // Wall-clock time (ms) at which this server started; set by StartupListener,
  // read by run() to compute the uptime line.
  private static volatile long serverStarted;

  static class StartupListener implements LifecycleListener {
    @Override
    public void start() {
      serverStarted = TimeUtil.nowMs();
    }

    @Override
    public void stop() {
    }
  }

  @Option(name = "--gc", usage = "perform Java GC before printing memory stats")
  private boolean gc;

  @Option(name = "--show-jvm", usage = "show details about the JVM")
  private boolean showJVM;

  @Option(name = "--show-threads", usage = "show detailed thread counts")
  private boolean showThreads;

  @Inject private SshDaemon daemon;
  @Inject private Provider<ListCaches> listCaches;
  @Inject private Provider<GetSummary> getSummary;
  @Inject private Provider<CurrentUser> self;

  @Option(name = "--width", aliases = {"-w"}, metaVar = "COLS", usage = "width of output table")
  private int columns = 80;

  // Width of the cache-name column, derived from the total table width in run().
  private int nw;

  @Override
  public void start(Environment env) throws IOException {
    // Prefer the terminal width reported by the SSH client; fall back to 80 columns.
    String s = env.getEnv().get(Environment.ENV_COLUMNS);
    if (s != null && !s.isEmpty()) {
      try {
        columns = Integer.parseInt(s);
      } catch (NumberFormatException err) {
        columns = 80;
      }
    }
    super.start(env);
  }

  @Override
  protected void run() throws UnloggedFailure {
    nw = columns - 50;
    Date now = new Date();
    // Banner: server version, current time, uptime.
    stdout.format(
        "%-25s %-20s now %16s\n",
        "Gerrit Code Review",
        Version.getVersion() != null ? Version.getVersion() : "",
        new SimpleDateFormat("HH:mm:ss zzz").format(now));
    stdout.format(
        "%-25s %-20s uptime %16s\n",
        "", "",
        uptime(now.getTime() - serverStarted));
    stdout.print('\n');

    // Table header (the trailing '//' markers keep auto-formatters from rewrapping).
    stdout.print(String.format(//
        "%1s %-"+nw+"s|%-21s| %-5s |%-9s|\n" //
        , "" //
        , "Name" //
        , "Entries" //
        , "AvgGet" //
        , "Hit Ratio" //
        ));
    stdout.print(String.format(//
        "%1s %-"+nw+"s|%6s %6s %7s| %-5s |%-4s %-4s|\n" //
        , "" //
        , "" //
        , "Mem" //
        , "Disk" //
        , "Space" //
        , "" //
        , "Mem" //
        , "Disk" //
        ));
    stdout.print("--");
    for (int i = 0; i < nw; i++) {
      stdout.print('-');
    }
    stdout.print("+---------------------+---------+---------+\n");

    Collection<CacheInfo> caches = getCaches();
    printMemoryCoreCaches(caches);
    printMemoryPluginCaches(caches);
    printDiskCaches(caches);
    stdout.print('\n');

    // Server-level summaries are restricted to users allowed to maintain the server.
    if (self.get().getCapabilities().canMaintainServer()) {
      sshSummary();
      SummaryInfo summary = getSummary.get().setGc(gc).setJvm(showJVM).apply(new ConfigResource());
      taskSummary(summary.taskSummary);
      memSummary(summary.memSummary);
      threadSummary(summary.threadSummary);
      if (showJVM && summary.jvmSummary != null) {
        jvmSummary(summary.jvmSummary);
      }
    }
    stdout.flush();
  }

  // Fetches all cache descriptors and copies each map key (the cache name) into its
  // CacheInfo so the print helpers can work from the value alone.
  private Collection<CacheInfo> getCaches() {
    @SuppressWarnings("unchecked")
    Map<String, CacheInfo> caches =
        (Map<String, CacheInfo>) listCaches.get().apply(new ConfigResource());
    for (Map.Entry<String, CacheInfo> entry : caches.entrySet()) {
      CacheInfo cache = entry.getValue();
      cache.name = entry.getKey();
    }
    return caches.values();
  }

  // Core (non-plugin) in-memory caches; plugin cache names contain a '-'.
  private void printMemoryCoreCaches(Collection<CacheInfo> caches) {
    for (CacheInfo cache : caches) {
      if (!cache.name.contains("-") && CacheType.MEM.equals(cache.type)) {
        printCache(cache);
      }
    }
  }

  // Plugin in-memory caches (names contain a '-').
  private void printMemoryPluginCaches(Collection<CacheInfo> caches) {
    for (CacheInfo cache : caches) {
      if (cache.name.contains("-") && CacheType.MEM.equals(cache.type)) {
        printCache(cache);
      }
    }
  }

  private void printDiskCaches(Collection<CacheInfo> caches) {
    for (CacheInfo cache : caches) {
      if (CacheType.DISK.equals(cache.type)) {
        printCache(cache);
      }
    }
  }

  // Prints one table row; disk-backed caches are marked with a leading "D".
  private void printCache(CacheInfo cache) {
    stdout.print(String.format(
        "%1s %-"+nw+"s|%6s %6s %7s| %7s |%4s %4s|\n",
        CacheType.DISK.equals(cache.type) ? "D" : "",
        cache.name,
        nullToEmpty(cache.entries.mem),
        nullToEmpty(cache.entries.disk),
        Strings.nullToEmpty(cache.entries.space),
        Strings.nullToEmpty(cache.averageGet),
        formatAsPercent(cache.hitRatio.mem),
        formatAsPercent(cache.hitRatio.disk)
        ));
  }

  private static String nullToEmpty(Long l) {
    return l != null ? String.valueOf(l) : "";
  }

  private static String formatAsPercent(Integer i) {
    return i != null ? String.valueOf(i) + "%" : "";
  }

  private void memSummary(MemSummaryInfo memSummary) {
    stdout.format("Mem: %s total = %s used + %s free + %s buffers\n",
        memSummary.total,
        memSummary.used,
        memSummary.free,
        memSummary.buffers);
    stdout.format(" %s max\n", memSummary.max);
    stdout.format(" %8d open files\n", nullToZero(memSummary.openFiles));
    stdout.print('\n');
  }

  private void threadSummary(ThreadSummaryInfo threadSummary) {
    stdout.format("Threads: %d CPUs available, %d threads\n",
        threadSummary.cpus, threadSummary.threads);
    if (showThreads) {
      // Detailed view: one row per thread-name group, one column per Thread.State.
      stdout.print(String.format(" %22s", ""));
      for (Thread.State s : Thread.State.values()) {
        stdout.print(String.format(" %14s", s.name()));
      }
      stdout.print('\n');
      for (Entry<String, Map<Thread.State, Integer>> e : threadSummary.counts.entrySet()) {
        stdout.print(String.format(" %-22s", e.getKey()));
        for (Thread.State s : Thread.State.values()) {
          stdout.print(String.format(" %14d", nullToZero(e.getValue().get(s))));
        }
        stdout.print('\n');
      }
    }
    stdout.print('\n');
  }

  private void taskSummary(TaskSummaryInfo taskSummary) {
    stdout.format(
        "Tasks: %4d total = %4d running + %4d ready + %4d sleeping\n",
        nullToZero(taskSummary.total),
        nullToZero(taskSummary.running),
        nullToZero(taskSummary.ready),
        nullToZero(taskSummary.sleeping));
  }

  private static int nullToZero(Integer i) {
    return i != null ? i : 0;
  }

  // Prints the number of SSH sessions and the age of the oldest one.
  private void sshSummary() {
    IoAcceptor acceptor = daemon.getIoAcceptor();
    if (acceptor == null) {
      return; // SSH daemon is not accepting connections
    }
    long now = TimeUtil.nowMs();
    Collection<IoSession> list = acceptor.getManagedSessions().values();
    long oldest = now;
    // Only MINA-backed sessions expose a creation time here.
    for (IoSession s : list) {
      if (s instanceof MinaSession) {
        MinaSession minaSession = (MinaSession)s;
        oldest = Math.min(oldest, minaSession.getSession().getCreationTime());
      }
    }
    stdout.format(
        "SSH: %4d users, oldest session started %s ago\n",
        list.size(),
        uptime(now - oldest));
  }

  private void jvmSummary(JvmSummaryInfo jvmSummary) {
    stdout.format("JVM: %s %s %s\n",
        jvmSummary.vmVendor, jvmSummary.vmName, jvmSummary.vmVersion);
    stdout.format(" on %s %s %s\n",
        jvmSummary.osName, jvmSummary.osVersion, jvmSummary.osArch);
    stdout.format(" running as %s on %s\n",
        jvmSummary.user, Strings.nullToEmpty(jvmSummary.host));
    stdout.format(" cwd %s\n", jvmSummary.currentWorkingDirectory);
    stdout.format(" site %s\n", jvmSummary.site);
  }

  // Renders a millisecond duration with the two largest sensible units
  // (ms, min/sec, hrs/min, or days/hrs).
  private String uptime(long uptimeMillis) {
    if (uptimeMillis < 1000) {
      return String.format("%3d ms", uptimeMillis);
    }
    long uptime = uptimeMillis / 1000L;
    long min = uptime / 60;
    if (min < 60) {
      return String.format("%2d min %2d sec", min, uptime - min * 60);
    }
    long hr = uptime / 3600;
    if (hr < 24) {
      min = (uptime - hr * 3600) / 60;
      return String.format("%2d hrs %2d min", hr, min);
    }
    long days = uptime / (24 * 3600);
    hr = (uptime - (days * 24 * 3600)) / 3600;
    return String.format("%4d days %2d hrs", days, hr);
  }
}
/*
 * Zed Attack Proxy (ZAP) and its related class files.
 *
 * ZAP is an HTTP/HTTPS proxy for assessing web application security.
 *
 * Copyright 2013 The ZAP Development Team
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.zaproxy.zap.extension.users;

import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import javax.management.relation.Role;

import org.apache.commons.configuration.Configuration;
import org.apache.log4j.Logger;
import org.parosproxy.paros.Constant;
import org.parosproxy.paros.control.Control;
import org.parosproxy.paros.db.RecordContext;
import org.parosproxy.paros.extension.Extension;
import org.parosproxy.paros.extension.ExtensionAdaptor;
import org.parosproxy.paros.extension.ExtensionHook;
import org.parosproxy.paros.model.Session;
import org.zaproxy.zap.authentication.AuthenticationMethodType;
import org.zaproxy.zap.control.ExtensionFactory;
import org.zaproxy.zap.extension.authentication.ExtensionAuthentication;
import org.zaproxy.zap.extension.httpsessions.ExtensionHttpSessions;
import org.zaproxy.zap.extension.sessions.ExtensionSessionManagement;
import org.zaproxy.zap.model.Context;
import org.zaproxy.zap.model.ContextDataFactory;
import org.zaproxy.zap.users.User;
import org.zaproxy.zap.view.AbstractContextPropertiesPanel;
import org.zaproxy.zap.view.ContextPanelFactory;

/**
 * The Extension for managing {@link User Users}, {@link Role Roles}, and related entities.
 * <p>
 * This class also handles the loading of {@link AuthenticationMethodType} classes in the
 * system using the AddOnLoader ({@link ExtensionFactory#getAddOnLoader()}).
 * </p>
 */
public class ExtensionUserManagement extends ExtensionAdaptor implements ContextPanelFactory,
        ContextDataFactory {

    public static final String CONTEXT_CONFIG_USERS = Context.CONTEXT_CONFIG + ".users";
    public static final String CONTEXT_CONFIG_USERS_USER = CONTEXT_CONFIG_USERS + ".user";

    /**
     * The extension's order during loading. Make sure we load this extension AFTER the
     * Authentication one.
     */
    public static final int EXTENSION_ORDER = ExtensionAuthentication.EXTENSION_ORDER + 5;

    /** The NAME of the extension. */
    public static final String NAME = "ExtensionUserManagement";

    /** The Constant log. */
    private static final Logger log = Logger.getLogger(ExtensionUserManagement.class);

    /** The user panels, mapped to each context. */
    private Map<Integer, ContextUsersPanel> userPanelsMap = new HashMap<>();

    /** The context managers, mapped to each context. */
    private Map<Integer, ContextUserAuthManager> contextManagers = new HashMap<>();

    // The REST/ZAP API implementor for user management, created in hook().
    private UsersAPI api;

    /** The Constant EXTENSION DEPENDENCIES. */
    private static final List<Class<? extends Extension>> EXTENSION_DEPENDENCIES;

    static {
        // Prepare a list of Extensions on which this extension depends
        List<Class<? extends Extension>> dependencies = new ArrayList<>(3);
        dependencies.add(ExtensionHttpSessions.class);
        dependencies.add(ExtensionAuthentication.class);
        dependencies.add(ExtensionSessionManagement.class);
        EXTENSION_DEPENDENCIES = Collections.unmodifiableList(dependencies);
    }

    /** A reference to the http sessions extension. */
    private ExtensionHttpSessions extensionHttpSessions;

    /**
     * Instantiates a new extension.
     */
    public ExtensionUserManagement() {
        initialize();
    }

    /**
     * Gets the ExtensionHttpSessions, if it's enabled.
     *
     * @return the Http Sessions extension or null, if it's not available
     */
    protected ExtensionHttpSessions getExtensionHttpSessions() {
        if (extensionHttpSessions == null) {
            // Lazily resolved; logs an error (but does not fail) if the extension is absent.
            extensionHttpSessions = Control.getSingleton().getExtensionLoader()
                    .getExtension(ExtensionHttpSessions.class);
            if (extensionHttpSessions == null)
                log.error("Http Sessions Extension should be enabled for the "
                        + ExtensionUserManagement.class.getSimpleName() + " to work.");
        }
        return extensionHttpSessions;
    }

    /**
     * Initialize the extension.
     */
    private void initialize() {
        this.setName(NAME);
        // Added to make sure the ExtensionForcedUser is loaded after this one.
        // See: ExtensionForcedUser#getOrder()
        this.setOrder(EXTENSION_ORDER);
    }

    @Override
    public String getUIName() {
        return Constant.messages.getString("users.name");
    }

    @Override
    public String getAuthor() {
        return Constant.ZAP_TEAM;
    }

    @Override
    public void hook(ExtensionHook extensionHook) {
        super.hook(extensionHook);
        // Register this as a context data factory
        extensionHook.addContextDataFactory(this);

        if (getView() != null) {
            // Factory for generating Session Context Users panels
            extensionHook.getHookView().addContextPanelFactory(this);
        }

        // Prepare API
        this.api = new UsersAPI(this);
        extensionHook.addApiImplementor(api);
    }

    @Override
    public List<Class<? extends Extension>> getDependencies() {
        return EXTENSION_DEPENDENCIES;
    }

    @Override
    public URL getURL() {
        try {
            return new URL(Constant.ZAP_HOMEPAGE);
        } catch (MalformedURLException e) {
            return null; // ZAP_HOMEPAGE is expected to be well-formed
        }
    }

    @Override
    public AbstractContextPropertiesPanel getContextPanel(Context ctx) {
        return getContextPanel(ctx.getIndex());
    }

    /**
     * Gets the context panel for a given context, creating and caching it on first use.
     *
     * @param contextId the context id
     * @return the context panel
     */
    private ContextUsersPanel getContextPanel(int contextId) {
        ContextUsersPanel panel = this.userPanelsMap.get(contextId);
        if (panel == null) {
            panel = new ContextUsersPanel(this, contextId);
            this.userPanelsMap.put(contextId, panel);
        }
        return panel;
    }

    /**
     * Gets the context user auth manager for a given context, creating and caching it on
     * first use.
     *
     * @param contextId the context id
     * @return the context user auth manager
     */
    public ContextUserAuthManager getContextUserAuthManager(int contextId) {
        ContextUserAuthManager manager = contextManagers.get(contextId);
        if (manager == null) {
            manager = new ContextUserAuthManager(contextId);
            contextManagers.put(contextId, manager);
        }
        return manager;
    }

    /**
     * Gets an unmodifiable view of the users that are currently shown in the UI.
     *
     * @param contextId the context id
     * @return the UI configured users, or null if no panel exists for the given context
     */
    public List<User> getUIConfiguredUsers(int contextId) {
        ContextUsersPanel panel = this.userPanelsMap.get(contextId);
        if (panel != null) {
            return Collections.unmodifiableList(panel.getUsersTableModel().getUsers());
        }
        return null;
    }

    /**
     * Gets the model of the users that are currently shown in the UI.
     *
     * @param contextId the context id
     * @return the users model, if any, or null, if there is no panel for the given model
     */
    public UsersTableModel getUIConfiguredUsersModel(int contextId) {
        ContextUsersPanel panel = this.userPanelsMap.get(contextId);
        if (panel != null) {
            return panel.getUsersTableModel();
        }
        return null;
    }

    @Override
    public void discardContexts() {
        this.contextManagers.clear();
        this.userPanelsMap.clear();
    }

    @Override
    public void discardContext(Context ctx) {
        this.contextManagers.remove(ctx.getIndex());
        this.userPanelsMap.remove(ctx.getIndex());
    }

    @Override
    public void loadContextData(Session session, Context context) {
        // Decode the users persisted in the session database into the context's manager.
        try {
            List<String> encodedUsers = session.getContextDataStrings(context.getIndex(),
                    RecordContext.TYPE_USER);
            ContextUserAuthManager usersManager = getContextUserAuthManager(context.getIndex());
            for (String e : encodedUsers) {
                User u = User.decode(context.getIndex(), e);
                usersManager.addUser(u);
            }
        } catch (Exception ex) {
            log.error("Unable to load Users.", ex);
        }
    }

    @Override
    public void persistContextData(Session session, Context context) {
        // Encode the context's users and store them in the session database.
        try {
            List<String> encodedUsers = new ArrayList<>();
            ContextUserAuthManager m = contextManagers.get(context.getIndex());
            if (m != null) {
                for (User u : m.getUsers()) {
                    encodedUsers.add(User.encode(u));
                }
                session.setContextData(context.getIndex(), RecordContext.TYPE_USER, encodedUsers);
            }
        } catch (Exception ex) {
            log.error("Unable to persist Users.", ex);
        }
    }

    /**
     * Removes all the users that are shown in the UI (for the Users context panel) and correspond
     * to a particular shared Context.
     *
     * @param sharedContext the shared context
     */
    public void removeSharedContextUsers(Context sharedContext) {
        this.getContextPanel(sharedContext.getIndex()).getUsersTableModel().removeAllUsers();
    }

    /**
     * Add a new user shown in the UI (for the Users context panel) that corresponds
     * to a particular shared Context.
     *
     * @param sharedContext the shared context
     * @param user the user
     */
    public void addSharedContextUser(Context sharedContext, User user) {
        this.getContextPanel(sharedContext.getIndex()).getUsersTableModel().addUser(user);
    }

    public List<User> getSharedContextUsers(Context sharedContext){
        return getContextPanel(sharedContext.getIndex()).getUsersTableModel().getUsers();
    }

    /**
     * Removes all the users that correspond to a Context with a given id.
     *
     * @param contextId the context id
     */
    public void removeContextUsers(int contextId) {
        this.getContextUserAuthManager(contextId).removeAllUsers();
    }

    @Override
    public void exportContextData(Context ctx, Configuration config) {
        ContextUserAuthManager m = contextManagers.get(ctx.getIndex());
        if (m != null) {
            for (User u : m.getUsers()) {
                config.addProperty(CONTEXT_CONFIG_USERS_USER, User.encode(u));
            }
        }
    }

    @Override
    public void importContextData(Context ctx, Configuration config) {
        List<Object> list = config.getList(CONTEXT_CONFIG_USERS_USER);
        ContextUserAuthManager m = getContextUserAuthManager(ctx.getIndex());
        for (Object o : list) {
            User usersManager = User.decode(ctx.getIndex(), o.toString());
            m.addUser(usersManager);
        }
    }

    /**
     * No database tables used, so all supported
     */
    @Override
    public boolean supportsDb(String type) {
        return true;
    }
}
package com.github.aloomaio.androidsdk.viewcrawler;

import android.annotation.TargetApi;
import android.app.Activity;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.os.Build;
import android.os.Handler;
import android.os.Looper;
import android.util.Base64;
import android.util.Base64OutputStream;
import android.util.DisplayMetrics;
import android.util.JsonWriter;
import android.util.Log;
import android.util.LruCache;
import android.view.View;
import android.view.ViewGroup;

import com.github.aloomaio.androidsdk.aloomametrics.AConfig;
import com.github.aloomaio.androidsdk.aloomametrics.ResourceIds;

import org.json.JSONObject;

import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.FutureTask;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

/**
 * Serializes the view hierarchy and a screenshot of each live activity to a JSON stream.
 * Root views are collected and screenshotted on the main UI thread; the JSON itself is
 * written on the calling thread.
 */
@TargetApi(AConfig.UI_FEATURES_MIN_API)
/* package */ class ViewSnapshot {

    public ViewSnapshot(List<PropertyDescription> properties, ResourceIds resourceIds) {
        mProperties = properties;
        mResourceIds = resourceIds;
        mMainThreadHandler = new Handler(Looper.getMainLooper());
        mRootViewFinder = new RootViewFinder();
        mClassnameCache = new ClassNameCache(MAX_CLASS_NAME_CACHE_SIZE);
    }

    /**
     * Take a snapshot of each activity in liveActivities. The given UIThreadSet will be accessed
     * on the main UI thread, and should contain a set with elements for every activity to be
     * snapshotted. Given stream out will be written on the calling thread.
     *
     * @param liveActivities the set of activities to snapshot (read on the main thread)
     * @param out            the destination stream; receives a JSON array, one object per activity
     * @throws IOException if writing to {@code out} fails
     */
    public void snapshots(UIThreadSet<Activity> liveActivities, OutputStream out) throws IOException {
        mRootViewFinder.findInActivities(liveActivities);

        // Root-view discovery and screenshots must happen on the UI thread; block here
        // (for at most one second) until they are ready.
        final FutureTask<List<RootViewInfo>> infoFuture =
                new FutureTask<List<RootViewInfo>>(mRootViewFinder);
        mMainThreadHandler.post(infoFuture);

        final OutputStreamWriter writer = new OutputStreamWriter(out);
        try {
            final List<RootViewInfo> infoList = infoFuture.get(1, TimeUnit.SECONDS);
            final int infoCount = infoList.size();

            writer.write("[");
            for (int i = 0; i < infoCount; i++) {
                if (i > 0) {
                    writer.write(",");
                }
                final RootViewInfo info = infoList.get(i);
                writer.write("{");
                writer.write("\"activity\":");
                writer.write(JSONObject.quote(info.activityName));
                writer.write(",");
                writer.write("\"scale\":");
                writer.write(String.format("%s", info.scale));
                writer.write(",");
                writer.write("\"serialized_objects\":");
                {
                    final JsonWriter j = new JsonWriter(writer);
                    j.beginObject();
                    j.name("rootObject").value(info.rootView.hashCode());
                    j.name("objects");
                    snapshotViewHierarchy(j, info.rootView);
                    j.endObject();
                    j.flush();
                }
                writer.write(",");
                writer.write("\"screenshot\":");
                // The screenshot is written as raw bytes directly to `out`, so the
                // character writer must be flushed first to keep the stream in order.
                writer.flush();
                info.screenshot.writeBitmapJSON(Bitmap.CompressFormat.PNG, 100, out);
                writer.write("}");
            }
            writer.write("]");
            writer.flush();
        } catch (final InterruptedException e) {
            // Bug fix: restore the interrupt status so callers up the stack can observe it.
            Thread.currentThread().interrupt();
            if (AConfig.DEBUG) {
                Log.d(LOGTAG, "Screenshot interrupted, no screenshot will be sent.", e);
            }
        } catch (final TimeoutException e) {
            if (AConfig.DEBUG) {
                Log.i(LOGTAG, "Screenshot took more than 1 second to be scheduled and executed. No screenshot will be sent.", e);
            }
        } catch (final ExecutionException e) {
            if (AConfig.DEBUG) {
                Log.e(LOGTAG, "Exception thrown during screenshot attempt", e);
            }
        }
    }

    // For testing only
    /* package */ List<PropertyDescription> getProperties() {
        return mProperties;
    }

    /** Writes the whole hierarchy rooted at rootView as a flat JSON array of view objects. */
    /* package */ void snapshotViewHierarchy(JsonWriter j, View rootView) throws IOException {
        j.beginArray();
        snapshotView(j, rootView);
        j.endArray();
    }

    /**
     * Writes one JSON object describing {@code view} (geometry, id, classes, configured
     * properties, and the hash codes of its direct children), then recurses into the children.
     * Views reference each other by hashCode, not by nesting.
     */
    private void snapshotView(JsonWriter j, View view) throws IOException {
        final int viewId = view.getId();
        final String viewIdName;
        if (-1 == viewId) {
            viewIdName = null;
        } else {
            viewIdName = mResourceIds.nameForId(viewId);
        }

        j.beginObject();
        j.name("hashCode").value(view.hashCode());
        j.name("id").value(viewId);
        j.name("mp_id_name").value(viewIdName);

        final CharSequence description = view.getContentDescription();
        if (null == description) {
            j.name("contentDescription").nullValue();
        } else {
            j.name("contentDescription").value(description.toString());
        }

        // Only CharSequence tags are serialized; other tag types are silently skipped.
        final Object tag = view.getTag();
        if (null == tag) {
            j.name("tag").nullValue();
        } else if (tag instanceof CharSequence) {
            j.name("tag").value(tag.toString());
        }

        j.name("top").value(view.getTop());
        j.name("left").value(view.getLeft());
        j.name("width").value(view.getWidth());
        j.name("height").value(view.getHeight());
        j.name("scrollX").value(view.getScrollX());
        j.name("scrollY").value(view.getScrollY());
        j.name("visibility").value(view.getVisibility());

        // Translation accessors only exist on API 11+.
        float translationX = 0;
        float translationY = 0;
        if (Build.VERSION.SDK_INT >= 11) {
            translationX = view.getTranslationX();
            translationY = view.getTranslationY();
        }
        j.name("translationX").value(translationX);
        j.name("translationY").value(translationY);

        // Full class hierarchy (excluding Object), most-derived first.
        j.name("classes");
        j.beginArray();
        Class<?> klass = view.getClass();
        do {
            j.value(mClassnameCache.get(klass));
            klass = klass.getSuperclass();
        } while (klass != Object.class && klass != null);
        j.endArray();

        addProperties(j, view);

        j.name("subviews");
        j.beginArray();
        if (view instanceof ViewGroup) {
            final ViewGroup group = (ViewGroup) view;
            final int childCount = group.getChildCount();
            for (int i = 0; i < childCount; i++) {
                final View child = group.getChildAt(i);
                // child can be null when views are getting disposed.
                if (null != child) {
                    j.value(child.hashCode());
                }
            }
        }
        j.endArray();
        j.endObject();

        if (view instanceof ViewGroup) {
            final ViewGroup group = (ViewGroup) view;
            final int childCount = group.getChildCount();
            for (int i = 0; i < childCount; i++) {
                final View child = group.getChildAt(i);
                // child can be null when views are getting disposed.
                if (null != child) {
                    snapshotView(j, child);
                }
            }
        }
    }

    /** Writes every configured property whose target class matches {@code v} and has an accessor. */
    private void addProperties(JsonWriter j, View v) throws IOException {
        final Class<?> viewClass = v.getClass();
        for (final PropertyDescription desc : mProperties) {
            if (desc.targetClass.isAssignableFrom(viewClass) && null != desc.accessor) {
                final Object value = desc.accessor.applyMethod(v);
                if (null == value) {
                    // Don't produce anything in this case
                } else if (value instanceof Number) {
                    j.name(desc.name).value((Number) value);
                } else if (value instanceof Boolean) {
                    j.name(desc.name).value((Boolean) value);
                } else {
                    j.name(desc.name).value(value.toString());
                }
            }
        }
    }

    /** LRU cache of canonical class names, since getCanonicalName is not cheap. */
    private static class ClassNameCache extends LruCache<Class<?>, String> {
        public ClassNameCache(int maxSize) {
            super(maxSize);
        }

        @Override
        protected String create(Class<?> klass) {
            return klass.getCanonicalName();
        }
    }

    /**
     * Runs on the main UI thread: collects the root view of every live activity and
     * takes a (scaled) screenshot of each.
     */
    private static class RootViewFinder implements Callable<List<RootViewInfo>> {

        public RootViewFinder() {
            mDisplayMetrics = new DisplayMetrics();
            mRootViews = new ArrayList<RootViewInfo>();
            mCachedBitmap = new CachedBitmap();
        }

        public void findInActivities(UIThreadSet<Activity> liveActivities) {
            mLiveActivities = liveActivities;
        }

        @Override
        public List<RootViewInfo> call() throws Exception {
            mRootViews.clear();

            final Set<Activity> liveActivities = mLiveActivities.getAll();
            for (final Activity a : liveActivities) {
                final String activityName = a.getClass().getCanonicalName();
                final View rootView = a.getWindow().getDecorView().getRootView();
                a.getWindowManager().getDefaultDisplay().getMetrics(mDisplayMetrics);
                final RootViewInfo info = new RootViewInfo(activityName, rootView);
                mRootViews.add(info);
            }

            final int viewCount = mRootViews.size();
            for (int i = 0; i < viewCount; i++) {
                final RootViewInfo info = mRootViews.get(i);
                takeScreenshot(info);
            }

            return mRootViews;
        }

        /**
         * Captures a bitmap of {@code info.rootView}, preferring the hidden
         * View#createSnapshot method and falling back to the drawing cache, then scales it
         * to the client density and stores it (with the scale) on {@code info}.
         */
        private void takeScreenshot(final RootViewInfo info) {
            final View rootView = info.rootView;
            Bitmap rawBitmap = null;

            try {
                // Hidden API; may disappear in any release, hence the broad catch list below.
                final Method createSnapshot = View.class.getDeclaredMethod(
                        "createSnapshot", Bitmap.Config.class, Integer.TYPE, Boolean.TYPE);
                createSnapshot.setAccessible(true);
                rawBitmap = (Bitmap) createSnapshot.invoke(rootView, Bitmap.Config.RGB_565, Color.WHITE, false);
            } catch (final NoSuchMethodException e) {
                if (AConfig.DEBUG) {
                    Log.v(LOGTAG, "Can't call createSnapshot, will use drawCache", e);
                }
            } catch (final IllegalArgumentException e) {
                Log.d(LOGTAG, "Can't call createSnapshot with arguments", e);
            } catch (final InvocationTargetException e) {
                Log.e(LOGTAG, "Exception when calling createSnapshot", e);
            } catch (final IllegalAccessException e) {
                Log.e(LOGTAG, "Can't access createSnapshot, using drawCache", e);
            } catch (final ClassCastException e) {
                Log.e(LOGTAG, "createSnapshot didn't return a bitmap?", e);
            }

            // Fallback: the drawing cache. Remember the original cache state so we can
            // restore it afterwards.
            Boolean originalCacheState = null;
            try {
                if (null == rawBitmap) {
                    originalCacheState = rootView.isDrawingCacheEnabled();
                    rootView.setDrawingCacheEnabled(true);
                    rootView.buildDrawingCache(true);
                    rawBitmap = rootView.getDrawingCache();
                }
            } catch (final RuntimeException e) {
                if (AConfig.DEBUG) {
                    Log.v(LOGTAG, "Can't take a bitmap snapshot of view " + rootView + ", skipping for now.", e);
                }
            }

            float scale = 1.0f;
            if (null != rawBitmap) {
                final int rawDensity = rawBitmap.getDensity();
                if (rawDensity != Bitmap.DENSITY_NONE) {
                    scale = ((float) mClientDensity) / rawDensity;
                }
                final int rawWidth = rawBitmap.getWidth();
                final int rawHeight = rawBitmap.getHeight();
                final int destWidth = (int) ((rawBitmap.getWidth() * scale) + 0.5);
                final int destHeight = (int) ((rawBitmap.getHeight() * scale) + 0.5);
                if (rawWidth > 0 && rawHeight > 0 && destWidth > 0 && destHeight > 0) {
                    mCachedBitmap.recreate(destWidth, destHeight, mClientDensity, rawBitmap);
                }
            }

            if (null != originalCacheState && !originalCacheState) {
                rootView.setDrawingCacheEnabled(false);
            }
            info.scale = scale;
            info.screenshot = mCachedBitmap;
        }

        private UIThreadSet<Activity> mLiveActivities;
        private final List<RootViewInfo> mRootViews;
        private final DisplayMetrics mDisplayMetrics;
        private final CachedBitmap mCachedBitmap;

        private final int mClientDensity = DisplayMetrics.DENSITY_DEFAULT;
    }

    /** A reusable bitmap buffer; the backing bitmap is only reallocated when dimensions change. */
    private static class CachedBitmap {

        public CachedBitmap() {
            mPaint = new Paint(Paint.FILTER_BITMAP_FLAG);
            mCached = null;
        }

        /** Draws {@code source} into a (possibly recycled) width x height RGB_565 bitmap. */
        public synchronized void recreate(int width, int height, int destDensity, Bitmap source) {
            if (null == mCached || mCached.getWidth() != width || mCached.getHeight() != height) {
                try {
                    mCached = Bitmap.createBitmap(width, height, Bitmap.Config.RGB_565);
                } catch (final OutOfMemoryError e) {
                    mCached = null;
                }

                if (null != mCached) {
                    mCached.setDensity(destDensity);
                }
            }

            if (null != mCached) {
                final Canvas scaledCanvas = new Canvas(mCached);
                scaledCanvas.drawBitmap(source, 0, 0, mPaint);
            }
        }

        // Writes a QUOTED base64 string (or the string null) to the output stream
        public synchronized void writeBitmapJSON(Bitmap.CompressFormat format, int quality, OutputStream out)
                throws IOException {
            if (null == mCached || mCached.getWidth() == 0 || mCached.getHeight() == 0) {
                out.write("null".getBytes());
            } else {
                out.write('"');
                // NO_CLOSE keeps the caller's stream open when the Base64 stream is closed;
                // closing (not merely flushing) the Base64 stream is what emits the final
                // partial block and the '=' padding.
                final Base64OutputStream imageOut =
                        new Base64OutputStream(out, Base64.NO_WRAP | Base64.NO_CLOSE);
                // Bug fix: honor the format/quality parameters instead of hard-coding PNG/100.
                // (The only current caller passes PNG/100, so behavior is unchanged for it.)
                mCached.compress(format, quality, imageOut);
                imageOut.close();
                out.write('"');
            }
        }

        private Bitmap mCached;
        private final Paint mPaint;
    }

    /** Per-activity snapshot data: root view plus the screenshot and its scale factor. */
    private static class RootViewInfo {
        public RootViewInfo(String activityName, View rootView) {
            this.activityName = activityName;
            this.rootView = rootView;
            this.screenshot = null;
            this.scale = 1.0f;
        }

        public final String activityName;
        public final View rootView;
        public CachedBitmap screenshot;
        public float scale;
    }

    private final RootViewFinder mRootViewFinder;
    private final List<PropertyDescription> mProperties;
    private final ClassNameCache mClassnameCache;
    private final Handler mMainThreadHandler;
    private final ResourceIds mResourceIds;

    private static final int MAX_CLASS_NAME_CACHE_SIZE = 255;

    @SuppressWarnings("unused")
    private static final String LOGTAG = "AloomaAPI.ViewSnapshot";
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.fontbox.afm; import java.io.IOException; import java.io.InputStream; import java.nio.charset.StandardCharsets; import java.util.StringTokenizer; import org.apache.fontbox.util.BoundingBox; /** * This class is used to parse AFM(Adobe Font Metrics) documents. * * @see <A href="http://partners.adobe.com/asn/developer/type/">AFM Documentation</A> * * @author Ben Litchfield * */ public class AFMParser { /** * This is a comment in a AFM file. */ public static final String COMMENT = "Comment"; /** * This is the constant used in the AFM file to start a font metrics item. */ public static final String START_FONT_METRICS = "StartFontMetrics"; /** * This is the constant used in the AFM file to end a font metrics item. */ public static final String END_FONT_METRICS = "EndFontMetrics"; /** * This is the font name. */ public static final String FONT_NAME = "FontName"; /** * This is the full name. */ public static final String FULL_NAME = "FullName"; /** * This is the Family name. */ public static final String FAMILY_NAME = "FamilyName"; /** * This is the weight. */ public static final String WEIGHT = "Weight"; /** * This is the font bounding box. 
*/ public static final String FONT_BBOX = "FontBBox"; /** * This is the version of the font. */ public static final String VERSION = "Version"; /** * This is the notice. */ public static final String NOTICE = "Notice"; /** * This is the encoding scheme. */ public static final String ENCODING_SCHEME = "EncodingScheme"; /** * This is the mapping scheme. */ public static final String MAPPING_SCHEME = "MappingScheme"; /** * This is the escape character. */ public static final String ESC_CHAR = "EscChar"; /** * This is the character set. */ public static final String CHARACTER_SET = "CharacterSet"; /** * This is the characters attribute. */ public static final String CHARACTERS = "Characters"; /** * This will determine if this is a base font. */ public static final String IS_BASE_FONT = "IsBaseFont"; /** * This is the V Vector attribute. */ public static final String V_VECTOR = "VVector"; /** * This will tell if the V is fixed. */ public static final String IS_FIXED_V = "IsFixedV"; /** * This is the cap height attribute. */ public static final String CAP_HEIGHT = "CapHeight"; /** * This is the X height. */ public static final String X_HEIGHT = "XHeight"; /** * This is ascender attribute. */ public static final String ASCENDER = "Ascender"; /** * This is the descender attribute. */ public static final String DESCENDER = "Descender"; /** * The underline position. */ public static final String UNDERLINE_POSITION = "UnderlinePosition"; /** * This is the Underline thickness. */ public static final String UNDERLINE_THICKNESS = "UnderlineThickness"; /** * This is the italic angle. */ public static final String ITALIC_ANGLE = "ItalicAngle"; /** * This is the char width. */ public static final String CHAR_WIDTH = "CharWidth"; /** * This will determine if this is fixed pitch. */ public static final String IS_FIXED_PITCH = "IsFixedPitch"; /** * This is the start of character metrics. 
*/ public static final String START_CHAR_METRICS = "StartCharMetrics"; /** * This is the end of character metrics. */ public static final String END_CHAR_METRICS = "EndCharMetrics"; /** * The character metrics c value. */ public static final String CHARMETRICS_C = "C"; /** * The character metrics c value. */ public static final String CHARMETRICS_CH = "CH"; /** * The character metrics value. */ public static final String CHARMETRICS_WX = "WX"; /** * The character metrics value. */ public static final String CHARMETRICS_W0X = "W0X"; /** * The character metrics value. */ public static final String CHARMETRICS_W1X = "W1X"; /** * The character metrics value. */ public static final String CHARMETRICS_WY = "WY"; /** * The character metrics value. */ public static final String CHARMETRICS_W0Y = "W0Y"; /** * The character metrics value. */ public static final String CHARMETRICS_W1Y = "W1Y"; /** * The character metrics value. */ public static final String CHARMETRICS_W = "W"; /** * The character metrics value. */ public static final String CHARMETRICS_W0 = "W0"; /** * The character metrics value. */ public static final String CHARMETRICS_W1 = "W1"; /** * The character metrics value. */ public static final String CHARMETRICS_VV = "VV"; /** * The character metrics value. */ public static final String CHARMETRICS_N = "N"; /** * The character metrics value. */ public static final String CHARMETRICS_B = "B"; /** * The character metrics value. */ public static final String CHARMETRICS_L = "L"; /** * The character metrics value. */ public static final String STD_HW = "StdHW"; /** * The character metrics value. */ public static final String STD_VW = "StdVW"; /** * This is the start of track kern data. */ public static final String START_TRACK_KERN = "StartTrackKern"; /** * This is the end of track kern data. */ public static final String END_TRACK_KERN = "EndTrackKern"; /** * This is the start of kern data. 
*/ public static final String START_KERN_DATA = "StartKernData"; /** * This is the end of kern data. */ public static final String END_KERN_DATA = "EndKernData"; /** * This is the start of kern pairs data. */ public static final String START_KERN_PAIRS = "StartKernPairs"; /** * This is the end of kern pairs data. */ public static final String END_KERN_PAIRS = "EndKernPairs"; /** * This is the start of kern pairs data. */ public static final String START_KERN_PAIRS0 = "StartKernPairs0"; /** * This is the start of kern pairs data. */ public static final String START_KERN_PAIRS1 = "StartKernPairs1"; /** * This is the start composites data section. */ public static final String START_COMPOSITES = "StartComposites"; /** * This is the end composites data section. */ public static final String END_COMPOSITES = "EndComposites"; /** * This is a composite character. */ public static final String CC = "CC"; /** * This is a composite character part. */ public static final String PCC = "PCC"; /** * This is a kern pair. */ public static final String KERN_PAIR_KP = "KP"; /** * This is a kern pair. */ public static final String KERN_PAIR_KPH = "KPH"; /** * This is a kern pair. */ public static final String KERN_PAIR_KPX = "KPX"; /** * This is a kern pair. */ public static final String KERN_PAIR_KPY = "KPY"; private static final int BITS_IN_HEX = 16; private final InputStream input; /** * Constructor. * * @param in The input stream to read the AFM document from. */ public AFMParser( InputStream in ) { input = in; } /** * This will parse the AFM document. The input stream is closed * when the parsing is finished. * * @return the parsed FontMetric * * @throws IOException If there is an IO error reading the document. */ public FontMetrics parse() throws IOException { return parseFontMetric(false); } /** * This will parse the AFM document. The input stream is closed * when the parsing is finished. 
* * @param reducedDataset parse a reduced subset of data if set to true * @return the parsed FontMetric * * @throws IOException If there is an IO error reading the document. */ public FontMetrics parse(boolean reducedDataset) throws IOException { return parseFontMetric(reducedDataset); } /** * This will parse a font metrics item. * * @return The parse font metrics item. * * @throws IOException If there is an error reading the AFM file. */ private FontMetrics parseFontMetric(boolean reducedDataset) throws IOException { readCommand(START_FONT_METRICS); FontMetrics fontMetrics = new FontMetrics(); fontMetrics.setAFMVersion( readFloat() ); String nextCommand; boolean charMetricsRead = false; while (!END_FONT_METRICS.equals(nextCommand = readString())) { switch (nextCommand) { case FONT_NAME: fontMetrics.setFontName( readLine() ); break; case FULL_NAME: fontMetrics.setFullName( readLine() ); break; case FAMILY_NAME: fontMetrics.setFamilyName( readLine() ); break; case WEIGHT: fontMetrics.setWeight( readLine() ); break; case FONT_BBOX: BoundingBox bBox = new BoundingBox(); bBox.setLowerLeftX( readFloat() ); bBox.setLowerLeftY( readFloat() ); bBox.setUpperRightX( readFloat() ); bBox.setUpperRightY( readFloat() ); fontMetrics.setFontBBox( bBox ); break; case VERSION: fontMetrics.setFontVersion( readLine() ); break; case NOTICE: fontMetrics.setNotice( readLine() ); break; case ENCODING_SCHEME: fontMetrics.setEncodingScheme( readLine() ); break; case MAPPING_SCHEME: fontMetrics.setMappingScheme( readInt() ); break; case ESC_CHAR: fontMetrics.setEscChar( readInt() ); break; case CHARACTER_SET: fontMetrics.setCharacterSet( readLine() ); break; case CHARACTERS: fontMetrics.setCharacters( readInt() ); break; case IS_BASE_FONT: fontMetrics.setIsBaseFont( readBoolean() ); break; case V_VECTOR: float[] vector = new float[2]; vector[0] = readFloat(); vector[1] = readFloat(); fontMetrics.setVVector( vector ); break; case IS_FIXED_V: fontMetrics.setIsFixedV( readBoolean() ); break; 
case CAP_HEIGHT: fontMetrics.setCapHeight( readFloat() ); break; case X_HEIGHT: fontMetrics.setXHeight( readFloat() ); break; case ASCENDER: fontMetrics.setAscender( readFloat() ); break; case DESCENDER: fontMetrics.setDescender( readFloat() ); break; case STD_HW: fontMetrics.setStandardHorizontalWidth( readFloat() ); break; case STD_VW: fontMetrics.setStandardVerticalWidth( readFloat() ); break; case COMMENT: fontMetrics.addComment( readLine() ); break; case UNDERLINE_POSITION: fontMetrics.setUnderlinePosition( readFloat() ); break; case UNDERLINE_THICKNESS: fontMetrics.setUnderlineThickness( readFloat() ); break; case ITALIC_ANGLE: fontMetrics.setItalicAngle( readFloat() ); break; case CHAR_WIDTH: float[] widths = new float[2]; widths[0] = readFloat(); widths[1] = readFloat(); fontMetrics.setCharWidth( widths ); break; case IS_FIXED_PITCH: fontMetrics.setFixedPitch( readBoolean() ); break; case START_CHAR_METRICS: charMetricsRead = parseCharMetrics(fontMetrics); break; case START_KERN_DATA: if( !reducedDataset) { parseKernData(fontMetrics); } break; case START_COMPOSITES: if( !reducedDataset) { parseComposites(fontMetrics); } break; default: if (!reducedDataset || !charMetricsRead) { throw new IOException("Unknown AFM key '" + nextCommand + "'"); } } } return fontMetrics; } /** * This will parse the kern data. * * @param fontMetrics The metrics class to put the parsed data into. * * @throws IOException If there is an error parsing the data. 
*/ private void parseKernData( FontMetrics fontMetrics ) throws IOException { String nextCommand; while( !(nextCommand = readString()).equals( END_KERN_DATA ) ) { switch(nextCommand) { case START_TRACK_KERN: int countTrackKern = readInt(); for (int i = 0; i < countTrackKern; i++) { fontMetrics.addTrackKern(new TrackKern(readInt(), readFloat(), readFloat(), readFloat(), readFloat())); } readCommand(END_TRACK_KERN); break; case START_KERN_PAIRS: parseKernPairs(fontMetrics); break; case START_KERN_PAIRS0: parseKernPairs0(fontMetrics); break; case START_KERN_PAIRS1: parseKernPairs1(fontMetrics); break; default: throw new IOException( "Unknown kerning data type '" + nextCommand + "'" ); } } } private void parseKernPairs(FontMetrics fontMetrics) throws IOException { int countKernPairs = readInt(); for (int i = 0; i < countKernPairs; i++) { fontMetrics.addKernPair(parseKernPair()); } readCommand(END_KERN_PAIRS); } private void parseKernPairs0(FontMetrics fontMetrics) throws IOException { int countKernPairs = readInt(); for (int i = 0; i < countKernPairs; i++) { fontMetrics.addKernPair0(parseKernPair()); } readCommand(END_KERN_PAIRS); } private void parseKernPairs1(FontMetrics fontMetrics) throws IOException { int countKernPairs = readInt(); for (int i = 0; i < countKernPairs; i++) { fontMetrics.addKernPair1(parseKernPair()); } readCommand(END_KERN_PAIRS); } /** * This will parse a kern pair from the data stream. * * @return The kern pair that was parsed from the stream. * * @throws IOException If there is an error reading from the stream. 
*/ private KernPair parseKernPair() throws IOException { String cmd = readString(); switch (cmd) { case KERN_PAIR_KP: return new KernPair(readString(), readString(), // readFloat(), readFloat()); case KERN_PAIR_KPH: return new KernPair(hexToString(readString()), hexToString(readString()), // readFloat(), readFloat()); case KERN_PAIR_KPX: return new KernPair(readString(), readString(), // readFloat(), 0); case KERN_PAIR_KPY: return new KernPair(readString(), readString(), // 0, readFloat()); default: throw new IOException( "Error expected kern pair command actual='" + cmd + "'" ); } } /** * This will convert and angle bracket hex string to a string. * * @param hexToString An angle bracket string. * * @return The bytes of the hex string. * * @throws IOException If the string is in an invalid format. */ private String hexToString(String hexToString) throws IOException { if (hexToString.length() < 2) { throw new IOException("Error: Expected hex string of length >= 2 not='" + hexToString); } if (hexToString.charAt(0) != '<' || hexToString.charAt(hexToString.length() - 1) != '>') { throw new IOException( "String should be enclosed by angle brackets '" + hexToString + "'"); } String hexString = hexToString.substring(1, hexToString.length() - 1); byte[] data = new byte[hexString.length() / 2]; for( int i=0; i<hexString.length(); i+=2 ) { String hex = Character.toString(hexString.charAt(i)) + hexString.charAt(i + 1); data[i / 2] = (byte) parseInt(hex, BITS_IN_HEX); } return new String( data, StandardCharsets.ISO_8859_1 ); } private void parseComposites(FontMetrics fontMetrics) throws IOException { int countComposites = readInt(); for (int i = 0; i < countComposites; i++) { fontMetrics.addComposite(parseComposite()); } readCommand(END_COMPOSITES); } /** * This will parse a composite part from the stream. * * @return The composite. * * @throws IOException If there is an error parsing the composite. 
*/ private Composite parseComposite() throws IOException { String partData = readLine(); StringTokenizer tokenizer = new StringTokenizer( partData, " ;" ); String cc = tokenizer.nextToken(); if( !cc.equals( CC ) ) { throw new IOException( "Expected '" + CC + "' actual='" + cc + "'" ); } String name = tokenizer.nextToken(); Composite composite = new Composite(name); int partCount = parseInt(tokenizer.nextToken()); for( int i=0; i<partCount; i++ ) { String pcc = tokenizer.nextToken(); if( !pcc.equals( PCC ) ) { throw new IOException( "Expected '" + PCC + "' actual='" + pcc + "'" ); } String partName = tokenizer.nextToken(); int x = parseInt(tokenizer.nextToken()); int y = parseInt(tokenizer.nextToken()); composite.addPart(new CompositePart(partName, x, y)); } return composite; } private boolean parseCharMetrics(FontMetrics fontMetrics) throws IOException { int countMetrics = readInt(); for (int i = 0; i < countMetrics; i++) { fontMetrics.addCharMetric(parseCharMetric()); } readCommand(END_CHAR_METRICS); return true; } /** * This will parse a single CharMetric object from the stream. * * @return The next char metric in the stream. * * @throws IOException If there is an error reading from the stream. */ private CharMetric parseCharMetric() throws IOException { CharMetric charMetric = new CharMetric(); String metrics = readLine(); StringTokenizer metricsTokenizer = new StringTokenizer( metrics ); while (metricsTokenizer.hasMoreTokens()) { String nextCommand = metricsTokenizer.nextToken(); switch (nextCommand) { case CHARMETRICS_C: String charCodeC = metricsTokenizer.nextToken(); charMetric.setCharacterCode(parseInt(charCodeC)); verifySemicolon(metricsTokenizer); break; case CHARMETRICS_CH: // Is the hex string <FF> or FF, the spec is a little // unclear, wait and see if it breaks anything. 
String charCodeCH = metricsTokenizer.nextToken(); charMetric.setCharacterCode(parseInt(charCodeCH, BITS_IN_HEX)); verifySemicolon(metricsTokenizer); break; case CHARMETRICS_WX: charMetric.setWx(parseFloat(metricsTokenizer.nextToken())); verifySemicolon(metricsTokenizer); break; case CHARMETRICS_W0X: charMetric.setW0x(parseFloat(metricsTokenizer.nextToken())); verifySemicolon(metricsTokenizer); break; case CHARMETRICS_W1X: charMetric.setW1x(parseFloat(metricsTokenizer.nextToken())); verifySemicolon(metricsTokenizer); break; case CHARMETRICS_WY: charMetric.setWy(parseFloat(metricsTokenizer.nextToken())); verifySemicolon(metricsTokenizer); break; case CHARMETRICS_W0Y: charMetric.setW0y(parseFloat(metricsTokenizer.nextToken())); verifySemicolon(metricsTokenizer); break; case CHARMETRICS_W1Y: charMetric.setW1y(parseFloat(metricsTokenizer.nextToken())); verifySemicolon(metricsTokenizer); break; case CHARMETRICS_W: float[] w = new float[2]; w[0] = parseFloat(metricsTokenizer.nextToken()); w[1] = parseFloat(metricsTokenizer.nextToken()); charMetric.setW(w); verifySemicolon(metricsTokenizer); break; case CHARMETRICS_W0: float[] w0 = new float[2]; w0[0] = parseFloat(metricsTokenizer.nextToken()); w0[1] = parseFloat(metricsTokenizer.nextToken()); charMetric.setW0(w0); verifySemicolon(metricsTokenizer); break; case CHARMETRICS_W1: float[] w1 = new float[2]; w1[0] = parseFloat(metricsTokenizer.nextToken()); w1[1] = parseFloat(metricsTokenizer.nextToken()); charMetric.setW1(w1); verifySemicolon(metricsTokenizer); break; case CHARMETRICS_VV: float[] vv = new float[2]; vv[0] = parseFloat(metricsTokenizer.nextToken()); vv[1] = parseFloat(metricsTokenizer.nextToken()); charMetric.setVv(vv); verifySemicolon(metricsTokenizer); break; case CHARMETRICS_N: charMetric.setName(metricsTokenizer.nextToken()); verifySemicolon(metricsTokenizer); break; case CHARMETRICS_B: BoundingBox box = new BoundingBox(); box.setLowerLeftX(parseFloat(metricsTokenizer.nextToken())); 
box.setLowerLeftY(parseFloat(metricsTokenizer.nextToken())); box.setUpperRightX(parseFloat(metricsTokenizer.nextToken())); box.setUpperRightY(parseFloat(metricsTokenizer.nextToken())); charMetric.setBoundingBox(box); verifySemicolon(metricsTokenizer); break; case CHARMETRICS_L: Ligature lig = new Ligature(metricsTokenizer.nextToken(), metricsTokenizer.nextToken()); charMetric.addLigature(lig); verifySemicolon(metricsTokenizer); break; default: throw new IOException("Unknown CharMetrics command '" + nextCommand + "'"); } } return charMetric; } /** * This is used to verify that a semicolon is the next token in the stream. * * @param tokenizer The tokenizer to read from. * * @throws IOException If the semicolon is missing. */ private void verifySemicolon( StringTokenizer tokenizer ) throws IOException { if( tokenizer.hasMoreTokens() ) { String semicolon = tokenizer.nextToken(); if (!";".equals(semicolon)) { throw new IOException( "Error: Expected semicolon in stream actual='" + semicolon + "'" ); } } else { throw new IOException( "CharMetrics is missing a semicolon after a command" ); } } /** * This will read a boolean from the stream. * * @return The boolean in the stream. */ private boolean readBoolean() throws IOException { return Boolean.parseBoolean(readString()); } /** * This will read an integer from the stream. * * @return The integer in the stream. */ private int readInt() throws IOException { return parseInt(readString(), 10); } private int parseInt(String intValue) throws IOException { return parseInt(intValue, 10); } private int parseInt(String intValue, int radix) throws IOException { try { return Integer.parseInt(intValue, radix); } catch (NumberFormatException e) { throw new IOException("Error parsing AFM document:" + e, e); } } /** * This will read a float from the stream. * * @return The float in the stream. 
 */
private float readFloat() throws IOException
{
    return parseFloat(readString());
}

// Parses a float; NumberFormatException is wrapped in IOException so callers
// only have to handle one failure type.
private float parseFloat(String floatValue) throws IOException
{
    try
    {
        return Float.parseFloat(floatValue);
    }
    catch (NumberFormatException e)
    {
        throw new IOException("Error parsing AFM document:" + e, e);
    }
}

/**
 * This will read until the end of a line.
 *
 * @return The string that is read.
 */
private String readLine() throws IOException
{
    //First skip the whitespace
    StringBuilder buf = new StringBuilder(60);
    int nextByte = input.read();
    while( isWhitespace( nextByte ) )
    {
        nextByte = input.read();
        //do nothing just skip the whitespace.
    }
    // NOTE(review): if the stream is already at EOF here, nextByte is -1 and
    // (char)-1 (0xFFFF) gets appended -- assumes callers never read past EOF;
    // TODO confirm.
    buf.append( (char)nextByte );

    //now read the data
    nextByte = input.read();
    while (nextByte != -1 && !isEOL(nextByte))
    {
        buf.append((char) nextByte);
        nextByte = input.read();
    }
    return buf.toString();
}

/**
 * This will read a string from the input stream and stop at any whitespace.
 *
 * @return The string read from the stream.
 *
 * @throws IOException If an IO error occurs when reading from the stream.
 */
private String readString() throws IOException
{
    //First skip the whitespace
    StringBuilder buf = new StringBuilder(24);
    int nextByte = input.read();
    while( isWhitespace( nextByte ) )
    {
        nextByte = input.read();
        //do nothing just skip the whitespace.
    }
    // NOTE(review): same EOF caveat as readLine() -- a -1 read here is
    // appended as the char 0xFFFF.
    buf.append( (char)nextByte );

    //now read the data
    nextByte = input.read();
    while (nextByte != -1 && !isWhitespace(nextByte))
    {
        buf.append((char) nextByte);
        nextByte = input.read();
    }
    return buf.toString();
}

/**
 * Read the next string. Throw an exception if it differs from the expected command.
 *
 * @param expectedCommand the expected command
 * @throws IOException IF the read string differs from the expected command
 */
private void readCommand(String expectedCommand) throws IOException
{
    String command = readString();
    if (!expectedCommand.equals(command))
    {
        throw new IOException(
                "Error: Expected '" + expectedCommand + "' actual '" + command + "'");
    }
}

/**
 * This will determine if the byte is an end-of-line marker.
 * (The previous javadoc was a copy-paste of {@code isWhitespace}.)
 *
 * @param character The character to test.
 *
 * @return true If the character is CR (0x0D) or LF (0x0A).
 */
private boolean isEOL( int character )
{
    return character == 0x0D ||
           character == 0x0A;
}

/**
 * This will determine if the byte is a whitespace character or not.
 *
 * @param character The character to test for whitespace.
 *
 * @return true If the character is whitespace as defined by the AFM spec.
 */
private boolean isWhitespace( int character )
{
    return character == ' ' ||
           character == '\t' ||
           character == 0x0D ||
           character == 0x0A;
}
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.watcher;

import org.elasticsearch.client.NodesResponseHeader;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;

/**
 * The response from a 'watcher stats' request.
 * (The previous javadoc -- "ack watch" -- was copy-pasted from another response class.)
 */
public class WatcherStatsResponse {

    private final List<Node> nodes;
    private final NodesResponseHeader header;
    private final String clusterName;
    private final WatcherMetadata watcherMetadata;

    public WatcherStatsResponse(NodesResponseHeader header, String clusterName,
                                WatcherMetadata watcherMetadata, List<Node> nodes) {
        this.nodes = nodes;
        this.header = header;
        this.clusterName = clusterName;
        this.watcherMetadata = watcherMetadata;
    }

    /**
     * @return the cluster-wide watcher metadata carried by this response
     */
    public WatcherMetadata getWatcherMetadata() {
        return watcherMetadata;
    }

    /**
     * returns a list of nodes that returned stats
     */
    public List<Node> getNodes() {
        return nodes;
    }

    /**
     * Gets information about the number of total, successful and failed nodes the request was run on.
     * Also includes exceptions if relevant.
     */
    public NodesResponseHeader getHeader() {
        return header;
    }

    /**
     * Get the cluster name associated with all of the nodes.
     *
     * @return Never {@code null}.
     */
    public String getClusterName() {
        return clusterName;
    }

    // Constructor-arg order must match the declare* calls in the static block:
    // _nodes, cluster_name, manually_stopped, stats.
    @SuppressWarnings("unchecked")
    private static final ConstructingObjectParser<WatcherStatsResponse, Void> PARSER =
        new ConstructingObjectParser<>("watcher_stats_response", true,
            a -> new WatcherStatsResponse((NodesResponseHeader) a[0], (String) a[1],
                new WatcherMetadata((boolean) a[2]), (List<Node>) a[3]));

    static {
        PARSER.declareObject(ConstructingObjectParser.constructorArg(), NodesResponseHeader::fromXContent,
            new ParseField("_nodes"));
        PARSER.declareString(ConstructingObjectParser.constructorArg(), new ParseField("cluster_name"));
        PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), new ParseField("manually_stopped"));
        PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), (p, c) -> Node.PARSER.apply(p, null),
            new ParseField("stats"));
    }

    public static WatcherStatsResponse fromXContent(XContentParser parser) throws IOException {
        return PARSER.parse(parser, null);
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        WatcherStatsResponse that = (WatcherStatsResponse) o;
        return Objects.equals(nodes, that.nodes) &&
            Objects.equals(header, that.header) &&
            Objects.equals(clusterName, that.clusterName) &&
            Objects.equals(watcherMetadata, that.watcherMetadata);
    }

    @Override
    public int hashCode() {
        return Objects.hash(nodes, header, clusterName, watcherMetadata);
    }

    /**
     * Per-node watcher statistics.
     */
    public static class Node {

        // args[3] is the (queue_size, max_size) tuple produced by
        // THREAD_POOL_PARSER below; v1() -> queue size, v2() -> max size.
        @SuppressWarnings("unchecked")
        public static final ConstructingObjectParser<Node, Void> PARSER =
            new ConstructingObjectParser<>("watcher_stats_node", true, (args, c) -> new Node(
                (String) args[0],
                WatcherState.valueOf(((String) args[1]).toUpperCase(Locale.ROOT)),
                (long) args[2],
                ((Tuple<Long, Long>) args[3]).v1(),
                ((Tuple<Long, Long>) args[3]).v2(),
                (List<WatchExecutionSnapshot>) args[4],
                (List<QueuedWatch>) args[5],
                (Map<String, Object>) args[6]
            ));

        private static final ConstructingObjectParser<Tuple<Long, Long>, Void> THREAD_POOL_PARSER =
            new ConstructingObjectParser<>("execution_thread_pool", true,
                (args, id) -> new Tuple<>((Long) args[0], (Long) args[1]));

        static {
            PARSER.declareString(ConstructingObjectParser.constructorArg(), new ParseField("node_id"));
            PARSER.declareString(ConstructingObjectParser.constructorArg(), new ParseField("watcher_state"));
            PARSER.declareLong(ConstructingObjectParser.constructorArg(), new ParseField("watch_count"));
            PARSER.declareObject(ConstructingObjectParser.constructorArg(), THREAD_POOL_PARSER::apply,
                new ParseField("execution_thread_pool"));
            PARSER.declareObjectArray(ConstructingObjectParser.optionalConstructorArg(), WatchExecutionSnapshot.PARSER,
                new ParseField("current_watches"));
            PARSER.declareObjectArray(ConstructingObjectParser.optionalConstructorArg(), QueuedWatch.PARSER,
                new ParseField("queued_watches"));
            PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), (p, c) -> p.map(),
                new ParseField("stats"));

            THREAD_POOL_PARSER.declareLong(ConstructingObjectParser.constructorArg(), new ParseField("queue_size"));
            THREAD_POOL_PARSER.declareLong(ConstructingObjectParser.constructorArg(), new ParseField("max_size"));
        }

        private final String nodeId;
        private WatcherState watcherState;
        private long watchesCount;
        private long threadPoolQueueSize;
        private long threadPoolMaxSize;
        private List<WatchExecutionSnapshot> snapshots;
        private List<QueuedWatch> queuedWatches;
        private Map<String, Object> stats;

        public Node(String nodeId, WatcherState watcherState, long watchesCount, long threadPoolQueueSize,
                    long threadPoolMaxSize, List<WatchExecutionSnapshot> snapshots, List<QueuedWatch> queuedWatches,
                    Map<String, Object> stats) {
            this.nodeId = nodeId;
            this.watcherState = watcherState;
            this.watchesCount = watchesCount;
            this.threadPoolQueueSize = threadPoolQueueSize;
            this.threadPoolMaxSize = threadPoolMaxSize;
            this.snapshots = snapshots;
            this.queuedWatches = queuedWatches;
            this.stats = stats;
        }

        public String getNodeId() {
            return nodeId;
        }

        public long getWatchesCount() {
            return watchesCount;
        }

        public WatcherState getWatcherState() {
            return watcherState;
        }

        public long getThreadPoolQueueSize() {
            return threadPoolQueueSize;
        }

        public long getThreadPoolMaxSize() {
            return threadPoolMaxSize;
        }

        public List<WatchExecutionSnapshot> getSnapshots() {
            return snapshots;
        }

        public List<QueuedWatch> getQueuedWatches() {
            return queuedWatches;
        }

        public Map<String, Object> getStats() {
            return stats;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;
            Node node = (Node) o;
            return watchesCount == node.watchesCount &&
                threadPoolQueueSize == node.threadPoolQueueSize &&
                threadPoolMaxSize == node.threadPoolMaxSize &&
                Objects.equals(nodeId, node.nodeId) &&
                watcherState == node.watcherState &&
                Objects.equals(snapshots, node.snapshots) &&
                Objects.equals(queuedWatches, node.queuedWatches) &&
                Objects.equals(stats, node.stats);
        }

        @Override
        public int hashCode() {
            return Objects.hash(nodeId, watcherState, watchesCount, threadPoolQueueSize, threadPoolMaxSize,
                snapshots, queuedWatches, stats);
        }
    }
}
/*----------------------------------------------------------------------------*/
/* Copyright (c) FIRST 2008. All Rights Reserved.                             */
/* Open Source Software - may be modified and shared by FRC teams. The code   */
/* must be accompanied by the FIRST BSD license file in the root directory of */
/* the project.                                                               */
/*----------------------------------------------------------------------------*/

package edu.wpi.first.wpilibj.templates;

import edu.wpi.first.wpilibj.*;

/**
 * The VM is configured to automatically run this class, and to call the
 * functions corresponding to each mode, as described in the SimpleRobot
 * documentation. If you change the name of this class or the package after
 * creating this project, you must also update the manifest file in the resource
 * directory.
 */
public class RobotTemplate extends SimpleRobot {

    String modeString;
    DashboardData dashboardData; //For sending dashboard information
    String blockString = "BBBBBBBBBBB"; // Used to communicate image data to dashboard
    String theBlobString = "FFFFFFFFFFFFFFFFFF"; // Dito

    // Hardware handles. All are assigned in the constructor; the trailing
    // comments preserve the constructor arguments for quick reference.
    Joystick rightStick ;// new Joystick(Constants.USB_LeftJoyStickPort);
    Joystick leftStick ;// new Joystick(Constants.USB_RightJoyStickPort);
    Joystick operatorStick ;// new Joystick(Constants.USB_OperatorJoyStickPort);
    AnalogChannel rotationSensor ;// new AnalogChannel(Constants.ANALOG_RackPinionEncoder);
    AnalogChannel motorcurrent ;// new AnalogChannel(Constants.ANALOG_MotorCurrentPort);
    // Please note that SW BOT had left Front & left back reversed, others were forward
    SmartEncoder leftFrontEncoder ;// new SmartEncoder(Constants.DIO_LeftFrontEncoderPortA, Constants.DIO_LeftFrontEncoderPortB, true);
    SmartEncoder rightFrontEncoder ;// new SmartEncoder(Constants.DIO_RightFrontEncoderPortA, Constants.DIO_RightFrontEncoderPortB, false);
    SmartEncoder leftBackEncoder ;// new SmartEncoder(Constants.DIO_LeftBackEncoderPortA, Constants.DIO_LeftBackEncoderPortB, true);
    SmartEncoder rightBackEncoder ;// new SmartEncoder(Constants.DIO_RightBackEncoderPortA, Constants.DIO_RightBackEncoderPortB, false);
    SmartMotor leftFrontMotor ;// new SmartMotor(Constants.PWM_LeftFrontMotorPort, leftFrontEncoder, true, Constants.motorAccelTime);
    SmartMotor rightFrontMotor ;// new SmartMotor(Constants.PWM_RightFrontMotorPort, rightFrontEncoder, false, Constants.motorAccelTime);
    SmartMotor leftBackMotor ;// new SmartMotor(Constants.PWM_LeftBackMotorPort, leftBackEncoder, true, Constants.motorAccelTime);
    SmartMotor rightBackMotor ;// new SmartMotor(Constants.PWM_RightBackMotorPort, rightBackEncoder, false, Constants.motorAccelTime);
    SmartMotor steeringMotor ;// new SmartMotor(Constants.PWM_RackPinionMotorPort, null, true, 0);
    SmartMotor ballHandlerMotor ;// new SmartMotor(Constants.PWM_BallHandlerMotorPort, null, true, 0);
    SmartMotor smartBallDoorMotor ;// new SmartMotor(Constants.PWM_BallDoorMotorPort, null, true, 0);
    SmartAccelerometer accelerometerX ;// new Accelerometer(Constants.ANALOG_AccelerometerXPort);
    SmartAccelerometer accelerometerY ;// new Accelerometer(Constants.ANALOG_AccelerometerYPort);
    Servo cameraPanServo ;// new Servo(Constants.PWM_CameraPanServoPort);
    Servo cameraTiltServo ;// new Servo(Constants.PWM_CameraTiltServoPort);
    MenuControl menuControl ;// new MenuControl(rightStick); // For Managing menu interface
    Gyro gyro ;// new Gyro(Constants.ANALOG_GyroPort);
    GenesisDrive genesisDrive ;// new GenesisDrive(leftStick, rightStick,
    BallLauncher ballLauncher ;// new BallLauncher(ballHandlerMotor, smartBallDoorMotor, rightStick);
    // Create autonomous obkect
    Autonomous autonomous ; // new Autonomous(genesisDrive);
    // Create BlockyImage object
    BlockyImage blockyImage ; // new BlockyImage(); //tbd
    RobotServiceThread serviceThread ; // new RobotServiceThread(this);
    TwilightZoneThread twilightZoneThreead;

    /**
     * Creates joysticks first, blocks until button 3 on the left stick is
     * pressed (while disabled), then constructs all remaining hardware and
     * the helper threads. The TwilightZoneThread starts itself in its own
     * constructor; the service thread is started later in initialize().
     */
    public RobotTemplate(){
        System.out.println("RobotTemplate()------------------------------------");
        rightStick = new Joystick(Constants.USB_LeftJoyStickPort);
        leftStick = new Joystick(Constants.USB_RightJoyStickPort);
        operatorStick = new Joystick(Constants.USB_OperatorJoyStickPort);

        // Hold construction until the operator presses left-stick button 3
        // (or the robot leaves the disabled state).
        while(leftStick.getRawButton(3) == false && DriverStation.getInstance().isDisabled()){
            Timer.delay(1.0); // wait 1 seconds
            System.out.println("Waiting for things to Start");
        }
        System.out.println("Construction and Initialization in progress------------------------------------");
        getWatchdog().setEnabled(false);
        //drivetrain = new RobotDrive(2, 5); // create RobotDriv
        dashboardData = new DashboardData(); // For sending dashboard information
        rotationSensor = new AnalogChannel(Constants.ANALOG_RackPinionEncoder);
        motorcurrent = new AnalogChannel(Constants.ANALOG_MotorCurrentPort);
        //Please note that SW BOT had left Front & left back reversed, others were forward
        leftFrontEncoder = new SmartEncoder(Constants.DIO_LeftFrontEncoderPortA, Constants.DIO_LeftFrontEncoderPortB, true);
        rightFrontEncoder = new SmartEncoder(Constants.DIO_RightFrontEncoderPortA, Constants.DIO_RightFrontEncoderPortB, false);
        leftBackEncoder = new SmartEncoder(Constants.DIO_LeftBackEncoderPortA, Constants.DIO_LeftBackEncoderPortB, true);
        rightBackEncoder = new SmartEncoder(Constants.DIO_RightBackEncoderPortA, Constants.DIO_RightBackEncoderPortB, false);
        leftFrontMotor = new SmartMotor(Constants.PWM_LeftFrontMotorPort, leftFrontEncoder, true, Constants.motorAccelTime);
        rightFrontMotor = new SmartMotor(Constants.PWM_RightFrontMotorPort, rightFrontEncoder, false, Constants.motorAccelTime);
        leftBackMotor = new SmartMotor(Constants.PWM_LeftBackMotorPort, leftBackEncoder, true, Constants.motorAccelTime);
        rightBackMotor = new SmartMotor(Constants.PWM_RightBackMotorPort, rightBackEncoder, false, Constants.motorAccelTime);
        steeringMotor = new SmartMotor(Constants.PWM_RackPinionMotorPort, null, true, 0);
        ballHandlerMotor = new SmartMotor(Constants.PWM_BallHandlerMotorPort, null, true, 0);
        smartBallDoorMotor = new SmartMotor(Constants.PWM_BallDoorMotorPort, null, true, 0);
        accelerometerX = new SmartAccelerometer(Constants.ANALOG_AccelerometerXPort);
        accelerometerY = new SmartAccelerometer(Constants.ANALOG_AccelerometerYPort);
        cameraPanServo = new Servo(Constants.PWM_CameraPanServoPort);
        cameraTiltServo = new Servo(Constants.PWM_CameraTiltServoPort);
        menuControl = new MenuControl(rightStick); // For Managing menu interface
        gyro = new Gyro(Constants.ANALOG_GyroPort);
        genesisDrive= new GenesisDrive(leftStick, rightStick, leftFrontMotor, rightFrontMotor, leftBackMotor, rightBackMotor, steeringMotor, rotationSensor, menuControl, gyro, 1.0);
        ballLauncher = new BallLauncher(ballHandlerMotor, smartBallDoorMotor, rightStick);
        //Create autonomous obkect
        autonomous = new Autonomous(genesisDrive);
        //Create BlockyImage object
        blockyImage = new BlockyImage(); //tbd
        serviceThread = new RobotServiceThread(this);
        // NOTE(review): TwilightZoneThread calls start() inside its own
        // constructor, so it begins running before this constructor returns.
        twilightZoneThreead = new TwilightZoneThread(this);
    }

    /**
     * This function inits all hardware: resets gyro and encoders, positions
     * the camera servos, starts the encoders and the service thread.
     */
    public void initialize() {
        System.out.println("initialize called");
        gyro.reset();
        cameraTiltServo.setAngle(.05);
        cameraPanServo.setAngle(0.401);
        leftFrontEncoder.reset();
        rightFrontEncoder.reset();
        leftBackEncoder.reset();
        rightBackEncoder.reset();
        //rackPinionEncoder->Reset();
        leftFrontEncoder.start();
        rightFrontEncoder.start();
        leftBackEncoder.start();
        rightBackEncoder.start();
        // BlockyImageInit();
        // TwilightZoneInit();
        serviceThread.start();
    }

    public void setNextResolution() {
        // TBD Stubed for now while portit is in progress
    }

    /**
     * This function is called once each time the robot enters autonomous mode.
     */
    public void autonomous() {
        System.out.println("autonomous called");
        initialize();
    }

    /**
     * This function is called once each time the robot enters operator control.
     * NOTE(review): the loop below is while(true) and never checks
     * isOperatorControl()/isEnabled(), so this method never returns -- confirm
     * this is intentional for this SimpleRobot port.
     */
    public void operatorControl() {
        System.out.println("Made it to Operator Control");
        initialize();
        //downTimer.StartTimer(120.0f); // Start 15 Second Timer
        genesisDrive.SetDriverMode(true); // set for operator mode control
        cameraTiltServo.set(0.624);
        //blockyImage.SelectBallColors(); //tbd
        while (true) {
            Timer.delay(0.010f);
            // THE #2 BUTTON ON THE LEFT JOYSTICK CURRENTLY BREAKS THE CAMERA etc, IT NEEDS DEBUGGING, ASK ANSEL [OLD]
            if(leftStick.getButton(Joystick.ButtonType.kTop)) {
                setNextResolution();
            }
            if(rightStick.getButton(Joystick.ButtonType.kTop))
                modeString = "Hello";
            else
                modeString = "Goodbye..";
            // if(rightStick->GetButton(Joystick::kTopButton)) {
            // ScreenShot();
            // Wait(.5);
            // }
            menuControl.PeriodicService();
            ballLauncher.OperateBallCollector();
            // TBD Need a better home -->SmartMotor.SetCollectData(rightStick->GetButton(Joystick::kTriggerButton));
            //UpdateDashboard ();
        }
    }

    /**
     * Periodic work driven by RobotServiceThread every ~20 ms.
     */
    public void PeriodicService() {
        SmartMotor.MotorTask();
        accelerometerX.PeriodicService();
        accelerometerY.PeriodicService();
        genesisDrive.PeriodicService();
        //if(SmartMotor::DoCollectData() && mainRobot)
        // mainRobot->PrintData();
    }

    /**
     * Periodic work driven by TwilightZoneThread every ~40 ms.
     */
    public void TwilightZonePeriodicService() {
        menuControl.PeriodicService();
        UpdateDashboard();
    }

    /**
     * Collects telemetry and sends one dashboard packet. The numbered
     * comments give each value's slot in the packet (note the original
     * numbering jumps from 10 to 12).
     */
    public void UpdateDashboard() {
        int menuIndex = menuControl.GetMenuValue();
        int choiceIndex = menuControl.GetChoiceValue();
        float lEncoder = 0f, rEncoder=0f, lSpeed=0f, rSpeed=0f, lAccel=0f, rAccel=0f;
        // NOTE(review): Java passes primitives by value, so these GetData calls
        // cannot write back into the local floats -- they stay 0. This looks
        // like a leftover from the C++ port (reference parameters); confirm.
        leftFrontEncoder.GetData(lEncoder, lSpeed, lAccel);
        rightFrontEncoder.GetData(rEncoder, rSpeed, rAccel);
        boolean isTankMode = genesisDrive.IsTankDrive();
        float doorSpeed = 0f, ballHandlerSpeed = 0f;
        // NOTE(review): same pass-by-value caveat as GetData above.
        ballLauncher.GetSpeeds(doorSpeed,ballHandlerSpeed);
        // TBDfloat timeLeftInMatch = downTimer.GetRemainingTime(); // Start 15 Second Timer
        float timeLeftMatch = 0.66f;
        // smartBallDoorMotor->Set(doorSpeed);
        // smartBallHandlerMotor->Set(ballHandlerSpeed);
        DriverStation ds = DriverStation.getInstance();
        dashboardData.reset(); // Let's fill the buffer from scratch
        dashboardData.add(ds.getBatteryVoltage()); // 0 Send Battery Voltage
        dashboardData.add(!ds.isEnabled()); // 1 Send enabled/disabled
        dashboardData.add(ds.isAutonomous()); // 2 Send autonomous/teleoperated
        dashboardData.add(false); // 3 Send isManualOverride
        dashboardData.add("This is mode string"); // 4 Send mode string
        dashboardData.add((rSpeed+lSpeed)/2); // 5 send average speed
        dashboardData.add(gyro.getAngle()); // 6 send current gyro heading
        dashboardData.add(22.2); // 7 Send genesisRobot.GetSteeringPositionDegrees()
        dashboardData.add(menuIndex); // 8 Send current menu index
        dashboardData.add(choiceIndex); // 9 Send current choice index
        dashboardData.add(isTankMode); // 10 Send tank vs. Ackerman (boolean)
        dashboardData.add(doorSpeed); // 12 Send current door speed (float)
        dashboardData.add(ballHandlerSpeed); // 13 Send current hall handler speed (float)
        dashboardData.add(blockString); // 14 Send the block string
        dashboardData.add(timeLeftMatch); // 15 Send the time remaining
        dashboardData.addBlobData(); // 16 Send compressed image data...
        // Perform run length encoding on encoded & packed data stream
        //#if 1
        // (void)EncodeBuffer((theBlobString[updateCount&1]), encodedBinaryData, 701);
        // dashboard.Printf("%s\t%s", str, encodedBinaryData);
        dashboardData.SendData();
    }

    // Calls robot.PeriodicService() roughly every 20 ms, forever.
    private class RobotServiceThread extends Thread {
        RobotTemplate robot;

        RobotServiceThread(RobotTemplate r) {
            robot = r;
        }

        public void run() {
            while (true) {
                robot.PeriodicService();
                try {
                    Thread.sleep(20);
                } catch (InterruptedException e) {
                    // NOTE(review): interrupt is swallowed and the loop keeps
                    // running; consider Thread.currentThread().interrupt() and
                    // exiting the loop.
                }
            }
        }
    }

    // Calls robot.TwilightZonePeriodicService() roughly every 40 ms, forever.
    // Starts itself from its own constructor.
    private class TwilightZoneThread extends Thread {
        RobotTemplate robot;

        TwilightZoneThread(RobotTemplate r) {
            robot = r;
            start();
        }

        public void run() {
            while (true) {
                robot.TwilightZonePeriodicService();
                try {
                    Thread.sleep(40);
                } catch (InterruptedException e) {
                    // NOTE(review): interrupt is swallowed; see RobotServiceThread.
                }
            }
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.security; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.catchThrowable; import static org.mockito.Mockito.mock; import java.io.NotSerializableException; import java.io.Serializable; import java.security.Principal; import javax.naming.NamingException; import org.apache.commons.lang3.SerializationUtils; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; import org.apache.geode.test.junit.categories.SecurityTest; /** * Unit tests for {@link NotAuthorizedException}. 
*/ @Category(SecurityTest.class) public class NotAuthorizedExceptionTest { private String message; private String causeMessage; private Object nonSerializableResolvedObj; private NamingException nonSerializableNamingException; private SerializableObject serializableResolvedObj; private NamingException serializableNamingException; private String principalName; private Principal nonSerializablePrincipal; private SerializablePrincipal serializablePrincipal; @Rule public TestName testName = new TestName(); @Before public void setUp() throws Exception { message = testName.getMethodName() + " message"; causeMessage = testName.getMethodName() + " cause message"; nonSerializableResolvedObj = new Object(); nonSerializableNamingException = new NamingException(causeMessage); nonSerializableNamingException.setResolvedObj(nonSerializableResolvedObj); serializableResolvedObj = new SerializableObject(testName.getMethodName()); serializableNamingException = new NamingException(causeMessage); serializableNamingException.setResolvedObj(serializableResolvedObj); principalName = "jsmith"; nonSerializablePrincipal = mock(Principal.class); serializablePrincipal = new SerializablePrincipal(principalName); assertPreconditions(); } private void assertPreconditions() { Throwable thrown = catchThrowable(() -> SerializationUtils.clone(nonSerializableNamingException)); assertThat(thrown).isNotNull(); assertThat(thrown.getCause()).isInstanceOf(NotSerializableException.class); thrown = catchThrowable(() -> SerializationUtils.clone(serializableNamingException)); assertThat(thrown).isNull(); assertThat(nonSerializableResolvedObj).isNotInstanceOf(Serializable.class); thrown = catchThrowable(() -> SerializationUtils.clone(serializableResolvedObj)); assertThat(thrown).isNull(); assertThat(nonSerializablePrincipal).isNotInstanceOf(Serializable.class); thrown = catchThrowable(() -> SerializationUtils.clone(serializablePrincipal)); assertThat(thrown).isNull(); } @Test public void isSerializable() { 
assertThat(NotAuthorizedException.class).isInstanceOf(Serializable.class); } @Test public void serializes() { NotAuthorizedException instance = new NotAuthorizedException(message); NotAuthorizedException cloned = (NotAuthorizedException) SerializationUtils.clone(instance); assertThat(cloned).hasMessage(message); } @Test public void serializesWithThrowable() { Throwable cause = new Exception(causeMessage); NotAuthorizedException instance = new NotAuthorizedException(message, cause); NotAuthorizedException cloned = (NotAuthorizedException) SerializationUtils.clone(instance); assertThat(cloned).hasMessage(message); assertThat(cloned).hasCause(cause); } @Test public void serializesWithNonSerializablePrincipal() { NotAuthorizedException instance = new NotAuthorizedException(message, nonSerializablePrincipal); assertThat(instance.getPrincipal()).isNotNull(); NotAuthorizedException cloned = (NotAuthorizedException) SerializationUtils.clone(instance); assertThat(cloned).hasMessage(message); assertThat(cloned.getPrincipal()).isNull(); } @Test public void serializesWithSerializablePrincipal() { NotAuthorizedException instance = new NotAuthorizedException(message, serializablePrincipal); NotAuthorizedException cloned = (NotAuthorizedException) SerializationUtils.clone(instance); assertThat(cloned).hasMessage(message); assertThat(cloned.getPrincipal()).isNotNull().isEqualTo(serializablePrincipal); } private static class SerializableObject implements Serializable { private final String name; SerializableObject(String name) { this.name = name; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } SerializableObject that = (SerializableObject) o; return name != null ? name.equals(that.name) : that.name == null; } @Override public int hashCode() { return name != null ? 
name.hashCode() : 0; } } private static class SerializablePrincipal implements Principal, Serializable { private final String name; SerializablePrincipal(String name) { this.name = name; } @Override public String getName() { return name; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } SerializablePrincipal that = (SerializablePrincipal) o; return name != null ? name.equals(that.name) : that.name == null; } @Override public int hashCode() { return name != null ? name.hashCode() : 0; } } }
/**
 * Copyright (C) FuseSource, Inc.
 * http://fusesource.com
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.fusesource.fabric.api.jmx;

import org.fusesource.fabric.utils.Strings;
import org.fusesource.fabric.zookeeper.ZkDefs;

import java.util.List;

/**
 * Represents the broker configuration of a logical broker profile which can be mapped to multiple containers.
 *
 * Note: {@link #profile()} and {@link #clientProfile()} lazily compute and cache their
 * value into the corresponding field on first call, so even {@link #toString()} (which
 * calls {@code profile()}) can mutate this object's state.
 */
public class MQBrokerConfigDTO {
    private BrokerKind kind;
    private String brokerName;
    private String profile;
    private String parentProfile;
    private String clientProfile;
    private String clientParentProfile;
    private List<String> properties;
    private String configUrl;
    private String data;
    private String group;
    private String[] networks;
    private String networksUserName;
    private String networksPassword;
    private String version;
    private String jvmOpts;
    private Integer replicas;
    private Integer minimumInstances;

    @Override
    public String toString() {
        return "MQBrokerConfigDTO{" +
                "group='" + group + '\'' +
                ", profile='" + profile() + '\'' +
                ", brokerName='" + brokerName + '\'' +
                ", kind='" + kind + '\'' +
                '}';
    }

    /**
     * Based on the kind of replication (N+1 or replicated etc) or based on configuration
     * return now many instances of the broker profile are required.
     *
     * Precedence: explicit replicas, then explicit minimumInstances, then a
     * kind-based default (StandAlone=1, Replicated=3), otherwise 2.
     */
    public int requiredInstances() {
        if (replicas != null) {
            return replicas.intValue();
        }
        if (minimumInstances != null) {
            return minimumInstances.intValue();
        }
        if (kind != null) {
            switch (kind) {
                case StandAlone:
                    return 1;
                case Replicated:
                    return 3;
            }
        }
        return 2;
    }

    /**
     * Returns the version if there is one configured or the default version
     */
    public String version() {
        String answer = getVersion();
        if (Strings.isNullOrBlank(answer)) {
            answer = ZkDefs.DEFAULT_VERSION;
        }
        return answer;
    }

    /**
     * Returns the group if there is one configured or 'default' for the default group
     */
    public String group() {
        String answer = getGroup();
        if (Strings.isNullOrBlank(answer)) {
            answer = "default";
        }
        return answer;
    }

    /**
     * Returns the kind of the broker or the default
     */
    public BrokerKind kind() {
        BrokerKind answer = getKind();
        if (answer == null) {
            answer = BrokerKind.DEFAULT;
        }
        return answer;
    }

    /**
     * Returns the configured profile name or defaults it to "mq-broker-$group.$brokerName".
     * Caches the computed value into the {@code profile} field.
     */
    public String profile() {
        if (Strings.isNullOrBlank(profile)) {
            profile = "mq-broker-" + group() + "." + getBrokerName();
        }
        return profile;
    }

    /**
     * Returns the client connection profile name or defaults it to "mq-client-$group".
     * Caches the computed value into the {@code clientProfile} field.
     */
    public String clientProfile() {
        if (Strings.isNullOrBlank(clientProfile)) {
            clientProfile = "mq-client-" + group();
        }
        return clientProfile;
    }

    // Properties
    //-------------------------------------------------------------------------
    public BrokerKind getKind() {
        return kind;
    }

    public void setKind(BrokerKind kind) {
        this.kind = kind;
    }

    /**
     * Return the Broker name
     */
    public String getBrokerName() {
        return brokerName;
    }

    public void setBrokerName(String brokerName) {
        this.brokerName = brokerName;
    }

    /**
     * Returns the profile name (which defaults to the broker name)
     */
    public String getProfile() {
        return profile;
    }

    public void setProfile(String profile) {
        this.profile = profile;
    }

    /**
     * Returns the parent profile to extend
     */
    public String getParentProfile() {
        return parentProfile;
    }

    public void setParentProfile(String parentProfile) {
        this.parentProfile = parentProfile;
    }

    /**
     * Returns the profile that clients use to connect to this group
     */
    public String getClientProfile() {
        return clientProfile;
    }

    public void setClientProfile(String clientProfile) {
        this.clientProfile = clientProfile;
    }

    /**
     * Returns the parent profile for the client profile. Defaults to "default"
     */
    public String getClientParentProfile() {
        return clientParentProfile;
    }

    public void setClientParentProfile(String clientParentProfile) {
        this.clientParentProfile = clientParentProfile;
    }

    /**
     * Returns additional properties to define in the profile
     */
    public List<String> getProperties() {
        return properties;
    }

    public void setProperties(List<String> properties) {
        this.properties = properties;
    }

    /**
     * Returns the configuration URL to use
     */
    public String getConfigUrl() {
        return configUrl;
    }

    public void setConfigUrl(String configUrl) {
        this.configUrl = configUrl;
    }

    /**
     * Returns the data directory for the broker
     */
    public String getData() {
        return data;
    }

    public void setData(String data) {
        this.data = data;
    }

    /**
     * Returns the broker group
     */
    public String getGroup() {
        return group;
    }

    public void setGroup(String group) {
        this.group = group;
    }

    /**
     * Returns the group names of the network of brokers to create
     */
    public String[] getNetworks() {
        return networks;
    }

    public void setNetworks(String[] networks) {
        this.networks = networks;
    }

    /**
     * Returns the broker networks username
     */
    public String getNetworksUserName() {
        return networksUserName;
    }

    public void setNetworksUserName(String networksUserName) {
        this.networksUserName = networksUserName;
    }

    /**
     * Returns the broker networks password
     */
    public String getNetworksPassword() {
        return networksPassword;
    }

    public void setNetworksPassword(String networksPassword) {
        this.networksPassword = networksPassword;
    }

    /**
     * The version id in the registry
     */
    public String getVersion() {
        return version;
    }

    public void setVersion(String version) {
        this.version = version;
    }

    /**
     * Returns the JVM options to pass to the containers
     */
    public String getJvmOpts() {
        return jvmOpts;
    }

    public void setJvmOpts(String jvmOpts) {
        this.jvmOpts = jvmOpts;
    }

    /**
     * Returns the number of replicas for a replicated message broker
     */
    public Integer getReplicas() {
        return replicas;
    }

    public void setReplicas(Integer replicas) {
        this.replicas = replicas;
    }

    /**
     * Returns the number of slaves if using master/slave rather than replicated or N+1
     */
    public Integer getMinimumInstances() {
        return minimumInstances;
    }

    public void setMinimumInstances(Integer minimumInstances) {
        this.minimumInstances = minimumInstances;
    }
}
/* * Copyright 2017 ThoughtWorks, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.thoughtworks.go.config.materials; import com.thoughtworks.go.config.*; import com.thoughtworks.go.config.validation.FilePathTypeValidator; import com.thoughtworks.go.domain.ConfigErrors; import com.thoughtworks.go.domain.materials.MaterialConfig; import com.thoughtworks.go.util.FilenameUtil; import com.thoughtworks.go.util.command.UrlArgument; import org.apache.commons.lang.StringUtils; import java.io.File; import java.io.IOException; import java.util.Map; import static com.thoughtworks.go.util.ExceptionUtils.bomb; /** * @understands a source control repository and its configuration */ public abstract class ScmMaterialConfig extends AbstractMaterialConfig implements ParamsAttributeAware { public static final String URL = "url"; public static final String USERNAME = "username"; @ConfigSubtag private Filter filter; @ConfigAttribute(value = "invertFilter", optional = true) private boolean invertFilter = false; @ConfigAttribute(value = "dest", allowNull = true) protected String folder; @ConfigAttribute(value = "autoUpdate", optional = true) private boolean autoUpdate = true; public static final String PASSWORD = "password"; public static final String ENCRYPTED_PASSWORD = "encryptedPassword"; public static final String PASSWORD_CHANGED = "passwordChanged"; public static final String AUTO_UPDATE = "autoUpdate"; public static final String FOLDER = "folder"; public static final String 
FILTER = "filterAsString"; public static final String INVERT_FILTER = "invertFilter"; public ScmMaterialConfig(String typeName) { super(typeName); } public ScmMaterialConfig(CaseInsensitiveString name, Filter filter, boolean invertFilter, String folder, boolean autoUpdate, String typeName, ConfigErrors errors) { super(typeName, name, errors); this.filter = filter; this.invertFilter = invertFilter; this.folder = folder; this.autoUpdate = autoUpdate; } @Override protected void appendPipelineUniqueCriteria(Map<String, Object> basicCriteria) { basicCriteria.put("dest", folder); } public File workingdir(File baseFolder) { if (getFolder() == null) { return baseFolder; } return new File(baseFolder, getFolder()); } //most of the material such as hg, git, p4 all print the file from the root without '/' //but subverion print it with '/', we standarize it here. look at the implementation of subversion as well. public boolean matches(String name, String regex) { if (regex.startsWith("/")) { regex = regex.substring(1); } return name.matches(regex); } public abstract String getUserName(); public abstract String getPassword(); public abstract String getEncryptedPassword(); public abstract boolean isCheckExternals(); public abstract String getUrl(); public abstract void setUrl(String url); protected abstract UrlArgument getUrlArgument(); protected abstract String getLocation(); public Filter filter() { if (filter == null) { return new Filter(); } return filter; } public String getFilterAsString() { return filter().getStringForDisplay(); } public Filter rawFilter() { return filter; } public void setFilter(Filter filter) { this.filter = filter; } public boolean isInvertFilter() { return invertFilter; } public boolean getInvertFilter() { return invertFilter; } public void setInvertFilter(boolean value) { invertFilter = value; } public String getDescription() { return getUriForDisplay(); } public String getUriForDisplay() { return getUrlArgument().forDisplay(); } public String 
getFolder() { return folder; } public String getDisplayName() { return name == null ? getUriForDisplay() : CaseInsensitiveString.str(name); } public boolean isAutoUpdate() { return autoUpdate; } public boolean getAutoUpdate() { return autoUpdate; } public void setAutoUpdate(boolean value) { autoUpdate = value; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } if (!super.equals(o)) { return false; } ScmMaterialConfig that = (ScmMaterialConfig) o; if (folder != null ? !folder.equals(that.folder) : that.folder != null) { return false; } return super.equals(that); } @Override public int hashCode() { int result = super.hashCode(); result = 31 * result + (folder != null ? folder.hashCode() : 0); return result; } @Override protected final void validateConcreteMaterial(ValidationContext validationContext) { validateNotOutsideSandbox(); validateDestFolderPath(); validateConcreteScmMaterial(); } public abstract void validateConcreteScmMaterial(); private void validateDestFolderPath() { if (StringUtils.isBlank(folder)) { return; } if (!new FilePathTypeValidator().isPathValid(folder)) { errors().add(FOLDER, FilePathTypeValidator.errorMessage("directory", getFolder())); } } public void setConfigAttributes(Object attributes) { super.setConfigAttributes(attributes); Map map = (Map) attributes; if (map.containsKey(FOLDER)) { String folder = (String) map.get(FOLDER); if (StringUtils.isBlank(folder)) { folder = null; } this.folder = folder; } this.setAutoUpdate("true".equals(map.get(AUTO_UPDATE))); this.setInvertFilter("true".equals(map.get(INVERT_FILTER))); if (map.containsKey(FILTER)) { String pattern = (String) map.get(FILTER); if (!StringUtils.isBlank(pattern)) { this.setFilter(Filter.fromDisplayString(pattern)); } else { this.setFilter(null); } } } public boolean isAutoUpdateStateMismatch(MaterialConfigs materialAutoUpdateMap) { if(materialAutoUpdateMap.size() > 1){ for (MaterialConfig 
otherMaterial : materialAutoUpdateMap) { if (otherMaterial.isAutoUpdate() != this.autoUpdate) { return true; } } } return false; } public void setAutoUpdateMismatchError() { addError(AUTO_UPDATE, String.format("Material of type %s (%s) is specified more than once in the configuration with different values for the autoUpdate attribute." + " All copies of this material must have the same value for this attribute.", getTypeForDisplay(), getDescription())); } public void setAutoUpdateMismatchErrorWithConfigRepo() { addError(AUTO_UPDATE, String.format("Material of type %s (%s) is specified as a configuration repository and pipeline material with disabled autoUpdate." + " All copies of this material must have autoUpdate enabled or configuration repository must be removed", getTypeForDisplay(), getDescription())); } public void setDestinationFolderError(String message) { addError(FOLDER, message); } public void validateNotSubdirectoryOf(String otherSCMMaterialFolder) { String myDirPath = this.getFolder(); if (myDirPath == null || otherSCMMaterialFolder == null) { return; } try { if (FilenameUtil.isNormalizedDirectoryPathInsideNormalizedParentDirectory(myDirPath, otherSCMMaterialFolder)) { addError(FOLDER, "Invalid Destination Directory. Every material needs a different destination directory and the directories should not be nested."); } } catch (IOException e) { throw bomb("Dest folder specification is not valid. " + e.getMessage()); } } public void validateDestinationDirectoryName(String otherSCMMaterialFolder) { if (folder != null && folder.equalsIgnoreCase(otherSCMMaterialFolder)) { addError(FOLDER, "The destination directory must be unique across materials."); } } private void validateNotOutsideSandbox() { String dest = this.getFolder(); if (dest == null) { return; } if (!(FilenameUtil.isNormalizedPathOutsideWorkingDir(dest))) { setDestinationFolderError(String.format("Dest folder '%s' is not valid. 
It must be a sub-directory of the working folder.", dest)); } } public Boolean isUsedInFetchArtifact(PipelineConfig pipelineConfig) { return false; } // TODO: Consider renaming this to dest since we use that word in the UI & Config public void setFolder(String folder) { this.folder = folder; } }
/* * Copyright (C) 2015 Vincent Mi * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.wallace.tools.view.round; import android.content.Context; import android.content.res.ColorStateList; import android.content.res.Resources; import android.content.res.TypedArray; import android.graphics.Bitmap; import android.graphics.ColorFilter; import android.graphics.Shader; import android.graphics.drawable.ColorDrawable; import android.graphics.drawable.Drawable; import android.graphics.drawable.LayerDrawable; import android.net.Uri; import android.support.annotation.ColorInt; import android.support.annotation.DimenRes; import android.support.annotation.DrawableRes; import android.util.AttributeSet; import android.util.Log; import com.wallace.tools.R; @SuppressWarnings("UnusedDeclaration") public class RoundedImageView extends android.support.v7.widget.AppCompatImageView { // Constants for tile mode attributes private static final int TILE_MODE_UNDEFINED = -2; private static final int TILE_MODE_CLAMP = 0; private static final int TILE_MODE_REPEAT = 1; private static final int TILE_MODE_MIRROR = 2; public static final String TAG = "RoundedImageView"; public static final float DEFAULT_RADIUS = 0f; public static final float DEFAULT_BORDER_WIDTH = 0f; public static final Shader.TileMode DEFAULT_TILE_MODE = Shader.TileMode.CLAMP; private static final ScaleType[] SCALE_TYPES = { ScaleType.MATRIX, ScaleType.FIT_XY, ScaleType.FIT_START, ScaleType.FIT_CENTER, ScaleType.FIT_END, 
ScaleType.CENTER, ScaleType.CENTER_CROP, ScaleType.CENTER_INSIDE }; private final float[] mCornerRadii = new float[] { DEFAULT_RADIUS, DEFAULT_RADIUS, DEFAULT_RADIUS, DEFAULT_RADIUS }; private Drawable mBackgroundDrawable; private ColorStateList mBorderColor = ColorStateList.valueOf(RoundedDrawable.DEFAULT_BORDER_COLOR); private float mBorderWidth = DEFAULT_BORDER_WIDTH; private ColorFilter mColorFilter = null; private boolean mColorMod = false; private Drawable mDrawable; private boolean mHasColorFilter = false; private boolean mIsOval = false; private boolean mMutateBackground = false; private int mResource; private int mBackgroundResource; private ScaleType mScaleType = ScaleType.FIT_CENTER; private Shader.TileMode mTileModeX = DEFAULT_TILE_MODE; private Shader.TileMode mTileModeY = DEFAULT_TILE_MODE; public RoundedImageView(Context context) { super(context); } public RoundedImageView(Context context, AttributeSet attrs) { this(context, attrs, 0); } public RoundedImageView(Context context, AttributeSet attrs, int defStyle) { super(context, attrs, defStyle); TypedArray a = context.obtainStyledAttributes(attrs, R.styleable.RoundedImageView, defStyle, 0); int index = a.getInt(R.styleable.RoundedImageView_android_scaleType, -1); if (index >= 0) { setScaleType(SCALE_TYPES[index]); } else { // default scaletype to FIT_CENTER setScaleType(ScaleType.FIT_CENTER); } float cornerRadiusOverride = a.getDimensionPixelSize(R.styleable.RoundedImageView_riv_corner_radius, -1); mCornerRadii[Corner.TOP_LEFT] = a.getDimensionPixelSize(R.styleable.RoundedImageView_riv_corner_radius_top_left, -1); mCornerRadii[Corner.TOP_RIGHT] = a.getDimensionPixelSize(R.styleable.RoundedImageView_riv_corner_radius_top_right, -1); mCornerRadii[Corner.BOTTOM_RIGHT] = a.getDimensionPixelSize(R.styleable.RoundedImageView_riv_corner_radius_bottom_right, -1); mCornerRadii[Corner.BOTTOM_LEFT] = a.getDimensionPixelSize(R.styleable.RoundedImageView_riv_corner_radius_bottom_left, -1); boolean any = false; 
for (int i = 0, len = mCornerRadii.length; i < len; i++) { if (mCornerRadii[i] < 0) { mCornerRadii[i] = 0f; } else { any = true; } } if (!any) { if (cornerRadiusOverride < 0) { cornerRadiusOverride = DEFAULT_RADIUS; } for (int i = 0, len = mCornerRadii.length; i < len; i++) { mCornerRadii[i] = cornerRadiusOverride; } } mBorderWidth = a.getDimensionPixelSize(R.styleable.RoundedImageView_riv_border_width, -1); if (mBorderWidth < 0) { mBorderWidth = DEFAULT_BORDER_WIDTH; } mBorderColor = a.getColorStateList(R.styleable.RoundedImageView_riv_border_color); if (mBorderColor == null) { mBorderColor = ColorStateList.valueOf(RoundedDrawable.DEFAULT_BORDER_COLOR); } mMutateBackground = a.getBoolean(R.styleable.RoundedImageView_riv_mutate_background, false); mIsOval = a.getBoolean(R.styleable.RoundedImageView_riv_oval, false); final int tileMode = a.getInt(R.styleable.RoundedImageView_riv_tile_mode, TILE_MODE_UNDEFINED); if (tileMode != TILE_MODE_UNDEFINED) { setTileModeX(parseTileMode(tileMode)); setTileModeY(parseTileMode(tileMode)); } final int tileModeX = a.getInt(R.styleable.RoundedImageView_riv_tile_mode_x, TILE_MODE_UNDEFINED); if (tileModeX != TILE_MODE_UNDEFINED) { setTileModeX(parseTileMode(tileModeX)); } final int tileModeY = a.getInt(R.styleable.RoundedImageView_riv_tile_mode_y, TILE_MODE_UNDEFINED); if (tileModeY != TILE_MODE_UNDEFINED) { setTileModeY(parseTileMode(tileModeY)); } updateDrawableAttrs(); updateBackgroundDrawableAttrs(true); if (mMutateBackground) { // when setBackground() is called by View constructor, mMutateBackground is not loaded from the attribute, // so it's false by default, what doesn't allow to create the RoundedDrawable. At this point, after load // mMutateBackground and updated BackgroundDrawable to RoundedDrawable, the View's background drawable needs to // be changed to this new drawable. 
//noinspection deprecation super.setBackgroundDrawable(mBackgroundDrawable); } a.recycle(); } private static Shader.TileMode parseTileMode(int tileMode) { switch (tileMode) { case TILE_MODE_CLAMP: return Shader.TileMode.CLAMP; case TILE_MODE_REPEAT: return Shader.TileMode.REPEAT; case TILE_MODE_MIRROR: return Shader.TileMode.MIRROR; default: return null; } } @Override protected void drawableStateChanged() { super.drawableStateChanged(); invalidate(); } @Override public ScaleType getScaleType() { return mScaleType; } @Override public void setScaleType(ScaleType scaleType) { assert scaleType != null; if (mScaleType != scaleType) { mScaleType = scaleType; switch (scaleType) { case CENTER: case CENTER_CROP: case CENTER_INSIDE: case FIT_CENTER: case FIT_START: case FIT_END: case FIT_XY: super.setScaleType(ScaleType.FIT_XY); break; default: super.setScaleType(scaleType); break; } updateDrawableAttrs(); updateBackgroundDrawableAttrs(false); invalidate(); } } @Override public void setImageDrawable(Drawable drawable) { mResource = 0; mDrawable = RoundedDrawable.fromDrawable(drawable); updateDrawableAttrs(); super.setImageDrawable(mDrawable); } @Override public void setImageBitmap(Bitmap bm) { mResource = 0; mDrawable = RoundedDrawable.fromBitmap(bm); updateDrawableAttrs(); super.setImageDrawable(mDrawable); } @Override public void setImageResource(@DrawableRes int resId) { if (mResource != resId) { mResource = resId; mDrawable = resolveResource(); updateDrawableAttrs(); super.setImageDrawable(mDrawable); } } @Override public void setImageURI(Uri uri) { super.setImageURI(uri); setImageDrawable(getDrawable()); } private Drawable resolveResource() { Resources rsrc = getResources(); if (rsrc == null) { return null; } Drawable d = null; if (mResource != 0) { try { d = rsrc.getDrawable(mResource); } catch (Exception e) { Log.w(TAG, "Unable to find resource: " + mResource, e); // Don't try again. 
mResource = 0; } } return RoundedDrawable.fromDrawable(d); } @Override public void setBackground(Drawable background) { setBackgroundDrawable(background); } @Override public void setBackgroundResource(@DrawableRes int resId) { if (mBackgroundResource != resId) { mBackgroundResource = resId; mBackgroundDrawable = resolveBackgroundResource(); setBackgroundDrawable(mBackgroundDrawable); } } @Override public void setBackgroundColor(int color) { mBackgroundDrawable = new ColorDrawable(color); setBackgroundDrawable(mBackgroundDrawable); } private Drawable resolveBackgroundResource() { Resources rsrc = getResources(); if (rsrc == null) { return null; } Drawable d = null; if (mBackgroundResource != 0) { try { d = rsrc.getDrawable(mBackgroundResource); } catch (Exception e) { Log.w(TAG, "Unable to find resource: " + mBackgroundResource, e); // Don't try again. mBackgroundResource = 0; } } return RoundedDrawable.fromDrawable(d); } private void updateDrawableAttrs() { updateAttrs(mDrawable, mScaleType); } private void updateBackgroundDrawableAttrs(boolean convert) { if (mMutateBackground) { if (convert) { mBackgroundDrawable = RoundedDrawable.fromDrawable(mBackgroundDrawable); } updateAttrs(mBackgroundDrawable, ScaleType.FIT_XY); } } @Override public void setColorFilter(ColorFilter cf) { if (mColorFilter != cf) { mColorFilter = cf; mHasColorFilter = true; mColorMod = true; applyColorMod(); invalidate(); } } private void applyColorMod() { // Only mutate and apply when modifications have occurred. This should // not reset the mColorMod flag, since these filters need to be // re-applied if the Drawable is changed. 
if (mDrawable != null && mColorMod) { mDrawable = mDrawable.mutate(); if (mHasColorFilter) { mDrawable.setColorFilter(mColorFilter); } //mDrawable.setXfermode(mXfermode); //mDrawable.setAlpha(mAlpha * mViewAlphaScale >> 8); } } private void updateAttrs(Drawable drawable, ScaleType scaleType) { if (drawable == null) { return; } if (drawable instanceof RoundedDrawable) { ((RoundedDrawable) drawable) .setScaleType(scaleType) .setBorderWidth(mBorderWidth) .setBorderColor(mBorderColor) .setOval(mIsOval) .setTileModeX(mTileModeX) .setTileModeY(mTileModeY); if (mCornerRadii != null) { ((RoundedDrawable) drawable).setCornerRadius( mCornerRadii[Corner.TOP_LEFT], mCornerRadii[Corner.TOP_RIGHT], mCornerRadii[Corner.BOTTOM_RIGHT], mCornerRadii[Corner.BOTTOM_LEFT]); } applyColorMod(); } else if (drawable instanceof LayerDrawable) { // loop through layers to and set drawable attrs LayerDrawable ld = ((LayerDrawable) drawable); for (int i = 0, layers = ld.getNumberOfLayers(); i < layers; i++) { updateAttrs(ld.getDrawable(i), scaleType); } } } @Override @Deprecated public void setBackgroundDrawable(Drawable background) { mBackgroundDrawable = background; updateBackgroundDrawableAttrs(true); //noinspection deprecation super.setBackgroundDrawable(mBackgroundDrawable); } /** * @return the largest corner radius. */ public float getCornerRadius() { return getMaxCornerRadius(); } /** * @return the largest corner radius. */ public float getMaxCornerRadius() { float maxRadius = 0; for (float r : mCornerRadii) { maxRadius = Math.max(r, maxRadius); } return maxRadius; } /** * Get the corner radius of a specified corner. * * @param corner the corner. * @return the radius. */ public float getCornerRadius(@Corner int corner) { return mCornerRadii[corner]; } /** * Set all the corner radii from a dimension resource id. * * @param resId dimension resource id of radii. 
*/ public void setCornerRadiusDimen(@DimenRes int resId) { float radius = getResources().getDimension(resId); setCornerRadius(radius, radius, radius, radius); } /** * Set the corner radius of a specific corner from a dimension resource id. * * @param corner the corner to set. * @param resId the dimension resource id of the corner radius. */ public void setCornerRadiusDimen(@Corner int corner, @DimenRes int resId) { setCornerRadius(corner, getResources().getDimensionPixelSize(resId)); } /** * Set the corner radii of all corners in px. * * @param radius the radius to set. */ public void setCornerRadius(float radius) { setCornerRadius(radius, radius, radius, radius); } /** * Set the corner radius of a specific corner in px. * * @param corner the corner to set. * @param radius the corner radius to set in px. */ public void setCornerRadius(@Corner int corner, float radius) { if (mCornerRadii[corner] == radius) { return; } mCornerRadii[corner] = radius; updateDrawableAttrs(); updateBackgroundDrawableAttrs(false); invalidate(); } /** * Set the corner radii of each corner individually. Currently only one unique nonzero value is * supported. * * @param topLeft radius of the top left corner in px. * @param topRight radius of the top right corner in px. * @param bottomRight radius of the bottom right corner in px. * @param bottomLeft radius of the bottom left corner in px. 
*/ public void setCornerRadius(float topLeft, float topRight, float bottomLeft, float bottomRight) { if (mCornerRadii[Corner.TOP_LEFT] == topLeft && mCornerRadii[Corner.TOP_RIGHT] == topRight && mCornerRadii[Corner.BOTTOM_RIGHT] == bottomRight && mCornerRadii[Corner.BOTTOM_LEFT] == bottomLeft) { return; } mCornerRadii[Corner.TOP_LEFT] = topLeft; mCornerRadii[Corner.TOP_RIGHT] = topRight; mCornerRadii[Corner.BOTTOM_LEFT] = bottomLeft; mCornerRadii[Corner.BOTTOM_RIGHT] = bottomRight; updateDrawableAttrs(); updateBackgroundDrawableAttrs(false); invalidate(); } public float getBorderWidth() { return mBorderWidth; } public void setBorderWidth(@DimenRes int resId) { setBorderWidth(getResources().getDimension(resId)); } public void setBorderWidth(float width) { if (mBorderWidth == width) { return; } mBorderWidth = width; updateDrawableAttrs(); updateBackgroundDrawableAttrs(false); invalidate(); } @ColorInt public int getBorderColor() { return mBorderColor.getDefaultColor(); } public void setBorderColor(@ColorInt int color) { setBorderColor(ColorStateList.valueOf(color)); } public ColorStateList getBorderColors() { return mBorderColor; } public void setBorderColor(ColorStateList colors) { if (mBorderColor.equals(colors)) { return; } mBorderColor = (colors != null) ? 
colors : ColorStateList.valueOf(RoundedDrawable.DEFAULT_BORDER_COLOR); updateDrawableAttrs(); updateBackgroundDrawableAttrs(false); if (mBorderWidth > 0) { invalidate(); } } public boolean isOval() { return mIsOval; } public void setOval(boolean oval) { mIsOval = oval; updateDrawableAttrs(); updateBackgroundDrawableAttrs(false); invalidate(); } public Shader.TileMode getTileModeX() { return mTileModeX; } public void setTileModeX(Shader.TileMode tileModeX) { if (this.mTileModeX == tileModeX) { return; } this.mTileModeX = tileModeX; updateDrawableAttrs(); updateBackgroundDrawableAttrs(false); invalidate(); } public Shader.TileMode getTileModeY() { return mTileModeY; } public void setTileModeY(Shader.TileMode tileModeY) { if (this.mTileModeY == tileModeY) { return; } this.mTileModeY = tileModeY; updateDrawableAttrs(); updateBackgroundDrawableAttrs(false); invalidate(); } public boolean mutatesBackground() { return mMutateBackground; } public void mutateBackground(boolean mutate) { if (mMutateBackground == mutate) { return; } mMutateBackground = mutate; updateBackgroundDrawableAttrs(true); invalidate(); } }
/*
 * Copyright 2013 The Closure Compiler Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.javascript.jscomp.newtypes;

import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Multimap;
import com.google.common.collect.Sets;

import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.TreeSet;

/**
 * A JS type, represented as a union of: a bitmask of scalar/marker types
 * (the mask constants below), an optional set of object types, an optional
 * type variable name, and an optional set of enum types (see the abstract
 * getters and makeType).
 *
 * @author blickly@google.com (Ben Lickly)
 * @author dimvar@google.com (Dimitris Vardoulakis)
 */
public abstract class JSType {
  // Each bit in the mask encodes the presence of one primitive type in the union.
  protected static final int BOTTOM_MASK = 0x0;
  protected static final int TYPEVAR_MASK = 0x1;
  protected static final int NON_SCALAR_MASK = 0x2;
  protected static final int ENUM_MASK = 0x4;
  protected static final int TRUE_MASK = 0x8;  // These two print out
  protected static final int FALSE_MASK = 0x10; // as 'boolean'
  protected static final int NULL_MASK = 0x20;
  protected static final int NUMBER_MASK = 0x40;
  protected static final int STRING_MASK = 0x80;
  protected static final int UNDEFINED_MASK = 0x100;
  // First unused bit; anything at or above this is a marker, not a member type.
  protected static final int END_MASK = UNDEFINED_MASK * 2;
  // When either of the next two bits is set, the rest of the type isn't
  // guaranteed to be in a consistent state.
  protected static final int TRUTHY_MASK = 0x200;
  protected static final int FALSY_MASK = 0x400;
  // Room to grow.
  protected static final int UNKNOWN_MASK = 0x7fffffff; // @type {?}
  protected static final int TOP_MASK = 0xffffffff; // @type {*}

  protected static final int BOOLEAN_MASK = TRUE_MASK | FALSE_MASK;
  protected static final int TOP_SCALAR_MASK =
      NUMBER_MASK | STRING_MASK | BOOLEAN_MASK | NULL_MASK | UNDEFINED_MASK;

  // Used only for development
  public static boolean mockToString = false;

  // Central factory: normalizes the mask against objs/enums, then picks the
  // cheapest concrete representation (MaskType, ObjsType, NullableObjsType, UnionType).
  // NOTE(review): the mask fix-up must happen before the representation checks below —
  // the order of these steps is load-bearing.
  private static JSType makeType(int mask, ImmutableSet<ObjectType> objs,
      String typeVar, ImmutableSet<EnumType> enums) {
    // Fix up the mask for objects and enums
    if (enums != null) {
      if (enums.isEmpty()) {
        mask &= ~ENUM_MASK;
      } else {
        mask |= ENUM_MASK;
      }
    }
    if (objs != null) {
      if (objs.isEmpty()) {
        mask &= ~NON_SCALAR_MASK;
      } else {
        mask |= NON_SCALAR_MASK;
      }
    }
    // Pure-mask types (no objects, no typevar, no enums) get the lightweight MaskType.
    if (objs == null && typeVar == null && enums == null) {
      return MaskType.make(mask);
    }
    // A type containing an uninhabitable object collapses to BOTTOM.
    if (!JSType.isInhabitable(objs)) {
      return BOTTOM;
    }
    if (mask == NON_SCALAR_MASK) {
      return new ObjsType(objs);
    }
    if (mask == (NON_SCALAR_MASK | NULL_MASK)) {
      return new NullableObjsType(objs);
    }
    return new UnionType(mask, objs, typeVar, enums);
  }

  private static JSType makeType(int mask) {
    return makeType(mask, null, null, null);
  }

  // The four components of the union; subclasses store only what they need.
  protected abstract int getMask();

  protected abstract ImmutableSet<ObjectType> getObjs();

  protected abstract String getTypeVar();

  protected abstract ImmutableSet<EnumType> getEnums();

  // Factory method for wrapping a function in a JSType
  public static JSType fromFunctionType(FunctionType fn) {
    return makeType(
        NON_SCALAR_MASK, ImmutableSet.of(ObjectType.fromFunction(fn)), null, null);
  }

  public static JSType fromObjectType(ObjectType obj) {
    return makeType(NON_SCALAR_MASK, ImmutableSet.of(obj), null, null);
  }

  public static JSType fromTypeVar(String template) {
    return makeType(TYPEVAR_MASK, null, template, null);
  }

  static JSType fromEnum(EnumType e) {
    return makeType(ENUM_MASK, null, null, ImmutableSet.of(e));
  }

  // Internal consistency check: each mask bit must agree with the presence/absence
  // of the corresponding component (objs, enums, typevar).
  boolean isValidType() {
    if (isUnknown() || isTop()) {
      return true;
    }
    if ((getMask() & NON_SCALAR_MASK) != 0 &&
        (getObjs() == null || getObjs().isEmpty())) {
      return false;
    }
    if ((getMask() & NON_SCALAR_MASK) == 0 && getObjs() != null) {
      return false;
    }
    if ((getMask() & ENUM_MASK) != 0 &&
        (getEnums() == null || getEnums().isEmpty())) {
      return false;
    }
    if ((getMask() & ENUM_MASK) == 0 && getEnums() != null) {
      return false;
    }
    return ((getMask() & TYPEVAR_MASK) != 0) == (getTypeVar() != null);
  }

  // Canonical singletons for the common pure-mask types.
  public static final JSType BOOLEAN = new MaskType(TRUE_MASK | FALSE_MASK);
  public static final JSType BOTTOM = new MaskType(BOTTOM_MASK);
  public static final JSType FALSE_TYPE = new MaskType(FALSE_MASK);
  public static final JSType FALSY = new MaskType(FALSY_MASK);
  public static final JSType NULL = new MaskType(NULL_MASK);
  public static final JSType NUMBER = new MaskType(NUMBER_MASK);
  public static final JSType STRING = new MaskType(STRING_MASK);
  public static final JSType TOP = new MaskType(TOP_MASK);
  public static final JSType TOP_SCALAR = makeType(TOP_SCALAR_MASK);
  public static final JSType TRUE_TYPE = new MaskType(TRUE_MASK);
  public static final JSType TRUTHY = new MaskType(TRUTHY_MASK);
  public static final JSType UNDEFINED = new MaskType(UNDEFINED_MASK);
  public static final JSType UNKNOWN = new MaskType(UNKNOWN_MASK);

  public static final JSType TOP_OBJECT = fromObjectType(ObjectType.TOP_OBJECT);
  public static final JSType TOP_STRUCT = fromObjectType(ObjectType.TOP_STRUCT);
  public static final JSType TOP_DICT = fromObjectType(ObjectType.TOP_DICT);
  // Lazily initialized by topFunction()/qmarkFunction().
  // NOTE(review): this lazy init is not synchronized — presumably single-threaded use; confirm.
  private static JSType TOP_FUNCTION = null;
  private static JSType QMARK_FUNCTION = null;

  // Some commonly used types
  public static final JSType NULL_OR_UNDEF =
      new MaskType(NULL_MASK | UNDEFINED_MASK);
  public static final JSType NUM_OR_STR = new MaskType(NUMBER_MASK | STRING_MASK);

  // Explicitly contains most types. Used only by removeType.
  private static final JSType ALMOST_TOP = makeType(
      TRUE_MASK | FALSE_MASK | NUMBER_MASK | STRING_MASK | NULL_MASK |
      UNDEFINED_MASK | NON_SCALAR_MASK,
      ImmutableSet.of(ObjectType.TOP_OBJECT), null, null);

  public static JSType topFunction() {
    if (TOP_FUNCTION == null) {
      TOP_FUNCTION = fromFunctionType(FunctionType.TOP_FUNCTION);
    }
    return TOP_FUNCTION;
  }

  // Corresponds to Function, which is a subtype and supertype of all functions.
  static JSType qmarkFunction() {
    if (QMARK_FUNCTION == null) {
      QMARK_FUNCTION = fromFunctionType(FunctionType.QMARK_FUNCTION);
    }
    return QMARK_FUNCTION;
  }

  public boolean isTop() {
    return TOP_MASK == getMask();
  }

  public boolean isBottom() {
    return BOTTOM_MASK == getMask();
  }

  public boolean isUnknown() {
    return UNKNOWN_MASK == getMask();
  }

  public boolean isTruthy() {
    return TRUTHY_MASK == getMask() || TRUE_MASK == getMask();
  }

  public boolean isFalsy() {
    return FALSY_MASK == getMask() || FALSE_MASK == getMask();
  }

  // True when the mask is a non-empty subset of {TRUE, FALSE} and nothing else.
  public boolean isBoolean() {
    return (getMask() & ~BOOLEAN_MASK) == 0 && (getMask() & BOOLEAN_MASK) != 0;
  }

  // True when the mask is non-empty and contains only NULL and/or UNDEFINED bits.
  public boolean isNullOrUndef() {
    int nullUndefMask = NULL_MASK | UNDEFINED_MASK;
    return getMask() != 0 && (getMask() | nullUndefMask) == nullUndefMask;
  }

  public boolean isScalar() {
    return getMask() == NUMBER_MASK ||
        getMask() == STRING_MASK ||
        getMask() == NULL_MASK ||
        getMask() == UNDEFINED_MASK ||
        isBoolean();
  }

  // True iff there exists a value that can have this type
  private static boolean isInhabitable(Set<ObjectType> objs) {
    if (objs == null) {
      return true;
    }
    for (ObjectType obj : objs) {
      if (!obj.isInhabitable()) {
        return false;
      }
    }
    return true;
  }

  public boolean hasNonScalar() {
    return getObjs() != null || EnumType.hasNonScalar(getEnums());
  }

  public boolean isNullable() {
    return (getMask() & NULL_MASK) != 0;
  }

  // True when the type is exactly a single type variable (no other union members).
  boolean isTypeVariable() {
    return (getMask() & TYPEVAR_MASK) != 0 && (getMask() & ~TYPEVAR_MASK) == 0;
  }

  public boolean isRecordType() {
    return getMask() == NON_SCALAR_MASK && getObjs().size() == 1 &&
        Iterables.getOnlyElement(getObjs()).isRecordType();
  }

  public boolean isStruct() {
    if (getObjs() == null) {
      return false;
    }
    for (ObjectType objType : getObjs()) {
      if (objType.isStruct()) {
        return true;
      }
    }
    return false;
  }

  // True only when at least one object is a loose struct and none is a strict struct.
  public boolean isLooseStruct() {
    if (getObjs() == null) {
      return false;
    }
    boolean foundLooseStruct = false;
    boolean foundNonLooseStruct = false;
    for (ObjectType objType : getObjs()) {
      if (objType.isLooseStruct()) {
        foundLooseStruct = true;
      } else if (objType.isStruct()) {
        foundNonLooseStruct = true;
      }
    }
    return foundLooseStruct && !foundNonLooseStruct;
  }

  public boolean isLoose() {
    ImmutableSet<ObjectType> objs = getObjs();
    return objs != null && objs.size() == 1
        && Iterables.getOnlyElement(objs).isLoose();
  }

  public boolean isDict() {
    if (getObjs() == null) {
      return false;
    }
    for (ObjectType objType : getObjs()) {
      if (objType.isDict()) {
        return true;
      }
    }
    return false;
  }

  public boolean isEnumElement() {
    return getMask() == ENUM_MASK && getEnums().size() == 1;
  }

  // A union is anything with more than one member after excluding the special cases.
  public boolean isUnion() {
    if (isBottom() || isTop() || isUnknown() ||
        isScalar() || isTypeVariable() || isEnumElement()) {
      return false;
    }
    return !(getMask() == NON_SCALAR_MASK && getObjs().size() == 1);
  }

  // Scalar compatibility used by comparisons; bottom/unknown are compatible with anything.
  public static boolean areCompatibleScalarTypes(JSType lhs, JSType rhs) {
    Preconditions.checkArgument(
        lhs.isSubtypeOf(TOP_SCALAR) || rhs.isSubtypeOf(TOP_SCALAR));
    return lhs.isBottom() || rhs.isBottom() ||
        lhs.isUnknown() || rhs.isUnknown() ||
        (lhs.isBoolean() && rhs.isBoolean()) || lhs.equals(rhs);
  }

  // Only makes sense for a JSType that represents a single enum
  public JSType getEnumeratedType() {
    return isEnumElement() ?
        Iterables.getOnlyElement(getEnums()).getEnumeratedType() : null;
  }

  // Null-tolerant join: a null argument acts as the identity element.
  static JSType nullAcceptingJoin(JSType t1, JSType t2) {
    if (t1 == null) {
      return t2;
    } else if (t2 == null) {
      return t1;
    }
    return JSType.join(t1, t2);
  }

  // When joining w/ TOP or UNKNOWN, avoid setting more fields on them, eg, obj.
// Least upper bound of two types in the type lattice.
public static JSType join(JSType lhs, JSType rhs) {
  if (lhs.isTop() || rhs.isTop()) {
    return TOP;
  } else if (lhs.isUnknown() || rhs.isUnknown()) {
    return UNKNOWN;
  } else if (lhs.isBottom()) {
    return rhs;
  } else if (rhs.isBottom()) {
    return lhs;
  }
  if (lhs.getTypeVar() != null && rhs.getTypeVar() != null
      && !lhs.getTypeVar().equals(rhs.getTypeVar())) {
    // For now return ? when joining two type vars. This is probably uncommon.
    return UNKNOWN;
  }
  int newMask = lhs.getMask() | rhs.getMask();
  ImmutableSet<ObjectType> newObjs =
      ObjectType.joinSets(lhs.getObjs(), rhs.getObjs());
  String newTypevar =
      lhs.getTypeVar() != null ? lhs.getTypeVar() : rhs.getTypeVar();
  ImmutableSet<EnumType> newEnums =
      EnumType.union(lhs.getEnums(), rhs.getEnums());
  if (newEnums == null) {
    return makeType(newMask, newObjs, newTypevar, null);
  }
  // Normalize enums against the join of everything else, so enums already
  // subsumed by the rest of the union can be dropped.
  JSType tmpJoin = makeType(newMask & ~ENUM_MASK, newObjs, newTypevar, null);
  return makeType(newMask, newObjs, newTypevar,
      EnumType.normalizeForJoin(newEnums, tmpJoin));
}

// Replaces type variables in this type with their bindings from
// concreteTypes; an unbound type variable is kept as itself.
public JSType substituteGenerics(Map<String, JSType> concreteTypes) {
  if (isTop() || isUnknown()
      || getObjs() == null && getTypeVar() == null
      || concreteTypes.isEmpty()) {
    return this;
  }
  ImmutableSet<ObjectType> newObjs = null;
  if (getObjs() != null) {
    ImmutableSet.Builder<ObjectType> builder = ImmutableSet.builder();
    for (ObjectType obj : getObjs()) {
      builder.add(obj.substituteGenerics(concreteTypes));
    }
    newObjs = builder.build();
  }
  // Rebuild the type without the typevar bit, then join the substitution
  // result (or the typevar itself) back in.
  JSType current = makeType(
      getMask() & ~TYPEVAR_MASK, newObjs, null, getEnums());
  if ((getMask() & TYPEVAR_MASK) != 0) {
    current = JSType.join(current, concreteTypes.containsKey(getTypeVar()) ?
        concreteTypes.get(getTypeVar()) : fromTypeVar(getTypeVar()));
  }
  return current;
}

// Adds type to the bindings of typeParam, first folding it together (via
// unifyUnknowns) with any existing bindings it unifies with.
private static void updateTypemap(
    Multimap<String, JSType> typeMultimap,
    String typeParam, JSType type) {
  Set<JSType> typesToRemove = new HashSet<>();
  for (JSType other : typeMultimap.get(typeParam)) {
    JSType unified = unifyUnknowns(type, other);
    if (unified != null) {
      // Can't remove elms while iterating over the collection, so do it later
      typesToRemove.add(other);
      type = unified;
    }
  }
  for (JSType typeToRemove : typesToRemove) {
    typeMultimap.remove(typeParam, typeToRemove);
  }
  typeMultimap.put(typeParam, type);
}

// If either boolean singleton bit is present, widen to the full boolean type.
private static int promoteBoolean(int mask) {
  if ((mask & (TRUE_MASK | FALSE_MASK)) != 0) {
    return mask | TRUE_MASK | FALSE_MASK;
  }
  return mask;
}

/**
 * Unify the two types symmetrically, given that we have already instantiated
 * the type variables of interest in {@code t1} and {@code t2}, treating
 * JSType.UNKNOWN as a "hole" to be filled.
 * @return The unified type, or null if unification fails
 */
static JSType unifyUnknowns(JSType t1, JSType t2) {
  if (t1.isUnknown()) {
    return t2;
  } else if (t2.isUnknown()) {
    return t1;
  } else if (t1.isTop() && t2.isTop()) {
    return TOP;
  } else if (t1.isTop() || t2.isTop()) {
    return null;
  }
  // Enums must be absent on both sides or identical on both sides.
  ImmutableSet<EnumType> newEnums = null;
  if (t1.getEnums() == null) {
    if (t2.getEnums() != null) {
      return null;
    }
    newEnums = null;
  } else if (t2.getEnums() == null) {
    return null;
  } else if (!t1.getEnums().equals(t2.getEnums())) {
    return null;
  } else {
    newEnums = t1.getEnums();
  }
  int t1Mask = promoteBoolean(t1.getMask());
  int t2Mask = promoteBoolean(t2.getMask());
  if (t1Mask != t2Mask
      || !Objects.equals(t1.getTypeVar(), t2.getTypeVar())) {
    return null;
  }
  // All scalar types are equal
  if ((t1Mask & NON_SCALAR_MASK) == 0) {
    return t1;
  }
  if (t1.getObjs().size() != t2.getObjs().size()) {
    return null;
  }
  // Every object of t1 must unify with at least one object of t2, and every
  // object of t2 must be consumed by some unification.
  Set<ObjectType> ununified = new HashSet<>(t2.getObjs());
  Set<ObjectType> unifiedObjs = new HashSet<>();
  for (ObjectType objType1 : t1.getObjs()) {
    ObjectType unified = objType1;
    boolean hasUnified = false;
    for (ObjectType objType2 : t2.getObjs()) {
      ObjectType tmp = ObjectType.unifyUnknowns(unified, objType2);
      if (tmp != null) {
        hasUnified = true;
        ununified.remove(objType2);
        unified = tmp;
      }
    }
    if (!hasUnified) {
      return null;
    }
    unifiedObjs.add(unified);
  }
  if (!ununified.isEmpty()) {
    return null;
  }
  return makeType(t1Mask, ImmutableSet.copyOf(unifiedObjs),
      t1.getTypeVar(), newEnums);
}

/**
 * Unify {@code this}, which may contain free type variables,
 * with {@code other}, a concrete type, modifying the supplied
 * {@code typeMultimap} to add any new template variable type bindings.
 * @return Whether unification succeeded
 */
public boolean unifyWith(JSType other, List<String> typeParameters,
    Multimap<String, JSType> typeMultimap) {
  if (this.isUnknown()) {
    return true;
  } else if (this.isTop()) {
    return other.isTop();
  } else if (getMask() == TYPEVAR_MASK
      && typeParameters.contains(getTypeVar())) {
    // this is a bare type variable we are solving for: bind it to other.
    updateTypemap(typeMultimap, getTypeVar(),
        makeType(promoteBoolean(other.getMask()), other.getObjs(),
            other.getTypeVar(), other.getEnums()));
    return true;
  } else if (other.isTop()) {
    return false;
  } else if (other.isUnknown()) {
    return true;
  }
  // Enums of other not matched by enums of this; null means "none left over".
  Set<EnumType> ununifiedEnums = null;
  if (getEnums() == null) {
    ununifiedEnums = other.getEnums();
  } else if (other.getEnums() == null) {
    return false;
  } else {
    ununifiedEnums = new HashSet<>();
    for (EnumType e : getEnums()) {
      if (!other.getEnums().contains(e)) {
        return false;
      }
    }
    for (EnumType e : other.getEnums()) {
      if (!getEnums().contains(e)) {
        ununifiedEnums.add(e);
      }
    }
    if (ununifiedEnums.isEmpty()) {
      ununifiedEnums = null;
    }
  }
  Set<ObjectType> ununified = ImmutableSet.of();
  if (other.getObjs() != null) {
    ununified = new HashSet<>(other.getObjs());
  }
  // Each obj in this must unify w/ exactly one obj in other.
  // However, we don't check that two different objects of this don't unify
  // with the same other type.
  if (getObjs() != null) {
    if (other.getObjs() == null) {
      return false;
    }
    for (ObjectType targetObj : getObjs()) {
      boolean hasUnified = false;
      for (ObjectType sourceObj : other.getObjs()) {
        if (targetObj.unifyWith(sourceObj, typeParameters, typeMultimap)) {
          ununified.remove(sourceObj);
          hasUnified = true;
        }
      }
      if (!hasUnified) {
        return false;
      }
    }
  }
  String thisTypevar = getTypeVar();
  String otherTypevar = other.getTypeVar();
  if (thisTypevar == null) {
    return otherTypevar == null && getMask() == other.getMask();
  } else if (!typeParameters.contains(thisTypevar)) {
    return thisTypevar.equals(otherTypevar)
        && getMask() == other.getMask();
  } else {
    // this is (T | ...)
    // Everything in other that is not matched by the rest of this is
    // collected into the binding for T.
    int templateMask = 0;
    int thisScalarBits = getMask() & ~NON_SCALAR_MASK & ~TYPEVAR_MASK;
    int otherScalarBits = other.getMask() & ~NON_SCALAR_MASK & ~TYPEVAR_MASK;
    templateMask |= otherScalarBits & ~thisScalarBits;
    if (templateMask == BOTTOM_MASK) {
      // nothing left in other to assign to thisTypevar
      return false;
    }
    JSType templateType = makeType(
        promoteBoolean(templateMask),
        ImmutableSet.copyOf(ununified),
        otherTypevar,
        ununifiedEnums == null ? null : ImmutableSet.copyOf(ununifiedEnums));
    updateTypemap(typeMultimap, getTypeVar(), templateType);
    // We don't do fancy unification, eg,
    // T|number doesn't unify with TOP
    // Foo<number>|Foo<string> doesn't unify with Foo<T>|Foo<string>
    return true;
  }
}

// TODO(dimvar): Now that we don't have locations, we may be able to combine
// specialize and meet into a single function.
// Specialize might still not be symmetric however, b/c of truthy/falsy.
// Narrows this type using information from other (e.g. from a condition).
// Unlike meet, specialize handles abstract truthy/falsy specially and keeps
// this unchanged when other carries no information.
public JSType specialize(JSType other) {
  if (other.isTop() || other.isUnknown()) {
    return this;
  } else if (other.isTruthy()) {
    return makeTruthy();
  } else if (other.isFalsy()) {
    return makeFalsy();
  } else if (this.isTop() || this.isUnknown()) {
    return other;
  }
  int newMask = getMask() & other.getMask();
  String newTypevar;
  if (Objects.equals(getTypeVar(), other.getTypeVar())) {
    newTypevar = getTypeVar();
  } else {
    newTypevar = null;
    newMask &= ~TYPEVAR_MASK;
  }
  return meetEnums(
      newMask, getMask() | other.getMask(),
      ObjectType.specializeSet(getObjs(), other.getObjs()),
      newTypevar, getObjs(), other.getObjs(), getEnums(), other.getEnums());
}

// Greatest lower bound of two types in the type lattice.
public static JSType meet(JSType lhs, JSType rhs) {
  if (lhs.isTop()) {
    return rhs;
  } else if (rhs.isTop()) {
    return lhs;
  } else if (lhs.isUnknown()) {
    return rhs;
  } else if (rhs.isUnknown()) {
    return lhs;
  }
  int newMask = lhs.getMask() & rhs.getMask();
  String newTypevar;
  if (Objects.equals(lhs.getTypeVar(), rhs.getTypeVar())) {
    newTypevar = lhs.getTypeVar();
  } else {
    newTypevar = null;
    newMask = newMask & ~TYPEVAR_MASK;
  }
  return meetEnums(
      newMask, lhs.getMask() | rhs.getMask(),
      ObjectType.meetSets(lhs.getObjs(), rhs.getObjs()),
      newTypevar, lhs.getObjs(), rhs.getObjs(), lhs.getEnums(), rhs.getEnums());
}

/**
 * Both {@code meet} and {@code specialize} do the same computation for enums.
 * They don't just compute the set of enums; they may modify mask and objs.
 * So, both methods finish off by calling this one.
 */
private static JSType meetEnums(int newMask, int unionMask,
    ImmutableSet<ObjectType> newObjs, String newTypevar,
    ImmutableSet<ObjectType> objs1, ImmutableSet<ObjectType> objs2,
    ImmutableSet<EnumType> enums1, ImmutableSet<EnumType> enums2) {
  if (Objects.equals(enums1, enums2)) {
    return makeType(newMask, newObjs, newTypevar, enums1);
  }
  ImmutableSet.Builder<EnumType> enumBuilder = ImmutableSet.builder();
  ImmutableSet<EnumType> allEnums = EnumType.union(enums1, enums2);
  for (EnumType e : allEnums) {
    // An enum in the intersection will always be in the result
    if (enums1 != null && enums1.contains(e)
        && enums2 != null && enums2.contains(e)) {
      enumBuilder.add(e);
      continue;
    }
    // An enum {?} in the union will always be in the result
    JSType enumeratedType = e.getEnumeratedType();
    if (enumeratedType.isUnknown()) {
      enumBuilder.add(e);
      continue;
    }
    // An enum {TypeA} meets with any supertype of TypeA. When this happens,
    // we put the enum in the result and remove the supertype.
    // The following would be much more complex if we allowed the type of
    // an enum to be a union.
    if (enumeratedType.getMask() != NON_SCALAR_MASK) {
      if ((enumeratedType.getMask() & unionMask) != 0) {
        enumBuilder.add(e);
        newMask &= ~enumeratedType.getMask();
      }
    } else if (objs1 != null || objs2 != null) {
      Set<ObjectType> objsToRemove = new HashSet<>();
      ObjectType enumObj = Iterables.getOnlyElement(enumeratedType.getObjs());
      if (objs1 != null) {
        for (ObjectType obj1 : objs1) {
          if (enumObj.isSubtypeOf(obj1)) {
            enumBuilder.add(e);
            objsToRemove.add(obj1);
          }
        }
      }
      if (objs2 != null) {
        for (ObjectType obj2 : objs2) {
          if (enumObj.isSubtypeOf(obj2)) {
            enumBuilder.add(e);
            objsToRemove.add(obj2);
          }
        }
      }
      if (!objsToRemove.isEmpty() && newObjs != null) {
        newObjs = Sets.difference(newObjs, objsToRemove).immutableCopy();
      }
    }
  }
  return makeType(newMask, newObjs, newTypevar, enumBuilder.build());
}

// Removes the falsy members (null, false, undefined) from this type.
private JSType makeTruthy() {
  if (this.isTop() || this.isUnknown()) {
    return this;
  }
  return makeType(getMask() & ~NULL_MASK & ~FALSE_MASK & ~UNDEFINED_MASK,
      getObjs(), getTypeVar(), getEnums());
}

// Removes the truthy members (true and all objects) from this type.
private JSType makeFalsy() {
  if (this.isTop() || this.isUnknown()) {
    return this;
  }
  return makeType(getMask() & ~TRUE_MASK & ~NON_SCALAR_MASK,
      null, getTypeVar(), getEnums());
}

// Type of (lhs + rhs): string if either operand may be a string, and number
// if both operands may be non-strings.
public static JSType plus(JSType lhs, JSType rhs) {
  int newtype = (lhs.getMask() | rhs.getMask()) & STRING_MASK;
  if ((lhs.getMask() & ~STRING_MASK) != 0
      && (rhs.getMask() & ~STRING_MASK) != 0) {
    newtype |= NUMBER_MASK;
  }
  return makeType(newtype);
}

// Logical negation, tracked only at the truthy/falsy level of precision.
public JSType negate() {
  if (isTop() || isUnknown()) {
    return this;
  }
  if (isTruthy()) {
    return FALSY;
  } else if (isFalsy()) {
    return TRUTHY;
  }
  return UNKNOWN;
}

// Boolean coercion of this type.
public JSType toBoolean() {
  if (isTruthy()) {
    return TRUE_TYPE;
  } else if (isFalsy()) {
    return FALSE_TYPE;
  }
  return BOOLEAN;
}

public boolean isNonLooseSubtypeOf(JSType other) {
  return isSubtypeOfHelper(false, other);
}

public boolean isSubtypeOf(JSType other) {
  return isSubtypeOfHelper(true, other);
}

// Subtype check: enums, mask bits, type variable, then member objects.
private boolean isSubtypeOfHelper(
    boolean keepLoosenessOfThis, JSType other) {
  if (isUnknown() || other.isUnknown() || other.isTop()) {
    return true;
  }
  if (!EnumType.areSubtypes(this, other)) {
    return false;
  }
  int mask = getMask() & ~ENUM_MASK;
  if ((mask | other.getMask()) != other.getMask()) {
    return false;
  }
  if (!Objects.equals(getTypeVar(), other.getTypeVar())) {
    return false;
  }
  if (getObjs() == null) {
    return true;
  }
  // Because of optional properties,
  //  x \le y \iff x \join y = y does not hold.
  return ObjectType.isUnionSubtype(
      keepLoosenessOfThis, getObjs(), other.getObjs());
}

// Returns this minus other.  other must be simple: not top/unknown, no type
// variables, no enums, and at most one object.
public JSType removeType(JSType other) {
  int otherMask = other.getMask();
  Preconditions.checkState(
      !other.isTop() && !other.isUnknown()
      && (otherMask & TYPEVAR_MASK) == 0 && (otherMask & ENUM_MASK) == 0);
  if (isUnknown()) {
    return this;
  }
  if (isTop()) {
    // Expand TOP to a concrete union before subtracting.
    return ALMOST_TOP.removeType(other);
  }
  int newMask = getMask() & ~otherMask;
  if ((otherMask & NON_SCALAR_MASK) == 0) {
    return makeType(newMask, getObjs(), getTypeVar(), getEnums());
  }
  // TODO(dimvar): If objs and enums stay unchanged, reuse, don't recreate.
  Preconditions.checkState(other.getObjs().size() == 1,
      "Invalid type to remove: %s", other);
  ObjectType otherObj = Iterables.getOnlyElement(other.getObjs());
  ImmutableSet<ObjectType> newObjs = null;
  ImmutableSet<EnumType> newEnums = null;
  if (getObjs() != null) {
    ImmutableSet.Builder<ObjectType> builder = ImmutableSet.builder();
    for (ObjectType obj : getObjs()) {
      if (!obj.isSubtypeOf(otherObj)) {
        builder.add(obj);
      }
    }
    newObjs = builder.build();
  }
  if (getEnums() != null) {
    ImmutableSet.Builder<EnumType> builder = ImmutableSet.builder();
    for (EnumType e : getEnums()) {
      if (!e.getEnumeratedType().isSubtypeOf(other)) {
        builder.add(e);
      }
    }
    newEnums = builder.build();
  }
  return makeType(newMask, newObjs, getTypeVar(), newEnums);
}

// The function type when this is exactly one object; null otherwise.
public FunctionType getFunTypeIfSingletonObj() {
  if (getMask() != NON_SCALAR_MASK || getObjs().size() > 1) {
    return null;
  }
  return Iterables.getOnlyElement(getObjs()).getFunType();
}

// Meet of the function types of all member objects.
public FunctionType getFunType() {
  if (getObjs() == null) {
    return null;
  }
  if (getObjs().size() == 1) { // The common case is fast
    return Iterables.getOnlyElement(getObjs()).getFunType();
  }
  FunctionType result = FunctionType.TOP_FUNCTION;
  for (ObjectType obj : getObjs()) {
    result = FunctionType.meet(result, obj.getFunType());
  }
  return result;
}

// The nominal type when this contains exactly one object; null otherwise.
NominalType getNominalTypeIfUnique() {
  if (getObjs() == null || getObjs().size() > 1) {
    return null;
  }
  return Iterables.getOnlyElement(getObjs()).getNominalType();
}

public boolean isInterfaceDefinition() {
  if (getObjs() == null || getObjs().size() > 1) {
    return false;
  }
  FunctionType ft = Iterables.getOnlyElement(getObjs()).getFunType();
  return ft != null && ft.isInterfaceDefinition();
}

/** Turns the class-less object of this type (if any) into a loose object */
public JSType withLoose() {
  if (getObjs() == null) {
    Preconditions.checkState(getEnums() != null);
    return this;
  }
  return makeType(getMask(), ObjectType.withLooseObjects(getObjs()),
      getTypeVar(), getEnums());
}

// Type of property qname, joined across this type's objects and enums.
public JSType getProp(QualifiedName qname) {
  if (isBottom() || isUnknown()) {
    return UNKNOWN;
  }
  Preconditions.checkState(getObjs() != null || getEnums() != null);
  return nullAcceptingJoin(
      TypeWithPropertiesStatics.getProp(getObjs(), qname),
      TypeWithPropertiesStatics.getProp(getEnums(), qname));
}

// Declared type of property qname, joined across objects and enums.
public JSType getDeclaredProp(QualifiedName qname) {
  if (isUnknown()) {
    return UNKNOWN;
  }
  Preconditions.checkState(getObjs() != null || getEnums() != null);
  return nullAcceptingJoin(
      TypeWithPropertiesStatics.getDeclaredProp(getObjs(), qname),
      TypeWithPropertiesStatics.getDeclaredProp(getEnums(), qname));
}

public boolean mayHaveProp(QualifiedName qname) {
  return TypeWithPropertiesStatics.mayHaveProp(getObjs(), qname)
      || TypeWithPropertiesStatics.mayHaveProp(getEnums(), qname);
}

// True iff objs/enums exist and every one that exists has the property.
public boolean hasProp(QualifiedName qname) {
  if (getObjs() != null
      && !TypeWithPropertiesStatics.hasProp(getObjs(), qname)) {
    return false;
  }
  if (getEnums() != null
      && !TypeWithPropertiesStatics.hasProp(getEnums(), qname)) {
    return false;
  }
  return getEnums() != null || getObjs() != null;
}

public boolean hasConstantProp(QualifiedName pname) {
  Preconditions.checkArgument(pname.isIdentifier());
  return TypeWithPropertiesStatics.hasConstantProp(getObjs(), pname)
      || TypeWithPropertiesStatics.hasConstantProp(getEnums(), pname);
}

// A copy of this type with qname removed from all member objects.
public JSType withoutProperty(QualifiedName qname) {
  return getObjs() == null ?
      this :
      makeType(getMask(), ObjectType.withoutProperty(getObjs(), qname),
          getTypeVar(), getEnums());
}

// A copy of this type with property qname set to type on all member objects.
public JSType withProperty(QualifiedName qname, JSType type) {
  Preconditions.checkArgument(type != null);
  if (isUnknown() || isBottom()) {
    return this;
  }
  Preconditions.checkState(getObjs() != null);
  return makeType(getMask(), ObjectType.withProperty(getObjs(), qname, type),
      getTypeVar(), getEnums());
}

// A copy with a declared (and possibly constant) property on all objects.
public JSType withDeclaredProperty(
    QualifiedName qname, JSType type, boolean isConstant) {
  Preconditions.checkState(getObjs() != null);
  if (type == null && isConstant) {
    type = JSType.UNKNOWN;
  }
  return makeType(getMask(),
      ObjectType.withDeclaredProperty(getObjs(), qname, type, isConstant),
      getTypeVar(), getEnums());
}

// A copy where property pname is required (non-optional) on all objects.
public JSType withPropertyRequired(String pname) {
  return (isUnknown() || getObjs() == null) ?
      this :
      makeType(getMask(), ObjectType.withPropertyRequired(getObjs(), pname),
          getTypeVar(), getEnums());
}

@Override
public String toString() {
  // NOTE(review): mockToString is declared elsewhere in this file;
  // presumably a test hook that silences string conversion — confirm.
  if (mockToString) {
    return "";
  }
  return appendTo(new StringBuilder()).toString();
}

public StringBuilder appendTo(StringBuilder builder) {
  return typeToString(builder);
}

/** For use in {@link #typeToString} */
private static final Joiner PIPE_JOINER = Joiner.on("|");

// Pretty-prints this type.  Union members are '|'-separated; each printed
// tag's bits are cleared from `tags`, so leftover bits signal the abstract
// truthy/falsy types or an unrecognized mask.
private StringBuilder typeToString(StringBuilder builder) {
  switch (getMask()) {
    case BOTTOM_MASK:
      return builder.append("bottom");
    case TOP_MASK:
      return builder.append("*");
    case UNKNOWN_MASK:
      return builder.append("?");
    default:
      int tags = getMask();
      boolean firstIteration = true;
      for (int tag = 1; tag != END_MASK; tag <<= 1) {
        if ((tags & tag) != 0) {
          if (!firstIteration) {
            builder.append('|');
          }
          firstIteration = false;
          switch (tag) {
            case TRUE_MASK:
            case FALSE_MASK:
              builder.append("boolean");
              tags &= ~BOOLEAN_MASK;
              continue;
            case NULL_MASK:
              builder.append("null");
              tags &= ~NULL_MASK;
              continue;
            case NUMBER_MASK:
              builder.append("number");
              tags &= ~NUMBER_MASK;
              continue;
            case STRING_MASK:
              builder.append("string");
              tags &= ~STRING_MASK;
              continue;
            case UNDEFINED_MASK:
              builder.append("undefined");
              tags &= ~UNDEFINED_MASK;
              continue;
            case TYPEVAR_MASK:
              builder.append(getTypeVar());
              tags &= ~TYPEVAR_MASK;
              continue;
            case NON_SCALAR_MASK: {
              if (getObjs().size() == 1) {
                Iterables.getOnlyElement(getObjs()).appendTo(builder);
              } else {
                // TreeSet sorts the representations for deterministic output.
                Set<String> strReps = new TreeSet<>();
                for (ObjectType obj : getObjs()) {
                  strReps.add(obj.toString());
                }
                PIPE_JOINER.appendTo(builder, strReps);
              }
              tags &= ~NON_SCALAR_MASK;
              continue;
            }
            case ENUM_MASK: {
              if (getEnums().size() == 1) {
                builder.append(Iterables.getOnlyElement(getEnums()));
              } else {
                Set<String> strReps = new TreeSet<>();
                for (EnumType e : getEnums()) {
                  strReps.add(e.toString());
                }
                PIPE_JOINER.appendTo(builder, strReps);
              }
              tags &= ~ENUM_MASK;
              continue;
            }
          }
        }
      }
      if (tags == 0) { // Found all types in the union
        return builder;
      } else if (tags == TRUTHY_MASK) {
        return builder.append("truthy");
      } else if (tags == FALSY_MASK) {
        return builder.append("falsy");
      } else {
        return builder.append("Unrecognized type: " + tags);
      }
  }
}

@Override
public boolean equals(Object o) {
  if (o == null) {
    return false;
  }
  if (this == o) {
    return true;
  }
  Preconditions.checkArgument(o instanceof JSType);
  JSType t2 = (JSType) o;
  // NOTE(review): typeVar and enums are not compared here — confirm that
  // mask+objs equality is the intended notion of equality.
  return getMask() == t2.getMask() && Objects.equals(getObjs(), t2.getObjs());
}

@Override
public int hashCode() {
  return Objects.hash(getMask(), getObjs());
}
}

// General JSType implementation: stores the mask plus the optional objs,
// typeVar and enums components, normalizing empty sets to null.
final class UnionType extends JSType {
  private final int mask;
  // objs is null for scalar types
  private final ImmutableSet<ObjectType> objs;
  // typeVar is null for non-generic types
  private final String typeVar;
  // enums is null for types that don't have enums
  private final ImmutableSet<EnumType> enums;

  UnionType(int mask, ImmutableSet<ObjectType> objs,
      String typeVar, ImmutableSet<EnumType> enums) {
    // Canonicalize: empty sets are stored as null.
    if (enums == null) {
      this.enums = null;
    } else if (enums.isEmpty()) {
      this.enums = null;
    } else {
      this.enums = enums;
    }
    if (objs == null) {
      this.objs = null;
    } else if (objs.isEmpty()) {
      this.objs = null;
    } else {
      this.objs = objs;
    }
    // Having a type variable implies the TYPEVAR bit is set.
    if (typeVar != null) {
      mask |= TYPEVAR_MASK;
    }
    this.typeVar = typeVar;
    this.mask = mask;
    Preconditions.checkState(isValidType(),
        "Cannot create type with bits <<<%s>>>, " +
        "objs <<<%s>>>, typeVar <<<%s>>>, enums <<<%s>>>",
        mask, objs, typeVar, enums);
  }

  UnionType(int mask) {
    this(mask, null, null, null);
  }

  protected int getMask() {
    return mask;
  }

  protected ImmutableSet<ObjectType> getObjs() {
    return objs;
  }

  protected String getTypeVar() {
    return typeVar;
  }

  protected ImmutableSet<EnumType> getEnums() {
    return enums;
  }
}

// Mask-only JSType (no objs, type variable, or enums).  Frequently used
// masks are interned as singletons in make().
class MaskType extends JSType {
  // Masks for common types:
  private static final int NUMBER_OR_STRING_MASK = NUMBER_MASK | STRING_MASK;
  // union of undefined and stuff
  private static final int UNDEFINED_OR_BOOLEAN_MASK =
      UNDEFINED_MASK | TRUE_MASK | FALSE_MASK;
  private static final int UNDEFINED_OR_NUMBER_MASK =
      UNDEFINED_MASK | NUMBER_MASK;
  private static final int UNDEFINED_OR_STRING_MASK =
      UNDEFINED_MASK | STRING_MASK;
  private static final int UNDEFINED_OR_NULL_MASK =
      UNDEFINED_MASK | NULL_MASK;
  // union of null and stuff
  private static final int NULL_OR_BOOLEAN_MASK =
      NULL_MASK | TRUE_MASK | FALSE_MASK;
  private static final int NULL_OR_NUMBER_MASK = NULL_MASK | NUMBER_MASK;
  private static final int NULL_OR_STRING_MASK = NULL_MASK | STRING_MASK;

  private static final MaskType NUMBER_OR_STRING =
      new MaskType(NUMBER_OR_STRING_MASK);
  private static final MaskType UNDEFINED_OR_BOOLEAN =
      new MaskType(UNDEFINED_OR_BOOLEAN_MASK);
  private static final MaskType UNDEFINED_OR_NUMBER =
      new MaskType(UNDEFINED_OR_NUMBER_MASK);
  private static final MaskType UNDEFINED_OR_STRING =
      new MaskType(UNDEFINED_OR_STRING_MASK);
  private static final MaskType UNDEFINED_OR_NULL =
      new MaskType(UNDEFINED_OR_NULL_MASK);
  private static final MaskType NULL_OR_BOOLEAN =
      new MaskType(NULL_OR_BOOLEAN_MASK);
  private static final MaskType NULL_OR_NUMBER =
      new MaskType(NULL_OR_NUMBER_MASK);
  private static final MaskType NULL_OR_STRING =
      new MaskType(NULL_OR_STRING_MASK);

  protected final int mask;

  MaskType(int mask) {
    this.mask = mask;
  }

  // Factory returning the interned singleton for known masks.
  static JSType make(int mask) {
    switch (mask) {
      case BOTTOM_MASK:
        return JSType.BOTTOM;
      case TRUE_MASK:
        return JSType.TRUE_TYPE;
      case FALSE_MASK:
        return JSType.FALSE_TYPE;
      case NULL_MASK:
        return JSType.NULL;
      case NUMBER_MASK:
        return JSType.NUMBER;
      case STRING_MASK:
        return JSType.STRING;
      case UNDEFINED_MASK:
        return JSType.UNDEFINED;
      case TRUTHY_MASK:
        return JSType.TRUTHY;
      case FALSY_MASK:
        return JSType.FALSY;
      case UNKNOWN_MASK:
        return JSType.UNKNOWN;
      case TOP_MASK:
        return JSType.TOP;
      case BOOLEAN_MASK:
        return JSType.BOOLEAN;
      case NUMBER_OR_STRING_MASK:
        return NUMBER_OR_STRING;
      case UNDEFINED_OR_BOOLEAN_MASK:
        return UNDEFINED_OR_BOOLEAN;
      case UNDEFINED_OR_NUMBER_MASK:
        return UNDEFINED_OR_NUMBER;
      case UNDEFINED_OR_STRING_MASK:
        return UNDEFINED_OR_STRING;
      case UNDEFINED_OR_NULL_MASK:
        return UNDEFINED_OR_NULL;
      case NULL_OR_BOOLEAN_MASK:
        return NULL_OR_BOOLEAN;
      case NULL_OR_NUMBER_MASK:
        return NULL_OR_NUMBER;
      case NULL_OR_STRING_MASK:
        return NULL_OR_STRING;
      default:
        return new MaskType(mask);
    }
  }

  protected int getMask() {
    return mask;
  }

  protected ImmutableSet<ObjectType> getObjs() {
    return null;
  }

  protected String getTypeVar() {
    return null;
  }

  protected ImmutableSet<EnumType> getEnums() {
    return null;
  }
}

// A type that is exactly a set of objects (mask NON_SCALAR_MASK).
final class ObjsType extends JSType {
  private ImmutableSet<ObjectType> objs;

  ObjsType(ImmutableSet<ObjectType> objs) {
    this.objs = objs;
  }

  protected int getMask() {
    return NON_SCALAR_MASK;
  }

  protected ImmutableSet<ObjectType> getObjs() {
    return objs;
  }

  protected String getTypeVar() {
    return null;
  }

  protected ImmutableSet<EnumType> getEnums() {
    return null;
  }
}

// A set of objects that may also be null (mask NON_SCALAR_MASK | NULL_MASK).
final class NullableObjsType extends JSType {
  private ImmutableSet<ObjectType> objs;

  NullableObjsType(ImmutableSet<ObjectType> objs) {
    this.objs = objs;
  }

  protected int getMask() {
    return NON_SCALAR_MASK | NULL_MASK;
  }

  protected ImmutableSet<ObjectType> getObjs() {
    return objs;
  }

  protected String getTypeVar() {
    return null;
  }

  protected ImmutableSet<EnumType> getEnums() {
    return null;
  }
}
/** * Copyright 2005-2016 Red Hat, Inc. * * Red Hat licenses this file to you under the Apache License, version * 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. */ package io.fabric8.patch.management.conflicts; import java.io.ByteArrayOutputStream; import java.io.File; import java.util.Enumeration; import java.util.HashSet; import java.util.Set; import io.fabric8.patch.management.impl.Activator; import org.apache.felix.utils.properties.Properties; import org.osgi.service.log.LogService; /** * <p>A kind of 3-way merge conflict resolver, but working at property name/value level.</p> * <p>We collect name-value pairs and if value isn't changed either between firstChange vs. base or between * secondChange vs. base, there's no conflict. If both diffs change property value, we choose value from * <code>secondChange</code>.</p> */ public class PropertiesFileResolver implements ResolverEx { @Override public String resolve(File firstChange, File base, File secondChange) { return resolve(firstChange, base, secondChange, true, false); } @Override public String resolve(File firstChange, File base, File secondChange, boolean useFirstChangeAsBase, boolean rollback/*=false*/) { try { Properties baseProperties = new Properties(false); if (base != null) { baseProperties.load(base); } Properties firstProperties = new Properties(false); firstProperties.load(firstChange); Properties secondProperties = new Properties(secondChange, false); secondProperties.load(secondChange); Properties result = useFirstChangeAsBase ? 
firstProperties : secondProperties; Properties otherSource = useFirstChangeAsBase ? secondProperties : firstProperties; // first let's iterate over what we have in selected base - we may already find new properties in // incoming change vs. base version Set<String> keys = new HashSet<>(); for (Enumeration<?> e = result.propertyNames(); e.hasMoreElements(); ) { keys.add((String) e.nextElement()); } for (String key : keys) { // treat more important properties (result) as "ours" Change state = kindOfChange(key, baseProperties, result, otherSource); switch (state) { case NONE: case ADDED_BY_US: case MODIFIED_BY_US: // already reflected in "result" properties break; case DELETED_BY_US: case BOTH_DELETED: case ADDED_BY_THEM: // can't happen in this loop break; case BOTH_ADDED: result.put(key, specialPropertyMerge(key, firstProperties, secondProperties, rollback)); break; case BOTH_MODIFIED: // may mean also that we have change vs. removal if (secondProperties.getProperty(key) == null) { result.remove(key); } else { result.put(key, specialPropertyMerge(key, firstProperties, secondProperties, rollback)); } break; case DELETED_BY_THEM: result.remove(key); break; case MODIFIED_BY_THEM: result.put(key, otherSource.getProperty(key)); break; } } // then we can have additions in less important change, for example if patch adds new properties // but we want to preserve layout of properties file from user (it may have user comments for example) // we will handle only properties added in "otherSource" keys.clear(); for (Enumeration<?> e = otherSource.propertyNames(); e.hasMoreElements(); ) { keys.add((String) e.nextElement()); } for (Enumeration<?> e = result.propertyNames(); e.hasMoreElements(); ) { String key = (String) e.nextElement(); keys.remove(key); } for (String key : keys) { // treat more important properties (result) as "ours" Change state = kindOfChange(key, baseProperties, result, otherSource); switch (state) { case NONE: case BOTH_DELETED: case BOTH_ADDED: case 
BOTH_MODIFIED: case ADDED_BY_US: case MODIFIED_BY_US: case DELETED_BY_THEM: case DELETED_BY_US: break; case ADDED_BY_THEM: case MODIFIED_BY_THEM: result.put(key, otherSource.getProperty(key)); break; } } ByteArrayOutputStream baos = new ByteArrayOutputStream(); result.store(baos, null); return new String(baos.toByteArray(), "UTF-8"); } catch (Exception e) { Activator.log(LogService.LOG_ERROR, null, "Problem resolving conflict: " + e.getMessage(), e, true); } return null; } /** * Special handling of particular key. By default we just pick value from <em>more important</em> set * of properties * Subclasses ay override this method. * @param key * @param firstProperties * @param secondProperties * @param rollback * @return */ protected String specialPropertyMerge(String key, Properties firstProperties, Properties secondProperties, boolean rollback) { return rollback ? firstProperties.get(key) : secondProperties.get(key); } /** * Checks kind of change at property (instead of line or git blob) level * @param key * @param baseProperties * @param oursProperties * @param theirsProperties * @return */ private Change kindOfChange(String key, Properties baseProperties, Properties oursProperties, Properties theirsProperties) { String base = baseProperties.getProperty(key); String ours = oursProperties.getProperty(key); String theirs = theirsProperties.getProperty(key); if (base == null) { if (ours == null && theirs == null) { // weird return Change.NONE; } else if (ours != null && theirs != null) { // conflict, but quite unimaginable to have both user and patch add new property with the same name // will be resolved by picking version from "second" (more important) change return Change.BOTH_ADDED; } // non-conflict - add new property return ours == null ? Change.ADDED_BY_THEM : Change.ADDED_BY_US; } if (ours == null && theirs == null) { // non-conflict - remove property return Change.BOTH_DELETED; } if (ours == null) { return !theirs.equals(base) ? 
Change.BOTH_MODIFIED : Change.DELETED_BY_US; } if (theirs == null) { return !ours.equals(base) ? Change.BOTH_MODIFIED : Change.DELETED_BY_THEM; } if (ours.equals(base) && theirs.equals(base)) { return Change.NONE; } if (!ours.equals(base) && !theirs.equals(base)) { return Change.BOTH_MODIFIED; } else { return ours.equals(base) ? Change.MODIFIED_BY_THEM : Change.MODIFIED_BY_US; } } private enum Change { NONE, BOTH_DELETED, BOTH_ADDED, BOTH_MODIFIED, DELETED_BY_US, DELETED_BY_THEM, ADDED_BY_US, ADDED_BY_THEM, MODIFIED_BY_US, MODIFIED_BY_THEM } }
/*
 * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.ec2.model;

import java.io.Serializable;
import java.util.Objects;

/**
 * <p>
 * Describes an attachment between a virtual private gateway and a VPC.
 * </p>
 * <p>
 * NOTE(review): this looks like AWS SDK code-generator output; if it is
 * regenerated, hand edits here will be lost — confirm before depending on them.
 */
public class VpcAttachment implements Serializable, Cloneable {

    /**
     * The ID of the VPC.
     */
    private String vpcId;

    /**
     * The current state of the attachment.
     * <p>
     * <b>Constraints:</b><br/>
     * <b>Allowed Values: </b>attaching, attached, detaching, detached
     */
    private String state;

    /**
     * The ID of the VPC.
     *
     * @return The ID of the VPC.
     */
    public String getVpcId() {
        return vpcId;
    }

    /**
     * The ID of the VPC.
     *
     * @param vpcId The ID of the VPC.
     */
    public void setVpcId(String vpcId) {
        this.vpcId = vpcId;
    }

    /**
     * The ID of the VPC.
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     *
     * @param vpcId The ID of the VPC.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     */
    public VpcAttachment withVpcId(String vpcId) {
        this.vpcId = vpcId;
        return this;
    }

    /**
     * The current state of the attachment.
     * <p>
     * <b>Constraints:</b><br/>
     * <b>Allowed Values: </b>attaching, attached, detaching, detached
     *
     * @return The current state of the attachment.
     *
     * @see AttachmentStatus
     */
    public String getState() {
        return state;
    }

    /**
     * The current state of the attachment.
     * <p>
     * <b>Constraints:</b><br/>
     * <b>Allowed Values: </b>attaching, attached, detaching, detached
     *
     * @param state The current state of the attachment.
     *
     * @see AttachmentStatus
     */
    public void setState(String state) {
        this.state = state;
    }

    /**
     * The current state of the attachment.
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     * <p>
     * <b>Constraints:</b><br/>
     * <b>Allowed Values: </b>attaching, attached, detaching, detached
     *
     * @param state The current state of the attachment.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     *
     * @see AttachmentStatus
     */
    public VpcAttachment withState(String state) {
        this.state = state;
        return this;
    }

    /**
     * The current state of the attachment, as a typed enum constant. Stored
     * internally as the enum's string form.
     * <p>
     * <b>Constraints:</b><br/>
     * <b>Allowed Values: </b>attaching, attached, detaching, detached
     *
     * @param state The current state of the attachment.
     *
     * @see AttachmentStatus
     */
    public void setState(AttachmentStatus state) {
        this.state = state.toString();
    }

    /**
     * The current state of the attachment, as a typed enum constant.
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     * <p>
     * <b>Constraints:</b><br/>
     * <b>Allowed Values: </b>attaching, attached, detaching, detached
     *
     * @param state The current state of the attachment.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     *
     * @see AttachmentStatus
     */
    public VpcAttachment withState(AttachmentStatus state) {
        this.state = state.toString();
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging. Output format is unchanged from previous versions
     * ("{VpcId: x,State: y}", omitting null fields).
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getVpcId() != null)
            sb.append("VpcId: ").append(getVpcId()).append(",");
        if (getState() != null)
            sb.append("State: ").append(getState());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public int hashCode() {
        // Objects.hash computes the same 31-based accumulation the previous
        // hand-rolled loop did (seed 1, factor 31, null -> 0), so hash values
        // are identical for the same field order.
        return Objects.hash(getVpcId(), getState());
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (!(obj instanceof VpcAttachment))
            return false;
        VpcAttachment other = (VpcAttachment) obj;
        // Objects.equals collapses the old null-XOR + equals pair per field.
        return Objects.equals(other.getVpcId(), this.getVpcId())
                && Objects.equals(other.getState(), this.getState());
    }

    @Override
    public VpcAttachment clone() {
        try {
            return (VpcAttachment) super.clone();
        } catch (CloneNotSupportedException e) {
            // Cannot happen: this class implements Cloneable.
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() "
                            + "even though we're Cloneable!", e);
        }
    }
}
/*
 * Copyright (C) 2014 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package com.google.cloud.genomics.utils;

import com.google.api.client.auth.oauth2.Credential;
import com.google.api.client.googleapis.auth.oauth2.GoogleCredential;
import com.google.api.client.googleapis.services.CommonGoogleClientRequestInitializer;
import com.google.api.client.googleapis.services.GoogleClientRequestInitializer;
import com.google.api.client.googleapis.services.json.AbstractGoogleJsonClient;
import com.google.api.client.googleapis.util.Utils;
import com.google.api.client.http.HttpBackOffIOExceptionHandler;
import com.google.api.client.http.HttpBackOffUnsuccessfulResponseHandler;
import com.google.api.client.http.HttpIOExceptionHandler;
import com.google.api.client.http.HttpRequest;
import com.google.api.client.http.HttpRequestInitializer;
import com.google.api.client.http.HttpResponse;
import com.google.api.client.http.HttpTransport;
import com.google.api.client.http.HttpUnsuccessfulResponseHandler;
import com.google.api.client.json.JsonFactory;
import com.google.api.client.util.ExponentialBackOff;
import com.google.api.services.genomics.Genomics;
import com.google.api.services.genomics.GenomicsScopes;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;

import java.io.File;
import java.io.IOException;
import java.security.GeneralSecurityException;
import java.util.Collection;
import java.util.concurrent.atomic.AtomicInteger;

import javax.annotation.Nullable;

/**
 * Code required to manufacture instances of a {@link Genomics} stub.
 *
 * Several authentication mechanisms are supported. For more detail, see
 * https://developers.google.com/api-client-library/java/google-api-java-client/oauth2
 */
public class GenomicsFactory {

  // Defaults applied to every outgoing request; timeouts are in milliseconds.
  private static final int DEFAULT_CONNECT_TIMEOUT = 20000;
  private static final int DEFAULT_READ_TIMEOUT = 20000;
  private static final int DEFAULT_NUMBER_OF_RETRIES = 5;
  private static final String DEFAULT_APPLICATION_NAME = "genomics";

  /**
   * A builder class for {@link GenomicsFactory} objects.
   */
  public static class Builder {
    // TODO is application name used for anything other than the credential store path?
    // If not, it should be removed.
    @VisibleForTesting final String applicationName;
    private HttpTransport httpTransport = Utils.getDefaultTransport();
    private int connectTimeout = DEFAULT_CONNECT_TIMEOUT;
    private int readTimeout = DEFAULT_READ_TIMEOUT;
    private int numRetries = DEFAULT_NUMBER_OF_RETRIES;
    private JsonFactory jsonFactory = Utils.getDefaultJsonFactory();
    private Optional<String> rootUrl = Optional.absent();
    private Optional<String> servicePath = Optional.absent();
    private Collection<String> scopes = GenomicsScopes.all();

    // Package-private: obtain a Builder via GenomicsFactory.builder(...).
    Builder(String applicationName) {
      this.applicationName = applicationName;
    }

    /**
     * Build the {@link GenomicsFactory}.
     *
     * @return The built {@link GenomicsFactory}
     */
    public GenomicsFactory build() {
      return new GenomicsFactory(
          applicationName,
          httpTransport,
          connectTimeout,
          readTimeout,
          numRetries,
          jsonFactory,
          scopes,
          rootUrl,
          servicePath);
    }

    /**
     * Sets the {@link HttpTransport} to use. Most code will never need to call this method.
     *
     * @param httpTransport the {@code HttpTransport} to use
     * @return this builder
     */
    public Builder setHttpTransport(HttpTransport httpTransport) {
      this.httpTransport = httpTransport;
      return this;
    }

    /**
     * Sets the connect timeout, in milliseconds, applied to each request.
     *
     * @deprecated retained for source compatibility only.
     */
    @Deprecated
    public Builder setConnectTimeout(int connectTimeout) {
      this.connectTimeout = connectTimeout;
      return this;
    }

    /**
     * Sets the read timeout, in milliseconds, applied to each request.
     *
     * @deprecated retained for source compatibility only.
     */
    @Deprecated
    public Builder setReadTimeout(int readTimeout) {
      this.readTimeout = readTimeout;
      return this;
    }

    /**
     * The number of times to retry a failed request to the Genomics API.
     *
     * @param numRetries the retry count applied to each request
     * @return this builder
     */
    public Builder setNumberOfRetries(int numRetries) {
      this.numRetries = numRetries;
      return this;
    }

    /**
     * Sets the {@link JsonFactory} to use. Most code will never need to call this method.
     *
     * @param jsonFactory the {@code JsonFactory} to use
     * @return this builder
     */
    public Builder setJsonFactory(JsonFactory jsonFactory) {
      this.jsonFactory = jsonFactory;
      return this;
    }

    /**
     * The URL of the endpoint to send requests to. The default is
     * {@code https://www.googleapis.com}.
     *
     * @param rootUrl The URL of the endpoint to send requests to
     * @return this builder
     */
    public Builder setRootUrl(String rootUrl) {
      this.rootUrl = Optional.of(rootUrl);
      return this;
    }

    /**
     * Sets the URL-encoded service path of the service.
     * Setting this field is uncommon and should only be used when trying to use a
     * non-Google API provider.
     *
     * @param servicePath The URL-encoded service path of the service.
     * @return this builder
     */
    public Builder setServicePath(String servicePath) {
      this.servicePath = Optional.of(servicePath);
      return this;
    }

    /**
     * The OAuth scopes to attach to outgoing requests. Most code will not have to call this method.
     *
     * @param scopes The OAuth scopes to attach to outgoing requests
     * @return this builder
     */
    public Builder setScopes(Collection<String> scopes) {
      this.scopes = scopes;
      return this;
    }
  }

  /**
   * Create a new {@link Builder} for {@code GenomicsFactory} objects, using the
   * default application name.
   *
   * @return the new {@code Builder} object.
   */
  public static Builder builder() {
    return new Builder(DEFAULT_APPLICATION_NAME);
  }

  /**
   * Create a new {@link Builder} for {@code GenomicsFactory} objects.
   *
   * @param applicationName The name of this application.
   * @return the new {@code Builder} object.
   */
  public static Builder builder(String applicationName) {
    return new Builder(applicationName);
  }

  private final String applicationName;
  private final HttpTransport httpTransport;
  private final int connectTimeout;
  private final int readTimeout;
  private final int numRetries;
  private final JsonFactory jsonFactory;
  private final Optional<String> rootUrl;
  private final Optional<String> servicePath;
  private final Collection<String> scopes;

  // Metrics counters, incremented by the request initializer / handlers below;
  // exposed via the *Count() accessors.
  private final AtomicInteger initializedRequestsCount = new AtomicInteger();
  private final AtomicInteger unsuccessfulResponsesCount = new AtomicInteger();
  private final AtomicInteger ioExceptionsCount = new AtomicInteger();

  // Private: instances are created via Builder.build().
  private GenomicsFactory(
      String applicationName,
      HttpTransport httpTransport,
      int connectTimeout,
      int readTimeout,
      int numRetries,
      JsonFactory jsonFactory,
      Collection<String> scopes,
      Optional<String> rootUrl,
      Optional<String> servicePath) {
    this.applicationName = applicationName;
    this.httpTransport = httpTransport;
    this.connectTimeout = connectTimeout;
    this.readTimeout = readTimeout;
    this.numRetries = numRetries;
    this.jsonFactory = jsonFactory;
    this.scopes = scopes;
    this.rootUrl = rootUrl;
    this.servicePath = servicePath;
  }

  // Applies this factory's common configuration (request initializer, app name,
  // optional root URL / service path) to any AbstractGoogleJsonClient.Builder.
  private <T extends AbstractGoogleJsonClient.Builder> T prepareBuilder(T builder,
      final HttpRequestInitializer delegate,
      GoogleClientRequestInitializer googleClientRequestInitializer) {
    builder
        .setHttpRequestInitializer(getHttpRequestInitializer(delegate))
        .setApplicationName(applicationName)
        .setGoogleClientRequestInitializer(googleClientRequestInitializer);
    if (rootUrl.isPresent()) {
      builder.setRootUrl(rootUrl.get());
    }
    if (servicePath.isPresent()) {
      builder.setServicePath(servicePath.get());
    }
    return builder;
  }

  // The null third argument is the HttpRequestInitializer; prepareBuilder
  // installs the real initializer afterwards.
  private Genomics.Builder getGenomicsBuilder() {
    return new Genomics.Builder(httpTransport, jsonFactory, null);
  }

  // Wraps an optional delegate initializer: counts each initialized request,
  // applies timeouts/retries, and chains the request's pre-existing response /
  // IO-exception handlers with exponential-back-off fallbacks. Note the inner
  // anonymous handlers capture the request's handlers AT initialize() time, so
  // the delegate's own handlers (set by delegate.initialize) run first.
  private HttpRequestInitializer getHttpRequestInitializer(final HttpRequestInitializer delegate) {
    return new HttpRequestInitializer() {
      @Override
      public void initialize(final HttpRequest request) throws IOException {
        initializedRequestsCount.incrementAndGet();
        if (null != delegate) {
          delegate.initialize(request);
        }
        final HttpBackOffUnsuccessfulResponseHandler unsuccessfulResponseHandler =
            new HttpBackOffUnsuccessfulResponseHandler(new ExponentialBackOff());
        final HttpIOExceptionHandler ioExceptionHandler =
            new HttpBackOffIOExceptionHandler(new ExponentialBackOff());
        request
            .setConnectTimeout(connectTimeout)
            .setReadTimeout(readTimeout)
            .setNumberOfRetries(numRetries)
            .setUnsuccessfulResponseHandler(
                new HttpUnsuccessfulResponseHandler() {
                  // Handler installed by the delegate (if any), captured now
                  // because setUnsuccessfulResponseHandler below replaces it.
                  @Nullable private final HttpUnsuccessfulResponseHandler delegate =
                      request.getUnsuccessfulResponseHandler();

                  @Override
                  public boolean handleResponse(HttpRequest req, HttpResponse response,
                      boolean supportsRetry) throws IOException {
                    unsuccessfulResponsesCount.incrementAndGet();
                    // Short-circuits: back-off handler only runs if the
                    // captured delegate is absent or declined to handle.
                    return (null != delegate
                            && delegate.handleResponse(req, response, supportsRetry))
                        || unsuccessfulResponseHandler.handleResponse(
                            req, response, supportsRetry);
                  }
                })
            .setIOExceptionHandler(
                new HttpIOExceptionHandler() {
                  // Same capture-then-chain pattern for IO exceptions.
                  @Nullable private final HttpIOExceptionHandler delegate =
                      request.getIOExceptionHandler();

                  @Override
                  public boolean handleIOException(HttpRequest req, boolean supportsRetry)
                      throws IOException {
                    ioExceptionsCount.incrementAndGet();
                    return (null != delegate
                            && delegate.handleIOException(req, supportsRetry))
                        || ioExceptionHandler.handleIOException(req, supportsRetry);
                  }
                });
      }
    };
  }

  public HttpTransport getHttpTransport() {
    return httpTransport;
  }

  public JsonFactory getJsonFactory() {
    return jsonFactory;
  }

  /** Number of requests initialized by stubs built from this factory. */
  public final int initializedRequestsCount() {
    return initializedRequestsCount.get();
  }

  /** Number of unsuccessful HTTP responses observed (before any retry). */
  public final int unsuccessfulResponsesCount() {
    return unsuccessfulResponsesCount.get();
  }

  /** Number of IOExceptions observed while executing requests (before any retry). */
  public final int ioExceptionsCount() {
    return ioExceptionsCount.get();
  }

  /**
   * Create a {@link Genomics} stub using an API key.
   *
   * @param apiKey The API key of the Google Cloud Platform project.
   * @return The new {@code Genomics} stub
   */
  public Genomics fromApiKey(String apiKey) {
    Preconditions.checkNotNull(apiKey);
    return fromApiKey(getGenomicsBuilder(), apiKey).build();
  }

  /**
   * Prepare an AbstractGoogleJsonClient.Builder using an API key.
   *
   * @param builder The builder to be prepared.
   * @param apiKey The API key of the Google Cloud Platform project.
   * @return The passed in builder, for easy chaining.
   */
  public <T extends AbstractGoogleJsonClient.Builder> T fromApiKey(T builder, String apiKey) {
    Preconditions.checkNotNull(builder);
    Preconditions.checkNotNull(apiKey);
    return prepareBuilder(builder, null, new CommonGoogleClientRequestInitializer(apiKey));
  }

  /**
   * Create a {@link Genomics} stub using a {@code client_secrets.json} {@link File}.
   *
   * @param clientSecretsJson {@code client_secrets.json} file.
   * @return The new {@code Genomics} stub
   */
  public Genomics fromClientSecretsFile(File clientSecretsJson) {
    Preconditions.checkNotNull(clientSecretsJson);
    return fromClientSecretsFile(getGenomicsBuilder(), clientSecretsJson).build();
  }

  /**
   * Prepare an AbstractGoogleJsonClient.Builder using a {@code client_secrets.json} {@link File}.
   *
   * @param builder The builder to be prepared.
   * @param clientSecretsJson {@code client_secrets.json} file.
   * @return The passed in builder, for easy chaining.
   */
  public <T extends AbstractGoogleJsonClient.Builder> T fromClientSecretsFile(T builder,
      File clientSecretsJson) {
    Preconditions.checkNotNull(builder);
    Preconditions.checkNotNull(clientSecretsJson);
    return prepareBuilder(builder,
        CredentialFactory.getCredentialFromClientSecrets(clientSecretsJson.getAbsolutePath(),
            applicationName),
        null);
  }

  /**
   * Create a {@link Genomics} stub using a credential.
   *
   * @param credential The credential to be used for requests.
   * @return The new {@code Genomics} stub
   */
  public Genomics fromCredential(Credential credential) {
    Preconditions.checkNotNull(credential);
    return fromCredential(getGenomicsBuilder(), credential).build();
  }

  /**
   * Prepare an AbstractGoogleJsonClient.Builder using a credential.
   *
   * @param builder The builder to be prepared.
   * @param credential The credential to be used for requests.
   * @return The passed in builder, for easy chaining.
   */
  public <T extends AbstractGoogleJsonClient.Builder> T fromCredential(T builder,
      Credential credential) {
    Preconditions.checkNotNull(builder);
    Preconditions.checkNotNull(credential);
    return prepareBuilder(builder, credential, null);
  }

  /**
   * Create a {@link Genomics} stub using the Application Default Credential.
   *
   * @return The new {@code Genomics} stub
   */
  public Genomics fromApplicationDefaultCredential() {
    return fromCredential(CredentialFactory.getApplicationDefaultCredential());
  }

  /**
   * Prepare an AbstractGoogleJsonClient.Builder using the Application Default Credential.
   *
   * @param builder The builder to be prepared.
   * @return The passed in builder, for easy chaining.
   */
  public <T extends AbstractGoogleJsonClient.Builder> T fromApplicationDefaultCredential(
      T builder) {
    Preconditions.checkNotNull(builder);
    return fromCredential(builder, CredentialFactory.getApplicationDefaultCredential());
  }

  /**
   * Create a new genomics stub from the given service account ID and private key {@link File}.
   *
   * @param serviceAccountId The service account ID (typically an email address)
   * @param p12File The file on disk containing the private key
   * @return The new {@code Genomics} stub
   * @throws GeneralSecurityException
   * @throws IOException
   */
  public Genomics fromServiceAccount(String serviceAccountId, File p12File)
      throws GeneralSecurityException, IOException {
    Preconditions.checkNotNull(serviceAccountId);
    Preconditions.checkNotNull(p12File);
    return fromServiceAccount(getGenomicsBuilder(), serviceAccountId, p12File).build();
  }

  /**
   * Prepare an AbstractGoogleJsonClient.Builder with the given service account ID
   * and private key {@link File}.
   *
   * @param builder The builder to be prepared.
   * @param serviceAccountId The service account ID (typically an email address)
   * @param p12File The file on disk containing the private key
   * @return The passed in builder, for easy chaining.
   * @throws GeneralSecurityException
   * @throws IOException
   */
  public <T extends AbstractGoogleJsonClient.Builder> T fromServiceAccount(T builder,
      String serviceAccountId, File p12File) throws GeneralSecurityException, IOException {
    Preconditions.checkNotNull(builder);
    GoogleCredential creds = new GoogleCredential.Builder()
        .setTransport(httpTransport)
        .setJsonFactory(jsonFactory)
        .setServiceAccountId(serviceAccountId)
        .setServiceAccountScopes(scopes)
        .setServiceAccountPrivateKeyFromP12File(p12File)
        .build();
    // Eagerly fetch an access token so failures surface here rather than on
    // the first API call.
    creds.refreshToken();
    return prepareBuilder(builder, creds, null);
  }

  /**
   * Create a new genomics stub from the given OfflineAuth object.
   *
   * @param auth The OfflineAuth
   * @return The new {@code Genomics} stub
   */
  public Genomics fromOfflineAuth(OfflineAuth auth) {
    Preconditions.checkNotNull(auth);
    return fromOfflineAuth(getGenomicsBuilder(), auth).build();
  }

  /**
   * Prepare an AbstractGoogleJsonClient.Builder with the given OfflineAuth object.
   *
   * @param builder The builder to be prepared.
   * @param auth The OfflineAuth
   * @return The passed in builder, for easy chaining.
   */
  public <T extends AbstractGoogleJsonClient.Builder> T fromOfflineAuth(T builder,
      OfflineAuth auth) {
    Preconditions.checkNotNull(builder);
    Preconditions.checkNotNull(auth);
    // API-key auth takes precedence over a stored credential.
    if(auth.hasApiKey()) {
      return fromApiKey(builder, auth.getApiKey());
    }
    return fromCredential(builder, auth.getCredential());
  }
}
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.dataexchange.model;

import java.io.Serializable;
import java.util.Objects;
import javax.annotation.Generated;

import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;

/**
 * <p>
 * Details of the operation to be performed by the job.
 * </p>
 * <p>
 * NOTE(review): carries {@code @Generated} — hand edits here will be lost if
 * the SDK code generator is re-run.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/dataexchange-2017-07-25/ImportAssetFromSignedUrlRequestDetails"
 *      target="_top">AWS API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ImportAssetFromSignedUrlRequestDetails implements Serializable, Cloneable, StructuredPojo {

    /**
     * <p>
     * The name of the asset. When importing from Amazon S3, the S3 object key is used as the asset name.
     * </p>
     */
    private String assetName;
    /**
     * <p>
     * The unique identifier for the data set associated with this import job.
     * </p>
     */
    private String dataSetId;
    /**
     * <p>
     * The Base64-encoded Md5 hash for the asset, used to ensure the integrity of the file at that location.
     * </p>
     */
    private String md5Hash;
    /**
     * <p>
     * The unique identifier for the revision associated with this import request.
     * </p>
     */
    private String revisionId;

    /**
     * <p>
     * The name of the asset. When importing from Amazon S3, the S3 object key is used as the asset name.
     * </p>
     *
     * @param assetName
     *        The name of the asset. When importing from Amazon S3, the S3 object key is used as the asset name.
     */
    public void setAssetName(String assetName) {
        this.assetName = assetName;
    }

    /**
     * <p>
     * The name of the asset. When importing from Amazon S3, the S3 object key is used as the asset name.
     * </p>
     *
     * @return The name of the asset. When importing from Amazon S3, the S3 object key is used as the asset name.
     */
    public String getAssetName() {
        return this.assetName;
    }

    /**
     * <p>
     * The name of the asset. When importing from Amazon S3, the S3 object key is used as the asset name.
     * </p>
     *
     * @param assetName
     *        The name of the asset. When importing from Amazon S3, the S3 object key is used as the asset name.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ImportAssetFromSignedUrlRequestDetails withAssetName(String assetName) {
        setAssetName(assetName);
        return this;
    }

    /**
     * <p>
     * The unique identifier for the data set associated with this import job.
     * </p>
     *
     * @param dataSetId
     *        The unique identifier for the data set associated with this import job.
     */
    public void setDataSetId(String dataSetId) {
        this.dataSetId = dataSetId;
    }

    /**
     * <p>
     * The unique identifier for the data set associated with this import job.
     * </p>
     *
     * @return The unique identifier for the data set associated with this import job.
     */
    public String getDataSetId() {
        return this.dataSetId;
    }

    /**
     * <p>
     * The unique identifier for the data set associated with this import job.
     * </p>
     *
     * @param dataSetId
     *        The unique identifier for the data set associated with this import job.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ImportAssetFromSignedUrlRequestDetails withDataSetId(String dataSetId) {
        setDataSetId(dataSetId);
        return this;
    }

    /**
     * <p>
     * The Base64-encoded Md5 hash for the asset, used to ensure the integrity of the file at that location.
     * </p>
     *
     * @param md5Hash
     *        The Base64-encoded Md5 hash for the asset, used to ensure the integrity of the file at that location.
     */
    public void setMd5Hash(String md5Hash) {
        this.md5Hash = md5Hash;
    }

    /**
     * <p>
     * The Base64-encoded Md5 hash for the asset, used to ensure the integrity of the file at that location.
     * </p>
     *
     * @return The Base64-encoded Md5 hash for the asset, used to ensure the integrity of the file at that location.
     */
    public String getMd5Hash() {
        return this.md5Hash;
    }

    /**
     * <p>
     * The Base64-encoded Md5 hash for the asset, used to ensure the integrity of the file at that location.
     * </p>
     *
     * @param md5Hash
     *        The Base64-encoded Md5 hash for the asset, used to ensure the integrity of the file at that location.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ImportAssetFromSignedUrlRequestDetails withMd5Hash(String md5Hash) {
        setMd5Hash(md5Hash);
        return this;
    }

    /**
     * <p>
     * The unique identifier for the revision associated with this import request.
     * </p>
     *
     * @param revisionId
     *        The unique identifier for the revision associated with this import request.
     */
    public void setRevisionId(String revisionId) {
        this.revisionId = revisionId;
    }

    /**
     * <p>
     * The unique identifier for the revision associated with this import request.
     * </p>
     *
     * @return The unique identifier for the revision associated with this import request.
     */
    public String getRevisionId() {
        return this.revisionId;
    }

    /**
     * <p>
     * The unique identifier for the revision associated with this import request.
     * </p>
     *
     * @param revisionId
     *        The unique identifier for the revision associated with this import request.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ImportAssetFromSignedUrlRequestDetails withRevisionId(String revisionId) {
        setRevisionId(revisionId);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getAssetName() != null)
            sb.append("AssetName: ").append(getAssetName()).append(",");
        if (getDataSetId() != null)
            sb.append("DataSetId: ").append(getDataSetId()).append(",");
        if (getMd5Hash() != null)
            sb.append("Md5Hash: ").append(getMd5Hash()).append(",");
        if (getRevisionId() != null)
            sb.append("RevisionId: ").append(getRevisionId());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (!(obj instanceof ImportAssetFromSignedUrlRequestDetails))
            return false;
        ImportAssetFromSignedUrlRequestDetails other = (ImportAssetFromSignedUrlRequestDetails) obj;
        // Objects.equals collapses the old null-XOR + equals pair per field.
        return Objects.equals(other.getAssetName(), this.getAssetName())
                && Objects.equals(other.getDataSetId(), this.getDataSetId())
                && Objects.equals(other.getMd5Hash(), this.getMd5Hash())
                && Objects.equals(other.getRevisionId(), this.getRevisionId());
    }

    @Override
    public int hashCode() {
        // Objects.hash uses the same seed-1 / factor-31 / null->0 accumulation
        // as the previous hand-rolled loop, so hash values are unchanged for
        // the same field order.
        return Objects.hash(getAssetName(), getDataSetId(), getMd5Hash(), getRevisionId());
    }

    @Override
    public ImportAssetFromSignedUrlRequestDetails clone() {
        try {
            return (ImportAssetFromSignedUrlRequestDetails) super.clone();
        } catch (CloneNotSupportedException e) {
            // Cannot happen: this class implements Cloneable.
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.dataexchange.model.transform.ImportAssetFromSignedUrlRequestDetailsMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
/* * Copyright 2012 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.drools.core.util; import java.io.Serializable; import java.util.Arrays; /** * @see http://en.literateprograms.org/Red-black_tree_%28Java%29 */ public class RBTree<K extends Comparable< ? super K>, V> implements Serializable { public static final boolean VERIFY_RBTREE = false; private static final int INDENT_STEP = 4; public Node<K, V> root; public RBTree() { root = null; verifyProperties(); } public void verifyProperties() { if ( VERIFY_RBTREE ) { verifyProperty1( root ); verifyProperty2( root ); // Property 3 is implicit verifyProperty4( root ); verifyProperty5( root ); } } private static void verifyProperty1(Node< ? , ? > n) { assert nodeColor( n ) == Color.RED || nodeColor( n ) == Color.BLACK; if ( n == null ) return; verifyProperty1( n.left ); verifyProperty1( n.right ); } private static void verifyProperty2(Node< ? , ? > root) { assert nodeColor( root ) == Color.BLACK; } private static Color nodeColor(Node< ? , ? > n) { return n == null ? Color.BLACK : n.color; } private static void verifyProperty4(Node< ? , ? > n) { if ( nodeColor( n ) == Color.RED ) { assert nodeColor( n.left ) == Color.BLACK; assert nodeColor( n.right ) == Color.BLACK; assert nodeColor( n.parent ) == Color.BLACK; } if ( n == null ) return; verifyProperty4( n.left ); verifyProperty4( n.right ); } private static void verifyProperty5(Node< ? , ? 
> root) {
    verifyProperty5Helper( root, 0, -1 );
}

// Red-black property 5: every root-to-leaf path contains the same number of
// black nodes. pathBlackCount is -1 until the first path completes, then every
// later path is asserted to match it.
private static int verifyProperty5Helper(Node< ? , ? > n, int blackCount, int pathBlackCount) {
    if ( nodeColor( n ) == Color.BLACK ) {
        blackCount++;
    }
    if ( n == null ) {
        // Reached a leaf (nil) node: record or check the black count of this path.
        if ( pathBlackCount == -1 ) {
            pathBlackCount = blackCount;
        } else {
            assert blackCount == pathBlackCount;
        }
        return pathBlackCount;
    }
    pathBlackCount = verifyProperty5Helper( n.left, blackCount, pathBlackCount );
    pathBlackCount = verifyProperty5Helper( n.right, blackCount, pathBlackCount );
    return pathBlackCount;
}

// Standard BST search from the root; returns null when the key is absent.
private Node<K, V> lookupNode(K key) {
    Node<K, V> n = root;
    while ( n != null ) {
        int compResult = key.compareTo( n.key );
        if ( compResult == 0 ) {
            return n;
        } else if ( compResult < 0 ) {
            n = n.left;
        } else {
            n = n.right;
        }
    }
    return n;
}

// Which end of a range a lookup is anchored at (see findNearestNode/range).
public enum Boundary {
    LOWER, UPPER
}

// In-order iterator over the tree, optionally bounded above by a node.
// Returns the current node and pre-computes the successor on each call.
public static class RBTreeFastIterator<K extends Comparable< ? super K>, V> implements FastIterator {

    private Node<K, V> upperBound; // inclusive upper bound; null means unbounded
    private Node<K, V> next;       // next node to hand out; null when exhausted

    public RBTreeFastIterator(Node<K, V> lowerBound, Node<K, V> upperBound) {
        this.next = lowerBound;
        this.upperBound = upperBound;
    }

    // Note: the 'object' argument is ignored; iteration state is internal.
    public Entry next(Entry object) {
        Node<K, V> temp = next;
        next = checkUpperBound( recurse( next ) );
        return temp;
    }

    public boolean isFullIterator() {
        return false;
    }

    // In-order successor: leftmost node of the right subtree, otherwise the
    // nearest ancestor of which 'current' lies in the left subtree.
    private Node<K, V> recurse(Node<K, V> current) {
        if (current == null) {
            return null;
        }
        if (current.right != null) {
            Node<K, V> p = current.right;
            while (p.left != null) {
                p = p.left;
            }
            return p;
        }
        Node<K, V> p = current.parent;
        Node<K, V> ch = current;
        while (p != null && ch == p.right) {
            ch = p;
            p = p.parent;
        }
        return p;
    }

    // Maps nodes past the upper bound to null, terminating the iteration.
    public Node<K, V> checkUpperBound(Node<K, V> current) {
        if (upperBound == null) {
            return current;
        }
        return current == null || current.compareTo(upperBound) > 0 ? null : current;
    }
}

public boolean isEmpty() {
    return root == null;
}

// Smallest key in the tree (leftmost node), or null when empty.
public Node<K, V> first() {
    if (root == null) {
        return null;
    }
    Node<K, V> n = root;
    while (n.left != null) {
        n = n.left;
    }
    return n;
}

// Largest key in the tree (rightmost node), or null when empty.
public Node<K, V> last() {
    if (root == null) {
        return null;
    }
    Node<K, V> n = root;
    while (n.right != null) {
        n = n.right;
    }
    return n;
}

// Full in-order iteration over the whole tree.
public FastIterator fastIterator() {
    return root == null ? FastIterator.EMPTY : new RBTreeFastIterator( first(), null );
}

// Iterates the nodes whose keys fall between the two bounds; the testXxxEqual
// flags control whether an exactly-matching node is included at each end.
public FastIterator range(K lowerBound, boolean testLowerEqual, K upperBound, boolean testUpperEqual) {
    Node<K, V> lowerNearest = findNearestNode( lowerBound, testLowerEqual, Boundary.LOWER );
    Node<K, V> upperNearest = findNearestNode( upperBound, testUpperEqual, Boundary.UPPER );
    if ( lowerNearest == null || upperNearest == null ) {
        return FastIterator.EMPTY;
    }
    // Empty range: collapse the upper bound onto the lower node.
    if ( lowerNearest.key.compareTo( upperNearest.key ) > 0 ) {
        upperNearest = lowerNearest;
    }
    return new RBTreeFastIterator( lowerNearest, upperNearest );
}

// Finds the node nearest to 'key' on the given side (smallest key >= 'key'
// for LOWER, largest key <= 'key' for UPPER). allowEqual returns an exact
// match immediately when one exists.
// NOTE(review): acceptNode(compResult, boundary) is evaluated twice per
// iteration (once into 'accepted', once inline) — redundant but harmless.
public Node<K, V> findNearestNode(K key, boolean allowEqual, Boundary boundary) {
    Node<K, V> nearest = null;
    Node<K, V> n = root;
    while ( n != null ) {
        int compResult = key.compareTo( n.key );
        if ( allowEqual && compResult == 0 ) {
            return n;
        }
        boolean accepted = acceptNode(compResult, boundary);
        if ( acceptNode( compResult, boundary ) && ( nearest == null || acceptNode( n.key.compareTo( nearest.key ), boundary ) ) ) {
            nearest = n;
        }
        if ( compResult == 0 ) {
            // Exact match disallowed: step past it toward the required side.
            n = boundary == Boundary.LOWER ? n.right : n.left;
        } else {
            n = accepted ^ boundary == Boundary.LOWER ? n.right : n.left;
        }
    }
    return nearest;
}

// True when a node on the given side of the comparison may serve as the
// nearest candidate (strictly greater for LOWER, strictly less for UPPER).
private boolean acceptNode(int compResult, Boundary boundary) {
    return compResult != 0 && ( compResult > 0 ^ boundary == Boundary.LOWER );
}

// Returns the value stored under 'key', or null when absent.
public V lookup(K key) {
    Node<K, V> n = lookupNode( key );
    return n == null ? null : n.value;
}

// Left rotation around n; n's right child takes n's place in the tree.
private void rotateLeft(Node<K, V> n) {
    Node<K, V> r = n.right;
    replaceNode( n, r );
    n.right = r.left;
    if ( r.left != null ) {
        r.left.parent = n;
    }
    r.left = n;
    n.parent = r;
}

// Right rotation around n; n's left child takes n's place in the tree.
private void rotateRight(Node<K, V> n) {
    Node<K, V> l = n.left;
    replaceNode( n, l );
    n.left = l.right;
    if ( l.right != null ) {
        l.right.parent = n;
    }
    l.right = n;
    n.parent = l;
}

// Splices newn into oldn's position under oldn's parent (newn may be null).
private void replaceNode(Node<K, V> oldn, Node<K, V> newn) {
    if ( oldn.parent == null ) {
        root = newn;
    } else {
        if ( oldn == oldn.parent.left )
            oldn.parent.left = newn;
        else
            oldn.parent.right = newn;
    }
    if ( newn != null ) {
        newn.parent = oldn.parent;
    }
}

// Inserts or replaces the mapping for 'key'. New nodes start RED and the
// insertCase* chain restores the red-black invariants.
public void insert(K key, V value) {
    Node<K, V> insertedNode = new Node<K, V>( key, value, Color.RED, null, null );
    if ( root == null ) {
        root = insertedNode;
    } else {
        Node<K, V> n = root;
        while ( true ) {
            int compResult = key.compareTo( n.key );
            if ( compResult == 0 ) {
                // Key already present: overwrite the value, no rebalancing needed.
                n.value = value;
                return;
            } else if ( compResult < 0 ) {
                if ( n.left == null ) {
                    n.left = insertedNode;
                    break;
                } else {
                    n = n.left;
                }
            } else {
                if ( n.right == null ) {
                    n.right = insertedNode;
                    break;
                } else {
                    n = n.right;
                }
            }
        }
        insertedNode.parent = n;
    }
    insertCase1( insertedNode );
    verifyProperties();
}

// Insert case 1: new node is the root — repaint it black.
private void insertCase1(Node<K, V> n) {
    if ( n.parent == null )
        n.color = Color.BLACK;
    else
        insertCase2( n );
}

// Insert case 2: black parent — no violation.
private void insertCase2(Node<K, V> n) {
    if ( nodeColor( n.parent ) == Color.BLACK )
        return; // Tree is still valid
    else
        insertCase3( n );
}

// Insert case 3: red parent and red uncle — recolor and recurse at grandparent.
void insertCase3(Node<K, V> n) {
    if ( nodeColor( n.uncle() ) == Color.RED ) {
        n.parent.color = Color.BLACK;
        n.uncle().color = Color.BLACK;
        n.grandparent().color = Color.RED;
        insertCase1( n.grandparent() );
    } else {
        insertCase4( n );
    }
}

// Insert case 4: node and parent on opposite sides — rotate into case 5.
void insertCase4(Node<K, V> n) {
    if ( n == n.parent.right && n.parent == n.grandparent().left ) {
        rotateLeft( n.parent );
        n = n.left;
    } else if ( n == n.parent.left && n.parent == n.grandparent().right ) {
        rotateRight( n.parent );
        n = n.right;
    }
    insertCase5(n);
}

// Insert case 5: node and parent on the same side — recolor and rotate grandparent.
void insertCase5(Node<K, V> n) {
    n.parent.color = Color.BLACK;
    n.grandparent().color = Color.RED;
    if ( n == n.parent.left && n.parent == n.grandparent().left ) {
        rotateRight( n.grandparent() );
    } else {
        rotateLeft( n.grandparent() );
    }
}

// Removes the mapping for 'key', if present, rebalancing via the deleteCase*
// chain when a black node is removed.
public void delete(K key) {
    Node<K, V> n = lookupNode( key );
    if ( n == null )
        return; // Key not found, do nothing
    if ( n.left != null && n.right != null ) {
        // Copy key/value from predecessor and then delete it instead
        Node<K, V> pred = maximumNode( n.left );
        n.key = pred.key;
        n.value = pred.value;
        n = pred;
    }
    Node<K, V> child = (n.right == null) ? n.left : n.right;
    if ( nodeColor( n ) == Color.BLACK ) {
        n.color = nodeColor( child );
        deleteCase1( n );
    }
    replaceNode( n, child );
    if ( nodeColor( root ) == Color.RED ) {
        root.color = Color.BLACK;
    }
    verifyProperties();
}

// Rightmost (maximum-key) node of the subtree rooted at n; n must be non-null.
private static <K extends Comparable< ? super K>, V> Node<K, V> maximumNode(Node<K, V> n) {
    while ( n.right != null ) {
        n = n.right;
    }
    return n;
}

// Delete case 1: n is the new root — nothing to fix.
private void deleteCase1(Node<K, V> n) {
    if ( n.parent == null )
        return;
    else
        deleteCase2( n );
}

// Delete case 2: red sibling — rotate so the sibling becomes black.
private void deleteCase2(Node<K, V> n) {
    if ( nodeColor( n.sibling() ) == Color.RED ) {
        n.parent.color = Color.RED;
        n.sibling().color = Color.BLACK;
        if ( n == n.parent.left )
            rotateLeft( n.parent );
        else
            rotateRight( n.parent );
    }
    deleteCase3( n );
}

// Delete case 3: parent, sibling and sibling's children all black —
// repaint the sibling red and recurse one level up.
private void deleteCase3(Node<K, V> n) {
    if ( nodeColor( n.parent ) == Color.BLACK && nodeColor( n.sibling() ) == Color.BLACK && nodeColor( n.sibling().left ) == Color.BLACK && nodeColor( n.sibling().right ) == Color.BLACK ) {
        n.sibling().color = Color.RED;
        deleteCase1( n.parent );
    } else
        deleteCase4( n );
}

// Delete case 4: red parent, black sibling with black children — swap colors.
private void deleteCase4(Node<K, V> n) {
    if ( nodeColor( n.parent ) == Color.RED && nodeColor( n.sibling() ) == Color.BLACK && nodeColor( n.sibling().left ) == Color.BLACK && nodeColor( n.sibling().right ) == Color.BLACK ) {
        n.sibling().color = Color.RED;
        n.parent.color = Color.BLACK;
    } else
        deleteCase5( n );
}

// Delete case 5: rotate the sibling so its red child is on the outer side.
private void deleteCase5(Node<K, V> n) {
    if ( n == n.parent.left && nodeColor( n.sibling() ) == Color.BLACK && nodeColor( n.sibling().left ) == Color.RED && nodeColor( n.sibling().right ) == Color.BLACK ) {
        n.sibling().color = Color.RED;
        n.sibling().left.color = Color.BLACK;
        rotateRight( n.sibling() );
    } else if ( n == n.parent.right && nodeColor( n.sibling() ) == Color.BLACK && nodeColor( n.sibling().right ) == Color.RED && nodeColor( n.sibling().left ) == Color.BLACK ) {
        n.sibling().color = Color.RED;
        n.sibling().right.color = Color.BLACK;
        rotateLeft( n.sibling() );
    }
    deleteCase6( n );
}

// Delete case 6: final recolor + rotation around the parent.
private void deleteCase6(Node<K, V> n) {
    n.sibling().color = nodeColor( n.parent );
    n.parent.color = Color.BLACK;
    if ( n == n.parent.left ) {
        n.sibling().right.color = Color.BLACK;
        rotateLeft( n.parent );
    } else {
        n.sibling().left.color = Color.BLACK;
        rotateRight( n.parent );
    }
}

// Debug helper: dumps the tree to stdout, right subtree first, indented by
// depth; red nodes are printed in angle brackets.
public void print() {
    printHelper( root, 0 );
}

private static void printHelper(Node< ? , ? > n, int indent) {
    if ( n == null ) {
        System.out.print( "<empty tree>" );
        return;
    }
    if ( n.right != null ) {
        printHelper( n.right, indent + INDENT_STEP );
    }
    for ( int i = 0; i < indent; i++ ) {
        System.out.print( " " );
    }
    if ( n.color == Color.BLACK ) {
        System.out.println( n.key );
    } else {
        System.out.println( "<" + n.key + ">" );
    }
    if ( n.left != null ) {
        printHelper( n.left, indent + INDENT_STEP );
    }
}

public enum Color {
    RED, BLACK
}

// Tree node. Fields are public and mutable; delete() rewrites key/value in
// place when removing a two-child node.
public static class Node<K extends Comparable< ? super K>, V> implements Entry, Comparable<Node<K, V>> {

    public K key;
    public V value;
    public Node<K, V> left;
    public Node<K, V> right;
    public Node<K, V> parent;
    public Color color;

    public Node(K key, V value, Color nodeColor, Node<K, V> left, Node<K, V> right) {
        this.key = key;
        this.value = value;
        this.color = nodeColor;
        this.left = left;
        this.right = right;
        if ( left != null ) left.parent = this;
        if ( right != null ) right.parent = this;
        this.parent = null;
    }

    // NOTE(review): grandparent/sibling/uncle NPE when called near the root;
    // callers are expected to guarantee the ancestors exist.
    public Node<K, V> grandparent() {
        return parent.parent;
    }

    public Node<K, V> sibling() {
        if ( this == parent.left )
            return parent.right;
        else
            return parent.left;
    }

    public Node<K, V> uncle() {
        return parent.sibling();
    }

    public String toString() {
        return "Node key=" + key + " value=" + value;
    }

    public void setNext(Entry next) {
        // TODO Auto-generated method stub
    }

    public Entry getNext() {
        // TODO Auto-generated method stub
        return null;
    }

    public int compareTo(Node<K, V> other) {
        return key.compareTo(other.key);
    }
}
}
/** * Project: richContentMediaSearchService * ROLE-Project * authors: daniel.dahrendorf@im-c.de, julian.weber@im-c.de * This software uses the GNU GPL */ package de.imc.advancedMediaSearch.target; import java.io.IOException; import java.net.MalformedURLException; import java.net.URL; import java.util.Date; import org.apache.log4j.Logger; import org.w3c.dom.Document; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import de.imc.advancedMediaSearch.helpers.ConversionHelper; import de.imc.advancedMediaSearch.http.ApacheHttpClient; import de.imc.advancedMediaSearch.http.RESTHttpClient; import de.imc.advancedMediaSearch.properties.AMSPropertyManager; import de.imc.advancedMediaSearch.result.LearningList; import de.imc.advancedMediaSearch.result.MediaType; import de.imc.advancedMediaSearch.result.ResultEntity; import de.imc.advancedMediaSearch.result.ResultSet; import de.imc.advancedMediaSearch.result.ResultTag; import de.imc.advancedMediaSearch.result.ResultThumbnail; /** * This class represents the connection to the richMediaContentList Service of * the ROLE Project * * @author julian.weber@im-c.de * */ public class LearningListTarget extends Target { private static Logger logger = Logger.getLogger(LearningListTarget.class); private static final String DEFAULT_BASEURL = "http://role-demo.de:8080/mediaListService/"; private String baseurl = AMSPropertyManager.getInstance() .getStringValue("de.imc.advancedMediaSearch.baseurls.learninglist", DEFAULT_BASEURL); private String listurl = baseurl + "lists/"; public static final String ID = "mediaListService"; public void initializeMetaData() { name = "MediaListService"; url = "http://role-demo.de:8080/mediaListService"; mediaTypeIconUrl = ""; // TODO: insert url description = "The Media List Service provides learning lists on various topics."; String[] mTypes = { MediaType.LIST.toString() }; mediaTypes = mTypes; iconUrl = AMSPropertyManager .getInstance() .getStringValue( 
"de.imc.advancedMediaSearch.iconurls.learninglist", "http://role-demo.de:8080/richMediaContentSearchResources/icons/list.ico"); } public LearningListTarget() { super(); initializeMetaData(); } public LearningListTarget(int maxDuration, int maxQueryResults) { super(maxDuration, maxQueryResults); initializeMetaData(); } // TODO: Implement LearningListTarget functions /** * IMPORTANT: add new Target to Aggregator initialization (query argument * analysis, ...) */ private ResultSet executeQuery(String url) { RESTHttpClient client = new ApacheHttpClient(); Date startDate = new Date(); try { // execute http get Document doc = client.executeGETURL(new URL(url)); // generate Results ResultSet s = parseXMLResponse(doc); // return results return s; } catch (MalformedURLException e) { logger.error("An exception occured while executing the query: " + e.getMessage()); e.printStackTrace(); } catch (IOException e) { logger.error("An exception occured while executing the query: " + e.getMessage()); e.printStackTrace(); } // return empty list on errors ResultSet set = new ResultSet(); set.addSourceRepository(this.getUrl()); set = filterResult(set); // calculate search time Date stopDate = new Date(); long searchtime = stopDate.getTime() - startDate.getTime(); logger.debug("mediaListService searchtime: " + searchtime); return set; } /** * @param doc * @return */ private ResultSet parseXMLResponse(Document doc) { if (doc == null) { // return empty list on null arguments ResultSet set = new ResultSet(); set.addSourceRepository(this.getUrl()); return set; } // initialize resultset ResultSet results = new ResultSet(); results.addSourceRepository(this.getUrl()); NodeList listsList = doc.getElementsByTagName("list"); // iterate through list items for (int i = 0; i < listsList.getLength(); i++) { Node node = listsList.item(i); LearningList newList = new LearningList(); newList.setSource(this.getUrl()); // get item nodes child nodes NodeList nodesChilds = node.getChildNodes(); // iterate 
through child nodes for (int j = 0; j < nodesChilds.getLength(); j++) { Node subNode = nodesChilds.item(j); String subNodeName = subNode.getNodeName().toLowerCase(); // title if (subNodeName.equals("list_title")) { if (subNode != null) { if (subNode.getFirstChild() != null) { newList.setTitle(subNode.getFirstChild() .getNodeValue()); } } } // id if (subNodeName.equals("list_id")) { if (subNode != null) { if (subNode.getFirstChild() != null) { String retrievedId = subNode.getFirstChild() .getNodeValue(); newList.setId(ConversionHelper.convertToInt( retrievedId, 0, false)); newList.setUrl(listurl + ConversionHelper.convertToInt( retrievedId, 0, false)); } } } // description if (subNodeName.equals("list_description")) { if (subNode != null) { if (subNode.getFirstChild() != null) { newList.setDescription(subNode.getFirstChild() .getNodeValue()); } } } } // iterate through all items of the list here -> add them to the // learning list if (newList != null && newList.getId() != 0) { String itemString = "item" + newList.getId(); // select all item(Nr of list) nodes NodeList itemNodes = doc.getElementsByTagName(itemString); for (int l = 0; l < itemNodes.getLength(); l++) { Node actualItemNode = itemNodes.item(l); NodeList itemSubNodes = actualItemNode.getChildNodes(); ResultEntity myItem = new ResultEntity(); for (int k = 0; k < itemSubNodes.getLength(); k++) { Node actualItemSubNode = itemSubNodes.item(k); String subNodeName = actualItemSubNode.getNodeName() .toLowerCase(); if (subNodeName.equals("item_title")) { if (actualItemSubNode.getFirstChild() != null) { myItem.setTitle(actualItemSubNode .getFirstChild().getNodeValue()); insertTagsBasedOnTitle(myItem); } } if (subNodeName.equals("item_description")) { if (actualItemSubNode.getFirstChild() != null) { myItem.setDescription(actualItemSubNode .getFirstChild().getNodeValue()); } } if (subNodeName.equals("url_content")) { if (actualItemSubNode.getFirstChild() != null) { String urlString = actualItemSubNode 
.getFirstChild().getNodeValue(); // prevent false linkage here if (!(urlString.startsWith("http") || urlString.startsWith("https") || urlString.startsWith("ftp") || urlString .startsWith("ftps"))) { urlString = "http://" + urlString; } myItem.setUrl(urlString); } } if (subNodeName.equals("image_url")) { if (actualItemSubNode.getFirstChild() != null) { try { ResultThumbnail tn = new ResultThumbnail(0, 0, new URL(actualItemSubNode .getFirstChild() .getNodeValue())); myItem.setThumbnail(tn); } catch (Exception e) { } } } } newList.addEntity(myItem); // add tags from item to new list for (ResultTag t : myItem.getTags()) { newList.addTag(t); } } } results.add(newList); } results = filterResult(results); return results; } /* * (non-Javadoc) * * @see * de.imc.advancedMediaSearch.target.Target#searchByTags(java.lang.String, * de.imc.advancedMediaSearch.target.QueryArguments) */ @Override public ResultSet searchByTags(String tagQuery, QueryArguments args) { /** * use full text search here */ return searchByFullTextQuery(tagQuery, args); } /* * (non-Javadoc) * * @see * de.imc.advancedMediaSearch.target.Target#searchByFullTextQuery(java.lang * .String, de.imc.advancedMediaSearch.target.QueryArguments) */ @Override public ResultSet searchByFullTextQuery(String searchTermQuery, QueryArguments args) { if (searchTermQuery == null || searchTermQuery.equals("")) { return null; } String escapedQueryString = encodeQueryString(searchTermQuery); String queryUrl = baseurl + "search/" + escapedQueryString; return executeQuery(queryUrl); } /* * (non-Javadoc) * * @see * de.imc.advancedMediaSearch.target.Target#searchByAuthor(java.lang.String, * de.imc.advancedMediaSearch.target.QueryArguments) */ @Override public ResultSet searchByAuthor(String authorQuery, QueryArguments args) { /** * use full text search here */ return searchByFullTextQuery(encodeQueryString(authorQuery), args); } /* * (non-Javadoc) * * @see de.imc.advancedMediaSearch.target.Target#getId() */ @Override public String 
getId() { return ID; } private void insertTagsBasedOnTitle(ResultEntity e) { if (e != null) { if (e.getTitle() != null && !e.getTitle().equals("")) { String[] splitted = e.getTitle().split(" "); for (String s : splitted) { e.addTag(new ResultTag(s)); } } } } }
/* * The MIT License * * Copyright (c) 2013-2016 reark project contributors * * https://github.com/reark/reark/graphs/contributors * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package io.reark.reark.network.fetchers; import android.support.annotation.NonNull; import android.support.annotation.Nullable; import android.support.v4.util.Pair; import java.util.HashSet; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import io.reark.reark.pojo.NetworkRequestStatus; import io.reark.reark.utils.Log; import io.reark.reark.utils.ObjectLockHandler; import retrofit2.adapter.rxjava.HttpException; import rx.Subscription; import rx.functions.Action1; import static io.reark.reark.utils.Preconditions.checkNotNull; import static io.reark.reark.utils.Preconditions.get; /** * Base class for Fetchers. The class implements tracking of request listeners and duplicate * requests. * * @param <T> Type of the Service Uri used by the application. 
*/ public abstract class FetcherBase<T> implements Fetcher<T> { private static final String TAG = FetcherBase.class.getSimpleName(); private static final int NO_ERROR_CODE = -1; @NonNull private final Action1<NetworkRequestStatus> updateNetworkRequestStatus; @NonNull private final Map<Integer, Set<Integer>> listeners = new ConcurrentHashMap<>(20, 0.75f, 2); @NonNull private final Map<Integer, Subscription> requests = new ConcurrentHashMap<>(20, 0.75f, 2); @NonNull private final ObjectLockHandler<Integer> locker = new ObjectLockHandler<>(); protected FetcherBase(@NonNull Action1<NetworkRequestStatus> updateNetworkRequestStatus) { this.updateNetworkRequestStatus = get(updateNetworkRequestStatus); } protected void startRequest(int requestId, @NonNull String uri) { Log.v(TAG, String.format("startRequest(%s, %s)", requestId, get(uri))); lock(requestId); updateNetworkRequestStatus.call(new NetworkRequestStatus.Builder() .uri(uri) .listeners(getListeners(requestId)) .ongoing() .build()); release(requestId); } protected void completeRequest(int requestId, @NonNull String uri) { Log.v(TAG, String.format("completeRequest(%s, %s)", requestId, get(uri))); lock(requestId); updateNetworkRequestStatus.call(new NetworkRequestStatus.Builder() .uri(uri) .listeners(getListeners(requestId)) .completed() .build()); release(requestId); } protected void errorRequest(int requestId, @NonNull String uri, int errorCode, @Nullable String errorMessage) { Log.v(TAG, String.format("errorRequest(%s, %s, %s, %s)", requestId, get(uri), errorCode, errorMessage)); lock(requestId); updateNetworkRequestStatus.call(new NetworkRequestStatus.Builder() .uri(uri) .listeners(getListeners(requestId)) .error() .errorCode(errorCode) .errorMessage(errorMessage) .build()); release(requestId); } protected void addRequest(int requestId, @NonNull Subscription subscription) { Log.v(TAG, String.format("addRequest(%s)", requestId)); checkNotNull(subscription); lock(requestId); if (requests.containsKey(requestId)) { 
Subscription oldRequest = requests.remove(requestId); if (!oldRequest.isUnsubscribed()) { Log.w(TAG, "Unexpected subscribed request " + requestId); oldRequest.unsubscribe(); } } requests.put(requestId, subscription); release(requestId); } protected void addListener(int requestId, int listenerId) { Log.v(TAG, String.format("addListener(%s, %s)", requestId, listenerId)); lock(requestId); Set<Integer> newListeners = createListener(listenerId); if (requests.containsKey(requestId)) { newListeners.addAll(listeners.get(requestId)); } listeners.put(requestId, newListeners); release(requestId); } protected boolean isOngoingRequest(int requestId) { Log.v(TAG, String.format("isOngoingRequest(%s)", requestId)); lock(requestId); boolean isOngoing = requests.containsKey(requestId) && !requests.get(requestId).isUnsubscribed(); release(requestId); return isOngoing; } @NonNull private Set<Integer> getListeners(int requestId) { return listeners.get(requestId); } @NonNull private static Set<Integer> createListener(int listenerId) { Set<Integer> set = new HashSet<>(1); set.add(listenerId); return set; } @NonNull protected Action1<Throwable> doOnError(int requestId, @NonNull String uri) { checkNotNull(uri); return throwable -> { if (throwable instanceof HttpException) { HttpException httpException = (HttpException) throwable; int statusCode = httpException.code(); errorRequest(requestId, uri, statusCode, httpException.getMessage()); } else { Log.w(TAG, "The error was not a RetrofitError"); errorRequest(requestId, uri, NO_ERROR_CODE, null); } }; } private void lock(int id) { try { locker.acquire(id); } catch (InterruptedException e) { Log.e(TAG, "Lock acquisition failed!", e); } } private void release(int id) { locker.release(id); } }
/* * Copyright (C) 2011 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package androidx.core.widget; import static android.os.Build.VERSION.SDK_INT; import android.content.Context; import android.graphics.Canvas; import android.util.AttributeSet; import android.view.View; import android.widget.EdgeEffect; import android.widget.OverScroller; import android.widget.Scroller; import androidx.annotation.DoNotInline; import androidx.annotation.NonNull; import androidx.annotation.Nullable; import androidx.annotation.RequiresApi; /** * Helper for accessing {@link EdgeEffect}. * * This class is used to access {@link EdgeEffect} on platform versions * that support it. When running on older platforms it will result in no-ops. It should * be used by views that wish to use the standard Android visual effects at the edges * of scrolling containers. */ public final class EdgeEffectCompat { private final EdgeEffect mEdgeEffect; /** * Construct a new EdgeEffect themed using the given context. * * <p>Note: On platform versions that do not support EdgeEffect, all operations * on the newly constructed object will be mocked/no-ops.</p> * * @param context Context to use for theming the effect * * @deprecated Use {@link EdgeEffect} constructor directly or * {@link EdgeEffectCompat#create(Context, AttributeSet)}. 
*/ @Deprecated public EdgeEffectCompat(Context context) { mEdgeEffect = new EdgeEffect(context); } /** * Constructs and returns a new EdgeEffect themed using the given context, allowing support * for the view attributes. * * @param context Context to use for theming the effect * @param attrs The attributes of the XML tag that is inflating the view */ @NonNull public static EdgeEffect create(@NonNull Context context, @Nullable AttributeSet attrs) { if (SDK_INT >= 31) { return Api31Impl.create(context, attrs); } return new EdgeEffect(context); } /** * Returns the pull distance needed to be released to remove the showing effect. * It is determined by the {@link #onPull(float, float)} <code>deltaDistance</code> and * any animating values, including from {@link #onAbsorb(int)} and {@link #onRelease()}. * * This can be used in conjunction with {@link #onPullDistance(EdgeEffect, float, float)} to * release the currently showing effect. * * On API level 30 and earlier, this will return 0. * * @return The pull distance that must be released to remove the showing effect or 0 for * API level 30 and earlier. */ public static float getDistance(@NonNull EdgeEffect edgeEffect) { if (SDK_INT >= 31) { return Api31Impl.getDistance(edgeEffect); } return 0; } /** * Set the size of this edge effect in pixels. * * @param width Effect width in pixels * @param height Effect height in pixels * * @deprecated Use {@link EdgeEffect#setSize(int, int)} directly. */ @Deprecated public void setSize(int width, int height) { mEdgeEffect.setSize(width, height); } /** * Reports if this EdgeEffectCompat's animation is finished. If this method returns false * after a call to {@link #draw(Canvas)} the host widget should schedule another * drawing pass to continue the animation. * * @return true if animation is finished, false if drawing should continue on the next frame. * * @deprecated Use {@link EdgeEffect#isFinished()} directly. 
*/ @Deprecated public boolean isFinished() { return mEdgeEffect.isFinished(); } /** * Immediately finish the current animation. * After this call {@link #isFinished()} will return true. * * @deprecated Use {@link EdgeEffect#finish()} directly. */ @Deprecated public void finish() { mEdgeEffect.finish(); } /** * A view should call this when content is pulled away from an edge by the user. * This will update the state of the current visual effect and its associated animation. * The host view should always {@link View#invalidate()} if this method * returns true and draw the results accordingly. * * @param deltaDistance Change in distance since the last call. Values may be 0 (no change) to * 1.f (full length of the view) or negative values to express change * back toward the edge reached to initiate the effect. * @return true if the host view should call invalidate, false if it should not. * * @deprecated Use {@link #onPull(EdgeEffect, float, float)}. */ @Deprecated public boolean onPull(float deltaDistance) { mEdgeEffect.onPull(deltaDistance); return true; } /** * A view should call this when content is pulled away from an edge by the user. * This will update the state of the current visual effect and its associated animation. * The host view should always {@link View#invalidate()} if this method * returns true and draw the results accordingly. * * Views using {@link EdgeEffect} should favor {@link EdgeEffect#onPull(float, float)} when * the displacement of the pull point is known. * * @param deltaDistance Change in distance since the last call. Values may be 0 (no change) to * 1.f (full length of the view) or negative values to express change * back toward the edge reached to initiate the effect. * @param displacement The displacement from the starting side of the effect of the point * initiating the pull. In the case of touch this is the finger position. * Values may be from 0-1. * @return true if the host view should call invalidate, false if it should not. 
* * @deprecated Use {@link EdgeEffect#onPull(float)} directly. */ @Deprecated public boolean onPull(float deltaDistance, float displacement) { onPull(mEdgeEffect, deltaDistance, displacement); return true; } /** * A view should call this when content is pulled away from an edge by the user. * This will update the state of the current visual effect and its associated animation. * The host view should always {@link View#invalidate()} after call this method * and draw the results accordingly. * * @param edgeEffect The EdgeEffect that is attached to the view that is getting pulled away * from an edge by the user. * @param deltaDistance Change in distance since the last call. Values may be 0 (no change) to * 1.f (full length of the view) or negative values to express change * back toward the edge reached to initiate the effect. * @param displacement The displacement from the starting side of the effect of the point * initiating the pull. In the case of touch this is the finger position. * Values may be from 0-1. * * @see EdgeEffect#onPull(float, float) */ public static void onPull(@NonNull EdgeEffect edgeEffect, float deltaDistance, float displacement) { if (SDK_INT >= 21) { Api21Impl.onPull(edgeEffect, deltaDistance, displacement); } else { edgeEffect.onPull(deltaDistance); } } /** * A view should call this when content is pulled away from an edge by the user. * This will update the state of the current visual effect and its associated animation. * The host view should always {@link View#invalidate()} after this * and draw the results accordingly. This works similarly to {@link #onPull(float, float)}, * but returns the amount of <code>deltaDistance</code> that has been consumed. * * For API level 31 and above, if the {@link #getDistance(EdgeEffect)} is currently 0 and * <code>deltaDistance</code> is negative, this function will return 0 and the drawn value * will remain unchanged. 
* * For API level 30 and below, this will consume all of the provided value and return * <code>deltaDistance</code>. * * This method can be used to reverse the effect from a pull or absorb and partially consume * some of a motion: * * <pre class="prettyprint"> * if (deltaY < 0 && EdgeEffectCompat.getDistance(edgeEffect) != 0) { * float displacement = x / getWidth(); * float dist = deltaY / getHeight(); * float consumed = EdgeEffectCompat.onPullDistance(edgeEffect, dist, displacement); * deltaY -= consumed * getHeight(); * if (edgeEffect.getDistance() == 0f) edgeEffect.onRelease(); * } * </pre> * * @param deltaDistance Change in distance since the last call. Values may be 0 (no change) to * 1.f (full length of the view) or negative values to express change * back toward the edge reached to initiate the effect. * @param displacement The displacement from the starting side of the effect of the point * initiating the pull. In the case of touch this is the finger position. * Values may be from 0-1. * @return The amount of <code>deltaDistance</code> that was consumed, a number between * 0 and <code>deltaDistance</code>. */ public static float onPullDistance( @NonNull EdgeEffect edgeEffect, float deltaDistance, float displacement ) { if (SDK_INT >= 31) { return Api31Impl.onPullDistance(edgeEffect, deltaDistance, displacement); } onPull(edgeEffect, deltaDistance, displacement); return deltaDistance; } /** * Call when the object is released after being pulled. * This will begin the "decay" phase of the effect. After calling this method * the host view should {@link View#invalidate()} if this method * returns true and thereby draw the results accordingly. * * @return true if the host view should invalidate, false if it should not. * * @deprecated Use {@link EdgeEffect#onRelease()} directly. */ @Deprecated public boolean onRelease() { mEdgeEffect.onRelease(); return mEdgeEffect.isFinished(); } /** * Call when the effect absorbs an impact at the given velocity. 
* Used when a fling reaches the scroll boundary. * * <p>When using a {@link Scroller} or {@link OverScroller}, * the method <code>getCurrVelocity</code> will provide a reasonable approximation * to use here.</p> * * @param velocity Velocity at impact in pixels per second. * @return true if the host view should invalidate, false if it should not. * * @deprecated Use {@link EdgeEffect#onAbsorb(int)} directly. */ @Deprecated public boolean onAbsorb(int velocity) { mEdgeEffect.onAbsorb(velocity); return true; } /** * Draw into the provided canvas. Assumes that the canvas has been rotated * accordingly and the size has been set. The effect will be drawn the full * width of X=0 to X=width, beginning from Y=0 and extending to some factor < * 1.f of height. * * @param canvas Canvas to draw into * @return true if drawing should continue beyond this frame to continue the * animation * * @deprecated Use {@link EdgeEffect#draw(Canvas)} directly. */ @Deprecated public boolean draw(Canvas canvas) { return mEdgeEffect.draw(canvas); } @RequiresApi(31) private static class Api31Impl { private Api31Impl() {} @DoNotInline public static EdgeEffect create(Context context, AttributeSet attrs) { try { return new EdgeEffect(context, attrs); } catch (Throwable t) { return new EdgeEffect(context); // Old preview release } } @DoNotInline public static float onPullDistance( EdgeEffect edgeEffect, float deltaDistance, float displacement ) { try { return edgeEffect.onPullDistance(deltaDistance, displacement); } catch (Throwable t) { edgeEffect.onPull(deltaDistance, displacement); // Old preview release return 0; } } @DoNotInline public static float getDistance(EdgeEffect edgeEffect) { try { return edgeEffect.getDistance(); } catch (Throwable t) { return 0; // Old preview release } } } @RequiresApi(21) static class Api21Impl { private Api21Impl() { // This class is not instantiable. 
} @DoNotInline static void onPull(EdgeEffect edgeEffect, float deltaDistance, float displacement) { edgeEffect.onPull(deltaDistance, displacement); } } }
package org.codehaus.mojo.jaxb2.schemageneration.postprocessing.javadoc; import org.codehaus.mojo.jaxb2.BufferingLog; import org.codehaus.mojo.jaxb2.schemageneration.postprocessing.javadoc.location.ClassLocation; import org.codehaus.mojo.jaxb2.schemageneration.postprocessing.javadoc.location.FieldLocation; import org.codehaus.mojo.jaxb2.schemageneration.postprocessing.javadoc.location.MethodLocation; import org.codehaus.mojo.jaxb2.schemageneration.postprocessing.javadoc.location.PackageLocation; import org.codehaus.mojo.jaxb2.shared.FileSystemUtilities; import org.codehaus.mojo.jaxb2.shared.Validate; import org.codehaus.mojo.jaxb2.shared.filters.Filter; import org.codehaus.mojo.jaxb2.shared.filters.Filters; import org.codehaus.mojo.jaxb2.shared.filters.pattern.PatternFileFilter; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import java.io.File; import java.net.URL; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.SortedMap; import java.util.TreeMap; import org.junit.Ignore; /** * @author <a href="mailto:lj@jguru.se">Lennart J&ouml;relid</a>, jGuru Europe AB */ public class JavaDocExtractorTest { // Shared state private File javaDocBasicDir; private File javaDocAnnotatedDir; private File javaDocEnumsDir; private File javaDocXmlWrappersDir; private BufferingLog log; @Before public void setupSharedState() { log = new BufferingLog(BufferingLog.LogLevel.DEBUG); // Find the desired directory final URL dirURL = getClass() .getClassLoader() .getResource("testdata/schemageneration/javadoc/basic"); this.javaDocBasicDir = new File(dirURL.getPath()); Assert.assertTrue(javaDocBasicDir.exists() && javaDocBasicDir.isDirectory()); final URL annotatedDirURL = getClass() .getClassLoader() .getResource("testdata/schemageneration/javadoc/annotated"); this.javaDocAnnotatedDir = new File(annotatedDirURL.getPath()); Assert.assertTrue(javaDocAnnotatedDir.exists() && 
javaDocAnnotatedDir.isDirectory()); final URL enumsDirURL = getClass() .getClassLoader() .getResource("testdata/schemageneration/javadoc/enums"); this.javaDocEnumsDir = new File(enumsDirURL.getPath()); Assert.assertTrue(javaDocEnumsDir.exists() && javaDocEnumsDir.isDirectory()); final URL wrappersDirURL = getClass() .getClassLoader() .getResource("testdata/schemageneration/javadoc/xmlwrappers"); this.javaDocXmlWrappersDir = new File(wrappersDirURL.getPath()); Assert.assertTrue(javaDocXmlWrappersDir.exists() && javaDocXmlWrappersDir.isDirectory()); } @Test public void validateLogStatementsDuringProcessing() { // Assemble final JavaDocExtractor unitUnderTest = new JavaDocExtractor(log); final List<File> sourceDirs = Arrays.<File>asList(javaDocBasicDir); final List<File> sourceFiles = FileSystemUtilities.resolveRecursively(sourceDirs, null, log); // Act unitUnderTest.addSourceFiles(sourceFiles); final SearchableDocumentation ignoredResult = unitUnderTest.process(); // Assert final SortedMap<String, Throwable> logBuffer = log.getLogBuffer(); final List<String> keys = new ArrayList<String>(logBuffer.keySet()); /* * 000: (DEBUG) Accepted file [/Users/lj/Development/Projects/Codehaus/github_jaxb2_plugin/target/test-classes/testdata/schemageneration/javadoc/basic/NodeProcessor.java], * 001: (INFO) Processing [1] java sources., * 002: (DEBUG) Added package-level JavaDoc for [basic], * 003: (DEBUG) Added class-level JavaDoc for [basic.NodeProcessor], * 004: (DEBUG) Added method-level JavaDoc for [basic.NodeProcessor#accept(org.w3c.dom.Node)], * 005: (DEBUG) Added method-level JavaDoc for [basic.NodeProcessor#process(org.w3c.dom.Node)]] */ Assert.assertEquals(6, keys.size()); Assert.assertEquals("001: (INFO) Processing [1] java sources.", keys.get(1)); Assert.assertEquals("002: (DEBUG) Added package-level JavaDoc for [basic]", keys.get(2)); Assert.assertEquals("003: (DEBUG) Added class-level JavaDoc for [basic.NodeProcessor]", keys.get(3)); Assert.assertEquals("004: (DEBUG) 
Added method-level JavaDoc for [basic.NodeProcessor#accept(org.w3c.dom.Node)]", keys.get(4)); Assert.assertEquals("005: (DEBUG) Added method-level JavaDoc for [basic.NodeProcessor#process(org.w3c.dom.Node)]", keys.get(5)); } @Test @Ignore public void validateExtractingXmlAnnotatedName() throws Exception { // Assemble final JavaDocExtractor unitUnderTest = new JavaDocExtractor(log); // Act final SearchableDocumentation result = getSearchableDocumentationFor(unitUnderTest, 2, javaDocAnnotatedDir); // Assert final String prefix = "testdata.schemageneration.javadoc.annotated."; final String fieldAccessPrefix = prefix + "AnnotatedXmlNameAnnotatedClassWithFieldAccessTypeName#"; final String methodAccessPrefix = prefix + "AnnotatedXmlNameAnnotatedClassWithMethodAccessTypeName#"; // First, check the field-annotated class. final SortableLocation stringFieldLocation = result.getLocation(fieldAccessPrefix + "annotatedStringField"); final SortableLocation integerFieldLocation = result.getLocation(fieldAccessPrefix + "annotatedIntegerField"); final SortableLocation stringMethodLocation = result.getLocation(fieldAccessPrefix + "getStringField()"); final SortableLocation integerMethodLocation = result.getLocation(fieldAccessPrefix + "getIntegerField()"); Assert.assertTrue(stringFieldLocation instanceof FieldLocation); Assert.assertTrue(integerFieldLocation instanceof FieldLocation); Assert.assertTrue(stringMethodLocation instanceof MethodLocation); Assert.assertTrue(integerMethodLocation instanceof MethodLocation); Assert.assertNull(stringMethodLocation.getAnnotationRenamedTo()); Assert.assertNull(integerMethodLocation.getAnnotationRenamedTo()); Assert.assertEquals("annotatedStringField", stringFieldLocation.getAnnotationRenamedTo()); Assert.assertEquals("annotatedIntegerField", integerFieldLocation.getAnnotationRenamedTo()); Assert.assertEquals(JavaDocData.NO_COMMENT, result.getJavaDoc(stringMethodLocation.getPath()).getComment()); Assert.assertEquals(JavaDocData.NO_COMMENT, 
result.getJavaDoc(integerMethodLocation.getPath()).getComment()); Assert.assertEquals("This is a string field.", result.getJavaDoc(stringFieldLocation.getPath()).getComment()); Assert.assertEquals("This is an integer field.", result.getJavaDoc(integerFieldLocation.getPath()).getComment()); // Secondly, check the method-annotated class. final SortableLocation stringFieldLocation2 = result.getLocation(methodAccessPrefix + "stringField"); final SortableLocation integerFieldLocation2 = result.getLocation(methodAccessPrefix + "integerField"); final SortableLocation stringMethodLocation2 = result.getLocation(methodAccessPrefix + "annotatedStringMethod()"); final SortableLocation integerMethodLocation2 = result.getLocation(methodAccessPrefix + "annotatedIntegerMethod()"); Assert.assertTrue(stringFieldLocation2 instanceof FieldLocation); Assert.assertTrue(integerFieldLocation2 instanceof FieldLocation); Assert.assertTrue(stringMethodLocation2 instanceof MethodLocation); Assert.assertTrue(integerMethodLocation2 instanceof MethodLocation); Assert.assertNull(stringFieldLocation2.getAnnotationRenamedTo()); Assert.assertNull(integerFieldLocation2.getAnnotationRenamedTo()); Assert.assertEquals("annotatedStringMethod", stringMethodLocation2.getAnnotationRenamedTo()); Assert.assertEquals("annotatedIntegerMethod", integerMethodLocation2.getAnnotationRenamedTo()); Assert.assertEquals("Getter for the stringField.", result.getJavaDoc(stringMethodLocation2.getPath()).getComment()); Assert.assertEquals("Getter for the integerField.", result.getJavaDoc(integerMethodLocation2.getPath()).getComment()); Assert.assertEquals(JavaDocData.NO_COMMENT, result.getJavaDoc(stringFieldLocation2.getPath()).getComment()); Assert.assertEquals(JavaDocData.NO_COMMENT, result.getJavaDoc(integerFieldLocation2.getPath()).getComment()); } @Test @Ignore public void validateJavaDocsForXmlEnumsAreCorrectlyApplied() { // Assemble final JavaDocExtractor unitUnderTest = new JavaDocExtractor(log); // Act final 
SearchableDocumentation result = getSearchableDocumentationFor(unitUnderTest, 3, javaDocEnumsDir); final MapWrapper mapWrapper = new MapWrapper(result); // Assert Assert.assertEquals(21, mapWrapper.sortableLocations2JavaDocDataMap.size()); final List<String> paths = Arrays.asList( "enums", "enums.AmericanCoin", "enums.AmericanCoin#1", "enums.AmericanCoin#5", "enums.AmericanCoin#10", "enums.AmericanCoin#25", "enums.AmericanCoin#getValue()", "enums.AmericanCoin#value", "enums.ExampleEnumHolder", "enums.ExampleEnumHolder#coins", "enums.ExampleEnumHolder#foodPreferences", "enums.ExampleEnumHolder#getCoins()", "enums.ExampleEnumHolder#getFoodPreferences()", "enums.FoodPreference", "enums.FoodPreference#LACTO_VEGETARIAN", "enums.FoodPreference#NONE", "enums.FoodPreference#VEGAN", "enums.FoodPreference#isMeatEater()", "enums.FoodPreference#isMilkDrinker()", "enums.FoodPreference#meatEater", "enums.FoodPreference#milkDrinker"); for (String current : paths) { Assert.assertTrue("Required path [" + current + "] not found.", mapWrapper.path2LocationMap.keySet().contains(current.trim())); } // Finally, validate that the injected XML document comments // match the expected/corresponding JavaDoc comments. 
mapWrapper.validateJavaDocCommentText( "Simple enumeration example defining some Food preferences.", "enums.FoodPreference"); mapWrapper.validateJavaDocCommentText( "No special food preferences; eats everything.", "enums.FoodPreference#NONE"); mapWrapper.validateJavaDocCommentText( "Vegan who will neither eat meats nor drink milk.", "enums.FoodPreference#VEGAN"); mapWrapper.validateJavaDocCommentText( "Vegetarian who will not eat meats, but drinks milk.", "enums.FoodPreference#LACTO_VEGETARIAN"); mapWrapper.validateJavaDocCommentText( "A Penny, worth 1 cent.", "enums.AmericanCoin#1"); mapWrapper.validateJavaDocCommentText( "A Nickel, worth 5 cents.", "enums.AmericanCoin#5"); mapWrapper.validateJavaDocCommentText( "A Dime, worth 10 cents.", "enums.AmericanCoin#10"); mapWrapper.validateJavaDocCommentText( "A Quarter, worth 25 cents.", "enums.AmericanCoin#25"); } @Test @Ignore public void validateJavaDocsForXmlWrapperAnnotatedFieldsAndMethodsAreCorrectlyApplied() throws Exception { // Assemble final JavaDocExtractor unitUnderTest = new JavaDocExtractor(log); // Act final SearchableDocumentation result = getSearchableDocumentationFor(unitUnderTest, 2, javaDocXmlWrappersDir); final MapWrapper mapWrapper = new MapWrapper(result); // Assert Assert.assertEquals(11, mapWrapper.sortableLocations2JavaDocDataMap.size()); final String packagePrefix = "org.codehaus.mojo.jaxb2.schemageneration.postprocessing.javadoc.wrappers"; final List<String> paths = new ArrayList<String>(); for (String current : Arrays.asList("", ".ExampleXmlWrapperUsingFieldAccess", ".ExampleXmlWrapperUsingFieldAccess#foobar", ".ExampleXmlWrapperUsingFieldAccess#getIntegerSet()", ".ExampleXmlWrapperUsingFieldAccess#getStrings()", ".ExampleXmlWrapperUsingFieldAccess#integerSet", ".ExampleXmlWrapperUsingMethodAccess", ".ExampleXmlWrapperUsingMethodAccess#foobar()", ".ExampleXmlWrapperUsingMethodAccess#getMethodIntegerSet()", ".ExampleXmlWrapperUsingMethodAccess#methodIntegerSet", 
".ExampleXmlWrapperUsingMethodAccess#methodStrings")) { paths.add(packagePrefix + current); } for (String current : paths) { Assert.assertTrue("Required path [" + current + "] not found.", mapWrapper.path2LocationMap.keySet().contains(current.trim())); } mapWrapper.validateJavaDocCommentText("List containing some strings.", packagePrefix + ".ExampleXmlWrapperUsingFieldAccess#foobar"); mapWrapper.validateJavaDocCommentText("SortedSet containing Integers.", packagePrefix + ".ExampleXmlWrapperUsingFieldAccess#integerSet"); mapWrapper.validateJavaDocCommentText("List containing some methodStrings.", packagePrefix + ".ExampleXmlWrapperUsingMethodAccess#foobar()"); mapWrapper.validateJavaDocCommentText("SortedSet containing Integers.", packagePrefix + ".ExampleXmlWrapperUsingMethodAccess#getMethodIntegerSet()"); } @Test public void validatePathsFromProcessing() { // Assemble final JavaDocExtractor unitUnderTest = new JavaDocExtractor(log); // Act final SearchableDocumentation result = getSearchableDocumentationFor(unitUnderTest, 1, javaDocBasicDir); // Assert final ArrayList<SortableLocation> sortableLocations = new ArrayList<SortableLocation>(result.getAll().keySet()); Assert.assertEquals(4, sortableLocations.size()); final List<String> paths = new ArrayList<String>(result.getPaths()); Assert.assertEquals(4, paths.size()); Assert.assertEquals("basic", paths.get(0)); Assert.assertEquals("basic.NodeProcessor", paths.get(1)); Assert.assertEquals("basic.NodeProcessor#accept(org.w3c.dom.Node)", paths.get(2)); Assert.assertEquals("basic.NodeProcessor#process(org.w3c.dom.Node)", paths.get(3)); } @Test public void validateJavaDocDataFromProcessing() { // Assemble final String basicPackagePath = "basic"; final String nodeProcessorClassPath = "basic.NodeProcessor"; final String acceptMethodPath = "basic.NodeProcessor#accept(org.w3c.dom.Node)"; final String processMethodPath = "basic.NodeProcessor#process(org.w3c.dom.Node)"; final JavaDocExtractor unitUnderTest = new 
JavaDocExtractor(log); final List<File> sourceDirs = Collections.<File>singletonList(javaDocBasicDir); final List<File> sourceFiles = FileSystemUtilities.resolveRecursively(sourceDirs, null, log); // Act unitUnderTest.addSourceFiles(sourceFiles); final SearchableDocumentation result = unitUnderTest.process(); // Assert /* +================= | Comment: | No JavaDoc tags. +================= */ final SortableLocation packageLocation = result.getLocation(basicPackagePath); final JavaDocData basicPackageJavaDoc = result.getJavaDoc(basicPackagePath); Assert.assertTrue(packageLocation instanceof PackageLocation); final PackageLocation castPackageLocation = (PackageLocation) packageLocation; Assert.assertEquals("basic", castPackageLocation.getPackageName()); Assert.assertEquals(JavaDocData.NO_COMMENT, basicPackageJavaDoc.getComment()); Assert.assertEquals(0, basicPackageJavaDoc.getTag2ValueMap().size()); /* +================= | Comment: Processor/visitor pattern specification for DOM Nodes. | 2 JavaDoc tags ... | author: <a href="mailto:lj@jguru.se">Lennart J&ouml;relid</a>, Mr. Foo | see: org.w3c.dom.Node +================= */ final SortableLocation classLocation = result.getLocation(nodeProcessorClassPath); final JavaDocData nodeProcessorClassJavaDoc = result.getJavaDoc(nodeProcessorClassPath); Assert.assertTrue(classLocation instanceof ClassLocation); final ClassLocation castClassLocation = (ClassLocation) classLocation; Assert.assertEquals("basic", castClassLocation.getPackageName()); Assert.assertEquals("NodeProcessor", castClassLocation.getClassName()); Assert.assertEquals("Processor/visitor pattern specification for DOM Nodes.", nodeProcessorClassJavaDoc.getComment()); final SortedMap<String, String> classTag2ValueMap = nodeProcessorClassJavaDoc.getTag2ValueMap(); Assert.assertEquals(2, classTag2ValueMap.size()); Assert.assertEquals("org.w3c.dom.Node", classTag2ValueMap.get("see")); Assert.assertEquals("<a href=\"mailto:lj@jguru.se\">Lennart J&ouml;relid</a>, Mr. 
Foo", classTag2ValueMap.get("author")); /* +================= | Comment: Defines if this visitor should process the provided node. | 2 JavaDoc tags ... | param: aNode The DOM node to process. | return: <code>true</code> if the provided Node should be processed by this NodeProcessor. +================= */ final SortableLocation acceptMethodLocation = result.getLocation(acceptMethodPath); final JavaDocData acceptMethodClassJavaDoc = result.getJavaDoc(acceptMethodPath); Assert.assertTrue(acceptMethodLocation instanceof MethodLocation); final MethodLocation castMethodLocation = (MethodLocation) acceptMethodLocation; Assert.assertEquals("basic", castMethodLocation.getPackageName()); Assert.assertEquals("NodeProcessor", castMethodLocation.getClassName()); Assert.assertEquals("(org.w3c.dom.Node)", castMethodLocation.getParametersAsString()); Assert.assertEquals("Defines if this visitor should process the provided node.", acceptMethodClassJavaDoc.getComment()); final SortedMap<String, String> methodTag2ValueMap = acceptMethodClassJavaDoc.getTag2ValueMap(); Assert.assertEquals(2, methodTag2ValueMap.size()); Assert.assertEquals("aNode The DOM node to process.", methodTag2ValueMap.get("param")); Assert.assertEquals("<code>true</code> if the provided Node should be processed by this NodeProcessor.", methodTag2ValueMap.get("return")); } // // Private helpers // /** * Simple helper class wrapping the path2LocationMap and the sortableLocations2JavaDocDataMap. */ class MapWrapper { SortedMap<String, SortableLocation> path2LocationMap; SortedMap<SortableLocation, JavaDocData> sortableLocations2JavaDocDataMap; /** * Creates a MapWrapper using the data retrieved from a SearchableDocumentation * * @param searchableDocumentation A non-null SearchableDocumentation instance. 
*/ public MapWrapper(final SearchableDocumentation searchableDocumentation) { // Check sanity Validate.notNull(searchableDocumentation, "searchableDocumentation"); // Assign state this.sortableLocations2JavaDocDataMap = searchableDocumentation.getAll(); this.path2LocationMap = new TreeMap<String, SortableLocation>(); for (Map.Entry<SortableLocation, JavaDocData> current : sortableLocations2JavaDocDataMap.entrySet()) { path2LocationMap.put(current.getKey().getPath(), current.getKey()); } } /** * Validates that the JavaDoc found at the supplied SortableLocation path equals the expected value. * * @param expected The expected JavaDoc comment text. * @param path The SortableLocation path where the text was expected. * @see SortableLocation#getPath() */ public void validateJavaDocCommentText(final String expected, final String path) { final SortableLocation sortableLocation = path2LocationMap.get(path); final JavaDocData xmlWrapperJavaDocData = sortableLocations2JavaDocDataMap.get(sortableLocation); // All Done. Assert.assertEquals(expected, xmlWrapperJavaDocData.getComment()); } } private void validateJavaDocCommentText( final MapWrapper wrapper, final String expected, final String path) { final SortableLocation sortableLocation = wrapper.path2LocationMap.get(path); final JavaDocData xmlWrapperJavaDocData = wrapper.sortableLocations2JavaDocDataMap.get(sortableLocation); // All Done. Assert.assertEquals(expected, xmlWrapperJavaDocData.getComment()); } private SearchableDocumentation getSearchableDocumentationFor(final JavaDocExtractor unitUnderTest, final int expectedNumberOfFiles, final File... 
sourceFileDirectories) { // Ensure that the encoding is correctly set unitUnderTest.setEncoding("UTF-8"); // Convert the supplied directory Files to a List final List<File> sourceDirs = new ArrayList<File>(); Collections.addAll(sourceDirs, sourceFileDirectories); // Exclude any ".xsd" files found within the source directory files given final List<Filter<File>> excludeFilesMatching = new ArrayList<Filter<File>>(); excludeFilesMatching.add(new PatternFileFilter(Collections.singletonList("\\.xsd"))); Filters.initialize(log, excludeFilesMatching); // Find all normal Files not being ".xsd" files below the supplied sourceDirs final List<File> sourceFiles = FileSystemUtilities.resolveRecursively(sourceDirs, excludeFilesMatching, log); Assert.assertEquals(expectedNumberOfFiles, sourceFiles.size()); // Add the found files as source files unitUnderTest.addSourceFiles(sourceFiles); // Launch the JavaDocExtractor and find // the resulting SearchableDocumentation. return unitUnderTest.process(); } }
/* * ========================================================================= * Copyright (c) 2002-2014 Pivotal Software, Inc. All Rights Reserved. * This product is protected by U.S. and international copyright * and intellectual property laws. Pivotal products are covered by * more patents listed at http://www.pivotal.io/patents. * ======================================================================== */ package com.gemstone.gemfire.management.internal.cli.util; import java.io.File; import java.io.FileFilter; import java.io.FileInputStream; import java.io.IOException; import java.lang.reflect.Modifier; import java.net.URL; import java.util.ArrayList; import java.util.Arrays; import java.util.Enumeration; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.jar.JarEntry; import java.util.jar.JarInputStream; import com.gemstone.gemfire.internal.ClassPathLoader; import com.gemstone.gemfire.management.internal.cli.CliUtil; /** * Utility class to scan class-path & load classes. 
* * @author Abhishek Chaudhari * * @since 7.0 */ public class ClasspathScanLoadHelper { private static final String CLASSFILE_EXTENSION = ".class"; public static Set<Class<?>> loadAndGet(String commandPackageName, Class<?> requiredInterfaceToLoad, boolean onlyInstantiable) throws ClassNotFoundException, IOException { Set<Class<?>> classSet = new HashSet<Class<?>>(); Class<?> classes[] = getClasses(commandPackageName); for (int i = 0; i < classes.length; i++) { if (implementsType(classes[i], requiredInterfaceToLoad)) { if (onlyInstantiable) { if (isInstantiable(classes[i])) { classSet.add(classes[i]); } } else { classSet.add(classes[i]); } } } return classSet; } public static boolean isInstantiable(Class<?> klass) { int modifiers = klass.getModifiers(); boolean isInstantiable = !Modifier.isAbstract(modifiers) && !Modifier.isInterface(modifiers) && Modifier.isPublic(modifiers); return isInstantiable; } private static boolean implementsType(Class<?> typeToCheck, Class<?> requiredInterface) { if(requiredInterface.isAssignableFrom(typeToCheck)){ return true; }else{ return false; } } public static Class<?>[] getClasses(String packageName) throws ClassNotFoundException, IOException { String packagePath = packageName.replace('.', '/'); List<File> dirs = new ArrayList<File>(); Enumeration<URL> resources = ClassPathLoader.getLatest().getResources(packagePath); List<Class<?>> classesList = new ArrayList<Class<?>>(); while (resources.hasMoreElements()) { URL packageUrl = resources.nextElement(); String actualPackagePath = packageUrl.getPath(); int jarIndex = actualPackagePath.indexOf(".jar!"); if (jarIndex != -1) { // resource appears to be in a jar String jarPath = actualPackagePath.substring(0, jarIndex + ".jar".length()); if (jarPath.startsWith("file:")) { if (File.separatorChar == '/') {//whether Unix or Windows system // On Unix, to get actual path, we remove "file:" from the Path jarPath = jarPath.substring("file:".length()); } else { // On Windows jarPaths are like: // 
Local Path: file:/G:/where/java/spring/spring-shell/1.0.0/spring-shell-1.0.0.RELEASE.jar // Network Path: file://stinger.pune.gemstone.com/shared/where/java/spring/spring-shell/1.0.0/spring-shell-1.0.0.RELEASE.jar // To get actual path, we remove "file:/" from the Path jarPath = jarPath.substring("file:/".length()); // If the path still starts with a "/", then it's a network path. // Hence, add one "/". if (jarPath.startsWith("/") && !jarPath.startsWith("//")) { jarPath = "/" + jarPath; } } } // decode the jarPath as it's derived from an URL Class<?>[] classes = getClasses(CliUtil.decodeWithDefaultCharSet(jarPath), packageName); classesList.addAll(Arrays.asList(classes)); } else { dirs.add(new File(packageUrl.getFile())); } } for (File directory : dirs) { classesList.addAll(findClasses(directory, packageName)); } return (Class[]) classesList.toArray(new Class[classesList.size()]); } public static List<Class<?>> findClasses(File directory, String packageName) throws ClassNotFoundException { List<Class<?>> classes = new ArrayList<Class<?>>(); if (!directory.exists()) { return classes; } ClassPathLoader cpLoader = ClassPathLoader.getLatest(); // Load only .class files that are not from test code TestClassFilter tcf = new TestClassFilter(); File[] files = directory.listFiles(tcf); File file = null; for (int i = 0; i < files.length; i++) { file = files[i]; if (file.isDirectory()) {//sub-package // assert !file.getName().contains("."); classes.addAll(findClasses(file, packageName + "." + file.getName())); } else { //remove .class from the file name String classSimpleName = file.getName().substring(0, file.getName().length() - CLASSFILE_EXTENSION.length()); classes.add(cpLoader.forName(packageName + '.' + classSimpleName)); } } return classes; } /** * Returns all classes that are in the specified jar and package name. * * @param jarPath * The absolute or relative jar path. * @param packageName * The package name. 
* @return Returns all classes that are in the specified jar and package name. * @throws ClassNotFoundException * Thrown if unable to load a class * @throws IOException * Thrown if error occurs while reading the jar file */ public static Class<?>[] getClasses(String jarPath, String packageName) throws ClassNotFoundException, IOException { ClassPathLoader cpLoader = ClassPathLoader.getLatest(); String[] classNames = getClassNames(jarPath, packageName); Class<?> classes[] = new Class[classNames.length]; for (int i = 0; i < classNames.length; i++) { String className = (String)classNames[i]; classes[i] = cpLoader.forName(className); } return classes; } /** * Returns all names of classes that are defined in the specified jar and * package name. * * @param jarPath * The absolute or relative jar path. * @param packageName * The package name. * @return Returns all names of classes that are defined in the specified jar * and package name. * @throws IOException * Thrown if error occurs while reading the jar file */ public static String[] getClassNames(String jarPath, String packageName) throws IOException { if (jarPath == null) { return new String[0]; } File file; //Path is absolute on Unix if it starts with '/' //or path contains colon on Windows if (jarPath.startsWith("/") || (jarPath.indexOf(':') >= 0 && File.separatorChar == '\\' )) { // absolute path file = new File(jarPath); } else { // relative path String workingDir = System.getProperty("user.dir"); file = new File(workingDir + File.separator + jarPath); } List<String> classNames = new ArrayList<String>(); String packagePath = packageName.replaceAll("\\.", "/"); JarInputStream jarFile = new JarInputStream(new FileInputStream(file)); JarEntry jarEntry; while (true) { jarEntry = jarFile.getNextJarEntry(); if (jarEntry == null) { break; } String name = jarEntry.getName(); if (name.startsWith(packagePath) && (name.endsWith(CLASSFILE_EXTENSION))) { int endIndex = name.length() - 6; name = name.replaceAll("/", "\\."); name 
= name.substring(0, endIndex); classNames.add(name); } } jarFile.close(); return (String[])classNames.toArray(new String[0]); } /** * FileFilter to filter out GemFire Test Code. * * @author Abhishek Chaudhari * @since 7.0 */ static class TestClassFilter implements FileFilter { private static final String TESTS_CODE_INDICATOR = "Test"; @Override public boolean accept(File pathname) { String pathToCheck = pathname.getName(); return !pathToCheck.contains(TESTS_CODE_INDICATOR) && pathToCheck.endsWith(CLASSFILE_EXTENSION); } } }
/*
 * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
 * Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.cloudtrail.model;

import java.io.Serializable;
import java.util.Objects;

/**
 * <p>
 * Specifies an attribute and value that filter the events returned.
 * </p>
 */
public class LookupAttribute implements Serializable, Cloneable {

    /**
     * <p>
     * Specifies an attribute on which to filter the events returned.
     * </p>
     */
    private String attributeKey;
    /**
     * <p>
     * Specifies a value for the specified AttributeKey.
     * </p>
     */
    private String attributeValue;

    /**
     * <p>
     * Specifies an attribute on which to filter the events returned.
     * </p>
     *
     * @param attributeKey
     *        Specifies an attribute on which to filter the events returned.
     * @see LookupAttributeKey
     */
    public void setAttributeKey(String attributeKey) {
        this.attributeKey = attributeKey;
    }

    /**
     * <p>
     * Specifies an attribute on which to filter the events returned.
     * </p>
     *
     * @return Specifies an attribute on which to filter the events returned.
     * @see LookupAttributeKey
     */
    public String getAttributeKey() {
        return this.attributeKey;
    }

    /**
     * <p>
     * Specifies an attribute on which to filter the events returned.
     * </p>
     *
     * @param attributeKey
     *        Specifies an attribute on which to filter the events returned.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     * @see LookupAttributeKey
     */
    public LookupAttribute withAttributeKey(String attributeKey) {
        setAttributeKey(attributeKey);
        return this;
    }

    /**
     * <p>
     * Specifies an attribute on which to filter the events returned.
     * </p>
     *
     * @param attributeKey
     *        Specifies an attribute on which to filter the events returned;
     *        must not be {@code null} (its {@code toString()} value is stored).
     * @see LookupAttributeKey
     */
    public void setAttributeKey(LookupAttributeKey attributeKey) {
        this.attributeKey = attributeKey.toString();
    }

    /**
     * <p>
     * Specifies an attribute on which to filter the events returned.
     * </p>
     *
     * @param attributeKey
     *        Specifies an attribute on which to filter the events returned.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     * @see LookupAttributeKey
     */
    public LookupAttribute withAttributeKey(LookupAttributeKey attributeKey) {
        setAttributeKey(attributeKey);
        return this;
    }

    /**
     * <p>
     * Specifies a value for the specified AttributeKey.
     * </p>
     *
     * @param attributeValue
     *        Specifies a value for the specified AttributeKey.
     */
    public void setAttributeValue(String attributeValue) {
        this.attributeValue = attributeValue;
    }

    /**
     * <p>
     * Specifies a value for the specified AttributeKey.
     * </p>
     *
     * @return Specifies a value for the specified AttributeKey.
     */
    public String getAttributeValue() {
        return this.attributeValue;
    }

    /**
     * <p>
     * Specifies a value for the specified AttributeKey.
     * </p>
     *
     * @param attributeValue
     *        Specifies a value for the specified AttributeKey.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public LookupAttribute withAttributeValue(String attributeValue) {
        setAttributeValue(attributeValue);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getAttributeKey() != null)
            sb.append("AttributeKey: " + getAttributeKey() + ",");
        if (getAttributeValue() != null)
            sb.append("AttributeValue: " + getAttributeValue());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (!(obj instanceof LookupAttribute))
            return false;
        LookupAttribute other = (LookupAttribute) obj;
        // Objects.equals handles the null/null and null/non-null cases that the
        // generated xor-style comparison covered.
        return Objects.equals(getAttributeKey(), other.getAttributeKey())
                && Objects.equals(getAttributeValue(), other.getAttributeValue());
    }

    @Override
    public int hashCode() {
        // Objects.hash produces the same 31-based accumulation (null -> 0) as
        // the previous hand-rolled implementation.
        return Objects.hash(getAttributeKey(), getAttributeValue());
    }

    @Override
    public LookupAttribute clone() {
        try {
            return (LookupAttribute) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() "
                            + "even though we're Cloneable!", e);
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.aws2.sqs; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; import software.amazon.awssdk.services.sqs.SqsClient; import software.amazon.awssdk.services.sqs.model.BatchResultErrorEntry; import software.amazon.awssdk.services.sqs.model.ChangeMessageVisibilityRequest; import software.amazon.awssdk.services.sqs.model.ChangeMessageVisibilityResponse; import software.amazon.awssdk.services.sqs.model.CreateQueueRequest; import software.amazon.awssdk.services.sqs.model.CreateQueueResponse; import software.amazon.awssdk.services.sqs.model.DeleteMessageRequest; import software.amazon.awssdk.services.sqs.model.DeleteMessageResponse; import software.amazon.awssdk.services.sqs.model.ListQueuesRequest; import software.amazon.awssdk.services.sqs.model.ListQueuesResponse; import software.amazon.awssdk.services.sqs.model.Message; import 
software.amazon.awssdk.services.sqs.model.ReceiveMessageRequest; import software.amazon.awssdk.services.sqs.model.ReceiveMessageResponse; import software.amazon.awssdk.services.sqs.model.SendMessageBatchRequest; import software.amazon.awssdk.services.sqs.model.SendMessageBatchResponse; import software.amazon.awssdk.services.sqs.model.SendMessageBatchResultEntry; import software.amazon.awssdk.services.sqs.model.SendMessageRequest; import software.amazon.awssdk.services.sqs.model.SendMessageResponse; import software.amazon.awssdk.services.sqs.model.SetQueueAttributesRequest; import software.amazon.awssdk.services.sqs.model.SetQueueAttributesResponse; public class AmazonSQSClientMock implements SqsClient { List<Message> messages = new ArrayList<>(); Map<String, Map<String, String>> queueAttributes = new HashMap<>(); List<ChangeMessageVisibilityRequest> changeMessageVisibilityRequests = new CopyOnWriteArrayList<>(); private Map<String, CreateQueueRequest> queues = new LinkedHashMap<>(); private Map<String, ScheduledFuture<?>> inFlight = new LinkedHashMap<>(); private ScheduledExecutorService scheduler; public AmazonSQSClientMock() { } @Override public ListQueuesResponse listQueues() { return ListQueuesResponse.builder().build(); } @Override public ListQueuesResponse listQueues(ListQueuesRequest request) { ListQueuesResponse.Builder result = ListQueuesResponse.builder(); List<String> queues = new ArrayList<>(); queues.add("queue1"); queues.add("queue2"); result.queueUrls(queues); return result.build(); } @Override public CreateQueueResponse createQueue(CreateQueueRequest createQueueRequest) { String queueName = "https://queue.amazonaws.com/541925086079/" + createQueueRequest.queueName(); queues.put(queueName, createQueueRequest); CreateQueueResponse.Builder result = CreateQueueResponse.builder(); result.queueUrl(queueName); return result.build(); } @Override public SendMessageResponse sendMessage(SendMessageRequest sendMessageRequest) { Message.Builder message = 
Message.builder(); message.body(sendMessageRequest.messageBody()); message.md5OfBody("6a1559560f67c5e7a7d5d838bf0272ee"); message.messageId("f6fb6f99-5eb2-4be4-9b15-144774141458"); message.receiptHandle("0NNAq8PwvXsyZkR6yu4nQ07FGxNmOBWi5zC9+4QMqJZ0DJ3gVOmjI2Gh/oFnb0IeJqy5Zc8kH4JX7GVpfjcEDjaAPSeOkXQZRcaBqt" + "4lOtyfj0kcclVV/zS7aenhfhX5Ixfgz/rHhsJwtCPPvTAdgQFGYrqaHly+etJiawiNPVc="); synchronized (messages) { messages.add(message.build()); } return SendMessageResponse.builder().messageId("f6fb6f99-5eb2-4be4-9b15-144774141458").md5OfMessageBody("6a1559560f67c5e7a7d5d838bf0272ee").build(); } @Override public ReceiveMessageResponse receiveMessage(ReceiveMessageRequest receiveMessageRequest) { Integer maxNumberOfMessages = receiveMessageRequest.maxNumberOfMessages() != null ? receiveMessageRequest.maxNumberOfMessages() : Integer.MAX_VALUE; ReceiveMessageResponse.Builder result = ReceiveMessageResponse.builder(); Collection<Message> resultMessages = new ArrayList<>(); synchronized (messages) { int fetchSize = 0; for (Iterator<Message> iterator = messages.iterator(); iterator.hasNext() && fetchSize < maxNumberOfMessages; fetchSize++) { Message rc = iterator.next(); resultMessages.add(rc); iterator.remove(); scheduleCancelInflight(receiveMessageRequest.queueUrl(), rc); } } result.messages(resultMessages); return result.build(); } /* * Cancel (put back onto queue) in flight messages if the visibility time * has expired and has not been manually deleted (ack'd) */ private void scheduleCancelInflight(final String queueUrl, final Message message) { if (scheduler != null) { int visibility = getVisibilityForQueue(queueUrl); if (visibility > 0) { ScheduledFuture<?> task = scheduler.schedule(new Runnable() { @Override public void run() { synchronized (messages) { // put it back! 
messages.add(message); } } }, visibility, TimeUnit.SECONDS); inFlight.put(message.receiptHandle(), task); } } } private int getVisibilityForQueue(String queueUrl) { Map<String, String> queueAttr = queues.get(queueUrl).attributesAsStrings(); if (queueAttr.containsKey("VisibilityTimeout")) { return Integer.parseInt(queueAttr.get("VisibilityTimeout")); } return 0; } public ScheduledExecutorService getScheduler() { return scheduler; } public void setScheduler(ScheduledExecutorService scheduler) { this.scheduler = scheduler; } @Override public DeleteMessageResponse deleteMessage(DeleteMessageRequest deleteMessageRequest) { String receiptHandle = deleteMessageRequest.receiptHandle(); if (inFlight.containsKey(receiptHandle)) { ScheduledFuture<?> inFlightTask = inFlight.get(receiptHandle); inFlightTask.cancel(true); } return DeleteMessageResponse.builder().build(); } @Override public SetQueueAttributesResponse setQueueAttributes(SetQueueAttributesRequest setQueueAttributesRequest) { synchronized (queueAttributes) { if (!queueAttributes.containsKey(setQueueAttributesRequest.queueUrl())) { queueAttributes.put(setQueueAttributesRequest.queueUrl(), new HashMap<String, String>()); } for (final Map.Entry<String, String> entry : setQueueAttributesRequest.attributesAsStrings().entrySet()) { queueAttributes.get(setQueueAttributesRequest.queueUrl()).put(entry.getKey(), entry.getValue()); } } return SetQueueAttributesResponse.builder().build(); } @Override public ChangeMessageVisibilityResponse changeMessageVisibility(ChangeMessageVisibilityRequest changeMessageVisibilityRequest) { this.changeMessageVisibilityRequests.add(changeMessageVisibilityRequest); return ChangeMessageVisibilityResponse.builder().build(); } @Override public SendMessageBatchResponse sendMessageBatch(SendMessageBatchRequest request) { SendMessageBatchResponse.Builder result = SendMessageBatchResponse.builder(); Collection<SendMessageBatchResultEntry> entriesSuccess = new ArrayList<>(); 
SendMessageBatchResultEntry.Builder entry1 = SendMessageBatchResultEntry.builder(); SendMessageBatchResultEntry.Builder entry2 = SendMessageBatchResultEntry.builder(); entry1.id("team1"); entry2.id("team2"); entriesSuccess.add(entry1.build()); entriesSuccess.add(entry2.build()); Collection<BatchResultErrorEntry> entriesFail = new ArrayList<>(); BatchResultErrorEntry.Builder entry3 = BatchResultErrorEntry.builder(); BatchResultErrorEntry.Builder entry4 = BatchResultErrorEntry.builder(); entry3.id("team1"); entry4.id("team4"); entriesFail.add(entry3.build()); entriesFail.add(entry4.build()); result.successful(entriesSuccess); result.failed(entriesFail); return result.build(); } @Override public String serviceName() { // TODO Auto-generated method stub return null; } @Override public void close() { // TODO Auto-generated method stub } }
package com.cusnews.utils; import java.lang.ref.WeakReference; import java.util.LinkedList; import java.util.List; import android.os.Handler; import android.support.annotation.Nullable; import android.support.design.widget.Snackbar; import android.support.design.widget.TabLayout; import android.support.design.widget.TabLayout.Tab; import android.text.TextUtils; import android.view.View; import android.view.View.OnClickListener; import com.cusnews.R; import com.cusnews.app.App; import com.cusnews.ds.TabLabel; import cn.bmob.v3.BmobQuery; import cn.bmob.v3.listener.DeleteListener; import cn.bmob.v3.listener.FindListener; import cn.bmob.v3.listener.SaveListener; /** * A manager to control adding, removing, loading {@link android.support.design.widget.TabLayout.Tab}s. * * @author Xinyue Zhao */ public class TabLabelManager { public interface TabLabelManagerUIHelper { /** * Add customized , default, first {@link Tab}. */ void addDefaultTab(); /** * Add customized {@link Tab}. * * @param tabLabel * {@link TabLabel}. * * @return The added new {@link Tab}. */ Tab addTab( TabLabel tabLabel ); /** * Remove a {@link Tab} from {@link Tab}s. * * @param tab * {@link Tab} */ void removeTab( Tab tab ); } /** * Cached list of all {@link TabLabel}s from backend. */ private List<TabLabel> mCachedTabLabels = new LinkedList<>(); /** * Singleton. */ private static TabLabelManager sInstance = new TabLabelManager(); private Handler mHandler = new Handler(); /** * @return The instance of singleton pattern. */ public static TabLabelManager getInstance() { return sInstance; } /** * No one can create this class. */ private TabLabelManager() { } /** * For initialize the {@link TabLayout} when host {@link android.app.Activity} is being created. * * @param helper * {@link TabLabelManagerUIHelper}. * @param loadDefault * {@code true} if the first default will also be loaded. */ public void init( final TabLabelManagerUIHelper helper, boolean loadDefault ) { //Default page. 
if( loadDefault ) { helper.addDefaultTab(); } //Load from cache. for( TabLabel cached : mCachedTabLabels ) { helper.addTab( cached ); } //Load from backend and refresh tabs. BmobQuery<TabLabel> queryTabLabels = new BmobQuery<>(); queryTabLabels.addWhereEqualTo( "mUID", Prefs.getInstance().getGoogleId() ); queryTabLabels.findObjects( App.Instance, new FindListener<TabLabel>() { @Override public void onSuccess( List<TabLabel> list ) { for( TabLabel tabLabel : list ) { boolean found = false; for( TabLabel cached : mCachedTabLabels ) { if( cached.equals( tabLabel ) ) { found = true; break; } } if( !found ) { mCachedTabLabels.add( tabLabel ); helper.addTab( tabLabel ); } } } @Override public void onError( int i, String s ) { } } ); } /** * Add a new {@link TabLabel}. * * @param newTabLabel * The new {@link TabLabel}. * @param helper * Use helper to refresh UI before removing {@link TabLabel}. * @param viewForSnack * The anchor for {@link Snackbar} for result-messages. * * @return A {@link Tab} that hosts the new {@link TabLabel}. It might be {@code null} if the {@code newTabLabel} has same wording(label) equal to * label of an existing {@link TabLabel} in {@link #mCachedTabLabels}. */ public @Nullable Tab addNewRemoteTab( TabLabel newTabLabel, TabLabelManagerUIHelper helper, View viewForSnack ) { //Same label should not be added again. for( TabLabel cached : mCachedTabLabels ) { if( cached.equals( newTabLabel ) ) { Snackbar.make( viewForSnack, viewForSnack.getContext().getString( R.string.lbl_sync_same_label, newTabLabel.getLabel() ), Snackbar.LENGTH_SHORT ).show(); return null; } } final Tab tab = helper.addTab( newTabLabel ); mHandler.postDelayed( new Runnable() { @Override public void run() { tab.select(); } }, 300 ); mCachedTabLabels.add( newTabLabel ); addNewRemoteTabInternal( newTabLabel, viewForSnack ); return tab; } /** * Save a new {@link TabLabel} to backend. * * @param newTabLabel * New {@link TabLabel}. 
* @param viewForSnack * The anchor for {@link Snackbar} for result-messages. */ private void addNewRemoteTabInternal( final TabLabel newTabLabel, View viewForSnack ) { final WeakReference<View> anchor = new WeakReference<>( viewForSnack ); newTabLabel.save( App.Instance, new SaveListener() { @Override public void onSuccess() { View anchorV = anchor.get(); if( anchorV != null ) { Snackbar.make( anchorV, anchorV.getContext().getString( R.string.lbl_sync_label_added, newTabLabel.getLabel() ), Snackbar.LENGTH_SHORT ) .show(); } } @Override public void onFailure( int i, String s ) { View anchorV = anchor.get(); if( anchorV != null ) { Snackbar.make( anchorV, R.string.lbl_sync_fail, Snackbar.LENGTH_LONG ).setAction( R.string.btn_retry, new OnClickListener() { @Override public void onClick( View v ) { addNewRemoteTabInternal( newTabLabel, anchor.get() ); } } ).show(); } } } ); } /** * Remove a {@link TabLabel} and its host {@link Tab}. It delete cached item and them remove from backend. * * @param tab * {@link Tab} that hosts {@code tabLabel}. * @param tabLabel * {@link TabLabel} to remove. * @param helper * Use helper to refresh UI before removing {@link TabLabel}. * @param viewForSnack * The anchor for {@link Snackbar} for result-messages. */ public void removeRemoteTab( Tab tab, TabLabel tabLabel, TabLabelManagerUIHelper helper, View viewForSnack ) { helper.removeTab( tab ); for( TabLabel cached : mCachedTabLabels ) { if( TextUtils.equals( cached.getObjectId(), tabLabel.getObjectId() ) ) { mCachedTabLabels.remove( cached ); removeRemoteTabInternal( tabLabel, viewForSnack ); break; } } } /** * Remove a {@link TabLabel} from backend. * * @param tabLabel * Existed {@link TabLabel}. * @param viewForSnack * The anchor for {@link Snackbar} for result-messages. 
*/ private void removeRemoteTabInternal( final TabLabel tabLabel, View viewForSnack ) { final WeakReference<View> anchor = new WeakReference<>( viewForSnack ); tabLabel.delete( App.Instance, new DeleteListener() { @Override public void onSuccess() { View anchorV = anchor.get(); if( anchorV != null ) { Snackbar.make( anchorV, R.string.lbl_sync_label_removed, Snackbar.LENGTH_SHORT ).show(); } } @Override public void onFailure( int i, String s ) { View anchorV = anchor.get(); if( anchorV != null ) { Snackbar.make( anchorV, R.string.lbl_sync_fail, Snackbar.LENGTH_LONG ).setAction( R.string.btn_retry, new OnClickListener() { @Override public void onClick( View v ) { removeRemoteTabInternal( tabLabel, anchor.get() ); } } ).show(); } } } ); } /** * Clean all tabs. */ public void clean() { mCachedTabLabels.clear(); } }
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.test.rest.yaml;

import com.carrotsearch.randomizedtesting.RandomizedTest;

import org.apache.lucene.util.IOUtils;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.test.rest.ESRestTestCase;
import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException;
import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestSuiteParser;
import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestApi;
import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestSpec;
import org.elasticsearch.test.rest.yaml.section.ClientYamlTestSuite;
import org.elasticsearch.test.rest.yaml.section.DoSection;
import org.elasticsearch.test.rest.yaml.section.ExecutableSection;
import org.elasticsearch.test.rest.yaml.section.SkipSection;
import org.elasticsearch.test.rest.yaml.section.ClientYamlTestSection;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;

import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.file.FileSystem;
import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * Runs a suite of yaml tests shared with all the official Elasticsearch clients against against an elasticsearch cluster.
 * <p>
 * Each yaml test section becomes one {@link ClientYamlTestCandidate} (see {@link #createParameters()});
 * two shared, static execution contexts (regular and admin) are created once per class in
 * {@link #initExecutionContext()} and torn down in {@link #clearStatic()}.
 */
public abstract class ESClientYamlSuiteTestCase extends ESRestTestCase {

    /**
     * Property that allows to control which REST tests get run. Supports comma separated list of tests
     * or directories that contain tests e.g. -Dtests.rest.suite=index,get,create/10_with_id
     */
    public static final String REST_TESTS_SUITE = "tests.rest.suite";
    /**
     * Property that allows to blacklist some of the REST tests based on a comma separated list of globs
     * e.g. -Dtests.rest.blacklist=get/10_basic/*
     */
    public static final String REST_TESTS_BLACKLIST = "tests.rest.blacklist";
    /**
     * Property that allows to control whether spec validation is enabled or not (default true).
     */
    public static final String REST_TESTS_VALIDATE_SPEC = "tests.rest.validate_spec";
    /**
     * Property that allows to control where the REST spec files need to be loaded from
     */
    public static final String REST_TESTS_SPEC = "tests.rest.spec";

    /** Property controlling whether tests packaged inside a jar may be loaded (default true). */
    public static final String REST_LOAD_PACKAGED_TESTS = "tests.rest.load_packaged";

    private static final String DEFAULT_TESTS_PATH = "/rest-api-spec/test";
    private static final String DEFAULT_SPEC_PATH = "/rest-api-spec/api";

    /**
     * This separator pattern matches ',' except it is preceded by a '\'.
     * This allows us to support ',' within paths when it is escaped with a slash.
     *
     * For example, the path string "/a/b/c\,d/e/f,/foo/bar,/baz" is separated to "/a/b/c\,d/e/f", "/foo/bar" and "/baz".
     *
     * For reference, this regular expression feature is known as zero-width negative look-behind.
     */
    private static final String PATHS_SEPARATOR = "(?<!\\\\),";

    /** Glob matchers built from {@link #REST_TESTS_BLACKLIST}; checked in {@link #reset()}. */
    private final List<BlacklistedPathPatternMatcher> blacklistPathMatchers = new ArrayList<>();
    // Shared across all test methods of a class; initialized in @BeforeClass, cleared in @AfterClass.
    private static ClientYamlTestExecutionContext restTestExecutionContext;
    private static ClientYamlTestExecutionContext adminExecutionContext;

    /** The single yaml test section this instance runs. */
    private final ClientYamlTestCandidate testCandidate;

    public ESClientYamlSuiteTestCase(ClientYamlTestCandidate testCandidate) {
        this.testCandidate = testCandidate;
        String[] blacklist = resolvePathsProperty(REST_TESTS_BLACKLIST, null);
        for (String entry : blacklist) {
            this.blacklistPathMatchers.add(new BlacklistedPathPatternMatcher(entry));
        }
    }

    @Override
    protected void afterIfFailed(List<Throwable> errors) {
        // Dump the stash so a failure can be correlated with the yaml test's saved state.
        logger.info("Stash dump on failure [{}]", XContentHelper.toString(restTestExecutionContext.stash()));
        super.afterIfFailed(errors);
    }

    /**
     * Builds the parameterized-test inputs: one Object[] per yaml test section found on disk
     * (or inside the tests jar).
     */
    public static Iterable<Object[]> createParameters() throws IOException, ClientYamlTestParseException {
        //parse tests only if rest test group is enabled, otherwise rest tests might not even be available on file system
        List<ClientYamlTestCandidate> restTestCandidates = collectTestCandidates();
        List<Object[]> objects = new ArrayList<>();
        for (ClientYamlTestCandidate restTestCandidate : restTestCandidates) {
            objects.add(new Object[]{restTestCandidate});
        }
        return objects;
    }

    /** Parses every yaml suite selected by {@link #REST_TESTS_SUITE} into test candidates, sorted by path. */
    private static List<ClientYamlTestCandidate> collectTestCandidates() throws ClientYamlTestParseException, IOException {
        List<ClientYamlTestCandidate> testCandidates = new ArrayList<>();
        FileSystem fileSystem = getFileSystem();
        // don't make a try-with, getFileSystem returns null
        // ... and you can't close() the default filesystem
        try {
            String[] paths = resolvePathsProperty(REST_TESTS_SUITE, DEFAULT_TESTS_PATH);
            Map<String, Set<Path>> yamlSuites = FileUtils.findYamlSuites(fileSystem, DEFAULT_TESTS_PATH, paths);
            ClientYamlTestSuiteParser restTestSuiteParser = new ClientYamlTestSuiteParser();
            //yaml suites are grouped by directory (effectively by api)
            for (String api : yamlSuites.keySet()) {
                List<Path> yamlFiles = new ArrayList<>(yamlSuites.get(api));
                for (Path yamlFile : yamlFiles) {
                    ClientYamlTestSuite restTestSuite = restTestSuiteParser.parse(api, yamlFile);
                    for (ClientYamlTestSection testSection : restTestSuite.getTestSections()) {
                        testCandidates.add(new ClientYamlTestCandidate(restTestSuite, testSection));
                    }
                }
            }
        } finally {
            IOUtils.close(fileSystem);
        }

        //sort the candidates so they will always be in the same order before being shuffled, for repeatability
        Collections.sort(testCandidates, new Comparator<ClientYamlTestCandidate>() {
            @Override
            public int compare(ClientYamlTestCandidate o1, ClientYamlTestCandidate o2) {
                return o1.getTestPath().compareTo(o2.getTestPath());
            }
        });

        return testCandidates;
    }

    /**
     * Reads a comma-separated system property into an array, honoring the escaped-comma rule
     * of {@link #PATHS_SEPARATOR}. Returns {defaultValue} (or an empty array if that is null)
     * when the property is unset or blank.
     */
    private static String[] resolvePathsProperty(String propertyName, String defaultValue) {
        String property = System.getProperty(propertyName);
        if (!Strings.hasLength(property)) {
            return defaultValue == null ? Strings.EMPTY_ARRAY : new String[]{defaultValue};
        } else {
            return property.split(PATHS_SEPARATOR);
        }
    }

    /**
     * Returns a new FileSystem to read REST resources, or null if they
     * are available from classpath.
     */
    @SuppressForbidden(reason = "proper use of URL, hack around a JDK bug")
    static FileSystem getFileSystem() throws IOException {
        // REST suite handling is currently complicated, with lots of filtering and so on
        // For now, to work embedded in a jar, return a ZipFileSystem over the jar contents.
        URL codeLocation = FileUtils.class.getProtectionDomain().getCodeSource().getLocation();
        boolean loadPackaged = RandomizedTest.systemPropertyAsBoolean(REST_LOAD_PACKAGED_TESTS, true);
        if (codeLocation.getFile().endsWith(".jar") && loadPackaged) {
            try {
                // hack around a bug in the zipfilesystem implementation before java 9,
                // its checkWritable was incorrect and it won't work without write permissions.
                // if we add the permission, it will open jars r/w, which is too scary! so copy to a safe r-w location.
                Path tmp = Files.createTempFile(null, ".jar");
                try (InputStream in = codeLocation.openStream()) {
                    Files.copy(in, tmp, StandardCopyOption.REPLACE_EXISTING);
                }
                return FileSystems.newFileSystem(new URI("jar:" + tmp.toUri()), Collections.<String,Object>emptyMap());
            } catch (URISyntaxException e) {
                throw new IOException("couldn't open zipfilesystem: ", e);
            }
        } else {
            return null;
        }
    }

    /** Parses the REST spec once per class and builds the shared execution contexts. */
    @BeforeClass
    public static void initExecutionContext() throws IOException {
        String[] specPaths = resolvePathsProperty(REST_TESTS_SPEC, DEFAULT_SPEC_PATH);
        ClientYamlSuiteRestSpec restSpec = null;
        FileSystem fileSystem = getFileSystem();
        // don't make a try-with, getFileSystem returns null
        // ... and you can't close() the default filesystem
        try {
            restSpec = ClientYamlSuiteRestSpec.parseFrom(fileSystem, DEFAULT_SPEC_PATH, specPaths);
        } finally {
            IOUtils.close(fileSystem);
        }
        validateSpec(restSpec);
        restTestExecutionContext = new ClientYamlTestExecutionContext(restSpec);
        adminExecutionContext = new ClientYamlTestExecutionContext(restSpec);
    }

    protected ClientYamlTestExecutionContext getAdminExecutionContext() {
        return adminExecutionContext;
    }

    /** Sanity-checks the spec: an API that supports GET with a body must also support POST. */
    private static void validateSpec(ClientYamlSuiteRestSpec restSpec) {
        boolean validateSpec = RandomizedTest.systemPropertyAsBoolean(REST_TESTS_VALIDATE_SPEC, true);
        if (validateSpec) {
            StringBuilder errorMessage = new StringBuilder();
            for (ClientYamlSuiteRestApi restApi : restSpec.getApis()) {
                if (restApi.getMethods().contains("GET") && restApi.isBodySupported()) {
                    if (!restApi.getMethods().contains("POST")) {
                        errorMessage.append("\n- ").append(restApi.getName()).append(" supports GET with a body but doesn't support POST");
                    }
                }
            }
            if (errorMessage.length() > 0) {
                throw new IllegalArgumentException(errorMessage.toString());
            }
        }
    }

    @AfterClass
    public static void clearStatic() {
        // Release class-level state so it cannot leak into the next test class.
        restTestExecutionContext = null;
        adminExecutionContext = null;
    }

    @Before
    public void reset() throws IOException {
        // admin context must be available for @After always, regardless of whether the test was blacklisted
        adminExecutionContext.initClient(adminClient(), getClusterHosts());
        adminExecutionContext.clear();

        //skip test if it matches one of the blacklist globs
        for (BlacklistedPathPatternMatcher blacklistedPathMatcher : blacklistPathMatchers) {
            String testPath = testCandidate.getSuitePath() + "/" + testCandidate.getTestSection().getName();
            assumeFalse("[" + testCandidate.getTestPath() + "] skipped, reason: blacklisted", blacklistedPathMatcher
                    .isSuffixMatch(testPath));
        }
        //The client needs non static info to get initialized, therefore it can't be initialized in the before class
        restTestExecutionContext.initClient(client(), getClusterHosts());
        restTestExecutionContext.clear();

        //skip test if the setup section's skip condition applies
        assumeFalse(testCandidate.getSetupSection().getSkipSection().getSkipMessage(testCandidate.getSuitePath()),
                testCandidate.getSetupSection().getSkipSection().skip(restTestExecutionContext.esVersion()));
        //skip test if the teardown section's skip condition applies
        assumeFalse(testCandidate.getTeardownSection().getSkipSection().getSkipMessage(testCandidate.getSuitePath()),
                testCandidate.getTeardownSection().getSkipSection().skip(restTestExecutionContext.esVersion()));
        //skip test if test section is disabled
        assumeFalse(testCandidate.getTestSection().getSkipSection().getSkipMessage(testCandidate.getTestPath()),
                testCandidate.getTestSection().getSkipSection().skip(restTestExecutionContext.esVersion()));
    }

    /**
     * Runs the candidate's setup sections, then its executable sections, and always runs the
     * teardown sections (in a finally) even when the test body throws.
     */
    public void test() throws IOException {
        //let's check that there is something to run, otherwise there might be a problem with the test section
        if (testCandidate.getTestSection().getExecutableSections().size() == 0) {
            throw new IllegalArgumentException("No executable sections loaded for [" + testCandidate.getTestPath() + "]");
        }

        if (!testCandidate.getSetupSection().isEmpty()) {
            logger.debug("start setup test [{}]", testCandidate.getTestPath());
            for (DoSection doSection : testCandidate.getSetupSection().getDoSections()) {
                executeSection(doSection);
            }
            logger.debug("end setup test [{}]", testCandidate.getTestPath());
        }

        restTestExecutionContext.clear();

        try {
            for (ExecutableSection executableSection : testCandidate.getTestSection().getExecutableSections()) {
                executeSection(executableSection);
            }
        } finally {
            logger.debug("start teardown test [{}]", testCandidate.getTestPath());
            for (DoSection doSection : testCandidate.getTeardownSection().getDoSections()) {
                executeSection(doSection);
            }
            logger.debug("end teardown test [{}]", testCandidate.getTestPath());
        }
    }

    /**
     * Execute an {@link ExecutableSection}, careful to log its place of origin on failure.
     */
    private void executeSection(ExecutableSection executableSection) {
        try {
            executableSection.execute(restTestExecutionContext);
        } catch (Exception e) {
            throw new RuntimeException(errorMessage(executableSection, e), e);
        } catch (AssertionError e) {
            throw new AssertionError(errorMessage(executableSection, e), e);
        }
    }

    /** Formats "suite path : line number : cause" for a failing section. */
    private String errorMessage(ExecutableSection executableSection, Throwable t) {
        return "Failure at [" + testCandidate.getSuitePath() + ":" + executableSection.getLocation().lineNumber + "]: " + t.getMessage();
    }
}
package com.lchtime.safetyexpress.ui.chat.hx.video.util; import android.annotation.TargetApi; import android.content.Context; import android.content.res.Resources; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.media.ThumbnailUtils; import android.os.Build; import android.provider.MediaStore.Video.Thumbnails; import android.util.Log; import com.lchtime.safetyexpress.BuildConfig; import java.io.FileDescriptor; public class ImageResizer extends ImageWorker { private static final String TAG = "ImageResizer"; protected int mImageWidth; protected int mImageHeight; /** * Initialize providing a single target image size (used for both width and * height); * * @param context * @param imageWidth * @param imageHeight */ public ImageResizer(Context context, int imageWidth, int imageHeight) { super(context); setImageSize(imageWidth, imageHeight); } /** * Initialize providing a single target image size (used for both width and * height); * * @param context * @param imageSize */ public ImageResizer(Context context, int imageSize) { super(context); setImageSize(imageSize); } /** * Set the target image width and height. * * @param width * @param height */ public void setImageSize(int width, int height) { mImageWidth = width; mImageHeight = height; } /** * Set the target image size (width and height will be the same). * * @param size */ public void setImageSize(int size) { setImageSize(size, size); } /** * The main processing method. This happens in a background task. In this * case we are just sampling down the bitmap and returning it from a * resource. 
* * @param resId * @return */ private Bitmap processBitmap(int resId) { if (BuildConfig.DEBUG) { Log.d(TAG, "processBitmap - " + resId); } return decodeSampledBitmapFromResource(mResources, resId, mImageWidth, mImageHeight, getImageCache()); } @Override protected Bitmap processBitmap(Object data) { String filePath=String.valueOf(data); return ThumbnailUtils.createVideoThumbnail(filePath, Thumbnails.MICRO_KIND); } /** * Decode and sample down a bitmap from resources to the requested width and * height. * * @param res * The resources object containing the image data * @param resId * The resource id of the image data * @param reqWidth * The requested width of the resulting bitmap * @param reqHeight * The requested height of the resulting bitmap * @param cache * The ImageCache used to find candidate bitmaps for use with * inBitmap * @return A bitmap sampled down from the original with the same aspect * ratio and dimensions that are equal to or greater than the * requested width and height */ public static Bitmap decodeSampledBitmapFromResource(Resources res, int resId, int reqWidth, int reqHeight, ImageCache cache) { // BEGIN_INCLUDE (read_bitmap_dimensions) // First decode with inJustDecodeBounds=true to check dimensions final BitmapFactory.Options options = new BitmapFactory.Options(); options.inJustDecodeBounds = true; BitmapFactory.decodeResource(res, resId, options); // Calculate inSampleSize options.inSampleSize = calculateInSampleSize(options, reqWidth, reqHeight); // END_INCLUDE (read_bitmap_dimensions) // If we're running on Honeycomb or newer, try to use inBitmap if (Utils.hasHoneycomb()) { addInBitmapOptions(options, cache); } // Decode bitmap with inSampleSize set options.inJustDecodeBounds = false; return BitmapFactory.decodeResource(res, resId, options); } /** * Decode and sample down a bitmap from a file to the requested width and * height. 
* * @param filename * The full path of the file to decode * @param reqWidth * The requested width of the resulting bitmap * @param reqHeight * The requested height of the resulting bitmap * @param cache * The ImageCache used to find candidate bitmaps for use with * inBitmap * @return A bitmap sampled down from the original with the same aspect * ratio and dimensions that are equal to or greater than the * requested width and height */ public static Bitmap decodeSampledBitmapFromFile(String filename, int reqWidth, int reqHeight, ImageCache cache) { // First decode with inJustDecodeBounds=true to check dimensions final BitmapFactory.Options options = new BitmapFactory.Options(); options.inJustDecodeBounds = true; BitmapFactory.decodeFile(filename, options); // Calculate inSampleSize options.inSampleSize = calculateInSampleSize(options, reqWidth, reqHeight); // If we're running on Honeycomb or newer, try to use inBitmap if (Utils.hasHoneycomb()) { addInBitmapOptions(options, cache); } // Decode bitmap with inSampleSize set options.inJustDecodeBounds = false; return BitmapFactory.decodeFile(filename, options); } /** * Decode and sample down a bitmap from a file input stream to the requested * width and height. 
* * @param fileDescriptor * The file descriptor to read from * @param reqWidth * The requested width of the resulting bitmap * @param reqHeight * The requested height of the resulting bitmap * @param cache * The ImageCache used to find candidate bitmaps for use with * inBitmap * @return A bitmap sampled down from the original with the same aspect * ratio and dimensions that are equal to or greater than the * requested width and height */ public static Bitmap decodeSampledBitmapFromDescriptor( FileDescriptor fileDescriptor, int reqWidth, int reqHeight, ImageCache cache) { // First decode with inJustDecodeBounds=true to check dimensions final BitmapFactory.Options options = new BitmapFactory.Options(); options.inJustDecodeBounds = true; BitmapFactory.decodeFileDescriptor(fileDescriptor, null, options); // Calculate inSampleSize options.inSampleSize = calculateInSampleSize(options, reqWidth, reqHeight); // Decode bitmap with inSampleSize set options.inJustDecodeBounds = false; // If we're running on Honeycomb or newer, try to use inBitmap if (Utils.hasHoneycomb()) { addInBitmapOptions(options, cache); } return BitmapFactory .decodeFileDescriptor(fileDescriptor, null, options); } @TargetApi(Build.VERSION_CODES.HONEYCOMB) private static void addInBitmapOptions(BitmapFactory.Options options, ImageCache cache) { // BEGIN_INCLUDE(add_bitmap_options) // inBitmap only works with mutable bitmaps so force the decoder to // return mutable bitmaps. options.inMutable = true; if (cache != null) { // Try and find a bitmap to use for inBitmap Bitmap inBitmap = cache.getBitmapFromReusableSet(options); if (inBitmap != null) { options.inBitmap = inBitmap; } } // END_INCLUDE(add_bitmap_options) } /** * Calculate an inSampleSize for use in a * {@link BitmapFactory.Options} object when decoding * bitmaps using the decode* methods from * {@link BitmapFactory}. 
This implementation calculates * the closest inSampleSize that is a power of 2 and will result in the * final decoded bitmap having a width and height equal to or larger than * the requested width and height. * * @param options * An options object with out* params already populated (run * through a decode* method with inJustDecodeBounds==true * @param reqWidth * The requested width of the resulting bitmap * @param reqHeight * The requested height of the resulting bitmap * @return The value to be used for inSampleSize */ public static int calculateInSampleSize(BitmapFactory.Options options, int reqWidth, int reqHeight) { // BEGIN_INCLUDE (calculate_sample_size) // Raw height and width of image final int height = options.outHeight; final int width = options.outWidth; int inSampleSize = 1; if (height > reqHeight || width > reqWidth) { final int halfHeight = height / 2; final int halfWidth = width / 2; // Calculate the largest inSampleSize value that is a power of 2 and // keeps both // height and width larger than the requested height and width. while ((halfHeight / inSampleSize) > reqHeight && (halfWidth / inSampleSize) > reqWidth) { inSampleSize *= 2; } // This offers some additional logic in case the image has a strange // aspect ratio. For example, a panorama may have a much larger // width than height. In these cases the total pixels might still // end up being too large to fit comfortably in memory, so we should // be more aggressive with sample down the image (=larger // inSampleSize). long totalPixels = width * height / inSampleSize; // Anything more than 2x the requested pixels we'll sample down // further final long totalReqPixelsCap = reqWidth * reqHeight * 2; while (totalPixels > totalReqPixelsCap) { inSampleSize *= 2; totalPixels /= 2; } } return inSampleSize; // END_INCLUDE (calculate_sample_size) } }
package org.flowable.engine.test.api.event;

import java.util.ArrayList;
import java.util.List;

import org.flowable.engine.common.api.delegate.event.FlowableEngineEntityEvent;
import org.flowable.engine.common.api.delegate.event.FlowableEngineEventType;
import org.flowable.engine.common.api.delegate.event.FlowableEntityEvent;
import org.flowable.engine.common.api.delegate.event.FlowableEvent;
import org.flowable.engine.common.api.delegate.event.FlowableEventListener;
import org.flowable.engine.delegate.event.FlowableActivityCancelledEvent;
import org.flowable.engine.delegate.event.FlowableActivityEvent;
import org.flowable.engine.delegate.event.FlowableCancelledEvent;
import org.flowable.engine.delegate.event.FlowableProcessStartedEvent;
import org.flowable.engine.event.EventLogEntry;
import org.flowable.engine.impl.event.logger.EventLogger;
import org.flowable.engine.impl.persistence.entity.ExecutionEntity;
import org.flowable.engine.impl.test.PluggableFlowableTestCase;
import org.flowable.engine.runtime.ProcessInstance;
import org.flowable.engine.test.Deployment;
import org.flowable.task.service.impl.persistence.entity.TaskEntity;

/**
 * Verifies the exact, ordered sequence of engine events dispatched around a BPMN
 * call activity: execution-entity creation, process start/completion, activity
 * start/completion, task lifecycle, cancellation, and terminate-end-event
 * completion. Each test registers its own listener and asserts events by index,
 * so the assertion order is significant and must match the engine's dispatch order.
 */
public class CallActivityTest extends PluggableFlowableTestCase {

    // Listener registered in initializeServices(); removed again in tearDown().
    private CallActivityEventListener listener;

    protected EventLogger databaseEventLogger;

    @Override
    protected void setUp() throws Exception {
        super.setUp();

        // Database event logger setup
        databaseEventLogger = new EventLogger(processEngineConfiguration.getClock(),
                processEngineConfiguration.getObjectMapper());
        runtimeService.addEventListener(databaseEventLogger);
    }

    @Override
    protected void tearDown() throws Exception {
        if (listener != null) {
            listener.clearEventsReceived();
            processEngineConfiguration.getEventDispatcher().removeEventListener(listener);
        }

        // Remove entries
        for (EventLogEntry eventLogEntry : managementService.getEventLogEntries(null, null)) {
            managementService.deleteEventLogEntry(eventLogEntry.getLogNumber());
        }

        // Database event logger teardown
        runtimeService.removeEventListener(databaseEventLogger);

        super.tearDown();
    }

    @Override
    protected void initializeServices() {
        super.initializeServices();

        listener = new CallActivityEventListener();
        processEngineConfiguration.getEventDispatcher().addEventListener(listener);
    }

    /**
     * Happy path: the called (external) subprocess ends with a none end event.
     * Asserts variable in/out copying across the call activity boundary and the
     * full 26-event sequence from process start to normal completion.
     */
    @Deployment(resources = { "org/flowable/engine/test/api/event/CallActivityTest.testCallActivity.bpmn20.xml",
            "org/flowable/engine/test/api/event/CallActivityTest.testCalledActivity.bpmn20.xml" })
    public void testCallActivityCalledHasNoneEndEvent() throws Exception {
        CallActivityEventListener mylistener = new CallActivityEventListener();
        processEngineConfiguration.getEventDispatcher().addEventListener(mylistener);

        ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("callActivity");
        assertNotNull(processInstance);

        // no task should be active in parent process
        org.flowable.task.api.Task task = taskService.createTaskQuery().processInstanceId(processInstance.getId()).singleResult();
        assertNull(task);

        // only active task should be the one defined in the external subprocess
        task = taskService.createTaskQuery().active().singleResult();
        assertNotNull(task);
        assertEquals("User Task2 in External", task.getName());

        ExecutionEntity subprocessInstance = (ExecutionEntity) runtimeService.createExecutionQuery()
                .rootProcessInstanceId(processInstance.getId())
                .onlySubProcessExecutions()
                .singleResult();
        assertNotNull(subprocessInstance);

        assertEquals("Default name", runtimeService.getVariable(processInstance.getId(), "Name"));
        assertEquals("Default name", runtimeService.getVariable(subprocessInstance.getId(), "FullName"));

        // set the variable in the subprocess to validate that the new value is returned from callActivity
        runtimeService.setVariable(subprocessInstance.getId(), "FullName", "Mary Smith");
        assertEquals("Default name", runtimeService.getVariable(processInstance.getId(), "Name"));
        assertEquals("Mary Smith", runtimeService.getVariable(subprocessInstance.getId(), "FullName"));

        // complete user task so that external subprocess will flow to terminate end
        taskService.complete(task.getId());

        task = taskService.createTaskQuery().processInstanceId(processInstance.getId()).singleResult();
        assertNotNull(task);
        assertEquals("User Task1", task.getName());

        // validate that the variable was copied back when Call Activity finished
        assertEquals("Mary Smith", runtimeService.getVariable(processInstance.getId(), "Name"));

        // complete user task so that parent process will terminate normally
        taskService.complete(task.getId());

        // Events are asserted strictly by dispatch index from here down.
        FlowableEntityEvent entityEvent = (FlowableEntityEvent) mylistener.getEventsReceived().get(0);
        assertEquals(FlowableEngineEventType.ENTITY_CREATED, entityEvent.getType());
        ExecutionEntity executionEntity = (ExecutionEntity) entityEvent.getEntity();
        // this is the root process so parent null
        assertNull(executionEntity.getParentId());
        String processExecutionId = executionEntity.getId();

        entityEvent = (FlowableEntityEvent) mylistener.getEventsReceived().get(1);
        assertEquals(FlowableEngineEventType.ENTITY_CREATED, entityEvent.getType());
        executionEntity = (ExecutionEntity) entityEvent.getEntity();
        assertNotNull(executionEntity.getParentId());
        assertEquals(processExecutionId, executionEntity.getParentId());

        FlowableEvent flowableEvent = mylistener.getEventsReceived().get(2);
        assertEquals(FlowableEngineEventType.PROCESS_STARTED, flowableEvent.getType());

        FlowableActivityEvent activityEvent = (FlowableActivityEvent) mylistener.getEventsReceived().get(3);
        assertEquals(FlowableEngineEventType.ACTIVITY_STARTED, activityEvent.getType());
        assertEquals("startEvent", activityEvent.getActivityType());

        activityEvent = (FlowableActivityEvent) mylistener.getEventsReceived().get(4);
        assertEquals(FlowableEngineEventType.ACTIVITY_COMPLETED, activityEvent.getType());
        assertEquals("startEvent", activityEvent.getActivityType());

        activityEvent = (FlowableActivityEvent) mylistener.getEventsReceived().get(5);
        assertEquals(FlowableEngineEventType.ACTIVITY_STARTED, activityEvent.getType());
        assertEquals("callActivity1", activityEvent.getActivityId());

        entityEvent = (FlowableEntityEvent) mylistener.getEventsReceived().get(6);
        assertEquals(FlowableEngineEventType.ENTITY_CREATED, entityEvent.getType());
        executionEntity = (ExecutionEntity) entityEvent.getEntity();
        assertNull(executionEntity.getParentId());
        assertEquals(executionEntity.getId(), executionEntity.getProcessInstanceId());

        // user task within the external subprocess
        entityEvent = (FlowableEntityEvent) mylistener.getEventsReceived().get(7);
        assertEquals(FlowableEngineEventType.ENTITY_CREATED, entityEvent.getType());
        executionEntity = (ExecutionEntity) entityEvent.getEntity();
        assertEquals("calledtask1", executionEntity.getActivityId());

        // external subprocess
        flowableEvent = mylistener.getEventsReceived().get(8);
        assertEquals(FlowableEngineEventType.PROCESS_STARTED, flowableEvent.getType());

        // start event in external subprocess
        activityEvent = (FlowableActivityEvent) mylistener.getEventsReceived().get(9);
        assertEquals(FlowableEngineEventType.ACTIVITY_STARTED, activityEvent.getType());
        assertEquals("startEvent", activityEvent.getActivityType());
        assertEquals("startevent2", activityEvent.getActivityId());

        activityEvent = (FlowableActivityEvent) mylistener.getEventsReceived().get(10);
        assertEquals(FlowableEngineEventType.ACTIVITY_COMPLETED, activityEvent.getType());
        assertEquals("startEvent", activityEvent.getActivityType());
        assertEquals("startevent2", activityEvent.getActivityId());

        // user task within external subprocess
        activityEvent = (FlowableActivityEvent) mylistener.getEventsReceived().get(11);
        assertEquals(FlowableEngineEventType.ACTIVITY_STARTED, activityEvent.getType());
        assertEquals("calledtask1", activityEvent.getActivityId());
        assertEquals("userTask", activityEvent.getActivityType());

        entityEvent = (FlowableEntityEvent) mylistener.getEventsReceived().get(12);
        assertEquals(FlowableEngineEventType.TASK_CREATED, entityEvent.getType());
        TaskEntity taskEntity = (TaskEntity) entityEvent.getEntity();
        assertEquals("User Task2 in External", taskEntity.getName());

        entityEvent = (FlowableEntityEvent) mylistener.getEventsReceived().get(13);
        assertEquals(FlowableEngineEventType.TASK_COMPLETED, entityEvent.getType());
        taskEntity = (TaskEntity) entityEvent.getEntity();
        assertEquals("User Task2 in External", taskEntity.getName());

        // user task within external subprocess
        activityEvent = (FlowableActivityEvent) mylistener.getEventsReceived().get(14);
        assertEquals(FlowableEngineEventType.ACTIVITY_COMPLETED, activityEvent.getType());
        assertEquals("calledtask1", activityEvent.getActivityId());
        assertEquals("userTask", activityEvent.getActivityType());

        // None event in external subprocess
        activityEvent = (FlowableActivityEvent) mylistener.getEventsReceived().get(15);
        assertEquals(FlowableEngineEventType.ACTIVITY_STARTED, activityEvent.getType());
        assertEquals("noneevent2", activityEvent.getActivityId());
        assertEquals("endEvent", activityEvent.getActivityType());

        activityEvent = (FlowableActivityEvent) mylistener.getEventsReceived().get(16);
        assertEquals(FlowableEngineEventType.ACTIVITY_COMPLETED, activityEvent.getType());
        assertEquals("noneevent2", activityEvent.getActivityId());
        assertEquals("endEvent", activityEvent.getActivityType());

        // the external subprocess
        entityEvent = (FlowableEntityEvent) mylistener.getEventsReceived().get(17);
        assertEquals(FlowableEngineEventType.PROCESS_COMPLETED, entityEvent.getType());

        // callActivity
        activityEvent = (FlowableActivityEvent) mylistener.getEventsReceived().get(18);
        assertEquals(FlowableEngineEventType.ACTIVITY_COMPLETED, activityEvent.getType());
        assertEquals("callActivity", activityEvent.getActivityType());

        // user task within parent process
        activityEvent = (FlowableActivityEvent) mylistener.getEventsReceived().get(19);
        assertEquals(FlowableEngineEventType.ACTIVITY_STARTED, activityEvent.getType());
        assertEquals("task1", activityEvent.getActivityId());
        assertEquals("userTask", activityEvent.getActivityType());

        entityEvent = (FlowableEntityEvent) mylistener.getEventsReceived().get(20);
        assertEquals(FlowableEngineEventType.TASK_CREATED, entityEvent.getType());
        taskEntity = (TaskEntity) entityEvent.getEntity();
        assertEquals("User Task1", taskEntity.getName());

        entityEvent = (FlowableEntityEvent) mylistener.getEventsReceived().get(21);
        assertEquals(FlowableEngineEventType.TASK_COMPLETED, entityEvent.getType());
        taskEntity = (TaskEntity) entityEvent.getEntity();
        assertEquals("User Task1", taskEntity.getName());

        activityEvent = (FlowableActivityEvent) mylistener.getEventsReceived().get(22);
        assertEquals(FlowableEngineEventType.ACTIVITY_COMPLETED, activityEvent.getType());
        assertEquals("task1", activityEvent.getActivityId());
        assertEquals("userTask", activityEvent.getActivityType());

        activityEvent = (FlowableActivityEvent) mylistener.getEventsReceived().get(23);
        assertEquals(FlowableEngineEventType.ACTIVITY_STARTED, activityEvent.getType());
        assertEquals("noneevent1", activityEvent.getActivityId());
        assertEquals("endEvent", activityEvent.getActivityType());

        activityEvent = (FlowableActivityEvent) mylistener.getEventsReceived().get(24);
        assertEquals(FlowableEngineEventType.ACTIVITY_COMPLETED, activityEvent.getType());
        assertEquals("noneevent1", activityEvent.getActivityId());
        assertEquals("endEvent", activityEvent.getActivityType());

        // the parent process
        entityEvent = (FlowableEntityEvent) mylistener.getEventsReceived().get(25);
        assertEquals(FlowableEngineEventType.PROCESS_COMPLETED, entityEvent.getType());

        assertEquals(26, mylistener.getEventsReceived().size());
    }

    /**
     * Deleting the parent process instance while the called subprocess is still
     * active must cancel the subprocess task, the subprocess, the call activity,
     * and finally the parent process — 17 events in total.
     */
    @Deployment(resources = { "org/flowable/engine/test/api/event/CallActivityTest.testCallActivity.bpmn20.xml",
            "org/flowable/engine/test/api/event/CallActivityTest.testCalledActivity.bpmn20.xml" })
    public void testDeleteParentWhenCallActivityCalledHasNoneEndEvent() throws Exception {
        CallActivityEventListener mylistener = new CallActivityEventListener();
        processEngineConfiguration.getEventDispatcher().addEventListener(mylistener);

        ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("callActivity");
        assertNotNull(processInstance);

        // no task should be active in parent process
        org.flowable.task.api.Task task = taskService.createTaskQuery().processInstanceId(processInstance.getId()).singleResult();
        assertNull(task);

        // only active task should be the one defined in the external subprocess
        task = taskService.createTaskQuery().active().singleResult();
        assertNotNull(task);
        assertEquals("User Task2 in External", task.getName());

        ExecutionEntity subprocessInstance = (ExecutionEntity) runtimeService.createExecutionQuery()
                .rootProcessInstanceId(processInstance.getId())
                .onlySubProcessExecutions()
                .singleResult();
        assertNotNull(subprocessInstance);

        assertEquals("Default name", runtimeService.getVariable(processInstance.getId(), "Name"));
        assertEquals("Default name", runtimeService.getVariable(subprocessInstance.getId(), "FullName"));

        runtimeService.deleteProcessInstance(processInstance.getId(), null);

        FlowableEntityEvent entityEvent = (FlowableEntityEvent) mylistener.getEventsReceived().get(0);
        assertEquals(FlowableEngineEventType.ENTITY_CREATED, entityEvent.getType());
        ExecutionEntity executionEntity = (ExecutionEntity) entityEvent.getEntity();
        // this is the root process so parent null
        assertNull(executionEntity.getParentId());
        String processExecutionId = executionEntity.getId();

        entityEvent = (FlowableEntityEvent) mylistener.getEventsReceived().get(1);
        assertEquals(FlowableEngineEventType.ENTITY_CREATED, entityEvent.getType());
        executionEntity = (ExecutionEntity) entityEvent.getEntity();
        assertNotNull(executionEntity.getParentId());
        assertEquals(processExecutionId, executionEntity.getParentId());

        FlowableEvent flowableEvent = mylistener.getEventsReceived().get(2);
        assertEquals(FlowableEngineEventType.PROCESS_STARTED, flowableEvent.getType());

        FlowableActivityEvent activityEvent = (FlowableActivityEvent) mylistener.getEventsReceived().get(3);
        assertEquals(FlowableEngineEventType.ACTIVITY_STARTED, activityEvent.getType());
        assertEquals("startEvent", activityEvent.getActivityType());

        activityEvent = (FlowableActivityEvent) mylistener.getEventsReceived().get(4);
        assertEquals(FlowableEngineEventType.ACTIVITY_COMPLETED, activityEvent.getType());
        assertEquals("startEvent", activityEvent.getActivityType());

        activityEvent = (FlowableActivityEvent) mylistener.getEventsReceived().get(5);
        assertEquals(FlowableEngineEventType.ACTIVITY_STARTED, activityEvent.getType());
        assertEquals("callActivity1", activityEvent.getActivityId());

        entityEvent = (FlowableEntityEvent) mylistener.getEventsReceived().get(6);
        assertEquals(FlowableEngineEventType.ENTITY_CREATED, entityEvent.getType());
        executionEntity = (ExecutionEntity) entityEvent.getEntity();
        assertNull(executionEntity.getParentId());
        assertEquals(executionEntity.getId(), executionEntity.getProcessInstanceId());

        // user task within the external subprocess
        entityEvent = (FlowableEntityEvent) mylistener.getEventsReceived().get(7);
        assertEquals(FlowableEngineEventType.ENTITY_CREATED, entityEvent.getType());
        executionEntity = (ExecutionEntity) entityEvent.getEntity();
        assertEquals("calledtask1", executionEntity.getActivityId());

        // external subprocess
        flowableEvent = mylistener.getEventsReceived().get(8);
        assertEquals(FlowableEngineEventType.PROCESS_STARTED, flowableEvent.getType());

        // start event in external subprocess
        activityEvent = (FlowableActivityEvent) mylistener.getEventsReceived().get(9);
        assertEquals(FlowableEngineEventType.ACTIVITY_STARTED, activityEvent.getType());
        assertEquals("startEvent", activityEvent.getActivityType());
        assertEquals("startevent2", activityEvent.getActivityId());

        activityEvent = (FlowableActivityEvent) mylistener.getEventsReceived().get(10);
        assertEquals(FlowableEngineEventType.ACTIVITY_COMPLETED, activityEvent.getType());
        assertEquals("startEvent", activityEvent.getActivityType());
        assertEquals("startevent2", activityEvent.getActivityId());

        // user task within external subprocess
        activityEvent = (FlowableActivityEvent) mylistener.getEventsReceived().get(11);
        assertEquals(FlowableEngineEventType.ACTIVITY_STARTED, activityEvent.getType());
        assertEquals("calledtask1", activityEvent.getActivityId());
        assertEquals("userTask", activityEvent.getActivityType());

        entityEvent = (FlowableEntityEvent) mylistener.getEventsReceived().get(12);
        assertEquals(FlowableEngineEventType.TASK_CREATED, entityEvent.getType());
        TaskEntity taskEntity = (TaskEntity) entityEvent.getEntity();
        assertEquals("User Task2 in External", taskEntity.getName());

        // user task within external subprocess cancelled
        FlowableActivityCancelledEvent activityCancelledEvent = (FlowableActivityCancelledEvent) mylistener.getEventsReceived().get(13);
        assertEquals(FlowableEngineEventType.ACTIVITY_CANCELLED, activityCancelledEvent.getType());
        assertEquals("User Task2 in External", activityCancelledEvent.getActivityName());
        assertEquals("userTask", activityCancelledEvent.getActivityType());

        // external subprocess cancelled
        FlowableCancelledEvent processCancelledEvent = (FlowableCancelledEvent) mylistener.getEventsReceived().get(14);
        assertEquals(FlowableEngineEventType.PROCESS_CANCELLED, processCancelledEvent.getType());
        assertEquals(subprocessInstance.getId(), processCancelledEvent.getProcessInstanceId());

        // expecting cancelled event for Call Activity
        activityCancelledEvent = (FlowableActivityCancelledEvent) mylistener.getEventsReceived().get(15);
        assertEquals(FlowableEngineEventType.ACTIVITY_CANCELLED, activityCancelledEvent.getType());
        assertEquals("callActivity", activityCancelledEvent.getActivityType());

        // parent process cancelled
        processCancelledEvent = (FlowableCancelledEvent) mylistener.getEventsReceived().get(16);
        assertEquals(FlowableEngineEventType.PROCESS_CANCELLED, processCancelledEvent.getType());
        assertEquals(processInstance.getId(), processCancelledEvent.getProcessInstanceId());

        assertEquals(17, mylistener.getEventsReceived().size());
    }

    /**
     * Like the happy-path test but the called subprocess ends with a terminate end
     * event, so the subprocess emits PROCESS_COMPLETED_WITH_TERMINATE_END_EVENT
     * instead of PROCESS_COMPLETED. Events are walked with an idx counter.
     */
    @Deployment(resources = { "org/flowable/engine/test/api/event/CallActivityTest.testCallActivityTerminateEnd.bpmn20.xml",
            "org/flowable/engine/test/api/event/CallActivityTest.testCalledActivityTerminateEnd.bpmn20.xml" })
    public void testCallActivityCalledHasTerminateEndEvent() throws Exception {
        CallActivityEventListener mylistener = new CallActivityEventListener();
        processEngineConfiguration.getEventDispatcher().addEventListener(mylistener);

        ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("callActivityTerminateEnd");
        assertNotNull(processInstance);

        // no task should be active in parent process
        org.flowable.task.api.Task task = taskService.createTaskQuery().processInstanceId(processInstance.getId()).singleResult();
        assertNull(task);

        // only active task should be the one defined in the external subprocess
        task = taskService.createTaskQuery().active().singleResult();
        assertNotNull(task);
        assertEquals("User Task2 in External with Terminate End Event", task.getName());

        ExecutionEntity subprocessInstance = (ExecutionEntity) runtimeService.createExecutionQuery()
                .rootProcessInstanceId(processInstance.getId())
                .onlySubProcessExecutions()
                .singleResult();
        assertNotNull(subprocessInstance);

        assertEquals("Default name", runtimeService.getVariable(processInstance.getId(), "Name"));
        assertEquals("Default name", runtimeService.getVariable(subprocessInstance.getId(), "FullName"));

        // set the variable in the subprocess to validate that the new value is returned from callActivity
        runtimeService.setVariable(subprocessInstance.getId(), "FullName", "Mary Smith");
        assertEquals("Default name", runtimeService.getVariable(processInstance.getId(), "Name"));
        assertEquals("Mary Smith", runtimeService.getVariable(subprocessInstance.getId(), "FullName"));

        // complete user task so that external subprocess will flow to terminate end
        taskService.complete(task.getId());

        task = taskService.createTaskQuery().processInstanceId(processInstance.getId()).singleResult();
        assertNotNull(task);
        assertEquals("User Task1 in Parent", task.getName());

        // validate that the variable was copied back when Call Activity finished
        assertEquals("Mary Smith", runtimeService.getVariable(processInstance.getId(), "Name"));

        // complete user task so that parent process will terminate normally
        taskService.complete(task.getId());

        FlowableEntityEvent entityEvent = (FlowableEntityEvent) mylistener.getEventsReceived().get(0);
        assertEquals(FlowableEngineEventType.ENTITY_CREATED, entityEvent.getType());
        ExecutionEntity executionEntity = (ExecutionEntity) entityEvent.getEntity();
        // this is the root process so parent null
        assertNull(executionEntity.getParentId());
        String processExecutionId = executionEntity.getId();

        int idx = 1;
        entityEvent = (FlowableEntityEvent) mylistener.getEventsReceived().get(idx++);
        assertEquals(FlowableEngineEventType.ENTITY_CREATED, entityEvent.getType());
        executionEntity = (ExecutionEntity) entityEvent.getEntity();
        assertNotNull(executionEntity.getParentId());
        assertEquals(processExecutionId, executionEntity.getParentId());

        FlowableEvent flowableEvent = mylistener.getEventsReceived().get(idx++);
        assertEquals(FlowableEngineEventType.PROCESS_STARTED, flowableEvent.getType());

        FlowableActivityEvent activityEvent = (FlowableActivityEvent) mylistener.getEventsReceived().get(idx++);
        assertEquals(FlowableEngineEventType.ACTIVITY_STARTED, activityEvent.getType());
        assertEquals("startEvent", activityEvent.getActivityType());

        activityEvent = (FlowableActivityEvent) mylistener.getEventsReceived().get(idx++);
        assertEquals(FlowableEngineEventType.ACTIVITY_COMPLETED, activityEvent.getType());
        assertEquals("startEvent", activityEvent.getActivityType());

        activityEvent = (FlowableActivityEvent) mylistener.getEventsReceived().get(idx++);
        assertEquals(FlowableEngineEventType.ACTIVITY_STARTED, activityEvent.getType());
        assertEquals("callActivityId1", activityEvent.getActivityId());

        entityEvent = (FlowableEntityEvent) mylistener.getEventsReceived().get(idx++);
        assertEquals(FlowableEngineEventType.ENTITY_CREATED, entityEvent.getType());
        executionEntity = (ExecutionEntity) entityEvent.getEntity();
        assertNull(executionEntity.getParentId());
        assertEquals(executionEntity.getId(), executionEntity.getProcessInstanceId());

        // user task within the external subprocess
        entityEvent = (FlowableEntityEvent) mylistener.getEventsReceived().get(idx++);
        assertEquals(FlowableEngineEventType.ENTITY_CREATED, entityEvent.getType());
        executionEntity = (ExecutionEntity) entityEvent.getEntity();
        assertEquals("calledtask1", executionEntity.getActivityId());

        // external subprocess
        flowableEvent = mylistener.getEventsReceived().get(idx++);
        assertEquals(FlowableEngineEventType.PROCESS_STARTED, flowableEvent.getType());
        executionEntity = (ExecutionEntity) ((FlowableProcessStartedEvent) flowableEvent).getEntity();
        assertEquals(subprocessInstance.getId(), executionEntity.getParentId());

        // start event in external subprocess
        activityEvent = (FlowableActivityEvent) mylistener.getEventsReceived().get(idx++);
        assertEquals(FlowableEngineEventType.ACTIVITY_STARTED, activityEvent.getType());
        assertEquals("startEvent", activityEvent.getActivityType());
        assertEquals("startevent2", activityEvent.getActivityId());

        activityEvent = (FlowableActivityEvent) mylistener.getEventsReceived().get(idx++);
        assertEquals(FlowableEngineEventType.ACTIVITY_COMPLETED, activityEvent.getType());
        assertEquals("startEvent", activityEvent.getActivityType());
        assertEquals("startevent2", activityEvent.getActivityId());

        // user task within external subprocess
        activityEvent = (FlowableActivityEvent) mylistener.getEventsReceived().get(idx++);
        assertEquals(FlowableEngineEventType.ACTIVITY_STARTED, activityEvent.getType());
        assertEquals("calledtask1", activityEvent.getActivityId());
        assertEquals("userTask", activityEvent.getActivityType());

        entityEvent = (FlowableEntityEvent) mylistener.getEventsReceived().get(idx++);
        assertEquals(FlowableEngineEventType.TASK_CREATED, entityEvent.getType());
        TaskEntity taskEntity = (TaskEntity) entityEvent.getEntity();
        assertEquals("User Task2 in External with Terminate End Event", taskEntity.getName());

        entityEvent = (FlowableEntityEvent) mylistener.getEventsReceived().get(idx++);
        assertEquals(FlowableEngineEventType.TASK_COMPLETED, entityEvent.getType());
        taskEntity = (TaskEntity) entityEvent.getEntity();
        assertEquals("User Task2 in External with Terminate End Event", taskEntity.getName());

        // user task within external subprocess
        activityEvent = (FlowableActivityEvent) mylistener.getEventsReceived().get(idx++);
        assertEquals(FlowableEngineEventType.ACTIVITY_COMPLETED, activityEvent.getType());
        assertEquals("calledtask1", activityEvent.getActivityId());
        assertEquals("userTask", activityEvent.getActivityType());

        // None event in external subprocess
        activityEvent = (FlowableActivityEvent) mylistener.getEventsReceived().get(idx++);
        assertEquals(FlowableEngineEventType.ACTIVITY_STARTED, activityEvent.getType());
        assertEquals("terminateEnd2", activityEvent.getActivityId());
        assertEquals("endEvent", activityEvent.getActivityType());

        // PROCESS_COMPLETED_WITH_TERMINATE_END_EVENT instead of PROCESS_COMPLETED
        // because external subprocess defined with terminate end event
        entityEvent = (FlowableEntityEvent) mylistener.getEventsReceived().get(idx++);
        assertEquals(subprocessInstance.getId(), ((FlowableEngineEntityEvent) entityEvent).getExecutionId());
        assertEquals(FlowableEngineEventType.PROCESS_COMPLETED_WITH_TERMINATE_END_EVENT, entityEvent.getType());

        // the external subprocess (callActivity)
        activityEvent = (FlowableActivityEvent) mylistener.getEventsReceived().get(idx++);
        assertEquals(FlowableEngineEventType.ACTIVITY_COMPLETED, activityEvent.getType());
        assertEquals("callActivity", activityEvent.getActivityType());

        // user task within parent process
        activityEvent = (FlowableActivityEvent) mylistener.getEventsReceived().get(idx++);
        assertEquals(FlowableEngineEventType.ACTIVITY_STARTED, activityEvent.getType());
        assertEquals("task1", activityEvent.getActivityId());
        assertEquals("userTask", activityEvent.getActivityType());

        entityEvent = (FlowableEntityEvent) mylistener.getEventsReceived().get(idx++);
        assertEquals(FlowableEngineEventType.TASK_CREATED, entityEvent.getType());
        taskEntity = (TaskEntity) entityEvent.getEntity();
        assertEquals("User Task1 in Parent", taskEntity.getName());

        entityEvent = (FlowableEntityEvent) mylistener.getEventsReceived().get(idx++);
        assertEquals(FlowableEngineEventType.TASK_COMPLETED, entityEvent.getType());
        taskEntity = (TaskEntity) entityEvent.getEntity();
        assertEquals("User Task1 in Parent", taskEntity.getName());

        activityEvent = (FlowableActivityEvent) mylistener.getEventsReceived().get(idx++);
        assertEquals(FlowableEngineEventType.ACTIVITY_COMPLETED, activityEvent.getType());
        assertEquals("task1", activityEvent.getActivityId());
        assertEquals("userTask", activityEvent.getActivityType());

        activityEvent = (FlowableActivityEvent) mylistener.getEventsReceived().get(idx++);
        assertEquals(FlowableEngineEventType.ACTIVITY_STARTED, activityEvent.getType());
        assertEquals("noneevent1", activityEvent.getActivityId());
        assertEquals("endEvent", activityEvent.getActivityType());

        activityEvent = (FlowableActivityEvent) mylistener.getEventsReceived().get(idx++);
        assertEquals(FlowableEngineEventType.ACTIVITY_COMPLETED, activityEvent.getType());
        assertEquals("noneevent1", activityEvent.getActivityId());
        assertEquals("endEvent", activityEvent.getActivityType());

        // the parent process
        entityEvent = (FlowableEntityEvent) mylistener.getEventsReceived().get(idx++);
        assertEquals(FlowableEngineEventType.PROCESS_COMPLETED, entityEvent.getType());

        assertEquals(idx, mylistener.getEventsReceived().size());
    }

    /**
     * Records the engine events the tests assert on: ENTITY_CREATED for
     * ExecutionEntity instances only, plus the activity/task/process lifecycle
     * events listed in the switch. All other event types are ignored.
     */
    class CallActivityEventListener implements FlowableEventListener {

        private List<FlowableEvent> eventsReceived;

        public CallActivityEventListener() {
            eventsReceived = new ArrayList<>();
        }

        public List<FlowableEvent> getEventsReceived() {
            return eventsReceived;
        }

        public void clearEventsReceived() {
            eventsReceived.clear();
        }

        @Override
        public void onEvent(FlowableEvent event) {
            FlowableEngineEventType engineEventType = (FlowableEngineEventType) event.getType();
            switch (engineEventType) {
            case ENTITY_CREATED:
                FlowableEntityEvent entityEvent = (FlowableEntityEvent) event;
                // only executions are interesting for the index-based assertions above
                if (entityEvent.getEntity() instanceof ExecutionEntity) {
                    eventsReceived.add(event);
                }
                break;
            case ACTIVITY_STARTED:
            case ACTIVITY_COMPLETED:
            case ACTIVITY_CANCELLED:
            case TASK_CREATED:
            case TASK_COMPLETED:
            case PROCESS_STARTED:
            case PROCESS_COMPLETED:
            case PROCESS_CANCELLED:
            case PROCESS_COMPLETED_WITH_TERMINATE_END_EVENT:
                eventsReceived.add(event);
                break;
            default:
                break;
            }
        }

        @Override
        public boolean isFailOnException() {
            // never fail the engine operation because of a listener error
            return false;
        }
    }
}
package org.keycloak.testsuite.console.clients;

import org.jboss.arquillian.graphene.page.Page;
import org.junit.Test;
import org.keycloak.testsuite.console.page.users.UserRoleMappingsForm;
import static org.junit.Assert.*;
import org.keycloak.representations.idm.ClientRepresentation;
import org.keycloak.representations.idm.RoleRepresentation;
import org.keycloak.testsuite.console.page.clients.ClientRole;
import org.keycloak.testsuite.console.page.clients.ClientRoles;
import org.keycloak.testsuite.console.page.clients.CreateClientRole;
import org.keycloak.testsuite.console.page.users.User;

/**
 * Admin-console UI tests for client roles, driven through Arquillian Graphene
 * page objects. Only {@link #testAddClientRole()} is active; three further
 * tests remain commented out below pending KEYCLOAK-1496/1497/1504.
 *
 * Created by fkiss.
 */
public class ClientRolesTest extends AbstractClientTest {

    @Page
    private ClientRoles clientRolesPage;
    @Page
    private CreateClientRole createClientRolePage;
    @Page
    private ClientRole clientRolePage;
    @Page
    private User userPage; // note: cannot call navigateTo() unless user id is set
    @Page
    private UserRoleMappingsForm userRolesPage;

    /**
     * Creates a client role through the console UI: opens the role-creation
     * form, fills the basic attributes from {@code roleRep}, saves, then sets
     * composite roles. Assumes the client's Roles tab is already displayed.
     */
    public void addClientRole(RoleRepresentation roleRep) {
        // assertCurrentUrl(clientRoles);
        clientRolesPage.roles().addRole();
        // assertCurrentUrl(createClientRole); // can't do this, need client id to build uri
        createClientRolePage.form().setBasicAttributes(roleRep);
        createClientRolePage.form().save();
        assertFlashMessageSuccess();
        createClientRolePage.form().setCompositeRoles(roleRep);
        // TODO add verification of notification message when KEYCLOAK-1497 gets resolved
    }

    /**
     * End-to-end: create a client, add a role to it via the UI, verify the
     * role shows up in the roles table, then delete the client and verify it
     * is gone.
     */
    @Test
    public void testAddClientRole() {
        ClientRepresentation newClient = createClientRepresentation("test-client1", "http://example.com/*");
        RoleRepresentation newRole = new RoleRepresentation("client-role", "");
        createClient(newClient);
        assertFlashMessageSuccess();
        clientPage.tabs().roles();
        addClientRole(newRole);
        assertFlashMessageSuccess();
        clientRolePage.backToClientRolesViaBreadcrumb();
        assertFalse(clientRolesPage.roles().getRolesFromTableRows().isEmpty());
        // clean up: delete the client and confirm it no longer exists
        configure().clients();
        clientsPage.table().search(newClient.getClientId());
        clientsPage.table().deleteClient(newClient.getClientId());
        modalDialog.confirmDeletion();
        assertFlashMessageSuccess();
        assertNull(clientsPage.table().findClient(newClient.getClientId()));
    }

    // NOTE(review): the tests below are disabled pending the referenced Jira
    // issues; consider deleting them once the issues are resolved or the
    // tests are rewritten.

    // @Test
    // @Jira("KEYCLOAK-1497")
    // public void testAddClientRoleToUser() {
    //     ClientRepresentation newClient = createClientRepresentation("test-client2", "http://example.com/*");
    //     RoleRepresentation newRole = new RoleRepresentation("client-role2", "");
    //     String testUsername = "test-user2";
    //     UserRepresentation newUser = new UserRepresentation();
    //     newUser.setUsername(testUsername);
    //     newUser.credential(PASSWORD, "pass");
    //
    //     createClient(newClient);
    //     assertFlashMessageSuccess();
    //
    //     client.tabs().roles();
    //     addClientRole(newRole);
    //     assertFlashMessageSuccess();
    //
    //     clientRole.backToClientRolesViaBreadcrumb();
    //     assertFalse(clientRoles.table().searchRoles(newRole.getName()).isEmpty());
    //
    //     users.navigateTo();
    //     createUser(newUser);
    //     flashMessage.waitUntilPresent();
    //     assertTrue(flashMessage.getText(), flashMessage.isSuccess());
    //
    //     users.navigateTo();
    //     users.findUser(testUsername);
    //     users.clickUser(testUsername);
    //
    //     user.tabs().roleMappings();
    //     roleMappings.selectClientRole(newClient.getClientId());
    //     roleMappings.addAvailableClientRole(newRole.getName());
    //     //flashMessage.waitUntilPresent();
    //     //assertTrue(flashMessage.getText(), flashMessage.isSuccess()); //KEYCLOAK-1497
    //     assertTrue(roleMappings.isAssignedClientRole(newRole.getName()));
    //
    //     users.navigateTo();
    //     users.deleteUser(testUsername);
    //
    //     clients.navigateTo();
    //     clients.deleteClient(newClient.getClientId());
    //     assertTrue(flashMessage.getText(), flashMessage.isSuccess());
    //     assertNull(clients.findClient(newClient.getClientId()));
    // }

    // @Test
    // @Jira("KEYCLOAK-1496, KEYCLOAK-1497")
    // @Ignore // TODO use REST to create test data (user/roles)
    // public void testAddCompositeRealmClientRoleToUser() {
    //     ClientRepresentation newClient = createClientRepresentation("test-client3", "http://example.com/*");
    //     RoleRepresentation clientCompositeRole = new RoleRepresentation("client-composite-role", "");
    //     String testUsername = "test-user3";
    //     UserRepresentation newUser = new UserRepresentation();
    //     newUser.setUsername(testUsername);
    //     newUser.credential(PASSWORD, "pass");
    //
    //     RoleRepresentation subRole1 = new RoleRepresentation("sub-role1", "");
    //     RoleRepresentation subRole2 = new RoleRepresentation("sub-role2", "");
    //     List<RoleRepresentation> testRoles = new ArrayList<>();
    //     clientCompositeRole.setComposite(true);
    //     testRoles.add(subRole1);
    //     testRoles.add(subRole2);
    //
    //     //create sub-roles
    //     configure().roles();
    //     for (RoleRepresentation role : testRoles) {
    //         realmRoles.addRole(role);
    //         flashMessage.waitUntilPresent();
    //         assertTrue(flashMessage.getText(), flashMessage.isSuccess());
    //         configure().roles();
    //         assertEquals(role.getName(), realmRoles.findRole(role.getName()).getName());
    //     }
    //
    //     //create client
    //     clients.navigateTo();
    //     createClient(newClient);
    //     flashMessage.waitUntilPresent();
    //     assertTrue(flashMessage.getText(), flashMessage.isSuccess());
    //
    //     //add client role
    //     configure().roles();
    //     realmRoles.addRole(clientCompositeRole);
    //     flashMessage.waitUntilPresent();
    //     assertTrue(flashMessage.getText(), flashMessage.isSuccess());
    //
    //     //add realm composite roles
    //     realmRoles.setCompositeRole(clientCompositeRole);
    //     roleMappings.addAvailableRole(subRole1.getName(), subRole2.getName());
    //     //flashMessage.waitUntilPresent();
    //     //assertTrue(flashMessage.getText(), flashMessage.isSuccess()); //KEYCLOAK-1497
    //
    //     //create user
    //     users.navigateTo();
    //     createUser(newUser);
    //     flashMessage.waitUntilPresent();
    //     assertTrue(flashMessage.getText(), flashMessage.isSuccess());
    //
    //     //add client role to user and verify
    //     users.navigateTo();
    //     users.findUser(testUsername);
    //     users.clickUser(testUsername);
    //
    //     user.tabs().roleMappings();
    //     roleMappings.selectClientRole(newClient.getClientId());
    //     roleMappings.addAvailableClientRole(clientCompositeRole.getName());
    //     //flashMessage.waitUntilPresent();
    //     //assertTrue(flashMessage.getText(), flashMessage.isSuccess()); //KEYCLOAK-1497
    //     assertTrue(roleMappings.isAssignedClientRole(clientCompositeRole.getName()));
    //     assertTrue(roleMappings.isEffectiveRealmRolesComplete(subRole1, subRole2)); //KEYCLOAK-1496
    //     assertTrue(roleMappings.isEffectiveClientRolesComplete(clientCompositeRole));
    //
    //     //delete everything
    //     users.navigateTo();
    //     users.deleteUser(testUsername);
    //
    //     configure().roles();
    //     realmRoles.deleteRole(subRole1);
    //     configure().roles();
    //     realmRoles.deleteRole(subRole2);
    //
    //     clients.navigateTo();
    //     clients.deleteClient(newClient.getClientId());
    //     assertTrue(flashMessage.getText(), flashMessage.isSuccess());
    //     assertNull(clients.findClient(newClient.getClientId()));
    // }

    // @Test
    // @Jira("KEYCLOAK-1504, KEYCLOAK-1497")
    // public void testAddCompositeClientRoleToUser() {
    //     ClientRepresentation newClient = createClientRepresentation("test-client4", "http://example.com/*");
    //     RoleRepresentation clientCompositeRole = new RoleRepresentation("client-composite-role2", "");
    //     String testUsername = "test-user4";
    //     UserRepresentation newUser = new UserRepresentation();
    //     newUser.setUsername(testUsername);
    //     newUser.credential(PASSWORD, "pass");
    //
    //     RoleRepresentation subRole1 = new RoleRepresentation("client-sub-role1", "");
    //     RoleRepresentation subRole2 = new RoleRepresentation("client-sub-role2", "");
    //     List<RoleRepresentation> testRoles = new ArrayList<>();
    //     clientCompositeRole.setComposite(true);
    //     testRoles.add(clientCompositeRole);
    //     testRoles.add(subRole1);
    //     testRoles.add(subRole2);
    //
    //     //create client
    //     createClient(newClient);
    //     flashMessage.waitUntilPresent();
    //     assertTrue(flashMessage.getText(), flashMessage.isSuccess());
    //
    //     //create sub-roles
    //     configure().roles();
    //     for (RoleRepresentation role : testRoles) {
    //         clients.navigateTo();
    //         clients.clickClient(newClient.getClientId());
    //         configure().roles();
    //         realmRoles.addRole(role);
    //         flashMessage.waitUntilPresent();
    //         assertTrue(flashMessage.getText(), flashMessage.isSuccess());
    //     }
    //
    //     //add client composite roles
    //     clients.navigateTo();
    //     clients.clickClient(newClient);
    //     configure().roles();
    //     realmRoles.clickRole(clientCompositeRole);
    //     realmRoles.setCompositeRole(clientCompositeRole);
    //     roleMappings.selectClientRole(newClient.getClientId());
    //     roleMappings.addAvailableClientRole(subRole1.getName(), subRole2.getName());
    //     //flashMessage.waitUntilPresent();
    //     //assertTrue(flashMessage.getText(), flashMessage.isSuccess()); //KEYCLOAK-1504, KEYCLOAK-1497
    //
    //     //create user
    //     users.navigateTo();
    //     createUser(newUser);
    //     flashMessage.waitUntilPresent();
    //     assertTrue(flashMessage.getText(), flashMessage.isSuccess());
    //
    //     //add client role to user and verify
    //     users.navigateTo();
    //     users.findUser(testUsername);
    //     users.clickUser(testUsername);
    //
    //     user.tabs().roleMappings();
    //     roleMappings.selectClientRole(newClient.getClientId());
    //     roleMappings.addAvailableClientRole(clientCompositeRole.getName());
    //     assertTrue(roleMappings.isAssignedClientRole(clientCompositeRole.getName()));
    //     assertTrue(roleMappings.isEffectiveClientRolesComplete(clientCompositeRole, subRole1, subRole2));
    //
    //     //delete everything
    //     users.navigateTo();
    //     users.deleteUser(testUsername);
    //
    //     configure().roles();
    //     realmRoles.deleteRole(subRole1);
    //     configure().roles();
    //     realmRoles.deleteRole(subRole2);
    //
    //     clients.navigateTo();
    //     clients.deleteClient(newClient.getClientId());
    //     assertTrue(flashMessage.getText(), flashMessage.isSuccess());
    //     assertNull(clients.findClient(newClient.getClientId()));
    // }
}
/*
 * Copyright 2000-2016 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.refactoring.introduceField;

import com.intellij.codeInsight.AnnotationUtil;
import com.intellij.codeInsight.completion.JavaCompletionUtil;
import com.intellij.ide.util.ClassFilter;
import com.intellij.ide.util.PropertiesComponent;
import com.intellij.ide.util.TreeClassChooser;
import com.intellij.ide.util.TreeClassChooserFactory;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.event.DocumentAdapter;
import com.intellij.openapi.editor.event.DocumentEvent;
import com.intellij.openapi.help.HelpManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.LanguageLevelProjectExtension;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.Comparing;
import com.intellij.pom.java.LanguageLevel;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.JavaCodeStyleManager;
import com.intellij.psi.codeStyle.SuggestedNameInfo;
import com.intellij.psi.codeStyle.VariableKind;
import com.intellij.psi.impl.source.resolve.JavaResolveUtil;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.refactoring.HelpID;
import com.intellij.refactoring.JavaRefactoringSettings;
import com.intellij.refactoring.RefactoringBundle;
import com.intellij.refactoring.introduceParameter.AbstractJavaInplaceIntroducer;
import com.intellij.refactoring.ui.*;
import com.intellij.refactoring.util.CommonRefactoringUtil;
import com.intellij.refactoring.util.EnumConstantsUtil;
import com.intellij.refactoring.util.RefactoringMessageUtil;
import com.intellij.ui.RecentsManager;
import com.intellij.ui.ReferenceEditorComboWithBrowseButton;
import com.intellij.ui.StateRestoringCheckBox;
import com.intellij.usageView.UsageViewUtil;
import com.intellij.util.ArrayUtil;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.ui.UIUtil;
import gnu.trove.THashSet;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;

import javax.swing.*;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.ItemEvent;
import java.awt.event.ItemListener;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.Set;

/**
 * Options dialog for the "Introduce Constant" refactoring: lets the user pick
 * the constant's name, type, visibility, target class, whether to replace all
 * occurrences, whether to delete the original local variable, whether to
 * annotate the constant with @NonNls, and (where applicable) whether to
 * introduce an enum constant instead of a static final field.
 *
 * Several UI components (myPanel, myTypePanel, ...) are bound from a GUI
 * designer form; they are populated in {@link #createNorthPanel()}.
 */
class IntroduceConstantDialog extends DialogWrapper {
  private static final Logger LOG = Logger.getInstance("#com.intellij.refactoring.introduceField.IntroduceConstantDialog");
  @NonNls private static final String RECENTS_KEY = "IntroduceConstantDialog.RECENTS_KEY";
  @NonNls protected static final String NONNLS_SELECTED_PROPERTY = "INTRODUCE_CONSTANT_NONNLS";

  private final Project myProject;
  private final PsiClass myParentClass;
  private final PsiExpression myInitializerExpression;
  private final PsiLocalVariable myLocalVariable;
  private final boolean myInvokedOnDeclaration;
  private final PsiExpression[] myOccurrences;
  private final String myEnteredName;
  private final int myOccurrencesCount;
  // mutable: re-resolved whenever the target-class text field changes
  private PsiClass myTargetClass;
  private final TypeSelectorManager myTypeSelectorManager;
  private NameSuggestionsField myNameField;
  private JCheckBox myCbReplaceAll;
  private TypeSelector myTypeSelector;
  private StateRestoringCheckBox myCbDeleteVariable;
  private final JavaCodeStyleManager myCodeStyleManager;
  private ReferenceEditorComboWithBrowseButton myTfTargetClassName;
  // set in doOKAction() when the user picked a class other than the parent
  private BaseExpressionToFieldHandler.TargetDestination myDestinationClass;
  private JPanel myTypePanel;
  private JPanel myTargetClassNamePanel;
  private JPanel myPanel;
  private JLabel myTypeLabel;
  private JPanel myNameSuggestionPanel;
  private JLabel myNameSuggestionLabel;
  private JLabel myTargetClassNameLabel;
  private JCheckBox myCbNonNls;
  private JPanel myVisibilityPanel;
  private final JavaVisibilityPanel myVPanel;
  private final JCheckBox myIntroduceEnumConstantCb = new JCheckBox(RefactoringBundle.message("introduce.constant.enum.cb"), true);

  /**
   * @param localVariable  non-null when invoked on a local variable declaration
   * @param occurrences    all occurrences of the initializer expression that
   *                       may be replaced by the new constant
   * @param enteredName    name typed by the user before the dialog opened, or null
   */
  IntroduceConstantDialog(Project project,
                          PsiClass parentClass,
                          PsiExpression initializerExpression,
                          PsiLocalVariable localVariable,
                          boolean isInvokedOnDeclaration,
                          PsiExpression[] occurrences,
                          PsiClass targetClass,
                          TypeSelectorManager typeSelectorManager,
                          String enteredName) {
    super(project, true);
    myProject = project;
    myParentClass = parentClass;
    myInitializerExpression = initializerExpression;
    myLocalVariable = localVariable;
    myInvokedOnDeclaration = isInvokedOnDeclaration;
    myOccurrences = occurrences;
    myEnteredName = enteredName;
    myOccurrencesCount = occurrences.length;
    myTargetClass = targetClass;
    myTypeSelectorManager = typeSelectorManager;
    myDestinationClass = null;

    setTitle(IntroduceConstantHandler.REFACTORING_NAME);
    myCodeStyleManager = JavaCodeStyleManager.getInstance(myProject);
    myVPanel = new JavaVisibilityPanel(false, true);
    myVisibilityPanel.add(myVPanel, BorderLayout.CENTER);
    init();

    // default to the last visibility the user chose, falling back to public
    String initialVisibility = JavaRefactoringSettings.getInstance().INTRODUCE_CONSTANT_VISIBILITY;
    if (initialVisibility == null) {
      initialVisibility = PsiModifier.PUBLIC;
    }
    myVPanel.setVisibility(initialVisibility);
    myIntroduceEnumConstantCb.setEnabled(isSuitableForEnumConstant());
    updateVisibilityPanel();
    updateButtons();
  }

  public String getEnteredName() {
    return myNameField.getEnteredName();
  }

  private String getTargetClassName() {
    return myTfTargetClassName.getText().trim();
  }

  public BaseExpressionToFieldHandler.TargetDestination getDestinationClass() {
    return myDestinationClass;
  }

  public boolean introduceEnumConstant() {
    return myIntroduceEnumConstantCb.isEnabled() && myIntroduceEnumConstantCb.isSelected();
  }

  public String getFieldVisibility() {
    return myVPanel.getVisibility();
  }

  public boolean isReplaceAllOccurrences() {
    return myOccurrencesCount > 1 && myCbReplaceAll.isSelected();
  }

  public PsiType getSelectedType() {
    return myTypeSelector.getSelectedType();
  }

  @NotNull
  protected Action[] createActions() {
    return new Action[]{getOKAction(), getCancelAction(), getHelpAction()};
  }

  protected void doHelpAction() {
    HelpManager.getInstance().invokeHelp(HelpID.INTRODUCE_CONSTANT);
  }

  /**
   * Builds the main dialog area: type selector, name field with suggestions,
   * target-class combo, enum-constant checkbox, replace-all / delete-variable
   * checkboxes and the @NonNls checkbox.
   */
  protected JComponent createNorthPanel() {
    myTypeSelector = myTypeSelectorManager.getTypeSelector();
    myTypePanel.setLayout(new BorderLayout());
    myTypePanel.add(myTypeSelector.getComponent(), BorderLayout.CENTER);
    if (myTypeSelector.getFocusableComponent() != null) {
      myTypeLabel.setLabelFor(myTypeSelector.getFocusableComponent());
    }

    myNameField = new NameSuggestionsField(myProject);
    myNameSuggestionPanel.setLayout(new BorderLayout());
    // keep the OK button in sync with name validity
    myNameField.addDataChangedListener(new NameSuggestionsField.DataChanged() {
      public void dataChanged() {
        updateButtons();
      }
    });
    myNameSuggestionPanel.add(myNameField.getComponent(), BorderLayout.CENTER);
    myNameSuggestionLabel.setLabelFor(myNameField.getFocusableComponent());

    // offer the parent class of every occurrence as a possible target class
    Set<String> possibleClassNames = new LinkedHashSet<>();
    for (final PsiExpression occurrence : myOccurrences) {
      final PsiClass parentClass = new IntroduceConstantHandler().getParentClass(occurrence);
      if (parentClass != null && parentClass.getQualifiedName() != null) {
        possibleClassNames.add(parentClass.getQualifiedName());
      }
    }
    myTfTargetClassName = new ReferenceEditorComboWithBrowseButton(new ChooseClassAction(), "", myProject, true, RECENTS_KEY);
    myTargetClassNamePanel.setLayout(new BorderLayout());
    myTargetClassNamePanel.add(myTfTargetClassName, BorderLayout.CENTER);
    myTargetClassNameLabel.setLabelFor(myTfTargetClassName);
    for (String possibleClassName : possibleClassNames) {
      myTfTargetClassName.prependItem(possibleClassName);
    }
    myTfTargetClassName.getChildComponent().setSelectedItem(myParentClass.getQualifiedName());
    // re-resolve the target class and enum eligibility on every edit
    myTfTargetClassName.getChildComponent().addDocumentListener(new DocumentAdapter() {
      public void documentChanged(DocumentEvent e) {
        targetClassChanged();
        enableEnumDependant(introduceEnumConstant());
      }
    });
    myIntroduceEnumConstantCb.addActionListener(new ActionListener() {
      public void actionPerformed(final ActionEvent e) {
        enableEnumDependant(introduceEnumConstant());
      }
    });
    final JPanel enumPanel = new JPanel(new BorderLayout());
    enumPanel.add(myIntroduceEnumConstantCb, BorderLayout.EAST);
    myTargetClassNamePanel.add(enumPanel, BorderLayout.SOUTH);

    // derive a property name from the local variable (if any) to seed name suggestions
    final String propertyName;
    if (myLocalVariable != null) {
      propertyName = myCodeStyleManager.variableNameToPropertyName(myLocalVariable.getName(), VariableKind.LOCAL_VARIABLE);
    }
    else {
      propertyName = null;
    }
    final NameSuggestionsManager nameSuggestionsManager =
      new NameSuggestionsManager(myTypeSelector, myNameField,
                                 createNameSuggestionGenerator(propertyName, myInitializerExpression,
                                                               myCodeStyleManager, myEnteredName, myParentClass));
    nameSuggestionsManager.setLabelsFor(myTypeLabel, myNameSuggestionLabel);

    //////////
    if (myOccurrencesCount > 1) {
      myCbReplaceAll.addItemListener(new ItemListener() {
        public void itemStateChanged(ItemEvent e) {
          updateTypeSelector();
          myNameField.requestFocusInWindow();
        }
      });
      myCbReplaceAll.setText(RefactoringBundle.message("replace.all.occurences", myOccurrencesCount));
    }
    else {
      myCbReplaceAll.setVisible(false);
    }

    if (myLocalVariable != null) {
      if (myInvokedOnDeclaration) {
        // invoked on the declaration itself: the variable is always removed
        myCbDeleteVariable.setEnabled(false);
        myCbDeleteVariable.setSelected(true);
      }
      else if (myCbReplaceAll != null) {
        // "delete variable" only makes sense when all occurrences are replaced
        updateCbDeleteVariable();
        myCbReplaceAll.addItemListener(new ItemListener() {
          public void itemStateChanged(ItemEvent e) {
            updateCbDeleteVariable();
          }
        });
      }
    }
    else {
      myCbDeleteVariable.setVisible(false);
    }

    // @NonNls checkbox is shown only for String constants in a JDK 5+ project
    // where the @NonNls annotation class is resolvable
    final PsiManager psiManager = PsiManager.getInstance(myProject);
    if ((myTypeSelectorManager.isSuggestedType(CommonClassNames.JAVA_LANG_STRING) ||
         (myLocalVariable != null && AnnotationUtil.isAnnotated(myLocalVariable, AnnotationUtil.NON_NLS, false, false))) &&
        LanguageLevelProjectExtension.getInstance(psiManager.getProject()).getLanguageLevel().isAtLeast(LanguageLevel.JDK_1_5) &&
        JavaPsiFacade.getInstance(psiManager.getProject()).findClass(AnnotationUtil.NON_NLS, myParentClass.getResolveScope()) != null) {
      final PropertiesComponent component = PropertiesComponent.getInstance(myProject);
      myCbNonNls.setSelected(component.getBoolean(NONNLS_SELECTED_PROPERTY));
      // remember the user's choice across dialog invocations
      myCbNonNls.addItemListener(new ItemListener() {
        public void itemStateChanged(ItemEvent e) {
          component.setValue(NONNLS_SELECTED_PROPERTY, myCbNonNls.isSelected());
        }
      });
    } else {
      myCbNonNls.setVisible(false);
    }

    updateTypeSelector();
    enableEnumDependant(introduceEnumConstant());
    return myPanel;
  }

  public void setReplaceAllOccurrences(boolean replaceAllOccurrences) {
    if (myCbReplaceAll != null) {
      myCbReplaceAll.setSelected(replaceAllOccurrences);
    }
  }

  /**
   * Produces name suggestions for the new constant: style-manager suggestions
   * for a static final field, de-duplicated against existing fields of
   * {@code parentClass}, with the user-entered name (if any) put first.
   */
  protected static NameSuggestionsGenerator createNameSuggestionGenerator(final String propertyName,
                                                                          final PsiExpression psiExpression,
                                                                          final JavaCodeStyleManager codeStyleManager,
                                                                          final String enteredName,
                                                                          final PsiClass parentClass) {
    return new NameSuggestionsGenerator() {
      public SuggestedNameInfo getSuggestedNameInfo(PsiType type) {
        SuggestedNameInfo nameInfo = codeStyleManager.suggestVariableName(VariableKind.STATIC_FINAL_FIELD, propertyName, psiExpression, type);
        if (psiExpression != null) {
          String[] names = nameInfo.names;
          for (int i = 0, namesLength = names.length; i < namesLength; i++) {
            String name = names[i];
            if (parentClass.findFieldByName(name, false) != null) {
              // avoid clashing with an existing field of the target class
              names[i] = codeStyleManager.suggestUniqueVariableName(name, psiExpression, true);
            }
          }
        }
        final String[] strings = AbstractJavaInplaceIntroducer
          .appendUnresolvedExprName(JavaCompletionUtil.completeVariableNameForRefactoring(codeStyleManager, type, VariableKind.LOCAL_VARIABLE, nameInfo),
                                    psiExpression);
        return new SuggestedNameInfo.Delegate(enteredName != null ? ArrayUtil.mergeArrays(new String[]{enteredName}, strings) : strings, nameInfo);
      }
    };
  }

  private void updateButtons() {
    setOKActionEnabled(PsiNameHelper.getInstance(myProject).isIdentifier(getEnteredName()));
  }

  // re-resolve the target class from the text field and refresh dependent UI
  private void targetClassChanged() {
    final String targetClassName = getTargetClassName();
    myTargetClass = JavaPsiFacade.getInstance(myProject).findClass(targetClassName, GlobalSearchScope.projectScope(myProject));
    updateVisibilityPanel();
    myIntroduceEnumConstantCb.setEnabled(isSuitableForEnumConstant());
  }

  private boolean isSuitableForEnumConstant() {
    return EnumConstantsUtil.isSuitableForEnumConstant(getSelectedType(), myTargetClass) &&
           PsiTreeUtil.getParentOfType(myInitializerExpression, PsiEnumConstant.class) == null;
  }

  // enum constants are implicitly public and never @NonNls
  private void enableEnumDependant(boolean enable) {
    if (enable) {
      myVPanel.disableAllButPublic();
    }
    else {
      updateVisibilityPanel();
    }
    myCbNonNls.setEnabled(!enable);
  }

  protected JComponent createCenterPanel() {
    return new JPanel();
  }

  public boolean isDeleteVariable() {
    return myInvokedOnDeclaration || myCbDeleteVariable != null && myCbDeleteVariable.isSelected();
  }

  public boolean isAnnotateAsNonNls() {
    return myCbNonNls != null && myCbNonNls.isSelected();
  }

  private void updateCbDeleteVariable() {
    if (!myCbReplaceAll.isSelected()) {
      myCbDeleteVariable.makeUnselectable(false);
    }
    else {
      myCbDeleteVariable.makeSelectable();
    }
  }

  private void updateTypeSelector() {
    if (myCbReplaceAll != null) {
      myTypeSelectorManager.setAllOccurrences(myCbReplaceAll.isSelected());
    }
    else {
      myTypeSelectorManager.setAllOccurrences(false);
    }
  }

  /**
   * Restricts the visibility radio buttons to modifiers under which a field in
   * the target class would be accessible from every occurrence; if the current
   * selection becomes invalid, falls back to the widest remaining modifier.
   */
  private void updateVisibilityPanel() {
    if (myTargetClass != null && myTargetClass.isInterface()) {
      myVPanel.disableAllButPublic();
    }
    else {
      UIUtil.setEnabled(myVisibilityPanel, true, true);
      // exclude all modifiers not visible from all occurrences
      final Set<String> visible = new THashSet<>();
      visible.add(PsiModifier.PRIVATE);
      visible.add(PsiModifier.PROTECTED);
      visible.add(PsiModifier.PACKAGE_LOCAL);
      visible.add(PsiModifier.PUBLIC);
      for (PsiExpression occurrence : myOccurrences) {
        final PsiManager psiManager = PsiManager.getInstance(myProject);
        for (Iterator<String> iterator = visible.iterator(); iterator.hasNext();) {
          String modifier = iterator.next();
          try {
            // probe accessibility with a synthetic field carrying the modifier
            final String modifierText = PsiModifier.PACKAGE_LOCAL.equals(modifier) ? "" : modifier + " ";
            final PsiField field = JavaPsiFacade.getInstance(psiManager.getProject()).getElementFactory().createFieldFromText(modifierText + "int xxx;", myTargetClass);
            if (!JavaResolveUtil.isAccessible(field, myTargetClass, field.getModifierList(), occurrence, myTargetClass, null)) {
              iterator.remove();
            }
          }
          catch (IncorrectOperationException e) {
            LOG.error(e);
          }
        }
      }
      if (!visible.contains(getFieldVisibility())) {
        if (visible.contains(PsiModifier.PUBLIC)) myVPanel.setVisibility(PsiModifier.PUBLIC);
        if (visible.contains(PsiModifier.PACKAGE_LOCAL)) myVPanel.setVisibility(PsiModifier.PACKAGE_LOCAL);
        if (visible.contains(PsiModifier.PROTECTED)) myVPanel.setVisibility(PsiModifier.PROTECTED);
        if (visible.contains(PsiModifier.PRIVATE)) myVPanel.setVisibility(PsiModifier.PRIVATE);
      }
    }
  }

  /**
   * Validates target class and field name, asks for confirmation when the
   * target class does not exist or already has a field with the chosen name,
   * persists the chosen visibility and recent target class, then closes.
   */
  protected void doOKAction() {
    final String targetClassName = getTargetClassName();
    PsiClass newClass = myParentClass;

    if (!"".equals(targetClassName) && !Comparing.strEqual(targetClassName, myParentClass.getQualifiedName())) {
      newClass = JavaPsiFacade.getInstance(myProject).findClass(targetClassName, GlobalSearchScope.projectScope(myProject));
      if (newClass == null) {
        // class not found: offer to create it (handled downstream via TargetDestination)
        if (Messages.showOkCancelDialog(myProject,
                                        RefactoringBundle.message("class.does.not.exist.in.the.project"),
                                        IntroduceConstantHandler.REFACTORING_NAME,
                                        Messages.getErrorIcon()) != Messages.OK) {
          return;
        }
        myDestinationClass = new BaseExpressionToFieldHandler.TargetDestination(targetClassName, myParentClass);
      }
      else {
        myDestinationClass = new BaseExpressionToFieldHandler.TargetDestination(newClass);
      }
    }

    String fieldName = getEnteredName();
    String errorString = null;
    if ("".equals(fieldName)) {
      errorString = RefactoringBundle.message("no.field.name.specified");
    }
    else if (!PsiNameHelper.getInstance(myProject).isIdentifier(fieldName)) {
      errorString = RefactoringMessageUtil.getIncorrectIdentifierMessage(fieldName);
    }
    else if (newClass != null && !myParentClass.getLanguage().equals(newClass.getLanguage())) {
      errorString = RefactoringBundle.message("move.to.different.language",
                                              UsageViewUtil.getType(myParentClass),
                                              myParentClass.getQualifiedName(),
                                              newClass.getQualifiedName());
    }
    if (errorString != null) {
      CommonRefactoringUtil.showErrorMessage(IntroduceFieldHandler.REFACTORING_NAME, errorString, HelpID.INTRODUCE_FIELD, myProject);
      return;
    }

    if (newClass != null) {
      PsiField oldField = newClass.findFieldByName(fieldName, true);
      if (oldField != null) {
        // a field with this name already exists (possibly inherited): confirm overwrite intent
        int answer = Messages.showYesNoDialog(myProject,
                                              RefactoringBundle.message("field.exists", fieldName, oldField.getContainingClass().getQualifiedName()),
                                              IntroduceFieldHandler.REFACTORING_NAME,
                                              Messages.getWarningIcon());
        if (answer != Messages.YES) {
          return;
        }
      }
    }

    JavaRefactoringSettings.getInstance().INTRODUCE_CONSTANT_VISIBILITY = getFieldVisibility();
    RecentsManager.getInstance(myProject).registerRecentEntry(RECENTS_KEY, targetClassName);
    super.doOKAction();
  }

  public JComponent getPreferredFocusedComponent() {
    return myNameField.getFocusableComponent();
  }

  /** Opens a class chooser restricted to top-level and static inner classes. */
  private class ChooseClassAction implements ActionListener {
    public void actionPerformed(ActionEvent e) {
      TreeClassChooser chooser = TreeClassChooserFactory.getInstance(myProject)
        .createWithInnerClassesScopeChooser(RefactoringBundle.message("choose.destination.class"),
                                            GlobalSearchScope.projectScope(myProject),
                                            new ClassFilter() {
                                              public boolean isAccepted(PsiClass aClass) {
                                                // only classes where a static field is legal
                                                return aClass.getParent() instanceof PsiJavaFile || aClass.hasModifierProperty(PsiModifier.STATIC);
                                              }
                                            }, null);
      if (myTargetClass != null) {
        chooser.selectDirectory(myTargetClass.getContainingFile().getContainingDirectory());
      }
      chooser.showDialog();
      PsiClass aClass = chooser.getSelected();
      if (aClass != null) {
        myTfTargetClassName.setText(aClass.getQualifiedName());
      }
    }
  }
}
// Copyright (C) 2012 The Android Open Source Project // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.gerrit.server.project; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkState; import static java.nio.charset.StandardCharsets.UTF_8; import com.google.common.collect.Lists; import com.google.gerrit.common.Nullable; import com.google.gerrit.common.data.SubmitRecord; import com.google.gerrit.common.data.SubmitTypeRecord; import com.google.gerrit.extensions.client.SubmitType; import com.google.gerrit.reviewdb.client.Account; import com.google.gerrit.reviewdb.client.Change; import com.google.gerrit.reviewdb.client.PatchSet; import com.google.gerrit.rules.PrologEnvironment; import com.google.gerrit.rules.StoredValues; import com.google.gerrit.server.CurrentUser; import com.google.gerrit.server.query.change.ChangeData; import com.google.gwtorm.server.OrmException; import com.googlecode.prolog_cafe.exceptions.CompileException; import com.googlecode.prolog_cafe.exceptions.ReductionLimitException; import com.googlecode.prolog_cafe.lang.IntegerTerm; import com.googlecode.prolog_cafe.lang.ListTerm; import com.googlecode.prolog_cafe.lang.Prolog; import com.googlecode.prolog_cafe.lang.StructureTerm; import com.googlecode.prolog_cafe.lang.SymbolTerm; import com.googlecode.prolog_cafe.lang.Term; import com.googlecode.prolog_cafe.lang.VariableTerm; import org.slf4j.Logger; import 
org.slf4j.LoggerFactory;

import java.io.ByteArrayInputStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

/**
 * Evaluates a submit-like Prolog rule found in the rules.pl file of the current
 * project and filters the results through rules found in the parent projects,
 * all the way up to All-Projects.
 */
public class SubmitRuleEvaluator {
  private static final Logger log = LoggerFactory
      .getLogger(SubmitRuleEvaluator.class);

  // Generic message returned to callers when logErrors is enabled; the real
  // cause is written to the server log instead of being exposed.
  private static final String DEFAULT_MSG =
      "Error evaluating project rules, check server log";

  /** @return a single RULE_ERROR record carrying the generic error message. */
  public static List<SubmitRecord> defaultRuleError() {
    return createRuleError(DEFAULT_MSG);
  }

  /** @return a single RULE_ERROR record carrying the given error message. */
  public static List<SubmitRecord> createRuleError(String err) {
    SubmitRecord rec = new SubmitRecord();
    rec.status = SubmitRecord.Status.RULE_ERROR;
    rec.errorMessage = err;
    return Collections.singletonList(rec);
  }

  /** @return a RULE_ERROR submit-type record carrying the generic message. */
  public static SubmitTypeRecord defaultTypeError() {
    return createTypeError(DEFAULT_MSG);
  }

  /** @return a RULE_ERROR submit-type record carrying the given message. */
  public static SubmitTypeRecord createTypeError(String err) {
    SubmitTypeRecord rec = new SubmitTypeRecord();
    rec.status = SubmitTypeRecord.Status.RULE_ERROR;
    rec.errorMessage = err;
    return rec;
  }

  /**
   * Exception thrown when the label term of a submit record
   * unexpectedly didn't contain a user term.
   */
  private static class UserTermExpected extends Exception {
    private static final long serialVersionUID = 1L;

    public UserTermExpected(SubmitRecord.Label label) {
      super(String.format("A label with the status %s must contain a user.",
          label.toString()));
    }
  }

  private final ChangeData cd;
  private final ChangeControl control;

  // Lazily loaded by initPatchSet() if not supplied via setPatchSet().
  private PatchSet patchSet;
  private boolean fastEvalLabels;
  private boolean allowDraft;
  private boolean allowClosed;
  private boolean skipFilters;
  private String rule;
  private boolean logErrors = true;
  // Total Prolog reductions used by the last evaluation, including filters.
  private long reductionsConsumed;

  // Term of the submit rule located during the last evaluation; set by
  // evaluateImpl() and exposed via getSubmitRule().
  private Term submitRule;

  public SubmitRuleEvaluator(ChangeData cd) throws OrmException {
    this.cd = cd;
    this.control = cd.changeControl();
  }

  /**
   * @param ps patch set of the change to evaluate. If not set, the current
   *     patch set will be loaded from {@link #canSubmit()} or {@link
   *     #getSubmitType}.
   * @return this
   */
  public SubmitRuleEvaluator setPatchSet(PatchSet ps) {
    checkArgument(ps.getId().getParentKey().equals(cd.getId()),
        "Patch set %s does not match change %s", ps.getId(), cd.getId());
    patchSet = ps;
    return this;
  }

  /**
   * @param fast if true, infer label information from rules rather than reading
   *     from project config.
   * @return this
   */
  public SubmitRuleEvaluator setFastEvalLabels(boolean fast) {
    fastEvalLabels = fast;
    return this;
  }

  /**
   * @param allow whether to allow {@link #canSubmit()} on closed changes.
   * @return this
   */
  public SubmitRuleEvaluator setAllowClosed(boolean allow) {
    allowClosed = allow;
    return this;
  }

  /**
   * @param allow whether to allow {@link #canSubmit()} on draft changes.
   * @return this
   */
  public SubmitRuleEvaluator setAllowDraft(boolean allow) {
    allowDraft = allow;
    return this;
  }

  /**
   * @param skip if true, submit filter will not be applied.
   * @return this
   */
  public SubmitRuleEvaluator setSkipSubmitFilters(boolean skip) {
    skipFilters = skip;
    return this;
  }

  /**
   * @param rule custom rule to use, or null to use refs/meta/config:rules.pl.
   * @return this
   */
  public SubmitRuleEvaluator setRule(@Nullable String rule) {
    this.rule = rule;
    return this;
  }

  /**
   * @param log whether to log error messages in addition to returning error
   *     records. If true, error record messages will be less descriptive.
   * @return this
   */
  public SubmitRuleEvaluator setLogErrors(boolean log) {
    logErrors = log;
    return this;
  }

  /** @return Prolog reductions consumed during evaluation. */
  public long getReductionsConsumed() {
    return reductionsConsumed;
  }

  /**
   * Evaluate the submit rules.
   *
   * @return List of {@link SubmitRecord} objects returned from the evaluated
   *     rules, including any errors.
   */
  public List<SubmitRecord> canSubmit() {
    try {
      initPatchSet();
    } catch (OrmException e) {
      return ruleError("Error looking up patch set "
          + control.getChange().currentPatchSetId());
    }
    Change c = control.getChange();
    if (!allowClosed && c.getStatus().isClosed()) {
      // Closed changes are reported as CLOSED rather than evaluated,
      // unless the caller explicitly opted in via setAllowClosed(true).
      SubmitRecord rec = new SubmitRecord();
      rec.status = SubmitRecord.Status.CLOSED;
      return Collections.singletonList(rec);
    }
    if ((c.getStatus() == Change.Status.DRAFT || patchSet.isDraft())
        && !allowDraft) {
      return cannotSubmitDraft();
    }

    List<Term> results;
    try {
      results = evaluateImpl("locate_submit_rule", "can_submit",
          "locate_submit_filter", "filter_submit_results",
          control.getCurrentUser());
    } catch (RuleEvalException e) {
      return ruleError(e.getMessage(), e);
    }

    if (results.isEmpty()) {
      // This should never occur. A well written submit rule will always produce
      // at least one result informing the caller of the labels that are
      // required for this change to be submittable. Each label will indicate
      // whether or not that is actually possible given the permissions.
      return ruleError(String.format("Submit rule '%s' for change %s of %s has "
          + "no solution.", getSubmitRule(), cd.getId(), getProjectName()));
    }

    return resultsToSubmitRecord(getSubmitRule(), results);
  }

  /**
   * Build the error records returned when a draft change/patch set is
   * evaluated without {@link #setAllowDraft(boolean)}. Deliberately reports
   * "not found" when the draft is not visible to the current user, to avoid
   * leaking the draft's existence.
   */
  private List<SubmitRecord> cannotSubmitDraft() {
    try {
      if (!control.isDraftVisible(cd.db(), cd)) {
        return createRuleError("Patch set " + patchSet.getId() + " not found");
      } else if (patchSet.isDraft()) {
        return createRuleError("Cannot submit draft patch sets");
      } else {
        return createRuleError("Cannot submit draft changes");
      }
    } catch (OrmException err) {
      String msg = "Cannot check visibility of patch set " + patchSet.getId();
      log.error(msg, err);
      return createRuleError(msg);
    }
  }

  /**
   * Convert the results from Prolog Cafe's format to Gerrit's common format.
   *
   * can_submit/1 terminates when an ok(P) record is found. Therefore walk
   * the results backwards, using only that ok(P) record if it exists. This
   * skips partial results that occur early in the output. Later after the loop
   * the out collection is reversed to restore it to the original ordering.
   */
  private List<SubmitRecord> resultsToSubmitRecord(
      Term submitRule, List<Term> results) {
    List<SubmitRecord> out = new ArrayList<>(results.size());
    for (int resultIdx = results.size() - 1; 0 <= resultIdx; resultIdx--) {
      Term submitRecord = results.get(resultIdx);
      SubmitRecord rec = new SubmitRecord();
      out.add(rec);

      // Each result must be ok(P) or not_ready(P): a one-argument structure.
      if (!(submitRecord instanceof StructureTerm)
          || 1 != submitRecord.arity()) {
        return invalidResult(submitRule, submitRecord);
      }
      if ("ok".equals(submitRecord.name())) {
        rec.status = SubmitRecord.Status.OK;
      } else if ("not_ready".equals(submitRecord.name())) {
        rec.status = SubmitRecord.Status.NOT_READY;
      } else {
        return invalidResult(submitRule, submitRecord);
      }

      // Unpack the one argument. This should also be a structure with one
      // argument per label that needs to be reported on to the caller.
      //
      submitRecord = submitRecord.arg(0);

      if (!(submitRecord instanceof StructureTerm)) {
        return invalidResult(submitRule, submitRecord);
      }
      rec.labels = new ArrayList<>(submitRecord.arity());

      for (Term state : ((StructureTerm) submitRecord).args()) {
        // Each label is label(Name, Status): a two-argument "label" structure.
        if (!(state instanceof StructureTerm)
            || 2 != state.arity()
            || !"label".equals(state.name())) {
          return invalidResult(submitRule, submitRecord);
        }
        SubmitRecord.Label lbl = new SubmitRecord.Label();
        rec.labels.add(lbl);
        lbl.label = state.arg(0).name();
        Term status = state.arg(1);
        try {
          if ("ok".equals(status.name())) {
            lbl.status = SubmitRecord.Label.Status.OK;
            appliedBy(lbl, status);
          } else if ("reject".equals(status.name())) {
            lbl.status = SubmitRecord.Label.Status.REJECT;
            appliedBy(lbl, status);
          } else if ("need".equals(status.name())) {
            lbl.status = SubmitRecord.Label.Status.NEED;
          } else if ("may".equals(status.name())) {
            lbl.status = SubmitRecord.Label.Status.MAY;
          } else if ("impossible".equals(status.name())) {
            lbl.status = SubmitRecord.Label.Status.IMPOSSIBLE;
          } else {
            return invalidResult(submitRule, submitRecord);
          }
        } catch (UserTermExpected e) {
          return invalidResult(submitRule, submitRecord, e.getMessage());
        }
      }

      // An ok(P) record is final; ignore any earlier (partial) results.
      if (rec.status == SubmitRecord.Status.OK) {
        break;
      }
    }
    Collections.reverse(out);
    return out;
  }

  /**
   * Report a malformed term produced by the submit rule.
   *
   * @param reason extra detail appended to the message, or null for none.
   */
  private List<SubmitRecord> invalidResult(Term rule, Term record,
      String reason) {
    return ruleError(String.format("Submit rule %s for change %s of %s output "
        + "invalid result: %s%s", rule, cd.getId(), getProjectName(), record,
        (reason == null ? "" : ". Reason: " + reason)));
  }

  private List<SubmitRecord> invalidResult(Term rule, Term record) {
    return invalidResult(rule, record, null);
  }

  private List<SubmitRecord> ruleError(String err) {
    return ruleError(err, null);
  }

  /**
   * Turn an error into RULE_ERROR records. When logErrors is set the detail
   * goes to the server log and callers see only the generic message;
   * otherwise the full message is returned to the caller.
   */
  private List<SubmitRecord> ruleError(String err, Exception e) {
    if (logErrors) {
      if (e == null) {
        log.error(err);
      } else {
        log.error(err, e);
      }
      return defaultRuleError();
    } else {
      return createRuleError(err);
    }
  }

  /**
   * Evaluate the submit type rules to get the submit type.
   *
   * @return record from the evaluated rules.
   */
  public SubmitTypeRecord getSubmitType() {
    try {
      initPatchSet();
    } catch (OrmException e) {
      return typeError("Error looking up patch set "
          + control.getChange().currentPatchSetId());
    }

    try {
      // As in cannotSubmitDraft(), invisible drafts are reported as
      // "not found" so their existence is not leaked.
      if (control.getChange().getStatus() == Change.Status.DRAFT
          && !control.isDraftVisible(cd.db(), cd)) {
        return createTypeError("Patch set " + patchSet.getId() + " not found");
      }
      if (patchSet.isDraft() && !control.isDraftVisible(cd.db(), cd)) {
        return createTypeError("Patch set " + patchSet.getId() + " not found");
      }
    } catch (OrmException err) {
      String msg = "Cannot read patch set " + patchSet.getId();
      log.error(msg, err);
      return createTypeError(msg);
    }

    List<Term> results;
    try {
      results = evaluateImpl("locate_submit_type", "get_submit_type",
          "locate_submit_type_filter", "filter_submit_type_results",
          // Do not include current user in submit type evaluation. This is used
          // for mergeability checks, which are stored persistently and so must
          // have a consistent view of the submit type.
          null);
    } catch (RuleEvalException e) {
      return typeError(e.getMessage(), e);
    }

    if (results.isEmpty()) {
      // Should never occur for a well written rule
      return typeError("Submit rule '" + getSubmitRule() + "' for change "
          + cd.getId() + " of " + getProjectName() + " has no solution.");
    }

    Term typeTerm = results.get(0);
    if (!(typeTerm instanceof SymbolTerm)) {
      return typeError("Submit rule '" + getSubmitRule() + "' for change "
          + cd.getId() + " of " + getProjectName()
          + " did not return a symbol.");
    }

    String typeName = ((SymbolTerm) typeTerm).name();
    try {
      // Prolog symbols are lowercase; SubmitType enum constants are uppercase.
      return SubmitTypeRecord.OK(
          SubmitType.valueOf(typeName.toUpperCase()));
    } catch (IllegalArgumentException e) {
      return typeError("Submit type rule " + getSubmitRule() + " for change "
          + cd.getId() + " of " + getProjectName() + " output invalid result: "
          + typeName);
    }
  }

  private SubmitTypeRecord typeError(String err) {
    return typeError(err, null);
  }

  /**
   * Turn an error into a RULE_ERROR submit type record, logging the detail
   * when logErrors is set (mirrors {@link #ruleError(String, Exception)}).
   */
  private SubmitTypeRecord typeError(String err, Exception e) {
    if (logErrors) {
      if (e == null) {
        log.error(err);
      } else {
        log.error(err, e);
      }
      return defaultTypeError();
    } else {
      return createTypeError(err);
    }
  }

  /**
   * Locate and run the user rule in this project's Prolog environment, then
   * (unless skipFilters) run the parent projects' filters over its output.
   * Side effects: sets {@link #submitRule} and {@link #reductionsConsumed}.
   *
   * @param user user to store in the environment, or null to omit (used for
   *     persistent evaluations that must not depend on the caller).
   * @return the dereferenced terms of the (possibly filtered) result list.
   */
  private List<Term> evaluateImpl(
      String userRuleLocatorName,
      String userRuleWrapperName,
      String filterRuleLocatorName,
      String filterRuleWrapperName,
      CurrentUser user) throws RuleEvalException {
    PrologEnvironment env = getPrologEnvironment(user);
    try {
      Term sr = env.once("gerrit", userRuleLocatorName, new VariableTerm());
      if (fastEvalLabels) {
        env.once("gerrit", "assume_range_from_label");
      }

      List<Term> results = new ArrayList<>();
      try {
        // Collect every solution of the wrapper goal; template[1] is the
        // output argument bound by the wrapper.
        for (Term[] template : env.all("gerrit", userRuleWrapperName, sr,
            new VariableTerm())) {
          results.add(template[1]);
        }
      } catch (ReductionLimitException err) {
        throw new RuleEvalException(String.format(
            "%s on change %d of %s", err.getMessage(), cd.getId().get(),
            getProjectName()));
      } catch (RuntimeException err) {
        throw new RuleEvalException(String.format(
            "Exception calling %s on change %d of %s", sr, cd.getId().get(),
            getProjectName()), err);
      } finally {
        reductionsConsumed = env.getReductions();
      }

      Term resultsTerm = toListTerm(results);
      if (!skipFilters) {
        resultsTerm = runSubmitFilters(
            resultsTerm, env, filterRuleLocatorName, filterRuleWrapperName);
      }
      // Unpack the Prolog list back into a Java list, dereferencing each cell.
      List<Term> r;
      if (resultsTerm instanceof ListTerm) {
        r = Lists.newArrayList();
        for (Term t = resultsTerm; t instanceof ListTerm;) {
          ListTerm l = (ListTerm) t;
          r.add(l.car().dereference());
          t = l.cdr().dereference();
        }
      } else {
        r = Collections.emptyList();
      }
      submitRule = sr;
      return r;
    } finally {
      env.close();
    }
  }

  /**
   * Create a Prolog environment for this project, consulting either
   * refs/meta/config:rules.pl or the custom rule set via {@link #setRule},
   * and seed it with the stored values the rules read.
   */
  private PrologEnvironment getPrologEnvironment(CurrentUser user)
      throws RuleEvalException {
    checkState(patchSet != null,
        "getPrologEnvironment() called before initPatchSet()");
    ProjectState projectState = control.getProjectControl().getProjectState();
    PrologEnvironment env;
    try {
      if (rule == null) {
        env = projectState.newPrologEnvironment();
      } else {
        env = projectState.newPrologEnvironment(
            "stdin", new ByteArrayInputStream(rule.getBytes(UTF_8)));
      }
    } catch (CompileException err) {
      throw new RuleEvalException("Cannot consult rules.pl for "
          + getProjectName(), err);
    }
    env.set(StoredValues.REVIEW_DB, cd.db());
    env.set(StoredValues.CHANGE_DATA, cd);
    env.set(StoredValues.PATCH_SET, patchSet);
    env.set(StoredValues.CHANGE_CONTROL, control);
    if (user != null) {
      env.set(StoredValues.CURRENT_USER, user);
    }
    return env;
  }

  /**
   * Run each parent project's submit filter over the results, walking from
   * this project up to All-Projects. Each parent filters in a fresh
   * environment seeded with the child's stored values; the filtered term of
   * one level becomes the input of the next.
   */
  private Term runSubmitFilters(Term results, PrologEnvironment env,
      String filterRuleLocatorName, String filterRuleWrapperName)
      throws RuleEvalException {
    ProjectState projectState = control.getProjectControl().getProjectState();
    PrologEnvironment childEnv = env;
    for (ProjectState parentState : projectState.parents()) {
      PrologEnvironment parentEnv;
      try {
        parentEnv = parentState.newPrologEnvironment();
      } catch (CompileException err) {
        throw new RuleEvalException("Cannot consult rules.pl for "
            + parentState.getProject().getName(), err);
      }

      parentEnv.copyStoredValues(childEnv);
      Term filterRule = parentEnv.once("gerrit", filterRuleLocatorName,
          new VariableTerm());
      try {
        if (fastEvalLabels) {
          // NOTE(review): this targets the original environment `env`, not
          // `parentEnv` where the filter runs — looks like it may be a bug;
          // confirm intended environment before changing.
          env.once("gerrit", "assume_range_from_label");
        }

        // template[2] is the filtered output argument bound by the wrapper.
        Term[] template = parentEnv.once("gerrit", filterRuleWrapperName,
            filterRule, results, new VariableTerm());
        results = template[2];
      } catch (ReductionLimitException err) {
        throw new RuleEvalException(String.format(
            "%s on change %d of %s", err.getMessage(), cd.getId().get(),
            parentState.getProject().getName()));
      } catch (RuntimeException err) {
        throw new RuleEvalException(String.format(
            "Exception calling %s on change %d of %s", filterRule,
            cd.getId().get(), parentState.getProject().getName()), err);
      } finally {
        // NOTE(review): reads reductions from `env`, not `parentEnv`; verify
        // this accounts for the filter's own reductions as intended.
        reductionsConsumed += env.getReductions();
      }
      childEnv = parentEnv;
    }
    return results;
  }

  /** Build a Prolog cons-list term from the given terms, preserving order. */
  private static Term toListTerm(List<Term> terms) {
    Term list = Prolog.Nil;
    for (int i = terms.size() - 1; i >= 0; i--) {
      list = new ListTerm(terms.get(i), list);
    }
    return list;
  }

  /**
   * Extract the user(ID) argument from an ok/reject status term and record it
   * as the label's applier. A status with an argument that is not user(ID)
   * raises UserTermExpected; a zero-argument status is silently ignored.
   */
  private void appliedBy(SubmitRecord.Label label, Term status)
      throws UserTermExpected {
    if (status instanceof StructureTerm && status.arity() == 1) {
      Term who = status.arg(0);
      if (isUser(who)) {
        label.appliedBy = new Account.Id(((IntegerTerm) who.arg(0)).intValue());
      } else {
        throw new UserTermExpected(label);
      }
    }
  }

  /** @return true if the term has the shape user(IntegerId). */
  private static boolean isUser(Term who) {
    return who instanceof StructureTerm
        && who.arity() == 1
        && who.name().equals("user")
        && who.arg(0) instanceof IntegerTerm;
  }

  /** @return the located submit rule term; only valid after an evaluation. */
  public Term getSubmitRule() {
    checkState(submitRule != null,
        "getSubmitRule() invalid before evaluation");
    return submitRule;
  }

  /** Lazily load the current patch set if one was not supplied. */
  private void initPatchSet() throws OrmException {
    if (patchSet == null) {
      patchSet = cd.currentPatchSet();
    }
  }

  private String getProjectName() {
    return control.getProjectControl().getProjectState().getProject().getName();
  }
}
package psidev.psi.mi.jami.xml.io.writer.elements.impl.expanded.xml25; import junit.framework.Assert; import org.junit.Ignore; import org.junit.Test; import psidev.psi.mi.jami.binary.BinaryInteraction; import psidev.psi.mi.jami.binary.impl.DefaultNamedBinaryInteraction; import psidev.psi.mi.jami.exception.IllegalRangeException; import psidev.psi.mi.jami.model.Complex; import psidev.psi.mi.jami.model.Feature; import psidev.psi.mi.jami.model.NamedInteraction; import psidev.psi.mi.jami.model.Participant; import psidev.psi.mi.jami.model.impl.*; import psidev.psi.mi.jami.utils.CvTermUtils; import psidev.psi.mi.jami.utils.RangeUtils; import psidev.psi.mi.jami.xml.cache.PsiXmlObjectCache; import psidev.psi.mi.jami.xml.cache.InMemoryIdentityObjectCache; import psidev.psi.mi.jami.xml.io.writer.elements.impl.AbstractXmlWriterTest; import psidev.psi.mi.jami.xml.io.writer.elements.impl.expanded.xml25.XmlNamedBinaryInteractionWriter; import javax.xml.stream.XMLStreamException; import java.io.IOException; /** * Unit tester for XmlNamedBinaryInteractionWriter * * @author Marine Dumousseau (marine@ebi.ac.uk) * @version $Id$ * @since <pre>25/11/13</pre> */ public class XmlNamedBinaryInteractionWriterTest extends AbstractXmlWriterTest { private String interaction = "<interaction id=\"1\">\n" + " <experimentList>\n" + " <experimentDescription id=\"2\">\n" + " <bibref>\n" + " <xref>\n" + " <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </bibref>\n"+ " <interactionDetectionMethod>\n" + " <names>\n" + " <shortLabel>unspecified method</shortLabel>\n"+ " </names>\n"+ " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </interactionDetectionMethod>\n"+ " </experimentDescription>\n"+ " </experimentList>\n" + " <participantList>\n" + " <participant id=\"3\">\n" + " <interactor id=\"4\">\n" + " <names>\n" + " <shortLabel>protein 
test</shortLabel>\n" + " </names>\n" + " <interactorType>\n" + " <names>\n" + " <shortLabel>protein</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </interactorType>\n" + " </interactor>\n" + " <biologicalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </biologicalRole>\n" + " </participant>\n"+ " </participantList>\n" + "</interaction>"; private String interaction_complex = "<interaction id=\"1\">\n" + " <experimentList>\n" + " <experimentDescription id=\"2\">\n" + " <bibref>\n" + " <xref>\n" + " <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </bibref>\n"+ " <interactionDetectionMethod>\n" + " <names>\n" + " <shortLabel>unspecified method</shortLabel>\n"+ " </names>\n"+ " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </interactionDetectionMethod>\n"+ " </experimentDescription>\n"+ " </experimentList>\n" + " <participantList>\n" + " <participant id=\"3\">\n" + " <interactionRef>4</interactionRef>\n" + " <biologicalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </biologicalRole>\n" + " </participant>\n"+ " </participantList>\n" + "</interaction>"; private String interaction_complexAsInteractor ="<interaction id=\"1\">\n" + " <experimentList>\n" + " <experimentDescription id=\"2\">\n" + " <bibref>\n" + " <xref>\n" + " <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" 
refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </bibref>\n"+ " <interactionDetectionMethod>\n" + " <names>\n" + " <shortLabel>unspecified method</shortLabel>\n"+ " </names>\n"+ " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </interactionDetectionMethod>\n"+ " </experimentDescription>\n"+ " </experimentList>\n" + " <participantList>\n" + " <participant id=\"3\">\n" + " <interactor id=\"4\">\n" + " <names>\n" + " <shortLabel>test complex</shortLabel>\n" + " </names>\n" + " <interactorType>\n" + " <names>\n" + " <shortLabel>complex</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0314\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </interactorType>\n" + " </interactor>\n" + " <biologicalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </biologicalRole>\n" + " </participant>\n"+ " </participantList>\n" + "</interaction>"; private String interaction_shortName ="<interaction id=\"1\">\n" + " <names>\n" + " <shortLabel>interaction test</shortLabel>\n"+ " </names>\n" + " <experimentList>\n" + " <experimentDescription id=\"2\">\n" + " <bibref>\n" + " <xref>\n" + " <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </bibref>\n"+ " <interactionDetectionMethod>\n" + " <names>\n" + " <shortLabel>unspecified method</shortLabel>\n"+ " </names>\n"+ " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </interactionDetectionMethod>\n"+ " </experimentDescription>\n"+ " </experimentList>\n" + " <participantList>\n" + " <participant id=\"3\">\n" + " <interactor id=\"4\">\n" + " <names>\n" + " 
<shortLabel>protein test</shortLabel>\n" + " </names>\n" + " <interactorType>\n" + " <names>\n" + " <shortLabel>protein</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </interactorType>\n" + " </interactor>\n" + " <biologicalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </biologicalRole>\n" + " </participant>\n"+ " </participantList>\n" + "</interaction>"; private String interaction_fullName ="<interaction id=\"1\">\n" + " <names>\n" + " <fullName>interaction test</fullName>\n"+ " </names>\n" + " <experimentList>\n" + " <experimentDescription id=\"2\">\n" + " <bibref>\n" + " <xref>\n" + " <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </bibref>\n"+ " <interactionDetectionMethod>\n" + " <names>\n" + " <shortLabel>unspecified method</shortLabel>\n"+ " </names>\n"+ " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </interactionDetectionMethod>\n"+ " </experimentDescription>\n"+ " </experimentList>\n" + " <participantList>\n" + " <participant id=\"3\">\n" + " <interactor id=\"4\">\n" + " <names>\n" + " <shortLabel>protein test</shortLabel>\n" + " </names>\n" + " <interactorType>\n" + " <names>\n" + " <shortLabel>protein</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </interactorType>\n" + " </interactor>\n" + " <biologicalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" 
id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </biologicalRole>\n" + " </participant>\n"+ " </participantList>\n" + "</interaction>"; private String interaction_aliases ="<interaction id=\"1\">\n" + " <names>\n" + " <alias type=\"synonym\">interaction synonym</alias>\n"+ " <alias>test</alias>\n"+ " </names>\n" + " <experimentList>\n" + " <experimentDescription id=\"2\">\n" + " <bibref>\n" + " <xref>\n" + " <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </bibref>\n"+ " <interactionDetectionMethod>\n" + " <names>\n" + " <shortLabel>unspecified method</shortLabel>\n"+ " </names>\n"+ " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </interactionDetectionMethod>\n"+ " </experimentDescription>\n"+ " </experimentList>\n" + " <participantList>\n" + " <participant id=\"3\">\n" + " <interactor id=\"4\">\n" + " <names>\n" + " <shortLabel>protein test</shortLabel>\n" + " </names>\n" + " <interactorType>\n" + " <names>\n" + " <shortLabel>protein</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </interactorType>\n" + " </interactor>\n" + " <biologicalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </biologicalRole>\n" + " </participant>\n"+ " </participantList>\n" + "</interaction>"; private String interaction_identifier = "<interaction id=\"1\">\n" + " <xref>\n" + " <primaryRef db=\"intact\" id=\"EBI-xxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " <secondaryRef db=\"test\" id=\"xxxx1\"/>\n"+ " </xref>\n"+ " <experimentList>\n" + " <experimentDescription id=\"2\">\n" 
+ " <bibref>\n" + " <xref>\n" + " <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </bibref>\n"+ " <interactionDetectionMethod>\n" + " <names>\n" + " <shortLabel>unspecified method</shortLabel>\n"+ " </names>\n"+ " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </interactionDetectionMethod>\n"+ " </experimentDescription>\n"+ " </experimentList>\n" + " <participantList>\n" + " <participant id=\"3\">\n" + " <interactor id=\"4\">\n" + " <names>\n" + " <shortLabel>protein test</shortLabel>\n" + " </names>\n" + " <interactorType>\n" + " <names>\n" + " <shortLabel>protein</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </interactorType>\n" + " </interactor>\n" + " <biologicalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </biologicalRole>\n" + " </participant>\n"+ " </participantList>\n" + "</interaction>"; private String interaction_xref = "<interaction id=\"1\">\n" + " <xref>\n" + " <primaryRef db=\"test2\" id=\"xxxx2\"/>\n" + " <secondaryRef db=\"test\" id=\"xxxx1\"/>\n"+ " </xref>\n"+ " <experimentList>\n" + " <experimentDescription id=\"2\">\n" + " <bibref>\n" + " <xref>\n" + " <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </bibref>\n"+ " <interactionDetectionMethod>\n" + " <names>\n" + " <shortLabel>unspecified method</shortLabel>\n"+ " </names>\n"+ " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </interactionDetectionMethod>\n"+ " 
</experimentDescription>\n"+ " </experimentList>\n" + " <participantList>\n" + " <participant id=\"3\">\n" + " <interactor id=\"4\">\n" + " <names>\n" + " <shortLabel>protein test</shortLabel>\n" + " </names>\n" + " <interactorType>\n" + " <names>\n" + " <shortLabel>protein</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </interactorType>\n" + " </interactor>\n" + " <biologicalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </biologicalRole>\n" + " </participant>\n"+ " </participantList>\n" + "</interaction>"; private String interaction_inferred = "<interaction id=\"1\">\n" + " <experimentList>\n" + " <experimentDescription id=\"2\">\n" + " <bibref>\n" + " <xref>\n" + " <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </bibref>\n"+ " <interactionDetectionMethod>\n" + " <names>\n" + " <shortLabel>unspecified method</shortLabel>\n"+ " </names>\n"+ " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </interactionDetectionMethod>\n"+ " </experimentDescription>\n"+ " </experimentList>\n" + " <participantList>\n" + " <participant id=\"3\">\n" + " <interactor id=\"4\">\n" + " <names>\n" + " <shortLabel>protein test</shortLabel>\n" + " </names>\n" + " <interactorType>\n" + " <names>\n" + " <shortLabel>protein</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </interactorType>\n" + " </interactor>\n" + " <biologicalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + 
" </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </biologicalRole>\n" + " <featureList>\n" + " <feature id=\"5\">\n" + " <featureRangeList>\n" + " <featureRange>\n" + " <startStatus>\n" + " <names>\n" + " <shortLabel>certain</shortLabel>\n"+ " </names>\n"+ " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0335\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </startStatus>\n" + " <begin position=\"1\"/>\n"+ " <endStatus>\n" + " <names>\n" + " <shortLabel>certain</shortLabel>\n"+ " </names>\n"+ " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0335\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </endStatus>\n" + " <end position=\"4\"/>\n"+ " </featureRange>\n"+ " </featureRangeList>\n" + " </feature>\n"+ " </featureList>\n" + " </participant>\n"+ " <participant id=\"6\">\n" + " <interactor id=\"7\">\n" + " <names>\n" + " <shortLabel>protein test2</shortLabel>\n" + " </names>\n" + " <interactorType>\n" + " <names>\n" + " <shortLabel>protein</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </interactorType>\n" + " </interactor>\n" + " <biologicalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </biologicalRole>\n" + " <featureList>\n" + " <feature id=\"8\">\n" + " <featureRangeList>\n" + " <featureRange>\n" + " <startStatus>\n" + " <names>\n" + " <shortLabel>certain</shortLabel>\n"+ " </names>\n"+ " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0335\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </startStatus>\n" + " <begin position=\"1\"/>\n"+ " 
<endStatus>\n" + " <names>\n" + " <shortLabel>certain</shortLabel>\n"+ " </names>\n"+ " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0335\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </endStatus>\n" + " <end position=\"4\"/>\n"+ " </featureRange>\n"+ " </featureRangeList>\n" + " </feature>\n"+ " </featureList>\n" + " </participant>\n"+ " </participantList>\n" + " <inferredInteractionList>\n" + " <inferredInteraction>\n" + " <participant>\n" + " <participantFeatureRef>5</participantFeatureRef>\n" + " </participant>\n"+ " <participant>\n" + " <participantFeatureRef>8</participantFeatureRef>\n" + " </participant>\n"+ " </inferredInteraction>\n"+ " </inferredInteractionList>\n" + "</interaction>"; private String interaction_type = "<interaction id=\"1\">\n" + " <experimentList>\n" + " <experimentDescription id=\"2\">\n" + " <bibref>\n" + " <xref>\n" + " <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </bibref>\n"+ " <interactionDetectionMethod>\n" + " <names>\n" + " <shortLabel>unspecified method</shortLabel>\n"+ " </names>\n"+ " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </interactionDetectionMethod>\n"+ " </experimentDescription>\n"+ " </experimentList>\n" + " <participantList>\n" + " <participant id=\"3\">\n" + " <interactor id=\"4\">\n" + " <names>\n" + " <shortLabel>protein test</shortLabel>\n" + " </names>\n" + " <interactorType>\n" + " <names>\n" + " <shortLabel>protein</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </interactorType>\n" + " </interactor>\n" + " <biologicalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" 
id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </biologicalRole>\n" + " </participant>\n"+ " </participantList>\n" + " <interactionType>\n" + " <names>\n" + " <shortLabel>association</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0914\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </interactionType>\n" + "</interaction>"; private String interaction_attributes = "<interaction id=\"1\">\n" + " <experimentList>\n" + " <experimentDescription id=\"2\">\n" + " <bibref>\n" + " <xref>\n" + " <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </bibref>\n"+ " <interactionDetectionMethod>\n" + " <names>\n" + " <shortLabel>unspecified method</shortLabel>\n"+ " </names>\n"+ " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </interactionDetectionMethod>\n"+ " </experimentDescription>\n"+ " </experimentList>\n" + " <participantList>\n" + " <participant id=\"3\">\n" + " <interactor id=\"4\">\n" + " <names>\n" + " <shortLabel>protein test</shortLabel>\n" + " </names>\n" + " <interactorType>\n" + " <names>\n" + " <shortLabel>protein</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </interactorType>\n" + " </interactor>\n" + " <biologicalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </biologicalRole>\n" + " </participant>\n"+ " </participantList>\n" + " <attributeList>\n" + " <attribute name=\"test2\"/>\n"+ " <attribute name=\"test3\"/>\n"+ " <attribute name=\"spoke expansion\" nameAc=\"MI:1060\"/>\n"+ " 
</attributeList>\n"+ "</interaction>"; private String interaction_registered = "<interaction id=\"2\">\n" + " <experimentList>\n" + " <experimentDescription id=\"3\">\n" + " <bibref>\n" + " <xref>\n" + " <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </bibref>\n"+ " <interactionDetectionMethod>\n" + " <names>\n" + " <shortLabel>unspecified method</shortLabel>\n"+ " </names>\n"+ " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+ " </xref>\n"+ " </interactionDetectionMethod>\n"+ " </experimentDescription>\n"+ " </experimentList>\n" + " <participantList>\n" + " <participant id=\"4\">\n" + " <interactor id=\"5\">\n" + " <names>\n" + " <shortLabel>protein test</shortLabel>\n" + " </names>\n" + " <interactorType>\n" + " <names>\n" + " <shortLabel>protein</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </interactorType>\n" + " </interactor>\n" + " <biologicalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </biologicalRole>\n" + " </participant>\n"+ " </participantList>\n" + "</interaction>"; private PsiXmlObjectCache elementCache = new InMemoryIdentityObjectCache(); @Test public void test_write_interaction() throws XMLStreamException, IOException, IllegalRangeException { BinaryInteraction interaction = new DefaultNamedBinaryInteraction(); Participant participant = new DefaultParticipant(new DefaultProtein("protein test")); interaction.addParticipant(participant); elementCache.clear(); XmlNamedBinaryInteractionWriter writer = new XmlNamedBinaryInteractionWriter(createStreamWriter(), this.elementCache); 
writer.setDefaultExperiment(new DefaultExperiment(new DefaultPublication("xxxxxx"))); writer.write(interaction); streamWriter.flush(); Assert.assertEquals(this.interaction, output.toString()); } @Test public void test_write_participant_complex() throws XMLStreamException, IOException, IllegalRangeException { BinaryInteraction interaction = new DefaultNamedBinaryInteraction(); Complex complex = new DefaultComplex("test complex"); complex.getParticipants().add(new DefaultModelledParticipant(new DefaultProtein("test protein"))); Participant participant = new DefaultParticipant(complex); interaction.addParticipant(participant); elementCache.clear(); XmlNamedBinaryInteractionWriter writer = new XmlNamedBinaryInteractionWriter(createStreamWriter(), this.elementCache); writer.setDefaultExperiment(new DefaultExperiment(new DefaultPublication("xxxxxx"))); writer.write(interaction); streamWriter.flush(); Assert.assertEquals(this.interaction_complex, output.toString()); } @Test public void test_write_participant_complex_as_interactor() throws XMLStreamException, IOException, IllegalRangeException { BinaryInteraction interaction = new DefaultNamedBinaryInteraction(); Complex complex = new DefaultComplex("test complex"); complex.getParticipants().add(new DefaultModelledParticipant(new DefaultProtein("test protein"))); Participant participant = new DefaultParticipant(complex); interaction.addParticipant(participant); elementCache.clear(); XmlNamedBinaryInteractionWriter writer = new XmlNamedBinaryInteractionWriter(createStreamWriter(), this.elementCache); writer.setDefaultExperiment(new DefaultExperiment(new DefaultPublication("xxxxxx"))); writer.setComplexAsInteractor(true); writer.write(interaction); streamWriter.flush(); Assert.assertEquals(this.interaction_complexAsInteractor, output.toString()); } @Test public void test_write_participant_complex_no_participants() throws XMLStreamException, IOException, IllegalRangeException { BinaryInteraction interaction = new 
DefaultNamedBinaryInteraction(); Complex complex = new DefaultComplex("test complex"); Participant participant = new DefaultParticipant(complex); interaction.addParticipant(participant); elementCache.clear(); XmlNamedBinaryInteractionWriter writer = new XmlNamedBinaryInteractionWriter(createStreamWriter(), this.elementCache); writer.setDefaultExperiment(new DefaultExperiment(new DefaultPublication("xxxxxx"))); writer.write(interaction); streamWriter.flush(); Assert.assertEquals(this.interaction_complexAsInteractor, output.toString()); } @Test public void test_write_interaction_shortName() throws XMLStreamException, IOException, IllegalRangeException { BinaryInteraction interaction = new DefaultNamedBinaryInteraction("interaction test"); Participant participant = new DefaultParticipant(new DefaultProtein("protein test")); interaction.addParticipant(participant); elementCache.clear(); XmlNamedBinaryInteractionWriter writer = new XmlNamedBinaryInteractionWriter(createStreamWriter(), this.elementCache); writer.setDefaultExperiment(new DefaultExperiment(new DefaultPublication("xxxxxx"))); writer.write(interaction); streamWriter.flush(); Assert.assertEquals(this.interaction_shortName, output.toString()); } @Test public void test_write_interaction_fullName() throws XMLStreamException, IOException, IllegalRangeException { NamedInteraction interaction = new DefaultNamedBinaryInteraction(); interaction.setFullName("interaction test"); Participant participant = new DefaultParticipant(new DefaultProtein("protein test")); interaction.addParticipant(participant); elementCache.clear(); XmlNamedBinaryInteractionWriter writer = new XmlNamedBinaryInteractionWriter(createStreamWriter(), this.elementCache); writer.setDefaultExperiment(new DefaultExperiment(new DefaultPublication("xxxxxx"))); writer.write((BinaryInteraction)interaction); streamWriter.flush(); Assert.assertEquals(this.interaction_fullName, output.toString()); } @Test public void test_write_interaction_alias() throws 
XMLStreamException, IOException, IllegalRangeException { NamedInteraction interaction = new DefaultNamedBinaryInteraction(); interaction.getAliases().add(new DefaultAlias(new DefaultCvTerm("synonym"), "interaction synonym")); interaction.getAliases().add(new DefaultAlias("test")); Participant participant = new DefaultParticipant(new DefaultProtein("protein test")); interaction.addParticipant(participant); elementCache.clear(); XmlNamedBinaryInteractionWriter writer = new XmlNamedBinaryInteractionWriter(createStreamWriter(), this.elementCache); writer.setDefaultExperiment(new DefaultExperiment(new DefaultPublication("xxxxxx"))); writer.write((BinaryInteraction)interaction); streamWriter.flush(); Assert.assertEquals(this.interaction_aliases, output.toString()); } @Test public void test_write_interaction_identifier() throws XMLStreamException, IOException, IllegalRangeException { BinaryInteraction interaction = new DefaultNamedBinaryInteraction(); Participant participant = new DefaultParticipant(new DefaultProtein("protein test")); interaction.addParticipant(participant); interaction.getIdentifiers().add(new DefaultXref(new DefaultCvTerm("intact"), "EBI-xxx")); interaction.getXrefs().add(new DefaultXref(new DefaultCvTerm("test"), "xxxx1")); elementCache.clear(); XmlNamedBinaryInteractionWriter writer = new XmlNamedBinaryInteractionWriter(createStreamWriter(), this.elementCache); writer.setDefaultExperiment(new DefaultExperiment(new DefaultPublication("xxxxxx"))); writer.write(interaction); streamWriter.flush(); Assert.assertEquals(this.interaction_identifier, output.toString()); } @Test public void test_write_interaction_xref() throws XMLStreamException, IOException, IllegalRangeException { BinaryInteraction interaction = new DefaultNamedBinaryInteraction(); Participant participant = new DefaultParticipant(new DefaultProtein("protein test")); interaction.addParticipant(participant); interaction.getXrefs().add(new DefaultXref(new DefaultCvTerm("test2"), "xxxx2")); 
interaction.getXrefs().add(new DefaultXref(new DefaultCvTerm("test"), "xxxx1")); elementCache.clear(); XmlNamedBinaryInteractionWriter writer = new XmlNamedBinaryInteractionWriter(createStreamWriter(), this.elementCache); writer.setDefaultExperiment(new DefaultExperiment(new DefaultPublication("xxxxxx"))); writer.write(interaction); streamWriter.flush(); Assert.assertEquals(this.interaction_xref, output.toString()); } @Test @Ignore public void test_write_interaction_inferred() throws XMLStreamException, IOException, IllegalRangeException { BinaryInteraction interaction = new DefaultNamedBinaryInteraction(); Participant participant = new DefaultParticipant(new DefaultProtein("protein test")); Participant participant2 = new DefaultParticipant(new DefaultProtein("protein test2")); // two inferred interactiosn f1, f2, f3 and f3,f4 Feature f1 = new DefaultFeature(); f1.getRanges().add(RangeUtils.createRangeFromString("1-4")); Feature f2 = new DefaultFeature(); f2.getRanges().add(RangeUtils.createRangeFromString("1-4")); f1.getLinkedFeatures().add(f2); f2.getLinkedFeatures().add(f1); participant.addFeature(f1); participant2.addFeature(f2); interaction.addParticipant(participant); interaction.addParticipant(participant2); elementCache.clear(); XmlNamedBinaryInteractionWriter writer = new XmlNamedBinaryInteractionWriter(createStreamWriter(), this.elementCache); writer.setDefaultExperiment(new DefaultExperiment(new DefaultPublication("xxxxxx"))); writer.write(interaction); streamWriter.flush(); Assert.assertEquals(this.interaction_inferred, output.toString()); } @Test public void test_write_interaction_type() throws XMLStreamException, IOException, IllegalRangeException { BinaryInteraction interaction = new DefaultNamedBinaryInteraction(); Participant participant = new DefaultParticipant(new DefaultProtein("protein test")); interaction.addParticipant(participant); interaction.setInteractionType(CvTermUtils.createMICvTerm("association", "MI:0914")); elementCache.clear(); 
XmlNamedBinaryInteractionWriter writer = new XmlNamedBinaryInteractionWriter(createStreamWriter(), this.elementCache); writer.setDefaultExperiment(new DefaultExperiment(new DefaultPublication("xxxxxx"))); writer.write(interaction); streamWriter.flush(); Assert.assertEquals(this.interaction_type, output.toString()); } @Test public void test_write_interaction_attributes() throws XMLStreamException, IOException, IllegalRangeException { BinaryInteraction interaction = new DefaultNamedBinaryInteraction(); interaction.setComplexExpansion(CvTermUtils.createMICvTerm("spoke expansion", "MI:1060")); Participant participant = new DefaultParticipant(new DefaultProtein("protein test")); interaction.addParticipant(participant); interaction.getAnnotations().add(new DefaultAnnotation(new DefaultCvTerm("test2"))); interaction.getAnnotations().add(new DefaultAnnotation(new DefaultCvTerm("test3"))); elementCache.clear(); XmlNamedBinaryInteractionWriter writer = new XmlNamedBinaryInteractionWriter(createStreamWriter(), this.elementCache); writer.setDefaultExperiment(new DefaultExperiment(new DefaultPublication("xxxxxx"))); writer.write(interaction); streamWriter.flush(); Assert.assertEquals(this.interaction_attributes, output.toString()); } @Test public void test_write_interaction_registered() throws XMLStreamException, IOException, IllegalRangeException { BinaryInteraction interaction = new DefaultNamedBinaryInteraction(); Participant participant = new DefaultParticipant(new DefaultProtein("protein test")); interaction.addParticipant(participant); elementCache.clear(); elementCache.extractIdForInteraction(new DefaultInteraction()); elementCache.extractIdForInteraction(interaction); XmlNamedBinaryInteractionWriter writer = new XmlNamedBinaryInteractionWriter(createStreamWriter(), this.elementCache); writer.setDefaultExperiment(new DefaultExperiment(new DefaultPublication("xxxxxx"))); writer.write(interaction); streamWriter.flush(); Assert.assertEquals(this.interaction_registered, 
output.toString()); } }
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.solr.util; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.Token; import org.apache.lucene.analysis.TokenFilter; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.document.Document; import org.apache.lucene.queryParser.ParseException; import org.apache.lucene.queryParser.QueryParser; import org.apache.lucene.search.*; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.highlight.*; import org.apache.solr.core.SolrCore; import org.apache.solr.core.SolrException; import org.apache.solr.request.SolrParams; import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.request.SolrQueryResponse; import org.apache.solr.request.DefaultSolrParams; import org.apache.solr.request.AppendedSolrParams; import org.apache.solr.schema.IndexSchema; import org.apache.solr.schema.SchemaField; import org.apache.solr.search.*; import java.io.IOException; import java.io.StringReader; import java.util.*; import java.util.logging.Level; import java.util.regex.Pattern; /** * <p>Utilities that may be of use to RequestHandlers.</p> * * <p> * Many of these functions have code that was stolen/mutated from * 
StandardRequestHandler. * </p> * * <p>:TODO: refactor StandardRequestHandler to use these utilities</p> * * <p>:TODO: Many "standard" functionality methods are not cognisant of * default parameter settings. */ public class SolrPluginUtils { /** * Set defaults on a SolrQueryRequest. * * RequestHandlers can use this method to ensure their defaults are * visible to other components such as the response writer */ public static void setDefaults(SolrQueryRequest req, SolrParams defaults) { setDefaults(req, defaults, null, null); } /** * Set default-ish params on a SolrQueryRequest. * * RequestHandlers can use this method to ensure their defaults and * overrides are visible to other components such as the response writer * * @param req The request whose params we are interested i * @param defaults values to be used if no values are specified in the request params * @param appends values to be appended to those from the request (or defaults) when dealing with multi-val params, or treated as another layer of defaults for singl-val params. * @param invariants values which will be used instead of any request, or default values, regardless of context. */ public static void setDefaults(SolrQueryRequest req, SolrParams defaults, SolrParams appends, SolrParams invariants) { SolrParams p = req.getParams(); if (defaults != null) { p = new DefaultSolrParams(p,defaults); } if (appends != null) { p = new AppendedSolrParams(p,appends); } if (invariants != null) { p = new DefaultSolrParams(invariants,p); } req.setParams(p); } /** standard param for field list */ @Deprecated public static String FL = SolrParams.FL; /** * SolrIndexSearch.numDocs(Query,Query) freaks out if the filtering * query is null, so we use this workarround. */ public static int numDocs(SolrIndexSearcher s, Query q, Query f) throws IOException { return (null == f) ? s.getDocSet(q).size() : s.numDocs(q,f); } /** * Returns the param, or the default if it's empty or not specified. 
* @deprecated use SolrParam.get(String,String) */ public static String getParam(SolrQueryRequest req, String param, String def) { String v = req.getParam(param); // Note: parameters passed but given only white-space value are // considered equvalent to passing nothing for that parameter. if (null == v || "".equals(v.trim())) { return def; } return v; } /** * Treats the param value as a Number, returns the default if nothing is * there or if it's not a number. * @deprecated use SolrParam.getFloat(String,float) */ public static Number getNumberParam(SolrQueryRequest req, String param, Number def) { Number r = def; String v = req.getParam(param); if (null == v || "".equals(v.trim())) { return r; } try { r = new Float(v); } catch (NumberFormatException e) { /* :NOOP" */ } return r; } /** * Treats parameter value as a boolean. The string 'false' is false; * any other non-empty string is true. * @deprecated use SolrParam.getBool(String,boolean) */ public static boolean getBooleanParam(SolrQueryRequest req, String param, boolean def) { String v = req.getParam(param); if (null == v || "".equals(v.trim())) { return def; } return !"false".equals(v.trim()); } private final static Pattern splitList=Pattern.compile(",| "); /** Split a value that may contain a comma, space of bar separated list. */ public static String[] split(String value){ return splitList.split(value.trim(), 0); } /** * Assumes the standard query param of "fl" to specify the return fields * @see #setReturnFields(String,SolrQueryResponse) */ public static int setReturnFields(SolrQueryRequest req, SolrQueryResponse res) { return setReturnFields(req.getParam(FL), res); } /** * Given a space seperated list of field names, sets the field list on the * SolrQueryResponse. * * @return bitfield of SolrIndexSearcher flags that need to be set */ public static int setReturnFields(String fl, SolrQueryResponse res) { int flags = 0; if (fl != null) { // TODO - this could become more efficient if widely used. 
// TODO - should field order be maintained? String[] flst = split(fl); if (flst.length > 0 && !(flst.length==1 && flst[0].length()==0)) { Set<String> set = new HashSet<String>(); for (String fname : flst) { if("score".equalsIgnoreCase(fname)) flags |= SolrIndexSearcher.GET_SCORES; set.add(fname); } res.setReturnFields(set); } } return flags; } /** * Pre-fetch documents into the index searcher's document cache. * * This is an entirely optional step which you might want to perform for * the following reasons: * * <ul> * <li>Locates the document-retrieval costs in one spot, which helps * detailed performance measurement</li> * * <li>Determines a priori what fields will be needed to be fetched by * various subtasks, like response writing and highlighting. This * minimizes the chance that many needed fields will be loaded lazily. * (it is more efficient to load all the field we require normally).</li> * </ul> * * If lazy field loading is disabled, this method does nothing. */ public static void optimizePreFetchDocs(DocList docs, Query query, SolrQueryRequest req, SolrQueryResponse res) throws IOException { SolrIndexSearcher searcher = req.getSearcher(); if(!searcher.enableLazyFieldLoading) { // nothing to do return; } Set<String> fieldFilter = null; Set<String> returnFields = res.getReturnFields(); if(returnFields != null) { // copy return fields list fieldFilter = new HashSet<String>(returnFields); // add highlight fields if(HighlightingUtils.isHighlightingEnabled(req)) { for(String field: HighlightingUtils.getHighlightFields(query, req, null)) fieldFilter.add(field); } // fetch unique key if one exists. SchemaField keyField = req.getSearcher().getSchema().getUniqueKeyField(); if(null != keyField) fieldFilter.add(keyField.getName()); } // get documents DocIterator iter = docs.iterator(); for (int i=0; i<docs.size(); i++) { searcher.doc(iter.nextDoc(), fieldFilter); } } /** * <p> * Returns a NamedList containing many "standard" pieces of debugging * information. 
* </p> * * <ul> * <li>rawquerystring - the 'q' param exactly as specified by the client * </li> * <li>querystring - the 'q' param after any preprocessing done by the plugin * </li> * <li>parsedquery - the main query executed formated by the Solr * QueryParsing utils class (which knows about field types) * </li> * <li>parsedquery_toString - the main query executed formated by it's * own toString method (in case it has internal state Solr * doesn't know about) * </li> * <li>expain - the list of score explanations for each document in * results against query. * </li> * <li>otherQuery - the query string specified in 'explainOther' query param. * </li> * <li>explainOther - the list of score explanations for each document in * results against 'otherQuery' * </li> * </ul> * * @param req the request we are dealing with * @param userQuery the users query as a string, after any basic * preprocessing has been done * @param query the query built from the userQuery * (and perhaps other clauses) that identifies the main * result set of the response. * @param results the main result set of the response * @deprecated Use doStandardDebug(SolrQueryRequest,String,Query,DocList) with setDefaults */ public static NamedList doStandardDebug(SolrQueryRequest req, String userQuery, Query query, DocList results, CommonParams params) throws IOException { String debug = getParam(req, SolrParams.DEBUG_QUERY, params.debugQuery); NamedList dbg = null; if (debug!=null) { dbg = new NamedList(); /* userQuery may have been pre-processes .. expose that */ dbg.add("rawquerystring", req.getQueryString()); dbg.add("querystring", userQuery); /* QueryParsing.toString isn't perfect, use it to see converted * values, use regular toString to see any attributes of the * underlying Query it may have missed. 
*/ dbg.add("parsedquery",QueryParsing.toString(query, req.getSchema())); dbg.add("parsedquery_toString", query.toString()); dbg.add("explain", getExplainList (query, results, req.getSearcher(), req.getSchema())); String otherQueryS = req.getParam("explainOther"); if (otherQueryS != null && otherQueryS.length() > 0) { DocList otherResults = doSimpleQuery (otherQueryS,req.getSearcher(), req.getSchema(),0,10); dbg.add("otherQuery",otherQueryS); dbg.add("explainOther", getExplainList (query, otherResults, req.getSearcher(), req.getSchema())); } } return dbg; } /** * <p> * Returns a NamedList containing many "standard" pieces of debugging * information. * </p> * * <ul> * <li>rawquerystring - the 'q' param exactly as specified by the client * </li> * <li>querystring - the 'q' param after any preprocessing done by the plugin * </li> * <li>parsedquery - the main query executed formated by the Solr * QueryParsing utils class (which knows about field types) * </li> * <li>parsedquery_toString - the main query executed formated by it's * own toString method (in case it has internal state Solr * doesn't know about) * </li> * <li>expain - the list of score explanations for each document in * results against query. * </li> * <li>otherQuery - the query string specified in 'explainOther' query param. * </li> * <li>explainOther - the list of score explanations for each document in * results against 'otherQuery' * </li> * </ul> * * @param req the request we are dealing with * @param userQuery the users query as a string, after any basic * preprocessing has been done * @param query the query built from the userQuery * (and perhaps other clauses) that identifies the main * result set of the response. 
* @param results the main result set of the response */ public static NamedList doStandardDebug(SolrQueryRequest req, String userQuery, Query query, DocList results) throws IOException { String debug = req.getParam(SolrParams.DEBUG_QUERY); NamedList dbg = null; if (debug!=null) { dbg = new NamedList(); /* userQuery may have been pre-processes .. expose that */ dbg.add("rawquerystring", req.getQueryString()); dbg.add("querystring", userQuery); /* QueryParsing.toString isn't perfect, use it to see converted * values, use regular toString to see any attributes of the * underlying Query it may have missed. */ dbg.add("parsedquery",QueryParsing.toString(query, req.getSchema())); dbg.add("parsedquery_toString", query.toString()); dbg.add("explain", getExplainList (query, results, req.getSearcher(), req.getSchema())); String otherQueryS = req.getParam("explainOther"); if (otherQueryS != null && otherQueryS.length() > 0) { DocList otherResults = doSimpleQuery (otherQueryS,req.getSearcher(), req.getSchema(),0,10); dbg.add("otherQuery",otherQueryS); dbg.add("explainOther", getExplainList (query, otherResults, req.getSearcher(), req.getSchema())); } } return dbg; } /** * Generates an list of Explanations for each item in a list of docs. 
* * @param query The Query you want explanations in the context of * @param docs The Documents you want explained relative that query */ public static NamedList getExplainList(Query query, DocList docs, SolrIndexSearcher searcher, IndexSchema schema) throws IOException { NamedList explainList = new NamedList(); DocIterator iterator = docs.iterator(); for (int i=0; i<docs.size(); i++) { int id = iterator.nextDoc(); Explanation explain = searcher.explain(query, id); Document doc = searcher.doc(id); String strid = schema.printableUniqueKey(doc); String docname = ""; if (strid != null) docname="id="+strid+","; docname = docname + "internal_docid="+id; explainList.add(docname, "\n" +explain.toString()); } return explainList; } /** * Executes a basic query in lucene syntax */ public static DocList doSimpleQuery(String sreq, SolrIndexSearcher searcher, IndexSchema schema, int start, int limit) throws IOException { List<String> commands = StrUtils.splitSmart(sreq,';'); String qs = commands.size() >= 1 ? commands.get(0) : ""; Query query = QueryParsing.parseQuery(qs, schema); // If the first non-query, non-filter command is a simple sort on an indexed field, then // we can use the Lucene sort ability. Sort sort = null; if (commands.size() >= 2) { QueryParsing.SortSpec sortSpec = QueryParsing.parseSort(commands.get(1), schema); if (sortSpec != null) { sort = sortSpec.getSort(); if (sortSpec.getCount() >= 0) { limit = sortSpec.getCount(); } } } DocList results = searcher.getDocList(query,(DocSet)null, sort, start, limit); return results; } /** * Given a string containing fieldNames and boost info, * converts it to a Map from field name to boost info. * * <p> * Doesn't care if boost info is negative, you're on your own. * </p> * <p> * Doesn't care if boost info is missing, again: you're on your own. 
* </p> * * @param in a String like "fieldOne^2.3 fieldTwo fieldThree^-0.4" * @return Map of fieldOne =&gt; 2.3, fieldTwo =&gt; null, fieldThree =&gt; -0.4 */ public static Map<String,Float> parseFieldBoosts(String in) { if (null == in || "".equals(in.trim())) { return new HashMap<String,Float>(); } String[] bb = in.trim().split("\\s+"); Map<String, Float> out = new HashMap<String,Float>(7); for (String s : bb) { String[] bbb = s.split("\\^"); out.put(bbb[0], 1 == bbb.length ? null : Float.valueOf(bbb[1])); } return out; } /** * Given a string containing functions with optional boosts, returns * an array of Queries representing those functions with the specified * boosts. * <p> * NOTE: intra-function whitespace is not allowed. * </p> * @see #parseFieldBoosts */ public static List<Query> parseFuncs(IndexSchema s, String in) throws ParseException { Map<String,Float> ff = parseFieldBoosts(in); List<Query> funcs = new ArrayList<Query>(ff.keySet().size()); for (String f : ff.keySet()) { Query fq = QueryParsing.parseFunction(f, s); Float b = ff.get(f); if (null != b) { fq.setBoost(b); } funcs.add(fq); } return funcs; } /** * Checks the number of optional clauses in the query, and compares it * with the specification string to determine the proper value to use. 
* * <p> * Details about the specification format can be found * <a href="doc-files/min-should-match.html">here</a> * </p> * * <p>A few important notes...</p> * <ul> * <li> * If the calculations based on the specification determine that no * optional clauses are needed, BooleanQuerysetMinMumberShouldMatch * will never be called, but the usual rules about BooleanQueries * still apply at search time (a BooleanQuery containing no required * clauses must still match at least one optional clause) * <li> * <li> * No matter what number the calculation arrives at, * BooleanQuery.setMinShouldMatch() will never be called with a * value greater then the number of optional clauses (or less then 1) * </li> * </ul> * * <p>:TODO: should optimize the case where number is same * as clauses to just make them all "required" * </p> */ public static void setMinShouldMatch(BooleanQuery q, String spec) { int optionalClauses = 0; for (BooleanClause c : q.getClauses()) { if (c.getOccur() == Occur.SHOULD) { optionalClauses++; } } int msm = calculateMinShouldMatch(optionalClauses, spec); if (0 < msm) { q.setMinimumNumberShouldMatch(msm); } } /** * helper exposed for UnitTests * @see #setMinShouldMatch */ static int calculateMinShouldMatch(int optionalClauseCount, String spec) { int result = optionalClauseCount; if (-1 < spec.indexOf("<")) { /* we have conditional spec(s) */ for (String s : spec.trim().split(" ")) { String[] parts = s.split("<"); int upperBound = (new Integer(parts[0])).intValue(); if (optionalClauseCount <= upperBound) { return result; } else { result = calculateMinShouldMatch (optionalClauseCount, parts[1]); } } return result; } /* otherwise, simple expresion */ if (-1 < spec.indexOf("%")) { /* percentage */ int percent = new Integer(spec.replace("%","")).intValue(); float calc = (result * percent) / 100f; result = calc < 0 ? result + (int)calc : (int)calc; } else { int calc = (new Integer(spec)).intValue(); result = calc < 0 ? 
result + calc : calc; } return (optionalClauseCount < result ? optionalClauseCount : (result < 0 ? 0 : result)); } /** * Recursively walks the "from" query pulling out sub-queries and * adding them to the "to" query. * * <p> * Boosts are multiplied as needed. Sub-BooleanQueryies which are not * optional will not be flattened. From will be mangled durring the walk, * so do not attempt to reuse it. * </p> */ public static void flattenBooleanQuery(BooleanQuery to, BooleanQuery from) { BooleanClause[] c = from.getClauses(); for (int i = 0; i < c.length; i++) { Query ci = c[i].getQuery(); ci.setBoost(ci.getBoost() * from.getBoost()); if (ci instanceof BooleanQuery && !c[i].isRequired() && !c[i].isProhibited()) { /* we can recurse */ flattenBooleanQuery(to, (BooleanQuery)ci); } else { to.add(c[i]); } } } /** * Escapes all special characters except '"', '-', and '+' * * @see QueryParser#escape */ public static CharSequence partialEscape(CharSequence s) { StringBuffer sb = new StringBuffer(); for (int i = 0; i < s.length(); i++) { char c = s.charAt(i); if (c == '\\' || c == '!' || c == '(' || c == ')' || c == ':' || c == '^' || c == '[' || c == ']' || c == '{' || c == '}' || c == '~' || c == '*' || c == '?' ) { sb.append('\\'); } sb.append(c); } return sb; } /** * Returns it's input if there is an even (ie: balanced) number of * '"' characters -- otherwise returns a String in which all '"' * characters are striped out. */ public static CharSequence stripUnbalancedQuotes(CharSequence s) { int count = 0; for (int i = 0; i < s.length(); i++) { if (s.charAt(i) == '\"') { count++; } } if (0 == (count & 1)) { return s; } return s.toString().replace("\"",""); } /** * A subclass of SolrQueryParser that supports aliasing fields for * constructing DisjunctionMaxQueries. 
   */
  public static class DisjunctionMaxQueryParser extends SolrQueryParser {

    /** A simple container for storing alias info
     * @see #aliases
     */
    protected static class Alias {
      // tiebreaker multiplier for the DisjunctionMaxQuery built from this alias
      public float tie;
      // real field name -> boost; a null boost leaves the sub-query's boost alone
      public Map<String,Float> fields;
    }

    /**
     * Where we store a map from field name we expect to see in our query
     * string, to Alias object containing the fields to use in our
     * DisjunctionMaxQuery and the tiebreaker to use.
     */
    protected Map<String,Alias> aliases = new HashMap<String,Alias>(3);

    public DisjunctionMaxQueryParser(IndexSchema s, String defaultField) {
      super(s,defaultField);
    }
    public DisjunctionMaxQueryParser(IndexSchema s) {
      this(s,null);
    }

    /**
     * Add an alias to this query parser.
     *
     * @param field the field name that should trigger alias mapping
     * @param fieldBoosts the mapping from fieldname to boost value that
     *                    should be used to build up the clauses of the
     *                    DisjunctionMaxQuery.
     * @param tiebreaker to the tiebreaker to be used in the
     *                   DisjunctionMaxQuery
     * @see SolrPluginUtils#parseFieldBoosts
     */
    public void addAlias(String field, float tiebreaker,
                         Map<String,Float> fieldBoosts) {
      Alias a = new Alias();
      a.tie = tiebreaker;
      a.fields = fieldBoosts;
      aliases.put(field, a);
    }

    /**
     * Delegates to the super class unless the field has been specified
     * as an alias -- in which case we recurse on each of
     * the aliased fields, and the results are composed into a
     * DisjunctionMaxQuery.  (so yes: aliases which point at other
     * aliases should work)
     */
    protected Query getFieldQuery(String field, String queryText)
      throws ParseException {
      if (aliases.containsKey(field)) {
        Alias a = aliases.get(field);
        DisjunctionMaxQuery q = new DisjunctionMaxQuery(a.tie);

        /* we might not get any valid queries from delegation,
         * in which we should return null
         */
        boolean ok = false;

        for (String f : a.fields.keySet()) {
          // NOTE: recursive call, so an aliased field may itself be an alias
          Query sub = getFieldQuery(f,queryText);
          if (null != sub) {
            if (null != a.fields.get(f)) {
              sub.setBoost(a.fields.get(f));
            }
            q.add(sub);
            ok = true;
          }
        }
        return ok ? q : null;
      } else {
        return super.getFieldQuery(field, queryText);
      }
    }
  }

  /**
   * Determines the correct Sort based on the request parameter "sort"
   *
   * @return null if no sort is specified.
   */
  public static Sort getSort(SolrQueryRequest req) {

    String sort = req.getParam("sort");
    if (null == sort || sort.equals("")) {
      return null;
    }

    SolrException sortE = null;
    QueryParsing.SortSpec ss = null;
    try {
      ss = QueryParsing.parseSort(sort, req.getSchema());
    } catch (SolrException e) {
      sortE = e;
    }

    if ((null == ss) || (null != sortE)) {
      /* we definitely had some sort of sort string from the user,
       * but no SortSpec came out of it -- warn and fall back to no sort
       */
      SolrCore.log.log(Level.WARNING,"Invalid sort \""+sort+"\" was specified, ignoring", sortE);
      return null;
    }

    return ss.getSort();
  }

  /**
   * Builds a list of Query objects that should be used to filter results
   * @see SolrParams#FQ
   * @return null if no filter queries
   */
  public static List<Query> parseFilterQueries(SolrQueryRequest req) throws ParseException {
    String[] in = req.getParams().getParams(SolrParams.FQ);
    if (null == in || 0 == in.length) return null;
    List<Query> out = new LinkedList<Query>();
    SolrIndexSearcher s = req.getSearcher();
    /* Ignore SolrParams.DF - could have init param FQs assuming the
     * schema default with query param DF intented to only affect Q.
     * If user doesn't want schema default, they should be explicit in the FQ.
     */
    SolrQueryParser qp = new SolrQueryParser(s.getSchema(), null);
    for (String q : in) {
      // skip null/blank entries rather than parsing empty queries
      if (null != q && 0 != q.trim().length()) {
        out.add(qp.parse(q));
      }
    }
    return out;
  }

  /**
   * A CacheRegenerator that can be used whenever the items in the cache
   * are not dependant on the current searcher.
   *
   * <p>
   * Flat out copies the oldKey=&gt;oldVal pair into the newCache
   * </p>
   */
  public static class IdentityRegenerator implements CacheRegenerator {
    public boolean regenerateItem(SolrIndexSearcher newSearcher,
                                  SolrCache newCache,
                                  SolrCache oldCache,
                                  Object oldKey,
                                  Object oldVal)
      throws IOException {
      // cached entry is searcher-independent, so carry it over unchanged
      newCache.put(oldKey,oldVal);
      return true;
    }
  }

}

/**
 * Helper class which creates a single TokenStream out of values from a
 * multi-valued field.
 */
class MultiValueTokenStream extends TokenStream {
  private String fieldName;
  private String[] values;
  private Analyzer analyzer;
  private int curIndex;                  // next index into the values array
  private int curOffset;                 // offset into concatenated string
  private TokenStream currentStream;     // tokenStream currently being iterated
  private boolean orderTokenOffsets;     // if true, wrap each stream in TokenOrderingFilter

  /** Constructs a TokenStream for consecutively-analyzed field values
   *
   * @param fieldName name of the field
   * @param values array of field data
   * @param analyzer analyzer instance
   */
  public MultiValueTokenStream(String fieldName, String[] values,
                               Analyzer analyzer, boolean orderTokenOffsets) {
    this.fieldName = fieldName;
    this.values = values;
    this.analyzer = analyzer;
    curIndex = -1;
    curOffset = 0;
    currentStream = null;
    this.orderTokenOffsets=orderTokenOffsets;
  }

  /** Returns the next token in the stream, or null at EOS.
   */
  public Token next() throws IOException {
    int extra = 0;
    if(currentStream == null) {
      // no stream open: advance to the next value (or signal EOS)
      curIndex++;
      if(curIndex < values.length) {
        currentStream = analyzer.tokenStream(fieldName,
                                             new StringReader(values[curIndex]));
        if (orderTokenOffsets) currentStream = new TokenOrderingFilter(currentStream,10);
        // add extra space between multiple values
        if(curIndex > 0)
          extra = analyzer.getPositionIncrementGap(fieldName);
      } else {
        return null;
      }
    }
    Token nextToken = currentStream.next();
    if(nextToken == null) {
      // current value exhausted: account for its length and recurse to the next one
      curOffset += values[curIndex].length();
      currentStream = null;
      return next();
    }
    // create an modified token which is the offset into the concatenated
    // string of all values
    Token offsetToken = new Token(nextToken.termText(),
                                  nextToken.startOffset() + curOffset,
                                  nextToken.endOffset() + curOffset);
    // NOTE(review): the gap is multiplied by 10 here -- presumably to make value
    // boundaries exceed GapFragmenter.INCREMENT_THRESHOLD; confirm before changing.
    offsetToken.setPositionIncrement(nextToken.getPositionIncrement() + extra*10);
    return offsetToken;
  }

  /**
   * Returns all values as a single String into which the Tokens index with
   * their offsets.
   */
  public String asSingleValue() {
    StringBuilder sb = new StringBuilder();
    for(String str : values)
      sb.append(str);
    return sb.toString();
  }

}

/**
 * A simple modification of SimpleFragmenter which additionally creates new
 * fragments when an unusually-large position increment is encountered
 * (this behaves much better in the presence of multi-valued fields).
 */
class GapFragmenter extends SimpleFragmenter {
  /** position-increment gap beyond which a new fragment is forced */
  public static final int INCREMENT_THRESHOLD = 50;
  // running end-offset of the current fragment
  protected int fragOffsetAccum = 0;

  public GapFragmenter() {
  }

  public GapFragmenter(int fragsize) {
    super(fragsize);
  }

  /* (non-Javadoc)
   * @see org.apache.lucene.search.highlight.TextFragmenter#start(java.lang.String)
   */
  public void start(String originalText) {
    fragOffsetAccum = 0;
  }

  /* (non-Javadoc)
   * @see org.apache.lucene.search.highlight.TextFragmenter#isNewFragment(org.apache.lucene.analysis.Token)
   */
  public boolean isNewFragment(Token token) {
    // new fragment when the current one is full, or when a large position
    // increment suggests a value boundary in a multi-valued field
    boolean isNewFrag = token.endOffset() >= fragOffsetAccum + getFragmentSize() ||
      token.getPositionIncrement() > INCREMENT_THRESHOLD;
    if(isNewFrag) {
      fragOffsetAccum += token.endOffset() - fragOffsetAccum;
    }
    return isNewFrag;
  }
}

/** Orders Tokens in a window first by their startOffset ascending.
 * endOffset is currently ignored.
 * This is meant to work around fickleness in the highlighter only.  It
 * can mess up token positions and should not be used for indexing or querying.
 */
class TokenOrderingFilter extends TokenFilter {
  private final int windowSize;
  // look-ahead buffer kept sorted by startOffset
  private final LinkedList<Token> queue = new LinkedList<Token>();
  private boolean done=false;

  protected TokenOrderingFilter(TokenStream input, int windowSize) {
    super(input);
    this.windowSize = windowSize;
  }

  public Token next() throws IOException {
    // keep the look-ahead window full, inserting each new token in sorted order
    while (!done && queue.size() < windowSize) {
      Token newTok = input.next();
      if (newTok==null) {
        done=true;
        break;
      }

      // reverse iterating for better efficiency since we know the
      // list is already sorted, and most token start offsets will be too.
      ListIterator<Token> iter = queue.listIterator(queue.size());
      while(iter.hasPrevious()) {
        if (newTok.startOffset() >= iter.previous().startOffset()) {
          // insertion will be before what next() would return (what
          // we just compared against), so move back one so the insertion
          // will be after.
          iter.next();
          break;
        }
      }
      iter.add(newTok);
    }

    return queue.isEmpty() ? null : queue.removeFirst();
  }

}
/* * Copyright 2016 Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.pubsub.spi.v1; import static com.google.cloud.pubsub.spi.v1.StatusUtil.isRetryable; import com.google.api.gax.grpc.FlowController; import com.google.api.stats.Distribution; import com.google.auth.Credentials; import com.google.cloud.Clock; import com.google.cloud.pubsub.spi.v1.MessageDispatcher.AckProcessor; import com.google.cloud.pubsub.spi.v1.MessageDispatcher.PendingModifyAckDeadline; import com.google.common.collect.Lists; import com.google.common.util.concurrent.AbstractService; import com.google.common.util.concurrent.FutureCallback; import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.SettableFuture; import com.google.pubsub.v1.StreamingPullRequest; import com.google.pubsub.v1.StreamingPullResponse; import com.google.pubsub.v1.SubscriberGrpc; import io.grpc.CallOptions; import io.grpc.Channel; import io.grpc.Status; import io.grpc.auth.MoreCallCredentials; import io.grpc.stub.ClientCallStreamObserver; import io.grpc.stub.ClientCalls; import io.grpc.stub.ClientResponseObserver; import java.util.Iterator; import java.util.List; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import java.util.logging.Level; import java.util.logging.Logger; import javax.annotation.Nullable; import org.joda.time.Duration; /** Implementation of {@link AbstractSubscriberConnection} based 
on Cloud Pub/Sub streaming pull. */
final class StreamingSubscriberConnection extends AbstractService implements AckProcessor {
  private static final Logger logger =
      Logger.getLogger(StreamingSubscriberConnection.class.getName());

  private static final Duration INITIAL_CHANNEL_RECONNECT_BACKOFF = new Duration(100); // 100ms
  // Cap on acks / modify-ack-deadline entries packed into one StreamingPullRequest.
  private static final int MAX_PER_REQUEST_CHANGES = 10000;

  // Doubles on each retryable stream failure; reset on a clean stream close.
  private Duration channelReconnectBackoff = INITIAL_CHANNEL_RECONNECT_BACKOFF;

  private final Channel channel;
  private final Credentials credentials;
  private final String subscription;
  private final ScheduledExecutorService executor;
  private final MessageDispatcher messageDispatcher;

  // Request side of the currently open stream; replaced each time initialize() reopens it.
  private ClientCallStreamObserver<StreamingPullRequest> requestObserver;

  public StreamingSubscriberConnection(
      String subscription,
      Credentials credentials,
      MessageReceiver receiver,
      Duration ackExpirationPadding,
      int streamAckDeadlineSeconds,
      Distribution ackLatencyDistribution,
      Channel channel,
      FlowController flowController,
      ScheduledExecutorService executor,
      Clock clock) {
    this.subscription = subscription;
    this.executor = executor;
    this.credentials = credentials;
    this.channel = channel;
    this.messageDispatcher =
        new MessageDispatcher(
            receiver,
            this,
            ackExpirationPadding,
            ackLatencyDistribution,
            flowController,
            executor,
            clock);
    messageDispatcher.setMessageDeadlineSeconds(streamAckDeadlineSeconds);
  }

  @Override
  protected void doStart() {
    logger.log(Level.INFO, "Starting subscriber.");
    initialize();
    notifyStarted();
  }

  @Override
  protected void doStop() {
    messageDispatcher.stop();
    notifyStopped();
    // Cancel the open stream so the server stops delivering messages.
    requestObserver.onError(Status.CANCELLED.asException());
  }

  /** Receives streaming-pull responses and reports stream termination via errorFuture. */
  private class StreamingPullResponseObserver
      implements ClientResponseObserver<StreamingPullRequest, StreamingPullResponse> {

    // Completed (successfully or exceptionally) exactly when the stream terminates.
    final SettableFuture<Void> errorFuture;

    StreamingPullResponseObserver(SettableFuture<Void> errorFuture) {
      this.errorFuture = errorFuture;
    }

    @Override
    public void beforeStart(ClientCallStreamObserver<StreamingPullRequest> requestObserver) {
      StreamingSubscriberConnection.this.requestObserver = requestObserver;
      // Responses are requested one at a time (see onNext) to apply backpressure.
      requestObserver.disableAutoInboundFlowControl();
    }

    @Override
    public void onNext(StreamingPullResponse response) {
      messageDispatcher.processReceivedMessages(response.getReceivedMessagesList());
      // Only if not shutdown we will request one more bundles of messages to be delivered.
      if (isAlive()) {
        requestObserver.request(1);
      }
    }

    @Override
    public void onError(Throwable t) {
      logger.log(Level.INFO, "Terminated streaming with exception", t);
      errorFuture.setException(t);
    }

    @Override
    public void onCompleted() {
      logger.log(Level.INFO, "Streaming pull terminated successfully!");
      errorFuture.set(null);
    }
  }

  /**
   * Opens (or reopens) the bidirectional streaming-pull call and installs a
   * callback that reconnects on clean close, retries with exponential backoff
   * on retryable errors, and fails the service otherwise.
   */
  private void initialize() {
    final SettableFuture<Void> errorFuture = SettableFuture.create();
    final ClientResponseObserver<StreamingPullRequest, StreamingPullResponse> responseObserver =
        new StreamingPullResponseObserver(errorFuture);
    final ClientCallStreamObserver<StreamingPullRequest> requestObserver =
        (ClientCallStreamObserver<StreamingPullRequest>)
            (ClientCalls.asyncBidiStreamingCall(
                channel.newCall(
                    SubscriberGrpc.METHOD_STREAMING_PULL,
                    CallOptions.DEFAULT.withCallCredentials(MoreCallCredentials.from(credentials))),
                responseObserver));
    logger.log(
        Level.INFO,
        "Initializing stream to subscription "
            + subscription
            + " with deadline "
            + messageDispatcher.getMessageDeadlineSeconds());
    // The first request on the stream identifies the subscription and deadline.
    requestObserver.onNext(
        StreamingPullRequest.newBuilder()
            .setSubscription(subscription)
            .setStreamAckDeadlineSeconds(messageDispatcher.getMessageDeadlineSeconds())
            .build());
    requestObserver.request(1);

    Futures.addCallback(
        errorFuture,
        new FutureCallback<Void>() {
          @Override
          public void onSuccess(@Nullable Void result) {
            channelReconnectBackoff = INITIAL_CHANNEL_RECONNECT_BACKOFF;
            // The stream was closed. And any case we want to reopen it to continue receiving
            // messages.
            initialize();
          }

          @Override
          public void onFailure(Throwable t) {
            Status errorStatus = Status.fromThrowable(t);
            if (isRetryable(errorStatus) && isAlive()) {
              // Exponential backoff: schedule a reconnect and double the delay.
              long backoffMillis = channelReconnectBackoff.getMillis();
              channelReconnectBackoff = channelReconnectBackoff.plus(backoffMillis);
              executor.schedule(
                  new Runnable() {
                    @Override
                    public void run() {
                      initialize();
                    }
                  },
                  backoffMillis,
                  TimeUnit.MILLISECONDS);
            } else {
              if (isAlive()) {
                notifyFailed(t);
              }
            }
          }
        },
        executor);
  }

  private boolean isAlive() {
    return state() == State.RUNNING || state() == State.STARTING;
  }

  @Override
  public void sendAckOperations(
      List<String> acksToSend, List<PendingModifyAckDeadline> ackDeadlineExtensions) {
    // Send the modify ack deadlines in batches as not to exceed the max request
    // size.
    List<List<String>> ackChunks = Lists.partition(acksToSend, MAX_PER_REQUEST_CHANGES);
    List<List<PendingModifyAckDeadline>> modifyAckDeadlineChunks =
        Lists.partition(ackDeadlineExtensions, MAX_PER_REQUEST_CHANGES);
    Iterator<List<String>> ackChunksIt = ackChunks.iterator();
    Iterator<List<PendingModifyAckDeadline>> modifyAckDeadlineChunksIt =
        modifyAckDeadlineChunks.iterator();

    while (ackChunksIt.hasNext() || modifyAckDeadlineChunksIt.hasNext()) {
      StreamingPullRequest.Builder requestBuilder = StreamingPullRequest.newBuilder();
      if (modifyAckDeadlineChunksIt.hasNext()) {
        List<PendingModifyAckDeadline> modAckChunk = modifyAckDeadlineChunksIt.next();
        for (PendingModifyAckDeadline modifyAckDeadline : modAckChunk) {
          for (String ackId : modifyAckDeadline.ackIds) {
            // deadline-seconds and ack-id lists are parallel arrays in the proto
            requestBuilder
                .addModifyDeadlineSeconds(modifyAckDeadline.deadlineExtensionSeconds)
                .addModifyDeadlineAckIds(ackId);
          }
        }
      }
      if (ackChunksIt.hasNext()) {
        List<String> ackChunk = ackChunksIt.next();
        requestBuilder.addAllAckIds(ackChunk);
      }
      requestObserver.onNext(requestBuilder.build());
    }
  }

  /** Propagates a new ack deadline to both the dispatcher and the open stream. */
  public void updateStreamAckDeadline(int newAckDeadlineSeconds) {
    messageDispatcher.setMessageDeadlineSeconds(newAckDeadlineSeconds);
    requestObserver.onNext(
        StreamingPullRequest.newBuilder()
            .setStreamAckDeadlineSeconds(newAckDeadlineSeconds)
            .build());
  }
}
/* * Copyright 2014 JBoss Inc * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.apiman.gateway.platforms.vertx3.common.config; import io.apiman.common.util.SimpleStringUtils; import io.apiman.common.util.crypt.IDataEncrypter; import io.apiman.gateway.engine.IComponent; import io.apiman.gateway.engine.IConnectorFactory; import io.apiman.gateway.engine.IEngineConfig; import io.apiman.gateway.engine.IMetrics; import io.apiman.gateway.engine.IPluginRegistry; import io.apiman.gateway.engine.IRegistry; import io.apiman.gateway.engine.i18n.Messages; import io.apiman.gateway.engine.policy.IPolicyFactory; import io.apiman.gateway.platforms.vertx3.common.verticles.VerticleType; import io.vertx.core.json.JsonObject; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Engine configuration, read simplistically from Vert'x JSON config. 
 *
 * @see "http://vertx.io/manual.html#using-vertx-from-the-command-line"
 * @author Marc Savy {@literal <msavy@redhat.com>}
 */
@SuppressWarnings("nls")
public class VertxEngineConfig implements IEngineConfig {
    public static final String GATEWAY_ENDPOINT_POLICY_INGESTION = "io.apiman.gateway.platforms.vertx2.policy";
    public static final String GATEWAY_ENDPOINT_REQUEST = ".request";
    public static final String GATEWAY_ENDPOINT_RESPONSE = ".response";

    // JSON keys for the per-verticle section of the config
    private static final String VERTICLES = "verticles";
    private static final String VERTICLE_PORT = "port";
    private static final String VERTICLE_COUNT = "count";

    private static final String GATEWAY_HOSTNAME = "hostname";
    private static final String GATEWAY_ENDPOINT = "endpoint";
    private static final String GATEWAY_PREFER_SECURE = "preferSecure";

    // JSON keys for the API auth section
    private static final String API_AUTH = "auth";
    private static final String API_PASSWORD = "password";
    private static final String API_REQUIRED = "required";
    private static final String API_REALM = "realm";

    // Top-level prefixes of the pluggable-component config sections
    private static final String GATEWAY_REGISTRY_PREFIX = "registry";
    private static final String GATEWAY_ENCRYPTER_PREFIX = "encrypter";
    private static final String GATEWAY_PLUGIN_REGISTRY_PREFIX = "plugin-registry";
    private static final String GATEWAY_CONNECTOR_FACTORY_PREFIX = "connector-factory";
    private static final String GATEWAY_POLICY_FACTORY_PREFIX = "policy-factory";
    private static final String GATEWAY_METRICS_PREFIX = "metrics";
    private static final String GATEWAY_COMPONENT_PREFIX = "components";

    // Each component section holds a "class" name and a nested "config" object
    private static final String GATEWAY_CONFIG = "config";
    private static final String GATEWAY_CLASS = "class";

    // JSON keys for the SSL section
    private static final String SSL = "ssl";
    private static final String SSL_TRUSTSTORE = "truststore";
    private static final String SSL_KEYSTORE = "keystore";
    private static final String SSL_PATH = "path";

    // Raw Vert.x JSON configuration this object wraps.
    private JsonObject config;
    // Lazily-populated cache of username -> password pairs; see getBasicAuthCredentials().
    private HashMap<String, String> basicAuthMap = new HashMap<>();

    public VertxEngineConfig(JsonObject config) {
        this.config = config;
    }

    /** @return the underlying Vert.x JSON configuration */
    public JsonObject getConfig() {
        return config;
    }

    @Override
    public Class<? extends IRegistry> getRegistryClass(IPluginRegistry pluginRegistry) {
        return loadConfigClass(getClassname(config, GATEWAY_REGISTRY_PREFIX), IRegistry.class);
    }

    @Override
    public Class<? extends IDataEncrypter> getDataEncrypterClass(IPluginRegistry pluginRegistry) {
        return loadConfigClass(getClassname(config, GATEWAY_ENCRYPTER_PREFIX), IDataEncrypter.class);
    }

    @Override
    public Map<String, String> getRegistryConfig() {
        return toFlatStringMap(getConfig(config, GATEWAY_REGISTRY_PREFIX));
    }

    @Override
    public Map<String, String> getDataEncrypterConfig() {
        return toFlatStringMap(getConfig(config, GATEWAY_ENCRYPTER_PREFIX));
    }

    @Override
    public Class<? extends IPluginRegistry> getPluginRegistryClass() {
        return loadConfigClass(getClassname(config, GATEWAY_PLUGIN_REGISTRY_PREFIX), IPluginRegistry.class);
    }

    @Override
    public Map<String, String> getPluginRegistryConfig() {
        return toFlatStringMap(getConfig(config, GATEWAY_PLUGIN_REGISTRY_PREFIX));
    }

    @Override
    public Class<? extends IConnectorFactory> getConnectorFactoryClass(IPluginRegistry pluginRegistry) {
        return loadConfigClass(getClassname(config, GATEWAY_CONNECTOR_FACTORY_PREFIX), IConnectorFactory.class);
    }

    @Override
    public Map<String, String> getConnectorFactoryConfig() {
        return toFlatStringMap(getConfig(config, GATEWAY_CONNECTOR_FACTORY_PREFIX));
    }

    @Override
    public Class<? extends IPolicyFactory> getPolicyFactoryClass(IPluginRegistry pluginRegistry) {
        return loadConfigClass(getClassname(config, GATEWAY_POLICY_FACTORY_PREFIX), IPolicyFactory.class);
    }

    @Override
    public Map<String, String> getPolicyFactoryConfig() {
        return toFlatStringMap(getConfig(config, GATEWAY_POLICY_FACTORY_PREFIX));
    }

    @Override
    public Class<? extends IMetrics> getMetricsClass(IPluginRegistry pluginRegistry) {
        return loadConfigClass(getClassname(config, GATEWAY_METRICS_PREFIX), IMetrics.class);
    }

    @Override
    public Map<String, String> getMetricsConfig() {
        return toFlatStringMap(getConfig(config, GATEWAY_METRICS_PREFIX));
    }

    @Override
    public <T extends IComponent> Class<T> getComponentClass(Class<T> componentType, IPluginRegistry pluginRegistry) {
        // components are keyed by their interface's simple name, e.g. "IBufferFactoryComponent"
        String className = config.getJsonObject(GATEWAY_COMPONENT_PREFIX).
                getJsonObject(componentType.getSimpleName()).
                getString(GATEWAY_CLASS);

        return loadConfigClass(className, componentType);
    }

    @Override
    public <T extends IComponent> Map<String, String> getComponentConfig(Class<T> componentType) {
        JsonObject componentConfig = config.getJsonObject(GATEWAY_COMPONENT_PREFIX).
                getJsonObject(componentType.getSimpleName()).
                getJsonObject(GATEWAY_CONFIG);

        return toFlatStringMap(componentConfig);
    }

    // NOTE(review): this tests only that the "required" entry exists as a string;
    // a value of "false" would still count as enabled -- confirm intended semantics.
    public Boolean isAuthenticationEnabled() {
        return config.getJsonObject(API_AUTH).getString(API_REQUIRED) != null;
    }

    public String getRealm() {
        return config.getJsonObject(API_AUTH).getString(API_REALM);
    }

    public String getHostname() {
        return stringConfigWithDefault(GATEWAY_HOSTNAME, "localhost");
    }

    public String getEndpoint() {
        return config.getString(GATEWAY_ENDPOINT);
    }

    public Boolean preferSecure() {
        return config.getBoolean(GATEWAY_PREFER_SECURE);
    }

    /** @return username -> password map from the auth/basic section (cached after first call) */
    public Map<String, String> getBasicAuthCredentials() {
        if (!basicAuthMap.isEmpty())
            return basicAuthMap;

        JsonObject pairs = config.getJsonObject(API_AUTH).getJsonObject("basic");

        for (String username : pairs.fieldNames()) {
            basicAuthMap.put(username, pairs.getString(username));
        }

        return basicAuthMap;
    }

    /** Flattens a (possibly nested) JsonObject into dotted-key string properties. */
    protected Map<String, String> toFlatStringMap(JsonObject jsonObject) {
        Map<String, String> outMap = new LinkedHashMap<>();
        // TODO figure out why this workaround is necessary.
        jsonMapToProperties("", new JsonObject(jsonObject.encode()).getMap(), outMap);

        return outMap;
    }

    // Recursively walks maps (descending into dotted keys) and lists (joining
    // repeated leaf values with commas), writing string leaves into output.
    @SuppressWarnings("unchecked")
    protected void jsonMapToProperties(String pathSoFar, Object value, Map<String, String> output) {
        if (value instanceof Map) { // Descend again
            Map<String, Object> map = (Map<String, Object>) value;
            map.entrySet()
                .forEach(elem -> jsonMapToProperties(determineKey(pathSoFar, elem.getKey()), elem.getValue(), output));
        } else if (value instanceof List) { // Join objects and descend
            List<Object> list = (List<Object>) value;
            list.forEach(elem -> jsonMapToProperties(pathSoFar, elem, output));
        } else { // Value
            if (output.containsKey(pathSoFar)) {
                output.put(pathSoFar, SimpleStringUtils.join(",", output.get(pathSoFar), value.toString()));
            } else {
                output.put(pathSoFar, value.toString());
            }
        }
    }

    private String determineKey(String pathSoFar, String key) {
        return pathSoFar.length() == 0 ? key : pathSoFar + "." + key;
    }

    protected String getClassname(JsonObject obj, String prefix) {
        return obj.getJsonObject(prefix).getString(GATEWAY_CLASS);
    }

    protected JsonObject getConfig(JsonObject obj, String prefix) {
        return obj.getJsonObject(prefix).getJsonObject(GATEWAY_CONFIG);
    }

    /**
     * @return a loaded class
     */
    @SuppressWarnings("unchecked")
    protected <T> Class<T> loadConfigClass(String classname, Class<T> type) {
        if (classname == null) {
            throw new RuntimeException("No " + type.getSimpleName() + " class configured."); //$NON-NLS-2$
        }
        // Try the thread context classloader first, then fall back to Class.forName.
        try {
            Class<T> c = (Class<T>) Thread.currentThread().getContextClassLoader().loadClass(classname);
            return c;
        } catch (ClassNotFoundException e) {
            // Not found via Class.forName() - try other mechanisms.
        }
        try {
            Class<T> c = (Class<T>) Class.forName(classname);
            return c;
        } catch (ClassNotFoundException e) {
            // Not found via Class.forName() - try other mechanisms.
} System.err.println("COULD NOT LOAD " + classname); throw new RuntimeException(Messages.i18n.format("EngineConfig.FailedToLoadClass", classname)); } protected String stringConfigWithDefault(String name, String defaultValue) { String str = config.getString(name); return str == null ? defaultValue : str; } protected Boolean boolConfigWithDefault(String name, Boolean defaultValue) { Boolean bool = config.containsKey(name); return bool == null ? defaultValue : bool; } public JsonObject getVerticleConfig(String verticleType) { return config.getJsonObject(VERTICLES).getJsonObject(verticleType.toLowerCase()); } public int getPort(String name) { return getVerticleConfig(name).getInteger(VERTICLE_PORT); } public int getPort(VerticleType verticleType) { return getPort(verticleType.name()); } public int getVerticleCount(VerticleType verticleType) { return getVerticleConfig(verticleType.name()).getInteger(VERTICLE_COUNT); } public boolean isSSL() { return config.containsKey(SSL); } public String getKeyStore() { return config.getJsonObject(SSL, new JsonObject()).getJsonObject(SSL_KEYSTORE, new JsonObject()).getString(SSL_PATH); } public String getKeyStorePassword() { return config.getJsonObject(SSL, new JsonObject()).getJsonObject(SSL_KEYSTORE, new JsonObject()).getString(API_PASSWORD); } public String getTrustStore() { return config.getJsonObject(SSL, new JsonObject()).getJsonObject(SSL_TRUSTSTORE, new JsonObject()).getString(SSL_PATH); } public String getTrustStorePassword() { return config.getJsonObject(SSL, new JsonObject()).getJsonObject(SSL_TRUSTSTORE, new JsonObject()).getString(API_PASSWORD); } }
/******************************************************************************* * Copyright 2011 See AUTHORS file. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************/ package com.badlogic.gdx.utils; import java.util.Iterator; import java.util.NoSuchElementException; import com.badlogic.gdx.math.MathUtils; /** An unordered map that uses int keys. This implementation is a cuckoo hash map using 3 hashes, random walking, and a small stash * for problematic keys. Null values are allowed. No allocation is done except when growing the table size. <br> * <br> * This map performs very fast get, containsKey, and remove (typically O(1), worst case O(log(n))). Put may be a bit slower, * depending on hash collisions. Load factors greater than 0.91 greatly increase the chances the map will have to rehash to the * next higher POT size. 
* @author Nathan Sweet */ public class IntMap<V> { private static final int PRIME1 = 0xbe1f14b1; private static final int PRIME2 = 0xb4b82e39; private static final int PRIME3 = 0xced1c241; private static final int EMPTY = 0; public int size; int[] keyTable; V[] valueTable; int capacity, stashSize; V zeroValue; boolean hasZeroValue; private float loadFactor; private int hashShift, mask, threshold; private int stashCapacity; private int pushIterations; private Entries entries1, entries2; private Values values1, values2; private Keys keys1, keys2; /** Creates a new map with an initial capacity of 32 and a load factor of 0.8. This map will hold 25 items before growing the * backing table. */ public IntMap () { this(32, 0.8f); } /** Creates a new map with a load factor of 0.8. This map will hold initialCapacity * 0.8 items before growing the backing * table. */ public IntMap (int initialCapacity) { this(initialCapacity, 0.8f); } /** Creates a new map with the specified initial capacity and load factor. This map will hold initialCapacity * loadFactor items * before growing the backing table. 
*/ public IntMap (int initialCapacity, float loadFactor) { if (initialCapacity < 0) throw new IllegalArgumentException("initialCapacity must be >= 0: " + initialCapacity); if (capacity > 1 << 30) throw new IllegalArgumentException("initialCapacity is too large: " + initialCapacity); capacity = MathUtils.nextPowerOfTwo(initialCapacity); if (loadFactor <= 0) throw new IllegalArgumentException("loadFactor must be > 0: " + loadFactor); this.loadFactor = loadFactor; threshold = (int)(capacity * loadFactor); mask = capacity - 1; hashShift = 31 - Integer.numberOfTrailingZeros(capacity); stashCapacity = Math.max(3, (int)Math.ceil(Math.log(capacity)) * 2); pushIterations = Math.max(Math.min(capacity, 8), (int)Math.sqrt(capacity) / 8); keyTable = new int[capacity + stashCapacity]; valueTable = (V[])new Object[keyTable.length]; } public V put (int key, V value) { if (key == 0) { V oldValue = zeroValue; zeroValue = value; if (!hasZeroValue) { hasZeroValue = true; size++; } return oldValue; } int[] keyTable = this.keyTable; // Check for existing keys. int index1 = key & mask; int key1 = keyTable[index1]; if (key1 == key) { V oldValue = valueTable[index1]; valueTable[index1] = value; return oldValue; } int index2 = hash2(key); int key2 = keyTable[index2]; if (key2 == key) { V oldValue = valueTable[index2]; valueTable[index2] = value; return oldValue; } int index3 = hash3(key); int key3 = keyTable[index3]; if (key3 == key) { V oldValue = valueTable[index3]; valueTable[index3] = value; return oldValue; } // Update key in the stash. for (int i = capacity, n = i + stashSize; i < n; i++) { if (keyTable[i] == key) { V oldValue = valueTable[i]; valueTable[i] = value; return oldValue; } } // Check for empty buckets. 
// --- tail of put(int key, V value): none of the three candidate buckets held the key ---
// Claim the first empty bucket; grow the table when the load threshold is crossed.
if (key1 == EMPTY) {
    keyTable[index1] = key;
    valueTable[index1] = value;
    if (size++ >= threshold) resize(capacity << 1);
    return null;
}
if (key2 == EMPTY) {
    keyTable[index2] = key;
    valueTable[index2] = value;
    if (size++ >= threshold) resize(capacity << 1);
    return null;
}
if (key3 == EMPTY) {
    keyTable[index3] = key;
    valueTable[index3] = value;
    if (size++ >= threshold) resize(capacity << 1);
    return null;
}
// All three buckets occupied: evict an occupant cuckoo-style.
push(key, value, index1, key1, index2, key2, index3, key3);
return null;
}

/** Copies every entry of the given map into this map. */
public void putAll (IntMap<V> map) {
    for (Entry<V> entry : map.entries())
        put(entry.key, entry.value);
}

/** Skips checks for existing keys. Used only while rehashing in {@link #resize(int)}. */
private void putResize (int key, V value) {
    // Key 0 is stored out of band because empty buckets are marked with key 0 (EMPTY).
    if (key == 0) {
        zeroValue = value;
        hasZeroValue = true;
        return;
    }
    // Check for empty buckets.
    int index1 = key & mask;
    int key1 = keyTable[index1];
    if (key1 == EMPTY) {
        keyTable[index1] = key;
        valueTable[index1] = value;
        if (size++ >= threshold) resize(capacity << 1);
        return;
    }
    int index2 = hash2(key);
    int key2 = keyTable[index2];
    if (key2 == EMPTY) {
        keyTable[index2] = key;
        valueTable[index2] = value;
        if (size++ >= threshold) resize(capacity << 1);
        return;
    }
    int index3 = hash3(key);
    int key3 = keyTable[index3];
    if (key3 == EMPTY) {
        keyTable[index3] = key;
        valueTable[index3] = value;
        if (size++ >= threshold) resize(capacity << 1);
        return;
    }
    push(key, value, index1, key1, index2, key2, index3, key3);
}

// Cuckoo-hash insertion: repeatedly displace a randomly chosen occupant of one of the
// three buckets until some evicted key lands in an empty bucket, or give up after
// pushIterations attempts and fall back to the linear stash.
private void push (int insertKey, V insertValue, int index1, int key1, int index2, int key2, int index3, int key3) {
    int[] keyTable = this.keyTable;
    V[] valueTable = this.valueTable;
    int mask = this.mask;

    // Push keys until an empty bucket is found.
    int evictedKey;
    V evictedValue;
    int i = 0, pushIterations = this.pushIterations;
    do {
        // Replace the key and value for one of the hashes.
        // NOTE(review): assumes MathUtils.random(2) returns an int in [0, 2] inclusive — verify.
        switch (MathUtils.random(2)) {
        case 0:
            evictedKey = key1;
            evictedValue = valueTable[index1];
            keyTable[index1] = insertKey;
            valueTable[index1] = insertValue;
            break;
        case 1:
            evictedKey = key2;
            evictedValue = valueTable[index2];
            keyTable[index2] = insertKey;
            valueTable[index2] = insertValue;
            break;
        default:
            evictedKey = key3;
            evictedValue = valueTable[index3];
            keyTable[index3] = insertKey;
            valueTable[index3] = insertValue;
            break;
        }

        // If the evicted key hashes to an empty bucket, put it there and stop.
        index1 = evictedKey & mask;
        key1 = keyTable[index1];
        if (key1 == EMPTY) {
            keyTable[index1] = evictedKey;
            valueTable[index1] = evictedValue;
            if (size++ >= threshold) resize(capacity << 1);
            return;
        }

        index2 = hash2(evictedKey);
        key2 = keyTable[index2];
        if (key2 == EMPTY) {
            keyTable[index2] = evictedKey;
            valueTable[index2] = evictedValue;
            if (size++ >= threshold) resize(capacity << 1);
            return;
        }

        index3 = hash3(evictedKey);
        key3 = keyTable[index3];
        if (key3 == EMPTY) {
            keyTable[index3] = evictedKey;
            valueTable[index3] = evictedValue;
            if (size++ >= threshold) resize(capacity << 1);
            return;
        }

        if (++i == pushIterations) break;

        // Keep pushing: the evicted entry becomes the one to insert.
        insertKey = evictedKey;
        insertValue = evictedValue;
    } while (true);

    putStash(evictedKey, evictedValue);
}

// Last resort: store the entry in the linear stash region just past the main table.
private void putStash (int key, V value) {
    if (stashSize == stashCapacity) {
        // Too many pushes occurred and the stash is full, increase the table size.
        resize(capacity << 1);
        put(key, value);
        return;
    }
    // Store key in the stash.
    int index = capacity + stashSize;
    keyTable[index] = key;
    valueTable[index] = value;
    stashSize++;
    size++;
}

/** Returns the value for the given key, or null if the key is not in the map. */
public V get (int key) {
    if (key == 0) {
        if (!hasZeroValue) return null;
        return zeroValue;
    }
    // Probe the three buckets, then fall back to a linear scan of the stash.
    int index = key & mask;
    if (keyTable[index] != key) {
        index = hash2(key);
        if (keyTable[index] != key) {
            index = hash3(key);
            if (keyTable[index] != key) return getStash(key, null);
        }
    }
    return valueTable[index];
}

/** Returns the value for the given key, or defaultValue if the key is not in the map. */
public V get (int key, V defaultValue) {
    if (key == 0) {
        if (!hasZeroValue) return defaultValue;
        return zeroValue;
    }
    int index = key & mask;
    if (keyTable[index] != key) {
        index = hash2(key);
        if (keyTable[index] != key) {
            index = hash3(key);
            if (keyTable[index] != key) return getStash(key, defaultValue);
        }
    }
    return valueTable[index];
}

// Linear scan of the stash region [capacity, capacity + stashSize).
private V getStash (int key, V defaultValue) {
    int[] keyTable = this.keyTable;
    for (int i = capacity, n = i + stashSize; i < n; i++)
        if (keyTable[i] == key) return valueTable[i];
    return defaultValue;
}

/** Removes the key and returns its value, or null if the key is not in the map. */
public V remove (int key) {
    if (key == 0) {
        if (!hasZeroValue) return null;
        V oldValue = zeroValue;
        zeroValue = null;
        hasZeroValue = false;
        size--;
        return oldValue;
    }

    int index = key & mask;
    if (keyTable[index] == key) {
        keyTable[index] = EMPTY;
        V oldValue = valueTable[index];
        valueTable[index] = null;
        size--;
        return oldValue;
    }

    index = hash2(key);
    if (keyTable[index] == key) {
        keyTable[index] = EMPTY;
        V oldValue = valueTable[index];
        valueTable[index] = null;
        size--;
        return oldValue;
    }

    index = hash3(key);
    if (keyTable[index] == key) {
        keyTable[index] = EMPTY;
        V oldValue = valueTable[index];
        valueTable[index] = null;
        size--;
        return oldValue;
    }

    return removeStash(key);
}

// Removes a key stored in the stash region, if present.
V removeStash (int key) {
    int[] keyTable = this.keyTable;
    for (int i = capacity, n = i + stashSize; i < n; i++) {
        if (keyTable[i] == key) {
            V oldValue = valueTable[i];
            removeStashIndex(i);
            size--;
            return oldValue;
        }
    }
    return null;
}

void removeStashIndex (int index) {
    // If the removed location was not last, move the last tuple to the removed location.
    stashSize--;
    int lastIndex = capacity + stashSize;
    if (index < lastIndex) {
        keyTable[index] = keyTable[lastIndex];
        valueTable[index] = valueTable[lastIndex];
        valueTable[lastIndex] = null;
    } else
        valueTable[index] = null;
}

/** Removes all entries. The backing arrays keep their current capacity. */
public void clear () {
    int[] keyTable = this.keyTable;
    V[] valueTable = this.valueTable;
    for (int i = capacity + stashSize; i-- > 0;) {
        keyTable[i] = EMPTY;
        valueTable[i] = null;
    }
    size = 0;
    stashSize = 0;
    zeroValue = null;
    hasZeroValue = false;
}

/** Returns true if the specified value is in the map. Note this traverses the entire map and compares every value, which may be
 * an expensive operation.
 * @param identity If true, uses == to compare the specified value with values in the map. If false, uses
 *           {@link #equals(Object)}. */
public boolean containsValue (Object value, boolean identity) {
    V[] valueTable = this.valueTable;
    if (value == null) {
        if (hasZeroValue && zeroValue == null) return true;
        // A null value stored in an occupied bucket also matches.
        int[] keyTable = this.keyTable;
        for (int i = capacity + stashSize; i-- > 0;)
            if (keyTable[i] != EMPTY && valueTable[i] == null) return true;
    } else if (identity) {
        if (value == zeroValue) return true;
        for (int i = capacity + stashSize; i-- > 0;)
            if (valueTable[i] == value) return true;
    } else {
        if (hasZeroValue && value.equals(zeroValue)) return true;
        for (int i = capacity + stashSize; i-- > 0;)
            if (value.equals(valueTable[i])) return true;
    }
    return false;
}

/** Returns true if the given key is present. Key 0 is tracked separately from the tables. */
public boolean containsKey (int key) {
    if (key == 0) return hasZeroValue;
    int index = key & mask;
    if (keyTable[index] != key) {
        index = hash2(key);
        if (keyTable[index] != key) {
            index = hash3(key);
            if (keyTable[index] != key) return containsKeyStash(key);
        }
    }
    return true;
}

private boolean containsKeyStash (int key) {
    int[] keyTable = this.keyTable;
    for (int i = capacity, n = i + stashSize; i < n; i++)
        if (keyTable[i] == key) return true;
    return false;
}

/** Returns the key for the specified value, or <tt>notFound</tt> if it is not in the map. Note this traverses the entire map
 * and compares every value, which may be an expensive operation.
 * @param identity If true, uses == to compare the specified value with values in the map. If false, uses
 *           {@link #equals(Object)}. */
public int findKey (Object value, boolean identity, int notFound) {
    V[] valueTable = this.valueTable;
    if (value == null) {
        if (hasZeroValue && zeroValue == null) return 0;
        int[] keyTable = this.keyTable;
        for (int i = capacity + stashSize; i-- > 0;)
            if (keyTable[i] != EMPTY && valueTable[i] == null) return keyTable[i];
    } else if (identity) {
        if (value == zeroValue) return 0;
        for (int i = capacity + stashSize; i-- > 0;)
            if (valueTable[i] == value) return keyTable[i];
    } else {
        if (hasZeroValue && value.equals(zeroValue)) return 0;
        for (int i = capacity + stashSize; i-- > 0;)
            if (value.equals(valueTable[i])) return keyTable[i];
    }
    return notFound;
}

/** Increases the size of the backing array to acommodate the specified number of additional items. Useful before adding many
 * items to avoid multiple backing array resizes. */
public void ensureCapacity (int additionalCapacity) {
    int sizeNeeded = size + additionalCapacity;
    if (sizeNeeded >= threshold) resize(MathUtils.nextPowerOfTwo((int)(sizeNeeded / loadFactor)));
}

// Rebuilds the tables at the new capacity and re-inserts every surviving entry.
// NOTE(review): newSize is presumably always a power of two (mask arithmetic relies on it) — callers pass shifted/nextPowerOfTwo values.
private void resize (int newSize) {
    int oldEndIndex = capacity + stashSize;

    // Recompute all derived parameters for the new capacity.
    capacity = newSize;
    threshold = (int)(newSize * loadFactor);
    mask = newSize - 1;
    hashShift = 31 - Integer.numberOfTrailingZeros(newSize);
    stashCapacity = Math.max(3, (int)Math.ceil(Math.log(newSize)) * 2);
    pushIterations = Math.max(Math.min(newSize, 8), (int)Math.sqrt(newSize) / 8);

    int[] oldKeyTable = keyTable;
    V[] oldValueTable = valueTable;

    keyTable = new int[newSize + stashCapacity];
    valueTable = (V[])new Object[newSize + stashCapacity];

    // The zero-key entry lives outside the tables, so it survives the rebuild as-is.
    size = hasZeroValue ? 1 : 0;
    stashSize = 0;
    for (int i = 0; i < oldEndIndex; i++) {
        int key = oldKeyTable[i];
        if (key != EMPTY) putResize(key, oldValueTable[i]);
    }
}

// Second hash function (multiply by PRIME2, xor-shift, mask).
private int hash2 (int h) {
    h *= PRIME2;
    return (h ^ h >>> hashShift) & mask;
}

// Third hash function (multiply by PRIME3, xor-shift, mask).
private int hash3 (int h) {
    h *= PRIME3;
    return (h ^ h >>> hashShift) & mask;
}

public String toString () {
    if (size == 0) return "[]";
    StringBuilder buffer = new StringBuilder(32);
    buffer.append('[');
    int[] keyTable = this.keyTable;
    V[] valueTable = this.valueTable;
    int i = keyTable.length;
    // Emit the first entry without a leading separator; the zero key, if present, goes first.
    if (hasZeroValue) {
        buffer.append("0=");
        buffer.append(zeroValue);
    } else {
        while (i-- > 0) {
            int key = keyTable[i];
            if (key == EMPTY) continue;
            buffer.append(key);
            buffer.append('=');
            buffer.append(valueTable[i]);
            break;
        }
    }
    while (i-- > 0) {
        int key = keyTable[i];
        if (key == EMPTY) continue;
        buffer.append(", ");
        buffer.append(key);
        buffer.append('=');
        buffer.append(valueTable[i]);
    }
    buffer.append(']');
    return buffer.toString();
}

/** Returns an iterator for the entries in the map. Remove is supported. Note that the same iterator instance is returned each
 * time this method is called. Use the {@link Entries} constructor for nested or multithreaded iteration. */
public Entries<V> entries () {
    // Two cached iterators are alternated so one level of nested iteration still works.
    if (entries1 == null) {
        entries1 = new Entries(this);
        entries2 = new Entries(this);
    }
    if (!entries1.valid) {
        entries1.reset();
        entries1.valid = true;
        entries2.valid = false;
        return entries1;
    }
    entries2.reset();
    entries2.valid = true;
    entries1.valid = false;
    return entries2;
}

/** Returns an iterator for the values in the map. Remove is supported. Note that the same iterator instance is returned each
 * time this method is called. Use the {@link Entries} constructor for nested or multithreaded iteration. */
public Values<V> values () {
    if (values1 == null) {
        values1 = new Values(this);
        values2 = new Values(this);
    }
    if (!values1.valid) {
        values1.reset();
        values1.valid = true;
        values2.valid = false;
        return values1;
    }
    values2.reset();
    values2.valid = true;
    values1.valid = false;
    return values2;
}

/** Returns an iterator for the keys in the map. Remove is supported. Note that the same iterator instance is returned each time
 * this method is called. Use the {@link Entries} constructor for nested or multithreaded iteration. */
public Keys keys () {
    if (keys1 == null) {
        keys1 = new Keys(this);
        keys2 = new Keys(this);
    }
    if (!keys1.valid) {
        keys1.reset();
        keys1.valid = true;
        keys2.valid = false;
        return keys1;
    }
    keys2.reset();
    keys2.valid = true;
    keys1.valid = false;
    return keys2;
}

/** A key/value pair. {@link Entries#next()} reuses a single instance. */
static public class Entry<V> {
    public int key;
    public V value;

    public String toString () {
        return key + "=" + value;
    }
}

// Shared cursor state for the Entries/Values/Keys iterators.
static private class MapIterator<V> {
    static final int INDEX_ILLEGAL = -2; // next() not yet called, or remove() already consumed the cursor
    static final int INDEX_ZERO = -1; // cursor is on the out-of-band zero key

    public boolean hasNext;

    final IntMap<V> map;
    int nextIndex, currentIndex;
    boolean valid = true;

    public MapIterator (IntMap<V> map) {
        this.map = map;
        reset();
    }

    public void reset () {
        currentIndex = INDEX_ILLEGAL;
        nextIndex = INDEX_ZERO;
        if (map.hasZeroValue)
            hasNext = true;
        else
            findNextIndex();
    }

    // Advances nextIndex to the next occupied bucket (main table or stash), if any.
    void findNextIndex () {
        hasNext = false;
        int[] keyTable = map.keyTable;
        for (int n = map.capacity + map.stashSize; ++nextIndex < n;) {
            if (keyTable[nextIndex] != EMPTY) {
                hasNext = true;
                break;
            }
        }
    }

    public void remove () {
        if (currentIndex == INDEX_ZERO && map.hasZeroValue) {
            map.zeroValue = null;
            map.hasZeroValue = false;
        } else if (currentIndex < 0) {
            throw new IllegalStateException("next must be called before remove.");
        } else if (currentIndex >= map.capacity) {
            // Stash entries are removed by compacting the stash region.
            map.removeStashIndex(currentIndex);
        } else {
            map.keyTable[currentIndex] = EMPTY;
            map.valueTable[currentIndex] = null;
        }
        currentIndex = INDEX_ILLEGAL;
        map.size--;
    }
}

static public class Entries<V> extends MapIterator<V> implements Iterable<Entry<V>>, Iterator<Entry<V>> {
    private Entry<V> entry = new Entry();

    public Entries (IntMap map) {
        super(map);
    }

    /** Note the same entry instance is returned each time this method is called. */
    public Entry<V> next () {
        if (!hasNext) throw new NoSuchElementException();
        if (!valid) throw new GdxRuntimeException("#iterator() cannot be used nested.");
        int[] keyTable = map.keyTable;
        if (nextIndex == INDEX_ZERO) {
            entry.key = 0;
            entry.value = map.zeroValue;
        } else {
            entry.key = keyTable[nextIndex];
            entry.value = map.valueTable[nextIndex];
        }
        currentIndex = nextIndex;
        findNextIndex();
        return entry;
    }

    public boolean hasNext () {
        return hasNext;
    }

    public Iterator<Entry<V>> iterator () {
        return this;
    }
}

static public class Values<V> extends MapIterator<V> implements Iterable<V>, Iterator<V> {
    public Values (IntMap<V> map) {
        super(map);
    }

    public boolean hasNext () {
        return hasNext;
    }

    public V next () {
        if (!hasNext) throw new NoSuchElementException();
        if (!valid) throw new GdxRuntimeException("#iterator() cannot be used nested.");
        V value;
        if (nextIndex == INDEX_ZERO)
            value = map.zeroValue;
        else
            value = map.valueTable[nextIndex];
        currentIndex = nextIndex;
        findNextIndex();
        return value;
    }

    public Iterator<V> iterator () {
        return this;
    }

    /** Returns a new array containing the remaining values. */
    public Array<V> toArray () {
        Array array = new Array(true, map.size);
        while (hasNext)
            array.add(next());
        return array;
    }
}

static public class Keys extends MapIterator {
    public Keys (IntMap map) {
        super(map);
    }

    public int next () {
        if (!hasNext) throw new NoSuchElementException();
        if (!valid) throw new GdxRuntimeException("#iterator() cannot be used nested.");
        int key = nextIndex == INDEX_ZERO ? 0 : map.keyTable[nextIndex];
        currentIndex = nextIndex;
        findNextIndex();
        return key;
    }

    /** Returns a new array containing the remaining keys. */
    public IntArray toArray () {
        IntArray array = new IntArray(true, map.size);
        while (hasNext)
            array.add(next());
        return array;
    }
}
}
package com.eveningoutpost.dexdrip.UtilityModels;

import android.content.Context;
import android.content.SharedPreferences;
import android.content.res.Configuration;
import android.graphics.Color;
import android.preference.PreferenceManager;
import android.text.format.DateFormat;

import com.eveningoutpost.dexdrip.Models.BgReading;

import java.text.DecimalFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.List;
import java.util.TimeZone;

import lecho.lib.hellocharts.model.Axis;
import lecho.lib.hellocharts.model.AxisValue;
import lecho.lib.hellocharts.model.Line;
import lecho.lib.hellocharts.model.LineChartData;
import lecho.lib.hellocharts.model.PointValue;
import lecho.lib.hellocharts.model.Viewport;
import lecho.lib.hellocharts.util.Utils;
import lecho.lib.hellocharts.view.Chart;

/**
 * Created by stephenblack on 11/15/14.
 *
 * Builds the blood-glucose hellocharts line chart from the latest {@link BgReading}
 * rows: the main chart data, the preview chart data, both axes, and the viewport.
 * Values are displayed in mg/dl or mmol/l depending on the "units" preference.
 */
public class BgGraphBuilder {
    // Chart window in epoch milliseconds: from 24 hours ago to 10 minutes from now.
    public double end_time = new Date().getTime() + (60000 * 10);
    public double start_time = end_time - (60000 * 60 * 24);
    public Context context;
    public SharedPreferences prefs;
    public double highMark;  // upper target bound, in display units (prefs "highValue")
    public double lowMark;   // lower target bound, in display units (prefs "lowValue")
    public double defaultMinY;
    public double defaultMaxY;
    public boolean doMgdl;   // true = mg/dl display, false = mmol/l
    final int pointSize;
    final int axisTextSize;
    final int previewAxisTextSize;
    final int hoursPreviewStep;
    private double endHour;  // start of the current hour (epoch ms), computed in xAxis()
    // One reading every 5 minutes over 24 hours.
    private final int numValues = (60 / 5) * 24;
    private final List<BgReading> bgReadings = BgReading.latestForGraph(numValues, start_time);
    private List<PointValue> inRangeValues = new ArrayList<PointValue>();
    private List<PointValue> highValues = new ArrayList<PointValue>();
    private List<PointValue> lowValues = new ArrayList<PointValue>();
    public Viewport viewport;

    public BgGraphBuilder(Context context) {
        this.context = context;
        this.prefs = PreferenceManager.getDefaultSharedPreferences(context);
        this.highMark = Double.parseDouble(prefs.getString("highValue", "170"));
        this.lowMark = Double.parseDouble(prefs.getString("lowValue", "70"));
        this.doMgdl = (prefs.getString("units", "mgdl").compareTo("mgdl") == 0);
        defaultMinY = unitized(40);
        defaultMaxY = unitized(250);
        // Larger points and labels on extra-large tablets.
        pointSize = isXLargeTablet() ? 5 : 3;
        axisTextSize = isXLargeTablet() ? 20 : Axis.DEFAULT_TEXT_SIZE_SP;
        previewAxisTextSize = isXLargeTablet() ? 12 : 5;
        hoursPreviewStep = isXLargeTablet() ? 2 : 1;
    }

    /** Builds the main chart data: all lines plus the left Y axis and bottom X axis. */
    public LineChartData lineData() {
        LineChartData lineData = new LineChartData(defaultLines());
        lineData.setAxisYLeft(yAxis());
        lineData.setAxisXBottom(xAxis());
        return lineData;
    }

    /** Builds the preview (scroll/zoom) chart data with smaller reading points. */
    public LineChartData previewLineData() {
        LineChartData previewLineData = new LineChartData(lineData());
        previewLineData.setAxisYLeft(yAxis());
        previewLineData.setAxisXBottom(previewXAxis());
        // Indices 4..6 are the in-range/low/high reading lines in the order
        // defaultLines() adds them. NOTE(review): fragile index coupling — keep in
        // sync with defaultLines() if the line order ever changes.
        previewLineData.getLines().get(4).setPointRadius(2);
        previewLineData.getLines().get(5).setPointRadius(2);
        previewLineData.getLines().get(6).setPointRadius(2);
        return previewLineData;
    }

    /**
     * Assembles all chart lines. Order matters: previewLineData() addresses the
     * reading lines by position (indices 4, 5, 6).
     */
    public List<Line> defaultLines() {
        addBgReadingValues();
        List<Line> lines = new ArrayList<Line>();
        lines.add(minShowLine());
        lines.add(maxShowLine());
        lines.add(highLine());
        lines.add(lowLine());
        lines.add(inRangeValuesLine());
        lines.add(lowValuesLine());
        lines.add(highValuesLine());
        return lines;
    }

    /** Scatter line (points only) for readings above the high mark. */
    public Line highValuesLine() {
        Line highValuesLine = new Line(highValues);
        highValuesLine.setColor(Utils.COLOR_ORANGE);
        highValuesLine.setHasLines(false);
        highValuesLine.setPointRadius(pointSize);
        highValuesLine.setHasPoints(true);
        return highValuesLine;
    }

    /** Scatter line (points only) for readings below the low mark. */
    public Line lowValuesLine() {
        Line lowValuesLine = new Line(lowValues);
        lowValuesLine.setColor(Color.parseColor("#C30909"));
        lowValuesLine.setHasLines(false);
        lowValuesLine.setPointRadius(pointSize);
        lowValuesLine.setHasPoints(true);
        return lowValuesLine;
    }

    /** Scatter line (points only) for readings between the low and high marks. */
    public Line inRangeValuesLine() {
        Line inRangeValuesLine = new Line(inRangeValues);
        inRangeValuesLine.setColor(Utils.COLOR_BLUE);
        inRangeValuesLine.setHasLines(false);
        inRangeValuesLine.setPointRadius(pointSize);
        inRangeValuesLine.setHasPoints(true);
        return inRangeValuesLine;
    }

    // Partitions the readings into high/in-range/low point lists, clamping the
    // displayed value to [40, 400] mg/dl before unit conversion.
    private void addBgReadingValues() {
        for (BgReading bgReading : bgReadings) {
            if (bgReading.calculated_value >= 400) {
                highValues.add(new PointValue((float) bgReading.timestamp, (float) unitized(400)));
            } else if (unitized(bgReading.calculated_value) >= highMark) {
                highValues.add(new PointValue((float) bgReading.timestamp, (float) unitized(bgReading.calculated_value)));
            } else if (unitized(bgReading.calculated_value) >= lowMark) {
                inRangeValues.add(new PointValue((float) bgReading.timestamp, (float) unitized(bgReading.calculated_value)));
            } else if (bgReading.calculated_value >= 40) {
                lowValues.add(new PointValue((float) bgReading.timestamp, (float) unitized(bgReading.calculated_value)));
            } else {
                lowValues.add(new PointValue((float) bgReading.timestamp, (float) unitized(40)));
            }
        }
    }

    /** Horizontal guide line at the high mark. */
    public Line highLine() {
        List<PointValue> highLineValues = new ArrayList<PointValue>();
        highLineValues.add(new PointValue((float) start_time, (float) highMark));
        highLineValues.add(new PointValue((float) end_time, (float) highMark));
        Line highLine = new Line(highLineValues);
        highLine.setHasPoints(false);
        highLine.setStrokeWidth(1);
        highLine.setColor(Utils.COLOR_ORANGE);
        return highLine;
    }

    /** Horizontal guide line at the low mark, filled below. */
    public Line lowLine() {
        List<PointValue> lowLineValues = new ArrayList<PointValue>();
        lowLineValues.add(new PointValue((float) start_time, (float) lowMark));
        lowLineValues.add(new PointValue((float) end_time, (float) lowMark));
        Line lowLine = new Line(lowLineValues);
        lowLine.setHasPoints(false);
        lowLine.setAreaTransparency(50);
        lowLine.setColor(Color.parseColor("#C30909"));
        lowLine.setStrokeWidth(1);
        lowLine.setFilled(true);
        return lowLine;
    }

    /** Invisible line pinning the chart's maximum Y extent. */
    public Line maxShowLine() {
        List<PointValue> maxShowValues = new ArrayList<PointValue>();
        maxShowValues.add(new PointValue((float) start_time, (float) defaultMaxY));
        maxShowValues.add(new PointValue((float) end_time, (float) defaultMaxY));
        Line maxShowLine = new Line(maxShowValues);
        maxShowLine.setHasLines(false);
        maxShowLine.setHasPoints(false);
        return maxShowLine;
    }

    /** Invisible line pinning the chart's minimum Y extent. */
    public Line minShowLine() {
        List<PointValue> minShowValues = new ArrayList<PointValue>();
        minShowValues.add(new PointValue((float) start_time, (float) defaultMinY));
        minShowValues.add(new PointValue((float) end_time, (float) defaultMinY));
        Line minShowLine = new Line(minShowValues);
        minShowLine.setHasPoints(false);
        minShowLine.setHasLines(false);
        return minShowLine;
    }

    /////////AXIS RELATED//////////////

    /** Y axis: labels every 50 mg/dl, or every 2 mmol/l. */
    public Axis yAxis() {
        Axis yAxis = new Axis();
        yAxis.setAutoGenerated(false);
        List<AxisValue> axisValues = new ArrayList<AxisValue>();
        for (int j = 1; j <= 12; j += 1) {
            if (doMgdl) {
                axisValues.add(new AxisValue(j * 50));
            } else {
                axisValues.add(new AxisValue(j * 2));
            }
        }
        yAxis.setValues(axisValues);
        yAxis.setHasLines(true);
        yAxis.setMaxLabelChars(5);
        yAxis.setInside(true);
        yAxis.setTextSize(axisTextSize);
        return yAxis;
    }

    /** X axis: one label per hour for the last 24 hours, ending at the current hour. */
    public Axis xAxis() {
        Axis xAxis = new Axis();
        xAxis.setAutoGenerated(false);
        List<AxisValue> xAxisValues = new ArrayList<AxisValue>();
        GregorianCalendar now = new GregorianCalendar();
        GregorianCalendar today = new GregorianCalendar(now.get(Calendar.YEAR), now.get(Calendar.MONTH), now.get(Calendar.DAY_OF_MONTH));
        final java.text.DateFormat timeFormat = hourFormat();
        timeFormat.setTimeZone(TimeZone.getDefault());
        double start_hour = today.getTime().getTime(); // midnight today, epoch ms
        double timeNow = new Date().getTime();
        // Find the start of the current hour (stored in endHour).
        for (int l = 0; l <= 24; l++) {
            if ((start_hour + (60000 * 60 * (l))) < timeNow) {
                if ((start_hour + (60000 * 60 * (l + 1))) >= timeNow) {
                    endHour = start_hour + (60000 * 60 * (l));
                    l = 25; // found it; force loop exit
                }
            }
        }
        // Label each of the 25 hour marks going backwards from endHour.
        for (int l = 0; l <= 24; l++) {
            double timestamp = endHour - (60000 * 60 * l);
            xAxisValues.add(new AxisValue((long) (timestamp), (timeFormat.format(timestamp)).toCharArray()));
        }
        xAxis.setValues(xAxisValues);
        xAxis.setHasLines(true);
        xAxis.setTextSize(axisTextSize);
        return xAxis;
    }

    // Hour label format honoring the device's 12/24-hour setting.
    private SimpleDateFormat hourFormat() {
        return new SimpleDateFormat(DateFormat.is24HourFormat(context) ? "HH" : "h a");
    }

    private boolean isXLargeTablet() {
        return (context.getResources().getConfiguration().screenLayout & Configuration.SCREENLAYOUT_SIZE_MASK) >= Configuration.SCREENLAYOUT_SIZE_XLARGE;
    }

    /** X axis for the preview chart; labels are spaced hoursPreviewStep hours apart. */
    public Axis previewXAxis() {
        List<AxisValue> previewXaxisValues = new ArrayList<AxisValue>();
        final java.text.DateFormat timeFormat = hourFormat();
        timeFormat.setTimeZone(TimeZone.getDefault());
        for (int l = 0; l <= 24; l += hoursPreviewStep) {
            double timestamp = endHour - (60000 * 60 * l);
            previewXaxisValues.add(new AxisValue((long) (timestamp), (timeFormat.format(timestamp)).toCharArray()));
        }
        Axis previewXaxis = new Axis();
        previewXaxis.setValues(previewXaxisValues);
        previewXaxis.setHasLines(true);
        previewXaxis.setTextSize(previewAxisTextSize);
        return previewXaxis;
    }

    /////////VIEWPORT RELATED//////////////

    /** Centers the viewport on "now", showing roughly the most recent part of the day. */
    public Viewport advanceViewport(Chart chart, Chart previewChart) {
        viewport = new Viewport(previewChart.getMaximumViewport());
        // Shrink the 24h viewport horizontally (86400000 ms = 1 day).
        viewport.inset((float) (86400000 / 2.5), 0);
        double distance_to_move = (new Date().getTime()) - viewport.left - (((viewport.right - viewport.left) / 2));
        viewport.offset((float) distance_to_move, 0);
        return viewport;
    }

    /** Converts a mg/dl value into the configured display unit. */
    public double unitized(double value) {
        if (doMgdl) {
            return value;
        } else {
            return mmolConvert(value);
        }
    }

    /** Formats a mg/dl value for display, clamping to "HIGH" (>= 400) and "LOW" (< 40). */
    public String unitized_string(double value) {
        value = Math.round(value);
        DecimalFormat df = new DecimalFormat("#");
        df.setMaximumFractionDigits(0);
        if (value >= 400) {
            return "HIGH";
        } else if (value >= 40) {
            if (doMgdl) {
                df.setMaximumFractionDigits(0);
                df.setMinimumFractionDigits(0);
                return df.format(value);
            } else {
                // mmol/l is shown with exactly one decimal place.
                df.setMaximumFractionDigits(1);
                df.setMinimumFractionDigits(1);
                return df.format(mmolConvert(value));
            }
        } else {
            return "LOW";
        }
    }

    public double mmolConvert(double mgdl) {
        return mgdl * Constants.MGDL_TO_MMOLL;
    }

    /** Returns the display-unit label for the current preference. */
    public String unit() {
        if (doMgdl) {
            return "mg/dl";
        } else {
            return "mmol";
        }
    }
}
/*
 * Copyright 2014-2020 chronicle.software
 *
 * http://www.chronicle.software
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.openhft.chronicle.logger.jul;

import net.openhft.chronicle.logger.ChronicleLogLevel;
import net.openhft.chronicle.logger.ChronicleLogWriter;

import java.util.logging.Level;
import java.util.logging.LogRecord;
import java.util.logging.Logger;

/**
 * A java.util.logging {@link Logger} that bypasses the JUL handler chain and
 * forwards every enabled record directly to a {@link ChronicleLogWriter}.
 *
 * All {@code log}/{@code logp}/{@code logrb} overloads funnel into the
 * {@code append(...)} helpers; source class/method and resource bundle
 * arguments are intentionally ignored. The tracing methods
 * ({@code entering}/{@code exiting}/{@code throwing}) are unsupported.
 */
class ChronicleLogger extends Logger {

    protected final String name;
    protected final ChronicleLogWriter writer;
    protected final ChronicleLogLevel level;

    /**
     * c-tor
     *
     * @param writer destination for all enabled log events
     * @param name   logger name
     * @param level  minimum Chronicle level this logger emits
     */
    ChronicleLogger(final ChronicleLogWriter writer,
                    final String name,
                    final ChronicleLogLevel level) {
        super(name, null);

        this.writer = writer;
        this.name = name;
        this.level = level;

        // Mirror the Chronicle level onto the JUL superclass state.
        setLevel(level);
    }

    private void setLevel(final ChronicleLogLevel level) {
        super.setLevel(ChronicleHelper.getLogLevel(level));
    }

    // *************************************************************************
    // Package-private accessors
    // *************************************************************************

    String name() {
        return this.name;
    }

    ChronicleLogWriter writer() {
        return this.writer;
    }

    ChronicleLogLevel level() {
        return this.level;
    }

    // *************************************************************************
    // Logger API
    // *************************************************************************

    @Override
    public String getName() {
        return this.name;
    }

    @Override
    public void setParent(final Logger parent) {
        throw new UnsupportedOperationException("Cannot set parent logger");
    }

    @Override
    public void log(final LogRecord record) {
        append(record);
    }

    @Override
    public void log(final Level level, final String msg) {
        append(level, msg);
    }

    @Override
    public void log(final Level level, final String msg, final Object param1) {
        append(level, msg, param1);
    }

    @Override
    public void log(final Level level, final String msg, final Object[] params) {
        append(level, msg, params);
    }

    @Override
    public void log(final Level level, final String msg, final Throwable thrown) {
        append(level, msg, thrown);
    }

    // logp: source class/method are dropped; only level and message are kept.

    @Override
    public void logp(final Level level, final String sourceClass, final String sourceMethod, final String msg) {
        append(level, msg);
    }

    @Override
    public void logp(final Level level, final String sourceClass, final String sourceMethod, final String msg,
                     final Object param1) {
        append(level, msg, param1);
    }

    @Override
    public void logp(final Level level, final String sourceClass, final String sourceMethod, final String msg,
                     final Object[] params) {
        append(level, msg, params);
    }

    @Override
    public void logp(final Level level, final String sourceClass, final String sourceMethod, final String msg,
                     final Throwable thrown) {
        append(level, msg, thrown);
    }

    // logrb: the resource bundle name is dropped as well.

    @Override
    public void logrb(final Level level, final String sourceClass, final String sourceMethod,
                      final String bundleName, final String msg) {
        append(level, msg);
    }

    @Override
    public void logrb(final Level level, final String sourceClass, final String sourceMethod,
                      final String bundleName, final String msg, final Object param1) {
        append(level, msg, param1);
    }

    @Override
    public void logrb(final Level level, final String sourceClass, final String sourceMethod,
                      final String bundleName, final String msg, final Object[] params) {
        append(level, msg, params);
    }

    @Override
    public void logrb(final Level level, final String sourceClass, final String sourceMethod,
                      final String bundleName, final String msg, final Throwable thrown) {
        append(level, msg, thrown);
    }

    @Override
    public void severe(final String msg) {
        append(Level.SEVERE, msg);
    }

    @Override
    public void warning(final String msg) {
        append(Level.WARNING, msg);
    }

    @Override
    public void info(final String msg) {
        append(Level.INFO, msg);
    }

    @Override
    public void config(final String msg) {
        append(Level.CONFIG, msg);
    }

    @Override
    public void fine(final String msg) {
        append(Level.FINE, msg);
    }

    @Override
    public void finer(final String msg) {
        append(Level.FINER, msg);
    }

    @Override
    public void finest(final String msg) {
        append(Level.FINEST, msg);
    }

    @Override
    public void entering(final String sourceClass, final String sourceMethod) {
        throw new UnsupportedOperationException();
    }

    @Override
    public void entering(final String sourceClass, final String sourceMethod, final Object param1) {
        throw new UnsupportedOperationException();
    }

    @Override
    public void entering(final String sourceClass, final String sourceMethod, final Object[] params) {
        throw new UnsupportedOperationException();
    }

    @Override
    public void exiting(final String sourceClass, final String sourceMethod) {
        throw new UnsupportedOperationException();
    }

    @Override
    public void exiting(final String sourceClass, final String sourceMethod, final Object result) {
        throw new UnsupportedOperationException();
    }

    @Override
    public void throwing(final String sourceClass, final String sourceMethod, final Throwable thrown) {
        throw new UnsupportedOperationException();
    }

    // *************************************************************************
    // HELPERS
    // *************************************************************************

    @Override
    public boolean isLoggable(Level level) {
        return isLoggable(ChronicleHelper.getLogLevel(level));
    }

    public boolean isLoggable(ChronicleLogLevel level) {
        return level.isHigherOrEqualTo(this.level);
    }

    protected void append(final LogRecord record) {
        final ChronicleLogLevel lvl = ChronicleHelper.getLogLevel(record);
        if (!isLoggable(lvl)) {
            return;
        }
        writer.write(
                lvl,
                record.getMillis(),
                "thread-" + record.getThreadID(),
                record.getLoggerName(),
                record.getMessage(),
                record.getThrown(),
                record.getParameters());
    }

    protected void append(final Level level, String msg) {
        final ChronicleLogLevel lvl = ChronicleHelper.getLogLevel(level);
        if (!isLoggable(lvl)) {
            return;
        }
        writer.write(
                lvl,
                System.currentTimeMillis(),
                Thread.currentThread().getName(),
                this.name,
                msg);
    }

    protected void append(final Level level, String msg, Object... params) {
        final ChronicleLogLevel lvl = ChronicleHelper.getLogLevel(level);
        if (!isLoggable(lvl)) {
            return;
        }
        writer.write(
                lvl,
                System.currentTimeMillis(),
                Thread.currentThread().getName(),
                this.name,
                msg,
                null,
                params);
    }

    protected void append(final Level level, String msg, Throwable thrown) {
        final ChronicleLogLevel lvl = ChronicleHelper.getLogLevel(level);
        if (!isLoggable(lvl)) {
            return;
        }
        writer.write(
                lvl,
                System.currentTimeMillis(),
                Thread.currentThread().getName(),
                this.name,
                msg,
                thrown);
    }

    /** A logger that silently discards everything. */
    public static class Null extends ChronicleLogger {

        public static final ChronicleLogger INSTANCE = new Null();

        private Null() {
            super(null, null, null);
        }

        @Override
        protected void append(final LogRecord record) {
        }

        @Override
        protected void append(final Level level, String msg) {
        }

        @Override
        protected void append(final Level level, String msg, Object[] params) {
        }

        @Override
        protected void append(final Level level, String msg, Throwable thrown) {
        }
    }
}
/*
 * Copyright 2006-2007 Jeremias Maerki.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.aqnote.app.barcode.core.datamatrix.encoder;

import com.aqnote.app.barcode.core.Dimension;

import java.util.Arrays;

/**
 * DataMatrix ECC 200 data encoder following the algorithm described in ISO/IEC 16022:200(E) in
 * annex S.
 */
public final class HighLevelEncoder {

    /**
     * Padding character
     */
    private static final char PAD = 129;
    /**
     * mode latch to C40 encodation mode
     */
    static final char LATCH_TO_C40 = 230;
    /**
     * mode latch to Base 256 encodation mode
     */
    static final char LATCH_TO_BASE256 = 231;
    /**
     * FNC1 Codeword
     */
    //private static final char FNC1 = 232;
    /**
     * Structured Append Codeword
     */
    //private static final char STRUCTURED_APPEND = 233;
    /**
     * Reader Programming
     */
    //private static final char READER_PROGRAMMING = 234;
    /**
     * Upper Shift
     */
    static final char UPPER_SHIFT = 235;
    /**
     * 05 Macro
     */
    private static final char MACRO_05 = 236;
    /**
     * 06 Macro
     */
    private static final char MACRO_06 = 237;
    /**
     * mode latch to ANSI X.12 encodation mode
     */
    static final char LATCH_TO_ANSIX12 = 238;
    /**
     * mode latch to Text encodation mode
     */
    static final char LATCH_TO_TEXT = 239;
    /**
     * mode latch to EDIFACT encodation mode
     */
    static final char LATCH_TO_EDIFACT = 240;
    /**
     * ECI character (Extended Channel Interpretation)
     */
    //private static final char ECI = 241;
    /**
     * Unlatch from C40 encodation
     */
    static final char C40_UNLATCH = 254;
    /**
     * Unlatch from X12 encodation (same codeword as C40 unlatch)
     */
    static final char X12_UNLATCH = 254;
    /**
     * 05 Macro header
     */
    private static final String MACRO_05_HEADER = "[)>\u001E05\u001D";
    /**
     * 06 Macro header
     */
    private static final String MACRO_06_HEADER = "[)>\u001E06\u001D";
    /**
     * Macro trailer
     */
    private static final String MACRO_TRAILER = "\u001E\u0004";

    // Encodation mode indices. The order matters: they index both the encoder
    // array in encodeHighLevel() and the charCounts array in lookAheadTest().
    static final int ASCII_ENCODATION = 0;
    static final int C40_ENCODATION = 1;
    static final int TEXT_ENCODATION = 2;
    static final int X12_ENCODATION = 3;
    static final int EDIFACT_ENCODATION = 4;
    static final int BASE256_ENCODATION = 5;

    // Utility class: no instances.
    private HighLevelEncoder() {
    }

    /*
     * Converts the message to a byte array using the default encoding (cp437) as defined by the
     * specification
     *
     * @param msg the message
     * @return the byte array of the message
     */
    /*
    public static byte[] getBytesForMessage(String msg) {
        return msg.getBytes(Charset.forName("cp437")); //See 4.4.3 and annex B of ISO/IEC 15438:2001(E)
    }
    */

    // 253-state randomization applied to pad codewords, keyed on codeword position.
    private static char randomize253State(char ch, int codewordPosition) {
        int pseudoRandom = ((149 * codewordPosition) % 253) + 1;
        int tempVariable = ch + pseudoRandom;
        return (char) (tempVariable <= 254 ? tempVariable : tempVariable - 254);
    }

    /**
     * Performs message encoding of a DataMatrix message using the algorithm described in annex P
     * of ISO/IEC 16022:2000(E).
     *
     * @param msg the message
     * @return the encoded message (the char values range from 0 to 255)
     */
    public static String encodeHighLevel(String msg) {
        return encodeHighLevel(msg, SymbolShapeHint.FORCE_NONE, null, null);
    }

    /**
     * Performs message encoding of a DataMatrix message using the algorithm described in annex P
     * of ISO/IEC 16022:2000(E).
     *
     * @param msg the message
     * @param shape requested shape. May be {@code SymbolShapeHint.FORCE_NONE},
     *   {@code SymbolShapeHint.FORCE_SQUARE} or {@code SymbolShapeHint.FORCE_RECTANGLE}.
     * @param minSize the minimum symbol size constraint or null for no constraint
     * @param maxSize the maximum symbol size constraint or null for no constraint
     * @return the encoded message (the char values range from 0 to 255)
     */
    public static String encodeHighLevel(String msg,
                                         SymbolShapeHint shape,
                                         Dimension minSize,
                                         Dimension maxSize) {
        //the codewords 0..255 are encoded as Unicode characters
        // One encoder per encodation mode, indexed by the *_ENCODATION constants.
        Encoder[] encoders = {
            new ASCIIEncoder(), new C40Encoder(), new TextEncoder(),
            new X12Encoder(), new EdifactEncoder(), new Base256Encoder()
        };

        EncoderContext context = new EncoderContext(msg);
        context.setSymbolShape(shape);
        context.setSizeConstraints(minSize, maxSize);

        // Macro 05/06 messages: replace the header with a single macro codeword
        // and skip the two trailer characters at the end.
        if (msg.startsWith(MACRO_05_HEADER) && msg.endsWith(MACRO_TRAILER)) {
            context.writeCodeword(MACRO_05);
            context.setSkipAtEnd(2);
            context.pos += MACRO_05_HEADER.length();
        } else if (msg.startsWith(MACRO_06_HEADER) && msg.endsWith(MACRO_TRAILER)) {
            context.writeCodeword(MACRO_06);
            context.setSkipAtEnd(2);
            context.pos += MACRO_06_HEADER.length();
        }

        // Run the active encoder until it signals a mode switch or input is exhausted.
        int encodingMode = ASCII_ENCODATION; //Default mode
        while (context.hasMoreCharacters()) {
            encoders[encodingMode].encode(context);
            if (context.getNewEncoding() >= 0) {
                encodingMode = context.getNewEncoding();
                context.resetEncoderSignal();
            }
        }
        int len = context.getCodewordCount();
        context.updateSymbolInfo();
        int capacity = context.getSymbolInfo().getDataCapacity();
        if (len < capacity) {
            // Return to ASCII before padding unless we are already in an
            // encodation that needs no explicit unlatch.
            if (encodingMode != ASCII_ENCODATION && encodingMode != BASE256_ENCODATION) {
                context.writeCodeword('\u00fe'); //Unlatch (254)
            }
        }
        //Padding
        StringBuilder codewords = context.getCodewords();
        if (codewords.length() < capacity) {
            // First pad codeword is plain PAD; subsequent ones are randomized.
            codewords.append(PAD);
        }
        while (codewords.length() < capacity) {
            codewords.append(randomize253State(PAD, codewords.length() + 1));
        }

        return context.getCodewords().toString();
    }

    // Look-ahead test of annex P: estimates which encodation mode encodes the
    // remainder of the message (from startpos) most compactly.
    static int lookAheadTest(CharSequence msg, int startpos, int currentMode) {
        if (startpos >= msg.length()) {
            return currentMode;
        }
        float[] charCounts;
        //step J
        if (currentMode ==
ASCII_ENCODATION) { charCounts = new float[]{0, 1, 1, 1, 1, 1.25f}; } else { charCounts = new float[]{1, 2, 2, 2, 2, 2.25f}; charCounts[currentMode] = 0; } int charsProcessed = 0; while (true) { //step K if ((startpos + charsProcessed) == msg.length()) { int min = Integer.MAX_VALUE; byte[] mins = new byte[6]; int[] intCharCounts = new int[6]; min = findMinimums(charCounts, intCharCounts, min, mins); int minCount = getMinimumCount(mins); if (intCharCounts[ASCII_ENCODATION] == min) { return ASCII_ENCODATION; } if (minCount == 1 && mins[BASE256_ENCODATION] > 0) { return BASE256_ENCODATION; } if (minCount == 1 && mins[EDIFACT_ENCODATION] > 0) { return EDIFACT_ENCODATION; } if (minCount == 1 && mins[TEXT_ENCODATION] > 0) { return TEXT_ENCODATION; } if (minCount == 1 && mins[X12_ENCODATION] > 0) { return X12_ENCODATION; } return C40_ENCODATION; } char c = msg.charAt(startpos + charsProcessed); charsProcessed++; //step L if (isDigit(c)) { charCounts[ASCII_ENCODATION] += 0.5f; } else if (isExtendedASCII(c)) { charCounts[ASCII_ENCODATION] = (float) Math.ceil(charCounts[ASCII_ENCODATION]); charCounts[ASCII_ENCODATION] += 2.0f; } else { charCounts[ASCII_ENCODATION] = (float) Math.ceil(charCounts[ASCII_ENCODATION]); charCounts[ASCII_ENCODATION]++; } //step M if (isNativeC40(c)) { charCounts[C40_ENCODATION] += 2.0f / 3.0f; } else if (isExtendedASCII(c)) { charCounts[C40_ENCODATION] += 8.0f / 3.0f; } else { charCounts[C40_ENCODATION] += 4.0f / 3.0f; } //step N if (isNativeText(c)) { charCounts[TEXT_ENCODATION] += 2.0f / 3.0f; } else if (isExtendedASCII(c)) { charCounts[TEXT_ENCODATION] += 8.0f / 3.0f; } else { charCounts[TEXT_ENCODATION] += 4.0f / 3.0f; } //step O if (isNativeX12(c)) { charCounts[X12_ENCODATION] += 2.0f / 3.0f; } else if (isExtendedASCII(c)) { charCounts[X12_ENCODATION] += 13.0f / 3.0f; } else { charCounts[X12_ENCODATION] += 10.0f / 3.0f; } //step P if (isNativeEDIFACT(c)) { charCounts[EDIFACT_ENCODATION] += 3.0f / 4.0f; } else if (isExtendedASCII(c)) { 
charCounts[EDIFACT_ENCODATION] += 17.0f / 4.0f; } else { charCounts[EDIFACT_ENCODATION] += 13.0f / 4.0f; } // step Q if (isSpecialB256(c)) { charCounts[BASE256_ENCODATION] += 4.0f; } else { charCounts[BASE256_ENCODATION]++; } //step R if (charsProcessed >= 4) { int[] intCharCounts = new int[6]; byte[] mins = new byte[6]; findMinimums(charCounts, intCharCounts, Integer.MAX_VALUE, mins); int minCount = getMinimumCount(mins); if (intCharCounts[ASCII_ENCODATION] < intCharCounts[BASE256_ENCODATION] && intCharCounts[ASCII_ENCODATION] < intCharCounts[C40_ENCODATION] && intCharCounts[ASCII_ENCODATION] < intCharCounts[TEXT_ENCODATION] && intCharCounts[ASCII_ENCODATION] < intCharCounts[X12_ENCODATION] && intCharCounts[ASCII_ENCODATION] < intCharCounts[EDIFACT_ENCODATION]) { return ASCII_ENCODATION; } if (intCharCounts[BASE256_ENCODATION] < intCharCounts[ASCII_ENCODATION] || (mins[C40_ENCODATION] + mins[TEXT_ENCODATION] + mins[X12_ENCODATION] + mins[EDIFACT_ENCODATION]) == 0) { return BASE256_ENCODATION; } if (minCount == 1 && mins[EDIFACT_ENCODATION] > 0) { return EDIFACT_ENCODATION; } if (minCount == 1 && mins[TEXT_ENCODATION] > 0) { return TEXT_ENCODATION; } if (minCount == 1 && mins[X12_ENCODATION] > 0) { return X12_ENCODATION; } if (intCharCounts[C40_ENCODATION] + 1 < intCharCounts[ASCII_ENCODATION] && intCharCounts[C40_ENCODATION] + 1 < intCharCounts[BASE256_ENCODATION] && intCharCounts[C40_ENCODATION] + 1 < intCharCounts[EDIFACT_ENCODATION] && intCharCounts[C40_ENCODATION] + 1 < intCharCounts[TEXT_ENCODATION]) { if (intCharCounts[C40_ENCODATION] < intCharCounts[X12_ENCODATION]) { return C40_ENCODATION; } if (intCharCounts[C40_ENCODATION] == intCharCounts[X12_ENCODATION]) { int p = startpos + charsProcessed + 1; while (p < msg.length()) { char tc = msg.charAt(p); if (isX12TermSep(tc)) { return X12_ENCODATION; } if (!isNativeX12(tc)) { break; } p++; } return C40_ENCODATION; } } } } } private static int findMinimums(float[] charCounts, int[] intCharCounts, int min, byte[] 
mins) { Arrays.fill(mins, (byte) 0); for (int i = 0; i < 6; i++) { intCharCounts[i] = (int) Math.ceil(charCounts[i]); int current = intCharCounts[i]; if (min > current) { min = current; Arrays.fill(mins, (byte) 0); } if (min == current) { mins[i]++; } } return min; } private static int getMinimumCount(byte[] mins) { int minCount = 0; for (int i = 0; i < 6; i++) { minCount += mins[i]; } return minCount; } static boolean isDigit(char ch) { return ch >= '0' && ch <= '9'; } static boolean isExtendedASCII(char ch) { return ch >= 128 && ch <= 255; } private static boolean isNativeC40(char ch) { return (ch == ' ') || (ch >= '0' && ch <= '9') || (ch >= 'A' && ch <= 'Z'); } private static boolean isNativeText(char ch) { return (ch == ' ') || (ch >= '0' && ch <= '9') || (ch >= 'a' && ch <= 'z'); } private static boolean isNativeX12(char ch) { return isX12TermSep(ch) || (ch == ' ') || (ch >= '0' && ch <= '9') || (ch >= 'A' && ch <= 'Z'); } private static boolean isX12TermSep(char ch) { return (ch == '\r') //CR || (ch == '*') || (ch == '>'); } private static boolean isNativeEDIFACT(char ch) { return ch >= ' ' && ch <= '^'; } private static boolean isSpecialB256(char ch) { return false; //TODO NOT IMPLEMENTED YET!!! } /** * Determines the number of consecutive characters that are encodable using numeric compaction. * * @param msg the message * @param startpos the start position within the message * @return the requested character count */ public static int determineConsecutiveDigitCount(CharSequence msg, int startpos) { int count = 0; int len = msg.length(); int idx = startpos; if (idx < len) { char ch = msg.charAt(idx); while (isDigit(ch) && idx < len) { count++; idx++; if (idx < len) { ch = msg.charAt(idx); } } } return count; } static void illegalCharacter(char c) { String hex = Integer.toHexString(c); hex = "0000".substring(0, 4 - hex.length()) + hex; throw new IllegalArgumentException("Illegal character: " + c + " (0x" + hex + ')'); } }
/*
 * Copyright (c) 2010-2020. Axon Framework
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.axonframework.spring.config;

import org.axonframework.commandhandling.CommandBus;
import org.axonframework.common.AxonConfigurationException;
import org.axonframework.common.annotation.AnnotationUtils;
import org.axonframework.common.caching.Cache;
import org.axonframework.common.jpa.EntityManagerProvider;
import org.axonframework.common.lock.LockFactory;
import org.axonframework.common.lock.NullLockFactory;
import org.axonframework.common.transaction.TransactionManager;
import org.axonframework.config.AggregateConfigurer;
import org.axonframework.config.Configuration;
import org.axonframework.config.Configurer;
import org.axonframework.config.DefaultConfigurer;
import org.axonframework.config.EventProcessingConfiguration;
import org.axonframework.config.EventProcessingConfigurer;
import org.axonframework.config.EventProcessingModule;
import org.axonframework.config.ModuleConfiguration;
import org.axonframework.config.ProcessingGroup;
import org.axonframework.config.TagsConfiguration;
import org.axonframework.deadline.DeadlineManager;
import org.axonframework.eventhandling.ErrorHandler;
import org.axonframework.eventhandling.EventBus;
import org.axonframework.eventhandling.EventMessage;
import org.axonframework.eventhandling.ListenerInvocationErrorHandler;
import org.axonframework.eventhandling.scheduling.EventScheduler;
import org.axonframework.eventhandling.tokenstore.TokenStore;
import org.axonframework.eventsourcing.AggregateFactory;
import org.axonframework.eventsourcing.SnapshotTriggerDefinition;
import org.axonframework.eventsourcing.eventstore.EventStorageEngine;
import org.axonframework.messaging.annotation.HandlerDefinition;
import org.axonframework.messaging.annotation.MessageHandler;
import org.axonframework.messaging.annotation.ParameterResolverFactory;
import org.axonframework.messaging.correlation.CorrelationDataProvider;
import org.axonframework.modelling.command.CommandTargetResolver;
import org.axonframework.modelling.command.GenericJpaRepository;
import org.axonframework.modelling.command.Repository;
import org.axonframework.modelling.saga.ResourceInjector;
import org.axonframework.modelling.saga.repository.SagaStore;
import org.axonframework.queryhandling.QueryBus;
import org.axonframework.queryhandling.QueryUpdateEmitter;
import org.axonframework.serialization.Serializer;
import org.axonframework.serialization.upcasting.event.EventUpcaster;
import org.axonframework.spring.config.annotation.SpringContextHandlerDefinitionBuilder;
import org.axonframework.spring.config.annotation.SpringContextParameterResolverFactoryBuilder;
import org.axonframework.spring.eventsourcing.SpringPrototypeAggregateFactory;
import org.axonframework.spring.messaging.unitofwork.SpringTransactionManager;
import org.axonframework.spring.saga.SpringResourceInjector;
import org.axonframework.spring.stereotype.Aggregate;
import org.axonframework.spring.stereotype.Saga;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.BeanFactory;
import org.springframework.beans.factory.BeanFactoryAware;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.beans.factory.config.RuntimeBeanReference;
import org.springframework.beans.factory.support.BeanDefinitionReaderUtils;
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
import org.springframework.beans.factory.support.ManagedList;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.DeferredImportSelector;
import org.springframework.context.annotation.ImportBeanDefinitionRegistrar;
import org.springframework.core.type.AnnotationMetadata;
import org.springframework.transaction.PlatformTransactionManager;

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;

import static java.lang.String.format;
import static org.axonframework.common.ReflectionUtils.methodsOf;
import static org.axonframework.common.annotation.AnnotationUtils.findAnnotationAttributes;
import static org.axonframework.spring.SpringUtils.isQualifierMatch;
import static org.springframework.beans.factory.BeanFactoryUtils.beanNamesForTypeIncludingAncestors;
import static org.springframework.beans.factory.support.BeanDefinitionBuilder.genericBeanDefinition;

/**
 * ImportBeanDefinitionRegistrar implementation that sets up an infrastructure Configuration based on beans available
 * in the application context.
 * <p>
 * This component is backed by a DefaultConfiguration (see {@link DefaultConfigurer#defaultConfiguration()}
 * and registers the following beans if present in the ApplicationContext:
 * <ul>
 * <li>{@link CommandBus}</li>
 * <li>{@link EventStorageEngine} or {@link EventBus}</li>
 * <li>{@link Serializer}</li>
 * <li>{@link TokenStore}</li>
 * <li>{@link PlatformTransactionManager}</li>
 * <li>{@link TransactionManager}</li>
 * <li>{@link SagaStore}</li>
 * <li>{@link ResourceInjector} (which defaults to {@link SpringResourceInjector}</li>
 * </ul>
 * <p>
 * Furthermore, all beans with an {@link Aggregate @Aggregate} or {@link Saga @Saga} annotation are inspected and
 * required components to operate the Aggregate or Saga are registered.
 *
 * @author Allard Buijze
 * @since 3.0
 */
public class SpringAxonAutoConfigurer implements ImportBeanDefinitionRegistrar, BeanFactoryAware {

    /**
     * Name of the {@link AxonConfiguration} bean.
     */
    @SuppressWarnings("WeakerAccess")
    public static final String AXON_CONFIGURATION_BEAN = "org.axonframework.spring.config.AxonConfiguration";

    /**
     * Name of the {@link Configurer} bean.
     */
    @SuppressWarnings("WeakerAccess")
    public static final String AXON_CONFIGURER_BEAN = "org.axonframework.config.Configurer";

    private static final Logger logger = LoggerFactory.getLogger(SpringAxonAutoConfigurer.class);

    private static final String EMPTY_STRING = "";

    // Populated by Spring via setBeanFactory() before registerBeanDefinitions() runs.
    private ConfigurableListableBeanFactory beanFactory;

    @Override
    public void registerBeanDefinitions(AnnotationMetadata importingClassMetadata, BeanDefinitionRegistry registry) {
        // Subscribers that attach @CommandHandler / @QueryHandler beans to the buses.
        registry.registerBeanDefinition("commandHandlerSubscriber",
                                        genericBeanDefinition(CommandHandlerSubscriber.class).getBeanDefinition());

        registry.registerBeanDefinition("queryHandlerSubscriber",
                                        genericBeanDefinition(QueryHandlerSubscriber.class).getBeanDefinition());

        Configurer configurer = DefaultConfigurer.defaultConfiguration(false);

        // Wire the Spring-context-backed ParameterResolverFactory and HandlerDefinition.
        RuntimeBeanReference parameterResolver =
                SpringContextParameterResolverFactoryBuilder.getBeanReference(registry);
        configurer.registerComponent(ParameterResolverFactory.class,
                                     c -> beanFactory.getBean(parameterResolver.getBeanName(),
                                                              ParameterResolverFactory.class));

        RuntimeBeanReference handlerDefinition =
                SpringContextHandlerDefinitionBuilder.getBeanReference(registry);
        configurer.registerHandlerDefinition((c, clazz) -> beanFactory.getBean(handlerDefinition.getBeanName(),
                                                                               HandlerDefinition.class));

        // Pick up infrastructure beans from the ApplicationContext, when present.
        registerComponent(CommandBus.class, configurer::configureCommandBus, configurer, Configuration::commandBus);
        registerComponent(QueryBus.class, configurer::configureQueryBus, configurer, Configuration::queryBus);
        registerComponent(QueryUpdateEmitter.class, configurer::configureQueryUpdateEmitter);
        registerComponent(
                EventStorageEngine.class, configurer::configureEmbeddedEventStore, configurer, Configuration::eventBus
        );
        registerComponent(EventBus.class, configurer::configureEventBus);
        registerComponent(Serializer.class, configurer::configureSerializer);
        registerComponent(Serializer.class, "eventSerializer", configurer::configureEventSerializer);
        registerComponent(Serializer.class, "messageSerializer", configurer::configureMessageSerializer);
        registerComponent(TokenStore.class, configurer);
        try {
            // PlatformTransactionManager lives in spring-tx, which may not be on the classpath.
            findComponent(PlatformTransactionManager.class).ifPresent(
                    ptm -> configurer.configureTransactionManager(c -> new SpringTransactionManager(getBean(ptm, c)))
            );
        } catch (NoClassDefFoundError error) {
            // that's fine...
        }
        registerComponent(TransactionManager.class, configurer::configureTransactionManager);
        registerComponent(SagaStore.class, configurer);
        registerComponent(ListenerInvocationErrorHandler.class, configurer);
        registerComponent(ErrorHandler.class, configurer);
        registerComponent(TagsConfiguration.class, configurer);

        // ResourceInjector defaults to the Spring-aware implementation when none is defined.
        String resourceInjector = findComponent(
                ResourceInjector.class, registry,
                () -> genericBeanDefinition(SpringResourceInjector.class).getBeanDefinition()
        );
        configurer.configureResourceInjector(c -> getBean(resourceInjector, c));

        registerComponent(EventScheduler.class, configurer, Configuration::eventScheduler);
        registerComponent(DeadlineManager.class, configurer, Configuration::deadlineManager);

        // Ensure an EventProcessingConfigurer bean exists, defaulting to a fresh module.
        EventProcessingModule eventProcessingModule = new EventProcessingModule();
        Optional<String> eventProcessingConfigurerOptional = findComponent(EventProcessingConfigurer.class);
        String eventProcessingConfigurerBeanName = eventProcessingConfigurerOptional
                .orElse("eventProcessingConfigurer");
        if (!eventProcessingConfigurerOptional.isPresent()) {
            registry.registerBeanDefinition(eventProcessingConfigurerBeanName,
                                            genericBeanDefinition(EventProcessingConfigurer.class,
                                                                  () -> eventProcessingModule)
                                                    .getBeanDefinition());
        }

        registerModuleConfigurations(configurer);
        registerCorrelationDataProviders(configurer);
        registerEventUpcasters(configurer);
        registerAggregateBeanDefinitions(configurer, registry);

        String eventProcessingConfigurationName = findComponent(EventProcessingConfiguration.class)
                .orElseThrow(() -> new AxonConfigurationException("Missing EventProcessingConfiguration bean"));

        // Expose the Configurer and the resulting AxonConfiguration as Spring beans.
        registry.registerBeanDefinition(AXON_CONFIGURER_BEAN,
                                        genericBeanDefinition(ConfigurerFactoryBean.class)
                                                .addConstructorArgValue(configurer).getBeanDefinition());
        registry.registerBeanDefinition(AXON_CONFIGURATION_BEAN, genericBeanDefinition(AxonConfiguration.class)
                .addConstructorArgReference(AXON_CONFIGURER_BEAN).getBeanDefinition());
        try {
            EventProcessingConfigurer eventProcessingConfigurer = configurer.eventProcessing();
            registerSagaBeanDefinitions(eventProcessingConfigurer);
            registerEventHandlerRegistrar(eventProcessingConfigurationName,
                                          eventProcessingConfigurerBeanName,
                                          registry);
        } catch (AxonConfigurationException ace) {
            // eventProcessing() throws when several configurers are present; skip auto-registration.
            logger.warn(
                    "There are several EventProcessingConfigurers registered, Axon will not automatically register sagas and event handlers.",
                    ace);
        }
    }

    /**
     * Registers all {@link CorrelationDataProvider} beans from the context with the given {@code configurer},
     * resolved lazily when the Configuration is built.
     */
    private void registerCorrelationDataProviders(Configurer configurer) {
        configurer.configureCorrelationDataProviders(
                c -> {
                    String[] correlationDataProviderBeans =
                            beanFactory.getBeanNamesForType(CorrelationDataProvider.class);
                    return Arrays.stream(correlationDataProviderBeans)
                                 .map(n -> (CorrelationDataProvider) getBean(n, c))
                                 .collect(Collectors.toList());
                });
    }

    /** Registers every {@link EventUpcaster} bean found in the context with the given {@code configurer}. */
    private void registerEventUpcasters(Configurer configurer) {
        Arrays.stream(beanFactory.getBeanNamesForType(EventUpcaster.class))
              .forEach(name -> configurer.registerEventUpcaster(c -> getBean(name, c)));
    }

    /**
     * Resolves a bean by name through the {@link ApplicationContext} registered on the given
     * {@code configuration}, so lookup happens at configuration time rather than registration time.
     */
    @SuppressWarnings("unchecked")
    private <T> T getBean(String beanName, Configuration configuration) {
        return (T) configuration.getComponent(ApplicationContext.class).getBean(beanName);
    }

    /**
     * Registers an {@code eventHandlerRegistrar} bean definition referencing every singleton bean that declares
     * at least one {@link MessageHandler}-annotated method accepting an {@link EventMessage}.
     */
    private void registerEventHandlerRegistrar(String epConfigurationBeanName, String epConfigurerBeanName,
                                               BeanDefinitionRegistry registry) {
        List<RuntimeBeanReference> beans = new ManagedList<>();
        beanFactory.getBeanNamesIterator().forEachRemaining(bean -> {
            if (!beanFactory.isFactoryBean(bean)) {
                Class<?> beanType = beanFactory.getType(bean);
                if (beanType != null && beanFactory.containsBeanDefinition(bean)
                        && beanFactory.getBeanDefinition(bean).isSingleton()) {
                    // Scan the bean's methods for @MessageHandler(messageType = EventMessage...).
                    boolean hasHandler =
                            StreamSupport.stream(methodsOf(beanType).spliterator(), false)
                                         .map(m -> findAnnotationAttributes(m, MessageHandler.class).orElse(null))
                                         .filter(Objects::nonNull)
                                         .anyMatch(attr -> EventMessage.class
                                                 .isAssignableFrom((Class) attr.get("messageType")));
                    if (hasHandler) {
                        beans.add(new RuntimeBeanReference(bean));
                    }
                }
            }
        });
        registry.registerBeanDefinition("eventHandlerRegistrar", genericBeanDefinition(EventHandlerRegistrar.class)
                .addConstructorArgReference(AXON_CONFIGURATION_BEAN)
                .addConstructorArgReference(epConfigurationBeanName)
                .addConstructorArgReference(epConfigurerBeanName)
                .addPropertyValue("eventHandlers", beans).getBeanDefinition());
    }

    /**
     * Registers every {@link ModuleConfiguration} bean with the {@code configurer}, wrapped so the bean itself is
     * only retrieved from the context when first needed.
     */
    private void registerModuleConfigurations(Configurer configurer) {
        String[] moduleConfigurations = beanFactory.getBeanNamesForType(ModuleConfiguration.class);
        for (String moduleConfiguration : moduleConfigurations) {
            configurer.registerModule(new LazyRetrievedModuleConfiguration(
                    () -> beanFactory.getBean(moduleConfiguration, ModuleConfiguration.class),
                    beanFactory.getType(moduleConfiguration)
            ));
        }
    }

    /**
     * Registers every {@link Saga @Saga}-annotated bean with the event processing configurer, honouring an optional
     * {@link ProcessingGroup @ProcessingGroup} assignment and the saga's configured {@code sagaStore} bean name.
     */
    @SuppressWarnings("unchecked")
    private void registerSagaBeanDefinitions(EventProcessingConfigurer configurer) {
        String[] sagas = beanFactory.getBeanNamesForAnnotation(Saga.class);
        for (String saga : sagas) {
            Saga sagaAnnotation = beanFactory.findAnnotationOnBean(saga, Saga.class);
            Class<?> sagaType = beanFactory.getType(saga);

            ProcessingGroup processingGroupAnnotation =
                    beanFactory.findAnnotationOnBean(saga, ProcessingGroup.class);
            if (processingGroupAnnotation != null && nonEmptyBeanName(processingGroupAnnotation.value())) {
                configurer.assignHandlerTypesMatching(processingGroupAnnotation.value(), sagaType::equals);
            }
            configurer.registerSaga(sagaType, sagaConfigurer -> {
                if (sagaAnnotation != null && nonEmptyBeanName(sagaAnnotation.sagaStore())) {
                    sagaConfigurer.configureSagaStore(c -> beanFactory.getBean(sagaAnnotation.sagaStore(),
                                                                              SagaStore.class));
                }
            });
        }
    }

    /**
     * Groups the {@link Aggregate @Aggregate}-annotated prototype beans by their top-most annotated supertype,
     * mapping each top-level aggregate to the subtype classes (and their prototype bean names) beneath it.
     */
    @SuppressWarnings("unchecked")
    private <A> Map<SpringAggregate<? super A>, Map<Class<? extends A>, String>> buildAggregateHierarchy(
            String[] aggregatePrototypes) {
        Map<SpringAggregate<? super A>, Map<Class<? extends A>, String>> hierarchy = new HashMap<>();
        for (String prototype : aggregatePrototypes) {
            Class<A> aggregateType = (Class<A>) beanFactory.getType(prototype);
            SpringAggregate<A> springAggregate = new SpringAggregate<>(prototype, aggregateType);
            Class<? super A> topType = topAnnotatedAggregateType(aggregateType);
            SpringAggregate<? super A> topSpringAggregate = new SpringAggregate<>(beanName(topType), topType);
            hierarchy.compute(topSpringAggregate, (type, subtypes) -> {
                if (subtypes == null) {
                    subtypes = new HashMap<>();
                }
                // The top-level aggregate itself is not one of its own subtypes.
                if (!type.equals(springAggregate)) {
                    subtypes.put(aggregateType, prototype);
                }
                return subtypes;
            });
        }
        return hierarchy;
    }

    /**
     * Resolves the (single) bean name for the given type, warning when several candidates exist and
     * throwing an {@link AxonConfigurationException} when none do.
     */
    private <A> String beanName(Class<A> type) {
        String[] beanNamesForType = beanFactory.getBeanNamesForType(type);
        if (beanNamesForType.length == 0) {
            throw new AxonConfigurationException(format("There are no spring beans for '%s' defined.",
                                                        type.getName()));
        } else {
            if (beanNamesForType.length != 1) {
                logger.warn("There are {} beans defined for '{}'.", beanNamesForType.length, type.getName());
            }
            return beanNamesForType[0];
        }
    }

    /**
     * Walks up the superclass chain (stopping below {@link Object}) and returns the highest class still
     * carrying the {@link Aggregate @Aggregate} annotation.
     */
    private <A> Class<? super A> topAnnotatedAggregateType(Class<A> type) {
        Class<? super A> top = type;
        Class<? super A> topAnnotated = top;
        while(!top.getSuperclass().equals(Object.class)) {
            top = top.getSuperclass();
            if (top.isAnnotationPresent(Aggregate.class)) {
                topAnnotated = top;
            }
        }
        return topAnnotated;
    }

    /**
     * Registers configuration for every {@link Aggregate @Aggregate}-annotated bean: repository (explicit bean,
     * generated {@link RepositoryFactoryBean}, or JPA-backed when annotated with {@code javax.persistence.Entity}),
     * aggregate factory, snapshot trigger/filter, cache and command target resolver.
     *
     * @param <A> generic specifying the Aggregate type being registered
     */
    @SuppressWarnings("unchecked")
    private <A> void registerAggregateBeanDefinitions(Configurer configurer, BeanDefinitionRegistry registry) {
        String[] aggregates = beanFactory.getBeanNamesForAnnotation(Aggregate.class);
        Map<SpringAggregate<? super A>, Map<Class<? extends A>, String>> hierarchy =
                buildAggregateHierarchy(aggregates);
        for (Map.Entry<SpringAggregate<? super A>, Map<Class<? extends A>, String>> aggregate
                : hierarchy.entrySet()) {
            Class<A> aggregateType = (Class<A>) aggregate.getKey().getClassType();
            String aggregatePrototype = aggregate.getKey().getBeanName();
            Aggregate aggregateAnnotation = aggregateType.getAnnotation(Aggregate.class);
            AggregateConfigurer<A> aggregateConfigurer = AggregateConfigurer.defaultConfiguration(aggregateType);
            aggregateConfigurer.withSubtypes(aggregate.getValue().keySet());

            if (EMPTY_STRING.equals(aggregateAnnotation.repository())) {
                // No explicit repository bean name on the annotation: derive conventional names.
                String repositoryName = lcFirst(aggregateType.getSimpleName()) + "Repository";
                String factoryName =
                        aggregatePrototype.substring(0, 1).toLowerCase()
                                + aggregatePrototype.substring(1) + "AggregateFactory";
                if (beanFactory.containsBean(repositoryName)) {
                    aggregateConfigurer.configureRepository(
                            c -> beanFactory.getBean(repositoryName, Repository.class));
                } else {
                    registry.registerBeanDefinition(repositoryName,
                                                    genericBeanDefinition(RepositoryFactoryBean.class)
                                                            .addConstructorArgValue(aggregateConfigurer)
                                                            .getBeanDefinition());

                    if (!registry.isBeanNameInUse(factoryName)) {
                        registry.registerBeanDefinition(
                                factoryName,
                                genericBeanDefinition(SpringPrototypeAggregateFactory.class)
                                        .addConstructorArgValue(aggregatePrototype)
                                        .addConstructorArgValue(aggregate.getValue())
                                        .getBeanDefinition());
                    }
                    aggregateConfigurer.configureAggregateFactory(
                            c -> beanFactory.getBean(factoryName, AggregateFactory.class)
                    );

                    String triggerDefinitionBeanName = aggregateAnnotation.snapshotTriggerDefinition();
                    if (nonEmptyBeanName(triggerDefinitionBeanName)) {
                        aggregateConfigurer.configureSnapshotTrigger(
                                c -> beanFactory.getBean(triggerDefinitionBeanName,
                                                         SnapshotTriggerDefinition.class)
                        );
                    }

                    String cacheBeanName = aggregateAnnotation.cache();
                    if (nonEmptyBeanName(cacheBeanName)) {
                        aggregateConfigurer.configureCache(
                                c -> beanFactory.getBean(cacheBeanName, Cache.class));
                    }

                    // JPA entities get a GenericJpaRepository instead of the generated factory bean.
                    if (AnnotationUtils.isAnnotationPresent(aggregateType, "javax.persistence.Entity")) {
                        aggregateConfigurer.configureRepository(
                                c -> GenericJpaRepository.builder(aggregateType)
                                                         .parameterResolverFactory(c.parameterResolverFactory())
                                                         .handlerDefinition(c.handlerDefinition(aggregateType))
                                                         .lockFactory(c.getComponent(
                                                                 LockFactory.class, () -> NullLockFactory.INSTANCE
                                                         ))
                                                         .entityManagerProvider(c.getComponent(
                                                                 EntityManagerProvider.class,
                                                                 () -> beanFactory.getBean(EntityManagerProvider.class)
                                                         ))
                                                         .eventBus(c.eventBus())
                                                         .repositoryProvider(c::repository)
                                                         .build()
                        );
                    }
                }
            } else {
                aggregateConfigurer.configureRepository(
                        c -> beanFactory.getBean(aggregateAnnotation.repository(), Repository.class)
                );
            }

            String snapshotFilterBeanName = aggregateAnnotation.snapshotFilter();
            if (nonEmptyBeanName(snapshotFilterBeanName)) {
                aggregateConfigurer.configureSnapshotFilter(c -> getBean(snapshotFilterBeanName, c));
            }

            String commandTargetResolverBeanName = aggregateAnnotation.commandTargetResolver();
            if (nonEmptyBeanName(commandTargetResolverBeanName)) {
                aggregateConfigurer.configureCommandTargetResolver(
                        c -> getBean(commandTargetResolverBeanName, c)
                );
            } else {
                findComponent(CommandTargetResolver.class).ifPresent(
                        commandTargetResolver -> aggregateConfigurer.configureCommandTargetResolver(
                                c -> getBean(commandTargetResolver, c)
                        )
                );
            }

            aggregateConfigurer.configureFilterEventsByType(c -> aggregateAnnotation.filterEventsByType());
            configurer.configureAggregate(aggregateConfigurer);
        }
    }

    /** Returns true when the given bean name is not the empty string (the annotation default). */
    private boolean nonEmptyBeanName(String beanName) {
        return !EMPTY_STRING.equals(beanName);
    }

    /**
     * Return the given {@code string}, with its first character lowercase
     *
     * @param string The input string
     * @return The input string, with first character lowercase
     */
    private String lcFirst(String string) {
        return string.substring(0, 1).toLowerCase() + string.substring(1);
    }

    /**
     * Finds the bean name for the given {@code componentType}, registering the supplied default bean definition
     * (under a generated name) when no candidate exists.
     */
    private <T> String findComponent(Class<T> componentType, BeanDefinitionRegistry registry,
                                     Supplier<BeanDefinition> defaultBean) {
        return findComponent(componentType).orElseGet(() -> {
            BeanDefinition beanDefinition = defaultBean.get();
            String beanName = BeanDefinitionReaderUtils.generateBeanName(beanDefinition, registry);
            registry.registerBeanDefinition(beanName, beanDefinition);
            return beanName;
        });
    }

    /**
     * Register a component of {@code componentType} with {@code componentQualifier} through the given {@code
     * registrationFunction}. The component to register will be a bean retrieved from the {@link ApplicationContext}
     * tied to the {@link Configuration}.
     *
     * @param componentType        the type of the component to register
     * @param componentQualifier   the qualifier of the component to register
     * @param registrationFunction the function to register the component to the {@link Configuration}
     * @param <T>                  the type of the component
     */
    private <T> void registerComponent(Class<T> componentType,
                                       String componentQualifier,
                                       Consumer<Function<Configuration, T>> registrationFunction) {
        findComponent(componentType, componentQualifier).ifPresent(
                componentName -> registrationFunction.accept(config -> getBean(componentName, config))
        );
    }

    /** Finds the first bean of {@code componentType} whose definition matches the given qualifier. */
    private <T> Optional<String> findComponent(Class<T> componentType, String componentQualifier) {
        return Stream.of(beanNamesForTypeIncludingAncestors(
                beanFactory, componentType
        ))
                     .filter(bean -> isQualifierMatch(bean, beanFactory, componentQualifier))
                     .findFirst();
    }

    /**
     * Register a component of {@code componentType} through the given {@code registrationFunction}. The component to
     * register will be a bean retrieved from the {@link ApplicationContext} tied to the {@link Configuration}.
     *
     * @param componentType        the type of the component to register
     * @param registrationFunction the function to register the component to the {@link Configuration}
     * @param <T>                  the type of the component
     */
    private <T> void registerComponent(Class<T> componentType,
                                       Consumer<Function<Configuration, T>> registrationFunction) {
        findComponent(componentType).ifPresent(
                componentName -> registrationFunction.accept(config -> getBean(componentName, config))
        );
    }

    /**
     * Register a component of {@code componentType} with the given {@code configurer} through {@link
     * Configurer#registerComponent(Class, Function)}. The component to register will be a bean retrieved from the
     * {@link ApplicationContext} tied to the {@link Configuration}.
     *
     * @param componentType the type of the component to register
     * @param configurer    the {@link Configurer} used to register the component with
     * @param <T>           the type of the component
     */
    private <T> void registerComponent(Class<T> componentType, Configurer configurer) {
        registerComponent(componentType,
                          builder -> configurer.registerComponent(componentType, builder),
                          configurer,
                          null);
    }

    /**
     * Register a component of {@code componentType} with the given {@code configurer}. through {@link
     * Configurer#registerComponent(Class, Function)}. The {@code initHandler} is used to initialize the component at
     * the right point in time. The component to register will be a bean retrieved from the {@link ApplicationContext}
     * tied to the {@link Configuration}.
     *
     * @param componentType the type of the component to register
     * @param configurer    the {@link Configurer} used to register the component with
     * @param initHandler   the function used to initialize the registered component
     * @param <T>           the type of the component
     */
    private <T> void registerComponent(Class<T> componentType,
                                       Configurer configurer,
                                       Consumer<Configuration> initHandler) {
        registerComponent(componentType,
                          builder -> configurer.registerComponent(componentType, builder),
                          configurer,
                          initHandler);
    }

    /**
     * Register a component of {@code componentType} through the given {@code registrationFunction}. The {@code
     * initHandler} is used to initialize the component at the right point in time. The component to register will be a
     * bean retrieved from the {@link ApplicationContext} tied to the {@link Configuration}.
     *
     * @param componentType        the type of the component to register
     * @param registrationFunction the function to register the component to the {@link Configuration}
     * @param configurer           the {@link Configurer} used to register the component with
     * @param initHandler          the function used to initialize the registered component
     * @param <T>                  the type of the component
     */
    private <T> void registerComponent(Class<T> componentType,
                                       Consumer<Function<Configuration, T>> registrationFunction,
                                       Configurer configurer,
                                       Consumer<Configuration> initHandler) {
        findComponent(componentType).ifPresent(componentName -> {
            registrationFunction.accept(config -> getBean(componentName, config));
            if (initHandler != null) {
                // Integer.MIN_VALUE makes the init handler run as early as possible on start.
                configurer.onInitialize(c -> c.onStart(Integer.MIN_VALUE, () -> initHandler.accept(c)));
            }
        });
    }

    /**
     * Finds the bean name for the given {@code componentType}: the sole candidate, the {@code @Primary} one among
     * several, or (with a warning) the first candidate when no primary is marked.
     */
    private <T> Optional<String> findComponent(Class<T> componentType) {
        String[] beans = beanNamesForTypeIncludingAncestors(beanFactory, componentType);
        if (beans.length == 1) {
            return Optional.of(beans[0]);
        } else if (beans.length > 1) {
            for (String bean : beans) {
                BeanDefinition beanDef = beanFactory.getMergedBeanDefinition(bean);
                if (beanDef.isPrimary()) {
                    return Optional.of(bean);
                }
            }
            logger.warn("Multiple beans of type {} found in application context: {}. Chose {}",
                        componentType.getSimpleName(), beans, beans[0]);
            return Optional.of(beans[0]);
        }
        return Optional.empty();
    }

    @Override
    public void setBeanFactory(BeanFactory beanFactory) throws BeansException {
        this.beanFactory = (ConfigurableListableBeanFactory) beanFactory;
    }

    /**
     * Implementation of an {@link ImportSelector} that enables the import of the {@link SpringAxonAutoConfigurer} after
     * all {@code @Configuration} beans have been processed.
     */
    public static class ImportSelector implements DeferredImportSelector {

        @Override
        public String[] selectImports(AnnotationMetadata importingClassMetadata) {
            return new String[]{SpringAxonAutoConfigurer.class.getName()};
        }
    }

    /**
     * {@link ModuleConfiguration} wrapper that defers retrieving the actual module bean from the context until
     * first use, while still answering type checks from the statically known module class.
     */
    private static class LazyRetrievedModuleConfiguration implements ModuleConfiguration {

        private final Supplier<ModuleConfiguration> delegateSupplier;
        private final Class<?> moduleType;
        // Lazily resolved delegate; populated on first getDelegate() call.
        private ModuleConfiguration delegate;

        LazyRetrievedModuleConfiguration(Supplier<ModuleConfiguration> delegateSupplier, Class<?> moduleType) {
            this.delegateSupplier = delegateSupplier;
            this.moduleType = moduleType;
        }

        @Override
        public void initialize(Configuration config) {
            getDelegate().initialize(config);
        }

        @Override
        public ModuleConfiguration unwrap() {
            return getDelegate();
        }

        @Override
        public boolean isType(Class<?> type) {
            return type.isAssignableFrom(moduleType);
        }

        private ModuleConfiguration getDelegate() {
            if (delegate == null) {
                delegate = delegateSupplier.get();
            }
            return delegate;
        }
    }

    /**
     * Value object pairing an aggregate's prototype bean name with its class; used as the key when grouping
     * aggregate subtypes under their top-level aggregate.
     */
    private static class SpringAggregate<T> {

        private final String beanName;
        private final Class<T> classType;

        private SpringAggregate(String beanName, Class<T> classType) {
            this.beanName = beanName;
            this.classType = classType;
        }

        public String getBeanName() {
            return beanName;
        }

        public Class<T> getClassType() {
            return classType;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) {
                return true;
            }
            if (o == null || getClass() != o.getClass()) {
                return false;
            }
            SpringAggregate<?> that = (SpringAggregate<?>) o;
            return Objects.equals(beanName, that.beanName) &&
                    Objects.equals(classType, that.classType);
        }

        @Override
        public int hashCode() {
            return Objects.hash(beanName, classType);
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.sshd.client.session;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.apache.sshd.client.ClientFactoryManager;
import org.apache.sshd.client.auth.UserAuth;
import org.apache.sshd.client.auth.UserInteraction;
import org.apache.sshd.client.future.AuthFuture;
import org.apache.sshd.client.future.DefaultAuthFuture;
import org.apache.sshd.common.FactoryManagerUtils;
import org.apache.sshd.common.NamedFactory;
import org.apache.sshd.common.NamedResource;
import org.apache.sshd.common.Service;
import org.apache.sshd.common.SshConstants;
import org.apache.sshd.common.SshException;
import org.apache.sshd.common.session.Session;
import org.apache.sshd.common.util.GenericUtils;
import org.apache.sshd.common.util.buffer.Buffer;
import org.apache.sshd.common.util.closeable.AbstractCloseable;

/**
 * Client side <code>ssh-auth</code> service.
 *
 * @author <a href="mailto:dev@mina.apache.org">Apache MINA SSHD Project</a>
 */
public class ClientUserAuthService extends AbstractCloseable implements Service, ClientSessionHolder {

    /**
     * The AuthFuture that is being used by the current auth request.  This encodes the state.
     * isSuccess -> authenticated, else if isDone -> server waiting for user auth, else authenticating.
     */
    private final AuthFuture authFuture;

    // The client session this auth service belongs to (validated in the constructor).
    private final ClientSessionImpl session;
    // Identities (passwords / key pairs) supplied by the caller of auth().
    private List<Object> identities;
    // The SSH service name requested after successful authentication (e.g. ssh-connection).
    private String service;
    // All user-auth factories configured on the factory manager.
    private List<NamedFactory<UserAuth>> authFactories;
    // Method names the client is willing to try, in preference order.
    private List<String> clientMethods;
    // Method names the server advertised as acceptable (from the last USERAUTH_FAILURE).
    private List<String> serverMethods;
    // The currently active auth method handler, or null between attempts.
    private UserAuth userAuth;
    // Index into clientMethods of the method currently being attempted.
    private int currentMethod;

    /**
     * Builds the list of client auth methods: either the (filtered) comma-separated
     * {@code PREFERRED_AUTHS} property, or all configured factories in order.
     *
     * @param s the session; must be a client-side session
     * @throws IllegalStateException if used on a server-side session
     */
    public ClientUserAuthService(Session s) {
        if (!(s instanceof ClientSessionImpl)) {
            throw new IllegalStateException("Client side service used on server side");
        }
        session = (ClientSessionImpl) s;
        authFuture = new DefaultAuthFuture(session.getLock());
        ClientFactoryManager manager = session.getFactoryManager();
        authFactories = manager.getUserAuthFactories();
        clientMethods = new ArrayList<>();

        String prefs = FactoryManagerUtils.getString(manager, ClientFactoryManager.PREFERRED_AUTHS);
        if (!GenericUtils.isEmpty(prefs)) {
            // Only keep preferred methods we actually have a factory for (case-insensitive match).
            for (String pref : prefs.split(",")) {
                NamedFactory<UserAuth> factory = NamedResource.Utils.findByName(pref, String.CASE_INSENSITIVE_ORDER, authFactories);
                if (factory != null) {
                    clientMethods.add(pref);
                } else {
                    log.debug("Skip unknown prefered authentication method: {}", pref);
                }
            }
        } else {
            // No preference configured: try every configured factory, in registration order.
            for (NamedFactory<UserAuth> factory : authFactories) {
                clientMethods.add(factory.getName());
            }
        }
    }

    @Override
    public ClientSession getSession() {
        return getClientSession();
    }

    @Override
    public ClientSession getClientSession() {
        return session;
    }

    @Override
    public void start() {
        // ignored - authentication is driven by auth() and incoming packets
    }

    /**
     * Kicks off authentication by sending an initial SSH_MSG_USERAUTH_REQUEST with
     * method "none" (RFC 4252); the server's failure reply tells us which methods
     * it will accept.
     *
     * @param identities the identities (passwords/keys) to try; copied defensively
     * @param service    the service to request once authenticated
     * @return the future that completes when authentication succeeds or fails
     * @throws IOException if the request packet cannot be written
     */
    public AuthFuture auth(List<Object> identities, String service) throws IOException {
        log.debug("Start authentication");
        this.identities = new ArrayList<>(identities);
        this.service = service;

        log.debug("Send SSH_MSG_USERAUTH_REQUEST for none");
        String username = session.getUsername();
        // Size estimate only: user + service names plus some slack for the method string.
        Buffer buffer = session.createBuffer(SshConstants.SSH_MSG_USERAUTH_REQUEST, username.length() + service.length() + Integer.SIZE);
        buffer.putString(session.getUsername());
        buffer.putString(service);
        buffer.putString("none");
        session.writePacket(buffer);
        return authFuture;
    }

    /**
     * Dispatches an incoming auth-related packet according to the future's state:
     * already authenticated -> error; done-but-failed -> ignore; banner -> show it;
     * anything else -> feed it to {@link #processUserAuth(Buffer)}.
     */
    @Override
    public void process(int cmd, Buffer buffer) throws Exception {
        if (this.authFuture.isSuccess()) {
            throw new IllegalStateException("UserAuth message delivered to authenticated client");
        } else if (this.authFuture.isDone()) {
            if (log.isDebugEnabled()) {
                log.debug("Ignoring random message - cmd={}", cmd);
            }
            // ignore for now; TODO: random packets
        } else if (cmd == SshConstants.SSH_MSG_USERAUTH_BANNER) {
            String welcome = buffer.getString();
            String lang = buffer.getString();
            log.debug("Welcome banner(lang={}): {}", lang, welcome);
            UserInteraction ui = UserInteraction.Utils.resolveUserInteraction(session);
            if (ui != null) {
                ui.welcome(session, welcome, lang);
            }
        } else {
            // Rewind one byte so processUserAuth can re-read the command code itself.
            buffer.rpos(buffer.rpos() - 1);
            processUserAuth(buffer);
        }
    }

    /**
     * execute one step in user authentication.
     *
     * @param buffer the packet, positioned at the command byte
     * @throws java.io.IOException
     */
    private void processUserAuth(Buffer buffer) throws Exception {
        int cmd = buffer.getUByte();
        if (cmd == SshConstants.SSH_MSG_USERAUTH_SUCCESS) {
            log.debug("SSH_MSG_USERAUTH_SUCCESS Succeeded with {}", userAuth);
            if (userAuth != null) {
                userAuth.destroy();
                userAuth = null;
            }
            session.setAuthenticated();
            session.switchToNextService();
            // Will wake up anyone sitting in waitFor
            authFuture.setAuthed(true);
            return;
        }
        if (cmd == SshConstants.SSH_MSG_USERAUTH_FAILURE) {
            String mths = buffer.getString();
            boolean partial = buffer.getBoolean();
            if (log.isDebugEnabled()) {
                log.debug("Received SSH_MSG_USERAUTH_FAILURE - partial={}, methods={}", partial, mths);
            }
            // A partial success (or the very first failure) refreshes the server's
            // advertised method list and tears down the current method handler.
            if (partial || (serverMethods == null)) {
                serverMethods = Arrays.asList(GenericUtils.split(mths, ','));
                if (userAuth != null) {
                    userAuth.destroy();
                    userAuth = null;
                }
            }
            tryNext();
            return;
        }
        if (userAuth == null) {
            throw new IllegalStateException("Received unknown packet");
        }
        // Method-specific packet: rewind the command byte and let the handler consume it.
        buffer.rpos(buffer.rpos() - 1);
        if (!userAuth.process(buffer)) {
            tryNext();
        }
    }

    /**
     * Advances to the next authentication method the server will accept; marks the
     * future failed when the client's method list is exhausted.
     */
    private void tryNext() throws Exception {
        // Loop until we find something to try
        while (true) {
            if (userAuth == null) {
                // Fresh round: start from the first client method.
                currentMethod = 0;
            } else if (!userAuth.process(null)) {
                // Current method has no more attempts: dispose it and move on.
                userAuth.destroy();
                currentMethod++;
            } else {
                // Current method produced another request; wait for the server's reply.
                return;
            }
            // Skip client methods the server did not advertise.
            while (currentMethod < clientMethods.size() && !serverMethods.contains(clientMethods.get(currentMethod))) {
                currentMethod++;
            }
            if (currentMethod >= clientMethods.size()) {
                // Failure
                authFuture.setAuthed(false);
                return;
            }
            String method = clientMethods.get(currentMethod);
            userAuth = NamedFactory.Utils.create(authFactories, method);
            if (userAuth == null) {
                throw new UnsupportedOperationException("Failed to find a user-auth factory for method=" + method);
            }
            userAuth.init(session, service, identities);
        }
    }

    /**
     * Fails any still-pending auth future before the service is torn down, so callers
     * blocked on it are released.
     */
    @Override
    protected void preClose() {
        if (!authFuture.isDone()) {
            authFuture.setException(new SshException("Session is closed"));
        }
        super.preClose();
    }
}
package org.estatio.dom.lease.invoicing;

import java.util.Arrays;
import java.util.List;

import com.google.common.base.Function;
import com.google.common.collect.Lists;

import org.estatio.dom.asset.Property;
import org.estatio.dom.lease.Lease;
import org.estatio.dom.lease.LeaseItem;
import org.estatio.dom.lease.LeaseItemType;
import org.estatio.dom.lease.LeaseTerm;
import org.estatio.dom.valuetypes.AbstractInterval.IntervalEnding;
import org.estatio.dom.valuetypes.LocalDateInterval;
import org.joda.time.LocalDate;

import org.apache.isis.applib.util.TitleBuffer;

/**
 * Value object bundling the inputs of an invoice-calculation run: the scope
 * (property / leases / lease item / lease term, depending on which constructor
 * is used), the lease item types to consider, the run type, the invoice due
 * date, and the due-date range {@code [startDueDate, nextDueDate)}.
 * <p>
 * The narrower-scope constructors delegate to the wider-scope ones, so a
 * parameters object built from e.g. a {@link LeaseTerm} also carries the
 * enclosing item, lease and property.
 */
public class InvoiceCalculationParameters {

    private Property property;
    private List<Lease> leases;
    private LeaseItem leaseItem;
    private LeaseTerm leaseTerm;
    private LocalDateInterval dueDateRange;
    private List<LeaseItemType> leaseItemTypes;
    private InvoiceRunType invoiceRunType;
    private LocalDate invoiceDueDate;
    // NOTE(review): never assigned anywhere; retained for compatibility, but
    // invoiceDate() returns invoiceDueDate instead — confirm intended semantics.
    private LocalDate invoiceDate;

    /**
     * Base constructor: run type, invoice due date, and the due-date range
     * {@code [startDueDate, nextDueDate)} (end exclusive). Leaves all scope
     * fields (property, leases, item, term) unset.
     */
    public InvoiceCalculationParameters(
            final InvoiceRunType invoiceRunType,
            final LocalDate invoiceDueDate,
            final LocalDate startDueDate,
            final LocalDate nextDueDate) {
        this.dueDateRange = new LocalDateInterval(
                startDueDate, nextDueDate, IntervalEnding.EXCLUDING_END_DATE);
        this.invoiceDueDate = invoiceDueDate;
        this.invoiceRunType = invoiceRunType;
    }

    /**
     * Scope the calculation to a whole {@link Property}.
     */
    public InvoiceCalculationParameters(
            final Property property,
            final List<LeaseItemType> leaseItemTypes,
            final InvoiceRunType invoiceRunType,
            final LocalDate invoiceDueDate,
            final LocalDate startDueDate,
            final LocalDate nextDueDate) {
        this(
                invoiceRunType,
                invoiceDueDate,
                startDueDate,
                nextDueDate);
        this.leaseItemTypes = leaseItemTypes;
        this.property = property;
    }

    /**
     * Derives the single common {@link Property} of a non-empty list of leases.
     *
     * @throws IllegalArgumentException if the list is empty or the leases span
     *                                  more than one property
     */
    private static Property propertyOf(List<Lease> leases) {
        if (leases.isEmpty()) {
            throw new IllegalArgumentException("Must specify at least one lease.");
        }
        Property property = leases.get(0).getProperty();
        for (Lease lease : leases) {
            Property p = lease.getProperty();
            // NOTE(review): identity comparison — assumes all leases come from the
            // same persistence context so equal properties are the same instance;
            // confirm, or switch to equals() if detached entities can appear here.
            if (property != p) {
                throw new IllegalArgumentException("All leases must reside in the same property");
            }
        }
        return property;
    }

    /**
     * Scope the calculation to an explicit list of leases (which must all
     * belong to the same property).
     */
    public InvoiceCalculationParameters(
            final List<Lease> leases,
            final List<LeaseItemType> leaseItemTypes,
            final InvoiceRunType invoiceRunType,
            final LocalDate invoiceDueDate,
            final LocalDate startDueDate,
            final LocalDate nextDueDate) {
        this(
                propertyOf(leases),
                leaseItemTypes,
                invoiceRunType,
                invoiceDueDate,
                startDueDate,
                nextDueDate);
        this.leaseItemTypes = leaseItemTypes;
        this.leases = leases;
    }

    /**
     * Scope the calculation to a single {@link Lease}.
     */
    public InvoiceCalculationParameters(
            final Lease lease,
            final List<LeaseItemType> leaseItemTypes,
            final InvoiceRunType invoiceRunType,
            final LocalDate invoiceDueDate,
            final LocalDate startDueDate,
            final LocalDate nextDueDate) {
        this(
                lease.getProperty(),
                leaseItemTypes,
                invoiceRunType,
                invoiceDueDate,
                startDueDate,
                nextDueDate);
        this.leases = Arrays.asList(lease);
    }

    /**
     * Scope the calculation to a single {@link LeaseItem} (item type taken from
     * the item itself).
     */
    public InvoiceCalculationParameters(
            final LeaseItem leaseItem,
            final InvoiceRunType invoiceRunType,
            final LocalDate invoiceDueDate,
            final LocalDate startDueDate,
            final LocalDate nextDueDate) {
        this(
                leaseItem.getLease(),
                Arrays.asList(leaseItem.getType()),
                invoiceRunType,
                invoiceDueDate,
                startDueDate,
                nextDueDate);
        this.leaseItem = leaseItem;
    }

    /**
     * Scope the calculation to a single {@link LeaseTerm}.
     */
    public InvoiceCalculationParameters(
            final LeaseTerm leaseTerm,
            final InvoiceRunType invoiceRunType,
            final LocalDate invoiceDueDate,
            final LocalDate startDueDate,
            final LocalDate nextDueDate) {
        this(leaseTerm.getLeaseItem(), invoiceRunType, invoiceDueDate, startDueDate, nextDueDate);
        this.leaseTerm = leaseTerm;
    }

    public LocalDateInterval dueDateRange() {
        return dueDateRange;
    }

    public Property property() {
        return property;
    }

    public List<Lease> leases() {
        return leases;
    }

    public List<LeaseItemType> leaseItemTypes() {
        return leaseItemTypes;
    }

    public LeaseItem leaseItem() {
        return leaseItem;
    }

    public LeaseTerm leaseTerm() {
        return leaseTerm;
    }

    public InvoiceRunType invoiceRunType() {
        return invoiceRunType;
    }

    public LocalDate invoiceDueDate() {
        return invoiceDueDate;
    }

    /**
     * NOTE(review): returns {@link #invoiceDueDate()}; the {@code invoiceDate}
     * field is never assigned, so this preserves existing behaviour — confirm
     * whether a distinct invoice date was ever intended.
     */
    public LocalDate invoiceDate() {
        return invoiceDueDate;
    }

    /**
     * Human-readable summary of the parameters. Null-safe: the base constructor
     * leaves {@code property} unset, so it is only dereferenced when present
     * ({@link TitleBuffer#append} skips null components).
     */
    @Override
    public String toString() {
        TitleBuffer tb = new TitleBuffer();
        tb
                .append(" -", property == null ? null : property.getReference())
                .append(" -", leasesToReferences())
                .append(" -", leaseItemTypes())
                .append(" -", invoiceDueDate)
                .append(" -", dueDateRange);
        return tb.toString();
    }

    /** Lazy view of the lease references, or null when no leases are in scope. */
    private String leasesToReferences() {
        if (leases == null) {
            return null;
        }
        return Lists.transform(leases, ReferenceOfLease.INSTANCE).toString();
    }

    /** Extracts a lease's reference; enum singleton so it is stateless and reusable. */
    private enum ReferenceOfLease implements Function<Lease, String> {
        INSTANCE;

        @Override
        public String apply(Lease input) {
            return input.getReference();
        }
    }
}
package candidateMatches;

/**
 * Copyright (c) 2008-2010 Morten Silcowitz.
 *
 * This file is part of the Jinngine physics library
 *
 * Jinngine is published under the GPL license, available
 * at http://www.gnu.org/copyleft/gpl.html.
 */
//package jinngine.util;

import java.util.*;

import candidateMatches.PositionUpdater;

/**
 * Minimum heap implementation. See [Cormen et al 1999] for formal theory.
 * Maintains all elements in a min-heap, such that the minimum element will be
 * the top-most node in the heap at all times. Among many other uses, heaps are
 * ideal for representing priority queues.
 * <p>
 * Layout: classic 1-based array heap. {@code heap.get(0)} is always a
 * {@code null} sentinel so that parent/child index arithmetic
 * ({@code i/2}, {@code 2i}, {@code 2i+1}) works; real elements occupy
 * indices {@code 1..size}. Each element's heap index is mirrored into the
 * element itself via {@link PositionUpdater#setHeapPos(int)} so callers can
 * re-prioritize in O(lg n).
 */
public class MinHeap {

    // Number of real elements (the index-0 sentinel is not counted).
    private int size;
    // Backing list; heap.get(0) is a null sentinel, elements live at 1..size.
    final protected List<Node> heap;
    // Orders the elements; smallest-first per this comparator.
    final private Comparator<PositionUpdater> comparator;

    /** Pairs an element with its current 1-based index in {@link #heap}. */
    protected class Node {
        public PositionUpdater element;
        public int position;
    }

    /**
     * Create a new heap with an initial capacity hint.
     *
     * @param comparator a comparator that orders the elements
     * @param maxSize    expected maximum number of elements (capacity hint only)
     */
    public MinHeap(Comparator<PositionUpdater> comparator, int maxSize) {
        size = 0;
        heap = new ArrayList<Node>(maxSize + 1); // +1 for the 1-based sentinel
        heap.add(null); // dummy entry for 1-based indexing
        this.comparator = comparator;
    }

    /**
     * Create a new heap with default capacity.
     *
     * @param comparator a comparator that orders the elements
     */
    public MinHeap(Comparator<PositionUpdater> comparator) {
        size = 0;
        heap = new ArrayList<Node>();
        heap.add(null); // dummy entry for 1-based indexing
        this.comparator = comparator;
    }

    /**
     * Insert element into the heap. O(lg n) where n is the number of
     * elements/nodes in the heap.
     *
     * @param element new element to be inserted
     */
    public void insert(final PositionUpdater element) {
        size++;
        Node node = new Node();
        node.element = element;
        node.position = size;
        node.element.setHeapPos(size);
        heap.add(node.position, node); // index == list size here, i.e. an append
        decreaseKey(node);             // sift up to restore the heap invariant
    }

    /**
     * Remove all elements. O(1).
     * <p>
     * Fix: the previous version cleared the sentinel as well, which made the
     * next {@code insert()} throw {@link IndexOutOfBoundsException} when it
     * tried to add at index 1 of an empty list. The sentinel is now restored.
     */
    public final void clear() {
        heap.clear();
        heap.add(null); // restore the 1-based sentinel
        size = 0;
    }

    /**
     * Return a reference to the top-most (minimum) element on the heap without
     * removing it. O(1).
     *
     * @return reference to top-most element of heap
     * @throws NoSuchElementException if the heap is empty (was previously an
     *                                unchecked index error — the old TODO)
     */
    public final PositionUpdater top() {
        if (size < 1) {
            throw new NoSuchElementException("heap is empty");
        }
        return heap.get(1).element;
    }

    /**
     * Pop the minimum element off the heap. O(lg n) where n is the number of
     * elements in the heap.
     *
     * @throws NoSuchElementException if the heap is empty (via {@link #top()})
     */
    public PositionUpdater pop() {
        PositionUpdater returnNode = top();
        exchange(1, size);  // move last element to the root
        heap.remove(size);  // drop the old minimum
        size--;
        // if any elements left in heap, re-establish the heap property from the root
        if (size > 0) {
            minHeapify(heap.get(1));
        }
        return returnNode;
    }

    /** @return the number of elements currently in the heap */
    public final int size() {
        return size;
    }

    /**
     * Sift the given node up towards the root until its parent is no larger.
     * Called after an insert or after a node's key has decreased. O(lg n).
     */
    public final void decreaseKey(final Node node) {
        int index = node.position;
        while (index > 1
                && comparator.compare(heap.get(parent(index)).element, heap.get(index).element) >= 0) {
            exchange(index, parent(index));
            index = parent(index);
        }
    }

    /**
     * Restore the heap property after the key of the node at {@code nodePos}
     * has increased. Called after the actual increase.
     */
    public void increaseKey(int nodePos) {
        minHeapify(heap.get(nodePos));
    }

    /** Sift the given node down until both children are no smaller. O(lg n). */
    protected final void minHeapify(final Node node) {
        int smallest;
        int index = node.position;
        int left = left(index);
        int right = right(index);

        if (left <= size
                && comparator.compare(heap.get(left).element, heap.get(index).element) <= 0) {
            smallest = left;
        } else {
            smallest = index;
        }
        if (right <= size
                && comparator.compare(heap.get(right).element, heap.get(smallest).element) <= 0) {
            smallest = right;
        }
        if (smallest != index) {
            exchange(index, smallest);
            minHeapify(heap.get(smallest));
        }
    }

    /**
     * Swap the nodes at the two indices, keeping both the nodes' cached
     * positions and the elements' mirrored heap positions in sync.
     */
    private final void exchange(final int index, final int index2) {
        Node temp = heap.get(index);
        temp.position = index2;
        temp.element.setHeapPos(index2);

        Node temp2 = heap.get(index2);
        temp2.position = index;
        temp2.element.setHeapPos(index);

        heap.set(index, temp2);
        heap.set(index2, temp);
    }

    private final int parent(final int i) {
        return i / 2;
    }

    private final int left(final int i) {
        return 2 * i;
    }

    private final int right(final int i) {
        return 2 * i + 1;
    }

    /**
     * Returns an iterator that iterates over all elements of the heap, in no
     * particular order.
     * <p>
     * Fix: the previous version iterated the backing list from index 0 and so
     * dereferenced the {@code null} sentinel on the first {@code next()},
     * throwing {@link NullPointerException}. Iteration now starts at index 1.
     *
     * @return iterator over the elements; {@code remove()} is a no-op
     */
    public final Iterator<PositionUpdater> iterator() {
        return new Iterator<PositionUpdater>() {
            // start at index 1 so the null sentinel is never visited
            private final Iterator<Node> iterator = heap.listIterator(1);

            public boolean hasNext() {
                return iterator.hasNext();
            }

            public PositionUpdater next() {
                return iterator.next().element;
            }

            public void remove() {
                // intentionally a no-op: structural removal would break the heap order
            }
        };
    }

    /** Space-separated {@code index:element} dump of positions 1..size, for debugging. */
    public String toString() {
        StringBuilder sb = new StringBuilder();
        for (int i = 1; i <= this.size; i++) {
            sb.append(Integer.toString(i)).append(":").append(heap.get(i).element.toString()).append(" ");
        }
        return sb.toString();
    }
}
package org.accela.file.collection.impl.testPerformanceForTptp;

import java.io.File;
import java.io.IOException;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.NoSuchElementException;

import org.accela.file.collection.ElementList;
import org.accela.file.collection.ListElementIterator;
import org.accela.file.collection.ListIteratorMove;
import org.accela.file.collection.util.Node;
import org.accela.file.common.DataFormatException;
import org.accela.file.common.StructureCorruptedException;
import org.accela.file.record.RecordPool;
import org.accela.file.record.impl.ObjectRecordPool;

/**
 * Randomized consistency/performance test harness for {@link ElementList}
 * implementations. Performs random iterator and list mutations while
 * mirroring them into an in-memory {@link LinkedList}, then asserts that the
 * file-backed list agrees with the mirror (by get/indexOf/iterator traversal)
 * and, optionally, that the on-disk doubly-linked node chain is intact.
 * <p>
 * Relies heavily on {@code assert}: run with {@code -ea}.
 * NOTE(review): {@code nodeDelegate} and {@code rand} are not declared here —
 * presumably inherited from {@code TestList}; confirm against that class.
 */
public abstract class TestElementList<T> extends TestList {
    // The file-backed list under test; created by open(), released by close().
    protected ElementList<T> list = null;

    public TestElementList() {
        super(new File("testList.txt"), new File("sina.txt"));
    }

    @Override
    protected void close() throws IOException {
        if (list != null) {
            list.close();
        }
    }

    @Override
    protected void open(boolean restore) throws IOException {
        this.list = createElementList(restore);
    }

    /** Factory for the concrete list implementation under test. */
    protected abstract ElementList<T> createElementList(boolean restore) throws IOException;

    /** Deterministically build an element from a seed value. */
    protected abstract T gen(Long i);

    /** Build an oversized element (used by subclasses for edge cases). */
    protected abstract T genLarge();

    protected abstract RecordPool getPool();

    /** Record key of the fake head sentinel node of the on-disk chain. */
    protected abstract long getHeadKey();

    /** Record key of the fake tail sentinel node of the on-disk chain. */
    protected abstract long getTailKey();

    /** Null-safe equality used for comparing list elements with the mirror. */
    protected boolean isEqual(T a, T b) {
        if (null == a) {
            return null == b;
        } else {
            return a.equals(b);
        }
    }

    /** Reads a raw on-disk node by record key, bypassing the list API. */
    private Node getNode(long key) throws IOException {
        try {
            return nodeDelegate.readBytes(new ObjectRecordPool(getPool())
                    .getIfContains(key));
        } catch (DataFormatException ex) {
            // A format error here means the test fixture itself is corrupt.
            ex.printStackTrace();
            assert (false);
            throw new RuntimeException(ex);
        }
    }

    /** Whether the raw node-chain walk in connectivity() should run at all. */
    protected abstract boolean isCheckConnectivity();

    /**
     * Walks the on-disk doubly-linked node chain head->tail and tail->head,
     * asserting both sentinels are fake, both walks visit the same sequence,
     * and interior nodes are real.
     *
     * @return element keys in forward order (sentinels stripped), or null when
     *         connectivity checking is disabled
     */
    private List<Long> connectivity() throws IOException {
        if (!isCheckConnectivity()) {
            return null;
        }
        long headKey = getHeadKey();
        long tailKey = getTailKey();
        assert (headKey != 0 && tailKey != 0);
        assert (headKey != tailKey);
        Node headNode = getNode(headKey);
        Node tailNode = getNode(tailKey);
        assert (headNode.getPrev() == 0);
        assert (tailNode.getNext() == 0);
        assert (headNode.isFake());
        assert (tailNode.isFake());

        // Forward walk: head sentinel to tail sentinel via next pointers.
        long curKey = headKey;
        Node curNode = headNode;
        List<Long> headList = new LinkedList<Long>();
        headList.add(curNode.getElement());
        while (curNode.getNext() != 0) {
            if (curKey != headKey && curKey != tailKey) {
                assert (!curNode.isFake());
            }
            curKey = curNode.getNext();
            curNode = getNode(curKey);
            headList.add(curNode.getElement());
        }
        assert (curKey == tailKey);

        // Backward walk: tail sentinel to head sentinel via prev pointers.
        curKey = tailKey;
        curNode = tailNode;
        List<Long> tailList = new LinkedList<Long>();
        tailList.add(curNode.getElement());
        while (curNode.getPrev() != 0) {
            if (curKey != headKey && curKey != tailKey) {
                assert (!curNode.isFake());
            }
            curKey = curNode.getPrev();
            curNode = getNode(curKey);
            tailList.add(curNode.getElement());
        }
        assert (curKey == headKey);

        // Both directions must agree, and the two sentinels are stripped off.
        Collections.reverse(tailList);
        assert (headList.equals(tailList));
        assert (headList.size() >= 2);
        headList.remove(0);
        headList.remove(headList.size() - 1);
        return headList;
    }

    /**
     * Full consistency check of the list against the in-memory mirror:
     * size, raw node chain, random access, indexOf, and iterator traversal.
     */
    private void consistency(List<T> elements) throws IOException,
            DataFormatException, StructureCorruptedException {
        assert (list.size() == elements.size());
        List<Long> innerElements = connectivity();
        assert (null == innerElements || innerElements.equals(elements));
        consistencyOfGet(elements);
        consistencyOfIdx(elements);
        consistencyOfItr(elements);
        assert (list.size() == elements.size());
    }

    /** Every index must return the same element as the mirror via get(). */
    private void consistencyOfGet(List<T> elements) throws IOException,
            StructureCorruptedException {
        for (long i = 0; i < list.size(); i++) {
            try {
                assert (isEqual(list.get(i), elements.get((int) i)));
            } catch (DataFormatException ex) {
                ex.printStackTrace();
                assert (false);
            }
        }
    }

    /**
     * indexOf must eventually report each element at its mirror position,
     * scanning past earlier duplicates.
     */
    private void consistencyOfIdx(List<T> elements) throws IOException,
            DataFormatException, StructureCorruptedException {
        for (int i = 0; i < elements.size(); i++) {
            T element = elements.get(i);
            long lastIdx = -1;
            do {
                lastIdx = list.indexOf(lastIdx + 1, element);
                if (lastIdx == i) {
                    break;
                }
            } while (lastIdx != -1);
            assert (lastIdx == i);
        }
    }

    /**
     * Exercise iterators starting at a handful of boundary positions:
     * start, end, 1, size-1, and the middle.
     */
    private void consistencyOfItr(List<T> elements) throws IOException,
            DataFormatException, StructureCorruptedException {
        int[] idxes = new int[] { 0, (int) list.size(),
                Math.min((int) list.size(), 1),
                Math.max(0, (int) (list.size() - 1)), (int) list.size() / 2, };
        for (int i = 0; i < idxes.length; i++) {
            ListElementIterator<T> itr = list.iterator(idxes[i]);
            consistencyItrSingle(idxes[i], itr, elements);
        }
    }

    /**
     * For one iterator: check nextIndex/prevIndex at the start position, then
     * walk fully backward, fully forward, and backward again, comparing every
     * element and the iterator's move bookkeeping against the mirror.
     */
    private void consistencyItrSingle(int idx, ListElementIterator<T> itr,
            List<T> elements) throws IOException, DataFormatException,
            StructureCorruptedException {
        assert (itr.nextIndex() == idx);
        assert (itr.prevIndex() == idx - 1);
        // At either end, no move has happened yet: getLast() must fail.
        if (0 == idx || list.size() == idx) {
            assert (itr.getLastMove() == ListIteratorMove.none);
            try {
                itr.getLast();
                assert (false);
            } catch (Exception ex) {
                assert (ex instanceof NoSuchElementException);
            }
        }

        int curIdx = idx;
        // Walk backward to the front.
        while (itr.hasPrev()) {
            assert (itr.prevIndex() == curIdx - 1);
            T prevElement = itr.prev();
            assert (isEqual(prevElement, elements.get(curIdx - 1)));
            assert (itr.getLastMove() == ListIteratorMove.prev);
            assert (isEqual(itr.getLast(), prevElement));
            curIdx--;
        }
        assert (curIdx == 0);
        assert (itr.prevIndex() == -1);
        assert (itr.nextIndex() == 0);

        // Walk forward to the end.
        while (itr.hasNext()) {
            assert (itr.nextIndex() == curIdx);
            T nextElement = itr.next();
            assert (isEqual(nextElement, elements.get(curIdx)));
            assert (itr.getLastMove() == ListIteratorMove.next);
            assert (isEqual(itr.getLast(), nextElement));
            curIdx++;
        }
        assert (curIdx == list.size());
        assert (itr.prevIndex() == list.size() - 1);
        assert (itr.nextIndex() == list.size());

        // And backward once more to the front.
        while (itr.hasPrev()) {
            assert (itr.prevIndex() == curIdx - 1);
            T prevElement = itr.prev();
            assert (isEqual(prevElement, elements.get(curIdx - 1)));
            assert (itr.getLastMove() == ListIteratorMove.prev);
            assert (isEqual(itr.getLast(), prevElement));
            curIdx--;
        }
        assert (curIdx == 0);
        assert (itr.prevIndex() == -1);
        assert (itr.nextIndex() == 0);
    }

    /**
     * One random iterator step: randomly move next/prev, then randomly set,
     * add and/or remove through the iterator, mirroring each mutation into
     * {@code elements}, and finish with a full consistency check.
     */
    private void randomItrTest(ListElementIterator<T> itr, List<T> elements)
            throws IOException, DataFormatException,
            StructureCorruptedException {
        boolean next = rand.nextBoolean();
        boolean add = rand.nextDouble() > 0.3;
        boolean remove = rand.nextDouble() > 0.6;
        boolean set = rand.nextDouble() > 0.3;

        // next or previous
        long lastIdx = -1;
        if (next) {
            if (!itr.hasNext()) {
                assert (itr.nextIndex() == list.size());
            } else {
                long elementIdx = itr.nextIndex();
                T element = itr.next();
                lastIdx = elementIdx;
                assert (isEqual(element, elements.get((int) elementIdx)));
            }
        } else {
            if (!itr.hasPrev()) {
                assert (itr.prevIndex() == -1);
            } else {
                long elementIdx = itr.prevIndex();
                T element = itr.prev();
                lastIdx = elementIdx;
                assert (isEqual(element, elements.get((int) elementIdx)));
            }
        }

        // set (only valid after an actual move)
        if (lastIdx != -1 && set) {
            T element = genRndElement();
            itr.set(element);
            elements.set((int) lastIdx, element);
            assert (isEqual(element, list.get(lastIdx)));
        }

        // add
        if (add) {
            for (int i = 0; i < 2; i++) {
                T element = genRndElement();
                long oldNextIdx = itr.nextIndex();
                itr.add(element);
                elements.add((int) oldNextIdx, element);
                assert (isEqual(element, list.get(oldNextIdx)));
                // System.out.println("add element " + element);
            }
        }

        // remove (skipped after add, since add invalidates the last move)
        if (lastIdx != -1 && !add && remove) {
            itr.remove();
            elements.remove((int) lastIdx);
        }

        // consistency
        consistency(elements);
    }

    /**
     * Random index into the list; when {@code lessThanSize} is false the index
     * may equal size() (valid as an insertion point).
     */
    private long genRndIdx(boolean lessThanSize) {
        long idx = 0;
        if (lessThanSize) {
            idx = rand.nextInt((int) list.size());
        } else {
            idx = rand.nextInt((int) list.size() + 1);
        }
        return idx;
    }

    /** Random element; ~20% of the time uses seed 0 to provoke duplicates. */
    private T genRndElement() {
        long element = rand.nextDouble() > 0.2 ? Math.abs(rand.nextLong()) : 0;
        return gen(element);
    }

    /**
     * One random list-API step: two random inserts, one random remove, one
     * random set — mirrored into {@code elements} — then a consistency check.
     */
    private void randomListTest(List<T> elements) throws IOException,
            DataFormatException, StructureCorruptedException {
        long idx = -1;
        T element = null;
        for (int i = 0; i < 2; i++) {
            idx = genRndIdx(false);
            element = genRndElement();
            list.insert(idx, element);
            elements.add((int) idx, element);
            assert (isEqual(list.get(idx), element));
        }

        idx = genRndIdx(true);
        list.remove(idx);
        elements.remove((int) idx);

        idx = genRndIdx(true);
        element = genRndElement();
        list.set(idx, element);
        elements.set((int) idx, element);

        consistency(elements);
    }

    /**
     * Entry point: runs TEST_SIZE random iterator steps followed by TEST_SIZE
     * random list-API steps, each step ending in a full consistency check.
     */
    public void testPerformance() throws IOException, DataFormatException,
            StructureCorruptedException {
        final int TEST_SIZE = 50;

        List<T> elements = new LinkedList<T>();
        ListElementIterator<T> itr = list.iterator(0);
        for (int i = 0; i < TEST_SIZE; i++) {
            randomItrTest(itr, elements);
        }

        for (int i = 0; i < TEST_SIZE; i++) {
            randomListTest(elements);
        }
    }
}
/*
 * Copyright 2000-2016 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.remote;

import com.intellij.execution.CommandLineUtil;
import com.intellij.execution.TaskExecutor;
import com.intellij.execution.process.ProcessAdapter;
import com.intellij.execution.process.ProcessEvent;
import com.intellij.execution.process.ProcessOutputTypes;
import com.intellij.execution.process.ProcessWaitFor;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.util.Consumer;
import com.intellij.util.concurrency.AppExecutorUtil;
import com.intellij.util.io.BaseOutputReader;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.charset.Charset;
import java.util.concurrent.Future;

/**
 * Process handler for a {@link RemoteProcess}: pumps the remote process's
 * stdout/stderr into listener notifications and forwards termination. Output
 * pumping is done by two {@link RemoteOutputReader}s that poll the remote
 * streams on pooled threads.
 *
 * @author traff
 */
public class BaseRemoteProcessHandler<T extends RemoteProcess> extends AbstractRemoteProcessHandler<T> implements TaskExecutor {
  private static final Logger LOG = Logger.getInstance(BaseRemoteProcessHandler.class);

  // Presentable command line; echoed to listeners on startNotify().
  protected final String myCommandLine;
  // Waits for process termination and invokes the termination callback.
  protected final ProcessWaitFor myWaitFor;
  // Charset for decoding process output; null falls back to caller's handling.
  protected final Charset myCharset;
  protected T myProcess;

  public BaseRemoteProcessHandler(@NotNull T process, /*@NotNull*/ String commandLine, @Nullable Charset charset) {
    myProcess = process;
    myCommandLine = commandLine;
    myWaitFor = new ProcessWaitFor(process, this, CommandLineUtil.extractPresentableName(commandLine));
    myCharset = charset;
    // Tolerated for backward compatibility, but flagged: an empty command line
    // produces an empty SYSTEM line and an unhelpful wait-for thread name.
    if (StringUtil.isEmpty(commandLine)) {
      LOG.warn(new IllegalArgumentException("Must specify non-empty 'commandLine' parameter"));
    }
  }

  @Override
  public T getProcess() {
    return myProcess;
  }

  @Override
  protected void destroyProcessImpl() {
    // Prefer killing the whole remote process tree; fall back to plain destroy.
    if (!myProcess.killProcessTree()) {
      baseDestroyProcessImpl();
    }
  }

  /**
   * Echoes the command line, then — on the startNotified event — wires up the
   * stdout/stderr readers and arranges for termination to be reported only
   * after both readers have drained.
   */
  @Override
  public void startNotify() {
    notifyTextAvailable(myCommandLine + '\n', ProcessOutputTypes.SYSTEM);

    addProcessListener(new ProcessAdapter() {
      @Override
      public void startNotified(final ProcessEvent event) {
        try {
          final RemoteOutputReader stdoutReader =
            new RemoteOutputReader(myProcess.getInputStream(), getCharset(), myProcess, myCommandLine) {
              @Override
              protected void onTextAvailable(@NotNull String text) {
                notifyTextAvailable(text, ProcessOutputTypes.STDOUT);
              }

              @NotNull
              @Override
              protected Future<?> executeOnPooledThread(@NotNull Runnable runnable) {
                return BaseRemoteProcessHandler.executeOnPooledThread(runnable);
              }
            };

          final RemoteOutputReader stderrReader =
            new RemoteOutputReader(myProcess.getErrorStream(), getCharset(), myProcess, myCommandLine) {
              @Override
              protected void onTextAvailable(@NotNull String text) {
                notifyTextAvailable(text, ProcessOutputTypes.STDERR);
              }

              @NotNull
              @Override
              protected Future<?> executeOnPooledThread(@NotNull Runnable runnable) {
                return BaseRemoteProcessHandler.executeOnPooledThread(runnable);
              }
            };

          myWaitFor.setTerminationCallback(new Consumer<Integer>() {
            @Override
            public void consume(Integer exitCode) {
              try {
                // Drain remaining output before reporting termination so no
                // text is delivered after the terminated notification.
                try {
                  stderrReader.waitFor();
                  stdoutReader.waitFor();
                }
                catch (InterruptedException ignore) {
                }
              }
              finally {
                onOSProcessTerminated(exitCode);
              }
            }
          });
        }
        finally {
          // One-shot listener: only needed to observe startNotified.
          removeProcessListener(this);
        }
      }
    });

    super.startNotify();
  }

  protected void onOSProcessTerminated(final int exitCode) {
    notifyProcessTerminated(exitCode);
  }

  protected void baseDestroyProcessImpl() {
    try {
      closeStreams();
    }
    finally {
      doDestroyProcess();
    }
  }

  protected void doDestroyProcess() {
    getProcess().destroy();
  }

  @Override
  protected void detachProcessImpl() {
    // Detach asynchronously: close our end, stop waiting, then notify.
    final Runnable runnable = new Runnable() {
      @Override
      public void run() {
        closeStreams();

        myWaitFor.detach();
        notifyProcessDetached();
      }
    };

    executeOnPooledThread(runnable);
  }

  protected void closeStreams() {
    // Only stdin is closed here; stdout/stderr are owned by the readers.
    try {
      myProcess.getOutputStream().close();
    }
    catch (IOException e) {
      LOG.error(e);
    }
  }

  @Override
  public boolean detachIsDefault() {
    return false;
  }

  @Override
  public OutputStream getProcessInput() {
    return myProcess.getOutputStream();
  }

  @Nullable
  public Charset getCharset() {
    return myCharset;
  }

  @NotNull
  private static Future<?> executeOnPooledThread(@NotNull Runnable task) {
    return AppExecutorUtil.getAppExecutorService().submit(task);
  }

  @NotNull
  @Override
  public Future<?> executeTask(@NotNull Runnable task) {
    return executeOnPooledThread(task);
  }

  /**
   * Output reader that polls a remote stream until the remote process
   * disconnects or the reader is stopped. {@link #waitFor()} spins (100 ms
   * sleep) until {@link #doRun()} marks the reader closed.
   */
  private abstract static class RemoteOutputReader extends BaseOutputReader {
    @NotNull
    private final RemoteProcess myRemoteProcess;
    // Guarded by synchronized accessors; true once doRun() has finished.
    private boolean myClosed;

    RemoteOutputReader(@NotNull InputStream inputStream,
                       Charset charset,
                       @NotNull RemoteProcess remoteProcess,
                       @NotNull String commandLine) {
      super(inputStream, charset);

      myRemoteProcess = remoteProcess;

      start(CommandLineUtil.extractPresentableName(commandLine));
    }

    @Override
    protected void doRun() {
      try {
        setClosed(false);
        while (true) {
          final boolean read = readAvailable();

          if (myRemoteProcess.isDisconnected()) {
            myReader.close();
            break;
          }

          if (isStopped) {
            break;
          }

          Thread.sleep(mySleepingPolicy.getTimeToSleep(read)); // give other threads a chance
        }
      }
      catch (InterruptedException ignore) {
      }
      catch (IOException e) {
        LOG.warn(e);
      }
      catch (Exception e) {
        // NOTE(review): unreachable-looking duplicate of the IOException arm —
        // kept as-is; it catches non-IO runtime failures from readAvailable().
        LOG.warn(e);
      }
      finally {
        setClosed(true);
      }
    }

    protected synchronized void setClosed(boolean closed) {
      myClosed = closed;
    }

    @Override
    public void waitFor() throws InterruptedException {
      // Busy-wait with 100 ms sleeps until doRun() completes.
      while (!isClosed()) {
        Thread.sleep(100);
      }
    }

    private synchronized boolean isClosed() {
      return myClosed;
    }
  }

  @Nullable
  public String getCommandLine() {
    return myCommandLine;
  }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.search.aggregations.metrics;

import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.InternalAggregation;

import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.Objects;

/**
 * Result of the {@code stats} metric aggregation: holds {@code count}, {@code sum},
 * {@code min} and {@code max}; {@code avg} is derived as {@code sum / count}.
 * Subclasses (e.g. extended stats) hook in via {@link #writeOtherStatsTo(StreamOutput)}
 * and {@link #otherStatsToXContent(XContentBuilder, Params)}.
 */
public class InternalStats extends InternalNumericMetricsAggregation.MultiValue implements Stats {
    /** Named sub-metrics addressable through {@link #value(String)} / {@code valueAsString(String)}. */
    enum Metrics {

        count, sum, min, max, avg;

        // Maps a metric name to its enum constant; throws IllegalArgumentException for unknown names.
        public static Metrics resolve(String name) {
            return Metrics.valueOf(name);
        }
    }

    protected final long count;
    protected final double min;
    protected final double max;
    protected final double sum;

    /**
     * Builds an already-computed stats result.
     *
     * @param name      aggregation name
     * @param count     number of collected values
     * @param sum       sum of collected values
     * @param min       minimum collected value
     * @param max       maximum collected value
     * @param formatter how numeric values are rendered in {@code *_as_string} fields
     * @param metadata  user-supplied aggregation metadata
     */
    public InternalStats(String name, long count, double sum, double min, double max, DocValueFormat formatter,
                         Map<String, Object> metadata) {
        super(name, metadata);
        this.count = count;
        this.sum = sum;
        this.min = min;
        this.max = max;
        this.format = formatter;
    }

    /**
     * Read from a stream.
     */
    public InternalStats(StreamInput in) throws IOException {
        super(in);
        // Wire order must mirror doWriteTo exactly: format, count, min, max, sum.
        format = in.readNamedWriteable(DocValueFormat.class);
        count = in.readVLong();
        min = in.readDouble();
        max = in.readDouble();
        sum = in.readDouble();
    }

    @Override
    protected final void doWriteTo(StreamOutput out) throws IOException {
        // Wire order must mirror the StreamInput constructor; subclass fields are appended last.
        out.writeNamedWriteable(format);
        out.writeVLong(count);
        out.writeDouble(min);
        out.writeDouble(max);
        out.writeDouble(sum);
        writeOtherStatsTo(out);
    }

    // Extension point for subclasses that serialize additional statistics; no-op here.
    protected void writeOtherStatsTo(StreamOutput out) throws IOException {
    }

    @Override
    public String getWriteableName() {
        return StatsAggregationBuilder.NAME;
    }

    @Override
    public long getCount() {
        return count;
    }

    @Override
    public double getMin() {
        return min;
    }

    @Override
    public double getMax() {
        return max;
    }

    @Override
    public double getAvg() {
        // NOTE(review): yields NaN when count == 0; doXContentBody guards for that case.
        return sum / count;
    }

    @Override
    public double getSum() {
        return sum;
    }

    @Override
    public String getMinAsString() {
        return valueAsString(Metrics.min.name());
    }

    @Override
    public String getMaxAsString() {
        return valueAsString(Metrics.max.name());
    }

    @Override
    public String getAvgAsString() {
        return valueAsString(Metrics.avg.name());
    }

    @Override
    public String getSumAsString() {
        return valueAsString(Metrics.sum.name());
    }

    /**
     * Returns the raw double value of the named sub-metric.
     *
     * @throws IllegalArgumentException when {@code name} is not a known metric
     */
    @Override
    public double value(String name) {
        Metrics metrics = Metrics.valueOf(name);
        switch (metrics) {
            case min: return this.min;
            case max: return this.max;
            case avg: return this.getAvg();
            case count: return this.count;
            case sum: return this.sum;
            default:
                throw new IllegalArgumentException("Unknown value [" + name + "] in common stats aggregation");
        }
    }

    /**
     * Merges per-shard stats: counts are added, min/max folded, and the sum is
     * accumulated with Kahan (compensated) summation for better float accuracy.
     */
    @Override
    public InternalStats reduce(List<InternalAggregation> aggregations, ReduceContext reduceContext) {
        long count = 0;
        // Identity elements: +inf for min, -inf for max, so any real value replaces them.
        double min = Double.POSITIVE_INFINITY;
        double max = Double.NEGATIVE_INFINITY;
        CompensatedSum kahanSummation = new CompensatedSum(0, 0);
        for (InternalAggregation aggregation : aggregations) {
            InternalStats stats = (InternalStats) aggregation;
            count += stats.getCount();
            min = Math.min(min, stats.getMin());
            max = Math.max(max, stats.getMax());
            // Compute the sum of double values with Kahan summation algorithm which is more
            // accurate than naive summation.
            kahanSummation.add(stats.getSum());
        }
        return new InternalStats(name, count, kahanSummation.value(), min, max, format, getMetadata());
    }

    /** JSON field names used by {@link #doXContentBody}. */
    static class Fields {
        public static final String COUNT = "count";
        public static final String MIN = "min";
        public static final String MIN_AS_STRING = "min_as_string";
        public static final String MAX = "max";
        public static final String MAX_AS_STRING = "max_as_string";
        public static final String AVG = "avg";
        public static final String AVG_AS_STRING = "avg_as_string";
        public static final String SUM = "sum";
        public static final String SUM_AS_STRING = "sum_as_string";
    }

    @Override
    public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
        builder.field(Fields.COUNT, count);
        if (count != 0) {
            builder.field(Fields.MIN, min);
            builder.field(Fields.MAX, max);
            builder.field(Fields.AVG, getAvg());
            builder.field(Fields.SUM, sum);
            // Formatted variants are only emitted for non-RAW formats.
            if (format != DocValueFormat.RAW) {
                builder.field(Fields.MIN_AS_STRING, format.format(min));
                builder.field(Fields.MAX_AS_STRING, format.format(max));
                builder.field(Fields.AVG_AS_STRING, format.format(getAvg()));
                builder.field(Fields.SUM_AS_STRING, format.format(sum));
            }
        } else {
            // No documents collected: min/max/avg are undefined (infinities / NaN) -> null; sum is 0.
            builder.nullField(Fields.MIN);
            builder.nullField(Fields.MAX);
            builder.nullField(Fields.AVG);
            builder.field(Fields.SUM, 0.0d);
        }
        otherStatsToXContent(builder, params);
        return builder;
    }

    // Extension point for subclasses emitting additional statistics; no-op here.
    protected XContentBuilder otherStatsToXContent(XContentBuilder builder, Params params) throws IOException {
        return builder;
    }

    @Override
    public int hashCode() {
        return Objects.hash(super.hashCode(), count, min, max, sum);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) return true;
        if (obj == null || getClass() != obj.getClass()) return false;
        if (super.equals(obj) == false) return false;
        InternalStats other = (InternalStats) obj;
        // Double.compare treats NaN consistently, unlike ==.
        return count == other.count &&
               Double.compare(min, other.min) == 0 &&
               Double.compare(max, other.max) == 0 &&
               Double.compare(sum, other.sum) == 0;
    }
}
/*
 * Copyright 2014-2016 Groupon, Inc
 * Copyright 2014-2016 The Billing Project, LLC
 *
 * The Billing Project licenses this file to you under the Apache License, version 2.0
 * (the "License"); you may not use this file except in compliance with the
 * License. You may obtain a copy of the License at:
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
package org.killbill.billing.plugin.adyen.dao;

import java.io.IOException;
import java.math.BigDecimal;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.UUID;

import javax.annotation.Nullable;
import javax.sql.DataSource;

import org.joda.time.DateTime;
import org.jooq.UpdateSetMoreStep;
import org.jooq.impl.DSL;
import org.killbill.billing.catalog.api.Currency;
import org.killbill.billing.payment.api.PluginProperty;
import org.killbill.billing.payment.api.TransactionType;
import org.killbill.billing.plugin.adyen.api.AdyenPaymentPluginApi;
import org.killbill.billing.plugin.adyen.client.model.NotificationItem;
import org.killbill.billing.plugin.adyen.client.model.PaymentModificationResponse;
import org.killbill.billing.plugin.adyen.client.model.PaymentServiceProviderResult;
import org.killbill.billing.plugin.adyen.client.model.PurchaseResult;
import org.killbill.billing.plugin.adyen.dao.gen.tables.AdyenPaymentMethods;
import org.killbill.billing.plugin.adyen.dao.gen.tables.AdyenResponses;
import org.killbill.billing.plugin.adyen.dao.gen.tables.records.AdyenHppRequestsRecord;
import org.killbill.billing.plugin.adyen.dao.gen.tables.records.AdyenNotificationsRecord;
import org.killbill.billing.plugin.adyen.dao.gen.tables.records.AdyenPaymentMethodsRecord;
import org.killbill.billing.plugin.adyen.dao.gen.tables.records.AdyenResponsesRecord;
import org.killbill.billing.plugin.api.PluginProperties;
import org.killbill.billing.plugin.dao.payment.PluginPaymentDao;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Functions;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;

import static org.killbill.billing.plugin.adyen.api.AdyenPaymentPluginApi.PROPERTY_PSP_REFERENCE;
import static org.killbill.billing.plugin.adyen.client.model.PurchaseResult.ADYEN_CALL_ERROR_STATUS;
import static org.killbill.billing.plugin.adyen.client.model.PurchaseResult.EXCEPTION_CLASS;
import static org.killbill.billing.plugin.adyen.client.model.PurchaseResult.EXCEPTION_MESSAGE;
import static org.killbill.billing.plugin.adyen.dao.gen.tables.AdyenHppRequests.ADYEN_HPP_REQUESTS;
import static org.killbill.billing.plugin.adyen.dao.gen.tables.AdyenNotifications.ADYEN_NOTIFICATIONS;
import static org.killbill.billing.plugin.adyen.dao.gen.tables.AdyenPaymentMethods.ADYEN_PAYMENT_METHODS;
import static org.killbill.billing.plugin.adyen.dao.gen.tables.AdyenResponses.ADYEN_RESPONSES;

/**
 * jOOQ-backed DAO for the Adyen payment plugin tables: payment methods, HPP
 * (Hosted Payment Pages) requests, gateway responses and webhook notifications.
 * All methods borrow a connection from the injected {@link DataSource} via the
 * inherited {@code execute(Connection, WithConnectionCallback)} helper.
 */
public class AdyenDao extends PluginPaymentDao<AdyenResponsesRecord, AdyenResponses, AdyenPaymentMethodsRecord, AdyenPaymentMethods> {

    // Shared, thread-safe JSON mapper used to (de)serialize the additional_data columns.
    private static final ObjectMapper objectMapper = new ObjectMapper();
    private static final Joiner JOINER = Joiner.on(",");

    public AdyenDao(final DataSource dataSource) throws SQLException {
        super(AdyenResponses.ADYEN_RESPONSES, AdyenPaymentMethods.ADYEN_PAYMENT_METHODS, dataSource);
    }

    // Payment methods

    /**
     * Stores the recurring-billing token on the (non-deleted) payment method row
     * matching the Kill Bill payment method id and tenant.
     */
    public void setPaymentMethodToken(final String kbPaymentMethodId, final String token, final String kbTenantId) throws SQLException {
        execute(dataSource.getConnection(),
                new WithConnectionCallback<AdyenResponsesRecord>() {
                    @Override
                    public AdyenResponsesRecord withConnection(final Connection conn) throws SQLException {
                        DSL.using(conn, dialect, settings)
                           .update(ADYEN_PAYMENT_METHODS)
                           .set(ADYEN_PAYMENT_METHODS.TOKEN, token)
                           .where(ADYEN_PAYMENT_METHODS.KB_PAYMENT_METHOD_ID.equal(kbPaymentMethodId))
                           .and(ADYEN_PAYMENT_METHODS.KB_TENANT_ID.equal(kbTenantId))
                           .and(ADYEN_PAYMENT_METHODS.IS_DELETED.equal(FALSE))
                           .execute();
                        return null;
                    }
                });
    }

    // HPP requests

    /**
     * Records an outgoing Hosted Payment Pages request. The payment / transaction ids
     * may be null when the request is created before the Kill Bill payment exists;
     * {@code additionalDataMap} is serialized to JSON into the additional_data column.
     */
    public void addHppRequest(final UUID kbAccountId,
                              @Nullable final UUID kbPaymentId,
                              @Nullable final UUID kbPaymentTransactionId,
                              final String transactionExternalKey,
                              final Map additionalDataMap,
                              final DateTime utcNow,
                              final UUID kbTenantId) throws SQLException {
        final String additionalData = asString(additionalDataMap);

        execute(dataSource.getConnection(),
                new WithConnectionCallback<Void>() {
                    @Override
                    public Void withConnection(final Connection conn) throws SQLException {
                        DSL.using(conn, dialect, settings)
                           .insertInto(ADYEN_HPP_REQUESTS,
                                       ADYEN_HPP_REQUESTS.KB_ACCOUNT_ID,
                                       ADYEN_HPP_REQUESTS.KB_PAYMENT_ID,
                                       ADYEN_HPP_REQUESTS.KB_PAYMENT_TRANSACTION_ID,
                                       ADYEN_HPP_REQUESTS.TRANSACTION_EXTERNAL_KEY,
                                       ADYEN_HPP_REQUESTS.ADDITIONAL_DATA,
                                       ADYEN_HPP_REQUESTS.CREATED_DATE,
                                       ADYEN_HPP_REQUESTS.KB_TENANT_ID)
                           .values(kbAccountId.toString(),
                                   kbPaymentId != null ? kbPaymentId.toString() : null,
                                   kbPaymentTransactionId != null ? kbPaymentTransactionId.toString() : null,
                                   transactionExternalKey,
                                   additionalData,
                                   toTimestamp(utcNow),
                                   kbTenantId.toString())
                           .execute();
                        return null;
                    }
                });
    }

    /**
     * Returns the most recent HPP request whose transaction external key matches the
     * Adyen merchant reference, or null when none exists.
     */
    public AdyenHppRequestsRecord getHppRequest(final String merchantReference) throws SQLException {
        return execute(dataSource.getConnection(),
                       new WithConnectionCallback<AdyenHppRequestsRecord>() {
                           @Override
                           public AdyenHppRequestsRecord withConnection(final Connection conn) throws SQLException {
                               return DSL.using(conn, dialect, settings)
                                         .selectFrom(ADYEN_HPP_REQUESTS)
                                         .where(ADYEN_HPP_REQUESTS.TRANSACTION_EXTERNAL_KEY.equal(merchantReference))
                                         .orderBy(ADYEN_HPP_REQUESTS.RECORD_ID.desc())
                                         .limit(1)
                                         .fetchOne();
                           }
                       });
    }

    // Responses

    /**
     * Persists the gateway response for an authorization/purchase call and returns the
     * freshly inserted row (re-read as the latest response for the transaction).
     */
    public AdyenResponsesRecord addResponse(final UUID kbAccountId,
                                            final UUID kbPaymentId,
                                            final UUID kbPaymentTransactionId,
                                            final TransactionType transactionType,
                                            final BigDecimal amount,
                                            final Currency currency,
                                            final PurchaseResult result,
                                            final DateTime utcNow,
                                            final UUID kbTenantId) throws SQLException {
        final String dccAmountValue = getProperty(AdyenPaymentPluginApi.PROPERTY_DCC_AMOUNT_VALUE, result);
        final String additionalData = getAdditionalData(result);

        return execute(dataSource.getConnection(),
                       new WithConnectionCallback<AdyenResponsesRecord>() {
                           @Override
                           public AdyenResponsesRecord withConnection(final Connection conn) throws SQLException {
                               // Column order below must stay in sync with the values(...) call.
                               DSL.using(conn, dialect, settings)
                                  .insertInto(ADYEN_RESPONSES,
                                              ADYEN_RESPONSES.KB_ACCOUNT_ID,
                                              ADYEN_RESPONSES.KB_PAYMENT_ID,
                                              ADYEN_RESPONSES.KB_PAYMENT_TRANSACTION_ID,
                                              ADYEN_RESPONSES.TRANSACTION_TYPE,
                                              ADYEN_RESPONSES.AMOUNT,
                                              ADYEN_RESPONSES.CURRENCY,
                                              ADYEN_RESPONSES.PSP_RESULT,
                                              ADYEN_RESPONSES.PSP_REFERENCE,
                                              ADYEN_RESPONSES.AUTH_CODE,
                                              ADYEN_RESPONSES.RESULT_CODE,
                                              ADYEN_RESPONSES.REFUSAL_REASON,
                                              ADYEN_RESPONSES.REFERENCE,
                                              ADYEN_RESPONSES.PSP_ERROR_CODES,
                                              ADYEN_RESPONSES.PAYMENT_INTERNAL_REF,
                                              ADYEN_RESPONSES.FORM_URL,
                                              ADYEN_RESPONSES.DCC_AMOUNT,
                                              ADYEN_RESPONSES.DCC_CURRENCY,
                                              ADYEN_RESPONSES.DCC_SIGNATURE,
                                              ADYEN_RESPONSES.ISSUER_URL,
                                              ADYEN_RESPONSES.MD,
                                              ADYEN_RESPONSES.PA_REQUEST,
                                              ADYEN_RESPONSES.ADDITIONAL_DATA,
                                              ADYEN_RESPONSES.CREATED_DATE,
                                              ADYEN_RESPONSES.KB_TENANT_ID)
                                  .values(kbAccountId.toString(),
                                          kbPaymentId.toString(),
                                          kbPaymentTransactionId.toString(),
                                          transactionType.toString(),
                                          amount,
                                          currency,
                                          result.getResult().isPresent() ? result.getResult().get().toString() : null,
                                          result.getPspReference(),
                                          result.getAuthCode(),
                                          result.getResultCode(),
                                          result.getReason(),
                                          result.getReference(),
                                          null,
                                          result.getPaymentTransactionExternalKey(),
                                          result.getFormUrl(),
                                          dccAmountValue == null ? null : new BigDecimal(dccAmountValue),
                                          getProperty(AdyenPaymentPluginApi.PROPERTY_DCC_AMOUNT_CURRENCY, result),
                                          getProperty(AdyenPaymentPluginApi.PROPERTY_DCC_SIGNATURE, result),
                                          getProperty(AdyenPaymentPluginApi.PROPERTY_ISSUER_URL, result),
                                          getProperty(AdyenPaymentPluginApi.PROPERTY_MD, result),
                                          getProperty(AdyenPaymentPluginApi.PROPERTY_PA_REQ, result),
                                          additionalData,
                                          toTimestamp(utcNow),
                                          kbTenantId.toString())
                                  .execute();
                               return DSL.using(conn, dialect, settings)
                                         .selectFrom(ADYEN_RESPONSES)
                                         .where(ADYEN_RESPONSES.KB_PAYMENT_TRANSACTION_ID.equal(kbPaymentTransactionId.toString()))
                                         .and(ADYEN_RESPONSES.KB_TENANT_ID.equal(kbTenantId.toString()))
                                         .orderBy(ADYEN_RESPONSES.RECORD_ID.desc())
                                         .limit(1)
                                         .fetchOne();
                           }
                       });
    }

    /**
     * Persists the gateway response for a payment modification (capture, refund, void...).
     * Fields that only exist on purchase responses (auth code, refusal reason, etc.)
     * are stored as null.
     */
    public void addResponse(final UUID kbAccountId,
                            final UUID kbPaymentId,
                            final UUID kbPaymentTransactionId,
                            final TransactionType transactionType,
                            @Nullable final BigDecimal amount,
                            @Nullable final Currency currency,
                            final PaymentModificationResponse result,
                            final DateTime utcNow,
                            final UUID kbTenantId) throws SQLException {
        final String dccAmountValue = getProperty(AdyenPaymentPluginApi.PROPERTY_DCC_AMOUNT_VALUE, result);
        final String additionalData = getAdditionalData(result);

        execute(dataSource.getConnection(),
                new WithConnectionCallback<Void>() {
                    @Override
                    public Void withConnection(final Connection conn) throws SQLException {
                        // Column order below must stay in sync with the values(...) call.
                        DSL.using(conn, dialect, settings)
                           .insertInto(ADYEN_RESPONSES,
                                       ADYEN_RESPONSES.KB_ACCOUNT_ID,
                                       ADYEN_RESPONSES.KB_PAYMENT_ID,
                                       ADYEN_RESPONSES.KB_PAYMENT_TRANSACTION_ID,
                                       ADYEN_RESPONSES.TRANSACTION_TYPE,
                                       ADYEN_RESPONSES.AMOUNT,
                                       ADYEN_RESPONSES.CURRENCY,
                                       ADYEN_RESPONSES.PSP_RESULT,
                                       ADYEN_RESPONSES.PSP_REFERENCE,
                                       ADYEN_RESPONSES.AUTH_CODE,
                                       ADYEN_RESPONSES.RESULT_CODE,
                                       ADYEN_RESPONSES.REFUSAL_REASON,
                                       ADYEN_RESPONSES.REFERENCE,
                                       ADYEN_RESPONSES.PSP_ERROR_CODES,
                                       ADYEN_RESPONSES.PAYMENT_INTERNAL_REF,
                                       ADYEN_RESPONSES.FORM_URL,
                                       ADYEN_RESPONSES.DCC_AMOUNT,
                                       ADYEN_RESPONSES.DCC_CURRENCY,
                                       ADYEN_RESPONSES.DCC_SIGNATURE,
                                       ADYEN_RESPONSES.ISSUER_URL,
                                       ADYEN_RESPONSES.MD,
                                       ADYEN_RESPONSES.PA_REQUEST,
                                       ADYEN_RESPONSES.ADDITIONAL_DATA,
                                       ADYEN_RESPONSES.CREATED_DATE,
                                       ADYEN_RESPONSES.KB_TENANT_ID)
                           .values(kbAccountId.toString(),
                                   kbPaymentId.toString(),
                                   kbPaymentTransactionId.toString(),
                                   transactionType.toString(),
                                   amount,
                                   currency,
                                   result.getResponse(),
                                   result.getPspReference(),
                                   null,
                                   null,
                                   null,
                                   null,
                                   null,
                                   null,
                                   null,
                                   dccAmountValue == null ? null : new BigDecimal(dccAmountValue),
                                   getProperty(AdyenPaymentPluginApi.PROPERTY_DCC_AMOUNT_CURRENCY, result),
                                   getProperty(AdyenPaymentPluginApi.PROPERTY_DCC_SIGNATURE, result),
                                   getProperty(AdyenPaymentPluginApi.PROPERTY_ISSUER_URL, result),
                                   getProperty(AdyenPaymentPluginApi.PROPERTY_MD, result),
                                   getProperty(AdyenPaymentPluginApi.PROPERTY_PA_REQ, result),
                                   additionalData,
                                   toTimestamp(utcNow),
                                   kbTenantId.toString())
                           .execute();
                        return null;
                    }
                });
    }

    // Convenience overload: update properties without changing the PSP result.
    public AdyenResponsesRecord updateResponse(final UUID kbPaymentTransactionId, final Iterable<PluginProperty> additionalPluginProperties, final UUID kbTenantId) throws SQLException {
        return updateResponse(kbPaymentTransactionId, null, additionalPluginProperties, kbTenantId);
    }

    /**
     * Update the PSP reference and additional data of the latest response row for a payment transaction
     *
     * @param kbPaymentTransactionId       Kill Bill payment transaction id
     * @param paymentServiceProviderResult New PSP result (null if unchanged)
     * @param additionalPluginProperties   Latest properties
     * @param kbTenantId                   Kill Bill tenant id
     * @return the latest version of the response row, null if one couldn't be found
     * @throws SQLException For any unexpected SQL error
     */
    public AdyenResponsesRecord updateResponse(final UUID kbPaymentTransactionId, @Nullable final PaymentServiceProviderResult paymentServiceProviderResult, final Iterable<PluginProperty> additionalPluginProperties, final UUID kbTenantId) throws SQLException {
        final Map<String, Object> additionalProperties = PluginProperties.toMap(additionalPluginProperties);

        return execute(dataSource.getConnection(),
                       new WithConnectionCallback<AdyenResponsesRecord>() {
                           @Override
                           public AdyenResponsesRecord withConnection(final Connection conn) throws SQLException {
                               // Fetch the latest response row for that transaction/tenant.
                               final AdyenResponsesRecord response = DSL.using(conn, dialect, settings)
                                                                        .selectFrom(ADYEN_RESPONSES)
                                                                        .where(ADYEN_RESPONSES.KB_PAYMENT_TRANSACTION_ID.equal(kbPaymentTransactionId.toString()))
                                                                        .and(ADYEN_RESPONSES.KB_TENANT_ID.equal(kbTenantId.toString()))
                                                                        .orderBy(ADYEN_RESPONSES.RECORD_ID.desc())
                                                                        .limit(1)
                                                                        .fetchOne();
                               if (response == null) {
                                   return null;
                               }

                               // Merge the new plugin properties over the stored additional data.
                               final Map originalData = new HashMap(fromAdditionalData(response.getAdditionalData()));
                               originalData.putAll(additionalProperties);

                               final String pspReference = getProperty(PROPERTY_PSP_REFERENCE, additionalProperties);
                               if (pspReference != null) {
                                   // If there is a PSP reference, the call went eventually to Adyen. Remove exceptions
                                   originalData.remove(ADYEN_CALL_ERROR_STATUS);
                                   originalData.remove(EXCEPTION_CLASS);
                                   originalData.remove(EXCEPTION_MESSAGE);
                               }
                               final String mergedAdditionalData = asString(originalData);

                               UpdateSetMoreStep<AdyenResponsesRecord> step = DSL.using(conn, dialect, settings)
                                                                                 .update(ADYEN_RESPONSES)
                                                                                 .set(ADYEN_RESPONSES.PSP_REFERENCE, pspReference)
                                                                                 .set(ADYEN_RESPONSES.ADDITIONAL_DATA, mergedAdditionalData);
                               if (paymentServiceProviderResult != null) {
                                   step = step.set(ADYEN_RESPONSES.PSP_RESULT, paymentServiceProviderResult.toString());
                               }
                               step.where(ADYEN_RESPONSES.RECORD_ID.equal(response.getRecordId()))
                                   .execute();

                               // Re-read and return the updated row.
                               return DSL.using(conn, dialect, settings)
                                         .selectFrom(ADYEN_RESPONSES)
                                         .where(ADYEN_RESPONSES.KB_PAYMENT_TRANSACTION_ID.equal(kbPaymentTransactionId.toString()))
                                         .and(ADYEN_RESPONSES.KB_TENANT_ID.equal(kbTenantId.toString()))
                                         .orderBy(ADYEN_RESPONSES.RECORD_ID.desc())
                                         .limit(1)
                                         .fetchOne();
                           }
                       });
    }

    /**
     * Returns the responses for a payment, truncated so that only rows from the most
     * recent AUTHORIZE onwards are kept (drops the pre-3D-S rows).
     */
    @Override
    public List<AdyenResponsesRecord> getResponses(final UUID kbPaymentId, final UUID kbTenantId) throws SQLException {
        final List<AdyenResponsesRecord> responses = new LinkedList<AdyenResponsesRecord>();
        // Walk newest-first, stop after the first AUTHORIZE, then restore original order.
        for (final AdyenResponsesRecord adyenResponsesRecord : Lists.<AdyenResponsesRecord>reverse(super.getResponses(kbPaymentId, kbTenantId))) {
            responses.add(adyenResponsesRecord);
            // Keep only the completion row for 3D-S
            if (TransactionType.AUTHORIZE.toString().equals(adyenResponsesRecord.getTransactionType())) {
                break;
            }
        }
        return Lists.<AdyenResponsesRecord>reverse(responses);
    }

    // Assumes that the last auth was successful
    @Override
    public AdyenResponsesRecord getSuccessfulAuthorizationResponse(final UUID kbPaymentId, final UUID kbTenantId) throws SQLException {
        return execute(dataSource.getConnection(),
                       new WithConnectionCallback<AdyenResponsesRecord>() {
                           @Override
                           public AdyenResponsesRecord withConnection(final Connection conn) throws SQLException {
                               // Latest AUTHORIZE or PURCHASE row for this payment/tenant.
                               return DSL.using(conn, dialect, settings)
                                         .selectFrom(responsesTable)
                                         .where(DSL.field(responsesTable.getName() + "." + KB_PAYMENT_ID).equal(kbPaymentId.toString()))
                                         .and(
                                                 DSL.field(responsesTable.getName() + "." + TRANSACTION_TYPE).equal(TransactionType.AUTHORIZE.toString())
                                                    .or(DSL.field(responsesTable.getName() + "." + TRANSACTION_TYPE).equal(TransactionType.PURCHASE.toString()))
                                             )
                                         .and(DSL.field(responsesTable.getName() + "." + KB_TENANT_ID).equal(kbTenantId.toString()))
                                         .orderBy(DSL.field(responsesTable.getName() + "." + RECORD_ID).desc())
                                         .limit(1)
                                         .fetchOne();
                           }
                       });
    }

    /** Returns the latest response row with the given PSP reference, or null. */
    public AdyenResponsesRecord getResponse(final String pspReference) throws SQLException {
        return execute(dataSource.getConnection(),
                       new WithConnectionCallback<AdyenResponsesRecord>() {
                           @Override
                           public AdyenResponsesRecord withConnection(final Connection conn) throws SQLException {
                               return DSL.using(conn, dialect, settings)
                                         .selectFrom(ADYEN_RESPONSES)
                                         .where(ADYEN_RESPONSES.PSP_REFERENCE.equal(pspReference))
                                         .orderBy(ADYEN_RESPONSES.RECORD_ID.desc())
                                         // Can have multiple entries for 3D-S
                                         .limit(1)
                                         .fetchOne();
                           }
                       });
    }

    // Notifications

    /**
     * Records an incoming Adyen webhook notification. Most identifiers are nullable
     * because a notification may not (yet) be linked to a Kill Bill payment.
     */
    public void addNotification(@Nullable final UUID kbAccountId,
                                @Nullable final UUID kbPaymentId,
                                @Nullable final UUID kbPaymentTransactionId,
                                @Nullable final TransactionType transactionType,
                                final NotificationItem notification,
                                final DateTime utcNow,
                                @Nullable final UUID kbTenantId) throws SQLException {
        final String additionalData = asString(notification.getAdditionalData());

        execute(dataSource.getConnection(),
                new WithConnectionCallback<Void>() {
                    @Override
                    public Void withConnection(final Connection conn) throws SQLException {
                        // Column order below must stay in sync with the values(...) call.
                        DSL.using(conn, dialect, settings)
                           .insertInto(ADYEN_NOTIFICATIONS,
                                       ADYEN_NOTIFICATIONS.KB_ACCOUNT_ID,
                                       ADYEN_NOTIFICATIONS.KB_PAYMENT_ID,
                                       ADYEN_NOTIFICATIONS.KB_PAYMENT_TRANSACTION_ID,
                                       ADYEN_NOTIFICATIONS.TRANSACTION_TYPE,
                                       ADYEN_NOTIFICATIONS.AMOUNT,
                                       ADYEN_NOTIFICATIONS.CURRENCY,
                                       ADYEN_NOTIFICATIONS.EVENT_CODE,
                                       ADYEN_NOTIFICATIONS.EVENT_DATE,
                                       ADYEN_NOTIFICATIONS.MERCHANT_ACCOUNT_CODE,
                                       ADYEN_NOTIFICATIONS.MERCHANT_REFERENCE,
                                       ADYEN_NOTIFICATIONS.OPERATIONS,
                                       ADYEN_NOTIFICATIONS.ORIGINAL_REFERENCE,
                                       ADYEN_NOTIFICATIONS.PAYMENT_METHOD,
                                       ADYEN_NOTIFICATIONS.PSP_REFERENCE,
                                       ADYEN_NOTIFICATIONS.REASON,
                                       ADYEN_NOTIFICATIONS.SUCCESS,
                                       ADYEN_NOTIFICATIONS.ADDITIONAL_DATA,
                                       ADYEN_NOTIFICATIONS.CREATED_DATE,
                                       ADYEN_NOTIFICATIONS.KB_TENANT_ID)
                           .values(kbAccountId == null ? null : kbAccountId.toString(),
                                   kbPaymentId == null ? null : kbPaymentId.toString(),
                                   kbPaymentTransactionId == null ? null : kbPaymentTransactionId.toString(),
                                   transactionType == null ? null : transactionType.toString(),
                                   notification.getAmount(),
                                   notification.getCurrency(),
                                   notification.getEventCode(),
                                   toTimestamp(notification.getEventDate()),
                                   notification.getMerchantAccountCode(),
                                   notification.getMerchantReference(),
                                   getString(notification.getOperations()),
                                   notification.getOriginalReference(),
                                   notification.getPaymentMethod(),
                                   notification.getPspReference(),
                                   notification.getReason(),
                                   notification.getSuccess() == null ? FALSE : fromBoolean(notification.getSuccess()),
                                   additionalData,
                                   toTimestamp(utcNow),
                                   kbTenantId == null ? null : kbTenantId.toString())
                           .execute();
                        return null;
                    }
                });
    }

    @VisibleForTesting
    AdyenNotificationsRecord getNotification(final String pspReference) throws SQLException {
        return execute(dataSource.getConnection(),
                       new WithConnectionCallback<AdyenNotificationsRecord>() {
                           @Override
                           public AdyenNotificationsRecord withConnection(final Connection conn) throws SQLException {
                               return DSL.using(conn, dialect, settings)
                                         .selectFrom(ADYEN_NOTIFICATIONS)
                                         .where(ADYEN_NOTIFICATIONS.PSP_REFERENCE.equal(pspReference))
                                         .orderBy(ADYEN_NOTIFICATIONS.RECORD_ID.desc())
                                         .limit(1)
                                         .fetchOne();
                           }
                       });
    }

    // Just for testing
    public List<AdyenNotificationsRecord> getNotifications() throws SQLException {
        return execute(dataSource.getConnection(),
                       new WithConnectionCallback<List<AdyenNotificationsRecord>>() {
                           @Override
                           public List<AdyenNotificationsRecord> withConnection(final Connection conn) throws SQLException {
                               return DSL.using(conn, dialect, settings)
                                         .selectFrom(ADYEN_NOTIFICATIONS)
                                         .orderBy(ADYEN_NOTIFICATIONS.RECORD_ID.asc())
                                         .fetch();
                           }
                       });
    }

    // Joins the iterable's elements with commas (via toString); null for null/empty input.
    private String getString(@Nullable final Iterable iterable) {
        if (iterable == null || !iterable.iterator().hasNext()) {
            return null;
        } else {
            return JOINER.join(Iterables.transform(iterable, Functions.toStringFunction()));
        }
    }

    // Looks up a property in the purchase result's form parameters.
    private String getProperty(final String key, final PurchaseResult result) {
        return getProperty(key, result.getFormParameter());
    }

    // Looks up a property in the modification response's additional data.
    private String getProperty(final String key, final PaymentModificationResponse response) {
        return getProperty(key, response.getAdditionalData());
    }

    // Merges additional data and form parameters into one JSON blob; null when both are empty.
    // Note: form parameters overwrite additional-data entries on key collision.
    private String getAdditionalData(final PurchaseResult result) throws SQLException {
        final Map<String, String> additionalDataMap = new HashMap<String, String>();
        if (result.getAdditionalData() != null && !result.getAdditionalData().isEmpty()) {
            additionalDataMap.putAll(result.getAdditionalData());
        }
        if (result.getFormParameter() != null && !result.getFormParameter().isEmpty()) {
            additionalDataMap.putAll(result.getFormParameter());
        }
        if (additionalDataMap.isEmpty()) {
            return null;
        } else {
            return asString(additionalDataMap);
        }
    }

    private String getAdditionalData(final PaymentModificationResponse response) throws SQLException {
        return asString(response.getAdditionalData());
    }

    /**
     * Deserializes a stored additional_data JSON string back into a Map;
     * returns an empty immutable map for null input.
     *
     * @throws RuntimeException wrapping any JSON parse failure
     */
    public static Map fromAdditionalData(@Nullable final String additionalData) {
        if (additionalData == null) {
            return ImmutableMap.of();
        }

        try {
            return objectMapper.readValue(additionalData, Map.class);
        } catch (final IOException e) {
            throw new RuntimeException(e);
        }
    }
}
// Copyright 2010 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.enterprise.connector.common;

import com.google.common.base.Function;
import com.google.common.base.Strings;
import com.google.enterprise.connector.common.JarUtils;
import com.google.enterprise.connector.instantiator.EncryptedPropertyPlaceholderConfigurer;
import com.google.enterprise.connector.manager.Context;
import com.google.enterprise.connector.servlet.ServletUtil;
import com.google.enterprise.connector.util.SAXParseErrorHandler;
import com.google.enterprise.connector.util.XmlParseUtil;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;

import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;

import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintWriter;
import java.util.HashMap;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * An abstract superclass for building Connector Manager command line apps.
 */
public abstract class AbstractCommandLineApp {
  /**
   * A couple of basic Options that all command line apps should support.
   * Note that user requests for help and version are handled specially by
   * this framework, so the subclass need not bother with them.
   * However, the subclass should add these options to the Options object
   * it constructs.
   */
  protected static final Option HELP_OPTION =
      new Option("?", "help", false, "Display this help.");
  protected static final Option VERSION_OPTION =
      new Option("v", "version", false, "Display version string.");

  /** Platform-specific line separator, for multi-line usage messages. */
  protected static final String NL = System.getProperty("line.separator");

  /** Parsed CommandLine. */
  protected CommandLine commandLine;

  /**
   * Returns the name of the command line application.
   */
  public abstract String getName();

  /**
   * Returns short description of the command line application.
   */
  public abstract String getDescription();

  /**
   * Executes the command line app.
   *
   * @param commandLine a parsed {@code org.apache.commons.cli.CommandLine}.
   */
  public abstract void run(CommandLine commandLine) throws Exception;

  /**
   * Returns a Command Line Syntax as a String. This is used to generate
   * the usage output. The base class includes the application name, plus
   * the base class options (help and version). Subclasses may override this
   * method adding their additional Options, plus non-option arguments.
   */
  public String getCommandLineSyntax() {
    return getName() + " [-?] [-v] ";
  }

  /**
   * Returns a base set of Options, including {@code VERSION_OPTION} and
   * {@code HELP_OPTION}. Subclasses may override this method and add
   * additional app-specific options to the set.
   *
   * @return {@code org.apache.commons.cli.Options}
   */
  protected Options getOptions() {
    Options options = new Options();
    options.addOption(HELP_OPTION);
    options.addOption(VERSION_OPTION);
    return options;
  }

  /**
   * Initializes a standalone Connector Manager application context. If the
   * CommandLineApp starts a standalone context, it must call {@link #shutdown()}
   * before exiting.
   *
   * @param doStart If {@code true}, start the Context via {@link Context#start}
   *        (with traversals disabled), otherwise construct all the initial
   *        beans, but do not actually start the Connector Manager appliction.
   */
  protected void initStandAloneContext(boolean doStart) {
    // Turn down the logging output to the console.
    setLoggingLevels();

    // Find the Connector Manager WEB-INF directory.
    File webInfDir = locateWebInf();
    if (webInfDir == null) {
      System.err.println(
          "Unable to locate the connector-manager webapp directory.");
      System.err.println("Try changing to that directory, or use");
      System.err.println("-Dmanager.dir=/path/to/webapps/connector-manager");
      System.exit(-1);
    }

    // If a catalina.base property is not specified, make a guess based on the
    // knowledge that cwd is likely ${catalina.base}/webapps/connector-manager.
    if (System.getProperty("catalina.base") == null) {
      try {
        System.setProperty("catalina.base", webInfDir.getAbsoluteFile()
            .getParentFile().getParentFile().getParent());
      } catch (NullPointerException npe) {
        // Bad guess. Go on without it.
      }
    }

    // Establish the webapp keystore configuration before initializing
    // the Context.
    try {
      configureCryptor(webInfDir);
    } catch (IOException e) {
      System.err.println("Failed to read keystore configuration: " + e);
      System.exit(-1);
    }

    // Setup the standalone application Context.
    Context context = Context.getInstance();
    File contextLocation = new File(webInfDir, "applicationContext.xml");
    try {
      context.setStandaloneContext(
          contextLocation.getAbsoluteFile().toURI().toURL().toString(),
          webInfDir.getAbsoluteFile().getParent(),
          webInfDir.getAbsolutePath());
      // At this point the beans have been created, but the Connector Manager
      // has not started up.
      if (doStart) {
        context.setFeeding(false);
        context.start();
      }
    } catch (Exception e) {
      System.err.println(
          "Failed to initialize standalone application context: " + e);
      System.exit(-1);
    }
  }

  /**
   * Sets the Logging Levels. This is typically used to turn logging down
   * to WARNING or SEVERE to avoid excessive logging to the console logger
   * when running our command line app. Subclasses may override this if
   * they want different logging behaviour.
   */
  // TODO: Look for logging levels on the command line via -D...
  protected void setLoggingLevels() {
    // Turn down the logging output to the console.
    Logger.getLogger("").setLevel(Level.WARNING);
    Logger.getLogger("com.google.enterprise.connector").setLevel(Level.WARNING);
    Logger.getLogger("org.springframework").setLevel(Level.WARNING);
  }

  /**
   * Shuts down the the command line application context. Subclasses may
   * override this method, but should call super.shutdown() if they do.
   */
  protected void shutdown() {
    Context.getInstance().shutdown(true);
  }

  /**
   * Returns the Version string for this application.
   */
  protected String getVersion() {
    return this.getName() + " v" + JarUtils.getJarVersion(this.getClass());
  }

  /**
   * Displays the product version.
   */
  protected void printVersion() {
    System.err.println(getVersion());
    System.err.println("");
  }

  /**
   * Displays the product version and exits. This is called automatically
   * if the user invokes the app with "-v" or "--version".
   *
   * @param exitCode code to supply to {@code System.exit()}
   */
  protected void printVersionAndExit(int exitCode) {
    printVersion();
    System.exit(exitCode);
  }

  /**
   * Gets the header that is included in the {@code usage:} message.
   * Subclasses my override this to add additional information
   * before the display of options.
   */
  protected String getUsageHeader() {
    return null;
  }

  /**
   * Gets the footer to be added to the {@code usage:} message.
   * Subclasses my override this to add additional informative help.
   */
  protected String getUsageFooter() {
    return null;
  }

  /**
   * Displays the product usage. This is called automatically if the user
   * invokes the app with "-?", "-h" or "--help" or required {@code Options}
   * are not supplied. Subclasses may call this if the supplied command line
   * options are inconsistent with correct operation.
   */
  protected void printUsage() {
    PrintWriter out = new PrintWriter(System.err, true);
    out.println(getVersion());
    out.println(getDescription());
    out.println();
    HelpFormatter helper = new HelpFormatter();
    helper.printHelp(out, 79, getCommandLineSyntax(), getUsageHeader(),
                     getOptions(), 7, 4, getUsageFooter());
    out.println();
  }

  /**
   * Displays the product usage, then exits with the supplied code.
   * This is called automatically if the user invokes the app with "-?",
   * or "--help" or required {@code Options} are not supplied.
   * Subclasses may call this if the supplied command line options are
   * inconsistent with correct operation.
   *
   * @param exitCode code to supply to {@code System.exit()}
   */
  protected void printUsageAndExit(int exitCode) {
    printUsage();
    System.exit(exitCode);
  }

  /**
   * Parses the supplied command line arguments according to the configured
   * {@code Options} generating a {@code CommandLine}. If parsing the options
   * fails for any reason, or the user specifically requested help,
   * then {@link #printUsageAndExit(int)} is called. Similarly, if the user
   * requests the product version, then {@link #printVersionAndExit(int)}
   * is called.
   *
   * @param args String array of supplied command line arguments.
   */
  public CommandLine parseArgs(String[] args) {
    try {
      commandLine = new PosixParser().parse(getOptions(), args);
      if (commandLine.hasOption(HELP_OPTION.getLongOpt())) {
        printUsageAndExit(0);
      } else if (commandLine.hasOption(VERSION_OPTION.getLongOpt())) {
        printVersionAndExit(0);
      }
      return commandLine;
    } catch (ParseException pe) {
      printUsageAndExit(-1);
    }
    return null;
  }

  // This is the default keystore config from out-of-box web.xml.
  private String keystore_type = "JCEKS";
  private String keystore_crypto_algo = "AES";
  private String keystore_passwd_file = "keystore_passwd";
  private String keystore_file = "connector_manager.keystore";

  /**
   * Extracts the keystore configuration from the web.xml.
   *
   * @param in an XML InputStream
   */
  private void getKeystoreContextParams(InputStream in) {
    Document document = XmlParseUtil.parse(in, new SAXParseErrorHandler(),
        XmlParseUtil.catalogEntityResolver);
    NodeList params = document.getElementsByTagName("context-param");
    if (params == null) {
      return;
    }
    for (int i = 0; i < params.getLength(); i++) {
      Element param = (Element)params.item(i);
      String name = XmlParseUtil.getFirstElementByTagName(param, "param-name");
      String value = XmlParseUtil.getFirstElementByTagName(param, "param-value");
      if (value != null) {
        if ("keystore_type".equals(name)) {
          keystore_type = value;
        } else if ("keystore_crypto_algo".equals(name)) {
          keystore_crypto_algo = value;
        } else if ("keystore_passwd_file".equals(name)) {
          keystore_passwd_file = value;
        } else if ("keystore_file".equals(name)) {
          keystore_file = value;
        }
      }
    }
  }

  /**
   * Configure a {@link EncryptedPropertyPlaceholderConfigurer}.
   * This must be done before starting up a standalone {@link Context}.
   * Subclasses may override this if they wish to configure the
   * {@link EncryptedPropertyPlaceholderConfigurer} differently.
   *
   * @param webInfDir {@code connector-manager/WEB-INF} directory.
   */
  protected void configureCryptor(File webInfDir) throws IOException {
    File webXml = new File(webInfDir, "web.xml");
    // Close the stream in a finally block so it is not leaked if XML
    // parsing throws (the original code only closed on the success path).
    InputStream is = new BufferedInputStream(new FileInputStream(webXml));
    try {
      getKeystoreContextParams(is);
    } finally {
      is.close();
    }

    // Supply EncryptedPropertyPlaceholder with the keystore config.
    if (!Strings.isNullOrEmpty(keystore_type)) {
      EncryptedPropertyPlaceholderConfigurer.setKeyStoreType(keystore_type);
    }
    if (!Strings.isNullOrEmpty(keystore_crypto_algo)) {
      EncryptedPropertyPlaceholderConfigurer
          .setKeyStoreCryptoAlgo(keystore_crypto_algo);
    }
    // Because of differences in ServletContext and StandaloneContext,
    // there are differences in the expected location of the keystore file.
    // See keystore configuration in the StartUp servlet for details.
    if (!Strings.isNullOrEmpty(keystore_file)) {
      EncryptedPropertyPlaceholderConfigurer
          .setKeyStorePath(getRealPath(webInfDir, keystore_file));
    }
    if (!Strings.isNullOrEmpty(keystore_passwd_file)) {
      EncryptedPropertyPlaceholderConfigurer
          .setKeyStorePasswdPath(getRealPath(webInfDir, keystore_passwd_file));
    }
  }

  // Relative to a given directory name, where is WEB-INF?
  // Built in a static initializer rather than double-brace initialization:
  // the map is a constant, and the double-brace idiom creates an anonymous
  // HashMap subclass holding a hidden reference to the enclosing instance.
  private static final HashMap<String, String> cmDirsMap =
      new HashMap<String, String>();
  static {
    cmDirsMap.put("scripts", "../Tomcat/webapps/connector-manager/WEB-INF");
    cmDirsMap.put("tomcat", "webapps/connector-manager/WEB-INF");
    cmDirsMap.put("webapps", "connector-manager/WEB-INF");
    cmDirsMap.put("connector-manager", "WEB-INF");
    cmDirsMap.put("web-inf", "");
    cmDirsMap.put("local", "google/webapps/connector-manager/WEB-INF");
    cmDirsMap.put("google", "webapps/connector-manager/WEB-INF");
  }

  /**
   * Locate the Connector Manager WEB-INF directory.
   */
  protected File locateWebInf() {
    String cmdir = System.getProperty("manager.dir",
        System.getProperty("catalina.base", System.getProperty("user.dir")));
    File webinf = locateWebInf(new File(cmdir));
    if (webinf == null) {
      // Maybe we are at the root of the GCI installation.
      webinf = locateWebInf(new File(cmdir, "Tomcat"));
    }
    if (webinf == null) {
      // Maybe we are at the root of the GSA installation.
      webinf = locateWebInf(new File(cmdir, "local"));
    }
    return webinf;
  }

  /**
   * Locate the Connector Manager WEB-INF directory, relative to dir.
   */
  protected File locateWebInf(File dir) {
    String path = cmDirsMap.get(dir.getName().toLowerCase());
    if (path != null) {
      File webinf = new File(dir, path);
      if (webinf.exists() && webinf.isDirectory()) {
        return webinf.getAbsoluteFile();
      }
    }
    return null;
  }

  /**
   * Tries to normalize a pathname, as if relative to the context.
   * Absolute paths are allowed (unlike traditional web-app behaviour).
   * file: URLs are allowed as well and are treated like absolute paths.
   * All relative paths are made relative the the web-app WEB-INF directory.
   * Attempts are made to recognize paths that are already relative to
   * WEB-INF (they begin with WEB-INF or /WEB-INF).
   *
   * @param webInfDir the WEB-INF directory
   * @param name the file name
   */
  private String getRealPath(final File webInfDir, final String name)
      throws IOException {
    return ServletUtil.getRealPath(name, new Function<String, String>() {
      public String apply(String path) {
        // Force relative paths to be relative to WEB-INF.
        // NOTE(review): this deliberately uses the captured 'name', not the
        // 'path' argument supplied by ServletUtil — confirm against
        // ServletUtil.getRealPath's contract.
        return new File(webInfDir, name).getAbsolutePath();
      }
    });
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.activemq.artemis.utils;

import java.net.URI;
import java.util.Map;

import org.apache.activemq.artemis.utils.uri.URIFactory;
import org.apache.activemq.artemis.utils.uri.URISchema;
import org.junit.Assert;
import org.junit.Test;

public class URIParserTest {

    /**
     * this is just a simple test to validate the model
     *
     * @throws Throwable
     */
    @Test
    public void testSchemaFruit() throws Throwable {
        FruitParser parser = new FruitParser();
        Fruit fruit = (Fruit) parser.newObject(new URI("fruit://some:guy@fair-market:3030?color=green&fluentName=something"), null);

        Assert.assertEquals("fruit", fruit.getName());
        Assert.assertEquals(3030, fruit.getPort());
        Assert.assertEquals("fair-market", fruit.getHost());
        Assert.assertEquals("some:guy", fruit.getUserInfo());
        Assert.assertEquals("green", fruit.getColor());
        Assert.assertEquals("something", fruit.getFluentName());
    }

    /**
     * this is just a simple test to validate the model
     *
     * @throws Throwable
     */
    @Test
    public void testGenerateWithEncoding() throws Throwable {
        FruitParser parser = new FruitParser();
        Fruit myFruit = new Fruit("tomato&fruit");
        myFruit.setHost("somehost&uui");
        // I'm trying to break things as you can see here with some weird encoding
        myFruit.setFluentName("apples&bananas with &host=3344");
        URI uri = parser.createSchema("fruit", myFruit);
        Fruit newFruit = (Fruit) parser.newObject(uri, "something");
        Assert.assertEquals(myFruit.getHost(), newFruit.getHost());
        Assert.assertEquals(myFruit.getFluentName(), newFruit.getFluentName());
    }

    /**
     * Even though there's no host property on FruitBase.. this should still work fine
     * without throwing any exceptions
     *
     * @throws Throwable
     */
    @Test
    public void testSchemaNoHosPropertyt() throws Throwable {
        FruitParser parser = new FruitParser();
        FruitBase fruit = parser.newObject(new URI("base://some:guy@fair-market:3030?color=green&fluentName=something"), null);
        Assert.assertEquals("base", fruit.getName());
        Assert.assertEquals("green", fruit.getColor());
        Assert.assertEquals("something", fruit.getFluentName());
    }

    /**
     * Even though there's no host property on FruitBase.. this should still work fine
     * without throwing any exceptions
     *
     * @throws Throwable
     */
    @Test
    public void testSchemaNoHostOnURL() throws Throwable {
        FruitParser parser = new FruitParser();
        Fruit fruit = (Fruit) parser.newObject(new URI("fruit://some:guy@port?color=green&fluentName=something"), null);

        System.out.println("fruit:" + fruit);
        Assert.assertEquals("fruit", fruit.getName());
        Assert.assertEquals("green", fruit.getColor());
        Assert.assertEquals("something", fruit.getFluentName());
    }

    /** Factory that knows the two test schemas below. */
    class FruitParser extends URIFactory<FruitBase, String> {

        FruitParser() {
            this.registerSchema(new FruitSchema());
            this.registerSchema(new FruitBaseSchema());
        }
    }

    /** Schema producing {@link Fruit} instances for "fruit://" URIs. */
    class FruitSchema extends URISchema<FruitBase, String> {

        @Override
        public String getSchemaName() {
            return "fruit";
        }

        @Override
        public FruitBase internalNewObject(URI uri, Map<String, String> query, String fruitName) throws Exception {
            return setData(uri, new Fruit(getSchemaName()), query);
        }
    }

    /** Schema producing {@link FruitBase} instances for "base://" URIs. */
    class FruitBaseSchema extends URISchema<FruitBase, String> {

        @Override
        public String getSchemaName() {
            return "base";
        }

        @Override
        public FruitBase internalNewObject(URI uri, Map<String, String> query, String fruitName) throws Exception {
            return setData(uri, new FruitBase(getSchemaName()), query);
        }
    }

    /** Simple bean with the properties URISchema populates from the query string. */
    public static class FruitBase {

        final String name;
        String fluentName;
        String color;

        FruitBase(final String name) {
            this.name = name;
        }

        public String getName() {
            return name;
        }

        public String getColor() {
            return color;
        }

        public void setColor(String color) {
            this.color = color;
        }

        public String getFluentName() {
            return fluentName;
        }

        public FruitBase setFluentName(String name) {
            this.fluentName = name;
            return this;
        }

        @Override
        public String toString() {
            return "FruitBase{" + "name='" + name + '\'' +
                ", fluentName='" + fluentName + '\'' +
                ", color='" + color + '\'' +
                '}';
        }
    }

    /** FruitBase extended with the host/port/userInfo properties a URI carries. */
    public static class Fruit extends FruitBase {

        public Fruit(String name) {
            super(name);
        }

        String host;
        int port;
        String userInfo;

        public void setHost(String host) {
            this.host = host;
        }

        public String getHost() {
            return host;
        }

        public void setPort(int port) {
            this.port = port;
        }

        public int getPort() {
            return port;
        }

        public void setUserInfo(String userInfo) {
            this.userInfo = userInfo;
        }

        public String getUserInfo() {
            return userInfo;
        }

        @Override
        public String toString() {
            // Fixed: original emitted "...userInfo='x'super=..." with no
            // separator between the userInfo quote and the "super=" field.
            return "Fruit{" + "host='" + host + '\'' +
                ", port=" + port +
                ", userInfo='" + userInfo + '\'' +
                ", super=" + super.toString() +
                '}';
        }
    }
}
package com.infinityraider.agricraft.capability;

import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.infinityraider.agricraft.AgriCraft;
import com.infinityraider.agricraft.api.v1.AgriApi;
import com.infinityraider.agricraft.api.v1.content.items.IAgriSeedBagItem;
import com.infinityraider.agricraft.api.v1.genetics.IAgriGenome;
import com.infinityraider.agricraft.api.v1.plant.IAgriPlant;
import com.infinityraider.agricraft.content.core.ItemDynamicAgriSeed;
import com.infinityraider.agricraft.impl.v1.plant.NoPlant;
import com.infinityraider.agricraft.reference.AgriNBT;
import com.infinityraider.agricraft.reference.Names;
import com.infinityraider.infinitylib.capability.IInfSerializableCapabilityImplementation;
import com.infinityraider.infinitylib.utility.ISerializable;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.CompoundNBT;
import net.minecraft.nbt.ListNBT;
import net.minecraft.util.ResourceLocation;
import net.minecraftforge.common.capabilities.Capability;
import net.minecraftforge.common.capabilities.CapabilityInject;

import javax.annotation.Nonnull;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.Optional;

import static com.infinityraider.agricraft.content.tools.ItemSeedBag.*;

/**
 * Capability implementation attaching seed-bag contents to seed bag ItemStacks.
 */
public class CapabilitySeedBagContents implements IInfSerializableCapabilityImplementation<ItemStack, CapabilitySeedBagContents.Impl> {
    private static final CapabilitySeedBagContents INSTANCE = new CapabilitySeedBagContents();

    public static CapabilitySeedBagContents getInstance() {
        return INSTANCE;
    }

    public static ResourceLocation KEY = new ResourceLocation(AgriCraft.instance.getModId().toLowerCase(), Names.Items.SEED_BAG);

    // Injected by Forge's capability system; remains null until registration.
    @CapabilityInject(CapabilitySeedBagContents.Impl.class)
    public static final Capability<CapabilitySeedBagContents.Impl> CAPABILITY = null;

    private CapabilitySeedBagContents() {}

    @Override
    public Class<Impl> getCapabilityClass() {
        return Impl.class;
    }

    @Override
    public Capability<Impl> getCapability() {
        return CAPABILITY;
    }

    @Override
    public boolean shouldApplyCapability(ItemStack stack) {
        return stack.getItem() instanceof IAgriSeedBagItem;
    }

    @Override
    public Impl createNewValue(ItemStack stack) {
        return new Impl((IAgriSeedBagItem) stack.getItem());
    }

    @Override
    public ResourceLocation getCapabilityKey() {
        return KEY;
    }

    @Override
    public Class<ItemStack> getCarrierClass() {
        return ItemStack.class;
    }

    /**
     * Actual contents: a sorted list of genome entries, all of one plant type.
     * Slot 0 exposes the "first" (best by current sorter) seed, slot 1 the "last".
     */
    public static class Impl implements IAgriSeedBagItem.Contents, ISerializable {
        private final IAgriSeedBagItem seedBag;
        private IAgriPlant plant;
        private final List<Entry> contents;
        private int count;

        // Cached comparators per sorter index, plus the active one.
        private final Map<Integer, Comparator<Entry>> sorters;
        private int sorterIndex;
        private Comparator<Entry> subSorter;

        // Cached display stacks for the two exposed slots.
        private ItemStack firstStack;
        private ItemStack lastStack;

        private Impl(IAgriSeedBagItem seedBag) {
            this.seedBag = seedBag;
            this.plant = NoPlant.getInstance();
            this.contents = Lists.newArrayList();
            this.count = 0;
            this.sorters = Maps.newHashMap();
            this.setSorterIndex(0);
            this.firstStack = ItemStack.EMPTY;
            this.lastStack = ItemStack.EMPTY;
        }

        public IAgriSeedBagItem getSeedBag() {
            return this.seedBag;
        }

        @Override
        public IAgriPlant getPlant() {
            return this.plant;
        }

        @Override
        public int getCount() {
            return this.count;
        }

        @Override
        public boolean isFull() {
            return this.getCount() >= this.getCapacity();
        }

        @Override
        public Sorter getSorter() {
            return this.getSeedBag().getSorter(this.getSorterIndex());
        }

        @Override
        public int getSorterIndex() {
            return this.sorterIndex;
        }

        @Override
        public void setSorterIndex(int index) {
            this.sorterIndex = index;
            this.subSorter = this.sorters.computeIfAbsent(index,
                    value -> (a, b) -> this.getSorter().compare(a.getGenome(), b.getGenome()));
            this.sort();
        }

        /** Re-sorts the entries and refreshes the cached first/last slot stacks. */
        protected void sort() {
            if (this.contents.size() > 0) {
                this.contents.sort(this.subSorter);
                this.firstStack = this.contents.get(0).initializeStack();
                // BUGFIX: the last entry's stack must populate lastStack;
                // the original assigned firstStack twice, leaving lastStack stale.
                this.lastStack = this.contents.get(this.contents.size() - 1).initializeStack();
            }
        }

        @Override
        public int getSlots() {
            return 2;
        }

        @Nonnull
        @Override
        public ItemStack getStackInSlot(int slot) {
            if (slot == 0) {
                return this.firstStack;
            }
            if (slot == 1) {
                return this.lastStack;
            }
            return ItemStack.EMPTY;
        }

        @Nonnull
        @Override
        public ItemStack insertItem(int slot, @Nonnull ItemStack stack, boolean simulate) {
            if (this.isFull()) {
                return stack;
            }
            if (this.isItemValid(slot, stack)) {
                return ((ItemDynamicAgriSeed) stack.getItem()).getGenome(stack).map(genome -> {
                    boolean flag = true;
                    // Only as many seeds as remaining capacity allows are accepted.
                    int amount = Math.min(this.getCapacity() - this.getCount(), stack.getCount());
                    if (amount <= 0) {
                        return stack;
                    }
                    for (Entry entry : this.contents) {
                        if (entry.matches(genome)) {
                            flag = false;
                            if (!simulate) {
                                entry.add(amount);
                                this.count += amount;
                            }
                            break;
                        }
                    }
                    if (flag && !simulate) {
                        // First seed of this genome; may also fix the bag's plant type.
                        if (!this.plant.isPlant()) {
                            this.plant = ((ItemDynamicAgriSeed) stack.getItem()).getPlant(stack);
                        }
                        this.contents.add(new Entry(genome, amount));
                        this.count += amount;
                        this.sort();
                    }
                    if (amount >= stack.getCount()) {
                        return ItemStack.EMPTY;
                    } else {
                        ItemStack result = stack.copy();
                        result.setCount(result.getCount() - amount);
                        return result;
                    }
                }).orElse(stack);
            }
            return stack;
        }

        @Nonnull
        @Override
        public ItemStack extractItem(int slot, int amount, boolean simulate) {
            if (this.contents.size() >= 1) {
                if (slot == 0) {
                    return this.extractFirstSeed(amount, simulate);
                }
                if (slot == 1) {
                    return this.extractLastSeed(amount, simulate);
                }
            }
            return ItemStack.EMPTY;
        }

        @Nonnull
        @Override
        public ItemStack extractFirstSeed(int amount, boolean simulate) {
            if (this.firstStack.isEmpty()) {
                return ItemStack.EMPTY;
            }
            ItemStack out = this.firstStack.copy();
            if (amount >= this.contents.get(0).getAmount()) {
                // More seeds were requested than there actually are
                Entry entry = simulate ? this.contents.get(0) : this.contents.remove(0);
                out.setCount(entry.getAmount());
                if (!simulate) {
                    this.count -= out.getCount();
                    if (this.contents.size() > 0) {
                        this.firstStack = this.contents.get(0).initializeStack();
                    } else {
                        // Bag emptied; also reset the plant type.
                        this.firstStack = ItemStack.EMPTY;
                        this.lastStack = ItemStack.EMPTY;
                        this.plant = NoPlant.getInstance();
                    }
                }
            } else {
                out.setCount(amount);
                if (!simulate) {
                    this.contents.get(0).extract(amount);
                    this.count -= out.getCount();
                }
            }
            return out;
        }

        @Nonnull
        @Override
        public ItemStack extractLastSeed(int amount, boolean simulate) {
            if (this.lastStack.isEmpty()) {
                return ItemStack.EMPTY;
            }
            ItemStack out = this.lastStack.copy();
            if (amount >= this.contents.get(this.contents.size() - 1).getAmount()) {
                // More seeds were requested than there actually are
                Entry entry = simulate
                        ? this.contents.get(this.contents.size() - 1)
                        : this.contents.remove(this.contents.size() - 1);
                out.setCount(entry.getAmount());
                if (!simulate) {
                    this.count -= out.getCount();
                    if (this.contents.size() > 0) {
                        this.lastStack = this.contents.get(this.contents.size() - 1).initializeStack();
                    } else {
                        // Bag emptied; also reset the plant type.
                        this.firstStack = ItemStack.EMPTY;
                        this.lastStack = ItemStack.EMPTY;
                        this.plant = NoPlant.getInstance();
                    }
                }
            } else {
                out.setCount(amount);
                if (!simulate) {
                    this.contents.get(this.contents.size() - 1).extract(amount);
                    this.count -= out.getCount();
                }
            }
            return out;
        }

        @Override
        public int getCapacity() {
            return AgriCraft.instance.getConfig().seedBagCapacity();
        }

        @Override
        public int getSlotLimit(int slot) {
            return 64;
        }

        @Override
        public boolean isItemValid(int slot, @Nonnull ItemStack stack) {
            if (stack.getItem() instanceof ItemDynamicAgriSeed) {
                ItemDynamicAgriSeed seed = (ItemDynamicAgriSeed) stack.getItem();
                IAgriPlant stackPlant = seed.getPlant(stack);
                if (stackPlant.isPlant()) {
                    // Accept only seeds matching the bag's plant (or any, if unset).
                    return (!this.getPlant().isPlant()) || this.getPlant() == stackPlant;
                }
            }
            return false;
        }

        @Override
        public void readFromNBT(CompoundNBT tag) {
            this.contents.clear();
            this.count = 0;
            this.firstStack = ItemStack.EMPTY;
            this.lastStack = ItemStack.EMPTY;
            this.plant = tag.contains(AgriNBT.PLANT)
                    ? AgriApi.getPlantRegistry().get(tag.getString(AgriNBT.PLANT)).orElse(NoPlant.getInstance())
                    : NoPlant.getInstance();
            if (this.getPlant().isPlant()) {
                if (tag.contains(AgriNBT.ENTRIES)) {
                    // 10 is the NBT type id for compound tags.
                    ListNBT entryTags = tag.getList(AgriNBT.ENTRIES, 10);
                    entryTags.stream().filter(entryTag -> entryTag instanceof CompoundNBT)
                            .map(entryTag -> (CompoundNBT) entryTag)
                            .forEach(entryTag -> Entry.readFromTag(entryTag).ifPresent(entry -> {
                                this.contents.add(entry);
                                this.count += entry.getAmount();
                            }));
                    if (this.count > 0) {
                        this.firstStack = this.contents.get(0).initializeStack();
                        this.lastStack = this.contents.get(this.contents.size() - 1).initializeStack();
                    }
                } else {
                    // Plant without entries is inconsistent; reset.
                    this.plant = NoPlant.getInstance();
                }
            }
            this.setSorterIndex(tag.contains(AgriNBT.KEY) ? tag.getInt(AgriNBT.KEY) : 0);
        }

        @Override
        public CompoundNBT writeToNBT() {
            CompoundNBT tag = new CompoundNBT();
            // Write plant
            tag.putString(AgriNBT.PLANT, this.getPlant().getId());
            // Write contents
            ListNBT entryTags = new ListNBT();
            this.contents.forEach(entry -> entryTags.add(entry.writeToTag()));
            tag.put(AgriNBT.ENTRIES, entryTags);
            // Write sorter
            tag.putInt(AgriNBT.KEY, this.getSorterIndex());
            // Return the tag
            return tag;
        }

        /** One genome plus how many seeds of it the bag holds. */
        private static class Entry {
            private final IAgriGenome genome;
            private int amount;

            protected Entry(IAgriGenome genome, int amount) {
                this.genome = genome;
                this.amount = amount;
            }

            public int getAmount() {
                return this.amount;
            }

            public IAgriGenome getGenome() {
                return this.genome;
            }

            public ItemStack initializeStack() {
                return this.genome.toSeedStack();
            }

            public void add(int amount) {
                this.amount += amount;
            }

            public void extract(int amount) {
                this.amount -= amount;
                this.amount = Math.max(this.amount, 0);
            }

            public boolean matches(IAgriGenome genome) {
                return this.getGenome().equals(genome);
            }

            public CompoundNBT writeToTag() {
                CompoundNBT tag = new CompoundNBT();
                CompoundNBT genomeTag = new CompoundNBT();
                this.getGenome().writeToNBT(genomeTag);
                tag.put(AgriNBT.GENOME, genomeTag);
                // NOTE(review): the seed count is stored under the ENTRIES key;
                // looks intentional (round-trips with readFromTag) but confirm.
                tag.putInt(AgriNBT.ENTRIES, this.getAmount());
                return tag;
            }

            public static Optional<Entry> readFromTag(CompoundNBT tag) {
                if (!tag.contains(AgriNBT.GENOME) || !tag.contains(AgriNBT.ENTRIES)) {
                    return Optional.empty();
                }
                IAgriGenome genome = AgriApi.getAgriGenomeBuilder(NoPlant.getInstance()).build();
                if (!genome.readFromNBT(tag.getCompound(AgriNBT.GENOME))) {
                    return Optional.empty();
                }
                int count = tag.getInt(AgriNBT.ENTRIES);
                return Optional.of(new Entry(genome, count));
            }
        }
    }
}
/* * Copyright (c) 2007 BUSINESS OBJECTS SOFTWARE LIMITED * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * * Neither the name of Business Objects nor the names of its contributors * may be used to endorse or promote products derived from this software * without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. */ /* * CALIDEAboutBox.java * Creation date: ? * By: ? 
*/
package org.openquark.gems.client.caleditor.editorapp;

import java.awt.BorderLayout;
import java.awt.Dialog;
import java.awt.FlowLayout;
import java.awt.Frame;
import java.awt.GridLayout;
import java.awt.Insets;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;

import javax.swing.ImageIcon;
import javax.swing.JButton;
import javax.swing.JDialog;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.WindowConstants;

/**
 * The "About" box of the CAL IDE: a small dialog showing the application
 * name, version, copyright notice and user name, with an OK button that
 * disposes the dialog.
 * <p>
 * This type was generated by a SmartGuide; the lazy getter-per-component
 * structure and "ivj" naming are retained.
 */
public class CALIDEAboutBox extends JDialog {

    private static final long serialVersionUID = 4338477612467187967L;

    // UI components, created lazily by their get*() accessors below.
    private JLabel ivjAppName = null;
    private JPanel ivjButtonPane = null;
    private JLabel ivjCopyright = null;
    private final IvjEventHandler ivjEventHandler = new IvjEventHandler();
    private JLabel ivjIconLabel = null;
    private JPanel ivjIconPane = null;
    private JPanel ivjJDialogContentPane = null;
    private JButton ivjOkButton = null;
    private JLabel ivjSpacer = null;
    private JPanel ivjTextPane = null;
    private JLabel ivjUserName = null;
    private JLabel ivjVersion = null;

    /**
     * Routes the OK button's action event to connEtoM1(), which disposes
     * the dialog.
     */
    class IvjEventHandler implements ActionListener {
        public void actionPerformed(ActionEvent e) {
            if (e.getSource() == CALIDEAboutBox.this.getOkButton()) {
                connEtoM1();
            }
        }
    }

    /**
     * Creates an unowned about box.
     */
    public CALIDEAboutBox() {
        super();
        initialize();
    }

    /**
     * Creates an about box owned by the given dialog.
     * @param owner Dialog
     */
    public CALIDEAboutBox(Dialog owner) {
        super(owner);
        // BUG FIX: the generated constructor never built the dialog's
        // contents; initialize() is now called from every constructor.
        initialize();
    }

    /**
     * Creates an about box owned by the given dialog, with a title.
     * @param owner Dialog
     * @param title String
     */
    public CALIDEAboutBox(Dialog owner, String title) {
        super(owner, title);
        initialize();
    }

    /**
     * Creates an about box owned by the given dialog, with a title and
     * modality.
     * @param owner Dialog
     * @param title String
     * @param modal boolean
     */
    public CALIDEAboutBox(Dialog owner, String title, boolean modal) {
        super(owner, title, modal);
        initialize();
    }

    /**
     * Creates an about box owned by the given dialog, with modality.
     * @param owner Dialog
     * @param modal boolean
     */
    public CALIDEAboutBox(Dialog owner, boolean modal) {
        super(owner, modal);
        initialize();
    }

    /**
     * Creates an about box owned by the given frame.
     * @param owner Frame
     */
    public CALIDEAboutBox(Frame owner) {
        super(owner);
        initialize();
    }

    /**
     * Creates an about box owned by the given frame, with a title.
     * @param owner Frame
     * @param title String
     */
    public CALIDEAboutBox(Frame owner, String title) {
        super(owner, title);
        initialize();
    }

    /**
     * Creates an about box owned by the given frame, with a title and
     * modality.
     * @param owner Frame
     * @param title String
     * @param modal boolean
     */
    public CALIDEAboutBox(Frame owner, String title, boolean modal) {
        super(owner, title, modal);
        initialize();
    }

    /**
     * Creates an about box owned by the given frame, with modality.
     * @param owner Frame
     * @param modal boolean
     */
    public CALIDEAboutBox(Frame owner, boolean modal) {
        super(owner, modal);
        initialize();
    }

    /**
     * connEtoM1: (OkButton.action.actionPerformed(ActionEvent) --> CALIDEAboutBox.dispose()V)
     */
    private void connEtoM1() {
        try {
            this.dispose();
        } catch (Throwable ivjExc) {
            handleException(ivjExc);
        }
    }

    /**
     * Return the AppName property value, creating it on first use.
     * @return JLabel
     */
    private JLabel getAppName() {
        if (ivjAppName == null) {
            try {
                ivjAppName = new JLabel();
                ivjAppName.setName("AppName"); //$NON-NLS-1$
                ivjAppName.setText(CALIDEMessages.getString("CALIDEAboutBox.AppName")); //$NON-NLS-1$
            } catch (Throwable ivjExc) {
                handleException(ivjExc);
            }
        }
        return ivjAppName;
    }

    /**
     * Return the ButtonPane property value, creating it on first use.
     * @return JPanel
     */
    private JPanel getButtonPane() {
        if (ivjButtonPane == null) {
            try {
                ivjButtonPane = new JPanel();
                ivjButtonPane.setName("ButtonPane"); //$NON-NLS-1$
                ivjButtonPane.setLayout(new FlowLayout());
                getButtonPane().add(getOkButton(), getOkButton().getName());
            } catch (Throwable ivjExc) {
                handleException(ivjExc);
            }
        }
        return ivjButtonPane;
    }

    /**
     * Return the Copyright property value, creating it on first use.
     * @return JLabel
     */
    private JLabel getCopyright() {
        if (ivjCopyright == null) {
            try {
                ivjCopyright = new JLabel();
                ivjCopyright.setName("Copyright"); //$NON-NLS-1$
                ivjCopyright.setText(CALIDEMessages.getString("CALIDEAboutBox.Copyright_Notice")); //$NON-NLS-1$
            } catch (Throwable ivjExc) {
                handleException(ivjExc);
            }
        }
        return ivjCopyright;
    }

    /**
     * Return the IconLabel property value, creating it on first use.
     * @return JLabel
     */
    private JLabel getIconLabel() {
        if (ivjIconLabel == null) {
            try {
                ivjIconLabel = new JLabel();
                ivjIconLabel.setName("IconLabel"); //$NON-NLS-1$
                ivjIconLabel.setIcon(new ImageIcon(getClass().getResource("/Resources/gemcutter_32.gif"))); //$NON-NLS-1$
                ivjIconLabel.setText(""); //$NON-NLS-1$
            } catch (Throwable ivjExc) {
                handleException(ivjExc);
            }
        }
        return ivjIconLabel;
    }

    /**
     * Return the IconPane property value, creating it on first use.
     * @return JPanel
     */
    private JPanel getIconPane() {
        if (ivjIconPane == null) {
            try {
                ivjIconPane = new JPanel();
                ivjIconPane.setName("IconPane"); //$NON-NLS-1$
                ivjIconPane.setLayout(new FlowLayout());
                getIconPane().add(getIconLabel(), getIconLabel().getName());
            } catch (Throwable ivjExc) {
                handleException(ivjExc);
            }
        }
        return ivjIconPane;
    }

    /**
     * Return the JDialogContentPane property value, creating it on first use.
     * @return JPanel
     */
    private JPanel getJDialogContentPane() {
        if (ivjJDialogContentPane == null) {
            try {
                ivjJDialogContentPane = new JPanel();
                ivjJDialogContentPane.setName("JDialogContentPane"); //$NON-NLS-1$
                ivjJDialogContentPane.setLayout(new BorderLayout());
                getJDialogContentPane().add(getButtonPane(), "South"); //$NON-NLS-1$
                getJDialogContentPane().add(getTextPane(), "Center"); //$NON-NLS-1$
                getJDialogContentPane().add(getIconPane(), "West"); //$NON-NLS-1$
            } catch (Throwable ivjExc) {
                handleException(ivjExc);
            }
        }
        return ivjJDialogContentPane;
    }

    /**
     * Return the OkButton property value, creating it on first use.
     * @return JButton
     */
    private JButton getOkButton() {
        if (ivjOkButton == null) {
            try {
                ivjOkButton = new JButton();
                ivjOkButton.setName("OkButton"); //$NON-NLS-1$
                ivjOkButton.setText("OK"); //$NON-NLS-1$
            } catch (Throwable ivjExc) {
                handleException(ivjExc);
            }
        }
        return ivjOkButton;
    }

    /**
     * Return the Spacer property value, creating it on first use.
     * @return JLabel
     */
    private JLabel getSpacer() {
        if (ivjSpacer == null) {
            try {
                ivjSpacer = new JLabel();
                ivjSpacer.setName("Spacer"); //$NON-NLS-1$
                ivjSpacer.setText(""); //$NON-NLS-1$
            } catch (Throwable ivjExc) {
                handleException(ivjExc);
            }
        }
        return ivjSpacer;
    }

    /**
     * Return the TextPane property value, creating it on first use.
     * @return JPanel
     */
    private JPanel getTextPane() {
        if (ivjTextPane == null) {
            try {
                ivjTextPane = new JPanel();
                ivjTextPane.setName("TextPane"); //$NON-NLS-1$
                ivjTextPane.setLayout(getTextPaneGridLayout());
                getTextPane().add(getAppName(), getAppName().getName());
                getTextPane().add(getVersion(), getVersion().getName());
                getTextPane().add(getSpacer(), getSpacer().getName());
                getTextPane().add(getCopyright(), getCopyright().getName());
                getTextPane().add(getUserName(), getUserName().getName());
            } catch (Throwable ivjExc) {
                handleException(ivjExc);
            }
        }
        return ivjTextPane;
    }

    /**
     * Return the TextPaneGridLayout property value (five rows, one column).
     * @return GridLayout
     */
    private GridLayout getTextPaneGridLayout() {
        GridLayout ivjTextPaneGridLayout = null;
        try {
            /* Create part */
            ivjTextPaneGridLayout = new GridLayout(5, 1);
        } catch (Throwable ivjExc) {
            handleException(ivjExc);
        }
        return ivjTextPaneGridLayout;
    }

    /**
     * Return the UserName property value, creating it on first use.
     * @return JLabel
     */
    private JLabel getUserName() {
        if (ivjUserName == null) {
            try {
                ivjUserName = new JLabel();
                ivjUserName.setName("UserName"); //$NON-NLS-1$
                ivjUserName.setText(CALIDEMessages.getString("CALIDEAboutBox.About_Username")); //$NON-NLS-1$
            } catch (Throwable ivjExc) {
                handleException(ivjExc);
            }
        }
        return ivjUserName;
    }

    /**
     * Return the Version property value, creating it on first use.
     * @return JLabel
     */
    private JLabel getVersion() {
        if (ivjVersion == null) {
            try {
                ivjVersion = new JLabel();
                ivjVersion.setName("Version"); //$NON-NLS-1$
                ivjVersion.setText(CALIDEMessages.getString("CALIDEAboutBox.Version_Number")); //$NON-NLS-1$
            } catch (Throwable ivjExc) {
                handleException(ivjExc);
            }
        }
        return ivjVersion;
    }

    /**
     * Called whenever the part throws an exception; prints the uncaught
     * exception to stdout.
     * @param exception Throwable
     */
    private void handleException(Throwable exception) {
        System.out.println("--------- UNCAUGHT EXCEPTION ---------"); //$NON-NLS-1$
        exception.printStackTrace(System.out);
    }

    /**
     * Initializes connections: wires the OK button to the event handler.
     * @exception Exception The exception description.
     */
    private void initConnections() throws Exception {
        getOkButton().addActionListener(ivjEventHandler);
    }

    /**
     * Initialize the class: builds the content pane, sets size/title and
     * wires the event handlers. Called from every constructor.
     */
    private void initialize() {
        try {
            setName("CALIDEAboutBox"); //$NON-NLS-1$
            setDefaultCloseOperation(WindowConstants.DISPOSE_ON_CLOSE);
            setSize(330, 160);
            // Only apply the default title when the caller did not supply
            // one through an (owner, title[, modal]) constructor.
            if (getTitle() == null || getTitle().length() == 0) {
                setTitle("CALIDEAboutBox"); //$NON-NLS-1$
            }
            setContentPane(getJDialogContentPane());
            initConnections();
        } catch (Throwable ivjExc) {
            handleException(ivjExc);
        }
    }

    /**
     * main entrypoint - starts the part when it is run as an application
     * @param args String[]
     */
    public static void main(String[] args) {
        try {
            CALIDEAboutBox aCALIDEAboutBox;
            aCALIDEAboutBox = new CALIDEAboutBox();
            aCALIDEAboutBox.setModal(true);
            aCALIDEAboutBox.addWindowListener(new WindowAdapter() {
                @Override
                public void windowClosing(WindowEvent e) {
                    System.exit(0);
                }
            });
            aCALIDEAboutBox.setVisible(true);
            // Enlarge by the window insets so the 330x160 content area is
            // fully visible, then show again.
            Insets insets = aCALIDEAboutBox.getInsets();
            aCALIDEAboutBox.setSize(aCALIDEAboutBox.getWidth() + insets.left + insets.right, aCALIDEAboutBox.getHeight() + insets.top + insets.bottom);
            aCALIDEAboutBox.setVisible(true);
        } catch (Throwable exception) {
            System.err.println("Exception occurred in main() of JDialog"); //$NON-NLS-1$
            exception.printStackTrace(System.out);
        }
    }
}
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.refactoring.makeStatic;

import com.intellij.java.refactoring.JavaRefactoringBundle;
import com.intellij.lang.findUsages.DescriptiveNameUtil;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.ComboBox;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.*;
import com.intellij.refactoring.HelpID;
import com.intellij.refactoring.RefactoringBundle;
import com.intellij.refactoring.util.ParameterTablePanel;
import com.intellij.refactoring.util.VariableData;
import com.intellij.ui.DocumentAdapter;
import com.intellij.usageView.UsageViewUtil;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.JBInsets;
import com.intellij.util.ui.JBUI;
import org.jetbrains.annotations.NotNull;

import javax.swing.*;
import javax.swing.event.DocumentEvent;
import java.awt.*;
import java.awt.event.*;
import java.util.ArrayList;

/**
 * Options dialog for the "Make Static" refactoring when the member's
 * internal usages have to be converted into parameters. Lets the user
 * choose whether to pass the enclosing instance as a parameter (and under
 * which name), which used fields to turn into parameters, and - for
 * methods - whether to generate a delegating overload.
 */
public class MakeParameterizedStaticDialog extends AbstractMakeStaticDialog {
  // Candidate names for the enclosing-instance parameter; the first entry
  // is the default selection.
  private final String[] myNameSuggestions;

  private final JCheckBox myMakeClassParameter = new JCheckBox();
  // Either a JTextField (single suggestion) or an editable ComboBox
  // (multiple suggestions); see createCenterPanel().
  private JComponent myClassParameterNameInputField;

  private final JCheckBox myMakeFieldParameters = new JCheckBox();
  private ParameterTablePanel myParameterPanel;
  private VariableData[] myVariableData;
  // True when the refactoring should default to passing the outer-class
  // instance rather than individual fields.
  private final boolean myPreferOuterClassParameter;
  private JCheckBox myGenerateDelegateCb;

  /**
   * @param project        current project
   * @param member         the method or class being made static
   * @param nameSuggestions non-empty list of suggested names for the
   *                        enclosing-instance parameter
   * @param internalUsages usages of instance state inside the member
   */
  public MakeParameterizedStaticDialog(Project project,
                                       PsiTypeParameterListOwner member,
                                       String[] nameSuggestions,
                                       InternalUsageInfo[] internalUsages) {
    super(project, member);
    myNameSuggestions = nameSuggestions;

    String type = UsageViewUtil.getType(myMember);
    setTitle(JavaRefactoringBundle.message("make.0.static", StringUtil.capitalize(type)));
    // Prefer the outer-class parameter when there are non-field usages or
    // any used field is non-final (a field parameter could not replace it).
    myPreferOuterClassParameter = buildVariableData(internalUsages) ||
                                  ContainerUtil.exists(myVariableData, data -> !data.variable.hasModifierProperty(PsiModifier.FINAL));
    init();
  }

  /**
   * Fills {@link #myVariableData} from the internal usages.
   *
   * @return true if there were usages that are not plain field reads
   *         (as reported by MakeStaticUtil.collectVariableData)
   */
  private boolean buildVariableData(InternalUsageInfo[] internalUsages) {
    ArrayList<VariableData> variableDatum = new ArrayList<>();
    boolean nonFieldUsages = MakeStaticUtil.collectVariableData(myMember, internalUsages, variableDatum);

    myVariableData = variableDatum.toArray(new VariableData[0]);
    return nonFieldUsages;
  }

  @Override
  public boolean isReplaceUsages() {
    return true;
  }

  /** @return true when the "add object as parameter" checkbox is selected */
  @Override
  public boolean isMakeClassParameter() {
    if (myMakeClassParameter != null)
      return myMakeClassParameter.isSelected();
    else
      return false;
  }

  /**
   * @return the user-entered name for the enclosing-instance parameter,
   *         or null when the class parameter option is not selected
   */
  @Override
  public String getClassParameterName() {
    if (isMakeClassParameter()) {
      if (myClassParameterNameInputField instanceof JTextField) {
        return ((JTextField)myClassParameterNameInputField).getText();
      }
      else if(myClassParameterNameInputField instanceof JComboBox) {
        return (String)(((JComboBox<?>)myClassParameterNameInputField).getEditor().getItem());
      }
      else
        return null;
    }
    else {
      return null;
    }
  }

  /**
   *
   * @return null if field parameters are not selected
   */
  @Override
  public VariableData[] getVariableData() {
    if(myMakeFieldParameters != null && myMakeFieldParameters.isSelected()) {
      return myVariableData;
    }
    else {
      return null;
    }
  }

  @Override
  protected String getHelpId() {
    return HelpID.MAKE_METHOD_STATIC;
  }

  /**
   * Builds the dialog body: description label, class-parameter checkbox
   * with its name input, optional field-parameter table, and (for methods)
   * the "generate delegate" checkbox.
   */
  @Override
  protected JComponent createCenterPanel() {
    GridBagConstraints gbConstraints = new GridBagConstraints();

    JPanel panel = new JPanel(new GridBagLayout());

    gbConstraints.insets = JBInsets.create(4, 8);
    gbConstraints.weighty = 0;
    gbConstraints.weightx = 0;
    gbConstraints.gridx = 0;
    gbConstraints.gridy = GridBagConstraints.RELATIVE;
    gbConstraints.gridwidth = GridBagConstraints.REMAINDER;
    gbConstraints.fill = GridBagConstraints.NONE;
    gbConstraints.anchor = GridBagConstraints.WEST;
    panel.add(createDescriptionLabel(), gbConstraints);

    gbConstraints.weighty = 0;
    gbConstraints.weightx = 0;
    gbConstraints.gridwidth = GridBagConstraints.REMAINDER;
    gbConstraints.fill = GridBagConstraints.NONE;
    gbConstraints.anchor = GridBagConstraints.WEST;
    // Wording differs for a method vs. a class (whose constructors change).
    String text = myMember instanceof PsiMethod
                  ? RefactoringBundle.message("add.object.as.a.parameter.with.name")
                  : JavaRefactoringBundle.message("add.object.as.a.parameter.to.constructors.with.name");
    myMakeClassParameter.setText(text);
    panel.add(myMakeClassParameter, gbConstraints);
    myMakeClassParameter.setSelected(myPreferOuterClassParameter);

    gbConstraints.insets = JBUI.insets(0, 8, 4, 8);
    gbConstraints.weighty = 0;
    gbConstraints.weightx = 1;
    gbConstraints.gridwidth = 2;
    gbConstraints.fill = GridBagConstraints.HORIZONTAL;
    gbConstraints.anchor = GridBagConstraints.NORTHWEST;
    if(myNameSuggestions.length > 1) {
      myClassParameterNameInputField = createComboBoxForName();
    }
    else {
      // Single suggestion: plain text field, revalidated on every edit.
      JTextField textField = new JTextField();
      textField.setText(myNameSuggestions[0]);
      textField.getDocument().addDocumentListener(new DocumentAdapter() {
        @Override
        public void textChanged(@NotNull DocumentEvent event) {
          updateControls();
        }
      });
      myClassParameterNameInputField = textField;
    }
    panel.add(myClassParameterNameInputField, gbConstraints);

    gbConstraints.gridwidth = GridBagConstraints.REMAINDER;
    if(myVariableData.length > 0) {
      gbConstraints.insets = JBInsets.create(4, 8);
      gbConstraints.weighty = 0;
      gbConstraints.weightx = 0;
      gbConstraints.gridheight = 1;
      gbConstraints.fill = GridBagConstraints.NONE;
      gbConstraints.anchor = GridBagConstraints.WEST;
      text = myMember instanceof PsiMethod
             ? JavaRefactoringBundle.message("add.parameters.for.fields")
             : JavaRefactoringBundle.message("add.parameters.for.fields.to.constructors");
      myMakeFieldParameters.setText(text);
      panel.add(myMakeFieldParameters, gbConstraints);
      myMakeFieldParameters.setSelected(!myPreferOuterClassParameter);

      myParameterPanel = new ParameterTablePanel(myProject, myVariableData, myMember) {
        @Override
        protected void updateSignature() {
        }

        @Override
        protected void doEnterAction() {
          clickDefaultButton();
        }

        @Override
        protected void doCancelAction() {
        }
      };

      gbConstraints.insets = JBUI.insets(0, 8, 4, 8);
      gbConstraints.gridwidth = 2;
      gbConstraints.fill = GridBagConstraints.BOTH;
      gbConstraints.weighty = 1;
      panel.add(myParameterPanel, gbConstraints);
    }

    // Re-validate the OK button and enable/disable inputs whenever either
    // checkbox is toggled.
    ActionListener inputFieldValidator = new ActionListener() {
      @Override
      public void actionPerformed(ActionEvent e) {
        updateControls();
      }
    };
    myMakeClassParameter.addActionListener(inputFieldValidator);
    myMakeFieldParameters.addActionListener(inputFieldValidator);

    if (myMember instanceof PsiMethod) {
      myGenerateDelegateCb = new JCheckBox(RefactoringBundle.message("delegation.panel.delegate.via.overloading.method"));
      panel.add(myGenerateDelegateCb, gbConstraints);
    }

    updateControls();

    return panel;
  }

  @Override
  protected boolean isGenerateDelegate() {
    return myGenerateDelegateCb != null && myGenerateDelegateCb.isSelected();
  }

  /**
   * Warns if a method already has a parameter with the chosen name and
   * lets the user decide whether to proceed anyway.
   *
   * @return true when the refactoring may proceed
   */
  @Override
  protected boolean validateData() {
    int ret = Messages.YES;
    if (isMakeClassParameter()) {
      final PsiMethod methodWithParameter = checkParameterDoesNotExist();
      if (methodWithParameter != null) {
        String who = methodWithParameter == myMember
                     ? JavaRefactoringBundle.message("this.method")
                     : DescriptiveNameUtil.getDescriptiveName(methodWithParameter);
        String message = JavaRefactoringBundle.message("0.already.has.parameter.named.1.use.this.name.anyway", who, getClassParameterName());
        ret = Messages.showYesNoDialog(myProject, message, RefactoringBundle.message("warning.title"), Messages.getWarningIcon());
        myClassParameterNameInputField.requestFocusInWindow();
      }
    }
    return ret == Messages.YES;
  }

  /**
   * @return the first affected method (the member itself, or one of the
   *         class's constructors) that already declares a parameter with
   *         the chosen name, or null when there is no clash
   */
  private PsiMethod checkParameterDoesNotExist() {
    String parameterName = getClassParameterName();
    if(parameterName == null) return null;
    PsiMethod[] methods = myMember instanceof PsiMethod ? new PsiMethod[]{(PsiMethod)myMember} : ((PsiClass)myMember).getConstructors();
    for (PsiMethod method : methods) {
      PsiParameterList parameterList = method.getParameterList();
      PsiParameter[] parameters = parameterList.getParameters();
      for (PsiParameter parameter : parameters) {
        if (parameterName.equals(parameter.getName())) return method;
      }
    }
    return null;
  }

  /**
   * Enables the OK button only while the chosen parameter name is a valid
   * Java identifier (or the class-parameter option is off), and keeps the
   * dependent inputs' enabled state in sync with the checkboxes.
   */
  private void updateControls() {
    if (isMakeClassParameter()) {
      String classParameterName = getClassParameterName();
      if (classParameterName == null) {
        setOKActionEnabled(false);
      }
      else {
        setOKActionEnabled(PsiNameHelper.getInstance(myProject).isIdentifier(classParameterName.trim()));
      }
    }
    else
      setOKActionEnabled(true);

    if(myClassParameterNameInputField != null) {
      myClassParameterNameInputField.setEnabled(isMakeClassParameter());
    }

    if(myParameterPanel != null) {
      myParameterPanel.setEnabled(myMakeFieldParameters.isSelected());
    }
  }

  /**
   * Builds the editable name combo box used when there are several name
   * suggestions; every selection/key event re-validates the OK button.
   */
  private JComboBox createComboBoxForName() {
    final ComboBox combobox = new ComboBox(myNameSuggestions);

    combobox.setEditable(true);
    combobox.setSelectedIndex(0);
    combobox.setMaximumRowCount(8);

    combobox.addItemListener(
      new ItemListener() {
        @Override
        public void itemStateChanged(ItemEvent e) {
          updateControls();
        }
      }
    );
    combobox.getEditor().getEditorComponent().addKeyListener(
      new KeyAdapter() {
        @Override
        public void keyPressed(KeyEvent e) {
          updateControls();
        }

        @Override
        public void keyReleased(KeyEvent e) {
          updateControls();
        }

        @Override
        public void keyTyped(KeyEvent e) {
          updateControls();
        }
      }
    );
    return combobox;
  }
}
package com.mindoo.domino.jna.test;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import org.junit.Assert;
import org.junit.Test;

import com.mindoo.domino.jna.NotesIDTable;
import com.mindoo.domino.jna.NotesTimeDate;
import com.mindoo.domino.jna.NotesIDTable.ComparisonResult;
import com.mindoo.domino.jna.NotesIDTable.IEnumerateCallback;

import lotus.domino.Session;

/**
 * Tests cases for ID tables: insertion (single vs. bulk), table
 * comparison, enumeration and replacement.
 *
 * @author Karsten Lehmann
 */
public class TestIdTable extends BaseJNATestClass {

	/**
	 * ID table insertion tests: verifies that single-note insertion and
	 * bulk insertion produce identical tables, then checks intersect and
	 * removeTable on the two results.
	 */
	@Test
	public void testIDTable_insertMethods() {
		runWithSession(new IDominoCallable<Object>() {

			@Override
			public Object call(Session session) throws Exception {
				System.out.println("Starting id table test");

				NotesIDTable tableSingleInsertion = new NotesIDTable();

				final int numIdsToAdd = 40000;

				// Build a randomized but strictly ascending set of note IDs
				// (note IDs are multiples of 4); exactly numIdsToAdd entries.
				List<Integer> allAddedIds = new ArrayList<Integer>();
				{
					int currNoteId = 4;
					while (allAddedIds.size()<numIdsToAdd) {
						//skip some entries randomly in the test dataset
						if ((Math.random()*10) > 7) {
							allAddedIds.add(currNoteId);
						}
						currNoteId+=4;
					}
				}

				// Insert one ID at a time and time it.
				long t0=System.currentTimeMillis();
				for (int i=0; i<allAddedIds.size(); i++) {
					tableSingleInsertion.addNote(allAddedIds.get(i));
				}
				long t1=System.currentTimeMillis();
				System.out.println("Single insertion added "+tableSingleInsertion.getCount()+" entries after "+(t1-t0)+"ms");

				Assert.assertEquals("Single insertion added all ids", numIdsToAdd, tableSingleInsertion.getCount());

				for (int i=0; i<allAddedIds.size(); i++) {
					Assert.assertTrue("Note ID "+allAddedIds.get(i)+" has been added to the table", tableSingleInsertion.contains(allAddedIds.get(i)));
				}

				// Insert the same IDs with one bulk call and time it.
				t0 = System.currentTimeMillis();
				NotesIDTable tableBulkInsertion = new NotesIDTable();
				//best case: ID table is empty; then the bulk function can use an optimized call,
				//because it can skip the test where to insert the IDs (they can just be appended)
				tableBulkInsertion.addNotes(allAddedIds);
				t1 = System.currentTimeMillis();
				System.out.println("Bulk insertion added "+tableBulkInsertion.getCount()+" entries after "+(t1-t0)+"ms");

				Assert.assertEquals("Bulk insertion added all ids", numIdsToAdd, tableBulkInsertion.getCount());

				for (int i=0; i<allAddedIds.size(); i++) {
					Assert.assertTrue("Note ID "+allAddedIds.get(i)+" has been added to the table", tableBulkInsertion.contains(allAddedIds.get(i)));
				}

				// Since both tables hold the same IDs, the intersection must
				// be complete and removing one from the other must empty it.
				NotesIDTable intersectionIDTable = tableSingleInsertion.intersect(tableBulkInsertion);
				Assert.assertEquals("Insersection ID table has the right entry count", numIdsToAdd, intersectionIDTable.getCount());

				tableSingleInsertion.removeTable(tableBulkInsertion);
				Assert.assertTrue("Both takes contain the same IDs", tableSingleInsertion.getCount()==0);

				System.out.println("Done with id table test");
				return null;
			}
		});
	}

	/**
	 * ID table comparison tests: findDifferences must report which IDs
	 * would be added, which deleted, and which are shared.
	 */
	@Test
	public void testIDTable_tableComparison() {
		runWithSession(new IDominoCallable<Object>() {

			@Override
			public Object call(Session session) throws Exception {
				System.out.println("Starting id table comparison");

				NotesIDTable table1 = new NotesIDTable(new int[] {4,8,16,48});
				NotesIDTable table2 = new NotesIDTable(new int[] {8,12,16,48});

				// Differences of table1 against table2:
				// adds = {12} (only in table2), deletes = {4} (only in
				// table1), same = {8,16,48}.
				ComparisonResult compResult = table1.findDifferences(table2);
				int[] idsAdds = compResult.getTableAdds().toArray();
				int[] idsDeletes = compResult.getTableDeletes().toArray();
				int[] idsSame = compResult.getTableSame().toArray();

				Assert.assertArrayEquals("Adds are correct", new int[] {12}, idsAdds);
				Assert.assertArrayEquals("Deletes are correct", new int[] {4}, idsDeletes);
				Assert.assertArrayEquals("Same IDs are correct", new int[] {8, 16, 48}, idsSame);

				System.out.println("Done with id table comparison");
				return null;
			}
		});
	}

	/**
	 * ID Table enumeration tests: first/last ID accessors, array/list
	 * conversion consistency, and forward vs. backward enumeration.
	 */
	@Test
	public void testIDTable_enumeration() {
		runWithSession(new IDominoCallable<Object>() {

			@Override
			public Object call(Session session) throws Exception {
				System.out.println("Starting id table comparison");

				NotesIDTable table = new NotesIDTable(new int[] {4,8,12,16,48});
				Assert.assertEquals("NotesIDTable.getFirstId() is correct", 4, table.getFirstId());
				Assert.assertEquals("NotesIDTable.getLastId() is correct", 48, table.getLastId());

				// toArray() and toList() must agree element by element.
				int[] idArr = table.toArray();
				List<Integer> idList = table.toList();

				Assert.assertEquals("ID lists have same size", idArr.length, idList.size());
				for (int i=0; i<idArr.length; i++) {
					Assert.assertEquals("List element #"+i+" is correct", idArr[i], idList.get(i).intValue());
				}

				// Enumerating forward must visit the same IDs as enumerating
				// backward (after reversing the backward result).
				final List<Integer> enumResultForward = new ArrayList<Integer>();
				final List<Integer> enumResultBackward = new ArrayList<Integer>();

				table.enumerate(new IEnumerateCallback() {

					@Override
					public Action noteVisited(int noteId) {
						enumResultForward.add(noteId);
						return Action.Continue;
					}
				});

				table.enumerateBackwards(new IEnumerateCallback() {

					@Override
					public Action noteVisited(int noteId) {
						enumResultBackward.add(noteId);
						return Action.Continue;
					}
				});

				Collections.reverse(enumResultBackward);

				Assert.assertArrayEquals("enumerate and enumerateBackwards are correct", enumResultForward.toArray(), enumResultBackward.toArray());

				System.out.println("Done with id table comparison");
				return null;
			}
		});
	}

	/**
	 * ID table replacement: replaceWith must copy the other table's
	 * content, and the saveHeader flag decides whether the table's
	 * time header survives the replacement.
	 */
	@Test
	public void testIDTable_tableReplace() {
		runWithSession(new IDominoCallable<Object>() {

			@Override
			public Object call(Session session) throws Exception {
				System.out.println("Starting id table comparison");

				int[] idsTable1 = new int[] {4,8,16,48};
				int[] idsTable2 = new int[] {8,12,16,48};

				NotesIDTable table1 = new NotesIDTable(idsTable1);
				NotesIDTable table2 = new NotesIDTable(idsTable2);

				//set a time to check if it is preserved
				NotesTimeDate timeBefore = new NotesTimeDate();
				timeBefore.setNow();
				table1.setTime(timeBefore);

				// With saveHeader=true the content is replaced but the
				// header time must be kept.
				boolean saveHeader = true;

				int[] idsBefore = table1.toArray();
				Assert.assertArrayEquals("Table1 has the right content",idsTable1, idsBefore);

				table1.replaceWith(table2, saveHeader);

				int[] idsAfter = table1.toArray();
				Assert.assertArrayEquals("Table1 has the right content",idsTable2, idsAfter);

				NotesTimeDate timeAfter = table1.getTime();

				Assert.assertNotNull("Timedate has not been overwritten", timeAfter);
				Assert.assertArrayEquals("Timedate contains the original value", timeBefore.getInnards(), timeAfter.getInnards());

				// With saveHeader=false the header time is discarded.
				saveHeader = false;
				table1.replaceWith(table2, saveHeader);
				timeAfter = table1.getTime();
				Assert.assertNull("Time has been overwritten", timeAfter);

				System.out.println("Done with id table comparison");
				return null;
			}
		});
	}
}
/** * This class is generated by jOOQ */ package de.piratenpartei.berlin.ldadmin.dbaccess.generated.tables.records; /** * This class is generated by jOOQ. */ @javax.annotation.Generated(value = { "http://www.jooq.org", "3.4.4" }, comments = "This class is generated by jOOQ") @java.lang.SuppressWarnings({ "all", "unchecked", "rawtypes" }) public class CurrentDraftRecord extends org.jooq.impl.TableRecordImpl<de.piratenpartei.berlin.ldadmin.dbaccess.generated.tables.records.CurrentDraftRecord> implements org.jooq.Record7<java.lang.Integer, java.lang.Long, java.sql.Timestamp, java.lang.Integer, java.lang.String, java.lang.String, java.lang.Object>, de.piratenpartei.berlin.ldadmin.dbaccess.generated.tables.interfaces.ICurrentDraft { private static final long serialVersionUID = 2109071675; /** * Setter for <code>current_draft.initiative_id</code>. */ public void setInitiativeId(java.lang.Integer value) { setValue(0, value); } /** * Getter for <code>current_draft.initiative_id</code>. */ @Override public java.lang.Integer getInitiativeId() { return (java.lang.Integer) getValue(0); } /** * Setter for <code>current_draft.id</code>. */ public void setId(java.lang.Long value) { setValue(1, value); } /** * Getter for <code>current_draft.id</code>. */ @Override public java.lang.Long getId() { return (java.lang.Long) getValue(1); } /** * Setter for <code>current_draft.created</code>. */ public void setCreated(java.sql.Timestamp value) { setValue(2, value); } /** * Getter for <code>current_draft.created</code>. */ @Override public java.sql.Timestamp getCreated() { return (java.sql.Timestamp) getValue(2); } /** * Setter for <code>current_draft.author_id</code>. */ public void setAuthorId(java.lang.Integer value) { setValue(3, value); } /** * Getter for <code>current_draft.author_id</code>. */ @Override public java.lang.Integer getAuthorId() { return (java.lang.Integer) getValue(3); } /** * Setter for <code>current_draft.formatting_engine</code>. 
*/ public void setFormattingEngine(java.lang.String value) { setValue(4, value); } /** * Getter for <code>current_draft.formatting_engine</code>. */ @Override public java.lang.String getFormattingEngine() { return (java.lang.String) getValue(4); } /** * Setter for <code>current_draft.content</code>. */ public void setContent(java.lang.String value) { setValue(5, value); } /** * Getter for <code>current_draft.content</code>. */ @Override public java.lang.String getContent() { return (java.lang.String) getValue(5); } /** * Setter for <code>current_draft.text_search_data</code>. */ public void setTextSearchData(java.lang.Object value) { setValue(6, value); } /** * Getter for <code>current_draft.text_search_data</code>. */ @Override public java.lang.Object getTextSearchData() { return (java.lang.Object) getValue(6); } // ------------------------------------------------------------------------- // Record7 type implementation // ------------------------------------------------------------------------- /** * {@inheritDoc} */ @Override public org.jooq.Row7<java.lang.Integer, java.lang.Long, java.sql.Timestamp, java.lang.Integer, java.lang.String, java.lang.String, java.lang.Object> fieldsRow() { return (org.jooq.Row7) super.fieldsRow(); } /** * {@inheritDoc} */ @Override public org.jooq.Row7<java.lang.Integer, java.lang.Long, java.sql.Timestamp, java.lang.Integer, java.lang.String, java.lang.String, java.lang.Object> valuesRow() { return (org.jooq.Row7) super.valuesRow(); } /** * {@inheritDoc} */ @Override public org.jooq.Field<java.lang.Integer> field1() { return de.piratenpartei.berlin.ldadmin.dbaccess.generated.tables.CurrentDraft.CURRENT_DRAFT.INITIATIVE_ID; } /** * {@inheritDoc} */ @Override public org.jooq.Field<java.lang.Long> field2() { return de.piratenpartei.berlin.ldadmin.dbaccess.generated.tables.CurrentDraft.CURRENT_DRAFT.ID; } /** * {@inheritDoc} */ @Override public org.jooq.Field<java.sql.Timestamp> field3() { return 
de.piratenpartei.berlin.ldadmin.dbaccess.generated.tables.CurrentDraft.CURRENT_DRAFT.CREATED; } /** * {@inheritDoc} */ @Override public org.jooq.Field<java.lang.Integer> field4() { return de.piratenpartei.berlin.ldadmin.dbaccess.generated.tables.CurrentDraft.CURRENT_DRAFT.AUTHOR_ID; } /** * {@inheritDoc} */ @Override public org.jooq.Field<java.lang.String> field5() { return de.piratenpartei.berlin.ldadmin.dbaccess.generated.tables.CurrentDraft.CURRENT_DRAFT.FORMATTING_ENGINE; } /** * {@inheritDoc} */ @Override public org.jooq.Field<java.lang.String> field6() { return de.piratenpartei.berlin.ldadmin.dbaccess.generated.tables.CurrentDraft.CURRENT_DRAFT.CONTENT; } /** * {@inheritDoc} */ @Override public org.jooq.Field<java.lang.Object> field7() { return de.piratenpartei.berlin.ldadmin.dbaccess.generated.tables.CurrentDraft.CURRENT_DRAFT.TEXT_SEARCH_DATA; } /** * {@inheritDoc} */ @Override public java.lang.Integer value1() { return getInitiativeId(); } /** * {@inheritDoc} */ @Override public java.lang.Long value2() { return getId(); } /** * {@inheritDoc} */ @Override public java.sql.Timestamp value3() { return getCreated(); } /** * {@inheritDoc} */ @Override public java.lang.Integer value4() { return getAuthorId(); } /** * {@inheritDoc} */ @Override public java.lang.String value5() { return getFormattingEngine(); } /** * {@inheritDoc} */ @Override public java.lang.String value6() { return getContent(); } /** * {@inheritDoc} */ @Override public java.lang.Object value7() { return getTextSearchData(); } /** * {@inheritDoc} */ @Override public CurrentDraftRecord value1(java.lang.Integer value) { setInitiativeId(value); return this; } /** * {@inheritDoc} */ @Override public CurrentDraftRecord value2(java.lang.Long value) { setId(value); return this; } /** * {@inheritDoc} */ @Override public CurrentDraftRecord value3(java.sql.Timestamp value) { setCreated(value); return this; } /** * {@inheritDoc} */ @Override public CurrentDraftRecord value4(java.lang.Integer value) { 
setAuthorId(value); return this; } /** * {@inheritDoc} */ @Override public CurrentDraftRecord value5(java.lang.String value) { setFormattingEngine(value); return this; } /** * {@inheritDoc} */ @Override public CurrentDraftRecord value6(java.lang.String value) { setContent(value); return this; } /** * {@inheritDoc} */ @Override public CurrentDraftRecord value7(java.lang.Object value) { setTextSearchData(value); return this; } /** * {@inheritDoc} */ @Override public CurrentDraftRecord values(java.lang.Integer value1, java.lang.Long value2, java.sql.Timestamp value3, java.lang.Integer value4, java.lang.String value5, java.lang.String value6, java.lang.Object value7) { return this; } // ------------------------------------------------------------------------- // Constructors // ------------------------------------------------------------------------- /** * Create a detached CurrentDraftRecord */ public CurrentDraftRecord() { super(de.piratenpartei.berlin.ldadmin.dbaccess.generated.tables.CurrentDraft.CURRENT_DRAFT); } /** * Create a detached, initialised CurrentDraftRecord */ public CurrentDraftRecord(java.lang.Integer initiativeId, java.lang.Long id, java.sql.Timestamp created, java.lang.Integer authorId, java.lang.String formattingEngine, java.lang.String content, java.lang.Object textSearchData) { super(de.piratenpartei.berlin.ldadmin.dbaccess.generated.tables.CurrentDraft.CURRENT_DRAFT); setValue(0, initiativeId); setValue(1, id); setValue(2, created); setValue(3, authorId); setValue(4, formattingEngine); setValue(5, content); setValue(6, textSearchData); } }
/*
 * Copyright 2005 Joe Walker
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.directwebremoting.impl;

import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.StringTokenizer;
import java.util.Map.Entry;

import org.directwebremoting.extend.ConverterManager;
import org.directwebremoting.extend.Creator;
import org.directwebremoting.extend.CreatorManager;
import org.directwebremoting.extend.OverrideProperty;
import org.directwebremoting.extend.ParameterProperty;
import org.directwebremoting.extend.Property;
import org.directwebremoting.util.LocalUtil;
import org.directwebremoting.util.Loggers;

/**
 * A parser for type info in a dwr.xml signature.
 * @author Joe Walker [joe at getahead dot ltd dot uk]
 */
public class SignatureParser
{
    /**
     * Simple ctor
     * @param converterManager Having understood the extra type info we add it in here.
     * @param creatorManager If we can't find a class by Java name we can lookup by Javascript name
     */
    public SignatureParser(ConverterManager converterManager, CreatorManager creatorManager)
    {
        this.converterManager = converterManager;
        this.creatorManager = creatorManager;

        // "java.lang" is always implicitly imported, as in Java source.
        // NOTE(review): packageImports (and classImports, used below) are
        // fields declared later in this file.
        packageImports.add("java.lang");
    }

    /**
     * Parse some text and add it into the converter manager.
     * Comments are stripped, whitespace is normalized, and the text is then
     * processed as ';'-separated statements: "import ..." lines feed the
     * import tables, everything else is treated as a method declaration.
     * Any exception is caught and logged rather than propagated.
     * @param sigtext The text to parse
     */
    public void parse(String sigtext)
    {
        try
        {
            Loggers.STARTUP.debug("Parsing extra type info: ");

            // Remove both comment styles before tokenizing.
            String reply = LegacyCompressor.stripMultiLineComments(sigtext);
            reply = LegacyCompressor.stripSingleLineComments(reply);

            // Collapse all line structure so statements split cleanly on ';'.
            String process = reply;
            process = process.replace('\n', ' ');
            process = process.replace('\r', ' ');
            process = process.replace('\t', ' ');

            StringTokenizer st = new StringTokenizer(process, ";");
            while (st.hasMoreTokens())
            {
                String line = st.nextToken();
                line = line.trim();
                if (line.length() == 0)
                {
                    continue;
                }

                if (line.startsWith("import "))
                {
                    parseImportLine(line);
                }
                else
                {
                    parseDeclarationLine(line);
                }
            }
        }
        catch (Exception ex)
        {
            Loggers.STARTUP.error("Unexpected Error", ex);
        }
    }

    /**
     * Parse a single import line.
     * "import pkg.*" adds the package to the package-import list;
     * "import pkg.Cls" maps the simple name to the fully qualified name.
     * @param line The import statement
     */
    private void parseImportLine(String line)
    {
        // Strip the leading "import " keyword (7 characters).
        String shortcut = line.substring(7, line.length());
        shortcut = shortcut.trim();

        if (line.endsWith(".*"))
        {
            // Wildcard import: record the package prefix (without ".*").
            shortcut = shortcut.substring(0, shortcut.length() - 2);
            packageImports.add(shortcut);
        }
        else
        {
            int lastDot = line.lastIndexOf('.');
            if (lastDot == -1)
            {
                Loggers.STARTUP.error("Missing . from import statement: " + line);
                return;
            }

            // Map simple class name -> fully qualified name.
            String leaf = line.substring(lastDot + 1);
            classImports.put(leaf, shortcut);
        }
    }

    /**
     * Parse a single declaration line.
     * Where line is defined as being everything in between 2 ; chars.
* @param line The line to parse */ private void parseDeclarationLine(String line) { int openBrace = line.indexOf('('); int closeBrace = line.indexOf(')'); if (openBrace == -1) { Loggers.STARTUP.error("Missing ( in declaration: " + line); return; } if (closeBrace == -1) { Loggers.STARTUP.error("Missing ) in declaration: " + line); return; } if (openBrace > closeBrace) { Loggers.STARTUP.error("( Must come before ) in declaration: " + line); return; } // Class name and method name come before the opening ( String classMethod = line.substring(0, openBrace).trim(); Method method = findMethod(classMethod); if (method == null) { // Debug is done by findMethod() return; } // Now we need to get a list of all the parameters String paramDecl = line.substring(openBrace + 1, closeBrace); String[] paramNames = split(paramDecl); // Check that we have the right number if (method.getParameterTypes().length != paramNames.length) { Loggers.STARTUP.error("Parameter mismatch parsing signatures section in dwr.xml on line: " + line); Loggers.STARTUP.info("- Reflected method had: " + method.getParameterTypes().length + " parameters: " + method.toString()); Loggers.STARTUP.info("- Signatures section had: " + paramNames.length + " parameters"); Loggers.STARTUP.info("- This can be caused by method overloading which is not supported by Javascript or DWR"); return; } for (int i = 0; i < paramNames.length; i++) { String[] genericList = getGenericParameterTypeList(paramNames[i]); for (int j = 0; j < genericList.length; j++) { String type = genericList[j].trim(); Class<?> clazz = findClass(type); if (clazz != null) { Property parent = new ParameterProperty(method, i); Property child = parent.createChild(i); child = converterManager.checkOverride(child); Property replacement = new OverrideProperty(clazz); converterManager.setOverrideProperty(child, replacement); if (Loggers.STARTUP.isDebugEnabled()) { Loggers.STARTUP.debug("- " + child + " = " + clazz.getName()); } } else { 
Loggers.STARTUP.warn("Missing class (" + type + ") while parsing signature section on line: " + line); } } } } /** * Lookup a class according to the import rules * @param type The name of the class to find * @return The found class, or null if it does not exist */ private Class<?> findClass(String type) { String itype = type; // Handle inner classes if (itype.indexOf('.') != -1) { Loggers.STARTUP.debug("Inner class detected: " + itype); itype = itype.replace('.', '$'); } try { String full = classImports.get(itype); if (full == null) { full = itype; } return LocalUtil.classForName(full); } catch (Exception ex) { // log.debug("Trying to find class in package imports"); } for (String pkg : packageImports) { String lookup = pkg + '.' + itype; try { return LocalUtil.classForName(lookup); } catch (Exception ex) { // log.debug("Not found: " + lookup); } } // So we've failed to find a Java class name. We can also lookup by // Javascript name to help the situation where there is a dynamic proxy // in the way. Creator creator = creatorManager.getCreator(type, false); if (creator != null) { return creator.getType(); } Loggers.STARTUP.error("Failed to find class: '" + itype + "' from <signature> block."); Loggers.STARTUP.info("- Looked in the following class imports:"); for (Entry<String, String> entry : classImports.entrySet()) { Loggers.STARTUP.info(" - " + entry.getKey() + " -> " + entry.getValue()); } Loggers.STARTUP.info("- Looked in the following package imports:"); for (String pkg : packageImports) { Loggers.STARTUP.info(" - " + pkg); } return null; } /** * Convert a parameter like "Map&lt;Integer, URL&gt;" into an array, * something like [Integer, URL]. 
* @param paramName The parameter declaration string * @return The array of generic types as strings */ private static String[] getGenericParameterTypeList(String paramName) { int openGeneric = paramName.indexOf('<'); if (openGeneric == -1) { Loggers.STARTUP.debug("No < in paramter declaration: " + paramName); return new String[0]; } int closeGeneric = paramName.lastIndexOf('>'); if (closeGeneric == -1) { Loggers.STARTUP.error("Missing > in generic declaration: " + paramName); return new String[0]; } String generics = paramName.substring(openGeneric + 1, closeGeneric); StringTokenizer st = new StringTokenizer(generics, ","); String[] types = new String[st.countTokens()]; int i = 0; while (st.hasMoreTokens()) { types[i] = st.nextToken(); i++; } return types; } /** * Find a method from the declaration string * @param classMethod The declaration that comes before the ( * @return The found method, or null if one was not found. */ private Method findMethod(String classMethod) { String classMethodChop = classMethod; // If there is a return type then it must be before the last space. int lastSpace = classMethodChop.lastIndexOf(' '); if (lastSpace >= 0) { classMethodChop = classMethodChop.substring(lastSpace); } // The method name comes after the last . int lastDot = classMethodChop.lastIndexOf('.'); if (lastDot == -1) { Loggers.STARTUP.error("Missing . 
to separate class name and method: " + classMethodChop); return null; } String className = classMethodChop.substring(0, lastDot).trim(); String methodName = classMethodChop.substring(lastDot + 1).trim(); Class<?> clazz = findClass(className); if (clazz == null) { // Debug is done by findClass() return null; } Method method = null; Method[] methods = clazz.getMethods(); for (Method test : methods) { if (test.getName().equals(methodName)) { if (method == null) { method = test; } else { Loggers.STARTUP.warn("Setting extra type info to overloaded methods may fail with <parameter .../>"); } } } if (method == null) { Loggers.STARTUP.error("Unable to find method called: " + methodName + " on type: " + clazz.getName()); } return method; } /** * Chop a parameter declaration string into separate parameters * @param paramDecl The full set of parameter declarations * @return An array of found parameters */ private static String[] split(String paramDecl) { List<String> params = new ArrayList<String>(); boolean inGeneric = false; int start = 0; for (int i = 0; i < paramDecl.length(); i++) { char c = paramDecl.charAt(i); if (c == '<') { if (inGeneric) { Loggers.STARTUP.error("Found < while parsing generic section: " + paramDecl); break; } inGeneric = true; } if (c == '>') { if (!inGeneric) { Loggers.STARTUP.error("Found > while not parsing generic section: " + paramDecl); break; } inGeneric = false; } if (!inGeneric && c == ',') { // This is the start of a new parameter String param = paramDecl.substring(start, i); params.add(param); start = i + 1; } } // Add in the bit at the end: String param = paramDecl.substring(start, paramDecl.length()); params.add(param); return params.toArray(new String[params.size()]); } /** * The map of specific class imports that we have parsed. */ private final Map<String, String> classImports = new HashMap<String, String>(); /** * The map of package imports that we have parsed. 
*/ private final List<String> packageImports = new ArrayList<String>(); /** * Having understood the extra type info we add it in here. */ private final ConverterManager converterManager; /** * If we can't find a class by Java name we can lookup by Javascript name */ private final CreatorManager creatorManager; }
package org.carlspring.strongbox.client;

import org.carlspring.strongbox.configuration.Configuration;
import org.carlspring.strongbox.configuration.ProxyConfiguration;
import org.carlspring.strongbox.configuration.ServerConfiguration;
import org.carlspring.strongbox.rest.ObjectMapperProvider;
import org.carlspring.strongbox.storage.Storage;
import org.carlspring.strongbox.storage.repository.Repository;
import org.carlspring.strongbox.xml.parsers.GenericParser;

import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Entity;
import javax.ws.rs.client.WebTarget;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.xml.bind.JAXBException;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;

import org.glassfish.jersey.jackson.JacksonFeature;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * REST client for the Strongbox configuration, storage and search endpoints.
 * Payloads are marshalled to/from XML via {@link GenericParser}.
 *
 * @author mtodorov
 */
public class RestClient extends ArtifactClient
{

    private static final Logger logger = LoggerFactory.getLogger(RestClient.class);


    /**
     * Uploads the full server configuration.
     *
     * @param configuration The configuration to store.
     * @return The HTTP status code of the response.
     */
    public int setConfiguration(Configuration configuration)
            throws IOException, JAXBException
    {
        return setServerConfiguration(configuration, "/configuration/strongbox/xml", Configuration.class);
    }

    /**
     * Downloads the full server configuration.
     *
     * @return The parsed {@link Configuration}, or null on a non-200 response.
     */
    public Configuration getConfiguration()
            throws IOException, JAXBException
    {
        return (Configuration) getServerConfiguration("/configuration/strongbox/xml", Configuration.class);
    }

    /**
     * PUTs a server configuration object as XML to the given path.
     *
     * @param configuration The configuration object to serialize.
     * @param path          The endpoint path, relative to the context base URL.
     * @param classes       JAXB classes needed by the parser to marshal the payload.
     * @return The HTTP status code of the response.
     */
    public int setServerConfiguration(ServerConfiguration configuration,
                                      String path,
                                      Class... classes)
            throws IOException, JAXBException
    {
        String url = getContextBaseUrl() + path;

        WebTarget resource = getClientInstance().target(url);
        setupAuthentication(resource);

        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
        GenericParser<ServerConfiguration> parser = new GenericParser<>(classes);
        parser.store(configuration, baos);

        Response response = resource.request(MediaType.APPLICATION_XML)
                                    .put(Entity.entity(baos.toString("UTF-8"), MediaType.APPLICATION_XML));

        return response.getStatus();
    }

    /**
     * GETs and unmarshals a server configuration object from the given path.
     *
     * @param path    The endpoint path, relative to the context base URL.
     * @param classes JAXB classes needed by the parser to unmarshal the payload.
     * @return The parsed configuration, or null on a non-200 response.
     */
    public ServerConfiguration getServerConfiguration(String path,
                                                      Class... classes)
            throws IOException, JAXBException
    {
        String url = getContextBaseUrl() + path;

        WebTarget resource = getClientInstance().target(url);
        setupAuthentication(resource);

        final Response response = resource.request(MediaType.APPLICATION_XML).get();

        ServerConfiguration configuration = null;
        if (response.getStatus() == 200)
        {
            final String xml = response.readEntity(String.class);

            // NOTE(review): xml.getBytes() uses the platform default charset;
            // presumably the server responds in UTF-8 — confirm before changing.
            final ByteArrayInputStream bais = new ByteArrayInputStream(xml.getBytes());

            GenericParser<ServerConfiguration> parser = new GenericParser<ServerConfiguration>(classes);
            configuration = parser.parse(bais);
        }

        return configuration;
    }

    /**
     * Sets the listening port.
     *
     * @param port The port to listen on.
     * @return The response from the server.
     */
    public int setListeningPort(int port)
    {
        String url = getContextBaseUrl() + "/configuration/strongbox/port/" + port;

        WebTarget resource = getClientInstance().target(url);
        setupAuthentication(resource);

        Response response = resource.request(MediaType.TEXT_PLAIN).put(Entity.entity(port, MediaType.TEXT_PLAIN));

        return response.getStatus();
    }

    /**
     * Get the port on which the server is listening.
     *
     * @return The port on which the server is listening.
     */
    public int getListeningPort()
    {
        String url = getContextBaseUrl() + "/configuration/strongbox/port";

        WebTarget resource = getClientInstance().target(url);
        setupAuthentication(resource);

        return resource.request(MediaType.TEXT_PLAIN).get(Integer.class);
    }

    /**
     * Sets the base URL of the server.
     *
     * @param baseUrl The base URL.
     * @return The response code.
     */
    public int setBaseUrl(String baseUrl)
    {
        String url = getContextBaseUrl() + "/configuration/strongbox/baseUrl/" + baseUrl;

        WebTarget resource = getClientInstance().target(url);
        setupAuthentication(resource);

        Response response = resource.request(MediaType.TEXT_PLAIN).put(Entity.entity(baseUrl, MediaType.TEXT_PLAIN));

        return response.getStatus();
    }

    /**
     * Gets the base URL of the server.
     *
     * @return The base URL reported by the server.
     */
    public String getBaseUrl()
    {
        String url = getContextBaseUrl() + "/configuration/strongbox/baseUrl";

        WebTarget resource = getClientInstance().target(url);
        setupAuthentication(resource);

        return resource.request(MediaType.TEXT_PLAIN).get(String.class);
    }

    /**
     * Uploads the global proxy configuration.
     *
     * @param proxyConfiguration The proxy configuration to store.
     * @return The HTTP status code of the response.
     */
    public int setProxyConfiguration(ProxyConfiguration proxyConfiguration)
            throws IOException, JAXBException
    {
        String url = getContextBaseUrl() + "/configuration/strongbox/proxy-configuration";

        WebTarget resource = getClientInstance().target(url);
        setupAuthentication(resource);

        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
        GenericParser<ProxyConfiguration> parser = new GenericParser<ProxyConfiguration>(ProxyConfiguration.class);
        parser.store(proxyConfiguration, baos);

        Response response = resource.request(MediaType.APPLICATION_XML)
                                    .put(Entity.entity(baos.toString("UTF-8"), MediaType.APPLICATION_XML));

        return response.getStatus();
    }

    /**
     * Fetches the proxy configuration, either globally or for a specific
     * storage/repository pair (when both IDs are non-null).
     *
     * @param storageId    The storage ID, or null for the global configuration.
     * @param repositoryId The repository ID, or null for the global configuration.
     * @return The parsed configuration; an empty {@link ProxyConfiguration} on a
     *         non-200 response (never null).
     */
    public ProxyConfiguration getProxyConfiguration(String storageId,
                                                    String repositoryId)
            throws JAXBException
    {
        String url = getContextBaseUrl() + "/configuration/strongbox/proxy-configuration" +
                     (storageId != null && repositoryId != null ?
                      "?storageId=" + storageId + "&repositoryId=" + repositoryId : "");

        WebTarget resource = getClientInstance().target(url);
        setupAuthentication(resource);

        final Response response = resource.request(MediaType.APPLICATION_XML).get();

        ProxyConfiguration proxyConfiguration = null;
        if (response.getStatus() == 200)
        {
            final String xml = response.readEntity(String.class);

            final ByteArrayInputStream bais = new ByteArrayInputStream(xml.getBytes());

            GenericParser<ProxyConfiguration> parser = new GenericParser<ProxyConfiguration>(ProxyConfiguration.class);

            proxyConfiguration = parser.parse(bais);
        }
        else
        {
            proxyConfiguration = new ProxyConfiguration();
        }

        return proxyConfiguration;
    }

    /**
     * Creates a new storage.
     *
     * @param storage The storage object to create.
     * @return The response code.
     * @throws IOException
     */
    public int addStorage(Storage storage)
            throws IOException, JAXBException
    {
        String url = getContextBaseUrl() + "/configuration/strongbox/storages";

        WebTarget resource = getClientInstance().target(url);
        setupAuthentication(resource);

        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
        GenericParser<Storage> parser = new GenericParser<Storage>(Storage.class);
        parser.store(storage, baos);

        Response response = resource.request(MediaType.APPLICATION_XML)
                                    .put(Entity.entity(baos.toString("UTF-8"), MediaType.APPLICATION_XML));

        return response.getStatus();
    }

    /**
     * Looks up a storage by it's ID.
     *
     * @param storageId The ID of the storage to look up.
     * @return The parsed {@link Storage}, or null on a non-200 response.
     * @throws IOException
     */
    public Storage getStorage(String storageId)
            throws IOException, JAXBException
    {
        String url = getContextBaseUrl() + "/configuration/strongbox/storages/" + storageId;

        WebTarget resource = getClientInstance().target(url);
        setupAuthentication(resource);

        final Response response = resource.request(MediaType.APPLICATION_XML).get();

        Storage storage = null;
        if (response.getStatus() == 200)
        {
            final String xml = response.readEntity(String.class);

            final ByteArrayInputStream bais = new ByteArrayInputStream(xml.getBytes());

            GenericParser<Storage> parser = new GenericParser<Storage>(Storage.class);

            storage = parser.parse(bais);
        }

        return storage;
    }

    /**
     * Deletes a storage.
     *
     * @param storageId The storage to delete.
     * @param force     Whether to force deletion.
     * @return The HTTP status code of the response.
     */
    public int deleteStorage(String storageId,
                             boolean force)
    {
        String url = getContextBaseUrl() + "/configuration/strongbox/storages/" + storageId +
                     (force ? "?force=true" : "");

        WebTarget resource = getClientInstance().target(url);
        setupAuthentication(resource);

        Response response = resource.request().delete();

        return response.getStatus();
    }

    /**
     * Creates a repository under its storage.
     *
     * @param repository The repository to create; its storage ID is taken from
     *                   {@code repository.getStorage()}.
     * @return The HTTP status code of the response.
     */
    public int addRepository(Repository repository)
            throws IOException, JAXBException
    {
        String url = getContextBaseUrl() + "/configuration/strongbox/storages/" +
                     repository.getStorage().getId();

        WebTarget resource = getClientInstance().target(url);
        setupAuthentication(resource);

        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
        GenericParser<Repository> parser = new GenericParser<Repository>(Repository.class);
        parser.store(repository, baos);

        Response response = resource.request(MediaType.APPLICATION_XML)
                                    .put(Entity.entity(baos.toString("UTF-8"), MediaType.APPLICATION_XML));

        return response.getStatus();
    }

    /**
     * Looks up a repository by it's ID.
     *
     * @param storageId    The storage under which the repository resides.
     * @param repositoryId The ID of the repository to look up.
     * @return The parsed {@link Repository}, or null on a non-200 response.
     * @throws java.io.IOException
     */
    public Repository getRepository(String storageId,
                                    String repositoryId)
            throws IOException, JAXBException
    {
        String url = getContextBaseUrl() + "/configuration/strongbox/storages/" +
                     storageId + "/" + repositoryId;

        WebTarget resource = getClientInstance().target(url);
        setupAuthentication(resource);

        final Response response = resource.request(MediaType.APPLICATION_XML).get();

        Repository repository = null;
        if (response.getStatus() == 200)
        {
            final String xml = response.readEntity(String.class);

            final ByteArrayInputStream bais = new ByteArrayInputStream(xml.getBytes());

            GenericParser<Repository> parser = new GenericParser<Repository>(Repository.class);

            repository = parser.parse(bais);
        }

        return repository;
    }

    /**
     * Deletes a repository.
     *
     * @param storageId    The storage in which the repository to delete is under.
     * @param repositoryId The repository to delete.
     * @param force        Whether to force deletion.
     * @return The HTTP status code of the response.
     */
    public int deleteRepository(String storageId,
                                String repositoryId,
                                boolean force)
    {
        String url = getContextBaseUrl() + "/configuration/strongbox/storages/" +
                     storageId + "/" + repositoryId +
                     (force ? "?force=true" : "");

        WebTarget resource = getClientInstance().target(url);
        setupAuthentication(resource);

        Response response = resource.request().delete();

        return response.getStatus();
    }

    /**
     * Searches all repositories.
     *
     * @param query     The search query.
     * @param mediaType The desired response media type.
     * @return The raw response body.
     */
    public String search(String query,
                         MediaType mediaType)
            throws UnsupportedEncodingException
    {
        return search(null, query, mediaType);
    }

    /**
     * Searches a repository (or all repositories when {@code repositoryId} is null).
     *
     * @param repositoryId The repository to search, or null for all.
     * @param query        The search query.
     * @param mediaType    The desired response media type.
     * @return The raw response body.
     */
    public String search(String repositoryId,
                         String query,
                         MediaType mediaType)
            throws UnsupportedEncodingException
    {
        // NOTE(review): with a null repositoryId this produces "/search?&q=..."
        // (empty first parameter) — the server apparently tolerates this.
        String url = getContextBaseUrl() + "/search?" +
                     (repositoryId != null ? "repositoryId=" + URLEncoder.encode(repositoryId, "UTF-8") : "") +
                     "&q=" + URLEncoder.encode(query, "UTF-8");

        // FIX: the previous implementation built a second JAX-RS Client here
        // (ClientBuilder.newBuilder()...build()) which was never used and never
        // closed — dead code and a connection-resource leak. The request has
        // always gone through getClientInstance().
        WebTarget webResource = getClientInstance().target(url);
        setupAuthentication(webResource);

        final Response response = webResource.request(mediaType).get();

        return response.readEntity(String.class);
    }

    /**
     * Triggers a Maven metadata rebuild for the given path.
     *
     * @param storageId    The storage to rebuild metadata in.
     * @param repositoryId The repository to rebuild metadata in.
     * @param basePath     The base path to rebuild from, or null for the root.
     * @return The HTTP status code of the response.
     */
    public int rebuildMetadata(String storageId,
                               String repositoryId,
                               String basePath)
            throws IOException, JAXBException
    {
        String url = getContextBaseUrl() + "/metadata/" + storageId + "/" + repositoryId + "/" +
                     (basePath != null ? basePath : "");

        WebTarget resource = getClientInstance().target(url);
        setupAuthentication(resource);

        Response response = resource.request(MediaType.TEXT_PLAIN)
                                    .post(Entity.entity("Rebuild", MediaType.APPLICATION_XML));

        return response.getStatus();
    }

    /**
     * Removes a version from the artifact metadata.
     *
     * @param storageId    The storage holding the artifact.
     * @param repositoryId The repository holding the artifact.
     * @param artifactPath The artifact path, or null for the repository root.
     * @param version      The version to remove.
     * @param classifier   An optional classifier, or null.
     * @param metadataType The type of metadata to update.
     * @return The HTTP status code of the response.
     */
    public int removeVersionFromMetadata(String storageId,
                                         String repositoryId,
                                         String artifactPath,
                                         String version,
                                         String classifier,
                                         String metadataType)
            throws IOException, JAXBException
    {
        String url = getContextBaseUrl() + "/metadata/" + storageId + "/" + repositoryId + "/" +
                     (artifactPath != null ? artifactPath : "") +
                     "?version=" + version +
                     (classifier != null ? "&classifier=" + classifier : "") +
                     "&metadataType=" + metadataType;

        WebTarget resource = getClientInstance().target(url);
        setupAuthentication(resource);

        Response response = resource.request().delete();

        return response.getStatus();
    }

    /**
     * Copies an artifact path between repositories.
     *
     * @param path             The artifact path to copy.
     * @param srcStorageId     The source storage ID.
     * @param srcRepositoryId  The source repository ID.
     * @param destStorageId    The destination storage ID.
     * @param destRepositoryId The destination repository ID.
     */
    public void copy(String path,
                     String srcStorageId,
                     String srcRepositoryId,
                     String destStorageId,
                     String destRepositoryId)
    {
        @SuppressWarnings("ConstantConditions")
        String url = getContextBaseUrl() + "/storages/copy/" + path +
                     "?srcStorageId=" + srcStorageId +
                     "&srcRepositoryId=" + srcRepositoryId +
                     "&destStorageId=" + destStorageId +
                     "&destRepositoryId=" + destRepositoryId;

        WebTarget resource = getClientInstance().target(url);
        setupAuthentication(resource);

        resource.request(MediaType.TEXT_PLAIN).post(Entity.entity("Copy", MediaType.TEXT_PLAIN));
    }

}
/**
 * Copyright 2005-2014 The Kuali Foundation
 *
 * Licensed under the Educational Community License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.opensource.org/licenses/ecl2.php
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.kuali.rice.krms.impl.peopleflow;

import org.apache.commons.collections.CollectionUtils;
import org.joda.time.DateTime;
import org.junit.Before;
import org.junit.Test;
import org.kuali.rice.core.api.config.property.ConfigurationService;
import org.kuali.rice.core.api.exception.RiceIllegalArgumentException;
import org.kuali.rice.core.api.exception.RiceIllegalStateException;
import org.kuali.rice.core.api.uif.RemotableAttributeError;
import org.kuali.rice.kew.api.peopleflow.PeopleFlowDefinition;
import org.kuali.rice.kew.api.peopleflow.PeopleFlowService;
import org.kuali.rice.krms.api.engine.ExecutionEnvironment;
import org.kuali.rice.krms.api.engine.ExecutionOptions;
import org.kuali.rice.krms.api.engine.SelectionCriteria;
import org.kuali.rice.krms.api.engine.Term;
import org.kuali.rice.krms.api.repository.action.ActionDefinition;
import org.kuali.rice.krms.api.repository.agenda.AgendaDefinition;
import org.kuali.rice.krms.framework.engine.Action;
import org.kuali.rice.krms.framework.engine.BasicExecutionEnvironment;
import org.kuali.rice.krms.framework.engine.TermResolutionEngineImpl;
import org.springframework.orm.ObjectRetrievalFailureException;

import javax.jws.WebParam;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import static org.junit.Assert.*;

/**
 * Unit test for the {@link PeopleFlowActionTypeService}.
 * <p>
 * Uses a stub {@link ConfigurationService} for error-message templates and a
 * stub {@link PeopleFlowService} that recognizes two "valid" peopleflow IDs.
 */
public class PeopleFlowActionTypeServiceTest {

    // Services under test: one per action type
    private final PeopleFlowActionTypeService notificationPFATS = PeopleFlowActionTypeService.getInstance(
            PeopleFlowActionTypeService.Type.NOTIFICATION);
    private final PeopleFlowActionTypeService approvalPFATS = PeopleFlowActionTypeService.getInstance(
            PeopleFlowActionTypeService.Type.APPROVAL);

    private static final String VALID_PEOPLEFLOW_ID_1 = "myBogusPeopleFlowId1";
    private static final String VALID_PEOPLEFLOW_NAME_1 = "myBogusPeopleFlowName1";
    private static final String VALID_PEOPLEFLOW_ID_2 = "myBogusPeopleFlowId2";
    private static final String VALID_PEOPLEFLOW_NAME_2 = "myBogusPeopleFlowName2";
    private static final String INVALID_PEOPLEFLOW_ID = "invalidPeopleFlowId";

    /**
     * Minimal ConfigurationService stub: every property resolves to a
     * MessageFormat-style template so validation messages can be built.
     */
    private static final ConfigurationService configurationService = new ConfigurationService() {
        @Override
        public String getPropertyValueAsString(String key) {
            return "{0} message";
        }

        @Override
        public boolean getPropertyValueAsBoolean(String key) {
            return false;
        }

        @Override
        public Map<String, String> getAllProperties() {
            return null;
        }
    };

    @Before
    public void injectConfigurationService() {
        notificationPFATS.setConfigurationService(configurationService);
        approvalPFATS.setConfigurationService(configurationService);
    }

    @Test(expected = RiceIllegalArgumentException.class)
    public void testNullActionDefinition() {
        // should throw exception, NOT return null
        notificationPFATS.loadAction(null);
    }

    /**
     * Executes a notification action followed by an approval action against the
     * same environment and verifies the accumulated selected-peopleflows
     * attribute. Also verifies that loadAction rejects an ActionDefinition
     * lacking the peopleflow-id attribute.
     */
    @Test
    public void testActionExecution() {
        // dummy up an ActionDefinition
        ActionDefinition.Builder actionDefinitionBuilder =
                ActionDefinition.Builder.create("myId", "myName", "myNamespace", "myTypeId", "myRuleId", 0);

        Map<String, String> attributes = new HashMap<String, String>();
        attributes.put(PeopleFlowActionTypeService.ATTRIBUTE_FIELD_NAME, VALID_PEOPLEFLOW_ID_1);
        attributes.put(PeopleFlowActionTypeService.NAME_ATTRIBUTE_FIELD, VALID_PEOPLEFLOW_NAME_1);
        actionDefinitionBuilder.setAttributes(attributes);

        // create an ExecutionEnvironment
        SelectionCriteria sc1 = SelectionCriteria.createCriteria(new DateTime(),
                Collections.<String, String>emptyMap(),
                Collections.singletonMap(AgendaDefinition.Constants.EVENT, "foo"));
        ExecutionEnvironment ee = new BasicExecutionEnvironment(sc1,
                Collections.<Term, Object>emptyMap(), new ExecutionOptions(), new TermResolutionEngineImpl());

        // load a notification action
        Action notificationAction = notificationPFATS.loadAction(actionDefinitionBuilder.build());
        notificationAction.execute(ee);

        // change the peopleFlow id
        attributes.clear();
        attributes.put(PeopleFlowActionTypeService.ATTRIBUTE_FIELD_NAME, VALID_PEOPLEFLOW_ID_2);
        attributes.put(PeopleFlowActionTypeService.NAME_ATTRIBUTE_FIELD, VALID_PEOPLEFLOW_NAME_2);
        actionDefinitionBuilder.setAttributes(attributes);

        // load an approval action
        Action approvalAction = approvalPFATS.loadAction(actionDefinitionBuilder.build());
        approvalAction.execute(ee);

        // get our attribute for comparison
        String selectedPeopleFlows =
                (String) ee.getEngineResults().getAttribute(PeopleFlowActionTypeService.PEOPLE_FLOWS_SELECTED_ATTRIBUTE);

        // compare against our expected output:
        assertEquals("F:myBogusPeopleFlowId1,A:myBogusPeopleFlowId2", selectedPeopleFlows);

        // FIX: removed an unused local that read the NAME_ATTRIBUTE_FIELD
        // engine-result attribute and was never asserted against.

        // TODO: test ActionDefinition w/o the attribute we need
        actionDefinitionBuilder.setAttributes(Collections.<String, String>emptyMap());

        try {
            approvalPFATS.loadAction(actionDefinitionBuilder.build());
            fail("should have blown up since the attribute we need isn't in the ActionDefinition");
        } catch (RiceIllegalArgumentException e) {
            // good
        }
    }

    /**
     * Verifies attribute validation: null parameters are rejected, an invalid
     * peopleflow ID produces errors, and a valid ID produces none.
     */
    @Test
    public void testValidateAttributes() {
        PeopleFlowActionTypeService peopleFlowActionTypeService =
                PeopleFlowActionTypeService.getInstance(PeopleFlowActionTypeService.Type.NOTIFICATION);
        peopleFlowActionTypeService.setConfigurationService(configurationService);

        // inject our mock PeopleFlowService (redundant cast removed):
        peopleFlowActionTypeService.setPeopleFlowService(mockPeopleFlowService);

        // set up attributes with an "invalid" peopleflow ID (according to our mock PeopleFlowService)
        Map<String, String> attributes = new HashMap<String, String>();
        attributes.put(PeopleFlowActionTypeService.ATTRIBUTE_FIELD_NAME, INVALID_PEOPLEFLOW_ID);

        // test null ID parameter
        try {
            peopleFlowActionTypeService.validateAttributes(null, attributes);
            fail("null type id should throw an exception");
        } catch (RiceIllegalArgumentException e) {
            // good
        }

        // test null attributes parameter
        try {
            peopleFlowActionTypeService.validateAttributes("bogusTypeId", null);
            fail("null type id should throw an exception");
        } catch (RiceIllegalArgumentException e) {
            // good
        }

        // this should create errors
        List<RemotableAttributeError> errors =
                peopleFlowActionTypeService.validateAttributes("bogusTypeId", attributes);
        assertFalse(CollectionUtils.isEmpty(errors));

        // this should create errors
        errors = peopleFlowActionTypeService.validateAttributesAgainstExisting("bogusTypeId", attributes,
                Collections.<String, String>emptyMap());
        assertFalse(CollectionUtils.isEmpty(errors));

        // reset attribute to have a "valid" peopleFlow ID (according to our mock PeopleFlowService)
        attributes.clear();
        attributes.put(PeopleFlowActionTypeService.ATTRIBUTE_FIELD_NAME, VALID_PEOPLEFLOW_ID_1);

        // this should not create any errors
        errors = peopleFlowActionTypeService.validateAttributes("bogusTypeId", attributes);
        assertTrue(CollectionUtils.isEmpty(errors));

        // this should not create any errors
        errors = peopleFlowActionTypeService.validateAttributesAgainstExisting("bogusTypeId", attributes,
                Collections.<String, String>emptyMap());
        assertTrue(CollectionUtils.isEmpty(errors));
    }

    /**
     * Stub PeopleFlowService: getPeopleFlow succeeds only for the two valid
     * IDs; all other operations are unsupported.
     */
    private final PeopleFlowService mockPeopleFlowService = new PeopleFlowService() {

        private Set<String> validPeopleFlowIds = new HashSet<String>();

        {
            validPeopleFlowIds.add(VALID_PEOPLEFLOW_ID_1);
            validPeopleFlowIds.add(VALID_PEOPLEFLOW_ID_2);
        }

        @Override
        public PeopleFlowDefinition getPeopleFlow(@WebParam(name = "peopleFlowId") String peopleFlowId)
                throws RiceIllegalArgumentException {
            if (validPeopleFlowIds.contains(peopleFlowId)) {
                return PeopleFlowDefinition.Builder.create("myNamespace", "myPeopleFlowName").build();
            } else {
                // simulate what our PeopleFlowServiceImpl would do
                throw new ObjectRetrievalFailureException("", new RuntimeException());
            }
        }

        @Override
        public PeopleFlowDefinition getPeopleFlowByName(@WebParam(name = "namespaceCode") String namespaceCode,
                @WebParam(name = "name") String name) throws RiceIllegalArgumentException {
            throw new UnsupportedOperationException();
        }

        @Override
        public PeopleFlowDefinition createPeopleFlow(@WebParam(
                name = "peopleFlow") PeopleFlowDefinition peopleFlow)
                throws RiceIllegalArgumentException, RiceIllegalStateException {
            throw new UnsupportedOperationException();
        }

        @Override
        public PeopleFlowDefinition updatePeopleFlow(@WebParam(
                name = "peopleFlow") PeopleFlowDefinition peopleFlow)
                throws RiceIllegalArgumentException, RiceIllegalStateException {
            throw new UnsupportedOperationException();
        }
    };
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.breizhbeans.thrift.tools.thriftmongobridge.secured;

import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;
import org.apache.commons.codec.binary.Hex;
import org.apache.thrift.TBase;
import org.apache.thrift.TException;
import org.apache.thrift.TFieldIdEnum;
import org.apache.thrift.meta_data.FieldMetaData;
import org.apache.thrift.meta_data.MapMetaData;
import org.apache.thrift.protocol.TType;

import java.io.UnsupportedEncodingException;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

/**
 * Registry of Thrift fields that must be protected (ciphered and/or hashed)
 * when a Thrift object is serialized to BSON.
 *
 * <p>Concrete subclasses supply the cryptographic primitives via
 * {@link #cipher(byte[])}, {@link #decipher(byte[])} and {@link #digest64(byte[])};
 * this base class only tracks which fields of which {@link TBase} classes are
 * secured and builds/reads the corresponding BSON fragments.
 */
public abstract class TBSONSecuredWrapper {

    /**
     * Per-field security flags: whether the field value must be ciphered
     * ({@code secured}) and whether a 64-bit digest of the clear value must be
     * stored alongside it ({@code hash}).
     */
    public class ThriftSecuredField {
        private boolean secured;
        private boolean hash;

        // Default: field is neither ciphered nor hashed.
        public ThriftSecuredField() {
            secured = false;
            hash = false;
        }

        public ThriftSecuredField(final boolean secured, final boolean hash) {
            this.secured = secured;
            this.hash = hash;
        }

        public boolean isSecured() {
            return secured;
        }

        public boolean isHash() {
            return hash;
        }
    }

    // Shared sentinel returned for any field that has no explicit configuration.
    private final ThriftSecuredField UNSECURED_FIELD = new ThriftSecuredField();

    // Secured-field configuration, keyed by Thrift class then by thrift field id.
    private ConcurrentHashMap<Class<? extends TBase>, Map<Short, ThriftSecuredField>> securedFields = new ConcurrentHashMap<>();

    /**
     * Declares the given fields of a Thrift class as secured (ciphered), and
     * optionally hashed.
     *
     * <p>Uses reflection on the generated Thrift class: locates its nested
     * {@code _Fields} enum, resolves each field through
     * {@code _Fields.findByName(String)}, and validates its type against the
     * static {@code metaDataMap}. Only {@code STRING} fields — or {@code MAP}
     * fields whose values are {@code STRING} — are accepted.
     *
     * @param tbase  generated Thrift class to configure
     * @param hash   if {@code true}, also store a 64-bit digest of the clear value
     * @param fields the fields to secure
     * @throws UnsupportedTTypeException if a field's Thrift type is not supported
     * @throws Exception on reflection failures against the generated class
     */
    public void secureThriftFields(Class<? extends TBase> tbase, boolean hash, TFieldIdEnum... fields) throws Exception {
        Map<Short, ThriftSecuredField> classSecuredFields = securedFields.get(tbase);
        if (classSecuredFields == null) {
            classSecuredFields = new ConcurrentHashMap<>();
        }

        // Locate the generated nested _Fields enum of the Thrift class.
        Class<?> fieldClass = null;
        Class<?>[] innerClasses = tbase.getClasses();
        for (Class<?> innerClass : innerClasses) {
            if ("_Fields".equals(innerClass.getSimpleName())) {
                fieldClass = innerClass;
                break;
            }
        }

        // Resolve _Fields.findByName(String) to map field names to enum constants.
        Class[] findByNameArgs = new Class[1];
        findByNameArgs[0] = String.class;
        Method findByNameMethod = fieldClass.getMethod("findByName", findByNameArgs);

        // Read the static metaDataMap holding per-field type metadata.
        Field metafaField = tbase.getField("metaDataMap");
        Map<?, FieldMetaData> metaDataMap = (Map<?, org.apache.thrift.meta_data.FieldMetaData>) metafaField.get(tbase);

        for (TFieldIdEnum field : fields) {
            // Get the _Fields enum constant for this field.
            org.apache.thrift.TFieldIdEnum tfieldEnum = (TFieldIdEnum) findByNameMethod.invoke(null, field.getFieldName());

            // Get this field's metadata.
            FieldMetaData fieldMetaData = metaDataMap.get(tfieldEnum);

            // Only STRING fields (or maps of STRING values) can be secured.
            switch (fieldMetaData.valueMetaData.type) {
                case TType.STRING:
                    break;
                case TType.MAP:
                    MapMetaData mapMetaData = (MapMetaData) fieldMetaData.valueMetaData;
                    if (mapMetaData.valueMetaData.type != TType.STRING) {
                        throw new UnsupportedTTypeException("Unsupported secured type - FIELD:" + field.getFieldName() + " TYPE:" + mapMetaData.valueMetaData.type);
                    }
                    break;
                default:
                    throw new UnsupportedTTypeException("Unsupported secured type - FIELD:" + field.getFieldName() + " TYPE:" + fieldMetaData.valueMetaData.type);
            }

            classSecuredFields.put(field.getThriftFieldId(), new ThriftSecuredField(true, hash));
        }
        securedFields.put(tbase, classSecuredFields);
    }

    /** Removes the secured-field configuration for every class. */
    public void removeAll() {
        securedFields.clear();
    }

    /**
     * Removes the secured-field configuration for a single field of a class.
     * No-op if the class has no configuration at all.
     */
    public void removeSecuredField(Class<? extends TBase> tbase, TFieldIdEnum field) {
        Map<Short, ThriftSecuredField> classSecuredFields = securedFields.get(tbase);
        if (classSecuredFields != null) {
            classSecuredFields.remove(field.getThriftFieldId());
        }
    }

    /** Removes the secured-field configuration for an entire class. */
    public void removeSecuredClass(Class<?> tbase) {
        securedFields.remove(tbase);
    }

    /**
     * @return {@code true} if at least one field of the given class is
     *         configured as secured
     */
    public boolean isSecured(Class<? extends TBase> tbase) {
        Map<Short, ThriftSecuredField> classSecuredFields = securedFields.get(tbase);
        if (classSecuredFields != null && classSecuredFields.size() > 0) {
            return true;
        }
        return false;
    }

    /**
     * Returns the security flags for one field of a class, falling back to the
     * shared unsecured sentinel when nothing was configured. Never returns
     * {@code null}.
     */
    public ThriftSecuredField getField(Class<? extends TBase> tbase, Short id) {
        Map<Short, ThriftSecuredField> classSecuredFields = securedFields.get(tbase);
        if (classSecuredFields == null) {
            return UNSECURED_FIELD;
        }
        ThriftSecuredField securedField = classSecuredFields.get(id);
        if (securedField == null) {
            securedField = UNSECURED_FIELD;
        }
        return securedField;
    }

    /**
     * Deciphers the protected value stored for the given field id inside the
     * secured-wrapper sub-document.
     *
     * @param id            thrift field id used as the key in the wrapper document
     * @param securedWraper BSON sub-document holding hex-encoded ciphered values
     * @return the clear bytes, or {@code null} on any failure
     */
    public byte[] decipherSecuredField(Short id, DBObject securedWraper) {
        try {
            String key = Short.toString(id);
            String hexValue = (String) securedWraper.get(key);
            byte[] protectedData = Hex.decodeHex(hexValue.toCharArray());
            return decipher(protectedData);
        } catch (Exception exp) {
            // NOTE(review): all failures (missing key, bad hex, cipher error) are
            // swallowed and reported as null — callers cannot distinguish "absent"
            // from "corrupt". Consider at least logging here.
        }
        return null;
    }

    /**
     * Deciphers a single hex-encoded protected value.
     *
     * @param hexValue hex string produced by {@link #getBSON}
     * @return the clear bytes, or {@code null} on any failure
     */
    public byte[] decipherValue(String hexValue) {
        try {
            byte[] protectedData = Hex.decodeHex(hexValue.toCharArray());
            return decipher(protectedData);
        } catch (Exception exp) {
            // NOTE(review): failures are swallowed and reported as null — see
            // decipherSecuredField above.
        }
        return null;
    }

    /**
     * Builds the BSON fragment for one string field value.
     *
     * <p>Layout produced (keys optionally prefixed with {@code prefix + "."}):
     * <ul>
     *   <li>{@code <fieldName>} — the clear value, only when the field is NOT secured;</li>
     *   <li>{@code <fieldName>} — the 64-bit digest of the value, when hashing is enabled
     *       (overwrites the clear value's key when both apply);</li>
     *   <li>{@code securedwrap.<fieldId>} — the hex-encoded ciphered UTF-8 bytes.</li>
     * </ul>
     *
     * <p>NOTE(review): the ciphered copy under {@code securedwrap.*} is written
     * unconditionally, even for fields that are not configured as secured —
     * confirm this is intended.
     *
     * @param prefix optional key prefix (nested-document path), may be null/empty
     * @param tbase  Thrift class the field belongs to
     * @param field  field being serialized
     * @param value  clear string value
     * @return the BSON fragment for this field
     * @throws TException if UTF-8 encoding or a cipher/digest operation fails
     */
    public DBObject getBSON(String prefix, Class<? extends TBase> tbase, TFieldIdEnum field, String value) throws TException {
        try {
            DBObject bson = new BasicDBObject();

            ThriftSecuredField securedField = getField(tbase, field.getThriftFieldId());

            StringBuilder builder = new StringBuilder();
            if (prefix != null && prefix.length() > 0) {
                builder.append(prefix);
                builder.append(".");
            }
            builder.append(field.getFieldName());

            if (!securedField.isSecured()) {
                bson.put(builder.toString(), value);
            }

            // Store the digest of the clear value under the field's own key.
            if (securedField.isHash()) {
                bson.put(builder.toString(), digest64(value.getBytes()));
            }

            builder = new StringBuilder();
            if (prefix != null && prefix.length() > 0) {
                builder.append(prefix);
                builder.append(".");
            }
            builder.append("securedwrap.");
            builder.append(Short.toString(field.getThriftFieldId()));

            // Store the hex-encoded ciphered value in the secured wrapper sub-document.
            bson.put(builder.toString(), Hex.encodeHexString(cipher(value.getBytes("UTF-8"))));
            return bson;
        } catch (UnsupportedEncodingException e) {
            throw new TException(e);
        }
    }

    /**
     * @param data input bytes to digest
     * @return 64-bit hash value of the input data
     */
    abstract public long digest64(byte[] data) throws TException;

    /**
     * @param data bytes to cipher
     * @return the input data in protected form
     */
    abstract public byte[] cipher(byte[] data) throws TException;

    /**
     * @param thriftObject Thrift object to cipher in its entirety
     * @return the serialized object in protected form
     */
    abstract public byte[] cipher(TBase<?, ?> thriftObject) throws TException;

    /**
     * @param data protected bytes
     * @return the deciphered clear bytes
     */
    abstract public byte[] decipher(byte[] data) throws TException;

    /**
     * Deciphers a fully ciphered Thrift object.
     *
     * @param data         protected bytes produced by {@link #cipher(TBase)}
     * @param thriftObject new instance of the target Thrift type to populate
     * @return the unwrapped Thrift object
     */
    abstract public TBase<?, ?> decipher(byte[] data, TBase<?, ?> thriftObject) throws TException;
}
/*
 * Copyright (C) The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package jotbasan.regitramobileapp.ui.camera;

import android.content.Context;
import android.graphics.Canvas;
import android.util.AttributeSet;
import android.view.View;

import com.google.android.gms.vision.CameraSource;

import java.util.HashSet;
import java.util.Set;

/**
 * A view which renders a series of custom graphics to be overlaid on top of an associated preview
 * (i.e., the camera preview).  The creator can add graphics objects, update the objects, and remove
 * them, triggering the appropriate drawing and invalidation within the view.<p>
 *
 * Supports scaling and mirroring of the graphics relative the camera's preview properties.  The
 * idea is that detection items are expressed in terms of a preview size, but need to be scaled up
 * to the full view size, and also mirrored in the case of the front-facing camera.<p>
 *
 * Associated {@link Graphic} items should use the following methods to convert to view coordinates
 * for the graphics that are drawn:
 * <ol>
 * <li>{@link Graphic#scaleX(float)} and {@link Graphic#scaleY(float)} adjust the size of the
 * supplied value from the preview scale to the view scale.</li>
 * <li>{@link Graphic#translateX(float)} and {@link Graphic#translateY(float)} adjust the coordinate
 * from the preview's coordinate system to the view coordinate system.</li>
 * </ol>
 */
public class GraphicOverlay<T extends GraphicOverlay.Graphic> extends View {
    // Guards all fields below: graphics are mutated from detector threads while
    // onDraw runs on the UI thread.
    private final Object mLock = new Object();
    private int mPreviewWidth;
    private float mWidthScaleFactor = 1.0f;
    private int mPreviewHeight;
    private float mHeightScaleFactor = 1.0f;
    private int mFacing = CameraSource.CAMERA_FACING_BACK;
    private Set<T> mGraphics = new HashSet<>();

    /**
     * Base class for a custom graphics object to be rendered within the graphic overlay.  Subclass
     * this and implement the {@link Graphic#draw(Canvas)} method to define the
     * graphics element.  Add instances to the overlay using {@link GraphicOverlay#add(Graphic)}.
     */
    public static abstract class Graphic {
        private GraphicOverlay mOverlay;

        public Graphic(GraphicOverlay overlay) {
            mOverlay = overlay;
        }

        /**
         * Draw the graphic on the supplied canvas.  Drawing should use the following methods to
         * convert to view coordinates for the graphics that are drawn:
         * <ol>
         * <li>{@link Graphic#scaleX(float)} and {@link Graphic#scaleY(float)} adjust the size of
         * the supplied value from the preview scale to the view scale.</li>
         * <li>{@link Graphic#translateX(float)} and {@link Graphic#translateY(float)} adjust the
         * coordinate from the preview's coordinate system to the view coordinate system.</li>
         * </ol>
         *
         * @param canvas drawing canvas
         */
        public abstract void draw(Canvas canvas);

        /**
         * Returns true if the supplied coordinates are within this graphic.
         */
        public abstract boolean contains(float x, float y);

        /**
         * Adjusts a horizontal value of the supplied value from the preview scale to the view
         * scale.
         */
        public float scaleX(float horizontal) {
            return horizontal * mOverlay.mWidthScaleFactor;
        }

        /**
         * Adjusts a vertical value of the supplied value from the preview scale to the view scale.
         */
        public float scaleY(float vertical) {
            return vertical * mOverlay.mHeightScaleFactor;
        }

        /**
         * Adjusts the x coordinate from the preview's coordinate system to the view coordinate
         * system.  Mirrors the coordinate horizontally for the front-facing camera.
         */
        public float translateX(float x) {
            if (mOverlay.mFacing == CameraSource.CAMERA_FACING_FRONT) {
                return mOverlay.getWidth() - scaleX(x);
            } else {
                return scaleX(x);
            }
        }

        /**
         * Adjusts the y coordinate from the preview's coordinate system to the view coordinate
         * system.
         */
        public float translateY(float y) {
            return scaleY(y);
        }

        /** Requests a redraw of the owning overlay (safe from any thread). */
        public void postInvalidate() {
            mOverlay.postInvalidate();
        }
    }

    public GraphicOverlay(Context context, AttributeSet attrs) {
        super(context, attrs);
    }

    /**
     * Removes all graphics from the overlay.
     */
    public void clear() {
        synchronized (mLock) {
            mGraphics.clear();
        }
        postInvalidate();
    }

    /**
     * Adds a graphic to the overlay.
     */
    public void add(T graphic) {
        synchronized (mLock) {
            mGraphics.add(graphic);
        }
        postInvalidate();
    }

    /**
     * Removes a graphic from the overlay.
     */
    public void remove(T graphic) {
        synchronized (mLock) {
            mGraphics.remove(graphic);
        }
        postInvalidate();
    }

    /**
     * Returns the first graphic, if any, that exists at the provided absolute screen coordinates.
     * These coordinates will be offset by the relative screen position of this view.
     * @return First graphic containing the point, or null if no text is detected.
     */
    public T getGraphicAtLocation(float rawX, float rawY) {
        synchronized (mLock) {
            // Get the position of this View so the raw location can be offset relative to the view.
            int[] location = new int[2];
            this.getLocationOnScreen(location);
            for (T graphic : mGraphics) {
                if (graphic.contains(rawX - location[0], rawY - location[1])) {
                    return graphic;
                }
            }
            return null;
        }
    }

    /**
     * Sets the camera attributes for size and facing direction, which informs how to transform
     * image coordinates later.
     */
    public void setCameraInfo(int previewWidth, int previewHeight, int facing) {
        synchronized (mLock) {
            mPreviewWidth = previewWidth;
            mPreviewHeight = previewHeight;
            mFacing = facing;
        }
        postInvalidate();
    }

    /**
     * Draws the overlay with its associated graphic objects.
     */
    @Override
    protected void onDraw(Canvas canvas) {
        super.onDraw(canvas);

        synchronized (mLock) {
            // Recompute preview->view scale factors from the current canvas size.
            if ((mPreviewWidth != 0) && (mPreviewHeight != 0)) {
                mWidthScaleFactor = (float) canvas.getWidth() / (float) mPreviewWidth;
                mHeightScaleFactor = (float) canvas.getHeight() / (float) mPreviewHeight;
            }

            for (Graphic graphic : mGraphics) {
                graphic.draw(canvas);
            }
        }
    }
}
/*
 * Copyright 2002-2007 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.orm.jdo;

import javax.jdo.JDODataStoreException;
import javax.jdo.JDOException;
import javax.jdo.JDOFatalDataStoreException;
import javax.jdo.JDOFatalUserException;
import javax.jdo.JDOObjectNotFoundException;
import javax.jdo.JDOOptimisticVerificationException;
import javax.jdo.JDOUserException;
import javax.jdo.PersistenceManager;
import javax.jdo.PersistenceManagerFactory;
import javax.jdo.Query;
import javax.sql.DataSource;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import org.springframework.dao.DataAccessException;
import org.springframework.dao.DataAccessResourceFailureException;
import org.springframework.jdbc.datasource.DataSourceUtils;
import org.springframework.jdbc.support.SQLErrorCodeSQLExceptionTranslator;
import org.springframework.jdbc.support.SQLExceptionTranslator;
import org.springframework.jdbc.support.SQLStateSQLExceptionTranslator;
import org.springframework.transaction.support.TransactionSynchronizationAdapter;
import org.springframework.transaction.support.TransactionSynchronizationManager;
import org.springframework.util.Assert;

/**
 * Helper class featuring methods for JDO PersistenceManager handling,
 * allowing for reuse of PersistenceManager instances within transactions.
 * Also provides support for exception translation.
 *
 * <p>Used internally by {@link JdoTemplate}, {@link JdoInterceptor} and
 * {@link JdoTransactionManager}. Can also be used directly in application code.
 *
 * @author Juergen Hoeller
 * @since 03.06.2003
 * @see JdoTransactionManager
 * @see org.springframework.transaction.jta.JtaTransactionManager
 * @see org.springframework.transaction.support.TransactionSynchronizationManager
 */
public abstract class PersistenceManagerFactoryUtils {

	/**
	 * Order value for TransactionSynchronization objects that clean up JDO
	 * PersistenceManagers. Return DataSourceUtils.CONNECTION_SYNCHRONIZATION_ORDER - 100
	 * to execute PersistenceManager cleanup before JDBC Connection cleanup, if any.
	 * @see org.springframework.jdbc.datasource.DataSourceUtils#CONNECTION_SYNCHRONIZATION_ORDER
	 */
	public static final int PERSISTENCE_MANAGER_SYNCHRONIZATION_ORDER =
			DataSourceUtils.CONNECTION_SYNCHRONIZATION_ORDER - 100;

	private static final Log logger = LogFactory.getLog(PersistenceManagerFactoryUtils.class);


	/**
	 * Create an appropriate SQLExceptionTranslator for the given PersistenceManagerFactory.
	 * <p>If a DataSource is found, creates a SQLErrorCodeSQLExceptionTranslator for the
	 * DataSource; else, falls back to a SQLStateSQLExceptionTranslator.
	 * @param connectionFactory the connection factory of the PersistenceManagerFactory
	 * (may be <code>null</code>)
	 * @return the SQLExceptionTranslator (never <code>null</code>)
	 * @see javax.jdo.PersistenceManagerFactory#getConnectionFactory()
	 * @see org.springframework.jdbc.support.SQLErrorCodeSQLExceptionTranslator
	 * @see org.springframework.jdbc.support.SQLStateSQLExceptionTranslator
	 */
	static SQLExceptionTranslator newJdbcExceptionTranslator(Object connectionFactory) {
		// Check for PersistenceManagerFactory's DataSource.
		if (connectionFactory instanceof DataSource) {
			return new SQLErrorCodeSQLExceptionTranslator((DataSource) connectionFactory);
		}
		else {
			return new SQLStateSQLExceptionTranslator();
		}
	}

	/**
	 * Obtain a JDO PersistenceManager via the given factory. Is aware of a
	 * corresponding PersistenceManager bound to the current thread,
	 * for example when using JdoTransactionManager. Will create a new
	 * PersistenceManager else, if "allowCreate" is <code>true</code>.
	 * @param pmf PersistenceManagerFactory to create the PersistenceManager with
	 * @param allowCreate if a non-transactional PersistenceManager should be created
	 * when no transactional PersistenceManager can be found for the current thread
	 * @return the PersistenceManager
	 * @throws DataAccessResourceFailureException if the PersistenceManager couldn't be obtained
	 * @throws IllegalStateException if no thread-bound PersistenceManager found and
	 * "allowCreate" is <code>false</code>
	 * @see JdoTransactionManager
	 */
	public static PersistenceManager getPersistenceManager(PersistenceManagerFactory pmf, boolean allowCreate)
	    throws DataAccessResourceFailureException, IllegalStateException {

		try {
			return doGetPersistenceManager(pmf, allowCreate);
		}
		catch (JDOException ex) {
			// Wrap the raw JDO exception in Spring's resource-failure exception.
			throw new DataAccessResourceFailureException("Could not obtain JDO PersistenceManager", ex);
		}
	}

	/**
	 * Obtain a JDO PersistenceManager via the given factory. Is aware of a
	 * corresponding PersistenceManager bound to the current thread,
	 * for example when using JdoTransactionManager. Will create a new
	 * PersistenceManager else, if "allowCreate" is <code>true</code>.
	 * <p>Same as <code>getPersistenceManager</code>, but throwing the original JDOException.
	 * @param pmf PersistenceManagerFactory to create the PersistenceManager with
	 * @param allowCreate if a non-transactional PersistenceManager should be created
	 * when no transactional PersistenceManager can be found for the current thread
	 * @return the PersistenceManager
	 * @throws JDOException if the PersistenceManager couldn't be created
	 * @throws IllegalStateException if no thread-bound PersistenceManager found and
	 * "allowCreate" is <code>false</code>
	 * @see #getPersistenceManager(javax.jdo.PersistenceManagerFactory, boolean)
	 * @see JdoTransactionManager
	 */
	public static PersistenceManager doGetPersistenceManager(PersistenceManagerFactory pmf, boolean allowCreate)
	    throws JDOException, IllegalStateException {

		Assert.notNull(pmf, "No PersistenceManagerFactory specified");

		// Reuse any PersistenceManager already bound to the current thread.
		PersistenceManagerHolder pmHolder =
				(PersistenceManagerHolder) TransactionSynchronizationManager.getResource(pmf);
		if (pmHolder != null) {
			// Lazily register cleanup synchronization if a transaction became active
			// after the holder was bound.
			if (!pmHolder.isSynchronizedWithTransaction() &&
					TransactionSynchronizationManager.isSynchronizationActive()) {
				pmHolder.setSynchronizedWithTransaction(true);
				TransactionSynchronizationManager.registerSynchronization(
						new PersistenceManagerSynchronization(pmHolder, pmf, false));
			}
			return pmHolder.getPersistenceManager();
		}

		if (!allowCreate && !TransactionSynchronizationManager.isSynchronizationActive()) {
			throw new IllegalStateException("No JDO PersistenceManager bound to thread, " +
			    "and configuration does not allow creation of non-transactional one here");
		}

		logger.debug("Opening JDO PersistenceManager");
		PersistenceManager pm = pmf.getPersistenceManager();

		if (TransactionSynchronizationManager.isSynchronizationActive()) {
			logger.debug("Registering transaction synchronization for JDO PersistenceManager");
			// Use same PersistenceManager for further JDO actions within the transaction.
			// Thread object will get removed by synchronization at transaction completion.
			pmHolder = new PersistenceManagerHolder(pm);
			pmHolder.setSynchronizedWithTransaction(true);
			TransactionSynchronizationManager.registerSynchronization(
					new PersistenceManagerSynchronization(pmHolder, pmf, true));
			TransactionSynchronizationManager.bindResource(pmf, pmHolder);
		}

		return pm;
	}

	/**
	 * Return whether the given JDO PersistenceManager is transactional, that is,
	 * bound to the current thread by Spring's transaction facilities.
	 * @param pm the JDO PersistenceManager to check
	 * @param pmf JDO PersistenceManagerFactory that the PersistenceManager
	 * was created with (can be <code>null</code>)
	 * @return whether the PersistenceManager is transactional
	 */
	public static boolean isPersistenceManagerTransactional(
			PersistenceManager pm, PersistenceManagerFactory pmf) {

		if (pmf == null) {
			return false;
		}
		PersistenceManagerHolder pmHolder =
				(PersistenceManagerHolder) TransactionSynchronizationManager.getResource(pmf);
		// Identity comparison: only the exact thread-bound instance counts as transactional.
		return (pmHolder != null && pm == pmHolder.getPersistenceManager());
	}

	/**
	 * Apply the current transaction timeout, if any, to the given JDO Query object.
	 * @param query the JDO Query object
	 * @param pmf JDO PersistenceManagerFactory that the Query was created for
	 * @param jdoDialect the JdoDialect to use for applying a query timeout
	 * (must not be <code>null</code>)
	 * @throws JDOException if thrown by JDO methods
	 * @see JdoDialect#applyQueryTimeout
	 */
	public static void applyTransactionTimeout(
			Query query, PersistenceManagerFactory pmf, JdoDialect jdoDialect) throws JDOException {

		Assert.notNull(query, "No Query object specified");
		PersistenceManagerHolder pmHolder =
		    (PersistenceManagerHolder) TransactionSynchronizationManager.getResource(pmf);
		if (pmHolder != null && pmHolder.hasTimeout()) {
			jdoDialect.applyQueryTimeout(query, pmHolder.getTimeToLiveInSeconds());
		}
	}

	/**
	 * Convert the given JDOException to an appropriate exception from the
	 * <code>org.springframework.dao</code> hierarchy.
	 * <p>The most important cases like object not found or optimistic locking
	 * failure are covered here. For more fine-granular conversion, JdoAccessor and
	 * JdoTransactionManager support sophisticated translation of exceptions via a
	 * JdoDialect.
	 * @param ex JDOException that occurred
	 * @return the corresponding DataAccessException instance
	 * @see JdoAccessor#convertJdoAccessException
	 * @see JdoTransactionManager#convertJdoAccessException
	 * @see JdoDialect#translateException
	 */
	public static DataAccessException convertJdoAccessException(JDOException ex) {
		// NOTE(review): the first two cases throw directly while the rest return —
		// callers that rely on the returned value never see these two translations
		// as a return. Confirm this asymmetry is intended before changing it.
		if (ex instanceof JDOObjectNotFoundException) {
			throw new JdoObjectRetrievalFailureException((JDOObjectNotFoundException) ex);
		}
		if (ex instanceof JDOOptimisticVerificationException) {
			throw new JdoOptimisticLockingFailureException((JDOOptimisticVerificationException) ex);
		}
		if (ex instanceof JDODataStoreException) {
			return new JdoResourceFailureException((JDODataStoreException) ex);
		}
		if (ex instanceof JDOFatalDataStoreException) {
			return new JdoResourceFailureException((JDOFatalDataStoreException) ex);
		}
		if (ex instanceof JDOUserException) {
			return new JdoUsageException((JDOUserException) ex);
		}
		if (ex instanceof JDOFatalUserException) {
			return new JdoUsageException((JDOFatalUserException) ex);
		}
		// fallback
		return new JdoSystemException(ex);
	}

	/**
	 * Close the given PersistenceManager, created via the given factory,
	 * if it is not managed externally (i.e. not bound to the thread).
	 * @param pm PersistenceManager to close
	 * @param pmf PersistenceManagerFactory that the PersistenceManager was created with
	 * (can be <code>null</code>)
	 */
	public static void releasePersistenceManager(PersistenceManager pm, PersistenceManagerFactory pmf) {
		try {
			doReleasePersistenceManager(pm, pmf);
		}
		catch (JDOException ex) {
			// Cleanup failure is deliberately non-fatal: log at debug and continue.
			logger.debug("Could not close JDO PersistenceManager", ex);
		}
		catch (Throwable ex) {
			logger.debug("Unexpected exception on closing JDO PersistenceManager", ex);
		}
	}

	/**
	 * Actually release a PersistenceManager for the given factory.
	 * Same as <code>releasePersistenceManager</code>, but throwing the original JDOException.
	 * @param pm PersistenceManager to close
	 * @param pmf PersistenceManagerFactory that the PersistenceManager was created with
	 * (can be <code>null</code>)
	 * @throws JDOException if thrown by JDO methods
	 */
	public static void doReleasePersistenceManager(PersistenceManager pm, PersistenceManagerFactory pmf)
			throws JDOException {

		if (pm == null) {
			return;
		}
		// Only release non-transactional PersistenceManagers.
		if (!isPersistenceManagerTransactional(pm, pmf)) {
			logger.debug("Closing JDO PersistenceManager");
			pm.close();
		}
	}


	/**
	 * Callback for resource cleanup at the end of a non-JDO transaction
	 * (e.g. when participating in a JtaTransactionManager transaction).
	 * @see org.springframework.transaction.jta.JtaTransactionManager
	 */
	private static class PersistenceManagerSynchronization extends TransactionSynchronizationAdapter {

		private final PersistenceManagerHolder persistenceManagerHolder;

		private final PersistenceManagerFactory persistenceManagerFactory;

		// Whether this synchronization created (and therefore owns) the holder's
		// PersistenceManager and must release it on completion.
		private final boolean newPersistenceManager;

		private boolean holderActive = true;

		public PersistenceManagerSynchronization(
				PersistenceManagerHolder pmHolder, PersistenceManagerFactory pmf, boolean newPersistenceManager) {
			this.persistenceManagerHolder = pmHolder;
			this.persistenceManagerFactory = pmf;
			this.newPersistenceManager = newPersistenceManager;
		}

		public int getOrder() {
			// Run PersistenceManager cleanup before JDBC Connection cleanup.
			return PERSISTENCE_MANAGER_SYNCHRONIZATION_ORDER;
		}

		public void suspend() {
			if (this.holderActive) {
				TransactionSynchronizationManager.unbindResource(this.persistenceManagerFactory);
			}
		}

		public void resume() {
			if (this.holderActive) {
				TransactionSynchronizationManager.bindResource(
						this.persistenceManagerFactory, this.persistenceManagerHolder);
			}
		}

		public void beforeCompletion() {
			// Only unbind and release if we opened the PersistenceManager ourselves.
			if (this.newPersistenceManager) {
				TransactionSynchronizationManager.unbindResource(this.persistenceManagerFactory);
				this.holderActive = false;
				releasePersistenceManager(
						this.persistenceManagerHolder.getPersistenceManager(), this.persistenceManagerFactory);
			}
		}

		public void afterCompletion(int status) {
			this.persistenceManagerHolder.setSynchronizedWithTransaction(false);
		}
	}

}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair; import org.apache.hadoop.yarn.api.records.*; import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainer; import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode; import org.apache.hadoop.yarn.util.resource.Resources; import org.junit.Test; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; import java.util.ArrayList; import java.util.Collections; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertTrue; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; /** * Test scheduler node, especially preemption reservations. 
*/ public class TestFSSchedulerNode { private final ArrayList<RMContainer> containers = new ArrayList<>(); private RMNode createNode() { RMNode node = mock(RMNode.class); when(node.getTotalCapability()).thenReturn(Resource.newInstance(8192, 8)); when(node.getHostName()).thenReturn("host.domain.com"); return node; } private void createDefaultContainer() { createContainer(Resource.newInstance(1024, 1), null); } private RMContainer createContainer( Resource request, ApplicationAttemptId appAttemptId) { RMContainer container = mock(RMContainer.class); Container containerInner = mock(Container.class); ContainerId id = mock(ContainerId.class); when(id.getContainerId()).thenReturn((long)containers.size()); when(containerInner.getResource()). thenReturn(Resources.clone(request)); when(containerInner.getId()).thenReturn(id); when(containerInner.getExecutionType()). thenReturn(ExecutionType.GUARANTEED); when(container.getApplicationAttemptId()).thenReturn(appAttemptId); when(container.getContainerId()).thenReturn(id); when(container.getContainer()).thenReturn(containerInner); when(container.getExecutionType()).thenReturn(ExecutionType.GUARANTEED); when(container.getAllocatedResource()). thenReturn(Resources.clone(request)); when(container.compareTo(any())).thenAnswer(new Answer<Integer>() { public Integer answer(InvocationOnMock invocation) { return Long.compare( ((RMContainer)invocation.getMock()).getContainerId() .getContainerId(), ((RMContainer)invocation.getArguments()[0]).getContainerId() .getContainerId()); } }); containers.add(container); return container; } private void saturateCluster(FSSchedulerNode schedulerNode) { while (!Resources.isNone(schedulerNode.getUnallocatedResource())) { createDefaultContainer(); schedulerNode.allocateContainer(containers.get(containers.size() - 1)); schedulerNode.containerStarted(containers.get(containers.size() - 1). 
getContainerId()); } } private FSAppAttempt createStarvingApp(FSSchedulerNode schedulerNode, Resource request) { FSAppAttempt starvingApp = mock(FSAppAttempt.class); ApplicationAttemptId appAttemptId = mock(ApplicationAttemptId.class); when(starvingApp.getApplicationAttemptId()).thenReturn(appAttemptId); when(starvingApp.assignContainer(schedulerNode)).thenAnswer( new Answer<Resource>() { @Override public Resource answer(InvocationOnMock invocationOnMock) throws Throwable { Resource response = Resource.newInstance(0, 0); while (!Resources.isNone(request) && !Resources.isNone(schedulerNode.getUnallocatedResource())) { RMContainer container = createContainer(request, appAttemptId); schedulerNode.allocateContainer(container); Resources.addTo(response, container.getAllocatedResource()); Resources.subtractFrom(request, container.getAllocatedResource()); } return response; } }); when(starvingApp.isStarved()).thenAnswer( new Answer<Boolean>() { @Override public Boolean answer(InvocationOnMock invocationOnMock) throws Throwable { return !Resources.isNone(request); } } ); when(starvingApp.getPendingDemand()).thenReturn(request); return starvingApp; } private void finalValidation(FSSchedulerNode schedulerNode) { assertEquals("Everything should have been released", Resources.none(), schedulerNode.getAllocatedResource()); assertTrue("No containers should be reserved for preemption", schedulerNode.containersForPreemption.isEmpty()); assertTrue("No resources should be reserved for preemptors", schedulerNode.resourcesPreemptedForApp.isEmpty()); assertEquals( "No amount of resource should be reserved for preemptees", Resources.none(), schedulerNode.getTotalReserved()); } private void allocateContainers(FSSchedulerNode schedulerNode) { FairScheduler.assignPreemptedContainers(schedulerNode); } /** * Allocate and release a single container. 
*/ @Test public void testSimpleAllocation() { RMNode node = createNode(); FSSchedulerNode schedulerNode = new FSSchedulerNode(node, false); createDefaultContainer(); assertEquals("Nothing should have been allocated, yet", Resources.none(), schedulerNode.getAllocatedResource()); schedulerNode.allocateContainer(containers.get(0)); assertEquals("Container should be allocated", containers.get(0).getContainer().getResource(), schedulerNode.getAllocatedResource()); schedulerNode.releaseContainer(containers.get(0).getContainerId(), true); assertEquals("Everything should have been released", Resources.none(), schedulerNode.getAllocatedResource()); // Check that we are error prone schedulerNode.releaseContainer(containers.get(0).getContainerId(), true); finalValidation(schedulerNode); } /** * Allocate and release three containers with launch. */ @Test public void testMultipleAllocations() { RMNode node = createNode(); FSSchedulerNode schedulerNode = new FSSchedulerNode(node, false); createDefaultContainer(); createDefaultContainer(); createDefaultContainer(); assertEquals("Nothing should have been allocated, yet", Resources.none(), schedulerNode.getAllocatedResource()); schedulerNode.allocateContainer(containers.get(0)); schedulerNode.containerStarted(containers.get(0).getContainerId()); schedulerNode.allocateContainer(containers.get(1)); schedulerNode.containerStarted(containers.get(1).getContainerId()); schedulerNode.allocateContainer(containers.get(2)); assertEquals("Container should be allocated", Resources.multiply(containers.get(0).getContainer().getResource(), 3.0), schedulerNode.getAllocatedResource()); schedulerNode.releaseContainer(containers.get(1).getContainerId(), true); schedulerNode.releaseContainer(containers.get(2).getContainerId(), true); schedulerNode.releaseContainer(containers.get(0).getContainerId(), true); finalValidation(schedulerNode); } /** * Allocate and release a single container. 
*/ @Test public void testSimplePreemption() { RMNode node = createNode(); FSSchedulerNode schedulerNode = new FSSchedulerNode(node, false); // Launch containers and saturate the cluster saturateCluster(schedulerNode); assertEquals("Container should be allocated", Resources.multiply(containers.get(0).getContainer().getResource(), containers.size()), schedulerNode.getAllocatedResource()); // Request preemption FSAppAttempt starvingApp = createStarvingApp(schedulerNode, Resource.newInstance(1024, 1)); schedulerNode.addContainersForPreemption( Collections.singletonList(containers.get(0)), starvingApp); assertEquals( "No resource amount should be reserved for preemptees", containers.get(0).getAllocatedResource(), schedulerNode.getTotalReserved()); // Preemption occurs release one container schedulerNode.releaseContainer(containers.get(0).getContainerId(), true); allocateContainers(schedulerNode); assertEquals("Container should be allocated", schedulerNode.getTotalResource(), schedulerNode.getAllocatedResource()); // Release all remaining containers for (int i = 1; i < containers.size(); ++i) { schedulerNode.releaseContainer(containers.get(i).getContainerId(), true); } finalValidation(schedulerNode); } /** * Allocate a single container twice and release. 
*/ @Test public void testDuplicatePreemption() { RMNode node = createNode(); FSSchedulerNode schedulerNode = new FSSchedulerNode(node, false); // Launch containers and saturate the cluster saturateCluster(schedulerNode); assertEquals("Container should be allocated", Resources.multiply(containers.get(0).getContainer().getResource(), containers.size()), schedulerNode.getAllocatedResource()); // Request preemption twice FSAppAttempt starvingApp = createStarvingApp(schedulerNode, Resource.newInstance(1024, 1)); schedulerNode.addContainersForPreemption( Collections.singletonList(containers.get(0)), starvingApp); schedulerNode.addContainersForPreemption( Collections.singletonList(containers.get(0)), starvingApp); assertEquals( "No resource amount should be reserved for preemptees", containers.get(0).getAllocatedResource(), schedulerNode.getTotalReserved()); // Preemption occurs release one container schedulerNode.releaseContainer(containers.get(0).getContainerId(), true); allocateContainers(schedulerNode); assertEquals("Container should be allocated", schedulerNode.getTotalResource(), schedulerNode.getAllocatedResource()); // Release all remaining containers for (int i = 1; i < containers.size(); ++i) { schedulerNode.releaseContainer(containers.get(i).getContainerId(), true); } finalValidation(schedulerNode); } /** * Allocate and release three containers requested by two apps. 
*/ @Test public void testComplexPreemption() { RMNode node = createNode(); FSSchedulerNode schedulerNode = new FSSchedulerNode(node, false); // Launch containers and saturate the cluster saturateCluster(schedulerNode); assertEquals("Container should be allocated", Resources.multiply(containers.get(0).getContainer().getResource(), containers.size()), schedulerNode.getAllocatedResource()); // Preempt a container FSAppAttempt starvingApp1 = createStarvingApp(schedulerNode, Resource.newInstance(2048, 2)); FSAppAttempt starvingApp2 = createStarvingApp(schedulerNode, Resource.newInstance(1024, 1)); // Preemption thread kicks in schedulerNode.addContainersForPreemption( Collections.singletonList(containers.get(0)), starvingApp1); schedulerNode.addContainersForPreemption( Collections.singletonList(containers.get(1)), starvingApp1); schedulerNode.addContainersForPreemption( Collections.singletonList(containers.get(2)), starvingApp2); // Preemption happens schedulerNode.releaseContainer(containers.get(0).getContainerId(), true); schedulerNode.releaseContainer(containers.get(2).getContainerId(), true); schedulerNode.releaseContainer(containers.get(1).getContainerId(), true); allocateContainers(schedulerNode); assertEquals("Container should be allocated", schedulerNode.getTotalResource(), schedulerNode.getAllocatedResource()); // Release all containers for (int i = 3; i < containers.size(); ++i) { schedulerNode.releaseContainer(containers.get(i).getContainerId(), true); } finalValidation(schedulerNode); } /** * Allocate and release three containers requested by two apps in two rounds. 
*/ @Test public void testMultiplePreemptionEvents() { RMNode node = createNode(); FSSchedulerNode schedulerNode = new FSSchedulerNode(node, false); // Launch containers and saturate the cluster saturateCluster(schedulerNode); assertEquals("Container should be allocated", Resources.multiply(containers.get(0).getContainer().getResource(), containers.size()), schedulerNode.getAllocatedResource()); // Preempt a container FSAppAttempt starvingApp1 = createStarvingApp(schedulerNode, Resource.newInstance(2048, 2)); FSAppAttempt starvingApp2 = createStarvingApp(schedulerNode, Resource.newInstance(1024, 1)); // Preemption thread kicks in schedulerNode.addContainersForPreemption( Collections.singletonList(containers.get(0)), starvingApp1); schedulerNode.addContainersForPreemption( Collections.singletonList(containers.get(1)), starvingApp1); schedulerNode.addContainersForPreemption( Collections.singletonList(containers.get(2)), starvingApp2); // Preemption happens schedulerNode.releaseContainer(containers.get(1).getContainerId(), true); allocateContainers(schedulerNode); schedulerNode.releaseContainer(containers.get(2).getContainerId(), true); schedulerNode.releaseContainer(containers.get(0).getContainerId(), true); allocateContainers(schedulerNode); assertEquals("Container should be allocated", schedulerNode.getTotalResource(), schedulerNode.getAllocatedResource()); // Release all containers for (int i = 3; i < containers.size(); ++i) { schedulerNode.releaseContainer(containers.get(i).getContainerId(), true); } finalValidation(schedulerNode); } /** * Allocate and release a single container and delete the app in between. 
*/ @Test public void testPreemptionToCompletedApp() { RMNode node = createNode(); FSSchedulerNode schedulerNode = new FSSchedulerNode(node, false); // Launch containers and saturate the cluster saturateCluster(schedulerNode); assertEquals("Container should be allocated", Resources.multiply(containers.get(0).getContainer().getResource(), containers.size()), schedulerNode.getAllocatedResource()); // Preempt a container FSAppAttempt starvingApp = createStarvingApp(schedulerNode, Resource.newInstance(1024, 1)); schedulerNode.addContainersForPreemption( Collections.singletonList(containers.get(0)), starvingApp); schedulerNode.releaseContainer(containers.get(0).getContainerId(), true); // Stop the application then try to satisfy the reservation // and observe that there are still free resources not allocated to // the deleted app when(starvingApp.isStopped()).thenReturn(true); allocateContainers(schedulerNode); assertNotEquals("Container should be allocated", schedulerNode.getTotalResource(), schedulerNode.getAllocatedResource()); // Release all containers for (int i = 1; i < containers.size(); ++i) { schedulerNode.releaseContainer(containers.get(i).getContainerId(), true); } finalValidation(schedulerNode); } /** * Preempt a bigger container than the preemption request. 
*/
  @Test
  public void testPartialReservedPreemption() {
    RMNode rmNode = createNode();
    FSSchedulerNode fsNode = new FSSchedulerNode(rmNode, false);

    // Fill the node completely with running containers.
    saturateCluster(fsNode);
    assertEquals("Container should be allocated",
        Resources.multiply(containers.get(0).getContainer().getResource(),
            containers.size()),
        fsNode.getAllocatedResource());

    // The starving app demands less than one whole container.
    Resource originalStarvingAppDemand = Resource.newInstance(512, 1);
    FSAppAttempt starvingApp =
        createStarvingApp(fsNode, originalStarvingAppDemand);
    fsNode.addContainersForPreemption(
        Collections.singletonList(containers.get(0)), starvingApp);

    // Preemption occurs.
    fsNode.releaseContainer(containers.get(0).getContainerId(), true);

    // Only part of the freed container can be reassigned.
    allocateContainers(fsNode);
    assertEquals("Container should be allocated",
        Resources.subtract(fsNode.getTotalResource(),
            Resource.newInstance(512, 0)),
        fsNode.getAllocatedResource());

    // Cleanup simulating a node update.
    fsNode.getPreemptionList();

    // Drain the remaining containers.
    for (int i = 1; i < containers.size(); ++i) {
      fsNode.releaseContainer(containers.get(i).getContainerId(), true);
    }
    finalValidation(fsNode);
  }
}
package agents.manic;

import java.util.Random;
import common.Matrix;
import common.Vec;
import common.json.JSONObject;
import common.json.JSONArray;

/// A single fully-connected neural-network layer with a tanh activation.
/// Holds the weight matrix and bias vector, plus per-pass scratch buffers:
/// the pre-activation sums (net), the activations, and the error terms used
/// by the backpropagation methods.
public class LayerTanh {
	public Matrix weights; // rows are inputs, cols are outputs
	public double[] bias;       // one bias per output unit
	public double[] net;        // pre-activation sums: bias + weighted inputs
	public double[] activation; // tanh(net)
	public double[] error;      // error signal consumed by the backprop methods
	//double[] hinge;


	/// Makes a layer with the given number of input and output units.
	/// Buffers are zero-initialized; call initWeights to randomize weights.
	LayerTanh(int inputs, int outputs) {
		weights = new Matrix();
		weights.setSize(inputs, outputs);
		bias = new double[outputs];
		net = new double[outputs];
		activation = new double[outputs];
		error = new double[outputs];
		//hinge = new double[outputs];
	}


	/// Copy constructor: deep-copies all state from the other layer.
	LayerTanh(LayerTanh that) {
		weights = new Matrix(that.weights);
		bias = Vec.copy(that.bias);
		net = Vec.copy(that.net);
		activation = Vec.copy(that.activation);
		error = Vec.copy(that.error);
		//hinge = Vec.copy(that.hinge);
	}


	/// Unmarshaling constructor: restores weights and bias from a JSON DOM.
	/// Scratch buffers are freshly allocated (their contents are not persisted).
	/// Throws IllegalArgumentException if the bias length does not match the
	/// number of output units.
	LayerTanh(JSONObject obj) {
		weights = new Matrix((JSONObject)obj.get("weights"));
		bias = Vec.unmarshal((JSONArray)obj.get("bias"));
		net = new double[weights.cols()];
		activation = new double[weights.cols()];
		error = new double[weights.cols()];
		//hinge = unmarshalVector((JSONArray)obj.get("hinge"));
		if(bias.length != weights.cols() /*|| hinge.length != weights.cols()*/)
			throw new IllegalArgumentException("mismatching sizes");
	}


	/// Marshal this object into a JSON DOM. Only the learned parameters
	/// (weights and bias) are persisted; scratch buffers are not.
	JSONObject marshal() {
		JSONObject obj = new JSONObject();
		obj.put("weights", weights.marshal());
		obj.put("bias", Vec.marshal(bias));
		//obj.put("hinge", Vec.marshal(hinge));
		return obj;
	}


	/// Copies the learned parameters (weights and bias) from src into this
	/// layer. Both layers must have identical dimensions.
	/// NOTE(review): setSize(0, cols) followed by copyPart presumably clears
	/// the rows so copyPart can re-add them — confirm against the semantics
	/// of common.Matrix before relying on this.
	void copy(LayerTanh src) {
		if(src.weights.rows() != weights.rows() || src.weights.cols() != weights.cols())
			throw new IllegalArgumentException("mismatching sizes");
		weights.setSize(0, src.weights.cols());
		weights.copyPart(src.weights, 0, 0, src.weights.rows(), src.weights.cols());
		for(int i = 0; i < bias.length; i++) {
			bias[i] = src.bias[i];
			//hinge[i] = src.hinge[i];
		}
	}


	/// Number of input units (rows of the weight matrix).
	int inputCount() { return weights.rows(); }

	/// Number of output units (columns of the weight matrix).
	int outputCount() { return weights.cols(); }


	/// Initializes weights and biases with Gaussian noise of deviation
	/// max(0.3, 1/inputs).
	void initWeights(Random r) {
		double dev = Math.max(0.3, 1.0 / weights.rows());
		for(int i = 0; i < weights.rows(); i++) {
			double[] row = weights.row(i);
			for(int j = 0; j < weights.cols(); j++) {
				row[j] = dev * r.nextGaussian();
			}
		}
		for(int j = 0; j < weights.cols(); j++) {
			bias[j] = dev * r.nextGaussian();
			//hinge[j] = 0.0;
		}
	}


	/// Computes net = bias + in * weights. Does not apply the activation
	/// function; call activate() afterwards.
	void feedForward(double[] in) {
		if(in.length != weights.rows())
			throw new IllegalArgumentException("size mismatch. " + Integer.toString(in.length) + " != " + Integer.toString(weights.rows()));
		for(int i = 0; i < net.length; i++)
			net[i] = bias[i];
		for(int j = 0; j < weights.rows(); j++) {
			double v = in[j];
			double[] w = weights.row(j);
			for(int i = 0; i < weights.cols(); i++)
				net[i] += v * w[i];
		}
	}


	/// Same as feedForward, but the input vector is supplied in two pieces
	/// that are treated as logically concatenated (in1 followed by in2).
	void feedForward2(double[] in1, double[] in2) {
		if(in1.length + in2.length != weights.rows())
			throw new IllegalArgumentException("size mismatch. " + Integer.toString(in1.length) + " + " + Integer.toString(in2.length) + " != " + Integer.toString(weights.rows()));
		for(int i = 0; i < net.length; i++)
			net[i] = bias[i];
		for(int j = 0; j < in1.length; j++) {
			double v = in1[j];
			double[] w = weights.row(j);
			for(int i = 0; i < weights.cols(); i++)
				net[i] += v * w[i];
		}
		for(int j = 0; j < in2.length; j++) {
			double v = in2[j];
			double[] w = weights.row(in1.length + j);
			for(int i = 0; i < weights.cols(); i++)
				net[i] += v * w[i];
		}
	}


	/// Applies tanh elementwise to net, storing the results in activation.
	void activate() {
		for(int i = 0; i < net.length; i++) {
			activation[i] = Math.tanh(net[i]);
			//activation[i] = hinge[i] * (Math.sqrt(net[i] * net[i] + 1) - 1) + net[i];
		}
	}


	/// Stores target - activation into error. Targets must lie in [-1, 1]
	/// (the range of tanh); values outside that range are rejected.
	void computeError(double[] target) {
		if(target.length != activation.length)
			throw new IllegalArgumentException("size mismatch. " + Integer.toString(target.length) + " != " + Integer.toString(activation.length));
		for(int i = 0; i < activation.length; i++) {
			if(target[i] < -1.0 || target[i] > 1.0)
				throw new IllegalArgumentException("target value out of range for the tanh activation function");
			error[i] = target[i] - activation[i];
		}
	}


	/// Multiplies error by the tanh derivative (1 - activation^2), turning
	/// the output-space error into a pre-activation error signal.
	void deactivate() {
		for(int i = 0; i < error.length; i++) {
			error[i] *= (1.0 - activation[i] * activation[i]);
			//error[i] *= (net[i] * hinge[i] / Math.sqrt(net[i] * net[i] + 1) + 1);
		}
	}


	/// Projects this layer's error backward through the weights into the
	/// upstream buffer. Does not modify any weights.
	void feedBack(double[] upstream) {
		if(upstream.length != weights.rows())
			throw new IllegalArgumentException("size mismatch");
		for(int j = 0; j < weights.rows(); j++) {
			double[] w = weights.row(j);
			double d = 0.0;
			for(int i = 0; i < weights.cols(); i++) {
				d += error[i] * w[i];
			}
			upstream[j] = d;
		}
	}


	/// Nudges the given inputs along the back-projected error direction
	/// (gradient step on the inputs rather than the weights).
	void refineInputs(double[] inputs, double learningRate) {
		if(inputs.length != weights.rows())
			throw new IllegalArgumentException("size mismatch");
		for(int j = 0; j < weights.rows(); j++) {
			double[] w = weights.row(j);
			double d = 0.0;
			for(int i = 0; i < weights.cols(); i++) {
				d += error[i] * w[i];
			}
			inputs[j] += learningRate * d;
		}
	}


	/// Applies one gradient step to the bias and weights, using the stored
	/// error signal and the inputs that produced it.
	void updateWeights(double[] in, double learningRate) {
		for(int i = 0; i < bias.length; i++) {
			bias[i] += learningRate * error[i];
		}
		for(int j = 0; j < weights.rows(); j++) {
			double[] w = weights.row(j);
			double x = learningRate * in[j];
			for(int i = 0; i < weights.cols(); i++) {
				w[i] += x * error[i];
			}
		}
	}

/*	void bendHinge(double learningRate) {
		for(int i = 0; i < hinge.length; i++) {
			hinge[i] = Math.max(-1.0, Math.min(1.0, hinge[i] + learningRate * error[i] * (Math.sqrt(net[i] * net[i] + 1.0) - 1.0)));
		}
	}

	// Applies both L2 and L1 regularization to the hinge
	void straightenHinge(double lambda) {
		for(int i = 0; i < hinge.length; i++) {
			hinge[i] *= (1.0 - lambda);
			if(hinge[i] < 0.0)
				hinge[i] += lambda;
			else
				hinge[i] -= lambda;
		}
	}
*/

	// Applies both L2 and L1 regularization to the weights and bias values
	// (multiplicative shrink for L2, then a constant shrink toward zero for L1).
	void regularizeWeights(double lambda) {
		for(int i = 0; i < weights.rows(); i++) {
			double[] row = weights.row(i);
			for(int j = 0; j < row.length; j++) {
				row[j] *= (1.0 - lambda);
				if(row[j] < 0.0)
					row[j] += lambda;
				else
					row[j] -= lambda;
			}
		}
		for(int j = 0; j < bias.length; j++) {
			bias[j] *= (1.0 - lambda);
			if(bias[j] < 0.0)
				bias[j] += lambda;
			else
				bias[j] -= lambda;
		}
	}
}
/*
 * Copyright (c) 2014, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
 *
 * WSO2 Inc. licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.wso2.emm.agent.proxy;

import android.content.Context;
import android.os.AsyncTask;
import android.util.Log;
import org.wso2.emm.agent.proxy.beans.EndPointInfo;
import org.wso2.emm.agent.proxy.beans.Token;
import org.wso2.emm.agent.proxy.interfaces.APIResultCallBack;
import org.wso2.emm.agent.proxy.interfaces.TokenCallBack;
import org.wso2.emm.agent.proxy.utils.Constants;
import org.wso2.emm.agent.proxy.utils.ServerUtilities;

import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;

/**
 * This class includes the functionality related to invoking APIs and
 * return API results to the client.
 */
public class APIController implements TokenCallBack {
    private static final String TAG = "APIController";
    private Token token;
    private String clientKey, clientSecret;
    private APIResultCallBack apiResultCallback;
    private EndPointInfo apiEndPointInfo;

    public APIController(String clientKey, String clientSecret) {
        this.clientKey = clientKey;
        this.clientSecret = clientSecret;
    }

    public APIController() {
    }

    /**
     * Invoking an API using retrieved token.
     *
     * @param apiEndPointInfo   - Server and API end point information.
     * @param apiResultCallBack - API result callback data.
     * @param requestCode       - Request code to avoid response complications.
     * @param context           - Application context.
     */
    public void invokeAPI(EndPointInfo apiEndPointInfo, APIResultCallBack apiResultCallBack,
                          int requestCode, Context context) {
        this.apiResultCallback = apiResultCallBack;
        this.apiEndPointInfo = apiEndPointInfo;
        if (IdentityProxy.getInstance().getContext() == null) {
            IdentityProxy.getInstance().setContext(context);
        }
        IdentityProxy.getInstance().setRequestCode(requestCode);
        // The actual network call happens in onReceiveTokenResult once a token arrives.
        IdentityProxy.getInstance().requestToken(IdentityProxy.getInstance().getContext(), this,
                                                 this.clientKey, this.clientSecret);
        // temporarily added to support non OAuth calls
        //new NetworkCallTask(apiResultCallback).execute(apiEndPointInfo);
    }

    @Override
    public void onReceiveTokenResult(Token token, String status) {
        // NOTE(review): token is stored even if retrieval failed; NetworkCallTask
        // dereferences it unconditionally — confirm a null token cannot reach here.
        this.token = token;
        new NetworkCallTask(apiResultCallback).execute(apiEndPointInfo);
    }

    /**
     * Logs each key/value pair of the server response when debugging is enabled.
     * Shared by both network tasks to avoid duplicated logging loops.
     *
     * @param responseParams - Response parameters returned by the server.
     */
    private static void logResponseParams(Map<String, String> responseParams) {
        for (Map.Entry<String, String> respParams : responseParams.entrySet()) {
            StringBuilder paras = new StringBuilder();
            paras.append("response-params: key:");
            paras.append(respParams.getKey());
            paras.append(", value:");
            paras.append(respParams.getValue());
            Log.d(TAG, paras.toString());
        }
    }

    /**
     * AsyncTask to contact server and access the API with retrieved token.
     */
    private class NetworkCallTask extends AsyncTask<EndPointInfo, Void, Map<String, String>> {
        APIResultCallBack apiResultCallBack;

        public NetworkCallTask(APIResultCallBack apiResultCallBack) {
            this.apiResultCallBack = apiResultCallBack;
        }

        @Override
        protected Map<String, String> doInBackground(EndPointInfo... params) {
            EndPointInfo endPointInfo = params[0];
            Map<String, String> responseParams = null;
            String accessToken = token.getAccessToken();
            Map<String, String> headers = new HashMap<>();
            headers.put("Content-Type", "application/json");
            headers.put("Accept", "*/*");
            headers.put("User-Agent", "Mozilla/5.0 ( compatible ), Android");
            headers.put("Authorization", "Bearer " + accessToken);
            try {
                responseParams = ServerUtilities.postData(endPointInfo, headers);
                if (Constants.DEBUG_ENABLED) {
                    logResponseParams(responseParams);
                }
            } catch (IDPTokenManagerException e) {
                // Pass the throwable so the stack trace is preserved in the log.
                Log.e(TAG, "Failed to contact server.", e);
            }
            return responseParams;
        }

        @Override
        protected void onPostExecute(Map<String, String> result) {
            if (Constants.DEBUG_ENABLED) {
                if (result != null && !result.isEmpty()) {
                    Log.d(TAG, "Result :" + Arrays.toString(result.entrySet().toArray()));
                }
            }
            apiResultCallBack.onReceiveAPIResult(result, IdentityProxy.getInstance().getRequestCode());
        }
    }

    /**
     * Invokes an API without an OAuth token (e.g. for license retrieval).
     *
     * @param callback           - API result callback data.
     * @param licenseRequestCode - Request code to avoid response complications.
     * @param apiUtilities       - Server and API end point information.
     * @param context            - Application context.
     */
    public void securedNetworkCall(APIResultCallBack callback, int licenseRequestCode,
                                   EndPointInfo apiUtilities, Context context) {
        if (IdentityProxy.getInstance().getContext() == null) {
            IdentityProxy.getInstance().setContext(context);
        }
        IdentityProxy.getInstance().setRequestCode(licenseRequestCode);
        new SecuredNetworkCallTask(callback, licenseRequestCode).execute(apiUtilities);
    }

    /**
     * AsyncTask to contact the server without an OAuth Authorization header.
     */
    public class SecuredNetworkCallTask extends AsyncTask<EndPointInfo, Void, Map<String, String>> {
        APIResultCallBack apiResultCallBack;
        int requestCode;

        public SecuredNetworkCallTask(APIResultCallBack apiResultCallBack, int requestCode) {
            this.apiResultCallBack = apiResultCallBack;
            this.requestCode = requestCode;
        }

        @Override
        protected Map<String, String> doInBackground(EndPointInfo... params) {
            EndPointInfo endPointInfo = params[0];
            Map<String, String> responseParams = null;
            Map<String, String> headers = new HashMap<>();
            headers.put("Content-Type", "application/json");
            headers.put("Accept", "*/*");
            headers.put("User-Agent", "Mozilla/5.0 ( compatible ), Android");
            try {
                responseParams = ServerUtilities.postData(endPointInfo, headers);
                if (Constants.DEBUG_ENABLED) {
                    logResponseParams(responseParams);
                }
            } catch (IDPTokenManagerException e) {
                Log.e(TAG, "Failed to contact server.", e);
            }
            return responseParams;
        }

        @Override
        protected void onPostExecute(Map<String, String> result) {
            if (Constants.DEBUG_ENABLED) {
                if (result != null && !result.isEmpty()) {
                    // Demoted from Log.e: this is debug output, not an error
                    // (matches NetworkCallTask).
                    Log.d(TAG, "Result :" + Arrays.toString(result.entrySet().toArray()));
                }
            }
            apiResultCallBack.onReceiveAPIResult(result, requestCode);
        }
    }
}
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

package org.elasticsearch.index.termvectors;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.index.Fields;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.MultiTerms;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.memory.MemoryIndex;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.termvectors.TermVectorsFilter;
import org.elasticsearch.action.termvectors.TermVectorsRequest;
import org.elasticsearch.action.termvectors.TermVectorsResponse;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.common.lucene.uid.VersionsAndSeqNoResolver.DocIdAndVersion;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.core.Nullable;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.get.GetResult;
import org.elasticsearch.index.mapper.DocumentParser;
import org.elasticsearch.index.mapper.LuceneDocument;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.MappingLookup;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.SourceFieldMapper;
import org.elasticsearch.index.mapper.SourceToParse;
import org.elasticsearch.index.mapper.SourceValueFetcher;
import org.elasticsearch.index.mapper.StringFieldType;
import org.elasticsearch.index.mapper.TextSearchInfo;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.search.lookup.SourceLookup;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.concurrent.TimeUnit;
import java.util.function.LongSupplier;

/**
 * Builds {@link TermVectorsResponse}s for a shard: reads stored term vectors
 * from an existing document, or generates them on the fly (via an in-memory
 * index) for fields without stored vectors, overridden analyzers, or
 * artificial documents supplied in the request.
 */
public class TermVectorsService {

    // Static utility class; not instantiable.
    private TermVectorsService() {}

    public static TermVectorsResponse getTermVectors(IndexShard indexShard, TermVectorsRequest request) {
        return getTermVectors(indexShard, request, System::nanoTime);
    }

    /**
     * Core implementation; the nanotime supplier is injectable for tests.
     * Resolves the document (real or artificial), collects stored and/or
     * generated term vectors, applies optional terms filtering, and records
     * the took-time on the response.
     */
    static TermVectorsResponse getTermVectors(IndexShard indexShard, TermVectorsRequest request, LongSupplier nanoTimeSupplier) {
        final long startTime = nanoTimeSupplier.getAsLong();
        final TermVectorsResponse termVectorsResponse = new TermVectorsResponse(
            indexShard.shardId().getIndex().getName(),
            request.id()
        );
        Fields termVectorsByField = null;
        TermVectorsFilter termVectorsFilter = null;

        /* handle potential wildcards in fields */
        if (request.selectedFields() != null) {
            handleFieldWildcards(indexShard, request);
        }

        try (
            Engine.GetResult get = indexShard.get(
                new Engine.Get(request.realtime(), false, request.id()).version(request.version()).versionType(request.versionType())
            );
            Engine.Searcher searcher = indexShard.acquireSearcher("term_vector")
        ) {
            // Prefer the reader from the get result (realtime view) when available.
            Fields topLevelFields = fields(get.searcher() != null ? get.searcher().getIndexReader() : searcher.getIndexReader());
            DocIdAndVersion docIdAndVersion = get.docIdAndVersion();
            /* from an artificial document */
            if (request.doc() != null) {
                termVectorsByField = generateTermVectorsFromDoc(indexShard, request);
                termVectorsResponse.setArtificial(true);
                termVectorsResponse.setExists(true);
            }
            /* or from an existing document */
            else if (docIdAndVersion != null) {
                // fields with stored term vectors
                termVectorsByField = docIdAndVersion.reader.getTermVectors(docIdAndVersion.docId);
                Set<String> selectedFields = request.selectedFields();
                // generate tvs for fields where analyzer is overridden
                if (selectedFields == null && request.perFieldAnalyzer() != null) {
                    selectedFields = getFieldsToGenerate(request.perFieldAnalyzer(), termVectorsByField);
                }
                // fields without term vectors
                if (selectedFields != null) {
                    termVectorsByField = addGeneratedTermVectors(indexShard, get, termVectorsByField, request, selectedFields);
                }
                termVectorsResponse.setDocVersion(docIdAndVersion.version);
                termVectorsResponse.setExists(true);
            }
            /* no term vectors generated or found */
            else {
                termVectorsResponse.setExists(false);
            }
            /* if there are term vectors, optional compute dfs and/or terms filtering */
            if (termVectorsByField != null) {
                if (request.filterSettings() != null) {
                    termVectorsFilter = new TermVectorsFilter(termVectorsByField, topLevelFields, request.selectedFields());
                    termVectorsFilter.setSettings(request.filterSettings());
                    try {
                        termVectorsFilter.selectBestTerms();
                    } catch (IOException e) {
                        throw new ElasticsearchException("failed to select best terms", e);
                    }
                }
                // write term vectors
                termVectorsResponse.setFields(
                    termVectorsByField,
                    request.selectedFields(),
                    request.getFlags(),
                    topLevelFields,
                    termVectorsFilter
                );
            }
            termVectorsResponse.setTookInMillis(TimeUnit.NANOSECONDS.toMillis(nanoTimeSupplier.getAsLong() - startTime));
        } catch (Exception ex) {
            throw new ElasticsearchException("failed to execute term vector request", ex);
        }
        return termVectorsResponse;
    }

    /**
     * Wraps an {@link IndexReader} as a {@link Fields} view whose
     * {@code terms(field)} delegates to {@link MultiTerms#getTerms};
     * iteration and size are not supported on this view.
     */
    public static Fields fields(IndexReader reader) {
        return new Fields() {
            @Override
            public Iterator<String> iterator() {
                throw new UnsupportedOperationException();
            }

            @Override
            public Terms terms(String field) throws IOException {
                return MultiTerms.getTerms(reader, field);
            }

            @Override
            public int size() {
                throw new UnsupportedOperationException();
            }
        };
    }

    /**
     * Expands wildcard patterns in the request's selected fields into concrete
     * field names (in place). Non-pattern names that match nothing are kept.
     */
    private static void handleFieldWildcards(IndexShard indexShard, TermVectorsRequest request) {
        // TODO rewrite this to use a field filter built from field patterns
        // Using lookups doesn't work for eg dynamic fields
        Set<String> fieldNames = new HashSet<>();
        for (String pattern : request.selectedFields()) {
            Set<String> expandedFields = indexShard.mapperService().mappingLookup().getMatchingFieldNames(pattern);
            if (expandedFields.isEmpty()) {
                if (Regex.isSimpleMatchPattern(pattern) == false) {
                    fieldNames.add(pattern);
                }
            } else {
                fieldNames.addAll(expandedFields);
            }
        }
        request.selectedFields(fieldNames.toArray(Strings.EMPTY_ARRAY));
    }

    // Term vectors can only be served for indexed string fields.
    private static boolean isValidField(MappedFieldType fieldType) {
        // must be a string
        if (fieldType instanceof StringFieldType == false) {
            return false;
        }
        // and must be indexed
        if (fieldType.isSearchable() == false) {
            return false;
        }
        return true;
    }

    /**
     * Generates term vectors for selected fields that have no stored vectors
     * (or whose analyzer is overridden in the request) and merges them with
     * the already-retrieved stored vectors.
     */
    private static Fields addGeneratedTermVectors(
        IndexShard indexShard,
        Engine.GetResult get,
        Fields termVectorsByField,
        TermVectorsRequest request,
        Set<String> selectedFields
    ) throws IOException {
        /* only keep valid fields */
        Set<String> validFields = new HashSet<>();
        for (String field : selectedFields) {
            MappedFieldType fieldType = indexShard.mapperService().fieldType(field);
            if (isValidField(fieldType) == false) {
                continue;
            }
            // already retrieved, only if the analyzer hasn't been overridden at the field
            if (fieldType.getTextSearchInfo().termVectors() != TextSearchInfo.TermVector.NONE
                && (request.perFieldAnalyzer() == null || request.perFieldAnalyzer().containsKey(field) == false)) {
                continue;
            }
            validFields.add(field);
        }
        if (validFields.isEmpty()) {
            return termVectorsByField;
        }
        /* generate term vectors from fetched document fields */
        // Reserve one extra slot so the _source field is fetched as well.
        String[] getFields = validFields.toArray(new String[validFields.size() + 1]);
        getFields[getFields.length - 1] = SourceFieldMapper.NAME;
        GetResult getResult = indexShard.getService().get(get, request.id(), getFields, null);
        Fields generatedTermVectors = generateTermVectors(
            indexShard,
            getResult.sourceAsMap(),
            getResult.getFields().values(),
            request.offsets(),
            request.perFieldAnalyzer(),
            validFields
        );
        /* merge with existing Fields */
        if (termVectorsByField == null) {
            return generatedTermVectors;
        } else {
            return mergeFields(termVectorsByField, generatedTermVectors);
        }
    }

    /**
     * Resolves the analyzer for a field, honoring a per-field override from
     * the request before falling back to the mapper's index analyzer.
     */
    private static Analyzer getAnalyzerAtField(IndexShard indexShard, String field, @Nullable Map<String, String> perFieldAnalyzer) {
        MapperService mapperService = indexShard.mapperService();
        if (perFieldAnalyzer != null && perFieldAnalyzer.containsKey(field)) {
            return mapperService.getIndexAnalyzers().get(perFieldAnalyzer.get(field));
        } else {
            return mapperService.indexAnalyzer(
                field,
                f -> { throw new IllegalArgumentException("No analyzer configured for field " + f); }
            );
        }
    }

    /**
     * Returns the subset of the given fields that have a per-field analyzer
     * override and therefore need their vectors regenerated.
     */
    private static Set<String> getFieldsToGenerate(Map<String, String> perAnalyzerField, Fields fieldsObject) {
        Set<String> selectedFields = new HashSet<>();
        for (String fieldName : fieldsObject) {
            if (perAnalyzerField.containsKey(fieldName)) {
                selectedFields.add(fieldName);
            }
        }
        return selectedFields;
    }

    /**
     * Analyzes field values into a single-document {@link MemoryIndex} and
     * returns the term vectors read back from it. Values come from the
     * fetched stored fields first, then from _source for anything missing.
     */
    private static Fields generateTermVectors(
        IndexShard indexShard,
        Map<String, Object> source,
        Collection<DocumentField> getFields,
        boolean withOffsets,
        @Nullable Map<String, String> perFieldAnalyzer,
        Set<String> fields
    ) throws IOException {
        Map<String, Collection<Object>> values = new HashMap<>();
        for (DocumentField getField : getFields) {
            String field = getField.getName();
            if (fields.contains(field)) { // some fields are returned even when not asked for, eg. _timestamp
                values.put(field, getField.getValues());
            }
        }
        if (source != null) {
            MappingLookup mappingLookup = indexShard.mapperService().mappingLookup();
            SourceLookup sourceLookup = new SourceLookup();
            sourceLookup.setSource(source);
            for (String field : fields) {
                if (values.containsKey(field) == false) {
                    SourceValueFetcher valueFetcher = SourceValueFetcher.toString(mappingLookup.sourcePaths(field));
                    List<Object> ignoredValues = new ArrayList<>();
                    List<Object> v = valueFetcher.fetchValues(sourceLookup, ignoredValues);
                    if (v.isEmpty() == false) {
                        values.put(field, v);
                    }
                }
            }
        }
        /* store document in memory index */
        MemoryIndex index = new MemoryIndex(withOffsets);
        for (Map.Entry<String, Collection<Object>> entry : values.entrySet()) {
            String field = entry.getKey();
            Analyzer analyzer = getAnalyzerAtField(indexShard, field, perFieldAnalyzer);
            if (entry.getValue() instanceof List) {
                for (Object text : entry.getValue()) {
                    index.addField(field, text.toString(), analyzer);
                }
            } else {
                index.addField(field, entry.getValue().toString(), analyzer);
            }
        }
        /* and read vectors from it */
        // The MemoryIndex holds exactly one document, hence doc id 0.
        return index.createSearcher().getIndexReader().getTermVectors(0);
    }

    /**
     * Parses the artificial document from the request through the shard's
     * mappings and generates term vectors for its valid, selected fields.
     */
    private static Fields generateTermVectorsFromDoc(IndexShard indexShard, TermVectorsRequest request) throws IOException {
        SourceToParse source = new SourceToParse("_id_for_tv_api", request.doc(), request.xContentType(), request.routing(), Map.of());
        DocumentParser documentParser = indexShard.mapperService().documentParser();
        MappingLookup mappingLookup = indexShard.mapperService().mappingLookup();
        ParsedDocument parsedDocument = documentParser.parseDocument(source, mappingLookup);
        // select the right fields and generate term vectors
        LuceneDocument doc = parsedDocument.rootDoc();
        Set<String> seenFields = new HashSet<>();
        Collection<DocumentField> documentFields = new HashSet<>();
        for (IndexableField field : doc.getFields()) {
            MappedFieldType fieldType = indexShard.mapperService().fieldType(field.name());
            if (isValidField(fieldType) == false) {
                continue;
            }
            if (request.selectedFields() != null && request.selectedFields().contains(field.name()) == false) {
                continue;
            }
            if (seenFields.contains(field.name())) {
                continue;
            } else {
                seenFields.add(field.name());
            }
            String[] values = getValues(doc.getFields(field.name()));
            documentFields.add(new DocumentField(field.name(), Arrays.asList((Object[]) values)));
        }
        return generateTermVectors(
            indexShard,
            XContentHelper.convertToMap(parsedDocument.source(), true, request.xContentType()).v2(),
            documentFields,
            request.offsets(),
            request.perFieldAnalyzer(),
            seenFields
        );
    }

    /**
     * Returns an array of values of the field specified as the method parameter.
     * This method returns an empty array when there are no
     * matching fields. It never returns null.
     * @param fields The <code>IndexableField</code> to get the values from
     * @return a <code>String[]</code> of field values
     */
    public static String[] getValues(IndexableField[] fields) {
        List<String> result = new ArrayList<>();
        for (IndexableField field : fields) {
            if (field.fieldType().indexOptions() != IndexOptions.NONE) {
                if (field.binaryValue() != null) {
                    result.add(field.binaryValue().utf8ToString());
                } else {
                    result.add(field.stringValue());
                }
            }
        }
        return result.toArray(new String[0]);
    }

    /**
     * Merges two Fields instances into one; entries from {@code fields2}
     * take precedence over same-named entries in {@code fields1}.
     */
    private static Fields mergeFields(Fields fields1, Fields fields2) throws IOException {
        ParallelFields parallelFields = new ParallelFields();
        for (String fieldName : fields2) {
            Terms terms = fields2.terms(fieldName);
            if (terms != null) {
                parallelFields.addField(fieldName, terms);
            }
        }
        for (String fieldName : fields1) {
            if (parallelFields.fields.containsKey(fieldName)) {
                continue;
            }
            Terms terms = fields1.terms(fieldName);
            if (terms != null) {
                parallelFields.addField(fieldName, terms);
            }
        }
        return parallelFields;
    }

    // Poached from Lucene ParallelLeafReader
    private static final class ParallelFields extends Fields {
        // Sorted map so iteration yields field names in natural order.
        final Map<String, Terms> fields = new TreeMap<>();

        ParallelFields() {}

        void addField(String fieldName, Terms terms) {
            fields.put(fieldName, terms);
        }

        @Override
        public Iterator<String> iterator() {
            return Collections.unmodifiableSet(fields.keySet()).iterator();
        }

        @Override
        public Terms terms(String field) {
            return fields.get(field);
        }

        @Override
        public int size() {
            return fields.size();
        }
    }
}
/* * @(#)SortItem.java 1.17f 95/04/10 James Gosling * * Copyright (c) 1994 Sun Microsystems, Inc. All Rights Reserved. * * Permission to use, copy, modify, and distribute this software * and its documentation for NON-COMMERCIAL purposes and without * fee is hereby granted provided that this copyright notice * appears in all copies. Please refer to the file "copyright.html" * for further important copyright and licensing information. * * SUN MAKES NO REPRESENTATIONS OR WARRANTIES ABOUT THE SUITABILITY OF * THE SOFTWARE, EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED * TO THE IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A * PARTICULAR PURPOSE, OR NON-INFRINGEMENT. SUN SHALL NOT BE LIABLE FOR * ANY DAMAGES SUFFERED BY LICENSEE AS A RESULT OF USING, MODIFYING OR * DISTRIBUTING THIS SOFTWARE OR ITS DERIVATIVES. */ import java.awt.*; import java.io.InputStream; import java.util.Hashtable; import java.net.*; /** * A simple applet class to demonstrate a sort algorithm. * You can specify a sorting algorithm using the "alg" * attribyte. When you click on the applet, a thread is * forked which animates the sorting algorithm. * * @author James Gosling * @version 1.17f, 10 Apr 1995 */ public class SortItem extends java.applet.Applet implements Runnable { /** * The thread that is sorting (or null). */ private Thread kicker; /** * The array that is being sorted. */ int[] arr; /** * The high water mark. */ int h1 = -1; /** * The low water mark. */ int h2 = -1; /** * The name of the algorithm. */ String algName; /** * The sorting algorithm (or null). */ SortAlgorithm algorithm; /** * Fill the array with random numbers from 0..n-1. */ void scramble() { int[] a = new int[size().height / 2]; double f = size().width / (double) a.length; for (int i = a.length; --i >= 0;) { a[i] = (int)(i * f); } for (int i = a.length; --i >= 0;) { int j = (int)(i * Math.random()); int t = a[i]; a[i] = a[j]; a[j] = t; } arr = a; } /** * Pause a while. 
* @see SortAlgorithm */ void pause() { pause(-1, -1); } /** * Pause a while, and draw the high water mark. * @see SortAlgorithm */ void pause(int H1) { pause(H1, -1); } /** * Pause a while, and draw the low&high water marks. * @see SortAlgorithm */ void pause(int H1, int H2) { h1 = H1; h2 = H2; if (kicker != null) { repaint(); } try {Thread.sleep(20);} catch (InterruptedException e){} } /** * Initialize the applet. */ public void init() { String at = getParameter("alg"); if (at == null) { at = "BubbleSort"; } algName = at + "Algorithm"; scramble(); resize(100, 100); } /** * Paint the array of numbers as a list * of horizontal lines of varying lenghts. */ public void paint(Graphics g) { int[] a = arr; int y = size().height - 1; // Erase old lines g.setColor(Color.lightGray); for (int i = a.length; --i >= 0; y -= 2) { g.drawLine(arr[i], y, size().width, y); } // Draw new lines g.setColor(Color.black); y = size().height - 1; for (int i = a.length; --i >= 0; y -= 2) { g.drawLine(0, y, arr[i], y); } if (h1 >= 0) { g.setColor(Color.red); y = h1 * 2 + 1; g.drawLine(0, y, size().width, y); } if (h2 >= 0) { g.setColor(Color.blue); y = h2 * 2 + 1; g.drawLine(0, y, size().width, y); } } /** * Update without erasing the background. */ public void update(Graphics g) { paint(g); } /** * Run the sorting algorithm. This method is * called by class Thread once the sorting algorithm * is started. * @see java.lang.Thread#run * @see SortItem#mouseUp */ public void run() { try { if (algorithm == null) { algorithm = (SortAlgorithm)Class.forName(algName).newInstance(); algorithm.setParent(this); } algorithm.init(); algorithm.sort(arr); } catch(Exception e) { } } /** * Stop the applet. Kill any sorting algorithm that * is still sorting. 
*/ public synchronized void stop() { if (kicker != null) { try { kicker.stop(); } catch (IllegalThreadStateException e) { // ignore this exception } kicker = null; } if (algorithm != null){ try { algorithm.stop(); } catch (IllegalThreadStateException e) { // ignore this exception } } } /** * For a Thread to actually do the sorting. This routine makes * sure we do not simultaneously start several sorts if the user * repeatedly clicks on the sort item. It needs to be * synchronoized with the stop() method because they both * manipulate the common kicker variable. */ private synchronized void startSort() { if (kicker == null || !kicker.isAlive()) { scramble(); repaint(); kicker = new Thread(this); kicker.start(); } } /** * The user clicked in the applet. Start the clock! */ public boolean mouseUp(java.awt.Event evt, int x, int y) { startSort(); return true; } }
/*
Author: Miguel Calejo
Contact: info@interprolog.com, www.interprolog.com
Copyright InterProlog Consulting / Renting Point Lda, Portugal 2014
Use and distribution, without any warranties, under the terms of the
Apache License, as per http://www.apache.org/licenses/LICENSE-2.0.html
*/
package com.declarativa.interprolog;
import java.awt.Color;
import java.awt.Component;
import java.awt.Container;
import java.awt.event.ActionEvent;
import java.util.HashMap;
import java.util.HashSet;
import javax.swing.AbstractAction;
import javax.swing.Action;
import javax.swing.JComponent;
import javax.swing.JTextArea;
import javax.swing.SwingUtilities;
import com.declarativa.interprolog.gui.ListenerWindow;
/**
 * An auxiliary object which listens to a PrologEngine and depending on its state will enable/disable a bunch of UI Components and/or Actions.
 * It also provides two actions for pause/continue and abort. Two implementations are provided: one assuming use of timed_call, the other of
 * (ctrl-C) breaks.
 * <p>
 * Thread-safety note: the pause/abort protocol is coordinated through the plain boolean
 * flags below, written from the Swing event thread (the actions) and read in a busy-wait
 * loop inside {@link #willWork(AbstractPrologEngine)} on the engine's callback thread.
 * @author mc
 *
 */
public class EngineController implements PrologEngineListener{
	// UI items (Components or Actions) disabled while the engine is busy.
	HashSet<Object> itemsToDisableWhenBusy = new HashSet<Object>();
	// UI items (Components or Actions) disabled while the engine is paused.
	HashSet<Object> itemsToDisableWhenPaused = new HashSet<Object>();
	/** helper objects to make text areas display a hint when empty */
	/** text areas which will have hints and colors affected by the engine state */
	protected HashSet<JTextArea> fields = new HashSet<JTextArea>();
	// Hint strings shown in the registered text areas for each engine state.
	protected String busyHint = "";
	protected String idleOrPausedHint = "";
	protected String needsMoreInputHint = "";
	// Background colors used for the registered text areas per engine state.
	protected Color busyColor, idleOrPausedColor, needsMoreInputColor;
	protected HashMap<JTextArea,Color> originalColors = new HashMap<JTextArea,Color>(); // lazily initialized, to get stable colors
	protected HashMap<JTextArea,Color> darkerColors = new HashMap<JTextArea,Color>();
	protected String busyLabel; // for the engine state "label" (button)
	protected String idleOrPausedLabel;
	protected String needsMoreInputLabel;
	public final AbstractAction engineStateAction; // use for display, not input
	public final PauseContinueAction pauseContinueAction;
	public final AbortAction stopAction;
	// Cross-thread pause/abort protocol flags; see willWork() for the busy-wait consumer.
	boolean pauseRequested = false, pauseEnded = false, inPause=false, prologCanWork=true;
	public static final String STOP_MESSAGE = "Query Aborted";
	/** If not null, no timed_call is assumed */
	SubprocessEngine pausableEngine;
	// When true, setUIto*() hooks are invoked on state changes to inject hints.
	private boolean autoHints;
	/**
	 * @param pausableEngine optional; if not null will be sent break requests
	 * @param autoHints if true query entry fields will have a hint injected according to engine state
	 * @see com.xsb.xj.util.XJEngineController
	 */
	@SuppressWarnings("serial")
	public EngineController(SubprocessEngine pausableEngine, boolean autoHints){
		this.pausableEngine = pausableEngine;
		this.autoHints=autoHints;
		// This action will oscillate between Pause(disabled/enabled)/Continue:
		pauseContinueAction = new PauseContinueAction();
		// This action will not change name, just enabled state:
		stopAction = new AbortAction();
		// Display-only action: it carries the state label/tooltip but performs nothing.
		engineStateAction = new AbstractAction(){
			@Override
			public void actionPerformed(ActionEvent e) {}
		};
		engineStateAction.putValue(Action.SHORT_DESCRIPTION, "Engine availability state");
	}
	/**
	 * Action toggling between "Pause" and "Continue". Its NAME value doubles as the
	 * state discriminator in actionPerformed().
	 */
	@SuppressWarnings("serial")
	class PauseContinueAction extends AbstractAction{
		PauseContinueAction(){
			prepareForPause();
			setEnabled(false);
		}
		public void actionPerformed(ActionEvent e){
			if (getValue(NAME).equals("Pause")){
				// Request a pause; willWork() (or the break, below) will honor it.
				setEnabled(false);
				stopAction.setEnabled(false);
				pauseRequested = true;
				pauseEnded=false;
				if (pausableEngine!=null)
					pausableEngine.breakEngine();
			} else { // "Continue" clicked
				prepareForPause();
				stopAction.setEnabled(true);
				pauseEnded = true;
				if (pausableEngine!=null)
					pausableEngine.resumeEngine();
			}
		}
		private void prepareForPause(){
			putValue(NAME,"Pause");
			putValue(Action.SHORT_DESCRIPTION,"Click to pause the engine");
		}
		private void prepareForContinue(){ // called dynamically below...
			putValue(NAME,"Continue");
			putValue(Action.SHORT_DESCRIPTION,"Click to resume execution");
		}
	}
	/** Action aborting the current query, via abort or interrupt depending on pause state. */
	@SuppressWarnings("serial")
	class AbortAction extends AbstractAction{
		AbortAction(){
			super("Abort");
			setEnabled(false);
			putValue(Action.SHORT_DESCRIPTION,"Click to abort (end) the query");
		}
		@Override
		public void actionPerformed(ActionEvent e){
			doIt();
		}
		private void doIt(){
			setEnabled(false);
			pauseContinueAction.setEnabled(false);
			pauseContinueAction.prepareForPause();
			// prologCanWork=false makes the next willWork() return STOP_MESSAGE;
			// pauseEnded=true releases a pause busy-wait if one is active.
			prologCanWork = false;
			pauseEnded = true;
			if (pausableEngine!=null){
				if (pausableEngine.isPaused())
					pausableEngine.abortEngine();
				else pausableEngine.interrupt();
			}
		}
	}
	// Releases a pending pause and flags the query for cancellation; presumably
	// invoked during interrupt handling elsewhere — TODO confirm caller.
	void interruptCleanupHack(){
		if (isInPause()){
			pauseEnded = true;
			prologCanWork = false;
		}
	}
	/** The user has started a query */
	public void queryStarted(){
		pauseContinueAction.setEnabled(true);
		stopAction.setEnabled(true);
		//System.out.println("queryStarted");
	}
	/** The user query ended */
	public void queryEnded(){
		pauseContinueAction.setEnabled(false);
		stopAction.setEnabled(false);
		//System.out.println("queryEnd");
	}
	// PrologEngineListener methods:
	// This will never be called if not using timed_call
	/**
	 * Engine callback asking whether Prolog may proceed. Implements the pause by
	 * busy-waiting (80ms polls) while a pause is requested and not yet ended.
	 * @return null to let the engine continue, or STOP_MESSAGE to cancel the query
	 */
	public String willWork(AbstractPrologEngine source){
		//System.out.print("(p)");
		if (pauseRequested) {
			pauseContinueAction.prepareForContinue();
			pauseContinueAction.setEnabled(true);
			if (autoHints) setUItoPausedOrIdle();
		}
		inPause=true;
		if (!source.isAvailable()) availabilityChanged(source);
		// Non-short-circuit & is behaviorally equivalent to && here (both are plain booleans).
		while(pauseRequested & ! pauseEnded) {
			try {
				Thread.sleep(80);
			} catch (InterruptedException e) {
				System.err.println("Anomaly during Pause:"+e);
				break;
			}
		}
		inPause=false;
		if (!source.isAvailable()) availabilityChanged(source);
		if (pauseRequested) {
			pauseRequested=false;
			if (autoHints) setUItoBusy();
		}
		//System.out.println("prologCanWork:"+prologCanWork+","+this);
		if (prologCanWork) return null;
		// One-shot cancellation: reset the flag and tell the engine to stop.
		prologCanWork = true;
		return STOP_MESSAGE;
	}
	/** UI hook for the "busy" state; no-op here, meant to be overridden. */
	public void setUItoBusy() {
	}
	/** UI hook for the "paused or idle" state; no-op here, meant to be overridden. */
	public void setUItoPausedOrIdle() {
	}
	/** UI hook for the "needs more input" state; no-op here, meant to be overridden. */
	public void setUItoNeedsMoreInput() {
	}
	public void javaMessaged(AbstractPrologEngine source){
		// System.out.print("(j)");
	}
	/** In addition to being messaged by the engine, this method also gets called by this controller when entering/leave pause
	 * @param source an engine */
	public void availabilityChanged(final AbstractPrologEngine source){
		// Snapshot state on the calling thread; UI updates are deferred to the EDT.
		final boolean inPause = pausableEngine==null ? EngineController.this.inPause: pausableEngine.isPaused();
		final boolean available = source.isAvailable();
		//System.out.println("availabilityChanged:"+available+", inPause:"+inPause);
		SwingUtilities.invokeLater(new Runnable(){
			public void run(){
				if (available) queryEnded();
				else queryStarted();
				for (Object item:itemsToDisableWhenBusy){
					// Enabled when idle, or when paused unless also registered as disable-when-paused.
					boolean enabled = (available&&!inPause)||(available && inPause && !itemsToDisableWhenPaused.contains(item));
					enableItem(item,enabled);
				}
				// Items only in the paused set (not in the busy set) follow the pause state alone.
				for (Object item:itemsToDisableWhenPaused)
					if (itemsToDisableWhenBusy.contains(item)) continue;
					else enableItem(item,!inPause);
				if (pausableEngine!=null){
					// update actions explicitly, as willWork will never execute
					if (inPause){
						pauseContinueAction.prepareForContinue();
						pauseContinueAction.setEnabled(true);
						stopAction.setEnabled(true);
						if (autoHints) setUItoPausedOrIdle();
					} else if (available){
						pauseContinueAction.prepareForPause();
						pauseContinueAction.setEnabled(false);
						stopAction.setEnabled(false);
						if (autoHints) setUItoPausedOrIdle();
					} else{
						pauseContinueAction.prepareForPause();
						pauseContinueAction.setEnabled(true);
						stopAction.setEnabled(true);
						if (autoHints) setUItoBusy();
					}
				}
			}
		});
	}
	// Enables/disables a Component or an Action, and swaps the wait cursor on
	// the item's top-level window when disabling.
	static void enableItem(Object item,boolean enabled){
		if (item instanceof Component){
			((Component)item).setEnabled(enabled);
			Container top = null;
			if (item instanceof JComponent)
				top = ((JComponent)item).getTopLevelAncestor(); //TODO: NOT working, gets us nulls!
			//System.out.println(item.getComponent());
			if (top!=null){
				if (!(enabled)) ListenerWindow.setWaitCursor(top);
				else ListenerWindow.restoreCursor(top);
			}
		} else if (item instanceof Action){
			((Action)item).setEnabled(enabled);
		}
	}
	//TODO: These should probably enable/disable immediately when called
	/** This component or menu item should be enabled only when the engine is available or paused. Its window will get a busy cursor when the engine is busy
	 * @param item some component*/
	public void disableWhenBusy(Component item){
		itemsToDisableWhenBusy.add(item);
	}
	/** Disable this component whenever the engine is busy or paused. */
	public void disableWhenBusyOrPaused(Component item){
		itemsToDisableWhenPaused.add(item);
		itemsToDisableWhenBusy.add(item);
	}
	/** Disable this action whenever the engine is busy or paused. */
	public void disableWhenBusyOrPaused(Action item){
		itemsToDisableWhenPaused.add(item);
		itemsToDisableWhenBusy.add(item);
	}
	/** Disable this component only while the engine is paused. */
	public void disableWhenPaused(Component item){
		itemsToDisableWhenPaused.add(item);
	}
	/** Disable this action whenever the engine is busy. */
	public void disableWhenBusy(Action item){
		itemsToDisableWhenBusy.add(item);
	}
	/** Make a new Color to denote "busy" */
	protected static Color makeBusyColorFrom(Color idleOrPaused){
		return idleOrPaused.darker();
	}
	/** Configure the hint strings shown in registered fields for each engine state. */
	public void setHintsForFields(String busyHint, String idleOrPausedHint, String needsMoreInputHint){
		this.busyHint = busyHint;
		this.idleOrPausedHint = idleOrPausedHint;
		this.needsMoreInputHint = needsMoreInputHint;
	}
	/** Configure the colors used for registered fields in each engine state. */
	public void setColorsForFields(Color busyColor, Color idleOrPausedColor, Color needsMoreInputColor){
		this.busyColor=busyColor;
		this.idleOrPausedColor=idleOrPausedColor;
		this.needsMoreInputColor=needsMoreInputColor;
	}
	/** Configure the labels for the engine state display (see engineStateAction). */
	public void setLabelsForState(String busy,String idleOrPaused,String needsMoreInputLabel){
		this.busyLabel = busy;
		this.idleOrPausedLabel=idleOrPaused;
		this.needsMoreInputLabel=needsMoreInputLabel;
	}
	/** For those annoying situations where anonymous classes do not expose their simple methods (void result and no args) elsewhere ;-)
	 * @param target the object whose no-arg method will be invoked reflectively
	 * @param method name of a public no-arg method on target
	 */
	public static void myMessage(Object target,String method){
		try{target.getClass().getMethod(method).invoke(target);}
		catch(Exception e){throw new RuntimeException(e);}
	}
	/** @return whether the engine is currently paused (delegates to the engine when one is set) */
	public boolean isInPause(){
		if (pausableEngine!=null)
			return pausableEngine.isPaused();
		else return inPause;
	}
	/** Programmatically abort the current query, as if the Abort action were clicked. */
	public void stop(){
		stopAction.doIt();
	}
}
/** * Copyright (C) 2013 by Raphael Michel under the MIT license: * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), * to deal in the Software without restriction, including without limitation * the rights to use, copy, modify, merge, publish, distribute, sublicense, * and/or sell copies of the Software, and to permit persons to whom the Software * is furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER * DEALINGS IN THE SOFTWARE. 
*/ package de.geeksfactory.opacclient.frontend; import android.annotation.TargetApi; import android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.content.SharedPreferences; import android.content.res.Configuration; import android.os.Build; import android.os.Bundle; import android.os.Handler; import android.preference.PreferenceManager; import android.support.design.widget.FloatingActionButton; import android.support.design.widget.NavigationView; import android.support.v4.app.ActivityOptionsCompat; import android.support.v4.app.Fragment; import android.support.v4.app.FragmentManager; import android.support.v4.app.FragmentTransaction; import android.support.v4.view.ViewCompat; import android.support.v4.widget.DrawerLayout; import android.support.v7.app.ActionBarDrawerToggle; import android.support.v7.app.AlertDialog; import android.support.v7.app.AppCompatActivity; import android.support.v7.widget.Toolbar; import android.transition.TransitionInflater; import android.util.DisplayMetrics; import android.view.Gravity; import android.view.LayoutInflater; import android.view.Menu; import android.view.MenuInflater; import android.view.MenuItem; import android.view.View; import android.view.ViewGroup; import android.view.inputmethod.InputMethodManager; import android.widget.AdapterView; import android.widget.AdapterView.OnItemClickListener; import android.widget.ArrayAdapter; import android.widget.ImageView; import android.widget.LinearLayout; import android.widget.ListView; import android.widget.RelativeLayout; import android.widget.TextView; import java.io.IOException; import java.io.InputStream; import java.util.List; import java.util.Map; import de.geeksfactory.opacclient.OpacClient; import de.geeksfactory.opacclient.R; import de.geeksfactory.opacclient.objects.Account; import de.geeksfactory.opacclient.reminder.ReminderHelper; import de.geeksfactory.opacclient.storage.AccountDataSource; import 
de.geeksfactory.opacclient.ui.AccountSwitcherNavigationView; import de.geeksfactory.opacclient.utils.Utils; public abstract class OpacActivity extends AppCompatActivity implements DrawerAccountsAdapter.Listener { protected OpacClient app; protected AlertDialog adialog; protected AccountDataSource aData; protected int selectedItemId; protected AccountSwitcherNavigationView drawer; protected DrawerLayout drawerLayout; protected ActionBarDrawerToggle drawerToggle; protected FloatingActionButton fab; protected CharSequence title; protected Fragment fragment; protected boolean hasDrawer = false; protected Toolbar toolbar; private boolean twoPane; private boolean fabVisible; protected List<Account> accounts; protected ImageView accountExpand; protected TextView accountTitle; protected TextView accountSubtitle; protected TextView accountWarning; protected LinearLayout accountData; protected boolean accountSwitcherVisible = false; protected DrawerAccountsAdapter accountsAdapter; protected static void unbindDrawables(View view) { if (view == null) { return; } if (view.getBackground() != null) { view.getBackground().setCallback(null); } if (view instanceof ViewGroup) { for (int i = 0; i < ((ViewGroup) view).getChildCount(); i++) { unbindDrawables(((ViewGroup) view).getChildAt(i)); } if (!(view instanceof AdapterView)) { ((ViewGroup) view).removeAllViews(); } } } @Override public void onCreate(Bundle savedInstanceState) { supportRequestWindowFeature(android.view.Window.FEATURE_INDETERMINATE_PROGRESS); super.onCreate(savedInstanceState); setContentView(getContentView()); app = (OpacClient) getApplication(); aData = new AccountDataSource(this); toolbar = (Toolbar) findViewById(R.id.toolbar); if (toolbar != null) { setSupportActionBar(toolbar); } fab = (FloatingActionButton) findViewById(R.id.search_fab); setupDrawer(); setupAccountSwitcher(); if (savedInstanceState != null) { setTwoPane(savedInstanceState.getBoolean("twoPane")); 
setFabVisible(savedInstanceState.getBoolean("fabVisible")); selectedItemId = savedInstanceState.getInt("selectedItemId"); setFabOnClickListener(selectedItemId); if (savedInstanceState.containsKey("title")) { setTitle(savedInstanceState.getCharSequence("title")); } if (savedInstanceState.containsKey("fragment")) { fragment = getSupportFragmentManager().getFragment( savedInstanceState, "fragment"); getSupportFragmentManager().beginTransaction() .replace(R.id.content_frame, fragment).commit(); } } fixStatusBarFlashing(); } protected void setupAccountSwitcher() { if (drawer == null || app.getAccount() == null) return; accounts = aData.getAllAccounts(); Account selectedAccount = app.getAccount(); final View header = drawer.getHeaderView(0); accountExpand = (ImageView) header.findViewById(R.id.account_expand); accountTitle = (TextView) header.findViewById(R.id.account_title); accountSubtitle = (TextView) header.findViewById(R.id.account_subtitle); accountWarning = (TextView) header.findViewById(R.id.account_warning); accountData = (LinearLayout) header.findViewById(R.id.account_data); View.OnClickListener l = new View.OnClickListener() { @Override public void onClick(View v) { toggleAccountSwitcher(); } }; accountData.setOnClickListener(l); //accountExpand.setOnClickListener(l); final SharedPreferences sp = PreferenceManager .getDefaultSharedPreferences(OpacActivity.this); boolean show_toggle_notice = !sp.contains("seen_drawer_toggle_notice"); header.findViewById(R.id.toggle_notice) .setVisibility(show_toggle_notice ? 
View.VISIBLE : View.GONE); header.findViewById(R.id.btToggleNotice).setOnClickListener( new View.OnClickListener() { @Override public void onClick(View v) { sp.edit().putBoolean("seen_drawer_toggle_notice", true).commit(); header.findViewById(R.id.toggle_notice) .setVisibility(View.GONE); } }); accountsAdapter = new DrawerAccountsAdapter(this, accounts, app.getAccount()); drawer.setAccountsAdapter(accountsAdapter); accountsAdapter.setListener(this); updateAccountSwitcher(selectedAccount); } protected void toggleAccountSwitcher() { setAccountSwitcherVisible(!accountSwitcherVisible); } protected void setAccountSwitcherVisible(boolean accountSwitcherVisible) { if (accountSwitcherVisible == this.accountSwitcherVisible) return; this.accountSwitcherVisible = accountSwitcherVisible; drawer.setAccountsVisible(accountSwitcherVisible); if (Build.VERSION.SDK_INT >= 11) accountExpand.setActivated(accountSwitcherVisible); if (!accountSwitcherVisible) { fixNavigationSelection(); } } /** * Fix status bar flashing problem during transitions by excluding the status bar background * from transitions */ @TargetApi(21) private void fixStatusBarFlashing() { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { getWindow().getEnterTransition().excludeTarget(android.R.id.statusBarBackground, true); getWindow().getReenterTransition() .excludeTarget(android.R.id.statusBarBackground, true); getWindow().getReturnTransition().excludeTarget(android.R.id.statusBarBackground, true); getWindow().getExitTransition().excludeTarget(android.R.id.statusBarBackground, true); } } protected abstract int getContentView(); protected void setupDrawer() { SharedPreferences sp = PreferenceManager .getDefaultSharedPreferences(this); drawerLayout = (DrawerLayout) findViewById(R.id.drawer_layout); if (drawerLayout != null) { hasDrawer = true; drawerLayout.setStatusBarBackground(R.color.primary_red_dark); drawerLayout.setDrawerShadow(R.drawable.drawer_shadow, Gravity.LEFT); drawerToggle = new 
ActionBarDrawerToggle(this, drawerLayout, toolbar, R.string.drawer_open, R.string.drawer_close) { /** * Called when a drawer has settled in a completely closed * state. */ @Override public void onDrawerClosed(View view) { super.onDrawerClosed(view); getSupportActionBar().setTitle(title); } /** Called when a drawer has settled in a completely open state. */ @Override public void onDrawerOpened(View drawerView) { super.onDrawerOpened(drawerView); getSupportActionBar().setTitle( app.getResources().getString(R.string.app_name)); if (getCurrentFocus() != null) { InputMethodManager imm = (InputMethodManager) getSystemService( Context.INPUT_METHOD_SERVICE); imm.hideSoftInputFromWindow(getCurrentFocus() .getWindowToken(), 0); } } }; // Set the drawer toggle as the DrawerListener drawerLayout.setDrawerListener(new DrawerLayout.DrawerListener() { @Override public void onDrawerSlide(View drawerView, float slideOffset) { drawerToggle.onDrawerSlide(drawerView, slideOffset); } @Override public void onDrawerOpened(final View drawerView) { drawerToggle.onDrawerOpened(drawerView); } @Override public void onDrawerClosed(View drawerView) { drawerToggle.onDrawerClosed(drawerView); setAccountSwitcherVisible(false); } @Override public void onDrawerStateChanged(int newState) { drawerToggle.onDrawerStateChanged(newState); } }); getSupportActionBar().setDisplayHomeAsUpEnabled(true); getSupportActionBar().setHomeButtonEnabled(true); drawer = (AccountSwitcherNavigationView) findViewById(R.id.navdrawer); drawer.setNavigationItemSelectedListener( new NavigationView.OnNavigationItemSelectedListener() { @Override public boolean onNavigationItemSelected(MenuItem item) { selectItem(item); return true; } }); if (!sp.getBoolean("version2.0.0-introduced", false) && app.getSlidingMenuEnabled()) { final Handler handler = new Handler(); // Just show the menu to explain that is there if people start // version 2 for the first time. 
// We need a handler because if we just put this in onCreate // nothing // happens. I don't have any idea, why. handler.postDelayed(new Runnable() { @Override public void run() { SharedPreferences sp = PreferenceManager .getDefaultSharedPreferences(OpacActivity.this); drawerLayout.openDrawer(drawer); sp.edit().putBoolean("version2.0.0-introduced", true) .commit(); } }, 500); } } } @Override protected void onPostCreate(Bundle savedInstanceState) { super.onPostCreate(savedInstanceState); setSupportProgressBarIndeterminateVisibility(false); if (hasDrawer) { drawerToggle.syncState(); } } @Override public void onConfigurationChanged(Configuration newConfig) { super.onConfigurationChanged(newConfig); if (hasDrawer) { drawerToggle.onConfigurationChanged(newConfig); } } @Override protected void onResume() { setupDrawer(); setupAccountSwitcher(); fragment = getSupportFragmentManager().findFragmentById(R.id.content_frame); if (hasDrawer) { drawerToggle.syncState(); } setTwoPane(twoPane); super.onResume(); fixNavigationSelection(); } protected void fixNavigationSelection() { if (fragment == null) { return; } if (fragment instanceof SearchFragment) { drawer.setCheckedItem(R.id.nav_search); } else if (fragment instanceof AccountFragment) { drawer.setCheckedItem(R.id.nav_account); } else if (fragment instanceof StarredFragment) { drawer.setCheckedItem(R.id.nav_starred); } else if (fragment instanceof InfoFragment) { drawer.setCheckedItem(R.id.nav_info); } if (app.getLibrary() != null) { getSupportActionBar().setSubtitle(app.getLibrary().getDisplayName()); } } /** * Swaps fragments in the main content view */ protected void selectItem(MenuItem item) { try { setSupportProgressBarIndeterminateVisibility(false); } catch (Exception e) { } int itemId = item.getItemId(); if (selectItemById(itemId)) return; // Highlight the selected item, update the title, and close the drawer drawer.setCheckedItem(itemId); drawerLayout.closeDrawer(drawer); setAccountSwitcherVisible(false); return; } 
    /**
     * Replaces the currently shown fragment (or launches an activity) for the given
     * navigation-drawer menu item id.
     *
     * @param id a {@code R.id.nav_*} menu item id
     * @return {@code true} if the item was handled by launching a separate activity
     *         (settings/about), {@code false} if a fragment was swapped in instead
     */
    protected boolean selectItemById(int id) {
        Fragment previousFragment = fragment;
        // we cannot use a switch statement here because it breaks compatibility to the Plus Edition
        if (id == R.id.nav_search) {
            fragment = new SearchFragment();
            setTwoPane(false);
            setFabVisible(true);
        } else if (id == R.id.nav_account) {
            fragment = new AccountFragment();
            setTwoPane(false);
            setFabVisible(false);
        } else if (id == R.id.nav_starred) {
            fragment = new StarredFragment();
            setTwoPane(true);
            setFabVisible(false);
        } else if (id == R.id.nav_info) {
            fragment = new InfoFragment();
            setTwoPane(false);
            setFabVisible(false);
        } else if (id == R.id.nav_settings) {
            // Settings and About are full activities, not fragments; bail out early.
            Intent intent = new Intent(this, MainPreferenceActivity.class);
            startActivity(intent);
            return true;
        } else if (id == R.id.nav_about) {
            Intent intent = new Intent(this, AboutActivity.class);
            startActivity(intent);
            return true;
        }
        setFabOnClickListener(id);
        // Insert the fragment by replacing any existing fragment
        FragmentManager fragmentManager = getSupportFragmentManager();
        FragmentTransaction transaction = fragmentManager.beginTransaction()
                .replace(R.id.content_frame, fragment);
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
            // Shared-element + fade transitions are only available on Lollipop and newer.
            fragment.setSharedElementEnterTransition(
                    TransitionInflater.from(this).inflateTransition
                            (android.R.transition.move));
            fragment.setEnterTransition(TransitionInflater.from(this).inflateTransition
                    (android.R.transition.fade));
        }
        try {
            // Animate the shared "gray box" header when switching between the
            // search and account screens in either direction.
            if (previousFragment instanceof SearchFragment
                    && fragment instanceof AccountFragment
                    && previousFragment.getView() != null) {
                transaction.addSharedElement(previousFragment.getView().findViewById(R.id
                        .rlSimpleSearch), getString(R.string.transition_gray_box));
            } else if (previousFragment instanceof AccountFragment
                    && fragment instanceof SearchFragment
                    && previousFragment.getView() != null) {
                transaction.addSharedElement(previousFragment.getView().findViewById(R.id
                        .rlAccHeader), getString(R.string.transition_gray_box));
            }
        } catch (NullPointerException e) {
            // Best effort: a missing shared-element view only costs us the animation.
            e.printStackTrace();
        }
        transaction.commit();
        selectedItemId = id;
        setTitle(getTitleForItem(id));
        return false;
    }

    /** Returns the drawer menu title for the given navigation item id. */
    protected CharSequence getTitleForItem(int id) {
        return drawer.getMenu().findItem(id).getTitle();
    }

    /**
     * Selects a navigation item by its string tag (used e.g. for deep links);
     * unknown tags are ignored.
     */
    protected void selectItem(String tag) {
        int id;
        switch (tag) {
            case "search":
                id = R.id.nav_search;
                break;
            case "account":
                id = R.id.nav_account;
                break;
            case "starred":
                id = R.id.nav_starred;
                break;
            case "info":
                id = R.id.nav_info;
                break;
            default:
                return;
        }
        selectItemById(id);
    }

    /** Selects the navigation item at the given menu position. */
    protected void selectItem(int pos) {
        selectItem(drawer.getMenu().getItem(pos));
    }

    /**
     * Wires (or clears) the floating action button's click handler for the given
     * navigation item. Only relevant on tablets; only the search screen uses the FAB.
     */
    protected void setFabOnClickListener(int id) {
        if (isTablet()) {
            if (id == R.id.nav_search) {
                fab.setOnClickListener(new View.OnClickListener() {
                    @Override
                    public void onClick(View v) {
                        // Scale-up animation originating from the FAB's own bounds.
                        ActivityOptionsCompat options = ActivityOptionsCompat.makeScaleUpAnimation
                                (v, Math.round(v.getLeft()), Math.round(v.getTop()),
                                        v.getWidth(), v.getHeight());
                        // NOTE(review): assumes the current fragment is a SearchFragment
                        // whenever the FAB is visible — verify against setFabVisible() callers.
                        ((SearchFragment) fragment).go(options.toBundle());
                    }
                });
            } else {
                fab.setOnClickListener(null);
            }
        }
    }

    /** Sets the action bar title and caches it so the drawer toggle can restore it. */
    @Override
    public void setTitle(CharSequence title) {
        super.setTitle(title);
        this.title = title;
    }

    /**
     * Verifies on start that the active account's library definition still exists in
     * the app's bundled assets; if not, removes the stale account, falls back to the
     * first remaining one (regenerating reminder alarms), and finally prompts for a
     * first account if none is usable.
     */
    @Override
    protected void onStart() {
        super.onStart();
        if (app.getLibrary() == null) {
            // Create new
            if (app.getAccount() != null) {
                try {
                    // Open/close is only a probe for whether the library JSON asset exists.
                    InputStream stream = getAssets().open(
                            OpacClient.ASSETS_BIBSDIR + "/"
                                    + app.getAccount().getLibrary() + ".json");
                    stream.close();
                } catch (IOException e) {
                    // Asset missing: the configured library no longer ships with the app.
                    AccountDataSource data = new AccountDataSource(this);
                    data.remove(app.getAccount());
                    List<Account> available_accounts = data.getAllAccounts();
                    if (available_accounts.size() > 0) {
                        ((OpacClient) getApplication())
                                .setAccount(available_accounts.get(0).getId());
                    }
                    new ReminderHelper(app).generateAlarms();
                    if (app.getLibrary() != null) {
                        return;
                    }
                }
            }
            app.addFirstAccount(this);
        }
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        MenuInflater mi = new MenuInflater(this);
        mi.inflate(R.menu.activity_opac, menu);
        return super.onCreateOptionsMenu(menu);
    }

    /** Callback invoked when the active account changes; refreshes the switcher UI. */
    public void accountSelected(Account account) {
        updateAccountSwitcher(account);
    }

    /**
     * Refreshes the account-switcher header (title, subtitle, expiring-media badge)
     * for the given account. No-op when {@code account} is null.
     */
    protected void updateAccountSwitcher(Account account) {
        if (account == null) return;

        SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(this);
        // Days of warning before lent media expires; user-configurable, defaults to 3.
        int tolerance = Integer.parseInt(sp.getString("notification_warning", "3"));
        int expiring = aData.getExpiring(account, tolerance);

        accountTitle.setText(Utils.getAccountTitle(account, this));
        accountSubtitle.setText(Utils.getAccountSubtitle(account, this));
        if (expiring > 0) {
            accountWarning.setText(String.valueOf(expiring));
            accountWarning.setVisibility(View.VISIBLE);
        } else {
            accountWarning.setVisibility(View.GONE);
        }
        accountsAdapter.setCurrentAccount(account);
    }

    /** Shows a dialog listing all configured accounts and switches to the tapped one. */
    public void selectaccount() {
        AlertDialog.Builder builder = new AlertDialog.Builder(this);

        // Get the layout inflater
        LayoutInflater inflater = getLayoutInflater();

        View view = inflater.inflate(R.layout.dialog_simple_list, null);

        ListView lv = (ListView) view.findViewById(R.id.lvBibs);
        AccountDataSource data = new AccountDataSource(this);
        final List<Account> accounts = data.getAllAccounts();
        AccountListAdapter adapter = new AccountListAdapter(this, accounts);
        lv.setAdapter(adapter);
        lv.setOnItemClickListener(new OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> parent, View view,
                    int position, long id) {
                app.setAccount(accounts.get(position).getId());
                adialog.dismiss();
                // NOTE(review): unchecked cast — assumes the visible fragment implements
                // AccountSelectedListener; confirm all fragments shown here do.
                ((AccountSelectedListener) fragment).accountSelected(accounts
                        .get(position));
            }
        });
        builder.setTitle(R.string.account_select)
               .setView(view)
               .setNegativeButton(R.string.cancel,
                       new DialogInterface.OnClickListener() {
                           @Override
                           public void onClick(DialogInterface dialog, int id) {
                               adialog.cancel();
                           }
                       })
               .setNeutralButton(R.string.accounts_edit,
                       new DialogInterface.OnClickListener() {
                           @Override
                           public void onClick(DialogInterface dialog, int id) {
                               dialog.dismiss();
                               Intent intent = new Intent(OpacActivity.this,
                                       AccountListActivity.class);
                               startActivity(intent);
                           }
                       });
        adialog = builder.create();
        adialog.show();
    }

    /** Switches the active account by id and refreshes the switcher UI. */
    public void selectaccount(long id) {
        app.setAccount(id);
        accountSelected(app.getAccount());
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Pass the event to ActionBarDrawerToggle, if it returns
        // true, then it has handled the app icon touch event
        return hasDrawer && drawerToggle.onOptionsItemSelected(item)
                || super.onOptionsItemSelected(item);
    }

    /** A right content pane only exists in the tablet layout. */
    protected boolean isTablet() {
        return findViewById(R.id.content_frame_right) != null;
    }

    /**
     * Toggles the two-pane (master/detail) tablet layout; on phones this only
     * records the flag for instance-state saving.
     */
    protected void setTwoPane(boolean active) {
        twoPane = active;
        if (isTablet()) {
            findViewById(R.id.content_frame_right).setVisibility(
                    active ? View.VISIBLE : View.GONE);
            // Single-pane content is width-capped; two-pane fills the screen.
            findViewById(R.id.twopane_wrapper).getLayoutParams().width = active ?
                    LinearLayout.LayoutParams.MATCH_PARENT :
                    getResources().getDimensionPixelSize(R.dimen.single_pane_max_width);
        }
    }

    /**
     * Shows/hides the floating action button (tablet only) and repositions it:
     * on wide screens (>= 864dp) it overlaps the toolbar's bottom edge, otherwise
     * it sits in the bottom-right corner.
     */
    protected void setFabVisible(boolean visible) {
        fabVisible = visible;
        if (isTablet()) {
            fab.setVisibility(visible ? View.VISIBLE : View.GONE);

            if (visible) {
                DisplayMetrics displayMetrics = getResources().getDisplayMetrics();
                float density = getResources().getDisplayMetrics().density;
                float dpWidth = displayMetrics.widthPixels / density;
                // 72dp square FAB, positioned manually via margins.
                RelativeLayout.LayoutParams params = new RelativeLayout.LayoutParams
                        (Math.round(72 * density), Math.round(72 * density));
                if (dpWidth >= 864) {
                    params.addRule(RelativeLayout.BELOW, R.id.toolbar);
                    params.addRule(RelativeLayout.ALIGN_PARENT_RIGHT);
                    // Negative top margin lets the FAB straddle the toolbar edge.
                    params.setMargins(0, Math.round(-36 * density),
                            Math.round(36 * density), 0);
                    ViewCompat.setElevation(fab, 4 * density);
                } else {
                    params.addRule(RelativeLayout.ALIGN_PARENT_RIGHT);
                    params.addRule(RelativeLayout.ALIGN_PARENT_BOTTOM);
                    params.setMargins(0, 0, Math.round(36 * density),
                            Math.round(36 * density));
                    ViewCompat.setElevation(fab, 12 * density);
                }
                fab.setLayoutParams(params);
            }
        }
    }

    /** Persists pane/FAB/selection state plus the active fragment and title. */
    @Override
    public void onSaveInstanceState(Bundle outState) {
        super.onSaveInstanceState(outState);
        outState.putBoolean("twoPane", twoPane);
        outState.putBoolean("fabVisible", fabVisible);
        outState.putInt("selectedItemId", selectedItemId);
        if (fragment != null) {
            getSupportFragmentManager().putFragment(outState, "fragment", fragment);
        }
        if (title != null) {
            outState.putCharSequence("title", title);
        }
    }

    /** Account-switcher callback: activate the tapped account and close the drawer. */
    @Override
    public void onAccountClicked(Account account) {
        selectaccount(account.getId());
        drawerLayout.closeDrawer(drawer);
        setAccountSwitcherVisible(false);
    }

    @Override
    public void onAddAccountClicked() {
        Intent intent = new Intent(this, LibraryListActivity.class);
        startActivity(intent);
    }

    @Override
    public void onManageAccountsClicked() {
        Intent intent = new Intent(this, AccountListActivity.class);
        startActivity(intent);
    }

    /** Implemented by fragments that need to react to account switches. */
    public interface AccountSelectedListener {
        void accountSelected(Account account);
    }

    /**
     * Spinner adapter for key/value meta entries. A {@code null} entry renders an
     * empty item view; otherwise the entry's value string is shown.
     */
    public class MetaAdapter<T extends Map.Entry<?, String>> extends ArrayAdapter<T> {
        // Backing list; null elements act as empty placeholder rows.
        private List<T> objects;
        // Layout resource used for the collapsed (non-dropdown) item view.
        private int spinneritem;

        public MetaAdapter(Context context, List<T> objects, int spinneritem) {
            super(context, R.layout.simple_spinner_item, objects);
            this.objects = objects;
            this.spinneritem = spinneritem;
        }

        @Override
        public View getDropDownView(int position, View contentView, ViewGroup viewGroup) {
            View view;

            if (objects.get(position) == null) {
                // Placeholder entry: inflate a fresh, empty dropdown row.
                LayoutInflater layoutInflater = (LayoutInflater) getContext()
                        .getSystemService(Context.LAYOUT_INFLATER_SERVICE);
                view = layoutInflater
                        .inflate(R.layout.simple_spinner_dropdown_item, viewGroup, false);
                return view;
            }

            T item = objects.get(position);

            if (contentView == null) {
                LayoutInflater layoutInflater = (LayoutInflater) getContext()
                        .getSystemService(Context.LAYOUT_INFLATER_SERVICE);
                view = layoutInflater
                        .inflate(R.layout.simple_spinner_dropdown_item, viewGroup, false);
            } else {
                // Recycle the convertView supplied by the framework.
                view = contentView;
            }

            TextView tvText = (TextView) view.findViewById(android.R.id.text1);
            tvText.setText(item.getValue());
            return view;
        }

        @Override
        public View getView(int position, View contentView, ViewGroup viewGroup) {
            View view;

            if (objects.get(position) == null) {
                // Placeholder entry: inflate a fresh, empty item row.
                LayoutInflater layoutInflater = (LayoutInflater) getContext()
                        .getSystemService(Context.LAYOUT_INFLATER_SERVICE);
                view = layoutInflater.inflate(spinneritem, viewGroup, false);
                return view;
            }

            T item = objects.get(position);

            if (contentView == null) {
                LayoutInflater layoutInflater = (LayoutInflater) getContext()
                        .getSystemService(Context.LAYOUT_INFLATER_SERVICE);
                view = layoutInflater.inflate(spinneritem, viewGroup, false);
            } else {
                // Recycle the convertView supplied by the framework.
                view = contentView;
            }

            TextView tvText = (TextView) view.findViewById(android.R.id.text1);
            tvText.setText(item.getValue());
            return view;
        }
    }
}
package com.newsblur.database;

import android.database.Cursor;
import android.graphics.Color;
import android.graphics.Typeface;
import android.os.Parcelable;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.recyclerview.widget.RecyclerView;
import androidx.recyclerview.widget.DiffUtil;
import android.text.TextUtils;
import android.view.ContextMenu;
import android.view.GestureDetector;
import android.view.LayoutInflater;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewParent;
import android.widget.FrameLayout;
import android.widget.ImageView;
import android.widget.RelativeLayout;
import android.widget.TextView;

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import com.newsblur.R;
import com.newsblur.activity.FeedItemsList;
import com.newsblur.activity.NbActivity;
import com.newsblur.domain.Story;
import com.newsblur.domain.UserDetails;
import com.newsblur.fragment.ItemSetFragment;
import com.newsblur.fragment.StoryIntelTrainerFragment;
import com.newsblur.util.FeedSet;
import com.newsblur.util.FeedUtils;
import com.newsblur.util.GestureAction;
import com.newsblur.util.ImageLoader;
import com.newsblur.util.PrefsUtils;
import com.newsblur.util.StoryContentPreviewStyle;
import com.newsblur.util.StoryListStyle;
import com.newsblur.util.StoryUtils;
import com.newsblur.util.ThumbnailStyle;
import com.newsblur.util.UIUtils;
import com.newsblur.view.StoryThumbnailView;

/**
 * Story list adapter, RecyclerView style.
 */
public class StoryViewAdapter extends RecyclerView.Adapter<RecyclerView.ViewHolder> {

    // View types: tiles for grid mode, rows for list mode, plus appended footers.
    public final static int VIEW_TYPE_STORY_TILE = 1;
    public final static int VIEW_TYPE_STORY_ROW = 2;
    public final static int VIEW_TYPE_FOOTER = 3;

    // Base text sizes (sp) that get multiplied by the user's list text-size preference.
    private final static float defaultTextSize_story_item_feedtitle = 13f;
    private final static float defaultTextSize_story_item_title = 14f;
    private final static float defaultTextSize_story_item_date = 11f;
    private final static float defaultTextSize_story_item_author = 11f;
    private final static float defaultTextSize_story_item_snip = 12f;

    // Alpha applied to read stories; the B255 variant is the same value on a 0-255 scale.
    private final static float READ_STORY_ALPHA = 0.35f;
    private final static int READ_STORY_ALPHA_B255 = (int) (255f * READ_STORY_ALPHA);

    private List<View> footerViews = new ArrayList<View>();

    // the cursor from which we pull story objects. should not be used except by the thaw/diff worker
    private Cursor cursor;
    // the live list of stories being used by the adapter
    private List<Story> stories = new ArrayList<Story>(0);
    // scroll state handed in via swapCursor, to be restored after the next diff dispatch
    private Parcelable oldScrollState;

    // single-threaded executor that serializes cursor thaw/diff work off the UI thread
    private final ExecutorService executorService;

    private NbActivity context;
    private ItemSetFragment fragment;
    private FeedSet fs;
    private StoryListStyle listStyle;
    // per-FeedSet display flags, derived once in the constructor
    private boolean ignoreReadStatus;
    private boolean ignoreIntel;
    private boolean singleFeed;
    private float textSize;
    private UserDetails user;
    private ThumbnailStyle thumbnailStyle;

    public StoryViewAdapter(NbActivity context, ItemSetFragment fragment, FeedSet fs, StoryListStyle listStyle) {
        this.context = context;
        this.fragment = fragment;
        this.fs = fs;
        this.listStyle = listStyle;

        // Decision table: how read-state fading, intel dots, and feed headers behave
        // depends on which kind of FeedSet is being displayed.
        if (fs.isGlobalShared())   {ignoreReadStatus = false; ignoreIntel = true; singleFeed = false;}
        if (fs.isAllSocial())      {ignoreReadStatus = false; ignoreIntel = false; singleFeed = false;}
        if (fs.isAllNormal())      {ignoreReadStatus = false; ignoreIntel = false; singleFeed = false;}
        if (fs.isInfrequent())     {ignoreReadStatus = false; ignoreIntel = false; singleFeed = false;}
        if (fs.isSingleSocial())   {ignoreReadStatus = false; ignoreIntel = false; singleFeed = false;}
        if (fs.isFolder())         {ignoreReadStatus = fs.isFilterSaved(); ignoreIntel = fs.isFilterSaved(); singleFeed = false;}
        if (fs.isSingleNormal())   {ignoreReadStatus = fs.isFilterSaved(); ignoreIntel = fs.isFilterSaved(); singleFeed = true;}
        if (fs.isAllRead())        {ignoreReadStatus = false; ignoreIntel = true; singleFeed = false;}
        if (fs.isAllSaved())       {ignoreReadStatus = true; ignoreIntel = true; singleFeed = false;}
        if (fs.isSingleSavedTag()) {ignoreReadStatus = true; ignoreIntel = true; singleFeed = false;}

        textSize = PrefsUtils.getListTextSize(context);

        user = PrefsUtils.getUserDetails(context);

        thumbnailStyle = PrefsUtils.getThumbnailStyle(context);

        executorService = Executors.newFixedThreadPool(1);

        // stable ids come from story-hash hashCodes; see getItemId()
        setHasStableIds(true);
    }

    public void updateFeedSet(FeedSet fs) {
        this.fs = fs;
    }

    public void setStyle(StoryListStyle listStyle) {
        this.listStyle = listStyle;
    }

    public void setThumbnailStyle(ThumbnailStyle thumbnailStyle) {
        this.thumbnailStyle = thumbnailStyle;
    }

    public void addFooterView(View v) {
        footerViews.add(v);
    }

    @Override
    public int getItemCount() {
        return (getStoryCount() + footerViews.size());
    }

    /** Number of stories shown; capped at 3 when the feed set needs premium access. */
    public int getStoryCount() {
        if (fs != null && UIUtils.needsPremiumAccess(context, fs)) {
            return Math.min(3, stories.size());
        } else {
            return stories.size();
        }
    }

    /**
     * get the number of stories we very likely have, even if they haven't
     * been thawed yet, for callers that absolutely must know the size
     * of our dataset (such as for calculating when to fetch more stories)
     */
    public int getRawStoryCount() {
        if (cursor == null) return 0;
        if (cursor.isClosed()) return 0;
        int count = 0;
        try {
            count = cursor.getCount();
        } catch (Exception e) {
            // rather than worry about sync locking for cursor changes, just fail. a
            // closing cursor may as well not be loaded.
        }
        return count;
    }

    @Override
    public int getItemViewType(int position) {
        // positions past the story range map onto the footer views
        if (position >= getStoryCount()) return VIEW_TYPE_FOOTER;
        if (listStyle == StoryListStyle.LIST) {
            return VIEW_TYPE_STORY_ROW;
        } else {
            return VIEW_TYPE_STORY_TILE;
        }
    }

    @Override
    public long getItemId(int position) {
        if (position >= getStoryCount()) {
            // footers use their view's identity hash as a stable id
            return (footerViews.get(position - getStoryCount()).hashCode());
        }
        if (position >= stories.size() || position < 0) return 0;
        return stories.get(position).storyHash.hashCode();
    }

    /**
     * Accepts a new cursor and schedules an async thaw/diff pass. An optional
     * scroll state can be supplied to be restored once the diff is applied.
     */
    public void swapCursor(final Cursor c, final RecyclerView rv, Parcelable oldScrollState) {
        // cache the identity of the most recent cursor so async batches can check to
        // see if they are stale
        cursor = c;
        // if the caller wants to restore a scroll state, hold onto it for when we update
        // the dataset and use that state at the right moment
        if (oldScrollState != null) {
            this.oldScrollState = oldScrollState;
        }
        // process the cursor into objects and update the View async
        Runnable r = new Runnable() {
            @Override
            public void run() {
                thawDiffUpdate(c, rv);
            }
        };
        executorService.submit(r);
    }

    /**
     * Attempt to thaw a new set of stories from the cursor most recently
     * seen when the thaw cycle started.
     */
    private void thawDiffUpdate(final Cursor c, final RecyclerView rv) {
        // stale check: a newer cursor superseded this one while we were queued
        if (c != cursor) return;

        // thawed stories
        final List<Story> newStories;
        int indexOfLastUnread = -1;
        // attempt to thaw as gracefully as possible despite the fact that the loader
        // framework could close our cursor at any moment.  if this happens, it is fine,
        // as a new one will be provided and another cycle will start.  just return.
        try {
            if (c == null) {
                newStories = new ArrayList<Story>(0);
            } else {
                if (c.isClosed()) return;
                newStories = new ArrayList<Story>(c.getCount());
                c.moveToPosition(-1);
                while (c.moveToNext()) {
                    if (c.isClosed()) return;
                    Story s = Story.fromCursor(c);
                    s.bindExternValues(c);
                    newStories.add(s);
                    if (!s.read) indexOfLastUnread = c.getPosition();
                }
            }
        } catch (Exception e) {
            com.newsblur.util.Log.e(this, "error thawing story list: " + e.getMessage(), e);
            return;
        }

        // generate the RecyclerView diff
        final DiffUtil.DiffResult diff = DiffUtil.calculateDiff(new StoryListDiffer(newStories), false);

        // double-check staleness before touching the UI
        if (c != cursor) return;

        fragment.storyThawCompleted(indexOfLastUnread);

        rv.post(new Runnable() {
            @Override
            public void run() {
                if (c != cursor) return;
                // many versions of RecyclerView like to auto-scroll to inserted elements which is
                // not at all what we want.  the current scroll position is one of the things frozen
                // in instance state, so keep it and re-apply after deltas to preserve position
                Parcelable scrollState = rv.getLayoutManager().onSaveInstanceState();
                synchronized (StoryViewAdapter.this) {
                    stories = newStories;
                    diff.dispatchUpdatesTo(StoryViewAdapter.this);
                    // the one exception to restoring state is if we were passed an old state to restore
                    // along with the cursor
                    if (oldScrollState != null) {
                        rv.getLayoutManager().onRestoreInstanceState(oldScrollState);
                        oldScrollState = null;
                    } else {
                        rv.getLayoutManager().onRestoreInstanceState(scrollState);
                    }
                }
            }
        });
    }

    /** DiffUtil callback comparing the pending story list against the live one. */
    private class StoryListDiffer extends DiffUtil.Callback {
        private List<Story> newStories;
        public StoryListDiffer(List<Story> newStories) {
            StoryListDiffer.this.newStories = newStories;
        }
        public boolean areContentsTheSame(int oldItemPosition, int newItemPosition) {
            return newStories.get(newItemPosition).isChanged(stories.get(oldItemPosition));
        }
        public boolean areItemsTheSame(int oldItemPosition, int newItemPosition) {
            // identity is the story hash
            return newStories.get(newItemPosition).storyHash.equals(stories.get(oldItemPosition).storyHash);
        }
        public int getNewListSize() {
            return newStories.size();
        }
        public int getOldListSize() {
            return stories.size();
        }
    }

    /** Returns the story at the given position, or null when out of range. */
    public synchronized Story getStory(int position) {
        if (position >= stories.size() || position < 0) {
            return null;
        } else {
            return stories.get(position);
        }
    }

    public void setTextSize(float textSize) {
        this.textSize = textSize;
    }

    @NonNull
    @Override
    public RecyclerView.ViewHolder onCreateViewHolder(@NonNull ViewGroup viewGroup, int viewType) {
        // NB: the non-temporary calls to setLayerType() dramatically speed up list movement, but
        // are only safe because we perform fairly advanced delta updates. if any changes to invalidation
        // logic are made, check the list with hardware layer profiling to ensure we aren't over-invalidating
        if (viewType == VIEW_TYPE_STORY_TILE) {
            View v = LayoutInflater.from(viewGroup.getContext()).inflate(R.layout.view_story_tile, viewGroup, false);
            v.setLayerType(View.LAYER_TYPE_HARDWARE, null);
            return new StoryTileViewHolder(v);
        } else if (viewType == VIEW_TYPE_STORY_ROW) {
            View v = LayoutInflater.from(viewGroup.getContext()).inflate(R.layout.view_story_row, viewGroup, false);
            v.setLayerType(View.LAYER_TYPE_HARDWARE, null);
            return new StoryRowViewHolder(v);
        } else {
            View v = LayoutInflater.from(viewGroup.getContext()).inflate(R.layout.view_footer_tile, viewGroup, false);
            return new FooterViewHolder(v);
        }
    }

    /**
     * Base holder for both tile and row story layouts. Handles clicks, the long-press
     * context menu, and horizontal swipe gestures on the item view.
     */
    public class StoryViewHolder extends RecyclerView.ViewHolder
                                 implements View.OnClickListener,
                                            View.OnCreateContextMenuListener,
                                            MenuItem.OnMenuItemClickListener,
                                            View.OnTouchListener {

        View leftBarOne;
        View leftBarTwo;
        ImageView intelDot;
        // thumbnail views vary by layout; any of these may be absent, hence @Nullable
        @Nullable StoryThumbnailView thumbViewRight;
        @Nullable StoryThumbnailView thumbViewLeft;
        @Nullable ImageView thumbTileView;
        ImageView feedIconView;
        TextView feedTitleView;
        TextView storyTitleView;
        TextView storyDate;
        View savedView;
        View sharedView;

        Story story;
        // handle on the in-flight thumbnail load so it can be cancelled on recycle
        ImageLoader.PhotoToLoad thumbLoader;
        String lastThumbUrl;

        GestureDetector gestureDetector = new GestureDetector(context, new StoryViewGestureDetector(StoryViewHolder.this));
        // set by the gesture detector while a right-to-left / left-to-right swipe is in progress
        boolean gestureR2L = false;
        boolean gestureL2R = false;
        // suppresses the spurious click/context-menu the system fires right after a gesture
        boolean gestureDebounce = false;

        public StoryViewHolder(View view) {
            super(view);
            leftBarOne = view.findViewById(R.id.story_item_favicon_borderbar_1);
            leftBarTwo = view.findViewById(R.id.story_item_favicon_borderbar_2);
            intelDot = view.findViewById(R.id.story_item_inteldot);
            thumbViewRight = view.findViewById(R.id.story_item_thumbnail_right);
            thumbViewLeft = view.findViewById(R.id.story_item_thumbnail_left);
            thumbTileView = view.findViewById(R.id.story_item_thumbnail);
            feedIconView = view.findViewById(R.id.story_item_feedicon);
            feedTitleView = view.findViewById(R.id.story_item_feedtitle);
            storyTitleView = view.findViewById(R.id.story_item_title);
            storyDate = view.findViewById(R.id.story_item_date);
            savedView = view.findViewById(R.id.story_item_saved_icon);
            sharedView = view.findViewById(R.id.story_item_shared_icon);

            view.setOnClickListener(StoryViewHolder.this);
            view.setOnCreateContextMenuListener(StoryViewHolder.this);
            view.setOnTouchListener(StoryViewHolder.this);
        }

        @Override
        public void onClick(View view) {
            // clicks like to get accidentally triggered by the system right after we detect
            // a gesture. ignore if a gesture appears to be in progress.
            if (gestureDebounce) {
                gestureDebounce = false;
                return;
            }
            if (gestureL2R || gestureR2L) return;
            UIUtils.startReadingActivity(fs, story.storyHash, context);
        }

        @Override
        public void onCreateContextMenu(ContextMenu menu, View v, ContextMenu.ContextMenuInfo menuInfo) {
            // clicks like to get accidentally triggered by the system right after we detect
            // a gesture. ignore if a gesture appears to be in progress.
            if (gestureDebounce) {
                gestureDebounce = false;
                return;
            }
            if (gestureL2R || gestureR2L) return;

            MenuInflater inflater = new MenuInflater(context);
            UIUtils.inflateStoryContextMenu(menu, inflater, context, fs, story);
            for (int i=0; i<menu.size(); i++) {
                menu.getItem(i).setOnMenuItemClickListener(StoryViewHolder.this);
            }
        }

        @Override
        public boolean onMenuItemClick(MenuItem item) {
            switch (item.getItemId()) {
                case R.id.menu_mark_story_as_read:
                    FeedUtils.markStoryAsRead(story, context);
                    return true;

                case R.id.menu_mark_story_as_unread:
                    FeedUtils.markStoryUnread(story, context);
                    return true;

                case R.id.menu_mark_older_stories_as_read:
                    FeedUtils.markRead(context, fs, story.timestamp, null, R.array.mark_older_read_options, false);
                    return true;

                case R.id.menu_mark_newer_stories_as_read:
                    FeedUtils.markRead(context, fs, null, story.timestamp, R.array.mark_newer_read_options, false);
                    return true;

                case R.id.menu_send_story:
                    FeedUtils.sendStoryUrl(story, context);
                    return true;

                case R.id.menu_send_story_full:
                    FeedUtils.sendStoryFull(story, context);
                    return true;

                case R.id.menu_save_story:
                    //TODO get folder name
                    FeedUtils.setStorySaved(story, true, context, null);
                    return true;

                case R.id.menu_unsave_story:
                    FeedUtils.setStorySaved(story, false, context, null);
                    return true;

                case R.id.menu_intel:
                    if (story.feedId.equals("0")) return true; // cannot train on feedless stories
                    StoryIntelTrainerFragment intelFrag = StoryIntelTrainerFragment.newInstance(story, fs);
                    intelFrag.show(context.getSupportFragmentManager(), StoryIntelTrainerFragment.class.getName());
                    return true;

                case R.id.menu_go_to_feed:
                    // note: local fs intentionally shadows the adapter's FeedSet here
                    FeedSet fs = FeedSet.singleFeed(story.feedId);
                    FeedItemsList.startActivity(context, fs, FeedUtils.getFeed(story.feedId), null);
                    return true;

                default:
                    return false;
            }
        }

        @Override
        public boolean onTouch(View v, MotionEvent event) {
            // detector looks for ongoing gestures and sets our flags
            boolean result = gestureDetector.onTouchEvent(event);
            // iff a gesture possibly completed, see if any were found
            if (event.getActionMasked() == MotionEvent.ACTION_UP) {
                flushGesture();
            } else if (event.getActionMasked() == MotionEvent.ACTION_CANCEL) {
                // RecyclerViews may take event ownership to detect scrolling and never send an ACTION_UP
                // to children. valid gestures end in a CANCEL more often than not
                flushGesture();
            }
            return result;
        }

        // Executes the user-configured action for whichever swipe direction completed,
        // then clears the gesture flags.
        private void flushGesture() {
            // by default, do nothing
            GestureAction action = GestureAction.GEST_ACTION_NONE;
            if (gestureL2R) {
                action = PrefsUtils.getLeftToRightGestureAction(context);
                gestureL2R = false;
            }
            if (gestureR2L) {
                action = PrefsUtils.getRightToLeftGestureAction(context);
                gestureR2L = false;
            }
            switch (action) {
                case GEST_ACTION_MARKREAD:
                    FeedUtils.markStoryAsRead(story, context);
                    break;
                case GEST_ACTION_MARKUNREAD:
                    FeedUtils.markStoryUnread(story, context);
                    break;
                case GEST_ACTION_SAVE:
                    FeedUtils.setStorySaved(story, true, context, null);
                    break;
                case GEST_ACTION_UNSAVE:
                    FeedUtils.setStorySaved(story, false, context, null);
                    break;
                case GEST_ACTION_STATISTICS:
                    FeedUtils.openStatistics(context, story.feedId);
                    break;
                case GEST_ACTION_NONE:
                default:
            }
        }
    }

    public class StoryTileViewHolder extends StoryViewHolder {
        public StoryTileViewHolder(View view) {
            super(view);
        }
    }

    /** Row layout adds author and snippet text on top of the common holder. */
    public class StoryRowViewHolder extends StoryViewHolder {
        TextView storyAuthor;
        TextView storySnippet;
        public StoryRowViewHolder(View view) {
            super(view);
            storyAuthor = view.findViewById(R.id.story_item_author);
            storySnippet = view.findViewById(R.id.story_item_content);
        }
    }

    @Override
    public void onBindViewHolder(@NonNull RecyclerView.ViewHolder viewHolder, int position) {
        if (viewHolder instanceof StoryViewHolder) {
            StoryViewHolder vh = (StoryViewHolder) viewHolder;
            if (position >= stories.size() || position < 0) return;
            Story story = stories.get(position);
            vh.story = story;
            bindCommon(vh, position, story);
            if (vh instanceof StoryRowViewHolder) {
                StoryRowViewHolder vhRow = (StoryRowViewHolder) vh;
                bindRow(vhRow, position, story);
            } else {
                StoryTileViewHolder vhTile = (StoryTileViewHolder) vh;
                bindTile(vhTile, position, story);
            }
        } else {
            FooterViewHolder vh = (FooterViewHolder) viewHolder;
            vh.innerView.removeAllViews();
            View targetFooter = footerViews.get(position - getStoryCount());

            // footers often move absolute position, but views can only have one parent. since the RV doesn't
            // necessarily remove from the old pos before adding to the new, we have to add a check here.
            // however, modifying other views out of order causes requestLayout to be called from within a
            // layout pass, which causes warnings.
            ViewParent oldFooterHolder = targetFooter.getParent();
            if (oldFooterHolder instanceof ViewGroup) ((ViewGroup) oldFooterHolder).removeAllViews();

            vh.innerView.addView(targetFooter);
        }
    }

    /**
     * Bind view elements that are common to tiles and rows.
     */
    private void bindCommon(StoryViewHolder vh, int position, Story story) {
        // feed colour bars down the left edge
        vh.leftBarOne.setBackgroundColor(UIUtils.decodeColourValue(story.extern_feedColor, Color.GRAY));
        vh.leftBarTwo.setBackgroundColor(UIUtils.decodeColourValue(story.extern_feedFade, Color.LTGRAY));

        if (!ignoreIntel) {
            // intel dot: focus (>0), unread (==0), or hidden (<0)
            int score = story.extern_intelTotalScore;
            if (score > 0) {
                vh.intelDot.setImageResource(R.drawable.g_icn_focus);
            } else if (score == 0) {
                vh.intelDot.setImageResource(R.drawable.g_icn_unread);
            } else {
                vh.intelDot.setImageResource(R.drawable.g_icn_hidden);
            }
        } else {
            vh.intelDot.setImageResource(android.R.color.transparent);
        }

        vh.storyTitleView.setText(UIUtils.fromHtml(story.title));
        vh.storyDate.setText(StoryUtils.formatShortDate(context, story.timestamp));

        // lists with mixed feeds get added info, but single feeds do not
        if (!singleFeed) {
            FeedUtils.iconLoader.displayImage(story.extern_faviconUrl, vh.feedIconView);
            vh.feedTitleView.setText(story.extern_feedTitle);
            vh.feedIconView.setVisibility(View.VISIBLE);
            vh.feedTitleView.setVisibility(View.VISIBLE);
        } else {
            vh.feedIconView.setVisibility(View.GONE);
            vh.feedTitleView.setVisibility(View.GONE);
        }

        if (vh.story.starred) {
            vh.savedView.setVisibility(View.VISIBLE);
        } else {
            vh.savedView.setVisibility(View.GONE);
        }

        // shared icon only when the current user is among the sharers
        boolean shared = false;
        findshareloop: for (String userId : story.sharedUserIds) {
            if (TextUtils.equals(userId, user.id)) {
                shared = true;
                break findshareloop;
            }
        }
        if (shared) {
            vh.sharedView.setVisibility(View.VISIBLE);
        } else {
            vh.sharedView.setVisibility(View.GONE);
        }

        // dynamic text sizing
        vh.feedTitleView.setTextSize(textSize * defaultTextSize_story_item_feedtitle);
        vh.storyTitleView.setTextSize(textSize * defaultTextSize_story_item_title);
        vh.storyDate.setTextSize(textSize * defaultTextSize_story_item_date);

        // read/unread fading
        if (this.ignoreReadStatus || (!story.read)) {
            vh.leftBarOne.getBackground().setAlpha(255);
            vh.leftBarTwo.getBackground().setAlpha(255);
            vh.intelDot.setImageAlpha(255);
            if (vh.thumbViewLeft != null) vh.thumbViewLeft.setImageAlpha(255);
            if (vh.thumbViewRight != null) vh.thumbViewRight.setImageAlpha(255);
            if (vh.thumbTileView != null) vh.thumbTileView.setImageAlpha(255);
            vh.feedIconView.setImageAlpha(255);
            vh.feedTitleView.setAlpha(1.0f);
            vh.storyTitleView.setAlpha(1.0f);
            vh.storyDate.setAlpha(1.0f);
        } else {
            vh.leftBarOne.getBackground().setAlpha(READ_STORY_ALPHA_B255);
            vh.leftBarTwo.getBackground().setAlpha(READ_STORY_ALPHA_B255);
            vh.intelDot.setImageAlpha(READ_STORY_ALPHA_B255);
            if (vh.thumbViewLeft != null) vh.thumbViewLeft.setImageAlpha(READ_STORY_ALPHA_B255);
            if (vh.thumbViewRight != null) vh.thumbViewRight.setImageAlpha(READ_STORY_ALPHA_B255);
            if (vh.thumbTileView != null) vh.thumbTileView.setImageAlpha(READ_STORY_ALPHA_B255);
            vh.feedIconView.setImageAlpha(READ_STORY_ALPHA_B255);
            vh.feedTitleView.setAlpha(READ_STORY_ALPHA);
            vh.storyTitleView.setAlpha(READ_STORY_ALPHA);
            vh.storyDate.setAlpha(READ_STORY_ALPHA);
        }
    }

    private void bindTile(StoryTileViewHolder vh, int position, Story story) {
        // when first created, tiles' views tend to not yet have their dimensions calculated, but
        // upon being recycled they will often have a known size, which lets us give a max size to
        // the image loader, which in turn can massively optimise loading.  the image loader will
        // reject nonsense values
        if (PrefsUtils.getThumbnailStyle(context) != ThumbnailStyle.OFF && vh.thumbTileView != null) {
            // the view will display a stale, recycled thumb before the new one loads if the old is not cleared
            int thumbSizeGuess = vh.thumbTileView.getMeasuredHeight();
            vh.thumbTileView.setImageBitmap(null);
            vh.thumbLoader = FeedUtils.thumbnailLoader.displayImage(story.thumbnailUrl, vh.thumbTileView, thumbSizeGuess, true);
            vh.lastThumbUrl = story.thumbnailUrl;
        }
    }

    private void bindRow(StoryRowViewHolder vh, int position, Story story) {
        StoryContentPreviewStyle storyContentPreviewStyle = PrefsUtils.getStoryContentPreviewStyle(context);
        if (storyContentPreviewStyle != StoryContentPreviewStyle.NONE) {
            // snippet shown: shorter title, snippet length by preference
            vh.storyTitleView.setMaxLines(3);
            if (storyContentPreviewStyle == StoryContentPreviewStyle.LARGE) {
                vh.storySnippet.setMaxLines(6);
            } else if (storyContentPreviewStyle == StoryContentPreviewStyle.MEDIUM) {
                vh.storySnippet.setMaxLines(4);
            } else if (storyContentPreviewStyle == StoryContentPreviewStyle.SMALL){
                vh.storySnippet.setMaxLines(2);
            }
            vh.storySnippet.setVisibility(View.VISIBLE);
            vh.storySnippet.setText(story.shortContent);
        } else {
            vh.storyTitleView.setMaxLines(6);
            vh.storySnippet.setVisibility(View.GONE);
        }

        if (TextUtils.isEmpty(story.authors)) {
            vh.storyAuthor.setText("");
        } else {
            vh.storyAuthor.setText(story.authors);
        }

        vh.storyAuthor.setTextSize(textSize * defaultTextSize_story_item_author);
        vh.storySnippet.setTextSize(textSize * defaultTextSize_story_item_snip);

        if (PrefsUtils.getThumbnailStyle(context) != ThumbnailStyle.OFF && vh.thumbViewRight != null && vh.thumbViewLeft != null) {
            // the view will display a stale, recycled thumb before the new one loads if the old is not cleared
            if (thumbnailStyle == ThumbnailStyle.LEFT_LARGE || thumbnailStyle == ThumbnailStyle.LEFT_SMALL) {
                int thumbSizeGuess = vh.thumbViewLeft.getMeasuredHeight();
                vh.thumbViewLeft.setImageBitmap(null);
                vh.thumbLoader = FeedUtils.thumbnailLoader.displayImage(story.thumbnailUrl, vh.thumbViewLeft, thumbSizeGuess, true);
                vh.thumbViewRight.setVisibility(View.GONE);
                vh.thumbViewLeft.setVisibility(View.VISIBLE);
            } else if (thumbnailStyle == ThumbnailStyle.RIGHT_LARGE || thumbnailStyle == ThumbnailStyle.RIGHT_SMALL) {
                int thumbSizeGuess = vh.thumbViewRight.getMeasuredHeight();
                vh.thumbViewRight.setImageBitmap(null);
                vh.thumbLoader = FeedUtils.thumbnailLoader.displayImage(story.thumbnailUrl, vh.thumbViewRight, thumbSizeGuess, true);
                vh.thumbViewLeft.setVisibility(View.GONE);
                vh.thumbViewRight.setVisibility(View.VISIBLE);
            }
            vh.lastThumbUrl = story.thumbnailUrl;
        } else if (vh.thumbViewRight != null && vh.thumbViewLeft != null) {
            // if in row mode and thumbnail is disabled or missing, don't just hide but collapse
            vh.thumbViewRight.setVisibility(View.GONE);
            vh.thumbViewLeft.setVisibility(View.GONE);
        }

        // size the active thumbnail view for small vs. large styles
        int sizeRes = R.dimen.thumbnails_size;
        if (thumbnailStyle == ThumbnailStyle.LEFT_SMALL || thumbnailStyle == ThumbnailStyle.RIGHT_SMALL) {
            sizeRes = R.dimen.thumbnails_small_size;
        }
        int sizeDp = context.getResources().getDimensionPixelSize(sizeRes);

        RelativeLayout.LayoutParams params = null;
        if ((thumbnailStyle == ThumbnailStyle.LEFT_LARGE || thumbnailStyle == ThumbnailStyle.LEFT_SMALL) && vh.thumbViewLeft != null) {
            vh.thumbViewLeft.setThumbnailStyle(thumbnailStyle);
            params = (RelativeLayout.LayoutParams) vh.thumbViewLeft.getLayoutParams();
        } else if ((thumbnailStyle == ThumbnailStyle.RIGHT_LARGE || thumbnailStyle == ThumbnailStyle.RIGHT_SMALL) && vh.thumbViewRight != null) {
            vh.thumbViewRight.setThumbnailStyle(thumbnailStyle);
            params = (RelativeLayout.LayoutParams) vh.thumbViewRight.getLayoutParams();
        }

        if (params != null && params.width != sizeDp) {
            params.width = sizeDp;
        }
        if (params != null && (thumbnailStyle == ThumbnailStyle.RIGHT_SMALL || thumbnailStyle == ThumbnailStyle.LEFT_SMALL)) {
            // small thumbs get margins and stretch down to the snippet's bottom edge
            int verticalMargin = singleFeed ? UIUtils.dp2px(context, 10) : UIUtils.dp2px(context, 5);
            params.setMargins(UIUtils.dp2px(context, 8), verticalMargin, 0, verticalMargin);
            params.addRule(RelativeLayout.ALIGN_BOTTOM, vh.storySnippet.getId());
        } else if (params != null) {
            params.setMargins(0, 0, 0, 0);
            params.removeRule(RelativeLayout.ALIGN_BOTTOM);
            params.height = sizeDp;
        }

        // read/unread styling for row-only views
        if (this.ignoreReadStatus || (!story.read)) {
            vh.storyAuthor.setAlpha(1.0f);
            vh.storySnippet.setAlpha(1.0f);
            vh.storyTitleView.setTypeface(vh.storyTitleView.getTypeface(), Typeface.BOLD);
        } else {
            vh.storyAuthor.setAlpha(READ_STORY_ALPHA);
            vh.storySnippet.setAlpha(READ_STORY_ALPHA);
            vh.storyTitleView.setTypeface(vh.storyTitleView.getTypeface(), Typeface.NORMAL);
        }
    }

    /** Holder for footer slots; real footer views are re-parented into innerView on bind. */
    public static class FooterViewHolder extends RecyclerView.ViewHolder {
        FrameLayout innerView;
        public FooterViewHolder(View view) {
            super(view);
            innerView = view.findViewById(R.id.footer_view_inner);
        }
    }

    @Override
    public void onViewRecycled(@NonNull RecyclerView.ViewHolder viewHolder) {
        if (viewHolder instanceof StoryViewHolder) {
            StoryViewHolder vh = (StoryViewHolder) viewHolder;
            // cancel any in-flight thumbnail load for the recycled view
            if (vh.thumbLoader != null) vh.thumbLoader.cancel = true;
        }
        if (viewHolder instanceof FooterViewHolder) {
            FooterViewHolder vh = (FooterViewHolder) viewHolder;
            vh.innerView.removeAllViews();
        }
    }

    /**
     * Detects horizontal swipe gestures on a story view, setting the holder's
     * gestureL2R/gestureR2L flags for flushGesture() to act on.
     */
    class StoryViewGestureDetector extends GestureDetector.SimpleOnGestureListener {
        private StoryViewHolder vh;
        public StoryViewGestureDetector(StoryViewHolder vh) {
            StoryViewGestureDetector.this.vh = vh;
        }
        @Override
        public boolean onScroll(MotionEvent e1, MotionEvent e2, float distanceX, float distanceY) {
            float displayWidthPx = UIUtils.getDisplayWidthPx(context);
            // keep a dead zone near screen edges so system navigation gestures aren't misread
            float edgeWithNavGesturesPaddingPx = UIUtils.dp2px(context, 40);
            float rightEdgeNavGesturePaddingPx = displayWidthPx - edgeWithNavGesturesPaddingPx;
            if (e1.getX() > edgeWithNavGesturesPaddingPx && // the gesture should not start too close to the left edge and
                e2.getX() - e1.getX() > 50f && // move horizontally to the right and
Math.abs(distanceY) < 25f) { // have minimal vertical travel, so we don't capture scrolling gestures vh.gestureL2R = true; vh.gestureDebounce = true; return true; } if (e1.getX() < rightEdgeNavGesturePaddingPx && // the gesture should not start too close to the right edge and e1.getX() - e2.getX() > 50f && // move horizontally to the left and Math.abs(distanceY) < 25f) { // have minimal vertical travel, so we don't capture scrolling gestures vh.gestureR2L = true; vh.gestureDebounce = true; return true; } return false; } } public void notifyAllItemsChanged() { notifyItemRangeChanged(0, getItemCount()); } }
package com.wordpress.tipsforjava.swing;

import java.awt.*;

/**
 * FlowLayout subclass that fully supports wrapping of components.
 * http://tips4java.wordpress.com/2008/11/06/wrap-layout/
 *
 * @author Rob Camick
 * @author Darryl Burke
 */
public class WrapLayout extends FlowLayout {
    // Preferred size computed on the previous layout pass; layoutContainer()
    // compares against it to decide whether the ancestor hierarchy must be revalidated.
    private Dimension preferredLayoutSize;

    /**
     * Constructs a new <code>WrapLayout</code> with a left
     * alignment and a default 5-unit horizontal and vertical gap.
     */
    public WrapLayout() {
        super();
    }

    /**
     * Constructs a new <code>WrapLayout</code> with the specified
     * alignment and a default 5-unit horizontal and vertical gap.
     * The value of the alignment argument must be one of
     * <code>FlowLayout.LEFT</code>, <code>FlowLayout.CENTER</code>,
     * or <code>FlowLayout.RIGHT</code>.
     * @param align the alignment value
     */
    public WrapLayout(int align) {
        super(align);
    }

    /**
     * Creates a new flow layout manager with the indicated alignment
     * and the indicated horizontal and vertical gaps.
     * <p>
     * The value of the alignment argument must be one of
     * <code>FlowLayout.LEFT</code>, <code>FlowLayout.CENTER</code>,
     * or <code>FlowLayout.RIGHT</code>.
     * @param align the alignment value
     * @param hgap the horizontal gap between components
     * @param vgap the vertical gap between components
     */
    public WrapLayout(int align, int hgap, int vgap) {
        super(align, hgap, vgap);
    }

    /**
     * Returns the preferred dimensions for this layout given the
     * <i>visible</i> components in the specified target container.
     * @param target the component which needs to be laid out
     * @return the preferred dimensions to lay out the
     * subcomponents of the specified container
     */
    @Override
    public Dimension preferredLayoutSize(Container target) {
        return layoutSize(target, true);
    }

    /**
     * Returns the minimum dimensions needed to layout the <i>visible</i>
     * components contained in the specified target container.
     * @param target the component which needs to be laid out
     * @return the minimum dimensions to lay out the
     * subcomponents of the specified container
     */
    @Override
    public Dimension minimumLayoutSize(Container target) {
        return layoutSize(target, false);
    }

    /**
     * Returns the minimum or preferred dimension needed to layout the target
     * container, wrapping components onto new rows whenever a row would
     * exceed the container's current width.
     *
     * @param target target to get layout size for
     * @param preferred should preferred size be calculated
     * @return the dimension to layout the target container
     */
    private Dimension layoutSize(Container target, boolean preferred) {
        synchronized (target.getTreeLock()) {
            // Each row must fit with the width allocated to the container.
            // When the container width = 0, the preferred width of the container
            // has not yet been calculated so let's ask for the maximum
            // (i.e. behave like plain FlowLayout: one unbounded row).
            int targetWidth = target.getSize().width;

            if (targetWidth == 0)
                targetWidth = Integer.MAX_VALUE;

            int hgap = getHgap();
            int vgap = getVgap();
            Insets insets = target.getInsets();
            int horizontalInsetsAndGap = insets.left + insets.right + (hgap * 2);
            int maxWidth = targetWidth - horizontalInsetsAndGap;

            // Fit components into the allowed width

            Dimension dim = new Dimension(0, 0);
            int rowWidth = 0;
            int rowHeight = 0;

            int nmembers = target.getComponentCount();

            for (int i = 0; i < nmembers; i++) {
                Component m = target.getComponent(i);

                if (m.isVisible()) {
                    Dimension d = preferred ? m.getPreferredSize() : m.getMinimumSize();

                    // Can't add the component to current row. Start a new row.

                    if (rowWidth + d.width > maxWidth) {
                        addRow(dim, rowWidth, rowHeight);
                        rowWidth = 0;
                        rowHeight = 0;
                    }

                    // Add a horizontal gap for all components after the first

                    if (rowWidth != 0) {
                        rowWidth += hgap;
                    }

                    rowWidth += d.width;
                    rowHeight = Math.max(rowHeight, d.height);
                }
            }

            // Flush the final (possibly empty) row into the accumulated size.
            addRow(dim, rowWidth, rowHeight);

            dim.width += horizontalInsetsAndGap;
            dim.height += insets.top + insets.bottom + vgap * 2;

            // When using a scroll pane or the DecoratedLookAndFeel we need to
            // make sure the preferred size is less than the size of the
            // target container so shrinking the container size works
            // correctly. Removing the horizontal gap is an easy way to do this.

            dim.width -= (hgap + 1);

            return dim;
        }
    }

    /**
     * Layout the components in the Container using the layout logic of the
     * parent FlowLayout class.
     *
     * @param target the Container using this WrapLayout
     */
    @Override
    public void layoutContainer(Container target) {
        Dimension size = preferredLayoutSize(target);

        // When a frame is minimized or maximized the preferred size of the
        // Container is assumed not to change. Therefore we need to force a
        // validate() to make sure that space, if available, is allocated to
        // the panel using a WrapLayout.

        if (size.equals(preferredLayoutSize)) {
            super.layoutContainer(target);
        } else {
            preferredLayoutSize = size;
            // Walk to the top-level ancestor and validate the whole hierarchy
            // so the changed preferred size propagates upward.
            Container top = target;

            while (top.getParent() != null) {
                top = top.getParent();
            }

            top.validate();
        }
    }

    /**
     * A new row has been completed. Use the dimensions of this row
     * to update the preferred size for the container.
     *
     * @param dim update the width and height when appropriate
     * @param rowWidth the width of the row to add
     * @param rowHeight the height of the row to add
     */
    private void addRow(Dimension dim, int rowWidth, int rowHeight) {
        dim.width = Math.max(dim.width, rowWidth);

        // Rows after the first are separated by a vertical gap.
        if (dim.height > 0) {
            dim.height += getVgap();
        }

        dim.height += rowHeight;
    }
}
/*******************************************************************************
 * Copyright 2011 See AUTHORS file.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 ******************************************************************************/

package com.badlogic.gdx.utils;

import java.util.NoSuchElementException;

/** An {@link ObjectMap} that also stores keys in an {@link Array} using the insertion order. Iteration over the
 * {@link #entries()}, {@link #keys()}, and {@link #values()} is ordered and faster than an unordered map. Keys can also be
 * accessed and the order changed using {@link #orderedKeys()}. There is some additional overhead for put and remove. When used
 * for faster iteration versus ObjectMap and the order does not actually matter, copying during remove can be greatly reduced by
 * setting {@link Array#ordered} to false for {@link OrderedMap#orderedKeys()}.
 * @author Nathan Sweet */
public class OrderedMap<K, V> extends ObjectMap<K, V> {
	/** Keys in insertion order; kept in lockstep with the hash storage in the superclass. */
	final Array<K> keys;

	public OrderedMap () {
		keys = new Array();
	}

	public OrderedMap (int initialCapacity) {
		super(initialCapacity);
		keys = new Array(capacity);
	}

	public OrderedMap (int initialCapacity, float loadFactor) {
		super(initialCapacity, loadFactor);
		keys = new Array(capacity);
	}

	public OrderedMap (OrderedMap<? extends K, ? extends V> map) {
		super(map);
		keys = new Array(map.keys);
	}

	/** Puts the key/value pair; a key not already present is appended to the ordered key list. */
	public V put (K key, V value) {
		if (!containsKey(key)) keys.add(key);
		return super.put(key, value);
	}

	/** Removes the key from both the hash storage and the ordered key list. */
	public V remove (K key) {
		keys.removeValue(key, false);
		return super.remove(key);
	}

	/** Removes and returns the value for the key at the given insertion-order index. */
	public V removeIndex (int index) {
		return super.remove(keys.removeIndex(index));
	}

	public void clear (int maximumCapacity) {
		keys.clear();
		super.clear(maximumCapacity);
	}

	public void clear () {
		keys.clear();
		super.clear();
	}

	/** Returns the backing key array; modifying its order changes iteration order. */
	public Array<K> orderedKeys () {
		return keys;
	}

	public Entries<K, V> iterator () {
		return entries();
	}

	/** Returns an iterator for the entries in the map. Remove is supported.
	 * <p>
	 * If {@link Collections#allocateIterators} is false, the same iterator instance is returned each time this method is called. Use the
	 * {@link OrderedMapEntries} constructor for nested or multithreaded iteration. */
	public Entries<K, V> entries () {
		if (Collections.allocateIterators) return new Entries(this);
		if (entries1 == null) {
			entries1 = new OrderedMapEntries(this);
			entries2 = new OrderedMapEntries(this);
		}
		// Two cached instances are alternated so one level of nested iteration is detectable.
		if (!entries1.valid) {
			entries1.reset();
			entries1.valid = true;
			entries2.valid = false;
			return entries1;
		}
		entries2.reset();
		entries2.valid = true;
		entries1.valid = false;
		return entries2;
	}

	/** Returns an iterator for the values in the map. Remove is supported.
	 * <p>
	 * If {@link Collections#allocateIterators} is false, the same iterator instance is returned each time this method is called. Use the
	 * {@link OrderedMapValues} constructor for nested or multithreaded iteration. */
	public Values<V> values () {
		if (Collections.allocateIterators) return new Values(this);
		if (values1 == null) {
			values1 = new OrderedMapValues(this);
			values2 = new OrderedMapValues(this);
		}
		if (!values1.valid) {
			values1.reset();
			values1.valid = true;
			values2.valid = false;
			return values1;
		}
		values2.reset();
		values2.valid = true;
		values1.valid = false;
		return values2;
	}

	/** Returns an iterator for the keys in the map. Remove is supported.
	 * <p>
	 * If {@link Collections#allocateIterators} is false, the same iterator instance is returned each time this method is called. Use the
	 * {@link OrderedMapKeys} constructor for nested or multithreaded iteration. */
	public Keys<K> keys () {
		if (Collections.allocateIterators) return new Keys(this);
		if (keys1 == null) {
			keys1 = new OrderedMapKeys(this);
			keys2 = new OrderedMapKeys(this);
		}
		if (!keys1.valid) {
			keys1.reset();
			keys1.valid = true;
			keys2.valid = false;
			return keys1;
		}
		keys2.reset();
		keys2.valid = true;
		keys1.valid = false;
		return keys2;
	}

	public String toString () {
		if (size == 0) return "{}";
		StringBuilder buffer = new StringBuilder(32);
		buffer.append('{');
		Array<K> keys = this.keys;
		for (int i = 0, n = keys.size; i < n; i++) {
			K key = keys.get(i);
			if (i > 0) buffer.append(", ");
			buffer.append(key);
			buffer.append('=');
			buffer.append(get(key));
		}
		buffer.append('}');
		return buffer.toString();
	}

	static public class OrderedMapEntries<K, V> extends Entries<K, V> {
		private Array<K> keys;

		public OrderedMapEntries (OrderedMap<K, V> map) {
			super(map);
			keys = map.keys;
		}

		public void reset () {
			// FIX: clear currentIndex so remove() cannot act on a stale position from a prior iteration.
			currentIndex = -1;
			nextIndex = 0;
			hasNext = map.size > 0;
		}

		public Entry next () {
			if (!hasNext) throw new NoSuchElementException();
			if (!valid) throw new GdxRuntimeException("#iterator() cannot be used nested.");
			// FIX: record the position being returned so remove() is usable, matching
			// OrderedMapKeys/OrderedMapValues. Previously currentIndex was never set here,
			// so remove() always threw IllegalStateException.
			currentIndex = nextIndex;
			entry.key = keys.get(nextIndex);
			entry.value = map.get(entry.key);
			nextIndex++;
			hasNext = nextIndex < map.size;
			return entry;
		}

		public void remove () {
			if (currentIndex < 0) throw new IllegalStateException("next must be called before remove.");
			map.remove(entry.key);
			nextIndex--;
			// FIX: invalidate the position so a second remove() without next() fails,
			// consistent with the other ordered iterators.
			currentIndex = -1;
		}
	}

	static public class OrderedMapKeys<K> extends Keys<K> {
		private Array<K> keys;

		public OrderedMapKeys (OrderedMap<K, ?> map) {
			super(map);
			keys = map.keys;
		}

		public void reset () {
			// FIX: clear currentIndex so remove() cannot act on a stale position from a prior iteration.
			currentIndex = -1;
			nextIndex = 0;
			hasNext = map.size > 0;
		}

		public K next () {
			if (!hasNext) throw new NoSuchElementException();
			if (!valid) throw new GdxRuntimeException("#iterator() cannot be used nested.");
			K key = keys.get(nextIndex);
			currentIndex = nextIndex;
			nextIndex++;
			hasNext = nextIndex < map.size;
			return key;
		}

		public void remove () {
			if (currentIndex < 0) throw new IllegalStateException("next must be called before remove.");
			((OrderedMap)map).removeIndex(nextIndex - 1);
			nextIndex = currentIndex;
			currentIndex = -1;
		}
	}

	static public class OrderedMapValues<V> extends Values<V> {
		private Array keys;

		public OrderedMapValues (OrderedMap<?, V> map) {
			super(map);
			keys = map.keys;
		}

		public void reset () {
			// FIX: clear currentIndex so remove() cannot act on a stale position from a prior iteration.
			currentIndex = -1;
			nextIndex = 0;
			hasNext = map.size > 0;
		}

		public V next () {
			if (!hasNext) throw new NoSuchElementException();
			if (!valid) throw new GdxRuntimeException("#iterator() cannot be used nested.");
			V value = (V)map.get(keys.get(nextIndex));
			currentIndex = nextIndex;
			nextIndex++;
			hasNext = nextIndex < map.size;
			return value;
		}

		public void remove () {
			if (currentIndex < 0) throw new IllegalStateException("next must be called before remove.");
			((OrderedMap)map).removeIndex(currentIndex);
			nextIndex = currentIndex;
			currentIndex = -1;
		}
	}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.processors.cache.persistence.db;

import java.util.Arrays;
import java.util.List;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.DataRegionConfiguration;
import org.apache.ignite.configuration.DataStorageConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.configuration.WALMode;
import org.apache.ignite.internal.IgniteEx;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import org.junit.Test;

import static org.apache.ignite.cache.CacheWriteSynchronizationMode.FULL_SYNC;

/**
 * Verifies that a node restarted over an existing persistence directory restores its caches,
 * and that caches added while the node was down (either dynamically or via restart-time
 * configuration) work alongside the restored ones.
 */
public class IgnitePdsCacheRestoreTest extends GridCommonAbstractTest {
    /** Non-persistent data region name. */
    private static final String NO_PERSISTENCE_REGION = "no-persistence-region";

    /** Cache configurations applied to the next started node, then cleared (one-shot). */
    private CacheConfiguration[] ccfgs;

    /** {@inheritDoc} */
    @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
        IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName);

        if (ccfgs != null) {
            cfg.setCacheConfiguration(ccfgs);

            // One-shot: the next grid started after this one gets no explicit cache configs.
            ccfgs = null;
        }

        long regionMaxSize = 20L * 1024 * 1024;

        // Persistent default region plus a separate in-memory region for cache "c3".
        DataStorageConfiguration memCfg = new DataStorageConfiguration()
            .setDefaultDataRegionConfiguration(
                new DataRegionConfiguration().setMaxSize(regionMaxSize).setPersistenceEnabled(true))
            .setWalMode(WALMode.LOG_ONLY);

        memCfg.setDataRegionConfigurations(new DataRegionConfiguration()
            .setMaxSize(regionMaxSize)
            .setName(NO_PERSISTENCE_REGION)
            .setPersistenceEnabled(false));

        cfg.setDataStorageConfiguration(memCfg);

        return cfg;
    }

    /** {@inheritDoc} */
    @Override protected void beforeTest() throws Exception {
        super.beforeTest();

        cleanPersistenceDir();
    }

    /** {@inheritDoc} */
    @Override protected void afterTest() throws Exception {
        stopAllGrids();

        cleanPersistenceDir();

        super.afterTest();
    }

    /**
     * New caches are supplied via restart-time node configuration.
     *
     * @throws Exception If failed.
     */
    @Test
    public void testRestoreAndNewCache1() throws Exception {
        restoreAndNewCache(false);
    }

    /**
     * New caches are created dynamically while the node is stopped.
     *
     * @throws Exception If failed.
     */
    @Test
    public void testRestoreAndNewCache2() throws Exception {
        restoreAndNewCache(true);
    }

    /**
     * @param createNew If {@code true} need cache is added while node is stopped.
     * @throws Exception If failed.
     */
    private void restoreAndNewCache(boolean createNew) throws Exception {
        // Start a 3-node cluster where each node carries cache "c1".
        for (int i = 0; i < 3; i++) {
            ccfgs = configurations1();

            startGrid(i);
        }

        ignite(0).active(true);

        IgniteCache<Object, Object> cache1 = ignite(2).cache("c1");

        List<Integer> keys = primaryKeys(cache1, 10);

        for (Integer key : keys)
            cache1.put(key, key);

        stopGrid(2);

        if (createNew) {
            // New cache is added when node is stopped.
            ignite(0).getOrCreateCaches(Arrays.asList(configurations2()));
        }
        else {
            // New cache is added on node restart.
            ccfgs = configurations2();
        }

        IgniteEx g2 = startGrid(2);

        // The restarted node may have lost partitions of the non-persistent cache.
        g2.resetLostPartitions(Arrays.asList("c1", "c2", "c3"));

        cache1 = ignite(2).cache("c1");

        IgniteCache<Object, Object> cache2 = ignite(2).cache("c2");

        IgniteCache<Object, Object> cache3 = ignite(2).cache("c3");

        // Persistent data must survive the restart; the new caches start empty.
        for (Integer key : keys) {
            assertEquals(key, cache1.get(key));

            assertNull(cache2.get(key));

            assertNull(cache3.get(key));

            cache2.put(key, key);

            assertEquals(key, cache2.get(key));

            cache3.put(key, key);

            assertEquals(key, cache3.get(key));
        }

        List<Integer> nearKeys = nearKeys(cache1, 10, 0);

        // Keys not previously written are absent everywhere and writable in all caches.
        for (Integer key : nearKeys) {
            assertNull(cache1.get(key));
            assertNull(cache2.get(key));
            assertNull(cache3.get(key));

            cache3.put(key, key);
            assertEquals(key, cache3.get(key));

            cache2.put(key, key);
            assertEquals(key, cache2.get(key));

            cache1.put(key, key);
            assertEquals(key, cache1.get(key));
        }

        // A newly joined node must see all data after rebalancing.
        startGrid(3);

        awaitPartitionMapExchange();

        for (Integer key : nearKeys) {
            assertEquals(key, cache3.get(key));

            assertEquals(key, cache2.get(key));

            assertEquals(key, cache1.get(key));
        }
    }

    /**
     * @return Configurations set 1.
     */
    private CacheConfiguration[] configurations1() {
        CacheConfiguration[] ccfgs = new CacheConfiguration[1];

        ccfgs[0] = cacheConfiguration("c1");

        return ccfgs;
    }

    /**
     * @return Configurations set 2.
     */
    private CacheConfiguration[] configurations2() {
        CacheConfiguration[] ccfgs = new CacheConfiguration[3];

        ccfgs[0] = cacheConfiguration("c1");
        ccfgs[1] = cacheConfiguration("c2");
        ccfgs[2] = cacheConfiguration("c3");

        // "c3" lives in the non-persistent region; page compression does not apply there.
        ccfgs[2].setDataRegionName(NO_PERSISTENCE_REGION);
        ccfgs[2].setDiskPageCompression(null);

        return ccfgs;
    }

    /**
     * @param name Cache name.
     * @return Cache configuration.
     */
    private CacheConfiguration cacheConfiguration(String name) {
        CacheConfiguration ccfg = new CacheConfiguration(name);

        ccfg.setWriteSynchronizationMode(FULL_SYNC);

        return ccfg;
    }
}
package com.bazaarvoice.emodb.sor.audit.s3;

import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.auth.DefaultAWSCredentialsProviderChain;
import com.amazonaws.internal.StaticCredentialsProvider;
import com.amazonaws.regions.Regions;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3Client;
import com.amazonaws.services.s3.model.PutObjectResult;
import com.bazaarvoice.emodb.common.dropwizard.log.RateLimitedLog;
import com.bazaarvoice.emodb.common.dropwizard.log.RateLimitedLogFactory;
import com.bazaarvoice.emodb.sor.api.Audit;
import com.bazaarvoice.emodb.sor.audit.AuditFlusher;
import com.bazaarvoice.emodb.sor.audit.AuditStore;
import com.bazaarvoice.emodb.sor.audit.AuditWriter;
import com.bazaarvoice.emodb.sor.audit.AuditWriterConfiguration;
import com.codahale.metrics.Gauge;
import com.codahale.metrics.MetricRegistry;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectWriter;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Maps;
import com.google.common.io.ByteStreams;
import com.google.common.io.CountingOutputStream;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import com.google.inject.Inject;
import io.dropwizard.util.Size;
import org.apache.commons.compress.compressors.gzip.GzipCompressorOutputStream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.*;
import java.nio.file.Files;
import java.time.Clock;
import java.time.Duration;
import java.time.Instant;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import java.util.UUID;
import java.util.concurrent.*;
import java.util.concurrent.locks.ReentrantLock;

import static com.google.common.base.Preconditions.checkArgument;
import static java.util.Objects.requireNonNull;

/**
 * Audit writer implementation which writes all audits as GZIP'd jsonl documents to S3 partitioned by date.
 * This format allows audit queries to be carried out by Athena, Amazon's Presto implementation over S3 documents.
 *
 * This audit writer favors fast, non-blocking calls to {@link #persist(String, String, Audit, long)} over guaranteeing
 * a completely loss-less audit history.  To achieve this all audits are written to an in-memory queue.  That queue
 * is written to a local log file until it has reached a maximum size or age, both configurable in the constructor.
 * At this time the file is asynchronously GZIP'd then delivered to S3.  Once the file is delivered it is deleted from
 * the local host.
 *
 * Each stage has multiple layers of recovery, ensuring that once a line is written to a file that file will eventually
 * be delivered to S3.  The exceptions to this which can cause audit loss are:
 *
 * <ol>
 *     <li>The process is terminated while unwritten audits are still in the audit queue.</li>
 *     <li>The host itself terminates before all files are delivered to S3.</li>
 * </ol>
 */
public class AthenaAuditWriter implements AuditStore {

    private final static Logger _log = LoggerFactory.getLogger(AthenaAuditWriter.class);

    // Timestamp embedded in staging file names; UTC so S3 date partitions are host-timezone independent.
    private final static DateTimeFormatter LOG_FILE_DATE_FORMATTER = DateTimeFormatter.ofPattern("yyyyMMddHHmmss").withZone(ZoneOffset.UTC);

    // Staging-file lifecycle suffixes: open for writing -> closed -> gzipped and ready for S3 transfer.
    private final static String OPEN_FILE_SUFFIX = ".log.tmp";
    private final static String CLOSED_FILE_SUFFIX = ".log";
    private final static String COMPRESSED_FILE_SUFFIX = ".log.gz";

    private final static long DEFAULT_MAX_FILE_SIZE = Size.megabytes(10).toBytes();
    private final static Duration DEFAULT_MAX_BATCH_TIME = Duration.ofMinutes(2);

    private final AmazonS3 _s3;
    private final String _s3Bucket;
    private final String _s3AuditRoot;
    private final long _maxFileSize;
    private final long _maxBatchTimeMs;
    private final File _stagingDir;
    private final String _logFilePrefix;
    private final BlockingQueue<QueuedAudit> _auditQueue;
    private final Clock _clock;
    private final ObjectWriter _objectWriter;
    // One AuditOutput per active batch window, keyed by batch start time (ms).
    private final ConcurrentMap<Long, AuditOutput> _openAuditOutputs = Maps.newConcurrentMap();

    private ScheduledExecutorService _auditService;
    private ExecutorService _fileTransferService;
    private AuditOutput _mruAuditOutput;
    private boolean _fileTransfersEnabled;
    private final RateLimitedLog _rateLimitedLog;

    @Inject
    public AthenaAuditWriter(AuditWriterConfiguration config, ObjectMapper objectMapper, Clock clock,
                             RateLimitedLogFactory rateLimitedLogFactory, MetricRegistry metricRegistry) {
        // Delegate to the testing constructor; null executors mean "create and manage internally".
        this(getAmazonS3Client(config), config.getLogBucket(), config.getLogPath(),
                config.getMaxFileSize(), Duration.ofMillis(config.getMaxBatchTime().toMillis()),
                config.getStagingDir() != null ? new File(config.getStagingDir()) : com.google.common.io.Files.createTempDir(),
                config.getLogFilePrefix(), objectMapper, clock, config.isFileTransfersEnabled(), rateLimitedLogFactory,
                metricRegistry, null, null);
    }

    @VisibleForTesting
    AthenaAuditWriter(AmazonS3 s3, String s3Bucket, String s3Path, long maxFileSize, Duration maxBatchTime,
                      File stagingDir, String logFilePrefix, ObjectMapper objectMapper, Clock clock,
                      boolean fileTransfersEnabled, RateLimitedLogFactory rateLimitedLogFactory,
                      MetricRegistry metricRegistry, ScheduledExecutorService auditService,
                      ExecutorService fileTransferService) {
        _s3 = requireNonNull(s3);
        _s3Bucket = requireNonNull(s3Bucket);

        // Normalize the S3 root path to have neither a leading nor trailing slash.
        String s3AuditRoot = s3Path;
        if (s3AuditRoot.startsWith("/")) {
            s3AuditRoot = s3AuditRoot.substring(1);
        }
        if (s3AuditRoot.endsWith("/")) {
            s3AuditRoot = s3AuditRoot.substring(0, s3AuditRoot.length()-1);
        }
        _s3AuditRoot = s3AuditRoot;

        checkArgument(stagingDir.exists(), "Staging directory must exist");

        // Non-positive / null settings fall back to the defaults declared above.
        _maxFileSize = maxFileSize > 0 ? maxFileSize : DEFAULT_MAX_FILE_SIZE;
        _maxBatchTimeMs = (maxBatchTime != null && maxBatchTime.compareTo(Duration.ZERO) > 0 ? maxBatchTime : DEFAULT_MAX_BATCH_TIME).toMillis();
        _stagingDir = requireNonNull(stagingDir, "stagingDir");
        _logFilePrefix = requireNonNull(logFilePrefix, "logFilePrefix");
        _clock = requireNonNull(clock, "clock");

        // Audit queue isn't completely unbounded but is large enough to ensure at several times the normal write rate
        // it can accept audits without blocking.
        _auditQueue = new ArrayBlockingQueue<>(4096);

        // Need to ensure the object mapper keeps the file stream open after each audit is written.
        _objectWriter = objectMapper.copy()
                .configure(JsonGenerator.Feature.AUTO_CLOSE_TARGET, false)
                .writer();

        _fileTransfersEnabled = fileTransfersEnabled;

        // Two threads for the audit service: once to drain queued audits and one to close audit logs files and submit
        // them for transfer.  Normally these are initially null and locally managed, but unit tests may provide
        // pre-configured instances.
        _auditService = auditService != null ? auditService : Executors.newScheduledThreadPool(2, new ThreadFactoryBuilder().setNameFormat("audit-log-%d").build());
        _fileTransferService = fileTransferService != null ? fileTransferService : Executors.newSingleThreadExecutor(new ThreadFactoryBuilder().setNameFormat("audit-transfer-%d").build());

        // Align the first maintenance run to the next batch-window boundary.
        long now = _clock.millis();
        long msToNextBatch = _maxBatchTimeMs - (now % _maxBatchTimeMs);

        // Do a one-time closing of all orphaned log files which may have been left behind if a previous EmoDB process
        // terminated before closing them.
        for (final File logFile : _stagingDir.listFiles((dir, name) -> name.startsWith(_logFilePrefix) && name.endsWith(OPEN_FILE_SUFFIX))) {
            if (logFile.length() > 0) {
                try {
                    renameClosedLogFile(logFile);
                } catch (IOException e) {
                    _log.warn("Failed to close orphaned audit log file: {}", logFile, e);
                }
            } else {
                // Empty orphan: nothing to deliver, just clean it up.
                if (!logFile.delete()) {
                    _log.debug("Failed to delete empty orphaned log file: {}", logFile);
                }
            }
        }

        _rateLimitedLog = requireNonNull(rateLimitedLogFactory.from(_log));

        // Guage metric to measure the size of the audit queue
        metricRegistry.register(MetricRegistry.name(AthenaAuditWriter.class, "auditQueue", "size"), (Gauge<Integer>) _auditQueue::size);

        _auditService.scheduleWithFixedDelay(() -> processQueuedAudits(true), 0, 1, TimeUnit.SECONDS);
        _auditService.scheduleAtFixedRate(this::doLogFileMaintenance, msToNextBatch, _maxBatchTimeMs, TimeUnit.MILLISECONDS);
    }

    /**
     * Builds the S3 client from configuration: explicit access/secret keys when provided,
     * otherwise the default AWS credential chain; optionally overrides the endpoint.
     */
    private static AmazonS3 getAmazonS3Client(AuditWriterConfiguration configuration) {
        AWSCredentialsProvider credentialsProvider;
        if (configuration.getS3AccessKey() != null && configuration.getS3SecretKey() != null) {
            credentialsProvider = new StaticCredentialsProvider(
                    new BasicAWSCredentials(configuration.getS3AccessKey(), configuration.getS3SecretKey()));
        } else {
            credentialsProvider = new DefaultAWSCredentialsProviderChain();
        }

        AmazonS3 s3 = new AmazonS3Client(credentialsProvider)
                .withRegion(Regions.fromName(configuration.getLogBucketRegion()));

        if (configuration.getS3Endpoint() != null) {
            s3.setEndpoint(configuration.getS3Endpoint());
        }

        return s3;
    }

    /**
     * Best-effort orderly shutdown: stop the audit service, drain any remaining queued
     * audits, close/compress/transfer all staged files, then stop the transfer service.
     */
    @Override
    public void flushAndShutdown() {
        _auditService.shutdown();
        try {
            if (_auditService.awaitTermination(5, TimeUnit.SECONDS)) {
                // Poll the queue one last time and drain anything that is still remaining
                processQueuedAudits(false);
            } else {
                _log.warn("Audit service did not shutdown cleanly.");
                _auditService.shutdownNow();
            }
        } catch (InterruptedException e) {
            throw new RuntimeException(e);
        }

        // Close all log files and prepare them for transfer.
        closeCompleteLogFiles(true);
        prepareClosedLogFilesForTransfer();
        transferLogFilesToS3();

        _fileTransferService.shutdown();
        try {
            if (_fileTransferService.awaitTermination(30, TimeUnit.SECONDS)) {
                _log.info("All audits were successfully persisted prior to shutdown");
            } else {
                _log.warn("All audits could not be persisted prior to shutdown");
            }
        } catch (InterruptedException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Enqueues an audit for asynchronous persistence. Blocks only if the bounded
     * queue is full; an interrupt while blocked drops the audit with a warning.
     */
    @Override
    public void persist(String table, String key, Audit audit, long auditTime) {
        try {
            _auditQueue.put(new QueuedAudit(table, key, audit, auditTime));
        } catch (InterruptedException e) {
            // Don't error out if the audit was interrupted since this implementation does not guarantee 100%
            // audit retention, just warn that it happened.
            _log.warn("Interrupted attempting to write audit for {}/{}", table, key);
        }
    }

    /**
     * This method is run at regular intervals to close log files, gzip them and initiate their transfer to S3.
     */
    private void doLogFileMaintenance() {
        // Close all files that have either exceeded their maximum size or age but have not closed due to lack of
        // audit activity.
        closeCompleteLogFiles(false);
        prepareClosedLogFilesForTransfer();
        transferLogFilesToS3();
    }

    /**
     * Closes every open batch output that is due (size or age), or all of them when
     * {@code forceClose} is set (shutdown path).
     */
    private void closeCompleteLogFiles(boolean forceClose) {
        // Snapshot the values to avoid iterating the live concurrent map while outputs close/remove themselves.
        for (AuditOutput auditOutput : ImmutableList.copyOf(_openAuditOutputs.values())) {
            if (forceClose || auditOutput.shouldClose()) {
                auditOutput.close();
            }
        }
    }

    /**
     * This method takes all closed log files and GZIPs and renames them in preparation for transfer.  If the operation
     * fails the original file is unmodified so the next call should attempt to prepare the file again.  This means
     * the same file may be transferred more than once, but this guarantees that so long as the host remains active the
     * file will eventually be transferred.
     */
    private void prepareClosedLogFilesForTransfer() {
        for (final File logFile : _stagingDir.listFiles((dir, name) -> name.startsWith(_logFilePrefix) && name.endsWith(CLOSED_FILE_SUFFIX))) {
            boolean moved;
            // Same base name with the compressed suffix, written alongside the source file.
            String fileName = logFile.getName().substring(0, logFile.getName().length() - CLOSED_FILE_SUFFIX.length()) + COMPRESSED_FILE_SUFFIX;
            try (FileInputStream fileIn = new FileInputStream(logFile);
                 FileOutputStream fileOut = new FileOutputStream(new File(logFile.getParentFile(), fileName));
                 GzipCompressorOutputStream gzipOut = new GzipCompressorOutputStream(fileOut)) {

                ByteStreams.copy(fileIn, gzipOut);
                moved = true;
            } catch (IOException e) {
                _log.warn("Failed to compress audit log file: {}", logFile, e);
                moved = false;
            }

            if (moved) {
                // Only remove the uncompressed source once compression fully succeeded.
                if (!logFile.delete()) {
                    _log.warn("Failed to delete audit log file: {}", logFile);
                }
            }
        }
    }

    /**
     * This method takes all log files prepared by {@link #prepareClosedLogFilesForTransfer()} and initiates their
     * transfer to S3.  The transfer itself is performed asynchronously.
     */
    private void transferLogFilesToS3() {
        if (_fileTransfersEnabled) {
            // Find all closed log files in the staging directory and move them to S3
            for (final File logFile : _stagingDir.listFiles((dir, name) -> name.startsWith(_logFilePrefix) && name.endsWith(COMPRESSED_FILE_SUFFIX))) {
                // Extract the date portion of the file name and is it to partition the file in S3
                // NOTE(review): assumes file names are "<prefix>.<yyyyMMdd...>" so the 8 chars after
                // the prefix and separator are the date — confirm against the file-naming code.
                String auditDate = logFile.getName().substring(_logFilePrefix.length() + 1, _logFilePrefix.length() + 9);

                String dest = String.format("%s/date=%s/%s", _s3AuditRoot, auditDate, logFile.getName());

                _fileTransferService.submit(() -> {
                    // Since file transfers are done in a single thread, there shouldn't be any concurrency issues,
                    // but verify the same file wasn't submitted previously and is already transferred.
                    if (logFile.exists()) {
                        try {
                            PutObjectResult result = _s3.putObject(_s3Bucket, dest, logFile);
                            _log.debug("Audit log copied: {}, ETag={}", logFile, result.getETag());

                            if (!logFile.delete()) {
                                _log.warn("Failed to delete file after copying to s3: {}", logFile);
                            }
                        } catch (Exception e) {
                            // Log the error, try again on the next iteration
                            _rateLimitedLog.error(e, "Failed to copy log file {}", logFile);
                        }
                    }
                });
            }
        }
    }

    /**
     * This method is run at regular intervals to remove audits from the audit queue and write them to a local file.
*/ private void processQueuedAudits(boolean interruptable) { QueuedAudit audit; try { while (!(_auditService.isShutdown() && interruptable) && ((audit = _auditQueue.poll()) != null)) { boolean written = false; while (!written) { AuditOutput auditOutput = getAuditOutputForTime(audit.time); written = auditOutput.writeAudit(audit); } } } catch (Exception e) { _log.error("Processing of queued audits failed", e); } } private AuditOutput getAuditOutputForTime(long time) { // Truncate the time based on the batch duration long batchTime = time - (time % _maxBatchTimeMs); // The most common case is that audits are written in time order, so optimize by caching the most recently // used AuditOutput and return it if it is usable for an audit at the given time. AuditOutput mruAuditOutput = _mruAuditOutput; if (mruAuditOutput != null && batchTime == mruAuditOutput.getBatchTime() && !mruAuditOutput.isClosed()) { return mruAuditOutput; } return _mruAuditOutput = _openAuditOutputs.computeIfAbsent(batchTime, this::createNewAuditLogOut); } private AuditOutput createNewAuditLogOut(long batchTime) { // Set the batch to close at the end of the next batch time from now, regardless of what time the batch is for. long now = _clock.millis(); long nextBatchCycleCloseTime = now - (now % _maxBatchTimeMs) + _maxBatchTimeMs; return new AuditOutput(LOG_FILE_DATE_FORMATTER.format(Instant.ofEpochMilli(batchTime)), batchTime, nextBatchCycleCloseTime); } private void renameClosedLogFile(File logFile) throws IOException { // Move the file to a new file without the ".tmp" suffix String closedFileName = logFile.getName().substring(0, logFile.getName().length() - OPEN_FILE_SUFFIX.length()) + CLOSED_FILE_SUFFIX; Files.move(logFile.toPath(), new File(logFile.getParentFile(), closedFileName).toPath()); } /** * In-memory holder for an audit in the queue. 
*/ private static class QueuedAudit { final String table; final String key; final Audit audit; final long time; QueuedAudit(String table, String key, Audit audit, long time) { this.table = table; this.key = key; this.audit = audit; this.time = time; } } /** * Class holder for a single audit output file, with most of the file details abstracted by {@link #writeAudit(QueuedAudit)}. */ private class AuditOutput { private final File _auditLogFile; private final long _batchTime; private final long _closeTime; private final ReentrantLock _lock = new ReentrantLock(); private volatile boolean _closed; private volatile int _auditsWritten = 0; private volatile CountingOutputStream _auditLogOut; AuditOutput(String datePrefix, long batchTime, long closeTime) { String fileName = String.format("%s.%s.%s%s", _logFilePrefix, datePrefix, UUID.randomUUID(), OPEN_FILE_SUFFIX); _auditLogFile = new File(_stagingDir, fileName); _batchTime = batchTime; _closeTime = closeTime; } void createAuditLogOut() throws IOException { FileOutputStream fileOut = new FileOutputStream(_auditLogFile); _auditLogOut = new CountingOutputStream(fileOut); } /** * Writes a single audit to the log file. * @return True if the audit was written, false if the audit could not be written because the file was closed * or can no longer accept writes due to file size or age. */ boolean writeAudit(QueuedAudit audit) { Map<String, Object> auditMap = Maps.newLinkedHashMap(); // This is an intentional break from convention to use "tablename" instead of "table". This is because // "table" is a reserved word in Presto and complicates queries for that column. auditMap.put("tablename", audit.table); auditMap.put("key", audit.key); auditMap.put("time", audit.time); // Even though the content of the audit is valid JSON the potential key set is unbounded. This makes // it difficult to define a schema for Presto. So create values for the conventional keys and store // the rest in an opaque blob. 
Map<String, Object> custom = new HashMap<>(audit.audit.getAll()); if (custom.remove(Audit.COMMENT) != null) { auditMap.put("comment", audit.audit.getComment()); } if (custom.remove(Audit.HOST) != null) { auditMap.put("host", audit.audit.getHost()); } if (custom.remove(Audit.PROGRAM) != null) { auditMap.put("program", audit.audit.getProgram()); } if (custom.remove(Audit.SHA1) != null) { auditMap.put("sha1", audit.audit.getCustom(Audit.SHA1)); } if (custom.remove(Audit.TAGS) != null) { auditMap.put("tags", audit.audit.getTags()); } if (!custom.isEmpty()) { try { auditMap.put("custom", _objectWriter.writeValueAsString(custom)); } catch (JsonProcessingException e) { _log.info("Failed to write custom audit information", e); } } // Lock critical section to ensure the file isn't closed while writing the audit _lock.lock(); try { if (shouldClose()) { close(); } if (isClosed()) { return false; } // Lazily create the audit log file on the first write. This way if a race occurs and more than // one AuditOutput is created for a batch only the one which wins will actually generate a file to // be transferred to S3. 
if (_auditLogOut == null) { createAuditLogOut(); } _objectWriter.writeValue(_auditLogOut, auditMap); _auditLogOut.write('\n'); //noinspection NonAtomicOperationOnVolatileField _auditsWritten += 1; } catch (IOException e) { _log.warn("Failed to write audit to logs", e); } finally { _lock.unlock(); } return true; } boolean isClosed() { return _closed; } void close() { _lock.lock(); try { if (!_closed) { _closed = true; if (_auditLogOut != null) { _auditLogOut.close(); } if (_auditsWritten != 0) { renameClosedLogFile(_auditLogFile); } } } catch (IOException e) { _log.warn("Failed to close log file", e); } finally { _openAuditOutputs.remove(_batchTime, this); _lock.unlock(); } } long getBatchTime() { return _batchTime; } boolean isBatchTimedOut() { return _clock.millis() >= _closeTime; } boolean isOversized() { return _auditLogOut != null && _auditLogOut.getCount() > _maxFileSize; } boolean shouldClose() { return isBatchTimedOut() || isOversized(); } } }
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.eql.action;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksResponse;
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse;
import org.elasticsearch.action.fieldcaps.FieldCapabilitiesAction;
import org.elasticsearch.action.support.ActionFilter;
import org.elasticsearch.action.support.ActionFilterChain;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexModule;
import org.elasticsearch.index.shard.SearchOperationListener;
import org.elasticsearch.plugins.ActionPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.PluginsService;
import org.elasticsearch.search.internal.ReaderContext;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.tasks.TaskId;
import org.elasticsearch.tasks.TaskInfo;
import org.elasticsearch.test.ESIntegTestCase;

import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;

import static org.elasticsearch.test.ESIntegTestCase.Scope.SUITE;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.hasSize;

/**
 * IT tests that can block EQL execution at different places
 */
@ESIntegTestCase.ClusterScope(scope = SUITE, numDataNodes = 0, numClientNodes = 0, maxNumDataNodes = 0)
public abstract class AbstractEqlBlockingIntegTestCase extends AbstractEqlIntegTestCase {

    /**
     * Collects the {@link SearchBlockPlugin} instance from every node in the cluster, resets each plugin's
     * counters, and enables the requested blocks (search and/or field caps) before returning them.
     */
    protected List<SearchBlockPlugin> initBlockFactory(boolean searchBlock, boolean fieldCapsBlock) {
        List<SearchBlockPlugin> plugins = new ArrayList<>();
        for (PluginsService pluginsService : internalCluster().getInstances(PluginsService.class)) {
            plugins.addAll(pluginsService.filterPlugins(SearchBlockPlugin.class));
        }
        for (SearchBlockPlugin plugin : plugins) {
            plugin.reset();
            if (searchBlock) {
                plugin.enableSearchBlock();
            }
            if (fieldCapsBlock) {
                plugin.enableFieldCapBlock();
            }
        }
        return plugins;
    }

    /** Releases both the field-caps and search blocks on all plugins. */
    protected void disableBlocks(List<SearchBlockPlugin> plugins) {
        disableFieldCapBlocks(plugins);
        disableSearchBlocks(plugins);
    }

    protected void disableSearchBlocks(List<SearchBlockPlugin> plugins) {
        for (SearchBlockPlugin plugin : plugins) {
            plugin.disableSearchBlock();
        }
    }

    protected void disableFieldCapBlocks(List<SearchBlockPlugin> plugins) {
        for (SearchBlockPlugin plugin : plugins) {
            plugin.disableFieldCapBlock();
        }
    }

    /**
     * Waits until at least one shard-level search has reached a plugin block on the given index.
     */
    protected void awaitForBlockedSearches(List<SearchBlockPlugin> plugins, String index) throws Exception {
        int numberOfShards = getNumShards(index).numPrimaries;
        assertBusy(() -> {
            int numberOfBlockedPlugins = getNumberOfContexts(plugins);
            logger.trace("The plugin blocked on {} out of {} shards", numberOfBlockedPlugins, numberOfShards);
            assertThat(numberOfBlockedPlugins, greaterThan(0));
        });
    }

    /** Total number of reader contexts opened (i.e. searches intercepted) across all plugins. */
    protected int getNumberOfContexts(List<SearchBlockPlugin> plugins) throws Exception {
        int count = 0;
        for (SearchBlockPlugin plugin : plugins) {
            count += plugin.contexts.get();
        }
        return count;
    }

    /** Total number of field-caps requests intercepted across all plugins. */
    protected int getNumberOfFieldCaps(List<SearchBlockPlugin> plugins) throws Exception {
        int count = 0;
        for (SearchBlockPlugin plugin : plugins) {
            count += plugin.fieldCaps.get();
        }
        return count;
    }

    /**
     * Waits until at least one field-caps request has reached a plugin block.
     */
    protected void awaitForBlockedFieldCaps(List<SearchBlockPlugin> plugins) throws Exception {
        assertBusy(() -> {
            int numberOfBlockedPlugins = getNumberOfFieldCaps(plugins);
            logger.trace("The plugin blocked on {} nodes", numberOfBlockedPlugins);
            assertThat(numberOfBlockedPlugins, greaterThan(0));
        });
    }

    /**
     * Test plugin that can block search execution (via a SearchOperationListener on new reader contexts) and
     * field-caps execution (via an ActionFilter on the field caps action) until the corresponding flag is
     * cleared.  Blocking is implemented by spinning in assertBusy until the flag goes false.
     */
    public static class SearchBlockPlugin extends Plugin implements ActionPlugin {
        protected final Logger logger = LogManager.getLogger(getClass());

        // Counters of intercepted operations; read by the enclosing test via getNumberOf*().
        private final AtomicInteger contexts = new AtomicInteger();
        private final AtomicInteger fieldCaps = new AtomicInteger();
        // Block flags; while true the corresponding operation spins before proceeding.
        private final AtomicBoolean shouldBlockOnSearch = new AtomicBoolean(false);
        private final AtomicBoolean shouldBlockOnFieldCapabilities = new AtomicBoolean(false);
        private final String nodeId;

        public void reset() {
            contexts.set(0);
            fieldCaps.set(0);
        }

        public void disableSearchBlock() {
            shouldBlockOnSearch.set(false);
        }

        public void enableSearchBlock() {
            shouldBlockOnSearch.set(true);
        }

        public void disableFieldCapBlock() {
            shouldBlockOnFieldCapabilities.set(false);
        }

        public void enableFieldCapBlock() {
            shouldBlockOnFieldCapabilities.set(true);
        }

        public SearchBlockPlugin(Settings settings, Path configPath) throws Exception {
            // Used only for trace logging so blocked/unblocked events can be attributed to a node.
            nodeId = settings.get("node.name");
        }

        @Override
        public void onIndexModule(IndexModule indexModule) {
            super.onIndexModule(indexModule);
            indexModule.addSearchOperationListener(new SearchOperationListener() {
                @Override
                public void onNewReaderContext(ReaderContext readerContext) {
                    contexts.incrementAndGet();
                    try {
                        logger.trace("blocking search on " + nodeId);
                        // Spin until the test clears the search block flag.
                        assertBusy(() -> assertFalse(shouldBlockOnSearch.get()));
                        logger.trace("unblocking search on " + nodeId);
                    } catch (Exception e) {
                        throw new RuntimeException(e);
                    }
                }
            });
        }

        @Override
        public List<ActionFilter> getActionFilters() {
            List<ActionFilter> list = new ArrayList<>();
            list.add(new ActionFilter() {
                @Override
                public int order() {
                    return 0;
                }

                @Override
                public <Request extends ActionRequest, Response extends ActionResponse> void apply(
                    Task task, String action, Request request, ActionListener<Response> listener,
                    ActionFilterChain<Request, Response> chain) {
                    ActionListener<Response> listenerWrapper = listener;
                    if (action.equals(FieldCapabilitiesAction.NAME)) {
                        // Block on the response path so the request itself completes, but its result is
                        // withheld until the field-caps flag is cleared.
                        listenerWrapper = ActionListener.wrap(resp -> {
                            try {
                                fieldCaps.incrementAndGet();
                                logger.trace("blocking field caps on " + nodeId);
                                assertBusy(() -> assertFalse(shouldBlockOnFieldCapabilities.get()));
                                logger.trace("unblocking field caps on " + nodeId);
                            } catch (Exception e) {
                                throw new RuntimeException(e);
                            } finally {
                                listener.onResponse(resp);
                            }
                        }, listener::onFailure);
                    }
                    chain.proceed(task, action, request, listenerWrapper);
                }
            });
            return list;
        }
    }

    @Override
    protected Collection<Class<? extends Plugin>> nodePlugins() {
        List<Class<? extends Plugin>> plugins = new ArrayList<>(super.nodePlugins());
        plugins.add(SearchBlockPlugin.class);
        return plugins;
    }

    /** Returns the TaskId of the task carrying the given X-Opaque-Id header, or null if none is running. */
    protected TaskId findTaskWithXOpaqueId(String id, String action) {
        TaskInfo taskInfo = getTaskInfoWithXOpaqueId(id, action);
        if (taskInfo != null) {
            return taskInfo.getTaskId();
        } else {
            return null;
        }
    }

    /** Returns the TaskInfo of the task carrying the given X-Opaque-Id header, or null if none is running. */
    protected TaskInfo getTaskInfoWithXOpaqueId(String id, String action) {
        ListTasksResponse tasks = client().admin().cluster().prepareListTasks().setActions(action).get();
        for (TaskInfo task : tasks.getTasks()) {
            if (id.equals(task.getHeaders().get(Task.X_OPAQUE_ID))) {
                return task;
            }
        }
        return null;
    }

    /**
     * Finds and cancels the task carrying the given X-Opaque-Id header, asserting exactly one matching task
     * was cancelled, and returns its TaskId.
     */
    protected TaskId cancelTaskWithXOpaqueId(String id, String action) {
        TaskId taskId = findTaskWithXOpaqueId(id, action);
        assertNotNull(taskId);
        logger.trace("Cancelling task " + taskId);
        CancelTasksResponse response = client().admin().cluster().prepareCancelTasks().setTaskId(taskId).get();
        assertThat(response.getTasks(), hasSize(1));
        assertThat(response.getTasks().get(0).getAction(), equalTo(action));
        logger.trace("Task is cancelled " + taskId);
        return taskId;
    }
}
package com.litle.sdk;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.math.BigInteger;
import java.sql.Timestamp;
import java.text.SimpleDateFormat;

import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBElement;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Marshaller;

import com.litle.sdk.generate.AccountUpdate;
import com.litle.sdk.generate.Activate;
import com.litle.sdk.generate.AuthReversal;
import com.litle.sdk.generate.Authorization;
import com.litle.sdk.generate.BalanceInquiry;
import com.litle.sdk.generate.BatchRequest;
import com.litle.sdk.generate.CancelSubscription;
import com.litle.sdk.generate.Capture;
import com.litle.sdk.generate.CaptureGivenAuth;
import com.litle.sdk.generate.CreatePlan;
import com.litle.sdk.generate.Credit;
import com.litle.sdk.generate.Deactivate;
import com.litle.sdk.generate.EcheckCredit;
import com.litle.sdk.generate.EcheckPreNoteCredit;
import com.litle.sdk.generate.EcheckPreNoteSale;
import com.litle.sdk.generate.EcheckRedeposit;
import com.litle.sdk.generate.EcheckSale;
import com.litle.sdk.generate.EcheckVerification;
import com.litle.sdk.generate.ForceCapture;
import com.litle.sdk.generate.LitleTransactionInterface;
import com.litle.sdk.generate.Load;
import com.litle.sdk.generate.ObjectFactory;
import com.litle.sdk.generate.PayFacCredit;
import com.litle.sdk.generate.PayFacDebit;
import com.litle.sdk.generate.PhysicalCheckCredit;
import com.litle.sdk.generate.PhysicalCheckDebit;
import com.litle.sdk.generate.RegisterTokenRequestType;
import com.litle.sdk.generate.ReserveCredit;
import com.litle.sdk.generate.ReserveDebit;
import com.litle.sdk.generate.Sale;
import com.litle.sdk.generate.SubmerchantCredit;
import com.litle.sdk.generate.SubmerchantDebit;
import com.litle.sdk.generate.TransactionType;
import com.litle.sdk.generate.Unload;
import com.litle.sdk.generate.UpdateCardValidationNumOnToken;
import com.litle.sdk.generate.UpdatePlan;
import com.litle.sdk.generate.UpdateSubscription;
import com.litle.sdk.generate.VendorCredit;
import com.litle.sdk.generate.VendorDebit;

/**
 * Represents a single batch inside a batch file request.  Transactions added via
 * {@link #addTransaction(LitleTransactionInterface)} are marshalled to a temporary transaction file and
 * counted/totalled in the underlying {@link BatchRequest} header.
 */
public class LitleBatchRequest {
    private BatchRequest batchRequest;
    private JAXBContext jc;
    private File file;
    private Marshaller marshaller;
    ObjectFactory objFac;
    TransactionType txn;
    // Path of the temp file transactions are marshalled to; the file is created lazily on the first add.
    String filePath;
    OutputStream osWrttxn;
    // Number of transactions added to this batch so far.
    int numOfTxn;
    private final int maxTransactionsPerBatch;
    // Hard upper bound imposed by the Litle platform on the configurable maxTransactionsPerBatch.
    protected int litleLimit_maxTransactionsPerBatch = 100000;
    private final LitleBatchFileRequest lbfr;

    /**
     * This method initializes the batch level attributes of the XML and checks if the maxTransactionsPerBatch is not more than the value provided in the properties file
     * @param merchantId
     * @param lbfr
     * @throws JAXBException
     * @throws FileNotFoundException
     */
    LitleBatchRequest(String merchantId, LitleBatchFileRequest lbfr) throws LitleBatchException{
        this.batchRequest = new BatchRequest();
        this.batchRequest.setMerchantId(merchantId);
        this.batchRequest.setMerchantSdk(Versions.SDK_VERSION);
        this.objFac = new ObjectFactory();
        this.lbfr = lbfr;

        // Stage transaction files under <batchRequestFolder>/tmp, creating the directory if needed.
        File tmpFile = new File(lbfr.getConfig().getProperty("batchRequestFolder")+"/tmp");
        if(!tmpFile.exists()) {
            tmpFile.mkdir();
        }

        // Timestamp in the file name keeps concurrent batches for the same merchant from colliding.
        String dateString = new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss.SSS").format(new java.util.Date());
        filePath = new String(lbfr.getConfig().getProperty("batchRequestFolder")+ "/tmp/Transactions" + merchantId + dateString);
        numOfTxn = 0;

        try {
            this.jc = JAXBContext.newInstance("com.litle.sdk.generate");
            marshaller = jc.createMarshaller();
            // JAXB_FRAGMENT property required to prevent unnecessary XML info from being printed in the file during marshal.
            marshaller.setProperty(Marshaller.JAXB_FRAGMENT, true);
            // Proper formatting of XML purely for aesthetic purposes.
            marshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, true);
        } catch (JAXBException e) {
            throw new LitleBatchException("Unable to load jaxb dependencies. Perhaps a classpath issue?", e);
        }

        this.maxTransactionsPerBatch = Integer.parseInt(lbfr.getConfig().getProperty("maxTransactionsPerBatch","10000"));
        if( maxTransactionsPerBatch > litleLimit_maxTransactionsPerBatch ){
            throw new LitleBatchException("maxTransactionsPerBatch property value cannot exceed " + String.valueOf(litleLimit_maxTransactionsPerBatch));
        }
    }

    BatchRequest getBatchRequest(){
        return batchRequest;
    }

    /**
     * This method is used to add transaction to a particular batch.
     * For each supported transaction type it increments the matching count (and, where applicable, amount
     * total) on the batch header, then marshals the transaction to the batch's temp file.  AccountUpdate
     * transactions may not be mixed with any other transaction type in the same batch.
     * @param transactionType
     * @return SUCCESS when added; FILEFULL/BATCHFULL when the post-add threshold check trips; FAILURE when
     *         the transaction type is unrecognized.
     * @throws FileNotFoundException
     * @throws JAXBException
     */
    public TransactionCodeEnum addTransaction(LitleTransactionInterface transactionType) throws LitleBatchException, LitleBatchFileFullException, LitleBatchBatchFullException {
        // Lazily create the transaction file on the first add.
        if (numOfTxn == 0) {
            this.file = new File(filePath);
            try {
                osWrttxn = new FileOutputStream(file.getAbsolutePath());
            } catch (FileNotFoundException e) {
                throw new LitleBatchException("There was an exception while trying to create a Request file. Please check if the folder: " + lbfr.getConfig().getProperty("batchRequestFolder") +" has read and write access. ");
            }
        }

        // Enforce the rule that AccountUpdates must live in a batch by themselves.
        if(numOfTxn > 0 && batchRequest.getNumAccountUpdates().intValue() != numOfTxn && (transactionType instanceof AccountUpdate)){
            throw new LitleBatchException("An account update cannot be added to a batch containing transactions other than other AccountUpdates.");
        } else if(numOfTxn > 0 && batchRequest.getNumAccountUpdates().intValue() == numOfTxn && !(transactionType instanceof AccountUpdate)){
            throw new LitleBatchException("Transactions that are not AccountUpdates cannot be added to a batch containing AccountUpdates.");
        }

        // Reject the add up front if the batch or file is already at capacity.
        TransactionCodeEnum batchFileStatus = verifyFileThresholds();
        if( batchFileStatus == TransactionCodeEnum.FILEFULL){
            // NOTE(review): the synthetic no-message Exception carries no information as a cause.
            Exception e = new Exception();
            throw new LitleBatchFileFullException("Batch File is already full -- it has reached the maximum number of transactions allowed per batch file.", e);
        } else if( batchFileStatus == TransactionCodeEnum.BATCHFULL ){
            Exception e = new Exception();
            throw new LitleBatchBatchFullException("Batch is already full -- it has reached the maximum number of transactions allowed per batch.", e);
        }

        //Adding 1 to the number of transaction. This is on the assumption that we are adding one transaction to the batch at a time.
        // NOTE(review): numToAdd is never used below — each branch uses BigInteger.valueOf(1) directly;
        // this local is dead code.
        BigInteger numToAdd = new BigInteger("1");
        boolean transactionAdded = false;
        JAXBElement transaction;

        // Dispatch on the concrete transaction type: bump the matching header count (and amount total where
        // the schema defines one) and wrap the transaction in the JAXBElement the batch XML expects.
        if(transactionType instanceof Sale){
            batchRequest.setNumSales(batchRequest.getNumSales().add(BigInteger.valueOf(1)));
            batchRequest.setSaleAmount(batchRequest.getSaleAmount().add(BigInteger.valueOf(((Sale) transactionType).getAmount())));
            transaction = objFac.createSale((Sale)transactionType);
            transactionAdded = true;
            numOfTxn ++;
        } else if (transactionType instanceof Authorization){
            batchRequest.setNumAuths(batchRequest.getNumAuths().add(BigInteger.valueOf(1)));
            batchRequest.setAuthAmount(batchRequest.getAuthAmount().add(BigInteger.valueOf(((Authorization) transactionType).getAmount())));
            transaction = objFac.createAuthorization((Authorization)transactionType);
            transactionAdded = true;
            numOfTxn ++;
        } else if (transactionType instanceof Credit){
            batchRequest.setNumCredits(batchRequest.getNumCredits().add(BigInteger.valueOf(1)));
            batchRequest.setCreditAmount(batchRequest.getCreditAmount().add(BigInteger.valueOf(((Credit) transactionType).getAmount())));
            transaction = objFac.createCredit((Credit)transactionType);
            transactionAdded = true;
            numOfTxn ++;
        } else if (transactionType instanceof RegisterTokenRequestType){
            batchRequest.setNumTokenRegistrations(batchRequest.getNumTokenRegistrations().add(BigInteger.valueOf(1)));
            transaction = objFac.createRegisterTokenRequest((RegisterTokenRequestType)transactionType);
            transactionAdded = true;
            numOfTxn ++;
        } else if (transactionType instanceof CaptureGivenAuth){
            batchRequest.setNumCaptureGivenAuths(batchRequest.getNumCaptureGivenAuths().add(BigInteger.valueOf(1)));
            batchRequest.setCaptureGivenAuthAmount(batchRequest.getCaptureGivenAuthAmount().add(BigInteger.valueOf(((CaptureGivenAuth) transactionType).getAmount())));
            transaction = objFac.createCaptureGivenAuth((CaptureGivenAuth)transactionType);
            transactionAdded = true;
            numOfTxn ++;
        } else if (transactionType instanceof ForceCapture){
            batchRequest.setNumForceCaptures(batchRequest.getNumForceCaptures().add(BigInteger.valueOf(1)));
            batchRequest.setForceCaptureAmount(batchRequest.getForceCaptureAmount().add(BigInteger.valueOf(((ForceCapture) transactionType).getAmount())));
            transaction = objFac.createForceCapture((ForceCapture)transactionType);
            transactionAdded = true;
            numOfTxn ++;
        } else if (transactionType instanceof AuthReversal){
            batchRequest.setNumAuthReversals(batchRequest.getNumAuthReversals().add(BigInteger.valueOf(1)));
            batchRequest.setAuthReversalAmount(batchRequest.getAuthReversalAmount().add(BigInteger.valueOf(((AuthReversal) transactionType).getAmount())));
            transaction = objFac.createAuthReversal((AuthReversal)transactionType);
            transactionAdded = true;
            numOfTxn ++;
        } else if (transactionType instanceof Capture){
            batchRequest.setNumCaptures(batchRequest.getNumCaptures().add(BigInteger.valueOf(1)));
            batchRequest.setCaptureAmount(batchRequest.getCaptureAmount().add(BigInteger.valueOf(((Capture) transactionType).getAmount())));
            transaction = objFac.createCapture((Capture)transactionType);
            transactionAdded = true;
            numOfTxn ++;
        } else if (transactionType instanceof EcheckVerification){
            batchRequest.setNumEcheckVerification(batchRequest.getNumEcheckVerification().add(BigInteger.valueOf(1)));
            batchRequest.setEcheckVerificationAmount(batchRequest.getEcheckVerificationAmount().add(BigInteger.valueOf(((EcheckVerification) transactionType).getAmount())));
            transaction = objFac.createEcheckVerification((EcheckVerification)transactionType);
            transactionAdded = true;
            numOfTxn ++;
        } else if (transactionType instanceof EcheckCredit){
            batchRequest.setNumEcheckCredit(batchRequest.getNumEcheckCredit().add(BigInteger.valueOf(1)));
            batchRequest.setEcheckCreditAmount(batchRequest.getEcheckCreditAmount().add(BigInteger.valueOf(((EcheckCredit) transactionType).getAmount())));
            transaction = objFac.createEcheckCredit((EcheckCredit)transactionType);
            transactionAdded = true;
            numOfTxn ++;
        } else if (transactionType instanceof EcheckRedeposit){
            batchRequest.setNumEcheckRedeposit(batchRequest.getNumEcheckRedeposit().add(BigInteger.valueOf(1)));
            transaction = objFac.createEcheckRedeposit((EcheckRedeposit)transactionType);
            transactionAdded = true;
            numOfTxn ++;
        } else if (transactionType instanceof EcheckSale){
            batchRequest.setNumEcheckSales(batchRequest.getNumEcheckSales().add(BigInteger.valueOf(1)));
            batchRequest.setEcheckSalesAmount(batchRequest.getEcheckSalesAmount().add(BigInteger.valueOf(((EcheckSale) transactionType).getAmount())));
            transaction = objFac.createEcheckSale((EcheckSale)transactionType);
            transactionAdded = true;
            numOfTxn ++;
        } else if (transactionType instanceof UpdateCardValidationNumOnToken){
            batchRequest.setNumUpdateCardValidationNumOnTokens(batchRequest.getNumUpdateCardValidationNumOnTokens().add(BigInteger.valueOf(1)));
            transaction = objFac.createUpdateCardValidationNumOnToken((UpdateCardValidationNumOnToken)transactionType);
            transactionAdded = true;
            numOfTxn ++;
        } else if (transactionType instanceof UpdateSubscription) {
            batchRequest.setNumUpdateSubscriptions(batchRequest.getNumUpdateSubscriptions().add(BigInteger.valueOf(1)));
            transaction = objFac.createUpdateSubscription((UpdateSubscription)transactionType);
            transactionAdded = true;
            numOfTxn ++;
        } else if(transactionType instanceof CancelSubscription) {
            batchRequest.setNumCancelSubscriptions(batchRequest.getNumCancelSubscriptions().add(BigInteger.valueOf(1)));
            transaction = objFac.createCancelSubscription((CancelSubscription)transactionType);
            transactionAdded = true;
            numOfTxn ++;
        } else if(transactionType instanceof CreatePlan) {
            batchRequest.setNumCreatePlans(batchRequest.getNumCreatePlans().add(BigInteger.valueOf(1)));
            transaction = objFac.createCreatePlan((CreatePlan)transactionType);
            transactionAdded = true;
            numOfTxn ++;
        } else if(transactionType instanceof UpdatePlan) {
            batchRequest.setNumUpdatePlans(batchRequest.getNumUpdatePlans().add(BigInteger.valueOf(1)));
            transaction = objFac.createUpdatePlan((UpdatePlan)transactionType);
            transactionAdded = true;
            numOfTxn ++;
        } else if(transactionType instanceof Activate) {
            batchRequest.setNumActivates(batchRequest.getNumActivates().add(BigInteger.valueOf(1)));
            batchRequest.setActivateAmount(batchRequest.getActivateAmount().add(BigInteger.valueOf(((Activate) transactionType).getAmount())));
            transaction = objFac.createActivate((Activate)transactionType);
            transactionAdded = true;
            numOfTxn ++;
        } else if(transactionType instanceof Deactivate) {
            batchRequest.setNumDeactivates(batchRequest.getNumDeactivates().add(BigInteger.valueOf(1)));
            transaction = objFac.createDeactivate((Deactivate)transactionType);
            transactionAdded = true;
            numOfTxn ++;
        } else if(transactionType instanceof Load) {
            batchRequest.setNumLoads(batchRequest.getNumLoads().add(BigInteger.valueOf(1)));
            batchRequest.setLoadAmount(batchRequest.getLoadAmount().add(BigInteger.valueOf(((Load) transactionType).getAmount())));
            transaction = objFac.createLoad((Load)transactionType);
            transactionAdded = true;
            numOfTxn ++;
        } else if(transactionType instanceof Unload) {
            batchRequest.setNumUnloads(batchRequest.getNumUnloads().add(BigInteger.valueOf(1)));
            batchRequest.setUnloadAmount(batchRequest.getUnloadAmount().add(BigInteger.valueOf(((Unload) transactionType).getAmount())));
            transaction = objFac.createUnload((Unload)transactionType);
            transactionAdded = true;
            numOfTxn ++;
        } else if(transactionType instanceof BalanceInquiry) {
            batchRequest.setNumBalanceInquirys(batchRequest.getNumBalanceInquirys().add(BigInteger.valueOf(1)));
            transaction = objFac.createBalanceInquiry((BalanceInquiry)transactionType);
            transactionAdded = true;
            numOfTxn ++;
        } else if(transactionType instanceof EcheckPreNoteSale) {
            batchRequest.setNumEcheckPreNoteSale(batchRequest.getNumEcheckPreNoteSale().add(BigInteger.valueOf(1)));
            transaction = objFac.createEcheckPreNoteSale((EcheckPreNoteSale)transactionType);
            transactionAdded = true;
            numOfTxn ++;
        } else if(transactionType instanceof EcheckPreNoteCredit) {
            batchRequest.setNumEcheckPreNoteCredit(batchRequest.getNumEcheckPreNoteCredit().add(BigInteger.valueOf(1)));
            transaction = objFac.createEcheckPreNoteCredit((EcheckPreNoteCredit)transactionType);
            transactionAdded = true;
            numOfTxn ++;
        } else if(transactionType instanceof PayFacCredit) {
            batchRequest.setNumPayFacCredit(batchRequest.getNumPayFacCredit().add(BigInteger.valueOf(1)));
            batchRequest.setPayFacCreditAmount((batchRequest.getPayFacCreditAmount().add(BigInteger.valueOf(((PayFacCredit) transactionType).getAmount()))));
            transaction = objFac.createPayFacCredit((PayFacCredit)transactionType);
            transactionAdded = true;
            numOfTxn ++;
        } else if(transactionType instanceof SubmerchantCredit) {
            batchRequest.setNumSubmerchantCredit(batchRequest.getNumSubmerchantCredit().add(BigInteger.valueOf(1)));
            batchRequest.setSubmerchantCreditAmount((batchRequest.getSubmerchantCreditAmount().add(BigInteger.valueOf(((SubmerchantCredit) transactionType).getAmount()))));
            transaction = objFac.createSubmerchantCredit((SubmerchantCredit)transactionType);
            transactionAdded = true;
            numOfTxn ++;
        } else if(transactionType instanceof VendorCredit) {
            batchRequest.setNumVendorCredit(batchRequest.getNumVendorCredit().add(BigInteger.valueOf(1)));
            batchRequest.setVendorCreditAmount((batchRequest.getVendorCreditAmount().add(BigInteger.valueOf(((VendorCredit) transactionType).getAmount()))));
            transaction = objFac.createVendorCredit((VendorCredit)transactionType);
            transactionAdded = true;
            numOfTxn ++;
        } else if(transactionType instanceof ReserveCredit) {
            batchRequest.setNumReserveCredit(batchRequest.getNumReserveCredit().add(BigInteger.valueOf(1)));
            batchRequest.setReserveCreditAmount((batchRequest.getReserveCreditAmount().add(BigInteger.valueOf(((ReserveCredit) transactionType).getAmount()))));
            transaction = objFac.createReserveCredit((ReserveCredit)transactionType);
            transactionAdded = true;
            numOfTxn ++;
        } else if(transactionType instanceof PhysicalCheckCredit) {
            batchRequest.setNumPhysicalCheckCredit(batchRequest.getNumPhysicalCheckCredit().add(BigInteger.valueOf(1)));
            batchRequest.setPhysicalCheckCreditAmount((batchRequest.getPhysicalCheckCreditAmount().add(BigInteger.valueOf(((PhysicalCheckCredit) transactionType).getAmount()))));
            transaction = objFac.createPhysicalCheckCredit((PhysicalCheckCredit)transactionType);
            transactionAdded = true;
            numOfTxn ++;
        } else if(transactionType instanceof PayFacDebit) {
            batchRequest.setNumPayFacDebit(batchRequest.getNumPayFacDebit().add(BigInteger.valueOf(1)));
            batchRequest.setPayFacDebitAmount((batchRequest.getPayFacDebitAmount().add(BigInteger.valueOf(((PayFacDebit) transactionType).getAmount()))));
            transaction = objFac.createPayFacDebit((PayFacDebit)transactionType);
            transactionAdded = true;
            numOfTxn ++;
        } else if(transactionType instanceof SubmerchantDebit) {
            batchRequest.setNumSubmerchantDebit(batchRequest.getNumSubmerchantDebit().add(BigInteger.valueOf(1)));
            batchRequest.setSubmerchantDebitAmount((batchRequest.getSubmerchantDebitAmount().add(BigInteger.valueOf(((SubmerchantDebit) transactionType).getAmount()))));
            transaction = objFac.createSubmerchantDebit((SubmerchantDebit)transactionType);
            transactionAdded = true;
            numOfTxn ++;
        } else if(transactionType instanceof VendorDebit) {
            batchRequest.setNumVendorDebit(batchRequest.getNumVendorDebit().add(BigInteger.valueOf(1)));
            batchRequest.setVendorDebitAmount((batchRequest.getVendorDebitAmount().add(BigInteger.valueOf(((VendorDebit) transactionType).getAmount()))));
            transaction = objFac.createVendorDebit((VendorDebit)transactionType);
            transactionAdded = true;
            numOfTxn ++;
        } else if(transactionType instanceof ReserveDebit) {
            batchRequest.setNumReserveDebit(batchRequest.getNumReserveDebit().add(BigInteger.valueOf(1)));
            batchRequest.setReserveDebitAmount((batchRequest.getReserveDebitAmount().add(BigInteger.valueOf(((ReserveDebit) transactionType).getAmount()))));
            transaction = objFac.createReserveDebit((ReserveDebit)transactionType);
            transactionAdded = true;
            numOfTxn ++;
        } else if(transactionType instanceof PhysicalCheckDebit) {
            batchRequest.setNumPhysicalCheckDebit(batchRequest.getNumPhysicalCheckDebit().add(BigInteger.valueOf(1)));
            batchRequest.setPhysicalCheckDebitAmount((batchRequest.getPhysicalCheckDebitAmount().add(BigInteger.valueOf(((PhysicalCheckDebit) transactionType).getAmount()))));
            transaction = objFac.createPhysicalCheckDebit((PhysicalCheckDebit)transactionType);
            transactionAdded = true;
            numOfTxn ++;
        } else if (transactionType instanceof AccountUpdate){
            batchRequest.setNumAccountUpdates(batchRequest.getNumAccountUpdates().add(BigInteger.valueOf(1)));
            transaction = objFac.createAccountUpdate((AccountUpdate)transactionType);
            transactionAdded = true;
            numOfTxn ++;
        } else {
            // Unknown type: marshal an empty placeholder; transactionAdded stays false so FAILURE is returned.
            transaction = objFac.createTransaction(new TransactionType());
        }

        try {
            marshaller.marshal(transaction, osWrttxn);
        } catch (JAXBException e) {
            throw new LitleBatchException("There was an exception while marshalling the transaction object.", e);
        }

        // Re-check thresholds after the add so the caller knows this batch/file just became full.
        batchFileStatus = verifyFileThresholds();
        if( batchFileStatus == TransactionCodeEnum.FILEFULL){
            return TransactionCodeEnum.FILEFULL;
        } else if( batchFileStatus == TransactionCodeEnum.BATCHFULL ){
            return TransactionCodeEnum.BATCHFULL;
        }

        if (transactionAdded) {
            return TransactionCodeEnum.SUCCESS;
        } else {
            return TransactionCodeEnum.FAILURE;
        }
    }

    /**
     * This method makes sure that the maximum number of transactions per batch and file is not exceeded
     * This is to ensure Performance.
* @return TransactionCodeEnum.FILEFULL if the file is at capacity,
*         TransactionCodeEnum.BATCHFULL if the batch is at capacity,
*         TransactionCodeEnum.SUCCESS otherwise
*/
TransactionCodeEnum verifyFileThresholds(){
    // File capacity is checked first: a full file trumps a full batch.
    if( this.lbfr.getNumberOfTransactionInFile() == this.lbfr.getMaxAllowedTransactionsPerFile()){
        return TransactionCodeEnum.FILEFULL;
    }
    else if( getNumberOfTransactions() == this.maxTransactionsPerBatch ){
        return TransactionCodeEnum.BATCHFULL;
    }
    return TransactionCodeEnum.SUCCESS;
}

/**
 * Returns the number of transactions in the batch
 *
 * @return the running count of transactions added to this batch so far
 */
public int getNumberOfTransactions(){
    return (numOfTxn);
}

/**
 * Gets whether the batch is full per the size specification
 *
 * @return boolean indicating whether the batch is full
 */
public boolean isFull() {
    return (getNumberOfTransactions() == this.maxTransactionsPerBatch);
}

/**
 * Closes the batch output file
 *
 * @throws IOException if the underlying output stream cannot be closed
 */
public void closeFile() throws IOException {
    osWrttxn.close();
}

/**
 * Grabs the request file
 *
 * @return the request file this batch is being written to
 */
public File getFile() {
    return this.file;
}

// Accessor for the JAXB marshaller used to serialize each transaction.
public Marshaller getMarshaller() {
    return marshaller;
}

public void setMarshaller(Marshaller marshaller) {
    this.marshaller = marshaller;
}

// Accessors for the running transaction counter.
public int getNumOfTxn() {
    return numOfTxn;
}

public void setNumOfTxn(int numOfTxn) {
    this.numOfTxn = numOfTxn;
}
}
package es.ucm.fdi.iw.controller; import java.io.BufferedInputStream; import java.io.BufferedOutputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import javax.persistence.EntityManager; import javax.persistence.NoResultException; import javax.persistence.PersistenceContext; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpSession; import org.apache.log4j.Logger; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.MediaType; import org.springframework.security.crypto.password.PasswordEncoder; import org.springframework.stereotype.Controller; import org.springframework.transaction.annotation.Transactional; import org.springframework.ui.Model; import org.springframework.util.FileCopyUtils; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.ResponseBody; import org.springframework.web.multipart.MultipartFile; import org.springframework.web.servlet.view.RedirectView; import es.ucm.fdi.iw.LocalData; import es.ucm.fdi.iw.model.Direction; import es.ucm.fdi.iw.model.User; @Controller @RequestMapping("user") public class UserController { private static Logger log = Logger.getLogger(UserController.class); @Autowired private LocalData localData; @PersistenceContext private EntityManager entityManager; @Autowired private PasswordEncoder passwordEncoder; /** * Create a new user type employee * @return */ @RequestMapping(value="/newUserEmployee", method=RequestMethod.POST) @ResponseBody @Transactional // needed to allow DB change public RedirectView newUserEmployee( @RequestParam("emailE") String email, @RequestParam("passE") 
String pass, @RequestParam("nameE") String name, @RequestParam("lastNameE") String lastName, @RequestParam("nickE") String nick, HttpServletRequest request, HttpServletResponse response, Model model, HttpSession session){ String feedback = ""; if((boolean) entityManager.createQuery( "select count(u)>0 from User u where email = :email or nick = :nick") .setParameter("email", email) .setParameter("nick", nick).getSingleResult()){ feedback = "email+o+nick+ya+en+uso"; }else{ User u = new User(); try { u.setName(name); u.setEmail(email); u.setLastName(lastName); u.setNick(nick); u.setPassword(passwordEncoder.encode(pass)); u.setRoles("USER,EMPLOYEE"); entityManager.persist(u); log.info("Usuario empleado creado satisfactoriamente"); feedback ="usuario+empleado+creado+correctamente"; } catch (NoResultException nre) { feedback = "ups,+algo+salio+mal,+intentelo+de+nuevo+mas+tarde"; log.error("Algo salio mal creando el usuario "+email); response.setStatus(HttpServletResponse.SC_NOT_FOUND); } } model.addAttribute("feedback",feedback); return new RedirectView("/registro?result="+feedback); } /** * Create a new user type bussines * @return */ @RequestMapping(value="/newUserBussines", method=RequestMethod.POST) @ResponseBody @Transactional // needed to allow DB change public RedirectView newUserBussines( @RequestParam("emailB") String email, @RequestParam("passB") String pass, @RequestParam("nameB") String name, HttpServletRequest request, HttpServletResponse response, Model model, HttpSession session){ String feedback = ""; if((boolean) entityManager.createQuery( "select count(u)>0 from User u where email = :email") .setParameter("email", email) .getSingleResult()){ log.info("Email ya en uso"); feedback = "email+ya+en+uso"; }else{ User u = new User(); try { u.setName(name); u.setEmail(email); u.setPassword(passwordEncoder.encode(pass)); u.setRoles("USER,BUSSINES"); entityManager.persist(u); log.info("Usuario negocio creado satisfactoriamente"); feedback 
="Usuario+negocio+creado+correctamente"; } catch (NoResultException nre) { feedback = "Ups,algo+salio+mal,intentelo+de+nuevo+mas+tarde"; log.error("Algo salio mal creando el usuario "+email); response.setStatus(HttpServletResponse.SC_NOT_FOUND); } } model.addAttribute("feedback",feedback); return new RedirectView("/registro?result="+feedback); } /** * Returns a users' photo * @param id of user to get photo from * @return */ @RequestMapping(value="/photo/{id}", method = RequestMethod.GET, produces = MediaType.IMAGE_JPEG_VALUE) public void userPhoto(@PathVariable("id") String id, HttpServletResponse response) { File f = localData.getFile("user/"+id, "avatar"); InputStream in = null; try { if (f.exists()) { in = new BufferedInputStream(new FileInputStream(f)); } else { in = new BufferedInputStream( this.getClass().getClassLoader().getResourceAsStream("unknown-user.jpg")); } FileCopyUtils.copy(in, response.getOutputStream()); } catch (IOException ioe) { log.info("Error retrieving file: " + f + " -- " + ioe.getMessage()); } } /** * Uploads a photo for a user * @param id of user * @param photo to upload * @return */ @RequestMapping(value="/photo/{id}", method=RequestMethod.POST) public @ResponseBody RedirectView handleFileUpload(@RequestParam("photo") MultipartFile photo, @PathVariable("id") String id){ if (!photo.isEmpty()) { try { byte[] bytes = photo.getBytes(); BufferedOutputStream stream = new BufferedOutputStream( new FileOutputStream(localData.getFile("user/"+id, "avatar"))); stream.write(bytes); stream.close(); return new RedirectView("/perfilusuario?avatar=actualizado+con+exito"); } catch (Exception e) { return new RedirectView("/perfilusuario?avatar=fallo+al+actualizar+el+avatar"); } } else { return new RedirectView("/perfilusuario?avatar=archivo+vacio"); } } /** * Update the info of a user * @param dni-form dni of a user * @param name-form the name * @param lastName-form the last name * @param birthday-form the birthday * @return redirect to perfilusuario with 
or without error */ @RequestMapping(value="/updateInfoUser", method=RequestMethod.POST) @ResponseBody @Transactional // needed to allow DB change public RedirectView updateEmployee( @RequestParam("dni-form") String DNI, @RequestParam("name-form") String name, @RequestParam("lastName-form") String lastName, @RequestParam("birthday-form") String birthday, HttpServletRequest request, HttpServletResponse response, Model model, HttpSession session){ log.info("dentro del update\n dni = "+DNI+"\n name = "+name+"\nlastName = "+lastName+"\nbirthday = "+birthday+"\n"); try { User u = (User) session.getAttribute("user"); u.setName(name); u.setLastName(lastName); u.setDNI(DNI); u.setBirthday(birthday); entityManager.merge(u); session.setAttribute("user", u); response.setStatus(HttpServletResponse.SC_OK); return new RedirectView("/perfilusuario?update=tus+datos+se+actualizaron+correctamente"); } catch (NoResultException nre) { log.error("fallo al encontrar el usuario para actualizar\n"); response.setStatus(HttpServletResponse.SC_NOT_FOUND); return new RedirectView("/perfilusuario?update=false"); } } @RequestMapping(value="/updateInfoBusiness", method=RequestMethod.POST) @ResponseBody @Transactional // needed to allow DB change public RedirectView updateBusiness( @RequestParam("email-form") String email, @RequestParam("name-form") String name, @RequestParam("country-form") String country, @RequestParam("municipality-form") String municipality, @RequestParam("street-form") String street, @RequestParam("number-form") int number, HttpServletRequest request, HttpServletResponse response, Model model, HttpSession session){ log.info("dentro del update\n email = "+email+"\n name = "+name+"\n"); String updated = "error+no+se+ha+conseguido+update"; try { User u = (User) session.getAttribute("user"); u.setName(name); u.setEmail(email); if(country != null && municipality != null && street != null && number != 0){ Direction d = new Direction(); d.setCountry(country); 
d.setMunicipality(municipality); d.setStreet(street); d.setNumber(number); entityManager.persist(d);//se crea un objeto nuevo u.setAddress(d); } session.setAttribute("user", u); entityManager.merge(u); response.setStatus(HttpServletResponse.SC_OK); updated = "se+ha+actualizado+con+exito"; } catch (NoResultException nre) { log.error("fallo al encontrar el usuario para actualizar\n"); response.setStatus(HttpServletResponse.SC_NOT_FOUND); } return new RedirectView("/perfilempresa?update="+ updated); } @RequestMapping(value="/updateCard/{type}", method=RequestMethod.POST) @ResponseBody @Transactional // needed to allow DB change public RedirectView updateCard( @PathVariable("type") String type, @RequestParam("html_data") String html_data, HttpServletRequest request, HttpServletResponse response, Model model, HttpSession session){ String url; if(type.equals("E")) url="/perfilusuario?card="; else url="/perfilempresa?card="; String card = "error+no+se+ha+conseguido+actualizar+la+presentacion"; try { User u = (User) session.getAttribute("user"); u.setCard(html_data); session.setAttribute("user", u); entityManager.merge(u); response.setStatus(HttpServletResponse.SC_OK); card = "se+ha+actualizado+con+exito"; } catch (NoResultException nre) { log.error("fallo al encontrar el usuario para actualizar\n"); response.setStatus(HttpServletResponse.SC_NOT_FOUND); } return new RedirectView(url + card); } @RequestMapping(value="/Bphoto/{id}", method = RequestMethod.GET, produces = MediaType.IMAGE_JPEG_VALUE) public void userBPhoto(@PathVariable("id") String id, HttpServletResponse response) { File f = localData.getFile("user/"+id, "avatar"); InputStream in = null; try { if (f.exists()) { in = new BufferedInputStream(new FileInputStream(f)); } else { in = new BufferedInputStream( this.getClass().getClassLoader().getResourceAsStream("unknown-user.jpg")); } FileCopyUtils.copy(in, response.getOutputStream()); } catch (IOException ioe) { log.info("Error retrieving file: " + f + " -- " + 
ioe.getMessage()); } } /** * Uploads a photo for a user * @param id of user * @param photo to upload * @return */ @RequestMapping(value="/Bphoto/{id}", method=RequestMethod.POST) public @ResponseBody RedirectView handleFileUploadB(@RequestParam("Bphoto") MultipartFile photo, @PathVariable("id") String id){ if (!photo.isEmpty()) { try { byte[] bytes = photo.getBytes(); BufferedOutputStream stream = new BufferedOutputStream( new FileOutputStream(localData.getFile("user/"+id, "avatar"))); stream.write(bytes); stream.close(); return new RedirectView("/perfilempresa?avatar=actualizado+con+exito"); } catch (Exception e) { return new RedirectView("/perfilempresa?avatar=fallo+al+actualizar+el+avatar"); } } else { return new RedirectView("/perfilempresa?avatar=archivo+vacio"); } } }
package jp.ac.tsukuba.conclave.cytometry.tga;

import java.util.Random;

import jp.ac.tsukuba.conclave.cytometry.toolbox.IDGenerator;

/**
 * A node in a binary genetic-programming tree. A node is either a terminal
 * (holding a parameter index and a signed weight) or an internal node whose
 * two children are blended by {@code weight}. Trees can be randomly generated,
 * cloned, crossed over, mutated, collapsed to depth limits, and flattened into
 * a weight array over {@code maxindex} parameters.
 */
public class TGANode {

	/* node internal variables */
	Boolean isTerminal;  // null until generateTree() runs; Boolean (not boolean) so "uninitialized" is representable
	public Long uniqueID;
	public int maxdepth; // maximum depth
	public int curdepth; // current depth. Root = 1
	public int maxindex; // the maximum value for an index

	/* Node Contents */
	public TGANode leftChild = null;
	public TGANode rightChild = null;
	public double weight = 0.5; // weight of the children, if node
	// TODO: Not used right now, so it is fixed.
	// If terminal, determines whether this is a positive or negative terminal
	public int parameterIndex; // index of the parameter, if terminal

	/**
	 * Empty constructor - generates an "empty" node with a fresh unique ID.
	 * curdepth is set to -1 to mark the node as uninitiated.
	 */
	public TGANode() {
		IDGenerator i = IDGenerator.getInstance();
		uniqueID = i.newID();
		curdepth = -1; // unitiated node
	}

	/**
	 * Copy tree function - recursively copies this tree and returns the copy.
	 * The copy receives a fresh uniqueID; all other state is duplicated.
	 */
	public TGANode clone() {
		IDGenerator i = IDGenerator.getInstance();
		TGANode newn = new TGANode();
		newn.isTerminal = isTerminal;
		newn.parameterIndex = parameterIndex;
		newn.uniqueID = i.newID();
		newn.weight = weight;
		newn.curdepth = curdepth;
		newn.maxdepth = maxdepth;
		newn.maxindex = maxindex;
		// Nodes with only one child don't make sense.
		if (!isTerminal) {
			newn.leftChild = leftChild.clone();
			newn.rightChild = rightChild.clone();
		}
		return newn;
	}

	/**
	 * Generate a random tree starting from this node, reusing the node's
	 * stored maxdepth/maxindex/curdepth parameters. Exits the JVM if the node
	 * was never initialized (curdepth == -1).
	 *
	 * @param prob probability that each node becomes an internal node;
	 *             prob = 1 produces a full tree
	 */
	public void generateTree(double prob) {
		if (curdepth == -1) {
			System.err.println("ERRO: You need to pass initialization parameters to generate this tree");
			System.exit(1);
		}
		generateTree(maxdepth, maxindex, curdepth, prob);
	}

	/**
	 * Generate a random tree starting from this node. Recursive; a tree with
	 * only the root has depth = 1.
	 *
	 * @param maxd  maximum depth allowed
	 * @param maxi  exclusive upper bound for terminal parameter indices
	 * @param depth depth of this node (root = 1)
	 * @param prob  probability that each node will not be a terminal node
	 */
	public void generateTree(int maxd, int maxi, int depth, double prob) {
		Random dice = new Random();
		maxdepth = maxd;
		maxindex = maxi;
		curdepth = depth;
		// try to generate childs
		if ((curdepth < maxdepth) && (dice.nextDouble() < prob)) {
			weight = 0.5;
			leftChild = new TGANode();
			leftChild.generateTree(maxd, maxi, depth+1, prob);
			rightChild = new TGANode();
			rightChild.generateTree(maxd, maxi, depth+1, prob);
			isTerminal = false;
		} else {
			parameterIndex = dice.nextInt(maxindex);
			isTerminal = true;
			// In terminals, the weight determines whether this is a positive or negative terminal
			if (dice.nextBoolean())
				weight = 1;
			else
				weight = -1;
		}
	}

	/**
	 * Flattens the tree into a per-parameter weight array, blending children
	 * as weight*left + (1-weight)*right.
	 */
	public double[] getArray() {
		double[] ret = new double[maxindex];
		if (isTerminal) {
			ret[parameterIndex]=weight;
		} else {
			double[] lchild = leftChild.getArray();
			double[] rchild = rightChild.getArray();
			for(int i = 0; i < ret.length; i++) {
				ret[i] = lchild[i]*weight + rchild[i]*(1-weight);
			}
		}
		return ret;
	}

	/**
	 * alternative getArray function for additive genomes: children are summed
	 * without blending weights.
	 * @return per-parameter weight array
	 */
	public double[] getArray2() {
		double[] ret = new double[maxindex];
		if (isTerminal) {
			ret[parameterIndex]=weight;
		} else {
			double[] lchild = leftChild.getArray2();
			double[] rchild = rightChild.getArray2();
			for(int i = 0; i < ret.length; i++) {
				ret[i] = lchild[i] + rchild[i];
			}
		}
		return ret;
	}

	/**
	 * Walks down the tree from here, recursively. If this node is at maximum
	 * depth and has children, produces the weight array for this node and
	 * replaces it with a terminal carrying the highest-magnitude index.
	 * Also corrects depth. To be used mostly with crossover.
	 *
	 * @return true if the tree was modified
	 */
	public boolean collapse(int depth) {
		curdepth = depth;
		if (isTerminal)
			return false;
		if (curdepth < maxdepth) {
			boolean ret;
			// NOTE(review): "&&" means true only when BOTH subtrees changed;
			// given the javadoc, "||" may have been intended — confirm.
			ret = leftChild.collapse(depth+1);
			ret = rightChild.collapse(depth+1) && ret;
			return ret;
		}
		// not a terminal, and already at maxdepth - turn into terminal
		double[] t = getArray2(); //FIXME: must stop this get array/get array 2 nonsense -- probably will stick with 2
		double maxweight = 0;
		weight = 0;
		parameterIndex = 0;
		for (int i = 0; i < maxindex; i++)
			if (Math.abs(t[i]) > maxweight) {
				maxweight = Math.abs(t[i]);
				parameterIndex = i;
				weight = t[i];
			}
		isTerminal = true;
		leftChild = null; //TODO: "delete" function
		rightChild = null; //TODO: "delete" function
		return true;
	}

	/**
	 * Applies the tree crossover operator to this and another tree, modifying both.
	 * A parent, non terminal node is randomly chosen for each tree (can't crossover root nodes).
	 * A random child of one parent is swapped with the random child of the other parent.
	 * Collapse is applied to both children.
	 *
	 * @param root root node of the tree to apply crossover to.
	 * @return false when either root is a terminal (crossover skipped)
	 */
	public boolean crossover(TGANode root) {
		Random dice = new Random();
		int dep_1 = dice.nextInt(maxdepth);
		int dep_2 = dice.nextInt(maxdepth);
		TGANode fnode1 = this;
		TGANode fnode2 = root;
		// if any of the roots are terminals, ignore the crossover
		if (fnode1.isTerminal || fnode2.isTerminal)
			return false;
		// Choosing the break node. Going beyond max depth loop back to the root.
		for (int i = 0; i < dep_1; i++) {
			if (dice.nextDouble() > 0.5)
				fnode1 = fnode1.leftChild;
			else
				fnode1 = fnode1.rightChild;
			if (fnode1.isTerminal)
				fnode1 = this;
		}
		for (int i = 0; i < dep_2; i++) {
			if (dice.nextDouble() > 0.5)
				fnode2 = fnode2.leftChild;
			else
				fnode2 = fnode2.rightChild;
			if (fnode2.isTerminal)
				fnode2 = root;
		}
		// i should have two non terminal nodes in hand.
		// FIX: removed the illegal "showtime:" label — a label may only
		// precede a statement, not a local variable declaration (JLS 14.7),
		// and it was never referenced anyway.
		int choice = dice.nextInt(4);
		TGANode t;
		switch(choice) {
		case 0: // right right
			t = fnode2.rightChild;
			fnode2.rightChild = fnode1.rightChild;
			fnode1.rightChild = t;
			break;
		case 1: // right left
			t = fnode2.leftChild;
			fnode2.leftChild = fnode1.rightChild;
			fnode1.rightChild = t;
			break;
		case 2: // left right
			t = fnode2.rightChild;
			fnode2.rightChild = fnode1.leftChild;
			fnode1.leftChild = t;
			break;
		case 3: // left left
			t = fnode2.leftChild;
			fnode2.leftChild = fnode1.leftChild;
			fnode1.leftChild = t;
			break;
		}
		// checking that the nodes don't break depth limits.
		fnode1.collapse(fnode1.curdepth);
		fnode2.collapse(fnode2.curdepth);
		return true;
	}

	/**
	 * Chooses a random node and recreates the tree from that node.
	 *
	 * @param fullrate probability passed to generateTree for regrowth
	 */
	public void mutate(double fullrate) {
		Random dice = new Random();
		int dep_1 = dice.nextInt(maxdepth);
		TGANode fnode1 = this;
		// Choosing the break node. Going beyond max depth loop back to the root.
		for (int i = 0; i < dep_1; i++) {
			if (fnode1.isTerminal)
				fnode1 = this;
			else if (dice.nextDouble() > 0.5)
				fnode1 = fnode1.leftChild;
			else
				fnode1 = fnode1.rightChild;
		}
		fnode1.generateTree(fullrate);
	}

	/**
	 * Returns a text string that contains an ASCII representation of this
	 * tree. verbose, if set to true, prints each node's contents.
	 */
	public String dumptree(boolean verbose) {
		return dumptree(0,verbose);
	}

	public String dumptree(int tab, boolean verbose) {
		String ret = "";
		String tabulation = "";
		for (int i = 0; i < tab; i++)
			tabulation = tabulation + " ";
		ret = ret + tabulation + "(Node " + uniqueID + ")";
		if (isTerminal) {
			ret = ret + " Terminal: " + parameterIndex;
		} else {
			ret = ret + " Non-Term: " + weight;
		}
		ret = ret + "\n";
		if (!isTerminal) {
			ret = ret + tabulation + leftChild.dumptree(tab + 1, verbose);
			ret = ret + tabulation + rightChild.dumptree(tab + 1, verbose);
		}
		return ret;
	}

	/**
	 * Count the number of nodes and introns in this tree. Recursive.
	 * Introns are defined as the number of nodes that don't contribute to the
	 * final answer, because they are under zero weight.
	 *
	 * @return ret[0] is the total number of nodes, and
	 *         ret[1] is the total number of introns
	 */
	public int[] countNodes() {
		int ret[] = new int[2];
		if (isTerminal) {
			ret[0] = 1;
			ret[1] = 0;
		} else {
			int lret[] = leftChild.countNodes();
			int rret[] = rightChild.countNodes();
			ret[0] = lret[0] + rret[0] + 1;
			if (weight == 1.0) {
				// right child is blended with factor (1-weight) == 0: all introns
				ret[1] = lret[1] + rret[0];
			} else if (weight == 0.0) {
				// left child is blended with factor 0: all introns
				ret[1] = lret[0] + rret[1];
			} else {
				ret[1] = lret[1] + rret[1];
			}
		}
		return ret;
	}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.carbondata.core.datastore;

import java.io.IOException;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.ConcurrentHashMap;

import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.cache.Cache;
import org.apache.carbondata.core.cache.CarbonLRUCache;
import org.apache.carbondata.core.datastore.block.AbstractIndex;
import org.apache.carbondata.core.datastore.block.SegmentTaskIndex;
import org.apache.carbondata.core.datastore.block.SegmentTaskIndexWrapper;
import org.apache.carbondata.core.datastore.block.TableBlockInfo;
import org.apache.carbondata.core.datastore.exception.IndexBuilderException;
import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier;
import org.apache.carbondata.core.metadata.blocklet.DataFileFooter;
import org.apache.carbondata.core.mutate.UpdateVO;
import org.apache.carbondata.core.statusmanager.SegmentUpdateStatusManager;
import org.apache.carbondata.core.util.CarbonUtil;
import org.apache.carbondata.core.util.path.CarbonTablePath;
import org.apache.carbondata.core.util.path.CarbonTablePath.DataFileUtil;

/**
 * Class to handle loading, unloading, clearing, storing of the table
 * blocks. Cached entries are keyed by segment identifier and held in a
 * shared LRU cache; per-segment lock objects serialize concurrent loads.
 */
public class SegmentTaskIndexStore
    implements Cache<TableSegmentUniqueIdentifier, SegmentTaskIndexWrapper> {
  private static final LogService LOGGER =
      LogServiceFactory.getLogService(SegmentTaskIndexStore.class.getName());
  /**
   * carbon store path
   */
  protected String carbonStorePath;
  /**
   * CarbonLRU cache
   */
  protected CarbonLRUCache lruCache;

  /**
   * map of block info to lock object map, while loading the btree this will be filled
   * and removed after loading the tree for that particular block info, this will be useful
   * while loading the tree concurrently so only block level lock will be applied another
   * block can be loaded concurrently
   */
  private Map<String, Object> segmentLockMap;

  /**
   * constructor to initialize the SegmentTaskIndexStore
   *
   * @param carbonStorePath root path of the carbon store
   * @param lruCache        shared LRU cache the wrappers are stored in
   */
  public SegmentTaskIndexStore(String carbonStorePath, CarbonLRUCache lruCache) {
    this.carbonStorePath = carbonStorePath;
    this.lruCache = lruCache;
    segmentLockMap = new ConcurrentHashMap<String, Object>();
  }

  /**
   * Loads (or fetches from cache) the task-index wrapper for one segment.
   * IndexBuilderException and any other failure are rethrown as IOException.
   */
  @Override public SegmentTaskIndexWrapper get(
      TableSegmentUniqueIdentifier tableSegmentUniqueIdentifier) throws IOException {
    SegmentTaskIndexWrapper segmentTaskIndexWrapper = null;
    try {
      segmentTaskIndexWrapper =
          loadAndGetTaskIdToSegmentsMap(
              tableSegmentUniqueIdentifier.getSegmentToTableBlocksInfos(),
              tableSegmentUniqueIdentifier.getAbsoluteTableIdentifier(),
              tableSegmentUniqueIdentifier);
    } catch (IndexBuilderException e) {
      throw new IOException(e.getMessage(), e);
    } catch (Throwable e) {
      throw new IOException("Problem in loading segment block.", e);
    }
    return segmentTaskIndexWrapper;
  }

  /**
   * Bulk variant of {@link #get}. On any failure, wrappers loaded so far are
   * cleared (releasing their access counts) before the IOException is thrown.
   */
  @Override public List<SegmentTaskIndexWrapper> getAll(
      List<TableSegmentUniqueIdentifier> tableSegmentUniqueIdentifiers) throws IOException {
    List<SegmentTaskIndexWrapper> segmentTaskIndexWrappers =
        new ArrayList<>(tableSegmentUniqueIdentifiers.size());
    try {
      for (TableSegmentUniqueIdentifier segmentUniqueIdentifier : tableSegmentUniqueIdentifiers) {
        segmentTaskIndexWrappers.add(get(segmentUniqueIdentifier));
      }
    } catch (Throwable e) {
      for (SegmentTaskIndexWrapper segmentTaskIndexWrapper : segmentTaskIndexWrappers) {
        segmentTaskIndexWrapper.clear();
      }
      throw new IOException("Problem in loading segment blocks.", e);
    }
    return segmentTaskIndexWrappers;
  }

  /**
   * returns the SegmentTaskIndexWrapper from the cache without loading.
   * Increments the wrapper's access count when present.
   *
   * @param tableSegmentUniqueIdentifier segment key
   * @return cached wrapper, or null when not cached
   */
  @Override public SegmentTaskIndexWrapper getIfPresent(
      TableSegmentUniqueIdentifier tableSegmentUniqueIdentifier) {
    SegmentTaskIndexWrapper segmentTaskIndexWrapper = (SegmentTaskIndexWrapper) lruCache
        .get(tableSegmentUniqueIdentifier.getUniqueTableSegmentIdentifier());
    if (null != segmentTaskIndexWrapper) {
      segmentTaskIndexWrapper.incrementAccessCount();
    }
    return segmentTaskIndexWrapper;
  }

  /**
   * method invalidate the segment cache for segment
   *
   * @param tableSegmentUniqueIdentifier segment key to evict
   */
  @Override public void invalidate(TableSegmentUniqueIdentifier tableSegmentUniqueIdentifier) {
    lruCache.remove(tableSegmentUniqueIdentifier.getUniqueTableSegmentIdentifier());
  }

  /**
   * returns block timestamp value from the given task
   *
   * @param taskKey                task number to match against the fact file names
   * @param listOfUpdatedFactFiles candidate fact file names
   * @return the timestamp substring of the matching file name, or null when no
   *         file matches the task
   */
  private String getTimeStampValueFromBlock(String taskKey, List<String> listOfUpdatedFactFiles) {
    for (String blockName : listOfUpdatedFactFiles) {
      if (taskKey.equals(CarbonTablePath.DataFileUtil.getTaskNo(blockName))) {
        // timestamp is the token between the last '-' and the extension
        blockName = blockName.substring(blockName.lastIndexOf('-') + 1, blockName.lastIndexOf('.'));
        return blockName;
      }
    }
    return null;
  }

  /**
   * Below method will be used to load the segment of segments
   * One segment may have multiple task , so table segment will be loaded
   * based on task id and will return the map of taksId to table segment
   * map
   *
   * Uses double-checked locking per segment key: the LRU cache is consulted,
   * then re-consulted inside the per-segment lock before loading.
   *
   * @param segmentToTableBlocksInfos segment id to block info
   * @param absoluteTableIdentifier   absolute table identifier
   * @param tableSegmentUniqueIdentifier cache key for the segment
   * @return map of taks id to segment mapping
   * @throws IOException on load failure or when the cache has no space
   */
  private SegmentTaskIndexWrapper loadAndGetTaskIdToSegmentsMap(
      Map<String, List<TableBlockInfo>> segmentToTableBlocksInfos,
      AbsoluteTableIdentifier absoluteTableIdentifier,
      TableSegmentUniqueIdentifier tableSegmentUniqueIdentifier) throws IOException {
    // task id to segment map
    Iterator<Map.Entry<String, List<TableBlockInfo>>> iteratorOverSegmentBlocksInfos =
        segmentToTableBlocksInfos.entrySet().iterator();
    Map<TaskBucketHolder, AbstractIndex> taskIdToSegmentIndexMap = null;
    SegmentTaskIndexWrapper segmentTaskIndexWrapper = null;
    SegmentUpdateStatusManager updateStatusManager =
        new SegmentUpdateStatusManager(absoluteTableIdentifier);
    String segmentId = null;
    TaskBucketHolder taskBucketHolder = null;
    try {
      while (iteratorOverSegmentBlocksInfos.hasNext()) {
        // segment id to table block mapping
        Map.Entry<String, List<TableBlockInfo>> next = iteratorOverSegmentBlocksInfos.next();
        // group task id to table block info mapping for the segment
        // NOTE(review): the WHOLE map is passed here (not just next.getValue()),
        // so the grouping spans every segment and is recomputed per iteration —
        // confirm this is intended.
        Map<TaskBucketHolder, List<TableBlockInfo>> taskIdToTableBlockInfoMap =
            mappedAndGetTaskIdToTableBlockInfo(segmentToTableBlocksInfos);
        segmentId = next.getKey();
        // get the existing map of task id to table segment map
        UpdateVO updateVO = updateStatusManager.getInvalidTimestampRange(segmentId);
        // check if segment is already loaded, if segment is already loaded
        //no need to load the segment block
        String lruCacheKey = tableSegmentUniqueIdentifier.getUniqueTableSegmentIdentifier();
        segmentTaskIndexWrapper = (SegmentTaskIndexWrapper) lruCache.get(lruCacheKey);
        if (segmentTaskIndexWrapper == null || tableSegmentUniqueIdentifier.isSegmentUpdated()) {
          // get the segment loader lock object this is to avoid
          // same segment is getting loaded multiple times
          // in case of concurrent query
          Object segmentLoderLockObject = segmentLockMap.get(lruCacheKey);
          if (null == segmentLoderLockObject) {
            segmentLoderLockObject = addAndGetSegmentLock(lruCacheKey);
          }
          // acquire lock to lod the segment
          synchronized (segmentLoderLockObject) {
            // second check under the lock (double-checked locking)
            segmentTaskIndexWrapper = (SegmentTaskIndexWrapper) lruCache.get(lruCacheKey);
            if (null == segmentTaskIndexWrapper || tableSegmentUniqueIdentifier
                .isSegmentUpdated()) {
              // if the segment is updated then get the existing block task id map details
              // so that the same can be updated after loading the btree.
              if (tableSegmentUniqueIdentifier.isSegmentUpdated()
                  && null != segmentTaskIndexWrapper) {
                taskIdToSegmentIndexMap = segmentTaskIndexWrapper.getTaskIdToTableSegmentMap();
              } else {
                // creating a map of take if to table segment
                taskIdToSegmentIndexMap = new HashMap<TaskBucketHolder, AbstractIndex>();
                segmentTaskIndexWrapper = new SegmentTaskIndexWrapper(taskIdToSegmentIndexMap);
                segmentTaskIndexWrapper.incrementAccessCount();
              }
              Iterator<Map.Entry<TaskBucketHolder, List<TableBlockInfo>>> iterator =
                  taskIdToTableBlockInfoMap.entrySet().iterator();
              long requiredSize =
                  calculateRequiredSize(taskIdToTableBlockInfoMap, absoluteTableIdentifier);
              segmentTaskIndexWrapper
                  .setMemorySize(requiredSize + segmentTaskIndexWrapper.getMemorySize());
              // reserve space in the LRU cache before building the btrees
              boolean isAddedToLruCache =
                  lruCache.put(lruCacheKey, segmentTaskIndexWrapper, requiredSize);
              if (isAddedToLruCache) {
                while (iterator.hasNext()) {
                  Map.Entry<TaskBucketHolder, List<TableBlockInfo>> taskToBlockInfoList =
                      iterator.next();
                  taskBucketHolder = taskToBlockInfoList.getKey();
                  taskIdToSegmentIndexMap.put(taskBucketHolder,
                      loadBlocks(taskBucketHolder, taskToBlockInfoList.getValue(),
                          absoluteTableIdentifier));
                }
              } else {
                throw new IndexBuilderException(
                    "Can not load the segment. No Enough space available.");
              }

              // set the latest timestamp.
              segmentTaskIndexWrapper
                  .setRefreshedTimeStamp(updateVO.getCreatedOrUpdatedTimeStamp());
              // tableSegmentMapTemp.put(next.getKey(), taskIdToSegmentIndexMap);
              // removing from segment lock map as once segment is loaded
              // if concurrent query is coming for same segment
              // it will wait on the lock so after this segment will be already
              // loaded so lock is not required, that is why removing the
              // the lock object as it wont be useful
              segmentLockMap.remove(lruCacheKey);
            } else {
              segmentTaskIndexWrapper.incrementAccessCount();
            }
          }
        } else {
          segmentTaskIndexWrapper.incrementAccessCount();
        }
      }
    } catch (IndexBuilderException e) {
      LOGGER.error("Problem while loading the segment");
      throw e;
    }
    return segmentTaskIndexWrapper;
  }

  /**
   * Sums the estimated driver-btree size over every task/bucket in the map.
   */
  private long calculateRequiredSize(
      Map<TaskBucketHolder, List<TableBlockInfo>> taskIdToTableBlockInfoMap,
      AbsoluteTableIdentifier absoluteTableIdentifier) {
    Iterator<Map.Entry<TaskBucketHolder, List<TableBlockInfo>>> iterator =
        taskIdToTableBlockInfoMap.entrySet().iterator();
    TaskBucketHolder taskBucketHolder;
    long driverBTreeSize = 0;
    while (iterator.hasNext()) {
      Map.Entry<TaskBucketHolder, List<TableBlockInfo>> taskToBlockInfoList = iterator.next();
      taskBucketHolder = taskToBlockInfoList.getKey();
      driverBTreeSize += CarbonUtil
          .calculateDriverBTreeSize(taskBucketHolder.taskNo, taskBucketHolder.bucketNumber,
              taskToBlockInfoList.getValue(), absoluteTableIdentifier);
    }
    return driverBTreeSize;
  }

  /**
   * Below method will be used to get the task id to all the table block info belongs to
   * that task id mapping
   *
   * @param segmentToTableBlocksInfos segment if to table blocks info map
   * @return task id to table block info mapping
   */
  private Map<TaskBucketHolder, List<TableBlockInfo>> mappedAndGetTaskIdToTableBlockInfo(
      Map<String, List<TableBlockInfo>> segmentToTableBlocksInfos) {
    Map<TaskBucketHolder, List<TableBlockInfo>> taskIdToTableBlockInfoMap =
        new ConcurrentHashMap<>();
    Iterator<Entry<String, List<TableBlockInfo>>> iterator =
        segmentToTableBlocksInfos.entrySet().iterator();
    while (iterator.hasNext()) {
      Entry<String, List<TableBlockInfo>> next = iterator.next();
      List<TableBlockInfo> value = next.getValue();
      for (TableBlockInfo blockInfo : value) {
        // task/bucket numbers are parsed out of the carbon data file path
        String taskNo = DataFileUtil.getTaskNo(blockInfo.getFilePath());
        String bucketNo = DataFileUtil.getBucketNo(blockInfo.getFilePath());
        TaskBucketHolder bucketHolder = new TaskBucketHolder(taskNo, bucketNo);
        List<TableBlockInfo> list = taskIdToTableBlockInfoMap.get(bucketHolder);
        if (null == list) {
          list = new ArrayList<TableBlockInfo>();
          taskIdToTableBlockInfoMap.put(bucketHolder, list);
        }
        list.add(blockInfo);
      }
    }
    return taskIdToTableBlockInfoMap;
  }

  /**
   * Below method will be used to get the segment level lock object
   * (synchronized so only one lock object is ever created per segment)
   *
   * @param segmentId segment key
   * @return lock object
   */
  private synchronized Object addAndGetSegmentLock(String segmentId) {
    // get the segment lock object if it is present then return
    // otherwise add the new lock and return
    Object segmentLoderLockObject = segmentLockMap.get(segmentId);
    if (null == segmentLoderLockObject) {
      segmentLoderLockObject = new Object();
      segmentLockMap.put(segmentId, segmentLoderLockObject);
    }
    return segmentLoderLockObject;
  }

  /**
   * Below method will be used to load the blocks
   *
   * @param taskBucketHolder   task/bucket the blocks belong to
   * @param tableBlockInfoList blocks of that task, loaded together
   * @param tableIdentifier    table the blocks belong to
   * @return loaded segment
   * @throws IOException when the index file cannot be read
   */
  private AbstractIndex loadBlocks(TaskBucketHolder taskBucketHolder,
      List<TableBlockInfo> tableBlockInfoList, AbsoluteTableIdentifier tableIdentifier)
      throws IOException {
    // all the block of one task id will be loaded together
    // so creating a list which will have all the data file meta data to of one task
    List<DataFileFooter> footerList = CarbonUtil
        .readCarbonIndexFile(taskBucketHolder.taskNo, taskBucketHolder.bucketNumber,
            tableBlockInfoList, tableIdentifier);
    AbstractIndex segment = new SegmentTaskIndex();
    // file path of only first block is passed as it all table block info path of
    // same task id will be same
    segment.buildIndex(footerList);
    return segment;
  }
/**
 * Clears the access count of the given table segments on their cached index
 * wrappers.
 *
 * @param tableSegmentUniqueIdentifiers segments whose access count has to be cleared
 */
@Override
public void clearAccessCount(List<TableSegmentUniqueIdentifier> tableSegmentUniqueIdentifiers) {
  for (TableSegmentUniqueIdentifier segmentUniqueIdentifier : tableSegmentUniqueIdentifiers) {
    SegmentTaskIndexWrapper cacheable = (SegmentTaskIndexWrapper) lruCache
        .get(segmentUniqueIdentifier.getUniqueTableSegmentIdentifier());
    // Fix: the cache may no longer hold the entry (e.g. evicted meanwhile);
    // guard against the resulting NullPointerException.
    if (null != cacheable) {
      cacheable.clear();
    }
  }
}

/**
 * Key identifying the group of blocks written by one task into one bucket.
 * Instances are used as hash-map keys, so equals/hashCode cover both fields.
 */
public static class TaskBucketHolder implements Serializable {

  public String taskNo;
  public String bucketNumber;

  public TaskBucketHolder(String taskNo, String bucketNumber) {
    this.taskNo = taskNo;
    this.bucketNumber = bucketNumber;
  }

  @Override public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;

    TaskBucketHolder that = (TaskBucketHolder) o;

    if (taskNo != null ? !taskNo.equals(that.taskNo) : that.taskNo != null) return false;
    return bucketNumber != null ?
        bucketNumber.equals(that.bucketNumber) :
        that.bucketNumber == null;
  }

  @Override public int hashCode() {
    int result = taskNo != null ? taskNo.hashCode() : 0;
    result = 31 * result + (bucketNumber != null ? bucketNumber.hashCode() : 0);
    return result;
  }
}
}
package com.deepoove.swagger.diff.output; import com.deepoove.swagger.diff.SwaggerDiff; import com.deepoove.swagger.diff.model.*; import io.swagger.models.HttpMethod; import io.swagger.models.parameters.Parameter; import io.swagger.models.properties.Property; import j2html.tags.ContainerTag; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Map.Entry; import static j2html.TagCreator.*; public class HtmlRender implements Render { private String title; private String linkCss; public HtmlRender() { this("Api Change Log", "http://deepoove.com/swagger-diff/stylesheets/demo.css"); } public HtmlRender(String title, String linkCss) { this.title = title; this.linkCss = linkCss; } public String render(SwaggerDiff diff) { List<Endpoint> newEndpoints = diff.getNewEndpoints(); ContainerTag ol_newEndpoint = ol_newEndpoint(newEndpoints); List<Endpoint> missingEndpoints = diff.getMissingEndpoints(); ContainerTag ol_missingEndpoint = ol_missingEndpoint(missingEndpoints); List<ChangedEndpoint> changedEndpoints = diff.getChangedEndpoints(); ContainerTag ol_changed = ol_changed(changedEndpoints); ContainerTag p_versions = p_versions(diff.getOldVersion(), diff.getNewVersion()); return renderHtml(ol_newEndpoint, ol_missingEndpoint, ol_changed, p_versions); } public String renderHtml(ContainerTag ol_new, ContainerTag ol_miss, ContainerTag ol_changed, ContainerTag p_versions) { ContainerTag html = html().attr("lang", "en").with( head().with( meta().withCharset("utf-8"), title(title), script(rawHtml("function showHide(id){if(document.getElementById(id).style.display==\'none\'){document.getElementById(id).style.display=\'block\';document.getElementById(\'btn_\'+id).innerHTML=\'&uArr;\';}else{document.getElementById(id).style.display=\'none\';document.getElementById(\'btn_\'+id).innerHTML=\'&dArr;\';}return true;}")).withType("text/javascript"), link().withRel("stylesheet").withHref(linkCss) ), body().with( header().with(h1(title)), 
div().withClass("article").with( div_headArticle("Versions", "versions", p_versions), div_headArticle("What's New", "new", ol_new), div_headArticle("What's Deprecated", "deprecated", ol_miss), div_headArticle("What's Changed", "changed", ol_changed) ) ) ); return document().render() + html.render(); } private ContainerTag div_headArticle(final String title, final String type, final ContainerTag ol) { return div().with(h2(title).with(a(rawHtml("&uArr;")).withId("btn_" + type).withClass("showhide").withHref("#").attr("onClick", "javascript:showHide('" + type + "');")), hr(), ol); } private ContainerTag p_versions(String oldVersion, String newVersion) { ContainerTag p = p().withId("versions"); p.withText("Changes from " + oldVersion + " to " + newVersion + "."); return p; } private ContainerTag ol_newEndpoint(List<Endpoint> endpoints) { if (null == endpoints) return ol().withId("new"); ContainerTag ol = ol().withId("new"); for (Endpoint endpoint : endpoints) { ol.with(li_newEndpoint(endpoint.getMethod().toString(), endpoint.getPathUrl(), endpoint.getSummary())); } return ol; } private ContainerTag li_newEndpoint(String method, String path, String desc) { return li().with(span(method).withClass(method)).withText(path + " ") .with(span(null == desc ? "" : desc)); } private ContainerTag ol_missingEndpoint(List<Endpoint> endpoints) { if (null == endpoints) return ol().withId("deprecated"); ContainerTag ol = ol().withId("deprecated"); for (Endpoint endpoint : endpoints) { ol.with(li_missingEndpoint(endpoint.getMethod().toString(), endpoint.getPathUrl(), endpoint.getSummary())); } return ol; } private ContainerTag li_missingEndpoint(String method, String path, String desc) { return li().with(span(method).withClass(method), del().withText(path)).with(span(null == desc ? 
"" : " " + desc)); } private ContainerTag ol_changed(List<ChangedEndpoint> changedEndpoints) { if (null == changedEndpoints) return ol().withId("changed"); ContainerTag ol = ol().withId("changed"); for (ChangedEndpoint changedEndpoint : changedEndpoints) { String pathUrl = changedEndpoint.getPathUrl(); Map<HttpMethod, ChangedOperation> changedOperations = changedEndpoint.getChangedOperations(); for (Entry<HttpMethod, ChangedOperation> entry : changedOperations.entrySet()) { String method = entry.getKey().toString(); ChangedOperation changedOperation = entry.getValue(); String desc = changedOperation.getSummary(); ContainerTag ul_detail = ul().withClass("detail"); if (changedOperation.isDiffParam()) { ul_detail.with(li().with(h3("Parameter")).with(ul_param(changedOperation))); } if (changedOperation.isDiffProp()) { ul_detail.with(li().with(h3("Return Type")).with(ul_response(changedOperation))); } if (changedOperation.isDiffProduces()) { ul_detail.with(li().with(h3("Produces")).with(ul_produce(changedOperation))); } if (changedOperation.isDiffConsumes()) { ul_detail.with(li().with(h3("Consumes")).with(ul_consume(changedOperation))); } ol.with(li().with(span(method).withClass(method)).withText(pathUrl + " ").with(span(null == desc ? 
"" : desc)) .with(ul_detail)); } } return ol; } private ContainerTag ul_response(ChangedOperation changedOperation) { List<ElProperty> addProps = changedOperation.getAddProps(); List<ElProperty> delProps = changedOperation.getMissingProps(); List<ElProperty> chgProps = changedOperation.getChangedProps(); ContainerTag ul = ul().withClass("change response"); for (ElProperty prop : addProps) { ul.with(li_addProp(prop)); } for (ElProperty prop : delProps) { ul.with(li_missingProp(prop)); } for (ElProperty prop : chgProps) { ul.with(li_changedProp(prop)); } return ul; } private ContainerTag li_missingProp(ElProperty prop) { Property property = prop.getProperty(); return li().withClass("missing").withText("Delete").with(del(prop.getEl())).with(span(null == property.getDescription() ? "" : ("//" + property.getDescription())).withClass("comment")); } private ContainerTag li_addProp(ElProperty prop) { Property property = prop.getProperty(); return li().withText("Add " + prop.getEl()).with(span(null == property.getDescription() ? "" : ("//" + property.getDescription())).withClass("comment")); } private ContainerTag li_changedProp(ElProperty prop) { List<String> changeDetails = new ArrayList<>(); String changeDetailsHeading = ""; if (prop.isTypeChange()) { changeDetails.add("Data Type"); } if (prop.isNewEnums()) { changeDetails.add("Added Enum"); } if (prop.isRemovedEnums()) { changeDetails.add("Removed Enum"); } if (! 
changeDetails.isEmpty()) { changeDetailsHeading = " (" + String.join(", ", changeDetails) + ")"; } return li().withText("Change " + prop.getEl()).with(span(changeDetailsHeading).withClass("comment")); } private ContainerTag ul_param(ChangedOperation changedOperation) { List<Parameter> addParameters = changedOperation.getAddParameters(); List<Parameter> delParameters = changedOperation.getMissingParameters(); List<ChangedParameter> changedParameters = changedOperation.getChangedParameter(); ContainerTag ul = ul().withClass("change param"); for (Parameter param : addParameters) { ul.with(li_addParam(param)); } for (ChangedParameter param : changedParameters) { List<ElProperty> increased = param.getIncreased(); for (ElProperty prop : increased) { ul.with(li_addProp(prop)); } } for (ChangedParameter param : changedParameters) { boolean changeRequired = param.isChangeRequired(); boolean changeDescription = param.isChangeDescription(); if (changeRequired || changeDescription) ul.with(li_changedParam(param)); } for (ChangedParameter param : changedParameters) { List<ElProperty> missing = param.getMissing(); for (ElProperty prop : missing) { ul.with(li_missingProp(prop)); } } for (ChangedParameter param : changedParameters) { List<ElProperty> changed = param.getChanged(); for (ElProperty prop : changed) { ul.with(li_changedProp(prop)); } } for (Parameter param : delParameters) { ul.with(li_missingParam(param)); } return ul; } private ContainerTag li_addParam(Parameter param) { return li().withText("Add " + param.getName()).with(span(null == param.getDescription() ? "" : ("//" + param.getDescription())).withClass("comment")); } private ContainerTag li_missingParam(Parameter param) { return li().withClass("missing").with(span("Delete")).with(del(param.getName())).with(span(null == param.getDescription() ? 
"" : ("//" + param.getDescription())).withClass("comment")); } private ContainerTag li_changedParam(ChangedParameter changeParam) { boolean changeRequired = changeParam.isChangeRequired(); boolean changeDescription = changeParam.isChangeDescription(); Parameter rightParam = changeParam.getRightParameter(); Parameter leftParam = changeParam.getLeftParameter(); ContainerTag li = li().withText(rightParam.getName()); if (changeRequired) { li.withText(" change into " + (rightParam.getRequired() ? "required" : "not required")); } if (changeDescription) { li.withText(" Notes ").with(del(leftParam.getDescription()).withClass("comment")).withText(" change into ").with(span(span(null == rightParam.getDescription() ? "" : rightParam.getDescription()).withClass("comment"))); } return li; } private ContainerTag ul_produce(ChangedOperation changedOperation) { List<String> addProduce = changedOperation.getAddProduces(); List<String> delProduce = changedOperation.getMissingProduces(); ContainerTag ul = ul().withClass("change produces"); for (String mt : addProduce) { ul.with(li_addMediaType(mt)); } for (String mt : delProduce) { ul.with(li_missingMediaType(mt)); } return ul; } private ContainerTag ul_consume(ChangedOperation changedOperation) { List<String> addConsume = changedOperation.getAddConsumes(); List<String> delConsume = changedOperation.getMissingConsumes(); ContainerTag ul = ul().withClass("change consumes"); for (String mt : addConsume) { ul.with(li_addMediaType(mt)); } for (String mt : delConsume) { ul.with(li_missingMediaType(mt)); } return ul; } private ContainerTag li_missingMediaType(String type) { return li().withClass("missing").withText("Delete").with(del(type)).with(span("")); } private ContainerTag li_addMediaType(String type) { return li().withText("Add " + type).with(span("")); } }
/* * Copyright 2007-2008 Dave Griffith * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jetbrains.plugins.groovy.codeInspection.naming; import com.intellij.openapi.util.InvalidDataException; import com.intellij.ui.DocumentAdapter; import org.jdom.Element; import org.jetbrains.annotations.Nls; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.plugins.groovy.codeInspection.BaseInspection; import org.jetbrains.plugins.groovy.codeInspection.utils.FormattedTextFieldMacFix; import org.jetbrains.plugins.groovy.codeInspection.utils.RegExFormatter; import org.jetbrains.plugins.groovy.codeInspection.utils.RegExInputVerifier; import javax.swing.*; import javax.swing.event.DocumentEvent; import javax.swing.event.DocumentListener; import javax.swing.text.Document; import javax.swing.text.InternationalFormatter; import java.awt.*; import java.text.NumberFormat; import java.text.ParseException; import java.util.regex.Matcher; import java.util.regex.Pattern; public abstract class ConventionInspection extends BaseInspection { @Nls @NotNull public String getGroupDisplayName() { return "Naming Conventions"; } /** * public fields for the DefaultJDomExternalizer * * @noinspection PublicField,WeakerAccess */ public String m_regex = getDefaultRegex(); /** * @noinspection PublicField,WeakerAccess */ public int m_minLength = getDefaultMinLength(); /** * @noinspection PublicField,WeakerAccess */ public int m_maxLength = 
getDefaultMaxLength(); protected Pattern m_regexPattern = Pattern.compile(m_regex); @NonNls protected abstract String getDefaultRegex(); protected abstract int getDefaultMinLength(); protected abstract int getDefaultMaxLength(); protected String getRegex() { return m_regex; } protected int getMinLength() { return m_minLength; } protected int getMaxLength() { return m_maxLength; } protected boolean isValid(String name) { final int length = name.length(); if (length < m_minLength) { return false; } if (length > m_maxLength) { return false; } if ("SerialVersionUID".equals(name)) { return true; } final Matcher matcher = m_regexPattern.matcher(name); return matcher.matches(); } public void readSettings(Element element) throws InvalidDataException { super.readSettings(element); m_regexPattern = Pattern.compile(m_regex); } private static final int REGEX_COLUMN_COUNT = 25; public JComponent createOptionsPanel() { final GridBagLayout layout = new GridBagLayout(); final JPanel panel = new JPanel(layout); final JLabel patternLabel = new JLabel( "Pattern:"); patternLabel.setHorizontalAlignment(SwingConstants.TRAILING); final JLabel minLengthLabel = new JLabel( "Min Length:"); minLengthLabel.setHorizontalAlignment(SwingConstants.TRAILING); final JLabel maxLengthLabel = new JLabel( "Max Length:"); maxLengthLabel.setHorizontalAlignment(SwingConstants.TRAILING); final NumberFormat numberFormat = NumberFormat.getIntegerInstance(); numberFormat.setParseIntegerOnly(true); numberFormat.setMinimumIntegerDigits(1); numberFormat.setMaximumIntegerDigits(2); final InternationalFormatter formatter = new InternationalFormatter(numberFormat); formatter.setAllowsInvalid(false); formatter.setCommitsOnValidEdit(true); final JFormattedTextField minLengthField = new JFormattedTextField(formatter); final Font panelFont = panel.getFont(); minLengthField.setFont(panelFont); minLengthField.setValue(m_minLength); minLengthField.setColumns(2); FormattedTextFieldMacFix.apply(minLengthField); final 
JFormattedTextField maxLengthField = new JFormattedTextField(formatter); maxLengthField.setFont(panelFont); maxLengthField.setValue(m_maxLength); maxLengthField.setColumns(2); FormattedTextFieldMacFix.apply(maxLengthField); final JFormattedTextField regexField = new JFormattedTextField(new RegExFormatter()); regexField.setFont(panelFont); regexField.setValue(m_regexPattern); regexField.setColumns(REGEX_COLUMN_COUNT); regexField.setInputVerifier(new RegExInputVerifier()); regexField.setFocusLostBehavior(JFormattedTextField.COMMIT); FormattedTextFieldMacFix.apply(regexField); final DocumentListener listener = new DocumentAdapter() { public void textChanged(DocumentEvent evt) { try { regexField.commitEdit(); minLengthField.commitEdit(); maxLengthField.commitEdit(); m_regexPattern = (Pattern) regexField.getValue(); m_regex = m_regexPattern.pattern(); m_minLength = ((Number) minLengthField.getValue()).intValue(); m_maxLength = ((Number) maxLengthField.getValue()).intValue(); } catch (ParseException ignore) { // No luck this time } } }; final Document regexDocument = regexField.getDocument(); regexDocument.addDocumentListener(listener); final Document minLengthDocument = minLengthField.getDocument(); minLengthDocument.addDocumentListener(listener); final Document maxLengthDocument = maxLengthField.getDocument(); maxLengthDocument.addDocumentListener(listener); final GridBagConstraints constraints = new GridBagConstraints(); constraints.gridx = 0; constraints.gridy = 0; constraints.weightx = 1.0; constraints.anchor = GridBagConstraints.EAST; constraints.fill = GridBagConstraints.HORIZONTAL; panel.add(patternLabel, constraints); constraints.gridx = 1; constraints.gridy = 0; constraints.gridwidth = 3; constraints.anchor = GridBagConstraints.WEST; panel.add(regexField, constraints); constraints.gridx = 0; constraints.gridy = 1; constraints.gridwidth = 1; constraints.anchor = GridBagConstraints.EAST; panel.add(minLengthLabel, constraints); constraints.gridx = 1; 
constraints.gridy = 1; constraints.anchor = GridBagConstraints.WEST; panel.add(minLengthField, constraints); constraints.gridx = 2; constraints.gridy = 1; constraints.anchor = GridBagConstraints.EAST; panel.add(maxLengthLabel, constraints); constraints.gridx = 3; constraints.gridy = 1; constraints.anchor = GridBagConstraints.WEST; panel.add(maxLengthField, constraints); return panel; } }
/* * Copyright 2000-2015 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.openapi.roots.ui.configuration.artifacts; import com.intellij.codeInsight.hint.HintManager; import com.intellij.codeInsight.hint.HintUtil; import com.intellij.icons.AllIcons; import com.intellij.ide.CommonActionsManager; import com.intellij.ide.DataManager; import com.intellij.ide.DefaultTreeExpander; import com.intellij.ide.impl.TypeSafeDataProviderAdapter; import com.intellij.openapi.actionSystem.*; import com.intellij.openapi.compiler.CompilerBundle; import com.intellij.openapi.fileChooser.FileChooserDescriptorFactory; import com.intellij.openapi.module.Module; import com.intellij.openapi.project.Project; import com.intellij.openapi.project.ProjectBundle; import com.intellij.openapi.roots.libraries.Library; import com.intellij.openapi.roots.ui.configuration.artifacts.actions.*; import com.intellij.openapi.roots.ui.configuration.artifacts.sourceItems.LibrarySourceItem; import com.intellij.openapi.roots.ui.configuration.artifacts.sourceItems.ModuleOutputSourceItem; import com.intellij.openapi.roots.ui.configuration.artifacts.sourceItems.SourceItemsTree; import com.intellij.openapi.ui.FixedSizeButton; import com.intellij.openapi.ui.TextFieldWithBrowseButton; import com.intellij.openapi.util.Comparing; import com.intellij.openapi.util.Disposer; import com.intellij.openapi.util.io.FileUtil; import com.intellij.openapi.util.io.FileUtilRt; import 
com.intellij.openapi.util.registry.Registry; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.packaging.artifacts.Artifact; import com.intellij.packaging.artifacts.ArtifactType; import com.intellij.packaging.artifacts.ModifiableArtifact; import com.intellij.packaging.elements.*; import com.intellij.packaging.impl.artifacts.ArtifactUtil; import com.intellij.packaging.impl.elements.ArchivePackagingElement; import com.intellij.packaging.impl.elements.ManifestFileUtil; import com.intellij.packaging.ui.ManifestFileConfiguration; import com.intellij.ui.*; import com.intellij.ui.awt.RelativePoint; import com.intellij.ui.border.CustomLineBorder; import com.intellij.util.EventDispatcher; import com.intellij.util.IconUtil; import com.intellij.util.ui.JBUI; import com.intellij.util.ui.ThreeStateCheckBox; import com.intellij.util.ui.UIUtil; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import javax.swing.border.CompoundBorder; import javax.swing.border.EmptyBorder; import javax.swing.border.LineBorder; import javax.swing.event.HyperlinkEvent; import java.awt.*; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.util.ArrayList; import java.util.Collections; import java.util.List; /** * @author nik */ public class ArtifactEditorImpl implements ArtifactEditorEx { private JPanel myMainPanel; private JCheckBox myBuildOnMakeCheckBox; private TextFieldWithBrowseButton myOutputDirectoryField; private JPanel myEditorPanel; private JPanel myErrorPanelPlace; private ThreeStateCheckBox myShowContentCheckBox; private FixedSizeButton myShowSpecificContentOptionsButton; private JPanel myTopPanel; private final ActionGroup myShowSpecificContentOptionsGroup; private final Project myProject; private final ComplexElementSubstitutionParameters mySubstitutionParameters = new ComplexElementSubstitutionParameters(); private final 
EventDispatcher<ArtifactEditorListener> myDispatcher = EventDispatcher.create(ArtifactEditorListener.class); private final ArtifactEditorContextImpl myContext; private final SourceItemsTree mySourceItemsTree; private final Artifact myOriginalArtifact; private final LayoutTreeComponent myLayoutTreeComponent; private TabbedPaneWrapper myTabbedPane; private ArtifactPropertiesEditors myPropertiesEditors; private final ArtifactValidationManagerImpl myValidationManager; private boolean myDisposed; public ArtifactEditorImpl(final @NotNull ArtifactsStructureConfigurableContext context, @NotNull Artifact artifact, @NotNull ArtifactEditorSettings settings) { myContext = createArtifactEditorContext(context); myOriginalArtifact = artifact; myProject = context.getProject(); mySubstitutionParameters.setTypesToShowContent(settings.getTypesToShowContent()); mySourceItemsTree = new SourceItemsTree(myContext, this); myLayoutTreeComponent = new LayoutTreeComponent(this, mySubstitutionParameters, myContext, myOriginalArtifact, settings.isSortElements()); myPropertiesEditors = new ArtifactPropertiesEditors(myContext, myOriginalArtifact, myOriginalArtifact); Disposer.register(this, mySourceItemsTree); Disposer.register(this, myLayoutTreeComponent); if (Registry.is("ide.new.project.settings")) { myTopPanel.setBorder(new EmptyBorder(0, 10, 0, 10)); } myBuildOnMakeCheckBox.setSelected(artifact.isBuildOnMake()); final String outputPath = artifact.getOutputPath(); myOutputDirectoryField.addBrowseFolderListener(CompilerBundle.message("dialog.title.output.directory.for.artifact"), CompilerBundle.message("chooser.description.select.output.directory.for.0.artifact", getArtifact().getName()), myProject, FileChooserDescriptorFactory.createSingleFolderDescriptor()); myShowSpecificContentOptionsGroup = createShowSpecificContentOptionsGroup(); myShowSpecificContentOptionsButton.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { 
ActionManager.getInstance().createActionPopupMenu(ActionPlaces.UNKNOWN, myShowSpecificContentOptionsGroup).getComponent().show(myShowSpecificContentOptionsButton, 0, 0); } }); setOutputPath(outputPath); myValidationManager = new ArtifactValidationManagerImpl(this); updateShowContentCheckbox(); } protected ArtifactEditorContextImpl createArtifactEditorContext(ArtifactsStructureConfigurableContext parentContext) { return new ArtifactEditorContextImpl(parentContext, this); } private ActionGroup createShowSpecificContentOptionsGroup() { final DefaultActionGroup group = new DefaultActionGroup(); for (ComplexPackagingElementType<?> type : PackagingElementFactory.getInstance().getComplexElementTypes()) { group.add(new ToggleShowElementContentAction(type, this)); } return group; } private void setOutputPath(@Nullable String outputPath) { myOutputDirectoryField.setText(outputPath != null ? FileUtil.toSystemDependentName(outputPath) : null); } public void apply() { final ModifiableArtifact modifiableArtifact = myContext.getOrCreateModifiableArtifactModel().getOrCreateModifiableArtifact(myOriginalArtifact); modifiableArtifact.setBuildOnMake(myBuildOnMakeCheckBox.isSelected()); modifiableArtifact.setOutputPath(getConfiguredOutputPath()); myPropertiesEditors.applyProperties(); myLayoutTreeComponent.saveElementProperties(); } @Nullable private String getConfiguredOutputPath() { String outputPath = FileUtil.toSystemIndependentName(myOutputDirectoryField.getText().trim()); if (outputPath.length() == 0) { outputPath = null; } return outputPath; } public SourceItemsTree getSourceItemsTree() { return mySourceItemsTree; } public void addListener(@NotNull final ArtifactEditorListener listener) { myDispatcher.addListener(listener); } @Override public ArtifactEditorContextImpl getContext() { return myContext; } public void removeListener(@NotNull final ArtifactEditorListener listener) { myDispatcher.removeListener(listener); } @Override public Artifact getArtifact() { return 
myContext.getArtifactModel().getArtifactByOriginal(myOriginalArtifact); } // NOTE(review): tail of a method whose declaration is above this chunk — presumably the getArtifact() accessor; confirm against the full file.

  /** Delegates to the layout tree: the root packaging element currently being edited. */
  @Override
  public CompositePackagingElement<?> getRootElement() {
    return myLayoutTreeComponent.getRootElement();
  }

  /** Rebuilds both the layout tree and the available-source-items tree. (Name is part of the interface; likely a typo of "rebuildTrees" — cannot be renamed here.) */
  @Override
  public void rebuildTries() {
    myLayoutTreeComponent.rebuildTree();
    mySourceItemsTree.rebuildTree();
  }

  /** Asks the editor context to schedule a validation pass. */
  @Override
  public void queueValidation() {
    myContext.queueValidation();
  }

  /**
   * Builds the whole editor UI: left (output layout tree + toolbar) and right
   * (available elements tree) panes in a splitter, wrapped in a tabbed pane
   * together with the artifact-properties tabs. Construction order matters:
   * listeners are registered on components created earlier in this method.
   */
  public JComponent createMainComponent() {
    mySourceItemsTree.initTree();
    myLayoutTreeComponent.initTree();
    DataManager.registerDataProvider(myMainPanel, new TypeSafeDataProviderAdapter(new MyDataProvider()));
    myErrorPanelPlace.add(myValidationManager.getMainErrorPanel(), BorderLayout.CENTER);
    // "ide.new.project.settings" registry flag switches to the flat one-pixel-divider look.
    final JBSplitter splitter = Registry.is("ide.new.project.settings") ? new OnePixelSplitter(false) : new JBSplitter(false);
    final JPanel leftPanel = new JPanel(new BorderLayout());
    JPanel treePanel = myLayoutTreeComponent.getTreePanel();
    // Darcula gets empty borders; the default LAF gets line borders (repeated for each panel below).
    if (UIUtil.isUnderDarcula()) {
      treePanel.setBorder(new EmptyBorder(3, 0, 0, 0));
    }
    else {
      treePanel.setBorder(new LineBorder(UIUtil.getBorderColor()));
    }
    leftPanel.add(treePanel, BorderLayout.CENTER);
    if (UIUtil.isUnderDarcula()) {
      CompoundBorder border = new CompoundBorder(new CustomLineBorder(0, 0, 0, 1), BorderFactory.createEmptyBorder(0, 0, 0, 0));
      leftPanel.setBorder(border);
    }
    else {
      leftPanel.setBorder(BorderFactory.createEmptyBorder(3, 3, 3, 0));
    }
    splitter.setFirstComponent(leftPanel);
    final JPanel rightPanel = new JPanel(new BorderLayout());
    final JPanel rightTopPanel = new JPanel(new BorderLayout());
    // Header row of the right pane: label plus a help-icon hyperlink that pops a hint.
    final JPanel labelPanel = new JPanel();
    labelPanel.setLayout(new BoxLayout(labelPanel, BoxLayout.X_AXIS));
    labelPanel.add(new JLabel("Available Elements "));
    final HyperlinkLabel link = new HyperlinkLabel("");
    link.setIcon(AllIcons.General.Help_small);
    link.setUseIconAsLink(true);
    link.addHyperlinkListener(new HyperlinkAdapter() {
      @Override
      protected void hyperlinkActivated(HyperlinkEvent e) {
        // Show the tooltip text as a lightweight hint anchored to the link.
        final JLabel label = new JLabel(ProjectBundle.message("artifact.source.items.tree.tooltip"));
        label.setBorder(HintUtil.createHintBorder());
        label.setBackground(HintUtil.INFORMATION_COLOR);
        label.setOpaque(true);
        HintManager.getInstance().showHint(label, RelativePoint.getSouthWestOf(link), HintManager.HIDE_BY_ANY_KEY | HintManager.HIDE_BY_TEXT_CHANGE, -1);
      }
    });
    labelPanel.add(link);
    rightTopPanel.add(labelPanel, BorderLayout.CENTER);
    rightPanel.add(rightTopPanel, BorderLayout.NORTH);
    JScrollPane scrollPane = ScrollPaneFactory.createScrollPane(mySourceItemsTree, UIUtil.isUnderDarcula() || Registry.is("ide.new.project.settings"));
    JPanel scrollPaneWrap = new JPanel(new BorderLayout());
    scrollPaneWrap.add(scrollPane, BorderLayout.CENTER);
    if (UIUtil.isUnderDarcula()) {
      scrollPaneWrap.setBorder(new EmptyBorder(3, 0, 0, 0));
    }
    else {
      scrollPaneWrap.setBorder(new LineBorder(UIUtil.getBorderColor()));
    }
    rightPanel.add(scrollPaneWrap, BorderLayout.CENTER);
    if (UIUtil.isUnderDarcula()) {
      rightPanel.setBorder(new CompoundBorder(new CustomLineBorder(0, 1, 0, 0), BorderFactory.createEmptyBorder(0, 0, 0, 0)));
    }
    else {
      rightPanel.setBorder(BorderFactory.createEmptyBorder(3, 0, 3, 3));
    }
    splitter.setSecondComponent(rightPanel);
    if (Registry.is("ide.new.project.settings")) {
      // Flat look: strip all the borders assigned above and tint the divider.
      splitter.getDivider().setBackground(UIUtil.getPanelBackground());
      treePanel.setBorder(JBUI.Borders.empty());
      rightPanel.setBorder(JBUI.Borders.empty());
      scrollPaneWrap.setBorder(JBUI.Borders.empty());
      leftPanel.setBorder(JBUI.Borders.empty());
    }
    myShowContentCheckBox.addActionListener(new ActionListener() {
      @Override
      public void actionPerformed(ActionEvent e) {
        // Map the tri-state checkbox to the substitution parameters; the
        // intermediate DONT_CARE state is only reachable programmatically,
        // so it is disabled again after a user click.
        final ThreeStateCheckBox.State state = myShowContentCheckBox.getState();
        if (state == ThreeStateCheckBox.State.SELECTED) {
          mySubstitutionParameters.setSubstituteAll();
        }
        else if (state == ThreeStateCheckBox.State.NOT_SELECTED) {
          mySubstitutionParameters.setSubstituteNone();
        }
        myShowContentCheckBox.setThirdStateEnabled(false);
        myLayoutTreeComponent.rebuildTree();
        onShowContentSettingsChanged();
      }
    });
    ActionToolbar toolbar = ActionManager.getInstance().createActionToolbar(ActionPlaces.UNKNOWN, createToolbarActionGroup(), true);
    JComponent toolbarComponent = toolbar.getComponent();
    if (UIUtil.isUnderDarcula()) {
      toolbarComponent.setBorder(new CustomLineBorder(0,0,1,0));
    }
    leftPanel.add(toolbarComponent, BorderLayout.NORTH);
    toolbar.updateActionsImmediately();
    // Keep the right header the same height as the toolbar so the panes align.
    rightTopPanel.setPreferredSize(new Dimension(-1, toolbarComponent.getPreferredSize().height));
    myTabbedPane = new TabbedPaneWrapper(this);
    myTabbedPane.addTab("Output Layout", splitter);
    myPropertiesEditors.addTabs(myTabbedPane);
    myEditorPanel.add(myTabbedPane.getComponent(), BorderLayout.CENTER);
    final LayoutTree tree = myLayoutTreeComponent.getLayoutTree();
    new ShowAddPackagingElementPopupAction(this).registerCustomShortcutSet(CommonShortcuts.getNew(), tree);
    PopupHandler.installPopupHandler(tree, createPopupActionGroup(), ActionPlaces.UNKNOWN, ActionManager.getInstance());
    ToolTipManager.sharedInstance().registerComponent(tree);
    rebuildTries();
    return getMainComponent();
  }

  /** Persists the current "show content" substitution set as the project-wide default. */
  private void onShowContentSettingsChanged() {
    myContext.getParent().getDefaultSettings().setTypesToShowContent(mySubstitutionParameters.getTypesToSubstitute());
  }

  /** Re-derives the tri-state checkbox state from the substitution parameters (all / none / partial). */
  public void updateShowContentCheckbox() {
    final ThreeStateCheckBox.State state;
    if (mySubstitutionParameters.isAllSubstituted()) {
      state = ThreeStateCheckBox.State.SELECTED;
    }
    else if (mySubstitutionParameters.isNoneSubstituted()) {
      state = ThreeStateCheckBox.State.NOT_SELECTED;
    }
    else {
      state = ThreeStateCheckBox.State.DONT_CARE;
    }
    // The third (mixed) state is only enabled while the current set is actually partial.
    myShowContentCheckBox.setThirdStateEnabled(state == ThreeStateCheckBox.State.DONT_CARE);
    myShowContentCheckBox.setState(state);
    onShowContentSettingsChanged();
  }

  /** Snapshot of the current editor settings (sort order + substituted types). */
  public ArtifactEditorSettings createSettings() {
    return new ArtifactEditorSettings(myLayoutTreeComponent.isSortElements(), mySubstitutionParameters.getTypesToSubstitute());
  }

  /** Builds the toolbar above the layout tree: creation actions, remove, sort toggle, move up/down. */
  private DefaultActionGroup createToolbarActionGroup() {
    final DefaultActionGroup toolbarActionGroup = new DefaultActionGroup();
    final List<AnAction> createActions = new ArrayList<AnAction>(createNewElementActions());
    for (AnAction createAction : createActions) {
      toolbarActionGroup.add(createAction);
    }
    toolbarActionGroup.add(new RemovePackagingElementAction(this));
    toolbarActionGroup.add(Separator.getInstance());
    toolbarActionGroup.add(new SortElementsToggleAction(this.getLayoutTreeComponent()));
    toolbarActionGroup.add(new MovePackagingElementAction(myLayoutTreeComponent, "Move Up", "", IconUtil.getMoveUpIcon(), -1));
    toolbarActionGroup.add(new MovePackagingElementAction(myLayoutTreeComponent, "Move Down", "", IconUtil.getMoveDownIcon(), 1));
    return toolbarActionGroup;
  }

  /** Actions that create new packaging elements: composite-element actions plus the non-composite group. */
  public List<AnAction> createNewElementActions() {
    final List<AnAction> createActions = new ArrayList<AnAction>();
    AddCompositeElementAction.addCompositeCreateActions(createActions, this);
    createActions.add(createAddNonCompositeElementGroup());
    return createActions;
  }

  /** Builds the layout tree's context-menu group (create/remove/refactor/navigate/expand-collapse). */
  private DefaultActionGroup createPopupActionGroup() {
    final LayoutTree tree = myLayoutTreeComponent.getLayoutTree();
    DefaultActionGroup popupActionGroup = new DefaultActionGroup();
    final List<AnAction> createActions = new ArrayList<AnAction>();
    AddCompositeElementAction.addCompositeCreateActions(createActions, this);
    for (AnAction createAction : createActions) {
      popupActionGroup.add(createAction);
    }
    popupActionGroup.add(createAddNonCompositeElementGroup());
    final RemovePackagingElementAction removeAction = new RemovePackagingElementAction(this);
    // Bind Delete to removal directly on the tree so the shortcut works outside the popup too.
    removeAction.registerCustomShortcutSet(CommonShortcuts.getDelete(), tree);
    popupActionGroup.add(removeAction);
    popupActionGroup.add(new ExtractArtifactAction(this));
    popupActionGroup.add(new InlineArtifactAction(this));
    popupActionGroup.add(new RenamePackagingElementAction(this));
    popupActionGroup.add(new SurroundElementWithAction(this));
    popupActionGroup.add(Separator.getInstance());
    popupActionGroup.add(new HideContentAction(this));
    popupActionGroup.add(new LayoutTreeNavigateAction(myLayoutTreeComponent));
    popupActionGroup.add(new LayoutTreeFindUsagesAction(myLayoutTreeComponent, myProject, myContext.getParent()));
    popupActionGroup.add(Separator.getInstance());
    CommonActionsManager actionsManager = CommonActionsManager.getInstance();
    DefaultTreeExpander treeExpander = new DefaultTreeExpander(tree);
    popupActionGroup.add(actionsManager.createExpandAllAction(treeExpander, tree));
    popupActionGroup.add(actionsManager.createCollapseAllAction(treeExpander, tree));
    return popupActionGroup;
  }

  @Override
  public ComplexElementSubstitutionParameters getSubstitutionParameters() {
    return mySubstitutionParameters;
  }

  /** Submenu with one "add" action per non-composite packaging element type. */
  private ActionGroup createAddNonCompositeElementGroup() {
    DefaultActionGroup group = new DefaultActionGroup(ProjectBundle.message("artifacts.add.copy.action"), true);
    group.getTemplatePresentation().setIcon(IconUtil.getAddIcon());
    for (PackagingElementType<?> type : PackagingElementFactory.getInstance().getNonCompositeElementTypes()) {
      group.add(new AddNewPackagingElementAction(type, this));
    }
    return group;
  }

  @Override
  public JComponent getMainComponent() {
    return myMainPanel;
  }

  /** Adds a new element of the given type to the layout and refreshes the source-items view. */
  @Override
  public void addNewPackagingElement(@NotNull PackagingElementType<?> type) {
    myLayoutTreeComponent.addNewPackagingElement(type);
    mySourceItemsTree.rebuildTree();
  }

  @Override
  public void removeSelectedElements() {
    myLayoutTreeComponent.removeSelectedElements();
  }

  /** Removal is implemented as replacement with null. */
  @Override
  public void removePackagingElement(@NotNull final String pathToParent, @NotNull final PackagingElement<?> element) {
    doReplaceElement(pathToParent, element, null);
  }

  @Override
  public void replacePackagingElement(@NotNull final String pathToParent, @NotNull final PackagingElement<?> element,
                                      @NotNull final PackagingElement<?> replacement) {
    doReplaceElement(pathToParent, element, replacement);
  }

  /**
   * Finds {@code element} under the composite at {@code pathToParent} and removes it,
   * optionally substituting {@code replacement}; runs inside editLayout so the
   * mutation goes through the layout's edit transaction. Only the first matching
   * child is affected (note the break).
   */
  private void doReplaceElement(final @NotNull String pathToParent, final @NotNull PackagingElement<?> element, final @Nullable PackagingElement replacement) {
    myLayoutTreeComponent.editLayout(new Runnable() {
      @Override
      public void run() {
        final CompositePackagingElement<?> parent = findCompositeElementByPath(pathToParent);
        if (parent == null) return;
        for (PackagingElement<?> child : parent.getChildren()) {
          if (child.isEqualTo(element)) {
            parent.removeChild(child);
            if (replacement != null) {
              parent.addOrFindChild(replacement);
            }
            break;
          }
        }
      }
    });
    myLayoutTreeComponent.rebuildTree();
  }

  /** Walks a '/'-separated path from the root element; null if any segment is missing. */
  @Nullable
  private CompositePackagingElement<?> findCompositeElementByPath(String pathToElement) {
    CompositePackagingElement<?> element = getRootElement();
    for (String name : StringUtil.split(pathToElement, "/")) {
      element = element.findCompositeChild(name);
      if (element == null) return null;
    }
    return element;
  }

  /** True when any UI control differs from the original artifact's persisted state. */
  public boolean isModified() {
    return myBuildOnMakeCheckBox.isSelected() != myOriginalArtifact.isBuildOnMake()
           || !Comparing.equal(getConfiguredOutputPath(), myOriginalArtifact.getOutputPath())
           || myPropertiesEditors.isModified()
           || myLayoutTreeComponent.isPropertiesModified();
  }

  @Override
  public void dispose() {
    myDisposed = true;
  }

  @Override
  public boolean isDisposed() {
    return myDisposed;
  }

  @Override
  public LayoutTreeComponent getLayoutTreeComponent() {
    return myLayoutTreeComponent;
  }

  /**
   * Keeps derived names in sync on artifact rename: if the output path is still the
   * default for the old name, switch it to the default for the new name; likewise
   * rename the root archive file when it still carries the suggested old file name.
   */
  public void updateOutputPath(@NotNull String oldArtifactName, @NotNull final String newArtifactName) {
    final String oldDefaultPath = ArtifactUtil.getDefaultArtifactOutputPath(oldArtifactName, myProject);
    if (Comparing.equal(oldDefaultPath, getConfiguredOutputPath())) {
      setOutputPath(ArtifactUtil.getDefaultArtifactOutputPath(newArtifactName, myProject));
    }
    final CompositePackagingElement<?> root = getRootElement();
    if (root instanceof ArchivePackagingElement) {
      String oldFileName = ArtifactUtil.suggestArtifactFileName(oldArtifactName);
      final String name = ((ArchivePackagingElement)root).getArchiveFileName();
      final String fileName = FileUtil.getNameWithoutExtension(name);
      final String extension = FileUtilRt.getExtension(name);
      if (fileName.equals(oldFileName) && extension.length() > 0) {
        myLayoutTreeComponent.editLayout(new Runnable() {
          @Override
          public void run() {
            ((ArchivePackagingElement)getRootElement()).setArchiveFileName(ArtifactUtil.suggestArtifactFileName(newArtifactName) + "." + extension);
          }
        });
        myLayoutTreeComponent.updateRootNode();
      }
    }
  }

  @Override
  public void updateLayoutTree() {
    myLayoutTreeComponent.rebuildTree();
  }

  @Override
  public void putLibraryIntoDefaultLocation(@NotNull Library library) {
    myLayoutTreeComponent.putIntoDefaultLocations(Collections.singletonList(new LibrarySourceItem(library)));
  }

  @Override
  public void putModuleIntoDefaultLocation(@NotNull Module module) {
    myLayoutTreeComponent.putIntoDefaultLocations(Collections.singletonList(new ModuleOutputSourceItem(module)));
  }

  /**
   * Adds the given entries to the manifest Class-Path of {@code element},
   * prompting the user to create a manifest file first if none exists.
   * Element properties are saved before and reset after so the manifest
   * edit is reflected in the property editors.
   */
  @Override
  public void addToClasspath(final CompositePackagingElement<?> element, List<String> classpath) {
    myLayoutTreeComponent.saveElementProperties();
    ManifestFileConfiguration manifest = myContext.getManifestFile(element, getArtifact().getArtifactType());
    if (manifest == null) {
      final VirtualFile file = ManifestFileUtil.showDialogAndCreateManifest(myContext, element);
      if (file == null) {
        // User cancelled the manifest-creation dialog; nothing to do.
        return;
      }
      ManifestFileUtil.addManifestFileToLayout(file.getPath(), myContext, element);
      manifest = myContext.getManifestFile(element, getArtifact().getArtifactType());
    }
    // Manifest can still be null if creation failed; only then is the edit skipped.
    if (manifest != null) {
      manifest.addToClasspath(classpath);
    }
    myLayoutTreeComponent.resetElementProperties();
  }

  /**
   * Switches the artifact's type: swaps the properties tabs for the new type and
   * rebuilds the root element (children are copied into a fresh root created by
   * the new type).
   */
  public void setArtifactType(ArtifactType artifactType) {
    final ModifiableArtifact modifiableArtifact = myContext.getOrCreateModifiableArtifactModel().getOrCreateModifiableArtifact(myOriginalArtifact);
    modifiableArtifact.setArtifactType(artifactType);
    myPropertiesEditors.removeTabs(myTabbedPane);
    myPropertiesEditors = new ArtifactPropertiesEditors(myContext, myOriginalArtifact, getArtifact());
    myPropertiesEditors.addTabs(myTabbedPane);
    final CompositePackagingElement<?> oldRootElement = getRootElement();
    final CompositePackagingElement<?> newRootElement = artifactType.createRootElement(getArtifact().getName());
    ArtifactUtil.copyChildren(oldRootElement, newRootElement, myProject);
    myLayoutTreeComponent.setRootElement(newRootElement);
  }

  public ArtifactValidationManagerImpl getValidationManager() {
    return myValidationManager;
  }

  // Invoked by the UI designer runtime for components marked "custom create" in the form.
  private void createUIComponents() {
    myShowContentCheckBox = new ThreeStateCheckBox();
    myShowSpecificContentOptionsButton = new FixedSizeButton(16);
  }

  /** Help topic for the currently selected tab; tab 0 is always the output-layout tab. */
  public String getHelpTopic() {
    final int tab = myTabbedPane.getSelectedIndex();
    if (tab == 0) {
      return "reference.project.structure.artifacts.output";
    }
    String helpId = myPropertiesEditors.getHelpId(myTabbedPane.getSelectedTitle());
    return helpId != null ? helpId : "reference.settingsdialog.project.structure.artifacts";
  }

  /** Exposes this editor through the DataContext under ARTIFACTS_EDITOR_KEY. */
  private class MyDataProvider implements TypeSafeDataProvider {
    @Override
    public void calcData(DataKey key, DataSink sink) {
      if (ARTIFACTS_EDITOR_KEY.equals(key)) {
        sink.put(ARTIFACTS_EDITOR_KEY, ArtifactEditorImpl.this);
      }
    }
  }
}
/* Copyright (C) 2003 Vladimir Roubtsov. All rights reserved. * * This program and the accompanying materials are made available under * the terms of the Common Public License v1.0 which accompanies this distribution, * and is available at http://www.eclipse.org/legal/cpl-v10.html * * $Id: WCMatcher.java,v 1.1.1.1 2004/05/09 16:57:56 vlad_r Exp $ */ package com.vladium.util; // ---------------------------------------------------------------------------- /** * @author Vlad Roubtsov, (C) 2002 */ public abstract class WCMatcher { // public: ................................................................ public static WCMatcher compile (final String pattern) { if (pattern == null) throw new IllegalArgumentException ("null input: pattern"); final char [] chars = pattern.toCharArray (); // is this faster than using charAt()? final int charsLength = chars.length; if (charsLength == 0) return EMPTY_MATCHER; // TODO: should be an EMPTY_MATCHER else { int patternLength = 0, starCount = 0, questionCount = 0; boolean star = false; for (int c = 0; c < charsLength; ++ c) { final char ch = chars [c]; if (ch == '*') { if (! 
star) { star = true; ++ starCount; chars [patternLength ++] = '*'; } } else { star = false; if (ch == '?') ++ questionCount; chars [patternLength ++] = ch; } } // [assertion: patternLength > 0] if ((starCount == 1) && (questionCount == 0)) { if (patternLength == 1) return ALL_MATCHER; else if (chars [0] == '*') return new EndsWithMatcher (chars, patternLength); else if (chars [patternLength - 1] == '*') return new StartsWithMatcher (chars, patternLength); } return new PatternMatcher (chars, patternLength); } } public abstract boolean matches (String s); public abstract boolean matches (char [] chars); // private boolean matches (int pi, int si, final char [] string) // { // System.out.println ("pi = " + pi + ", si = " + si); // // if (pi == m_pattern.length) // return si == string.length; // else // { // switch (m_pattern [pi]) // { // case '?': // { // return (si < string.length) && matches (pi + 1, si + 1, string); // } // // case '*': // { // return matches (pi + 1, si, string) || ((si < string.length) && matches (pi, si + 1, string)); // } // // default: // { // return (si < string.length) && (m_pattern [pi] == string [si]) && matches (pi + 1, si + 1, string); // } // // } // end of switch // } // } // protected: ............................................................. // package: ............................................................... WCMatcher () {} // private: ............................................................... 
private static final class AllMatcher extends WCMatcher { public final boolean matches (final String s) { if (s == null) throw new IllegalArgumentException ("null input: s"); return true; } public final boolean matches (final char [] chars) { if (chars == null) throw new IllegalArgumentException ("null input: chars"); return true; } } // end of nested class private static final class EmptyMatcher extends WCMatcher { public final boolean matches (final String s) { if (s == null) throw new IllegalArgumentException ("null input: s"); return false; } public final boolean matches (final char [] chars) { if (chars == null) throw new IllegalArgumentException ("null input: chars"); return chars.length == 0; } } // end of nested class private static final class StartsWithMatcher extends WCMatcher { public final boolean matches (final String s) { if (s == null) throw new IllegalArgumentException ("null input: s"); return s.startsWith (m_prefix); } public final boolean matches (final char [] chars) { if (chars == null) throw new IllegalArgumentException ("null input: chars"); final char [] prefixChars = m_prefixChars; final int prefixLength = prefixChars.length - 1; if (chars.length < prefixLength) return false; for (int c = 0; c < prefixLength; ++ c) { if (chars [c] != prefixChars [c]) return false; } return true; } StartsWithMatcher (final char [] pattern, final int patternLength) { m_prefixChars = pattern; m_prefix = new String (pattern, 0, patternLength - 1); } private final char [] m_prefixChars; private final String m_prefix; } // end of nested class private static final class EndsWithMatcher extends WCMatcher { public final boolean matches (final String s) { if (s == null) throw new IllegalArgumentException ("null input: s"); return s.endsWith (m_suffix); } public final boolean matches (final char [] chars) { if (chars == null) throw new IllegalArgumentException ("null input: chars"); final char [] suffixChars = m_suffixChars; final int suffixLength = 
suffixChars.length - 1; final int charsLength = chars.length; if (charsLength < suffixLength) return false; for (int c = 0; c < suffixLength; ++ c) { if (chars [charsLength - 1 - c] != suffixChars [suffixLength - c]) return false; } return true; } EndsWithMatcher (final char [] pattern, final int patternLength) { m_suffixChars = pattern; m_suffix = new String (pattern, 1, patternLength - 1); } private final char [] m_suffixChars; private final String m_suffix; } // end of nested class private static final class PatternMatcher extends WCMatcher { public final boolean matches (final String s) { if (s == null) throw new IllegalArgumentException ("null input: s"); final char [] string = s.toCharArray (); // implies an array copy; is this faster than using charAt()? final int stringLength = string.length; final char [] pattern = m_pattern; final int patternLength = m_patternLength; // [assertion: patternLength > 0] int si = 0, pi = 0; boolean star = false; next: while (true) { //System.out.println ("pi = " + pi + ", si = " + si); int i = 0; for ( ; pi + i < patternLength; ++ i) { final char patternChar = pattern [pi + i]; if (patternChar == '*') { si += i; pi += (i + 1); star = true; continue next; } final int si_i = si + i; if (si_i == stringLength) return false; if (patternChar != string [si_i]) { if (patternChar == '?') continue; if (! star) return false; ++ si; continue next; } } // end of for if (si + i == stringLength) return true; if (! 
star) return false; ++ si; // [continue next;] } } public final boolean matches (final char [] string) { if (string == null) throw new IllegalArgumentException ("null input: string"); final int stringLength = string.length; final char [] pattern = m_pattern; final int patternLength = m_patternLength; // [assertion: patternLength > 0] int si = 0, pi = 0; boolean star = false; next: while (true) { //System.out.println ("pi = " + pi + ", si = " + si); int i = 0; for ( ; pi + i < patternLength; ++ i) { final char patternChar = pattern [pi + i]; if (patternChar == '*') { si += i; pi += (i + 1); star = true; continue next; } final int si_i = si + i; if (si_i == stringLength) return false; if (patternChar != string [si_i]) { if (patternChar == '?') continue; if (! star) return false; ++ si; continue next; } } // end of for if (si + i == stringLength) return true; if (! star) return false; ++ si; // [continue next;] } } PatternMatcher (final char [] pattern, final int patternLength) { m_pattern = pattern; m_patternLength = patternLength; } private final char [] m_pattern; private final int m_patternLength; } // end of nested class private static final WCMatcher ALL_MATCHER = new AllMatcher (); private static final WCMatcher EMPTY_MATCHER = new EmptyMatcher (); } // end of class // ----------------------------------------------------------------------------
/* * Copyright 2015, Google Inc. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are * met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above * copyright notice, this list of conditions and the following disclaimer * in the documentation and/or other materials provided with the * distribution. * * * Neither the name of Google Inc. nor the names of its * contributors may be used to endorse or promote products derived from * this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package io.grpc.grpclb; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.google.common.base.Supplier; import com.google.common.base.Suppliers; import io.grpc.Attributes; import io.grpc.Channel; import io.grpc.EquivalentAddressGroup; import io.grpc.LoadBalancer; import io.grpc.ResolvedServerInfo; import io.grpc.Status; import io.grpc.TransportManager; import io.grpc.TransportManager.InterimTransport; import io.grpc.internal.GrpcUtil; import io.grpc.internal.SharedResourceHolder; import io.grpc.stub.StreamObserver; import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.SocketAddress; import java.net.UnknownHostException; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.concurrent.ExecutorService; import java.util.logging.Logger; import javax.annotation.concurrent.GuardedBy; /** * A {@link LoadBalancer} that uses the GRPCLB protocol. 
 */
// Threading model: all mutable state is guarded by 'lock'. Callbacks into the
// transport manager / interim transports are consistently made OUTSIDE the
// synchronized blocks (state is captured under the lock first) — preserve that
// discipline when modifying this class.
class GrpclbLoadBalancer<T> extends LoadBalancer<T> {
  private static final Logger logger = Logger.getLogger(GrpclbLoadBalancer.class.getName());
  private static final Status SHUTDOWN_STATUS =
      Status.UNAVAILABLE.augmentDescription("GrpclbLoadBalancer has shut down");

  private final Object lock = new Object();
  private final String serviceName;
  private final TransportManager<T> tm;

  // General states
  @GuardedBy("lock") private InterimTransport<T> interimTransport; // handed to callers while no server list is known yet
  @GuardedBy("lock") private Status lastError;
  @GuardedBy("lock") private boolean closed;

  // Load-balancer service states
  @GuardedBy("lock") private EquivalentAddressGroup lbAddresses;
  @GuardedBy("lock") private T lbTransport;
  @GuardedBy("lock") private T directTransport; // set only when the "LB" turns out not to speak grpclb (UNIMPLEMENTED)
  @GuardedBy("lock") private StreamObserver<LoadBalanceResponse> lbResponseObserver; // identity doubles as "current stream" token
  @GuardedBy("lock") private StreamObserver<LoadBalanceRequest> lbRequestWriter;

  // Server list states
  @GuardedBy("lock") private HashMap<SocketAddress, ResolvedServerInfo> servers;
  @GuardedBy("lock") @VisibleForTesting private RoundRobinServerList<T> roundRobinServerList;

  private ExecutorService executor;

  GrpclbLoadBalancer(String serviceName, TransportManager<T> tm) {
    this.serviceName = serviceName;
    this.tm = tm;
    executor = SharedResourceHolder.get(GrpcUtil.SHARED_CHANNEL_EXECUTOR);
  }

  @VisibleForTesting
  StreamObserver<LoadBalanceResponse> getLbResponseObserver() {
    synchronized (lock) {
      return lbResponseObserver;
    }
  }

  @VisibleForTesting
  RoundRobinServerList<T> getRoundRobinServerList() {
    synchronized (lock) {
      return roundRobinServerList;
    }
  }

  // Picks a transport for an application RPC. Precedence: failing transport if
  // shut down; the direct transport (non-grpclb fallback); round-robin over the
  // latest server list; otherwise a failing or interim transport while the list
  // is unknown. The round-robin pick happens outside the lock on a captured copy.
  @Override
  public T pickTransport(Attributes affinity) {
    RoundRobinServerList<T> serverListCopy;
    synchronized (lock) {
      if (closed) {
        return tm.createFailingTransport(SHUTDOWN_STATUS);
      }
      if (directTransport != null) {
        return directTransport;
      }
      if (roundRobinServerList == null) {
        if (lastError != null) {
          return tm.createFailingTransport(lastError);
        }
        if (interimTransport == null) {
          interimTransport = tm.createInterimTransport();
        }
        return interimTransport.transport();
      }
      serverListCopy = roundRobinServerList;
    }
    return serverListCopy.getTransportForNextServer();
  }

  // Called by name resolution with the addresses of the LB server(s); reconnects
  // only when the address group actually changed.
  @Override
  public void handleResolvedAddresses(
      List<ResolvedServerInfo> updatedServers, Attributes config) {
    synchronized (lock) {
      if (closed) {
        return;
      }
      ArrayList<SocketAddress> addrs = new ArrayList<SocketAddress>(updatedServers.size());
      for (ResolvedServerInfo serverInfo : updatedServers) {
        addrs.add(serverInfo.getAddress());
      }
      EquivalentAddressGroup newLbAddresses = new EquivalentAddressGroup(addrs);
      if (!newLbAddresses.equals(lbAddresses)) {
        lbAddresses = newLbAddresses;
        connectToLb();
      }
    }
    updateRetainedTransports(); // kept outside the synchronized block
  }

  // (Re)establishes the transport to the LB server and restarts negotiation.
  // Clears directTransport so a previous non-grpclb fallback is abandoned.
  @GuardedBy("lock")
  private void connectToLb() {
    directTransport = null;
    if (closed) {
      return;
    }
    lbResponseObserver = null;
    Preconditions.checkNotNull(lbAddresses, "lbAddresses");
    // TODO(zhangkun83): LB servers may use an authority different from the service's.
    // getTransport() will need to add an argument for the authority.
    lbTransport = tm.getTransport(lbAddresses);
    startNegotiation();
  }

  // Opens a fresh balanceLoad stream and sends the initial request carrying the
  // service name. The new LbResponseObserver becomes the "current stream" token.
  @GuardedBy("lock")
  private void startNegotiation() {
    if (closed) {
      return;
    }
    Preconditions.checkState(lbTransport != null, "lbTransport must be available");
    logger.info("Starting LB negotiation");
    LoadBalanceRequest initRequest = LoadBalanceRequest.newBuilder()
        .setInitialRequest(InitialLoadBalanceRequest.newBuilder()
            .setName(serviceName).build())
        .build();
    lbResponseObserver = new LbResponseObserver();
    sendLbRequest(lbTransport, initRequest);
  }

  @VisibleForTesting // to be mocked in tests
  @GuardedBy("lock")
  void sendLbRequest(T transport, LoadBalanceRequest request) {
    Channel channel = tm.makeChannel(transport);
    LoadBalancerGrpc.LoadBalancerStub stub = LoadBalancerGrpc.newStub(channel);
    lbRequestWriter = stub.balanceLoad(lbResponseObserver);
    lbRequestWriter.onNext(request);
  }

  @Override
  public void handleNameResolutionError(Status error) {
    handleError(error.augmentDescription("Name resolution failed"));
  }

  // Idempotent shutdown: completes the request stream, releases the shared
  // executor, and fails any pending interim transport (outside the lock).
  @Override
  public void shutdown() {
    InterimTransport<T> savedInterimTransport;
    synchronized (lock) {
      if (closed) {
        return;
      }
      closed = true;
      if (lbRequestWriter != null) {
        lbRequestWriter.onCompleted();
      }
      savedInterimTransport = interimTransport;
      interimTransport = null;
      executor = SharedResourceHolder.release(GrpcUtil.SHARED_CHANNEL_EXECUTOR, executor);
    }
    if (savedInterimTransport != null) {
      savedInterimTransport.closeWithError(SHUTDOWN_STATUS);
    }
  }

  // Reconnects when the transport that went down was the one to the LB server.
  @Override
  public void handleTransportShutdown(EquivalentAddressGroup addressGroup, Status status) {
    handleError(status.augmentDescription("Transport to LB server closed"));
    synchronized (lock) {
      if (closed) {
        return;
      }
      if (addressGroup.equals(lbAddresses)) {
        connectToLb();
      }
    }
  }

  // Records the error and fails any pending interim transport with it
  // (the close happens outside the lock).
  private void handleError(Status error) {
    InterimTransport<T> savedInterimTransport;
    synchronized (lock) {
      savedInterimTransport = interimTransport;
      interimTransport = null;
      lastError = error;
    }
    if (savedInterimTransport != null) {
      savedInterimTransport.closeWithError(error);
    }
  }

  // Tells the transport manager which address groups are still in use:
  // the LB server itself plus every currently known backend.
  private void updateRetainedTransports() {
    HashSet<EquivalentAddressGroup> addresses = new HashSet<EquivalentAddressGroup>();
    synchronized (lock) {
      if (lbAddresses != null) {
        addresses.add(lbAddresses);
      }
      if (servers != null) {
        for (SocketAddress addr : servers.keySet()) {
          addresses.add(new EquivalentAddressGroup(addr));
        }
      }
    }
    tm.updateRetainedTransports(addresses);
  }

  // Handles responses on the balanceLoad stream. Each instance represents one
  // stream; state updates are ignored unless this observer is still the
  // current one (lbResponseObserver == this).
  private class LbResponseObserver implements StreamObserver<LoadBalanceResponse> {
    @Override
    public void onNext(LoadBalanceResponse response) {
      logger.info("Got a LB response: " + response);
      // TODO(zhangkun83): make use of initialResponse
      // InitialLoadBalanceResponse initialResponse = response.getInitialResponse();
      RoundRobinServerList.Builder<T> listBuilder = new RoundRobinServerList.Builder<T>(tm);
      ServerList serverList = response.getServerList();
      HashMap<SocketAddress, ResolvedServerInfo> newServerMap =
          new HashMap<SocketAddress, ResolvedServerInfo>();
      // TODO(zhangkun83): honor expiration_interval
      for (Server server : serverList.getServersList()) {
        if (server.getDropRequest()) {
          // A null entry in the round-robin list represents "drop this request".
          listBuilder.add(null);
        } else {
          try {
            InetSocketAddress address = new InetSocketAddress(
                InetAddress.getByAddress(server.getIpAddress().toByteArray()), server.getPort());
            listBuilder.add(address);
            // TODO(zhangkun83): fill the LB token to the attributes, and insert it to the
            // application RPCs.
            if (!newServerMap.containsKey(address)) {
              newServerMap.put(address, new ResolvedServerInfo(address, Attributes.EMPTY));
            }
          } catch (UnknownHostException e) {
            // Addresses are raw bytes from the LB response; a malformed one is a
            // programming/protocol error surfaced as unchecked.
            throw new RuntimeException(e);
          }
        }
      }
      final RoundRobinServerList<T> newRoundRobinServerList = listBuilder.build();
      if (newRoundRobinServerList.size() == 0) {
        // initialResponse and serverList are under a oneof group. If initialResponse is set,
        // serverList will be empty.
        return;
      }
      InterimTransport<T> savedInterimTransport;
      synchronized (lock) {
        if (lbResponseObserver != this) {
          // Make sure I am still the current stream.
          return;
        }
        roundRobinServerList = newRoundRobinServerList;
        servers = newServerMap;
        savedInterimTransport = interimTransport;
        interimTransport = null;
      }
      updateRetainedTransports();
      if (savedInterimTransport != null) {
        // Drain pending picks onto real transports chosen round-robin.
        savedInterimTransport.closeWithRealTransports(new Supplier<T>() {
          @Override
          public T get() {
            return newRoundRobinServerList.getTransportForNextServer();
          }
        });
      }
    }

    @Override
    public void onError(Throwable error) {
      onStreamClosed(Status.fromThrowable(error)
          .augmentDescription("Stream to GRPCLB LoadBalancer had an error"));
    }

    @Override
    public void onCompleted() {
      onStreamClosed(Status.UNAVAILABLE.augmentDescription(
          "Stream to GRPCLB LoadBalancer was closed"));
    }

    // Common handling for stream termination: UNIMPLEMENTED means the peer is
    // not a grpclb server, so fall back to using it directly; anything else is
    // an error followed by an immediate re-negotiation.
    private void onStreamClosed(Status status) {
      if (status.getCode() == Status.Code.UNIMPLEMENTED) {
        InterimTransport<T> savedInterimTransport;
        final T transport;
        // This LB transport doesn't seem to be an actual LB server, if the LB address comes
        // directly from NameResolver, just use it to serve normal RPCs.
        // TODO(zhangkun83): check if lbAddresses are from NameResolver after we start getting
        // lbAddresses from LoadBalanceResponse.
        synchronized (lock) {
          if (lbResponseObserver != this) {
            return;
          }
          directTransport = transport = lbTransport;
          savedInterimTransport = interimTransport;
          interimTransport = null;
        }
        if (savedInterimTransport != null) {
          savedInterimTransport.closeWithRealTransports(Suppliers.ofInstance(transport));
        }
      } else {
        handleError(status);
        synchronized (lock) {
          if (lbResponseObserver != this) {
            return;
          }
          // TODO(zhangkun83): apply back-off, otherwise this will spam the server continually
          // with requests if the server tends to fail it for any reason.
          // I am still the active LB stream. Reopen the stream.
          startNegotiation();
        }
      }
    }
  }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.facebook.presto.sql.analyzer;

import com.facebook.presto.metadata.Metadata;
import com.facebook.presto.spi.PrestoWarning;
import com.facebook.presto.spi.WarningCollector;
import com.facebook.presto.sql.planner.ParameterRewriter;
import com.facebook.presto.sql.relational.FunctionResolution;
import com.facebook.presto.sql.tree.ArithmeticBinaryExpression;
import com.facebook.presto.sql.tree.ArithmeticUnaryExpression;
import com.facebook.presto.sql.tree.ArrayConstructor;
import com.facebook.presto.sql.tree.AstVisitor;
import com.facebook.presto.sql.tree.AtTimeZone;
import com.facebook.presto.sql.tree.BetweenPredicate;
import com.facebook.presto.sql.tree.BindExpression;
import com.facebook.presto.sql.tree.Cast;
import com.facebook.presto.sql.tree.CoalesceExpression;
import com.facebook.presto.sql.tree.ComparisonExpression;
import com.facebook.presto.sql.tree.CurrentTime;
import com.facebook.presto.sql.tree.DereferenceExpression;
import com.facebook.presto.sql.tree.ExistsPredicate;
import com.facebook.presto.sql.tree.Expression;
import com.facebook.presto.sql.tree.ExpressionTreeRewriter;
import com.facebook.presto.sql.tree.Extract;
import com.facebook.presto.sql.tree.FieldReference;
import com.facebook.presto.sql.tree.FunctionCall;
import com.facebook.presto.sql.tree.GroupingOperation;
import com.facebook.presto.sql.tree.Identifier;
import com.facebook.presto.sql.tree.IfExpression;
import com.facebook.presto.sql.tree.InListExpression;
import com.facebook.presto.sql.tree.InPredicate;
import com.facebook.presto.sql.tree.IsNotNullPredicate;
import com.facebook.presto.sql.tree.IsNullPredicate;
import com.facebook.presto.sql.tree.LambdaExpression;
import com.facebook.presto.sql.tree.LikePredicate;
import com.facebook.presto.sql.tree.Literal;
import com.facebook.presto.sql.tree.LogicalBinaryExpression;
import com.facebook.presto.sql.tree.Node;
import com.facebook.presto.sql.tree.NodeRef;
import com.facebook.presto.sql.tree.NotExpression;
import com.facebook.presto.sql.tree.NullIfExpression;
import com.facebook.presto.sql.tree.Parameter;
import com.facebook.presto.sql.tree.Row;
import com.facebook.presto.sql.tree.SearchedCaseExpression;
import com.facebook.presto.sql.tree.SimpleCaseExpression;
import com.facebook.presto.sql.tree.SortItem;
import com.facebook.presto.sql.tree.SubqueryExpression;
import com.facebook.presto.sql.tree.SubscriptExpression;
import com.facebook.presto.sql.tree.TryExpression;
import com.facebook.presto.sql.tree.WhenClause;
import com.facebook.presto.sql.tree.Window;
import com.facebook.presto.sql.tree.WindowFrame;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Multimap;

import javax.annotation.Nullable;

import java.util.Collection;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;

import static com.facebook.presto.spi.StandardWarningCode.PERFORMANCE_WARNING;
import static com.facebook.presto.spi.function.FunctionKind.AGGREGATE;
import static com.facebook.presto.sql.NodeUtils.getSortItemsFromOrderBy;
import static com.facebook.presto.sql.analyzer.ExpressionTreeUtils.checkAndGetColumnReferenceField;
import static com.facebook.presto.sql.analyzer.ExpressionTreeUtils.extractAggregateFunctions;
import static com.facebook.presto.sql.analyzer.ExpressionTreeUtils.extractWindowFunctions;
import static com.facebook.presto.sql.analyzer.FreeLambdaReferenceExtractor.hasFreeReferencesToLambdaArgument;
import static com.facebook.presto.sql.analyzer.ScopeReferenceExtractor.getReferencesToScope;
import static com.facebook.presto.sql.analyzer.ScopeReferenceExtractor.hasReferencesToScope;
import static com.facebook.presto.sql.analyzer.ScopeReferenceExtractor.isFieldFromScope;
import static com.facebook.presto.sql.analyzer.SemanticErrorCode.INVALID_PROCEDURE_ARGUMENTS;
import static com.facebook.presto.sql.analyzer.SemanticErrorCode.MUST_BE_AGGREGATE_OR_GROUP_BY;
import static com.facebook.presto.sql.analyzer.SemanticErrorCode.MUST_BE_AGGREGATION_FUNCTION;
import static com.facebook.presto.sql.analyzer.SemanticErrorCode.NESTED_AGGREGATION;
import static com.facebook.presto.sql.analyzer.SemanticErrorCode.NESTED_WINDOW;
import static com.facebook.presto.sql.analyzer.SemanticErrorCode.ORDER_BY_MUST_BE_IN_AGGREGATE;
import static com.facebook.presto.sql.analyzer.SemanticErrorCode.REFERENCE_TO_OUTPUT_ATTRIBUTE_WITHIN_ORDER_BY_AGGREGATION;
import static com.facebook.presto.sql.analyzer.SemanticErrorCode.REFERENCE_TO_OUTPUT_ATTRIBUTE_WITHIN_ORDER_BY_GROUPING;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.common.collect.ImmutableSet.toImmutableSet;
import static java.util.Objects.requireNonNull;

/**
 * Checks whether an expression is constant with respect to the group.
 * <p>
 * Used in two modes: verifying expressions against the source scope (SELECT/HAVING)
 * and verifying ORDER BY expressions, where references to output columns from within
 * aggregation arguments are additionally rejected. Throws {@link SemanticException}
 * on the first violation found; returns normally when the expression is valid.
 */
class AggregationAnalyzer
{
    // fields and expressions in the group by clause
    private final Set<FieldId> groupingFields;
    private final List<Expression> expressions;
    // mapping from column-reference expressions to the fields they resolve to
    private final Multimap<NodeRef<Expression>, FieldId> columnReferences;

    private final Metadata metadata;
    private final Analysis analysis;

    private final Scope sourceScope;
    // present only when verifying ORDER BY expressions
    private final Optional<Scope> orderByScope;
    private final WarningCollector warningCollector;
    private final FunctionResolution functionResolution;

    /**
     * Verifies {@code expression} (from SELECT or HAVING) against the GROUP BY
     * expressions and the source scope. Throws {@link SemanticException} on violation.
     */
    public static void verifySourceAggregations(
            List<Expression> groupByExpressions,
            Scope sourceScope,
            Expression expression,
            Metadata metadata,
            Analysis analysis,
            WarningCollector warningCollector)
    {
        AggregationAnalyzer analyzer = new AggregationAnalyzer(groupByExpressions, sourceScope, Optional.empty(), metadata, analysis, warningCollector);
        analyzer.analyze(expression);
    }

    /**
     * Verifies an ORDER BY {@code expression}; like {@link #verifySourceAggregations}
     * but with an additional ORDER BY (output) scope that enables extra checks on
     * references to query output columns.
     */
    public static void verifyOrderByAggregations(
            List<Expression> groupByExpressions,
            Scope sourceScope,
            Scope orderByScope,
            Expression expression,
            Metadata metadata,
            Analysis analysis,
            WarningCollector warningCollector)
    {
        AggregationAnalyzer analyzer = new AggregationAnalyzer(groupByExpressions, sourceScope, Optional.of(orderByScope), metadata, analysis, warningCollector);
        analyzer.analyze(expression);
    }

    private AggregationAnalyzer(List<Expression> groupByExpressions, Scope sourceScope, Optional<Scope> orderByScope, Metadata metadata, Analysis analysis, WarningCollector warningCollector)
    {
        requireNonNull(groupByExpressions, "groupByExpressions is null");
        requireNonNull(sourceScope, "sourceScope is null");
        requireNonNull(orderByScope, "orderByScope is null");
        requireNonNull(metadata, "metadata is null");
        requireNonNull(analysis, "analysis is null");
        requireNonNull(warningCollector, "warningCollector is null");

        this.sourceScope = sourceScope;
        this.warningCollector = warningCollector;
        this.orderByScope = orderByScope;
        this.metadata = metadata;
        this.analysis = analysis;
        this.functionResolution = new FunctionResolution(metadata.getFunctionAndTypeManager());
        // Rewrite bound parameters into their values so GROUP BY expressions compare
        // structurally against analyzed expressions.
        this.expressions = groupByExpressions.stream()
                .map(e -> ExpressionTreeRewriter.rewriteWith(new ParameterRewriter(analysis.getParameters()), e))
                .collect(toImmutableList());

        this.columnReferences = analysis.getColumnReferenceFields();

        // Fields referenced directly by GROUP BY expressions; only simple column
        // references contribute (non-references are matched structurally instead).
        this.groupingFields = groupByExpressions.stream()
                .map(NodeRef::of)
                .filter(columnReferences::containsKey)
                .map(columnReferences::get)
                .flatMap(Collection::stream)
                .collect(toImmutableSet());

        this.groupingFields.forEach(fieldId -> {
            checkState(isFieldFromScope(fieldId, sourceScope),
                    "Grouping field %s should originate from %s", fieldId, sourceScope.getRelationType());
        });
    }

    // Entry point: throws if the expression is not constant with respect to the group.
    private void analyze(Expression expression)
    {
        Visitor visitor = new Visitor();
        if (!visitor.process(expression, null)) {
            throw new SemanticException(MUST_BE_AGGREGATE_OR_GROUP_BY, expression, "'%s' must be an aggregate expression or appear in GROUP BY clause", expression);
        }
    }

    /**
     * visitor returns true if all expressions are constant with respect to the group.
     */
    private class Visitor
            extends AstVisitor<Boolean, Void>
    {
        @Override
        protected Boolean visitExpression(Expression node, Void context)
        {
            throw new UnsupportedOperationException("aggregation analysis not yet implemented for: " + node.getClass().getName());
        }

        @Override
        protected Boolean visitAtTimeZone(AtTimeZone node, Void context)
        {
            return process(node.getValue(), context);
        }

        @Override
        protected Boolean visitSubqueryExpression(SubqueryExpression node, Void context)
        {
            /*
             * Column reference can resolve to (a) some subquery's scope, (b) a projection (ORDER BY scope),
             * (c) source scope or (d) outer query scope (effectively a constant).
             * From AggregationAnalyzer's perspective, only case (c) needs verification.
             */
            getReferencesToScope(node, analysis, sourceScope)
                    .filter(expression -> !isGroupingKey(expression))
                    .findFirst()
                    .ifPresent(expression -> {
                        throw new SemanticException(MUST_BE_AGGREGATE_OR_GROUP_BY, expression, "Subquery uses '%s' which must appear in GROUP BY clause", expression);
                    });
            return true;
        }

        @Override
        protected Boolean visitExists(ExistsPredicate node, Void context)
        {
            checkState(node.getSubquery() instanceof SubqueryExpression);
            return process(node.getSubquery(), context);
        }

        @Override
        protected Boolean visitSubscriptExpression(SubscriptExpression node, Void context)
        {
            return process(node.getBase(), context) && process(node.getIndex(), context);
        }

        @Override
        protected Boolean visitArrayConstructor(ArrayConstructor node, Void context)
        {
            return node.getValues().stream().allMatch(expression -> process(expression, context));
        }

        @Override
        protected Boolean visitCast(Cast node, Void context)
        {
            return process(node.getExpression(), context);
        }

        @Override
        protected Boolean visitCoalesceExpression(CoalesceExpression node, Void context)
        {
            return node.getOperands().stream().allMatch(expression -> process(expression, context));
        }

        @Override
        protected Boolean visitNullIfExpression(NullIfExpression node, Void context)
        {
            return process(node.getFirst(), context) && process(node.getSecond(), context);
        }

        @Override
        protected Boolean visitExtract(Extract node, Void context)
        {
            return process(node.getExpression(), context);
        }

        @Override
        protected Boolean visitBetweenPredicate(BetweenPredicate node, Void context)
        {
            return process(node.getMin(), context) && process(node.getValue(), context) && process(node.getMax(), context);
        }

        // CURRENT_TIME/DATE/TIMESTAMP are constants within a query, hence group-constant.
        @Override
        protected Boolean visitCurrentTime(CurrentTime node, Void context)
        {
            return true;
        }

        @Override
        protected Boolean visitArithmeticBinary(ArithmeticBinaryExpression node, Void context)
        {
            return process(node.getLeft(), context) && process(node.getRight(), context);
        }

        @Override
        protected Boolean visitComparisonExpression(ComparisonExpression node, Void context)
        {
            return process(node.getLeft(), context) && process(node.getRight(), context);
        }

        @Override
        protected Boolean visitLiteral(Literal node, Void context)
        {
            return true;
        }

        @Override
        protected Boolean visitIsNotNullPredicate(IsNotNullPredicate node, Void context)
        {
            return process(node.getValue(), context);
        }

        @Override
        protected Boolean visitIsNullPredicate(IsNullPredicate node, Void context)
        {
            return process(node.getValue(), context);
        }

        @Override
        protected Boolean visitLikePredicate(LikePredicate node, Void context)
        {
            return process(node.getValue(), context) && process(node.getPattern(), context);
        }

        @Override
        protected Boolean visitInListExpression(InListExpression node, Void context)
        {
            return node.getValues().stream().allMatch(expression -> process(expression, context));
        }

        @Override
        protected Boolean visitInPredicate(InPredicate node, Void context)
        {
            return process(node.getValue(), context) && process(node.getValueList(), context);
        }

        /**
         * Aggregate calls are group-constant by definition, but their arguments must not
         * contain nested aggregates or window functions, and (when verifying ORDER BY)
         * must not reference output columns. Non-aggregates must not carry FILTER or
         * ORDER BY clauses.
         */
        @Override
        protected Boolean visitFunctionCall(FunctionCall node, Void context)
        {
            if (metadata.getFunctionAndTypeManager().getFunctionMetadata(analysis.getFunctionHandle(node)).getFunctionKind() == AGGREGATE) {
                if (functionResolution.isCountFunction(analysis.getFunctionHandle(node)) && node.isDistinct()) {
                    warningCollector.add(new PrestoWarning(
                            PERFORMANCE_WARNING,
                            "COUNT(DISTINCT xxx) can be a very expensive operation when the cardinality is high for xxx. In most scenarios, using approx_distinct instead would be enough"));
                }
                if (!node.getWindow().isPresent()) {
                    List<FunctionCall> aggregateFunctions = extractAggregateFunctions(analysis.getFunctionHandles(), node.getArguments(), metadata.getFunctionAndTypeManager());
                    List<FunctionCall> windowFunctions = extractWindowFunctions(node.getArguments());

                    if (!aggregateFunctions.isEmpty()) {
                        throw new SemanticException(NESTED_AGGREGATION,
                                node,
                                "Cannot nest aggregations inside aggregation '%s': %s",
                                node.getName(),
                                aggregateFunctions);
                    }

                    if (!windowFunctions.isEmpty()) {
                        throw new SemanticException(NESTED_WINDOW,
                                node,
                                "Cannot nest window functions inside aggregation '%s': %s",
                                node.getName(),
                                windowFunctions);
                    }

                    if (node.getOrderBy().isPresent()) {
                        List<Expression> sortKeys = node.getOrderBy().get().getSortItems().stream()
                                .map(SortItem::getSortKey)
                                .collect(toImmutableList());

                        if (node.isDistinct()) {
                            // With DISTINCT, every ORDER BY key must be among the aggregate's
                            // arguments (syntactically or via the fields it references).
                            List<FieldId> fieldIds = node.getArguments().stream()
                                    .map(NodeRef::of)
                                    .map(columnReferences::get)
                                    .filter(Objects::nonNull)
                                    .flatMap(Collection::stream)
                                    .collect(toImmutableList());
                            for (Expression sortKey : sortKeys) {
                                if (!node.getArguments().contains(sortKey) && !(columnReferences.containsKey(NodeRef.of(sortKey)) && fieldIds.containsAll(columnReferences.get(NodeRef.of(sortKey))))) {
                                    throw new SemanticException(
                                            ORDER_BY_MUST_BE_IN_AGGREGATE,
                                            sortKey,
                                            "For aggregate function with DISTINCT, ORDER BY expressions must appear in arguments");
                                }
                            }
                        }

                        // ensure that no output fields are referenced from ORDER BY clause
                        if (orderByScope.isPresent()) {
                            for (Expression sortKey : sortKeys) {
                                verifyNoOrderByReferencesToOutputColumns(
                                        sortKey,
                                        REFERENCE_TO_OUTPUT_ATTRIBUTE_WITHIN_ORDER_BY_AGGREGATION,
                                        "ORDER BY clause in aggregation function must not reference query output columns");
                            }
                        }
                    }

                    // ensure that no output fields are referenced from ORDER BY clause
                    if (orderByScope.isPresent()) {
                        node.getArguments().stream()
                                .forEach(argument -> verifyNoOrderByReferencesToOutputColumns(
                                        argument,
                                        REFERENCE_TO_OUTPUT_ATTRIBUTE_WITHIN_ORDER_BY_AGGREGATION,
                                        "Invalid reference to output projection attribute from ORDER BY aggregation"));
                    }

                    return true;
                }
            }
            else {
                if (node.getFilter().isPresent()) {
                    throw new SemanticException(MUST_BE_AGGREGATION_FUNCTION,
                            node,
                            "Filter is only valid for aggregation functions",
                            node);
                }
                if (node.getOrderBy().isPresent()) {
                    throw new SemanticException(MUST_BE_AGGREGATION_FUNCTION, node, "ORDER BY is only valid for aggregation functions");
                }
            }

            // Windowed aggregates fall through here: verify window spec and arguments.
            if (node.getWindow().isPresent() && !process(node.getWindow().get(), context)) {
                return false;
            }

            return node.getArguments().stream().allMatch(expression -> process(expression, context));
        }

        @Override
        protected Boolean visitLambdaExpression(LambdaExpression node, Void context)
        {
            return process(node.getBody(), context);
        }

        @Override
        protected Boolean visitBindExpression(BindExpression node, Void context)
        {
            for (Expression value : node.getValues()) {
                if (!process(value, context)) {
                    return false;
                }
            }
            return process(node.getFunction(), context);
        }

        @Override
        public Boolean visitWindow(Window node, Void context)
        {
            for (Expression expression : node.getPartitionBy()) {
                if (!process(expression, context)) {
                    throw new SemanticException(MUST_BE_AGGREGATE_OR_GROUP_BY,
                            expression,
                            "PARTITION BY expression '%s' must be an aggregate expression or appear in GROUP BY clause",
                            expression);
                }
            }

            for (SortItem sortItem : getSortItemsFromOrderBy(node.getOrderBy())) {
                Expression expression = sortItem.getSortKey();
                if (!process(expression, context)) {
                    throw new SemanticException(MUST_BE_AGGREGATE_OR_GROUP_BY,
                            expression,
                            "ORDER BY expression '%s' must be an aggregate expression or appear in GROUP BY clause",
                            expression);
                }
            }

            if (node.getFrame().isPresent()) {
                process(node.getFrame().get(), context);
            }

            return true;
        }

        @Override
        public Boolean visitWindowFrame(WindowFrame node, Void context)
        {
            Optional<Expression> start = node.getStart().getValue();
            if (start.isPresent()) {
                if (!process(start.get(), context)) {
                    throw new SemanticException(MUST_BE_AGGREGATE_OR_GROUP_BY, start.get(), "Window frame start must be an aggregate expression or appear in GROUP BY clause");
                }
            }
            if (node.getEnd().isPresent() && node.getEnd().get().getValue().isPresent()) {
                Expression endValue = node.getEnd().get().getValue().get();
                if (!process(endValue, context)) {
                    throw new SemanticException(MUST_BE_AGGREGATE_OR_GROUP_BY, endValue, "Window frame end must be an aggregate expression or appear in GROUP BY clause");
                }
            }

            return true;
        }

        @Override
        protected Boolean visitIdentifier(Identifier node, Void context)
        {
            // Lambda-argument references are local to the lambda, not group-dependent.
            if (analysis.getLambdaArgumentReferences().containsKey(NodeRef.of(node))) {
                return true;
            }
            return isGroupingKey(node);
        }

        @Override
        protected Boolean visitDereferenceExpression(DereferenceExpression node, Void context)
        {
            if (columnReferences.containsKey(NodeRef.<Expression>of(node))) {
                return isGroupingKey(node);
            }

            // Allow SELECT col1.f1 FROM table1 GROUP BY col1
            return process(node.getBase(), context);
        }

        // True when the referenced field is a GROUP BY key, or (in ORDER BY mode)
        // a field of the output scope.
        private boolean isGroupingKey(Expression node)
        {
            FieldId fieldId = checkAndGetColumnReferenceField(node, columnReferences);

            if (orderByScope.isPresent() && isFieldFromScope(fieldId, orderByScope.get())) {
                return true;
            }

            return groupingFields.contains(fieldId);
        }

        @Override
        protected Boolean visitFieldReference(FieldReference node, Void context)
        {
            if (orderByScope.isPresent()) {
                return true;
            }

            FieldId fieldId = checkAndGetColumnReferenceField(node, columnReferences);
            boolean inGroup = groupingFields.contains(fieldId);
            if (!inGroup) {
                // Build a readable column name (ordinal, alias-qualified, or bare) for the error.
                Field field = sourceScope.getRelationType().getFieldByIndex(node.getFieldIndex());

                String column;
                if (!field.getName().isPresent()) {
                    column = Integer.toString(node.getFieldIndex() + 1);
                }
                else if (field.getRelationAlias().isPresent()) {
                    column = String.format("'%s.%s'", field.getRelationAlias().get(), field.getName().get());
                }
                else {
                    column = "'" + field.getName().get() + "'";
                }

                throw new SemanticException(MUST_BE_AGGREGATE_OR_GROUP_BY, node, "Column %s not in GROUP BY clause", column);
            }
            return inGroup;
        }

        @Override
        protected Boolean visitArithmeticUnary(ArithmeticUnaryExpression node, Void context)
        {
            return process(node.getValue(), context);
        }

        @Override
        protected Boolean visitNotExpression(NotExpression node, Void context)
        {
            return process(node.getValue(), context);
        }

        @Override
        protected Boolean visitLogicalBinaryExpression(LogicalBinaryExpression node, Void context)
        {
            return process(node.getLeft(), context) && process(node.getRight(), context);
        }

        @Override
        protected Boolean visitIfExpression(IfExpression node, Void context)
        {
            ImmutableList.Builder<Expression> expressions = ImmutableList.<Expression>builder()
                    .add(node.getCondition())
                    .add(node.getTrueValue());
            if (node.getFalseValue().isPresent()) {
                expressions.add(node.getFalseValue().get());
            }

            return expressions.build().stream().allMatch(expression -> process(expression, context));
        }

        @Override
        protected Boolean visitSimpleCaseExpression(SimpleCaseExpression node, Void context)
        {
            if (!process(node.getOperand(), context)) {
                return false;
            }

            for (WhenClause whenClause : node.getWhenClauses()) {
                if (!process(whenClause.getOperand(), context) || !process(whenClause.getResult(), context)) {
                    return false;
                }
            }

            if (node.getDefaultValue().isPresent() && !process(node.getDefaultValue().get(), context)) {
                return false;
            }

            return true;
        }

        @Override
        protected Boolean visitSearchedCaseExpression(SearchedCaseExpression node, Void context)
        {
            for (WhenClause whenClause : node.getWhenClauses()) {
                if (!process(whenClause.getOperand(), context) || !process(whenClause.getResult(), context)) {
                    return false;
                }
            }

            return !node.getDefaultValue().isPresent() || process(node.getDefaultValue().get(), context);
        }

        @Override
        protected Boolean visitTryExpression(TryExpression node, Void context)
        {
            return process(node.getInnerExpression(), context);
        }

        @Override
        public Boolean visitRow(Row node, final Void context)
        {
            return node.getItems().stream()
                    .allMatch(item -> process(item, context));
        }

        @Override
        public Boolean visitParameter(Parameter node, Void context)
        {
            // In DESCRIBE, parameter values are not bound yet, so accept unconditionally.
            if (analysis.isDescribe()) {
                return true;
            }
            List<Expression> parameters = analysis.getParameters();
            checkArgument(node.getPosition() < parameters.size(), "Invalid parameter number %s, max values is %s", node.getPosition(), parameters.size() - 1);
            return process(parameters.get(node.getPosition()), context);
        }

        // NOTE(review): missing @Override — AstVisitor appears to declare this method;
        // adding the annotation would be a safe follow-up (token change, out of scope here).
        public Boolean visitGroupingOperation(GroupingOperation node, Void context)
        {
            // ensure that no output fields are referenced from ORDER BY clause
            if (orderByScope.isPresent()) {
                node.getGroupingColumns().forEach(groupingColumn -> verifyNoOrderByReferencesToOutputColumns(
                        groupingColumn,
                        REFERENCE_TO_OUTPUT_ATTRIBUTE_WITHIN_ORDER_BY_GROUPING,
                        "Invalid reference to output of SELECT clause from grouping() expression in ORDER BY"));
            }
            Optional<Expression> argumentNotInGroupBy = node.getGroupingColumns().stream()
                    .filter(argument -> !columnReferences.containsKey(NodeRef.of(argument)) || !isGroupingKey(argument))
                    .findAny();
            if (argumentNotInGroupBy.isPresent()) {
                throw new SemanticException(
                        INVALID_PROCEDURE_ARGUMENTS,
                        node,
                        "The arguments to GROUPING() must be expressions referenced by the GROUP BY at the associated query level. Mismatch due to %s.",
                        argumentNotInGroupBy.get());
            }
            return true;
        }

        // Short-circuit: an expression that structurally equals a GROUP BY expression is
        // group-constant, unless it references output columns (ORDER BY mode) or free
        // lambda arguments, in which case full per-node analysis still applies.
        @Override
        public Boolean process(Node node, @Nullable Void context)
        {
            if (expressions.stream().anyMatch(node::equals)
                    && (!orderByScope.isPresent() || !hasOrderByReferencesToOutputColumns(node))
                    && !hasFreeReferencesToLambdaArgument(node, analysis)) {
                return true;
            }

            return super.process(node, context);
        }
    }

    // Only called when orderByScope is present (guarded at every call site).
    private boolean hasOrderByReferencesToOutputColumns(Node node)
    {
        return hasReferencesToScope(node, analysis, orderByScope.get());
    }

    // Throws errorCode on the first reference in `node` to the ORDER BY (output) scope.
    private void verifyNoOrderByReferencesToOutputColumns(Node node, SemanticErrorCode errorCode, String errorString)
    {
        getReferencesToScope(node, analysis, orderByScope.get())
                .findFirst()
                .ifPresent(expression -> {
                    throw new SemanticException(errorCode, expression, errorString);
                });
    }
}
/* * Copyright 2015 OpenCB * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.opencb.biodata.models.variation; /** * This class models a Cosmic mutation, the attributes are found in the TXT file * User: imedina * Date: 9/21/13 * Time: 7:37 PM * To change this template use File | Settings | File Templates. */ public class Mutation { private String id; private String chromosome; private int start; private int end; private String strand; private String protein; private int proteinStart; private int proteinEnd; private String gene; private String transcriptId; private String hgncId; private String sampleId; private String sampleName; private String sampleSource; private String tumourId; private String primarySite; private String siteSubtype; private String primaryHistology; private String histologySubtype; private String genomeWideScreen; private String mutationCDS; private String mutationAA; private String mutationZygosity; private String status; private String pubmed; private String tumourOrigin; private String description; private String source; // private String comments; public Mutation() { } public Mutation(String id, String chromosome, int start, int end, String strand, String protein, int proteinStart, int proteinEnd, String gene, String transcriptId, String hgncId, String sampleId, String sampleName, String sampleSource, String tumourId, String primarySite, String siteSubtype, String primaryHistology, String histologySubtype, String genomeWideScreen, String 
mutationCDS, String mutationAA, String mutationZygosity, String status, String pubmed, String tumourOrigin, String description, String source) { this.id = id; this.chromosome = chromosome; this.start = start; this.end = end; this.strand = strand; this.protein = protein; this.proteinStart = proteinStart; this.proteinEnd = proteinEnd; this.gene = gene; this.transcriptId = transcriptId; this.hgncId = hgncId; this.sampleId = sampleId; this.sampleName = sampleName; this.sampleSource = sampleSource; this.tumourId = tumourId; this.primarySite = primarySite; this.siteSubtype = siteSubtype; this.primaryHistology = primaryHistology; this.histologySubtype = histologySubtype; this.genomeWideScreen = genomeWideScreen; this.mutationCDS = mutationCDS; this.mutationAA = mutationAA; this.mutationZygosity = mutationZygosity; this.status = status; this.pubmed = pubmed; this.tumourOrigin = tumourOrigin; this.description = description; this.source = source; } public String getChromosome() { return chromosome; } public void setChromosome(String chromosome) { this.chromosome = chromosome; } public int getStart() { return start; } public void setStart(int start) { this.start = start; } public int getEnd() { return end; } public void setEnd(int end) { this.end = end; } public String getStrand() { return strand; } public void setStrand(String strand) { this.strand = strand; } public String getProtein() { return protein; } public void setProtein(String protein) { this.protein = protein; } public int getProteinStart() { return proteinStart; } public void setProteinStart(int proteinStart) { this.proteinStart = proteinStart; } public int getProteinEnd() { return proteinEnd; } public void setProteinEnd(int proteinEnd) { this.proteinEnd = proteinEnd; } public String getGene() { return gene; } public void setGene(String gene) { this.gene = gene; } public String getTranscriptId() { return transcriptId; } public void setTranscriptId(String transcriptId) { this.transcriptId = transcriptId; } public 
String getHgncId() { return hgncId; } public void setHgncId(String hgncId) { this.hgncId = hgncId; } public String getSampleName() { return sampleName; } public void setSampleName(String sampleName) { this.sampleName = sampleName; } public String getSampleId() { return sampleId; } public void setSampleId(String sampleId) { this.sampleId = sampleId; } public String getTumourId() { return tumourId; } public void setTumourId(String tumourId) { this.tumourId = tumourId; } public String getPrimarySite() { return primarySite; } public void setPrimarySite(String primarySite) { this.primarySite = primarySite; } public String getSiteSubtype() { return siteSubtype; } public void setSiteSubtype(String siteSubtype) { this.siteSubtype = siteSubtype; } public String getPrimaryHistology() { return primaryHistology; } public void setPrimaryHistology(String primaryHistology) { this.primaryHistology = primaryHistology; } public String getHistologySubtype() { return histologySubtype; } public void setHistologySubtype(String histologySubtype) { this.histologySubtype = histologySubtype; } public String getGenomeWideScreen() { return genomeWideScreen; } public void setGenomeWideScreen(String genomeWideScreen) { this.genomeWideScreen = genomeWideScreen; } public String getId() { return id; } public void setId(String id) { this.id = id; } public String getMutationCDS() { return mutationCDS; } public void setMutationCDS(String mutationCDS) { this.mutationCDS = mutationCDS; } public String getMutationAA() { return mutationAA; } public void setMutationAA(String mutationAA) { this.mutationAA = mutationAA; } public String getMutationZygosity() { return mutationZygosity; } public void setMutationZygosity(String mutationZygosity) { this.mutationZygosity = mutationZygosity; } public String getStatus() { return status; } public void setStatus(String status) { this.status = status; } public String getPubmed() { return pubmed; } public void setPubmed(String pubmed) { this.pubmed = pubmed; } public 
String getSampleSource() { return sampleSource; } public void setSampleSource(String sampleSource) { this.sampleSource = sampleSource; } public String getTumourOrigin() { return tumourOrigin; } public void setTumourOrigin(String tumourOrigin) { this.tumourOrigin = tumourOrigin; } public String getDescription() { return description; } public void setDescription(String description) { this.description = description; } public String getSource() { return source; } public void setSource(String source) { this.source = source; } }
import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import static org.junit.jupiter.api.Assertions.assertEquals; /** * Created by Stephen West on 29/11/2018. */ class TestBoard { private static final int[] POS_0_0 = {0, 0}; private static final int[] POS_2_2 = {2, 2}; private static final int[] POS_4_4 = {4, 4}; private SnakeGame sg; private static final int[] SIZE_5x5 = {5, 5}; private static final int[] SIZE_3x5 = {3, 5}; private static final int[] SIZE_4x4 = {4, 4}; @BeforeEach void setUp() { sg = new SnakeGame(SIZE_5x5); } @Test void testEmptyBoardOFSize5x5isCreatedCorrectly() { sg = new SnakeGame(SIZE_5x5); String expBoardString = TestUtils.getTestCase("data01.txt", "e5x5"); assertEquals(expBoardString, sg.getBoardString()); } @Test void testEmptyBoardOFSize3x5isCreatedCorrectly() { sg = new SnakeGame(SIZE_3x5); assertEquals(TestUtils.getTestCase("data01.txt", "e3x5"), sg.getBoardString()); } @Test void testEmptyBoardOFSize4x4isCreatedCorrectly() { sg = new SnakeGame(SIZE_4x4); assertEquals(TestUtils.getTestCase("data01.txt", "e4x4"), sg.getBoardString()); } @Test void testThatAllGetPosDetailIsEmptyOnAnEmptyBoard() { for (int row = 0; row < SIZE_5x5[0]; row++) { for (int col = 0; col < SIZE_5x5[1]; col++) { assertEquals(BLOCK.IS_EMPTY, sg.at(new int[]{row, col})); } } } @Test void thatSnakePlaceAtGivenPosReturnsIS_SNAKE() { sg.setSnakeAt(POS_0_0); assertEquals(BLOCK.IS_SNAKE, sg.at(POS_0_0)); } @Test void thatSnakeCanMoveInUPDirections() { sg.setSnakeAt(POS_2_2); sg.moveSnake(MOVE.UP); assertEquals(BLOCK.IS_SNAKE, sg.at(pos(3, 2))); sg = new SnakeGame(new int[]{5,5}); sg.setSnakeAt(pos(0,0)); sg.moveSnake(MOVE.UP); assertEquals(BLOCK.IS_SNAKE, sg.at(pos(1, 0))); } @Test void thatSnakeCanMoveInDOWNDirections() { sg.setSnakeAt(POS_2_2); sg.moveSnake(MOVE.DOWN); assertEquals(BLOCK.IS_SNAKE, sg.at(pos(1, 2))); } @Test void thatSnakeCanMoveInRIGHTDirections() { sg.setSnakeAt(POS_2_2); sg.moveSnake(MOVE.RIGHT); assertEquals(BLOCK.IS_SNAKE, 
sg.at(pos(2, 1))); } @Test void thatSnakeCanMoveInLEFTDirections() { sg.setSnakeAt(POS_2_2); sg.moveSnake(MOVE.LEFT); assertEquals(BLOCK.IS_SNAKE, sg.at(pos(2, 3))); } @Test void theSnakeCannotMoveUPOfTheBoard() { sg.setSnakeAt(POS_4_4); sg.moveSnake(MOVE.UP); // snake must not have moved assertEquals(BLOCK.IS_SNAKE, sg.at(POS_4_4)); } @Test void theSnakeCannotMoveDOWNOfTheBoard() { sg.setSnakeAt(POS_0_0); sg.moveSnake(MOVE.DOWN); // snake must not have moved assertEquals(BLOCK.IS_SNAKE, sg.at(POS_0_0)); } @Test void theSnakeCannotMoveLEFTOfTheBoard() { sg.setSnakeAt(POS_4_4); sg.moveSnake(MOVE.LEFT); // snake must not have moved assertEquals(BLOCK.IS_SNAKE, sg.at(POS_4_4)); } @Test void theSnakeCannotMoveRIGHTOfTheBoard() { sg.setSnakeAt(POS_0_0); sg.moveSnake(MOVE.RIGHT); // snake must not have moved assertEquals(BLOCK.IS_SNAKE, sg.at(POS_0_0)); } @Test void testToStringOnSnakeGame() { sg.setSnakeAt(POS_0_0); assertEquals(TestUtils.getTestCase("data01.txt", "e5x5s0_0"), sg.toString()); } @Test void thatByDefaultPos0_0IS_WALL() { sg.setSnakeAt(POS_2_2); sg.setHasBorderWall(true); assertEquals(BLOCK.IS_WALL, sg.at(POS_0_0)); } @Test void thatByDefaultPos4_4IS_WALL() { sg.setSnakeAt(POS_2_2); sg.setHasBorderWall(true); assertEquals(BLOCK.IS_WALL, sg.at(POS_4_4)); } @Test void testThatByDefaultEdgeOFABoardIS_WALL() { sg.setHasBorderWall(true); sg.setSnakeAt(POS_2_2); assertEquals(BLOCK.IS_WALL, sg.at(POS_0_0)); assertEquals(BLOCK.IS_WALL, sg.at(POS_4_4)); assertEquals(TestUtils.getTestCase("data01.txt", "e5x5test01"), sg.toString()); } @Test void testThatIfSnakeMovesUPIntoWallItBecomesIS_DEAD() { testHittingWallKillsSnake(pos(3,3),MOVE.UP); assertEquals(TestUtils.getTestCase("data01.txt", "e5x5test03"), sg.toString()); } @Test void testThatIfSnakeMovesLEFTIntoWallItBecomesIS_DEAD() { testHittingWallKillsSnake(pos(3,3),MOVE.LEFT); } @Test void testThatIfSnakeMovesDOWNIntoWallItBecomesIS_DEAD() { testHittingWallKillsSnake(pos(1,1),MOVE.DOWN); } @Test void 
testThatIfSnakeMovesRIGHTIntoWallItBecomesIS_DEAD() { testHittingWallKillsSnake(pos(1,1),MOVE.RIGHT); } private void testHittingWallKillsSnake(int[] initialPos, MOVE direction){ sg.setHasBorderWall(true); sg.setSnakeAt(initialPos); sg.moveSnake(direction); assertEquals(BLOCK.IS_DEAD, sg.at(initialPos)); } @Test public void testPlacingOfAMushroomATGivenPos(){ sg.setMushroomAt(pos(1,1)); assertEquals(BLOCK.IS_MUSHROOM, sg.at(pos(1,1))); } @Test void testPlacingOfMushroomOnWallFails(){ sg.setHasBorderWall(true); int[] pos = pos(0,0); sg.setMushroomAt(pos); assertEquals(BLOCK.IS_WALL, sg.at(pos)); } @Test void testPlacingOfMushroomOnSnakeFails(){ sg.setHasBorderWall(true); int[] pos = pos(1,1); sg.setSnakeAt(pos); sg.setMushroomAt(pos); assertEquals(BLOCK.IS_SNAKE, sg.at(pos)); } @Test void testMovingSnakeHeadUPOnAMushroomIncreaseSnakesSizeBy1(){ testSnakeEatingMushrooms(POS_2_2, MOVE.UP); } @Test void testMovingSnakeHeadLEFTOnAMushroomIncreaseSnakesSizeBy1(){ testSnakeEatingMushrooms(POS_2_2, MOVE.LEFT); } @Test void testMovingSnakeHeadRIGHTOnAMushroomIncreaseSnakesSizeBy1(){ testSnakeEatingMushrooms(POS_2_2, MOVE.RIGHT); } @Test void testMovingSnakeHeadDOWNOnAMushroomIncreaseSnakesSizeBy1(){ testSnakeEatingMushrooms(POS_2_2, MOVE.RIGHT); } private void testSnakeEatingMushrooms(int[] pos, MOVE direction){ int[] adjPos = sg.getAdjacentPos(pos,direction); sg.setSnakeAt(pos); sg.setMushroomAt(adjPos); sg.moveSnake(direction); assertEquals(BLOCK.IS_SNAKE, sg.at(pos)); assertEquals(BLOCK.IS_SNAKE, sg.at(adjPos)); } @Test void testMovingInUPDirectionWithSnakeThatIsOfLenghTwoAfterEatingMushroom(){ testSnakeEatingMushroomAndMovingInSameDirection(POS_0_0,MOVE.UP); } private void testSnakeEatingMushroomAndMovingInSameDirection(int[] pos, MOVE direction) { int[] adjPos = sg.getAdjacentPos(pos,direction); int[] nextAdjPos = sg.getAdjacentPos(adjPos,direction); sg.setSnakeAt(pos); sg.setMushroomAt(adjPos); assertEquals(TestUtils.getTestCase("data01.txt", "e5x5test04"), 
sg.toString()); sg.moveSnake(direction); System.out.println(sg.toString()); sg.moveSnake(direction); System.out.println(sg.toString()); assertEquals(BLOCK.IS_EMPTY, sg.at(adjPos)); assertEquals(BLOCK.IS_SNAKE, sg.at(adjPos)); assertEquals(BLOCK.IS_SNAKE, sg.at(nextAdjPos)); assertEquals(TestUtils.getTestCase("data01.txt", "e5x5test05"), sg.toString()); } /* private void testEatingMushroom(){ }*/ // helper methods private int[] pos(int row, int col) { return new int[]{row, col}; } }
/*
 * Copyright 2000-2013 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.openapi.vfs.newvfs.impl;

import com.intellij.openapi.application.Application;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.PathManager;
import com.intellij.openapi.application.impl.ApplicationImpl;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.project.ProjectManager;
import com.intellij.openapi.roots.OrderEnumerator;
import com.intellij.openapi.roots.ProjectRootManager;
import com.intellij.openapi.util.io.FileAttributes;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.JarFileSystem;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.openapi.vfs.VfsUtilCore;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.newvfs.NewVirtualFile;
import com.intellij.openapi.vfs.newvfs.NewVirtualFileSystem;
import com.intellij.openapi.vfs.newvfs.RefreshQueue;
import com.intellij.openapi.vfs.newvfs.events.VFileCreateEvent;
import com.intellij.openapi.vfs.newvfs.persistent.FSRecords;
import com.intellij.openapi.vfs.newvfs.persistent.PersistentFS;
import com.intellij.util.*;
import com.intellij.util.containers.ContainerUtil;
import gnu.trove.THashSet;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.*;

/**
 * In-memory directory node of the VFS tree. Caches its children (looked up
 * lazily from persistence) in a single sorted array, and additionally records
 * "adopted children" — names that were looked up but not found — so that a
 * later refresh can detect files created under those names.
 *
 * @author max
 */
public class VirtualDirectoryImpl extends VirtualFileSystemEntry {
  // Extra consistency checking is enabled only in unit-test mode (it is expensive).
  public static boolean CHECK = ApplicationManager.getApplication().isUnitTestMode();

  // Sentinel parent for adopted children and "cached as absent" results; never exposed to callers.
  static final VirtualDirectoryImpl NULL_VIRTUAL_FILE = new VirtualDirectoryImpl("*?;%NULL", null, LocalFileSystem.getInstance(), -42, 0) {
    public String toString() {
      return "NULL";
    }
  };

  private final NewVirtualFileSystem myFS;

  /**
   * The array is logically divided into the two parts:
   * - left subarray for storing real child files
   * - right subarray for storing "adopted children" files.
   * "Adopted children" are fake files which are used for storing names which were accessed
   * via findFileByName() or similar calls. We have to store these unsuccessful find attempts
   * to be able to correctly refresh in the future.
   * See usages of {@link #getSuspiciousNames()} in the {@link com.intellij.openapi.vfs.newvfs.persistent.RefreshWorker}
   *
   * Guarded by this, files in each subarray are sorted according to the compareNameTo() comparator
   * TODO: revise the whole adopted scheme
   */
  private VirtualFileSystemEntry[] myChildren = EMPTY_ARRAY;

  public VirtualDirectoryImpl(@NonNls @NotNull final String name,
                              @Nullable final VirtualDirectoryImpl parent,
                              @NotNull final NewVirtualFileSystem fs,
                              final int id,
                              @PersistentFS.Attributes final int attributes) {
    super(name, parent, id, attributes);
    myFS = fs;
  }

  @Override
  @NotNull
  public NewVirtualFileSystem getFileSystem() {
    return myFS;
  }

  /**
   * Core child lookup. Looks in the cached array first, then in persistence;
   * optionally fires a refresh-style create event ({@code doRefresh}) when the
   * child exists on disk but not in the VFS. A miss is recorded as an adopted child.
   */
  @Nullable
  private VirtualFileSystemEntry findChild(@NotNull String name,
                                           final boolean doRefresh,
                                           boolean ensureCanonicalName,
                                           @NotNull NewVirtualFileSystem delegate) {
    boolean ignoreCase = !delegate.isCaseSensitive();
    Comparator comparator = getComparator(name, ignoreCase);
    VirtualFileSystemEntry result = doFindChild(name, ensureCanonicalName, delegate, comparator);
    if (result == NULL_VIRTUAL_FILE) {
      // cached as absent; optionally probe the real file system
      result = doRefresh ? createAndFindChildWithEventFire(name, delegate) : null;
    }
    else if (result != null) {
      if (doRefresh && delegate.isDirectory(result) != result.isDirectory()) {
        // file/directory kind changed on disk: refresh and retry once (without refresh)
        RefreshQueue.getInstance().refresh(false, false, null, result);
        result = findChild(name, false, ensureCanonicalName, delegate);
      }
    }
    if (result == null) {
      addToAdoptedChildren(name, !delegate.isCaseSensitive(), comparator);
    }
    return result;
  }

  /** Records {@code name} as an unsuccessful lookup ("adopted child") for later refresh. */
  private synchronized void addToAdoptedChildren(@NotNull final String name,
                                                 final boolean ignoreCase,
                                                 @NotNull Comparator comparator) {
    long r = findIndexInBoth(myChildren, comparator);
    int indexInReal = (int)(r >> 32);
    int indexInAdopted = (int)r;
    if (indexInAdopted >= 0) return; //already added
    if (!allChildrenLoaded()) {
      insertChildAt(new AdoptedChild(name), indexInAdopted);
    }
    if (indexInReal >= 0) {
      // there suddenly can be that we ask to add name to adopted whereas it already contains in the real part
      // in this case we should remove it from there
      removeFromArray(indexInReal);
    }
    assertConsistency(myChildren, ignoreCase, name);
  }

  /** Fake child marking a name that was looked up but not found; parent is the NULL sentinel. */
  private static class AdoptedChild extends VirtualFileImpl {
    private AdoptedChild(String name) {
      super(name, NULL_VIRTUAL_FILE, -42, -1);
    }
  }

  @Nullable // null if there can't be a child with this name, NULL_VIRTUAL_FILE
  private synchronized VirtualFileSystemEntry doFindChildInArray(@NotNull Comparator comparator) {
    VirtualFileSystemEntry[] array = myChildren;
    long r = findIndexInBoth(array, comparator);
    int indexInReal = (int)(r >> 32);
    int indexInAdopted = (int)r;
    if (indexInAdopted >= 0) return NULL_VIRTUAL_FILE;
    if (indexInReal >= 0) {
      return array[indexInReal];
    }
    return null;
  }

  @Nullable // null if there can't be a child with this name, NULL_VIRTUAL_FILE if cached as absent, the file if found
  private VirtualFileSystemEntry doFindChild(@NotNull String name,
                                             boolean ensureCanonicalName,
                                             @NotNull NewVirtualFileSystem delegate,
                                             @NotNull Comparator comparator) {
    if (name.isEmpty()) {
      return null;
    }
    VirtualFileSystemEntry found = doFindChildInArray(comparator);
    if (found != null) return found;
    if (allChildrenLoaded()) {
      // all children are cached, so an array miss means the file does not exist
      return NULL_VIRTUAL_FILE;
    }
    if (ensureCanonicalName) {
      VirtualFile fake = new FakeVirtualFile(this, name);
      name = delegate.getCanonicallyCasedName(fake);
      if (name.isEmpty()) return null;
    }
    synchronized (this) {
      // do not extract getId outside the synchronized block since it will cause a concurrency problem.
      int id = ourPersistence.getId(this, name, delegate);
      if (id <= 0) {
        return null;
      }
      // maybe another doFindChild() sneaked in the middle
      VirtualFileSystemEntry[] array = myChildren;
      long r = findIndexInBoth(array, comparator);
      int indexInReal = (int)(r >> 32);
      int indexInAdopted = (int)r;
      if (indexInAdopted >= 0) return NULL_VIRTUAL_FILE; // double check
      if (indexInReal >= 0) {
        return array[indexInReal];
      }
      String shorty = new String(name);
      VirtualFileSystemEntry child = createChild(shorty, id, delegate); // So we don't hold whole char[] buffer of a lengthy path
      VirtualFileSystemEntry[] after = myChildren;
      if (after != array) {
        // in tests when we call assertAccessInTests it can load a huge number of files which lead to children modification
        // so fall back to slow path
        addChild(child);
      }
      else {
        insertChildAt(child, indexInReal);
        assertConsistency(myChildren, !delegate.isCaseSensitive(), name);
      }
      return child;
    }
  }

  /** Builds a comparator keyed on {@code name}; note the negation to compare key-to-file. */
  @NotNull
  private static Comparator getComparator(@NotNull final String name, final boolean ignoreCase) {
    return new Comparator() {
      @Override
      public int compareMyKeyTo(@NotNull VirtualFileSystemEntry file) {
        return -file.compareNameTo(name, ignoreCase);
      }
    };
  }

  // synchronized read gives a consistent snapshot of the (volatile-free) myChildren reference
  private synchronized VirtualFileSystemEntry[] getArraySafely() {
    return myChildren;
  }

  /** Creates the in-memory entry for a persisted child, marking it dirty if the FS requires. */
  @NotNull
  public VirtualFileSystemEntry createChild(@NotNull String name, int id, @NotNull NewVirtualFileSystem delegate) {
    VirtualFileSystemEntry child;
    final int attributes = ourPersistence.getFileAttributes(id);
    if (PersistentFS.isDirectory(attributes)) {
      child = new VirtualDirectoryImpl(name, this, delegate, id, attributes);
    }
    else {
      child = new VirtualFileImpl(name, this, id, attributes);
      //noinspection TestOnlyProblems
      assertAccessInTests(child, delegate);
    }
    if (delegate.markNewFilesAsDirty()) {
      child.markDirty();
    }
    return child;
  }

  // --- test-only machinery: restrict which file-system roots tests may touch ---
  private static final boolean IS_UNDER_TEAMCITY = System.getProperty("bootstrap.testcases") != null;
  private static final boolean SHOULD_PERFORM_ACCESS_CHECK = System.getenv("NO_FS_ROOTS_ACCESS_CHECK") == null;
  private static final Collection<String> ourAdditionalRoots = new THashSet<String>();

  @TestOnly
  public static void allowRootAccess(@NotNull String... roots) {
    for (String root : roots) {
      ourAdditionalRoots.add(FileUtil.toSystemIndependentName(root));
    }
  }

  @TestOnly
  public static void disallowRootAccess(@NotNull String... roots) {
    for (String root : roots) {
      ourAdditionalRoots.remove(FileUtil.toSystemIndependentName(root));
    }
  }

  /** Fails (in TeamCity unit-test runs only) when a test touches a file outside the allowed roots. */
  @TestOnly
  private static void assertAccessInTests(@NotNull VirtualFileSystemEntry child, @NotNull NewVirtualFileSystem delegate) {
    final Application application = ApplicationManager.getApplication();
    if (IS_UNDER_TEAMCITY &&
        SHOULD_PERFORM_ACCESS_CHECK &&
        application.isUnitTestMode() &&
        application instanceof ApplicationImpl &&
        ((ApplicationImpl)application).isComponentsCreated()) {
      if (delegate != LocalFileSystem.getInstance() && delegate != JarFileSystem.getInstance()) {
        return;
      }
      // root' children are loaded always
      if (child.getParent() == null || child.getParent().getParent() == null) return;
      Set<String> allowed = allowedRoots();
      boolean isUnder = allowed == null;
      if (!isUnder) {
        String childPath = child.getPath();
        if (delegate == JarFileSystem.getInstance()) {
          VirtualFile local = JarFileSystem.getInstance().getVirtualFileForJar(child);
          assert local != null : child;
          childPath = local.getPath();
        }
        for (String root : allowed) {
          if (FileUtil.startsWith(childPath, root)) {
            isUnder = true;
            break;
          }
          if (root.startsWith(JarFileSystem.PROTOCOL_PREFIX)) {
            String rootLocalPath = FileUtil.toSystemIndependentName(PathUtil.toPresentableUrl(root));
            isUnder = FileUtil.startsWith(childPath, rootLocalPath);
            if (isUnder) break;
          }
        }
      }
      assert isUnder || allowed.isEmpty() : "File accessed outside allowed roots: " + child + ";\nAllowed roots: " + new ArrayList<String>(allowed);
    }
  }

  // null means we were unable to get roots, so do not check access
  @Nullable
  @TestOnly
  private static Set<String> allowedRoots() {
    if (insideGettingRoots) return null;
    Project[] openProjects = ProjectManager.getInstance().getOpenProjects();
    if (openProjects.length == 0) return null;
    final Set<String> allowed = new THashSet<String>();
    allowed.add(FileUtil.toSystemIndependentName(PathManager.getHomePath()));
    try {
      // classes output directory (two levels above this class's resource root)
      URL outUrl = Application.class.getResource("/");
      String output = new File(outUrl.toURI()).getParentFile().getParentFile().getPath();
      allowed.add(FileUtil.toSystemIndependentName(output));
    }
    catch (URISyntaxException ignored) {
    }
    allowed.add(FileUtil.toSystemIndependentName(SystemProperties.getJavaHome()));
    allowed.add(FileUtil.toSystemIndependentName(new File(FileUtil.getTempDirectory()).getParent()));
    allowed.add(FileUtil.toSystemIndependentName(System.getProperty("java.io.tmpdir")));
    allowed.add(FileUtil.toSystemIndependentName(SystemProperties.getUserHome()));
    for (Project project : openProjects) {
      if (!project.isInitialized()) {
        return null; // all is allowed
      }
      for (VirtualFile root : ProjectRootManager.getInstance(project).getContentRoots()) {
        allowed.add(root.getPath());
      }
      for (VirtualFile root : getAllRoots(project)) {
        allowed.add(StringUtil.trimEnd(root.getPath(), JarFileSystem.JAR_SEPARATOR));
      }
      String location = project.getBasePath();
      assert location != null : project;
      allowed.add(FileUtil.toSystemIndependentName(location));
    }
    allowed.addAll(ourAdditionalRoots);
    return allowed;
  }

  // re-entrancy guard: collecting roots itself loads files and must not be access-checked
  private static boolean insideGettingRoots;

  @TestOnly
  private static VirtualFile[] getAllRoots(@NotNull Project project) {
    insideGettingRoots = true;
    final Set<VirtualFile> roots = new THashSet<VirtualFile>();
    final OrderEnumerator enumerator = ProjectRootManager.getInstance(project).orderEntries();
    ContainerUtil.addAll(roots, enumerator.getClassesRoots());
    ContainerUtil.addAll(roots, enumerator.getSourceRoots());
    insideGettingRoots = false;
    return VfsUtilCore.toVirtualFileArray(roots);
  }

  /** Probes the real FS for {@code name}; if it exists, fires a create event and returns the new child. */
  @Nullable
  private VirtualFileSystemEntry createAndFindChildWithEventFire(@NotNull String name, @NotNull NewVirtualFileSystem delegate) {
    final VirtualFile fake = new FakeVirtualFile(this, name);
    final FileAttributes attributes = delegate.getAttributes(fake);
    if (attributes == null) return null;
    final String realName = delegate.getCanonicallyCasedName(fake);
    final VFileCreateEvent event = new VFileCreateEvent(null, this, realName, attributes.isDirectory(), true);
    RefreshQueue.getInstance().processSingleEvent(event);
    return findChild(realName);
  }

  @Override
  @Nullable
  public NewVirtualFile refreshAndFindChild(@NotNull String name) {
    return findChild(name, true, true, getFileSystem());
  }

  /** Binary search restricted to one half (real or adopted) of the children array. */
  private static int findIndexInOneHalf(final VirtualFileSystemEntry[] array,
                                        int start,
                                        int end,
                                        final boolean isAdopted,
                                        @NotNull final Comparator comparator) {
    return binSearch(array, start, end, new Comparator() {
      @Override
      public int compareMyKeyTo(@NotNull VirtualFileSystemEntry file) {
        // entries from the wrong half sort strictly before/after everything in the target half
        if (isAdopted && !isAdoptedChild(file)) return 1;
        if (!isAdopted && isAdoptedChild(file)) return -1;
        return comparator.compareMyKeyTo(file);
      }
    });
  }

  // returns two int indices packed into one long. left index is for the real file array half,
  // right is for the adopted children name array. Negative values encode insertion points, as
  // in Arrays.binarySearch: -(insertionPoint) - 1.
  private static long findIndexInBoth(@NotNull VirtualFileSystemEntry[] array, @NotNull Comparator comparator) {
    int high = array.length - 1;
    if (high == -1) {
      return pack(-1, -1);
    }
    int low = 0;
    boolean startInAdopted = isAdoptedChild(array[low]);
    boolean endInAdopted = isAdoptedChild(array[high]);
    if (startInAdopted == endInAdopted) {
      // the array consists of one half only; search just that half
      int index = findIndexInOneHalf(array, low, high + 1, startInAdopted, comparator);
      int otherIndex = startInAdopted ? -1 : -array.length - 1;
      return startInAdopted ? pack(otherIndex, index) : pack(index, otherIndex);
    }
    boolean adopted = false;
    int cmp = -1;
    int mid = -1;
    int foundIndex = -1;
    while (low <= high) {
      mid = low + high >>> 1;
      VirtualFileSystemEntry file = array[mid];
      cmp = comparator.compareMyKeyTo(file);
      adopted = isAdoptedChild(file);
      if (cmp == 0) {
        foundIndex = mid;
        break;
      }
      if ((adopted || cmp <= 0) && (!adopted || cmp >= 0)) {
        // the boundary between the two halves lies within [low, high]: search each half separately
        int indexInAdopted = findIndexInOneHalf(array, mid + 1, high + 1, true, comparator);
        int indexInReal = findIndexInOneHalf(array, low, mid, false, comparator);
        return pack(indexInReal, indexInAdopted);
      }
      if (cmp > 0) {
        low = mid + 1;
      }
      else {
        high = mid - 1;
      }
    }
    // key not found.
    if (cmp != 0) foundIndex = -low-1;
    int newStart = adopted ? low : mid + 1;
    int newEnd = adopted ? mid + 1 : high + 1;
    int theOtherHalfIndex = newStart < newEnd ? findIndexInOneHalf(array, newStart, newEnd, !adopted, comparator) : -newStart-1;
    return adopted ? pack(theOtherHalfIndex, foundIndex) : pack(foundIndex, theOtherHalfIndex);
  }

  // packs (real index, adopted index) into one long: real in the high 32 bits, adopted in the low 32
  private static long pack(int indexInReal, int indexInAdopted) {
    return (long)indexInReal << 32 | (indexInAdopted & 0xffffffffL);
  }

  @Override
  @Nullable
  public synchronized NewVirtualFile findChildIfCached(@NotNull String name) {
    final boolean ignoreCase = !getFileSystem().isCaseSensitive();
    Comparator comparator = getComparator(name, ignoreCase);
    VirtualFileSystemEntry found = doFindChildInArray(comparator);
    return found == NULL_VIRTUAL_FILE ? null : found;
  }

  /** Iterates children known to persistence, loading them into the cache if needed. */
  @Override
  @NotNull
  public Iterable<VirtualFile> iterInDbChildren() {
    if (!ourPersistence.wereChildrenAccessed(this)) {
      return Collections.emptyList();
    }
    if (!ourPersistence.areChildrenLoaded(this)) {
      final String[] names = ourPersistence.listPersisted(this);
      final NewVirtualFileSystem delegate = PersistentFS.replaceWithNativeFS(getFileSystem());
      for (String name : names) {
        findChild(name, false, false, delegate);
      }
    }
    return getCachedChildren();
  }

  /** Returns all children, merging already-cached entries with the persisted list. */
  @Override
  @NotNull
  public synchronized VirtualFile[] getChildren() {
    VirtualFileSystemEntry[] children = myChildren;
    NewVirtualFileSystem delegate = getFileSystem();
    final boolean ignoreCase = !delegate.isCaseSensitive();
    if (allChildrenLoaded()) {
      assertConsistency(children, ignoreCase);
      return children;
    }
    FSRecords.NameId[] childrenIds = ourPersistence.listAll(this);
    VirtualFileSystemEntry[] result;
    if (childrenIds.length == 0) {
      result = EMPTY_ARRAY;
    }
    else {
      Arrays.sort(childrenIds, new java.util.Comparator<FSRecords.NameId>() {
        @Override
        public int compare(FSRecords.NameId o1, FSRecords.NameId o2) {
          String name1 = o1.name;
          String name2 = o2.name;
          return compareNames(name1, name2, ignoreCase);
        }
      });
      result = new VirtualFileSystemEntry[childrenIds.length];
      int delegateI = 0;
      int i = 0;
      int cachedEnd = getAdoptedChildrenStart();
      // merge (sorted) children[0..cachedEnd) and childrenIds into the result array.
      // file that is already in children array must be copied into the result as is
      // for the file name that is new in childrenIds the file must be created and copied into result
      while (delegateI < childrenIds.length) {
        FSRecords.NameId nameId = childrenIds[delegateI];
        while (i < cachedEnd && children[i].compareNameTo(nameId.name, ignoreCase) < 0) i++; // skip files that are not in childrenIds
        VirtualFileSystemEntry resultFile;
        if (i < cachedEnd && children[i].compareNameTo(nameId.name, ignoreCase) == 0) {
          resultFile = children[i++];
        }
        else {
          resultFile = createChild(nameId.name, nameId.id, delegate);
        }
        result[delegateI++] = resultFile;
      }
      assertConsistency(result, ignoreCase, children, cachedEnd, childrenIds);
    }
    if (getId() > 0) {
      myChildren = result;
      setChildrenLoaded();
    }
    return result;
  }

  /**
   * Test-mode invariant check: adopted entries trail the real ones, each half is
   * strictly sorted, and no duplicates exist. No-op unless {@link #CHECK} is set.
   */
  private void assertConsistency(@NotNull VirtualFileSystemEntry[] array, boolean ignoreCase, @NotNull Object... details) {
    if (!CHECK) return;
    boolean allChildrenLoaded = allChildrenLoaded();
    for (int i = 0; i < array.length; i++) {
      VirtualFileSystemEntry file = array[i];
      boolean isAdopted = isAdoptedChild(file);
      assert !isAdopted || !allChildrenLoaded;
      if (isAdopted && i != array.length - 1) {
        assert isAdoptedChild(array[i + 1]);
      }
      if (i != 0) {
        VirtualFileSystemEntry prev = array[i - 1];
        String prevName = prev.getName();
        int cmp = file.compareNameTo(prevName, ignoreCase);
        if (cmp == 0) {
          Function<VirtualFileSystemEntry, String> verboseToString = new Function<VirtualFileSystemEntry, String>() {
            @Override
            public String fun(VirtualFileSystemEntry entry) {
              return entry + " (name: '" + entry.getName()
                     + "', " + entry.getClass()
                     + ", parent:"+entry.getParent()
                     + "; id:"+entry.getId()
                     + "; FS:" +entry.getFileSystem()
                     + "; delegate.attrs:" +entry.getFileSystem().getAttributes(entry)
                     + "; caseSensitive:" +entry.getFileSystem().isCaseSensitive()
                     + "; canonical:" +entry.getFileSystem().getCanonicallyCasedName(entry)
                     + ") ";
            }
          };
          String children = StringUtil.join(array, verboseToString, ",");
          throw new AssertionError(
            verboseToString.fun(prev) + " equals to " + verboseToString.fun(file) + "; children: " + children + "\nDetails: " + ContainerUtil.map(
              details, new Function<Object, Object>() {
                @Override
                public Object fun(Object o) {
                  return o instanceof Object[] ? Arrays.toString((Object[])o) : o;
                }
              }));
        }
        if (isAdopted == isAdoptedChild(prev)) {
          assert cmp > 0 : "Not sorted. "+Arrays.toString(details);
        }
      }
    }
  }

  @Override
  @Nullable
  public VirtualFileSystemEntry findChild(@NotNull final String name) {
    return findChild(name, false, true, getFileSystem());
  }

  /** Looks up a child by persistent id; linear scan of the cache, then by name via persistence. */
  public VirtualFileSystemEntry findChildById(int id, boolean cachedOnly) {
    VirtualFile[] array = getArraySafely();
    VirtualFileSystemEntry result = null;
    for (VirtualFile file : array) {
      VirtualFileSystemEntry withId = (VirtualFileSystemEntry)file;
      if (withId.getId() == id) {
        result = withId;
        break;
      }
    }
    if (result != null) return result;
    if (cachedOnly) return null;
    String name = ourPersistence.getName(id);
    return findChild(name, false, false, getFileSystem());
  }

  @NotNull
  @Override
  public byte[] contentsToByteArray() throws IOException {
    throw new IOException("Cannot get content of directory: " + this);
  }

  /** Inserts {@code child} into the real half, evicting a matching adopted entry if present. */
  public synchronized void addChild(@NotNull VirtualFileSystemEntry child) {
    VirtualFileSystemEntry[] array = myChildren;
    final String childName = child.getName();
    final boolean ignoreCase = !getFileSystem().isCaseSensitive();
    long r = findIndexInBoth(array, getComparator(childName, ignoreCase));
    int indexInReal = (int)(r >> 32);
    int indexInAdopted = (int)r;
    if (indexInAdopted >= 0) {
      // remove Adopted first
      removeFromArray(indexInAdopted);
    }
    if (indexInReal < 0) {
      insertChildAt(child, indexInReal);
    }
    // else already stored
    assertConsistency(myChildren, ignoreCase, child);
  }

  /** Inserts at an encoded insertion point ({@code -(insertionPoint) - 1}, binarySearch style). */
  private void insertChildAt(@NotNull VirtualFileSystemEntry file, int negativeIndex) {
    @NotNull VirtualFileSystemEntry[] array = myChildren;
    VirtualFileSystemEntry[] appended = new VirtualFileSystemEntry[array.length + 1];
    int i = -negativeIndex -1;
    System.arraycopy(array, 0, appended, 0, i);
    appended[i] = file;
    System.arraycopy(array, i, appended, i + 1, array.length - i);
    myChildren = appended;
  }

  // removal works by converting the real entry into an adopted one (see addToAdoptedChildren)
  public synchronized void removeChild(@NotNull VirtualFile file) {
    boolean ignoreCase = !getFileSystem().isCaseSensitive();
    String name = file.getName();
    addToAdoptedChildren(name, ignoreCase, getComparator(name, ignoreCase));
    assertConsistency(myChildren, ignoreCase, file);
  }

  private void removeFromArray(int index) {
    myChildren = ArrayUtil.remove(myChildren, index, new ArrayFactory<VirtualFileSystemEntry>() {
      @NotNull
      @Override
      public VirtualFileSystemEntry[] create(int count) {
        return new VirtualFileSystemEntry[count];
      }
    });
  }

  public boolean allChildrenLoaded() {
    return getFlagInt(CHILDREN_CACHED);
  }

  private void setChildrenLoaded() {
    setFlagInt(CHILDREN_CACHED, true);
  }

  /** Names of the adopted children — i.e. lookups that failed and may need re-checking on refresh. */
  @NotNull
  public synchronized List<String> getSuspiciousNames() {
    List<VirtualFile> suspicious = new SubList<VirtualFile>(myChildren, getAdoptedChildrenStart(), myChildren.length);
    return ContainerUtil.map2List(suspicious, new Function<VirtualFile, String>() {
      @Override
      public String fun(VirtualFile file) {
        return file.getName();
      }
    });
  }

  /** Index of the first adopted entry (== length of the real half). */
  private int getAdoptedChildrenStart() {
    int index = binSearch(myChildren, 0, myChildren.length, new Comparator() {
      @Override
      public int compareMyKeyTo(@NotNull VirtualFileSystemEntry v) {
        return isAdoptedChild(v) ? -1 : 1;
      }
    });
    return -index - 1;
  }

  private static boolean isAdoptedChild(@NotNull VirtualFileSystemEntry v) {
    return v.getParent() == NULL_VIRTUAL_FILE;
  }

  /** Asymmetric comparator: compares the search key to a stored entry. */
  private interface Comparator {
    int compareMyKeyTo(@NotNull VirtualFileSystemEntry file);
  }

  /** Standard binary search over [start, end); returns -(insertionPoint) - 1 on a miss. */
  private static int binSearch(@NotNull VirtualFileSystemEntry[] array,
                               int start,
                               int end,
                               @NotNull Comparator comparator) {
    int low = start;
    int high = end - 1;
    assert low >= 0 && low <= array.length;
    while (low <= high) {
      int mid = low + high >>> 1;
      int cmp = comparator.compareMyKeyTo(array[mid]);
      if (cmp > 0) {
        low = mid + 1;
      }
      else if (cmp < 0) {
        high = mid - 1;
      }
      else {
        return mid; // key found
      }
    }
    return -(low + 1); // key not found.
  }

  @Override
  public boolean isDirectory() {
    return true;
  }

  @Override
  @NotNull
  public synchronized List<VirtualFile> getCachedChildren() {
    return new SubList<VirtualFile>(myChildren, 0, getAdoptedChildrenStart());
  }

  @Override
  public InputStream getInputStream() throws IOException {
    throw new IOException("getInputStream() must not be called against a directory: " + getUrl());
  }

  @Override
  @NotNull
  public OutputStream getOutputStream(final Object requestor, final long newModificationStamp, final long newTimeStamp) throws IOException {
    throw new IOException("getOutputStream() must not be called against a directory: " + getUrl());
  }

  @Override
  public void markDirtyRecursively() {
    markDirty();
    markDirtyRecursivelyInternal();
  }

  // optimisation: do not travel up unnecessary
  private void markDirtyRecursivelyInternal() {
    for (VirtualFileSystemEntry child : getArraySafely()) {
      if (isAdoptedChild(child)) break; // adopted entries trail the real ones; stop at the first
      child.markDirtyInternal();
      if (child instanceof VirtualDirectoryImpl) {
        ((VirtualDirectoryImpl)child).markDirtyRecursivelyInternal();
      }
    }
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.runtime.checkpoint; import org.apache.flink.annotation.Internal; import org.apache.flink.annotation.VisibleForTesting; import org.apache.flink.api.common.JobID; import org.apache.flink.api.common.time.Time; import org.apache.flink.configuration.ConfigConstants; import org.apache.flink.runtime.checkpoint.hooks.MasterHooks; import org.apache.flink.runtime.checkpoint.savepoint.SavepointLoader; import org.apache.flink.runtime.checkpoint.savepoint.SavepointStore; import org.apache.flink.runtime.concurrent.ApplyFunction; import org.apache.flink.runtime.concurrent.Future; import org.apache.flink.runtime.concurrent.impl.FlinkCompletableFuture; import org.apache.flink.runtime.execution.ExecutionState; import org.apache.flink.runtime.executiongraph.Execution; import org.apache.flink.runtime.executiongraph.ExecutionAttemptID; import org.apache.flink.runtime.executiongraph.ExecutionJobVertex; import org.apache.flink.runtime.executiongraph.ExecutionVertex; import org.apache.flink.runtime.executiongraph.JobStatusListener; import org.apache.flink.runtime.jobgraph.JobStatus; import org.apache.flink.runtime.jobgraph.JobVertexID; import org.apache.flink.runtime.jobgraph.OperatorID; import 
org.apache.flink.runtime.jobgraph.tasks.ExternalizedCheckpointSettings; import org.apache.flink.runtime.messages.checkpoint.AcknowledgeCheckpoint; import org.apache.flink.runtime.messages.checkpoint.DeclineCheckpoint; import org.apache.flink.runtime.state.SharedStateRegistry; import org.apache.flink.runtime.state.SharedStateRegistryFactory; import org.apache.flink.runtime.state.TaskStateHandles; import org.apache.flink.runtime.taskmanager.DispatcherThreadFactory; import org.apache.flink.util.Preconditions; import org.apache.flink.util.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.annotation.Nullable; import java.util.ArrayDeque; import java.util.HashMap; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.concurrent.Executor; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.ScheduledThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import static org.apache.flink.util.Preconditions.checkArgument; import static org.apache.flink.util.Preconditions.checkNotNull; /** * The checkpoint coordinator coordinates the distributed snapshots of operators and state. * It triggers the checkpoint by sending the messages to the relevant tasks and collects the * checkpoint acknowledgements. It also collects and maintains the overview of the state handles * reported by the tasks that acknowledge the checkpoint. 
*/ public class CheckpointCoordinator { private static final Logger LOG = LoggerFactory.getLogger(CheckpointCoordinator.class); /** The number of recent checkpoints whose IDs are remembered */ private static final int NUM_GHOST_CHECKPOINT_IDS = 16; // ------------------------------------------------------------------------ /** Coordinator-wide lock to safeguard the checkpoint updates */ private final Object lock = new Object(); /** Lock specially to make sure that trigger requests do not overtake each other. * This is not done with the coordinator-wide lock, because as part of triggering, * blocking operations may happen (distributed atomic counters). * Using a dedicated lock, we avoid blocking the processing of 'acknowledge/decline' * messages during that phase. */ private final Object triggerLock = new Object(); /** The job whose checkpoint this coordinator coordinates */ private final JobID job; /** Default checkpoint properties **/ private final CheckpointProperties checkpointProperties; /** The executor used for asynchronous calls, like potentially blocking I/O */ private final Executor executor; /** Tasks who need to be sent a message when a checkpoint is started */ private final ExecutionVertex[] tasksToTrigger; /** Tasks who need to acknowledge a checkpoint before it succeeds */ private final ExecutionVertex[] tasksToWaitFor; /** Tasks who need to be sent a message when a checkpoint is confirmed */ private final ExecutionVertex[] tasksToCommitTo; /** Map from checkpoint ID to the pending checkpoint */ private final Map<Long, PendingCheckpoint> pendingCheckpoints; /** Completed checkpoints. Implementations can be blocking. Make sure calls to methods * accessing this don't block the job manager actor and run asynchronously. */ private final CompletedCheckpointStore completedCheckpointStore; /** Default directory for persistent checkpoints; <code>null</code> if none configured. 
* THIS WILL BE REPLACED BY PROPER STATE-BACKEND METADATA WRITING */ @Nullable private final String checkpointDirectory; /** A list of recent checkpoint IDs, to identify late messages (vs invalid ones) */ private final ArrayDeque<Long> recentPendingCheckpoints; /** Checkpoint ID counter to ensure ascending IDs. In case of job manager failures, these * need to be ascending across job managers. */ private final CheckpointIDCounter checkpointIdCounter; /** The base checkpoint interval. Actual trigger time may be affected by the * max concurrent checkpoints and minimum-pause values */ private final long baseInterval; /** The max time (in ms) that a checkpoint may take */ private final long checkpointTimeout; /** The min time(in ms) to delay after a checkpoint could be triggered. Allows to * enforce minimum processing time between checkpoint attempts */ private final long minPauseBetweenCheckpointsNanos; /** The maximum number of checkpoints that may be in progress at the same time */ private final int maxConcurrentCheckpointAttempts; /** The timer that handles the checkpoint timeouts and triggers periodic checkpoints */ private final ScheduledThreadPoolExecutor timer; /** The master checkpoint hooks executed by this checkpoint coordinator */ private final HashMap<String, MasterTriggerRestoreHook<?>> masterHooks; /** Actor that receives status updates from the execution graph this coordinator works for */ private JobStatusListener jobStatusListener; /** The number of consecutive failed trigger attempts */ private final AtomicInteger numUnsuccessfulCheckpointsTriggers = new AtomicInteger(0); /** A handle to the current periodic trigger, to cancel it when necessary */ private ScheduledFuture<?> currentPeriodicTrigger; /** The timestamp (via {@link System#nanoTime()}) when the last checkpoint completed */ private long lastCheckpointCompletionNanos; /** Flag whether a triggered checkpoint should immediately schedule the next checkpoint. 
* Non-volatile, because only accessed in synchronized scope */ private boolean periodicScheduling; /** Flag whether a trigger request could not be handled immediately. Non-volatile, because only * accessed in synchronized scope */ private boolean triggerRequestQueued; /** Flag marking the coordinator as shut down (not accepting any messages any more) */ private volatile boolean shutdown; /** Optional tracker for checkpoint statistics. */ @Nullable private CheckpointStatsTracker statsTracker; /** A factory for SharedStateRegistry objects */ private final SharedStateRegistryFactory sharedStateRegistryFactory; /** Registry that tracks state which is shared across (incremental) checkpoints */ private SharedStateRegistry sharedStateRegistry; // -------------------------------------------------------------------------------------------- public CheckpointCoordinator( JobID job, long baseInterval, long checkpointTimeout, long minPauseBetweenCheckpoints, int maxConcurrentCheckpointAttempts, ExternalizedCheckpointSettings externalizeSettings, ExecutionVertex[] tasksToTrigger, ExecutionVertex[] tasksToWaitFor, ExecutionVertex[] tasksToCommitTo, CheckpointIDCounter checkpointIDCounter, CompletedCheckpointStore completedCheckpointStore, @Nullable String checkpointDirectory, Executor executor, SharedStateRegistryFactory sharedStateRegistryFactory) { // sanity checks checkArgument(baseInterval > 0, "Checkpoint timeout must be larger than zero"); checkArgument(checkpointTimeout >= 1, "Checkpoint timeout must be larger than zero"); checkArgument(minPauseBetweenCheckpoints >= 0, "minPauseBetweenCheckpoints must be >= 0"); checkArgument(maxConcurrentCheckpointAttempts >= 1, "maxConcurrentCheckpointAttempts must be >= 1"); if (externalizeSettings.externalizeCheckpoints() && checkpointDirectory == null) { throw new IllegalStateException("CheckpointConfig says to persist periodic " + "checkpoints, but no checkpoint directory has been configured. 
You can " + "configure configure one via key '" + ConfigConstants.CHECKPOINTS_DIRECTORY_KEY + "'."); } // max "in between duration" can be one year - this is to prevent numeric overflows if (minPauseBetweenCheckpoints > 365L * 24 * 60 * 60 * 1_000) { minPauseBetweenCheckpoints = 365L * 24 * 60 * 60 * 1_000; } // it does not make sense to schedule checkpoints more often then the desired // time between checkpoints if (baseInterval < minPauseBetweenCheckpoints) { baseInterval = minPauseBetweenCheckpoints; } this.job = checkNotNull(job); this.baseInterval = baseInterval; this.checkpointTimeout = checkpointTimeout; this.minPauseBetweenCheckpointsNanos = minPauseBetweenCheckpoints * 1_000_000; this.maxConcurrentCheckpointAttempts = maxConcurrentCheckpointAttempts; this.tasksToTrigger = checkNotNull(tasksToTrigger); this.tasksToWaitFor = checkNotNull(tasksToWaitFor); this.tasksToCommitTo = checkNotNull(tasksToCommitTo); this.pendingCheckpoints = new LinkedHashMap<>(); this.checkpointIdCounter = checkNotNull(checkpointIDCounter); this.completedCheckpointStore = checkNotNull(completedCheckpointStore); this.checkpointDirectory = checkpointDirectory; this.executor = checkNotNull(executor); this.sharedStateRegistryFactory = checkNotNull(sharedStateRegistryFactory); this.sharedStateRegistry = sharedStateRegistryFactory.create(executor); this.recentPendingCheckpoints = new ArrayDeque<>(NUM_GHOST_CHECKPOINT_IDS); this.masterHooks = new HashMap<>(); this.timer = new ScheduledThreadPoolExecutor(1, new DispatcherThreadFactory(Thread.currentThread().getThreadGroup(), "Checkpoint Timer")); // make sure the timer internally cleans up and does not hold onto stale scheduled tasks this.timer.setRemoveOnCancelPolicy(true); this.timer.setContinueExistingPeriodicTasksAfterShutdownPolicy(false); this.timer.setExecuteExistingDelayedTasksAfterShutdownPolicy(false); if (externalizeSettings.externalizeCheckpoints()) { LOG.info("Persisting periodic checkpoints externally at {}.", 
checkpointDirectory); checkpointProperties = CheckpointProperties.forExternalizedCheckpoint(externalizeSettings.deleteOnCancellation()); } else { checkpointProperties = CheckpointProperties.forStandardCheckpoint(); } try { // Make sure the checkpoint ID enumerator is running. Possibly // issues a blocking call to ZooKeeper. checkpointIDCounter.start(); } catch (Throwable t) { throw new RuntimeException("Failed to start checkpoint ID counter: " + t.getMessage(), t); } } // -------------------------------------------------------------------------------------------- // Configuration // -------------------------------------------------------------------------------------------- /** * Adds the given master hook to the checkpoint coordinator. This method does nothing, if * the checkpoint coordinator already contained a hook with the same ID (as defined via * {@link MasterTriggerRestoreHook#getIdentifier()}). * * @param hook The hook to add. * @return True, if the hook was added, false if the checkpoint coordinator already * contained a hook with the same ID. */ public boolean addMasterHook(MasterTriggerRestoreHook<?> hook) { checkNotNull(hook); final String id = hook.getIdentifier(); checkArgument(!StringUtils.isNullOrWhitespaceOnly(id), "The hook has a null or empty id"); synchronized (lock) { if (!masterHooks.containsKey(id)) { masterHooks.put(id, hook); return true; } else { return false; } } } /** * Gets the number of currently registered master hooks. */ public int getNumberOfRegisteredMasterHooks() { synchronized (lock) { return masterHooks.size(); } } /** * Sets the checkpoint stats tracker. * * @param statsTracker The checkpoint stats tracker. 
*/ public void setCheckpointStatsTracker(@Nullable CheckpointStatsTracker statsTracker) { this.statsTracker = statsTracker; } // -------------------------------------------------------------------------------------------- // Clean shutdown // -------------------------------------------------------------------------------------------- /** * Shuts down the checkpoint coordinator. * * <p>After this method has been called, the coordinator does not accept * and further messages and cannot trigger any further checkpoints. */ public void shutdown(JobStatus jobStatus) throws Exception { synchronized (lock) { if (!shutdown) { shutdown = true; LOG.info("Stopping checkpoint coordinator for job " + job); periodicScheduling = false; triggerRequestQueued = false; // shut down the thread that handles the timeouts and pending triggers timer.shutdownNow(); // clear and discard all pending checkpoints for (PendingCheckpoint pending : pendingCheckpoints.values()) { pending.abortError(new Exception("Checkpoint Coordinator is shutting down")); } pendingCheckpoints.clear(); completedCheckpointStore.shutdown(jobStatus); checkpointIdCounter.shutdown(jobStatus); } } } public boolean isShutdown() { return shutdown; } // -------------------------------------------------------------------------------------------- // Handling checkpoints and messages // -------------------------------------------------------------------------------------------- /** * Triggers a savepoint with the given savepoint directory as a target. * * @param timestamp The timestamp for the savepoint. * @param targetDirectory Target directory for the savepoint. 
* @return A future to the completed checkpoint * @throws IllegalStateException If no savepoint directory has been * specified and no default savepoint directory has been * configured * @throws Exception Failures during triggering are forwarded */ public Future<CompletedCheckpoint> triggerSavepoint(long timestamp, String targetDirectory) throws Exception { checkNotNull(targetDirectory, "Savepoint target directory"); CheckpointProperties props = CheckpointProperties.forStandardSavepoint(); // Create the unique savepoint directory final String savepointDirectory = SavepointStore .createSavepointDirectory(targetDirectory, job); CheckpointTriggerResult triggerResult = triggerCheckpoint( timestamp, props, savepointDirectory, false); Future<CompletedCheckpoint> result; if (triggerResult.isSuccess()) { result = triggerResult.getPendingCheckpoint().getCompletionFuture(); } else { Throwable cause = new Exception("Failed to trigger savepoint: " + triggerResult.getFailureReason().message()); result = FlinkCompletableFuture.completedExceptionally(cause); } // Make sure to remove the created base directory on Exceptions result.exceptionallyAsync(new ApplyFunction<Throwable, Void>() { @Override public Void apply(Throwable value) { try { SavepointStore.deleteSavepointDirectory(savepointDirectory); } catch (Throwable t) { LOG.warn("Failed to delete savepoint directory " + savepointDirectory + " after failed savepoint.", t); } return null; } }, executor); return result; } /** * Triggers a new standard checkpoint and uses the given timestamp as the checkpoint * timestamp. * * @param timestamp The timestamp for the checkpoint. * @param isPeriodic Flag indicating whether this triggered checkpoint is * periodic. If this flag is true, but the periodic scheduler is disabled, * the checkpoint will be declined. * @return <code>true</code> if triggering the checkpoint succeeded. 
*/ public boolean triggerCheckpoint(long timestamp, boolean isPeriodic) { return triggerCheckpoint(timestamp, checkpointProperties, checkpointDirectory, isPeriodic).isSuccess(); } /** * Test method to trigger a checkpoint/savepoint. * * @param timestamp The timestamp for the checkpoint. * @param options The checkpoint options. * @return A future to the completed checkpoint */ @VisibleForTesting @Internal public Future<CompletedCheckpoint> triggerCheckpoint(long timestamp, CheckpointOptions options) throws Exception { switch (options.getCheckpointType()) { case SAVEPOINT: return triggerSavepoint(timestamp, options.getTargetLocation()); case FULL_CHECKPOINT: CheckpointTriggerResult triggerResult = triggerCheckpoint(timestamp, checkpointProperties, checkpointDirectory, false); if (triggerResult.isSuccess()) { return triggerResult.getPendingCheckpoint().getCompletionFuture(); } else { Throwable cause = new Exception("Failed to trigger checkpoint: " + triggerResult.getFailureReason().message()); return FlinkCompletableFuture.completedExceptionally(cause); } default: throw new IllegalArgumentException("Unknown checkpoint type: " + options.getCheckpointType()); } } @VisibleForTesting CheckpointTriggerResult triggerCheckpoint( long timestamp, CheckpointProperties props, String targetDirectory, boolean isPeriodic) { // Sanity check if (props.externalizeCheckpoint() && targetDirectory == null) { throw new IllegalStateException("No target directory specified to persist checkpoint to."); } // make some eager pre-checks synchronized (lock) { // abort if the coordinator has been shutdown in the meantime if (shutdown) { return new CheckpointTriggerResult(CheckpointDeclineReason.COORDINATOR_SHUTDOWN); } // Don't allow periodic checkpoint if scheduling has been disabled if (isPeriodic && !periodicScheduling) { return new CheckpointTriggerResult(CheckpointDeclineReason.PERIODIC_SCHEDULER_SHUTDOWN); } // validate whether the checkpoint can be triggered, with respect to the limit of 
// concurrent checkpoints, and the minimum time between checkpoints. // these checks are not relevant for savepoints if (!props.forceCheckpoint()) { // sanity check: there should never be more than one trigger request queued if (triggerRequestQueued) { LOG.warn("Trying to trigger another checkpoint while one was queued already"); return new CheckpointTriggerResult(CheckpointDeclineReason.ALREADY_QUEUED); } // if too many checkpoints are currently in progress, we need to mark that a request is queued if (pendingCheckpoints.size() >= maxConcurrentCheckpointAttempts) { triggerRequestQueued = true; if (currentPeriodicTrigger != null) { currentPeriodicTrigger.cancel(false); currentPeriodicTrigger = null; } return new CheckpointTriggerResult(CheckpointDeclineReason.TOO_MANY_CONCURRENT_CHECKPOINTS); } // make sure the minimum interval between checkpoints has passed final long earliestNext = lastCheckpointCompletionNanos + minPauseBetweenCheckpointsNanos; final long durationTillNextMillis = (earliestNext - System.nanoTime()) / 1_000_000; if (durationTillNextMillis > 0) { if (currentPeriodicTrigger != null) { currentPeriodicTrigger.cancel(false); currentPeriodicTrigger = null; } // Reassign the new trigger to the currentPeriodicTrigger currentPeriodicTrigger = timer.scheduleAtFixedRate( new ScheduledTrigger(), durationTillNextMillis, baseInterval, TimeUnit.MILLISECONDS); return new CheckpointTriggerResult(CheckpointDeclineReason.MINIMUM_TIME_BETWEEN_CHECKPOINTS); } } } // check if all tasks that we need to trigger are running. // if not, abort the checkpoint Execution[] executions = new Execution[tasksToTrigger.length]; for (int i = 0; i < tasksToTrigger.length; i++) { Execution ee = tasksToTrigger[i].getCurrentExecutionAttempt(); if (ee != null && ee.getState() == ExecutionState.RUNNING) { executions[i] = ee; } else { LOG.info("Checkpoint triggering task {} is not being executed at the moment. 
Aborting checkpoint.", tasksToTrigger[i].getTaskNameWithSubtaskIndex()); return new CheckpointTriggerResult(CheckpointDeclineReason.NOT_ALL_REQUIRED_TASKS_RUNNING); } } // next, check if all tasks that need to acknowledge the checkpoint are running. // if not, abort the checkpoint Map<ExecutionAttemptID, ExecutionVertex> ackTasks = new HashMap<>(tasksToWaitFor.length); for (ExecutionVertex ev : tasksToWaitFor) { Execution ee = ev.getCurrentExecutionAttempt(); if (ee != null) { ackTasks.put(ee.getAttemptId(), ev); } else { LOG.info("Checkpoint acknowledging task {} is not being executed at the moment. Aborting checkpoint.", ev.getTaskNameWithSubtaskIndex()); return new CheckpointTriggerResult(CheckpointDeclineReason.NOT_ALL_REQUIRED_TASKS_RUNNING); } } // we will actually trigger this checkpoint! // we lock with a special lock to make sure that trigger requests do not overtake each other. // this is not done with the coordinator-wide lock, because the 'checkpointIdCounter' // may issue blocking operations. Using a different lock than the coordinator-wide lock, // we avoid blocking the processing of 'acknowledge/decline' messages during that time. synchronized (triggerLock) { final long checkpointID; try { // this must happen outside the coordinator-wide lock, because it communicates // with external services (in HA mode) and may block for a while. 
checkpointID = checkpointIdCounter.getAndIncrement(); } catch (Throwable t) { int numUnsuccessful = numUnsuccessfulCheckpointsTriggers.incrementAndGet(); LOG.warn("Failed to trigger checkpoint (" + numUnsuccessful + " consecutive failed attempts so far)", t); return new CheckpointTriggerResult(CheckpointDeclineReason.EXCEPTION); } final PendingCheckpoint checkpoint = new PendingCheckpoint( job, checkpointID, timestamp, ackTasks, props, targetDirectory, executor); if (statsTracker != null) { PendingCheckpointStats callback = statsTracker.reportPendingCheckpoint( checkpointID, timestamp, props); checkpoint.setStatsCallback(callback); } // schedule the timer that will clean up the expired checkpoints final Runnable canceller = new Runnable() { @Override public void run() { synchronized (lock) { // only do the work if the checkpoint is not discarded anyways // note that checkpoint completion discards the pending checkpoint object if (!checkpoint.isDiscarded()) { LOG.info("Checkpoint " + checkpointID + " expired before completing."); checkpoint.abortExpired(); pendingCheckpoints.remove(checkpointID); rememberRecentCheckpointId(checkpointID); triggerQueuedRequests(); } } } }; try { // re-acquire the coordinator-wide lock synchronized (lock) { // since we released the lock in the meantime, we need to re-check // that the conditions still hold. 
if (shutdown) { return new CheckpointTriggerResult(CheckpointDeclineReason.COORDINATOR_SHUTDOWN); } else if (!props.forceCheckpoint()) { if (triggerRequestQueued) { LOG.warn("Trying to trigger another checkpoint while one was queued already"); return new CheckpointTriggerResult(CheckpointDeclineReason.ALREADY_QUEUED); } if (pendingCheckpoints.size() >= maxConcurrentCheckpointAttempts) { triggerRequestQueued = true; if (currentPeriodicTrigger != null) { currentPeriodicTrigger.cancel(false); currentPeriodicTrigger = null; } return new CheckpointTriggerResult(CheckpointDeclineReason.TOO_MANY_CONCURRENT_CHECKPOINTS); } // make sure the minimum interval between checkpoints has passed final long earliestNext = lastCheckpointCompletionNanos + minPauseBetweenCheckpointsNanos; final long durationTillNextMillis = (earliestNext - System.nanoTime()) / 1_000_000; if (durationTillNextMillis > 0) { if (currentPeriodicTrigger != null) { currentPeriodicTrigger.cancel(false); currentPeriodicTrigger = null; } // Reassign the new trigger to the currentPeriodicTrigger currentPeriodicTrigger = timer.scheduleAtFixedRate( new ScheduledTrigger(), durationTillNextMillis, baseInterval, TimeUnit.MILLISECONDS); return new CheckpointTriggerResult(CheckpointDeclineReason.MINIMUM_TIME_BETWEEN_CHECKPOINTS); } } LOG.info("Triggering checkpoint " + checkpointID + " @ " + timestamp); pendingCheckpoints.put(checkpointID, checkpoint); ScheduledFuture<?> cancellerHandle = timer.schedule( canceller, checkpointTimeout, TimeUnit.MILLISECONDS); if (!checkpoint.setCancellerHandle(cancellerHandle)) { // checkpoint is already disposed! 
cancellerHandle.cancel(false); } // trigger the master hooks for the checkpoint final List<MasterState> masterStates = MasterHooks.triggerMasterHooks(masterHooks.values(), checkpointID, timestamp, executor, Time.milliseconds(checkpointTimeout)); for (MasterState s : masterStates) { checkpoint.addMasterState(s); } } // end of lock scope CheckpointOptions checkpointOptions; if (!props.isSavepoint()) { checkpointOptions = CheckpointOptions.forFullCheckpoint(); } else { checkpointOptions = CheckpointOptions.forSavepoint(targetDirectory); } // send the messages to the tasks that trigger their checkpoint for (Execution execution: executions) { execution.triggerCheckpoint(checkpointID, timestamp, checkpointOptions); } numUnsuccessfulCheckpointsTriggers.set(0); return new CheckpointTriggerResult(checkpoint); } catch (Throwable t) { // guard the map against concurrent modifications synchronized (lock) { pendingCheckpoints.remove(checkpointID); } int numUnsuccessful = numUnsuccessfulCheckpointsTriggers.incrementAndGet(); LOG.warn("Failed to trigger checkpoint {}. ({} consecutive failed attempts so far)", checkpointID, numUnsuccessful, t); if (!checkpoint.isDiscarded()) { checkpoint.abortError(new Exception("Failed to trigger checkpoint", t)); } return new CheckpointTriggerResult(CheckpointDeclineReason.EXCEPTION); } } // end trigger lock } /** * Receives a {@link DeclineCheckpoint} message for a pending checkpoint. * * @param message Checkpoint decline from the task manager */ public void receiveDeclineMessage(DeclineCheckpoint message) { if (shutdown || message == null) { return; } if (!job.equals(message.getJob())) { throw new IllegalArgumentException("Received DeclineCheckpoint message for job " + message.getJob() + " while this coordinator handles job " + job); } final long checkpointId = message.getCheckpointId(); final String reason = (message.getReason() != null ? 
message.getReason().getMessage() : ""); PendingCheckpoint checkpoint; synchronized (lock) { // we need to check inside the lock for being shutdown as well, otherwise we // get races and invalid error log messages if (shutdown) { return; } checkpoint = pendingCheckpoints.get(checkpointId); if (checkpoint != null && !checkpoint.isDiscarded()) { LOG.info("Discarding checkpoint {} because of checkpoint decline from task {} : {}", checkpointId, message.getTaskExecutionId(), reason); pendingCheckpoints.remove(checkpointId); checkpoint.abortDeclined(); rememberRecentCheckpointId(checkpointId); // we don't have to schedule another "dissolving" checkpoint any more because the // cancellation barriers take care of breaking downstream alignments // we only need to make sure that suspended queued requests are resumed boolean haveMoreRecentPending = false; for (PendingCheckpoint p : pendingCheckpoints.values()) { if (!p.isDiscarded() && p.getCheckpointId() >= checkpoint.getCheckpointId()) { haveMoreRecentPending = true; break; } } if (!haveMoreRecentPending) { triggerQueuedRequests(); } } else if (checkpoint != null) { // this should not happen throw new IllegalStateException( "Received message for discarded but non-removed checkpoint " + checkpointId); } else if (LOG.isDebugEnabled()) { if (recentPendingCheckpoints.contains(checkpointId)) { // message is for an unknown checkpoint, or comes too late (checkpoint disposed) LOG.debug("Received another decline message for now expired checkpoint attempt {} : {}", checkpointId, reason); } else { // message is for an unknown checkpoint. might be so old that we don't even remember it any more LOG.debug("Received decline message for unknown (too old?) checkpoint attempt {} : {}", checkpointId, reason); } } } } /** * Receives an AcknowledgeCheckpoint message and returns whether the * message was associated with a pending checkpoint. 
* * @param message Checkpoint ack from the task manager * * @return Flag indicating whether the ack'd checkpoint was associated * with a pending checkpoint. * * @throws CheckpointException If the checkpoint cannot be added to the completed checkpoint store. */ public boolean receiveAcknowledgeMessage(AcknowledgeCheckpoint message) throws CheckpointException { if (shutdown || message == null) { return false; } if (!job.equals(message.getJob())) { LOG.error("Received wrong AcknowledgeCheckpoint message for job {}: {}", job, message); return false; } final long checkpointId = message.getCheckpointId(); synchronized (lock) { // we need to check inside the lock for being shutdown as well, otherwise we // get races and invalid error log messages if (shutdown) { return false; } final PendingCheckpoint checkpoint = pendingCheckpoints.get(checkpointId); if (checkpoint != null && !checkpoint.isDiscarded()) { switch (checkpoint.acknowledgeTask(message.getTaskExecutionId(), message.getSubtaskState(), message.getCheckpointMetrics())) { case SUCCESS: LOG.debug("Received acknowledge message for checkpoint {} from task {} of job {}.", checkpointId, message.getTaskExecutionId(), message.getJob()); if (checkpoint.isFullyAcknowledged()) { completePendingCheckpoint(checkpoint); } break; case DUPLICATE: LOG.debug("Received a duplicate acknowledge message for checkpoint {}, task {}, job {}.", message.getCheckpointId(), message.getTaskExecutionId(), message.getJob()); break; case UNKNOWN: LOG.warn("Could not acknowledge the checkpoint {} for task {} of job {}, " + "because the task's execution attempt id was unknown. 
Discarding " + "the state handle to avoid lingering state.", message.getCheckpointId(), message.getTaskExecutionId(), message.getJob()); discardSubtaskState(message.getJob(), message.getTaskExecutionId(), message.getCheckpointId(), message.getSubtaskState()); break; case DISCARDED: LOG.warn("Could not acknowledge the checkpoint {} for task {} of job {}, " + "because the pending checkpoint had been discarded. Discarding the " + "state handle tp avoid lingering state.", message.getCheckpointId(), message.getTaskExecutionId(), message.getJob()); discardSubtaskState(message.getJob(), message.getTaskExecutionId(), message.getCheckpointId(), message.getSubtaskState()); } return true; } else if (checkpoint != null) { // this should not happen throw new IllegalStateException( "Received message for discarded but non-removed checkpoint " + checkpointId); } else { boolean wasPendingCheckpoint; // message is for an unknown checkpoint, or comes too late (checkpoint disposed) if (recentPendingCheckpoints.contains(checkpointId)) { wasPendingCheckpoint = true; LOG.warn("Received late message for now expired checkpoint attempt {} from " + "{} of job {}.", checkpointId, message.getTaskExecutionId(), message.getJob()); } else { LOG.debug("Received message for an unknown checkpoint {} from {} of job {}.", checkpointId, message.getTaskExecutionId(), message.getJob()); wasPendingCheckpoint = false; } // try to discard the state so that we don't have lingering state lying around discardSubtaskState(message.getJob(), message.getTaskExecutionId(), message.getCheckpointId(), message.getSubtaskState()); return wasPendingCheckpoint; } } } /** * Try to complete the given pending checkpoint. * * Important: This method should only be called in the checkpoint lock scope. 
* * @param pendingCheckpoint to complete * @throws CheckpointException if the completion failed */ private void completePendingCheckpoint(PendingCheckpoint pendingCheckpoint) throws CheckpointException { final long checkpointId = pendingCheckpoint.getCheckpointId(); final CompletedCheckpoint completedCheckpoint; // As a first step to complete the checkpoint, we register its state with the registry Map<OperatorID, OperatorState> operatorStates = pendingCheckpoint.getOperatorStates(); sharedStateRegistry.registerAll(operatorStates.values()); try { try { // externalize the checkpoint if required if (pendingCheckpoint.getProps().externalizeCheckpoint()) { completedCheckpoint = pendingCheckpoint.finalizeCheckpointExternalized(); } else { completedCheckpoint = pendingCheckpoint.finalizeCheckpointNonExternalized(); } } catch (Exception e1) { // abort the current pending checkpoint if we fail to finalize the pending checkpoint. if (!pendingCheckpoint.isDiscarded()) { pendingCheckpoint.abortError(e1); } throw new CheckpointException("Could not finalize the pending checkpoint " + checkpointId + '.', e1); } // the pending checkpoint must be discarded after the finalization Preconditions.checkState(pendingCheckpoint.isDiscarded() && completedCheckpoint != null); // TODO: add savepoints to completed checkpoint store once FLINK-4815 has been completed if (!completedCheckpoint.getProperties().isSavepoint()) { try { completedCheckpointStore.addCheckpoint(completedCheckpoint); } catch (Exception exception) { // we failed to store the completed checkpoint. 
Let's clean up
			// Best-effort asynchronous discard of the checkpoint that failed to be
			// stored; any failure here is only logged, never rethrown, because the
			// CheckpointException below already reports the real problem.
			executor.execute(new Runnable() {
				@Override
				public void run() {
					try {
						completedCheckpoint.discardOnFailedStoring();
					} catch (Throwable t) {
						LOG.warn("Could not properly discard completed checkpoint {}.",
							completedCheckpoint.getCheckpointID(), t);
					}
				}
			});

			throw new CheckpointException("Could not complete the pending checkpoint " + checkpointId + '.', exception);
		}

		// drop those pending checkpoints that are at prior to the completed one
		dropSubsumedCheckpoints(checkpointId);
	}
	} finally {
		// Always unregister the pending checkpoint and release any queued trigger
		// request, whether finalization succeeded or threw above.
		pendingCheckpoints.remove(checkpointId);
		triggerQueuedRequests();
	}

	rememberRecentCheckpointId(checkpointId);

	// record the time when this was completed, to calculate
	// the 'min delay between checkpoints'
	lastCheckpointCompletionNanos = System.nanoTime();

	LOG.info("Completed checkpoint {} ({} bytes in {} ms).", checkpointId,
		completedCheckpoint.getStateSize(), completedCheckpoint.getDuration());

	if (LOG.isDebugEnabled()) {
		StringBuilder builder = new StringBuilder();
		builder.append("Checkpoint state: ");
		for (OperatorState state : completedCheckpoint.getOperatorStates().values()) {
			builder.append(state);
			builder.append(", ");
		}
		// Remove last two chars ", "
		builder.setLength(builder.length() - 2);

		LOG.debug(builder.toString());
	}

	// send the "notify complete" call to all vertices
	final long timestamp = completedCheckpoint.getTimestamp();

	for (ExecutionVertex ev : tasksToCommitTo) {
		Execution ee = ev.getCurrentExecutionAttempt();
		if (ee != null) {
			ee.notifyCheckpointComplete(checkpointId, timestamp);
		}
	}
}

/**
 * Remembers the given checkpoint id in the bounded list of recently seen
 * checkpoint ids, evicting the oldest entry once
 * {@code NUM_GHOST_CHECKPOINT_IDS} entries are stored.
 */
private void rememberRecentCheckpointId(long id) {
	if (recentPendingCheckpoints.size() >= NUM_GHOST_CHECKPOINT_IDS) {
		recentPendingCheckpoints.removeFirst();
	}
	recentPendingCheckpoints.addLast(id);
}

/**
 * Aborts and removes every pending checkpoint whose id is lower than the
 * given (just completed) checkpoint id, provided it reports itself as
 * subsumable.
 */
private void dropSubsumedCheckpoints(long checkpointId) {
	Iterator<Map.Entry<Long, PendingCheckpoint>> entries = pendingCheckpoints.entrySet().iterator();

	while (entries.hasNext()) {
		PendingCheckpoint p = entries.next().getValue();
		// remove all pending checkpoints that are lesser than the current completed checkpoint
		if (p.getCheckpointId() < checkpointId && p.canBeSubsumed()) {
			rememberRecentCheckpointId(p.getCheckpointId());
			p.abortSubsumed();
			entries.remove();
		}
	}
}

/**
 * Triggers the queued request, if there is one.
 *
 * <p>NOTE: The caller of this method must hold the lock when invoking the method!
 */
private void triggerQueuedRequests() {
	if (triggerRequestQueued) {
		triggerRequestQueued = false;

		// trigger the checkpoint from the trigger timer, to finish the work of this thread before
		// starting with the next checkpoint
		if (periodicScheduling) {
			if (currentPeriodicTrigger != null) {
				currentPeriodicTrigger.cancel(false);
			}
			// Re-arm the periodic trigger with zero initial delay so the queued
			// request fires immediately.
			currentPeriodicTrigger = timer.scheduleAtFixedRate(
					new ScheduledTrigger(), 0L, baseInterval, TimeUnit.MILLISECONDS);
		} else {
			timer.execute(new ScheduledTrigger());
		}
	}
}

// Number of tasks currently queued on the checkpoint timer (test hook only).
@VisibleForTesting
int getNumScheduledTasks() {
	return timer.getQueue().size();
}

// --------------------------------------------------------------------------------------------
//  Checkpoint State Restoring
// --------------------------------------------------------------------------------------------

/**
 * Restores the latest checkpointed state.
 *
 * @param tasks Map of job vertices to restore. State for these vertices is
 * restored via {@link Execution#setInitialState(TaskStateHandles)}.
 * @param errorIfNoCheckpoint Fail if no completed checkpoint is available to
 * restore from.
 * @param allowNonRestoredState Allow checkpoint state that cannot be mapped
 * to any job vertex in tasks.
 * @return <code>true</code> if state was restored, <code>false</code> otherwise.
 * @throws IllegalStateException If the CheckpointCoordinator is shut down.
 * @throws IllegalStateException If no completed checkpoint is available and
 * the <code>failIfNoCheckpoint</code> flag has been set.
 * @throws IllegalStateException If the checkpoint contains state that cannot be
 * mapped to any job vertex in <code>tasks</code> and the
 * <code>allowNonRestoredState</code> flag has not been set.
 * @throws IllegalStateException If the max parallelism changed for an operator
 * that restores state from this checkpoint.
 * @throws IllegalStateException If the parallelism changed for an operator
 * that restores <i>non-partitioned</i> state from this
 * checkpoint.
 */
public boolean restoreLatestCheckpointedState(
		Map<JobVertexID, ExecutionJobVertex> tasks,
		boolean errorIfNoCheckpoint,
		boolean allowNonRestoredState) throws Exception {

	synchronized (lock) {
		if (shutdown) {
			throw new IllegalStateException("CheckpointCoordinator is shut down");
		}

		// We create a new shared state registry object, so that all pending async disposal requests from previous
		// runs will go against the old object (were they can do no harm).
		// This must happen under the checkpoint lock.
		sharedStateRegistry.close();
		sharedStateRegistry = sharedStateRegistryFactory.create(executor);

		// Recover the checkpoints, TODO this could be done only when there is a new leader, not on each recovery
		completedCheckpointStore.recover();

		// Now, we re-register all (shared) states from the checkpoint store with the new registry
		for (CompletedCheckpoint completedCheckpoint : completedCheckpointStore.getAllCheckpoints()) {
			completedCheckpoint.registerSharedStatesAfterRestored(sharedStateRegistry);
		}

		LOG.debug("Status of the shared state registry after restore: {}.", sharedStateRegistry);

		// Restore from the latest checkpoint
		CompletedCheckpoint latest = completedCheckpointStore.getLatestCheckpoint();

		if (latest == null) {
			if (errorIfNoCheckpoint) {
				throw new IllegalStateException("No completed checkpoint available");
			} else {
				return false;
			}
		}

		LOG.info("Restoring from latest valid checkpoint: {}.", latest);

		// re-assign the task states
		final Map<OperatorID, OperatorState> operatorStates = latest.getOperatorStates();

		StateAssignmentOperation stateAssignmentOperation =
				new StateAssignmentOperation(tasks, operatorStates, allowNonRestoredState);

		stateAssignmentOperation.assignStates();

		// call master hooks for restore
		MasterHooks.restoreMasterHooks(
				masterHooks,
				latest.getMasterHookStates(),
				latest.getCheckpointID(),
				allowNonRestoredState,
				LOG);

		// update metrics
		if (statsTracker != null) {
			long restoreTimestamp = System.currentTimeMillis();
			RestoredCheckpointStats restored = new RestoredCheckpointStats(
				latest.getCheckpointID(),
				latest.getProperties(),
				restoreTimestamp,
				latest.getExternalPointer());

			statsTracker.reportRestoredCheckpoint(restored);
		}

		return true;
	}
}

/**
 * Restore the state with given savepoint
 *
 * @param savepointPath Location of the savepoint
 * @param allowNonRestored True if allowing checkpoint state that cannot be
 * mapped to any job vertex in tasks.
 * @param tasks Map of job vertices to restore. State for these
 * vertices is restored via
 * {@link Execution#setInitialState(TaskStateHandles)}.
 * @param userClassLoader The class loader to resolve serialized classes in
 * legacy savepoint versions.
 */
public boolean restoreSavepoint(
		String savepointPath,
		boolean allowNonRestored,
		Map<JobVertexID, ExecutionJobVertex> tasks,
		ClassLoader userClassLoader) throws Exception {

	Preconditions.checkNotNull(savepointPath, "The savepoint path cannot be null.");

	LOG.info("Starting job from savepoint {} ({})",
			savepointPath, (allowNonRestored ? "allowing non restored state" : ""));

	// Load the savepoint as a checkpoint into the system
	CompletedCheckpoint savepoint = SavepointLoader.loadAndValidateSavepoint(
			job, tasks, savepointPath, userClassLoader, allowNonRestored);

	completedCheckpointStore.addCheckpoint(savepoint);

	// Reset the checkpoint ID counter
	long nextCheckpointId = savepoint.getCheckpointID() + 1;
	checkpointIdCounter.setCount(nextCheckpointId);

	LOG.info("Reset the checkpoint ID to {}.", nextCheckpointId);

	// Delegate the actual state re-assignment to the common restore path;
	// errorIfNoCheckpoint is true because the savepoint was just added above.
	return restoreLatestCheckpointedState(tasks, true, allowNonRestored);
}

// ------------------------------------------------------------------------
//  Accessors
// ------------------------------------------------------------------------

public int getNumberOfPendingCheckpoints() {
	return this.pendingCheckpoints.size();
}

public int getNumberOfRetainedSuccessfulCheckpoints() {
	synchronized (lock) {
		return completedCheckpointStore.getNumberOfRetainedCheckpoints();
	}
}

// Returns a defensive copy so callers cannot mutate the coordinator's map.
public Map<Long, PendingCheckpoint> getPendingCheckpoints() {
	synchronized (lock) {
		return new HashMap<>(this.pendingCheckpoints);
	}
}

public List<CompletedCheckpoint> getSuccessfulCheckpoints() throws Exception {
	synchronized (lock) {
		return completedCheckpointStore.getAllCheckpoints();
	}
}

public CompletedCheckpointStore getCheckpointStore() {
	return completedCheckpointStore;
}

public CheckpointIDCounter getCheckpointIdCounter() {
	return checkpointIdCounter;
}

public long getCheckpointTimeout() {
	return checkpointTimeout;
}

// --------------------------------------------------------------------------------------------
//  Periodic scheduling of checkpoints
// --------------------------------------------------------------------------------------------

public void startCheckpointScheduler() {
	synchronized (lock) {
		if (shutdown) {
			// NOTE(review): IllegalArgumentException is inconsistent with the rest of
			// this class, which throws IllegalStateException for the same shut-down
			// condition (see restoreLatestCheckpointedState). Confirm no caller
			// catches this exact type before changing it.
			throw new IllegalArgumentException("Checkpoint coordinator is shut down");
		}

		// make sure all prior timers are cancelled
		stopCheckpointScheduler();

		periodicScheduling = true;
		currentPeriodicTrigger = timer.scheduleAtFixedRate(
				new ScheduledTrigger(), baseInterval, baseInterval, TimeUnit.MILLISECONDS);
	}
}

public void stopCheckpointScheduler() {
	synchronized (lock) {
		triggerRequestQueued = false;
		periodicScheduling = false;

		if (currentPeriodicTrigger != null) {
			currentPeriodicTrigger.cancel(false);
			currentPeriodicTrigger = null;
		}

		// Abort every in-flight checkpoint; the suspension reason is reported to
		// each pending checkpoint's abort handler.
		for (PendingCheckpoint p : pendingCheckpoints.values()) {
			p.abortError(new Exception("Checkpoint Coordinator is suspending."));
		}

		pendingCheckpoints.clear();
		numUnsuccessfulCheckpointsTriggers.set(0);
	}
}

// ------------------------------------------------------------------------
//  job status listener that schedules / cancels periodic checkpoints
// ------------------------------------------------------------------------

public JobStatusListener createActivatorDeactivator() {
	synchronized (lock) {
		if (shutdown) {
			// NOTE(review): same IllegalArgumentException-vs-IllegalStateException
			// inconsistency as in startCheckpointScheduler().
			throw new IllegalArgumentException("Checkpoint coordinator is shut down");
		}

		// Lazily created and cached; the same listener instance is handed out
		// on every call.
		if (jobStatusListener == null) {
			jobStatusListener = new CheckpointCoordinatorDeActivator(this);
		}

		return jobStatusListener;
	}
}

// ------------------------------------------------------------------------

private final class ScheduledTrigger implements Runnable {

	@Override
	public void run() {
		try {
			triggerCheckpoint(System.currentTimeMillis(), true);
		} catch (Exception e) {
			LOG.error("Exception while triggering checkpoint.", e);
		}
	}
}

/**
 * Discards the given state object asynchronously belonging to the given job, execution attempt
 * id and checkpoint id.
 *
 * @param jobId identifying the job to which the state object belongs
 * @param executionAttemptID identifying the task to which the state object belongs
 * @param checkpointId of the state object
 * @param subtaskState to discard asynchronously
 */
private void discardSubtaskState(
		final JobID jobId,
		final ExecutionAttemptID executionAttemptID,
		final long checkpointId,
		final SubtaskState subtaskState) {

	if (subtaskState != null) {
		executor.execute(new Runnable() {
			@Override
			public void run() {

				try {
					subtaskState.discardState();
				} catch (Throwable t2) {
					LOG.warn("Could not properly discard state object of checkpoint {} " +
						"belonging to task {} of job {}.", checkpointId, executionAttemptID, jobId, t2);
				}
			}
		});
	}
}
}
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

/**
 * TrafficDataRequest.java
 *
 * This file was auto-generated from WSDL
 * by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter.
 */

// NOTE(review): generated code — do not hand-edit logic; regenerate from the
// WSDL instead. Comments below only document the emitter's conventions.

package com.google.api.ads.admanager.axis.v202102;


/**
 * Defines a segment of traffic for which traffic data should be returned.
 */
public class TrafficDataRequest  implements java.io.Serializable {
    /* The {@link TargetingDto} that defines a segment of traffic.
     * <span class="constraint Required">This attribute is required.</span> */
    private com.google.api.ads.admanager.axis.v202102.Targeting targeting;

    /* The date range for which traffic data are requested. This range
     * may cover historical dates,
     * future dates, or both.
     *
     * <p>The data returned are not guaranteed to cover the
     * entire requested date range. If sufficient
     * data are not available to cover the entire requested
     * date range, a response may be returned
     * with a later start date, earlier end date, or both.
     * <span class="constraint Required">This attribute is required.</span> */
    private com.google.api.ads.admanager.axis.v202102.DateRange requestedDateRange;

    public TrafficDataRequest() {
    }

    public TrafficDataRequest(
           com.google.api.ads.admanager.axis.v202102.Targeting targeting,
           com.google.api.ads.admanager.axis.v202102.DateRange requestedDateRange) {
           this.targeting = targeting;
           this.requestedDateRange = requestedDateRange;
    }

    @Override
    public String toString() {
        return com.google.common.base.MoreObjects.toStringHelper(this.getClass())
            .omitNullValues()
            .add("requestedDateRange", getRequestedDateRange())
            .add("targeting", getTargeting())
            .toString();
    }

    /**
     * Gets the targeting value for this TrafficDataRequest.
     *
     * @return targeting
     * The {@link TargetingDto} that defines a segment of traffic.
     * <span class="constraint Required">This attribute is required.</span>
     */
    public com.google.api.ads.admanager.axis.v202102.Targeting getTargeting() {
        return targeting;
    }

    /**
     * Sets the targeting value for this TrafficDataRequest.
     *
     * @param targeting
     * The {@link TargetingDto} that defines a segment of traffic.
     * <span class="constraint Required">This attribute is required.</span>
     */
    public void setTargeting(com.google.api.ads.admanager.axis.v202102.Targeting targeting) {
        this.targeting = targeting;
    }

    /**
     * Gets the requestedDateRange value for this TrafficDataRequest.
     *
     * @return requestedDateRange
     * The date range for which traffic data are requested. This range
     * may cover historical dates,
     * future dates, or both.
     *
     * <p>The data returned are not guaranteed to cover the
     * entire requested date range. If sufficient
     * data are not available to cover the entire requested
     * date range, a response may be returned
     * with a later start date, earlier end date, or both.
     * <span class="constraint Required">This attribute is required.</span>
     */
    public com.google.api.ads.admanager.axis.v202102.DateRange getRequestedDateRange() {
        return requestedDateRange;
    }

    /**
     * Sets the requestedDateRange value for this TrafficDataRequest.
     *
     * @param requestedDateRange
     * The date range for which traffic data are requested. This range
     * may cover historical dates,
     * future dates, or both.
     *
     * <p>The data returned are not guaranteed to cover the
     * entire requested date range. If sufficient
     * data are not available to cover the entire requested
     * date range, a response may be returned
     * with a later start date, earlier end date, or both.
     * <span class="constraint Required">This attribute is required.</span>
     */
    public void setRequestedDateRange(com.google.api.ads.admanager.axis.v202102.DateRange requestedDateRange) {
        this.requestedDateRange = requestedDateRange;
    }

    // Axis emitter's cycle guard for equals(): holds the object currently being
    // compared so self-referential object graphs terminate. NOTE(review): this
    // makes equals() synchronized-per-instance but not reentrant; also the
    // "obj == null" check below is dead code (instanceof already rejects null).
    // Standard Axis 1.4 output — leave as generated.
    private java.lang.Object __equalsCalc = null;
    public synchronized boolean equals(java.lang.Object obj) {
        if (!(obj instanceof TrafficDataRequest)) return false;
        TrafficDataRequest other = (TrafficDataRequest) obj;
        if (obj == null) return false;
        if (this == obj) return true;
        if (__equalsCalc != null) {
            return (__equalsCalc == obj);
        }
        __equalsCalc = obj;
        boolean _equals;
        _equals = true &&
            ((this.targeting==null && other.getTargeting()==null) ||
             (this.targeting!=null &&
              this.targeting.equals(other.getTargeting()))) &&
            ((this.requestedDateRange==null && other.getRequestedDateRange()==null) ||
             (this.requestedDateRange!=null &&
              this.requestedDateRange.equals(other.getRequestedDateRange())));
        __equalsCalc = null;
        return _equals;
    }

    // Cycle guard for hashCode(), mirroring __equalsCalc: a field currently in
    // the middle of a hashCode computation contributes 0.
    private boolean __hashCodeCalc = false;
    public synchronized int hashCode() {
        if (__hashCodeCalc) {
            return 0;
        }
        __hashCodeCalc = true;
        int _hashCode = 1;
        if (getTargeting() != null) {
            _hashCode += getTargeting().hashCode();
        }
        if (getRequestedDateRange() != null) {
            _hashCode += getRequestedDateRange().hashCode();
        }
        __hashCodeCalc = false;
        return _hashCode;
    }

    // Type metadata
    private static org.apache.axis.description.TypeDesc typeDesc =
        new org.apache.axis.description.TypeDesc(TrafficDataRequest.class, true);

    static {
        typeDesc.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202102", "TrafficDataRequest"));
        org.apache.axis.description.ElementDesc elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("targeting");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202102", "targeting"));
        elemField.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202102", "Targeting"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        typeDesc.addFieldDesc(elemField);
        elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("requestedDateRange");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202102", "requestedDateRange"));
        elemField.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202102", "DateRange"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        typeDesc.addFieldDesc(elemField);
    }

    /**
     * Return type metadata object
     */
    public static org.apache.axis.description.TypeDesc getTypeDesc() {
        return typeDesc;
    }

    /**
     * Get Custom Serializer
     */
    public static org.apache.axis.encoding.Serializer getSerializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new  org.apache.axis.encoding.ser.BeanSerializer(
            _javaType, _xmlType, typeDesc);
    }

    /**
     * Get Custom Deserializer
     */
    public static org.apache.axis.encoding.Deserializer getDeserializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new  org.apache.axis.encoding.ser.BeanDeserializer(
            _javaType, _xmlType, typeDesc);
    }

}
package org.spincast.tests;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.fail;

import java.net.URLDecoder;
import java.net.URLEncoder;
import java.util.Arrays;
import java.util.List;
import java.util.Map;

import org.junit.Test;
import org.spincast.core.exchange.DefaultRequestContext;
import org.spincast.core.routing.Handler;
import org.spincast.core.utils.ContentTypeDefaults;
import org.spincast.core.utils.SpincastStatics;
import org.spincast.plugins.httpclient.HttpResponse;
import org.spincast.shaded.org.apache.http.HttpHeaders;
import org.spincast.shaded.org.apache.http.HttpStatus;
import org.spincast.testing.core.utils.SpincastTestingUtils;
import org.spincast.testing.defaults.NoAppStartHttpServerTestingBase;

/**
 * Integration tests for Spincast's request/response header APIs: case
 * insensitivity, immutability of request headers, mutability of response
 * headers, and header behavior after flush().
 */
public class HeadersTest extends NoAppStartHttpServerTestingBase {

    // Request headers: lookup by name (case-insensitive), first-value access,
    // URL-decoded values, and the fact that the returned map is immutable.
    @Test
    public void requestHeaders() throws Exception {

        getRouter().GET("/one").handle(new Handler<DefaultRequestContext>() {

            @Override
            public void handle(DefaultRequestContext context) {

                try {
                    Map<String, List<String>> requestHeaders = context.request().getHeaders();
                    assertNotNull(requestHeaders);

                    List<String> vals = requestHeaders.get("header1");
                    assertEquals(1, vals.size());
                    assertEquals("val1 val2", vals.get(0));

                    vals = requestHeaders.get("header2");
                    assertEquals(1, vals.size());
                    assertEquals(SpincastTestingUtils.TEST_STRING, URLDecoder.decode(vals.get(0), "UTF-8"));

                    vals = context.request().getHeader("header1");
                    assertEquals(1, vals.size());
                    assertEquals("val1 val2", vals.get(0));

                    String headerFirstValue = context.request().getHeaderFirst("header2");
                    assertEquals(SpincastTestingUtils.TEST_STRING, URLDecoder.decode(headerFirstValue, "UTF-8"));

                    String insensitive = context.request().getHeaderFirst("HEADer1");
                    assertEquals("val1 val2", insensitive);

                    // Added by Apache HttpClient
                    headerFirstValue = context.request().getHeaderFirst(HttpHeaders.ACCEPT_ENCODING);
                    assertNotNull(headerFirstValue);

                    // immutable
                    try {
                        requestHeaders.remove("HEADer1");
                        fail();
                    } catch (UnsupportedOperationException ex) {
                    }

                    try {
                        requestHeaders.put("HEADer3", Arrays.asList("val3"));
                        fail();
                    } catch (UnsupportedOperationException ex) {
                    }

                    headerFirstValue = context.request().getHeaderFirst("HEADer1");
                    assertNotNull(headerFirstValue);

                    headerFirstValue = context.request().getHeaderFirst("HEADer3");
                    assertNull(headerFirstValue);

                } catch (Exception ex) {
                    throw SpincastStatics.runtimize(ex);
                }
            }
        });

        HttpResponse response = GET("/one").addHeaderValue("header1", "val1 val2")
                                           .addHeaderValue("header2",
                                                           URLEncoder.encode(SpincastTestingUtils.TEST_STRING, "UTF-8"))
                                           .send();

        assertEquals(HttpStatus.SC_OK, response.getStatus());
        assertEquals(ContentTypeDefaults.TEXT.getMainVariationWithUtf8Charset(), response.getContentType());
        assertEquals("", response.getContentAsString());
    }

    // Response headers map is mutable: entries put directly into the map are
    // sent with the response.
    @Test
    public void responseHeadersMutable() throws Exception {

        getRouter().GET("/one").handle(new Handler<DefaultRequestContext>() {

            @Override
            public void handle(DefaultRequestContext context) {

                try {
                    Map<String, List<String>> responseHeaders = context.response().getHeaders();
                    assertNotNull(responseHeaders);
                    assertEquals(0, responseHeaders.size());

                    // mutable
                    responseHeaders.put("header1", Arrays.asList("val1", "val2"));

                } catch (Exception ex) {
                    throw SpincastStatics.runtimize(ex);
                }
            }
        });

        HttpResponse response = GET("/one").send();
        assertEquals(HttpStatus.SC_OK, response.getStatus());

        List<String> vals = response.getHeader("header1");
        assertNotNull(vals);
        assertEquals(2, vals.size());
        assertEquals("val1", vals.get(0));
        assertEquals("val2", vals.get(1));
    }

    // resetEverything() clears all previously added response headers.
    @Test
    public void responseHeadersResetEverything() throws Exception {

        getRouter().GET("/one").handle(new Handler<DefaultRequestContext>() {

            @Override
            public void handle(DefaultRequestContext context) {

                try {
                    context.response().addHeaderValue("header1", "val1");
                    context.response().addHeaderValue("header1", "val2");

                    List<String> vals = context.response().getHeader("header1");
                    assertNotNull(vals);
                    assertEquals(2, vals.size());

                    context.response().resetEverything();

                    vals = context.response().getHeader("header1");
                    assertNotNull(vals);
                    assertEquals(0, vals.size());

                } catch (Exception ex) {
                    throw SpincastStatics.runtimize(ex);
                }
            }
        });

        HttpResponse response = GET("/one").send();
        assertEquals(HttpStatus.SC_OK, response.getStatus());

        List<String> vals = response.getHeader("header1");
        assertNotNull(vals);
        assertEquals(0, vals.size());
    }

    // addHeaderValue() appends values, one at a time, under the same name.
    @Test
    public void responseHeadersAdd() throws Exception {

        getRouter().GET("/one").handle(new Handler<DefaultRequestContext>() {

            @Override
            public void handle(DefaultRequestContext context) {

                try {
                    List<String> vals = context.response().getHeader("header1");
                    assertNotNull(vals);
                    assertEquals(0, vals.size());

                    context.response().addHeaderValue("header1", "val1");
                    context.response().addHeaderValue("header1", "val2");

                    vals = context.response().getHeader("header1");
                    assertNotNull(vals);
                    assertEquals(2, vals.size());

                } catch (Exception ex) {
                    throw SpincastStatics.runtimize(ex);
                }
            }
        });

        HttpResponse response = GET("/one").send();
        assertEquals(HttpStatus.SC_OK, response.getStatus());

        List<String> vals = response.getHeader("header1");
        assertNotNull(vals);
        assertEquals(2, vals.size());
        assertEquals("val1", vals.get(0));
        assertEquals("val2", vals.get(1));
    }

    // addHeaderValues() appends a whole list of values at once.
    @Test
    public void responseHeadersAddValues() throws Exception {

        getRouter().GET("/one").handle(new Handler<DefaultRequestContext>() {

            @Override
            public void handle(DefaultRequestContext context) {

                try {
                    List<String> vals = context.response().getHeader("header1");
                    assertNotNull(vals);
                    assertEquals(0, vals.size());

                    context.response().addHeaderValues("header1", Arrays.asList("val1", "val2"));

                    vals = context.response().getHeader("header1");
                    assertNotNull(vals);
                    assertEquals(2, vals.size());

                } catch (Exception ex) {
                    throw SpincastStatics.runtimize(ex);
                }
            }
        });

        HttpResponse response = GET("/one").send();
        assertEquals(HttpStatus.SC_OK, response.getStatus());

        List<String> vals = response.getHeader("header1");
        assertNotNull(vals);
        assertEquals(2, vals.size());
        assertEquals("val1", vals.get(0));
        assertEquals("val2", vals.get(1));
    }

    // setHeader() replaces all existing values; name matching is case-insensitive.
    @Test
    public void responseHeadersSet() throws Exception {

        getRouter().GET("/one").handle(new Handler<DefaultRequestContext>() {

            @Override
            public void handle(DefaultRequestContext context) {

                try {
                    context.response().addHeaderValues("header1", Arrays.asList("val1", "val2"));
                    context.response().setHeader("HEAder1", "val3");

                    List<String> vals = context.response().getHeader("header1");
                    assertNotNull(vals);
                    assertEquals(1, vals.size());
                    assertEquals("val3", vals.get(0));

                } catch (Exception ex) {
                    throw SpincastStatics.runtimize(ex);
                }
            }
        });

        HttpResponse response = GET("/one").send();
        assertEquals(HttpStatus.SC_OK, response.getStatus());

        List<String> vals = response.getHeader("header1");
        assertNotNull(vals);
        assertEquals(1, vals.size());
        assertEquals("val3", vals.get(0));
    }

    // removeHeader() drops the header entirely (case-insensitive name).
    @Test
    public void responseHeadersRemove() throws Exception {

        getRouter().GET("/one").handle(new Handler<DefaultRequestContext>() {

            @Override
            public void handle(DefaultRequestContext context) {

                try {
                    context.response().addHeaderValues("header1", Arrays.asList("val1", "val2"));
                    context.response().removeHeader("HEAder1");

                    List<String> vals = context.response().getHeader("header1");
                    assertNotNull(vals);
                    assertEquals(0, vals.size());

                } catch (Exception ex) {
                    throw SpincastStatics.runtimize(ex);
                }
            }
        });

        HttpResponse response = GET("/one").send();
        assertEquals(HttpStatus.SC_OK, response.getStatus());

        List<String> vals = response.getHeader("header1");
        assertNotNull(vals);
        assertEquals(0, vals.size());
    }

    // setHeader(name, null) behaves like removeHeader().
    @Test
    public void responseHeadersRemoveWithSetNull() throws Exception {

        getRouter().GET("/one").handle(new Handler<DefaultRequestContext>() {

            @Override
            public void handle(DefaultRequestContext context) {

                try {
                    context.response().addHeaderValues("header1", Arrays.asList("val1", "val2"));
                    context.response().setHeader("HEAder1", (String)null);

                    List<String> vals = context.response().getHeader("header1");
                    assertNotNull(vals);
                    assertEquals(0, vals.size());

                } catch (Exception ex) {
                    throw SpincastStatics.runtimize(ex);
                }
            }
        });

        HttpResponse response = GET("/one").send();
        assertEquals(HttpStatus.SC_OK, response.getStatus());

        List<String> vals = response.getHeader("header1");
        assertNotNull(vals);
        assertEquals(0, vals.size());
    }

    // getHeaderFirst() returns the first of several values (case-insensitive name).
    @Test
    public void responseHeadersGetFirstValue() throws Exception {

        getRouter().GET("/one").handle(new Handler<DefaultRequestContext>() {

            @Override
            public void handle(DefaultRequestContext context) {

                try {
                    context.response().addHeaderValues("header1", Arrays.asList("val1", "val2"));

                    String headerFirstValue = context.response().getHeaderFirst("HEAder1");
                    assertNotNull(headerFirstValue);
                    assertEquals("val1", headerFirstValue);

                } catch (Exception ex) {
                    throw SpincastStatics.runtimize(ex);
                }
            }
        });

        HttpResponse response = GET("/one").send();
        assertEquals(HttpStatus.SC_OK, response.getStatus());

        List<String> vals = response.getHeader("header1");
        assertNotNull(vals);
        assertEquals(2, vals.size());
        assertEquals("val1", vals.get(0));
        assertEquals("val2", vals.get(1));
    }

    // Once flush() has sent the headers, later mutations are silently ignored
    // for already-sent headers, and new headers are not added.
    @Test
    public void responseHeadersFlush() throws Exception {

        getRouter().GET("/one").handle(new Handler<DefaultRequestContext>() {

            @Override
            public void handle(DefaultRequestContext context) {

                try {
                    context.response().addHeaderValues("header1", Arrays.asList("val1", "val2"));

                    context.response().flush();

                    // Does nothing, headers sent!
                    context.response().removeHeader("header1");
                    context.response().setHeader("header1", Arrays.asList("val3"));
                    context.response().setHeader("header2", Arrays.asList("val3"));

                    List<String> vals = context.response().getHeader("header1");
                    assertNotNull(vals);
                    assertEquals(2, vals.size());

                    vals = context.response().getHeader("header2");
                    assertNotNull(vals);
                    assertEquals(0, vals.size());

                } catch (Exception ex) {
                    throw SpincastStatics.runtimize(ex);
                }
            }
        });

        HttpResponse response = GET("/one").send();
        assertEquals(HttpStatus.SC_OK, response.getStatus());

        List<String> vals = response.getHeader("header1");
        assertNotNull(vals);
        assertEquals(2, vals.size());
        assertEquals("val1", vals.get(0));
        assertEquals("val2", vals.get(1));
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.gobblin.kafka.client;

import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Properties;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors;

import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerRebalanceListener;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.consumer.OffsetAndMetadata;
import org.apache.kafka.clients.consumer.OffsetCommitCallback;
import org.apache.kafka.clients.consumer.internals.NoOpConsumerRebalanceListener;
import org.apache.kafka.common.MetricName;
import org.apache.kafka.common.PartitionInfo;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.metrics.KafkaMetric;

import com.codahale.metrics.Gauge;
import com.codahale.metrics.Metric;
import com.google.common.base.Function;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.base.Throwables;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterators;
import com.google.common.collect.Lists;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;

import javax.annotation.Nonnull;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.ToString;
import lombok.extern.slf4j.Slf4j;

import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.source.extractor.extract.kafka.KafkaOffsetRetrievalFailureException;
import org.apache.gobblin.source.extractor.extract.kafka.KafkaPartition;
import org.apache.gobblin.source.extractor.extract.kafka.KafkaTopic;
import org.apache.gobblin.util.ConfigUtils;

/**
 * A {@link GobblinKafkaConsumerClient} that uses kafka 09 consumer client. Use {@link Factory#create(Config)} to create
 * new Kafka09ConsumerClients. The {@link Config} used to create clients must have required key {@value #GOBBLIN_CONFIG_VALUE_DESERIALIZER_CLASS_KEY}
 *
 * @param <K> Message key type
 * @param <V> Message value type
 */
@Slf4j
public class Kafka09ConsumerClient<K, V> extends AbstractBaseKafkaConsumerClient {

  // Raw Kafka 0.9 consumer property names mirrored here so Gobblin config keys
  // can be mapped onto them.
  private static final String KAFKA_09_CLIENT_BOOTSTRAP_SERVERS_KEY = "bootstrap.servers";
  private static final String KAFKA_09_CLIENT_ENABLE_AUTO_COMMIT_KEY = "enable.auto.commit";
  private static final String KAFKA_09_CLIENT_SESSION_TIMEOUT_KEY = "session.timeout.ms";
  private static final String KAFKA_09_CLIENT_KEY_DESERIALIZER_CLASS_KEY = "key.deserializer";
  private static final String KAFKA_09_CLIENT_VALUE_DESERIALIZER_CLASS_KEY = "value.deserializer";
  private static final String KAFKA_09_CLIENT_GROUP_ID = "group.id";

  // Auto-commit is disabled by default: offset management is handled by Gobblin.
  private static final String KAFKA_09_DEFAULT_ENABLE_AUTO_COMMIT = Boolean.toString(false);
  public static final String KAFKA_09_DEFAULT_KEY_DESERIALIZER =
      "org.apache.kafka.common.serialization.StringDeserializer";
  private static final String KAFKA_09_DEFAULT_GROUP_ID = "kafka09";

  public static final String GOBBLIN_CONFIG_KEY_DESERIALIZER_CLASS_KEY = CONFIG_PREFIX
      + KAFKA_09_CLIENT_KEY_DESERIALIZER_CLASS_KEY;
  public static final String GOBBLIN_CONFIG_VALUE_DESERIALIZER_CLASS_KEY = CONFIG_PREFIX
      + KAFKA_09_CLIENT_VALUE_DESERIALIZER_CLASS_KEY;

  // Fallback defaults applied when the user's config does not set these keys.
  private static final Config FALLBACK =
      ConfigFactory.parseMap(ImmutableMap.<String, Object>builder()
          .put(KAFKA_09_CLIENT_ENABLE_AUTO_COMMIT_KEY, KAFKA_09_DEFAULT_ENABLE_AUTO_COMMIT)
          .put(KAFKA_09_CLIENT_KEY_DESERIALIZER_CLASS_KEY, KAFKA_09_DEFAULT_KEY_DESERIALIZER)
          .put(KAFKA_09_CLIENT_GROUP_ID, KAFKA_09_DEFAULT_GROUP_ID)
          .build());

  private final Consumer<K, V> consumer;

  /**
   * Builds a {@link KafkaConsumer} from the Gobblin config: requires the value
   * deserializer key, derives bootstrap servers and session timeout from the
   * base client config, then layers consumer-specific config over shared config.
   */
  private Kafka09ConsumerClient(Config config) {
    super(config);
    Preconditions.checkArgument(config.hasPath(GOBBLIN_CONFIG_VALUE_DESERIALIZER_CLASS_KEY),
        "Missing required property " + GOBBLIN_CONFIG_VALUE_DESERIALIZER_CLASS_KEY);

    Properties props = new Properties();
    props.put(KAFKA_09_CLIENT_BOOTSTRAP_SERVERS_KEY, Joiner.on(",").join(super.brokers));
    props.put(KAFKA_09_CLIENT_SESSION_TIMEOUT_KEY, super.socketTimeoutMillis);

    // grab all the config under "source.kafka" and add the defaults as fallback.
    Config baseConfig = ConfigUtils.getConfigOrEmpty(config, CONFIG_NAMESPACE).withFallback(FALLBACK);

    // get the "source.kafka.consumerConfig" config for extra config to pass along to Kafka with a fallback to the
    // shared config that start with "gobblin.kafka.sharedConfig"
    Config specificConfig = ConfigUtils.getConfigOrEmpty(baseConfig, CONSUMER_CONFIG).withFallback(
        ConfigUtils.getConfigOrEmpty(config, ConfigurationKeys.SHARED_KAFKA_CONFIG_PREFIX));

    // The specific config overrides settings in the base config
    Config scopedConfig = specificConfig.withFallback(baseConfig.withoutPath(CONSUMER_CONFIG));

    props.putAll(ConfigUtils.configToProperties(scopedConfig));

    this.consumer = new KafkaConsumer<>(props);
  }

  /** Visible-for-testing constructor that injects a pre-built {@link Consumer}. */
  public Kafka09ConsumerClient(Config config, Consumer<K, V> consumer) {
    super(config);
    this.consumer = consumer;
  }

  /** Lists all topics known to the consumer, mapped to Gobblin {@link KafkaTopic}s. */
  @Override
  public List<KafkaTopic> getTopics() {
    return FluentIterable.from(this.consumer.listTopics().entrySet())
        .transform(new Function<Entry<String, List<PartitionInfo>>, KafkaTopic>() {
          @Override
          public KafkaTopic apply(Entry<String, List<PartitionInfo>> filteredTopicEntry) {
            return new KafkaTopic(filteredTopicEntry.getKey(),
                Lists.transform(filteredTopicEntry.getValue(), PARTITION_INFO_TO_KAFKA_PARTITION));
          }
        }).toList();
  }

  /**
   * Seeks the consumer to the beginning of the given partition and returns its
   * position. NOTE(review): assign() replaces the consumer's current partition
   * assignment as a side effect — presumably callers do not interleave this
  * with polling; confirm against the calling code.
   */
  @Override
  public long getEarliestOffset(KafkaPartition partition) throws KafkaOffsetRetrievalFailureException {
    TopicPartition topicPartition = new TopicPartition(partition.getTopicName(), partition.getId());
    this.consumer.assign(Collections.singletonList(topicPartition));
    this.consumer.seekToBeginning(topicPartition);
    return this.consumer.position(topicPartition);
  }

  /**
   * Seeks the consumer to the end of the given partition and returns its
   * position. Same assignment side effect as {@link #getEarliestOffset}.
   */
  @Override
  public long getLatestOffset(KafkaPartition partition) throws KafkaOffsetRetrievalFailureException {
    TopicPartition topicPartition = new TopicPartition(partition.getTopicName(), partition.getId());
    this.consumer.assign(Collections.singletonList(topicPartition));
    this.consumer.seekToEnd(topicPartition);
    return this.consumer.position(topicPartition);
  }
@Override public Iterator<KafkaConsumerRecord> consume(KafkaPartition partition, long nextOffset, long maxOffset) { if (nextOffset > maxOffset) { return null; } this.consumer.assign(Lists.newArrayList(new TopicPartition(partition.getTopicName(), partition.getId()))); this.consumer.seek(new TopicPartition(partition.getTopicName(), partition.getId()), nextOffset); return consume(); } @Override public Iterator<KafkaConsumerRecord> consume() { try { ConsumerRecords<K, V> consumerRecords = consumer.poll(super.fetchTimeoutMillis); return Iterators.transform(consumerRecords.iterator(), input -> { try { return new Kafka09ConsumerRecord(input); } catch (Throwable t) { throw Throwables.propagate(t); } }); } catch (Exception e) { log.error("Exception on polling records", e); throw new RuntimeException(e); } } /** * Subscribe to a kafka topic * TODO Add multi topic support * @param topic */ @Override public void subscribe(String topic) { this.consumer.subscribe(Lists.newArrayList(topic), new NoOpConsumerRebalanceListener()); } /** * Subscribe to a kafka topic with a {#GobblinConsumerRebalanceListener} * TODO Add multi topic support * @param topic */ @Override public void subscribe(String topic, GobblinConsumerRebalanceListener listener) { this.consumer.subscribe(Lists.newArrayList(topic), new ConsumerRebalanceListener() { @Override public void onPartitionsRevoked(Collection<TopicPartition> partitions) { listener.onPartitionsRevoked(partitions.stream().map(a -> new KafkaPartition.Builder().withTopicName(a.topic()).withId(a.partition()).build()).collect(Collectors.toList())); } @Override public void onPartitionsAssigned(Collection<TopicPartition> partitions) { listener.onPartitionsAssigned(partitions.stream().map(a -> new KafkaPartition.Builder().withTopicName(a.topic()).withId(a.partition()).build()).collect(Collectors.toList())); } }); } @Override public Map<String, Metric> getMetrics() { Map<MetricName, KafkaMetric> kafkaMetrics = (Map<MetricName, KafkaMetric>) 
this.consumer.metrics(); Map<String, Metric> codaHaleMetricMap = new HashMap<>(); kafkaMetrics .forEach((key, value) -> codaHaleMetricMap.put(canonicalMetricName(value), kafkaToCodaHaleMetric(value))); return codaHaleMetricMap; } /** * Commit offsets to Kafka asynchronously */ @Override public void commitOffsetsAsync(Map<KafkaPartition, Long> partitionOffsets) { Map<TopicPartition, OffsetAndMetadata> offsets = partitionOffsets.entrySet().stream().collect(Collectors.toMap(e -> new TopicPartition(e.getKey().getTopicName(),e.getKey().getId()), e -> new OffsetAndMetadata(e.getValue()))); consumer.commitAsync(offsets, new OffsetCommitCallback() { @Override public void onComplete(Map<TopicPartition, OffsetAndMetadata> offsets, Exception exception) { if(exception != null) { log.error("Exception while committing offsets " + offsets, exception); return; } } }); } /** * Commit offsets to Kafka synchronously */ @Override public void commitOffsetsSync(Map<KafkaPartition, Long> partitionOffsets) { Map<TopicPartition, OffsetAndMetadata> offsets = partitionOffsets.entrySet().stream().collect(Collectors.toMap(e -> new TopicPartition(e.getKey().getTopicName(),e.getKey().getId()), e -> new OffsetAndMetadata(e.getValue()))); consumer.commitSync(offsets); } /** * returns the last committed offset for a KafkaPartition * @param partition * @return last committed offset or -1 for invalid KafkaPartition */ @Override public long committed(KafkaPartition partition) { OffsetAndMetadata offsetAndMetadata = consumer.committed(new TopicPartition(partition.getTopicName(), partition.getId())); return offsetAndMetadata != null ? offsetAndMetadata.offset() : -1l; } /** * Convert a {@link KafkaMetric} instance to a {@link Metric}. 
* @param kafkaMetric * @return */ private Metric kafkaToCodaHaleMetric(final KafkaMetric kafkaMetric) { if (log.isDebugEnabled()) { log.debug("Processing a metric change for {}", kafkaMetric.metricName().toString()); } Gauge<Double> gauge = () -> kafkaMetric.value(); return gauge; } private String canonicalMetricName(KafkaMetric kafkaMetric) { MetricName name = kafkaMetric.metricName(); return canonicalMetricName(name.group(), name.tags().values(), name.name()); } @Override public void close() throws IOException { this.consumer.close(); } private static final Function<PartitionInfo, KafkaPartition> PARTITION_INFO_TO_KAFKA_PARTITION = new Function<PartitionInfo, KafkaPartition>() { @Override public KafkaPartition apply(@Nonnull PartitionInfo partitionInfo) { return new KafkaPartition.Builder().withId(partitionInfo.partition()).withTopicName(partitionInfo.topic()) .withLeaderId(partitionInfo.leader().id()) .withLeaderHostAndPort(partitionInfo.leader().host(), partitionInfo.leader().port()).build(); } }; /** * A factory class to instantiate {@link Kafka09ConsumerClient} */ public static class Factory implements GobblinKafkaConsumerClientFactory { @SuppressWarnings("rawtypes") @Override public GobblinKafkaConsumerClient create(Config config) { return new Kafka09ConsumerClient(config); } } /** * A record returned by {@link Kafka09ConsumerClient} * * @param <K> Message key type * @param <V> Message value type */ @EqualsAndHashCode(callSuper = true) @ToString public static class Kafka09ConsumerRecord<K, V> extends BaseKafkaConsumerRecord implements DecodeableKafkaRecord<K, V> { private final ConsumerRecord<K, V> consumerRecord; public Kafka09ConsumerRecord(ConsumerRecord<K, V> consumerRecord) { // Kafka 09 consumerRecords do not provide value size. // Only 08 and 10 versions provide them. 
super(consumerRecord.offset(), BaseKafkaConsumerRecord.VALUE_SIZE_UNAVAILABLE, consumerRecord.topic(), consumerRecord.partition()); this.consumerRecord = consumerRecord; } @Override public K getKey() { return this.consumerRecord.key(); } @Override public V getValue() { return this.consumerRecord.value(); } } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.kafka.common.utils;

import java.util.AbstractCollection;
import java.util.AbstractSequentialList;
import java.util.AbstractSet;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
import java.util.NoSuchElementException;
import java.util.Set;

/**
 * A memory-efficient hash set which tracks the order of insertion of elements.
 *
 * Like java.util.LinkedHashSet, this collection maintains a linked list of elements.
 * However, rather than using a separate linked list, this collection embeds the next
 * and previous fields into the elements themselves. This reduces memory consumption,
 * because it means that we only have to store one Java object per element, rather
 * than multiple.
 *
 * The next and previous fields are stored as array indices rather than pointers.
 * This ensures that the fields only take 32 bits, even when pointers are 64 bits.
 * It also makes the garbage collector's job easier, because it reduces the number of
 * pointers that it must chase.
 *
 * This class uses linear probing. Unlike HashMap (but like HashTable), we don't force
 * the size to be a power of 2. This saves memory.
 *
 * This set does not allow null elements. It does not have internal synchronization.
 */
public class ImplicitLinkedHashCollection<E extends ImplicitLinkedHashCollection.Element> extends AbstractCollection<E> {
    /**
     * The interface which elements of this collection must implement. The prev,
     * setPrev, next, and setNext functions handle manipulating the implicit linked
     * list which these elements reside in inside the collection.
     * elementKeysAreEqual() is the function which this collection uses to compare
     * elements.
     */
    public interface Element {
        int prev();
        void setPrev(int prev);
        int next();
        void setNext(int next);
        // Defaults to Object#equals; override to compare elements by key only.
        default boolean elementKeysAreEqual(Object other) {
            return equals(other);
        }
    }

    /**
     * A special index value used to indicate that the next or previous field is
     * the head.
     */
    private static final int HEAD_INDEX = -1;

    /**
     * A special index value used for next and previous indices which have not
     * been initialized.
     */
    public static final int INVALID_INDEX = -2;

    /**
     * The minimum new capacity for a non-empty implicit hash set.
     */
    private static final int MIN_NONEMPTY_CAPACITY = 5;

    /**
     * A static empty array used to avoid object allocations when the capacity is zero.
     */
    private static final Element[] EMPTY_ELEMENTS = new Element[0];

    /**
     * Sentinel element representing the head of the circular linked list; it is
     * never stored in the elements array.
     */
    private static class HeadElement implements Element {
        static final HeadElement EMPTY = new HeadElement();

        private int prev = HEAD_INDEX;
        private int next = HEAD_INDEX;

        @Override
        public int prev() {
            return prev;
        }

        @Override
        public void setPrev(int prev) {
            this.prev = prev;
        }

        @Override
        public int next() {
            return next;
        }

        @Override
        public void setNext(int next) {
            this.next = next;
        }
    }

    // Resolves an index to an element; HEAD_INDEX resolves to the head sentinel.
    private static Element indexToElement(Element head, Element[] elements, int index) {
        if (index == HEAD_INDEX) {
            return head;
        }
        return elements[index];
    }

    // Links the element stored at elementIdx in as the new tail of the list.
    private static void addToListTail(Element head, Element[] elements, int elementIdx) {
        int oldTailIdx = head.prev();
        Element element = indexToElement(head, elements, elementIdx);
        Element oldTail = indexToElement(head, elements, oldTailIdx);
        head.setPrev(elementIdx);
        oldTail.setNext(elementIdx);
        element.setPrev(oldTailIdx);
        element.setNext(HEAD_INDEX);
    }

    // Unlinks the element at elementIdx from the list, clears its slot, and marks
    // its own links as INVALID_INDEX (so it can be re-added later).
    private static void removeFromList(Element head, Element[] elements, int elementIdx) {
        Element element = indexToElement(head, elements, elementIdx);
        elements[elementIdx] = null;
        int prevIdx = element.prev();
        int nextIdx = element.next();
        Element prev = indexToElement(head, elements, prevIdx);
        Element next = indexToElement(head, elements, nextIdx);
        prev.setNext(nextIdx);
        next.setPrev(prevIdx);
        element.setNext(INVALID_INDEX);
        element.setPrev(INVALID_INDEX);
    }

    /**
     * Iterates over the collection in insertion order by walking the implicit
     * linked list. set() and add() are unsupported.
     */
    private class ImplicitLinkedHashCollectionIterator implements ListIterator<E> {
        private int index = 0;
        private Element cur;
        private Element lastReturned;

        // Positions the iterator by advancing `index` times from the list head.
        ImplicitLinkedHashCollectionIterator(int index) {
            this.cur = indexToElement(head, elements, head.next());
            for (int i = 0; i < index; ++i) {
                next();
            }
            this.lastReturned = null;
        }

        @Override
        public boolean hasNext() {
            return cur != head;
        }

        @Override
        public boolean hasPrevious() {
            return indexToElement(head, elements, cur.prev()) != head;
        }

        @Override
        public E next() {
            if (!hasNext()) {
                throw new NoSuchElementException();
            }
            @SuppressWarnings("unchecked")
            E returnValue = (E) cur;
            lastReturned = cur;
            cur = indexToElement(head, elements, cur.next());
            ++index;
            return returnValue;
        }

        @Override
        public E previous() {
            Element prev = indexToElement(head, elements, cur.prev());
            if (prev == head) {
                throw new NoSuchElementException();
            }
            cur = prev;
            --index;
            lastReturned = cur;
            @SuppressWarnings("unchecked")
            E returnValue = (E) cur;
            return returnValue;
        }

        @Override
        public int nextIndex() {
            return index;
        }

        @Override
        public int previousIndex() {
            return index - 1;
        }

        @Override
        public void remove() {
            if (lastReturned == null) {
                throw new IllegalStateException();
            }
            // lastReturned's slot is recovered via its successor's prev link, because
            // removeElementAtSlot may reseat other elements afterwards.
            Element nextElement = indexToElement(head, elements, lastReturned.next());
            ImplicitLinkedHashCollection.this.removeElementAtSlot(nextElement.prev());
            if (lastReturned == cur) {
                // If the element we are removing was cur, set cur to cur->next.
                cur = nextElement;
            } else {
                // If the element we are removing comes before cur, decrement the index,
                // since there are now fewer entries before cur.
                --index;
            }
            lastReturned = null;
        }

        @Override
        public void set(E e) {
            throw new UnsupportedOperationException();
        }

        @Override
        public void add(E e) {
            throw new UnsupportedOperationException();
        }
    }

    // Backing view for valuesList(): a List facade over the insertion-ordered iterator.
    private class ImplicitLinkedHashCollectionListView extends AbstractSequentialList<E> {

        @Override
        public ListIterator<E> listIterator(int index) {
            if (index < 0 || index > size) {
                throw new IndexOutOfBoundsException();
            }

            return ImplicitLinkedHashCollection.this.listIterator(index);
        }

        @Override
        public int size() {
            return size;
        }
    }

    // Backing view for valuesSet(): a Set facade delegating to the outer collection.
    private class ImplicitLinkedHashCollectionSetView extends AbstractSet<E> {

        @Override
        public Iterator<E> iterator() {
            return ImplicitLinkedHashCollection.this.iterator();
        }

        @Override
        public int size() {
            return size;
        }

        @Override
        public boolean add(E newElement) {
            return ImplicitLinkedHashCollection.this.add(newElement);
        }

        @Override
        public boolean remove(Object key) {
            return ImplicitLinkedHashCollection.this.remove(key);
        }

        @Override
        public boolean contains(Object key) {
            return ImplicitLinkedHashCollection.this.contains(key);
        }

        @Override
        public void clear() {
            ImplicitLinkedHashCollection.this.clear();
        }
    }

    private Element head;       // sentinel for the circular insertion-order list

    Element[] elements;         // open-addressed (linear probing) hash table

    private int size;           // number of elements currently stored

    /**
     * Returns an iterator that will yield every element in the set.
     * The elements will be returned in the order that they were inserted in.
     *
     * Do not modify the set while you are iterating over it (except by calling
     * remove on the iterator itself, of course.)
     */
    @Override
    final public Iterator<E> iterator() {
        return listIterator(0);
    }

    private ListIterator<E> listIterator(int index) {
        return new ImplicitLinkedHashCollectionIterator(index);
    }

    // Home slot for an element: non-negative hash modulo the table length.
    final int slot(Element[] curElements, Object e) {
        return (e.hashCode() & 0x7fffffff) % curElements.length;
    }

    /**
     * Find an element matching an example element.
     *
     * Using the element's hash code, we can look up the slot where it belongs.
     * However, it may not have ended up in exactly this slot, due to a collision.
     * Therefore, we must search forward in the array until we hit a null, before
     * concluding that the element is not present.
     *
     * @param key               The element to match.
     * @return                  The match index, or INVALID_INDEX if no match was found.
     */
    final private int findIndexOfEqualElement(Object key) {
        if (key == null || size == 0) {
            return INVALID_INDEX;
        }
        int slot = slot(elements, key);
        for (int seen = 0; seen < elements.length; seen++) {
            Element element = elements[slot];
            if (element == null) {
                return INVALID_INDEX;
            }
            if (element.elementKeysAreEqual(key)) {
                return slot;
            }
            slot = (slot + 1) % elements.length;
        }
        return INVALID_INDEX;
    }

    /**
     * An element e in the collection such that e.elementKeysAreEqual(key) and
     * e.hashCode() == key.hashCode().
     *
     * @param key               The element to match.
     * @return                  The matching element, or null if there were none.
     */
    final public E find(E key) {
        int index = findIndexOfEqualElement(key);
        if (index == INVALID_INDEX) {
            return null;
        }
        @SuppressWarnings("unchecked")
        E result = (E) elements[index];
        return result;
    }

    /**
     * Returns the number of elements in the set.
     */
    @Override
    final public int size() {
        return size;
    }

    /**
     * Returns true if there is at least one element e in the collection such
     * that key.elementKeysAreEqual(e) and key.hashCode() == e.hashCode().
     *
     * @param key       The object to try to match.
     */
    @Override
    final public boolean contains(Object key) {
        return findIndexOfEqualElement(key) != INVALID_INDEX;
    }

    private static int calculateCapacity(int expectedNumElements) {
        // Avoid using even-sized capacities, to get better key distribution.
        int newCapacity = (2 * expectedNumElements) + 1;
        // Don't use a capacity that is too small.
        return Math.max(newCapacity, MIN_NONEMPTY_CAPACITY);
    }

    /**
     * Add a new element to the collection.
     *
     * @param newElement        The new element.
     *
     * @return                  True if the element was added to the collection;
     *                          false if it was not, because there was an existing equal element.
     */
    @Override
    final public boolean add(E newElement) {
        if (newElement == null) {
            return false;
        }
        // An element whose links are set is already in some collection; reject it.
        if (newElement.prev() != INVALID_INDEX || newElement.next() != INVALID_INDEX) {
            return false;
        }
        // Grow when the table would exceed 50% load.
        if ((size + 1) >= elements.length / 2) {
            changeCapacity(calculateCapacity(elements.length));
        }
        int slot = addInternal(newElement, elements);
        if (slot >= 0) {
            addToListTail(head, elements, slot);
            size++;
            return true;
        }
        return false;
    }

    final public void mustAdd(E newElement) {
        if (!add(newElement)) {
            throw new RuntimeException("Unable to add " + newElement);
        }
    }

    /**
     * Adds a new element to the appropriate place in the elements array.
     *
     * @param newElement        The new element to add.
     * @param addElements       The elements array.
     * @return                  The index at which the element was inserted, or INVALID_INDEX
     *                          if the element could not be inserted.
     */
    int addInternal(Element newElement, Element[] addElements) {
        int slot = slot(addElements, newElement);
        for (int seen = 0; seen < addElements.length; seen++) {
            Element element = addElements[slot];
            if (element == null) {
                addElements[slot] = newElement;
                return slot;
            }
            if (element.elementKeysAreEqual(newElement)) {
                return INVALID_INDEX;
            }
            slot = (slot + 1) % addElements.length;
        }
        throw new RuntimeException("Not enough hash table slots to add a new element.");
    }

    // Rehashes every element into a table of newCapacity, preserving insertion order.
    private void changeCapacity(int newCapacity) {
        Element[] newElements = new Element[newCapacity];
        HeadElement newHead = new HeadElement();
        int oldSize = size;
        // iter.remove() clears each element's links so addInternal can re-insert it.
        for (Iterator<E> iter = iterator(); iter.hasNext(); ) {
            Element element = iter.next();
            iter.remove();
            int newSlot = addInternal(element, newElements);
            addToListTail(newHead, newElements, newSlot);
        }
        this.elements = newElements;
        this.head = newHead;
        this.size = oldSize;
    }

    /**
     * Remove the first element e such that key.elementKeysAreEqual(e)
     * and key.hashCode == e.hashCode.
     *
     * @param key       The object to try to match.
     * @return          True if an element was removed; false otherwise.
     */
    @Override
    final public boolean remove(Object key) {
        int slot = findElementToRemove(key);
        if (slot == INVALID_INDEX) {
            return false;
        }
        removeElementAtSlot(slot);
        return true;
    }

    int findElementToRemove(Object key) {
        return findIndexOfEqualElement(key);
    }

    /**
     * Remove an element in a particular slot.
     *
     * @param slot      The slot of the element to remove.
     *
     * @return          True if an element was removed; false otherwise.
     */
    private boolean removeElementAtSlot(int slot) {
        size--;
        removeFromList(head, elements, slot);
        slot = (slot + 1) % elements.length;

        // Find the next empty slot
        int endSlot = slot;
        for (int seen = 0; seen < elements.length; seen++) {
            Element element = elements[endSlot];
            if (element == null) {
                break;
            }
            endSlot = (endSlot + 1) % elements.length;
        }

        // We must preserve the denseness invariant.  The denseness invariant says that
        // any element is either in the slot indicated by its hash code, or a slot which
        // is not separated from that slot by any nulls.
        // Reseat all elements in between the deleted element and the next empty slot.
        while (slot != endSlot) {
            reseat(slot);
            slot = (slot + 1) % elements.length;
        }
        return true;
    }

    // Moves the element at prevSlot back toward its home slot if a closer position
    // (freed by a removal) is now available, fixing up its linked-list neighbors.
    private void reseat(int prevSlot) {
        Element element = elements[prevSlot];
        int newSlot = slot(elements, element);
        for (int seen = 0; seen < elements.length; seen++) {
            Element e = elements[newSlot];
            if ((e == null) || (e == element)) {
                break;
            }
            newSlot = (newSlot + 1) % elements.length;
        }
        if (newSlot == prevSlot) {
            return;
        }
        Element prev = indexToElement(head, elements, element.prev());
        prev.setNext(newSlot);
        Element next = indexToElement(head, elements, element.next());
        next.setPrev(newSlot);
        elements[prevSlot] = null;
        elements[newSlot] = element;
    }

    /**
     * Create a new ImplicitLinkedHashCollection.
     */
    public ImplicitLinkedHashCollection() {
        this(0);
    }

    /**
     * Create a new ImplicitLinkedHashCollection.
     *
     * @param expectedNumElements   The number of elements we expect to have in this set.
     *                              This is used to optimize by setting the capacity ahead
     *                              of time rather than growing incrementally.
     */
    public ImplicitLinkedHashCollection(int expectedNumElements) {
        clear(expectedNumElements);
    }

    /**
     * Create a new ImplicitLinkedHashCollection.
     *
     * @param iter                  We will add all the elements accessible through this iterator
     *                              to the set.
     */
    public ImplicitLinkedHashCollection(Iterator<E> iter) {
        clear(0);
        while (iter.hasNext()) {
            mustAdd(iter.next());
        }
    }

    /**
     * Removes all of the elements from this set.
     */
    @Override
    final public void clear() {
        clear(elements.length);
    }

    /**
     * Moves an element which is already in the collection so that it comes last
     * in iteration order.
     */
    final public void moveToEnd(E element) {
        if (element.prev() == INVALID_INDEX || element.next() == INVALID_INDEX) {
            throw new RuntimeException("Element " + element + " is not in the collection.");
        }
        // Unlink from the current position, then re-link at the tail; the element
        // keeps its hash-table slot.
        Element prevElement = indexToElement(head, elements, element.prev());
        Element nextElement = indexToElement(head, elements, element.next());
        int slot = prevElement.next();
        prevElement.setNext(element.next());
        nextElement.setPrev(element.prev());
        addToListTail(head, elements, slot);
    }

    /**
     * Removes all of the elements from this set, and resets the set capacity
     * based on the provided expected number of elements.
     */
    final public void clear(int expectedNumElements) {
        if (expectedNumElements == 0) {
            // Optimize away object allocations for empty sets.
            this.head = HeadElement.EMPTY;
            this.elements = EMPTY_ELEMENTS;
            this.size = 0;
        } else {
            this.head = new HeadElement();
            this.elements = new Element[calculateCapacity(expectedNumElements)];
            this.size = 0;
        }
    }

    /**
     * Compares the specified object with this collection for equality. Two
     * {@code ImplicitLinkedHashCollection} objects are equal if they contain the
     * same elements (as determined by the element's {@code equals} method), and
     * those elements were inserted in the same order. Because
     * {@code ImplicitLinkedHashCollectionListIterator} iterates over the elements
     * in insertion order, it is sufficient to call {@code valuesList.equals}.
     *
     * Note that {@link ImplicitLinkedHashMultiCollection} does not override
     * {@code equals} and uses this method as well. This means that two
     * {@code ImplicitLinkedHashMultiCollection} objects will be considered equal even
     * if they each contain two elements A and B such that A.equals(B) but A != B and
     * A and B have switched insertion positions between the two collections. This
     * is an acceptable definition of equality, because the collections are still
     * equal in terms of the order and value of each element.
     *
     * @param o object to be compared for equality with this collection
     * @return true if the specified object is equal to this collection
     */
    @Override
    public boolean equals(Object o) {
        if (o == this)
            return true;

        if (!(o instanceof ImplicitLinkedHashCollection))
            return false;

        ImplicitLinkedHashCollection<?> ilhs = (ImplicitLinkedHashCollection<?>) o;
        return this.valuesList().equals(ilhs.valuesList());
    }

    /**
     * Returns the hash code value for this collection. Because
     * {@code ImplicitLinkedHashCollection.equals} compares the {@code valuesList}
     * of two {@code ImplicitLinkedHashCollection} objects to determine equality,
     * this method uses the {@code valuesList} to compute the hash code value as well.
     *
     * @return the hash code value for this collection
     */
    @Override
    public int hashCode() {
        return this.valuesList().hashCode();
    }

    // Visible for testing
    final int numSlots() {
        return elements.length;
    }

    /**
     * Returns a {@link List} view of the elements contained in the collection,
     * ordered by order of insertion into the collection. The list is backed by the
     * collection, so changes to the collection are reflected in the list and
     * vice-versa. The list supports element removal, which removes the corresponding
     * element from the collection, but does not support the {@code add} or
     * {@code set} operations.
     *
     * The list is implemented as a circular linked list, so all index-based
     * operations, such as {@code List.get}, run in O(n) time.
     *
     * @return a list view of the elements contained in this collection
     */
    public List<E> valuesList() {
        return new ImplicitLinkedHashCollectionListView();
    }

    /**
     * Returns a {@link Set} view of the elements contained in the collection. The
     * set is backed by the collection, so changes to the collection are reflected in
     * the set, and vice versa. The set supports element removal and addition, which
     * removes from or adds to the collection, respectively.
     *
     * @return a set view of the elements contained in this collection
     */
    public Set<E> valuesSet() {
        return new ImplicitLinkedHashCollectionSetView();
    }

    // Sorts in place: drains all elements (clearing their links), sorts them, and
    // re-inserts in sorted order so iteration order matches the comparator.
    public void sort(Comparator<E> comparator) {
        ArrayList<E> array = new ArrayList<>(size);
        Iterator<E> iterator = iterator();
        while (iterator.hasNext()) {
            E e = iterator.next();
            iterator.remove();
            array.add(e);
        }
        array.sort(comparator);
        for (E e : array) {
            add(e);
        }
    }
}
/** * Copyright 2005-2014 Red Hat, Inc. * * Red Hat licenses this file to you under the Apache License, version * 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. */ package io.fabric8.jube.apimaster; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; import javax.inject.Inject; import javax.inject.Singleton; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import io.fabric8.jube.KubernetesModel; import io.fabric8.jube.local.EntityListener; import io.fabric8.jube.local.EntityListenerList; import io.fabric8.jube.local.LocalKubernetesModel; import io.fabric8.jube.local.NodeHelper; import io.fabric8.jube.local.PodCurrentContainer; import io.fabric8.jube.model.HostNode; import io.fabric8.jube.model.HostNodeModel; import io.fabric8.kubernetes.api.KubernetesHelper; import io.fabric8.kubernetes.api.model.PodList; import io.fabric8.kubernetes.api.model.Pod; import io.fabric8.kubernetes.api.model.PodState; import io.fabric8.kubernetes.api.model.ReplicationControllerList; import io.fabric8.kubernetes.api.model.ReplicationController; import io.fabric8.kubernetes.api.model.ServiceList; import io.fabric8.kubernetes.api.model.Service; import io.fabric8.utils.Filter; import io.fabric8.utils.Strings; import org.apache.curator.framework.CuratorFramework; import org.apache.curator.framework.recipes.cache.ChildData; import org.apache.curator.framework.recipes.cache.TreeCacheEvent; import org.apache.curator.framework.recipes.cache.TreeCacheListener; 
import org.apache.curator.framework.recipes.cache.TreeCache; import org.apache.zookeeper.data.Stat; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Mirrors ZooKeeper data into a local in memory model and all updates to the model are written directly to ZooKeeper */ public class ApiMasterKubernetesModel implements KubernetesModel { private static final transient Logger LOG = LoggerFactory.getLogger(ApiMasterKubernetesModel.class); private static final String KUBERNETES_MODEL = "/kubernetes/model"; private final LocalKubernetesModel memoryModel = new LocalKubernetesModel(); private final CuratorFramework curator; private final HostNodeModel hostNodeModel; private final TreeCacheListener treeListener = new TreeCacheListener() { @Override public void childEvent(CuratorFramework curatorFramework, TreeCacheEvent event) throws Exception { treeCacheEvent(event); } }; private final TreeCache treeCache; private final EntityListenerList<Pod> podListeners = new EntityListenerList<>(); private final EntityListenerList<ReplicationController> replicationControllerListeners = new EntityListenerList<>(); private final EntityListenerList<Service> serviceListeners = new EntityListenerList<>(); @Singleton @Inject public ApiMasterKubernetesModel(CuratorFramework curator, HostNodeModel hostNodeModel) throws Exception { this.curator = curator; this.hostNodeModel = hostNodeModel; this.treeCache = new TreeCache(curator, KUBERNETES_MODEL); this.treeCache.start(); this.treeCache.getListenable().addListener(treeListener); } // Add and remove listeners //------------------------------------------------------------------------- public void addPodListener(EntityListener<Pod> listener) { podListeners.addListener(listener); } public void removePodListener(EntityListener<Pod> listener) { podListeners.removeListener(listener); } public void addReplicationControllerListener(EntityListener<ReplicationController> listener) { replicationControllerListeners.addListener(listener); } public 
void removeReplicationControllerListener(EntityListener<ReplicationController> listener) { replicationControllerListeners.removeListener(listener); } public void addServiceListener(EntityListener<Service> listener) { serviceListeners.addListener(listener); } public void removeServiceListener(EntityListener<Service> listener) { serviceListeners.removeListener(listener); } // Updating API which just writes to ZK and waits for ZK watches to update in memory // ------------------------------------------------------------------------- @Override public Pod deletePod(String podId, String namespace) { if (Strings.isNotBlank(podId)) { Pod answer = memoryModel.deletePod(podId, namespace); deleteEntity(zkPathForPod(podId)); return answer; } else { return null; } } @Override public void updatePod(String id, Pod entity) { writeEntity(zkPathForPod(id), entity); // memoryModel.updatePod(id, entity); } @Override public boolean updatePodIfNotExist(String id, Pod entity) { if (memoryModel.updatePodIfNotExist(id, entity)) { // lets not write it yet - we're just doing this to set a unique ID return true; } return false; } @Override public void updateService(String id, Service entity) { writeEntity(zkPathForService(id), entity); // memoryModel.updateService(id, entity); } @Override public void deleteService(String id, String namespace) { deleteEntity(zkPathForService(id)); //memoryModel.deleteService(id); } @Override public void updateReplicationController(String id, ReplicationController entity) { writeEntity(zkPathForReplicationController(id, entity.getNamespace()), entity); //memoryModel.updateReplicationController(id, entity); } @Override public void deleteReplicationController(String id, String namespace) { deleteEntity(zkPathForReplicationController(id, namespace)); // memoryModel.deleteReplicationController(id); } // Reading API from memory // ------------------------------------------------------------------------- @Override public String getNamespace() { return 
memoryModel.getNamespace(); } @Override public void setNamespace(String namespace) { memoryModel.setNamespace(namespace); } @Override public String getOrCreateId(String id, String kind) { return memoryModel.getOrCreateId(id, kind); } @Override public ImmutableMap<String, Pod> getPodMap() { return memoryModel.getPodMap(); } @Override public PodList getPods() { return memoryModel.getPods(); } @Override public ImmutableList<Pod> getPods(Map<String, String> replicaSelector) { return memoryModel.getPods(replicaSelector); } @Override public ImmutableList<Pod> getPods(Filter<Pod> podFilter) { return memoryModel.getPods(podFilter); } @Override public Pod getPod(String id) { return memoryModel.getPod(id); } @Override public PodList getPods(String namespace) { return memoryModel.getPods(namespace); } @Override public Pod getPod(String id, String namespace) { return memoryModel.getPod(id, namespace); } @Override public ImmutableMap<String, PodCurrentContainer> getPodRunningContainers(KubernetesModel model) { return memoryModel.getPodRunningContainers(model); } @Override public ReplicationController getReplicationController(String id) { return memoryModel.getReplicationController(id); } @Override public ReplicationControllerList getReplicationControllers() { return memoryModel.getReplicationControllers(); } @Override public ImmutableMap<String, ReplicationController> getReplicationControllerMap() { return memoryModel.getReplicationControllerMap(); } @Override public ReplicationController getReplicationController(String id, String namespace) { return memoryModel.getReplicationController(id, namespace); } @Override public ReplicationControllerList getReplicationControllers(String namespace) { return memoryModel.getReplicationControllers(namespace); } @Override public ServiceList getServices() { return memoryModel.getServices(); } @Override public Service getService(String id) { return memoryModel.getService(id); } @Override public ServiceList getServices(String namespace) { 
return memoryModel.getServices(namespace); } @Override public Service getService(String id, String namespace) { return memoryModel.getService(id, namespace); } @Override public ImmutableMap<String, Service> getServiceMap() { return memoryModel.getServiceMap(); } @Override public String createID(String kind) { return memoryModel.createID(kind); } // Load balancing API //------------------------------------------------------------------------- public String remoteCreatePod(Pod pod) { Exception failed = null; List<HostNode> hosts = new ArrayList<>(hostNodeModel.getMap().values()); // lets try randomize the list int size = hosts.size(); if (size <= 0) { throw new IllegalStateException("No host nodes available"); } if (size == 1) { HostNode hostNode = hosts.get(0); try { return tryCreatePod(hostNode, pod); } catch (Exception e) { failed = e; LOG.error("Failed to create pod: " + pod.getId() + " on host: " + hostNode + ". " + e, e); } } else { Collections.shuffle(hosts); for (HostNode hostNode : hosts) { try { return tryCreatePod(hostNode, pod); } catch (Exception e) { failed = e; LOG.error("Failed to create pod: " + pod.getId() + " on host: " + hostNode + ". " + e, e); } } } PodState currentState = NodeHelper.getOrCreateCurrentState(pod); currentState.setStatus("Terminated: " + failed); return null; } protected String tryCreatePod(HostNode hostNode, Pod pod) throws Exception { LOG.info("Attempting to create pod on host: " + hostNode.getWebUrl()); KubernetesExtensionsClient client = createClient(hostNode); return client.createLocalPod(pod); } public String deleteRemotePod(Pod pod) { List<HostNode> hosts = new ArrayList<>(hostNodeModel.getMap().values()); for (HostNode hostNode : hosts) { try { return tryDeletePod(hostNode, pod); } catch (Exception e) { LOG.warn("Failed to delete pod on host " + hostNode.getWebUrl() + ". Might not be on that pod ;). 
" + e, e); } } return null; } protected String tryDeletePod(HostNode hostNode, Pod pod) throws Exception { String id = pod.getId(); LOG.info("Attempting to delete pod: " + id + " on host: " + hostNode.getWebUrl()); KubernetesExtensionsClient client = createClient(hostNode); return client.deleteLocalPod(id, pod.getNamespace()); } private KubernetesExtensionsClient createClient(HostNode hostNode) { String webUrl = hostNode.getWebUrl(); if (Strings.isNullOrBlank(webUrl)) { throw new IllegalArgumentException("No WebUrl so could not create client for host: " + hostNode); } return new KubernetesExtensionsClient(webUrl); } // Implementation methods //------------------------------------------------------------------------- protected String zkPathForPod(String id) { return KUBERNETES_MODEL + "/" + NodeHelper.KIND_POD + "-" + id; } protected String zkPathForService(String id) { return KUBERNETES_MODEL + "/" + NodeHelper.KIND_SERVICE + "-" + id; } protected String zkPathForReplicationController(String id, String namespace) { if (Strings.isNotBlank(namespace)) { namespace = "default"; } return KUBERNETES_MODEL + "/" + NodeHelper.KIND_REPLICATION_CONTROLLER + "-" + namespace + "-" + id; } protected void writeEntity(String path, Object entity) { try { String json = KubernetesHelper.toJson(entity); if (LOG.isDebugEnabled()) { LOG.debug("Writing to path: " + path + " json: " + json); } if (curator.checkExists().forPath(path) == null) { curator.create().creatingParentsIfNeeded().forPath(path, json.getBytes()); } else { curator.setData().forPath(path, json.getBytes()); } updateLocalModel(entity, false); } catch (Exception e) { throw new RuntimeException("Failed to update object at path: " + path + ". 
" + e, e); } } protected void deleteEntity(String path) { try { Stat stat = curator.checkExists().forPath(path); if (stat != null) { curator.delete().deletingChildrenIfNeeded().forPath(path); } } catch (Exception e) { throw new RuntimeException("Failed to delete object at path: " + path + ". " + e, e); } } protected void treeCacheEvent(TreeCacheEvent event) { ChildData childData = event.getData(); if (childData == null) { return; } String path = childData.getPath(); TreeCacheEvent.Type type = event.getType(); byte[] data = childData.getData(); if (data == null || data.length == 0 || path == null) { return; } if (path.startsWith(KUBERNETES_MODEL)) { path = path.substring(KUBERNETES_MODEL.length()); } boolean remove = false; switch (type) { case NODE_ADDED: case NODE_UPDATED: break; case NODE_REMOVED: remove = true; break; default: return; } try { Object dto = KubernetesHelper.loadJson(data); updateLocalModel(dto, remove); } catch (Exception e) { LOG.warn("Failed to parse the JSON: " + new String(data) + ". 
Reason: " + e, e); } } protected void updateLocalModel(Object dto, boolean remove) { if (dto instanceof Pod) { podChanged((Pod) dto, remove); } else if (dto instanceof ReplicationController) { replicationControllerChanged((ReplicationController) dto, remove); } else if (dto instanceof Service) { serviceChanged((Service) dto, remove); } else { LOG.warn("Unrecognised DTO: " + dto); } } protected void podChanged(Pod entity, boolean remove) { if (remove) { String id = entity.getId(); if (Strings.isNotBlank(id)) { memoryModel.deletePod(id, entity.getNamespace()); podListeners.entityDeleted(id); } } else { String id = memoryModel.getOrCreateId(entity.getId(), NodeHelper.KIND_POD); Pod old = memoryModel.getPod(id); // lets only replace the Pod if it really has changed to avoid overwriting // pods which are being installed if (NodeHelper.podHasChanged(entity, old)) { memoryModel.updatePod(id, entity); podListeners.entityChanged(id, entity); } } } protected void replicationControllerChanged(ReplicationController entity, boolean remove) { if (remove) { String id = entity.getId(); if (Strings.isNotBlank(id)) { memoryModel.deleteReplicationController(id, entity.getNamespace()); replicationControllerListeners.entityDeleted(id); } } else { String id = memoryModel.getOrCreateId(entity.getId(), NodeHelper.KIND_REPLICATION_CONTROLLER); memoryModel.updateReplicationController(id, entity); replicationControllerListeners.entityChanged(id, entity); } } protected void serviceChanged(Service entity, boolean remove) { if (remove) { String id = entity.getId(); if (Strings.isNotBlank(id)) { memoryModel.deleteService(id, entity.getNamespace()); serviceListeners.entityDeleted(id); } } else { String id = memoryModel.getOrCreateId(entity.getId(), NodeHelper.KIND_SERVICE); memoryModel.updateService(id, entity); serviceListeners.entityChanged(id, entity); } } }
/* * Copyright 2012 JBoss Inc * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.guvnor.m2repo.backend.server; import java.io.File; import java.io.InputStream; import java.util.Collection; import org.junit.After; import org.junit.Ignore; import org.junit.Test; import org.guvnor.common.services.project.model.GAV; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import static org.junit.Assert.*; public class M2RepositoryTest { private static final Logger log = LoggerFactory.getLogger( M2RepositoryTest.class ); @After public void tearDown() throws Exception { log.info( "Creating a new Repository Instance.." ); File dir = new File( "repository" ); log.info( "DELETING test repo: " + dir.getAbsolutePath() ); deleteDir( dir ); log.info( "TEST repo was deleted." 
); } public static boolean deleteDir(File dir) { if ( dir.isDirectory() ) { String[] children = dir.list(); for ( int i = 0; i < children.length; i++ ) { if ( !deleteDir( new File( dir, children[i] ) ) ) { return false; } } } // The directory is now empty so delete it return dir.delete(); } @Test @Ignore("Fails - ignored for Beta3") public void testDeployArtifact() throws Exception { GuvnorM2Repository repo = new GuvnorM2Repository(); repo.init(); GAV gav = new GAV("org.kie.guvnor", "guvnor-m2repo-editor-backend", "6.0.0-SNAPSHOT"); InputStream is = this.getClass().getResourceAsStream("guvnor-m2repo-editor-backend-6.0.0-SNAPSHOT.jar"); repo.deployArtifact(is, gav); Collection<File> files = repo.listFiles(); boolean found = false; for(File file : files) { String fileName = file.getName(); if(fileName.startsWith("guvnor-m2repo-editor-backend-6.0.0") && fileName.endsWith(".jar")) { found = true; String path = file.getPath(); String pom = GuvnorM2Repository.loadPOMFromJar(path); assertNotNull(pom); break; } } assertTrue("Did not find expected file after calling M2Repository.addFile()", found); } @Test public void testListFiles() throws Exception { GuvnorM2Repository repo = new GuvnorM2Repository(); repo.init(); GAV gav = new GAV("org.kie.guvnor", "guvnor-m2repo-editor-backend", "6.0.0-SNAPSHOT"); InputStream is = this.getClass().getResourceAsStream("guvnor-m2repo-editor-backend-6.0.0-SNAPSHOT.jar"); repo.deployArtifact(is, gav); gav = new GAV("org.jboss.arquillian.core", "arquillian-core-api", "1.0.2.Final"); is = this.getClass().getResourceAsStream("guvnor-m2repo-editor-backend-6.0.0-SNAPSHOT.jar"); repo.deployArtifact(is, gav); Collection<File> files = repo.listFiles(); boolean found1 = false; boolean found2 = false; for(File file : files) { String fileName = file.getName(); if(fileName.startsWith("guvnor-m2repo-editor-backend-6.0.0") && fileName.endsWith(".jar")) { found1 = true; } if(fileName.startsWith("arquillian-core-api-1.0.2.Final") && 
fileName.endsWith(".jar")) { found2 = true; } } assertTrue("Did not find expected file after calling M2Repository.addFile()", found1); assertTrue("Did not find expected file after calling M2Repository.addFile()", found2); } @Test public void testListFilesWithFilter() throws Exception { GuvnorM2Repository repo = new GuvnorM2Repository(); repo.init(); GAV gav = new GAV("org.kie.guvnor", "guvnor-m2repo-editor-backend", "6.0.0-SNAPSHOT"); InputStream is = this.getClass().getResourceAsStream("guvnor-m2repo-editor-backend-6.0.0-SNAPSHOT.jar"); repo.deployArtifact(is, gav); gav = new GAV("org.jboss.arquillian.core", "arquillian-core-api", "1.0.2.Final"); is = this.getClass().getResourceAsStream("guvnor-m2repo-editor-backend-6.0.0-SNAPSHOT.jar"); repo.deployArtifact(is, gav); //filter with version number Collection<File> files = repo.listFiles("1.0.2"); boolean found1 = false; for(File file : files) { String fileName = file.getName(); if(fileName.startsWith("arquillian-core-api-1.0.2") && fileName.endsWith(".jar")) { found1 = true; } } assertTrue("Did not find expected file after calling M2Repository.addFile()", found1); /* //filter with group id files = repo.listFiles("org.kie.guvnor"); found1 = false; for(File file : files) { String fileName = file.getName(); if("guvnor-m2repo-editor-backend-6.0.0-SNAPSHOT.jar".equals(fileName)) { found1 = true; } } assertTrue("Did not find expected file after calling M2Repository.addFile()", found1);*/ //fileter with artifact id files = repo.listFiles("arquillian-core-api"); found1 = false; for(File file : files) { String fileName = file.getName(); if(fileName.startsWith("arquillian-core-api-1.0.2") && fileName.endsWith(".jar")) { found1 = true; } } assertTrue("Did not find expected file after calling M2Repository.addFile()", found1); } @Test @Ignore("Fails - ignored for Beta3") public void testDeleteFile() throws Exception { GuvnorM2Repository repo = new GuvnorM2Repository(); repo.init(); GAV gav = new GAV("org.kie.guvnor", 
"guvnor-m2repo-editor-backend", "6.0.0-SNAPSHOT"); InputStream is = this.getClass().getResourceAsStream("guvnor-m2repo-editor-backend-6.0.0-SNAPSHOT.jar"); repo.deployArtifact(is, gav); gav = new GAV("org.jboss.arquillian.core", "arquillian-core-api", "1.0.2.Final"); is = this.getClass().getResourceAsStream("guvnor-m2repo-editor-backend-6.0.0-SNAPSHOT.jar"); repo.deployArtifact(is, gav); Collection<File> files = repo.listFiles(); boolean found1 = false; boolean found2 = false; for(File file : files) { String fileName = file.getName(); if(fileName.startsWith("guvnor-m2repo-editor-backend-6.0.0") && fileName.endsWith(".jar")) { found1 = true; } if(fileName.startsWith("arquillian-core-api-1.0.2") && fileName.endsWith(".jar")) { found2 = true; } } assertTrue("Did not find expected file after calling M2Repository.addFile()", found1); assertTrue("Did not find expected file after calling M2Repository.addFile()", found2); boolean result = repo.deleteFile(new String[]{"repository"+ File.separator + "releases"+ File.separator + "org" + File.separator + "kie"+ File.separator + "guvnor"+ File.separator + "guvnor-m2repo-editor-backend"+ File.separator + "6.0.0-SNAPSHOT"+ File.separator + "guvnor-m2repo-editor-backend-6.0.0-SNAPSHOT.jar"}); result = repo.deleteFile(new String[]{"repository"+ File.separator + "org" + File.separator + "jboss"+ File.separator + "arquillian"+ File.separator + "core"+ File.separator + "arquillian-core-api"+ File.separator + "1.0.2.Final"+ File.separator + "arquillian-core-api-1.0.2.Final.jar"}); found1 = false; found2 = false; files = repo.listFiles(); for(File file : files) { String fileName = file.getName(); if(fileName.startsWith("guvnor-m2repo-editor-backend-6.0.0-SNAPSHOT.jar") && fileName.endsWith(".jar")) { found1 = true; } if(fileName.startsWith("arquillian-core-api-1.0.2.Final.jar") && fileName.endsWith(".jar")) { found2 = true; } } assertFalse("Found unexpected file after calling M2Repository.deleteFile()", found1); assertFalse("Found 
unexpected file after calling M2Repository.deleteFile()", found2); } @Test public void testLoadPom() throws Exception { GuvnorM2Repository repo = new GuvnorM2Repository(); repo.init(); GAV gav = new GAV("org.kie.guvnor", "guvnor-m2repo-editor-backend", "6.0.0-SNAPSHOT"); InputStream is = this.getClass().getResourceAsStream("guvnor-m2repo-editor-backend-6.0.0-SNAPSHOT.jar"); repo.deployArtifact(is, gav); /* String pom = repo.loadPOM("repository"+ File.separator + "org"+ File.separator + "kie"+ File.separator + "guvnor"+ File.separator + "guvnor-m2repo-editor-backend"+ File.separator + "6.0.0-SNAPSHOT"+ File.separator + "guvnor-m2repo-editor-backend-6.0.0-SNAPSHOT.jar"); assertNotNull(pom); assertTrue(pom.length() > 0);*/ } @Test public void testLoadPomFromInputStream() throws Exception { GuvnorM2Repository repo = new GuvnorM2Repository(); repo.init(); GAV gav = new GAV("org.kie.guvnor", "guvnor-m2repo-editor-backend", "6.0.0-SNAPSHOT"); InputStream is = this.getClass().getResourceAsStream("guvnor-m2repo-editor-backend-6.0.0-SNAPSHOT.jar"); /* String pom = repo.loadPOM(is); assertNotNull(pom); assertTrue(pom.length() > 0);*/ } }
/*
 * Copyright 2012-2017 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.boot.endpoint;

import java.lang.reflect.Method;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.stream.Collectors;

import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;

import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.annotation.AnnotationAttributes;
import org.springframework.util.ReflectionUtils;

import static org.assertj.core.api.Assertions.assertThat;

/**
 * Tests for {@link AnnotationEndpointDiscoverer}.
 *
 * @author Andy Wilkinson
 * @author Stephane Nicoll
 */
public class AnnotationEndpointDiscovererTests {

	@Rule
	public final ExpectedException thrown = ExpectedException.none();

	// Discovery on a context with no @Endpoint beans yields an empty result.
	@Test
	public void discoverWorksWhenThereAreNoEndpoints() {
		load(EmptyConfiguration.class,
				(context) -> assertThat(new TestAnnotationEndpointDiscoverer(context)
						.discoverEndpoints().isEmpty()));
	}

	// A single @Endpoint bean is discovered with one operation per annotated method.
	@Test
	public void endpointIsDiscovered() {
		load(TestEndpointConfiguration.class, (context) -> {
			Map<String, EndpointInfo<TestEndpointOperation>> endpoints = mapEndpoints(
					new TestAnnotationEndpointDiscoverer(context).discoverEndpoints());
			assertThat(endpoints).containsOnlyKeys("test");
			Map<Method, TestEndpointOperation> operations = mapOperations(
					endpoints.get("test"));
			assertThat(operations).hasSize(3);
			assertThat(operations).containsKeys(
					ReflectionUtils.findMethod(TestEndpoint.class, "getAll"),
					ReflectionUtils.findMethod(TestEndpoint.class, "getOne",
							String.class),
					ReflectionUtils.findMethod(TestEndpoint.class, "update",
							String.class, String.class));
		});
	}

	// Operations declared on a subclass are discovered alongside the inherited ones.
	@Test
	public void subclassedEndpointIsDiscovered() {
		load(TestEndpointSubclassConfiguration.class, (context) -> {
			Map<String, EndpointInfo<TestEndpointOperation>> endpoints = mapEndpoints(
					new TestAnnotationEndpointDiscoverer(context).discoverEndpoints());
			assertThat(endpoints).containsOnlyKeys("test");
			Map<Method, TestEndpointOperation> operations = mapOperations(
					endpoints.get("test"));
			assertThat(operations).hasSize(4);
			assertThat(operations).containsKeys(
					ReflectionUtils.findMethod(TestEndpoint.class, "getAll"),
					ReflectionUtils.findMethod(TestEndpoint.class, "getOne",
							String.class),
					ReflectionUtils.findMethod(TestEndpoint.class, "update",
							String.class, String.class),
					ReflectionUtils.findMethod(TestEndpointSubclass.class,
							"updateWithMoreArguments", String.class, String.class,
							String.class));
		});
	}

	// Two beans sharing the endpoint id "test" must make discovery fail fast.
	@Test
	public void discoveryFailsWhenTwoEndpointsHaveTheSameId() {
		load(ClashingEndpointConfiguration.class, (context) -> {
			this.thrown.expect(IllegalStateException.class);
			this.thrown.expectMessage("Found two endpoints with the id 'test': ");
			new TestAnnotationEndpointDiscoverer(context).discoverEndpoints();
		});
	}

	// A time-to-live of 0 disables caching of the main read operation.
	@Test
	public void endpointMainReadOperationIsNotCachedWithTtlSetToZero() {
		load(TestEndpointConfiguration.class, (context) -> {
			Map<String, EndpointInfo<TestEndpointOperation>> endpoints = mapEndpoints(
					new TestAnnotationEndpointDiscoverer(context,
							(endpointId) -> new CachingConfiguration(0))
									.discoverEndpoints());
			assertThat(endpoints).containsOnlyKeys("test");
			Map<Method, TestEndpointOperation> operations = mapOperations(
					endpoints.get("test"));
			assertThat(operations).hasSize(3);
			operations.values()
					.forEach(operation -> assertThat(operation.getOperationInvoker())
							.isNotInstanceOf(CachingOperationInvoker.class));
		});
	}

	// A caching configuration keyed to a different endpoint id must not apply here.
	@Test
	public void endpointMainReadOperationIsNotCachedWithNonMatchingId() {
		Function<String, CachingConfiguration> cachingConfigurationFactory = (
				endpointId) -> (endpointId.equals("foo") ? new CachingConfiguration(500)
						: new CachingConfiguration(0));
		load(TestEndpointConfiguration.class, (context) -> {
			Map<String, EndpointInfo<TestEndpointOperation>> endpoints = mapEndpoints(
					new TestAnnotationEndpointDiscoverer(context,
							cachingConfigurationFactory).discoverEndpoints());
			assertThat(endpoints).containsOnlyKeys("test");
			Map<Method, TestEndpointOperation> operations = mapOperations(
					endpoints.get("test"));
			assertThat(operations).hasSize(3);
			operations.values()
					.forEach(operation -> assertThat(operation.getOperationInvoker())
							.isNotInstanceOf(CachingOperationInvoker.class));
		});
	}

	// With a matching id and positive TTL only the no-arg read operation is cached;
	// operations that take arguments stay uncached.
	@Test
	public void endpointMainReadOperationIsCachedWithMatchingId() {
		Function<String, CachingConfiguration> cachingConfigurationFactory = (
				endpointId) -> (endpointId.equals("test") ? new CachingConfiguration(500)
						: new CachingConfiguration(0));
		load(TestEndpointConfiguration.class, (context) -> {
			Map<String, EndpointInfo<TestEndpointOperation>> endpoints = mapEndpoints(
					new TestAnnotationEndpointDiscoverer(context,
							cachingConfigurationFactory).discoverEndpoints());
			assertThat(endpoints).containsOnlyKeys("test");
			Map<Method, TestEndpointOperation> operations = mapOperations(
					endpoints.get("test"));
			OperationInvoker getAllOperationInvoker = operations
					.get(ReflectionUtils.findMethod(TestEndpoint.class, "getAll"))
					.getOperationInvoker();
			assertThat(getAllOperationInvoker)
					.isInstanceOf(CachingOperationInvoker.class);
			assertThat(((CachingOperationInvoker) getAllOperationInvoker).getTimeToLive())
					.isEqualTo(500);
			assertThat(operations.get(ReflectionUtils.findMethod(TestEndpoint.class,
					"getOne", String.class)).getOperationInvoker())
							.isNotInstanceOf(CachingOperationInvoker.class);
			assertThat(operations.get(ReflectionUtils.findMethod(TestEndpoint.class,
					"update", String.class, String.class)).getOperationInvoker())
							.isNotInstanceOf(CachingOperationInvoker.class);
		});
	}

	// Indexes endpoints by id, failing the test on a duplicate id.
	private Map<String, EndpointInfo<TestEndpointOperation>> mapEndpoints(
			Collection<EndpointInfo<TestEndpointOperation>> endpoints) {
		Map<String, EndpointInfo<TestEndpointOperation>> endpointById = new LinkedHashMap<>();
		endpoints.forEach((endpoint) -> {
			EndpointInfo<TestEndpointOperation> existing = endpointById
					.put(endpoint.getId(), endpoint);
			if (existing != null) {
				throw new AssertionError(String.format(
						"Found endpoints with duplicate id '%s'", endpoint.getId()));
			}
		});
		return endpointById;
	}

	// Indexes an endpoint's operations by reflective Method, failing on duplicates.
	private Map<Method, TestEndpointOperation> mapOperations(
			EndpointInfo<TestEndpointOperation> endpoint) {
		Map<Method, TestEndpointOperation> operationByMethod = new HashMap<>();
		endpoint.getOperations().forEach((operation) -> {
			EndpointOperation existing = operationByMethod
					.put(operation.getOperationMethod(), operation);
			if (existing != null) {
				throw new AssertionError(String.format(
						"Found endpoint with duplicate operation method '%s'",
						operation.getOperationMethod()));
			}
		});
		return operationByMethod;
	}

	// Runs the consumer against a fresh annotation-config context and always closes it.
	private void load(Class<?> configuration,
			Consumer<AnnotationConfigApplicationContext> consumer) {
		AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(
				configuration);
		try {
			consumer.accept(context);
		}
		finally {
			context.close();
		}
	}

	@Configuration
	static class EmptyConfiguration {

	}

	// Sample endpoint: two read operations, one write, plus a non-operation method
	// that discovery must ignore.
	@Endpoint(id = "test")
	static class TestEndpoint {

		@ReadOperation
		public Object getAll() {
			return null;
		}

		@ReadOperation
		public Object getOne(@Selector String id) {
			return null;
		}

		@WriteOperation
		public void update(String foo, String bar) {

		}

		public void someOtherMethod() {

		}

	}

	static class TestEndpointSubclass extends TestEndpoint {

		@WriteOperation
		public void updateWithMoreArguments(String foo, String bar, String baz) {

		}

	}

	@Configuration
	static class TestEndpointConfiguration {

		@Bean
		public TestEndpoint testEndpoint() {
			return new TestEndpoint();
		}

	}

	@Configuration
	static class TestEndpointSubclassConfiguration {

		@Bean
		public TestEndpointSubclass testEndpointSubclass() {
			return new TestEndpointSubclass();
		}

	}

	// Two beans of the same endpoint type => duplicate id "test".
	@Configuration
	static class ClashingEndpointConfiguration {

		@Bean
		public TestEndpoint testEndpointTwo() {
			return new TestEndpoint();
		}

		@Bean
		public TestEndpoint testEndpointOne() {
			return new TestEndpoint();
		}

	}

	// Operation wrapper that remembers which reflective Method it was built from.
	private static final class TestEndpointOperation extends EndpointOperation {

		private final Method operationMethod;

		private TestEndpointOperation(EndpointOperationType type,
				OperationInvoker operationInvoker, Method operationMethod) {
			super(type, operationInvoker, true);
			this.operationMethod = operationMethod;
		}

		private Method getOperationMethod() {
			return this.operationMethod;
		}

	}

	// Discoverer under test; wraps operations so tests can key them by Method, and
	// wraps invokers in CachingOperationInvoker when the configured TTL is positive.
	private static class TestAnnotationEndpointDiscoverer
			extends AnnotationEndpointDiscoverer<TestEndpointOperation, Method> {

		TestAnnotationEndpointDiscoverer(ApplicationContext applicationContext,
				Function<String, CachingConfiguration> cachingConfigurationFactory) {
			super(applicationContext, endpointOperationFactory(),
					TestEndpointOperation::getOperationMethod,
					cachingConfigurationFactory);
		}

		TestAnnotationEndpointDiscoverer(ApplicationContext applicationContext) {
			this(applicationContext, (id) -> null);
		}

		@Override
		public Collection<EndpointInfo<TestEndpointOperation>> discoverEndpoints() {
			return discoverEndpointsWithExtension(null, null).stream()
					.map(EndpointInfoDescriptor::getEndpointInfo)
					.collect(Collectors.toList());
		}

		private static EndpointOperationFactory<TestEndpointOperation> endpointOperationFactory() {
			return new EndpointOperationFactory<TestEndpointOperation>() {

				@Override
				public TestEndpointOperation createOperation(String endpointId,
						AnnotationAttributes operationAttributes, Object target,
						Method operationMethod, EndpointOperationType operationType,
						long timeToLive) {
					return new TestEndpointOperation(operationType,
							createOperationInvoker(timeToLive), operationMethod);
				}

				private OperationInvoker createOperationInvoker(long timeToLive) {
					OperationInvoker invoker = (arguments) -> null;
					if (timeToLive > 0) {
						return new CachingOperationInvoker(invoker, timeToLive);
					}
					else {
						return invoker;
					}
				}

			};
		}

	}

}
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.plugin.geospatial; import com.esri.core.geometry.Envelope; import com.esri.core.geometry.GeometryCursor; import com.esri.core.geometry.GeometryException; import com.esri.core.geometry.ListeningGeometryCursor; import com.esri.core.geometry.NonSimpleResult.Reason; import com.esri.core.geometry.OperatorUnion; import com.esri.core.geometry.Point; import com.esri.core.geometry.Polyline; import com.esri.core.geometry.ogc.OGCConcreteGeometryCollection; import com.esri.core.geometry.ogc.OGCGeometry; import com.esri.core.geometry.ogc.OGCLineString; import com.facebook.presto.geospatial.GeometryType; import com.facebook.presto.geospatial.KdbTree; import com.facebook.presto.geospatial.Rectangle; import com.facebook.presto.geospatial.serde.EsriGeometrySerde; import com.facebook.presto.geospatial.serde.GeometrySerializationType; import com.facebook.presto.spi.PrestoException; import com.facebook.presto.spi.block.Block; import com.facebook.presto.spi.block.BlockBuilder; import com.facebook.presto.spi.function.Description; import com.facebook.presto.spi.function.ScalarFunction; import com.facebook.presto.spi.function.SqlNullable; import com.facebook.presto.spi.function.SqlType; import com.facebook.presto.spi.type.IntegerType; import com.facebook.presto.spi.type.KdbTreeType; import com.google.common.base.Joiner; import com.google.common.base.VerifyException; import com.google.common.collect.ImmutableList; import 
com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import io.airlift.slice.BasicSliceInput; import io.airlift.slice.Slice; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.CoordinateSequence; import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.geom.GeometryCollection; import org.locationtech.jts.geom.GeometryFactory; import org.locationtech.jts.geom.LineString; import org.locationtech.jts.geom.MultiLineString; import org.locationtech.jts.geom.TopologyException; import org.locationtech.jts.geom.impl.PackedCoordinateSequenceFactory; import org.locationtech.jts.linearref.LengthIndexedLine; import java.util.ArrayList; import java.util.EnumSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.NoSuchElementException; import java.util.Objects; import java.util.Set; import static com.esri.core.geometry.NonSimpleResult.Reason.Clustering; import static com.esri.core.geometry.NonSimpleResult.Reason.Cracking; import static com.esri.core.geometry.NonSimpleResult.Reason.CrossOver; import static com.esri.core.geometry.NonSimpleResult.Reason.DegenerateSegments; import static com.esri.core.geometry.NonSimpleResult.Reason.OGCDisconnectedInterior; import static com.esri.core.geometry.NonSimpleResult.Reason.OGCPolygonSelfTangency; import static com.esri.core.geometry.NonSimpleResult.Reason.OGCPolylineSelfTangency; import static com.esri.core.geometry.ogc.OGCGeometry.createFromEsriGeometry; import static com.facebook.presto.geospatial.GeometryType.LINE_STRING; import static com.facebook.presto.geospatial.GeometryType.MULTI_LINE_STRING; import static com.facebook.presto.geospatial.GeometryType.MULTI_POINT; import static com.facebook.presto.geospatial.GeometryType.MULTI_POLYGON; import static com.facebook.presto.geospatial.GeometryType.POINT; import static com.facebook.presto.geospatial.GeometryType.POLYGON; import static 
com.facebook.presto.geospatial.GeometryUtils.createJtsEmptyLineString; import static com.facebook.presto.geospatial.GeometryUtils.createJtsEmptyPoint; import static com.facebook.presto.geospatial.GeometryUtils.createJtsEmptyPolygon; import static com.facebook.presto.geospatial.GeometryUtils.createJtsLineString; import static com.facebook.presto.geospatial.GeometryUtils.createJtsMultiPoint; import static com.facebook.presto.geospatial.GeometryUtils.createJtsPoint; import static com.facebook.presto.geospatial.GeometryUtils.flattenCollection; import static com.facebook.presto.geospatial.GeometryUtils.getGeometryInvalidReason; import static com.facebook.presto.geospatial.GeometryUtils.getPointCount; import static com.facebook.presto.geospatial.GeometryUtils.jtsGeometryFromWkt; import static com.facebook.presto.geospatial.GeometryUtils.wktFromJtsGeometry; import static com.facebook.presto.geospatial.serde.EsriGeometrySerde.deserializeEnvelope; import static com.facebook.presto.geospatial.serde.EsriGeometrySerde.deserializeType; import static com.facebook.presto.geospatial.serde.JtsGeometrySerde.deserialize; import static com.facebook.presto.geospatial.serde.JtsGeometrySerde.serialize; import static com.facebook.presto.plugin.geospatial.GeometryType.GEOMETRY; import static com.facebook.presto.plugin.geospatial.GeometryType.GEOMETRY_TYPE_NAME; import static com.facebook.presto.spi.StandardErrorCode.INVALID_FUNCTION_ARGUMENT; import static com.facebook.presto.spi.type.StandardTypes.BIGINT; import static com.facebook.presto.spi.type.StandardTypes.BOOLEAN; import static com.facebook.presto.spi.type.StandardTypes.DOUBLE; import static com.facebook.presto.spi.type.StandardTypes.INTEGER; import static com.facebook.presto.spi.type.StandardTypes.TINYINT; import static com.facebook.presto.spi.type.StandardTypes.VARBINARY; import static com.facebook.presto.spi.type.StandardTypes.VARCHAR; import static com.google.common.base.Preconditions.checkArgument; import static 
io.airlift.slice.Slices.utf8Slice;
import static io.airlift.slice.Slices.wrappedBuffer;
import static java.lang.Double.isInfinite;
import static java.lang.Double.isNaN;
import static java.lang.Math.toIntExact;
import static java.lang.String.format;
import static java.util.Arrays.setAll;
import static java.util.Objects.requireNonNull;
import static org.locationtech.jts.simplify.TopologyPreservingSimplifier.simplify;

/**
 * SQL scalar functions over GEOMETRY values, implemented on top of the Esri
 * geometry library and JTS. Each public method is registered as a Presto
 * scalar function via the {@code @ScalarFunction} annotation.
 */
public final class GeoFunctions
{
    private static final Joiner OR_JOINER = Joiner.on(" or ");
    // Serialized empty polygon, reused as the canonical "empty" result.
    private static final Slice EMPTY_POLYGON = serialize(createJtsEmptyPolygon());
    // Human-readable labels for Esri's non-simple-geometry diagnostic codes.
    private static final Map<Reason, String> NON_SIMPLE_REASONS = ImmutableMap.<Reason, String>builder()
            .put(DegenerateSegments, "Degenerate segments")
            .put(Clustering, "Repeated points")
            .put(Cracking, "Intersecting or overlapping segments")
            .put(CrossOver, "Self-intersection")
            .put(OGCPolylineSelfTangency, "Self-tangency")
            .put(OGCPolygonSelfTangency, "Self-tangency")
            .put(OGCDisconnectedInterior, "Disconnected interior")
            .build();
    // Geometries are bucketed by topological dimension (0, 1, 2) in stUnion.
    private static final int NUMBER_OF_DIMENSIONS = 3;
    private static final Block EMPTY_ARRAY_OF_INTS = IntegerType.INTEGER.createFixedSizeBlockBuilder(0).build();

    // Static utility class; never instantiated.
    private GeoFunctions() {}

    @Description("Returns a Geometry type LineString object from Well-Known Text representation (WKT)")
    @ScalarFunction("ST_LineFromText")
    @SqlType(GEOMETRY_TYPE_NAME)
    public static Slice parseLine(@SqlType(VARCHAR) Slice input)
    {
        Geometry geometry = jtsGeometryFromWkt(input.toStringUtf8());
        validateType("ST_LineFromText", geometry, EnumSet.of(LINE_STRING));
        return serialize(geometry);
    }

    @Description("Returns a LineString from an array of points")
    @ScalarFunction("ST_LineString")
    @SqlType(GEOMETRY_TYPE_NAME)
    public static Slice stLineString(@SqlType("array(" + GEOMETRY_TYPE_NAME + ")") Block input)
    {
        CoordinateSequence coordinates = readPointCoordinates(input, "ST_LineString", true);
        if (coordinates.size() < 2) {
            // Fewer than two points cannot form a line; return the empty line string.
            return serialize(createJtsEmptyLineString());
        }
        return
serialize(createJtsLineString(coordinates));
    }

    @Description("Returns a Geometry type Point object with the given coordinate values")
    @ScalarFunction("ST_Point")
    @SqlType(GEOMETRY_TYPE_NAME)
    public static Slice stPoint(@SqlType(DOUBLE) double x, @SqlType(DOUBLE) double y)
    {
        return serialize(createJtsPoint(x, y));
    }

    @SqlNullable
    @Description("Returns a multi-point geometry formed from input points")
    @ScalarFunction("ST_MultiPoint")
    @SqlType(GEOMETRY_TYPE_NAME)
    public static Slice stMultiPoint(@SqlType("array(" + GEOMETRY_TYPE_NAME + ")") Block input)
    {
        CoordinateSequence coordinates = readPointCoordinates(input, "ST_MultiPoint", false);
        if (coordinates.size() == 0) {
            // Empty input array maps to SQL NULL (function is @SqlNullable).
            return null;
        }
        return serialize(createJtsMultiPoint(coordinates));
    }

    /**
     * Extracts the (x, y) coordinates of an array of serialized POINT geometries.
     *
     * @param input array block; every element must be a non-null, non-empty POINT
     * @param functionName caller name, used only in error messages
     * @param forbidDuplicates when true, rejects consecutive identical points
     * @return a packed 2-dimensional coordinate sequence
     * @throws PrestoException (INVALID_FUNCTION_ARGUMENT) on null, non-point,
     *         empty-point, or (if requested) consecutive-duplicate elements
     */
    private static CoordinateSequence readPointCoordinates(Block input, String functionName, boolean forbidDuplicates)
    {
        PackedCoordinateSequenceFactory coordinateSequenceFactory = new PackedCoordinateSequenceFactory();
        double[] coordinates = new double[2 * input.getPositionCount()];
        double lastX = Double.NaN;
        double lastY = Double.NaN;
        for (int i = 0; i < input.getPositionCount(); i++) {
            if (input.isNull(i)) {
                throw new PrestoException(INVALID_FUNCTION_ARGUMENT, format("Invalid input to %s: null at index %s", functionName, i + 1));
            }
            BasicSliceInput slice = new BasicSliceInput(GEOMETRY.getSlice(input, i));
            GeometrySerializationType type = GeometrySerializationType.getForCode(slice.readByte());
            if (type != GeometrySerializationType.POINT) {
                throw new PrestoException(INVALID_FUNCTION_ARGUMENT, format("Invalid input to %s: geometry is not a point: %s at index %s", functionName, type.toString(), i + 1));
            }
            double x = slice.readDouble();
            double y = slice.readDouble();
            // BUG FIX: the original tested Double.isNaN(x) twice, so an empty point
            // whose y coordinate is NaN slipped past this validation.
            if (Double.isNaN(x) || Double.isNaN(y)) {
                throw new PrestoException(INVALID_FUNCTION_ARGUMENT, format("Invalid input to %s: empty point at index %s", functionName, i + 1));
            }
            if (forbidDuplicates && x == lastX && y == lastY) {
                throw new PrestoException(INVALID_FUNCTION_ARGUMENT, format("Invalid input to %s: consecutive duplicate points at index %s", functionName, i + 1));
            }
            lastX = x;
            lastY = y;
            coordinates[2 * i] = x;
            coordinates[2 * i + 1] = y;
        }
        // Dimension 2: the array is interpreted as interleaved (x, y) pairs.
        return coordinateSequenceFactory.create(coordinates, 2);
    }

    @Description("Returns a Geometry type Polygon object from Well-Known Text representation (WKT)")
    @ScalarFunction("ST_Polygon")
    @SqlType(GEOMETRY_TYPE_NAME)
    public static Slice stPolygon(@SqlType(VARCHAR) Slice input)
    {
        Geometry geometry = jtsGeometryFromWkt(input.toStringUtf8());
        validateType("ST_Polygon", geometry, EnumSet.of(POLYGON));
        return serialize(geometry);
    }

    @Description("Returns the 2D Euclidean area of a geometry")
    @ScalarFunction("ST_Area")
    @SqlType(DOUBLE)
    public static double stArea(@SqlType(GEOMETRY_TYPE_NAME) Slice input)
    {
        return deserialize(input).getArea();
    }

    @Description("Returns a Geometry type object from Well-Known Text representation (WKT)")
    @ScalarFunction("ST_GeometryFromText")
    @SqlType(GEOMETRY_TYPE_NAME)
    public static Slice stGeometryFromText(@SqlType(VARCHAR) Slice input)
    {
        return serialize(jtsGeometryFromWkt(input.toStringUtf8()));
    }

    @Description("Returns a Geometry type object from Well-Known Binary representation (WKB)")
    @ScalarFunction("ST_GeomFromBinary")
    @SqlType(GEOMETRY_TYPE_NAME)
    public static Slice stGeomFromBinary(@SqlType(VARBINARY) Slice input)
    {
        return EsriGeometrySerde.serialize(geomFromBinary(input));
    }

    @Description("Returns the Well-Known Text (WKT) representation of the geometry")
    @ScalarFunction("ST_AsText")
    @SqlType(VARCHAR)
    public static Slice stAsText(@SqlType(GEOMETRY_TYPE_NAME) Slice input)
    {
        return utf8Slice(wktFromJtsGeometry(deserialize(input)));
    }

    @Description("Returns the Well-Known Binary (WKB) representation of the geometry")
    @ScalarFunction("ST_AsBinary")
    @SqlType(VARBINARY)
    public static Slice stAsBinary(@SqlType(GEOMETRY_TYPE_NAME) Slice input)
    {
        try {
            return wrappedBuffer(EsriGeometrySerde.deserialize(input).asBinary());
        }
        catch (GeometryException e) {
            throw new
PrestoException(INVALID_FUNCTION_ARGUMENT, "Invalid geometry: " + e.getMessage(), e);
        }
    }

    @SqlNullable
    @Description("Returns the geometry that represents all points whose distance from the specified geometry is less than or equal to the specified distance")
    @ScalarFunction("ST_Buffer")
    @SqlType(GEOMETRY_TYPE_NAME)
    public static Slice stBuffer(@SqlType(GEOMETRY_TYPE_NAME) Slice input, @SqlType(DOUBLE) double distance)
    {
        if (isNaN(distance)) {
            throw new PrestoException(INVALID_FUNCTION_ARGUMENT, "distance is NaN");
        }
        if (distance < 0) {
            throw new PrestoException(INVALID_FUNCTION_ARGUMENT, "distance is negative");
        }
        if (distance == 0) {
            // Buffering by zero is the identity; skip deserialization entirely.
            return input;
        }
        Geometry geometry = deserialize(input);
        if (geometry.isEmpty()) {
            // Buffer of an empty geometry is NULL (function is @SqlNullable).
            return null;
        }
        return serialize(geometry.buffer(distance));
    }

    @Description("Returns the Point value that is the mathematical centroid of a Geometry")
    @ScalarFunction("ST_Centroid")
    @SqlType(GEOMETRY_TYPE_NAME)
    public static Slice stCentroid(@SqlType(GEOMETRY_TYPE_NAME) Slice input)
    {
        Geometry geometry = deserialize(input);
        validateType("ST_Centroid", geometry, EnumSet.of(POINT, MULTI_POINT, LINE_STRING, MULTI_LINE_STRING, POLYGON, MULTI_POLYGON));
        GeometryType geometryType = GeometryType.getForJtsGeometryType(geometry.getGeometryType());
        if (geometryType == GeometryType.POINT) {
            // A point is its own centroid.
            return input;
        }
        if (geometry.getNumPoints() == 0) {
            return serialize(createJtsEmptyPoint());
        }
        return serialize(geometry.getCentroid());
    }

    @Description("Returns the minimum convex geometry that encloses all input geometries")
    @ScalarFunction("ST_ConvexHull")
    @SqlType(GEOMETRY_TYPE_NAME)
    public static Slice stConvexHull(@SqlType(GEOMETRY_TYPE_NAME) Slice input)
    {
        OGCGeometry geometry = EsriGeometrySerde.deserialize(input);
        if (geometry.isEmpty()) {
            return input;
        }
        if (GeometryType.getForEsriGeometryType(geometry.geometryType()) == POINT) {
            // The hull of a single point is the point itself.
            return input;
        }
        return EsriGeometrySerde.serialize(geometry.convexHull());
    }

    @Description("Return the coordinate dimension of the Geometry")
    @ScalarFunction("ST_CoordDim")
    @SqlType(TINYINT)
    public static long stCoordinateDimension(@SqlType(GEOMETRY_TYPE_NAME) Slice input)
    {
        return EsriGeometrySerde.deserialize(input).coordinateDimension();
    }

    @Description("Returns the inherent dimension of this Geometry object, which must be less than or equal to the coordinate dimension")
    @ScalarFunction("ST_Dimension")
    @SqlType(TINYINT)
    public static long stDimension(@SqlType(GEOMETRY_TYPE_NAME) Slice input)
    {
        return deserialize(input).getDimension();
    }

    @SqlNullable
    @Description("Returns TRUE if the LineString or Multi-LineString's start and end points are coincident")
    @ScalarFunction("ST_IsClosed")
    @SqlType(BOOLEAN)
    public static Boolean stIsClosed(@SqlType(GEOMETRY_TYPE_NAME) Slice input)
    {
        Geometry geometry = deserialize(input);
        validateType("ST_IsClosed", geometry, EnumSet.of(LINE_STRING, MULTI_LINE_STRING));
        if (geometry instanceof LineString) {
            return ((LineString) geometry).isClosed();
        }
        else if (geometry instanceof MultiLineString) {
            return ((MultiLineString) geometry).isClosed();
        }
        // This would be handled in validateType, but for completeness.
        throw new PrestoException(INVALID_FUNCTION_ARGUMENT, format("Invalid type for isClosed: %s", geometry.getGeometryType()));
    }

    @SqlNullable
    @Description("Returns TRUE if this Geometry is an empty geometrycollection, polygon, point etc")
    @ScalarFunction("ST_IsEmpty")
    @SqlType(BOOLEAN)
    public static Boolean stIsEmpty(@SqlType(GEOMETRY_TYPE_NAME) Slice input)
    {
        // Only the envelope is needed to decide emptiness; avoids full deserialization.
        return deserializeEnvelope(input).isEmpty();
    }

    @Description("Returns TRUE if this Geometry has no anomalous geometric points, such as self intersection or self tangency")
    @ScalarFunction("ST_IsSimple")
    @SqlType(BOOLEAN)
    public static boolean stIsSimple(@SqlType(GEOMETRY_TYPE_NAME) Slice input)
    {
        try {
            return deserialize(input).isSimple();
        }
        catch (PrestoException e) {
            // A topology failure during the check means the geometry is not simple.
            if (e.getCause() instanceof TopologyException) {
                return false;
            }
            throw e;
        }
    }

    @Description("Returns true if the input geometry is well formed")
    @ScalarFunction("ST_IsValid")
    @SqlType(BOOLEAN)
    public static boolean stIsValid(@SqlType(GEOMETRY_TYPE_NAME) Slice input)
    {
        try {
            return deserialize(input).isValid();
        }
        catch (PrestoException e) {
            // A topology failure during the check means the geometry is invalid.
            if (e.getCause() instanceof TopologyException) {
                return false;
            }
            throw e;
        }
    }

    @Description("Returns the reason for why the input geometry is not valid. Returns null if the input is valid.")
    @ScalarFunction("geometry_invalid_reason")
    @SqlType(VARCHAR)
    @SqlNullable
    public static Slice invalidReason(@SqlType(GEOMETRY_TYPE_NAME) Slice input)
    {
        try {
            Geometry geometry = deserialize(input);
            return utf8Slice(getGeometryInvalidReason(geometry));
        }
        catch (PrestoException e) {
            if (e.getCause() instanceof TopologyException) {
                // Surface the topology error text as the invalidity reason.
                return utf8Slice(e.getMessage());
            }
            throw e;
        }
    }

    @Description("Returns the length of a LineString or Multi-LineString using Euclidean measurement on a 2D plane (based on spatial ref) in projected units")
    @ScalarFunction("ST_Length")
    @SqlType(DOUBLE)
    public static double stLength(@SqlType(GEOMETRY_TYPE_NAME) Slice input)
    {
        Geometry geometry = deserialize(input);
        validateType("ST_Length", geometry, EnumSet.of(LINE_STRING, MULTI_LINE_STRING));
        return geometry.getLength();
    }

    @SqlNullable
    @Description("Returns a float between 0 and 1 representing the location of the closest point on the LineString to the given Point, as a fraction of total 2d line length.")
    @ScalarFunction("line_locate_point")
    @SqlType(DOUBLE)
    public static Double lineLocatePoint(@SqlType(GEOMETRY_TYPE_NAME) Slice lineSlice, @SqlType(GEOMETRY_TYPE_NAME) Slice pointSlice)
    {
        Geometry line = deserialize(lineSlice);
        Geometry point = deserialize(pointSlice);
        if (line.isEmpty() || point.isEmpty()) {
            return null;
        }
        GeometryType lineType = GeometryType.getForJtsGeometryType(line.getGeometryType());
        if (lineType != GeometryType.LINE_STRING && lineType != GeometryType.MULTI_LINE_STRING) {
            throw new PrestoException(INVALID_FUNCTION_ARGUMENT, format("First argument to line_locate_point must be a LineString or a MultiLineString. Got: %s", line.getGeometryType()));
        }
        GeometryType pointType = GeometryType.getForJtsGeometryType(point.getGeometryType());
        if (pointType != GeometryType.POINT) {
            throw new PrestoException(INVALID_FUNCTION_ARGUMENT, format("Second argument to line_locate_point must be a Point. Got: %s", point.getGeometryType()));
        }
        // Normalize the length-indexed position by total length to get a [0, 1] fraction.
        return new LengthIndexedLine(line).indexOf(point.getCoordinate()) / line.getLength();
    }

    @Description("Returns the point in the line at the fractional length.")
    @ScalarFunction("line_interpolate_point")
    @SqlType(GEOMETRY_TYPE_NAME)
    public static Slice lineInterpolatePoint(@SqlType(GEOMETRY_TYPE_NAME) Slice lineSlice, @SqlType(DOUBLE) double fraction)
    {
        if (!(0.0 <= fraction && fraction <= 1.0)) {
            throw new PrestoException(INVALID_FUNCTION_ARGUMENT, format("line_interpolate_point: Fraction must be between 0 and 1, but is %s", fraction));
        }
        Geometry geometry = deserialize(lineSlice);
        validateType("line_interpolate_point", geometry, ImmutableSet.of(LINE_STRING));
        LineString line = (LineString) geometry;
        if (line.isEmpty()) {
            return serialize(createJtsEmptyPoint());
        }
        org.locationtech.jts.geom.Coordinate coordinate = new LengthIndexedLine(line).extractPoint(fraction * line.getLength());
        return serialize(createJtsPoint(coordinate));
    }

    @SqlNullable
    @Description("Returns X maxima of a bounding box of a Geometry")
    @ScalarFunction("ST_XMax")
    @SqlType(DOUBLE)
    public static Double stXMax(@SqlType(GEOMETRY_TYPE_NAME) Slice input)
    {
        Envelope envelope = deserializeEnvelope(input);
        if (envelope.isEmpty()) {
            return null;
        }
        return envelope.getXMax();
    }

    @SqlNullable
    @Description("Returns Y maxima of a bounding box of a Geometry")
    @ScalarFunction("ST_YMax")
    @SqlType(DOUBLE)
    public static Double stYMax(@SqlType(GEOMETRY_TYPE_NAME) Slice input)
    {
        Envelope envelope = deserializeEnvelope(input);
        if (envelope.isEmpty()) {
            return null;
        }
        return envelope.getYMax();
    }

    @SqlNullable
    @Description("Returns X minima of a bounding box of a Geometry")
    @ScalarFunction("ST_XMin")
    @SqlType(DOUBLE)
    public static Double stXMin(@SqlType(GEOMETRY_TYPE_NAME) Slice input)
    {
        Envelope envelope = deserializeEnvelope(input);
        if (envelope.isEmpty()) {
            return null;
        }
        return envelope.getXMin();
    }

    @SqlNullable
    @Description("Returns Y minima of a bounding box of a Geometry")
    @ScalarFunction("ST_YMin")
    @SqlType(DOUBLE)
    public static Double stYMin(@SqlType(GEOMETRY_TYPE_NAME) Slice input)
    {
        Envelope envelope = deserializeEnvelope(input);
        if (envelope.isEmpty()) {
            return null;
        }
        return envelope.getYMin();
    }

    @SqlNullable
    @Description("Returns the cardinality of the collection of interior rings of a polygon")
    @ScalarFunction("ST_NumInteriorRing")
    @SqlType(BIGINT)
    public static Long stNumInteriorRings(@SqlType(GEOMETRY_TYPE_NAME) Slice input)
    {
        Geometry geometry = deserialize(input);
        validateType("ST_NumInteriorRing", geometry, EnumSet.of(POLYGON));
        if (geometry.isEmpty()) {
            return null;
        }
        return Long.valueOf(((org.locationtech.jts.geom.Polygon) geometry).getNumInteriorRing());
    }

    @SqlNullable
    @Description("Returns an array of interior rings of a polygon")
    @ScalarFunction("ST_InteriorRings")
    @SqlType("array(" + GEOMETRY_TYPE_NAME + ")")
    public static Block stInteriorRings(@SqlType(GEOMETRY_TYPE_NAME) Slice input)
    {
        Geometry geometry = deserialize(input);
        validateType("ST_InteriorRings", geometry, EnumSet.of(POLYGON));
        if (geometry.isEmpty()) {
            return null;
        }
        org.locationtech.jts.geom.Polygon polygon = (org.locationtech.jts.geom.Polygon) geometry;
        BlockBuilder blockBuilder = GEOMETRY.createBlockBuilder(null, polygon.getNumInteriorRing());
        for (int i = 0; i < polygon.getNumInteriorRing(); i++) {
            GEOMETRY.writeSlice(blockBuilder, serialize((LineString) polygon.getInteriorRingN(i)));
        }
        return blockBuilder.build();
    }

    @Description("Returns the cardinality of the geometry collection")
    @ScalarFunction("ST_NumGeometries")
    @SqlType(INTEGER)
    public static long stNumGeometries(@SqlType(GEOMETRY_TYPE_NAME) Slice input)
    {
        Geometry geometry = deserialize(input);
        if (geometry.isEmpty()) {
            return 0;
        }
        return geometry.getNumGeometries();
    }

    @Description("Returns a geometry that represents the point set union of the input geometries.")
    @ScalarFunction("ST_Union")
    @SqlType(GEOMETRY_TYPE_NAME)
    public static Slice stUnion(@SqlType(GEOMETRY_TYPE_NAME) Slice left,
@SqlType(GEOMETRY_TYPE_NAME) Slice right)
    {
        return stUnion(ImmutableList.of(left, right));
    }

    @Description("Returns a geometry that represents the point set union of the input geometries.")
    @ScalarFunction("geometry_union")
    @SqlType(GEOMETRY_TYPE_NAME)
    public static Slice geometryUnion(@SqlType("array(" + GEOMETRY_TYPE_NAME + ")") Block input)
    {
        return stUnion(getGeometrySlicesFromBlock(input));
    }

    // Unions an arbitrary number of serialized geometries, bucketing them by
    // topological dimension so the Esri union operator does not drop lower-dimensional parts.
    private static Slice stUnion(Iterable<Slice> slices)
    {
        // The current state of Esri/geometry-api-java does not allow support for multiple dimensions being
        // fed to the union operator without dropping the lower dimensions:
        // https://github.com/Esri/geometry-api-java/issues/199
        // When operating over a collection of geometries, it is more efficient to reuse the same operator
        // for the entire operation. Therefore, split the inputs and operators by dimension, and then union
        // each dimension's result at the end.
        ListeningGeometryCursor[] cursorsByDimension = new ListeningGeometryCursor[NUMBER_OF_DIMENSIONS];
        GeometryCursor[] operatorsByDimension = new GeometryCursor[NUMBER_OF_DIMENSIONS];
        setAll(cursorsByDimension, i -> new ListeningGeometryCursor());
        setAll(operatorsByDimension, i -> OperatorUnion.local().execute(cursorsByDimension[i], null, null));
        Iterator<Slice> slicesIterator = slices.iterator();
        if (!slicesIterator.hasNext()) {
            return null;
        }
        while (slicesIterator.hasNext()) {
            Slice slice = slicesIterator.next();
            // Ignore null inputs
            if (slice.getInput().available() == 0) {
                continue;
            }
            for (OGCGeometry geometry : flattenCollection(EsriGeometrySerde.deserialize(slice))) {
                int dimension = geometry.dimension();
                // tick feeds the geometry to the cursor; tock lets the operator consume it incrementally.
                cursorsByDimension[dimension].tick(geometry.getEsriGeometry());
                operatorsByDimension[dimension].tock();
            }
        }
        List<OGCGeometry> outputs = new ArrayList<>();
        for (GeometryCursor operator : operatorsByDimension) {
            OGCGeometry unionedGeometry = createFromEsriGeometry(operator.next(), null);
            if (unionedGeometry != null) {
                outputs.add(unionedGeometry);
            }
        }
        if (outputs.size() == 1) {
            return EsriGeometrySerde.serialize(outputs.get(0));
        }
        // Merge the per-dimension results into a single (possibly mixed) geometry.
        return EsriGeometrySerde.serialize(new OGCConcreteGeometryCollection(outputs, null).flattenAndRemoveOverlaps().reduceFromMulti());
    }

    @SqlNullable
    @Description("Returns the geometry element at the specified index (indices started with 1)")
    @ScalarFunction("ST_GeometryN")
    @SqlType(GEOMETRY_TYPE_NAME)
    public static Slice stGeometryN(@SqlType(GEOMETRY_TYPE_NAME) Slice input, @SqlType(INTEGER) long index)
    {
        Geometry geometry = deserialize(input);
        if (geometry.isEmpty()) {
            return null;
        }
        GeometryType type = GeometryType.getForJtsGeometryType(geometry.getGeometryType());
        if (!type.isMultitype()) {
            // A non-collection geometry is treated as a singleton collection.
            if (index == 1) {
                return input;
            }
            return null;
        }
        GeometryCollection geometryCollection = ((GeometryCollection) geometry);
        if (index < 1 || index > geometryCollection.getNumGeometries()) {
            return null;
        }
        return serialize(geometryCollection.getGeometryN((int) index - 1));
    }

    @SqlNullable
    @Description("Returns the vertex of a linestring at the specified index (indices started with 1) ")
    @ScalarFunction("ST_PointN")
    @SqlType(GEOMETRY_TYPE_NAME)
    public static Slice stPointN(@SqlType(GEOMETRY_TYPE_NAME) Slice input, @SqlType(INTEGER) long index)
    {
        Geometry geometry = deserialize(input);
        validateType("ST_PointN", geometry, EnumSet.of(LINE_STRING));
        LineString linestring = (LineString) geometry;
        if (index < 1 || index > linestring.getNumPoints()) {
            return null;
        }
        return serialize(linestring.getPointN(toIntExact(index) - 1));
    }

    @SqlNullable
    @Description("Returns an array of geometries in the specified collection")
    @ScalarFunction("ST_Geometries")
    @SqlType("array(" + GEOMETRY_TYPE_NAME + ")")
    public static Block stGeometries(@SqlType(GEOMETRY_TYPE_NAME) Slice input)
    {
        Geometry geometry = deserialize(input);
        if (geometry.isEmpty()) {
            return null;
        }
        GeometryType type = GeometryType.getForJtsGeometryType(geometry.getGeometryType());
        if (!type.isMultitype()) {
            // Singleton: wrap the geometry itself in a one-element array.
            BlockBuilder blockBuilder = GEOMETRY.createBlockBuilder(null, 1);
            GEOMETRY.writeSlice(blockBuilder, serialize(geometry));
            return blockBuilder.build();
        }
        GeometryCollection collection = (GeometryCollection) geometry;
        BlockBuilder blockBuilder = GEOMETRY.createBlockBuilder(null, collection.getNumGeometries());
        for (int i = 0; i < collection.getNumGeometries(); i++) {
            GEOMETRY.writeSlice(blockBuilder, serialize(collection.getGeometryN(i)));
        }
        return blockBuilder.build();
    }

    @SqlNullable
    @Description("Returns the interior ring element at the specified index (indices start at 1)")
    @ScalarFunction("ST_InteriorRingN")
    @SqlType(GEOMETRY_TYPE_NAME)
    public static Slice stInteriorRingN(@SqlType(GEOMETRY_TYPE_NAME) Slice input, @SqlType(INTEGER) long index)
    {
        Geometry geometry = deserialize(input);
        validateType("ST_InteriorRingN", geometry, EnumSet.of(POLYGON));
        org.locationtech.jts.geom.Polygon polygon = (org.locationtech.jts.geom.Polygon) geometry;
        if (index < 1 || index > polygon.getNumInteriorRing()) {
            return null;
        }
        return serialize(polygon.getInteriorRingN(toIntExact(index) - 1));
    }

    @Description("Returns the number of points in a Geometry")
    @ScalarFunction("ST_NumPoints")
    @SqlType(BIGINT)
    public static long stNumPoints(@SqlType(GEOMETRY_TYPE_NAME) Slice input)
    {
        return getPointCount(EsriGeometrySerde.deserialize(input));
    }

    @SqlNullable
    @Description("Returns TRUE if and only if the line is closed and simple")
    @ScalarFunction("ST_IsRing")
    @SqlType(BOOLEAN)
    public static Boolean stIsRing(@SqlType(GEOMETRY_TYPE_NAME) Slice input)
    {
        OGCGeometry geometry = EsriGeometrySerde.deserialize(input);
        validateType("ST_IsRing", geometry, EnumSet.of(LINE_STRING));
        OGCLineString line = (OGCLineString) geometry;
        return line.isClosed() && line.isSimple();
    }

    @SqlNullable
    @Description("Returns the first point of a LINESTRING geometry as a Point")
    @ScalarFunction("ST_StartPoint")
    @SqlType(GEOMETRY_TYPE_NAME)
    public static Slice stStartPoint(@SqlType(GEOMETRY_TYPE_NAME) Slice input)
    {
        Geometry geometry = deserialize(input);
        validateType("ST_StartPoint", geometry, EnumSet.of(LINE_STRING));
        if (geometry.isEmpty()) {
            return null;
        }
        return serialize(((LineString) geometry).getStartPoint());
    }

    @Description("Returns a \"simplified\" version of the given geometry")
    @ScalarFunction("simplify_geometry")
    @SqlType(GEOMETRY_TYPE_NAME)
    public static Slice simplifyGeometry(@SqlType(GEOMETRY_TYPE_NAME) Slice input, @SqlType(DOUBLE) double distanceTolerance)
    {
        if (isNaN(distanceTolerance)) {
            throw new PrestoException(INVALID_FUNCTION_ARGUMENT, "distanceTolerance is NaN");
        }
        if (distanceTolerance < 0) {
            throw new PrestoException(INVALID_FUNCTION_ARGUMENT, "distanceTolerance is negative");
        }
        if (distanceTolerance == 0) {
            // Zero tolerance simplification is the identity.
            return input;
        }
        return serialize(simplify(deserialize(input), distanceTolerance));
    }

    @SqlNullable
    @Description("Returns the last point of a LINESTRING geometry as a Point")
    @ScalarFunction("ST_EndPoint")
    @SqlType(GEOMETRY_TYPE_NAME)
    public static Slice stEndPoint(@SqlType(GEOMETRY_TYPE_NAME) Slice input)
    {
        Geometry geometry = deserialize(input);
        validateType("ST_EndPoint", geometry, EnumSet.of(LINE_STRING));
        if (geometry.isEmpty()) {
            return null;
        }
        return serialize(((LineString) geometry).getEndPoint());
    }

    @SqlNullable
    @Description("Returns an array of points in a geometry")
    @ScalarFunction("ST_Points")
    @SqlType("array(" + GEOMETRY_TYPE_NAME + ")")
    public static Block stPoints(@SqlType(GEOMETRY_TYPE_NAME) Slice input)
    {
        Geometry geometry = deserialize(input);
        if (geometry.isEmpty()) {
            return null;
        }
        BlockBuilder blockBuilder = GEOMETRY.createBlockBuilder(null, geometry.getNumPoints());
        buildPointsBlock(geometry, blockBuilder);
        return blockBuilder.build();
    }

    // Recursively appends every vertex of the geometry to the block builder as a POINT.
    private static void buildPointsBlock(Geometry geometry, BlockBuilder blockBuilder)
    {
        GeometryType type = GeometryType.getForJtsGeometryType(geometry.getGeometryType());
        if (type == GeometryType.POINT) {
            GEOMETRY.writeSlice(blockBuilder, serialize(geometry));
        }
        else if (type == GeometryType.GEOMETRY_COLLECTION) {
            GeometryCollection collection = (GeometryCollection) geometry;
            for (int i = 0; i < collection.getNumGeometries(); i++) {
                Geometry entry = collection.getGeometryN(i);
                buildPointsBlock(entry, blockBuilder);
            }
        }
        else {
            GeometryFactory geometryFactory = geometry.getFactory();
            Coordinate[] vertices = geometry.getCoordinates();
            for (Coordinate coordinate : vertices) {
                GEOMETRY.writeSlice(blockBuilder, serialize(geometryFactory.createPoint(coordinate)));
            }
        }
    }

    @SqlNullable
    @Description("Return the X coordinate of the point")
    @ScalarFunction("ST_X")
    @SqlType(DOUBLE)
    public static Double stX(@SqlType(GEOMETRY_TYPE_NAME) Slice input)
    {
        Geometry geometry = deserialize(input);
        validateType("ST_X", geometry, EnumSet.of(POINT));
        if (geometry.isEmpty()) {
            return null;
        }
        return ((org.locationtech.jts.geom.Point) geometry).getX();
    }

    @SqlNullable
    @Description("Return the Y coordinate of the point")
    @ScalarFunction("ST_Y")
    @SqlType(DOUBLE)
    public static Double stY(@SqlType(GEOMETRY_TYPE_NAME) Slice input)
    {
        Geometry geometry = deserialize(input);
        validateType("ST_Y", geometry, EnumSet.of(POINT));
        if (geometry.isEmpty()) {
            return null;
        }
        return ((org.locationtech.jts.geom.Point) geometry).getY();
    }

    @Description("Returns the closure of the combinatorial boundary of this Geometry")
    @ScalarFunction("ST_Boundary")
    @SqlType(GEOMETRY_TYPE_NAME)
    public static Slice stBoundary(@SqlType(GEOMETRY_TYPE_NAME) Slice input)
    {
        return serialize(deserialize(input).getBoundary());
    }

    @Description("Returns the bounding rectangular polygon of a Geometry")
    @ScalarFunction("ST_Envelope")
    @SqlType(GEOMETRY_TYPE_NAME)
    public static Slice stEnvelope(@SqlType(GEOMETRY_TYPE_NAME) Slice input)
    {
        Envelope envelope = deserializeEnvelope(input);
        if (envelope.isEmpty()) {
            return EMPTY_POLYGON;
        }
        return EsriGeometrySerde.serialize(envelope);
    }

    @SqlNullable
    @Description("Returns the lower left and upper right corners of bounding rectangular polygon of a Geometry")
    @ScalarFunction("ST_EnvelopeAsPts")
    @SqlType("array(" + GEOMETRY_TYPE_NAME + ")")
    public static Block stEnvelopeAsPts(@SqlType(GEOMETRY_TYPE_NAME) Slice input)
    {
        Envelope envelope = deserializeEnvelope(input);
        if (envelope.isEmpty()) {
            return null;
        }
        BlockBuilder blockBuilder = GEOMETRY.createBlockBuilder(null, 2);
        org.locationtech.jts.geom.Point lowerLeftCorner = createJtsPoint(envelope.getXMin(), envelope.getYMin());
        org.locationtech.jts.geom.Point upperRightCorner = createJtsPoint(envelope.getXMax(), envelope.getYMax());
        GEOMETRY.writeSlice(blockBuilder, serialize(lowerLeftCorner));
        GEOMETRY.writeSlice(blockBuilder, serialize(upperRightCorner));
        return blockBuilder.build();
    }

    @Description("Returns the bounding rectangle of a Geometry expanded by distance.")
    @ScalarFunction("expand_envelope")
    @SqlType(GEOMETRY_TYPE_NAME)
    public static Slice expandEnvelope(@SqlType(GEOMETRY_TYPE_NAME) Slice input, @SqlType(DOUBLE) double distance)
    {
        if (isNaN(distance)) {
            throw new PrestoException(INVALID_FUNCTION_ARGUMENT, "expand_envelope: distance is NaN");
        }
        if (distance < 0) {
            throw new PrestoException(INVALID_FUNCTION_ARGUMENT, format("expand_envelope: distance %s is negative", distance));
        }
        Envelope envelope = deserializeEnvelope(input);
        if (envelope.isEmpty()) {
            return EMPTY_POLYGON;
        }
        return EsriGeometrySerde.serialize(new Envelope(
                envelope.getXMin() - distance,
                envelope.getYMin() - distance,
                envelope.getXMax() + distance,
                envelope.getYMax() + distance));
    }

    @Description("Returns the Geometry value that represents the point set difference of two geometries")
    @ScalarFunction("ST_Difference")
    @SqlType(GEOMETRY_TYPE_NAME)
    public static Slice stDifference(@SqlType(GEOMETRY_TYPE_NAME) Slice left, @SqlType(GEOMETRY_TYPE_NAME) Slice right)
    {
        OGCGeometry leftGeometry = EsriGeometrySerde.deserialize(left);
        OGCGeometry rightGeometry = EsriGeometrySerde.deserialize(right);
        verifySameSpatialReference(leftGeometry, rightGeometry);
        return EsriGeometrySerde.serialize(leftGeometry.difference(rightGeometry));
    }

    @SqlNullable
@Description("Returns the 2-dimensional cartesian minimum distance (based on spatial ref) between two geometries in projected units") @ScalarFunction("ST_Distance") @SqlType(DOUBLE) public static Double stDistance(@SqlType(GEOMETRY_TYPE_NAME) Slice left, @SqlType(GEOMETRY_TYPE_NAME) Slice right) { OGCGeometry leftGeometry = EsriGeometrySerde.deserialize(left); OGCGeometry rightGeometry = EsriGeometrySerde.deserialize(right); verifySameSpatialReference(leftGeometry, rightGeometry); return leftGeometry.isEmpty() || rightGeometry.isEmpty() ? null : leftGeometry.distance(rightGeometry); } @SqlNullable @Description("Returns a line string representing the exterior ring of the POLYGON") @ScalarFunction("ST_ExteriorRing") @SqlType(GEOMETRY_TYPE_NAME) public static Slice stExteriorRing(@SqlType(GEOMETRY_TYPE_NAME) Slice input) { Geometry geometry = deserialize(input); validateType("ST_ExteriorRing", geometry, EnumSet.of(POLYGON)); if (geometry.isEmpty()) { return null; } return serialize(((org.locationtech.jts.geom.Polygon) geometry).getExteriorRing()); } @Description("Returns the Geometry value that represents the point set intersection of two Geometries") @ScalarFunction("ST_Intersection") @SqlType(GEOMETRY_TYPE_NAME) public static Slice stIntersection(@SqlType(GEOMETRY_TYPE_NAME) Slice left, @SqlType(GEOMETRY_TYPE_NAME) Slice right) { GeometrySerializationType leftType = deserializeType(left); GeometrySerializationType rightType = deserializeType(right); if (leftType == GeometrySerializationType.ENVELOPE && rightType == GeometrySerializationType.ENVELOPE) { Envelope leftEnvelope = deserializeEnvelope(left); Envelope rightEnvelope = deserializeEnvelope(right); // Envelope#intersect updates leftEnvelope to the intersection of the two envelopes if (!leftEnvelope.intersect(rightEnvelope)) { return EMPTY_POLYGON; } Envelope intersection = leftEnvelope; if (intersection.getXMin() == intersection.getXMax()) { if (intersection.getYMin() == intersection.getYMax()) { return 
EsriGeometrySerde.serialize(createFromEsriGeometry(new Point(intersection.getXMin(), intersection.getXMax()), null)); } return EsriGeometrySerde.serialize(createFromEsriGeometry(new Polyline(new Point(intersection.getXMin(), intersection.getYMin()), new Point(intersection.getXMin(), intersection.getYMax())), null)); } if (intersection.getYMin() == intersection.getYMax()) { return EsriGeometrySerde.serialize(createFromEsriGeometry(new Polyline(new Point(intersection.getXMin(), intersection.getYMin()), new Point(intersection.getXMax(), intersection.getYMin())), null)); } return EsriGeometrySerde.serialize(intersection); } // If one side is an envelope, then if it contains the other's envelope we can just return the other geometry. if (leftType == GeometrySerializationType.ENVELOPE && deserializeEnvelope(left).contains(deserializeEnvelope(right))) { return right; } if (rightType == GeometrySerializationType.ENVELOPE && deserializeEnvelope(right).contains(deserializeEnvelope(left))) { return left; } OGCGeometry leftGeometry = EsriGeometrySerde.deserialize(left); OGCGeometry rightGeometry = EsriGeometrySerde.deserialize(right); verifySameSpatialReference(leftGeometry, rightGeometry); return EsriGeometrySerde.serialize(leftGeometry.intersection(rightGeometry)); } @Description("Returns the Geometry value that represents the point set symmetric difference of two Geometries") @ScalarFunction("ST_SymDifference") @SqlType(GEOMETRY_TYPE_NAME) public static Slice stSymmetricDifference(@SqlType(GEOMETRY_TYPE_NAME) Slice left, @SqlType(GEOMETRY_TYPE_NAME) Slice right) { OGCGeometry leftGeometry = EsriGeometrySerde.deserialize(left); OGCGeometry rightGeometry = EsriGeometrySerde.deserialize(right); verifySameSpatialReference(leftGeometry, rightGeometry); return EsriGeometrySerde.serialize(leftGeometry.symDifference(rightGeometry)); } @SqlNullable @Description("Returns TRUE if and only if no points of right lie in the exterior of left, and at least one point of the interior of left 
lies in the interior of right") @ScalarFunction("ST_Contains") @SqlType(BOOLEAN) public static Boolean stContains(@SqlType(GEOMETRY_TYPE_NAME) Slice left, @SqlType(GEOMETRY_TYPE_NAME) Slice right) { if (!envelopes(left, right, Envelope::contains)) { return false; } OGCGeometry leftGeometry = EsriGeometrySerde.deserialize(left); OGCGeometry rightGeometry = EsriGeometrySerde.deserialize(right); verifySameSpatialReference(leftGeometry, rightGeometry); return leftGeometry.contains(rightGeometry); } @SqlNullable @Description("Returns TRUE if the supplied geometries have some, but not all, interior points in common") @ScalarFunction("ST_Crosses") @SqlType(BOOLEAN) public static Boolean stCrosses(@SqlType(GEOMETRY_TYPE_NAME) Slice left, @SqlType(GEOMETRY_TYPE_NAME) Slice right) { if (!envelopes(left, right, Envelope::intersect)) { return false; } OGCGeometry leftGeometry = EsriGeometrySerde.deserialize(left); OGCGeometry rightGeometry = EsriGeometrySerde.deserialize(right); verifySameSpatialReference(leftGeometry, rightGeometry); return leftGeometry.crosses(rightGeometry); } @SqlNullable @Description("Returns TRUE if the Geometries do not spatially intersect - if they do not share any space together") @ScalarFunction("ST_Disjoint") @SqlType(BOOLEAN) public static Boolean stDisjoint(@SqlType(GEOMETRY_TYPE_NAME) Slice left, @SqlType(GEOMETRY_TYPE_NAME) Slice right) { if (!envelopes(left, right, Envelope::intersect)) { return true; } OGCGeometry leftGeometry = EsriGeometrySerde.deserialize(left); OGCGeometry rightGeometry = EsriGeometrySerde.deserialize(right); verifySameSpatialReference(leftGeometry, rightGeometry); return leftGeometry.disjoint(rightGeometry); } @SqlNullable @Description("Returns TRUE if the given geometries represent the same geometry") @ScalarFunction("ST_Equals") @SqlType(BOOLEAN) public static Boolean stEquals(@SqlType(GEOMETRY_TYPE_NAME) Slice left, @SqlType(GEOMETRY_TYPE_NAME) Slice right) { OGCGeometry leftGeometry = 
EsriGeometrySerde.deserialize(left); OGCGeometry rightGeometry = EsriGeometrySerde.deserialize(right); verifySameSpatialReference(leftGeometry, rightGeometry); return leftGeometry.Equals(rightGeometry); } @SqlNullable @Description("Returns TRUE if the Geometries spatially intersect in 2D - (share any portion of space) and FALSE if they don't (they are Disjoint)") @ScalarFunction("ST_Intersects") @SqlType(BOOLEAN) public static Boolean stIntersects(@SqlType(GEOMETRY_TYPE_NAME) Slice left, @SqlType(GEOMETRY_TYPE_NAME) Slice right) { if (!envelopes(left, right, Envelope::intersect)) { return false; } OGCGeometry leftGeometry = EsriGeometrySerde.deserialize(left); OGCGeometry rightGeometry = EsriGeometrySerde.deserialize(right); verifySameSpatialReference(leftGeometry, rightGeometry); return leftGeometry.intersects(rightGeometry); } @SqlNullable @Description("Returns TRUE if the Geometries share space, are of the same dimension, but are not completely contained by each other") @ScalarFunction("ST_Overlaps") @SqlType(BOOLEAN) public static Boolean stOverlaps(@SqlType(GEOMETRY_TYPE_NAME) Slice left, @SqlType(GEOMETRY_TYPE_NAME) Slice right) { if (!envelopes(left, right, Envelope::intersect)) { return false; } OGCGeometry leftGeometry = EsriGeometrySerde.deserialize(left); OGCGeometry rightGeometry = EsriGeometrySerde.deserialize(right); verifySameSpatialReference(leftGeometry, rightGeometry); return leftGeometry.overlaps(rightGeometry); } @SqlNullable @Description("Returns TRUE if this Geometry is spatially related to another Geometry") @ScalarFunction("ST_Relate") @SqlType(BOOLEAN) public static Boolean stRelate(@SqlType(GEOMETRY_TYPE_NAME) Slice left, @SqlType(GEOMETRY_TYPE_NAME) Slice right, @SqlType(VARCHAR) Slice relation) { OGCGeometry leftGeometry = EsriGeometrySerde.deserialize(left); OGCGeometry rightGeometry = EsriGeometrySerde.deserialize(right); verifySameSpatialReference(leftGeometry, rightGeometry); return leftGeometry.relate(rightGeometry, 
relation.toStringUtf8()); } @SqlNullable @Description("Returns TRUE if the geometries have at least one point in common, but their interiors do not intersect") @ScalarFunction("ST_Touches") @SqlType(BOOLEAN) public static Boolean stTouches(@SqlType(GEOMETRY_TYPE_NAME) Slice left, @SqlType(GEOMETRY_TYPE_NAME) Slice right) { if (!envelopes(left, right, Envelope::intersect)) { return false; } OGCGeometry leftGeometry = EsriGeometrySerde.deserialize(left); OGCGeometry rightGeometry = EsriGeometrySerde.deserialize(right); verifySameSpatialReference(leftGeometry, rightGeometry); return leftGeometry.touches(rightGeometry); } @SqlNullable @Description("Returns TRUE if the geometry A is completely inside geometry B") @ScalarFunction("ST_Within") @SqlType(BOOLEAN) public static Boolean stWithin(@SqlType(GEOMETRY_TYPE_NAME) Slice left, @SqlType(GEOMETRY_TYPE_NAME) Slice right) { if (!envelopes(right, left, Envelope::contains)) { return false; } OGCGeometry leftGeometry = EsriGeometrySerde.deserialize(left); OGCGeometry rightGeometry = EsriGeometrySerde.deserialize(right); verifySameSpatialReference(leftGeometry, rightGeometry); return leftGeometry.within(rightGeometry); } @Description("Returns the type of the geometry") @ScalarFunction("ST_GeometryType") @SqlType(VARCHAR) public static Slice stGeometryType(@SqlType(GEOMETRY_TYPE_NAME) Slice input) { return EsriGeometrySerde.getGeometryType(input).standardName(); } @ScalarFunction @SqlNullable @Description("Returns an array of spatial partition IDs for a given geometry") @SqlType("array(int)") public static Block spatialPartitions(@SqlType(KdbTreeType.NAME) Object kdbTree, @SqlType(GEOMETRY_TYPE_NAME) Slice geometry) { Envelope envelope = deserializeEnvelope(geometry); if (envelope.isEmpty()) { // Empty geometry return null; } return spatialPartitions((KdbTree) kdbTree, new Rectangle(envelope.getXMin(), envelope.getYMin(), envelope.getXMax(), envelope.getYMax())); } @ScalarFunction @SqlNullable @Description("Returns an array 
of spatial partition IDs for a geometry representing a set of points within specified distance from the input geometry") @SqlType("array(int)") public static Block spatialPartitions(@SqlType(KdbTreeType.NAME) Object kdbTree, @SqlType(GEOMETRY_TYPE_NAME) Slice geometry, @SqlType(DOUBLE) double distance) { if (isNaN(distance)) { throw new PrestoException(INVALID_FUNCTION_ARGUMENT, "distance is NaN"); } if (isInfinite(distance)) { throw new PrestoException(INVALID_FUNCTION_ARGUMENT, "distance is infinite"); } if (distance < 0) { throw new PrestoException(INVALID_FUNCTION_ARGUMENT, "distance is negative"); } Envelope envelope = deserializeEnvelope(geometry); if (envelope.isEmpty()) { return null; } Rectangle expandedEnvelope2D = new Rectangle(envelope.getXMin() - distance, envelope.getYMin() - distance, envelope.getXMax() + distance, envelope.getYMax() + distance); return spatialPartitions((KdbTree) kdbTree, expandedEnvelope2D); } // Package visible for SphericalGeoFunctions /*package*/ static Block spatialPartitions(KdbTree kdbTree, Rectangle envelope) { Map<Integer, Rectangle> partitions = kdbTree.findIntersectingLeaves(envelope); if (partitions.isEmpty()) { return EMPTY_ARRAY_OF_INTS; } // For input rectangles that represent a single point, return at most one partition // by excluding right and upper sides of partition rectangles. The logic that builds // KDB tree needs to make sure to add some padding to the right and upper sides of the // overall extent of the tree to avoid missing right-most and top-most points. 
boolean point = (envelope.getWidth() == 0 && envelope.getHeight() == 0); if (point) { for (Map.Entry<Integer, Rectangle> partition : partitions.entrySet()) { if (envelope.getXMin() < partition.getValue().getXMax() && envelope.getYMin() < partition.getValue().getYMax()) { BlockBuilder blockBuilder = IntegerType.INTEGER.createFixedSizeBlockBuilder(1); blockBuilder.writeInt(partition.getKey()); return blockBuilder.build(); } } throw new VerifyException(format("Cannot find half-open partition extent for a point: (%s, %s)", envelope.getXMin(), envelope.getYMin())); } BlockBuilder blockBuilder = IntegerType.INTEGER.createFixedSizeBlockBuilder(partitions.size()); for (int id : partitions.keySet()) { blockBuilder.writeInt(id); } return blockBuilder.build(); } private static OGCGeometry geomFromBinary(Slice input) { requireNonNull(input, "input is null"); OGCGeometry geometry; try { geometry = OGCGeometry.fromBinary(input.toByteBuffer().slice()); } catch (IllegalArgumentException | IndexOutOfBoundsException e) { throw new PrestoException(INVALID_FUNCTION_ARGUMENT, "Invalid WKB", e); } geometry.setSpatialReference(null); return geometry; } private static void validateType(String function, OGCGeometry geometry, Set<GeometryType> validTypes) { GeometryType type = GeometryType.getForEsriGeometryType(geometry.geometryType()); if (!validTypes.contains(type)) { throw new PrestoException(INVALID_FUNCTION_ARGUMENT, format("%s only applies to %s. Input type is: %s", function, OR_JOINER.join(validTypes), type)); } } private static void validateType(String function, Geometry geometry, Set<GeometryType> validTypes) { GeometryType type = GeometryType.getForJtsGeometryType(geometry.getGeometryType()); if (!validTypes.contains(type)) { throw new PrestoException(INVALID_FUNCTION_ARGUMENT, format("%s only applies to %s. 
Input type is: %s", function, OR_JOINER.join(validTypes), type)); } } private static void verifySameSpatialReference(OGCGeometry leftGeometry, OGCGeometry rightGeometry) { checkArgument(Objects.equals(leftGeometry.getEsriSpatialReference(), rightGeometry.getEsriSpatialReference()), "Input geometries must have the same spatial reference"); } private static boolean envelopes(Slice left, Slice right, EnvelopesPredicate predicate) { Envelope leftEnvelope = deserializeEnvelope(left); Envelope rightEnvelope = deserializeEnvelope(right); if (leftEnvelope.isEmpty() || rightEnvelope.isEmpty()) { return false; } return predicate.apply(leftEnvelope, rightEnvelope); } private interface EnvelopesPredicate { boolean apply(Envelope left, Envelope right); } private static Iterable<Slice> getGeometrySlicesFromBlock(Block block) { requireNonNull(block, "block is null"); return () -> new Iterator<Slice>() { private int iteratorPosition; @Override public boolean hasNext() { return iteratorPosition != block.getPositionCount(); } @Override public Slice next() { if (!hasNext()) { throw new NoSuchElementException("Slices have been consumed"); } return GEOMETRY.getSlice(block, iteratorPosition++); } }; } }