gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/*
 * Copyright 2012 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.android.gcm.server;

import static com.google.android.gcm.server.Constants.GCM_SEND_ENDPOINT;
import static com.google.android.gcm.server.Constants.JSON_CANONICAL_IDS;
import static com.google.android.gcm.server.Constants.JSON_ERROR;
import static com.google.android.gcm.server.Constants.JSON_FAILURE;
import static com.google.android.gcm.server.Constants.JSON_MESSAGE_ID;
import static com.google.android.gcm.server.Constants.JSON_MULTICAST_ID;
import static com.google.android.gcm.server.Constants.JSON_NOTIFICATION_BADGE;
import static com.google.android.gcm.server.Constants.JSON_NOTIFICATION;
import static com.google.android.gcm.server.Constants.JSON_NOTIFICATION_BODY;
import static com.google.android.gcm.server.Constants.JSON_NOTIFICATION_BODY_LOC_ARGS;
import static com.google.android.gcm.server.Constants.JSON_NOTIFICATION_BODY_LOC_KEY;
import static com.google.android.gcm.server.Constants.JSON_NOTIFICATION_CLICK_ACTION;
import static com.google.android.gcm.server.Constants.JSON_NOTIFICATION_COLOR;
import static com.google.android.gcm.server.Constants.JSON_NOTIFICATION_ICON;
import static com.google.android.gcm.server.Constants.JSON_NOTIFICATION_SOUND;
import static com.google.android.gcm.server.Constants.JSON_NOTIFICATION_TAG;
import static com.google.android.gcm.server.Constants.JSON_NOTIFICATION_TITLE;
import static com.google.android.gcm.server.Constants.JSON_NOTIFICATION_TITLE_LOC_ARGS;
import static com.google.android.gcm.server.Constants.JSON_NOTIFICATION_TITLE_LOC_KEY;
import static com.google.android.gcm.server.Constants.JSON_PAYLOAD;
import static com.google.android.gcm.server.Constants.JSON_REGISTRATION_IDS;
import static com.google.android.gcm.server.Constants.JSON_RESULTS;
import static com.google.android.gcm.server.Constants.JSON_SUCCESS;
import static com.google.android.gcm.server.Constants.PARAM_COLLAPSE_KEY;
import static com.google.android.gcm.server.Constants.PARAM_DELAY_WHILE_IDLE;
import static com.google.android.gcm.server.Constants.PARAM_DRY_RUN;
import static com.google.android.gcm.server.Constants.PARAM_PRIORITY;
import static com.google.android.gcm.server.Constants.PARAM_RESTRICTED_PACKAGE_NAME;
import static com.google.android.gcm.server.Constants.PARAM_TIME_TO_LIVE;
import static com.google.android.gcm.server.Constants.TOKEN_CANONICAL_REG_ID;

import org.json.simple.JSONObject;
import org.json.simple.JSONValue;
import org.json.simple.parser.JSONParser;
import org.json.simple.parser.ParseException;

import java.io.BufferedReader;
import java.io.Closeable;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Collections;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * Helper class to send messages to the GCM service using an API Key.
 */
public class Sender {

  protected static final String UTF8 = "UTF-8";

  /**
   * Initial delay before first retry, without jitter.
   */
  protected static final int BACKOFF_INITIAL_DELAY = 1000;

  /**
   * Maximum delay before a retry.
   */
  protected static final int MAX_BACKOFF_DELAY = 1024000;

  protected final Random random = new Random();

  protected static final Logger logger =
      Logger.getLogger(Sender.class.getName());

  private final String key;

  /**
   * Default constructor.
   *
   * @param key API key obtained through the Google API Console.
   */
  public Sender(String key) {
    this.key = nonNull(key);
  }

  /**
   * Sends a message to one device, retrying in case of unavailability.
   *
   * <p>
   * <strong>Note: </strong> this method uses exponential back-off to retry in
   * case of service unavailability and hence could block the calling thread
   * for many seconds.
   *
   * @param message message to be sent, including the device's registration id.
   * @param registrationId device where the message will be sent.
   * @param retries number of retries in case of service unavailability errors.
   *
   * @return result of the request (see its javadoc for more details).
   *
   * @throws IllegalArgumentException if registrationId is {@literal null}.
   * @throws InvalidRequestException if GCM didn't returned a 200 or 5xx status.
   * @throws IOException if message could not be sent.
   */
  public Result send(Message message, String registrationId, int retries)
      throws IOException {
    int attempt = 0;
    Result result;
    int backoff = BACKOFF_INITIAL_DELAY;
    boolean tryAgain;
    do {
      attempt++;
      if (logger.isLoggable(Level.FINE)) {
        logger.fine("Attempt #" + attempt + " to send message " +
            message + " to regIds " + registrationId);
      }
      result = sendNoRetry(message, registrationId);
      // null result means a recoverable failure: retry if attempts remain.
      tryAgain = result == null && attempt <= retries;
      if (tryAgain) {
        // Exponential back-off with jitter: sleep somewhere in
        // [backoff/2, backoff/2 + backoff), then double the window (capped).
        int sleepTime = backoff / 2 + random.nextInt(backoff);
        sleep(sleepTime);
        if (2 * backoff < MAX_BACKOFF_DELAY) {
          backoff *= 2;
        }
      }
    } while (tryAgain);
    if (result == null) {
      throw new IOException("Could not send message after " + attempt +
          " attempts");
    }
    return result;
  }

  /**
   * Sends a message without retrying in case of service unavailability. See
   * {@link #send(Message, String, int)} for more info.
   *
   * @return result of the post, or {@literal null} if the GCM service was
   *         unavailable or any network exception caused the request to fail.
   *
   * @throws InvalidRequestException if GCM didn't returned a 200 status.
   * @throws IllegalArgumentException if registrationId is {@literal null}.
   */
  public Result sendNoRetry(Message message, String registrationId)
      throws IOException {
    nonNull(registrationId);
    // Delegate to the multicast variant with a single-element list.
    List<String> registrationIds = Collections.singletonList(registrationId);
    MulticastResult multicastResult = sendNoRetry(message, registrationIds);
    if (multicastResult == null) {
      return null;
    }
    List<Result> results = multicastResult.getResults();
    if (results.size() != 1) {
      logger.log(Level.WARNING, "Found " + results.size() +
          " results in single multicast request, expected one");
      return null;
    }
    return results.get(0);
  }

  /**
   * Sends a message to many devices, retrying in case of unavailability.
   *
   * <p>
   * <strong>Note: </strong> this method uses exponential back-off to retry in
   * case of service unavailability and hence could block the calling thread
   * for many seconds.
   *
   * @param message message to be sent.
   * @param regIds registration id of the devices that will receive
   *        the message.
   * @param retries number of retries in case of service unavailability errors.
   *
   * @return combined result of all requests made.
   *
   * @throws IllegalArgumentException if registrationIds is {@literal null} or
   *         empty.
   * @throws InvalidRequestException if GCM didn't returned a 200 or 503 status.
   * @throws IOException if message could not be sent.
   */
  public MulticastResult send(Message message, List<String> regIds, int retries)
      throws IOException {
    int attempt = 0;
    MulticastResult multicastResult;
    int backoff = BACKOFF_INITIAL_DELAY;
    // Map of results by registration id, it will be updated after each attempt
    // to send the messages
    Map<String, Result> results = new HashMap<String, Result>();
    List<String> unsentRegIds = new ArrayList<String>(regIds);
    boolean tryAgain;
    List<Long> multicastIds = new ArrayList<Long>();
    do {
      multicastResult = null;
      attempt++;
      if (logger.isLoggable(Level.FINE)) {
        logger.fine("Attempt #" + attempt + " to send message " +
            message + " to regIds " + unsentRegIds);
      }
      try {
        multicastResult = sendNoRetry(message, unsentRegIds);
      } catch (IOException e) {
        // no need for WARNING since exception might be already logged
        logger.log(Level.FINEST, "IOException on attempt " + attempt, e);
      }
      if (multicastResult != null) {
        long multicastId = multicastResult.getMulticastId();
        logger.fine("multicast_id on attempt # " + attempt + ": " +
            multicastId);
        multicastIds.add(multicastId);
        // Keep only the devices whose error is retryable; everything else is
        // final and recorded in results.
        unsentRegIds = updateStatus(unsentRegIds, results, multicastResult);
        tryAgain = !unsentRegIds.isEmpty() && attempt <= retries;
      } else {
        tryAgain = attempt <= retries;
      }
      if (tryAgain) {
        // Exponential back-off with jitter, capped at MAX_BACKOFF_DELAY.
        int sleepTime = backoff / 2 + random.nextInt(backoff);
        sleep(sleepTime);
        if (2 * backoff < MAX_BACKOFF_DELAY) {
          backoff *= 2;
        }
      }
    } while (tryAgain);
    if (multicastIds.isEmpty()) {
      // all JSON posts failed due to GCM unavailability
      throw new IOException("Could not post JSON requests to GCM after " +
          attempt + " attempts");
    }
    // calculate summary
    int success = 0, failure = 0, canonicalIds = 0;
    for (Result result : results.values()) {
      if (result.getMessageId() != null) {
        success++;
        if (result.getCanonicalRegistrationId() != null) {
          canonicalIds++;
        }
      } else {
        failure++;
      }
    }
    // build a new object with the overall result
    long multicastId = multicastIds.remove(0);
    MulticastResult.Builder builder =
        new MulticastResult.Builder(success, failure, canonicalIds,
            multicastId).retryMulticastIds(multicastIds);
    // add results, in the same order as the input
    for (String regId : regIds) {
      Result result = results.get(regId);
      builder.addResult(result);
    }
    return builder.build();
  }

  /**
   * Updates the status of the messages sent to devices and the list of devices
   * that should be retried.
   *
   * @param unsentRegIds list of devices that are still pending an update.
   * @param allResults map of status that will be updated.
   * @param multicastResult result of the last multicast sent.
   *
   * @return updated version of devices that should be retried.
   */
  private List<String> updateStatus(List<String> unsentRegIds,
      Map<String, Result> allResults, MulticastResult multicastResult) {
    List<Result> results = multicastResult.getResults();
    if (results.size() != unsentRegIds.size()) {
      // should never happen, unless there is a flaw in the algorithm
      throw new IllegalStateException("Internal error: sizes do not match. " +
          "currentResults: " + results + "; unsentRegIds: " + unsentRegIds);
    }
    List<String> newUnsentRegIds = new ArrayList<String>();
    for (int i = 0; i < unsentRegIds.size(); i++) {
      String regId = unsentRegIds.get(i);
      Result result = results.get(i);
      allResults.put(regId, result);
      String error = result.getErrorCodeName();
      // Only UNAVAILABLE and INTERNAL_SERVER_ERROR are retryable errors.
      if (error != null && (error.equals(Constants.ERROR_UNAVAILABLE)
          || error.equals(Constants.ERROR_INTERNAL_SERVER_ERROR))) {
        newUnsentRegIds.add(regId);
      }
    }
    return newUnsentRegIds;
  }

  /**
   * Sends a message without retrying in case of service unavailability. See
   * {@link #send(Message, List, int)} for more info.
   *
   * @return multicast results if the message was sent successfully,
   *         {@literal null} if it failed but could be retried.
   *
   * @throws IllegalArgumentException if registrationIds is {@literal null} or
   *         empty.
   * @throws InvalidRequestException if GCM didn't returned a 200 status.
   * @throws IOException if there was a JSON parsing error
   */
  public MulticastResult sendNoRetry(Message message,
      List<String> registrationIds) throws IOException {
    if (nonNull(registrationIds).isEmpty()) {
      throw new IllegalArgumentException("registrationIds cannot be empty");
    }
    // Build the JSON request body; optional fields are only set when non-null.
    Map<Object, Object> jsonRequest = new HashMap<Object, Object>();
    setJsonField(jsonRequest, PARAM_PRIORITY, message.getPriority());
    setJsonField(jsonRequest, PARAM_TIME_TO_LIVE, message.getTimeToLive());
    setJsonField(jsonRequest, PARAM_COLLAPSE_KEY, message.getCollapseKey());
    setJsonField(jsonRequest, PARAM_RESTRICTED_PACKAGE_NAME,
        message.getRestrictedPackageName());
    setJsonField(jsonRequest, PARAM_DELAY_WHILE_IDLE,
        message.isDelayWhileIdle());
    setJsonField(jsonRequest, PARAM_DRY_RUN, message.isDryRun());
    jsonRequest.put(JSON_REGISTRATION_IDS, registrationIds);
    Map<String, String> payload = message.getData();
    if (!payload.isEmpty()) {
      jsonRequest.put(JSON_PAYLOAD, payload);
    }
    if (message.getNotification() != null) {
      Notification notification = message.getNotification();
      Map<Object, Object> nMap = new HashMap<Object, Object>();
      if (notification.getBadge() != null) {
        setJsonField(nMap, JSON_NOTIFICATION_BADGE,
            notification.getBadge().toString());
      }
      setJsonField(nMap, JSON_NOTIFICATION_BODY, notification.getBody());
      setJsonField(nMap, JSON_NOTIFICATION_BODY_LOC_ARGS,
          notification.getBodyLocArgs());
      setJsonField(nMap, JSON_NOTIFICATION_BODY_LOC_KEY,
          notification.getBodyLocKey());
      setJsonField(nMap, JSON_NOTIFICATION_CLICK_ACTION,
          notification.getClickAction());
      setJsonField(nMap, JSON_NOTIFICATION_COLOR, notification.getColor());
      setJsonField(nMap, JSON_NOTIFICATION_ICON, notification.getIcon());
      setJsonField(nMap, JSON_NOTIFICATION_SOUND, notification.getSound());
      setJsonField(nMap, JSON_NOTIFICATION_TAG, notification.getTag());
      setJsonField(nMap, JSON_NOTIFICATION_TITLE, notification.getTitle());
      setJsonField(nMap, JSON_NOTIFICATION_TITLE_LOC_ARGS,
          notification.getTitleLocArgs());
      setJsonField(nMap, JSON_NOTIFICATION_TITLE_LOC_KEY,
          notification.getTitleLocKey());
      jsonRequest.put(JSON_NOTIFICATION, nMap);
    }
    String requestBody = JSONValue.toJSONString(jsonRequest);
    logger.finest("JSON request: " + requestBody);
    HttpURLConnection conn;
    int status;
    try {
      conn = post(GCM_SEND_ENDPOINT, "application/json", requestBody);
      status = conn.getResponseCode();
    } catch (IOException e) {
      // Network-level failure: caller may retry, so return null.
      logger.log(Level.FINE, "IOException posting to GCM", e);
      return null;
    }
    String responseBody;
    if (status != 200) {
      try {
        responseBody = getAndClose(conn.getErrorStream());
        logger.finest("JSON error response: " + responseBody);
      } catch (IOException e) {
        // ignore the exception since it will thrown an InvalidRequestException
        // anyways
        responseBody = "N/A";
        logger.log(Level.FINE, "Exception reading response: ", e);
      }
      throw new InvalidRequestException(status, responseBody);
    }
    try {
      responseBody = getAndClose(conn.getInputStream());
    } catch (IOException e) {
      logger.log(Level.WARNING, "IOException reading response", e);
      return null;
    }
    logger.finest("JSON response: " + responseBody);
    JSONParser parser = new JSONParser();
    JSONObject jsonResponse;
    try {
      jsonResponse = (JSONObject) parser.parse(responseBody);
      int success = getNumber(jsonResponse, JSON_SUCCESS).intValue();
      int failure = getNumber(jsonResponse, JSON_FAILURE).intValue();
      int canonicalIds = getNumber(jsonResponse, JSON_CANONICAL_IDS).intValue();
      long multicastId = getNumber(jsonResponse, JSON_MULTICAST_ID).longValue();
      MulticastResult.Builder builder =
          new MulticastResult.Builder(success, failure, canonicalIds,
              multicastId);
      @SuppressWarnings("unchecked")
      List<Map<String, Object>> results =
          (List<Map<String, Object>>) jsonResponse.get(JSON_RESULTS);
      if (results != null) {
        for (Map<String, Object> jsonResult : results) {
          String messageId = (String) jsonResult.get(JSON_MESSAGE_ID);
          String canonicalRegId =
              (String) jsonResult.get(TOKEN_CANONICAL_REG_ID);
          String error = (String) jsonResult.get(JSON_ERROR);
          Result result = new Result.Builder()
              .messageId(messageId)
              .canonicalRegistrationId(canonicalRegId)
              .errorCode(error)
              .build();
          builder.addResult(result);
        }
      }
      return builder.build();
    } catch (ParseException e) {
      throw newIoException(responseBody, e);
    } catch (CustomParserException e) {
      throw newIoException(responseBody, e);
    }
  }

  private IOException newIoException(String responseBody, Exception e) {
    // log exception, as IOException constructor that takes a message and cause
    // is only available on Java 6
    String msg = "Error parsing JSON response (" + responseBody + ")";
    logger.log(Level.WARNING, msg, e);
    return new IOException(msg + ":" + e);
  }

  private static void close(Closeable closeable) {
    if (closeable != null) {
      try {
        closeable.close();
      } catch (IOException e) {
        // ignore error
        logger.log(Level.FINEST, "IOException closing stream", e);
      }
    }
  }

  /**
   * Sets a JSON field, but only if the value is not {@literal null}.
   */
  private void setJsonField(Map<Object, Object> json, String field,
      Object value) {
    if (value != null) {
      json.put(field, value);
    }
  }

  private Number getNumber(Map<?, ?> json, String field) {
    Object value = json.get(field);
    if (value == null) {
      throw new CustomParserException("Missing field: " + field);
    }
    if (!(value instanceof Number)) {
      throw new CustomParserException("Field " + field +
          " does not contain a number: " + value);
    }
    return (Number) value;
  }

  /**
   * Thrown when a mandatory field is missing or malformed in the JSON
   * response; translated to an {@link IOException} by the caller.
   */
  static class CustomParserException extends RuntimeException {
    CustomParserException(String message) {
      super(message);
    }
  }

  /**
   * Make an HTTP post to a given URL.
   *
   * @return HTTP response.
   */
  protected HttpURLConnection post(String url, String body)
      throws IOException {
    return post(url, "application/x-www-form-urlencoded;charset=UTF-8", body);
  }

  /**
   * Makes an HTTP POST request to a given endpoint.
   *
   * <p>
   * <strong>Note: </strong> the returned connected should not be disconnected,
   * otherwise it would kill persistent connections made using Keep-Alive.
   *
   * @param url endpoint to post the request.
   * @param contentType type of request.
   * @param body body of the request.
   *
   * @return the underlying connection.
   *
   * @throws IOException propagated from underlying methods.
   */
  protected HttpURLConnection post(String url, String contentType, String body)
      throws IOException {
    if (url == null || contentType == null || body == null) {
      throw new IllegalArgumentException("arguments cannot be null");
    }
    if (!url.startsWith("https://")) {
      logger.warning("URL does not use https: " + url);
    }
    logger.fine("Sending POST to " + url);
    logger.finest("POST body: " + body);
    byte[] bytes = body.getBytes(UTF8);
    HttpURLConnection conn = getConnection(url);
    conn.setDoOutput(true);
    conn.setUseCaches(false);
    conn.setFixedLengthStreamingMode(bytes.length);
    conn.setRequestMethod("POST");
    conn.setRequestProperty("Content-Type", contentType);
    conn.setRequestProperty("Authorization", "key=" + key);
    OutputStream out = conn.getOutputStream();
    try {
      out.write(bytes);
    } finally {
      close(out);
    }
    return conn;
  }

  /**
   * Creates a map with just one key-value pair.
   */
  protected static final Map<String, String> newKeyValues(String key,
      String value) {
    Map<String, String> keyValues = new HashMap<String, String>(1);
    keyValues.put(nonNull(key), nonNull(value));
    return keyValues;
  }

  /**
   * Creates a {@link StringBuilder} to be used as the body of an HTTP POST.
   *
   * @param name initial parameter for the POST.
   * @param value initial value for that parameter.
   * @return StringBuilder to be used an HTTP POST body.
   */
  protected static StringBuilder newBody(String name, String value) {
    return new StringBuilder(nonNull(name)).append('=').append(nonNull(value));
  }

  /**
   * Adds a new parameter to the HTTP POST body.
   *
   * @param body HTTP POST body.
   * @param name parameter's name.
   * @param value parameter's value.
   */
  protected static void addParameter(StringBuilder body, String name,
      String value) {
    nonNull(body).append('&')
        .append(nonNull(name)).append('=').append(nonNull(value));
  }

  /**
   * Gets an {@link HttpURLConnection} given an URL.
   */
  protected HttpURLConnection getConnection(String url) throws IOException {
    return (HttpURLConnection) new URL(url).openConnection();
  }

  /**
   * Convenience method to convert an InputStream to a String.
   * <p>
   * If the stream ends in a newline character, it will be stripped.
   * <p>
   * If the stream is {@literal null}, returns an empty string.
   */
  protected static String getString(InputStream stream) throws IOException {
    if (stream == null) {
      return "";
    }
    // GCM responses are UTF-8 JSON; decode explicitly instead of relying on
    // the platform default charset.
    BufferedReader reader =
        new BufferedReader(new InputStreamReader(stream, UTF8));
    StringBuilder content = new StringBuilder();
    String newLine;
    do {
      newLine = reader.readLine();
      if (newLine != null) {
        content.append(newLine).append('\n');
      }
    } while (newLine != null);
    if (content.length() > 0) {
      // strip last newline
      content.setLength(content.length() - 1);
    }
    return content.toString();
  }

  private static String getAndClose(InputStream stream) throws IOException {
    try {
      return getString(stream);
    } finally {
      if (stream != null) {
        close(stream);
      }
    }
  }

  static <T> T nonNull(T argument) {
    if (argument == null) {
      throw new IllegalArgumentException("argument cannot be null");
    }
    return argument;
  }

  void sleep(long millis) {
    try {
      Thread.sleep(millis);
    } catch (InterruptedException e) {
      // Restore the interrupt flag so callers can observe it.
      Thread.currentThread().interrupt();
    }
  }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.search.aggregations.bucket.range;

import com.google.common.collect.Lists;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.util.InPlaceMergeSorter;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
import org.elasticsearch.search.aggregations.*;
import org.elasticsearch.search.aggregations.bucket.BucketsAggregator;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.format.ValueFormat;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import org.elasticsearch.search.aggregations.support.format.ValueParser;
import org.elasticsearch.search.internal.SearchContext;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

/**
 * Bucket aggregator that assigns each collected numeric value to every
 * (possibly overlapping) configured range it falls into.
 */
public class RangeAggregator extends BucketsAggregator {

    /**
     * A single half-open range [from, to). Bounds may also be given as
     * strings and resolved later via {@link #process}.
     */
    public static class Range {

        public String key;
        // Bounds default to unbounded; string forms override them once parsed.
        public double from = Double.NEGATIVE_INFINITY;
        String fromAsStr;
        public double to = Double.POSITIVE_INFINITY;
        String toAsStr;

        public Range(String key, double from, String fromAsStr, double to, String toAsStr) {
            this.key = key;
            this.from = from;
            this.fromAsStr = fromAsStr;
            this.to = to;
            this.toAsStr = toAsStr;
        }

        // Half-open containment check: lower bound inclusive, upper exclusive.
        boolean matches(double value) {
            return value >= from && value < to;
        }

        @Override
        public String toString() {
            return "[" + from + " to " + to + ")";
        }

        /**
         * Resolves any string-typed bounds into doubles using the given
         * parser; numeric bounds are left untouched.
         */
        public void process(ValueParser parser, SearchContext context) {
            assert parser != null;
            if (fromAsStr != null) {
                from = parser.parseDouble(fromAsStr, context);
            }
            if (toAsStr != null) {
                to = parser.parseDouble(toAsStr, context);
            }
        }
    }

    private final ValuesSource.Numeric valuesSource;
    private final @Nullable ValueFormatter formatter;
    private final Range[] ranges;
    private final boolean keyed;
    private final InternalRange.Factory rangeFactory;
    private SortedNumericDoubleValues values;

    // maxTo[i] is the maximum 'to' over ranges[0..i] (cumulative max after
    // sorting by 'from'); it lets collect() binary-search overlapping ranges.
    final double[] maxTo;

    public RangeAggregator(String name, AggregatorFactories factories, ValuesSource.Numeric valuesSource,
                           @Nullable ValueFormat format, InternalRange.Factory rangeFactory, List<Range> ranges,
                           boolean keyed, AggregationContext aggregationContext, Aggregator parent) {
        // Each parent bucket fans out into ranges.size() sub-buckets.
        super(name, BucketAggregationMode.MULTI_BUCKETS, factories, ranges.size() * (parent == null ? 1 : parent.estimatedBucketCount()), aggregationContext, parent);
        assert valuesSource != null;
        this.valuesSource = valuesSource;
        this.formatter = format != null ? format.formatter() : null;
        this.keyed = keyed;
        this.rangeFactory = rangeFactory;
        this.ranges = ranges.toArray(new Range[ranges.size()]);
        // Resolve string bounds before sorting, since sorting compares doubles.
        ValueParser parser = format != null ? format.parser() : ValueParser.RAW;
        for (int i = 0; i < this.ranges.length; i++) {
            this.ranges[i].process(parser, context.searchContext());
        }
        sortRanges(this.ranges);
        // Build the cumulative maximum of upper bounds (see maxTo field doc).
        maxTo = new double[this.ranges.length];
        maxTo[0] = this.ranges[0].to;
        for (int i = 1; i < this.ranges.length; ++i) {
            maxTo[i] = Math.max(this.ranges[i].to, maxTo[i - 1]);
        }
    }

    @Override
    public boolean shouldCollect() {
        return true;
    }

    @Override
    public void setNextReader(AtomicReaderContext reader) {
        values = valuesSource.doubleValues();
    }

    // Dense sub-bucket numbering: one slot per (owning bucket, range) pair.
    private final long subBucketOrdinal(long owningBucketOrdinal, int rangeOrd) {
        return owningBucketOrdinal * ranges.length + rangeOrd;
    }

    @Override
    public void collect(int doc, long owningBucketOrdinal) throws IOException {
        values.setDocument(doc);
        final int valuesCount = values.count();
        // Values come back sorted, so each collect() call returns a lower
        // bound that lets the next value skip ranges already ruled out.
        for (int i = 0, lo = 0; i < valuesCount; ++i) {
            final double value = values.valueAt(i);
            lo = collect(doc, value, owningBucketOrdinal, lo);
        }
    }

    /**
     * Collects {@code value} into every matching range with index >= lowBound.
     * Ranges are sorted by 'from', and maxTo[] is a cumulative max of 'to',
     * so candidates form a contiguous window found by binary search.
     *
     * @return the first index that the next (larger or equal) value may match,
     *         usable as the lowBound of the following call.
     */
    private int collect(int doc, double value, long owningBucketOrdinal, int lowBound) throws IOException {
        int lo = lowBound, hi = ranges.length - 1; // all candidates are between these indexes
        int mid = (lo + hi) >>> 1;
        // First search: find any index 'mid' whose window could contain value,
        // i.e. ranges[mid].from <= value < maxTo[mid].
        while (lo <= hi) {
            if (value < ranges[mid].from) {
                hi = mid - 1;
            } else if (value >= maxTo[mid]) {
                lo = mid + 1;
            } else {
                break;
            }
            mid = (lo + hi) >>> 1;
        }
        if (lo > hi) return lo; // no potential candidate

        // binary search the lower bound
        // (smallest index whose cumulative max 'to' still exceeds value)
        int startLo = lo, startHi = mid;
        while (startLo <= startHi) {
            final int startMid = (startLo + startHi) >>> 1;
            if (value >= maxTo[startMid]) {
                startLo = startMid + 1;
            } else {
                startHi = startMid - 1;
            }
        }

        // binary search the upper bound
        // (largest index whose 'from' does not exceed value)
        int endLo = mid, endHi = hi;
        while (endLo <= endHi) {
            final int endMid = (endLo + endHi) >>> 1;
            if (value < ranges[endMid].from) {
                endHi = endMid - 1;
            } else {
                endLo = endMid + 1;
            }
        }

        assert startLo == lowBound || value >= maxTo[startLo - 1];
        assert endHi == ranges.length - 1 || value < ranges[endHi + 1].from;

        // Within the [startLo, endHi] window, ranges may still miss the value
        // (overlaps are irregular), so test each one individually.
        for (int i = startLo; i <= endHi; ++i) {
            if (ranges[i].matches(value)) {
                collectBucket(doc, subBucketOrdinal(owningBucketOrdinal, i));
            }
        }

        return endHi + 1;
    }

    @Override
    public InternalAggregation buildAggregation(long owningBucketOrdinal) {
        List<org.elasticsearch.search.aggregations.bucket.range.Range.Bucket> buckets = Lists.newArrayListWithCapacity(ranges.length);
        for (int i = 0; i < ranges.length; i++) {
            Range range = ranges[i];
            final long bucketOrd = subBucketOrdinal(owningBucketOrdinal, i);
            org.elasticsearch.search.aggregations.bucket.range.Range.Bucket bucket =
                    rangeFactory.createBucket(range.key, range.from, range.to, bucketDocCount(bucketOrd), bucketAggregations(bucketOrd), keyed, formatter);
            buckets.add(bucket);
        }
        // value source can be null in the case of unmapped fields
        return rangeFactory.create(name, buckets, formatter, keyed);
    }

    @Override
    public InternalAggregation buildEmptyAggregation() {
        InternalAggregations subAggs = buildEmptySubAggregations();
        List<org.elasticsearch.search.aggregations.bucket.range.Range.Bucket> buckets = Lists.newArrayListWithCapacity(ranges.length);
        for (int i = 0; i < ranges.length; i++) {
            Range range = ranges[i];
            org.elasticsearch.search.aggregations.bucket.range.Range.Bucket bucket =
                    rangeFactory.createBucket(range.key, range.from, range.to, 0, subAggs, keyed, formatter);
            buckets.add(bucket);
        }
        // value source can be null in the case of unmapped fields
        return rangeFactory.create(name, buckets, formatter, keyed);
    }

    // In-place stable sort by 'from' (ties broken by 'to'), required by the
    // binary searches in collect().
    private static final void sortRanges(final Range[] ranges) {
        new InPlaceMergeSorter() {
            @Override
            protected void swap(int i, int j) {
                final Range tmp = ranges[i];
                ranges[i] = ranges[j];
                ranges[j] = tmp;
            }

            @Override
            protected int compare(int i, int j) {
                int cmp = Double.compare(ranges[i].from, ranges[j].from);
                if (cmp == 0) {
                    cmp = Double.compare(ranges[i].to, ranges[j].to);
                }
                return cmp;
            }
        }.sort(0, ranges.length);
    }

    /**
     * Variant used when the field is unmapped: collects nothing and always
     * produces empty buckets for every configured range.
     */
    public static class Unmapped extends NonCollectingAggregator {

        private final List<RangeAggregator.Range> ranges;
        private final boolean keyed;
        private final InternalRange.Factory factory;
        private final ValueFormatter formatter;

        public Unmapped(String name, List<RangeAggregator.Range> ranges, boolean keyed, ValueFormat format,
                        AggregationContext context, Aggregator parent, InternalRange.Factory factory) {
            super(name, context, parent);
            this.ranges = ranges;
            // String bounds still need parsing so the empty buckets report
            // their resolved numeric bounds.
            ValueParser parser = format != null ? format.parser() : ValueParser.RAW;
            for (Range range : this.ranges) {
                range.process(parser, context.searchContext());
            }
            this.keyed = keyed;
            this.formatter = format != null ? format.formatter() : null;
            this.factory = factory;
        }

        @Override
        public InternalAggregation buildEmptyAggregation() {
            InternalAggregations subAggs = buildEmptySubAggregations();
            List<org.elasticsearch.search.aggregations.bucket.range.Range.Bucket> buckets = new ArrayList<>(ranges.size());
            for (RangeAggregator.Range range : ranges) {
                buckets.add(factory.createBucket(range.key, range.from, range.to, 0, subAggs, keyed, formatter));
            }
            return factory.create(name, buckets, formatter, keyed);
        }
    }

    /**
     * Factory that instantiates either a {@link RangeAggregator} (mapped
     * field) or an {@link Unmapped} aggregator.
     */
    public static class Factory extends ValuesSourceAggregatorFactory<ValuesSource.Numeric> {

        private final InternalRange.Factory rangeFactory;
        private final List<Range> ranges;
        private final boolean keyed;

        public Factory(String name, ValuesSourceConfig<ValuesSource.Numeric> valueSourceConfig, InternalRange.Factory rangeFactory, List<Range> ranges, boolean keyed) {
            super(name, rangeFactory.type(), valueSourceConfig);
            this.rangeFactory = rangeFactory;
            this.ranges = ranges;
            this.keyed = keyed;
        }

        @Override
        protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent) {
            return new Unmapped(name, ranges, keyed, config.format(), aggregationContext, parent, rangeFactory);
        }

        @Override
        protected Aggregator create(ValuesSource.Numeric valuesSource, long expectedBucketsCount, AggregationContext aggregationContext, Aggregator parent) {
            return new RangeAggregator(name, factories, valuesSource, config.format(), rangeFactory, ranges, keyed, aggregationContext, parent);
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * @author Vladimir N. Molotkov
 * @version $Revision$
 */

package org.apache.harmony.security.tests.java.security.cert;

import java.security.KeyStore;
import java.security.KeyStoreException;
import java.security.PublicKey;
import java.security.cert.TrustAnchor;
import java.security.cert.X509Certificate;
import java.security.spec.InvalidKeySpecException;
import java.util.Arrays;

import javax.security.auth.x500.X500Principal;

import org.apache.harmony.security.tests.support.TestKeyPair;
import org.apache.harmony.security.tests.support.cert.TestUtils;

import junit.framework.TestCase;

/**
 * Unit tests for <code>TrustAnchor</code>
 */
public class TrustAnchor_ImplTest extends TestCase {
    // Key algorithm used to generate the test key pair in testToString()
    private static final String keyAlg = "DSA";
    // Sample of some valid CA name
    private static final String validCaNameRfc2253 =
        "CN=Test CA,"+
        "OU=Testing Division,"+
        "O=Test It All,"+
        "L=Test Town,"+
        "ST=Testifornia,"+
        "C=Testland";

    /**
     * Test #1 for <code>TrustAnchor(X509Certificate, byte[])</code> constructor<br>
     * Assertion: creates <code>TrustAnchor</code> instance<br>
     * Test preconditions: valid parameters passed<br>
     * Expected: must pass without any exceptions
     */
    public final void testTrustAnchorX509CertificatebyteArray01()
            throws KeyStoreException {
        KeyStore ks = TestUtils.getKeyStore(true, TestUtils.TRUSTED);
        if (ks == null) {
            fail(getName() + ": not performed (could not create test KeyStore)");
        }
        String certAlias = "testca1";
        // sub testcase 1
        new TrustAnchor(
                (X509Certificate)ks.getCertificate(certAlias),
                getFullEncoding());
        // sub testcase 2
        new TrustAnchor(
                (X509Certificate)ks.getCertificate(certAlias),
                getEncodingPSOnly());
        // sub testcase 3
        new TrustAnchor(
                (X509Certificate)ks.getCertificate(certAlias),
                getEncodingESOnly());
        // sub testcase 4
        new TrustAnchor(
                (X509Certificate)ks.getCertificate(certAlias),
                getEncodingNoMinMax());
    }

    /**
     * Test #2 for <code>TrustAnchor(X509Certificate, byte[])</code> constructor<br>
     * Assertion: creates <code>TrustAnchor</code> instance<br>
     * Test preconditions: <code>null</code> as nameConstraints passed<br>
     * Expected: must pass without any exceptions
     */
    public final void testTrustAnchorX509CertificatebyteArray02()
            throws KeyStoreException {
        KeyStore ks = TestUtils.getKeyStore(true, TestUtils.TRUSTED);
        if (ks == null) {
            fail(getName() + ": not performed (could not create test KeyStore)");
        }
        String certAlias = "testca1";
        new TrustAnchor(
                (X509Certificate)ks.getCertificate(certAlias),
                null);
    }

    /**
     * Test #3 for <code>TrustAnchor(X509Certificate, byte[])</code> constructor<br>
     * Assertion: nameConstraints cloned by the constructor<br>
     * Test preconditions: modify passed nameConstraints<br>
     * Expected: modification must not change object internal state
     */
    public final void testTrustAnchorX509CertificatebyteArray03()
            throws KeyStoreException {
        KeyStore ks = TestUtils.getKeyStore(true, TestUtils.TRUSTED);
        if (ks == null) {
            fail(getName() + ": not performed (could not create test KeyStore)");
        }
        String certAlias = "testca1";
        byte[] nc = getEncodingPSOnly();
        byte[] ncCopy = nc.clone();
        // sub testcase 5 - nameConstraints can be null
        TrustAnchor ta = new TrustAnchor(
                (X509Certificate)ks.getCertificate(certAlias),
                ncCopy);
        // modify
        ncCopy[0]=(byte)0;
        // check that above modification did not change
        // object internal state
        assertTrue(Arrays.equals(nc, ta.getNameConstraints()));
    }

    /**
     * Test #4 for <code>TrustAnchor(X509Certificate, byte[])</code> constructor<br>
     * Assertion: <code>NullPointerException</code> if <code>X509Certificate</code>
     * parameter is <code>null</code><br>
     * Test preconditions: pass <code>null</code> as <code>X509Certificate</code><br>
     * Expected: NullPointerException
     */
    public final void testTrustAnchorX509CertificatebyteArray04()
            throws KeyStoreException {
        KeyStore ks = TestUtils.getKeyStore(true, TestUtils.TRUSTED);
        if (ks == null) {
            fail(getName() + ": not performed (could not create test KeyStore)");
        }
        try {
            new TrustAnchor(null, getFullEncoding());
            fail("NullPointerException has not been thrown");
        } catch (NullPointerException ok) {
        }
    }

    /**
     * Test #5 for <code>TrustAnchor(X509Certificate, byte[])</code> constructor<br>
     * Assertion: <code>IllegalArgumentException</code> if nameConstraints
     * parameter can not be decoded<br>
     * Test preconditions: pass invalid nameConstraints encoding<br>
     * Expected: IllegalArgumentException
     */
    public final void testTrustAnchorX509CertificatebyteArray05()
            throws KeyStoreException {
        KeyStore ks = TestUtils.getKeyStore(true, TestUtils.TRUSTED);
        if (ks == null) {
            fail(getName() + ": not performed (could not create test KeyStore)");
        }
        String certAlias = "testca1";
        // sub testcase 1:
        byte [] nameConstraints = getFullEncoding();
        // corrupt encoding:
        // set wrong root seq length
        nameConstraints[2] = (byte)0x8d;
        try {
            new TrustAnchor(
                    (X509Certificate)ks.getCertificate(certAlias),
                    nameConstraints);
            fail("IllegalArgumentException has not been thrown");
        } catch (IllegalArgumentException ok) {
        }
        // sub testcase 2:
        nameConstraints = getFullEncoding();
        // corrupt encoding:
        // set wrong root seq length
        nameConstraints[2] = (byte)0x8b;
        try {
            new TrustAnchor(
                    (X509Certificate)ks.getCertificate(certAlias),
                    nameConstraints);
            fail("IllegalArgumentException has not been thrown");
        } catch (IllegalArgumentException ok) {
        }
        // sub testcase 3:
        nameConstraints = getFullEncoding();
        // corrupt encoding:
        // remove right class from seq tag
        nameConstraints[3] &= (byte)0x3f;
        try {
            new TrustAnchor(
                    (X509Certificate)ks.getCertificate(certAlias),
                    nameConstraints);
            fail("IllegalArgumentException has not been thrown");
        } catch (IllegalArgumentException ok) {
        }
        // sub testcase 4:
        nameConstraints = getEncodingESOnly();
        // corrupt encoding:
        // set wrong tagged value (excludedSubtrees SEQ OF) tag [2]
        nameConstraints[2] = (byte)0xa2;
        try {
            new TrustAnchor(
                    (X509Certificate)ks.getCertificate(certAlias),
                    nameConstraints);
            fail("IllegalArgumentException has not been thrown");
        } catch (IllegalArgumentException ok) {
        }
        // sub testcase 5:
        nameConstraints = getEncodingESOnly();
        // corrupt encoding:
        // remove CONSTRUCTED flag from tagged value (excludedSubtrees SEQ OF) tag
        nameConstraints[2] &= (byte)0xdf;
        try {
            new TrustAnchor(
                    (X509Certificate)ks.getCertificate(certAlias),
                    nameConstraints);
            fail("IllegalArgumentException has not been thrown");
        } catch (IllegalArgumentException ok) {
        }
        // sub testcase 6:
        nameConstraints = getEncodingESOnly();
        // corrupt encoding:
        // set CONSTRUCTED flag for PRIMITIVE tagged value tag
        // (generalSubtree's 'base' as IA5String)
        nameConstraints[5] |= (byte)0x20;
        try {
            new TrustAnchor(
                    (X509Certificate)ks.getCertificate(certAlias),
                    nameConstraints);
            fail("IllegalArgumentException has not been thrown");
        } catch (IllegalArgumentException ok) {
        }
        // sub testcase 7:
        nameConstraints = getEncodingESOnly();
        // corrupt encoding:
        // remove scheme from URI
        // (generalSubtree's 'base' as IA5String (uniformResourceIdentifier))
        nameConstraints[12] = nameConstraints[13] =
            nameConstraints[14] = (byte)0x6f;
        try {
            new TrustAnchor(
                    (X509Certificate)ks.getCertificate(certAlias),
                    nameConstraints);
            fail("IllegalArgumentException has not been thrown");
        } catch (IllegalArgumentException ok) {
        }
    }

    /**
     * Test #6 for <code>TrustAnchor(X509Certificate, byte[])</code> constructor<br>
     * Assertion: creates <code>TrustAnchor</code> instance<br>
     * Test preconditions: valid parameters passed (base as OID)<br>
     * Expected: must pass without any exceptions
     */
    public final void testTrustAnchorX509CertificatebyteArray06()
            throws KeyStoreException {
        KeyStore ks = TestUtils.getKeyStore(true, TestUtils.TRUSTED);
        if (ks == null) {
            fail(getName() + ": not performed (could not create test KeyStore)");
        }
        String certAlias = "testca1";
        byte [] nameConstraints = getEncodingOid();
        new TrustAnchor(
                (X509Certificate)ks.getCertificate(certAlias),
                nameConstraints);
    }

    /**
     * Test #7 for <code>TrustAnchor(X509Certificate, byte[])</code> constructor<br>
     * Assertion: <code>IllegalArgumentException</code> if nameConstraints
     * parameter can not be decoded<br>
     * Test preconditions: pass invalid nameConstraints (OID) encoding<br>
     * Expected: IllegalArgumentException
     */
    public final void testTrustAnchorX509CertificatebyteArray07()
            throws KeyStoreException {
        KeyStore ks = TestUtils.getKeyStore(true, TestUtils.TRUSTED);
        if (ks == null) {
            fail(getName() + ": not performed (could not create test KeyStore)");
        }
        String certAlias = "testca1";
        byte [] nameConstraints = getEncodingOid();
        //corrupt Oid
        nameConstraints[10]= (byte) 0xFF;
        try {
            new TrustAnchor(
                    (X509Certificate)ks.getCertificate(certAlias),
                    nameConstraints);
            fail("IllegalArgumentException has not been thrown");
        } catch (IllegalArgumentException ok) {
        }
    }

    /**
     * Test #8 for <code>TrustAnchor(X509Certificate, byte[])</code> constructor<br>
     * Assertion: <code>IllegalArgumentException</code> if nameConstraints
     * parameter can not be decoded<br>
     * Test preconditions: pass invalid nameConstraints encodings<br>
     * Expected: IllegalArgumentException
     */
    public final void testTrustAnchorX509CertificatebyteArray08()
            throws KeyStoreException {
        KeyStore ks = TestUtils.getKeyStore(true, TestUtils.TRUSTED);
        if (ks == null) {
            fail(getName() + ": not performed (could not create test KeyStore)");
        }
        String certAlias = "testca1";
        // GeneralName tags for this test (1,2 and 3 are omitted)
        byte[] generalNameTag = new byte[] {
                (byte)0xa0,(byte)0xa4,(byte)0xa5,
                (byte)0x86,(byte)0x87,(byte)0x88
        };
        // wrong (for above tags) nameConstraints encoding
        byte[] wrongEncoding = new byte[] {
                (byte)0x30,(byte)0x0c, // sequence + length
                (byte)0xa1,(byte)0x0a, // excluded subtrees, tag, len
                (byte)0x30,(byte)0x08, // sequence of, tag, len
                (byte)0xa0, // element 6 - tag identifying GeneralName choice
                (byte)0x03, // GeneralName length
                (byte)0x01,(byte)0x01,(byte)0xff, // wrong GeneralName for any choice
                (byte)0x80,(byte)0x01,(byte)0x00 // minimum
        };
        for (int i=0; i<generalNameTag.length; i++) {
            // overwrite the GeneralName choice tag with each candidate in turn
            wrongEncoding[6] = generalNameTag[i];
            try {
                new TrustAnchor(
                        (X509Certificate)ks.getCertificate(certAlias),
                        wrongEncoding);
                fail("IllegalArgumentException has not been thrown for tag " +
                        (generalNameTag[i]&0xff));
            } catch (IllegalArgumentException ok) {
            }
        }
    }

    /**
     * Test #9 for <code>TrustAnchor(X509Certificate, byte[])</code> constructor<br>
     * Assertion: <code>IllegalArgumentException</code> if nameConstraints
     * parameter can not be decoded<br>
     * Test preconditions: pass valid and then invalid nameConstraints encodings
     * (GeneralName choice is [0] OtherName)<br>
     * Expected: no exception for valid encoding and IllegalArgumentException for invalid
     * @throws KeyStoreException
     */
    public final void testTrustAnchorX509CertificatebyteArray09()
            throws KeyStoreException {
        KeyStore ks = TestUtils.getKeyStore(true, TestUtils.TRUSTED);
        if (ks == null) {
            fail(getName() + ": not performed (could not create test KeyStore)");
        }
        String certAlias = "testca1";
        byte[] encoding = new byte[] {
                (byte)0x30,(byte)0x13,(byte)0xa1,(byte)0x11,
                (byte)0x30,(byte)0x0f,(byte)0xa0,(byte)0x0a,
                (byte)0x06,(byte)0x03,(byte)0x00,(byte)0x01,(byte)0x02,
                (byte)0xA0,(byte)0x03,1,1,(byte)0xff,
                (byte)0x80,(byte)0x01,(byte)0x00
        };
        try {
            new TrustAnchor(
                    (X509Certificate)ks.getCertificate(certAlias),
                    encoding);
        } catch (IllegalArgumentException failed) {
            fail("valid encoding not accepted");
        }
        // now corrupt encoding: set OtherName value tag to 1 (must be 0)
        encoding[13] = 1;
        try {
            new TrustAnchor(
                    (X509Certificate)ks.getCertificate(certAlias),
                    encoding);
            fail("invalid encoding accepted");
        } catch (IllegalArgumentException ok) {
        }
    }

    /**
     * Test for <code>getNameConstraints()</code> method<br>
     * Assertion: returns <code>nameConstraints</code> der encoding<br>
     * Test preconditions: valid nameConstraints parameter passed (not null)<br>
     * Expected: encoding passed to the ctor must match returned one<br>
     * Assertion: returns new <code>nameConstraints</code> der encoding each time<br>
     * Test preconditions: valid nameConstraints parameter passed (not null)<br>
     * Expected: must return new reference each time called
     */
    public final void testGetNameConstraints() throws KeyStoreException {
        KeyStore ks = TestUtils.getKeyStore(true, TestUtils.TRUSTED);
        if (ks == null) {
            fail(getName() + ": not performed (could not create test KeyStore)");
        }
        String certAlias = "testca1";
        byte[] nc = getFullEncoding();
        // sub testcase 1
        TrustAnchor ta = new TrustAnchor(
                (X509Certificate)ks.getCertificate(certAlias),
                nc);
        byte[] ncRet = ta.getNameConstraints();
        // assert 1
        assertTrue(Arrays.equals(nc, ncRet));
        assertNotSame(nc, ncRet);
        // assert 2
        assertNotSame(ncRet, ta.getNameConstraints());
    }

    /**
     * Test #2 for <code>getCAPublicKey()</code> method<br>
     *
     * Assertion: returns ... <code>null</code> if <code>TrustAnchor</code>
     * was not specified as public key and CA name or CA principal pair<br>
     * Test preconditions: test object is not specified as public key
     * and CA name or CA principal pair<br>
     * Expected: <code>null</code> as return value<br>
     * @throws KeyStoreException
     *
     */
    public final void testGetCAPublicKey02()
            throws InvalidKeySpecException, KeyStoreException {
        KeyStore ks = TestUtils.getKeyStore(true, TestUtils.TRUSTED);
        if (ks == null) {
            fail(getName() + ": not performed (could not create test KeyStore)");
        }
        TrustAnchor ta = new TrustAnchor(
                (X509Certificate)ks.getCertificate("testca1"),
                null);
        assertNull(ta.getCAPublicKey());
    }

    /**
     * Test #2 for <code>getCAName()</code> method<br>
     *
     * Assertion: returns ... <code>null</code> if <code>TrustAnchor</code>
     * was not specified as public key and CA name or CA principal pair<br>
     * Test preconditions: test object is not specified as public key
     * and CA name or CA principal pair<br>
     * Expected: <code>null</code> as return value<br>
     * @throws KeyStoreException
     */
    public final void testGetCAName02() throws KeyStoreException {
        KeyStore ks = TestUtils.getKeyStore(true, TestUtils.TRUSTED);
        if (ks == null) {
            fail(getName() + ": not performed (could not create test KeyStore)");
        }
        TrustAnchor ta = new TrustAnchor(
                (X509Certificate)ks.getCertificate("testca1"),
                null);
        assertNull(ta.getCAName());
    }

    /**
     * Test #1 for <code>getTrustedCert()</code> method<br>
     *
     * Assertion: returns most trusted CA certificate<br>
     * Test preconditions: valid certificate passed to the constructor<br>
     * Expected: the same certificate must be returned by the method<br>
     * @throws KeyStoreException
     *
     */
    public final void testGetTrustedCert01() throws KeyStoreException {
        KeyStore ks = TestUtils.getKeyStore(true, TestUtils.TRUSTED);
        if (ks == null) {
            fail(getName() + ": not performed (could not create test KeyStore)");
        }
        X509Certificate cert = (X509Certificate)ks.getCertificate("testca1");
        TrustAnchor ta = new TrustAnchor(cert, null);
        assertEquals(cert, ta.getTrustedCert());
    }

    /**
     * Test #2 for <code>getCA()</code> method<br>
     *
     * Assertion: returns ... <code>null</code> if <code>TrustAnchor</code>
     * was not specified as public key and CA name or CA principal pair<br>
     * Test preconditions: test object is not specified as public key
     * and CA name or CA principal pair<br>
     * Expected: <code>null</code> as return value<br>
     * @throws KeyStoreException
     */
    public final void testGetCA02() throws KeyStoreException {
        KeyStore ks = TestUtils.getKeyStore(true, TestUtils.TRUSTED);
        if (ks == null) {
            fail(getName() + ": not performed (could not create test KeyStore)");
        }
        TrustAnchor ta = new TrustAnchor(
                (X509Certificate)ks.getCertificate("testca1"),
                null);
        assertNull(ta.getCA());
    }

    /**
     * Test for <code>toString()</code> method<br>
     *
     * Assertion: returns string representation of this <code>TrustAnchor</code>
     * Test preconditions: several valid test objects created<br>
     * Expected: method returns not <code>null</code> in all cases<br>
     */
    public final void testToString() throws Exception {
        KeyStore ks = TestUtils.getKeyStore(true, TestUtils.TRUSTED);
        if (ks == null) {
            fail(getName() + ": not performed (could not create test KeyStore)");
        }
        // NOTE(review): alias "test" differs from "testca1" used by every other
        // test here; if it is absent from the KeyStore, getCertificate returns
        // null and the ctor would NPE — verify against the test KeyStore contents.
        String certAlias = "test";
        // sub testcase 1
        TrustAnchor ta = new TrustAnchor(
                (X509Certificate)ks.getCertificate(certAlias),
                getFullEncoding());
        assertNotNull("#1", ta.toString());
        PublicKey pk = new TestKeyPair(keyAlg).getPublic();
        // sub testcase 2
        ta = new TrustAnchor(validCaNameRfc2253, pk, getEncodingESOnly());
        assertNotNull("#2", ta.toString());
        // sub testcase 3
        X500Principal x500p = new X500Principal(validCaNameRfc2253);
        ta = new TrustAnchor(x500p, pk, getEncodingNoMinMax());
        assertNotNull("#3", ta.toString());
        // sub testcase 4
        ta = new TrustAnchor(x500p, pk, null);
        assertNotNull("#4", ta.toString());
    }

    //
    // Private stuff
    //

    /*
     * The following methods return valid DER encoding
     * for the following ASN.1 definition (as specified in RFC 3280 -
     * Internet X.509 Public Key Infrastructure.
     * Certificate and Certificate Revocation List (CRL) Profile.
     * http://www.ietf.org/rfc/rfc3280.txt):
     *
     *  NameConstraints ::= SEQUENCE {
     *             permittedSubtrees       [0]     GeneralSubtrees OPTIONAL,
     *             excludedSubtrees        [1]     GeneralSubtrees OPTIONAL }
     *
     *  GeneralSubtrees ::= SEQUENCE SIZE (1..MAX) OF GeneralSubtree
     *
     *  GeneralSubtree ::= SEQUENCE {
     *             base                    GeneralName,
     *             minimum         [0]     BaseDistance DEFAULT 0,
     *             maximum         [1]     BaseDistance OPTIONAL }
     *
     *  BaseDistance ::= INTEGER (0..MAX)
     *
     *  GeneralName ::= CHOICE {
     *             otherName                       [0]     OtherName,
     *             rfc822Name                      [1]     IA5String,
     *             dNSName                         [2]     IA5String,
     *             x400Address                     [3]     ORAddress,
     *             directoryName                   [4]     Name,
     *             ediPartyName                    [5]     EDIPartyName,
     *             uniformResourceIdentifier       [6]     IA5String,
     *             iPAddress                       [7]     OCTET STRING,
     *             registeredID                    [8]     OBJECT IDENTIFIER}
     */

    //
    // Full NameConstraints encoding
    // (generated by own encoder class created during test development)
    //
    // @return Full NameConstraints encoding
    // with all OPTIONAL values presented.
    //
    private static final byte[] getFullEncoding() {
        // DO NOT MODIFY!
        return new byte[] {
                (byte)0x30,(byte)0x81,(byte)0x8c,(byte)0xa0,
                (byte)0x44,(byte)0x30,(byte)0x16,(byte)0x86,
                (byte)0x0e,(byte)0x66,(byte)0x69,(byte)0x6c,
                (byte)0x65,(byte)0x3a,(byte)0x2f,(byte)0x2f,
                (byte)0x66,(byte)0x6f,(byte)0x6f,(byte)0x2e,
                (byte)0x63,(byte)0x6f,(byte)0x6d,(byte)0x80,
                (byte)0x01,(byte)0x00,(byte)0x81,(byte)0x01,
                (byte)0x01,(byte)0x30,(byte)0x16,(byte)0x86,
                (byte)0x0e,(byte)0x66,(byte)0x69,(byte)0x6c,
                (byte)0x65,(byte)0x3a,(byte)0x2f,(byte)0x2f,
                (byte)0x62,(byte)0x61,(byte)0x72,(byte)0x2e,
                (byte)0x63,(byte)0x6f,(byte)0x6d,(byte)0x80,
                (byte)0x01,(byte)0x00,(byte)0x81,(byte)0x01,
                (byte)0x01,(byte)0x30,(byte)0x12,(byte)0x86,
                (byte)0x0a,(byte)0x66,(byte)0x69,(byte)0x6c,
                (byte)0x65,(byte)0x3a,(byte)0x2f,(byte)0x2f,
                (byte)0x6d,(byte)0x75,(byte)0x75,(byte)0x80,
                (byte)0x01,(byte)0x00,(byte)0x81,(byte)0x01,
                (byte)0x01,(byte)0xa1,(byte)0x44,(byte)0x30,
                (byte)0x16,(byte)0x86,(byte)0x0e,(byte)0x68,
                (byte)0x74,(byte)0x74,(byte)0x70,(byte)0x3a,
                (byte)0x2f,(byte)0x2f,(byte)0x66,(byte)0x6f,
                (byte)0x6f,(byte)0x2e,(byte)0x63,(byte)0x6f,
                (byte)0x6d,(byte)0x80,(byte)0x01,(byte)0x00,
                (byte)0x81,(byte)0x01,(byte)0x01,(byte)0x30,
                (byte)0x16,(byte)0x86,(byte)0x0e,(byte)0x68,
                (byte)0x74,(byte)0x74,(byte)0x70,(byte)0x3a,
                (byte)0x2f,(byte)0x2f,(byte)0x62,(byte)0x61,
                (byte)0x72,(byte)0x2e,(byte)0x63,(byte)0x6f,
                (byte)0x6d,(byte)0x80,(byte)0x01,(byte)0x00,
                (byte)0x81,(byte)0x01,(byte)0x01,(byte)0x30,
                (byte)0x12,(byte)0x86,(byte)0x0a,(byte)0x68,
                (byte)0x74,(byte)0x74,(byte)0x70,(byte)0x3a,
                (byte)0x2f,(byte)0x2f,(byte)0x6d,(byte)0x75,
                (byte)0x75,(byte)0x80,(byte)0x01,(byte)0x00,
                (byte)0x81,(byte)0x01,(byte)0x01
        };
    }

    //
    // NameConstraints encoding without excludedSubtrees
    // (generated by own encoder class created during test development)
    //
    // @return NameConstraints encoding with
    // permittedSubtrees only; all OPTIONAL
    // values in permittedSubtrees are presented.
    //
    private static final byte[] getEncodingPSOnly() {
        // DO NOT MODIFY!
        return new byte[] {
                (byte)0x30,(byte)0x46,(byte)0xa0,(byte)0x44,
                (byte)0x30,(byte)0x16,(byte)0x86,(byte)0x0e,
                (byte)0x66,(byte)0x69,(byte)0x6c,(byte)0x65,
                (byte)0x3a,(byte)0x2f,(byte)0x2f,(byte)0x66,
                (byte)0x6f,(byte)0x6f,(byte)0x2e,(byte)0x63,
                (byte)0x6f,(byte)0x6d,(byte)0x80,(byte)0x01,
                (byte)0x00,(byte)0x81,(byte)0x01,(byte)0x01,
                (byte)0x30,(byte)0x16,(byte)0x86,(byte)0x0e,
                (byte)0x66,(byte)0x69,(byte)0x6c,(byte)0x65,
                (byte)0x3a,(byte)0x2f,(byte)0x2f,(byte)0x62,
                (byte)0x61,(byte)0x72,(byte)0x2e,(byte)0x63,
                (byte)0x6f,(byte)0x6d,(byte)0x80,(byte)0x01,
                (byte)0x00,(byte)0x81,(byte)0x01,(byte)0x01,
                (byte)0x30,(byte)0x12,(byte)0x86,(byte)0x0a,
                (byte)0x66,(byte)0x69,(byte)0x6c,(byte)0x65,
                (byte)0x3a,(byte)0x2f,(byte)0x2f,(byte)0x6d,
                (byte)0x75,(byte)0x75,(byte)0x80,(byte)0x01,
                (byte)0x00,(byte)0x81,(byte)0x01,(byte)0x01,
        };
    }

    //
    // NameConstraints encoding without permittedSubtrees
    // (generated by own encoder class created during test development)
    //
    // @return NameConstraints encoding with
    // excludedSubtrees only; all OPTIONAL
    // values in excludedSubtrees are presented.
    //
    private static final byte[] getEncodingESOnly() {
        // DO NOT MODIFY!
        return new byte[] {
                (byte)0x30,(byte)0x46,(byte)0xa1,(byte)0x44,
                (byte)0x30,(byte)0x16,(byte)0x86,(byte)0x0e,
                (byte)0x68,(byte)0x74,(byte)0x74,(byte)0x70, // http
                (byte)0x3a,(byte)0x2f,(byte)0x2f,(byte)0x66, // ://f
                (byte)0x6f,(byte)0x6f,(byte)0x2e,(byte)0x63, // oo.c
                (byte)0x6f,(byte)0x6d,(byte)0x80,(byte)0x01, // om
                (byte)0x00,(byte)0x81,(byte)0x01,(byte)0x01,
                (byte)0x30,(byte)0x16,(byte)0x86,(byte)0x0e,
                (byte)0x68,(byte)0x74,(byte)0x74,(byte)0x70,
                (byte)0x3a,(byte)0x2f,(byte)0x2f,(byte)0x62,
                (byte)0x61,(byte)0x72,(byte)0x2e,(byte)0x63,
                (byte)0x6f,(byte)0x6d,(byte)0x80,(byte)0x01,
                (byte)0x00,(byte)0x81,(byte)0x01,(byte)0x01,
                (byte)0x30,(byte)0x12,(byte)0x86,(byte)0x0a,
                (byte)0x68,(byte)0x74,(byte)0x74,(byte)0x70,
                (byte)0x3a,(byte)0x2f,(byte)0x2f,(byte)0x6d,
                (byte)0x75,(byte)0x75,(byte)0x80,(byte)0x01,
                (byte)0x00,(byte)0x81,(byte)0x01,(byte)0x01,
        };
    }

    //
    // NameConstraints full encoding with all (OPTIONAL)
    // minimum/maximum GeneralSubtree fields OMITTED
    // (generated by own encoder class created during test development)
    //
    // @return Full NameConstraints encoding
    // with all (OPTIONAL) minimum/maximum
    // GeneralSubtree fields OMITTED
    //
    private static final byte[] getEncodingNoMinMax() {
        // DO NOT MODIFY!
        return new byte[] {
                (byte)0x30,(byte)0x68,(byte)0xa0,(byte)0x32,
                (byte)0x30,(byte)0x10,(byte)0x86,(byte)0x0e,
                (byte)0x66,(byte)0x69,(byte)0x6c,(byte)0x65,
                (byte)0x3a,(byte)0x2f,(byte)0x2f,(byte)0x66,
                (byte)0x6f,(byte)0x6f,(byte)0x2e,(byte)0x63,
                (byte)0x6f,(byte)0x6d,(byte)0x30,(byte)0x10,
                (byte)0x86,(byte)0x0e,(byte)0x66,(byte)0x69,
                (byte)0x6c,(byte)0x65,(byte)0x3a,(byte)0x2f,
                (byte)0x2f,(byte)0x62,(byte)0x61,(byte)0x72,
                (byte)0x2e,(byte)0x63,(byte)0x6f,(byte)0x6d,
                (byte)0x30,(byte)0x0c,(byte)0x86,(byte)0x0a,
                (byte)0x66,(byte)0x69,(byte)0x6c,(byte)0x65,
                (byte)0x3a,(byte)0x2f,(byte)0x2f,(byte)0x6d,
                (byte)0x75,(byte)0x75,(byte)0xa1,(byte)0x32,
                (byte)0x30,(byte)0x10,(byte)0x86,(byte)0x0e,
                (byte)0x68,(byte)0x74,(byte)0x74,(byte)0x70,
                (byte)0x3a,(byte)0x2f,(byte)0x2f,(byte)0x66,
                (byte)0x6f,(byte)0x6f,(byte)0x2e,(byte)0x63,
                (byte)0x6f,(byte)0x6d,(byte)0x30,(byte)0x10,
                (byte)0x86,(byte)0x0e,(byte)0x68,(byte)0x74,
                (byte)0x74,(byte)0x70,(byte)0x3a,(byte)0x2f,
                (byte)0x2f,(byte)0x62,(byte)0x61,(byte)0x72,
                (byte)0x2e,(byte)0x63,(byte)0x6f,(byte)0x6d,
                (byte)0x30,(byte)0x0c,(byte)0x86,(byte)0x0a,
                (byte)0x68,(byte)0x74,(byte)0x74,(byte)0x70,
                (byte)0x3a,(byte)0x2f,(byte)0x2f,(byte)0x6d,
                (byte)0x75,(byte)0x75,
        };
    }

    // Returns OID encoding
    // (generated by own encoder class created during test development)
    private static final byte[] getEncodingOid() {
        // DO NOT MODIFY!
        return new byte[] {
                (byte) 0x30, (byte) 0x09, (byte) 0xA0, (byte) 0x07,
                (byte) 0x30, (byte) 0x05, (byte) 0x88, (byte) 0x03,
                (byte) 0x2A, (byte) 0x03, (byte) 0x04
        };
    }
}
package com.saulpower.GreenWireTest.database;

import java.util.List;
import de.greenrobot.dao.sync.GreenSync;
import com.google.gson.reflect.TypeToken;
import java.util.ArrayList;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteStatement;

import de.greenrobot.dao.AbstractDao;
import de.greenrobot.dao.Property;
import de.greenrobot.dao.internal.SqlUtils;
import de.greenrobot.dao.internal.DaoConfig;
import de.greenrobot.dao.query.Query;
import de.greenrobot.dao.query.QueryBuilder;

import com.saulpower.GreenWireTest.database.CheckOut;
import com.saulpower.GreenWireTest.database.CheckIn.TimeCardCaptureMethod;

// THIS CODE IS GENERATED BY greenDAO, DO NOT EDIT.
// NOTE(review): comments below are review annotations only; they will be lost
// on the next greenDAO code generation run.
/**
 * DAO for table CHECK_OUT.
 */
public class CheckOutDao extends AbstractDao<CheckOut, Long> {

    public static final String TABLENAME = "CHECK_OUT";

    /**
     * Properties of entity CheckOut.<br/>
     * Can be used for QueryBuilder and for referencing column names.
     * Ordinals (first ctor argument) are cursor column offsets used by readEntity/readKey.
     */
    public static class Properties {
        public final static Property ExternalID = new Property(0, String.class, "externalID", false, "EXTERNAL_ID");
        public final static Property Guid = new Property(1, String.class, "guid", false, "GUID");
        public final static Property Name = new Property(2, String.class, "name", false, "NAME");
        public final static Property CheckOutsStudentId = new Property(3, long.class, "checkOutsStudentId", false, "CHECK_OUTS_STUDENT_ID");
        public final static Property TagString = new Property(4, String.class, "tagString", false, "TAG_STRING");
        public final static Property TenantID = new Property(5, Long.class, "tenantID", false, "TENANT_ID");
        public final static Property SaveResultSaveResultId = new Property(6, long.class, "saveResultSaveResultId", false, "SAVE_RESULT_SAVE_RESULT_ID");
        public final static Property DateLastModified = new Property(7, String.class, "dateLastModified", false, "DATE_LAST_MODIFIED");
        public final static Property Latitude = new Property(8, Float.class, "latitude", false, "LATITUDE");
        public final static Property CaptureMethod = new Property(9, TimeCardCaptureMethod.class, "captureMethod", false, "CAPTURE_METHOD");
        public final static Property SyncBaseId = new Property(10, Long.class, "syncBaseId", false, "SYNC_BASE_ID");
        public final static Property ApproverID = new Property(11, String.class, "approverID", false, "APPROVER_ID");
        public final static Property IsDeleted = new Property(12, Boolean.class, "isDeleted", false, "IS_DELETED");
        public final static Property Version = new Property(13, Integer.class, "version", false, "VERSION");
        // Note: primary key "_id" sits at column offset 14, not 0.
        public final static Property Id = new Property(14, Long.class, "id", true, "_id");
        public final static Property DateCreated = new Property(15, String.class, "dateCreated", false, "DATE_CREATED");
        public final static Property Longitude = new Property(16, Float.class, "longitude", false, "LONGITUDE");
    };

    private DaoSession daoSession;

    private Query<CheckOut> student_CheckOutsQuery;

    public CheckOutDao(DaoConfig config) {
        super(config);
    }

    public CheckOutDao(DaoConfig config, DaoSession daoSession) {
        super(config, daoSession);
        this.daoSession = daoSession;
    }

    /** Creates the underlying database table. */
    public static void createTable(SQLiteDatabase db, boolean ifNotExists) {
        String constraint = ifNotExists? "IF NOT EXISTS ": "";
        db.execSQL("CREATE TABLE " + constraint + "'CHECK_OUT' (" + //
                "'EXTERNAL_ID' TEXT," + // 0: externalID
                "'GUID' TEXT," + // 1: guid
                "'NAME' TEXT," + // 2: name
                "'CHECK_OUTS_STUDENT_ID' INTEGER NOT NULL ," + // 3: checkOutsStudentId
                "'TAG_STRING' TEXT," + // 4: tagString
                "'TENANT_ID' INTEGER," + // 5: tenantID
                "'SAVE_RESULT_SAVE_RESULT_ID' INTEGER NOT NULL ," + // 6: saveResultSaveResultId
                "'DATE_LAST_MODIFIED' TEXT," + // 7: dateLastModified
                "'LATITUDE' REAL," + // 8: latitude
                "'CAPTURE_METHOD' INTEGER," + // 9: captureMethod
                "'SYNC_BASE_ID' INTEGER REFERENCES 'SYNC_BASE'('SYNC_BASE_ID') ," + // 10: syncBaseId
                "'APPROVER_ID' TEXT," + // 11: approverID
                "'IS_DELETED' INTEGER," + // 12: isDeleted
                "'VERSION' INTEGER," + // 13: version
                "'_id' INTEGER PRIMARY KEY ," + // 14: id
                "'DATE_CREATED' TEXT," + // 15: dateCreated
                "'LONGITUDE' REAL);"); // 16: longitude
    }

    /** Drops the underlying database table. */
    public static void dropTable(SQLiteDatabase db, boolean ifExists) {
        String sql = "DROP TABLE " + (ifExists ? "IF EXISTS " : "") + "'CHECK_OUT'";
        db.execSQL(sql);
    }

    /**
     * @inheritdoc
     * Binds entity fields to 1-based statement parameters; nullable fields are
     * skipped when null so the column stays NULL.
     */
    @Override
    protected void bindValues(SQLiteStatement stmt, CheckOut entity) {
        stmt.clearBindings();

        String externalID = entity.getExternalID();
        if (externalID != null) {
            stmt.bindString(1, externalID);
        }

        String guid = entity.getGuid();
        if (guid != null) {
            stmt.bindString(2, guid);
        }

        String name = entity.getName();
        if (name != null) {
            stmt.bindString(3, name);
        }
        stmt.bindLong(4, entity.getCheckOutsStudentId());

        String tagString = entity.getTagString();
        if (tagString != null) {
            stmt.bindString(5, tagString);
        }

        Long tenantID = entity.getTenantID();
        if (tenantID != null) {
            stmt.bindLong(6, tenantID);
        }
        stmt.bindLong(7, entity.getSaveResultSaveResultId());

        String dateLastModified = entity.getDateLastModified();
        if (dateLastModified != null) {
            stmt.bindString(8, dateLastModified);
        }

        Float latitude = entity.getLatitude();
        if (latitude != null) {
            stmt.bindDouble(9, latitude);
        }

        // Enum persisted as its integer value.
        TimeCardCaptureMethod captureMethod = entity.getCaptureMethod();
        if (captureMethod != null) {
            stmt.bindLong(10, captureMethod.getValue());
        }

        Long syncBaseId = entity.getSyncBaseId();
        if (syncBaseId != null) {
            stmt.bindLong(11, syncBaseId);
        }

        String approverID = entity.getApproverID();
        if (approverID != null) {
            stmt.bindString(12, approverID);
        }

        // Boolean persisted as 0/1.
        Boolean isDeleted = entity.getIsDeleted();
        if (isDeleted != null) {
            stmt.bindLong(13, isDeleted ? 1l: 0l);
        }

        Integer version = entity.getVersion();
        if (version != null) {
            stmt.bindLong(14, version);
        }

        Long id = entity.getId();
        if (id != null) {
            stmt.bindLong(15, id);
        }

        String dateCreated = entity.getDateCreated();
        if (dateCreated != null) {
            stmt.bindString(16, dateCreated);
        }

        Float longitude = entity.getLongitude();
        if (longitude != null) {
            stmt.bindDouble(17, longitude);
        }
    }

    @Override
    protected void attachEntity(CheckOut entity) {
        super.attachEntity(entity);
        entity.__setDaoSession(daoSession);
    }

    /** @inheritdoc  Primary key lives at column offset 14 (see Properties.Id). */
    @Override
    public Long readKey(Cursor cursor, int offset) {
        return cursor.isNull(offset + 14) ? null : cursor.getLong(offset + 14);
    }

    /** @inheritdoc */
    @Override
    public CheckOut readEntity(Cursor cursor, int offset) {
        CheckOut entity = new CheckOut( //
            cursor.isNull(offset + 0) ? null : cursor.getString(offset + 0), // externalID
            cursor.isNull(offset + 1) ? null : cursor.getString(offset + 1), // guid
            cursor.isNull(offset + 2) ? null : cursor.getString(offset + 2), // name
            cursor.getLong(offset + 3), // checkOutsStudentId
            cursor.isNull(offset + 4) ? null : cursor.getString(offset + 4), // tagString
            cursor.isNull(offset + 5) ? null : cursor.getLong(offset + 5), // tenantID
            cursor.getLong(offset + 6), // saveResultSaveResultId
            cursor.isNull(offset + 7) ? null : cursor.getString(offset + 7), // dateLastModified
            cursor.isNull(offset + 8) ? null : cursor.getFloat(offset + 8), // latitude
            cursor.isNull(offset + 9) ? null : TimeCardCaptureMethod.fromInt(cursor.getLong(offset + 9)), // captureMethod
            cursor.isNull(offset + 10) ? null : cursor.getLong(offset + 10), // syncBaseId
            cursor.isNull(offset + 11) ? null : cursor.getString(offset + 11), // approverID
            cursor.isNull(offset + 12) ? null : cursor.getShort(offset + 12) != 0, // isDeleted
            cursor.isNull(offset + 13) ? null : cursor.getInt(offset + 13), // version
            cursor.isNull(offset + 14) ? null : cursor.getLong(offset + 14), // id
            cursor.isNull(offset + 15) ? null : cursor.getString(offset + 15), // dateCreated
            cursor.isNull(offset + 16) ? null : cursor.getFloat(offset + 16) // longitude
        );
        return entity;
    }

    /** @inheritdoc  In-place variant: populates an existing entity from the cursor row. */
    @Override
    public void readEntity(Cursor cursor, CheckOut entity, int offset) {
        entity.setExternalID(cursor.isNull(offset + 0) ? null : cursor.getString(offset + 0));
        entity.setGuid(cursor.isNull(offset + 1) ? null : cursor.getString(offset + 1));
        entity.setName(cursor.isNull(offset + 2) ? null : cursor.getString(offset + 2));
        entity.setCheckOutsStudentId(cursor.getLong(offset + 3));
        entity.setTagString(cursor.isNull(offset + 4) ? null : cursor.getString(offset + 4));
        entity.setTenantID(cursor.isNull(offset + 5) ? null : cursor.getLong(offset + 5));
        entity.setSaveResultSaveResultId(cursor.getLong(offset + 6));
        entity.setDateLastModified(cursor.isNull(offset + 7) ? null : cursor.getString(offset + 7));
        entity.setLatitude(cursor.isNull(offset + 8) ? null : cursor.getFloat(offset + 8));
        entity.setCaptureMethod(cursor.isNull(offset + 9) ? null : TimeCardCaptureMethod.fromInt(cursor.getLong(offset + 9)));
        entity.setSyncBaseId(cursor.isNull(offset + 10) ? null : cursor.getLong(offset + 10));
        entity.setApproverID(cursor.isNull(offset + 11) ? null : cursor.getString(offset + 11));
        entity.setIsDeleted(cursor.isNull(offset + 12) ? null : cursor.getShort(offset + 12) != 0);
        entity.setVersion(cursor.isNull(offset + 13) ? null : cursor.getInt(offset + 13));
        entity.setId(cursor.isNull(offset + 14) ? null : cursor.getLong(offset + 14));
        entity.setDateCreated(cursor.isNull(offset + 15) ? null : cursor.getString(offset + 15));
        // Statement continues on the next line of the file (outside this chunk).
        entity.setLongitude(cursor.isNull(offset + 16) ?
null : cursor.getFloat(offset + 16)); } /** @inheritdoc */ @Override protected Long updateKeyAfterInsert(CheckOut entity, long rowId) { entity.setId(rowId); return rowId; } /** @inheritdoc */ @Override public Long getKey(CheckOut entity) { if(entity != null) { return entity.getId(); } else { return null; } } /** @inheritdoc */ @Override protected boolean isEntityUpdateable() { return true; } /** Internal query to resolve the "checkOuts" to-many relationship of Student. */ public List<CheckOut> _queryStudent_CheckOuts(long checkOutsStudentId) { synchronized (this) { if (student_CheckOutsQuery == null) { QueryBuilder<CheckOut> queryBuilder = queryBuilder(); queryBuilder.where(Properties.CheckOutsStudentId.eq(null)); student_CheckOutsQuery = queryBuilder.build(); } } Query<CheckOut> query = student_CheckOutsQuery.forCurrentThread(); query.setParameter(0, checkOutsStudentId); return query.list(); } private String selectDeep; protected String getSelectDeep() { if (selectDeep == null) { StringBuilder builder = new StringBuilder("SELECT "); SqlUtils.appendColumns(builder, "T", getAllColumns()); builder.append(','); SqlUtils.appendColumns(builder, "T0", daoSession.getSaveResultDao().getAllColumns()); builder.append(" FROM CHECK_OUT T"); builder.append(" LEFT JOIN SAVE_RESULT T0 ON T.'SAVE_RESULT_SAVE_RESULT_ID'=T0.'_id'"); builder.append(' '); selectDeep = builder.toString(); } return selectDeep; } protected CheckOut loadCurrentDeep(Cursor cursor, boolean lock) { CheckOut entity = loadCurrent(cursor, 0, lock); int offset = getAllColumns().length; SaveResult saveResult = loadCurrentOther(daoSession.getSaveResultDao(), cursor, offset); if(saveResult != null) { entity.setSaveResult(saveResult); } return entity; } public CheckOut loadDeep(Long key) { assertSinglePk(); if (key == null) { return null; } StringBuilder builder = new StringBuilder(getSelectDeep()); builder.append("WHERE "); SqlUtils.appendColumnsEqValue(builder, "T", getPkColumns()); String sql = 
builder.toString(); String[] keyArray = new String[] { key.toString() }; Cursor cursor = db.rawQuery(sql, keyArray); try { boolean available = cursor.moveToFirst(); if (!available) { return null; } else if (!cursor.isLast()) { throw new IllegalStateException("Expected unique result, but count was " + cursor.getCount()); } return loadCurrentDeep(cursor, true); } finally { cursor.close(); } } /** Reads all available rows from the given cursor and returns a list of new ImageTO objects. */ public List<CheckOut> loadAllDeepFromCursor(Cursor cursor) { int count = cursor.getCount(); List<CheckOut> list = new ArrayList<CheckOut>(count); if (cursor.moveToFirst()) { if (identityScope != null) { identityScope.lock(); identityScope.reserveRoom(count); } try { do { list.add(loadCurrentDeep(cursor, false)); } while (cursor.moveToNext()); } finally { if (identityScope != null) { identityScope.unlock(); } } } return list; } protected List<CheckOut> loadDeepAllAndCloseCursor(Cursor cursor) { try { return loadAllDeepFromCursor(cursor); } finally { cursor.close(); } } /** A raw-style query where you can pass any WHERE clause and arguments. */ public List<CheckOut> queryDeep(String where, String... 
selectionArg) { Cursor cursor = db.rawQuery(getSelectDeep() + where, selectionArg); return loadDeepAllAndCloseCursor(cursor); } @Override protected void onPreInsertEntity(CheckOut entity) { entity.insertBase(daoSession.getSyncBaseDao()); entity.setSyncBaseId(entity.getSyncBaseId()); } @Override protected void onPreLoadEntity(CheckOut entity) { entity.loadBase(daoSession.getSyncBaseDao(), entity.getSyncBaseId()); } @Override protected void onPreRefreshEntity(CheckOut entity) { entity.loadBase(daoSession.getSyncBaseDao(), entity.getSyncBaseId()); } @Override protected void onPreUpdateEntity(CheckOut entity) { entity.updateBase(daoSession.getSyncBaseDao()); } @Override protected void onPreDeleteEntity(CheckOut entity) { entity.deleteBase(daoSession.getSyncBaseDao()); } static { GreenSync.registerListTypeToken("CheckOut", new TypeToken<List<CheckOut>>(){}.getType()); GreenSync.registerTypeToken("CheckOut", CheckOut.class); } }
// RabbitOperation: base class for flow.ci RabbitMQ access. Owns one Connection/Channel pair,
// a per-instance ThreadPoolTaskExecutor sized by `concurrency`, and a map of active
// QueueConsumer instances keyed by queue name. basicQos(0, concurrency, false) caps unacked
// deliveries per consumer at `concurrency`.
/* * Copyright (c) 2019 flow.ci * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.flowci.core.common.rabbit; import com.flowci.core.common.config.QueueConfig; import com.flowci.core.common.helper.ThreadHelper; import com.flowci.util.StringHelper; import com.rabbitmq.client.*; import lombok.Getter; import lombok.extern.log4j.Log4j2; import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor; import java.io.IOException; import java.util.HashMap; import java.util.Map; import java.util.Objects; import java.util.concurrent.ConcurrentHashMap; import java.util.function.Function; @Log4j2 @Getter public abstract class RabbitOperation implements AutoCloseable { protected final Connection conn; protected final Channel channel; protected final Integer concurrency; protected final String name; protected final ThreadPoolTaskExecutor executor; // key as queue name, value as instance protected final ConcurrentHashMap<String, QueueConsumer> consumers = new ConcurrentHashMap<>(); public RabbitOperation(Connection conn, Integer concurrency, String name) throws IOException { this.conn = conn; this.concurrency = concurrency; this.name = name; this.channel = conn.createChannel(); this.channel.basicQos(0, concurrency, false); this.executor = ThreadHelper.createTaskExecutor(concurrency, concurrency, 1000, name + "-"); } public String declare(String queue, boolean durable) throws IOException { return this.channel.queueDeclare(queue, durable, false, false, null).getQueue(); }
// Overload declaring a priority queue wired to a dead-letter exchange/routing key.
// delete/purge/send swallow IOException and report success as a boolean; send(...) with
// priority also sets per-message TTL (expireInSecond is converted to milliseconds).
// createConsumer() registers by queue name (a second registration for the same queue
// replaces — and orphans — the previous consumer); close() cancels all consumers, closes
// the channel, then shuts the executor down.
public String declare(String queue, boolean durable, Integer maxPriority, String dlExName) throws IOException { Map<String, Object> props = new HashMap<>(1); props.put("x-max-priority", maxPriority); props.put("x-dead-letter-exchange", dlExName); props.put("x-dead-letter-routing-key", QueueConfig.JobDlRoutingKey); return this.channel.queueDeclare(queue, durable, false, false, props).getQueue(); } public boolean delete(String queue) { try { this.channel.queueDelete(queue); return true; } catch (IOException e) { return false; } } public boolean purge(String queue) { try { this.channel.queuePurge(queue); return true; } catch (IOException e) { return false; } } /** * Send to routing key with default exchange */ public boolean send(String routingKey, byte[] body) { try { this.channel.basicPublish(StringHelper.EMPTY, routingKey, null, body); return true; } catch (IOException e) { return false; } } /** * Send to routing key with default exchange and priority */ public boolean send(String routingKey, byte[] body, Integer priority, Long expireInSecond) { try { AMQP.BasicProperties props = new AMQP.BasicProperties.Builder() .priority(priority) .expiration(Long.toString(expireInSecond * 1000)) .build(); this.channel.basicPublish(StringHelper.EMPTY, routingKey, props, body); return true; } catch (IOException e) { return false; } } public QueueConsumer getConsumer(String queue) { return consumers.get(queue); } public QueueConsumer createConsumer(String queue, Function<Message, Boolean> consume) { QueueConsumer consumer = new QueueConsumer(queue, consume); consumers.put(queue, consumer); return consumer; } public boolean removeConsumer(String queue) { QueueConsumer consumer = consumers.remove(queue); if (Objects.isNull(consumer)) { return false; } return consumer.cancel(); } /** * It will be called when spring context stop * * @throws Exception */ @Override public void close() throws Exception { consumers.forEach((s, queueConsumer) -> queueConsumer.cancel()); channel.close();
// QueueConsumer dispatches each delivery onto the shared executor, so the callback runs off
// the AMQP connection thread. NOTE(review): the callback's Boolean result is discarded
// ("ingoreForNow" [sic]) — acking appears to be the callback's responsibility via
// Message.sendAck(); confirm intended. cancel() pushes Message.STOP_SIGN through the callback
// before basicCancel so the handler can observe shutdown (STOP_SIGN has null channel/envelope,
// so sendAck() on it would NPE — callers must check for it first).
executor.shutdown(); } public class QueueConsumer extends DefaultConsumer { private final String queue; private final Function<Message, Boolean> consume; QueueConsumer(String queue, Function<Message, Boolean> consume) { super(channel); this.queue = queue; this.consume = consume; } @Override public void handleDelivery(String consumerTag, Envelope envelope, AMQP.BasicProperties properties, byte[] body) throws IOException { consume(body, envelope); } public void consume(byte[] body, Envelope envelope) { executor.execute(() -> { Boolean ingoreForNow = consume.apply(new Message(getChannel(), body, envelope)); }); } public String start(boolean autoAck) { try { String tag = getChannel().basicConsume(queue, autoAck, this); log.info("[Consumer STARTED] queue {} with tag {}", queue, tag); return tag; } catch (IOException e) { log.warn(e.getMessage()); return null; } } boolean cancel() { try { if (Objects.isNull(getConsumerTag())) { return true; // not started } consume.apply(Message.STOP_SIGN); getChannel().basicCancel(getConsumerTag()); log.info("[Consumer STOP] queue {} with tag {}", queue, getConsumerTag()); return true; } catch (IOException e) { log.warn(e.getMessage()); return false; } } } @Getter public static class Message { public static final Message STOP_SIGN = new Message(null, new byte[0], null); private final Channel channel; private final byte[] body; private final Envelope envelope; public Message(Channel channel, byte[] body, Envelope envelope) { this.channel = channel; this.body = body; this.envelope = envelope; } public boolean sendAck() { try { getChannel().basicAck(envelope.getDeliveryTag(), false); return true; } catch (IOException e) { return false; } } } }
// TestNG unit tests for X509CertRequest: CSR construction/parsing, common-name, DNS-name and
// public-key comparisons, and validate() success/failure paths. Fixture CSR/PEM files live
// under src/test/resources; tests must run from the module root for the relative paths to
// resolve. Constructor tests: a malformed CSR string raises CryptoException.
package com.yahoo.athenz.zts.cert; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.security.cert.X509Certificate; import java.util.List; import static org.testng.Assert.*; import org.bouncycastle.pkcs.PKCS10CertificationRequest; import org.mockito.Mockito; import org.testng.annotations.Test; import com.yahoo.athenz.auth.Authorizer; import com.yahoo.athenz.auth.Principal; import com.yahoo.athenz.auth.util.Crypto; import com.yahoo.athenz.auth.util.CryptoException; public class X509CertRequestTest { @Test public void testConstructorValidCsr() throws IOException { Path path = Paths.get("src/test/resources/valid_email.csr"); String csr = new String(Files.readAllBytes(path)); X509CertRequest certReq = new X509CertRequest(csr); assertNotNull(certReq); } @Test public void testConstructorInvalidCsr() { X509CertRequest certReq = null; try { certReq = new X509CertRequest("csr"); fail(); } catch (CryptoException ignored) { } assertNull(certReq); } @Test public void testParseCertRequest() throws IOException { Path path = Paths.get("src/test/resources/athenz.instanceid.csr"); String csr = new String(Files.readAllBytes(path)); X509CertRequest certReq = new X509CertRequest(csr); assertNotNull(certReq); StringBuilder errorMsg = new StringBuilder(256); assertTrue(certReq.parseCertRequest(errorMsg)); } @Test public void testParseCertRequestIPs() throws IOException { Path path = Paths.get("src/test/resources/multiple_ips.csr"); String csr = new String(Files.readAllBytes(path)); X509CertRequest certReq = new X509CertRequest(csr); assertNotNull(certReq); StringBuilder errorMsg = new StringBuilder(256); assertTrue(certReq.parseCertRequest(errorMsg)); List<String> values = certReq.getDnsNames(); assertEquals(values.size(), 2); assertTrue(values.contains("production.athenz.ostk.athenz.cloud")); assertTrue(values.contains("1001.instanceid.athenz.ostk.athenz.cloud")); values = certReq.getIpAddresses();
// multiple_ips.csr carries two SAN IP addresses. Accessor tests below pin the parsed
// common name ("athenz.production"), instance id ("1001") and dns suffix from
// athenz.instanceid.csr after parseCertRequest().
assertEquals(values.size(), 2); assertTrue(values.contains("10.11.12.13")); assertTrue(values.contains("10.11.12.14")); } @Test public void testParseCertRequestInvalid() throws IOException { Path path = Paths.get("src/test/resources/invalid_dns.csr"); String csr = new String(Files.readAllBytes(path)); StringBuilder errorMsg = new StringBuilder(256); X509CertRequest certReq = new X509CertRequest(csr); assertNotNull(certReq); assertFalse(certReq.parseCertRequest(errorMsg)); } @Test public void testCompareCommonName() throws IOException { Path path = Paths.get("src/test/resources/athenz.instanceid.csr"); String csr = new String(Files.readAllBytes(path)); StringBuilder errorMsg = new StringBuilder(256); X509CertRequest certReq = new X509CertRequest(csr); assertNotNull(certReq); certReq.parseCertRequest(errorMsg); assertTrue(certReq.compareCommonName("athenz.production")); assertEquals(certReq.getCommonName(), "athenz.production"); assertFalse(certReq.compareCommonName("sys.production")); assertFalse(certReq.compareCommonName("athenz.storage")); } @Test public void testInstanceId() throws IOException { Path path = Paths.get("src/test/resources/athenz.instanceid.csr"); String csr = new String(Files.readAllBytes(path)); StringBuilder errorMsg = new StringBuilder(256); X509CertRequest certReq = new X509CertRequest(csr); assertNotNull(certReq); certReq.parseCertRequest(errorMsg); assertEquals(certReq.getInstanceId(), "1001"); } @Test public void testDnsSuffix() throws IOException { Path path = Paths.get("src/test/resources/athenz.instanceid.csr"); String csr = new String(Files.readAllBytes(path)); StringBuilder errorMsg = new StringBuilder(256); X509CertRequest certReq = new X509CertRequest(csr); assertNotNull(certReq); certReq.parseCertRequest(errorMsg); assertEquals(certReq.getDnsSuffix(), "ostk.athenz.cloud"); } @Test public void testCompareDnsNames() throws IOException { Path path = Paths.get("src/test/resources/athenz.instanceid.csr"); String csr = new
// compareDnsNames tests: CSR SAN dNSNames checked against a certificate's SANs — match,
// size mismatch (different cert), and value mismatch (athenz.mismatch.dns.csr) cases.
String(Files.readAllBytes(path)); StringBuilder errorMsg = new StringBuilder(256); X509CertRequest certReq = new X509CertRequest(csr); assertNotNull(certReq); certReq.parseCertRequest(errorMsg); path = Paths.get("src/test/resources/athenz.instanceid.pem"); String pem = new String(Files.readAllBytes(path)); X509Certificate cert = Crypto.loadX509Certificate(pem); assertTrue(certReq.compareDnsNames(cert)); } @Test public void testCompareDnsNamesMismatchSize() throws IOException { Path path = Paths.get("src/test/resources/athenz.instanceid.csr"); String csr = new String(Files.readAllBytes(path)); X509CertRequest certReq = new X509CertRequest(csr); assertNotNull(certReq); path = Paths.get("src/test/resources/valid_cn_x509.cert"); String pem = new String(Files.readAllBytes(path)); X509Certificate cert = Crypto.loadX509Certificate(pem); assertFalse(certReq.compareDnsNames(cert)); } @Test public void testCompareDnsNamesMismatchValues() throws IOException { Path path = Paths.get("src/test/resources/athenz.mismatch.dns.csr"); String csr = new String(Files.readAllBytes(path)); X509CertRequest certReq = new X509CertRequest(csr); assertNotNull(certReq); path = Paths.get("src/test/resources/athenz.instanceid.pem"); String pem = new String(Files.readAllBytes(path)); X509Certificate cert = Crypto.loadX509Certificate(pem); assertFalse(certReq.compareDnsNames(cert)); } @Test public void testComparePublicKeysCert() throws IOException { Path path = Paths.get("src/test/resources/valid_provider_refresh.csr"); String csr = new String(Files.readAllBytes(path)); X509CertRequest certReq = new X509CertRequest(csr); assertNotNull(certReq); path = Paths.get("src/test/resources/valid_provider_refresh.pem"); String pem = new String(Files.readAllBytes(path)); X509Certificate cert = Crypto.loadX509Certificate(pem); assertTrue(certReq.comparePublicKeys(cert)); } @Test public void testComparePublicKeysCertFailure() throws IOException { Path path =
// comparePublicKeys failure paths: Mockito mocks force a null cert public key and a null
// SubjectPublicKeyInfo on the underlying PKCS10 request; both must yield false, not throw.
Paths.get("src/test/resources/valid_provider_refresh.csr"); String csr = new String(Files.readAllBytes(path)); X509CertRequest certReq = new X509CertRequest(csr); assertNotNull(certReq); X509Certificate cert = Mockito.mock(X509Certificate.class); Mockito.when(cert.getPublicKey()).thenReturn(null); assertFalse(certReq.comparePublicKeys(cert)); } @Test public void testComparePublicKeysCertCSRFailure() throws IOException { Path path = Paths.get("src/test/resources/valid_provider_refresh.csr"); String csr = new String(Files.readAllBytes(path)); X509CertRequest certReq = new X509CertRequest(csr); assertNotNull(certReq); PKCS10CertificationRequest req = Mockito.mock(PKCS10CertificationRequest.class); Mockito.when(req.getSubjectPublicKeyInfo()).thenReturn(null); certReq.setCertReq(req); path = Paths.get("src/test/resources/valid_provider_refresh.pem"); String pem = new String(Files.readAllBytes(path)); X509Certificate cert = Crypto.loadX509Certificate(pem); assertFalse(certReq.comparePublicKeys(cert)); } @Test public void testComparePublicKeysCertMismatch() throws IOException { Path path = Paths.get("src/test/resources/athenz.mismatch.dns.csr"); String csr = new String(Files.readAllBytes(path)); X509CertRequest certReq = new X509CertRequest(csr); assertNotNull(certReq); path = Paths.get("src/test/resources/athenz.instanceid.pem"); String pem = new String(Files.readAllBytes(path)); X509Certificate cert = Crypto.loadX509Certificate(pem); assertFalse(certReq.comparePublicKeys(cert)); } @Test public void testComparePublicKeysNull() throws IOException { Path path = Paths.get("src/test/resources/athenz.instanceid.csr"); String csr = new String(Files.readAllBytes(path)); X509CertRequest certReq = new X509CertRequest(csr); assertNotNull(certReq); assertFalse(certReq.comparePublicKeys((String) null)); } @Test public void testComparePublicKeysFailure() throws IOException { Path path = Paths.get("src/test/resources/athenz.instanceid.csr"); String csr = new
// validate() tests: invalid DNS names for the wrong domain/service, an email-only CSR with no
// instance id, instance-id mismatch, and a CN mismatch whose error message is asserted.
String(Files.readAllBytes(path)); X509CertRequest certReq = new X509CertRequest(csr); assertNotNull(certReq); PKCS10CertificationRequest req = Mockito.mock(PKCS10CertificationRequest.class); Mockito.when(req.getSubjectPublicKeyInfo()).thenReturn(null); certReq.setCertReq(req); assertFalse(certReq.comparePublicKeys("publickey")); } @Test public void testValidateInvalidDnsNames() throws IOException { Path path = Paths.get("src/test/resources/athenz.instanceid.csr"); String csr = new String(Files.readAllBytes(path)); X509CertRequest certReq = new X509CertRequest(csr); assertNotNull(certReq); StringBuilder errorMsg = new StringBuilder(256); assertFalse(certReq.validate(null, "sys", "production", null, null, errorMsg)); } @Test public void testValidateInvalidInstanceId() throws IOException { Path path = Paths.get("src/test/resources/valid_email.csr"); String csr = new String(Files.readAllBytes(path)); X509CertRequest certReq = new X509CertRequest(csr); assertNotNull(certReq); StringBuilder errorMsg = new StringBuilder(256); assertFalse(certReq.validate(null, "athenz", "production", null, null, errorMsg)); } @Test public void testValidateInstanceIdMismatch() throws IOException { Path path = Paths.get("src/test/resources/athenz.instanceid.csr"); String csr = new String(Files.readAllBytes(path)); X509CertRequest certReq = new X509CertRequest(csr); assertNotNull(certReq); StringBuilder errorMsg = new StringBuilder(256); assertFalse(certReq.validate(null, "athenz", "production", "1002", null, errorMsg)); } @Test public void testValidateCnMismatch() throws IOException { Path path = Paths.get("src/test/resources/athenz.mismatch.cn.csr"); String csr = new String(Files.readAllBytes(path)); X509CertRequest certReq = new X509CertRequest(csr); assertNotNull(certReq); StringBuilder errorMsg = new StringBuilder(256); assertFalse(certReq.validate(null, "athenz", "production", "1001", null, errorMsg)); assertTrue(errorMsg.toString().contains("Unable to validate CSR common name")); }
// DNS-suffix mismatch and provider-authorization tests: a mocked Authorizer gates the
// "launch" action on "sys.auth:dns.ostk.athenz.cloud"; validate() also passes when no
// authorizer is supplied at all.
@Test public void testValidateDnsSuffixMismatch() throws IOException { Path path = Paths.get("src/test/resources/athenz.mismatch.dns.csr"); String csr = new String(Files.readAllBytes(path)); X509CertRequest certReq = new X509CertRequest(csr); assertNotNull(certReq); StringBuilder errorMsg = new StringBuilder(256); assertFalse(certReq.validate(null, "athenz", "production", "1001", null, errorMsg)); assertTrue(errorMsg.toString().contains("does not end with expected suffix")); } @Test public void testValidateDnsSuffixNotAuthorized() throws IOException { Path path = Paths.get("src/test/resources/athenz.instanceid.csr"); String csr = new String(Files.readAllBytes(path)); X509CertRequest certReq = new X509CertRequest(csr); assertNotNull(certReq); Authorizer authorizer = Mockito.mock(Authorizer.class); Principal provider = Mockito.mock(Principal.class); Mockito.when(authorizer.access("launch", "sys.auth:dns.ostk.athenz.cloud", provider, null)) .thenReturn(false); StringBuilder errorMsg = new StringBuilder(256); assertFalse(certReq.validate(provider, "athenz", "production", "1001", authorizer, errorMsg)); assertTrue(errorMsg.toString().contains("not authorized to handle")); } @Test public void testValidate() throws IOException { Path path = Paths.get("src/test/resources/athenz.instanceid.csr"); String csr = new String(Files.readAllBytes(path)); X509CertRequest certReq = new X509CertRequest(csr); assertNotNull(certReq); Authorizer authorizer = Mockito.mock(Authorizer.class); Principal provider = Mockito.mock(Principal.class); Mockito.when(authorizer.access("launch", "sys.auth:dns.ostk.athenz.cloud", provider, null)) .thenReturn(true); StringBuilder errorMsg = new StringBuilder(256); assertTrue(certReq.validate(provider, "athenz", "production", "1001", authorizer, errorMsg)); assertTrue(certReq.validate(provider, "athenz", "production", "1001", null, errorMsg)); } @Test public void testComparePublicKeysString() throws IOException { Path path =
// String-based public-key comparison against an inline PEM-encoded key: exact match,
// corrupted-body mismatch, and (below) whitespace/newline-variant inputs.
Paths.get("src/test/resources/valid.csr"); String csr = new String(Files.readAllBytes(path)); X509CertRequest certReq = new X509CertRequest(csr); final String ztsPublicKey = "-----BEGIN PUBLIC KEY-----\n" + "MFwwDQYJKoZIhvcNAQEBBQADSwAwSAJBAKrvfvBgXWqWAorw5hYJu3dpOJe0gp3n\n" + "TgiiPGT7+jzm6BRcssOBTPFIMkePT2a8Tq+FYSmFnHfbQjwmYw2uMK8CAwEAAQ==\n" + "-----END PUBLIC KEY-----"; assertTrue(certReq.comparePublicKeys(ztsPublicKey)); } @Test public void testValidateCertReqPublicKey() throws IOException { Path path = Paths.get("src/test/resources/valid.csr"); String csr = new String(Files.readAllBytes(path)); X509CertRequest certReq = new X509CertRequest(csr); assertNotNull(certReq); final String ztsPublicKey = "-----BEGIN PUBLIC KEY-----\n" + "MFwwDQYJKoZIhvcNAQEBBQADSwAwSAJBAKrvfvBgXWqWAorw5hYJu3dpOJe0gp3n\n" + "TgiiPGT7+jzm6BRcssOBTPFIMkePT2a8Tq+FYSmFnHfbQjwmYw2uMK8CAwEAAQ==\n" + "-----END PUBLIC KEY-----"; assertTrue(certReq.comparePublicKeys(ztsPublicKey)); } @Test public void testValidateCertReqPublicKeyMismatch() throws IOException { Path path = Paths.get("src/test/resources/valid.csr"); String csr = new String(Files.readAllBytes(path)); X509CertRequest certReq = new X509CertRequest(csr); assertNotNull(certReq); final String ztsPublicKey = "-----BEGIN PUBLIC KEY-----\n" + "MFwwDQYJKoZIhvcNasdfsdfsadfwSAJBAKrvfvBgXWqWAorw5hYJu3dpOJe0gp3n\n" + "TgiiPGT7+jzm6BRcssOBTPFIMkePT2a8Tq+FYSmFnHfbQjwmYw2uMK8CAwEAAQ==\n" + "-----END PUBLIC KEY-----"; assertFalse(certReq.comparePublicKeys(ztsPublicKey)); } @Test public void testValidateCertReqPublicKeyWhitespace() throws IOException { Path path = Paths.get("src/test/resources/valid.csr"); String csr = new String(Files.readAllBytes(path)); X509CertRequest certReq = new X509CertRequest(csr); assertNotNull(certReq); final String ztsPublicKey1 = " -----BEGIN PUBLIC KEY-----\n" + "MFwwDQYJKoZIhvcNA QEBBQADSwAwSAJBAKrvfvBgXWqW Aorw5hYJu3dpOJe0gp3n\n\r\r\n" + "TgiiPGT7+jzm6BRcssOBTPFIMkePT2a8Tq+FYSmFnHfbQjwmYw2uMK8CAwEAAQ==\n\r" +
// Embedded spaces, \r\n variants, and a fully newline-free PEM must all still compare equal —
// comparePublicKeys presumably normalizes whitespace before comparing; confirm in the impl.
"-----END PUBLIC KEY----- \n"; final String ztsPublicKey2 = "-----BEGIN PUBLIC KEY-----" + "MFwwDQYJKoZIhvcNAQEBBQADSwAwSAJBAKrvfvBgXWqWAorw5hYJu3dpOJe0gp3n" + "TgiiPGT7+jzm6BRcssOBTPFIMkePT2a8Tq+FYSmFnHfbQjwmYw2uMK8CAwEAAQ==" + "-----END PUBLIC KEY-----"; assertTrue(certReq.comparePublicKeys(ztsPublicKey1)); assertTrue(certReq.comparePublicKeys(ztsPublicKey2)); } }
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: tensorflow/core/protobuf/rewriter_config.proto
//
// NOTE(review): this file is protoc output. Do not hand-edit code here —
// any change (including these comments) is lost when the .proto is
// regenerated. Fixes belong in rewriter_config.proto or the codegen.

package org.tensorflow.proto.framework;

/**
 * Protobuf type {@code tensorflow.ScopedAllocatorOptions}
 *
 * Immutable message with a single repeated-string field {@code enable_op}
 * (field number 1). Mutation goes through {@link Builder}.
 */
public final class ScopedAllocatorOptions extends
    com.google.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:tensorflow.ScopedAllocatorOptions)
    ScopedAllocatorOptionsOrBuilder {
private static final long serialVersionUID = 0L;
  // Use ScopedAllocatorOptions.newBuilder() to construct.
  private ScopedAllocatorOptions(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private ScopedAllocatorOptions() {
    enableOp_ = com.google.protobuf.LazyStringArrayList.EMPTY;
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    return new ScopedAllocatorOptions();
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet
  getUnknownFields() {
    return this.unknownFields;
  }
  // Wire-format parsing constructor. Bit 0 of mutable_bitField0_ tracks
  // whether enableOp_ has been made mutable for this parse.
  private ScopedAllocatorOptions(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    int mutable_bitField0_ = 0;
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            // Tag 0 marks end of input.
            done = true;
            break;
          case 10: {
            // Tag 10 = field 1, wire type 2 (length-delimited string).
            java.lang.String s = input.readStringRequireUtf8();
            if (!((mutable_bitField0_ & 0x00000001) != 0)) {
              enableOp_ = new com.google.protobuf.LazyStringArrayList();
              mutable_bitField0_ |= 0x00000001;
            }
            enableOp_.add(s);
            break;
          }
          default: {
            // Unknown fields are preserved, not dropped.
            if (!parseUnknownField(
                input, unknownFields, extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(
          e).setUnfinishedMessage(this);
    } finally {
      // Runs even on parse failure so the partial message is consistent.
      if (((mutable_bitField0_ & 0x00000001) != 0)) {
        enableOp_ = enableOp_.getUnmodifiableView();
      }
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.tensorflow.proto.framework.RewriterConfigProtos.internal_static_tensorflow_ScopedAllocatorOptions_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.tensorflow.proto.framework.RewriterConfigProtos.internal_static_tensorflow_ScopedAllocatorOptions_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.tensorflow.proto.framework.ScopedAllocatorOptions.class, org.tensorflow.proto.framework.ScopedAllocatorOptions.Builder.class);
  }

  public static final int ENABLE_OP_FIELD_NUMBER = 1;
  private com.google.protobuf.LazyStringList enableOp_;
  /**
   * <pre>
   * If present, only perform optimization for these ops.
   * </pre>
   *
   * <code>repeated string enable_op = 1;</code>
   */
  public com.google.protobuf.ProtocolStringList
      getEnableOpList() {
    return enableOp_;
  }
  /**
   * <pre>
   * If present, only perform optimization for these ops.
   * </pre>
   *
   * <code>repeated string enable_op = 1;</code>
   */
  public int getEnableOpCount() {
    return enableOp_.size();
  }
  /**
   * <pre>
   * If present, only perform optimization for these ops.
   * </pre>
   *
   * <code>repeated string enable_op = 1;</code>
   */
  public java.lang.String getEnableOp(int index) {
    return enableOp_.get(index);
  }
  /**
   * <pre>
   * If present, only perform optimization for these ops.
   * </pre>
   *
   * <code>repeated string enable_op = 1;</code>
   */
  public com.google.protobuf.ByteString
      getEnableOpBytes(int index) {
    return enableOp_.getByteString(index);
  }

  // -1 = not computed yet; this message has no required fields so the
  // answer is always true once memoized.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    for (int i = 0; i < enableOp_.size(); i++) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, enableOp_.getRaw(i));
    }
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    {
      int dataSize = 0;
      for (int i = 0; i < enableOp_.size(); i++) {
        dataSize += computeStringSizeNoTag(enableOp_.getRaw(i));
      }
      size += dataSize;
      // One 1-byte tag per repeated element.
      size += 1 * getEnableOpList().size();
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof org.tensorflow.proto.framework.ScopedAllocatorOptions)) {
      return super.equals(obj);
    }
    org.tensorflow.proto.framework.ScopedAllocatorOptions other = (org.tensorflow.proto.framework.ScopedAllocatorOptions) obj;

    if (!getEnableOpList()
        .equals(other.getEnableOpList())) return false;
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getEnableOpCount() > 0) {
      hash = (37 * hash) + ENABLE_OP_FIELD_NUMBER;
      hash = (53 * hash) + getEnableOpList().hashCode();
    }
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static org.tensorflow.proto.framework.ScopedAllocatorOptions parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.tensorflow.proto.framework.ScopedAllocatorOptions parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.tensorflow.proto.framework.ScopedAllocatorOptions parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.tensorflow.proto.framework.ScopedAllocatorOptions parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.tensorflow.proto.framework.ScopedAllocatorOptions parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.tensorflow.proto.framework.ScopedAllocatorOptions parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.tensorflow.proto.framework.ScopedAllocatorOptions parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static org.tensorflow.proto.framework.ScopedAllocatorOptions parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static org.tensorflow.proto.framework.ScopedAllocatorOptions parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static org.tensorflow.proto.framework.ScopedAllocatorOptions parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static org.tensorflow.proto.framework.ScopedAllocatorOptions parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static org.tensorflow.proto.framework.ScopedAllocatorOptions parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(org.tensorflow.proto.framework.ScopedAllocatorOptions prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * Protobuf type {@code tensorflow.ScopedAllocatorOptions}
   *
   * Mutable companion of the message; bit 0 of {@code bitField0_} marks
   * whether {@code enableOp_} is a private mutable copy.
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:tensorflow.ScopedAllocatorOptions)
      org.tensorflow.proto.framework.ScopedAllocatorOptionsOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.tensorflow.proto.framework.RewriterConfigProtos.internal_static_tensorflow_ScopedAllocatorOptions_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.tensorflow.proto.framework.RewriterConfigProtos.internal_static_tensorflow_ScopedAllocatorOptions_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.tensorflow.proto.framework.ScopedAllocatorOptions.class, org.tensorflow.proto.framework.ScopedAllocatorOptions.Builder.class);
    }

    // Construct using org.tensorflow.proto.framework.ScopedAllocatorOptions.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3
              .alwaysUseFieldBuilders) {
      }
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      enableOp_ = com.google.protobuf.LazyStringArrayList.EMPTY;
      bitField0_ = (bitField0_ & ~0x00000001);
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return org.tensorflow.proto.framework.RewriterConfigProtos.internal_static_tensorflow_ScopedAllocatorOptions_descriptor;
    }

    @java.lang.Override
    public org.tensorflow.proto.framework.ScopedAllocatorOptions getDefaultInstanceForType() {
      return org.tensorflow.proto.framework.ScopedAllocatorOptions.getDefaultInstance();
    }

    @java.lang.Override
    public org.tensorflow.proto.framework.ScopedAllocatorOptions build() {
      org.tensorflow.proto.framework.ScopedAllocatorOptions result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public org.tensorflow.proto.framework.ScopedAllocatorOptions buildPartial() {
      org.tensorflow.proto.framework.ScopedAllocatorOptions result = new org.tensorflow.proto.framework.ScopedAllocatorOptions(this);
      int from_bitField0_ = bitField0_;
      // Freeze the list view; the builder's copy is cleared of ownership.
      if (((bitField0_ & 0x00000001) != 0)) {
        enableOp_ = enableOp_.getUnmodifiableView();
        bitField0_ = (bitField0_ & ~0x00000001);
      }
      result.enableOp_ = enableOp_;
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof org.tensorflow.proto.framework.ScopedAllocatorOptions) {
        return mergeFrom((org.tensorflow.proto.framework.ScopedAllocatorOptions)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(org.tensorflow.proto.framework.ScopedAllocatorOptions other) {
      if (other == org.tensorflow.proto.framework.ScopedAllocatorOptions.getDefaultInstance()) return this;
      if (!other.enableOp_.isEmpty()) {
        // If we hold nothing yet, share other's immutable list; otherwise append.
        if (enableOp_.isEmpty()) {
          enableOp_ = other.enableOp_;
          bitField0_ = (bitField0_ & ~0x00000001);
        } else {
          ensureEnableOpIsMutable();
          enableOp_.addAll(other.enableOp_);
        }
        onChanged();
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      org.tensorflow.proto.framework.ScopedAllocatorOptions parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (org.tensorflow.proto.framework.ScopedAllocatorOptions) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        // Merge whatever was successfully parsed even on failure.
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    private int bitField0_;

    private com.google.protobuf.LazyStringList enableOp_ = com.google.protobuf.LazyStringArrayList.EMPTY;
    private void ensureEnableOpIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        enableOp_ = new com.google.protobuf.LazyStringArrayList(enableOp_);
        bitField0_ |= 0x00000001;
       }
    }
    /**
     * <pre>
     * If present, only perform optimization for these ops.
     * </pre>
     *
     * <code>repeated string enable_op = 1;</code>
     */
    public com.google.protobuf.ProtocolStringList
        getEnableOpList() {
      return enableOp_.getUnmodifiableView();
    }
    /**
     * <pre>
     * If present, only perform optimization for these ops.
     * </pre>
     *
     * <code>repeated string enable_op = 1;</code>
     */
    public int getEnableOpCount() {
      return enableOp_.size();
    }
    /**
     * <pre>
     * If present, only perform optimization for these ops.
     * </pre>
     *
     * <code>repeated string enable_op = 1;</code>
     */
    public java.lang.String getEnableOp(int index) {
      return enableOp_.get(index);
    }
    /**
     * <pre>
     * If present, only perform optimization for these ops.
     * </pre>
     *
     * <code>repeated string enable_op = 1;</code>
     */
    public com.google.protobuf.ByteString
        getEnableOpBytes(int index) {
      return enableOp_.getByteString(index);
    }
    /**
     * <pre>
     * If present, only perform optimization for these ops.
     * </pre>
     *
     * <code>repeated string enable_op = 1;</code>
     */
    public Builder setEnableOp(
        int index, java.lang.String value) {
      if (value == null) {
    throw new NullPointerException();
  }
  ensureEnableOpIsMutable();
      enableOp_.set(index, value);
      onChanged();
      return this;
    }
    /**
     * <pre>
     * If present, only perform optimization for these ops.
     * </pre>
     *
     * <code>repeated string enable_op = 1;</code>
     */
    public Builder addEnableOp(
        java.lang.String value) {
      if (value == null) {
    throw new NullPointerException();
  }
  ensureEnableOpIsMutable();
      enableOp_.add(value);
      onChanged();
      return this;
    }
    /**
     * <pre>
     * If present, only perform optimization for these ops.
     * </pre>
     *
     * <code>repeated string enable_op = 1;</code>
     */
    public Builder addAllEnableOp(
        java.lang.Iterable<java.lang.String> values) {
      ensureEnableOpIsMutable();
      com.google.protobuf.AbstractMessageLite.Builder.addAll(
          values, enableOp_);
      onChanged();
      return this;
    }
    /**
     * <pre>
     * If present, only perform optimization for these ops.
     * </pre>
     *
     * <code>repeated string enable_op = 1;</code>
     */
    public Builder clearEnableOp() {
      enableOp_ = com.google.protobuf.LazyStringArrayList.EMPTY;
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     * <pre>
     * If present, only perform optimization for these ops.
     * </pre>
     *
     * <code>repeated string enable_op = 1;</code>
     */
    public Builder addEnableOpBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);
      ensureEnableOpIsMutable();
      enableOp_.add(value);
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }


    // @@protoc_insertion_point(builder_scope:tensorflow.ScopedAllocatorOptions)
  }

  // @@protoc_insertion_point(class_scope:tensorflow.ScopedAllocatorOptions)
  private static final org.tensorflow.proto.framework.ScopedAllocatorOptions DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new org.tensorflow.proto.framework.ScopedAllocatorOptions();
  }

  public static org.tensorflow.proto.framework.ScopedAllocatorOptions getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<ScopedAllocatorOptions>
      PARSER = new com.google.protobuf.AbstractParser<ScopedAllocatorOptions>() {
    @java.lang.Override
    public ScopedAllocatorOptions parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return new ScopedAllocatorOptions(input, extensionRegistry);
    }
  };

  public static com.google.protobuf.Parser<ScopedAllocatorOptions> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ScopedAllocatorOptions> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public org.tensorflow.proto.framework.ScopedAllocatorOptions getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

}
/* * Copyright (C) 1999-2008 Jive Software. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jivesoftware.openfire.fastpath.util; import gnu.inet.encoding.Stringprep; import gnu.inet.encoding.StringprepException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.StringTokenizer; import org.jivesoftware.openfire.fastpath.dataforms.FormManager; import org.jivesoftware.openfire.fastpath.settings.chat.ChatSettingsCreator; import org.jivesoftware.openfire.user.UserManager; import org.jivesoftware.openfire.user.UserNotFoundException; import org.jivesoftware.util.ClassUtils; import org.jivesoftware.util.JiveGlobals; import org.jivesoftware.xmpp.workgroup.Agent; import org.jivesoftware.xmpp.workgroup.AgentManager; import org.jivesoftware.xmpp.workgroup.RequestQueue; import org.jivesoftware.xmpp.workgroup.UserAlreadyExistsException; import org.jivesoftware.xmpp.workgroup.Workgroup; import org.jivesoftware.xmpp.workgroup.WorkgroupManager; import org.jivesoftware.xmpp.workgroup.dispatcher.AgentSelector; import org.jivesoftware.xmpp.workgroup.spi.dispatcher.BasicAgentSelector; import org.jivesoftware.xmpp.workgroup.utils.ModelUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.xmpp.component.ComponentManagerFactory; import org.xmpp.packet.JID; /** * A Utility class to allow for creation and modification of workgroups 
and queues.
 *
 * @author Derek DeMoro
 */
public class WorkgroupUtils {

    private static final Logger Log = LoggerFactory.getLogger(WorkgroupUtils.class);

    /**
     * Updates an existing workgroup's display name, description, chat limits and
     * timeouts, returning an HTML status fragment (see {@link #getUpdateMessage}).
     *
     * @param workgroupName the JID string of the workgroup to update.
     * @param displayName new display name.
     * @param description new description.
     * @param maxSize maximum concurrent chats; must be &gt;= minSize.
     * @param minSize minimum concurrent chats.
     * @param requestTimeout request timeout.
     * @param offerTimeout offer timeout.
     * @return an HTML success or error fragment describing the outcome.
     */
    public static String updateWorkgroup(String workgroupName, String displayName,
            String description, int maxSize, int minSize, long requestTimeout, long offerTimeout) {
        final WorkgroupManager workgroupManager = WorkgroupManager.getInstance();
        Workgroup workgroup;
        try {
            workgroup = workgroupManager.getWorkgroup(new JID(workgroupName));
        }
        catch (UserNotFoundException e) {
            return getUpdateMessage(false, "The JID specified is invalid.");
        }
        // NOTE(review): display name and description are set before the size
        // validation below, so they persist even when the size check fails.
        workgroup.setDisplayName(displayName);
        workgroup.setDescription(description);
        if (maxSize < minSize) {
            return getUpdateMessage(false, "Max size must be greater or equal to min size.");
        }
        workgroup.setMaxChats(maxSize);
        workgroup.setMinChats(minSize);
        workgroup.setRequestTimeout(requestTimeout);
        workgroup.setOfferTimeout(offerTimeout);
        return getUpdateMessage(true, "Workgroup has been updated");
    }

    /**
     * Flips a workgroup between READY and CLOSED. Unknown workgroup JIDs are
     * silently ignored.
     *
     * @param workgroupName the JID string of the workgroup to toggle.
     */
    public static void toggleStatus(String workgroupName) {
        final WorkgroupManager workgroupManager = WorkgroupManager.getInstance();
        Workgroup workgroup;
        try {
            workgroup = workgroupManager.getWorkgroup(new JID(workgroupName));
        }
        catch (UserNotFoundException e) {
            return;
        }
        Workgroup.Status status = workgroup.getStatus();
        if (status == Workgroup.Status.READY) {
            workgroup.setStatus(Workgroup.Status.CLOSED);
        }
        else {
            workgroup.setStatus(Workgroup.Status.READY);
        }
    }

    /**
     * Builds the HTML fragment shown in the admin console after an update.
     * NOTE(review): {@code message} is interpolated into HTML without escaping;
     * callers pass only fixed literals above — keep it that way.
     *
     * @param successfull true for the success style, false for the error style.
     * @param message the text to embed in the fragment.
     * @return the HTML fragment.
     */
    public static String getUpdateMessage(boolean successfull, String message) {
        String returnString;
        if (successfull) {
            returnString = " <div class=\"jive-success\">\n" +
                    " <table cellpadding=\"0\" cellspacing=\"0\" border=\"0\">\n" +
                    " <tbody>\n" +
                    " <tr><td class=\"jive-icon\"><img src=\"images/success-16x16.gif\" width=\"16\" height=\"16\"\n" +
                    " border=\"0\"></td>\n" +
                    " <td class=\"jive-icon-label\">\n" +
                    " " + message + "\n" +
                    " </td></tr>\n" +
                    " </tbody>\n" +
                    " </table>\n" +
                    " </div><br>";
        }
        else {
            returnString = " <p class=\"jive-error-text\">\n" +
                    " " + message + "\n" +
                    " </p>";
        }
        return returnString;
    }

    /**
     * Returns new instances of every available {@link AgentSelector}: the
     * built-in ones plus any classes listed under the
     * "agentSelector.classes" Jive properties. Instantiation failures are
     * logged and skipped; duplicates (by class) are filtered via the labeled
     * break below.
     *
     * @return a fresh list of AgentSelector instances.
     */
    public synchronized static List<AgentSelector> getAvailableAgentSelectors() {
        List<AgentSelector> answer = new ArrayList<AgentSelector>();
        // First, add in built-in list of algorithms.
        for (Class newClass : getBuiltInAgentSelectorClasses()) {
            try {
                AgentSelector algorithm = (AgentSelector)newClass.newInstance();
                answer.add(algorithm);
            }
            catch (Exception e) {
                Log.error(e.getMessage(), e);
            }
        }
        // Now get custom algorithms.
        List<String> classNames = JiveGlobals.getProperties("agentSelector.classes");
        for (String className : classNames) {
            // Labeled block: "break install_algorithm" skips adding a selector
            // whose class is already present in the answer list.
            install_algorithm:
            try {
                Class algorithmClass = loadClass(className);
                // Make sure that the intercepter isn't already installed.
                for (AgentSelector agentSelector : answer) {
                    if (algorithmClass.equals(agentSelector.getClass())) {
                        break install_algorithm;
                    }
                }
                AgentSelector algorithm = (AgentSelector)algorithmClass.newInstance();
                answer.add(algorithm);
            }
            catch (Exception e) {
                Log.error(e.getMessage(), e);
            }
        }
        return answer;
    }

    // The selectors shipped with the product; currently only the basic one.
    private static Collection<Class> getBuiltInAgentSelectorClasses() {
        return Arrays.asList((Class)BasicAgentSelector.class);
    }

    // Tries the Jive class-loading helper first, then this class's own loader.
    private static Class loadClass(String className) throws ClassNotFoundException {
        try {
            return ClassUtils.forName(className);
        }
        catch (ClassNotFoundException e) {
            return WorkgroupUtils.class.getClassLoader().loadClass(className);
        }
    }

    /**
     * Registers a new AgentSelector class and persists the full class list to
     * the indexed "agentSelector.classes.N" properties (after deleting the old
     * property tree). A class already registered is a silent no-op.
     * NOTE(review): the catch blocks wrap only e.getMessage(), dropping the
     * original cause from the thrown IllegalArgumentException.
     *
     * @param newClass a class implementing AgentSelector with a no-arg constructor.
     * @throws IllegalArgumentException if the class cannot be instantiated or
     *         is not an AgentSelector.
     */
    public synchronized static void addAgentSelectorClass(Class newClass) throws IllegalArgumentException {
        try {
            AgentSelector newAlgorithm = (AgentSelector)newClass.newInstance();
            // Make sure the interceptor isn't already in the list.
            List<AgentSelector> availableAgentSelectors = getAvailableAgentSelectors();
            for (AgentSelector algorithm : availableAgentSelectors) {
                if (newAlgorithm.getClass().equals(algorithm.getClass())) {
                    return;
                }
            }
            // Add in the new algorithm
            availableAgentSelectors.add(newAlgorithm);
            // Write out new class names.
            JiveGlobals.deleteProperty("agentSelector.classes");
            for (int i = 0; i < availableAgentSelectors.size(); i++) {
                String cName = availableAgentSelectors.get(i).getClass().getName();
                JiveGlobals.setProperty("agentSelector.classes." + i, cName);
            }
        }
        catch (IllegalAccessException e) {
            throw new IllegalArgumentException(e.getMessage());
        }
        catch (InstantiationException e2) {
            throw new IllegalArgumentException(e2.getMessage());
        }
        catch (ClassCastException e5) {
            throw new IllegalArgumentException("Class is not a AgentSelector");
        }
    }

    /**
     * Create a new Workgroup.
     *
     * @param workgroupName the name of the workgroup.
     * @param description the description of the workgroup.
     * @param agents the agents, in a comma delimited string.
     * @return a map of errors (if any)
     */
    public static Map<String, String> createWorkgroup(String workgroupName, String description,
            String agents) {
        Map<String, String> errors = new HashMap<String, String>();
        // Get a workgroup manager
        WorkgroupManager wgManager = WorkgroupManager.getInstance();
        if (wgManager == null) {
            errors.put("general_error", "The server is down");
            return errors;
        }
        String defaultQueueName = "Default Queue";
        // Validate: lowercase then apply XMPP nodeprep to the workgroup name.
        if (workgroupName == null) {
            errors.put("wgName", "");
        }
        else {
            try {
                workgroupName = workgroupName.trim().toLowerCase();
                workgroupName = Stringprep.nodeprep(workgroupName);
            }
            catch (StringprepException se) {
                errors.put("wgName", "");
            }
        }
        // do a create if there were no errors
        RequestQueue queue = null;
        if (errors.size() == 0) {
            try {
                // Create new workgroup
                Workgroup workgroup = wgManager.createWorkgroup(workgroupName);
                workgroup.setDescription(description);
                // Create a default workgroup queue
                queue = workgroup.createRequestQueue(defaultQueueName);
                //workgroup.setMaxChats(maxChats);
                //workgroup.setMinChats(minChats);
                // Make the workgroup ready by default:
                workgroup.setStatus(Workgroup.Status.READY);
                // Create default messages and images for the new workgroup
                ChatSettingsCreator.getInstance().createDefaultSettings(workgroup.getJID());
                // Add generic web form
                FormManager formManager = FormManager.getInstance();
                formManager.createGenericForm(workgroup);
            }
            catch (UserAlreadyExistsException uaee) {
                errors.put("exists", "");
            }
            catch (Exception e) {
                Log.error(e.getMessage(), e);
                errors.put("general", "");
            }
        }
        // NOTE(review): if creation failed above, queue is still null here and
        // addAgents(null, agents) will NPE on queue.addMember — the NPE is
        // caught and logged inside addAgents' per-agent try/catch. Confirm
        // this is the intended behavior.
        if (ModelUtil.hasLength(agents)) {
            addAgents(queue, agents);
        }
        return errors;
    }

    /**
     * Adds agents to a request queue.
     *
     * @param queue the <code>RequestQueue</code> to add agents to.
     * @param agents a comma-delimited list of agents.
     */
    public static void addAgents(RequestQueue queue, String agents) {
        WorkgroupManager workgroupManager = WorkgroupManager.getInstance();
        AgentManager agentManager = workgroupManager.getAgentManager();
        // loop thru all params
        StringTokenizer tokenizer = new StringTokenizer(agents, ", \t\n\r\f");
        while (tokenizer.hasMoreTokens()) {
            String usernameToken = tokenizer.nextToken();
            // Escape '@' so "user@host" style names become valid JID nodes.
            if (usernameToken.indexOf('@') != -1) {
                usernameToken = JID.escapeNode(usernameToken);
            }
            try {
                // See if they are a user in the system.
                UserManager.getInstance().getUser(usernameToken);
                usernameToken += ("@" + ComponentManagerFactory.getComponentManager().getServerName());
                JID address = new JID(usernameToken.trim());
                Agent agent;
                // Reuse an existing agent record, otherwise create one.
                if (agentManager.hasAgent(address)) {
                    agent = agentManager.getAgent(address);
                }
                else {
                    agent = agentManager.createAgent(address);
                }
                queue.addMember(agent);
            }
            catch (Exception e) {
                // Best-effort: a bad/unknown username skips that agent only.
                Log.error(e.getMessage(), e);
            }
        }
    }
}
package com.agilecrm.stubs;

import java.util.ArrayList;
import java.util.List;

import javax.xml.bind.annotation.XmlRootElement;

import org.codehaus.jackson.annotate.JsonProperty;
import org.json.JSONObject;

import com.agilecrm.stubs.ContactField.FieldName;
import com.agilecrm.stubs.ContactField.FieldType;

/**
 * JSON/XML stub for an AgileCRM contact (person or company). Field names are
 * bound 1:1 to the wire format via {@code @JsonProperty}, which is why the
 * accessors keep their snake_case names — do not rename them.
 */
@XmlRootElement
public class Contact
{
    @JsonProperty("id")
    private Integer id;

    @JsonProperty("count")
    private Integer count;

    @JsonProperty("owner_key")
    private String owner_key;

    @JsonProperty("widget_properties")
    private String widget_properties = null;

    @JsonProperty("type")
    private Type type;

    @JsonProperty("tags")
    private List<String> tags;

    @JsonProperty("lead_score")
    private Integer lead_score;

    @JsonProperty("star_value")
    private Short star_value;

    @JsonProperty("properties")
    private List<ContactField> properties;

    @JsonProperty("created_time")
    private String created_time;

    @JsonProperty("updated_time")
    private String updated_time;

    @JsonProperty("tags_with_time_json")
    private JSONObject tags_with_time_json;

    @JsonProperty("domainUser")
    private JSONObject domainUser;

    /** Kind of contact: an individual or an organization. */
    public static enum Type
    {
	PERSON, COMPANY
    };

    public Integer getId()
    {
	return id;
    }

    public void setId(Integer id)
    {
	this.id = id;
    }

    public Type getType()
    {
	return type;
    }

    public void setType(Type type)
    {
	this.type = type;
    }

    public List<String> getTags()
    {
	return tags;
    }

    public void setTags(List<String> tags)
    {
	this.tags = tags;
    }

    public Integer getLead_score()
    {
	return lead_score;
    }

    public void setLead_score(Integer lead_score)
    {
	this.lead_score = lead_score;
    }

    public Short getStar_value()
    {
	return star_value;
    }

    public void setStar_value(Short star_value)
    {
	this.star_value = star_value;
    }

    public List<ContactField> getProperties()
    {
	return properties;
    }

    public void setProperties(List<ContactField> properties)
    {
	this.properties = properties;
    }

    public String getCreated_time()
    {
	return created_time;
    }

    public void setCreated_time(String created_time)
    {
	this.created_time = created_time;
    }

    public String getUpdated_time()
    {
	return updated_time;
    }

    public void setUpdated_time(String updated_time)
    {
	this.updated_time = updated_time;
    }

    public JSONObject getTags_with_time_json()
    {
	return tags_with_time_json;
    }

    public void setTags_with_time_json(JSONObject tags_with_time_json)
    {
	this.tags_with_time_json = tags_with_time_json;
    }

    public JSONObject getDomainUser()
    {
	return domainUser;
    }

    public void setDomainUser(JSONObject domainUser)
    {
	this.domainUser = domainUser;
    }

    public Integer getCount()
    {
	return count;
    }

    public void setCount(Integer count)
    {
	this.count = count;
    }

    public String getOwner_key()
    {
	return owner_key;
    }

    public void setOwner_key(String owner_key)
    {
	this.owner_key = owner_key;
    }

    public String getWidget_properties()
    {
	return widget_properties;
    }

    public void setWidget_properties(String widget_properties)
    {
	this.widget_properties = widget_properties;
    }

    /**
     * Sets a SYSTEM property, updating the existing entry in place
     * (case-insensitive name match) or appending a new one if absent.
     *
     * @param name the well-known system field to set.
     * @param value the field's new value.
     */
    public void setContactField(FieldName name, String value)
    {
	if (properties == null)
	    properties = new ArrayList<ContactField>();

	for (ContactField property : properties)
	{
	    if (name.getFieldName().equalsIgnoreCase(property.getName()))
	    {
		property.setValue(value);
		return;
	    }
	}

	ContactField contactField = new ContactField();
	contactField.setType(FieldType.SYSTEM);
	contactField.setName(name.getFieldName());
	contactField.setValue(value);
	properties.add(contactField);
    }

    /**
     * Appends a CUSTOM property. Unlike {@link #setContactField}, this always
     * adds a new entry and never replaces an existing field of the same name,
     * so repeated calls accumulate duplicates — presumably intentional for
     * multi-valued custom fields; verify against the API before changing.
     *
     * @param fieldName the custom field's name.
     * @param fieldValue the custom field's value.
     */
    public void setCustomField(String fieldName, String fieldValue)
    {
	if (properties == null)
	    properties = new ArrayList<ContactField>();

	ContactField contactField = new ContactField();
	contactField.setName(fieldName);
	contactField.setValue(fieldValue);
	contactField.setType(FieldType.CUSTOM);
	properties.add(contactField);
    }

    /** Debug representation of the main identifying fields. */
    @Override
    public String toString()
    {
	return id + " " + type + " " + tags + " " + properties + " " + created_time + " "
		+ updated_time;
    }
}
/* * Copyright 2005-2014 The Kuali Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ecl2.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kuali.rice.test.runners; import org.apache.commons.beanutils.MethodUtils; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import org.junit.ClassRule; import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; import org.junit.internal.AssumptionViolatedException; import org.junit.internal.runners.model.EachTestNotifier; import org.junit.internal.runners.model.ReflectiveCallable; import org.junit.internal.runners.statements.ExpectException; import org.junit.internal.runners.statements.Fail; import org.junit.internal.runners.statements.FailOnTimeout; import org.junit.internal.runners.statements.InvokeMethod; import org.junit.internal.runners.statements.RunAfters; import org.junit.internal.runners.statements.RunBefores; import org.junit.rules.RunRules; import org.junit.rules.TestRule; import org.junit.runner.Description; import org.junit.runner.JUnitCore; import org.junit.runner.Result; import org.junit.runner.RunWith; import org.junit.runner.Runner; import org.junit.runner.manipulation.Filter; import org.junit.runner.manipulation.Filterable; import org.junit.runner.manipulation.NoTestsRemainException; import org.junit.runner.manipulation.Sortable; import org.junit.runner.manipulation.Sorter; import org.junit.runner.notification.Failure; import org.junit.runner.notification.RunNotifier; 
import org.junit.runner.notification.StoppedByUserException; import org.junit.runners.model.FrameworkMethod; import org.junit.runners.model.InitializationError; import org.junit.runners.model.RunnerScheduler; import org.junit.runners.model.Statement; import org.junit.runners.model.TestClass; import org.kuali.rice.core.api.util.ShadowingInstrumentableClassLoader; import org.kuali.rice.test.MethodAware; import java.lang.annotation.Annotation; import java.lang.reflect.Method; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.Iterator; import java.util.List; import static org.junit.internal.runners.rules.RuleFieldValidator.*; /** * A JUnit test {@link org.junit.runner.Runner} which uses a custom classloader with a copy of the classpath and allows * for transformers to be added to the ClassLoader for load-time weaving. * * <p>Useful when writing tests that use JPA with EclipseLink since it depends upon load-time weaving.</p> * * <p>In order to use this class, you must have a {@link BootstrapTest} annotation available somewhere in the hierarchy * of your test class (usually on the same class where the {@link RunWith} annotation is specified which references this * runner class). This informs the runner about a test that it can run to execute any one-time initialization for * the test suite. Ideally, this bootstrap test will execute code which loads JPA persistence units and any associated * ClassFileTransformers for load-time weaving. This is necessary because it is common for an integration test to have * references in the test class itself to JPA entities which need to be weaved. When this occurs, if the persistence * units and ClassFileTransformers are not properly loaded before the entity classes are loaded by the classloader, then * instrumentation will (silently!) 
fail to occur.</p>
 *
 * <p>Much of the code in this class was copied from the JUnit ParentRunner, BlockJUnit4ClassRunner, and
 * TomcatInstrumentableClassLoader.</p>
 *
 * @author Kuali Rice Team (rice.collab@kuali.org)
 */
public class LoadTimeWeavableTestRunner extends Runner implements Filterable, Sortable {

    /** Package prefixes that must always be loaded by the parent loader so JUnit classes stay shared. */
    private static final String[] JUNIT_CLASSLOADER_EXCLUDES = { "org.junit.", "junit.framework." };

    private final TestClass originalTestClass;

    private TestClass fTestClass;
    private Method currentMethod;

    // static because we only need one custom loader per JVM in which the tests are running, otherwise the memory
    // usage gets crazy!
    private static ClassLoader customLoader;

    private Sorter fSorter = Sorter.NULL;
    private List<FrameworkMethod> originalFilteredChildren = null;
    private List<FrameworkMethod> filteredChildren = null;

    // Guards against recursive bootstrap execution when the bootstrap test itself uses this runner.
    private static final ThreadLocal<Boolean> runningBootstrapTest = new ThreadLocal<Boolean>() {
        @Override
        protected Boolean initialValue() {
            return Boolean.FALSE;
        }
    };

    // Default scheduler simply runs each child test inline on the current thread.
    private RunnerScheduler fScheduler = new RunnerScheduler() {
        public void schedule(Runnable childStatement) {
            childStatement.run();
        }

        public void finished() {
            // do nothing
        }
    };

    /**
     * Constructs a new {@code ParentRunner} that will run {@code @TestClass}
     */
    public LoadTimeWeavableTestRunner(Class<?> testClass) throws InitializationError {
        this.originalTestClass = new TestClass(testClass);
        if (LoadTimeWeavableTestRunner.customLoader == null) {
            LoadTimeWeavableTestRunner.customLoader =
                    new ShadowingInstrumentableClassLoader(testClass.getClassLoader(), JUNIT_CLASSLOADER_EXCLUDES);
        }
        validate();
    }

    /**
     * Reloads the given test class through the custom (instrumentable) classloader and wraps it in a
     * {@link TestClass}. The reloaded class must be a different {@code Class} instance, otherwise the
     * custom loader did not actually shadow it and load-time weaving would silently not apply.
     */
    private TestClass getCustomTestClass(Class<?> originalTestClass, ClassLoader customLoader) {
        try {
            Class<?> newTestClass = customLoader.loadClass(originalTestClass.getName());
            if (newTestClass == originalTestClass) {
                throw new IllegalStateException(newTestClass.getName()
                        + " loaded from custom class loader should have been a different instance but was the same!");
            }
            return new TestClass(newTestClass);
        } catch (ClassNotFoundException e) {
            // preserve the underlying exception as the cause so the original failure is not lost
            throw new IllegalStateException(
                    "Failed to load test class from custom classloader: " + originalTestClass.getName(), e);
        }
    }

    protected ClassLoader getCustomClassLoader() {
        return customLoader;
    }

    /**
     * Adds to {@code errors} if any method in this class is annotated with
     * {@code annotation}, but:
     * <ul>
     * <li>is not public, or
     * <li>takes parameters, or
     * <li>returns something other than void, or
     * <li>is static (given {@code isStatic is false}), or
     * <li>is not static (given {@code isStatic is true}).
     */
    protected void validatePublicVoidNoArgMethods(Class<? extends Annotation> annotation, boolean isStatic,
            List<Throwable> errors) {
        List<FrameworkMethod> methods = getOriginalTestClass().getAnnotatedMethods(annotation);
        for (FrameworkMethod eachTestMethod : methods) {
            eachTestMethod.validatePublicVoidNoArg(isStatic, errors);
        }
    }

    private void validateClassRules(List<Throwable> errors) {
        CLASS_RULE_VALIDATOR.validate(getOriginalTestClass(), errors);
        CLASS_RULE_METHOD_VALIDATOR.validate(getOriginalTestClass(), errors);
    }

    /**
     * Constructs a {@code Statement} to run all of the tests in the test class. Override to add pre-/post-processing.
     * Here is an outline of the implementation:
     * <ul>
     * <li>Call {@link #runChild(org.junit.runners.model.FrameworkMethod, org.junit.runner.notification.RunNotifier)}
     * on each object returned by {@link #getChildren()} (subject to any imposed filter and sort).</li>
     * <li>ALWAYS run all non-overridden {@code @BeforeClass} methods on this class
     * and superclasses before the previous step; if any throws an
     * Exception, stop execution and pass the exception on.
     * <li>ALWAYS run all non-overridden {@code @AfterClass} methods on this class
     * and superclasses before any of the previous steps; all AfterClass methods are
     * always executed: exceptions thrown by previous steps are combined, if
     * necessary, with exceptions from AfterClass methods into a
     * {@link org.junit.runners.model.MultipleFailureException}.
     * </ul>
     *
     * @return {@code Statement}
     */
    protected Statement classBlock(final RunNotifier notifier) {
        Statement statement = childrenInvoker(notifier);
        statement = withBeforeClasses(statement);
        statement = withAfterClasses(statement);
        statement = withClassRules(statement);
        return statement;
    }

    /**
     * Returns a {@link org.junit.runners.model.Statement}: run all non-overridden {@code @BeforeClass} methods on this class
     * and superclasses before executing {@code statement}; if any throws an
     * Exception, stop execution and pass the exception on.
     */
    protected Statement withBeforeClasses(Statement statement) {
        List<FrameworkMethod> befores = getTestClass().getAnnotatedMethods(BeforeClass.class);
        return befores.isEmpty() ? statement : new RunBefores(statement, befores, null);
    }

    /**
     * Returns a {@link org.junit.runners.model.Statement}: run all non-overridden {@code @AfterClass} methods on this class
     * and superclasses before executing {@code statement}; all AfterClass methods are
     * always executed: exceptions thrown by previous steps are combined, if
     * necessary, with exceptions from AfterClass methods into a
     * {@link org.junit.runners.model.MultipleFailureException}.
     */
    protected Statement withAfterClasses(Statement statement) {
        List<FrameworkMethod> afters = getTestClass().getAnnotatedMethods(AfterClass.class);
        return afters.isEmpty() ? statement : new RunAfters(statement, afters, null);
    }

    /**
     * Returns a {@link org.junit.runners.model.Statement}: apply all
     * static fields assignable to {@link org.junit.rules.TestRule}
     * annotated with {@link org.junit.ClassRule}.
     *
     * @param statement the base statement
     * @return a RunRules statement if any class-level {@link org.junit.Rule}s are
     *         found, or the base statement
     */
    private Statement withClassRules(Statement statement) {
        List<TestRule> classRules = classRules();
        return classRules.isEmpty() ? statement : new RunRules(statement, classRules, getDescription());
    }

    /**
     * @return the {@code ClassRule}s that can transform the block that runs
     *         each method in the tested class.
     */
    protected List<TestRule> classRules() {
        List<TestRule> result = getTestClass().getAnnotatedMethodValues(null, ClassRule.class, TestRule.class);
        result.addAll(getTestClass().getAnnotatedFieldValues(null, ClassRule.class, TestRule.class));
        return result;
    }

    /**
     * Returns a {@link org.junit.runners.model.Statement}: Call
     * {@link #runChild(org.junit.runners.model.FrameworkMethod, org.junit.runner.notification.RunNotifier)}
     * on each object returned by {@link #getChildren()} (subject to any imposed
     * filter and sort)
     */
    protected Statement childrenInvoker(final RunNotifier notifier) {
        return new Statement() {
            @Override
            public void evaluate() {
                runChildren(notifier);
            }
        };
    }

    private void runChildren(final RunNotifier notifier) {
        for (final FrameworkMethod each : getFilteredChildren()) {
            fScheduler.schedule(new Runnable() {
                public void run() {
                    LoadTimeWeavableTestRunner.this.runChild(each, notifier);
                }
            });
        }
        fScheduler.finished();
    }

    /**
     * Returns a name used to describe this Runner
     */
    protected String getName() {
        return getOriginalTestClass().getName();
    }

    /**
     * Returns a {@link org.junit.runners.model.TestClass} object wrapping the class to be executed.
     */
    public final TestClass getTestClass() {
        if (fTestClass == null) {
            throw new IllegalStateException("Attempted to access test class but it has not yet been initialized!");
        }
        return fTestClass;
    }

    /**
     * Returns the original test class that was passed to this test runner.
     */
    public final TestClass getOriginalTestClass() {
        return originalTestClass;
    }

    /**
     * Runs a {@link org.junit.runners.model.Statement} that represents a leaf (aka atomic) test.
     */
    protected final void runLeaf(Statement statement, Description description, RunNotifier notifier) {
        EachTestNotifier eachNotifier = new EachTestNotifier(notifier, description);
        eachNotifier.fireTestStarted();
        try {
            statement.evaluate();
        } catch (AssumptionViolatedException e) {
            eachNotifier.addFailedAssumption(e);
        } catch (Throwable e) {
            eachNotifier.addFailure(e);
        } finally {
            eachNotifier.fireTestFinished();
        }
    }

    /**
     * @return the annotations that should be attached to this runner's
     *         description.
     */
    protected Annotation[] getRunnerAnnotations() {
        return getOriginalTestClass().getAnnotations();
    }

    //
    // Implementation of Runner
    //

    @Override
    public Description getDescription() {
        Description description = Description.createSuiteDescription(getName(), getRunnerAnnotations());
        for (FrameworkMethod child : getOriginalFilteredChildren()) {
            description.addChild(describeOriginalChild(child));
        }
        return description;
    }

    @Override
    public void run(final RunNotifier notifier) {
        // run everything with the custom loader as the context classloader so weaved classes are used
        ClassLoader currentContextClassLoader = Thread.currentThread().getContextClassLoader();
        Thread.currentThread().setContextClassLoader(customLoader);
        try {
            if (runBootstrapTest(notifier, getOriginalTestClass())) {
                this.fTestClass = getCustomTestClass(getOriginalTestClass().getJavaClass(), customLoader);
                EachTestNotifier testNotifier = new EachTestNotifier(notifier, getDescription());
                try {
                    Statement statement = classBlock(notifier);
                    statement.evaluate();
                } catch (AssumptionViolatedException e) {
                    testNotifier.fireTestIgnored();
                } catch (StoppedByUserException e) {
                    throw e;
                } catch (Throwable e) {
                    testNotifier.addFailure(e);
                }
            }
        } finally {
            // always restore the original context classloader
            Thread.currentThread().setContextClassLoader(currentContextClassLoader);
        }
    }

    /**
     * Executes the one-time bootstrap test declared via {@link BootstrapTest} (required) exactly once per
     * thread, forwarding any failures to {@code notifier}.
     *
     * @return true if the bootstrap test passed (or is already running / was already run), false otherwise
     */
    protected boolean runBootstrapTest(RunNotifier notifier, TestClass testClass) {
        if (!runningBootstrapTest.get().booleanValue()) {
            runningBootstrapTest.set(Boolean.TRUE);
            try {
                BootstrapTest bootstrapTest = getBootstrapTestAnnotation(testClass.getJavaClass());
                if (bootstrapTest != null) {
                    Result result = JUnitCore.runClasses(bootstrapTest.value());
                    List<Failure> failures = result.getFailures();
                    for (Failure failure : failures) {
                        notifier.fireTestFailure(failure);
                    }
                    return result.getFailureCount() == 0;
                } else {
                    throw new IllegalStateException("LoadTimeWeavableTestRunner, must be coupled with an @BootstrapTest annotation to define the bootstrap test to execute.");
                }
            } finally {
                runningBootstrapTest.set(Boolean.FALSE);
            }
        }
        return true;
    }

    /**
     * Walks up the class hierarchy looking for a {@link BootstrapTest} annotation; returns null if none found.
     */
    private BootstrapTest getBootstrapTestAnnotation(Class<?> testClass) {
        BootstrapTest bootstrapTest = testClass.getAnnotation(BootstrapTest.class);
        if (bootstrapTest != null) {
            return bootstrapTest;
        } else if (testClass.getSuperclass() != null) {
            return getBootstrapTestAnnotation(testClass.getSuperclass());
        } else {
            return null;
        }
    }

    //
    // Implementation of Filterable and Sortable
    //

    public void filter(Filter filter) throws NoTestsRemainException {
        for (Iterator<FrameworkMethod> iter = getOriginalFilteredChildren().iterator(); iter.hasNext(); ) {
            FrameworkMethod each = iter.next();
            if (shouldRun(filter, each)) {
                try {
                    filter.apply(each);
                } catch (NoTestsRemainException e) {
                    iter.remove();
                }
            } else {
                iter.remove();
            }
        }
        if (getOriginalFilteredChildren().isEmpty()) {
            throw new NoTestsRemainException();
        }
    }

    public void sort(Sorter sorter) {
        fSorter = sorter;
        for (FrameworkMethod each : getOriginalFilteredChildren()) {
            sortChild(each);
        }
        Collections.sort(getOriginalFilteredChildren(), comparator());
    }

    //
    // Private implementation
    //

    private void validate() throws InitializationError {
        List<Throwable> errors = new ArrayList<Throwable>();
        collectInitializationErrors(errors);
        if (!errors.isEmpty()) {
            throw new InitializationError(errors);
        }
    }

    private List<FrameworkMethod> getOriginalFilteredChildren() {
        if (originalFilteredChildren == null) {
            originalFilteredChildren = new ArrayList<FrameworkMethod>(getOriginalChildren());
        }
        return originalFilteredChildren;
    }

    /**
     * Maps the (possibly filtered) original test methods onto the equivalent methods of the class that was
     * reloaded through the custom classloader, using shadowing to match them up.
     */
    private List<FrameworkMethod> getFilteredChildren() {
        if (getOriginalFilteredChildren() == null) {
            throw new IllegalStateException(
                    "Attempted to get filtered children before original filtered children were initialized.");
        }
        if (filteredChildren == null) {
            filteredChildren = new ArrayList<FrameworkMethod>();
            List<FrameworkMethod> testMethods = computeTestMethods();
            for (FrameworkMethod originalMethod : getOriginalFilteredChildren()) {
                for (FrameworkMethod testMethod : testMethods) {
                    if (originalMethod.isShadowedBy(testMethod)) {
                        filteredChildren.add(testMethod);
                    }
                }
            }
        }
        return filteredChildren;
    }

    private void sortChild(FrameworkMethod child) {
        fSorter.apply(child);
    }

    private boolean shouldRun(Filter filter, FrameworkMethod each) {
        return filter.shouldRun(describeOriginalChild(each));
    }

    private Comparator<? super FrameworkMethod> comparator() {
        return new Comparator<FrameworkMethod>() {
            public int compare(FrameworkMethod o1, FrameworkMethod o2) {
                return fSorter.compare(describeChild(o1), describeChild(o2));
            }
        };
    }

    //
    // Implementation of ParentRunner
    //

    /**
     * Runs the test corresponding to {@code child}, which can be assumed to be
     * an element of the list returned by {@link #getChildren()}.
     * Subclasses are responsible for making sure that relevant test events are
     * reported through {@code notifier}
     */
    protected void runChild(final FrameworkMethod method, RunNotifier notifier) {
        this.currentMethod = method.getMethod();
        try {
            Description description = describeChild(method);
            if (method.getAnnotation(Ignore.class) != null) {
                notifier.fireTestIgnored(description);
            } else {
                runLeaf(methodBlock(method), description, notifier);
            }
        } finally {
            this.currentMethod = null;
        }
    }

    /**
     * Returns a {@link org.junit.runner.Description} for {@code child}, which can be assumed to
     * be an element of the list returned by {@link #getChildren()}
     */
    protected Description describeChild(FrameworkMethod method) {
        return Description.createTestDescription(getTestClass().getJavaClass(), testName(method),
                method.getAnnotations());
    }

    protected Description describeOriginalChild(FrameworkMethod method) {
        return Description.createTestDescription(getOriginalTestClass().getJavaClass(), testName(method),
                method.getAnnotations());
    }

    /**
     * Returns a list of objects that define the children of this Runner.
     */
    protected List<FrameworkMethod> getChildren() {
        return computeTestMethods();
    }

    protected List<FrameworkMethod> getOriginalChildren() {
        return computeOriginalTestMethods();
    }

    //
    // Override in subclasses
    //

    /**
     * Returns the methods that run tests. Default implementation returns all
     * methods annotated with {@code @Test} on this class and superclasses that
     * are not overridden.
     */
    protected List<FrameworkMethod> computeTestMethods() {
        return getTestClass().getAnnotatedMethods(Test.class);
    }

    protected List<FrameworkMethod> computeOriginalTestMethods() {
        return getOriginalTestClass().getAnnotatedMethods(Test.class);
    }

    /**
     * Adds to {@code errors} a throwable for each problem noted with the test class
     * (available from {@link #getTestClass()}).
     * Default implementation adds an error for each method annotated with
     * {@code @BeforeClass} or {@code @AfterClass} that is not
     * {@code public static void} with no arguments.
     */
    protected void collectInitializationErrors(List<Throwable> errors) {
        validatePublicVoidNoArgMethods(BeforeClass.class, true, errors);
        validatePublicVoidNoArgMethods(AfterClass.class, true, errors);
        validateClassRules(errors);
        validateNoNonStaticInnerClass(errors);
        validateConstructor(errors);
        validateInstanceMethods(errors);
        validateFields(errors);
        validateMethods(errors);
    }

    protected void validateNoNonStaticInnerClass(List<Throwable> errors) {
        if (getOriginalTestClass().isANonStaticInnerClass()) {
            String gripe = "The inner class " + getOriginalTestClass().getName() + " is not static.";
            errors.add(new Exception(gripe));
        }
    }

    /**
     * Adds to {@code errors} if the test class has more than one constructor,
     * or if the constructor takes parameters. Override if a subclass requires
     * different validation rules.
     */
    protected void validateConstructor(List<Throwable> errors) {
        validateOnlyOneConstructor(errors);
        validateZeroArgConstructor(errors);
    }

    /**
     * Adds to {@code errors} if the test class has more than one constructor
     * (do not override)
     */
    protected void validateOnlyOneConstructor(List<Throwable> errors) {
        if (!hasOneConstructor()) {
            String gripe = "Test class should have exactly one public constructor";
            errors.add(new Exception(gripe));
        }
    }

    /**
     * Adds to {@code errors} if the test class's single constructor takes
     * parameters (do not override)
     */
    protected void validateZeroArgConstructor(List<Throwable> errors) {
        if (!getOriginalTestClass().isANonStaticInnerClass()
                && hasOneConstructor()
                && (getOriginalTestClass().getOnlyConstructor().getParameterTypes().length != 0)) {
            String gripe = "Test class should have exactly one public zero-argument constructor";
            errors.add(new Exception(gripe));
        }
    }

    private boolean hasOneConstructor() {
        return getOriginalTestClass().getJavaClass().getConstructors().length == 1;
    }

    /**
     * Adds to {@code errors} for each method annotated with {@code @Test},
     * {@code @Before}, or {@code @After} that is not a public, void instance
     * method with no arguments.
     *
     * @deprecated unused API, will go away in future version
     */
    @Deprecated
    protected void validateInstanceMethods(List<Throwable> errors) {
        validatePublicVoidNoArgMethods(After.class, false, errors);
        validatePublicVoidNoArgMethods(Before.class, false, errors);
        validateTestMethods(errors);
        if (computeOriginalTestMethods().size() == 0) {
            errors.add(new Exception("No runnable methods"));
        }
    }

    protected void validateFields(List<Throwable> errors) {
        RULE_VALIDATOR.validate(getOriginalTestClass(), errors);
    }

    private void validateMethods(List<Throwable> errors) {
        RULE_METHOD_VALIDATOR.validate(getOriginalTestClass(), errors);
    }

    /**
     * Adds to {@code errors} for each method annotated with {@code @Test}that
     * is not a public, void instance method with no arguments.
     */
    protected void validateTestMethods(List<Throwable> errors) {
        validatePublicVoidNoArgMethods(Test.class, false, errors);
    }

    /**
     * Returns a new fixture for running a test. Default implementation executes
     * the test class's no-argument constructor (validation should have ensured
     * one exists).
     */
    protected Object createTest() throws Exception {
        Object test = getTestClass().getOnlyConstructor().newInstance();
        setTestName(test, currentMethod);
        setTestMethod(test, currentMethod);
        return test;
    }

    /**
     * Sets the {@link java.lang.reflect.Method} on the test case if it is {@link org.kuali.rice.test.MethodAware}
     *
     * @param test the test instance
     * @param method the current method to be run
     */
    protected void setTestMethod(Object test, Method method) throws Exception {
        // load MethodAware through the custom loader so isInstance works against the reloaded test class
        Class<?> methodAwareClass = Class.forName(MethodAware.class.getName(), true, getCustomClassLoader());
        if (methodAwareClass.isInstance(test)) {
            Method setTestMethod = methodAwareClass.getMethod("setTestMethod", Method.class);
            setTestMethod.invoke(test, method);
        }
    }

    protected void setTestName(final Object test, final Method testMethod) throws Exception {
        String name = testMethod == null ? "" : testMethod.getName();
        final Method setNameMethod = MethodUtils.getAccessibleMethod(test.getClass(), "setName",
                new Class[]{String.class});
        if (setNameMethod != null) {
            setNameMethod.invoke(test, name);
        }
    }

    /**
     * Returns the name that describes {@code method} for {@link org.junit.runner.Description}s.
     * Default implementation is the method's name
     */
    protected String testName(FrameworkMethod method) {
        return method.getName();
    }

    /**
     * Returns a Statement that, when executed, either returns normally if
     * {@code method} passes, or throws an exception if {@code method} fails.
     *
     * Here is an outline of the default implementation:
     *
     * <ul>
     * <li>Invoke {@code method} on the result of {@code createTest()}, and
     * throw any exceptions thrown by either operation.
     * <li>HOWEVER, if {@code method}'s {@code @Test} annotation has the {@code
     * expecting} attribute, return normally only if the previous step threw an
     * exception of the correct type, and throw an exception otherwise.
     * <li>HOWEVER, if {@code method}'s {@code @Test} annotation has the {@code
     * timeout} attribute, throw an exception if the previous step takes more
     * than the specified number of milliseconds.
     * <li>ALWAYS run all non-overridden {@code @Before} methods on this class
     * and superclasses before any of the previous steps; if any throws an
     * Exception, stop execution and pass the exception on.
     * <li>ALWAYS run all non-overridden {@code @After} methods on this class
     * and superclasses after any of the previous steps; all After methods are
     * always executed: exceptions thrown by previous steps are combined, if
     * necessary, with exceptions from After methods into a
     * {@link org.junit.runners.model.MultipleFailureException}.
     * <li>ALWAYS allow {@code @Rule} fields to modify the execution of the
     * above steps. A {@code Rule} may prevent all execution of the above steps,
     * or add additional behavior before and after, or modify thrown exceptions.
     * For more information, see {@link org.junit.rules.TestRule}
     * </ul>
     *
     * This can be overridden in subclasses, either by overriding this method,
     * or the implementations creating each sub-statement.
     */
    protected Statement methodBlock(FrameworkMethod method) {
        Object test;
        try {
            test = new ReflectiveCallable() {
                @Override
                protected Object runReflectiveCall() throws Throwable {
                    return createTest();
                }
            }.run();
        } catch (Throwable e) {
            return new Fail(e);
        }

        Statement statement = methodInvoker(method, test);
        statement = possiblyExpectingExceptions(method, test, statement);
        statement = withPotentialTimeout(method, test, statement);
        statement = withBefores(method, test, statement);
        statement = withAfters(method, test, statement);
        statement = withRules(method, test, statement);
        return statement;
    }

    //
    // Statement builders
    //

    /**
     * Returns a {@link org.junit.runners.model.Statement} that invokes {@code method} on {@code test}
     */
    protected Statement methodInvoker(FrameworkMethod method, Object test) {
        return new InvokeMethod(method, test);
    }

    /**
     * Returns a {@link org.junit.runners.model.Statement}: if {@code method}'s {@code @Test} annotation
     * has the {@code expecting} attribute, return normally only if {@code next}
     * throws an exception of the correct type, and throw an exception
     * otherwise.
     *
     * @deprecated Will be private soon: use Rules instead
     */
    @Deprecated
    protected Statement possiblyExpectingExceptions(FrameworkMethod method, Object test, Statement next) {
        Test annotation = method.getAnnotation(Test.class);
        return expectsException(annotation) ? new ExpectException(next, getExpectedException(annotation)) : next;
    }

    /**
     * Returns a {@link org.junit.runners.model.Statement}: if {@code method}'s {@code @Test} annotation
     * has the {@code timeout} attribute, throw an exception if {@code next}
     * takes more than the specified number of milliseconds.
     *
     * @deprecated Will be private soon: use Rules instead
     */
    @Deprecated
    protected Statement withPotentialTimeout(FrameworkMethod method, Object test, Statement next) {
        long timeout = getTimeout(method.getAnnotation(Test.class));
        return timeout > 0 ? new FailOnTimeout(next, timeout) : next;
    }

    /**
     * Returns a {@link org.junit.runners.model.Statement}: run all non-overridden {@code @Before}
     * methods on this class and superclasses before running {@code next}; if
     * any throws an Exception, stop execution and pass the exception on.
     *
     * @deprecated Will be private soon: use Rules instead
     */
    @Deprecated
    protected Statement withBefores(FrameworkMethod method, Object target, Statement statement) {
        List<FrameworkMethod> befores = getTestClass().getAnnotatedMethods(Before.class);
        return befores.isEmpty() ? statement : new RunBefores(statement, befores, target);
    }

    /**
     * Returns a {@link org.junit.runners.model.Statement}: run all non-overridden {@code @After}
     * methods on this class and superclasses before running {@code next}; all
     * After methods are always executed: exceptions thrown by previous steps
     * are combined, if necessary, with exceptions from After methods into a
     * {@link org.junit.runners.model.MultipleFailureException}.
     *
     * @deprecated Will be private soon: use Rules instead
     */
    @Deprecated
    protected Statement withAfters(FrameworkMethod method, Object target, Statement statement) {
        List<FrameworkMethod> afters = getTestClass().getAnnotatedMethods(After.class);
        return afters.isEmpty() ? statement : new RunAfters(statement, afters, target);
    }

    private Statement withRules(FrameworkMethod method, Object target, Statement statement) {
        List<TestRule> testRules = getTestRules(target);
        Statement result = statement;
        result = withMethodRules(method, testRules, target, result);
        result = withTestRules(method, testRules, result);
        return result;
    }

    private Statement withMethodRules(FrameworkMethod method, List<TestRule> testRules, Object target,
            Statement result) {
        for (org.junit.rules.MethodRule each : getMethodRules(target)) {
            // a rule may implement both MethodRule and TestRule; only apply it once
            if (!testRules.contains(each)) {
                result = each.apply(result, method, target);
            }
        }
        return result;
    }

    private List<org.junit.rules.MethodRule> getMethodRules(Object target) {
        return rules(target);
    }

    /**
     * @param target the test case instance
     * @return a list of MethodRules that should be applied when executing this
     *         test
     */
    protected List<org.junit.rules.MethodRule> rules(Object target) {
        return getTestClass().getAnnotatedFieldValues(target, Rule.class, org.junit.rules.MethodRule.class);
    }

    /**
     * Returns a {@link org.junit.runners.model.Statement}: apply all non-static value fields
     * annotated with {@link org.junit.Rule}.
     *
     * @param statement The base statement
     * @return a RunRules statement if any class-level {@link org.junit.Rule}s are
     *         found, or the base statement
     */
    private Statement withTestRules(FrameworkMethod method, List<TestRule> testRules, Statement statement) {
        return testRules.isEmpty() ? statement : new RunRules(statement, testRules, describeChild(method));
    }

    /**
     * @param target the test case instance
     * @return a list of TestRules that should be applied when executing this
     *         test
     */
    protected List<TestRule> getTestRules(Object target) {
        List<TestRule> result = getTestClass().getAnnotatedMethodValues(target, Rule.class, TestRule.class);
        result.addAll(getTestClass().getAnnotatedFieldValues(target, Rule.class, TestRule.class));
        return result;
    }

    private Class<? extends Throwable> getExpectedException(Test annotation) {
        if (annotation == null || annotation.expected() == Test.None.class) {
            return null;
        } else {
            return annotation.expected();
        }
    }

    private boolean expectsException(Test annotation) {
        return getExpectedException(annotation) != null;
    }

    private long getTimeout(Test annotation) {
        if (annotation == null) {
            return 0;
        }
        return annotation.timeout();
    }
}
package org.zstack.core.thread;

import org.springframework.beans.factory.annotation.Autowired;
import org.zstack.core.jmx.JmxFacade;
import org.zstack.header.core.progress.ChainInfo;
import org.zstack.header.exception.CloudRuntimeException;
import org.zstack.utils.logging.CLogger;
import org.zstack.utils.logging.CLoggerImpl;

import javax.annotation.Nonnull;
import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * Central threading facility. Wraps two scheduled thread pools (a general-purpose
 * pool and a separate pool for sync tasks), a small pool of {@link Timer}s for
 * timeout/timer tasks, and a {@link DispatchQueue} for serialized sync/chain tasks.
 * The class also serves as the {@link ThreadFactory} and
 * {@link RejectedExecutionHandler} of its own pools, and registers itself as a
 * JMX bean ("ThreadFacade") for monitoring.
 */
public class ThreadFacadeImpl implements ThreadFacade, ThreadFactory, RejectedExecutionHandler, ThreadFacadeMXBean {
    private static final CLogger _logger = CLoggerImpl.getLogger(ThreadFacadeImpl.class);

    // Maps a running periodic task to its ScheduledFuture so it can be cancelled later.
    private final Map<PeriodicTask, ScheduledFuture<?>> _periodicTasks = new ConcurrentHashMap<PeriodicTask, ScheduledFuture<?>>();
    private final Map<CancelablePeriodicTask, ScheduledFuture<?>> cancelablePeriodicTasks = new ConcurrentHashMap<CancelablePeriodicTask, ScheduledFuture<?>>();

    // Monotonic suffix for worker thread names ("zs-thread-N").
    private static final AtomicInteger seqNum = new AtomicInteger(0);

    private ScheduledThreadPoolExecutorExt _pool;
    private ScheduledThreadPoolExecutorExt _syncpool; // for sync tasks
    private DispatchQueue dpq;
    private final TimerPool timerPool = new TimerPool(5);

    @Autowired
    private JmxFacade jmxf;

    /**
     * A {@link Timer} that purges its cancelled-task backlog once every
     * {@code PURGE_CANCELLED_TIMER_TASK_THRESHOLD} cancellations, to keep the
     * timer's internal queue from growing with dead entries.
     */
    private static class TimerWrapper extends Timer {
        private int cancelledTimerTaskCount = 0;
        private static final int PURGE_CANCELLED_TIMER_TASK_THRESHOLD = 2000;

        // NOTE: not synchronized; a missed/extra purge is harmless, purge() itself is thread-safe.
        void notifyCancel() {
            if (cancelledTimerTaskCount++ >= PURGE_CANCELLED_TIMER_TASK_THRESHOLD) {
                cancelledTimerTaskCount = 0;
                this.purge();
            }
        }
    }

    /**
     * Fixed-size pool of timers; callers are handed timers round-robin so that no
     * single Timer thread becomes a bottleneck for all timeout tasks.
     */
    private static class TimerPool {
        int poolSize;
        List<TimerWrapper> pool;

        // never use a long type counter for self increment. two issues
        // 1) Java will silently overflow a number; even a long will be overflow someday
        // 2) big number causes extremely bad performance for mod operation
        // instead, reset the counter when it exceeds COUNTER_RESET_THRESHOLD to maintain
        // decent performance for mod operation.
        int counter = 0;
        static final int COUNTER_RESET_THRESHOLD = 1000000;

        private TimerPool(int poolSize) {
            this.poolSize = poolSize;
            pool = new ArrayList<TimerWrapper>(poolSize);
            for (int i = 0; i < poolSize; i++) {
                pool.add(new TimerWrapper());
            }
        }

        // Round-robin selection. NOTE(review): the pre-increment means index 0 is only
        // produced when counter wraps at poolSize multiples, and the reset to 0 skips
        // ahead by one slot — distribution stays roughly even, so this looks intentional.
        TimerWrapper getTimer() {
            int index = ++counter % poolSize;
            if (counter > COUNTER_RESET_THRESHOLD) {
                counter = 0;
            }
            return pool.get(index);
        }

        // Cancels every timer in the pool; used on facade shutdown.
        void stop() {
            for (TimerWrapper wrapper : pool) {
                wrapper.cancel();
            }
        }
    }

    /** Delegates to the dispatch queue's per-signature sync-task statistics. */
    @Override
    public Map<String, SyncTaskStatistic> getSyncTaskStatistics() {
        return dpq.getSyncTaskStatistics();
    }

    /** Delegates to the dispatch queue's per-signature chain-task statistics. */
    @Override
    public Map<String, ChainTaskStatistic> getChainTaskStatistics() {
        return dpq.getChainTaskStatistics();
    }

    /** Snapshot of the general pool's size/activity counters for monitoring. */
    @Override
    public ThreadPoolStatistic getThreadPoolStatistic() {
        long completedTask = _pool.getCompletedTaskCount();
        // pending = ever-scheduled minus completed; approximate, as both counters move.
        long pendingTask = _pool.getTaskCount() - completedTask;
        return new ThreadPoolStatistic(
                _pool.getPoolSize(),
                _pool.getActiveCount(),
                completedTask,
                pendingTask,
                _pool.getCorePoolSize(),
                _pool.getMaximumPoolSize(),
                _pool.getQueue().size()
        );
    }

    /**
     * Callable adapter that logs any escaping exception/throwable with the task's
     * name before rethrowing, so failures are attributable in the log. Throwables
     * that are not Exceptions are wrapped in {@link CloudRuntimeException} to
     * satisfy {@link Callable}'s signature.
     */
    public static class Worker<T> implements Callable<T> {
        private final Task<T> _task;

        public Worker(Task<T> task) {
            _task = task;
        }

        @Override
        public T call() throws Exception {
            try {
                return _task.call();
            } catch (Exception e) {
                _logger.warn(_task.getName() + " throws out an unhandled exception, this thread will terminate immediately", e);
                throw e;
            } catch (Throwable t) {
                _logger.warn(_task.getName() + " throws out an unhandled throwable, this thread will terminate immediately", t);
                throw new CloudRuntimeException(_task.getName() + " throws out an unhandled throwable, this thread will terminate immediately", t);
            }
        }
    }

    /**
     * Sizes the sync pool: one third of the total, but at least 150 threads and
     * never more than the total. NOTE(review): for totals below 150 the Math.min
     * clamp makes the sync pool equal to the total — confirm that is intended.
     */
    @Override
    public int getSyncThreadNum(int totalThreadNum) {
        int n = totalThreadNum / 3;
        return Math.min(totalThreadNum, Math.max(n, 150));
    }

    /** Creates the pools and dispatch queue, and registers the JMX bean. */
    public void init() {
        int totalThreadNum = ThreadGlobalProperty.MAX_THREAD_NUM;
        if (totalThreadNum < 10) {
            _logger.warn(String.format("ThreadFacade.maxThreadNum is configured to %s, which is too small for running zstack. Change it to 10", ThreadGlobalProperty.MAX_THREAD_NUM));
            totalThreadNum = 10;
        }
        _pool = new ScheduledThreadPoolExecutorExt(totalThreadNum, this, this);
        _syncpool = new ScheduledThreadPoolExecutorExt(getSyncThreadNum(totalThreadNum), this, this);
        _logger.debug(String.format("create ThreadFacade with max thread number:%s", totalThreadNum));
        dpq = new DispatchQueueImpl();
        jmxf.registerBean("ThreadFacade", this);
    }

    // Hard shutdown: general pool is interrupted (shutdownNow), sync pool drains (shutdown).
    public void destroy() {
        _pool.shutdownNow();
        _syncpool.shutdown();
    }

    /** Submits a one-shot task to the general pool. */
    @Override
    public <T> Future<T> submit(Task<T> task) {
        return _pool.submit(new Worker<T>(task));
    }

    /** Submits a one-shot task to the dedicated sync-task pool. */
    public <T> Future<T> submitSyncPool(Task<T> task) {
        return _syncpool.submit(new Worker<T>(task));
    }

    /** ThreadFactory: names worker threads "zs-thread-N". */
    @Override
    public Thread newThread(@Nonnull Runnable arg0) {
        return new Thread(arg0, "zs-thread-" + seqNum.getAndIncrement());
    }

    /** RejectedExecutionHandler: rejected tasks are logged and dropped. */
    @Override
    public void rejectedExecution(Runnable arg0, ThreadPoolExecutor arg1) {
        _logger.warn("Task " + arg0.getClass().getSimpleName() + " got rejected by ThreadPool, the pool looks full");
    }

    private Map<PeriodicTask, ScheduledFuture<?>> getPeriodicTasks() {
        return _periodicTasks;
    }

    /**
     * Schedules a fixed-rate periodic task. If a run throws, the task cancels
     * itself via the stored future; if the future is not yet registered (the
     * first run raced the put() below), cancellation is retried on the next failure.
     */
    @Override
    public Future<Void> submitPeriodicTask(final PeriodicTask task, long delay) {
        assert task.getInterval() != 0;
        assert task.getTimeUnit() != null;

        @SuppressWarnings("unchecked")
        ScheduledFuture<Void> ret = (ScheduledFuture<Void>) _pool.scheduleAtFixedRate(new Runnable() {
            public void run() {
                try {
                    task.run();
                } catch (Throwable e) {
                    _logger.warn("An unhandled exception happened during executing periodic task: " + task.getName() + ", cancel it", e);
                    final Map<PeriodicTask, ScheduledFuture<?>> periodicTasks = getPeriodicTasks();
                    final ScheduledFuture<?> ft = periodicTasks.get(task);
                    if (ft != null) {
                        ft.cancel(true);
                        periodicTasks.remove(task);
                    } else {
                        _logger.warn("Not found feature for task " + task.getName() + ", the exception happened too soon, will try to cancel the task next time the exception happens");
                    }
                }
            }
        }, delay, task.getInterval(), task.getTimeUnit());

        _periodicTasks.put(task, ret);
        return ret;
    }

    /** Convenience overload: periodic task with zero initial delay. */
    @Override
    public Future<Void> submitPeriodicTask(PeriodicTask task) {
        return submitPeriodicTask(task, 0);
    }

    /** Registers a thread-around hook on both pools. */
    @Override
    public void registerHook(ThreadAroundHook hook) {
        _pool.registerHook(hook);
        _syncpool.registerHook(hook);
    }

    /** Unregisters a thread-around hook from both pools. */
    @Override
    public void unregisterHook(ThreadAroundHook hook) {
        _pool.unregisterHook(hook);
        _syncpool.unregisterHook(hook);
    }

    @Override
    public <T> Future<T> syncSubmit(SyncTask<T> task) {
        return dpq.syncSubmit(task);
    }

    @Override
    public Future<Void> chainSubmit(ChainTask task) {
        return dpq.chainSubmit(task);
    }

    @Override
    public boolean isChainTaskRunning(String signature) {
        return dpq.isChainTaskRunning(signature);
    }

    @Override
    public ChainInfo getChainTaskInfo(String signature) {
        return dpq.getChainTaskInfo(signature);
    }

    @Override
    public ChainInfo cleanChainTaskInfo(String signature, Integer index, Boolean cleanUp, Boolean isRunningTask) {
        return dpq.cleanChainTaskInfo(signature, index, cleanUp, isRunningTask);
    }

    @Override
    public Set<String> getApiRunningTaskSignature(String apiId) {
        return dpq.getApiRunningTaskSignature(apiId);
    }

    /** Receipt handed to callers of submitTimeoutTask, allowing early cancellation. */
    public interface TimeoutTaskReceipt {
        boolean cancel();
    }

    @Override
    public TimeoutTaskReceipt submitTimeoutTask(final Runnable task, TimeUnit unit, long delay) {
        return submitTimeoutTask(task, unit, delay, false);
    }

    /**
     * Schedules a one-shot task on a pooled Timer after {@code delay}. The task
     * always cancels itself after running (success or failure) so the timer queue
     * stays clean. If {@code executeRightNow} is set, the task is ALSO run
     * immediately on an async thread — i.e. it may execute twice.
     */
    @Override
    public TimeoutTaskReceipt submitTimeoutTask(Runnable task, TimeUnit unit, long delay, boolean executeRightNow) {
        final TimerWrapper timer = timerPool.getTimer();

        class TimerTaskWorker extends java.util.TimerTask implements TimeoutTaskReceipt {
            @Override
            @AsyncThread
            public void run() {
                try {
                    task.run();
                } catch (Throwable t) {
                    _logger.warn(String.format("Unhandled exception happened when running %s", task.getClass().getName()), t);
                } finally {
                    // Self-cancel so the entry is removed from the timer's queue.
                    this.cancel();
                }
            }

            @Override
            public boolean cancel() {
                boolean ret = super.cancel();
                // Let the timer purge its backlog of cancelled tasks periodically.
                timer.notifyCancel();
                return ret;
            }
        }

        TimerTaskWorker worker = new TimerTaskWorker();
        timer.schedule(worker, unit.toMillis(delay));

        if (executeRightNow) {
            executeRightNow(task);
        }

        return worker;
    }

    // Fire-and-forget immediate execution; exceptions are logged, never propagated.
    @AsyncThread
    private void executeRightNow(final Runnable task) {
        try {
            task.run();
        } catch (Throwable t) {
            _logger.warn(String.format("Unhandled exception happened when running %s", task.getClass().getName()), t);
        }
    }

    /**
     * Schedules a one-shot timer task; the task re-arms nothing itself — it is
     * cancelled when {@code task.run()} returns true. Returns a Runnable that
     * cancels the underlying TimerTask when invoked.
     */
    @Override
    public Runnable submitTimerTask(final TimerTask task, TimeUnit unit, long delay) {
        final TimerWrapper timer = timerPool.getTimer();
        java.util.TimerTask t = new java.util.TimerTask() {
            @Override
            public void run() {
                try {
                    if (task.run()) {
                        cancel();
                    }
                } catch (Throwable t) {
                    _logger.warn(String.format("Unhandled exception happened when running %s", task.getClass().getName()), t);
                }
            }
        };
        timer.schedule(t, unit.toMillis(delay));
        return t::cancel;
    }

    @Override
    public boolean start() {
        return true;
    }

    // NOTE(review): stop() shuts down only _pool and the timer pool, while destroy()
    // also stops _syncpool — confirm the asymmetry is intentional.
    @Override
    public boolean stop() {
        _pool.shutdown();
        timerPool.stop();
        return true;
    }

    @Override
    public Future<Void> submitCancelablePeriodicTask(CancelablePeriodicTask task) {
        return submitCancelablePeriodicTask(task, 0);
    }

    /**
     * Fixed-rate periodic task that can cancel itself: it is cancelled when
     * {@code task.run()} returns true, or when a run throws. As with
     * submitPeriodicTask, a cancellation that races the initial put() is retried
     * on the next trigger.
     */
    @Override
    public Future<Void> submitCancelablePeriodicTask(final CancelablePeriodicTask task, long delay) {
        @SuppressWarnings("unchecked")
        ScheduledFuture<Void> ret = (ScheduledFuture<Void>) _pool.scheduleAtFixedRate(new Runnable() {
            private void cancelTask() {
                ScheduledFuture<?> ft = cancelablePeriodicTasks.get(task);
                if (ft != null) {
                    ft.cancel(true);
                    cancelablePeriodicTasks.remove(task);
                } else {
                    _logger.warn("cannot find feature for task " + task.getName() + ", the exception happened too soon, will try to cancel the task next time the exception happens");
                }
            }

            public void run() {
                try {
                    boolean cancel = task.run();
                    if (cancel) {
                        cancelTask();
                    }
                } catch (Throwable e) {
                    _logger.warn("An unhandled exception happened during executing periodic task: " + task.getName() + ", cancel it", e);
                    cancelTask();
                }
            }
        }, delay, task.getInterval(), task.getTimeUnit());
        cancelablePeriodicTasks.put(task, ret);
        return ret;
    }

    /** Logs a one-line summary of the general pool's counters at debug level. */
    @Override
    public void printThreadsAndTasks() {
        long completedTask = _pool.getCompletedTaskCount();
        long pendingTask = _pool.getTaskCount() - completedTask;
        StringBuilder builder = new StringBuilder();
        builder.append("check thread poolSize and tasks: ");
        builder.append(String.format("poolSize: %s, activeSize: %s, corePoolSize: %s, maximumPoolSize: %s, " +
                        "completedTasks: %s, pendingTasks: %s, queueTasks: %s",
                _pool.getPoolSize(),
                _pool.getActiveCount(),
                _pool.getCorePoolSize(),
                _pool.getMaximumPoolSize(),
                completedTask,
                pendingTask,
                _pool.getQueue().size()));
        _logger.debug(builder.toString());
    }
}
/* * Copyright 2013-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.facebook.buck.artifact_cache; import com.facebook.buck.event.BuckEventBus; import com.facebook.buck.event.NetworkEvent.BytesReceivedEvent; import com.facebook.buck.io.ProjectFilesystem; import com.facebook.buck.log.CommandThreadFactory; import com.facebook.buck.log.Logger; import com.facebook.buck.slb.HttpLoadBalancer; import com.facebook.buck.slb.HttpService; import com.facebook.buck.slb.LoadBalancedService; import com.facebook.buck.slb.RetryingHttpService; import com.facebook.buck.slb.SingleUriService; import com.facebook.buck.timing.DefaultClock; import com.facebook.buck.util.HumanReadableException; import com.google.common.base.Function; import com.google.common.base.Optional; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.util.concurrent.ListeningExecutorService; import java.io.IOException; import java.net.URI; import java.nio.file.Path; import java.util.Map; import java.util.concurrent.TimeUnit; import okhttp3.ConnectionPool; import okhttp3.Interceptor; import okhttp3.MediaType; import okhttp3.OkHttpClient; import okhttp3.Request; import okhttp3.Response; import okhttp3.ResponseBody; import okio.Buffer; import okio.BufferedSource; import okio.ForwardingSource; import okio.Okio; import okio.Source; /** * Creates instances of the {@link ArtifactCache}. 
 */
public class ArtifactCaches {

  private static final Logger LOG = Logger.get(ArtifactCaches.class);

  // Strategy for constructing a concrete network-backed cache from common args.
  private interface NetworkCacheFactory {
    ArtifactCache newInstance(NetworkCacheArgs args);
  }

  private static final NetworkCacheFactory HTTP_PROTOCOL = new NetworkCacheFactory() {
    @Override
    public ArtifactCache newInstance(NetworkCacheArgs args) {
      return new HttpArtifactCache(args);
    }
  };

  private static final NetworkCacheFactory THRIFT_PROTOCOL = new NetworkCacheFactory() {
    @Override
    public ArtifactCache newInstance(NetworkCacheArgs args) {
      return new ThriftArtifactCache(args);
    }
  };

  // Static-factory utility class; never instantiated.
  private ArtifactCaches() {
  }

  // Copies every entry of the given header map onto the request builder.
  private static Request.Builder addHeadersToBuilder(
      Request.Builder builder,
      ImmutableMap<String, String> headers) {
    ImmutableSet<Map.Entry<String, String>> entries = headers.entrySet();
    for (Map.Entry<String, String> header : entries) {
      builder.addHeader(header.getKey(), header.getValue());
    }
    return builder;
  }

  /**
   * Creates a new instance of the cache for use during a build.
   *
   * @param buckConfig describes what kind of cache to create
   * @param buckEventBus event bus
   * @param projectFilesystem filesystem to store files on
   * @param wifiSsid current WiFi ssid to decide if we want the http cache or not
   * @return a cache
   * @throws InterruptedException
   */
  public static ArtifactCache newInstance(
      ArtifactCacheBuckConfig buckConfig,
      BuckEventBus buckEventBus,
      ProjectFilesystem projectFilesystem,
      Optional<String> wifiSsid,
      ListeningExecutorService httpWriteExecutorService) throws InterruptedException {
    // Bracket construction with connect events so cache setup time shows in the build log.
    ArtifactCacheConnectEvent.Started started = ArtifactCacheConnectEvent.started();
    buckEventBus.post(started);
    ArtifactCache artifactCache = newInstanceInternal(
        buckConfig,
        buckEventBus,
        projectFilesystem,
        wifiSsid,
        httpWriteExecutorService);
    buckEventBus.post(ArtifactCacheConnectEvent.finished(started));
    return artifactCache;
  }

  /**
   * Creates a new instance of the cache to be used to serve the dircache from the WebServer.
   *
   * @param buckConfig describes how to configure the cache
   * @param projectFilesystem filesystem to store files on
   * @return a cache
   */
  public static Optional<ArtifactCache> newServedCache(
      ArtifactCacheBuckConfig buckConfig,
      final ProjectFilesystem projectFilesystem) {
    // Absent unless a served local cache is configured; no event bus for the served path.
    return buckConfig.getServedLocalCache().transform(
        new Function<DirCacheEntry, ArtifactCache>() {
          @Override
          public ArtifactCache apply(DirCacheEntry input) {
            return createDirArtifactCache(
                Optional.<BuckEventBus>absent(),
                input,
                projectFilesystem);
          }
        });
  }

  // Builds one cache per configured mode and wraps multiples in a MultiArtifactCache.
  private static ArtifactCache newInstanceInternal(
      ArtifactCacheBuckConfig buckConfig,
      BuckEventBus buckEventBus,
      ProjectFilesystem projectFilesystem,
      Optional<String> wifiSsid,
      ListeningExecutorService httpWriteExecutorService) throws InterruptedException {
    ImmutableSet<ArtifactCacheBuckConfig.ArtifactCacheMode> modes =
        buckConfig.getArtifactCacheModes();
    if (modes.isEmpty()) {
      return new NoopArtifactCache();
    }
    ImmutableList.Builder<ArtifactCache> builder = ImmutableList.builder();
    for (ArtifactCacheBuckConfig.ArtifactCacheMode mode : modes) {
      switch (mode) {
        case dir:
          builder.add(
              createDirArtifactCache(
                  Optional.of(buckEventBus),
                  buckConfig.getDirCache(),
                  projectFilesystem));
          break;
        case http:
          initializeDistributedCaches(
              buckConfig,
              buckEventBus,
              projectFilesystem,
              wifiSsid,
              httpWriteExecutorService,
              builder,
              HTTP_PROTOCOL);
          break;
        case thrift_over_http:
          initializeDistributedCaches(
              buckConfig,
              buckEventBus,
              projectFilesystem,
              wifiSsid,
              httpWriteExecutorService,
              builder,
              THRIFT_PROTOCOL);
          break;
      }
    }
    ImmutableList<ArtifactCache> artifactCaches = builder.build();
    ArtifactCache result;

    if (artifactCaches.size() == 1) {
      // Don't bother wrapping a single artifact cache in MultiArtifactCache.
      result = artifactCaches.get(0);
    } else {
      result = new MultiArtifactCache(artifactCaches);
    }

    // Always support reading two-level cache stores (in case we performed any in the past).
    result = new TwoLevelArtifactCacheDecorator(
        result,
        projectFilesystem,
        buckEventBus,
        buckConfig.getTwoLevelCachingEnabled(),
        buckConfig.getTwoLevelCachingMinimumSize(),
        buckConfig.getTwoLevelCachingMaximumSize());

    return result;
  }

  // Adds one network cache per configured HTTP cache entry, skipping entries
  // whose WiFi restrictions exclude the current SSID.
  private static void initializeDistributedCaches(
      ArtifactCacheBuckConfig buckConfig,
      BuckEventBus buckEventBus,
      ProjectFilesystem projectFilesystem,
      Optional<String> wifiSsid,
      ListeningExecutorService httpWriteExecutorService,
      ImmutableList.Builder<ArtifactCache> builder,
      NetworkCacheFactory factory) {
    for (HttpCacheEntry cacheEntry : buckConfig.getHttpCaches()) {
      if (!cacheEntry.isWifiUsableForDistributedCache(wifiSsid)) {
        LOG.warn("HTTP cache is disabled because WiFi is not usable.");
        continue;
      }
      builder.add(createHttpArtifactCache(
              cacheEntry,
              buckConfig.getHostToReportToRemoteCacheServer(),
              buckEventBus,
              projectFilesystem,
              httpWriteExecutorService,
              buckConfig,
              factory));
    }
  }

  // Builds the on-disk cache, wrapped in a logging decorator when an event bus is present.
  private static ArtifactCache createDirArtifactCache(
      Optional<BuckEventBus> buckEventBus,
      DirCacheEntry dirCacheConfig,
      ProjectFilesystem projectFilesystem) {
    Path cacheDir = dirCacheConfig.getCacheDir();
    try {
      DirArtifactCache dirArtifactCache = new DirArtifactCache(
          "dir",
          projectFilesystem,
          cacheDir,
          dirCacheConfig.getCacheReadMode().isDoStore(),
          dirCacheConfig.getMaxSizeBytes());

      if (!buckEventBus.isPresent()) {
        return dirArtifactCache;
      }

      return new LoggingArtifactCacheDecorator(buckEventBus.get(),
          dirArtifactCache,
          new DirArtifactCacheEvent.DirArtifactCacheEventFactory());

    } catch (IOException e) {
      // NOTE(review): the IOException cause is dropped here — consider chaining it.
      throw new HumanReadableException(
          "Failure initializing artifact cache directory: %s",
          cacheDir);
    }
  }

  // Assembles the HTTP store/fetch clients (headers, timeouts, connection pool,
  // progress reporting, load balancing) and hands them to the protocol factory.
  private static ArtifactCache createHttpArtifactCache(
      HttpCacheEntry cacheDescription,
      final String hostToReportToRemote,
      final BuckEventBus buckEventBus,
      ProjectFilesystem projectFilesystem,
      ListeningExecutorService httpWriteExecutorService,
      ArtifactCacheBuckConfig config,
      NetworkCacheFactory factory) {

    // Setup the default client to use.
    OkHttpClient.Builder storeClientBuilder = new OkHttpClient.Builder();
    // Identify the requesting user/host to the remote cache on every store request.
    storeClientBuilder.networkInterceptors().add(
        new Interceptor() {
          @Override
          public Response intercept(Chain chain) throws IOException {
            return chain.proceed(
                chain.request().newBuilder()
                    .addHeader(
                        "X-BuckCache-User",
                        System.getProperty("user.name", "<unknown>"))
                    .addHeader("X-BuckCache-Host", hostToReportToRemote)
                    .build());
          }
        });
    int timeoutSeconds = cacheDescription.getTimeoutSeconds();
    setTimeouts(storeClientBuilder, timeoutSeconds);
    storeClientBuilder.connectionPool(
        new ConnectionPool(
            /* maxIdleConnections */ (int) config.getThreadPoolSize(),
            /* keepAliveDurationMs */ config.getThreadPoolKeepAliveDurationMillis(),
            TimeUnit.MILLISECONDS)
    );

    final ImmutableMap<String, String> readHeaders = cacheDescription.getReadHeaders();
    final ImmutableMap<String, String> writeHeaders = cacheDescription.getWriteHeaders();

    // If write headers are specified, add them to every default client request.
    if (!writeHeaders.isEmpty()) {
      storeClientBuilder.networkInterceptors().add(
          new Interceptor() {
            @Override
            public Response intercept(Chain chain) throws IOException {
              return chain.proceed(
                  addHeadersToBuilder(chain.request().newBuilder(), writeHeaders).build()
              );
            }
          });
    }

    OkHttpClient storeClient = storeClientBuilder.build();

    // For fetches, use a client with a read timeout.
    OkHttpClient.Builder fetchClientBuilder = storeClient.newBuilder();
    setTimeouts(fetchClientBuilder, timeoutSeconds);

    // If read headers are specified, add them to every read client request.
    if (!readHeaders.isEmpty()) {
      fetchClientBuilder.networkInterceptors().add(
          new Interceptor() {
            @Override
            public Response intercept(Chain chain) throws IOException {
              return chain.proceed(
                  addHeadersToBuilder(chain.request().newBuilder(), readHeaders).build()
              );
            }
          });
    }

    // Wrap every fetch response body so download progress is reported on the event bus.
    fetchClientBuilder.networkInterceptors().add((new Interceptor() {
      @Override
      public Response intercept(Chain chain) throws IOException {
        Response originalResponse = chain.proceed(chain.request());
        return originalResponse.newBuilder()
            .body(new ProgressResponseBody(originalResponse.body(), buckEventBus))
            .build();
      }
    }));
    OkHttpClient fetchClient = fetchClientBuilder.build();

    HttpService fetchService;
    HttpService storeService;
    switch (config.getLoadBalancingType()) {
      case CLIENT_SLB:
        HttpLoadBalancer clientSideSlb = config.getSlbConfig().createHttpClientSideSlb(
            new DefaultClock(),
            buckEventBus,
            new CommandThreadFactory("ArtifactCaches.HttpLoadBalancer"));
        // Fetches are retried; stores are not.
        fetchService = new RetryingHttpService(
            buckEventBus,
            new LoadBalancedService(clientSideSlb, fetchClient, buckEventBus),
            config.getMaxFetchRetries());
        storeService = new LoadBalancedService(clientSideSlb, storeClient, buckEventBus);
        break;

      case SINGLE_SERVER:
        URI url = cacheDescription.getUrl();
        fetchService = new SingleUriService(url, fetchClient);
        storeService = new SingleUriService(url, storeClient);
        break;

      default:
        throw new IllegalArgumentException("Unknown HttpLoadBalancer type: " +
            config.getLoadBalancingType());
    }

    // Cache name is "http-<name>" when a name is configured, otherwise just "http".
    String cacheName = cacheDescription.getName()
        .transform(new Function<String, String>() {
          @Override
          public String apply(String input) {
            return "http-" + input;
          }
        })
        .or("http");
    boolean doStore = cacheDescription.getCacheReadMode().isDoStore();
    return factory.newInstance(
        NetworkCacheArgs.builder()
            .setThriftEndpointPath(config.getHybridThriftEndpoint())
            .setCacheName(cacheName)
            .setRepository(config.getRepository())
            .setScheduleType(config.getScheduleType())
            .setFetchClient(fetchService)
            .setStoreClient(storeService)
.setDoStore(doStore) .setProjectFilesystem(projectFilesystem) .setBuckEventBus(buckEventBus) .setHttpWriteExecutorService(httpWriteExecutorService) .setErrorTextTemplate(cacheDescription.getErrorMessageFormat()) .build()); } private static OkHttpClient.Builder setTimeouts( OkHttpClient.Builder builder, int timeoutSeconds) { return builder .connectTimeout(timeoutSeconds, TimeUnit.SECONDS) .readTimeout(timeoutSeconds, TimeUnit.SECONDS) .writeTimeout(timeoutSeconds, TimeUnit.SECONDS); } private static class ProgressResponseBody extends ResponseBody { private final ResponseBody responseBody; private BuckEventBus buckEventBus; private BufferedSource bufferedSource; public ProgressResponseBody( ResponseBody responseBody, BuckEventBus buckEventBus) throws IOException { this.responseBody = responseBody; this.buckEventBus = buckEventBus; this.bufferedSource = Okio.buffer(source(responseBody.source())); } @Override public MediaType contentType() { return responseBody.contentType(); } @Override public long contentLength() { return responseBody.contentLength(); } @Override public BufferedSource source() { return bufferedSource; } private Source source(Source source) { return new ForwardingSource(source) { @Override public long read(Buffer sink, long byteCount) throws IOException { long bytesRead = super.read(sink, byteCount); // read() returns the number of bytes read, or -1 if this source is exhausted. if (byteCount != -1) { buckEventBus.post(new BytesReceivedEvent(byteCount)); } return bytesRead; } }; } } }
/*
 * Copyright 2000-2017 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.codeInsight.hint;

import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.LogicalPosition;
import com.intellij.openapi.editor.colors.EditorColors;
import com.intellij.openapi.editor.colors.EditorColorsScheme;
import com.intellij.openapi.editor.ex.EditorEx;
import com.intellij.openapi.editor.ex.FoldingModelEx;
import com.intellij.openapi.editor.ex.util.EditorUIUtil;
import com.intellij.openapi.editor.ex.util.EditorUtil;
import com.intellij.openapi.editor.impl.EditorImpl;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.ui.HintHint;
import com.intellij.ui.LightweightHint;
import com.intellij.ui.ScreenUtil;
import com.intellij.util.ui.JBUI;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import javax.swing.border.Border;
import javax.swing.border.CompoundBorder;
import java.awt.*;
import java.awt.image.BufferedImage;

/**
 * A panel that renders a snapshot of a line range of an editor (optionally with
 * its gutter) into off-screen images, for use as a lightweight hint — e.g. the
 * popup that shows the off-screen part of a matched brace's context.
 */
public class EditorFragmentComponent extends JPanel {
  private static final Logger LOG = Logger.getInstance("#com.intellij.codeInsight.hint.EditorFragmentComponent");

  private static final int LINE_BORDER_THICKNESS = 1;
  private static final int EMPTY_BORDER_THICKNESS = 2;

  private EditorFragmentComponent(Component component, EditorEx editor, int startLine, int endLine,
                                  boolean showFolding, boolean showGutter) {
    // Pure painting mode suppresses editor side effects while we paint to an image.
    editor.setPurePaintingMode(true);
    try {
      doInit(component, editor, startLine, endLine, showFolding, showGutter);
    }
    finally {
      editor.setPurePaintingMode(false);
    }
  }

  // Paints the [startLine, endLine] range of the editor (and optionally the gutter)
  // into BufferedImages and installs a component that draws those images.
  private void doInit(Component anchorComponent, EditorEx editor, int startLine, int endLine, boolean showFolding, boolean showGutter) {
    Document doc = editor.getDocument();
    final int endOffset = endLine < doc.getLineCount() ? doc.getLineEndOffset(endLine) : doc.getTextLength();
    boolean newRendering = editor instanceof EditorImpl;
    int widthAdjustment = newRendering ? EditorUtil.getSpaceWidth(Font.PLAIN, editor) : 0;
    // Clamp to the screen/window width so huge lines don't create huge images.
    final int textImageWidth = Math.min(
        editor.getMaxWidthInRange(doc.getLineStartOffset(startLine), endOffset) + widthAdjustment,
        getWidthLimit(editor)
    );

    FoldingModelEx foldingModel = editor.getFoldingModel();
    boolean isFoldingEnabled = foldingModel.isFoldingEnabled();
    if (!showFolding) {
      // Temporarily disable folding so the fragment paints unfolded; restored below.
      foldingModel.setFoldingEnabled(false);
    }

    Point p1 = editor.logicalPositionToXY(new LogicalPosition(startLine, 0));
    Point p2 = editor.logicalPositionToXY(new LogicalPosition(Math.max(endLine, startLine + 1), 0));
    int y1 = p1.y;
    int y2 = p2.y;
    final int textImageHeight = y2 - y1 == 0 ? editor.getLineHeight() : y2 - y1;
    LOG.assertTrue(textImageHeight > 0,
                   "Height: " + textImageHeight + "; startLine:" + startLine + "; endLine:" + endLine + "; p1:" + p1 + "; p2:" + p2);

    // Legacy rendering paints at the current horizontal scroll; reset it and restore later.
    int savedScrollOffset = newRendering ? 0 : editor.getScrollingModel().getHorizontalScrollOffset();
    if (savedScrollOffset > 0) {
      editor.getScrollingModel().scrollHorizontally(0);
    }

    final BufferedImage textImage = UIUtil.createImage(anchorComponent == null ? editor.getContentComponent() : anchorComponent,
                                                       textImageWidth, textImageHeight, BufferedImage.TYPE_INT_RGB);
    Graphics textGraphics = textImage.getGraphics();
    EditorUIUtil.setupAntialiasing(textGraphics);

    final JComponent rowHeader;
    final BufferedImage markersImage;
    final int markersImageWidth;

    if (showGutter) {
      rowHeader = editor.getGutterComponentEx();
      markersImageWidth = Math.max(1, rowHeader.getWidth());

      markersImage = UIUtil.createImage(editor.getComponent(), markersImageWidth, textImageHeight, BufferedImage.TYPE_INT_RGB);
      Graphics markerGraphics = markersImage.getGraphics();
      EditorUIUtil.setupAntialiasing(markerGraphics);

      // Shift painting so the requested line range lands at the image origin.
      markerGraphics.translate(0, -y1);
      markerGraphics.setClip(0, y1, rowHeader.getWidth(), textImageHeight);
      markerGraphics.setColor(getBackgroundColor(editor));
      markerGraphics.fillRect(0, y1, rowHeader.getWidth(), textImageHeight);
      rowHeader.paint(markerGraphics);
    }
    else {
      markersImageWidth = 0;
      rowHeader = null;
      markersImage = null;
    }

    textGraphics.translate(0, -y1);
    textGraphics.setClip(0, y1, textImageWidth, textImageHeight);
    // Hide the caret during the snapshot so it doesn't appear in the hint.
    final boolean wasVisible = editor.setCaretVisible(false);
    editor.getContentComponent().paint(textGraphics);
    if (wasVisible) {
      editor.setCaretVisible(true);
    }

    if (!showFolding) {
      foldingModel.setFoldingEnabled(isFoldingEnabled);
    }

    if (savedScrollOffset > 0) {
      editor.getScrollingModel().scrollHorizontally(savedScrollOffset);
    }

    // Lightweight component that simply blits the pre-rendered images.
    JComponent component = new JComponent() {
      @Override
      public Dimension getPreferredSize() {
        return new Dimension(textImageWidth + markersImageWidth, textImageHeight);
      }

      @Override
      protected void paintComponent(Graphics graphics) {
        if (markersImage != null) {
          UIUtil.drawImage(graphics, markersImage, 0, 0, null);
          UIUtil.drawImage(graphics, textImage, rowHeader.getWidth(), 0, null);
        }
        else {
          UIUtil.drawImage(graphics, textImage, 0, 0, null);
        }
      }
    };

    setLayout(new BorderLayout());
    add(component);

    setBorder(createEditorFragmentBorder(editor));
  }

  // Upper bound for the fragment image width: screen width, else ancestor window width.
  private static int getWidthLimit(@NotNull Editor editor) {
    Component component = editor.getComponent();
    int screenWidth = ScreenUtil.getScreenRectangle(component).width;
    if (screenWidth > 0) return screenWidth;
    Window window = SwingUtilities.getWindowAncestor(component);
    return window == null ? Integer.MAX_VALUE : window.getWidth();
  }

  /**
   * @param y {@code y} coordinate in layered pane coordinate system.
   */
  @Nullable
  public static LightweightHint showEditorFragmentHintAt(Editor editor,
                                                         TextRange range,
                                                         int y,
                                                         boolean showUpward,
                                                         boolean showFolding,
                                                         boolean hideByAnyKey,
                                                         boolean hideByScrolling,
                                                         boolean useCaretRowBackground) {
    if (ApplicationManager.getApplication().isUnitTestMode()) return null;
    Document document = editor.getDocument();

    int startOffset = range.getStartOffset();
    int startLine = document.getLineNumber(startOffset);
    CharSequence text = document.getCharsSequence();
    // There is a possible case that we have a situation like below:
    //    line 1
    //    line 2 <fragment start>
    //    line 3<fragment end>
    // We don't want to include 'line 2' to the target fragment then.
    boolean incrementLine = false;
    for (int offset = startOffset, max = Math.min(range.getEndOffset(), text.length()); offset < max; offset++) {
      char c = text.charAt(offset);
      incrementLine = StringUtil.isWhiteSpace(c);
      if (!incrementLine || c == '\n') {
        break;
      }
    }
    if (incrementLine) {
      startLine++;
    }

    int endLine = Math.min(document.getLineNumber(range.getEndOffset()) + 1, document.getLineCount() - 1);

    if (startLine >= endLine) return null;

    EditorFragmentComponent fragmentComponent = createEditorFragmentComponent(
      editor, startLine, endLine, showFolding, true, useCaretRowBackground);

    if (showUpward) {
      // Anchor the hint so it grows upward from the given y.
      y -= fragmentComponent.getPreferredSize().height;
      y  = Math.max(0,y);
    }

    final JComponent c = editor.getComponent();
    int x = SwingUtilities.convertPoint(c, new Point(JBUI.scale(-3),0), UIUtil.getRootPane(c)).x; //IDEA-68016

    Point p = new Point(x, y);
    LightweightHint hint = new MyComponentHint(fragmentComponent);
    HintManagerImpl.getInstanceImpl().showEditorHint(hint, editor, p,
                                                     (hideByAnyKey ? HintManager.HIDE_BY_ANY_KEY : 0) |
                                                     (hideByScrolling ? HintManager.HIDE_BY_SCROLLING : 0) |
                                                     HintManager.HIDE_BY_TEXT_CHANGE |
                                                     HintManager.HIDE_BY_MOUSEOVER,
                                                     0, false, new HintHint(editor, p));
    return hint;
  }

  /**
   * @param component Should be provided if editor is not currently displayable.
   *                  Makes for correct rendering on multi-monitor configurations.
   */
  public static EditorFragmentComponent createEditorFragmentComponent(Component component,
                                                                      Editor editor,
                                                                      int startLine,
                                                                      int endLine,
                                                                      boolean showFolding, boolean showGutter) {
    return createEditorFragmentComponent(component, editor, startLine, endLine, showFolding, showGutter, true);
  }

  public static EditorFragmentComponent createEditorFragmentComponent(Editor editor,
                                                                      int startLine,
                                                                      int endLine,
                                                                      boolean showFolding, boolean showGutter) {
    return createEditorFragmentComponent(editor, startLine, endLine, showFolding, showGutter, true);
  }

  public static EditorFragmentComponent createEditorFragmentComponent(Editor editor,
                                                                      int startLine,
                                                                      int endLine,
                                                                      boolean showFolding,
                                                                      boolean showGutter,
                                                                      boolean useCaretRowBackground) {
    return createEditorFragmentComponent(null, editor, startLine, endLine, showFolding, showGutter, useCaretRowBackground);
  }

  /**
   * @param component Should be provided if editor is not currently displayable.
   *                  Makes for correct rendering on multi-monitor configurations.
   */
  public static EditorFragmentComponent createEditorFragmentComponent(Component component,
                                                                      Editor editor,
                                                                      int startLine,
                                                                      int endLine,
                                                                      boolean showFolding,
                                                                      boolean showGutter,
                                                                      boolean useCaretRowBackground) {
    final EditorEx editorEx = (EditorEx)editor;
    // Temporarily repaint with the hint's background color; restored afterwards.
    final Color old = editorEx.getBackgroundColor();
    Color backColor = getBackgroundColor(editor, useCaretRowBackground);
    editorEx.setBackgroundColor(backColor);
    EditorFragmentComponent fragmentComponent = new EditorFragmentComponent(component, editorEx,
                                                                            startLine, endLine, showFolding, showGutter);
    fragmentComponent.setBackground(backColor);

    editorEx.setBackgroundColor(old);
    return fragmentComponent;
  }

  // Convenience entry point: shows the fragment hint above the visible area,
  // compensating when the range's end line is partially scrolled off-screen.
  @Nullable
  public static LightweightHint showEditorFragmentHint(Editor editor, TextRange range, boolean showFolding, boolean hideByAnyKey){
    if (!(editor instanceof EditorEx)) return null;
    JRootPane rootPane = editor.getComponent().getRootPane();
    if (rootPane == null) return null;
    JLayeredPane layeredPane = rootPane.getLayeredPane();
    int lineHeight = editor.getLineHeight();
    int overhang = editor.getScrollingModel().getVisibleArea().y -
                   editor.logicalPositionToXY(editor.offsetToLogicalPosition(range.getEndOffset())).y;
    int yRelative = overhang > 0 && overhang < lineHeight ?
                    lineHeight - overhang + JBUI.scale(LINE_BORDER_THICKNESS + EMPTY_BORDER_THICKNESS) : 0;
    Point point = SwingUtilities.convertPoint(((EditorEx)editor).getScrollPane().getViewport(),
                                              -2, yRelative,
                                              layeredPane);
    return showEditorFragmentHintAt(editor, range, point.y, true, showFolding, hideByAnyKey, true, false);
  }

  public static Color getBackgroundColor(Editor editor){
    return getBackgroundColor(editor, true);
  }

  // Caret-row color when requested and available, otherwise the scheme's default background.
  public static Color getBackgroundColor(Editor editor, boolean useCaretRowBackground){
    EditorColorsScheme colorsScheme = editor.getColorsScheme();
    Color color = colorsScheme.getColor(EditorColors.CARET_ROW_COLOR);
    if (!useCaretRowBackground || color == null){
      color = colorsScheme.getDefaultBackground();
    }
    return color;
  }

  // Tearline-colored line border with an inner empty margin.
  @NotNull
  public static CompoundBorder createEditorFragmentBorder(@NotNull Editor editor) {
    Color borderColor = editor.getColorsScheme().getColor(EditorColors.SELECTED_TEARLINE_COLOR);
    Border outsideBorder = JBUI.Borders.customLine(borderColor, LINE_BORDER_THICKNESS);
    Border insideBorder = JBUI.Borders.empty(EMPTY_BORDER_THICKNESS, EMPTY_BORDER_THICKNESS);
    return BorderFactory.createCompoundBorder(outsideBorder, insideBorder);
  }

  private static class MyComponentHint extends LightweightHint {
    public MyComponentHint(JComponent component) {
      super(component);
      setForceLightweightPopup(true);
    }

    @Override
    public void hide() {
      // needed for Alt-Q multiple times
      // Q: not good?
      SwingUtilities.invokeLater(
        () -> super.hide()
      );
    }
  }
}
package net.progval.android.andquote;

import net.progval.android.andquote.utils.OpenQuoteApi;
import java.io.InputStream;
import android.app.Activity;
import android.util.Log;
import android.text.Html;
import android.os.Bundle;
import android.view.View;
import android.widget.Toast;
import android.view.KeyEvent;
import android.widget.Button;
import android.webkit.WebView;
import android.view.ViewGroup;
import android.text.InputType;
import android.widget.TextView;
import android.widget.EditText;
import android.widget.ScrollView;
import android.app.ProgressDialog;
import android.view.WindowManager;
import android.widget.LinearLayout;
import android.content.SharedPreferences;
import android.view.View.OnClickListener;
import android.view.inputmethod.EditorInfo;
import android.preference.PreferenceManager;
import android.widget.TextView.OnEditorActionListener;
import org.msgpack.MessagePack;
import org.msgpack.type.ArrayValue;
import org.msgpack.type.Value;
import org.msgpack.type.MapValue;
import net.progval.android.andquote.utils.MsgPackUtils;

/**
 * Activity that displays a single quote of a site: its content, score, image
 * (when present), comments, and an optional prev/next navigation bar.
 * The quote to show is passed in via Intent extras ("site_id", "site_name",
 * "quote"); extra data (author, comments, image) is fetched asynchronously.
 */
public class QuoteActivity extends Activity implements OnClickListener {

    // NOTE(review): static preference handle, (re)assigned on every onCreate
    // and read through instance syntax below — consider making it non-static.
    private static SharedPreferences settings;
    // Root content layout (either the activity content view directly, or the
    // inner layout of a ScrollView for text-only quotes — see onCreate).
    private LinearLayout layout;
    private OpenQuoteApi api;
    private OpenQuoteApi.State state;          // site id/name of the displayed quote
    private OpenQuoteApi.Quote quote;          // quote currently displayed
    private TextView contentview, scoreview;   // quote text and score labels
    private WebView imageview;                 // lazily created in setImage()
    private LinearLayout comments, navigation;
    private EditText gotopage;                 // quote-id input in the navigation bar
    private static MessagePack messagePack = new MessagePack();

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        Bundle extra = this.getIntent().getExtras();
        QuoteActivity.settings = PreferenceManager.getDefaultSharedPreferences(this);
        this.state = new OpenQuoteApi.State();
        this.state.site_id = extra.getString("site_id");
        this.state.site_name = extra.getString("site_name");
        this.quote = OpenQuoteApi.Quote.unserialize(extra.getString("quote"));
        this.setTitle(this.state.site_name + " - " + this.quote.getId());
        this.api = new OpenQuoteApi(this.settings.getString("api.url", ""));
        LinearLayout layout = new LinearLayout(this);
        // NOTE(review): "no image" is signalled by the literal string "null"
        // (not a null reference) in this branch — confirm in OpenQuoteApi.Quote;
        // loadQuote(Quote) below uses a reference check instead.
        if (!quote.getImageUrl().equals("null")) {
            // Image quote: plain layout; the WebView scrolls by itself.
            this.layout = layout;
            this.setContentView(layout);
        } else {
            // Text-only quote: wrap the content layout in a ScrollView.
            ScrollView scrollview = new ScrollView(this);
            layout.addView(scrollview);
            this.setContentView(layout);
            this.layout = new LinearLayout(this);
            scrollview.addView(this.layout);
        }
        this.layout.setOrientation(this.layout.VERTICAL);
        // Debug logging — NOTE(review): consider removing for release builds.
        Log.d("AndQuote", quote.getImageUrl());
        Log.d("AndQuote", Boolean.valueOf(this.settings.getBoolean("nav.quote.enable", false)).toString());
        Log.d("AndQuote", Boolean.valueOf(this.settings.getBoolean("nav.quote.img_enable", true)).toString());
        // Inflate the navigation bar depending on whether an image is present
        // and the matching preference flag.
        if (quote.getImageUrl().equals("null")) {
            Log.d("AndQuote", "one");
            if (this.settings.getBoolean("nav.quote.enable", false))
                this.inflateNavigation();
        } else {
            Log.d("AndQuote", "one");
            if (this.settings.getBoolean("nav.quote.img_enable", true))
                this.inflateNavigation();
        }
        this.contentview = new TextView(this);
        this.contentview.setText(Html.fromHtml(this.quote.getContent()));
        this.layout.addView(this.contentview);
        this.scoreview = new TextView(this);
        this.scoreview.setText(this.quote.getScore());
        this.layout.addView(this.scoreview);
        this.fetchExtraData();
    }

    /** Builds the "<= [id] =>" navigation bar and appends it to the layout. */
    private void inflateNavigation() {
        this.navigation = new LinearLayout(this);
        Button prev = new Button(this);
        prev.setText("<=");
        prev.setTag("prev");
        prev.setOnClickListener(this);
        Button next = new Button(this);
        next.setText("=>");
        next.setTag("next");
        next.setOnClickListener(this);
        this.gotopage = new EditText(this);
        this.gotopage.setImeOptions(EditorInfo.IME_ACTION_GO);
        this.gotopage.setInputType(InputType.TYPE_CLASS_NUMBER);
        // Keep initial focus on the layout rather than the EditText — presumably
        // to avoid the soft keyboard opening on activity start; confirm.
        this.layout.setDescendantFocusability(ViewGroup.FOCUS_BEFORE_DESCENDANTS);
        this.layout.setFocusableInTouchMode(true);
        this.gotopage.setText(String.format("%d", quote.getId()));
        // "Go" IME action loads the typed quote id.
        this.gotopage.setOnEditorActionListener(
            new OnEditorActionListener() {
                public boolean onEditorAction(TextView v, int actionId, KeyEvent event) {
                    boolean handled = false;
                    if (actionId == EditorInfo.IME_ACTION_GO) {
                        try {
                            int id = Integer.parseInt(((EditText) v).getText().toString());
                            QuoteActivity.this.loadQuote(id);
                            handled = true;
                        } catch (NumberFormatException e) {
                            // Never trust user input
                        }
                    }
                    return handled;
                }
            });
        this.navigation.addView(prev);
        this.navigation.addView(this.gotopage);
        this.navigation.addView(next);
        this.layout.addView(this.navigation);
    }

    /**
     * Fetches the quote with the given id from the API (showing a progress
     * dialog meanwhile) and renders it on success.
     */
    public void loadQuote(int id) {
        final ProgressDialog dialog = ProgressDialog.show(QuoteActivity.this, "", this.getResources().getString(R.string.siteactivity_loading_quotes), true);
        // Local listener: parses the msgpack response and hands the quote to the UI.
        class QuoteRenderer implements OpenQuoteApi.ProgressListener {
            private OpenQuoteApi api;
            public QuoteRenderer(OpenQuoteApi api) {
                this.api = api;
            }
            public void onProgressUpdate(int progress) {}
            public void onFail(int status_message) {
                dialog.dismiss();
                Toast.makeText(QuoteActivity.this, status_message, Toast.LENGTH_LONG).show();
            }
            public void onSuccess(InputStream stream) {
                try {
                    MapValue map = messagePack.read(stream).asMapValue();
                    if (!MsgPackUtils.in(map, "quote"))
                        this.onFail(R.string.quoteactivity_doesnotexist);
                    else {
                        OpenQuoteApi.Quote quote = new OpenQuoteApi.Quote(MsgPackUtils.get(map, "quote").asMapValue());
                        QuoteActivity.this.loadQuote(quote);
                    }
                } catch (java.io.IOException e) {
                    // NOTE(review): parse errors are only printed; the dialog is
                    // still dismissed below but the user gets no feedback.
                    e.printStackTrace();
                }
                dialog.dismiss();
            }
        }
        this.api.safeGet(new QuoteRenderer(this.api), String.format("/%s/quotes/show/%d/", this.state.site_id, id));
    }

    /** Handles prev/next clicks from the navigation bar (dispatch on the button tag). */
    public void onClick(View v) {
        String tag = (String) ((Button) v).getTag();
        if (tag.equals("prev"))
            this.loadQuote(this.quote.getId()-1);
        else if (tag.equals("next"))
            this.loadQuote(this.quote.getId()+1);
    }

    /** Replaces the displayed quote with an already-fetched one and refreshes the views. */
    public void loadQuote(OpenQuoteApi.Quote quote) {
        this.quote = quote;
        this.contentview.setText(Html.fromHtml(this.quote.getContent()));
        // NOTE(review): gotopage is only created by inflateNavigation(); this can
        // NPE when navigation is disabled by preference — verify call paths.
        this.gotopage.setText(String.format("%d", this.quote.getId()));
        this.scoreview.setText(this.quote.getScore());
        this.setTitle(this.state.site_name + " - " + this.quote.getId());
        // NOTE(review): elsewhere in this class the "no image" sentinel is the
        // string "null", so this reference check likely always passes — confirm.
        if (quote.getImageUrl() != null) {
            this.setImage(quote.getImageUrl());
        }
        if (!quote.getImageUrl().equals("null") && this.settings.getBoolean("nav.img_enable", false))
            this.gotopage.setText(String.format("%d", quote.getId()));
    }

    /** Asynchronously fetches author/score, comments, and image for the current quote. */
    private void fetchExtraData() {
        class QuoteLoader implements OpenQuoteApi.ProgressListener {
            private OpenQuoteApi api;
            private TextView scoreview;
            public QuoteLoader(OpenQuoteApi api, TextView scoreview) {
                this.api = api;
                this.scoreview = scoreview;
            }
            public void onProgressUpdate(int progress) {}
            public void onFail(int status_message) {
                Toast.makeText(QuoteActivity.this, status_message, Toast.LENGTH_LONG).show();
            }
            public void onSuccess(InputStream stream) {
                try {
                    MapValue map = messagePack.read(stream).asMapValue();
                    OpenQuoteApi.Quote quote = new OpenQuoteApi.Quote(MsgPackUtils.get(map, "quote").asMapValue());
                    QuoteActivity.this.setQuote(quote);
                    // Append the author to the score line when known.
                    if (quote.getAuthor() != null)
                        this.scoreview.setText(quote.getScore() + " -- " + quote.getAuthor());
                    QuoteActivity.this.renderComments(OpenQuoteApi.Comment.parseComments(MsgPackUtils.get(map, "comments").asArrayValue()));
                    if (quote.getImageUrl() != null) {
                        QuoteActivity.this.setImage(quote.getImageUrl());
                    }
                } catch (java.io.IOException e) {
                    e.printStackTrace();
                }
            }
        }
        this.api.safeGet(new QuoteLoader(this.api, this.scoreview), String.format("/%s/quotes/show/%d/", this.state.site_id, this.quote.getId()));
    }

    /** Stores the refreshed quote object (does not touch the views). */
    public void setQuote(OpenQuoteApi.Quote quote) {
        this.quote = quote;
    }

    /** Lazily creates the image WebView, attaches it to the layout, and loads the URL. */
    public void setImage(String url) {
        if (this.imageview == null) {
            this.imageview = new WebView(this);
            LinearLayout.LayoutParams params = new LinearLayout.LayoutParams(
                LinearLayout.LayoutParams.FILL_PARENT,
                LinearLayout.LayoutParams.FILL_PARENT);
            this.imageview.setLayoutParams(params);
            this.imageview.setScrollBarStyle(View.SCROLLBARS_INSIDE_OVERLAY);
            this.setContentView(this.layout); // Kick the scrollview out
            this.layout.addView(this.imageview);
        }
        this.imageview.loadUrl(url);
    }

    /** Creates the comments container and renders the top-level comment tree into it. */
    public void renderComments(OpenQuoteApi.Comment[] comments) {
        this.comments = new LinearLayout(this);
        this.comments.setOrientation(this.comments.VERTICAL);
        this.layout.addView(this.comments);
        this.renderComments(comments, this.comments);
    }

    /** Renders each comment of the array into the given layout. */
    public void renderComments(OpenQuoteApi.Comment[] comments, LinearLayout layout) {
        for (int i=0; i<comments.length; i++)
            this.renderComment(comments[i], layout);
    }

    /** Renders one comment and, recursively, its replies in a nested layout. */
    public void renderComment(OpenQuoteApi.Comment comment, LinearLayout layout) {
        TextView textview = new TextView(this);
        textview.setText(comment.getContent());
        layout.addView(textview);
        LinearLayout repliesLayout = new LinearLayout(this);
        // NOTE(review): the padding is applied to the parent layout, not to
        // repliesLayout — looks like the replies were meant to be indented; confirm.
        layout.setPadding(20, 0, 0, 0);
        layout.addView(repliesLayout);
        repliesLayout.setOrientation(this.layout.VERTICAL);
        this.renderComments(comment.getReplies(), repliesLayout);
    }
}
/* * Copyright 2014 Mario Guggenberger <mg@protyposis.net> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.protyposis.android.spectaculum; import android.content.Context; import android.graphics.Bitmap; import android.graphics.SurfaceTexture; import android.opengl.*; import android.os.Handler; import android.util.AttributeSet; import android.util.Log; import android.view.GestureDetector; import android.view.MotionEvent; import android.view.ScaleGestureDetector; import android.view.SurfaceHolder; import net.protyposis.android.spectaculum.effects.Effect; import net.protyposis.android.spectaculum.effects.EffectException; import net.protyposis.android.spectaculum.effects.Parameter; import net.protyposis.android.spectaculum.effects.ParameterHandler; import net.protyposis.android.spectaculum.gles.*; /** * Created by Mario on 14.06.2014. 
*/
public class SpectaculumView extends GLSurfaceView implements
        SurfaceTexture.OnFrameAvailableListener,
        Effect.Listener, GLRenderer.EffectEventListener, GLRenderer.OnFrameCapturedCallback {

    private static final String TAG = SpectaculumView.class.getSimpleName();

    // Re-exported listener interfaces so callers do not need to depend on GLRenderer.
    public interface EffectEventListener extends GLRenderer.EffectEventListener {}
    public interface OnFrameCapturedCallback extends GLRenderer.OnFrameCapturedCallback {}

    private GLRenderer mRenderer;
    private InputSurfaceHolder mInputSurfaceHolder;
    // Handler created on the UI thread; used to marshal renderer callbacks back to it.
    private Handler mRunOnUiThreadHandler = new Handler();
    private ScaleGestureDetector mScaleGestureDetector;
    private GestureDetector mGestureDetector;

    private EffectEventListener mEffectEventListener;
    private OnFrameCapturedCallback mOnFrameCapturedCallback;
    private PipelineResolution mPipelineResolution = PipelineResolution.SOURCE;

    private float mZoomLevel = 1.0f;        // 1.0 = no zoom; only zoom-ins allowed
    private float mZoomSnappingRange = 0.02f; // snap zoom to 1.0 within this range
    private float mPanX;
    private float mPanY;
    private float mPanSnappingRange = 0.02f;  // snap pan to 0 within this range
    private boolean mTouchEnabled = false;    // gestures disabled by default

    // Source image resolution; drives onMeasure aspect-ratio handling.
    protected int mImageWidth;
    protected int mImageHeight;

    public SpectaculumView(Context context) {
        super(context);
        init(context);
    }

    public SpectaculumView(Context context, AttributeSet attrs) {
        super(context, attrs);
        init(context);
    }

    /** Shared constructor body: renderer, input surface holder, gesture detectors. */
    private void init(Context context) {
        if(isInEditMode()) {
            // do not start renderer in layout editor
            return;
        }
        if(!net.protyposis.android.spectaculum.gles.GLUtils.isGlEs2Supported(context)) {
            Log.e(TAG, "GLES 2.0 is not supported");
            return;
        }
        LibraryHelper.setContext(context);
        mRenderer = new GLRenderer();
        mRenderer.setOnExternalSurfaceTextureCreatedListener(mExternalSurfaceTextureCreatedListener);
        mRenderer.setEffectEventListener(mRendererEffectEventListener);
        mInputSurfaceHolder = new InputSurfaceHolder();
        setEGLContextClientVersion(2);
        setRenderer(mRenderer);
        setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
        // TODO setPreserveEGLContextOnPause(true);
        mScaleGestureDetector = new ScaleGestureDetector(context,
                new ScaleGestureDetector.SimpleOnScaleGestureListener() {
            @Override
            public boolean onScale(ScaleGestureDetector detector) {
                mZoomLevel *= detector.getScaleFactor();
                // Snap to exactly 1.0 when within the snapping range.
                if(LibraryHelper.isBetween(mZoomLevel, 1-mZoomSnappingRange, 1+mZoomSnappingRange)) {
                    mZoomLevel = 1.0f;
                }
                // limit zooming to magnification zooms (zoom-ins)
                if(mZoomLevel < 1.0f) {
                    mZoomLevel = 1.0f;
                }
                setZoom(mZoomLevel);
                return true;
            }
        });
        mGestureDetector = new GestureDetector(context,
                new GestureDetector.SimpleOnGestureListener() {
            @Override
            public boolean onScroll(MotionEvent e1, MotionEvent e2, float distanceX, float distanceY) {
                // divide by zoom level to adjust panning speed to zoomed picture size
                // multiply by fixed scaling factor to compensate for panning lag
                mPanX += distanceX / getWidth() / mZoomLevel * 1.2f;
                mPanY += distanceY / getHeight() / mZoomLevel * 1.2f;
                // Snap pan back to center within a zoom-scaled range.
                float panSnappingRange = mPanSnappingRange / mZoomLevel;
                if(LibraryHelper.isBetween(mPanX, -panSnappingRange, +panSnappingRange)) {
                    mPanX = 0;
                }
                if(LibraryHelper.isBetween(mPanY, -panSnappingRange, +panSnappingRange)) {
                    mPanY = 0;
                }
                // limit panning to the texture bounds so it always covers the complete view
                float maxPanX = Math.abs((1.0f / mZoomLevel) - 1.0f);
                float maxPanY = Math.abs((1.0f / mZoomLevel) - 1.0f);
                mPanX = LibraryHelper.clamp(mPanX, -maxPanX, maxPanX);
                mPanY = LibraryHelper.clamp(mPanY, -maxPanY, maxPanY);
                setPan(mPanX, mPanY);
                return true;
            }

            @Override
            public boolean onDoubleTap(MotionEvent e) {
                // Double-tap resets zoom and pan to the defaults.
                mZoomLevel = 1;
                mPanX = 0;
                mPanY = 0;
                setZoom(mZoomLevel);
                setPan(mPanX, mPanY);
                return true;
            }
        });
    }

    /**
     * Sets the zoom factor of the texture in the view. 1.0 means no zoom, 2.0 2x zoom, etc.
     */
    public void setZoom(float zoomFactor) {
        mZoomLevel = zoomFactor;
        mRenderer.setZoomLevel(mZoomLevel);
        requestRender(GLRenderer.RenderRequest.GEOMETRY);
    }

    /**
     * Gets the zoom level.
     * @see #setZoom(float) for an explanation of the value
     * @return the current zoom factor
     */
    public float getZoomLevel() {
        return mZoomLevel;
    }

    /**
     * Sets the panning of the texture in the view. (0.0, 0.0) centers the texture and means no
     * panning, (-1.0, -1.0) moves the texture to the lower right quarter.
     */
    public void setPan(float x, float y) {
        mPanX = x;
        mPanY = y;
        // NOTE(review): only the X component is negated when forwarded to the
        // renderer — presumably a view-to-texture coordinate flip; confirm against
        // GLRenderer.setPan.
        mRenderer.setPan(-mPanX, mPanY);
        requestRender(GLRenderer.RenderRequest.GEOMETRY);
    }

    /**
     * Gets the horizontal panning. Zero means centered, positive is to the left.
     */
    public float getPanX() {
        return mPanX;
    }

    /**
     * Gets the vertical panning. Zero means centered, positive is to the bottom.
     */
    public float getPanY() {
        return mPanY;
    }

    /**
     * Enables or disables touch zoom/pan gestures. When disabled, a parent container (e.g. an activity)
     * can still pass touch events to this view's {@link #onTouchEvent(MotionEvent)} to process
     * zoom/pan gestures.
     * @see #isTouchEnabled()
     */
    public void setTouchEnabled(boolean enabled) {
        mTouchEnabled = enabled;
    }

    /**
     * Checks if touch gestures are enabled. Touch gestures are disabled by default.
     * @see #setTouchEnabled(boolean)
     */
    public boolean isTouchEnabled() {
        return mTouchEnabled;
    }

    /**
     * Resizes the video view according to the video size to keep aspect ratio.
     * Code copied from {@link android.widget.VideoView#onMeasure(int, int)}.
     */
    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        // NOTE(review): debug logging left in a hot layout path — consider removing.
        Log.i("@@@@", "onMeasure(" + MeasureSpec.toString(widthMeasureSpec) + ", " + MeasureSpec.toString(heightMeasureSpec) + ")");
        int width = getDefaultSize(mImageWidth, widthMeasureSpec);
        int height = getDefaultSize(mImageHeight, heightMeasureSpec);
        if (mImageWidth > 0 && mImageHeight > 0) {
            int widthSpecMode = MeasureSpec.getMode(widthMeasureSpec);
            int widthSpecSize = MeasureSpec.getSize(widthMeasureSpec);
            int heightSpecMode = MeasureSpec.getMode(heightMeasureSpec);
            int heightSpecSize = MeasureSpec.getSize(heightMeasureSpec);
            if (widthSpecMode == MeasureSpec.EXACTLY && heightSpecMode == MeasureSpec.EXACTLY) {
                // the size is fixed
                width = widthSpecSize;
                height = heightSpecSize;
                // for compatibility, we adjust size based on aspect ratio
                if ( mImageWidth * height < width * mImageHeight) {
                    //Log.i("@@@", "image too wide, correcting");
                    width = height * mImageWidth / mImageHeight;
                } else if ( mImageWidth * height > width * mImageHeight) {
                    //Log.i("@@@", "image too tall, correcting");
                    height = width * mImageHeight / mImageWidth;
                }
            } else if (widthSpecMode == MeasureSpec.EXACTLY) {
                // only the width is fixed, adjust the height to match aspect ratio if possible
                width = widthSpecSize;
                height = width * mImageHeight / mImageWidth;
                if (heightSpecMode == MeasureSpec.AT_MOST && height > heightSpecSize) {
                    // couldn't match aspect ratio within the constraints
                    height = heightSpecSize;
                }
            } else if (heightSpecMode == MeasureSpec.EXACTLY) {
                // only the height is fixed, adjust the width to match aspect ratio if possible
                height = heightSpecSize;
                width = height * mImageWidth / mImageHeight;
                if (widthSpecMode == MeasureSpec.AT_MOST && width > widthSpecSize) {
                    // couldn't match aspect ratio within the constraints
                    width = widthSpecSize;
                }
            } else {
                // neither the width nor the height are fixed, try to use actual video size
                width = mImageWidth;
                height = mImageHeight;
                if (heightSpecMode == MeasureSpec.AT_MOST && height > heightSpecSize) {
                    // too tall, decrease both width and height
                    height = heightSpecSize;
                    width = height * mImageWidth / mImageHeight;
                }
                if (widthSpecMode == MeasureSpec.AT_MOST && width > widthSpecSize) {
                    // too wide, decrease both width and height
                    width = widthSpecSize;
                    height = width * mImageHeight / mImageWidth;
                }
            }
        } else {
            // no size yet, just adopt the given spec sizes
        }
        setMeasuredDimension(width, height);
    }

    @Override
    public boolean onTouchEvent(MotionEvent event) {
        /*
         * NOTE: These calls should not be simplified to a logical chain, because the evaluation
         * would stop at the first true value and not execute the following functions.
         */
        boolean event1 = mScaleGestureDetector.onTouchEvent(event);
        boolean event2 = mGestureDetector.onTouchEvent(event);
        return event1 || event2;
    }

    @Override
    public boolean dispatchTouchEvent(MotionEvent event) {
        if(!mTouchEnabled) {
            // Touch events are disabled and we return false to route all events to the parent
            return false;
        }
        return super.dispatchTouchEvent(event);
    }

    /**
     * Implement this method to receive the input surface holder when it is ready to be used.
     * The input surface holder holds the surface and surface texture to which input data, i.e. image
     * data from some source that should be processed and displayed, should be written to display
     * it in the view.
     *
     * External callers should add a callback to the holder through {@link InputSurfaceHolder#addCallback(InputSurfaceHolder.Callback)}
     * to be notified about this event in {@link InputSurfaceHolder.Callback#surfaceCreated(InputSurfaceHolder)}.
     *
     * @param inputSurfaceHolder the input surface holder which holds the surface where image data should be written to
     */
    public void onInputSurfaceCreated(InputSurfaceHolder inputSurfaceHolder) {
        // nothing to do here
    }

    /**
     * Gets the input surface holder that holds the surface where image data should be written to
     * for processing and display. The holder is always available but only holds an actual surface
     * after {@link #onInputSurfaceCreated(InputSurfaceHolder)} respectively
     * {@link InputSurfaceHolder.Callback#surfaceCreated(InputSurfaceHolder)} have been called.
     *
     * The input surface holder holds the input surface (texture) that is used to write image data
     * into the processing pipeline, opposed to the surface holder from {@link #getHolder()} that holds
     * the surface to which the final result of the processing pipeline will be written to for display.
     *
     * @return the input surface holder or null if it is not available yet
     */
    public InputSurfaceHolder getInputHolder() {
        return mInputSurfaceHolder;
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        // Delete the external texture, else it stays in RAM
        if(getInputHolder().getExternalSurfaceTexture() != null) {
            getInputHolder().getExternalSurfaceTexture().delete();
            getInputHolder().update(null);
        }
        super.surfaceDestroyed(holder);
    }

    /**
     * Adds one or more effects to the view. Added effects can then be activated/selected by calling
     * {@link #selectEffect(int)}. The effect indices start at zero and are in the order that they
     * are added to the view.
     * @param effects effects to add
     */
    public void addEffect(final Effect... effects) {
        for(Effect effect : effects) {
            effect.addListener(this);
            effect.setParameterHandler(new ParameterHandler(this));
        }
        // Renderer state must only be mutated on the GL thread, hence queueEvent.
        queueEvent(new Runnable() {
            @Override
            public void run() {
                mRenderer.addEffect(effects);
            }
        });
    }

    /**
     * Selects/activates the effect with the given index as it has been added through {@link #addEffect(Effect...)}.
     * @param index the index of the effect to activate
     */
    public void selectEffect(final int index) {
        queueEvent(new Runnable() {
            @Override
            public void run() {
                mRenderer.selectEffect(index);
                requestRender(GLRenderer.RenderRequest.EFFECT);
            }
        });
    }

    /**
     * Gets called when an effect has been initialized after being selected for the first time
     * with {@link #selectEffect(int)}. Effect initialization happens asynchronously and can take
     * some time when a lot of data (framebuffers, textures, ...) is loaded.
     * Can be overwritten in subclasses but must be called through. External callers should use
     * {@link #setEffectEventListener(EffectEventListener)}.
     * @param index the index of the initialized effect
     * @param effect the initialized effect
     */
    @Override
    public void onEffectInitialized(int index, Effect effect) {
        if(mEffectEventListener != null) {
            mEffectEventListener.onEffectInitialized(index, effect);
        }
        requestRender(GLRenderer.RenderRequest.EFFECT);
    }

    /**
     * Gets called when an effect has been successfully selected with {@link #selectEffect(int)}.
     * Can be overwritten in subclasses but must be called through. External callers should use
     * {@link #setEffectEventListener(EffectEventListener)}.
     * @param index the index of the selected effect
     * @param effect the selected effect
     */
    @Override
    public void onEffectSelected(int index, Effect effect) {
        if(mEffectEventListener != null) {
            mEffectEventListener.onEffectSelected(index, effect);
        }
    }

    /**
     * Gets called when an effect selection with {@link #selectEffect(int)} fails.
     * Can be overwritten in subclasses but must be called through. External callers should use
     * {@link #setEffectEventListener(EffectEventListener)}.
     * @param index the index of the failed effect
     * @param effect the failed effect
     */
    @Override
    public void onEffectError(int index, Effect effect, EffectException e) {
        Log.e(TAG, "effect error", e);
        if(mEffectEventListener != null) {
            mEffectEventListener.onEffectError(index, effect, e);
        }
    }

    /**
     * Sets an event listener that gets called when effect-related event happens.
     * @param listener the event listener to be called on an event
     */
    public void setEffectEventListener(EffectEventListener listener) {
        mEffectEventListener = listener;
    }

    /**
     * Gets called when a parameter of an effect has changed. This method then triggers a fresh
     * rendering of the effect. Can be overridden in subclasses but must be called through.
     * @param effect the effect of which a parameter value has changed
     * @see net.protyposis.android.spectaculum.effects.Effect.Listener
     */
    @Override
    public void onEffectChanged(Effect effect) {
        requestRender(GLRenderer.RenderRequest.EFFECT);
    }

    /**
     * Gets called when a parameter is added to an effect.
     * Can be overridden in subclasses but must be called through.
     * @param effect the effect to which a parameter was added
     * @param parameter the added parameter
     * @see net.protyposis.android.spectaculum.effects.Effect.Listener
     */
    @Override
    public void onParameterAdded(Effect effect, Parameter parameter) {
        // nothing to do here
    }

    /**
     * Gets called when a parameter is removed from an effect.
     * Can be overridden in subclasses but must be called through.
     * @param effect the effect from which a parameter was removed
     * @param parameter the removed parameter
     * @see net.protyposis.android.spectaculum.effects.Effect.Listener
     */
    @Override
    public void onParameterRemoved(Effect effect, Parameter parameter) {
        // nothing to do here
    }

    /**
     * Gets called when a new image frame has been written to the surface texture and requests a
     * fresh rendering of the view. The texture can be obtained through {@link #onInputSurfaceCreated(InputSurfaceHolder)}
     * or {@link #getInputHolder()}.
     * Can be overridden in subclasses but must be called through.
     * @param surfaceTexture the updated surface texture
     */
    @Override
    public void onFrameAvailable(SurfaceTexture surfaceTexture) {
        requestRender(GLRenderer.RenderRequest.ALL);
    }

    /**
     * Requests a render pass of the specified render pipeline section.
     * @param renderRequest specifies the pipeline section to be rendered
     */
    protected void requestRender(final GLRenderer.RenderRequest renderRequest) {
        // Set the request on the GL thread before triggering the actual render.
        queueEvent(new Runnable() {
            @Override
            public void run() {
                mRenderer.setRenderRequest(renderRequest);
                requestRender();
            }
        });
    }

    /**
     * Requests a capture of the current frame on the view. The frame is asynchronously requested
     * from the renderer and will be passed back on the UI thread to {@link #onFrameCaptured(Bitmap)}
     * and the event listener that can be set with {@link #setOnFrameCapturedCallback(OnFrameCapturedCallback)}.
     */
    public void captureFrame() {
        queueEvent(new Runnable() {
            @Override
            public void run() {
                mRenderer.saveCurrentFrame(new GLRenderer.OnFrameCapturedCallback() {
                    @Override
                    public void onFrameCaptured(final Bitmap bitmap) {
                        // Hand the bitmap back to the UI thread.
                        mRunOnUiThreadHandler.post(new Runnable() {
                            @Override
                            public void run() {
                                SpectaculumView.this.onFrameCaptured(bitmap);
                            }
                        });
                    }
                });
            }
        });
    }

    /**
     * Receives a captured frame from the renderer. Can be overwritten in subclasses but must be
     * called through. External callers should use {@link #setOnFrameCapturedCallback(OnFrameCapturedCallback)}.
     */
    @Override
    public void onFrameCaptured(Bitmap bitmap) {
        if(mOnFrameCapturedCallback != null) {
            mOnFrameCapturedCallback.onFrameCaptured(bitmap);
        }
    }

    /**
     * Sets a callback event handler that receives a bitmap of the captured frame.
     */
    public void setOnFrameCapturedCallback(OnFrameCapturedCallback callback) {
        mOnFrameCapturedCallback = callback;
    }

    /**
     * Sets the resolution mode of the processing pipeline.
     * @see PipelineResolution
     */
    public void setPipelineResolution(PipelineResolution resolution) {
        mPipelineResolution = resolution;
    }

    /**
     * Gets the configured resolution mode of the processing pipeline.
     */
    public PipelineResolution getPipelineResolution() {
        return mPipelineResolution;
    }

    /**
     * Sets the resolution of the source data and recomputes the layout. This implicitly also sets
     * the resolution of the view output surface if pipeline resolution mode {@link PipelineResolution#SOURCE}
     * is set. In SOURCE mode, output will therefore be computed in the input resolution and then
     * at the very end scaled (most often downscaled) to fit the view in the layout.
     *
     * TODO decouple input, processing and output resolution
     *
     * @param width the width of the input image data
     * @param height the height of the input image data
     */
    public void updateResolution(int width, int height) {
        if(width == mImageWidth && height == mImageHeight) {
            // Don't do anything if resolution has stayed the same
            return;
        }
        mImageWidth = width;
        mImageHeight = height;
        // If desired, set output resolution to source resolution
        if (width != 0 && height != 0 && mPipelineResolution == PipelineResolution.SOURCE) {
            getHolder().setFixedSize(width, height);
        }
        // Resize view according to the new size to fit the layout
        requestLayout();
    }

    // Receives the renderer's external surface texture (created on the GL thread)
    // and publishes it to the input surface holder on the UI thread.
    private GLRenderer.OnExternalSurfaceTextureCreatedListener mExternalSurfaceTextureCreatedListener =
            new GLRenderer.OnExternalSurfaceTextureCreatedListener() {
        @Override
        public void onExternalSurfaceTextureCreated(final ExternalSurfaceTexture surfaceTexture) {
            // dispatch event to UI thread
            mRunOnUiThreadHandler.post(new Runnable() {
                @Override
                public void run() {
                    // Create an input surface holder and call the event handler
                    mInputSurfaceHolder.update(surfaceTexture);
                    onInputSurfaceCreated(mInputSurfaceHolder);
                }
            });
            surfaceTexture.setOnFrameAvailableListener(SpectaculumView.this);
        }
    };

    /**
     * Effect event listener that transfers the events to the UI thread.
     */
    private EffectEventListener mRendererEffectEventListener = new EffectEventListener() {
        @Override
        public void onEffectInitialized(final int index, final Effect effect) {
            mRunOnUiThreadHandler.post(new Runnable() {
                @Override
                public void run() {
                    SpectaculumView.this.onEffectInitialized(index, effect);
                }
            });
        }

        @Override
        public void onEffectSelected(final int index, final Effect effect) {
            mRunOnUiThreadHandler.post(new Runnable() {
                @Override
                public void run() {
                    SpectaculumView.this.onEffectSelected(index, effect);
                }
            });
        }

        @Override
        public void onEffectError(final int index, final Effect effect, final EffectException e) {
            mRunOnUiThreadHandler.post(new Runnable() {
                @Override
                public void run() {
                    SpectaculumView.this.onEffectError(index, effect, e);
                }
            });
        }
    };
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.internal.offheap; import static org.junit.Assert.*; import static org.mockito.Mockito.mock; import java.nio.ByteBuffer; import org.assertj.core.api.JUnitSoftAssertions; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.ExpectedException; import org.apache.geode.internal.cache.EntryEventImpl; import org.apache.geode.internal.offheap.MemoryBlock.State; import org.apache.geode.test.junit.categories.UnitTest; @Category(UnitTest.class) public class TinyMemoryBlockJUnitTest { private MemoryAllocatorImpl ma; private OutOfOffHeapMemoryListener ooohml; private OffHeapMemoryStats stats; private Slab[] slabs; @Rule public ExpectedException expectedException = ExpectedException.none(); @Rule public JUnitSoftAssertions softly = new JUnitSoftAssertions(); @Before public void setUp() throws Exception { slabs = new Slab[] {new SlabImpl((int) OffHeapStorage.MIN_SLAB_SIZE), new SlabImpl((int) OffHeapStorage.MIN_SLAB_SIZE), new SlabImpl((int) OffHeapStorage.MIN_SLAB_SIZE)}; ooohml = mock(OutOfOffHeapMemoryListener.class); 
stats = mock(OffHeapMemoryStats.class); ma = (MemoryAllocatorImpl) MemoryAllocatorImpl.createForUnitTest(ooohml, stats, slabs); } @After public void tearDown() throws Exception { MemoryAllocatorImpl.freeOffHeapMemory(); } protected Object getValue() { return Long.valueOf(Long.MAX_VALUE); } private StoredObject createChunk(byte[] v, boolean isSerialized, boolean isCompressed) { StoredObject chunk = (StoredObject) ma.allocateAndInitialize(v, isSerialized, isCompressed); return chunk; } private StoredObject createValueAsSerializedStoredObject(Object value, boolean isCompressed) { byte[] valueInSerializedByteArray = EntryEventImpl.serialize(value); boolean isSerialized = true; StoredObject createdObject = createChunk(valueInSerializedByteArray, isSerialized, isCompressed); return createdObject; } private byte[] convertValueToByteArray(Object value) { return ByteBuffer.allocate(Long.SIZE / Byte.SIZE).putLong((Long) value).array(); } private StoredObject createValueAsUnserializedStoredObject(Object value, boolean isCompressed) { byte[] valueInByteArray; if (value instanceof Long) { valueInByteArray = convertValueToByteArray(value); } else { valueInByteArray = (byte[]) value; } boolean isSerialized = false; StoredObject createdObject = createChunk(valueInByteArray, isSerialized, isCompressed); return createdObject; } @Test public void constructorReturnsNonNullMemoryBlock() { MemoryBlock mb = new TestableFreeListManager.TinyMemoryBlock(slabs[0].getMemoryAddress(), 0); softly.assertThat(mb).isNotNull(); } @Test public void stateAlwaysEqualsDeallocated() { MemoryBlock mb = new TestableFreeListManager.TinyMemoryBlock(slabs[0].getMemoryAddress(), 0); softly.assertThat(mb.getState()).isEqualTo(State.DEALLOCATED); } @Test public void getMemoryAddressReturnsAddressBlockWasContructedFrom() { MemoryBlock mb = new TestableFreeListManager.TinyMemoryBlock(slabs[0].getMemoryAddress(), 0); softly.assertThat(mb.getAddress()).isEqualTo(slabs[0].getMemoryAddress()); } @Test public void 
getBlockSizeReturnsReturnsSizeOfUnderlyingChunk() { MemoryBlock mb = new TestableFreeListManager.TinyMemoryBlock( new OffHeapStoredObject(slabs[0].getMemoryAddress(), slabs[0].getSize()).getAddress(), 0); softly.assertThat(mb.getBlockSize()).isEqualTo(slabs[0].getSize()); } @Test public void getNextBlockThrowsUnsupportedOperationException() { expectedException.expect(UnsupportedOperationException.class); MemoryBlock mb = new TestableFreeListManager.TinyMemoryBlock( new OffHeapStoredObject(slabs[0].getMemoryAddress(), slabs[0].getSize()).getAddress(), 0); mb.getNextBlock(); fail("getNextBlock failed to throw UnsupportedOperationException"); } @Test public void getSlabIdThrowsUnsupportedOperationException() { expectedException.expect(UnsupportedOperationException.class); MemoryBlock mb = new TestableFreeListManager.TinyMemoryBlock( new OffHeapStoredObject(slabs[0].getMemoryAddress(), slabs[0].getSize()).getAddress(), 0); mb.getSlabId(); fail("getSlabId failed to throw UnsupportedOperationException"); } @Test public void getFreeListIdReturnsIdBlockWasConstructedWith() { MemoryBlock mb0 = new TestableFreeListManager.TinyMemoryBlock( new OffHeapStoredObject(slabs[0].getMemoryAddress(), slabs[0].getSize()).getAddress(), 0); MemoryBlock mb1 = new TestableFreeListManager.TinyMemoryBlock( new OffHeapStoredObject(slabs[1].getMemoryAddress(), slabs[1].getSize()).getAddress(), 1); softly.assertThat(mb0.getFreeListId()).isEqualTo(0); softly.assertThat(mb1.getFreeListId()).isEqualTo(1); } @Test public void getRefCountReturnsZero() { MemoryBlock mb0 = new TestableFreeListManager.TinyMemoryBlock( new OffHeapStoredObject(slabs[0].getMemoryAddress(), slabs[0].getSize()).getAddress(), 0); MemoryBlock mb1 = new TestableFreeListManager.TinyMemoryBlock( new OffHeapStoredObject(slabs[1].getMemoryAddress(), slabs[1].getSize()).getAddress(), 1); softly.assertThat(mb0.getRefCount()).isEqualTo(0); softly.assertThat(mb1.getRefCount()).isEqualTo(0); } @Test public void getDataTypeReturnsNA() { 
Object obj = getValue(); boolean compressed = false; StoredObject storedObject0 = createValueAsSerializedStoredObject(obj, compressed); MemoryBlock mb = new TestableFreeListManager.TinyMemoryBlock(((MemoryBlock) storedObject0).getAddress(), 0); softly.assertThat(mb.getDataType()).isEqualTo("N/A"); } @Test public void getDataValueReturnsNull() { Object obj = getValue(); boolean compressed = false; StoredObject storedObject0 = createValueAsSerializedStoredObject(obj, compressed); MemoryBlock mb = new TestableFreeListManager.TinyMemoryBlock(((MemoryBlock) storedObject0).getAddress(), 0); softly.assertThat(mb.getDataValue()).isNull(); } @Test public void isSerializedReturnsFalse() { Object obj = getValue(); boolean compressed = false; StoredObject storedObject0 = createValueAsSerializedStoredObject(obj, compressed); StoredObject storedObject1 = createValueAsUnserializedStoredObject(obj, compressed); MemoryBlock mb0 = new TestableFreeListManager.TinyMemoryBlock(((MemoryBlock) storedObject0).getAddress(), 0); MemoryBlock mb1 = new TestableFreeListManager.TinyMemoryBlock(((MemoryBlock) storedObject1).getAddress(), 0); softly.assertThat(mb0.isSerialized()).isFalse(); softly.assertThat(mb1.isSerialized()).isFalse(); } @Test public void isCompressedReturnsFalse() { Object obj = getValue(); boolean compressed = false; StoredObject storedObject0 = createValueAsUnserializedStoredObject(obj, compressed); StoredObject storedObject1 = createValueAsUnserializedStoredObject(obj, compressed = true); MemoryBlock mb0 = new TestableFreeListManager.TinyMemoryBlock(((MemoryBlock) storedObject0).getAddress(), 0); MemoryBlock mb1 = new TestableFreeListManager.TinyMemoryBlock(((MemoryBlock) storedObject1).getAddress(), 0); softly.assertThat(mb0.isCompressed()).isFalse(); softly.assertThat(mb1.isCompressed()).isFalse(); } @Test public void equalsComparesAddressesOfTinyMemoryBlocks() { MemoryBlock mb0 = new TestableFreeListManager.TinyMemoryBlock(slabs[0].getMemoryAddress(), 0); MemoryBlock 
mb1 = new TestableFreeListManager.TinyMemoryBlock(slabs[0].getMemoryAddress(), 0); MemoryBlock mb2 = new TestableFreeListManager.TinyMemoryBlock(slabs[1].getMemoryAddress(), 0); softly.assertThat(mb0.equals(mb1)).isTrue(); softly.assertThat(mb0.equals(mb2)).isFalse(); } @Test public void equalsNotTinyMemoryBlockReturnsFalse() { MemoryBlock mb = new TestableFreeListManager.TinyMemoryBlock(slabs[0].getMemoryAddress(), 0); softly.assertThat(mb.equals(slabs[0])).isFalse(); } @Test public void hashCodeReturnsHashOfUnderlyingMemory() { MemoryBlock mb = new TestableFreeListManager.TinyMemoryBlock(slabs[0].getMemoryAddress(), 0); softly.assertThat(mb.hashCode()).isEqualTo( new OffHeapStoredObject(slabs[0].getMemoryAddress(), slabs[0].getSize()).hashCode()); } private static class TestableFreeListManager extends FreeListManager { TestableFreeListManager(MemoryAllocatorImpl ma, final Slab[] slabs) { super(ma, slabs); } } }
// Copyright 2017 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.common.options.processor; import static com.google.common.truth.Truth.assertAbout; import static com.google.testing.compile.JavaSourceSubjectFactory.javaSource; import com.google.common.io.Resources; import com.google.testing.compile.JavaFileObjects; import javax.tools.JavaFileObject; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** Unit tests for the compile-time checks in {@link OptionProcessor}. 
*/ @RunWith(JUnit4.class) public class OptionProcessorTest { private static JavaFileObject getFile(String pathToFile) { return JavaFileObjects.forResource( Resources.getResource( "com/google/devtools/common/options/processor/optiontestsources/" + pathToFile)); } @Test public void optionsInNonOptionBasesAreRejected() { assertAbout(javaSource()) .that(getFile("OptionInNonOptionBase.java")) .processedWith(new OptionProcessor()) .failsToCompile() .withErrorContaining( "@Option annotated fields can only be in classes that inherit from OptionsBase."); } @Test public void privatelyDeclaredOptionsAreRejected() { assertAbout(javaSource()) .that(getFile("PrivateOptionField.java")) .processedWith(new OptionProcessor()) .failsToCompile() .withErrorContaining("@Option annotated fields should be public."); } @Test public void protectedOptionsAreRejected() { assertAbout(javaSource()) .that(getFile("ProtectedOptionField.java")) .processedWith(new OptionProcessor()) .failsToCompile() .withErrorContaining("@Option annotated fields should be public."); } @Test public void staticOptionsAreRejected() { assertAbout(javaSource()) .that(getFile("StaticOptionField.java")) .processedWith(new OptionProcessor()) .failsToCompile() .withErrorContaining("@Option annotated fields should not be static."); } @Test public void finalOptionsAreRejected() { assertAbout(javaSource()) .that(getFile("FinalOptionField.java")) .processedWith(new OptionProcessor()) .failsToCompile() .withErrorContaining("@Option annotated fields should not be final."); } @Test public void namelessOptionsAreRejected() { assertAbout(javaSource()) .that(getFile("NamelessOption.java")) .processedWith(new OptionProcessor()) .failsToCompile() .withErrorContaining("Option must have an actual name."); } @Test public void badNamesAreRejected() { assertAbout(javaSource()) .that(getFile("BadNameForDocumentedOption.java")) .processedWith(new OptionProcessor()) .failsToCompile() .withErrorContaining( "Options that are used on the command 
line as flags must have names made from word " + "characters only."); assertAbout(javaSource()) .that(getFile("BadNameWithEqualsSign.java")) .processedWith(new OptionProcessor()) .failsToCompile() .withErrorContaining( "Options that are used on the command line as flags must have names made from word " + "characters only."); } @Test public void badNamesForHiddenOptionsPass() { assertAbout(javaSource()) .that(getFile("BadNameForInternalOption.java")) .processedWith(new OptionProcessor()) .compilesWithoutError(); } @Test public void deprecatedCategorySaysUndocumented() { assertAbout(javaSource()) .that(getFile("DeprecatedUndocumentedCategory.java")) .processedWith(new OptionProcessor()) .failsToCompile() .withErrorContaining( "Documentation level is no longer read from the option category. Category " + "\"undocumented\" is disallowed, see OptionMetadataTags for the relevant tags."); } @Test public void deprecatedCategorySaysHidden() { assertAbout(javaSource()) .that(getFile("DeprecatedHiddenCategory.java")) .processedWith(new OptionProcessor()) .failsToCompile() .withErrorContaining( "Documentation level is no longer read from the option category. Category " + "\"hidden\" is disallowed, see OptionMetadataTags for the relevant tags."); } @Test public void deprecatedCategorySaysInternal() { assertAbout(javaSource()) .that(getFile("DeprecatedInternalCategory.java")) .processedWith(new OptionProcessor()) .failsToCompile() .withErrorContaining( "Documentation level is no longer read from the option category. Category " + "\"internal\" is disallowed, see OptionMetadataTags for the relevant tags."); } @Test public void optionMustHaveEffectExplicitlyStated() { assertAbout(javaSource()) .that(getFile("EffectlessOption.java")) .processedWith(new OptionProcessor()) .failsToCompile() .withErrorContaining( "Option does not list at least one OptionEffectTag. " + "If the option has no effect, please be explicit and add NO_OP. 
" + "Otherwise, add a tag representing its effect."); } @Test public void contradictingEffectTagsAreRejected() { assertAbout(javaSource()) .that(getFile("OptionWithContradictingNoopEffects.java")) .processedWith(new OptionProcessor()) .failsToCompile() .withErrorContaining( "Option includes NO_OP with other effects. This doesn't make much sense. " + "Please remove NO_OP or the actual effects from the list, whichever is correct."); assertAbout(javaSource()) .that(getFile("OptionWithContradictingUnknownEffects.java")) .processedWith(new OptionProcessor()) .failsToCompile() .withErrorContaining( "Option includes UNKNOWN with other, known, effects. " + "Please remove UNKNOWN from the list."); } @Test public void contradictoryDocumentationCategoryIsRejected() { assertAbout(javaSource()) .that(getFile("HiddenOptionWithCategory.java")) .processedWith(new OptionProcessor()) .failsToCompile() .withErrorContaining( "Option has metadata tag HIDDEN but does not have category UNDOCUMENTED. " + "Please fix."); assertAbout(javaSource()) .that(getFile("InternalOptionWithCategory.java")) .processedWith(new OptionProcessor()) .failsToCompile() .withErrorContaining( "Option has metadata tag INTERNAL but does not have category UNDOCUMENTED. 
" + "Please fix."); } @Test public void defaultConvertersAreFound() { assertAbout(javaSource()) .that(getFile("AllDefaultConverters.java")) .processedWith(new OptionProcessor()) .compilesWithoutError(); } @Test public void converterReturnsListForAllowMultipleIsAllowed() { assertAbout(javaSource()) .that(getFile("MultipleOptionWithListTypeConverter.java")) .processedWith(new OptionProcessor()) .compilesWithoutError(); } @Test public void correctCustomConverterForPrimitiveTypePasses() { assertAbout(javaSource()) .that(getFile("CorrectCustomConverterForPrimitiveType.java")) .processedWith(new OptionProcessor()) .compilesWithoutError(); } @Test public void converterlessOptionIsRejected() { assertAbout(javaSource()) .that(getFile("ConverterlessOption.java")) .processedWith(new OptionProcessor()) .failsToCompile() .withErrorContaining( "Cannot find valid converter for option of type " + "java.util.Map<java.lang.String,java.lang.String>"); } @Test public void allowMultipleOptionWithCollectionTypeIsRejected() { assertAbout(javaSource()) .that(getFile("CollectionTypeForAllowMultipleOption.java")) .processedWith(new OptionProcessor()) .failsToCompile() .withErrorContaining( "Option that allows multiple occurrences must be of type java.util.List<E>, " + "but is of type java.util.Collection<java.lang.String>"); } @Test public void allowMultipleOptionWithNonListTypeIsRejected() { assertAbout(javaSource()) .that(getFile("NonListTypeForAllowMultipleOption.java")) .processedWith(new OptionProcessor()) .failsToCompile() .withErrorContaining( "Option that allows multiple occurrences must be of type java.util.List<E>, " + "but is of type java.lang.String"); } @Test public void allowMultipleOptionsWithDefaultValuesAreRejected() { assertAbout(javaSource()) .that(getFile("AllowMultipleOptionWithDefaultValue.java")) .processedWith(new OptionProcessor()) .failsToCompile() .withErrorContaining( "Default values for multiple options are not allowed - use \"null\" special value"); } @Test 
public void optionWithIncorrectConverterIsRejected() { assertAbout(javaSource()) .that(getFile("IncorrectConverterType.java")) .processedWith(new OptionProcessor()) .failsToCompile() .withErrorContaining( "Type of field (java.lang.String) must be assignable from the converter's return type " + "(java.lang.Integer)"); } @Test public void allowMultipleOptionWithIncorrectConverterIsRejected() { assertAbout(javaSource()) .that(getFile("IncorrectConverterTypeForAllowMultipleOption.java")) .processedWith(new OptionProcessor()) .failsToCompile() .withErrorContaining( "Type of field (java.lang.String) must be assignable from the converter's return type " + "(java.lang.Integer)"); } @Test public void expansionOptionThatAllowsMultipleIsRejected() { assertAbout(javaSource()) .that(getFile("ExpansionOptionWithAllowMultiple.java")) .processedWith(new OptionProcessor()) .failsToCompile() .withErrorContaining( "Can't set an option to accumulate multiple values and let it expand to other flags."); } @Test public void expansionOptionWithImplicitRequirementIsRejected() { assertAbout(javaSource()) .that(getFile("ExpansionOptionWithImplicitRequirement.java")) .processedWith(new OptionProcessor()) .failsToCompile() .withErrorContaining( "Can't set an option to be both an expansion option and have implicit requirements."); } }
/** * Copyright 2014 Ricardo Padilha * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.dsys.commons.api.lang; import javax.annotation.Nonnegative; import javax.annotation.Nonnull; /** * Similar to TimeUnit, to convert between binary units. Follows IEEE 1541-2002. * Zetta/zebi and yotta/yebi prefixes were removed because they cannot be * converted to/from bits reliably using 64-bit longs. Also, conversion between * ISO and IEEE prefixes (e.g., megabits to mebibits and vice-versa) always goes * through an intermediary step of bits. 
* * @author Ricardo Padilha */ public enum BinaryUnit { /** * Unit of conversion */ BITS { @Override public long convert(final long n, final BinaryUnit u) { return u.toBits(n); } @Override public long toBits(final long n) { return n; } @Override public long toKilobits(final long n) { return n / (C1 / C0); } @Override public long toMegabits(final long n) { return n / (C2 / C0); } @Override public long toGigabits(final long n) { return n / (C3 / C0); } @Override public long toTerabits(final long n) { return n / (C4 / C0); } @Override public long toPetabits(final long n) { return n / (C5 / C0); } @Override public long toExabits(final long n) { return n / (C6 / C0); } @Override public long toKibibits(final long n) { return n / (B1 / B0); } @Override public long toMebibits(final long n) { return n / (B2 / B0); } @Override public long toGibibits(final long n) { return n / (B3 / B0); } @Override public long toTebibits(final long n) { return n / (B4 / B0); } @Override public long toPebibits(final long n) { return n / (B5 / B0); } @Override public long toExbibits(final long n) { return n / (B6 / B0); } }, /** * 10^3 bits */ KILOBITS { @Override public long convert(final long n, final BinaryUnit u) { return u.toKilobits(n); } @Override public long toBits(final long n) { return n * (C1 / C0); } @Override public long toKilobits(final long n) { return n; } @Override public long toMegabits(final long n) { return n / (C2 / C1); } @Override public long toGigabits(final long n) { return n / (C3 / C1); } @Override public long toTerabits(final long n) { return n / (C4 / C1); } @Override public long toPetabits(final long n) { return n / (C5 / C1); } @Override public long toExabits(final long n) { return n / (C6 / C1); } @Override public long toKibibits(final long n) { return BITS.toKibibits(toBits(n)); } @Override public long toMebibits(final long n) { return BITS.toMebibits(toBits(n)); } @Override public long toGibibits(final long n) { return BITS.toGibibits(toBits(n)); } @Override 
public long toTebibits(final long n) { return BITS.toTebibits(toBits(n)); } @Override public long toPebibits(final long n) { return BITS.toPebibits(toBits(n)); } @Override public long toExbibits(final long n) { return BITS.toExbibits(toBits(n)); } }, /** * 10^6 bits */ MEGABITS { @Override public long convert(final long n, final BinaryUnit u) { return u.toMegabits(n); } @Override public long toBits(final long n) { return n * (C2 / C0); } @Override public long toKilobits(final long n) { return n * (C2 / C1); } @Override public long toMegabits(final long n) { return n; } @Override public long toGigabits(final long n) { return n / (C3 / C2); } @Override public long toTerabits(final long n) { return n / (C4 / C2); } @Override public long toPetabits(final long n) { return n / (C5 / C2); } @Override public long toExabits(final long n) { return n / (C6 / C2); } @Override public long toKibibits(final long n) { return BITS.toKibibits(toBits(n)); } @Override public long toMebibits(final long n) { return BITS.toMebibits(toBits(n)); } @Override public long toGibibits(final long n) { return BITS.toGibibits(toBits(n)); } @Override public long toTebibits(final long n) { return BITS.toTebibits(toBits(n)); } @Override public long toPebibits(final long n) { return BITS.toPebibits(toBits(n)); } @Override public long toExbibits(final long n) { return BITS.toExbibits(toBits(n)); } }, /** * 10^9 bits */ GIGABITS { @Override public long convert(final long n, final BinaryUnit u) { return u.toGigabits(n); } @Override public long toBits(final long n) { return n * (C3 / C0); } @Override public long toKilobits(final long n) { return n * (C3 / C1); } @Override public long toMegabits(final long n) { return n * (C3 / C2); } @Override public long toGigabits(final long n) { return n; } @Override public long toTerabits(final long n) { return n / (C4 / C3); } @Override public long toPetabits(final long n) { return n / (C5 / C3); } @Override public long toExabits(final long n) { return n / (C6 / C3); 
} @Override public long toKibibits(final long n) { return BITS.toKibibits(toBits(n)); } @Override public long toMebibits(final long n) { return BITS.toMebibits(toBits(n)); } @Override public long toGibibits(final long n) { return BITS.toGibibits(toBits(n)); } @Override public long toTebibits(final long n) { return BITS.toTebibits(toBits(n)); } @Override public long toPebibits(final long n) { return BITS.toPebibits(toBits(n)); } @Override public long toExbibits(final long n) { return BITS.toExbibits(toBits(n)); } }, /** * 10^12 bits */ TERABITS { @Override public long convert(final long n, final BinaryUnit u) { return u.toTerabits(n); } @Override public long toBits(final long n) { return n * (C4 / C0); } @Override public long toKilobits(final long n) { return n * (C4 / C1); } @Override public long toMegabits(final long n) { return n * (C4 / C2); } @Override public long toGigabits(final long n) { return n * (C4 / C3); } @Override public long toTerabits(final long n) { return n; } @Override public long toPetabits(final long n) { return n / (C5 / C4); } @Override public long toExabits(final long n) { return n / (C6 / C4); } @Override public long toKibibits(final long n) { return BITS.toKibibits(toBits(n)); } @Override public long toMebibits(final long n) { return BITS.toMebibits(toBits(n)); } @Override public long toGibibits(final long n) { return BITS.toGibibits(toBits(n)); } @Override public long toTebibits(final long n) { return BITS.toTebibits(toBits(n)); } @Override public long toPebibits(final long n) { return BITS.toPebibits(toBits(n)); } @Override public long toExbibits(final long n) { return BITS.toExbibits(toBits(n)); } }, /** * 10^15 bits */ PETABITS { @Override public long convert(final long n, final BinaryUnit u) { return u.toPetabits(n); } @Override public long toBits(final long n) { return n * (C5 / C0); } @Override public long toKilobits(final long n) { return n * (C5 / C1); } @Override public long toMegabits(final long n) { return n * (C5 / C2); } 
@Override public long toGigabits(final long n) { return n * (C5 / C3); } @Override public long toTerabits(final long n) { return n * (C5 / C4); } @Override public long toPetabits(final long n) { return n; } @Override public long toExabits(final long n) { return n / (C6 / C5); } @Override public long toKibibits(final long n) { return BITS.toKibibits(toBits(n)); } @Override public long toMebibits(final long n) { return BITS.toMebibits(toBits(n)); } @Override public long toGibibits(final long n) { return BITS.toGibibits(toBits(n)); } @Override public long toTebibits(final long n) { return BITS.toTebibits(toBits(n)); } @Override public long toPebibits(final long n) { return BITS.toPebibits(toBits(n)); } @Override public long toExbibits(final long n) { return BITS.toExbibits(toBits(n)); } }, /** * 10^18 bits */ EXABITS { @Override public long convert(final long n, final BinaryUnit u) { return u.toExabits(n); } @Override public long toBits(final long n) { return n * (C6 / C0); } @Override public long toKilobits(final long n) { return n * (C6 / C1); } @Override public long toMegabits(final long n) { return n * (C6 / C2); } @Override public long toGigabits(final long n) { return n * (C6 / C3); } @Override public long toTerabits(final long n) { return n * (C6 / C4); } @Override public long toPetabits(final long n) { return n * (C6 / C5); } @Override public long toExabits(final long n) { return n; } @Override public long toKibibits(final long n) { return BITS.toKibibits(toBits(n)); } @Override public long toMebibits(final long n) { return BITS.toMebibits(toBits(n)); } @Override public long toGibibits(final long n) { return BITS.toGibibits(toBits(n)); } @Override public long toTebibits(final long n) { return BITS.toTebibits(toBits(n)); } @Override public long toPebibits(final long n) { return BITS.toPebibits(toBits(n)); } @Override public long toExbibits(final long n) { return BITS.toExbibits(toBits(n)); } }, /** * 2^10 bits */ KIBIBITS { @Override public long convert(final 
long n, final BinaryUnit u) { return u.toKibibits(n); } @Override public long toBits(final long n) { return n * (B1 / B0); } @Override public long toKilobits(final long n) { return BITS.toKilobits(toBits(n)); } @Override public long toMegabits(final long n) { return BITS.toMegabits(toBits(n)); } @Override public long toGigabits(final long n) { return BITS.toGigabits(toBits(n)); } @Override public long toTerabits(final long n) { return BITS.toTerabits(toBits(n)); } @Override public long toPetabits(final long n) { return BITS.toPetabits(toBits(n)); } @Override public long toExabits(final long n) { return BITS.toExabits(toBits(n)); } @Override public long toKibibits(final long n) { return n; } @Override public long toMebibits(final long n) { return n / (B2 / B1); } @Override public long toGibibits(final long n) { return n / (B3 / B1); } @Override public long toTebibits(final long n) { return n / (B4 / B1); } @Override public long toPebibits(final long n) { return n / (B5 / B1); } @Override public long toExbibits(final long n) { return n / (B6 / B1); } }, /** * 2^20 bits */ MEBIBITS { @Override public long convert(final long n, final BinaryUnit u) { return u.toMebibits(n); } @Override public long toBits(final long n) { return n * (B2 / B0); } @Override public long toKilobits(final long n) { return BITS.toKilobits(toBits(n)); } @Override public long toMegabits(final long n) { return BITS.toMegabits(toBits(n)); } @Override public long toGigabits(final long n) { return BITS.toGigabits(toBits(n)); } @Override public long toTerabits(final long n) { return BITS.toTerabits(toBits(n)); } @Override public long toPetabits(final long n) { return BITS.toPetabits(toBits(n)); } @Override public long toExabits(final long n) { return BITS.toExabits(toBits(n)); } @Override public long toKibibits(final long n) { return n * (B2 / B1); } @Override public long toMebibits(final long n) { return n; } @Override public long toGibibits(final long n) { return n / (B3 / B2); } @Override public 
long toTebibits(final long n) { return n / (B4 / B2); } @Override public long toPebibits(final long n) { return n / (B5 / B2); } @Override public long toExbibits(final long n) { return n / (B6 / B2); } }, /** * 2^30 bits */ GIBIBITS { @Override public long convert(final long n, final BinaryUnit u) { return u.toGibibits(n); } @Override public long toBits(final long n) { return n * (B3 / B0); } @Override public long toKilobits(final long n) { return BITS.toKilobits(toBits(n)); } @Override public long toMegabits(final long n) { return BITS.toMegabits(toBits(n)); } @Override public long toGigabits(final long n) { return BITS.toGigabits(toBits(n)); } @Override public long toTerabits(final long n) { return BITS.toTerabits(toBits(n)); } @Override public long toPetabits(final long n) { return BITS.toPetabits(toBits(n)); } @Override public long toExabits(final long n) { return BITS.toExabits(toBits(n)); } @Override public long toKibibits(final long n) { return n * (B3 / B1); } @Override public long toMebibits(final long n) { return n * (B3 / B2); } @Override public long toGibibits(final long n) { return n; } @Override public long toTebibits(final long n) { return n / (B4 / B3); } @Override public long toPebibits(final long n) { return n / (B5 / B3); } @Override public long toExbibits(final long n) { return n / (B6 / B3); } }, /** * 2^40 bits */ TEBIBITS { @Override public long convert(final long n, final BinaryUnit u) { return u.toTebibits(n); } @Override public long toBits(final long n) { return n * (B4 / B0); } @Override public long toKilobits(final long n) { return BITS.toKilobits(toBits(n)); } @Override public long toMegabits(final long n) { return BITS.toMegabits(toBits(n)); } @Override public long toGigabits(final long n) { return BITS.toGigabits(toBits(n)); } @Override public long toTerabits(final long n) { return BITS.toTerabits(toBits(n)); } @Override public long toPetabits(final long n) { return BITS.toPetabits(toBits(n)); } @Override public long toExabits(final 
long n) { return BITS.toExabits(toBits(n)); } @Override public long toKibibits(final long n) { return n * (B4 / B1); } @Override public long toMebibits(final long n) { return n * (B4 / B2); } @Override public long toGibibits(final long n) { return n * (B4 / B3); } @Override public long toTebibits(final long n) { return n; } @Override public long toPebibits(final long n) { return n / (B5 / B4); } @Override public long toExbibits(final long n) { return n / (B6 / B4); } }, /** * 2^50 bits */ PEBIBITS { @Override public long convert(final long n, final BinaryUnit u) { return u.toPebibits(n); } @Override public long toBits(final long n) { return n * (B5 / B0); } @Override public long toKilobits(final long n) { return BITS.toKilobits(toBits(n)); } @Override public long toMegabits(final long n) { return BITS.toMegabits(toBits(n)); } @Override public long toGigabits(final long n) { return BITS.toGigabits(toBits(n)); } @Override public long toTerabits(final long n) { return BITS.toTerabits(toBits(n)); } @Override public long toPetabits(final long n) { return BITS.toPetabits(toBits(n)); } @Override public long toExabits(final long n) { return BITS.toExabits(toBits(n)); } @Override public long toKibibits(final long n) { return n * (B5 / B1); } @Override public long toMebibits(final long n) { return n * (B5 / B2); } @Override public long toGibibits(final long n) { return n * (B5 / B3); } @Override public long toTebibits(final long n) { return n * (B5 / B4); } @Override public long toPebibits(final long n) { return n; } @Override public long toExbibits(final long n) { return n / (B6 / B5); } }, /** * 2^60 bits */ EXBIBITS { @Override public long convert(final long n, final BinaryUnit u) { return u.toExbibits(n); } @Override public long toBits(final long n) { return n * (B6 / B0); } @Override public long toKilobits(final long n) { return BITS.toKilobits(toBits(n)); } @Override public long toMegabits(final long n) { return BITS.toMegabits(toBits(n)); } @Override public long 
toGigabits(final long n) { return BITS.toGigabits(toBits(n)); } @Override public long toTerabits(final long n) { return BITS.toTerabits(toBits(n)); } @Override public long toPetabits(final long n) { return BITS.toPetabits(toBits(n)); } @Override public long toExabits(final long n) { return BITS.toExabits(toBits(n)); } @Override public long toKibibits(final long n) { return n * (B6 / B1); } @Override public long toMebibits(final long n) { return n * (B6 / B2); } @Override public long toGibibits(final long n) { return n * (B6 / B3); } @Override public long toTebibits(final long n) { return n * (B6 / B4); } @Override public long toPebibits(final long n) { return n * (B6 / B5); } @Override public long toExbibits(final long n) { return n; } }; // bits to a byte private static final long A0 = 8L; // binary constants private static final long B0 = 1L; private static final long B1 = B0 * 1024L; private static final long B2 = B1 * 1024L; private static final long B3 = B2 * 1024L; private static final long B4 = B3 * 1024L; private static final long B5 = B4 * 1024L; private static final long B6 = B5 * 1024L; // SI constants private static final long C0 = 1L; private static final long C1 = C0 * 1000L; private static final long C2 = C1 * 1000L; private static final long C3 = C2 * 1000L; private static final long C4 = C3 * 1000L; private static final long C5 = C4 * 1000L; private static final long C6 = C5 * 1000L; public abstract long convert(@Nonnegative long value, @Nonnull BinaryUnit sourceUnit); public abstract long toBits(@Nonnegative long value); public abstract long toKilobits(@Nonnegative long value); public abstract long toMegabits(@Nonnegative long value); public abstract long toGigabits(@Nonnegative long value); public abstract long toTerabits(@Nonnegative long value); public abstract long toPetabits(@Nonnegative long value); public abstract long toExabits(@Nonnegative long value); public abstract long toKibibits(@Nonnegative long value); public abstract long 
toMebibits(@Nonnegative long value); public abstract long toGibibits(@Nonnegative long value); public abstract long toTebibits(@Nonnegative long value); public abstract long toPebibits(@Nonnegative long value); public abstract long toExbibits(@Nonnegative long value); public static long bytesToBits(@Nonnegative final long value) { return value * A0; } public static long bitsToBytes(@Nonnegative final long value) { return value / A0; } }
package com.azure.data.cosmos;

import com.azure.data.cosmos.internal.FailureValidator;
import com.azure.data.cosmos.internal.TestConfigurations;
import com.azure.data.cosmos.rx.TestSuiteBase;
import org.testng.annotations.AfterClass;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Factory;
import org.testng.annotations.Test;
import reactor.core.publisher.Mono;

import java.util.ArrayList;
import java.util.List;
import java.util.UUID;

import static org.assertj.core.api.Assertions.assertThat;

/**
 * Verifies that a {@code CosmosClient} keeps working after its account key is rotated at
 * runtime via the shared {@code CosmosKeyCredential}.
 *
 * <p>Each test follows the same pattern: assert the client currently holds
 * {@link TestConfigurations#MASTER_KEY}, switch the credential to
 * {@code SECONDARY_MASTER_KEY}, perform one CRUD operation, then assert the client now holds
 * the secondary key. {@code afterMethod} restores the master key so tests stay independent.
 *
 * <p>NOTE(review): {@code cosmosKeyCredential} is not declared here — presumably an inherited
 * field of {@link TestSuiteBase} shared with the built client; confirm against the base class.
 */
public class CosmosKeyCredentialTest extends TestSuiteBase {

    // Per-test / fixture timeouts in milliseconds.
    private static final int TIMEOUT = 50000;
    private static final int SETUP_TIMEOUT = 20000;
    private static final int SHUTDOWN_TIMEOUT = 20000;

    // Ids of databases created by individual tests; deleted again in afterClass.
    private final List<String> databases = new ArrayList<>();
    private final String databaseId = CosmosDatabaseForTest.generateId();

    private CosmosClient client;
    private CosmosDatabase database;
    private CosmosContainer container;

    /**
     * TestNG factory constructor: the suite instantiates this class once per client
     * configuration produced by the inherited "clientBuildersWithDirect" data provider.
     */
    @Factory(dataProvider = "clientBuildersWithDirect")
    public CosmosKeyCredentialTest(CosmosClientBuilder clientBuilder) {
        super(clientBuilder);
        this.subscriberValidationTimeout = TIMEOUT;
    }

    /**
     * Supplies resource names for the CRUD tests: one plain UUID name and one name
     * containing special characters.
     */
    @DataProvider(name = "crudArgProvider")
    public Object[][] crudArgProvider() {
        return new Object[][] {
            // plain UUID-based name
            { UUID.randomUUID().toString()} ,
            // name with special characters at both ends
            {"+ -_,:.|~" + UUID.randomUUID().toString() + " +-_,:.|~"} ,
        };
    }

    /** Builds container properties with a single partition key path "/mypk". */
    private CosmosContainerProperties getCollectionDefinition(String collectionName) {
        PartitionKeyDefinition partitionKeyDef = new PartitionKeyDefinition();
        ArrayList<String> paths = new ArrayList<>();
        paths.add("/mypk");
        partitionKeyDef.paths(paths);

        return new CosmosContainerProperties(
            collectionName,
            partitionKeyDef);
    }

    /** Builds a document with the given id, a random partition key value, and fixed payload. */
    private CosmosItemProperties getDocumentDefinition(String documentId) {
        final String uuid = UUID.randomUUID().toString();
        return new CosmosItemProperties(String.format("{ "
            + "\"id\": \"%s\", "
            + "\"mypk\": \"%s\", "
            + "\"sgmts\": [[6519456, 1471916863], [2498434, 1455671440]]"
            + "}"
            , documentId, uuid));
    }

    /** Container create must succeed after switching to the secondary key. */
    @Test(groups = { "simple" }, timeOut = TIMEOUT, dataProvider = "crudArgProvider")
    public void createCollectionWithSecondaryKey(String collectionName) throws InterruptedException {
        CosmosContainerProperties collectionDefinition = getCollectionDefinition(collectionName);

        // sanity check: client starts with the master key
        assertThat(client.cosmosKeyCredential().key()).isEqualTo(TestConfigurations.MASTER_KEY);

        cosmosKeyCredential.key(TestConfigurations.SECONDARY_MASTER_KEY);

        Mono<CosmosContainerResponse> createObservable = database
            .createContainer(collectionDefinition);

        CosmosResponseValidator<CosmosContainerResponse> validator = new CosmosResponseValidator.Builder<CosmosContainerResponse>()
            .withId(collectionDefinition.id()).build();

        validateSuccess(createObservable, validator);

        // sanity check: the client observed the key rotation
        assertThat(client.cosmosKeyCredential().key()).isEqualTo(TestConfigurations.SECONDARY_MASTER_KEY);

        safeDeleteAllCollections(database);
    }

    /** Container read must succeed after switching to the secondary key. */
    @Test(groups = { "simple" }, timeOut = TIMEOUT, dataProvider = "crudArgProvider")
    public void readCollectionWithSecondaryKey(String collectionName) throws InterruptedException {
        CosmosContainerProperties collectionDefinition = getCollectionDefinition(collectionName);

        // sanity check: client starts with the master key
        assertThat(client.cosmosKeyCredential().key()).isEqualTo(TestConfigurations.MASTER_KEY);

        Mono<CosmosContainerResponse> createObservable = database.createContainer(collectionDefinition);
        CosmosContainer collection = createObservable.block().container();

        cosmosKeyCredential.key(TestConfigurations.SECONDARY_MASTER_KEY);

        Mono<CosmosContainerResponse> readObservable = collection.read();

        CosmosResponseValidator<CosmosContainerResponse> validator = new CosmosResponseValidator.Builder<CosmosContainerResponse>()
            .withId(collection.id()).build();

        validateSuccess(readObservable, validator);

        // sanity check: the client observed the key rotation
        assertThat(client.cosmosKeyCredential().key()).isEqualTo(TestConfigurations.SECONDARY_MASTER_KEY);

        safeDeleteAllCollections(database);
    }

    /** Container delete must succeed after switching to the secondary key. */
    @Test(groups = { "simple" }, timeOut = TIMEOUT, dataProvider = "crudArgProvider")
    public void deleteCollectionWithSecondaryKey(String collectionName) throws InterruptedException {
        CosmosContainerProperties collectionDefinition = getCollectionDefinition(collectionName);

        // sanity check: client starts with the master key
        assertThat(client.cosmosKeyCredential().key()).isEqualTo(TestConfigurations.MASTER_KEY);

        Mono<CosmosContainerResponse> createObservable = database.createContainer(collectionDefinition);
        CosmosContainer collection = createObservable.block().container();

        cosmosKeyCredential.key(TestConfigurations.SECONDARY_MASTER_KEY);

        Mono<CosmosContainerResponse> deleteObservable = collection.delete();

        CosmosResponseValidator<CosmosContainerResponse> validator = new CosmosResponseValidator.Builder<CosmosContainerResponse>()
            .nullResource().build();

        validateSuccess(deleteObservable, validator);

        // sanity check: the client observed the key rotation
        assertThat(client.cosmosKeyCredential().key()).isEqualTo(TestConfigurations.SECONDARY_MASTER_KEY);
    }

    /** Container replace (indexing-mode change) must succeed after switching keys. */
    @Test(groups = { "simple" }, timeOut = TIMEOUT, dataProvider = "crudArgProvider")
    public void replaceCollectionWithSecondaryKey(String collectionName) throws InterruptedException {
        // create a collection
        CosmosContainerProperties collectionDefinition = getCollectionDefinition(collectionName);
        Mono<CosmosContainerResponse> createObservable = database.createContainer(collectionDefinition);
        CosmosContainer collection = createObservable.block().container();

        // sanity check: client starts with the master key
        assertThat(client.cosmosKeyCredential().key()).isEqualTo(TestConfigurations.MASTER_KEY);

        CosmosContainerProperties collectionSettings = collection.read().block().properties();

        // sanity check: default indexing mode is CONSISTENT
        assertThat(collectionSettings.indexingPolicy().indexingMode()).isEqualTo(IndexingMode.CONSISTENT);

        cosmosKeyCredential.key(TestConfigurations.SECONDARY_MASTER_KEY);

        // replace indexing mode with LAZY
        IndexingPolicy indexingMode = new IndexingPolicy();
        indexingMode.indexingMode(IndexingMode.LAZY);
        collectionSettings.indexingPolicy(indexingMode);
        Mono<CosmosContainerResponse> readObservable = collection.replace(collectionSettings, new CosmosContainerRequestOptions());

        // validate the replacement took effect
        CosmosResponseValidator<CosmosContainerResponse> validator = new CosmosResponseValidator.Builder<CosmosContainerResponse>()
            .indexingMode(IndexingMode.LAZY).build();

        validateSuccess(readObservable, validator);

        // sanity check: the client observed the key rotation
        assertThat(client.cosmosKeyCredential().key()).isEqualTo(TestConfigurations.SECONDARY_MASTER_KEY);

        safeDeleteAllCollections(database);
    }

    /** Item create must succeed after switching to the secondary key. */
    @Test(groups = { "simple" }, timeOut = TIMEOUT, dataProvider = "crudArgProvider")
    public void createDocumentWithSecondaryKey(String documentId) throws InterruptedException {
        // sanity check: client starts with the master key
        assertThat(client.cosmosKeyCredential().key()).isEqualTo(TestConfigurations.MASTER_KEY);

        cosmosKeyCredential.key(TestConfigurations.SECONDARY_MASTER_KEY);

        CosmosItemProperties properties = getDocumentDefinition(documentId);
        Mono<CosmosItemResponse> createObservable = container.createItem(properties, new CosmosItemRequestOptions());

        CosmosResponseValidator<CosmosItemResponse> validator = new CosmosResponseValidator.Builder<CosmosItemResponse>()
            .withId(properties.id())
            .build();

        validateSuccess(createObservable, validator);

        // sanity check: the client observed the key rotation
        assertThat(client.cosmosKeyCredential().key()).isEqualTo(TestConfigurations.SECONDARY_MASTER_KEY);
    }

    /** Item read must succeed after switching to the secondary key. */
    @Test(groups = { "simple" }, timeOut = TIMEOUT, dataProvider = "crudArgProvider")
    public void readDocumentWithSecondaryKey(String documentId) throws InterruptedException {
        // sanity check: client starts with the master key
        assertThat(client.cosmosKeyCredential().key()).isEqualTo(TestConfigurations.MASTER_KEY);

        cosmosKeyCredential.key(TestConfigurations.SECONDARY_MASTER_KEY);

        CosmosItemProperties docDefinition = getDocumentDefinition(documentId);
        CosmosItem document = container.createItem(docDefinition, new CosmosItemRequestOptions()).block().item();

        // give replicas time to catch up before the read (consistency-dependent)
        waitIfNeededForReplicasToCatchUp(clientBuilder());

        CosmosItemRequestOptions options = new CosmosItemRequestOptions();
        options.partitionKey(new PartitionKey(docDefinition.get("mypk")));
        Mono<CosmosItemResponse> readObservable = document.read(options);

        CosmosResponseValidator<CosmosItemResponse> validator = new CosmosResponseValidator.Builder<CosmosItemResponse>()
            .withId(document.id())
            .build();

        validateSuccess(readObservable, validator);

        // sanity check: the client observed the key rotation
        assertThat(client.cosmosKeyCredential().key()).isEqualTo(TestConfigurations.SECONDARY_MASTER_KEY);
    }

    /** Item delete must succeed after switching keys; the follow-up read must 404. */
    @Test(groups = { "simple" }, timeOut = TIMEOUT, dataProvider = "crudArgProvider")
    public void deleteDocumentWithSecondaryKey(String documentId) throws InterruptedException {
        // sanity check: client starts with the master key
        assertThat(client.cosmosKeyCredential().key()).isEqualTo(TestConfigurations.MASTER_KEY);

        cosmosKeyCredential.key(TestConfigurations.SECONDARY_MASTER_KEY);

        CosmosItemProperties docDefinition = getDocumentDefinition(documentId);
        CosmosItem document = container.createItem(docDefinition, new CosmosItemRequestOptions()).block().item();

        CosmosItemRequestOptions options = new CosmosItemRequestOptions();
        options.partitionKey(new PartitionKey(docDefinition.get("mypk")));
        Mono<CosmosItemResponse> deleteObservable = document.delete(options);

        CosmosResponseValidator<CosmosItemResponse> validator = new CosmosResponseValidator.Builder<CosmosItemResponse>()
            .nullResource().build();

        validateSuccess(deleteObservable, validator);

        // attempt to read the document which was deleted — expect resource-not-found
        waitIfNeededForReplicasToCatchUp(clientBuilder());
        Mono<CosmosItemResponse> readObservable = document.read(options);
        FailureValidator notFoundValidator = new FailureValidator.Builder().resourceNotFound().build();
        validateFailure(readObservable, notFoundValidator);

        // sanity check: the client observed the key rotation
        assertThat(client.cosmosKeyCredential().key()).isEqualTo(TestConfigurations.SECONDARY_MASTER_KEY);
    }

    /** Database create must succeed after switching to the secondary key. */
    @Test(groups = { "simple" }, timeOut = TIMEOUT)
    public void createDatabaseWithSecondaryKey() throws Exception {
        // sanity check: client starts with the master key
        assertThat(client.cosmosKeyCredential().key()).isEqualTo(TestConfigurations.MASTER_KEY);

        cosmosKeyCredential.key(TestConfigurations.SECONDARY_MASTER_KEY);

        CosmosDatabaseProperties databaseDefinition = new CosmosDatabaseProperties(CosmosDatabaseForTest.generateId());
        // remember the id so afterClass can clean it up
        databases.add(databaseDefinition.id());

        // create the database
        Mono<CosmosDatabaseResponse> createObservable = client.createDatabase(databaseDefinition,
            new CosmosDatabaseRequestOptions());

        // validate
        CosmosResponseValidator<CosmosDatabaseResponse> validator = new CosmosResponseValidator.Builder<CosmosDatabaseResponse>()
            .withId(databaseDefinition.id()).build();

        validateSuccess(createObservable, validator);

        // sanity check: the client observed the key rotation
        assertThat(client.cosmosKeyCredential().key()).isEqualTo(TestConfigurations.SECONDARY_MASTER_KEY);
    }

    /** Database read must succeed after switching to the secondary key. */
    @Test(groups = { "simple" }, timeOut = TIMEOUT)
    public void readDatabaseWithSecondaryKey() throws Exception {
        // sanity check: client starts with the master key
        assertThat(client.cosmosKeyCredential().key()).isEqualTo(TestConfigurations.MASTER_KEY);

        cosmosKeyCredential.key(TestConfigurations.SECONDARY_MASTER_KEY);

        // read database
        Mono<CosmosDatabaseResponse> readObservable = client.getDatabase(databaseId).read();

        // validate
        CosmosResponseValidator<CosmosDatabaseResponse> validator = new CosmosResponseValidator.Builder<CosmosDatabaseResponse>()
            .withId(databaseId).build();

        validateSuccess(readObservable, validator);

        // sanity check: the client observed the key rotation
        assertThat(client.cosmosKeyCredential().key()).isEqualTo(TestConfigurations.SECONDARY_MASTER_KEY);
    }

    /** Database delete must succeed after switching to the secondary key. */
    @Test(groups = { "simple" }, timeOut = TIMEOUT)
    public void deleteDatabaseWithSecondaryKey() throws Exception {
        // sanity check: client starts with the master key
        assertThat(client.cosmosKeyCredential().key()).isEqualTo(TestConfigurations.MASTER_KEY);

        cosmosKeyCredential.key(TestConfigurations.SECONDARY_MASTER_KEY);

        // create the database
        CosmosDatabaseProperties databaseDefinition = new CosmosDatabaseProperties(CosmosDatabaseForTest.generateId());
        // remember the id so afterClass can clean it up even if delete fails
        databases.add(databaseDefinition.id());
        CosmosDatabase database = client.createDatabase(databaseDefinition,
            new CosmosDatabaseRequestOptions()).block().database();

        // delete the database
        Mono<CosmosDatabaseResponse> deleteObservable = database.delete();

        // validate
        CosmosResponseValidator<CosmosDatabaseResponse> validator = new CosmosResponseValidator.Builder<CosmosDatabaseResponse>()
            .nullResource().build();

        validateSuccess(deleteObservable, validator);

        // sanity check: the client observed the key rotation
        assertThat(client.cosmosKeyCredential().key()).isEqualTo(TestConfigurations.SECONDARY_MASTER_KEY);
    }

    /** A non-base64 key must be rejected with IllegalArgumentException. */
    @Test(groups = { "simple" }, timeOut = TIMEOUT, expectedExceptions = IllegalArgumentException.class,
        expectedExceptionsMessageRegExp = "Illegal base64 character .*")
    public void invalidSecondaryKey() throws Exception {
        // sanity check: client starts with the master key
        assertThat(client.cosmosKeyCredential().key()).isEqualTo(TestConfigurations.MASTER_KEY);

        cosmosKeyCredential.key("Invalid Secondary Key");

        // create the database, and this should throw Illegal Argument Exception for secondary key
        CosmosDatabaseProperties databaseDefinition = new CosmosDatabaseProperties(CosmosDatabaseForTest.generateId());
        client.createDatabase(databaseDefinition, new CosmosDatabaseRequestOptions()).block().database();
    }

    /** Restores the master key so each test starts from a known credential state. */
    @AfterMethod(groups = { "simple" }, timeOut = SETUP_TIMEOUT)
    public void afterMethod() {
        // Set back master key before every test
        cosmosKeyCredential.key(TestConfigurations.MASTER_KEY);
    }

    /** Builds the shared client, database, and container used by the item tests. */
    @BeforeClass(groups = { "simple" }, timeOut = SETUP_TIMEOUT)
    public void beforeClass() {
        client = clientBuilder().build();
        database = createDatabase(client, databaseId);
        container = getSharedMultiPartitionCosmosContainer(client);
    }

    /** Deletes every database this class created, then closes the client. */
    @AfterClass(groups = { "simple" }, timeOut = SHUTDOWN_TIMEOUT, alwaysRun = true)
    public void afterClass() {
        safeDeleteDatabase(database);
        for(String dbId: databases) {
            safeDeleteDatabase(client.getDatabase(dbId));
        }
        safeClose(client);
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.wicket.behavior; import org.apache.wicket.Application; import org.apache.wicket.Component; import org.apache.wicket.IComponentAwareEventSink; import org.apache.wicket.IRequestListener; import org.apache.wicket.event.IEvent; import org.apache.wicket.markup.ComponentTag; import org.apache.wicket.markup.head.IHeaderResponse; import org.apache.wicket.markup.html.IComponentAwareHeaderContributor; import org.apache.wicket.markup.parser.XmlTag.TagType; import org.apache.wicket.util.io.IClusterable; import org.apache.wicket.util.lang.Args; import org.danekja.java.util.function.serializable.SerializableBiConsumer; import org.danekja.java.util.function.serializable.SerializableFunction; /** * Behaviors are kind of plug-ins for Components. They allow functionality to be added to a * component and get essential events forwarded by the component. They can be bound to a concrete * component (using the bind method which is called when the behavior is attached), but they don't * need to. They can modify the components markup by changing the rendered ComponentTag. Behaviors * can have their own models as well, and they are notified when these are to be detached by the * component. 
* <p> * You also cannot modify a components model with a behavior. * </p> * * @see IRequestListener * @see org.apache.wicket.markup.html.IHeaderContributor * @see org.apache.wicket.behavior.AbstractAjaxBehavior * @see org.apache.wicket.AttributeModifier * * @author Ralf Ebert * @author Eelco Hillenius * @author Igor Vaynberg (ivaynberg) */ public abstract class Behavior implements IClusterable, IComponentAwareEventSink, IComponentAwareHeaderContributor { private static final long serialVersionUID = 1L; /** * Constructor */ public Behavior() { if (Application.exists()) { Application.get().getBehaviorInstantiationListeners().onInstantiation(this); } } /** * Called when a component is about to render. * * @param component * the component that has this behavior coupled */ public void beforeRender(Component component) { } /** * Called when a component that has this behavior coupled was rendered. * * @param component * the component that has this behavior coupled */ public void afterRender(Component component) { } /** * Bind this handler to the given component. This method is called by the host component * immediately after this behavior is added to it. This method is useful if you need to do * initialization based on the component it is attached and you can't wait to do it at render * time. Keep in mind that if you decide to keep a reference to the host component, it is not * thread safe anymore, and should thus only be used in situations where you do not reuse the * behavior for multiple components. * * @param component * the component to bind to */ public void bind(Component component) { } /** * Notifies the behavior it is removed from the specified component * * @param component * the component this behavior is unbound from */ public void unbind(Component component) { } /** * Allows the behavior to detach any state it has attached during request processing. 
* * @param component * the component that initiates the detachment of this behavior */ public void detach(Component component) { } /** * In case an unexpected exception happened anywhere between * {@linkplain #onComponentTag(org.apache.wicket.Component, org.apache.wicket.markup.ComponentTag)} and * {@linkplain #afterRender(org.apache.wicket.Component)}, * onException() will be called for any behavior. Typically, if you clean up resources in * {@link #afterRender(Component)}, you should do the same in the implementation of this method. * * @param component * the component that has a reference to this behavior and during which processing * the exception occurred * @param exception * the unexpected exception */ public void onException(Component component, RuntimeException exception) { } /** * This method returns false if the behavior generates a callback url (for example ajax * behaviors) * * @param component * the component that has this behavior coupled. * * @return boolean true or false. */ public boolean getStatelessHint(Component component) { if (this instanceof IRequestListener) { // this behavior implements a callback interface, so it cannot be stateless return false; } return true; } /** * Called when a components is rendering and wants to render this behavior. If false is returned * this behavior will be ignored. * * @param component * the component that has this behavior coupled * * @return true if this behavior must be executed/rendered */ public boolean isEnabled(Component component) { return true; } /** * Called any time a component that has this behavior registered is rendering the component tag. * * @param component * the component that renders this tag currently * @param tag * the tag that is rendered */ public void onComponentTag(Component component, ComponentTag tag) { } /** * Specifies whether or not this behavior is temporary. Temporary behaviors are removed at the * end of request and never reattached. 
Such behaviors are useful for modifying component * rendering only when it renders next. Usecases include javascript effects, initial clientside * dom setup, etc. * * @param component * * @return true if this behavior is temporary */ public boolean isTemporary(Component component) { return false; } /** * Checks whether or not an {@link IRequestListener} can be invoked on this behavior. For further * information please read the javadoc on {@link Component#canCallListener()}, * this method has the same semantics. * * WARNING: Read the javadoc of {@link Component#canCallListener()} for important * security-related information. * * @param component * component this behavior is attached to * @return {@literal true} iff the listener method can be invoked */ public boolean canCallListener(Component component) { return isEnabled(component) && component.canCallListener(); } /** * Render to the web response whatever the component wants to contribute to the head section. * * @param component * * @param response * Response object */ @Override public void renderHead(Component component, IHeaderResponse response) { } /** * Called immediately after the onConfigure method in a component. Since this is before the * rendering cycle has begun, the behavior can modify the configuration of the component (i.e. 
* setVisible(false)) * * @param component * the component being configured */ public void onConfigure(Component component) { } /** * Called to notify the behavior about any events sent to the component * * @see org.apache.wicket.IComponentAwareEventSink#onEvent(org.apache.wicket.Component, * org.apache.wicket.event.IEvent) */ @Override public void onEvent(Component component, IEvent<?> event) { } /** * Called to notify that the component is being removed from its parent * @param component * the removed component */ public void onRemove(Component component) { } /** * Creates a {@link Behavior} that uses the given {@code SerializableConsumer consumer} to do * something with the component's tag. * * <p> * Usage:<br/> * <code>component.add(onTag(tag -> tag.put(key, value)));</code> * </p> * * @param onTagConsumer * the {@code SerializableConsumer} that accepts the {@link ComponentTag} * @return The created behavior */ public static Behavior onTag(SerializableBiConsumer<Component, ComponentTag> onTagConsumer) { Args.notNull(onTagConsumer, "onTagConsumer"); return new Behavior() { @Override public void onComponentTag(Component component, ComponentTag tag) { onTagConsumer.accept(component, tag); } }; } /** * Creates a {@link Behavior} that uses the given {@code SerializableFunction function} to do * something with a component's attribute. * * <p> * Usage:<br/> * <code>component.add(onAttribute("class", * currentValue -> condition(currentValue) ? 
"positive" : "negative"));</code> * </p> * * @param name * the name of the attribute to manipulate * @param onAttribute * the {@code SerializableFunction} that accepts the old value of the attribute and * returns a new value * @return The created behavior */ public static Behavior onAttribute(String name, SerializableFunction<String, CharSequence> onAttribute) { Args.notEmpty(name, "name"); Args.notNull(onAttribute, "onAttribute"); return new Behavior() { private static final long serialVersionUID = 1L; @Override public void onComponentTag(Component component, ComponentTag tag) { if (tag.getType() != TagType.CLOSE) { String oldValue = tag.getAttribute(name); tag.put(name, onAttribute.apply(oldValue)); } } }; } }
/*
 * Copyright 2020 Mark Adamcin
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package net.adamcin.oakpal.core.opear;

import net.adamcin.oakpal.api.Result;
import net.adamcin.oakpal.core.OakpalPlan;
import net.adamcin.oakpal.testing.TestPackageUtil;
import org.apache.commons.io.FileUtils;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.junit.MockitoJUnitRunner;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.net.URL;
import java.net.URLClassLoader;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
import java.util.jar.JarOutputStream;
import java.util.jar.Manifest;

import static net.adamcin.oakpal.api.Fun.result1;
import static net.adamcin.oakpal.core.opear.OpearFile.NAME_CLASS_PATH;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;

/**
 * Unit tests for {@link OpearFile}: plan resolution (default and named), jar caching,
 * manifest validation, and plan classloader construction.
 *
 * <p>Tests read fixtures under {@code src/test/resources} and write jars / cache
 * directories under {@code target/test-output}, so they depend on the working directory
 * being the module root.
 */
@RunWith(MockitoJUnitRunner.class)
public class OpearFileTest {
    private static final Logger LOGGER = LoggerFactory.getLogger(OpearFileTest.class);

    /**
     * The default plan of an opear directory with a plan resolves and loads; a directory
     * without one falls back to a plan whose checklist list is the built-in basic checklist.
     */
    @Test
    public void testFindDefaultPlanLocation() {
        Result<OpearFile> opearResult = OpearFile.fromDirectory(new File("src/test/resources/plans/bar"));
        assertFalse("opearResult should not be a failure", opearResult.isFailure());
        Result<URL> urlResult = opearResult.map(Opear::getDefaultPlan);
        assertFalse("urlResult should not be a failure", urlResult.isFailure());
        Result<OakpalPlan> planResult = urlResult.flatMap(OakpalPlan::fromJson);
        assertFalse("plan should load successfully", planResult.isFailure());
        List<String> checklists = planResult.map(OakpalPlan::getChecklists).getOrDefault(Collections.emptyList());
        assertEquals("checklists should contain test/bar checklist",
                Collections.singletonList("test/bar"), checklists);

        // a directory that has no plan file still yields a usable (fallback) default plan
        Result<OpearFile> notAPlanResult = OpearFile.fromDirectory(new File("src/test/resources/plans/none"));
        assertFalse("notAPlanResult should not be a failure", notAPlanResult.isFailure());
        Result<URL> notAPlanUrlResult = notAPlanResult.map(Opear::getDefaultPlan);
        assertFalse("notAPlanUrlResult should not be a failure", notAPlanUrlResult.isFailure());
        Result<OakpalPlan> notAPlanPlanResult = notAPlanUrlResult.flatMap(OakpalPlan::fromJson);
        assertFalse("notAPlanPlan should load successfully", notAPlanPlanResult.isFailure());
        List<String> notAPlanChecklists = notAPlanPlanResult.map(OakpalPlan::getChecklists).getOrDefault(Collections.emptyList());
        assertEquals("notAPlanChecklists should contain no checklists",
                Collections.singletonList("net.adamcin.oakpal.core/basic"), notAPlanChecklists);
    }

    /** getSpecificPlan resolves an existing named plan and fails for a missing one. */
    @Test
    public void testFindPlanLocation() throws Exception {
        final File fooDir = new File("src/test/resources/plans/foo");
        Result<OpearFile> opearResult = OpearFile.fromDirectory(fooDir);
        Result<URL> fooUrlResult = opearResult.flatMap(opear -> opear.getSpecificPlan("other-plan.json"));
        assertEquals("foo plan url should be correct",
                new URL(fooDir.toURI().toURL(), "other-plan.json"),
                fooUrlResult.getOrDefault(OakpalPlan.BASIC_PLAN_URL));
        Result<URL> foo2UrlResult = opearResult.flatMap(opear -> opear.getSpecificPlan("no-plan.json"));
        assertTrue("foo2 plan url should be failure", foo2UrlResult.isFailure());
    }

    /** A jar whose manifest points at an empty ({}) plan loads as a plan with no checklists. */
    @Test
    public void testEmptyPlanJar() throws Exception {
        final File targetDir = new File("target/test-temp");
        targetDir.mkdirs();
        final File mfJar = new File(targetDir, "emptyplan.jar");
        final File mfCache = new File(targetDir, "emptyplan.cache");
        if (mfJar.exists()) {
            mfJar.delete();
        }
        final File mfDir = new File("src/test/resources/plans/xEmpty");
        final File mfFile = new File(mfDir, JarFile.MANIFEST_NAME);
        // build a jar containing just the manifest and an empty json plan entry
        try (InputStream mfStream = new FileInputStream(mfFile)) {
            Manifest manifest = new Manifest(mfStream);
            try (JarOutputStream mfJarOut = new JarOutputStream(new FileOutputStream(mfJar), manifest)) {
                // nothing to add
                mfJarOut.putNextEntry(new JarEntry("other-plan.json"));
                mfJarOut.write("{}".getBytes(StandardCharsets.UTF_8));
            }
        }
        Result<OpearFile> opearResult = result1((File file) -> new JarFile(file, true))
                .apply(mfJar)
                .flatMap(jar -> OpearFile.fromJar(jar, mfCache));
        opearResult.throwCause(Exception.class);
        assertTrue("opear result should be success ", opearResult.isSuccess());
        Result<OakpalPlan> plan = opearResult.map(OpearFile::getDefaultPlan).flatMap(OakpalPlan::fromJson);
        plan.throwCause(Exception.class);
        assertTrue("opear plan should be empty",
                plan.map(OakpalPlan::getChecklists).getOrDefault(Arrays.asList("not a checklist")).isEmpty());
    }

    /** The plan classpath is read from the opear metadata in declaration order. */
    @Test
    public void testGetPlanClassPath() {
        Result<OpearFile> opearResult = OpearFile.fromDirectory(
                new File("src/test/resources/OpearFileTest/folders_on_classpath"));
        assertFalse("opearResult should not be a failure", opearResult.isFailure());
        OpearFile opearFile = opearResult.getOrDefault(null);
        assertNotNull("opearFile is not null", opearFile);
        assertArrayEquals("classpath should be", new String[]{"classes", "test-classes"},
                opearFile.metadata.getPlanClassPath());
    }

    /** Hash cache key: fails for a missing file; 43 url-safe base64 chars for a real one. */
    @Test
    public void testGetHashCacheKey() throws Exception {
        Result<String> cacheKeyDeletedResult = OpearFile.getHashCacheKey("/no/such/path");
        assertTrue("cacheKey is failure", cacheKeyDeletedResult.isFailure());
        assertTrue("cacheKey failure is FileNotFoundException",
                cacheKeyDeletedResult.findCause(FileNotFoundException.class).isPresent());

        buildDeepTestJar();
        Result<String> cacheKeyResult = OpearFile.getHashCacheKey(deepTestTarget.getPath());
        assertTrue("cacheKey is success", cacheKeyResult.isSuccess());
        String cacheKey = cacheKeyResult.getOrDefault("");
        // 43 chars == unpadded url-safe base64 of a 256-bit digest — TODO confirm digest algo in OpearFile
        assertEquals("cacheKey should be 43 characters long: " + cacheKey, 43, cacheKey.length());
        final String pattern = "^[0-9A-Za-z_-]*$";
        assertTrue(String.format("cacheKey %s matches regex %s", cacheKey, pattern),
                cacheKey.matches(pattern));
    }

    /** fromJar fails when a plain file already occupies the cache-id path. */
    @Test
    public void testFromJar_mkdirsFail() throws Exception {
        buildDeepTestJar();
        final File cacheDir = new File("target/test-output/OpearFileTest/testFromJar_mkdirsFail/cache");
        if (cacheDir.exists()) {
            FileUtils.deleteDirectory(cacheDir);
        }
        // occupy the expected cache-key location with a regular file so mkdirs must fail
        FileUtils.touch(new File(cacheDir,
                OpearFile.getHashCacheKey(deepTestTarget.getPath()).getOrDefault("failed_to_fail")));
        assertTrue("fail with jar when nondirectory present at cache id",
                OpearFile.fromJar(new JarFile(deepTestTarget), cacheDir).isFailure());
    }

    /** fromJar succeeds both on a fresh cache and again when the cache is already populated. */
    @Test
    public void testFromJar_useCacheDirectory() throws Exception {
        buildDeepTestJar();
        final File cacheDir = new File("target/test-output/OpearFileTest/testFromJar_useCacheDirectory/cache");
        if (cacheDir.exists()) {
            FileUtils.deleteDirectory(cacheDir);
        }
        assertTrue("succeed with jar when cache is fresh",
                OpearFile.fromJar(new JarFile(deepTestTarget), cacheDir).isSuccess());
        assertTrue("succeed with cache dir when cache is present",
                OpearFile.fromJar(new JarFile(deepTestTarget), cacheDir).isSuccess());
    }

    /** cacheJar fails when extraction targets collide with pre-existing wrong-type paths. */
    @Test
    public void testCacheJar_fail() throws Exception {
        buildDeepTestJar();
        final File cacheDir = new File("target/test-output/OpearFileTest/testCacheJar_fail/cache");
        if (cacheDir.exists()) {
            FileUtils.deleteDirectory(cacheDir);
        }
        // a directory where a file entry should be written
        new File(cacheDir, "deep-plan.json").mkdirs();
        assertTrue("fail to cache entry",
                OpearFile.cacheJar(new JarFile(deepTestTarget), cacheDir).isFailure());
        FileUtils.deleteDirectory(cacheDir);
        // a file where a directory entry should be created
        FileUtils.touch(new File(cacheDir, "META-INF"));
        assertTrue("fail to cache directory entry",
                OpearFile.cacheJar(new JarFile(deepTestTarget), cacheDir).isFailure());
    }

    /** readExpectedManifest fails for a nonexistent manifest file. */
    @Test
    public void testReadNonExistingManifest() {
        assertTrue("non-existing file can't be read",
                OpearFile.readExpectedManifest(
                        new File("src/test/resources/OpearFileTest/non-existing-file.mf"))
                        .isFailure());
    }

    /** validateOpearManifest rejects null and a manifest without a bundle symbolic name. */
    @Test
    public void testValidateOpearManifest() throws Exception {
        assertTrue("invalid manifest when null",
                OpearFile.validateOpearManifest(null).isFailure());
        assertTrue("invalid manifest when no bsn specified",
                OpearFile.validateOpearManifest(
                        new Manifest(new ByteArrayInputStream(new byte[0])))
                        .isFailure());
    }

    /** URI header validation rejects any value that escapes the opear root via "..". */
    @Test
    public void testValidateUriHeaderValues() {
        final Manifest manifest = new Manifest();
        manifest.getMainAttributes().put(NAME_CLASS_PATH, "../somedir");
        assertTrue(".. should fail",
                OpearFile.validateUriHeaderValues(manifest, NAME_CLASS_PATH).isFailure());
        manifest.getMainAttributes().put(NAME_CLASS_PATH, "/somedir");
        assertTrue("/ should succeed",
                OpearFile.validateUriHeaderValues(manifest, NAME_CLASS_PATH).isSuccess());
        manifest.getMainAttributes().put(NAME_CLASS_PATH, "/somedir/../..");
        assertTrue("/../.. should fail",
                OpearFile.validateUriHeaderValues(manifest, NAME_CLASS_PATH).isFailure());
    }

    /** With an empty plan classpath the parent classloader is returned unchanged. */
    @Test
    public void testGetPlanClassLoader_empty() throws Exception {
        final File cacheDir = new File("target/test-output/OpearFileTest/testGetPlanClassLoader_empty/cache");
        if (cacheDir.exists()) {
            FileUtils.deleteDirectory(cacheDir);
        }
        OpearFile opearFile = new OpearFile(cacheDir,
                new OpearFile.OpearMetadata(new String[0], new String[0], true));
        final ClassLoader parent = new URLClassLoader(new URL[0], null);
        assertSame("same classloader with empty classpath", parent,
                opearFile.getPlanClassLoader(parent));
    }

    /** The plan classloader exposes resources from a jar embedded inside the opear. */
    @Test
    public void testGetPlanClassLoader() throws Exception {
        final File cacheDir = new File("target/test-output/OpearFileTest/testGetPlanClassLoader/cache");
        if (cacheDir.exists()) {
            FileUtils.deleteDirectory(cacheDir);
        }
        buildDeepTestJar();
        final Result<OpearFile> opearResult = OpearFile.fromJar(new JarFile(deepTestTarget), cacheDir);
        assertTrue("is successful", opearResult.isSuccess());
        OpearFile opearFile = opearResult.getOrDefault(null);
        assertNotNull("not null", opearFile);
        final String checklistName = "OAKPAL-INF/checklists/embedded-checklist.json";
        // control: the resource is visible when loading the embedded module jar directly
        final ClassLoader controlCl = new URLClassLoader(new URL[]{embedModuleTarget.toURI().toURL()}, null);
        assertNotNull("control checklist URL not null", controlCl.getResource(checklistName));
        final ClassLoader classLoader = opearFile.getPlanClassLoader(new URLClassLoader(new URL[0], null));
        final URL embeddedChecklistUrl = classLoader.getResource(checklistName);
        assertNotNull("checklist URL not null: " + printClassLoader(classLoader),
                embeddedChecklistUrl);
    }

    /** Renders a classloader's URL list for assertion messages. */
    private String printClassLoader(final ClassLoader classLoader) {
        if (classLoader instanceof URLClassLoader) {
            return Arrays.toString(((URLClassLoader) classLoader).getURLs());
        } else {
            return classLoader.toString();
        }
    }

    // Fixture locations: sources under src/test/resources, built jars under target.
    final File baseDir = new File("src/test/resources/OpearFileTest");
    final File testTarget = new File("target/test-output/OpearFileTest");
    final File deepTestSrc = new File(baseDir, "deep_test_src");
    final File deepTestTarget = new File(testTarget, "deep_test.jar");
    final File embedModuleSrc = new File(baseDir, "embedded_module_src");
    final File embedModuleTarget = new File(testTarget, "embedded_module.jar");

    /** Builds the embedded-module jar fixture from its source directory. */
    private void buildEmbeddedModuleJar() throws Exception {
        TestPackageUtil.buildJarFromDir(embedModuleSrc, embedModuleTarget, Collections.emptyMap());
    }

    /** Builds the deep-test jar fixture, embedding the module jar inside it. */
    private void buildDeepTestJar() throws Exception {
        buildEmbeddedModuleJar();
        TestPackageUtil.buildJarFromDir(deepTestSrc, deepTestTarget,
                Collections.singletonMap(embedModuleTarget.getName(), embedModuleTarget));
    }
}
/*
 * Copyright (C) 2011 Thomas Akehurst
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.github.tomakehurst.wiremock.http.ssl;

import com.github.tomakehurst.wiremock.common.Notifier;

import javax.net.ssl.*;
import java.net.Socket;
import java.security.KeyStoreException;
import java.security.Principal;
import java.security.PrivateKey;
import java.security.cert.X509Certificate;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.stream.Collectors;

import static java.lang.System.lineSeparator;
import static java.util.Collections.emptyList;
import static java.util.Objects.requireNonNull;

/**
 * An {@link X509ExtendedKeyManager} that delegates to a wrapped key manager, but when a TLS
 * client requests (via SNI) a host name that the delegate's default certificate does not match,
 * asks the {@link DynamicKeyStore} to generate a certificate for that host on the fly and serves
 * the generated alias instead.
 *
 * <p>Several JSSE providers do not support the introspection calls needed to read the requested
 * SNI names ({@code getHandshakeSession()} / {@code getRequestedServerNames()}); in those cases
 * this class degrades gracefully to the delegate's default alias and reports the limitation
 * once (and only once) via the supplied {@link Notifier}.
 */
public class CertificateGeneratingX509ExtendedKeyManager extends DelegatingX509ExtendedKeyManager {

    private final DynamicKeyStore dynamicKeyStore;
    private final HostNameMatcher hostNameMatcher;
    // Wrapped so that the "dynamic generation unsupported" warning is only emitted once.
    private final OnceOnlyNotifier notifier;

    /**
     * @param keyManager      the delegate that provides the default certificate/key material
     * @param dynamicKeyStore source of generated per-host keys and certificate chains
     * @param hostNameMatcher decides whether an existing certificate already covers an SNI name
     * @param notifier        destination for one-time diagnostics about unsupported providers
     */
    public CertificateGeneratingX509ExtendedKeyManager(
        X509ExtendedKeyManager keyManager,
        DynamicKeyStore dynamicKeyStore,
        HostNameMatcher hostNameMatcher,
        Notifier notifier
    ) {
        super(keyManager);
        this.dynamicKeyStore = requireNonNull(dynamicKeyStore);
        this.hostNameMatcher = requireNonNull(hostNameMatcher);
        this.notifier = new OnceOnlyNotifier(notifier);
    }

    /** Returns the delegate's key for {@code alias}, falling back to the dynamic store. */
    @Override
    public PrivateKey getPrivateKey(String alias) {
        PrivateKey original = super.getPrivateKey(alias);
        return original != null ? original : dynamicKeyStore.getPrivateKey(alias);
    }

    /** Returns the delegate's chain for {@code alias}, falling back to the dynamic store. */
    @Override
    public X509Certificate[] getCertificateChain(String alias) {
        X509Certificate[] original = super.getCertificateChain(alias);
        return original != null ? original : dynamicKeyStore.getCertificateChain(alias);
    }

    @Override
    public String chooseServerAlias(String keyType, Principal[] issuers, Socket socket) {
        String defaultAlias = super.chooseServerAlias(keyType, issuers, socket);
        ExtendedSSLSession handshakeSession = getHandshakeSession(socket);
        return tryToChooseServerAlias(keyType, defaultAlias, handshakeSession);
    }

    /** Extracts the in-progress handshake session from a socket, or null if unavailable. */
    private ExtendedSSLSession getHandshakeSession(Socket socket) {
        if (socket instanceof SSLSocket) {
            SSLSocket sslSocket = (SSLSocket) socket;
            SSLSession sslSession = getHandshakeSessionIfSupported(sslSocket);
            return getHandshakeSession(sslSession);
        } else {
            return null;
        }
    }

    private SSLSession getHandshakeSessionIfSupported(SSLSocket sslSocket) {
        try {
            return sslSocket.getHandshakeSession();
        } catch (UnsupportedOperationException e) {
            // Some providers throw here; warn once and fall back to the default certificate.
            notify("your SSL Provider does not support SSLSocket.getHandshakeSession()", e);
            return null;
        }
    }

    @Override
    public String chooseEngineServerAlias(String keyType, Principal[] issuers, SSLEngine engine) {
        String defaultAlias = super.chooseEngineServerAlias(keyType, issuers, engine);
        ExtendedSSLSession handshakeSession = getHandshakeSession(engine);
        return tryToChooseServerAlias(keyType, defaultAlias, handshakeSession);
    }

    /** Extracts the in-progress handshake session from an engine, or null if unavailable. */
    private ExtendedSSLSession getHandshakeSession(SSLEngine sslEngine) {
        SSLSession sslSession = getHandshakeSessionIfSupported(sslEngine);
        return getHandshakeSession(sslSession);
    }

    private SSLSession getHandshakeSessionIfSupported(SSLEngine sslEngine) {
        try {
            return sslEngine.getHandshakeSession();
        } catch (UnsupportedOperationException | NullPointerException e) {
            // NullPointerException is caught deliberately: some providers NPE instead of
            // throwing UnsupportedOperationException before the handshake has started.
            notify("your SSL Provider does not support SSLEngine.getHandshakeSession()", e);
            return null;
        }
    }

    /** Narrows a session to ExtendedSSLSession (needed for SNI access), or null. */
    private static ExtendedSSLSession getHandshakeSession(SSLSession handshakeSession) {
        if (handshakeSession instanceof ExtendedSSLSession) {
            return (ExtendedSSLSession) handshakeSession;
        } else {
            return null;
        }
    }

    /**
     * @param keyType          non null, may be invalid
     * @param defaultAlias     nullable
     * @param handshakeSession nullable
     */
    private String tryToChooseServerAlias(String keyType, String defaultAlias, ExtendedSSLSession handshakeSession) {
        if (defaultAlias != null && handshakeSession != null) {
            return chooseServerAlias(keyType, defaultAlias, handshakeSession);
        } else {
            return defaultAlias;
        }
    }

    /**
     * @param keyType          non null, guaranteed to be valid
     * @param defaultAlias     non null, guaranteed to match a private key entry
     * @param handshakeSession non null
     */
    private String chooseServerAlias(String keyType, String defaultAlias, ExtendedSSLSession handshakeSession) {
        List<SNIHostName> requestedServerNames = getSNIHostNames(handshakeSession);
        if (requestedServerNames.isEmpty()) {
            // No SNI from the client, so there is nothing to match against.
            return defaultAlias;
        } else {
            return chooseServerAlias(keyType, defaultAlias, requestedServerNames);
        }
    }

    /** Filters the requested server names down to host names (the only SNI type we handle). */
    private List<SNIHostName> getSNIHostNames(ExtendedSSLSession handshakeSession) {
        List<SNIServerName> requestedServerNames = getRequestedServerNames(handshakeSession);
        return requestedServerNames.stream()
                .filter(SNIHostName.class::isInstance)
                .map(SNIHostName.class::cast)
                .collect(Collectors.toList());
    }

    private List<SNIServerName> getRequestedServerNames(ExtendedSSLSession handshakeSession) {
        try {
            return handshakeSession.getRequestedServerNames();
        } catch (UnsupportedOperationException e) {
            notify("your SSL Provider does not support ExtendedSSLSession.getRequestedServerNames()", e);
            return emptyList();
        }
    }

    /**
     * @param keyType              non null, guaranteed to be valid
     * @param defaultAlias         non null, guaranteed to match a private key entry
     * @param requestedServerNames non null, non empty
     */
    private String chooseServerAlias(String keyType, String defaultAlias, List<SNIHostName> requestedServerNames) {
        X509Certificate[] certificateChain = super.getCertificateChain(defaultAlias);
        // Guard against an empty (as well as null) chain: dereferencing certificateChain[0]
        // unconditionally would throw ArrayIndexOutOfBoundsException mid-handshake instead of
        // falling through to dynamic certificate generation.
        if (certificateChain != null && certificateChain.length > 0
                && matches(certificateChain[0], requestedServerNames)) {
            return defaultAlias;
        } else {
            try {
                SNIHostName requestedServerName = requestedServerNames.get(0);
                dynamicKeyStore.generateCertificateIfNecessary(keyType, requestedServerName);
                return requestedServerName.getAsciiName();
            } catch (KeyStoreException | CertificateGenerationUnsupportedException e) {
                notify("certificates cannot be generated; perhaps the sun internal classes are not available?", e);
                return defaultAlias;
            }
        }
    }

    private boolean matches(X509Certificate x509Certificate, List<SNIHostName> requestedServerNames) {
        return requestedServerNames.stream().anyMatch(sniHostName -> hostNameMatcher.matches(x509Certificate, sniHostName));
    }

    private void notify(String reason, Exception e) {
        notifier.error("Dynamic certificate generation is not supported because " + reason + lineSeparator() + "All sites will be served using the normal WireMock HTTPS certificate.", e);
    }

    /** Forwards to the wrapped notifier at most once across all three methods combined. */
    private static class OnceOnlyNotifier implements Notifier {

        private final Notifier notifier;
        private final OnceOnly onceOnly = new OnceOnly();

        private OnceOnlyNotifier(Notifier notifier) {
            this.notifier = notifier;
        }

        @Override
        public void info(String message) {
            if (onceOnly.unused()) {
                notifier.info(message);
            }
        }

        @Override
        public void error(String message) {
            if (onceOnly.unused()) {
                notifier.error(message);
            }
        }

        @Override
        public void error(String message, Throwable t) {
            if (onceOnly.unused()) {
                notifier.error(message, t);
            }
        }
    }

    /** Thread-safe single-use latch: unused() returns true exactly once. */
    private static class OnceOnly {
        private final AtomicBoolean used = new AtomicBoolean(false);

        boolean unused() {
            return used.compareAndSet(false, true);
        }
    }
}
/* * Copyright 2012 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.optaplanner.core.impl.localsearch.decider.acceptor.lateacceptance; import org.junit.Test; import org.optaplanner.core.api.score.buildin.simple.SimpleScore; import org.optaplanner.core.impl.localsearch.decider.acceptor.AbstractAcceptorTest; import org.optaplanner.core.impl.localsearch.scope.LocalSearchMoveScope; import org.optaplanner.core.impl.localsearch.scope.LocalSearchPhaseScope; import org.optaplanner.core.impl.localsearch.scope.LocalSearchStepScope; import org.optaplanner.core.impl.solver.scope.DefaultSolverScope; import org.optaplanner.core.impl.testdata.domain.TestdataSolution; import static org.junit.Assert.*; public class LateAcceptanceAcceptorTest extends AbstractAcceptorTest { @Test public void lateAcceptanceSize() { LateAcceptanceAcceptor acceptor = new LateAcceptanceAcceptor(); acceptor.setLateAcceptanceSize(3); acceptor.setHillClimbingEnabled(false); DefaultSolverScope<TestdataSolution> solverScope = new DefaultSolverScope<>(); solverScope.setBestScore(SimpleScore.valueOf(-1000)); LocalSearchPhaseScope<TestdataSolution> phaseScope = new LocalSearchPhaseScope<>(solverScope); LocalSearchStepScope<TestdataSolution> lastCompletedStepScope = new LocalSearchStepScope<>(phaseScope, -1); lastCompletedStepScope.setScore(SimpleScore.valueOf(Integer.MIN_VALUE)); phaseScope.setLastCompletedStepScope(lastCompletedStepScope); acceptor.phaseStarted(phaseScope); 
// lateScore = -1000 LocalSearchStepScope<TestdataSolution> stepScope0 = new LocalSearchStepScope<>(phaseScope); LocalSearchMoveScope<TestdataSolution> moveScope0 = buildMoveScope(stepScope0, -500); assertEquals(true, acceptor.isAccepted(buildMoveScope(stepScope0, -900))); assertEquals(true, acceptor.isAccepted(moveScope0)); assertEquals(true, acceptor.isAccepted(buildMoveScope(stepScope0, -800))); assertEquals(false, acceptor.isAccepted(buildMoveScope(stepScope0, -2000))); assertEquals(true, acceptor.isAccepted(buildMoveScope(stepScope0, -1000))); assertEquals(true, acceptor.isAccepted(buildMoveScope(stepScope0, -900))); // Repeated call stepScope0.setStep(moveScope0.getMove()); stepScope0.setScore(moveScope0.getScore()); solverScope.setBestScore(moveScope0.getScore()); acceptor.stepEnded(stepScope0); phaseScope.setLastCompletedStepScope(stepScope0); // lateScore = -1000 LocalSearchStepScope<TestdataSolution> stepScope1 = new LocalSearchStepScope<>(phaseScope); LocalSearchMoveScope<TestdataSolution> moveScope1 = buildMoveScope(stepScope1, -700); assertEquals(true, acceptor.isAccepted(buildMoveScope(stepScope1, -900))); assertEquals(false, acceptor.isAccepted(buildMoveScope(stepScope1, -2000))); assertEquals(true, acceptor.isAccepted(moveScope1)); assertEquals(true, acceptor.isAccepted(buildMoveScope(stepScope1, -1000))); assertEquals(false, acceptor.isAccepted(buildMoveScope(stepScope1, -1001))); assertEquals(true, acceptor.isAccepted(buildMoveScope(stepScope0, -900))); // Repeated call stepScope1.setStep(moveScope1.getMove()); stepScope1.setScore(moveScope1.getScore()); // bestScore unchanged acceptor.stepEnded(stepScope1); phaseScope.setLastCompletedStepScope(stepScope1); // lateScore = -1000 LocalSearchStepScope<TestdataSolution> stepScope2 = new LocalSearchStepScope<>(phaseScope); LocalSearchMoveScope<TestdataSolution> moveScope2 = buildMoveScope(stepScope1, -400); assertEquals(true, acceptor.isAccepted(buildMoveScope(stepScope2, -900))); assertEquals(false, 
acceptor.isAccepted(buildMoveScope(stepScope2, -2000))); assertEquals(false, acceptor.isAccepted(buildMoveScope(stepScope2, -1001))); assertEquals(true, acceptor.isAccepted(buildMoveScope(stepScope2, -1000))); assertEquals(true, acceptor.isAccepted(moveScope2)); assertEquals(true, acceptor.isAccepted(buildMoveScope(stepScope0, -900))); // Repeated call stepScope2.setStep(moveScope2.getMove()); stepScope2.setScore(moveScope2.getScore()); solverScope.setBestScore(moveScope2.getScore()); acceptor.stepEnded(stepScope2); phaseScope.setLastCompletedStepScope(stepScope2); // lateScore = -500 LocalSearchStepScope<TestdataSolution> stepScope3 = new LocalSearchStepScope<>(phaseScope); LocalSearchMoveScope<TestdataSolution> moveScope3 = buildMoveScope(stepScope1, -200); assertEquals(false, acceptor.isAccepted(buildMoveScope(stepScope3, -900))); assertEquals(true, acceptor.isAccepted(buildMoveScope(stepScope3, -500))); assertEquals(false, acceptor.isAccepted(buildMoveScope(stepScope3, -501))); assertEquals(true, acceptor.isAccepted(moveScope3)); assertEquals(false, acceptor.isAccepted(buildMoveScope(stepScope3, -2000))); assertEquals(false, acceptor.isAccepted(buildMoveScope(stepScope0, -900))); // Repeated call stepScope3.setStep(moveScope3.getMove()); stepScope3.setScore(moveScope3.getScore()); solverScope.setBestScore(moveScope3.getScore()); acceptor.stepEnded(stepScope3); phaseScope.setLastCompletedStepScope(stepScope3); // lateScore = -700 (not the best score of -500!) 
LocalSearchStepScope<TestdataSolution> stepScope4 = new LocalSearchStepScope<>(phaseScope); LocalSearchMoveScope<TestdataSolution> moveScope4 = buildMoveScope(stepScope1, -300); assertEquals(true, acceptor.isAccepted(buildMoveScope(stepScope4, -700))); assertEquals(true, acceptor.isAccepted(moveScope4)); assertEquals(true, acceptor.isAccepted(buildMoveScope(stepScope4, -500))); assertEquals(false, acceptor.isAccepted(buildMoveScope(stepScope4, -2000))); assertEquals(false, acceptor.isAccepted(buildMoveScope(stepScope4, -701))); assertEquals(true, acceptor.isAccepted(buildMoveScope(stepScope0, -700))); // Repeated call stepScope4.setStep(moveScope4.getMove()); stepScope4.setScore(moveScope4.getScore()); // bestScore unchanged acceptor.stepEnded(stepScope4); phaseScope.setLastCompletedStepScope(stepScope4); // lateScore = -400 LocalSearchStepScope<TestdataSolution> stepScope5 = new LocalSearchStepScope<>(phaseScope); LocalSearchMoveScope<TestdataSolution> moveScope5 = buildMoveScope(stepScope1, -300); assertEquals(false, acceptor.isAccepted(buildMoveScope(stepScope5, -401))); assertEquals(true, acceptor.isAccepted(buildMoveScope(stepScope5, -400))); assertEquals(true, acceptor.isAccepted(moveScope5)); assertEquals(false, acceptor.isAccepted(buildMoveScope(stepScope5, -2000))); assertEquals(false, acceptor.isAccepted(buildMoveScope(stepScope5, -600))); assertEquals(false, acceptor.isAccepted(buildMoveScope(stepScope0, -401))); // Repeated call stepScope5.setStep(moveScope5.getMove()); stepScope5.setScore(moveScope5.getScore()); // bestScore unchanged acceptor.stepEnded(stepScope5); phaseScope.setLastCompletedStepScope(stepScope5); acceptor.phaseEnded(phaseScope); } @Test public void hillClimbingEnabled() { LateAcceptanceAcceptor acceptor = new LateAcceptanceAcceptor(); acceptor.setLateAcceptanceSize(2); acceptor.setHillClimbingEnabled(true); DefaultSolverScope<TestdataSolution> solverScope = new DefaultSolverScope<>(); 
solverScope.setBestScore(SimpleScore.valueOf(-1000)); LocalSearchPhaseScope<TestdataSolution> phaseScope = new LocalSearchPhaseScope<>(solverScope); LocalSearchStepScope<TestdataSolution> lastCompletedStepScope = new LocalSearchStepScope<>(phaseScope, -1); lastCompletedStepScope.setScore(solverScope.getBestScore()); phaseScope.setLastCompletedStepScope(lastCompletedStepScope); acceptor.phaseStarted(phaseScope); // lateScore = -1000, lastCompletedStepScore = Integer.MIN_VALUE LocalSearchStepScope<TestdataSolution> stepScope0 = new LocalSearchStepScope<>(phaseScope); LocalSearchMoveScope<TestdataSolution> moveScope0 = buildMoveScope(stepScope0, -500); assertEquals(true, acceptor.isAccepted(buildMoveScope(stepScope0, -900))); assertEquals(true, acceptor.isAccepted(moveScope0)); assertEquals(true, acceptor.isAccepted(buildMoveScope(stepScope0, -800))); assertEquals(false, acceptor.isAccepted(buildMoveScope(stepScope0, -2000))); assertEquals(true, acceptor.isAccepted(buildMoveScope(stepScope0, -1000))); assertEquals(true, acceptor.isAccepted(buildMoveScope(stepScope0, -900))); // Repeated call stepScope0.setStep(moveScope0.getMove()); stepScope0.setScore(moveScope0.getScore()); solverScope.setBestScore(moveScope0.getScore()); acceptor.stepEnded(stepScope0); phaseScope.setLastCompletedStepScope(stepScope0); // lateScore = -1000, lastCompletedStepScore = -500 LocalSearchStepScope<TestdataSolution> stepScope1 = new LocalSearchStepScope<>(phaseScope); LocalSearchMoveScope<TestdataSolution> moveScope1 = buildMoveScope(stepScope1, -700); assertEquals(true, acceptor.isAccepted(buildMoveScope(stepScope1, -900))); assertEquals(false, acceptor.isAccepted(buildMoveScope(stepScope1, -2000))); assertEquals(true, acceptor.isAccepted(moveScope1)); assertEquals(true, acceptor.isAccepted(buildMoveScope(stepScope1, -1000))); assertEquals(false, acceptor.isAccepted(buildMoveScope(stepScope1, -1001))); assertEquals(true, acceptor.isAccepted(buildMoveScope(stepScope0, -900))); // Repeated 
call stepScope1.setStep(moveScope1.getMove()); stepScope1.setScore(moveScope1.getScore()); // bestScore unchanged acceptor.stepEnded(stepScope1); phaseScope.setLastCompletedStepScope(stepScope1); // lateScore = -500, lastCompletedStepScore = -700 LocalSearchStepScope<TestdataSolution> stepScope2 = new LocalSearchStepScope<>(phaseScope); LocalSearchMoveScope<TestdataSolution> moveScope2 = buildMoveScope(stepScope1, -400); assertEquals(true, acceptor.isAccepted(buildMoveScope(stepScope2, -700))); assertEquals(false, acceptor.isAccepted(buildMoveScope(stepScope2, -2000))); assertEquals(false, acceptor.isAccepted(buildMoveScope(stepScope2, -701))); assertEquals(true, acceptor.isAccepted(buildMoveScope(stepScope2, -600))); assertEquals(true, acceptor.isAccepted(moveScope2)); assertEquals(true, acceptor.isAccepted(buildMoveScope(stepScope0, -700))); // Repeated call stepScope2.setStep(moveScope2.getMove()); stepScope2.setScore(moveScope2.getScore()); solverScope.setBestScore(moveScope2.getScore()); acceptor.stepEnded(stepScope2); phaseScope.setLastCompletedStepScope(stepScope2); // lateScore = -700, lastCompletedStepScore = -400 LocalSearchStepScope<TestdataSolution> stepScope3 = new LocalSearchStepScope<>(phaseScope); LocalSearchMoveScope<TestdataSolution> moveScope3 = buildMoveScope(stepScope1, -200); assertEquals(false, acceptor.isAccepted(buildMoveScope(stepScope3, -900))); assertEquals(true, acceptor.isAccepted(buildMoveScope(stepScope3, -700))); assertEquals(false, acceptor.isAccepted(buildMoveScope(stepScope3, -701))); assertEquals(true, acceptor.isAccepted(moveScope3)); assertEquals(false, acceptor.isAccepted(buildMoveScope(stepScope3, -2000))); assertEquals(false, acceptor.isAccepted(buildMoveScope(stepScope0, -900))); // Repeated call stepScope3.setStep(moveScope3.getMove()); stepScope3.setScore(moveScope3.getScore()); solverScope.setBestScore(moveScope3.getScore()); acceptor.stepEnded(stepScope3); phaseScope.setLastCompletedStepScope(stepScope3); // lateScore 
= -400 (not the best score of -200!), lastCompletedStepScore = -200 LocalSearchStepScope<TestdataSolution> stepScope4 = new LocalSearchStepScope<>(phaseScope); LocalSearchMoveScope<TestdataSolution> moveScope4 = buildMoveScope(stepScope1, -300); assertEquals(true, acceptor.isAccepted(buildMoveScope(stepScope4, -400))); assertEquals(true, acceptor.isAccepted(moveScope4)); assertEquals(false, acceptor.isAccepted(buildMoveScope(stepScope4, -500))); assertEquals(false, acceptor.isAccepted(buildMoveScope(stepScope4, -2000))); assertEquals(false, acceptor.isAccepted(buildMoveScope(stepScope4, -401))); assertEquals(true, acceptor.isAccepted(buildMoveScope(stepScope0, -400))); // Repeated call stepScope4.setStep(moveScope4.getMove()); stepScope4.setScore(moveScope4.getScore()); // bestScore unchanged acceptor.stepEnded(stepScope4); phaseScope.setLastCompletedStepScope(stepScope4); // lateScore = -200, lastCompletedStepScore = -300 LocalSearchStepScope<TestdataSolution> stepScope5 = new LocalSearchStepScope<>(phaseScope); LocalSearchMoveScope<TestdataSolution> moveScope5 = buildMoveScope(stepScope1, -300); assertEquals(false, acceptor.isAccepted(buildMoveScope(stepScope5, -301))); assertEquals(false, acceptor.isAccepted(buildMoveScope(stepScope5, -400))); assertEquals(true, acceptor.isAccepted(moveScope5)); assertEquals(false, acceptor.isAccepted(buildMoveScope(stepScope5, -2000))); assertEquals(false, acceptor.isAccepted(buildMoveScope(stepScope5, -600))); assertEquals(false, acceptor.isAccepted(buildMoveScope(stepScope0, -301))); // Repeated call stepScope5.setStep(moveScope5.getMove()); stepScope5.setScore(moveScope5.getScore()); // bestScore unchanged acceptor.stepEnded(stepScope5); phaseScope.setLastCompletedStepScope(stepScope5); acceptor.phaseEnded(phaseScope); } @Test(expected = IllegalArgumentException.class) public void zeroLateAcceptanceSize() { LateAcceptanceAcceptor acceptor = new LateAcceptanceAcceptor(); acceptor.setLateAcceptanceSize(0); 
acceptor.phaseStarted(null); } @Test(expected = IllegalArgumentException.class) public void negativeLateAcceptanceSize() { LateAcceptanceAcceptor acceptor = new LateAcceptanceAcceptor(); acceptor.setLateAcceptanceSize(-1); acceptor.phaseStarted(null); } }
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.reactivex.rxjava3.android.schedulers; import android.os.Handler; import android.os.Looper; import android.os.Message; import io.reactivex.rxjava3.android.testutil.CountingRunnable; import io.reactivex.rxjava3.core.Scheduler; import io.reactivex.rxjava3.core.Scheduler.Worker; import io.reactivex.rxjava3.disposables.Disposable; import io.reactivex.rxjava3.functions.Consumer; import io.reactivex.rxjava3.functions.Function; import io.reactivex.rxjava3.plugins.RxJavaPlugins; import java.util.Arrays; import java.util.Collection; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; import org.junit.After; import org.junit.Before; import org.junit.Ignore; import org.junit.Test; import org.junit.runner.RunWith; import org.robolectric.ParameterizedRobolectricTestRunner; import org.robolectric.annotation.Config; import org.robolectric.shadows.ShadowLooper; import org.robolectric.shadows.ShadowMessageQueue; import static java.util.concurrent.TimeUnit.MINUTES; import static java.util.concurrent.TimeUnit.SECONDS; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertSame; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import static org.robolectric.Shadows.shadowOf; import static org.robolectric.shadows.ShadowLooper.idleMainLooper; import static 
org.robolectric.shadows.ShadowLooper.pauseMainLooper; import static org.robolectric.shadows.ShadowLooper.runUiThreadTasks; import static org.robolectric.shadows.ShadowLooper.runUiThreadTasksIncludingDelayedTasks; import static org.robolectric.shadows.ShadowLooper.unPauseMainLooper; @RunWith(ParameterizedRobolectricTestRunner.class) @Config(manifest=Config.NONE, sdk = 16) public final class HandlerSchedulerTest { @ParameterizedRobolectricTestRunner.Parameters(name = "async = {0}") public static Collection<Object[]> data() { return Arrays.asList(new Object[][]{ {true}, {false} }); } private final Scheduler scheduler; private final boolean async; public HandlerSchedulerTest(boolean async) { this.scheduler = new HandlerScheduler(new Handler(Looper.getMainLooper()), async); this.async = async; } @Before public void setUp() { RxJavaPlugins.reset(); pauseMainLooper(); // Take manual control of looper task queue. } @After public void tearDown() { RxJavaPlugins.reset(); unPauseMainLooper(); } @Test public void directScheduleOncePostsImmediately() { CountingRunnable counter = new CountingRunnable(); scheduler.scheduleDirect(counter); runUiThreadTasks(); assertEquals(1, counter.get()); } @Test public void directScheduleOnceWithNegativeDelayPostsImmediately() { CountingRunnable counter = new CountingRunnable(); scheduler.scheduleDirect(counter, -1, TimeUnit.MINUTES); runUiThreadTasks(); assertEquals(1, counter.get()); } @Test public void directScheduleOnceUsesHook() { final CountingRunnable newCounter = new CountingRunnable(); final AtomicReference<Runnable> runnableRef = new AtomicReference<>(); RxJavaPlugins.setScheduleHandler(new Function<Runnable, Runnable>() { @Override public Runnable apply(Runnable runnable) { runnableRef.set(runnable); return newCounter; } }); CountingRunnable counter = new CountingRunnable(); scheduler.scheduleDirect(counter); // Verify our runnable was passed to the schedulers hook. 
assertSame(counter, runnableRef.get()); runUiThreadTasks(); // Verify the scheduled runnable was the one returned from the hook. assertEquals(1, newCounter.get()); assertEquals(0, counter.get()); } @Test public void directScheduleOnceDisposedDoesNotRun() { CountingRunnable counter = new CountingRunnable(); Disposable disposable = scheduler.scheduleDirect(counter); disposable.dispose(); runUiThreadTasks(); assertEquals(0, counter.get()); } @Test public void directScheduleOnceWithDelayPostsWithDelay() { CountingRunnable counter = new CountingRunnable(); scheduler.scheduleDirect(counter, 1, MINUTES); runUiThreadTasks(); assertEquals(0, counter.get()); idleMainLooper(1, MINUTES); runUiThreadTasks(); assertEquals(1, counter.get()); } @Test public void directScheduleOnceWithDelayUsesHook() { final CountingRunnable newCounter = new CountingRunnable(); final AtomicReference<Runnable> runnableRef = new AtomicReference<>(); RxJavaPlugins.setScheduleHandler(new Function<Runnable, Runnable>() { @Override public Runnable apply(Runnable runnable) { runnableRef.set(runnable); return newCounter; } }); CountingRunnable counter = new CountingRunnable(); scheduler.scheduleDirect(counter, 1, MINUTES); // Verify our runnable was passed to the schedulers hook. assertSame(counter, runnableRef.get()); idleMainLooper(1, MINUTES); runUiThreadTasks(); // Verify the scheduled runnable was the one returned from the hook. 
assertEquals(1, newCounter.get());
    assertEquals(0, counter.get());
  }

  // Disposing a delayed direct schedule before the delay elapses must prevent the run entirely.
  @Test public void directScheduleOnceWithDelayDisposedDoesNotRun() {
    CountingRunnable counter = new CountingRunnable();
    Disposable disposable = scheduler.scheduleDirect(counter, 1, MINUTES);
    idleMainLooper(30, SECONDS);
    disposable.dispose();
    idleMainLooper(30, SECONDS);
    runUiThreadTasks();
    assertEquals(0, counter.get());
  }

  // Periodic direct schedule should fire once per period as the looper clock advances.
  @Test @Ignore("Implementation delegated to default RxJava implementation")
  public void directSchedulePeriodicallyReschedulesItself() {
    CountingRunnable counter = new CountingRunnable();
    scheduler.schedulePeriodicallyDirect(counter, 1, 1, MINUTES);
    runUiThreadTasks();
    assertEquals(0, counter.get());
    idleMainLooper(1, MINUTES);
    runUiThreadTasks();
    assertEquals(1, counter.get());
    idleMainLooper(1, MINUTES);
    runUiThreadTasks();
    assertEquals(2, counter.get());
    idleMainLooper(1, MINUTES);
    runUiThreadTasks();
    assertEquals(3, counter.get());
  }

  // The RxJavaPlugins schedule hook must wrap the task once, not on every re-schedule.
  @Test @Ignore("Implementation delegated to default RxJava implementation")
  public void directSchedulePeriodicallyUsesHookOnce() {
    final CountingRunnable newCounter = new CountingRunnable();
    final AtomicReference<Runnable> runnableRef = new AtomicReference<>();
    RxJavaPlugins.setScheduleHandler(new Function<Runnable, Runnable>() {
      @Override public Runnable apply(Runnable runnable) {
        runnableRef.set(runnable);
        return newCounter;
      }
    });
    CountingRunnable counter = new CountingRunnable();
    scheduler.schedulePeriodicallyDirect(counter, 1, 1, MINUTES);
    // Verify our action was passed to the schedulers hook.
    assertSame(counter, runnableRef.get());
    runnableRef.set(null);
    idleMainLooper(1, MINUTES);
    runUiThreadTasks();
    // Verify the scheduled action was the one returned from the hook.
    assertEquals(1, newCounter.get());
    assertEquals(0, counter.get());
    // Ensure the hook was not called again when the runnable re-scheduled itself.
    assertNull(runnableRef.get());
  }

  // Disposing between periods stops all subsequent executions.
  @Test @Ignore("Implementation delegated to default RxJava implementation")
  public void directSchedulePeriodicallyDisposedDoesNotRun() {
    CountingRunnable counter = new CountingRunnable();
    Disposable disposable = scheduler.schedulePeriodicallyDirect(counter, 1, 1, MINUTES);
    runUiThreadTasks();
    assertEquals(0, counter.get());
    idleMainLooper(1, MINUTES);
    runUiThreadTasks();
    assertEquals(1, counter.get());
    idleMainLooper(1, MINUTES);
    runUiThreadTasks();
    assertEquals(2, counter.get());
    disposable.dispose();
    idleMainLooper(1, MINUTES);
    runUiThreadTasks();
    assertEquals(2, counter.get());
  }

  // Disposing from inside run() must prevent the re-schedule for the next period.
  @Test @Ignore("Implementation delegated to default RxJava implementation")
  public void directSchedulePeriodicallyDisposedDuringRunDoesNotReschedule() {
    final AtomicReference<Disposable> disposableRef = new AtomicReference<>();
    CountingRunnable counter = new CountingRunnable() {
      @Override public void run() {
        super.run();
        if (get() == 2) {
          disposableRef.get().dispose();
        }
      }
    };
    Disposable disposable = scheduler.schedulePeriodicallyDirect(counter, 1, 1, MINUTES);
    disposableRef.set(disposable);
    runUiThreadTasks();
    assertEquals(0, counter.get());
    idleMainLooper(1, MINUTES);
    runUiThreadTasks();
    assertEquals(1, counter.get());
    idleMainLooper(1, MINUTES);
    runUiThreadTasks();
    assertEquals(2, counter.get());
    // Dispose will have happened here during the last run() execution.
    idleMainLooper(1, MINUTES);
    runUiThreadTasks();
    assertEquals(2, counter.get());
  }

  // A throwing periodic task must not be re-scheduled after the throw.
  @Test @Ignore("Implementation delegated to default RxJava implementation")
  public void directSchedulePeriodicallyThrowingDoesNotReschedule() {
    CountingRunnable counter = new CountingRunnable() {
      @Override public void run() {
        super.run();
        if (get() == 2) {
          throw new RuntimeException("Broken!");
        }
      }
    };
    scheduler.schedulePeriodicallyDirect(counter, 1, 1, MINUTES);
    runUiThreadTasks();
    assertEquals(0, counter.get());
    idleMainLooper(1, MINUTES);
    runUiThreadTasks();
    assertEquals(1, counter.get());
    idleMainLooper(1, MINUTES);
    runUiThreadTasks();
    assertEquals(2, counter.get());
    // Exception will have happened here during the last run() execution.
    idleMainLooper(1, MINUTES);
    runUiThreadTasks();
    assertEquals(2, counter.get());
  }

  @Test public void workerScheduleOncePostsImmediately() {
    Worker worker = scheduler.createWorker();
    CountingRunnable counter = new CountingRunnable();
    worker.schedule(counter);
    runUiThreadTasks();
    assertEquals(1, counter.get());
  }

  // Negative delays are clamped to "run now".
  @Test public void workerScheduleOnceWithNegativeDelayPostsImmediately() {
    Worker worker = scheduler.createWorker();
    CountingRunnable counter = new CountingRunnable();
    worker.schedule(counter, -1, TimeUnit.MINUTES);
    runUiThreadTasks();
    assertEquals(1, counter.get());
  }

  @Test public void workerScheduleOnceUsesHook() {
    final CountingRunnable newCounter = new CountingRunnable();
    final AtomicReference<Runnable> runnableRef = new AtomicReference<>();
    RxJavaPlugins.setScheduleHandler(new Function<Runnable, Runnable>() {
      @Override public Runnable apply(Runnable runnable) {
        runnableRef.set(runnable);
        return newCounter;
      }
    });
    Worker worker = scheduler.createWorker();
    CountingRunnable counter = new CountingRunnable();
    worker.schedule(counter);
    // Verify our runnable was passed to the schedulers hook.
    assertSame(counter, runnableRef.get());
    runUiThreadTasks();
    // Verify the scheduled runnable was the one returned from the hook.
    assertEquals(1, newCounter.get());
    assertEquals(0, counter.get());
  }

  @Test public void workerScheduleOnceDisposedDoesNotRun() {
    Worker worker = scheduler.createWorker();
    CountingRunnable counter = new CountingRunnable();
    Disposable disposable = worker.schedule(counter);
    disposable.dispose();
    runUiThreadTasks();
    assertEquals(0, counter.get());
  }

  @Test public void workerScheduleOnceWithDelayPostsWithDelay() {
    Worker worker = scheduler.createWorker();
    CountingRunnable counter = new CountingRunnable();
    worker.schedule(counter, 1, MINUTES);
    runUiThreadTasks();
    assertEquals(0, counter.get());
    idleMainLooper(1, MINUTES);
    runUiThreadTasks();
    assertEquals(1, counter.get());
  }

  @Test public void workerScheduleOnceWithDelayUsesHook() {
    final CountingRunnable newCounter = new CountingRunnable();
    final AtomicReference<Runnable> runnableRef = new AtomicReference<>();
    RxJavaPlugins.setScheduleHandler(new Function<Runnable, Runnable>() {
      @Override public Runnable apply(Runnable runnable) {
        runnableRef.set(runnable);
        return newCounter;
      }
    });
    Worker worker = scheduler.createWorker();
    CountingRunnable counter = new CountingRunnable();
    worker.schedule(counter, 1, MINUTES);
    // Verify our runnable was passed to the schedulers hook.
    assertSame(counter, runnableRef.get());
    idleMainLooper(1, MINUTES);
    runUiThreadTasks();
    // Verify the scheduled runnable was the one returned from the hook.
    assertEquals(1, newCounter.get());
    assertEquals(0, counter.get());
  }

  @Test public void workerScheduleOnceWithDelayDisposedDoesNotRun() {
    Worker worker = scheduler.createWorker();
    CountingRunnable counter = new CountingRunnable();
    Disposable disposable = worker.schedule(counter, 1, MINUTES);
    idleMainLooper(30, SECONDS);
    disposable.dispose();
    idleMainLooper(30, SECONDS);
    runUiThreadTasks();
    assertEquals(0, counter.get());
  }

  @Test @Ignore("Implementation delegated to default RxJava implementation")
  public void workerSchedulePeriodicallyReschedulesItself() {
    Worker worker = scheduler.createWorker();
    CountingRunnable counter = new CountingRunnable();
    worker.schedulePeriodically(counter, 1, 1, MINUTES);
    runUiThreadTasks();
    assertEquals(0, counter.get());
    idleMainLooper(1, MINUTES);
    runUiThreadTasks();
    assertEquals(1, counter.get());
    idleMainLooper(1, MINUTES);
    runUiThreadTasks();
    assertEquals(2, counter.get());
    idleMainLooper(1, MINUTES);
    runUiThreadTasks();
    assertEquals(3, counter.get());
  }

  @Test @Ignore("Implementation delegated to default RxJava implementation")
  public void workerSchedulePeriodicallyUsesHookOnce() {
    Worker worker = scheduler.createWorker();
    final CountingRunnable newCounter = new CountingRunnable();
    final AtomicReference<Runnable> runnableRef = new AtomicReference<>();
    RxJavaPlugins.setScheduleHandler(new Function<Runnable, Runnable>() {
      @Override public Runnable apply(Runnable runnable) {
        runnableRef.set(runnable);
        return newCounter;
      }
    });
    CountingRunnable counter = new CountingRunnable();
    worker.schedulePeriodically(counter, 1, 1, MINUTES);
    // Verify our action was passed to the schedulers hook.
    assertSame(counter, runnableRef.get());
    runnableRef.set(null);
    idleMainLooper(1, MINUTES);
    runUiThreadTasks();
    // Verify the scheduled action was the one returned from the hook.
    assertEquals(1, newCounter.get());
    assertEquals(0, counter.get());
    // Ensure the hook was not called again when the runnable re-scheduled itself.
    assertNull(runnableRef.get());
  }

  @Test @Ignore("Implementation delegated to default RxJava implementation")
  public void workerSchedulePeriodicallyDisposedDoesNotRun() {
    Worker worker = scheduler.createWorker();
    CountingRunnable counter = new CountingRunnable();
    Disposable disposable = worker.schedulePeriodically(counter, 1, 1, MINUTES);
    runUiThreadTasks();
    assertEquals(0, counter.get());
    idleMainLooper(1, MINUTES);
    runUiThreadTasks();
    assertEquals(1, counter.get());
    idleMainLooper(1, MINUTES);
    runUiThreadTasks();
    assertEquals(2, counter.get());
    disposable.dispose();
    idleMainLooper(1, MINUTES);
    runUiThreadTasks();
    assertEquals(2, counter.get());
  }

  @Test @Ignore("Implementation delegated to default RxJava implementation")
  public void workerSchedulePeriodicallyDisposedDuringRunDoesNotReschedule() {
    Worker worker = scheduler.createWorker();
    final AtomicReference<Disposable> disposableRef = new AtomicReference<>();
    CountingRunnable counter = new CountingRunnable() {
      @Override public void run() {
        super.run();
        if (get() == 2) {
          disposableRef.get().dispose();
        }
      }
    };
    Disposable disposable = worker.schedulePeriodically(counter, 1, 1, MINUTES);
    disposableRef.set(disposable);
    runUiThreadTasks();
    assertEquals(0, counter.get());
    idleMainLooper(1, MINUTES);
    runUiThreadTasks();
    assertEquals(1, counter.get());
    idleMainLooper(1, MINUTES);
    runUiThreadTasks();
    assertEquals(2, counter.get());
    // Dispose will have happened here during the last run() execution.
    idleMainLooper(1, MINUTES);
    runUiThreadTasks();
    assertEquals(2, counter.get());
  }

  @Test @Ignore("Implementation delegated to default RxJava implementation")
  public void workerSchedulePeriodicallyThrowingDoesNotReschedule() {
    Worker worker = scheduler.createWorker();
    CountingRunnable counter = new CountingRunnable() {
      @Override public void run() {
        super.run();
        if (get() == 2) {
          throw new RuntimeException("Broken!");
        }
      }
    };
    worker.schedulePeriodically(counter, 1, 1, MINUTES);
    runUiThreadTasks();
    assertEquals(0, counter.get());
    idleMainLooper(1, MINUTES);
    runUiThreadTasks();
    assertEquals(1, counter.get());
    idleMainLooper(1, MINUTES);
    runUiThreadTasks();
    assertEquals(2, counter.get());
    // Exception will have happened here during the last run() execution.
    idleMainLooper(1, MINUTES);
    runUiThreadTasks();
    assertEquals(2, counter.get());
  }

  @Test public void workerDisposableTracksDisposedState() {
    Worker worker = scheduler.createWorker();
    CountingRunnable counter = new CountingRunnable();
    Disposable disposable = worker.schedule(counter);
    assertFalse(disposable.isDisposed());
    disposable.dispose();
    assertTrue(disposable.isDisposed());
  }

  // Races a dispose against the scheduling path itself via the schedule hook.
  @Test public void workerUnsubscriptionDuringSchedulingCancelsScheduledAction() {
    final AtomicReference<Worker> workerRef = new AtomicReference<>();
    RxJavaPlugins.setScheduleHandler(new Function<Runnable, Runnable>() {
      @Override public Runnable apply(Runnable runnable) {
        // Purposefully unsubscribe in an asinine point after the normal unsubscribed check.
        workerRef.get().dispose();
        return runnable;
      }
    });
    Worker worker = scheduler.createWorker();
    workerRef.set(worker);
    CountingRunnable counter = new CountingRunnable();
    worker.schedule(counter);
    runUiThreadTasks();
    assertEquals(0, counter.get());
  }

  @Test public void workerDisposeCancelsScheduled() {
    Worker worker = scheduler.createWorker();
    CountingRunnable counter = new CountingRunnable();
    worker.schedule(counter, 1, MINUTES);
    worker.dispose();
    runUiThreadTasks();
    assertEquals(0, counter.get());
  }

  // Disposing one worker must not cancel tasks posted by an unrelated worker.
  @Test public void workerUnsubscriptionDoesNotAffectOtherWorkers() {
    Worker workerA = scheduler.createWorker();
    CountingRunnable counterA = new CountingRunnable();
    workerA.schedule(counterA, 1, MINUTES);
    Worker workerB = scheduler.createWorker();
    CountingRunnable counterB = new CountingRunnable();
    workerB.schedule(counterB, 1, MINUTES);
    workerA.dispose();
    runUiThreadTasksIncludingDelayedTasks();
    assertEquals(0, counterA.get());
    assertEquals(1, counterB.get());
  }

  @Test public void workerTracksDisposedState() {
    Worker worker = scheduler.createWorker();
    assertFalse(worker.isDisposed());
    worker.dispose();
    assertTrue(worker.isDisposed());
  }

  @Test public void disposedWorkerReturnsDisposedDisposables() {
    Worker worker = scheduler.createWorker();
    worker.dispose();
    Disposable disposable = worker.schedule(new CountingRunnable());
    assertTrue(disposable.isDisposed());
  }

  // An exception thrown by a scheduled action must reach the RxJavaPlugins error handler.
  // The original handler is restored in finally so other tests are unaffected.
  @Test public void throwingActionRoutedToRxJavaPlugins() {
    Consumer<? super Throwable> originalErrorHandler = RxJavaPlugins.getErrorHandler();
    try {
      final AtomicReference<Throwable> throwableRef = new AtomicReference<>();
      RxJavaPlugins.setErrorHandler(new Consumer<Throwable>() {
        @Override public void accept(Throwable throwable) throws Exception {
          throwableRef.set(throwable);
        }
      });
      Worker worker = scheduler.createWorker();
      final NullPointerException npe = new NullPointerException();
      Runnable action = new Runnable() {
        @Override public void run() {
          throw npe;
        }
      };
      worker.schedule(action);
      runUiThreadTasks();
      assertSame(npe, throwableRef.get());
    } finally {
      RxJavaPlugins.setErrorHandler(originalErrorHandler);
    }
  }

  // Null/negative argument validation for the direct scheduling entry points.
  @Test public void directScheduleOnceInputValidation() {
    try {
      scheduler.scheduleDirect(null);
      fail();
    } catch (NullPointerException e) {
      assertEquals("run == null", e.getMessage());
    }
    try {
      scheduler.scheduleDirect(null, 1, MINUTES);
      fail();
    } catch (NullPointerException e) {
      assertEquals("run == null", e.getMessage());
    }
    try {
      scheduler.scheduleDirect(new CountingRunnable(), 1, null);
      fail();
    } catch (NullPointerException e) {
      assertEquals("unit == null", e.getMessage());
    }
  }

  @Test @Ignore("Implementation delegated to default RxJava implementation")
  public void directSchedulePeriodicallyInputValidation() {
    try {
      scheduler.schedulePeriodicallyDirect(null, 1, 1, MINUTES);
      fail();
    } catch (NullPointerException e) {
      assertEquals("run == null", e.getMessage());
    }
    try {
      scheduler.schedulePeriodicallyDirect(new CountingRunnable(), 1, -1, MINUTES);
      fail();
    } catch (IllegalArgumentException e) {
      assertEquals("period < 0: -1", e.getMessage());
    }
    try {
      scheduler.schedulePeriodicallyDirect(new CountingRunnable(), 1, 1, null);
      fail();
    } catch (NullPointerException e) {
      assertEquals("unit == null", e.getMessage());
    }
  }

  @Test public void workerScheduleOnceInputValidation() {
    Worker worker = scheduler.createWorker();
    try {
      worker.schedule(null);
      fail();
    } catch (NullPointerException e) {
      assertEquals("run == null", e.getMessage());
    }
    try {
      worker.schedule(null, 1, MINUTES);
      fail();
    } catch (NullPointerException e) {
      assertEquals("run == null", e.getMessage());
    }
    try {
      worker.schedule(new CountingRunnable(), 1, null);
      fail();
    } catch (NullPointerException e) {
      assertEquals("unit == null", e.getMessage());
    }
  }

  @Test @Ignore("Implementation delegated to default RxJava implementation")
  public void workerSchedulePeriodicallyInputValidation() {
    Worker worker = scheduler.createWorker();
    try {
      worker.schedulePeriodically(null, 1, 1, MINUTES);
      fail();
    } catch (NullPointerException e) {
      assertEquals("run == null", e.getMessage());
    }
    try {
      worker.schedulePeriodically(new CountingRunnable(), 1, -1, MINUTES);
      fail();
    } catch (IllegalArgumentException e) {
      assertEquals("period < 0: -1", e.getMessage());
    }
    try {
      worker.schedulePeriodically(new CountingRunnable(), 1, 1, null);
      fail();
    } catch (NullPointerException e) {
      assertEquals("unit == null", e.getMessage());
    }
  }

  // The next three tests inspect the Message posted to the main looper queue and check
  // its async flag matches the scheduler's configured `async` mode.
  @Test public void directScheduleSetAsync() {
    ShadowMessageQueue mainMessageQueue = shadowOf(Looper.getMainLooper().getQueue());
    scheduler.scheduleDirect(new Runnable() {
      @Override public void run() {
      }
    });
    Message message = mainMessageQueue.getHead();
    assertEquals(async, message.isAsynchronous());
  }

  @Test public void workerScheduleSetAsync() {
    ShadowMessageQueue mainMessageQueue = shadowOf(Looper.getMainLooper().getQueue());
    Worker worker = scheduler.createWorker();
    worker.schedule(new Runnable() {
      @Override public void run() {
      }
    });
    Message message = mainMessageQueue.getHead();
    assertEquals(async, message.isAsynchronous());
  }

  @Test public void workerSchedulePeriodicallySetAsync() {
    ShadowMessageQueue mainMessageQueue = shadowOf(Looper.getMainLooper().getQueue());
    Worker worker = scheduler.createWorker();
    worker.schedulePeriodically(new Runnable() {
      @Override public void run() {
      }
    }, 1, 1, MINUTES);
    Message message = mainMessageQueue.getHead();
    assertEquals(async, message.isAsynchronous());
  }
}
package in.srain.cube.views.ptr.demo.ui;

import android.content.Intent;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import in.srain.cube.mints.base.BlockMenuFragment;
import in.srain.cube.util.LocalDisplay;
import in.srain.cube.views.ptr.PtrDefaultHandler;
import in.srain.cube.views.ptr.PtrDefaultHandler2;
import in.srain.cube.views.ptr.PtrFrameLayout;
import in.srain.cube.views.ptr.PtrHandler;
import in.srain.cube.views.ptr.PtrHandler2;
import in.srain.cube.views.ptr.demo.R;
import in.srain.cube.views.ptr.demo.ui.classic.*;
import in.srain.cube.views.ptr.demo.ui.storehouse.StoreHouseUsingPointList;
import in.srain.cube.views.ptr.demo.ui.storehouse.StoreHouseUsingString;
import in.srain.cube.views.ptr.demo.ui.storehouse.StoreHouseUsingStringArray;
import in.srain.cube.views.ptr.demo.ui.viewpager.ViewPagerActivity;
import in.srain.cube.views.ptr.header.StoreHouseHeader;

import java.util.ArrayList;

/**
 * Home screen of the pull-to-refresh demo app: a block menu where each entry
 * pushes one demo fragment onto the back stack (or starts the view-pager activity).
 * The fragment's own container is itself a {@link PtrFrameLayout} wired with a
 * StoreHouse header and footer so the home screen demonstrates both
 * pull-down-to-refresh and pull-up-to-load-more.
 */
public class PtrDemoHomeFragment extends BlockMenuFragment {

    /**
     * Registers one menu tile per demo. A {@code null} entry renders an empty
     * spacer cell in the block menu grid.
     */
    @Override
    protected void addItemInfo(ArrayList<BlockMenuFragment.MenuItemInfo> itemInfos) {
        // GridView
        itemInfos.add(newItemInfo(R.string.ptr_demo_block_grid_view, R.color.cube_mints_4d90fe, new OnClickListener() {
            @Override
            public void onClick(View v) {
                getContext().pushFragmentToBackStack(WithGridView.class, null);
            }
        }));
        itemInfos.add(newItemInfo(R.string.ptr_demo_block_frame_layout, R.color.cube_mints_4d90fe, new OnClickListener() {
            @Override
            public void onClick(View v) {
                getContext().pushFragmentToBackStack(WithTextViewInFrameLayoutFragment.class, null);
            }
        }));
        itemInfos.add(newItemInfo(R.string.ptr_demo_block_only_text_view, R.color.cube_mints_4d90fe, new OnClickListener() {
            @Override
            public void onClick(View v) {
                getContext().pushFragmentToBackStack(EvenOnlyATextView.class, null);
            }
        }));
        itemInfos.add(newItemInfo(R.string.ptr_demo_block_list_view, R.color.cube_mints_4d90fe, new OnClickListener() {
            @Override
            public void onClick(View v) {
                getContext().pushFragmentToBackStack(WithListView.class, null);
            }
        }));
        itemInfos.add(newItemInfo(R.string.ptr_demo_block_web_view, R.color.cube_mints_4d90fe, new OnClickListener() {
            @Override
            public void onClick(View v) {
                getContext().pushFragmentToBackStack(WithWebView.class, null);
            }
        }));
        itemInfos.add(newItemInfo(R.string.ptr_demo_block_with_list_view_and_empty_view, R.color.cube_mints_4d90fe, new OnClickListener() {
            @Override
            public void onClick(View v) {
                getContext().pushFragmentToBackStack(WithListViewAndEmptyView.class, null);
            }
        }));
        itemInfos.add(newItemInfo(R.string.ptr_demo_block_scroll_view, R.color.cube_mints_4d90fe, new OnClickListener() {
            @Override
            public void onClick(View v) {
                getContext().pushFragmentToBackStack(WithScrollView.class, null);
            }
        }));
        // Two spacer cells separate the content-view demos from the behavior demos.
        itemInfos.add(null);
        itemInfos.add(null);
        itemInfos.add(newItemInfo(R.string.ptr_demo_block_keep_header, R.color.cube_mints_4d90fe, new OnClickListener() {
            @Override
            public void onClick(View v) {
                getContext().pushFragmentToBackStack(KeepHeader.class, null);
            }
        }));
        itemInfos.add(newItemInfo(R.string.ptr_demo_block_hide_header, R.color.cube_mints_4d90fe, new OnClickListener() {
            @Override
            public void onClick(View v) {
                getContext().pushFragmentToBackStack(HideHeader.class, null);
            }
        }));
        itemInfos.add(newItemInfo(R.string.ptr_demo_block_release_to_refresh, R.color.cube_mints_4d90fe, new OnClickListener() {
            @Override
            public void onClick(View v) {
                getContext().pushFragmentToBackStack(ReleaseToRefresh.class, null);
            }
        }));
        itemInfos.add(newItemInfo(R.string.ptr_demo_block_pull_to_refresh, R.color.cube_mints_4d90fe, new OnClickListener() {
            @Override
            public void onClick(View v) {
                getContext().pushFragmentToBackStack(PullToRefresh.class, null);
            }
        }));
        itemInfos.add(newItemInfo(R.string.ptr_demo_block_auto_fresh, R.color.cube_mints_4d90fe, new OnClickListener() {
            @Override
            public void onClick(View v) {
                getContext().pushFragmentToBackStack(AutoRefresh.class, null);
            }
        }));
        itemInfos.add(null);
        itemInfos.add(newItemInfo(R.string.ptr_demo_block_storehouse_header_using_string_array, R.color.cube_mints_4d90fe, new OnClickListener() {
            @Override
            public void onClick(View v) {
                getContext().pushFragmentToBackStack(StoreHouseUsingStringArray.class, null);
            }
        }));
        itemInfos.add(newItemInfo(R.string.ptr_demo_block_storehouse_header_using_string, R.color.cube_mints_4d90fe, new OnClickListener() {
            @Override
            public void onClick(View v) {
                getContext().pushFragmentToBackStack(StoreHouseUsingString.class, null);
            }
        }));
        itemInfos.add(newItemInfo(R.string.ptr_demo_storehouse_header_using_point_list, R.color.cube_mints_4d90fe, new OnClickListener() {
            @Override
            public void onClick(View v) {
                getContext().pushFragmentToBackStack(StoreHouseUsingPointList.class, null);
            }
        }));
        itemInfos.add(newItemInfo(R.string.ptr_demo_material_style, R.color.cube_mints_4d90fe, new OnClickListener() {
            @Override
            public void onClick(View v) {
                getContext().pushFragmentToBackStack(MaterialStyleFragment.class, null);
            }
        }));
        itemInfos.add(newItemInfo(R.string.ptr_demo_material_style_pin_content, R.color.cube_mints_4d90fe, new OnClickListener() {
            @Override
            public void onClick(View v) {
                getContext().pushFragmentToBackStack(MaterialStylePinContentFragment.class, null);
            }
        }));
        itemInfos.add(newItemInfo(R.string.ptr_demo_block_with_long_press, R.color.cube_mints_4d90fe, new OnClickListener() {
            @Override
            public void onClick(View v) {
                getContext().pushFragmentToBackStack(WithLongPressFragment.class, null);
            }
        }));
        // View-pager demo lives in its own Activity rather than a back-stack fragment.
        itemInfos.add(newItemInfo(R.string.ptr_demo_block_with_view_pager, R.color.cube_mints_4d90fe, new OnClickListener() {
            @Override
            public void onClick(View v) {
                Intent intent = new Intent();
                intent.setClass(getContext(), ViewPagerActivity.class);
                startActivity(intent);
            }
        }));
        itemInfos.add(newItemInfo(R.string.ptr_demo_rentals_style, R.color.cube_mints_4d90fe, new OnClickListener() {
            @Override
            public void onClick(View v) {
                getContext().pushFragmentToBackStack(RentalsStyleFragment.class, null);
            }
        }));
        itemInfos.add(newItemInfo(R.string.ptr_demo_enable_next_ptr_at_once, R.color.cube_mints_4d90fe, new OnClickListener() {
            @Override
            public void onClick(View v) {
                getContext().pushFragmentToBackStack(EnableNextPTRAtOnce.class, null);
            }
        }));
        // Placeholder tiles with no-op click handlers keep the grid shape even.
        itemInfos.add(newItemInfo(R.string.ptr_demo_placeholder, R.color.cube_mints_4d90fe, new OnClickListener() {
            @Override
            public void onClick(View v) {
            }
        }));
        itemInfos.add(newItemInfo(R.string.ptr_demo_placeholder, R.color.cube_mints_4d90fe, new OnClickListener() {
            @Override
            public void onClick(View v) {
            }
        }));
        itemInfos.add(newItemInfo(R.string.ptr_demo_placeholder, R.color.cube_mints_4d90fe, new OnClickListener() {
            @Override
            public void onClick(View v) {
            }
        }));
    }

    /**
     * Wraps the menu in a PtrFrameLayout with StoreHouse header/footer; both refresh
     * and load-more simply auto-complete after 1.5s to demonstrate the animation.
     */
    @Override
    protected View createView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        View view = super.createView(inflater, container, savedInstanceState);
        view.setBackgroundColor(getResources().getColor(R.color.cube_mints_333333));

        final PtrFrameLayout ptrFrameLayout = (PtrFrameLayout) view.findViewById(R.id.fragment_ptr_home_ptr_frame);

        StoreHouseHeader header = new StoreHouseHeader(getContext());
        header.setPadding(0, LocalDisplay.dp2px(20), 0, LocalDisplay.dp2px(20));
        header.initWithString("Ultra PTR");

        ptrFrameLayout.setDurationToCloseHeader(1500);
        ptrFrameLayout.setHeaderView(header);
        ptrFrameLayout.addPtrUIHandler(header);

        StoreHouseHeader footer = new StoreHouseHeader(getContext());
        footer.setPadding(0, LocalDisplay.dp2px(20), 0, LocalDisplay.dp2px(20));
        footer.initWithString("Ultra PTR");

        // NOTE(review): this repeats setDurationToCloseHeader from the header block above —
        // looks like copy-paste; confirm whether a footer-specific duration setter was intended.
        ptrFrameLayout.setDurationToCloseHeader(1500);
        ptrFrameLayout.setFooterView(footer);
        ptrFrameLayout.addPtrUIHandler(footer);

        ptrFrameLayout.setPtrHandler(new PtrHandler2() {
            @Override
            public boolean checkCanDoRefresh(PtrFrameLayout frame, View content, View header) {
                return PtrDefaultHandler.checkContentCanBePulledDown(frame, content, header);
            }

            @Override
            public boolean checkCanDoLoadMore(PtrFrameLayout frame, View content, View footer) {
                return PtrDefaultHandler2.checkContentCanBePulledUp(frame, content, footer);
            }

            @Override
            public void onLoadMoreBegin(PtrFrameLayout frame) {
                // Fake a load-more: just close the footer after 1.5s.
                ptrFrameLayout.postDelayed(new Runnable() {
                    @Override
                    public void run() {
                        ptrFrameLayout.refreshComplete();
                    }
                }, 1500);
            }

            @Override
            public void onRefreshBegin(PtrFrameLayout frame) {
                // Fake a refresh: just close the header after 1.5s.
                ptrFrameLayout.postDelayed(new Runnable() {
                    @Override
                    public void run() {
                        ptrFrameLayout.refreshComplete();
                    }
                }, 1500);
            }
        });
        return view;
    }

    @Override
    protected int getLayoutId() {
        // NOTE(review): layout name "fragmengt_ptr_home" contains a typo but must match the
        // actual resource file name — do not "fix" one without the other.
        return R.layout.fragmengt_ptr_home;
    }

    @Override
    protected void setupViews(View view) {
        super.setupViews(view);
        setHeaderTitle(R.string.ptr_demo_block_for_home);
    }
}
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */
package org.elasticsearch.xpack.datastreams;

import org.apache.http.util.EntityUtils;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.ResponseException;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.test.rest.ESRestTestCase;
import org.elasticsearch.xcontent.json.JsonXContent;

import java.io.IOException;
import java.util.List;
import java.util.Map;

import static org.elasticsearch.rest.action.search.RestSearchAction.TOTAL_HITS_AS_INT_PARAM;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.notNullValue;

/**
 * Contains integration tests that simulate the new indexing strategy upgrade scenarios.
 */
public class DataStreamUpgradeRestIT extends ESRestTestCase {

    /**
     * Upgrade where the old field ({@code thread_id}) and the new field ({@code thread.id})
     * can coexist in the same mapping: the write index mapping is updated in place
     * (write_index_only) without a rollover, and both old- and new-shaped documents
     * end up searchable under {@code thread.id} via the rename pipeline.
     */
    public void testCompatibleMappingUpgrade() throws Exception {
        // Create pipeline
        Request putPipelineRequest = new Request("PUT", "/_ingest/pipeline/mysql-error1");
        putPipelineRequest.setJsonEntity("{\"processors\":[]}");
        assertOK(client().performRequest(putPipelineRequest));

        // Create a template
        Request putComposableIndexTemplateRequest = new Request("POST", "/_index_template/mysql-error");
        putComposableIndexTemplateRequest.setJsonEntity("""
            {
              "index_patterns": [ "logs-mysql-*" ],
              "priority": 200,
              "composed_of": [ "logs-mappings", "logs-settings" ],
              "data_stream": {},
              "template": {
                "mappings": {
                  "properties": {
                    "thread_id": {
                      "type": "long"
                    }
                  }
                },
                "settings": {
                  "index.default_pipeline": "mysql-error1"
                }
              }
            }""");
        assertOK(client().performRequest(putComposableIndexTemplateRequest));

        // Create a data stream and index first doc
        Request indexRequest = new Request("POST", "/logs-mysql-error/_doc");
        indexRequest.setJsonEntity("""
            {"@timestamp": "2020-12-12","message":"abc","thread_id":23}""");
        assertOK(client().performRequest(indexRequest));

        // Create new pipeline and update default pipeline:
        putPipelineRequest = new Request("PUT", "/_ingest/pipeline/mysql-error2");
        putPipelineRequest.setJsonEntity("""
            {"processors":[{"rename":{"field":"thread_id","target_field":"thread.id","ignore_failure":true}}]}""");
        assertOK(client().performRequest(putPipelineRequest));
        Request updateSettingsRequest = new Request("PUT", "/logs-mysql-error/_settings");
        updateSettingsRequest.setJsonEntity("{ \"index\": { \"default_pipeline\" : \"mysql-error2\" }}");
        assertOK(client().performRequest(updateSettingsRequest));

        // Update template
        putComposableIndexTemplateRequest = new Request("POST", "/_index_template/mysql-error");
        putComposableIndexTemplateRequest.setJsonEntity("""
            {
              "index_patterns": [ "logs-mysql-*" ],
              "priority": 200,
              "composed_of": [ "logs-mappings", "logs-settings" ],
              "data_stream": {},
              "template": {
                "mappings": {
                  "properties": {
                    "thread": {
                      "properties": {
                        "id": {
                          "type": "long"
                        }
                      }
                    }
                  }
                },
                "settings": {
                  "index.default_pipeline": "mysql-error2"
                }
              }
            }""");
        assertOK(client().performRequest(putComposableIndexTemplateRequest));

        // Update mapping
        Request putMappingRequest = new Request("PUT", "/logs-mysql-error/_mappings");
        putMappingRequest.addParameters(Map.of("write_index_only", "true"));
        putMappingRequest.setJsonEntity("""
            {"properties":{"thread":{"properties":{"id":{"type":"long"}}}}}""");
        assertOK(client().performRequest(putMappingRequest));

        // Delete old pipeline
        Request deletePipeline = new Request("DELETE", "/_ingest/pipeline/mysql-error1");
        assertOK(client().performRequest(deletePipeline));

        // Index more docs
        indexRequest = new Request("POST", "/logs-mysql-error/_doc");
        indexRequest.setJsonEntity("""
            {"@timestamp": "2020-12-12","message":"abc","thread_id":24}""");
        assertOK(client().performRequest(indexRequest));
        indexRequest = new Request("POST", "/logs-mysql-error/_doc");
        indexRequest.setJsonEntity("""
            {"@timestamp": "2020-12-12","message":"abc","thread":{"id":24}}""");
        assertOK(client().performRequest(indexRequest));

        Request refreshRequest = new Request("POST", "/logs-mysql-error/_refresh");
        assertOK(client().performRequest(refreshRequest));

        // Both the renamed old-shape doc and the new-shape doc must match on thread.id.
        verifyTotalHitCount("logs-mysql-error", "{\"query\":{\"match\":{\"thread.id\": 24}}}", 2, "thread.id");

        Request deleteDateStreamRequest = new Request("DELETE", "/_data_stream/logs-mysql-error");
        assertOK(client().performRequest(deleteDateStreamRequest));
    }

    /**
     * Upgrade where the old field ({@code thread}, a long) conflicts with the new field
     * ({@code thread.id}, an object): the in-place mapping update is rejected, so the
     * upgrade requires a rollover to a new backing index before the new mapping applies.
     */
    public void testConflictingMappingUpgrade() throws Exception {
        // Create pipeline
        Request putPipelineRequest = new Request("PUT", "/_ingest/pipeline/mysql-error1");
        putPipelineRequest.setJsonEntity("{\"processors\":[]}");
        assertOK(client().performRequest(putPipelineRequest));

        // Create a template
        Request putComposableIndexTemplateRequest = new Request("POST", "/_index_template/mysql-error");
        putComposableIndexTemplateRequest.setJsonEntity("""
            {
              "index_patterns": [ "logs-mysql-*" ],
              "priority": 200,
              "composed_of": [ "logs-mappings", "logs-settings" ],
              "data_stream": {},
              "template": {
                "mappings": {
                  "properties": {
                    "thread": {
                      "type": "long"
                    }
                  }
                },
                "settings": {
                  "index.default_pipeline": "mysql-error1"
                }
              }
            }""");
        assertOK(client().performRequest(putComposableIndexTemplateRequest));

        // Create a data stream and index first doc
        Request indexRequest = new Request("POST", "/logs-mysql-error/_doc");
        indexRequest.setJsonEntity("{\"@timestamp\": \"2020-12-12\",\"message\":\"abc\",\"thread\":23}");
        assertOK(client().performRequest(indexRequest));

        // Create new pipeline and update default pipeline:
        putPipelineRequest = new Request("PUT", "/_ingest/pipeline/mysql-error2");
        putPipelineRequest.setJsonEntity("""
            {"processors":[{"rename":{"field":"thread","target_field":"thread.id","ignore_failure":true}}]}""");
        assertOK(client().performRequest(putPipelineRequest));
        Request updateSettingsRequest = new Request("PUT", "/logs-mysql-error/_settings");
        updateSettingsRequest.setJsonEntity("{ \"index\": { \"default_pipeline\" : \"mysql-error2\" }}");
        assertOK(client().performRequest(updateSettingsRequest));

        // Update template
        putComposableIndexTemplateRequest = new Request("POST", "/_index_template/mysql-error");
        putComposableIndexTemplateRequest.setJsonEntity("""
            {
              "index_patterns": [ "logs-mysql-*" ],
              "priority": 200,
              "composed_of": [ "logs-mappings", "logs-settings" ],
              "data_stream": {},
              "template": {
                "mappings": {
                  "properties": {
                    "thread": {
                      "properties": {
                        "id": {
                          "type": "long"
                        }
                      }
                    }
                  }
                },
                "settings": {
                  "index.default_pipeline": "mysql-error2"
                }
              }
            }""");
        assertOK(client().performRequest(putComposableIndexTemplateRequest));

        // Update mapping
        Request putMappingRequest = new Request("PUT", "/logs-mysql-error/_mappings");
        putMappingRequest.addParameters(Map.of("write_index_only", "true"));
        putMappingRequest.setJsonEntity("{\"properties\":{\"thread\":{\"properties\":{\"id\":{\"type\":\"long\"}}}}}");
        // long -> object is not a compatible mapping change, so this must fail.
        Exception e = expectThrows(ResponseException.class, () -> client().performRequest(putMappingRequest));
        assertThat(e.getMessage(), containsString("can't merge a non object mapping [thread] with an object mapping"));

        // Rollover
        Request rolloverRequest = new Request("POST", "/logs-mysql-error/_rollover");
        assertOK(client().performRequest(rolloverRequest));

        // Delete old pipeline
        Request deletePipeline = new Request("DELETE", "/_ingest/pipeline/mysql-error1");
        assertOK(client().performRequest(deletePipeline));

        // Index more docs
        indexRequest = new Request("POST", "/logs-mysql-error/_doc");
        indexRequest.setJsonEntity("""
            {"@timestamp": "2020-12-12","message":"abc","thread":24}""");
        assertOK(client().performRequest(indexRequest));
        indexRequest = new Request("POST", "/logs-mysql-error/_doc");
        indexRequest.setJsonEntity("""
            {"@timestamp": "2020-12-12","message":"abc","thread":{"id":24}}""");
        assertOK(client().performRequest(indexRequest));

        Request refreshRequest = new Request("POST", "/logs-mysql-error/_refresh");
        assertOK(client().performRequest(refreshRequest));

        verifyTotalHitCount("logs-mysql-error", """
            {"query":{"match":{"thread.id": 24}}}""", 2, "thread.id");

        Request deleteDateStreamRequest = new Request("DELETE", "/_data_stream/logs-mysql-error");
        assertOK(client().performRequest(deleteDateStreamRequest));
    }

    /**
     * Runs the given search and asserts the total hit count, and that every returned
     * hit's _source contains {@code requiredField} (i.e. the upgrade produced the new shape).
     */
    static void verifyTotalHitCount(String index, String requestBody, int expectedTotalHits, String requiredField) throws IOException {
        Request request = new Request("GET", "/" + index + "/_search");
        // Ask for hits.total as a plain int rather than the {value, relation} object.
        request.addParameter(TOTAL_HITS_AS_INT_PARAM, "true");
        request.setJsonEntity(requestBody);
        Response response = client().performRequest(request);
        assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
        Map<?, ?> responseBody = XContentHelper.convertToMap(JsonXContent.jsonXContent, EntityUtils.toString(response.getEntity()), false);
        int totalHits = (int) XContentMapValues.extractValue("hits.total", responseBody);
        assertThat(totalHits, equalTo(expectedTotalHits));
        List<?> hits = (List<?>) XContentMapValues.extractValue("hits.hits", responseBody);
        assertThat(hits.size(), equalTo(expectedTotalHits));
        for (Object element : hits) {
            Map<?, ?> hit = (Map<?, ?>) element;
            Object value = XContentMapValues.extractValue("_source." + requiredField, hit);
            assertThat(value, notNullValue());
        }
    }
}
/*
 * Copyright 2017 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.jbpm.workbench.ks.integration;

import static java.util.Collections.emptyMap;
import static org.jbpm.workbench.ks.utils.KieServerUtils.createKieServicesClient;
import static org.jbpm.workbench.ks.utils.KieServerUtils.getAdminCredentialsProvider;
import static org.jbpm.workbench.ks.utils.KieServerUtils.getCredentialsProvider;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.ServiceLoader;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.function.Function;
import javax.annotation.PostConstruct;
import javax.enterprise.context.ApplicationScoped;
import javax.enterprise.event.Event;
import javax.enterprise.event.Observes;
import javax.inject.Inject;

import org.jbpm.workbench.ks.integration.event.ServerInstanceRegistered;
import org.kie.api.KieServices;
import org.kie.api.runtime.KieContainer;
import org.kie.server.api.KieServerConstants;
import org.kie.server.client.CredentialsProvider;
import org.kie.server.client.KieServicesClient;
import org.kie.server.client.balancer.LoadBalancer;
import org.kie.server.client.impl.AbstractKieServicesClientImpl;
import org.kie.server.controller.api.model.events.ServerInstanceConnected;
import org.kie.server.controller.api.model.events.ServerInstanceDisconnected;
import org.kie.server.controller.api.model.events.ServerTemplateDeleted;
import org.kie.server.controller.api.model.events.ServerTemplateUpdated;
import org.kie.server.controller.api.model.runtime.ServerInstance;
import org.kie.server.controller.api.model.runtime.ServerInstanceKey;
import org.kie.server.controller.api.model.spec.Capability;
import org.kie.server.controller.api.model.spec.ContainerSpec;
import org.kie.server.controller.api.model.spec.ServerTemplate;
import org.kie.server.controller.api.model.spec.ServerTemplateList;
import org.kie.server.controller.impl.client.KieServicesClientProvider;
import org.kie.workbench.common.screens.server.management.service.SpecManagementService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.uberfire.commons.services.cdi.Startup;

/**
 * Application-scoped cache of {@link KieServicesClient} instances, keyed by server
 * template id (and, per template, by container id). Keeps the caches in sync with
 * controller events (template updated/deleted, instance connected/disconnected).
 */
@Startup
@ApplicationScoped
public class KieServerIntegration {

    private static final Logger logger = LoggerFactory.getLogger(KieServerIntegration.class);

    // Per-template map key under which the template-wide (non-container) client is stored.
    protected static final String SERVER_TEMPLATE_KEY = "_SERVER_TEMPLATE_MAIN_CLIENT_";

    private KieServices kieServices;

    // serverTemplateId -> (containerId or SERVER_TEMPLATE_KEY -> client)
    private ConcurrentMap<String, Map<String, KieServicesClient>> serverTemplatesClients = new ConcurrentHashMap<String, Map<String, KieServicesClient>>();
    // serverTemplateId -> admin client
    private ConcurrentMap<String, KieServicesClient> adminClients = new ConcurrentHashMap<String, KieServicesClient>();
    // serverInstanceId -> instance key, used to resolve the owning template on disconnect events
    private ConcurrentMap<String, ServerInstanceKey> serverInstancesById = new ConcurrentHashMap<String, ServerInstanceKey>();

    // clientProviders excludes http providers; allClientProviders keeps every discovered provider.
    private List<KieServicesClientProvider> clientProviders = new ArrayList<>();
    private List<KieServicesClientProvider> allClientProviders = new ArrayList<>();

    @Inject
    private SpecManagementService specManagementService;

    @Inject
    private Event<ServerInstanceRegistered> serverInstanceRegisteredEvent;

    /**
     * Discovers client providers via {@link ServiceLoader} and pre-builds clients
     * for every known server template at startup.
     */
    @PostConstruct
    public void createAvailableClients() {
        ServiceLoader<KieServicesClientProvider> loader = ServiceLoader.load(KieServicesClientProvider.class);
        loader.forEach(provider -> {
            // skip default http/rest based client providers and use admin client created here
            if (!provider.supports("http")) {
                clientProviders.add(provider);
            }
            allClientProviders.add(provider);
        });
        // Lower priority value first.
        clientProviders.sort((KieServicesClientProvider one, KieServicesClientProvider two) -> one.getPriority().compareTo(two.getPriority()));

        kieServices = KieServices.Factory.get();

        ServerTemplateList serverTemplates = specManagementService.listServerTemplates();
        logger.debug("Found {} server templates, creating clients for them...", serverTemplates.getServerTemplates().length);

        for (ServerTemplate serverTemplate : serverTemplates.getServerTemplates()) {
            buildClientsForServer(serverTemplate);
        }
    }

    // Test hook: allows injecting a mock KieServices.
    protected void setKieServices(final KieServices kieServices) {
        this.kieServices = kieServices;
    }

    /** Returns the template-wide client for the given server template, or null if absent. */
    public KieServicesClient getServerClient(String serverTemplateId) {
        return serverTemplatesClients.getOrDefault(serverTemplateId, emptyMap()).get(SERVER_TEMPLATE_KEY);
    }

    /**
     * Returns the container-specific client, falling back to the template-wide
     * client when no client exists for the container.
     */
    public KieServicesClient getServerClient(String serverTemplateId, String containerId) {
        KieServicesClient client = serverTemplatesClients.getOrDefault(serverTemplateId, emptyMap()).get(containerId);
        if (client == null) {
            logger.warn("Container {} not found in server template {}, returning global kie server client", containerId, serverTemplateId);
            client = getServerClient(serverTemplateId);
        }
        return client;
    }

    /**
     * Resolves an admin client for a specific server instance by matching a
     * non-http provider against the instance URL; any failure (missing template,
     * missing instance, no matching provider) falls back to the cached
     * template-level admin client.
     */
    public KieServicesClient getAdminServerClient(String serverTemplateId, String serverInstanceId) {
        try {
            ServerInstanceKey instance = specManagementService.getServerTemplate(serverTemplateId).getServerInstanceKeys()
                    .stream()
                    .filter(si -> si.getServerInstanceId().equals(serverInstanceId))
                    .findFirst()
                    .get();
            String url = instance.getUrl();
            KieServicesClient client = clientProviders
                    .stream()
                    .filter(provider -> provider.supports(url))
                    .findFirst()
                    .get()
                    .get(url);
            logger.debug("Using client {}", client);
            return client;
        } catch (Exception e) {
            // NOTE(review): broad catch — any Optional.get() miss lands here by design.
            return adminClients.get(serverTemplateId);
        }
    }

    /**
     * Returns the cached admin client after re-checking previously failed endpoints
     * on its load balancer, so a recovered server can be used again.
     */
    public KieServicesClient getAdminServerClientCheckEndpoints(String serverTemplateId) {
        KieServicesClient adminClient = adminClients.get(serverTemplateId);
        if (adminClient != null) {
            LoadBalancer loadBalancer = ((AbstractKieServicesClientImpl) adminClient).getLoadBalancer();
            loadBalancer.checkFailedEndpoints();
        }
        return adminClient;
    }

    /** Indexes all instances of the template by instance id for later lookup. */
    protected void indexServerInstances(ServerTemplate serverTemplate) {
        serverTemplate.getServerInstanceKeys().forEach(serverInstanceKey -> serverInstancesById.put(serverInstanceKey.getServerInstanceId(), serverInstanceKey));
    }

    /** Drops every indexed instance belonging to the given template. */
    protected void removeServerInstancesFromIndex(String serverTemplateId) {
        Iterator<Map.Entry<String, ServerInstanceKey>> iterator = serverInstancesById.entrySet().iterator();
        while (iterator.hasNext()) {
            Map.Entry<String, ServerInstanceKey> entry = iterator.next();
            if (entry.getValue().getServerTemplateId().equals(serverTemplateId)) {
                // Iterator.remove is required to mutate the map safely during iteration.
                iterator.remove();
            }
        }
    }

    public void onServerTemplateUpdated(@Observes ServerTemplateUpdated serverTemplateUpdated) {
        buildClientsForServer(serverTemplateUpdated.getServerTemplate());
    }

    /** Evicts all cached clients (container, template-wide and admin) for a deleted template. */
    public void onServerTemplateDeleted(@Observes ServerTemplateDeleted serverTemplateDeleted) {
        // remove all clients for this server template and its containers
        final Map<String, KieServicesClient> clients = serverTemplatesClients.remove(serverTemplateDeleted.getServerTemplateId());
        if (clients != null) {
            logger.debug("KieServerClient removed and closed for server template {}", serverTemplateDeleted.getServerTemplateId());
        }
        // remove admin client
        adminClients.remove(serverTemplateDeleted.getServerTemplateId());
        removeServerInstancesFromIndex(serverTemplateDeleted.getServerTemplateId());
    }

    // Continues past the end of this chunk: iterates the template's cached clients
    // to react to a disconnected server instance.
    public void onServerInstanceDisconnected(@Observes ServerInstanceDisconnected serverInstanceDisconnected) {
        ServerInstanceKey serverInstanceKey = serverInstancesById.get(serverInstanceDisconnected.getServerInstanceId());
        if (serverInstanceKey != null) {
            serverTemplatesClients.computeIfPresent(serverInstanceKey.getServerTemplateId(), (serverTemplateId, clients) -> {
                clients.forEach((key, client) -> {
LoadBalancer loadBalancer = ((AbstractKieServicesClientImpl) client).getLoadBalancer(); loadBalancer.markAsFailed(serverInstanceKey.getUrl()); logger.debug("Server instance '{}' for server template {} removed from client thus won't be used for operations", serverInstanceKey.getUrl(), serverInstanceKey.getServerTemplateId()); logger.debug("KieServerClient load balancer updated for server template {}", serverTemplateId.equals(SERVER_TEMPLATE_KEY) ? serverInstanceKey.getServerTemplateId() : serverTemplateId); }); return clients; }); serverInstancesById.remove(serverInstanceKey.getServerInstanceId()); // update admin client KieServicesClient adminClient = adminClients.get(serverInstanceKey.getServerTemplateId()); if (adminClient != null) { LoadBalancer loadBalancer = ((AbstractKieServicesClientImpl) adminClient).getLoadBalancer(); loadBalancer.markAsFailed(serverInstanceKey.getUrl()); logger.debug("Server instance {} for server template {} removed from client thus won't be used for operations", serverInstanceKey.getUrl(), serverInstanceKey.getServerTemplateId()); } } } public void onServerInstanceConnected(@Observes ServerInstanceConnected serverInstanceConnected) { ServerInstance serverInstance = serverInstanceConnected.getServerInstance(); serverTemplatesClients.computeIfPresent(serverInstance.getServerTemplateId(), (serverTemplateId, clients) -> { clients.forEach((key, client) -> { // update regular clients updateOrBuildClient(client, serverInstance); logger.debug("KieServerClient load balancer updated for server template {}", serverTemplateId.equals(SERVER_TEMPLATE_KEY) ? 
serverInstance.getServerTemplateId() : serverTemplateId); }); return clients; }); serverInstancesById.put(serverInstance.getServerInstanceId(), serverInstance); KieServicesClient adminClient = adminClients.get(serverInstance.getServerTemplateId()); // update admin clients updateOrBuildClient(adminClient, serverInstance); // once all steps are completed successfully notify other parts interested so the serverClient can actually be used serverInstanceRegisteredEvent.fire(new ServerInstanceRegistered(serverInstanceConnected.getServerInstance())); } public List<Object> broadcastToKieServers(String serverTemplateId, Function<KieServicesClient, Object> operation) { List<Object> results = new ArrayList<>(); ServerTemplate serverTemplate = specManagementService.getServerTemplate(serverTemplateId); if (serverTemplate.getServerInstanceKeys() == null || serverTemplate.getServerInstanceKeys().isEmpty()) { return results; } for (ServerInstanceKey instanceUrl : serverTemplate.getServerInstanceKeys()) { try { KieServicesClient client = getClient(instanceUrl.getUrl()); Object result = operation.apply(client); results.add(result); logger.debug("KIE Server at {} returned result {} for broadcast operation {}", instanceUrl, result, operation); } catch (Exception e) { logger.debug("Unable to send breadcast to {} due to {}", instanceUrl, e.getMessage(), e); } } return results; } protected KieServicesClient getClient(String url) { KieServicesClient client = allClientProviders.stream().filter(provider -> provider.supports(url)).findFirst().get().get(url); logger.debug("Using client {}", client); return client; } protected void updateOrBuildClient(KieServicesClient client, ServerInstance serverInstance) { if (client != null) { LoadBalancer loadBalancer = ((AbstractKieServicesClientImpl) client).getLoadBalancer(); loadBalancer.activate(serverInstance.getUrl()); logger.debug("Server instance {} for server template {} activated on client thus will be used for operations", 
serverInstance.getUrl(), serverInstance.getServerTemplateId()); } else { logger.debug("No kie server client yet created, attempting to create one for server template {}", serverInstance.getServerTemplateId()); ServerTemplate serverTemplate = specManagementService.getServerTemplate(serverInstance.getServerTemplateId()); buildClientsForServer(serverTemplate); } } protected void buildClientsForServer(ServerTemplate serverTemplate) { KieServicesClient kieServicesClient = createClientForTemplate(serverTemplate, null, getCredentialsProvider()); if (kieServicesClient != null) { serverTemplatesClients.computeIfAbsent(serverTemplate.getId(), (k) -> new ConcurrentHashMap<String, KieServicesClient>()); serverTemplatesClients.get(serverTemplate.getId()).put(SERVER_TEMPLATE_KEY, kieServicesClient); } if (serverTemplate.getContainersSpec() != null) { for (ContainerSpec containerSpec : serverTemplate.getContainersSpec()) { try { if (serverTemplatesClients.get(serverTemplate.getId()).containsKey(containerSpec.getId())) { logger.debug("KieServerClient for {} is already created", containerSpec.getId()); continue; } KieContainer kieContainer = kieServices.newKieContainer(containerSpec.getReleasedId()); KieServicesClient kieServicesClientForContainer = createClientForTemplate(serverTemplate, kieContainer.getClassLoader(), getCredentialsProvider()); if (kieServicesClient != null) { serverTemplatesClients.get(serverTemplate.getId()).put(containerSpec.getId(), kieServicesClientForContainer); } } catch (Exception e) { logger.warn("Failed ot create kie server client for container {} due to {}", containerSpec.getId(), e.getMessage()); } } } // lastly create admin client KieServicesClient adminKieServicesClient = createClientForTemplate(serverTemplate, null, getAdminCredentialsProvider()); if (adminKieServicesClient != null) { adminClients.put(serverTemplate.getId(), adminKieServicesClient); } } protected KieServicesClient createClientForTemplate(ServerTemplate serverTemplate, ClassLoader 
classLoader, CredentialsProvider credentialsProvider) { if (serverTemplate.getServerInstanceKeys() == null || serverTemplate.getServerInstanceKeys().isEmpty()) { return null; } try { StringBuilder endpoints = new StringBuilder(); for (ServerInstanceKey serverInstanceKey : serverTemplate.getServerInstanceKeys()) { endpoints.append(serverInstanceKey.getUrl() + "|"); } endpoints.deleteCharAt(endpoints.length() - 1); logger.debug("Creating client that will use following list of endpoints {}", endpoints); final List<String> mappedCapabilities = new ArrayList<>(); if (serverTemplate.getCapabilities().contains(Capability.PROCESS.name())) { mappedCapabilities.add(KieServerConstants.CAPABILITY_BPM); mappedCapabilities.add(KieServerConstants.CAPABILITY_BPM_UI); mappedCapabilities.add(KieServerConstants.CAPABILITY_CASE); } if (serverTemplate.getCapabilities().contains(Capability.RULE.name())) { mappedCapabilities.add(KieServerConstants.CAPABILITY_BRM); } if (serverTemplate.getCapabilities().contains(Capability.PLANNING.name())) { mappedCapabilities.add(KieServerConstants.CAPABILITY_BRP); } final KieServicesClient kieServicesClient = createKieServicesClient(endpoints.toString(), classLoader, credentialsProvider, mappedCapabilities.toArray(new String[mappedCapabilities.size()])); logger.debug("KieServerClient created successfully for server template {}", serverTemplate.getId()); indexServerInstances(serverTemplate); return kieServicesClient; } catch (Exception e) { logger.error("Unable to create kie server client for server template {} due to {}", serverTemplate, e.getMessage(), e); return null; } } protected Map<String, Map<String, KieServicesClient>> getServerTemplatesClients() { return serverTemplatesClients; } protected Map<String, ServerInstanceKey> getServerInstancesById() { return serverInstancesById; } protected void setKieServicesClientProviders(List<KieServicesClientProvider> providers) { this.allClientProviders = providers; } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package com.github.tteofili.calabrize.impl; import java.io.BufferedWriter; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.commons.math3.distribution.EnumeratedDistribution; import org.apache.commons.math3.util.Pair; import org.nd4j.linalg.api.iter.NdIndexIterator; import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.api.ops.impl.transforms.SetRange; import org.nd4j.linalg.api.ops.impl.transforms.SoftMax; import org.nd4j.linalg.factory.Nd4j; import org.nd4j.linalg.ops.transforms.Transforms; /** * A min char/word-level vanilla RNN model, based on Andrej Karpathy's python code. 
* See also: * * @see <a href="http://karpathy.github.io/2015/05/21/rnn-effectiveness">The Unreasonable Effectiveness of Recurrent Neural Networks</a> * @see <a href="https://gist.github.com/karpathy/d4dee566867f8291f086">Minimal character-level language model with a Vanilla Recurrent Neural Network, in Python/numpy</a> */ public class RNN { // hyperparameters protected float learningRate; protected final int seqLength; // no. of steps to unroll the RNN for protected final int hiddenLayerSize; protected final int epochs; protected final boolean useChars; protected final int batch; protected final int vocabSize; protected final Map<String, Integer> charToIx; protected final Map<Integer, String> ixToChar; protected final List<String> data; private final static double eps = 1e-8; private final static double decay = 0.9; private final V2HCalabrianEncoder encoder = new V2HCalabrianEncoder(); // model parameters private final INDArray wxh; // input to hidden private final INDArray whh; // hidden to hidden private final INDArray why; // hidden to output private final INDArray bh; // hidden bias private final INDArray by; // output bias private INDArray hPrev = null; // memory state public RNN(float learningRate, int seqLength, int hiddenLayerSize, int epochs, String text) { this(learningRate, seqLength, hiddenLayerSize, epochs, text, 1, true); } public RNN(float learningRate, int seqLength, int hiddenLayerSize, int epochs, String text, int batch, boolean useChars) { this.learningRate = learningRate; this.seqLength = seqLength; this.hiddenLayerSize = hiddenLayerSize; this.epochs = epochs; this.batch = batch; this.useChars = useChars; data = Arrays.asList(useChars ? 
toStrings(text.toCharArray()) : text.split(" ")); for (String d : data) { data.add(encoder.encode(d)); } // data = new LinkedList<>(); // Collections.addAll(data, textTokens); Set<String> tokens = new HashSet<>(data); vocabSize = tokens.size(); System.out.printf("data has %d tokens, %d unique.\n", data.size(), vocabSize); charToIx = new HashMap<>(); ixToChar = new HashMap<>(); int i = 0; for (String c : tokens) { charToIx.put(c, i); ixToChar.put(i, c); i++; } wxh = Nd4j.randn(hiddenLayerSize, vocabSize).mul(0.01); whh = Nd4j.randn(hiddenLayerSize, hiddenLayerSize).mul(0.01); why = Nd4j.randn(vocabSize, hiddenLayerSize).mul(0.01); bh = Nd4j.zeros(hiddenLayerSize, 1); by = Nd4j.zeros(vocabSize, 1); } private String[] toStrings(char[] chars) { String[] strings = new String[chars.length]; for (int i = 0; i < chars.length; i++) { strings[i] = String.valueOf(chars[i]); } return strings; } public void learn() { int currentEpoch = 0; int n = 0; int p = 0; // memory variables for Adagrad INDArray mWxh = Nd4j.zerosLike(wxh); INDArray mWhh = Nd4j.zerosLike(whh); INDArray mWhy = Nd4j.zerosLike(why); INDArray mbh = Nd4j.zerosLike(bh); INDArray mby = Nd4j.zerosLike(by); // loss at iteration 0 double smoothLoss = -Math.log(1.0 / vocabSize) * seqLength; while (true) { // prepare inputs (we're sweeping from left to right in steps seqLength long) if (p + seqLength + 1 >= data.size() || n == 0) { hPrev = Nd4j.zeros(hiddenLayerSize, 1); // reset RNN memory p = 0; // go from start of data currentEpoch++; if (currentEpoch == epochs) { System.out.println("training finished: e:" + epochs + ", l: " + smoothLoss + ", h:(" + learningRate + ", " + seqLength + ", " + hiddenLayerSize + ")"); break; } } INDArray inputs = getSequence(p, true); INDArray targets = getSequence(p + 1, false); // sample from the model every now and then if (n % 1000 == 0 && n > 0) { String txt = sample(inputs.getInt(0)); System.out.printf("\n---\n %s \n----\n", txt); } INDArray dWxh = Nd4j.zerosLike(wxh); INDArray 
dWhh = Nd4j.zerosLike(whh); INDArray dWhy = Nd4j.zerosLike(why); INDArray dbh = Nd4j.zerosLike(bh); INDArray dby = Nd4j.zerosLike(by); // forward seqLength characters through the net and fetch gradient double loss = lossFun(inputs, targets, dWxh, dWhh, dWhy, dbh, dby); smoothLoss = smoothLoss * 0.999 + loss * 0.001; if (Double.isNaN(smoothLoss)) { System.out.println("loss is NaN (over/underflow occured, try adjusting hyperparameters)"); break; } if (n % 100 == 0) { System.out.printf("iter %d, loss: %f\n", n, smoothLoss); // print progress } if (n % batch == 0) { // perform parameter update with RMSprop mWxh = mWxh.mul(decay).add(1 - decay).mul((dWxh).mul(dWxh)); wxh.subi(dWxh.mul(learningRate).div(Transforms.sqrt(mWxh).add(eps))); mWhh = mWhh.mul(decay).add(1 - decay).mul((dWhh).mul(dWhh)); whh.subi(dWhh.mul(learningRate).div(Transforms.sqrt(mWhh).add(eps))); mWhy = mWhy.mul(decay).add(1 - decay).mul((dWhy).mul(dWhy)); why.subi(dWhy.mul(learningRate).div(Transforms.sqrt(mWhy).add(eps))); mbh = mbh.mul(decay).add(1 - decay).mul((dbh).mul(dbh)); bh.subi(dbh.mul(learningRate).div(Transforms.sqrt(mbh).add(eps))); mby = mby.mul(decay).add(1 - decay).mul((dby).mul(dby)); by.subi(dby.mul(learningRate).div(Transforms.sqrt(mby).add(eps))); } p += seqLength; // move data pointer n++; // iteration counter } } private INDArray getSequence(int p, boolean translate) { INDArray inputs = Nd4j.create(seqLength); int c = 0; for (String ch : data.subList(p, p + seqLength)) { if (translate) { ch = encoder.encode(ch); } Integer ix = charToIx.get(ch); inputs.putScalar(c, ix); c++; } return inputs; } /** * inputs, targets are both list of integers * hprev is Hx1 array of initial hidden state * returns the modified loss, gradients on model parameters */ private double lossFun(INDArray inputs, INDArray targets, INDArray dWxh, INDArray dWhh, INDArray dWhy, INDArray dbh, INDArray dby) { INDArray xs = Nd4j.zeros(inputs.length(), vocabSize); INDArray hs = null; INDArray ys = null; INDArray ps 
= null; INDArray hs1 = Nd4j.create(hPrev.shape()); Nd4j.copy(hPrev, hs1); double loss = 0; // forward pass for (int t = 0; t < inputs.length(); t++) { int tIndex = inputs.getScalar(t).getInt(0); xs.putScalar(t, tIndex, 1); // encode in 1-of-k representation INDArray hsRow = t == 0 ? hs1 : hs.getRow(t - 1); INDArray hst = Transforms.tanh(wxh.mmul(xs.getRow(t).transpose()).add(whh.mmul(hsRow)).add(bh)); // hidden state if (hs == null) { hs = init(inputs.length(), hst.shape()); } hs.putRow(t, hst); INDArray yst = (why.mmul(hst)).add(by); // unnormalized log probabilities for next chars if (ys == null) { ys = init(inputs.length(), yst.shape()); } ys.putRow(t, yst); INDArray pst = Nd4j.getExecutioner().execAndReturn(new SoftMax(yst)); // probabilities for next chars if (ps == null) { ps = init(inputs.length(), pst.shape()); } ps.putRow(t, pst); loss += -Math.log(pst.getDouble(targets.getInt(t),0)); // softmax (cross-entropy loss) } // backward pass: compute gradients going backwards INDArray dhNext = Nd4j.zerosLike(hPrev); for (int t = inputs.length() - 1; t >= 0; t--) { INDArray dy = ps.getRow(t); dy.putRow(targets.getInt(t), dy.getRow(targets.getInt(t)).sub(1)); // backprop into y INDArray hst = hs.getRow(t); dWhy.addi(dy.mmul(hst.transpose())); // derivative of hy layer dby.addi(dy); INDArray dh = why.transpose().mmul(dy).add(dhNext); // backprop into h INDArray dhraw = (Nd4j.ones(hst.shape()).sub(hst.mul(hst))).mul(dh); // backprop through tanh nonlinearity dbh.addi(dhraw); dWxh.addi(dhraw.mmul(xs.getRow(t))); INDArray hsRow = t == 0 ? 
hs1 : hs.getRow(t - 1); dWhh.addi(dhraw.mmul(hsRow.transpose())); dhNext = whh.transpose().mmul(dhraw); } this.hPrev = hs.getRow(inputs.length() - 1); return loss; } protected INDArray init(int t, int[] aShape) { INDArray as; int[] shape = new int[1 + aShape.length]; shape[0] = t; System.arraycopy(aShape, 0, shape, 1, aShape.length); as = Nd4j.create(shape); return as; } /** * sample a sequence of integers from the model, using current (hPrev) memory state, seedIx is seed letter for first time step */ public String sample(int seedIx) { INDArray x = Nd4j.zeros(vocabSize, 1); x.putScalar(seedIx, 1); int sampleSize = 144; INDArray ixes = Nd4j.create(sampleSize); INDArray h = hPrev.dup(); for (int t = 0; t < sampleSize; t++) { h = Transforms.tanh(wxh.mmul(x).add(whh.mmul(h)).add(bh)); INDArray y = (why.mmul(h)).add(by); INDArray pm = Nd4j.getExecutioner().execAndReturn(new SoftMax(y)).ravel(); List<Pair<Integer, Double>> d = new LinkedList<>(); for (int pi = 0; pi < vocabSize; pi++) { d.add(new Pair<>(pi, pm.getDouble(0, pi))); } try { EnumeratedDistribution<Integer> distribution = new EnumeratedDistribution<>(d); int ix = distribution.sample(); x = Nd4j.zeros(vocabSize, 1); x.putScalar(ix, 1); ixes.putScalar(t, ix); } catch (Exception e) { } } return getSampleString(ixes); } protected String getSampleString(INDArray ixes) { StringBuilder txt = new StringBuilder(); NdIndexIterator ndIndexIterator = new NdIndexIterator(ixes.shape()); while (ndIndexIterator.hasNext()) { int[] next = ndIndexIterator.next(); if (!useChars && txt.length() > 0) { txt.append(' '); } txt.append(ixToChar.get(ixes.getInt(next))); } return txt.toString(); } public int getVocabSize() { return vocabSize; } @Override public String toString() { return getClass().getName() + "{" + "learningRate=" + learningRate + ", seqLength=" + seqLength + ", hiddenLayerSize=" + hiddenLayerSize + ", epochs=" + epochs + ", vocabSize=" + vocabSize + ", useChars=" + useChars + ", batch=" + batch + '}'; } public void 
serialize(String prefix) throws IOException { BufferedWriter bufferedWriter = new BufferedWriter(new FileWriter(new File(prefix + new Date().toString() + ".txt"))); bufferedWriter.write("wxh"); bufferedWriter.write(wxh.toString()); bufferedWriter.write("whh"); bufferedWriter.write(whh.toString()); bufferedWriter.write("why"); bufferedWriter.write(why.toString()); bufferedWriter.write("bh"); bufferedWriter.write(bh.toString()); bufferedWriter.write("by"); bufferedWriter.write(by.toString()); bufferedWriter.flush(); bufferedWriter.close(); } }
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.facebook.presto.spi;

import io.airlift.slice.Slice;

import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.OptionalLong;
import java.util.Set;

import static com.facebook.presto.spi.StandardErrorCode.INTERNAL_ERROR;
import static com.facebook.presto.spi.StandardErrorCode.NOT_SUPPORTED;
import static java.util.Collections.emptyList;
import static java.util.Collections.emptyMap;

/**
 * Connector-facing metadata SPI: schema/table/column discovery plus DDL and
 * write lifecycle hooks (create/insert/delete/view operations). Most mutating
 * operations default to throwing {@code NOT_SUPPORTED} so read-only connectors
 * only need to implement the discovery methods.
 */
@Deprecated
public interface ConnectorMetadata
{
    /**
     * Returns the schemas provided by this connector.
     */
    List<String> listSchemaNames(ConnectorSession session);

    /**
     * Returns a table handle for the specified table name, or null if the connector does not contain the table.
     */
    ConnectorTableHandle getTableHandle(ConnectorSession session, SchemaTableName tableName);

    /**
     * Return a list of table layouts that satisfy the given constraint.
     * <p>
     * For each layout, connectors must return an "unenforced constraint" representing the part of the constraint summary that isn't guaranteed by the layout.
     */
    default List<ConnectorTableLayoutResult> getTableLayouts(
            ConnectorSession session,
            ConnectorTableHandle table,
            Constraint<ColumnHandle> constraint,
            Optional<Set<ColumnHandle>> desiredColumns)
    {
        throw new UnsupportedOperationException("not yet implemented");
    }

    /**
     * Resolve a layout handle (previously returned by {@link #getTableLayouts})
     * back to its full {@code ConnectorTableLayout} description.
     */
    default ConnectorTableLayout getTableLayout(ConnectorSession session, ConnectorTableLayoutHandle handle)
    {
        throw new UnsupportedOperationException("not yet implemented");
    }

    /**
     * Return the metadata for the specified table handle.
     *
     * @throws RuntimeException if table handle is no longer valid
     */
    ConnectorTableMetadata getTableMetadata(ConnectorSession session, ConnectorTableHandle table);

    /**
     * List table names, possibly filtered by schema. An empty list is returned if none match.
     */
    List<SchemaTableName> listTables(ConnectorSession session, String schemaNameOrNull);

    /**
     * Returns the handle for the sample weight column, or null if the table does not contain sampled data.
     *
     * @throws RuntimeException if the table handle is no longer valid
     */
    default ColumnHandle getSampleWeightColumnHandle(ConnectorSession session, ConnectorTableHandle tableHandle)
    {
        return null;
    }

    /**
     * Returns true if this catalog supports creation of sampled tables
     */
    default boolean canCreateSampledTables(ConnectorSession session)
    {
        return false;
    }

    /**
     * Gets all of the columns on the specified table, or an empty map if the columns can not be enumerated.
     *
     * @throws RuntimeException if table handle is no longer valid
     */
    Map<String, ColumnHandle> getColumnHandles(ConnectorSession session, ConnectorTableHandle tableHandle);

    /**
     * Gets the metadata for the specified table column.
     *
     * @throws RuntimeException if table or column handles are no longer valid
     */
    ColumnMetadata getColumnMetadata(ConnectorSession session, ConnectorTableHandle tableHandle, ColumnHandle columnHandle);

    /**
     * Gets the metadata for all columns that match the specified table prefix.
     */
    Map<SchemaTableName, List<ColumnMetadata>> listTableColumns(ConnectorSession session, SchemaTablePrefix prefix);

    /**
     * Creates a table using the specified table metadata.
     */
    default void createTable(ConnectorSession session, ConnectorTableMetadata tableMetadata)
    {
        throw new PrestoException(NOT_SUPPORTED, "This connector does not support creating tables");
    }

    /**
     * Drops the specified table
     *
     * @throws RuntimeException if the table can not be dropped or table handle is no longer valid
     */
    default void dropTable(ConnectorSession session, ConnectorTableHandle tableHandle)
    {
        throw new PrestoException(NOT_SUPPORTED, "This connector does not support dropping tables");
    }

    /**
     * Rename the specified table
     */
    default void renameTable(ConnectorSession session, ConnectorTableHandle tableHandle, SchemaTableName newTableName)
    {
        throw new PrestoException(NOT_SUPPORTED, "This connector does not support renaming tables");
    }

    /**
     * Add the specified column
     */
    default void addColumn(ConnectorSession session, ConnectorTableHandle tableHandle, ColumnMetadata column)
    {
        throw new PrestoException(NOT_SUPPORTED, "This connector does not support adding columns");
    }

    /**
     * Rename the specified column
     */
    default void renameColumn(ConnectorSession session, ConnectorTableHandle tableHandle, ColumnHandle source, String target)
    {
        throw new PrestoException(NOT_SUPPORTED, "This connector does not support renaming columns");
    }

    /**
     * Begin the atomic creation of a table with data.
     */
    default ConnectorOutputTableHandle beginCreateTable(ConnectorSession session, ConnectorTableMetadata tableMetadata)
    {
        throw new PrestoException(NOT_SUPPORTED, "This connector does not support creating tables with data");
    }

    /**
     * Commit a table creation with data after the data is written.
     */
    default void commitCreateTable(ConnectorSession session, ConnectorOutputTableHandle tableHandle, Collection<Slice> fragments)
    {
        // INTERNAL_ERROR (not NOT_SUPPORTED): reaching here means beginCreateTable
        // was overridden without its matching commit — a connector bug.
        throw new PrestoException(INTERNAL_ERROR, "ConnectorMetadata beginCreateTable() is implemented without commitCreateTable()");
    }

    /**
     * Rollback a table creation
     */
    default void rollbackCreateTable(ConnectorSession session, ConnectorOutputTableHandle tableHandle) {}

    /**
     * Begin insert query
     */
    default ConnectorInsertTableHandle beginInsert(ConnectorSession session, ConnectorTableHandle tableHandle)
    {
        throw new PrestoException(NOT_SUPPORTED, "This connector does not support inserts");
    }

    /**
     * Commit insert query
     */
    default void commitInsert(ConnectorSession session, ConnectorInsertTableHandle insertHandle, Collection<Slice> fragments)
    {
        // INTERNAL_ERROR (not NOT_SUPPORTED): reaching here means beginInsert
        // was overridden without its matching commit — a connector bug.
        throw new PrestoException(INTERNAL_ERROR, "ConnectorMetadata beginInsert() is implemented without commitInsert()");
    }

    /**
     * Rollback insert query
     */
    default void rollbackInsert(ConnectorSession session, ConnectorInsertTableHandle insertHandle) {}

    /**
     * Get the column handle that will generate row IDs for the delete operation.
     * These IDs will be passed to the {@code deleteRows()} method of the
     * {@link UpdatablePageSource} that created them.
     */
    default ColumnHandle getUpdateRowIdColumnHandle(ConnectorSession session, ConnectorTableHandle tableHandle)
    {
        throw new PrestoException(NOT_SUPPORTED, "This connector does not support updates or deletes");
    }

    /**
     * Begin delete query
     */
    default ConnectorTableHandle beginDelete(ConnectorSession session, ConnectorTableHandle tableHandle)
    {
        throw new PrestoException(NOT_SUPPORTED, "This connector does not support deletes");
    }

    /**
     * Commit delete query
     *
     * @param fragments all fragments returned by {@link com.facebook.presto.spi.UpdatablePageSource#finish()}
     */
    default void commitDelete(ConnectorSession session, ConnectorTableHandle tableHandle, Collection<Slice> fragments)
    {
        throw new PrestoException(NOT_SUPPORTED, "This connector does not support deletes");
    }

    /**
     * Rollback delete query
     */
    default void rollbackDelete(ConnectorSession session, ConnectorTableHandle tableHandle) {}

    /**
     * Create the specified view. The data for the view is opaque to the connector.
     */
    default void createView(ConnectorSession session, SchemaTableName viewName, String viewData, boolean replace)
    {
        throw new PrestoException(NOT_SUPPORTED, "This connector does not support creating views");
    }

    /**
     * Drop the specified view.
     */
    default void dropView(ConnectorSession session, SchemaTableName viewName)
    {
        throw new PrestoException(NOT_SUPPORTED, "This connector does not support dropping views");
    }

    /**
     * List view names, possibly filtered by schema. An empty list is returned if none match.
     */
    default List<SchemaTableName> listViews(ConnectorSession session, String schemaNameOrNull)
    {
        return emptyList();
    }

    /**
     * Gets the view data for views that match the specified table prefix.
     */
    default Map<SchemaTableName, ConnectorViewDefinition> getViews(ConnectorSession session, SchemaTablePrefix prefix)
    {
        return emptyMap();
    }

    /**
     * @return whether delete without table scan is supported
     */
    default boolean supportsMetadataDelete(ConnectorSession session, ConnectorTableHandle tableHandle, ConnectorTableLayoutHandle tableLayoutHandle)
    {
        throw new PrestoException(NOT_SUPPORTED, "This connector does not support deletes");
    }

    /**
     * Delete the provided table layout
     *
     * @return number of rows deleted, or null for unknown
     */
    default OptionalLong metadataDelete(ConnectorSession session, ConnectorTableHandle tableHandle, ConnectorTableLayoutHandle tableLayoutHandle)
    {
        throw new PrestoException(NOT_SUPPORTED, "This connector does not support deletes");
    }
}
/*
 * Copyright 2001-2013 Stephen Colebourne
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.joda.time;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;

import junit.framework.TestCase;
import junit.framework.TestSuite;

/**
 * This class is a Junit unit test for Minutes.
 *
 * @author Stephen Colebourne
 */
public class TestMinutes extends TestCase {
    // Test in 2002/03 as time zones are more well known
    // (before the late 90's they were all over the place)
    private static final DateTimeZone PARIS = DateTimeZone.forID("Europe/Paris");

    public static void main(String[] args) {
        junit.textui.TestRunner.run(suite());
    }

    public static TestSuite suite() {
        return new TestSuite(TestMinutes.class);
    }

    public TestMinutes(String name) {
        super(name);
    }

    protected void setUp() throws Exception {
    }

    protected void tearDown() throws Exception {
    }

    //-----------------------------------------------------------------------
    // The cached singleton constants must carry the expected minute counts.
    public void testConstants() {
        assertEquals(0, Minutes.ZERO.getMinutes());
        assertEquals(1, Minutes.ONE.getMinutes());
        assertEquals(2, Minutes.TWO.getMinutes());
        assertEquals(3, Minutes.THREE.getMinutes());
        assertEquals(Integer.MAX_VALUE, Minutes.MAX_VALUE.getMinutes());
        assertEquals(Integer.MIN_VALUE, Minutes.MIN_VALUE.getMinutes());
    }

    //-----------------------------------------------------------------------
    // minutes(int) returns the cached singletons for 0-3 and the extremes
    // (hence assertSame, not assertEquals), and fresh instances otherwise.
    public void testFactory_minutes_int() {
        assertSame(Minutes.ZERO, Minutes.minutes(0));
        assertSame(Minutes.ONE, Minutes.minutes(1));
        assertSame(Minutes.TWO, Minutes.minutes(2));
        assertSame(Minutes.THREE, Minutes.minutes(3));
        assertSame(Minutes.MAX_VALUE, Minutes.minutes(Integer.MAX_VALUE));
        assertSame(Minutes.MIN_VALUE, Minutes.minutes(Integer.MIN_VALUE));
        assertEquals(-1, Minutes.minutes(-1).getMinutes());
        assertEquals(4, Minutes.minutes(4).getMinutes());
    }

    //-----------------------------------------------------------------------
    // minutesBetween(ReadableInstant, ReadableInstant): signed difference,
    // negative when the end precedes the start.
    public void testFactory_minutesBetween_RInstant() {
        DateTime start = new DateTime(2006, 6, 9, 12, 3, 0, 0, PARIS);
        DateTime end1 = new DateTime(2006, 6, 9, 12, 6, 0, 0, PARIS);
        DateTime end2 = new DateTime(2006, 6, 9, 12, 9, 0, 0, PARIS);

        assertEquals(3, Minutes.minutesBetween(start, end1).getMinutes());
        assertEquals(0, Minutes.minutesBetween(start, start).getMinutes());
        assertEquals(0, Minutes.minutesBetween(end1, end1).getMinutes());
        assertEquals(-3, Minutes.minutesBetween(end1, start).getMinutes());
        assertEquals(6, Minutes.minutesBetween(start, end2).getMinutes());
    }

    // minutesBetween(ReadablePartial, ReadablePartial): also accepts mixed
    // partial types (LocalTime vs the deprecated TimeOfDay).
    public void testFactory_minutesBetween_RPartial() {
        LocalTime start = new LocalTime(12, 3);
        LocalTime end1 = new LocalTime(12, 6);
        @SuppressWarnings("deprecation")
        TimeOfDay end2 = new TimeOfDay(12, 9);

        assertEquals(3, Minutes.minutesBetween(start, end1).getMinutes());
        assertEquals(0, Minutes.minutesBetween(start, start).getMinutes());
        assertEquals(0, Minutes.minutesBetween(end1, end1).getMinutes());
        assertEquals(-3, Minutes.minutesBetween(end1, start).getMinutes());
        assertEquals(6, Minutes.minutesBetween(start, end2).getMinutes());
    }

    // minutesIn(ReadableInterval): null interval yields zero.
    public void testFactory_minutesIn_RInterval() {
        DateTime start = new DateTime(2006, 6, 9, 12, 3, 0, 0, PARIS);
        DateTime end1 = new DateTime(2006, 6, 9, 12, 6, 0, 0, PARIS);
        DateTime end2 = new DateTime(2006, 6, 9, 12, 9, 0, 0, PARIS);

        assertEquals(0, Minutes.minutesIn((ReadableInterval) null).getMinutes());
        assertEquals(3, Minutes.minutesIn(new Interval(start, end1)).getMinutes());
        assertEquals(0, Minutes.minutesIn(new Interval(start, start)).getMinutes());
        assertEquals(0, Minutes.minutesIn(new Interval(end1, end1)).getMinutes());
        assertEquals(6, Minutes.minutesIn(new Interval(start, end2)).getMinutes());
    }

    // standardMinutesIn(ReadablePeriod): seconds truncate toward zero
    // (119s -> 1 minute); imprecise fields such as months are rejected.
    public void testFactory_standardMinutesIn_RPeriod() {
        assertEquals(0, Minutes.standardMinutesIn((ReadablePeriod) null).getMinutes());
        assertEquals(0, Minutes.standardMinutesIn(Period.ZERO).getMinutes());
        assertEquals(1, Minutes.standardMinutesIn(new Period(0, 0, 0, 0, 0, 1, 0, 0)).getMinutes());
        assertEquals(123, Minutes.standardMinutesIn(Period.minutes(123)).getMinutes());
        assertEquals(-987, Minutes.standardMinutesIn(Period.minutes(-987)).getMinutes());
        assertEquals(1, Minutes.standardMinutesIn(Period.seconds(119)).getMinutes());
        assertEquals(2, Minutes.standardMinutesIn(Period.seconds(120)).getMinutes());
        assertEquals(2, Minutes.standardMinutesIn(Period.seconds(121)).getMinutes());
        assertEquals(120, Minutes.standardMinutesIn(Period.hours(2)).getMinutes());
        try {
            Minutes.standardMinutesIn(Period.months(1));
            fail();
        } catch (IllegalArgumentException ex) {
            // expected
        }
    }

    // parseMinutes(String): null parses as zero; ISO periods with any
    // non-zero field other than minutes are rejected.
    public void testFactory_parseMinutes_String() {
        assertEquals(0, Minutes.parseMinutes((String) null).getMinutes());
        assertEquals(0, Minutes.parseMinutes("PT0M").getMinutes());
        assertEquals(1, Minutes.parseMinutes("PT1M").getMinutes());
        assertEquals(-3, Minutes.parseMinutes("PT-3M").getMinutes());
        assertEquals(2, Minutes.parseMinutes("P0Y0M0DT2M").getMinutes());
        assertEquals(2, Minutes.parseMinutes("PT0H2M").getMinutes());
        try {
            Minutes.parseMinutes("P1Y1D");
            fail();
        } catch (IllegalArgumentException ex) {
            // expected
        }
        try {
            Minutes.parseMinutes("P1DT1M");
            fail();
        } catch (IllegalArgumentException ex) {
            // expected
        }
    }

    //-----------------------------------------------------------------------
    public void testGetMethods() {
        Minutes test = Minutes.minutes(20);
        assertEquals(20, test.getMinutes());
    }

    public void testGetFieldType() {
        Minutes test = Minutes.minutes(20);
        assertEquals(DurationFieldType.minutes(), test.getFieldType());
    }

    public void testGetPeriodType() {
        Minutes test = Minutes.minutes(20);
        assertEquals(PeriodType.minutes(), test.getPeriodType());
    }

    //-----------------------------------------------------------------------
    // Comparisons against null treat null as zero minutes.
    public void testIsGreaterThan() {
        assertEquals(true, Minutes.THREE.isGreaterThan(Minutes.TWO));
        assertEquals(false, Minutes.THREE.isGreaterThan(Minutes.THREE));
        assertEquals(false, Minutes.TWO.isGreaterThan(Minutes.THREE));
        assertEquals(true, Minutes.ONE.isGreaterThan(null));
        assertEquals(false, Minutes.minutes(-1).isGreaterThan(null));
    }

    public void testIsLessThan() {
        assertEquals(false, Minutes.THREE.isLessThan(Minutes.TWO));
        assertEquals(false, Minutes.THREE.isLessThan(Minutes.THREE));
        assertEquals(true, Minutes.TWO.isLessThan(Minutes.THREE));
        assertEquals(false, Minutes.ONE.isLessThan(null));
        assertEquals(true, Minutes.minutes(-1).isLessThan(null));
    }

    //-----------------------------------------------------------------------
    // ISO-8601 string form, including negative values.
    public void testToString() {
        Minutes test = Minutes.minutes(20);
        assertEquals("PT20M", test.toString());

        test = Minutes.minutes(-20);
        assertEquals("PT-20M", test.toString());
    }

    //-----------------------------------------------------------------------
    // Round-tripping a cached constant through Java serialization must
    // resolve back to the same singleton instance (assertSame).
    public void testSerialization() throws Exception {
        Minutes test = Minutes.THREE;

        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        ObjectOutputStream oos = new ObjectOutputStream(baos);
        oos.writeObject(test);
        oos.close();
        byte[] bytes = baos.toByteArray();

        ByteArrayInputStream bais = new ByteArrayInputStream(bytes);
        ObjectInputStream ois = new ObjectInputStream(bais);
        Minutes result = (Minutes) ois.readObject();
        ois.close();

        assertSame(test, result);
    }

    //-----------------------------------------------------------------------
    // Standard conversions assume 60 min/hour, 24 h/day, 7 days/week.
    public void testToStandardWeeks() {
        Minutes test = Minutes.minutes(60 * 24 * 7 * 2);
        Weeks expected = Weeks.weeks(2);
        assertEquals(expected, test.toStandardWeeks());
    }

    public void testToStandardDays() {
        Minutes test = Minutes.minutes(60 * 24 * 2);
        Days expected = Days.days(2);
        assertEquals(expected, test.toStandardDays());
    }

    public void testToStandardHours() {
        Minutes test = Minutes.minutes(3 * 60);
        Hours expected = Hours.hours(3);
        assertEquals(expected, test.toStandardHours());
    }

    // Converting MAX_VALUE minutes to seconds overflows an int.
    public void testToStandardSeconds() {
        Minutes test = Minutes.minutes(3);
        Seconds expected = Seconds.seconds(3 * 60);
        assertEquals(expected, test.toStandardSeconds());

        try {
            Minutes.MAX_VALUE.toStandardSeconds();
            fail();
        } catch (ArithmeticException ex) {
            // expected
        }
    }

    // Duration conversion is done in long millis, so MAX_VALUE is fine.
    public void testToStandardDuration() {
        Minutes test = Minutes.minutes(20);
        Duration expected = new Duration(20L * DateTimeConstants.MILLIS_PER_MINUTE);
        assertEquals(expected, test.toStandardDuration());

        expected = new Duration(((long) Integer.MAX_VALUE) * DateTimeConstants.MILLIS_PER_MINUTE);
        assertEquals(expected, Minutes.MAX_VALUE.toStandardDuration());
    }

    //-----------------------------------------------------------------------
    // Arithmetic is immutable (the receiver is unchanged) and overflow-checked.
    public void testPlus_int() {
        Minutes test2 = Minutes.minutes(2);
        Minutes result = test2.plus(3);
        assertEquals(2, test2.getMinutes());
        assertEquals(5, result.getMinutes());

        assertEquals(1, Minutes.ONE.plus(0).getMinutes());

        try {
            Minutes.MAX_VALUE.plus(1);
            fail();
        } catch (ArithmeticException ex) {
            // expected
        }
    }

    // A null Minutes argument is treated as zero.
    public void testPlus_Minutes() {
        Minutes test2 = Minutes.minutes(2);
        Minutes test3 = Minutes.minutes(3);
        Minutes result = test2.plus(test3);
        assertEquals(2, test2.getMinutes());
        assertEquals(3, test3.getMinutes());
        assertEquals(5, result.getMinutes());

        assertEquals(1, Minutes.ONE.plus(Minutes.ZERO).getMinutes());
        assertEquals(1, Minutes.ONE.plus((Minutes) null).getMinutes());

        try {
            Minutes.MAX_VALUE.plus(Minutes.ONE);
            fail();
        } catch (ArithmeticException ex) {
            // expected
        }
    }

    public void testMinus_int() {
        Minutes test2 = Minutes.minutes(2);
        Minutes result = test2.minus(3);
        assertEquals(2, test2.getMinutes());
        assertEquals(-1, result.getMinutes());

        assertEquals(1, Minutes.ONE.minus(0).getMinutes());

        try {
            Minutes.MIN_VALUE.minus(1);
            fail();
        } catch (ArithmeticException ex) {
            // expected
        }
    }

    public void testMinus_Minutes() {
        Minutes test2 = Minutes.minutes(2);
        Minutes test3 = Minutes.minutes(3);
        Minutes result = test2.minus(test3);
        assertEquals(2, test2.getMinutes());
        assertEquals(3, test3.getMinutes());
        assertEquals(-1, result.getMinutes());

        assertEquals(1, Minutes.ONE.minus(Minutes.ZERO).getMinutes());
        assertEquals(1, Minutes.ONE.minus((Minutes) null).getMinutes());

        try {
            Minutes.MIN_VALUE.minus(Minutes.ONE);
            fail();
        } catch (ArithmeticException ex) {
            // expected
        }
    }

    // multipliedBy(1) returns the same instance (assertSame).
    public void testMultipliedBy_int() {
        Minutes test = Minutes.minutes(2);
        assertEquals(6, test.multipliedBy(3).getMinutes());
        assertEquals(2, test.getMinutes());
        assertEquals(-6, test.multipliedBy(-3).getMinutes());
        assertSame(test, test.multipliedBy(1));

        Minutes halfMax = Minutes.minutes(Integer.MAX_VALUE / 2 + 1);
        try {
            halfMax.multipliedBy(2);
            fail();
        } catch (ArithmeticException ex) {
            // expected
        }
    }

    // Integer division truncates toward zero; dividing by zero throws.
    public void testDividedBy_int() {
        Minutes test = Minutes.minutes(12);
        assertEquals(6, test.dividedBy(2).getMinutes());
        assertEquals(12, test.getMinutes());
        assertEquals(4, test.dividedBy(3).getMinutes());
        assertEquals(3, test.dividedBy(4).getMinutes());
        assertEquals(2, test.dividedBy(5).getMinutes());
        assertEquals(2, test.dividedBy(6).getMinutes());
        assertSame(test, test.dividedBy(1));

        try {
            Minutes.ONE.dividedBy(0);
            fail();
        } catch (ArithmeticException ex) {
            // expected
        }
    }

    // Negating MIN_VALUE overflows an int.
    public void testNegated() {
        Minutes test = Minutes.minutes(12);
        assertEquals(-12, test.negated().getMinutes());
        assertEquals(12, test.getMinutes());

        try {
            Minutes.MIN_VALUE.negated();
            fail();
        } catch (ArithmeticException ex) {
            // expected
        }
    }

    //-----------------------------------------------------------------------
    // Minutes can be added to a LocalDateTime via ReadablePeriod support.
    public void testAddToLocalDate() {
        Minutes test = Minutes.minutes(26);
        LocalDateTime date = new LocalDateTime(2006, 6, 1, 0, 0, 0, 0);
        LocalDateTime expected = new LocalDateTime(2006, 6, 1, 0, 26, 0, 0);
        assertEquals(expected, date.plus(test));
    }
}
/*
 *    Copyright 2017 OICR
 *
 *    Licensed under the Apache License, Version 2.0 (the "License");
 *    you may not use this file except in compliance with the License.
 *    You may obtain a copy of the License at
 *
 *        http://www.apache.org/licenses/LICENSE-2.0
 *
 *    Unless required by applicable law or agreed to in writing, software
 *    distributed under the License is distributed on an "AS IS" BASIS,
 *    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *    See the License for the specific language governing permissions and
 *    limitations under the License.
 */
package io.dockstore.common;

import com.google.common.base.Optional;
import com.google.common.io.ByteStreams;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;

import org.apache.commons.configuration2.INIConfiguration;
import org.apache.commons.configuration2.builder.ConfigurationBuilder;
import org.apache.commons.configuration2.builder.ReloadingFileBasedConfigurationBuilder;
import org.apache.commons.configuration2.builder.fluent.Parameters;
import org.apache.commons.configuration2.ex.ConfigurationException;
import org.apache.commons.configuration2.reloading.PeriodicReloadingTrigger;
import org.apache.commons.exec.CommandLine;
import org.apache.commons.exec.DefaultExecuteResultHandler;
import org.apache.commons.exec.DefaultExecutor;
import org.apache.commons.exec.ExecuteException;
import org.apache.commons.exec.Executor;
import org.apache.commons.exec.PumpStreamHandler;
import org.apache.commons.exec.environment.EnvironmentUtils;
import org.apache.commons.io.output.TeeOutputStream;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Utility methods for parsing (live-reloading) INI configuration files and
 * for executing external commands while capturing stdout/stderr.
 *
 * @author xliu
 */
public final class Utilities {

    // One live-reloading builder per config file path (see parseConfig).
    private static final Map<String, ConfigurationBuilder<INIConfiguration>> MAP = new HashMap<>();
    private static final Logger LOG = LoggerFactory.getLogger(Utilities.class);
    // if 0, do not set timeout at all
    private static final long DEFAULT_TIMEOUT_MILLISECONDS = 0;
    // SLF4J message template; the "{}" placeholder is substituted with the command
    public static final String PROBLEMS_RUNNING_COMMAND = "problems running command: {}";

    private Utilities() {
        // hide the default constructor for a utility class
    }

    /**
     * The singleton map is not entirely awesome, but this allows our legacy code to
     * benefit from live reloads for configuration while the application is running
     *
     * @param configFile the path to the config file which should be loaded
     * @return configuration file
     */
    public static INIConfiguration parseConfig(String configFile) {
        if (!MAP.containsKey(configFile)) {
            ReloadingFileBasedConfigurationBuilder<INIConfiguration> builder = new ReloadingFileBasedConfigurationBuilder<>(
                INIConfiguration.class).configure(new Parameters().properties().setFileName(configFile));
            PeriodicReloadingTrigger trigger = new PeriodicReloadingTrigger(builder.getReloadingController(), null, 1, TimeUnit.MINUTES);
            trigger.start();
            MAP.put(configFile, builder);
        }
        try {
            return MAP.get(configFile).getConfiguration();
        } catch (ConfigurationException ex) {
            // preserve the cause so the underlying parse/IO failure is not lost
            throw new RuntimeException("Could not read " + configFile, ex);
        }
    }

    public static ImmutablePair<String, String> executeCommand(String command) {
        return executeCommand(command, null);
    }

    public static ImmutablePair<String, String> executeCommand(String command, File workingDir) {
        return executeCommand(command, workingDir, DEFAULT_TIMEOUT_MILLISECONDS);
    }

    public static ImmutablePair<String, String> executeCommand(String command, File workingDir, long timeout) {
        return executeCommand(command, true, Optional.of(ByteStreams.nullOutputStream()),
            Optional.of(ByteStreams.nullOutputStream()), workingDir, timeout);
    }

    public static ImmutablePair<String, String> executeCommand(String command, OutputStream stdoutStream, OutputStream stderrStream) {
        return executeCommand(command, true, Optional.of(stdoutStream), Optional.of(stderrStream), null);
    }

    public static ImmutablePair<String, String> executeCommand(String command, OutputStream stdoutStream, OutputStream stderrStream,
        File workingDir) {
        return executeCommand(command, true, Optional.of(stdoutStream), Optional.of(stderrStream), workingDir);
    }

    public static ImmutablePair<String, String> executeCommand(String command, OutputStream stdoutStream, OutputStream stderrStream,
        File workingDir, Map<String, String> additionalEnvironment) {
        return executeCommand(command, true, Optional.of(stdoutStream), Optional.of(stderrStream), workingDir, additionalEnvironment,
            DEFAULT_TIMEOUT_MILLISECONDS);
    }

    /**
     * Execute a command and return stdout and stderr
     *
     * @param command the command to execute
     * @return the stdout and stderr
     */
    private static ImmutablePair<String, String> executeCommand(String command, final boolean dumpOutput,
        Optional<OutputStream> stdoutStream, Optional<OutputStream> stderrStream, File workingDir) {
        return executeCommand(command, dumpOutput, stdoutStream, stderrStream, workingDir, null, DEFAULT_TIMEOUT_MILLISECONDS);
    }

    /**
     * Execute a command with custom timeout and return stdout and stderr
     *
     * @param command the command to execute
     * @param timeout the max time in milliseconds to wait for execution to finish
     * @return the stdout and stderr
     */
    private static ImmutablePair<String, String> executeCommand(String command, final boolean dumpOutput,
        Optional<OutputStream> stdoutStream, Optional<OutputStream> stderrStream, File workingDir, long timeout) {
        return executeCommand(command, dumpOutput, stdoutStream, stderrStream, workingDir, null, timeout);
    }

    /**
     * Execute a command and return stdout and stderr
     *
     * @param command the command to execute
     * @param additionalEnvironment additional environment variables that are added to the system environment; can be null
     * @return the stdout and stderr
     */
    private static ImmutablePair<String, String> executeCommand(String command, final boolean dumpOutput,
        Optional<OutputStream> stdoutStream, Optional<OutputStream> stderrStream, File workingDir,
        Map<String, String> additionalEnvironment, long timeout) {
        // TODO: limit our output in case the called program goes crazy

        // these are for returning the output for use by this
        try (ByteArrayOutputStream localStdoutStream = new ByteArrayOutputStream();
            ByteArrayOutputStream localStdErrStream = new ByteArrayOutputStream()) {
            OutputStream stdout = localStdoutStream;
            OutputStream stderr = localStdErrStream;
            if (stdoutStream.isPresent()) {
                assert stderrStream.isPresent();
                // in this branch, we want a copy of the output for Consonance
                stdout = new TeeOutputStream(localStdoutStream, stdoutStream.get());
                stderr = new TeeOutputStream(localStdErrStream, stderrStream.get());
            }

            DefaultExecuteResultHandler resultHandler = new DefaultExecuteResultHandler();
            String utf8 = StandardCharsets.UTF_8.name();
            try {
                final CommandLine parse = CommandLine.parse(command);
                // When running bash commands directly, we need this to substitute variables
                parse.setSubstitutionMap(additionalEnvironment);
                Executor executor = new DefaultExecutor();
                if (workingDir != null) {
                    LOG.info("working directory is " + workingDir.toString());
                    executor.setWorkingDirectory(workingDir);
                }
                executor.setExitValue(0);
                if (dumpOutput) {
                    LOG.info("CMD: " + command);
                }
                final Map<String, String> procEnvironment = EnvironmentUtils.getProcEnvironment();
                if (additionalEnvironment != null) {
                    procEnvironment.putAll(additionalEnvironment);
                }
                // get stdout and stderr
                executor.setStreamHandler(new PumpStreamHandler(stdout, stderr));
                executor.execute(parse, procEnvironment, resultHandler);
                // wait for the duration of the timeout, unless timeout set to 0
                if (timeout == 0) {
                    resultHandler.waitFor();
                } else {
                    resultHandler.waitFor(timeout);
                }
                // not sure why commons-exec does not throw an exception
                if (resultHandler.getExitValue() != 0) {
                    LOG.error(PROBLEMS_RUNNING_COMMAND, command, resultHandler.getException());
                    // substitute the command into the message template instead of
                    // appending after the literal "{}" placeholder
                    throw new ExecuteException(commandFailureMessage(command), resultHandler.getExitValue());
                }
                return new ImmutablePair<>(localStdoutStream.toString(utf8), localStdErrStream.toString(utf8));
            } catch (InterruptedException | IOException e) {
                if (e instanceof InterruptedException) {
                    // restore the interrupt flag so callers can still observe the interruption
                    Thread.currentThread().interrupt();
                }
                throw new IllegalStateException(commandFailureMessage(command), e);
            } finally {
                if (dumpOutput) {
                    LOG.info("exit code: " + resultHandler.getExitValue());
                    try {
                        LOG.debug("stderr was: " + localStdErrStream.toString(utf8));
                        LOG.debug("stdout was: " + localStdoutStream.toString(utf8));
                    } catch (UnsupportedEncodingException e) {
                        throw new RuntimeException("utf-8 does not exist?", e);
                    }
                }
            }
        } catch (IOException e) {
            throw new RuntimeException("could not close output streams", e);
        }
    }

    // Builds a human-readable failure message by substituting the command into
    // the SLF4J-style "{}" placeholder of PROBLEMS_RUNNING_COMMAND.
    private static String commandFailureMessage(String command) {
        return PROBLEMS_RUNNING_COMMAND.replace("{}", command);
    }

    /**
     * Cleans input of characters that might break-up logging output
     * @param input
     * @return
     */
    public static String cleanForLogging(String input) {
        return input.replaceAll("[\n\r\t]", "_");
    }
}
/* * Copyright (C) 2009 The Guava Authors * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package io.trivium.dep.com.google.common.collect; import static io.trivium.dep.com.google.common.base.Preconditions.checkArgument; import static io.trivium.dep.com.google.common.base.Preconditions.checkNotNull; import static io.trivium.dep.com.google.common.base.Preconditions.checkState; import static io.trivium.dep.com.google.common.collect.MapMakerInternalMap.Strength.SOFT; import io.trivium.dep.com.google.common.annotations.GwtCompatible; import io.trivium.dep.com.google.common.annotations.GwtIncompatible; import io.trivium.dep.com.google.common.base.Ascii; import io.trivium.dep.com.google.common.base.Equivalence; import io.trivium.dep.com.google.common.base.Function; import io.trivium.dep.com.google.common.base.MoreObjects; import io.trivium.dep.com.google.common.base.Throwables; import io.trivium.dep.com.google.common.base.Ticker; import io.trivium.dep.com.google.common.collect.MapMakerInternalMap.Strength; import java.io.Serializable; import java.lang.ref.SoftReference; import java.lang.ref.WeakReference; import java.util.AbstractMap; import java.util.Collections; import java.util.ConcurrentModificationException; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import javax.annotation.Nullable; /** * <p>A builder of 
{@link ConcurrentMap} instances having any combination of the following features: * * <ul> * <li>keys or values automatically wrapped in {@linkplain WeakReference weak} or {@linkplain * SoftReference soft} references * <li>notification of evicted (or otherwise removed) entries * </ul> * * <p>Usage example: <pre> {@code * * ConcurrentMap<Request, Stopwatch> timers = new MapMaker() * .concurrencyLevel(4) * .weakKeys() * .makeMap();}</pre> * * <p>These features are all optional; {@code new MapMaker().makeMap()} returns a valid concurrent * map that behaves similarly to a {@link ConcurrentHashMap}. * * <p>The returned map is implemented as a hash table with similar performance characteristics to * {@link ConcurrentHashMap}. It supports all optional operations of the {@code ConcurrentMap} * interface. It does not permit null keys or values. * * <p><b>Note:</b> by default, the returned map uses equality comparisons (the {@link Object#equals * equals} method) to determine equality for keys or values. However, if {@link #weakKeys} was * specified, the map uses identity ({@code ==}) comparisons instead for keys. Likewise, if {@link * #weakValues} or {@link #softValues} was specified, the map uses identity comparisons for values. * * <p>The view collections of the returned map have <i>weakly consistent iterators</i>. This means * that they are safe for concurrent use, but if other threads modify the map after the iterator is * created, it is undefined which of these changes, if any, are reflected in that iterator. These * iterators never throw {@link ConcurrentModificationException}. * * <p>If {@link #weakKeys}, {@link #weakValues}, or {@link #softValues} are requested, it is * possible for a key or value present in the map to be reclaimed by the garbage collector. 
Entries * with reclaimed keys or values may be removed from the map on each map modification or on * occasional map accesses; such entries may be counted by {@link Map#size}, but will never be * visible to read or write operations. A partially-reclaimed entry is never exposed to the user. * Any {@link java.util.Map.Entry} instance retrieved from the map's * {@linkplain Map#entrySet entry set} is a snapshot of that entry's state at the time of * retrieval; such entries do, however, support {@link java.util.Map.Entry#setValue}, which simply * calls {@link Map#put} on the entry's key. * * <p>The maps produced by {@code MapMaker} are serializable, and the deserialized maps retain all * the configuration properties of the original map. During deserialization, if the original map had * used soft or weak references, the entries are reconstructed as they were, but it's not unlikely * they'll be quickly garbage-collected before they are ever accessed. * * <p>{@code new MapMaker().weakKeys().makeMap()} is a recommended replacement for {@link * java.util.WeakHashMap}, but note that it compares keys using object identity whereas {@code * WeakHashMap} uses {@link Object#equals}. 
* * @author Bob Lee * @author Charles Fry * @author Kevin Bourrillion * @since 2.0 */ @GwtCompatible(emulated = true) public final class MapMaker extends GenericMapMaker<Object, Object> { private static final int DEFAULT_INITIAL_CAPACITY = 16; private static final int DEFAULT_CONCURRENCY_LEVEL = 4; private static final int DEFAULT_EXPIRATION_NANOS = 0; static final int UNSET_INT = -1; // TODO(kevinb): dispense with this after benchmarking boolean useCustomMap; int initialCapacity = UNSET_INT; int concurrencyLevel = UNSET_INT; int maximumSize = UNSET_INT; Strength keyStrength; Strength valueStrength; long expireAfterWriteNanos = UNSET_INT; long expireAfterAccessNanos = UNSET_INT; RemovalCause nullRemovalCause; Equivalence<Object> keyEquivalence; Ticker ticker; /** * Constructs a new {@code MapMaker} instance with default settings, including strong keys, strong * values, and no automatic eviction of any kind. */ public MapMaker() {} /** * Sets a custom {@code Equivalence} strategy for comparing keys. * * <p>By default, the map uses {@link Equivalence#identity} to determine key equality when {@link * #weakKeys} is specified, and {@link Equivalence#equals()} otherwise. The only place this is * used is in {@link Interners.WeakInterner}. */ @GwtIncompatible("To be supported") @Override MapMaker keyEquivalence(Equivalence<Object> equivalence) { checkState(keyEquivalence == null, "key equivalence was already set to %s", keyEquivalence); keyEquivalence = checkNotNull(equivalence); this.useCustomMap = true; return this; } Equivalence<Object> getKeyEquivalence() { return MoreObjects.firstNonNull(keyEquivalence, getKeyStrength().defaultEquivalence()); } /** * Sets the minimum total size for the internal hash tables. For example, if the initial capacity * is {@code 60}, and the concurrency level is {@code 8}, then eight segments are created, each * having a hash table of size eight. 
Providing a large enough estimate at construction time * avoids the need for expensive resizing operations later, but setting this value unnecessarily * high wastes memory. * * @throws IllegalArgumentException if {@code initialCapacity} is negative * @throws IllegalStateException if an initial capacity was already set */ @Override public MapMaker initialCapacity(int initialCapacity) { checkState( this.initialCapacity == UNSET_INT, "initial capacity was already set to %s", this.initialCapacity); checkArgument(initialCapacity >= 0); this.initialCapacity = initialCapacity; return this; } int getInitialCapacity() { return (initialCapacity == UNSET_INT) ? DEFAULT_INITIAL_CAPACITY : initialCapacity; } /** * Specifies the maximum number of entries the map may contain. Note that the map <b>may evict an * entry before this limit is exceeded</b>. As the map size grows close to the maximum, the map * evicts entries that are less likely to be used again. For example, the map may evict an entry * because it hasn't been used recently or very often. * * <p>When {@code size} is zero, elements can be successfully added to the map, but are evicted * immediately. This has the same effect as invoking {@link #expireAfterWrite * expireAfterWrite}{@code (0, unit)} or {@link #expireAfterAccess expireAfterAccess}{@code (0, * unit)}. It can be useful in testing, or to disable caching temporarily without a code change. * * <p>Caching functionality in {@code MapMaker} has been moved to * {@link io.trivium.dep.com.google.common.cache.CacheBuilder}. * * @param size the maximum size of the map * @throws IllegalArgumentException if {@code size} is negative * @throws IllegalStateException if a maximum size was already set * @deprecated Caching functionality in {@code MapMaker} has been moved to * {@link io.trivium.dep.com.google.common.cache.CacheBuilder}, with {@link #maximumSize} being * replaced by {@link io.trivium.dep.com.google.common.cache.CacheBuilder#maximumSize}. 
Note that {@code * CacheBuilder} is simply an enhanced API for an implementation which was branched from * {@code MapMaker}. */ @Deprecated @Override MapMaker maximumSize(int size) { checkState( this.maximumSize == UNSET_INT, "maximum size was already set to %s", this.maximumSize); checkArgument(size >= 0, "maximum size must not be negative"); this.maximumSize = size; this.useCustomMap = true; if (maximumSize == 0) { // SIZE trumps EXPIRED this.nullRemovalCause = RemovalCause.SIZE; } return this; } /** * Guides the allowed concurrency among update operations. Used as a hint for internal sizing. The * table is internally partitioned to try to permit the indicated number of concurrent updates * without contention. Because assignment of entries to these partitions is not necessarily * uniform, the actual concurrency observed may vary. Ideally, you should choose a value to * accommodate as many threads as will ever concurrently modify the table. Using a significantly * higher value than you need can waste space and time, and a significantly lower value can lead * to thread contention. But overestimates and underestimates within an order of magnitude do not * usually have much noticeable impact. A value of one permits only one thread to modify the map * at a time, but since read operations can proceed concurrently, this still yields higher * concurrency than full synchronization. Defaults to 4. * * <p><b>Note:</b> Prior to Guava release 9.0, the default was 16. It is possible the default will * change again in the future. If you care about this value, you should always choose it * explicitly. 
 *
 * @throws IllegalArgumentException if {@code concurrencyLevel} is nonpositive
 * @throws IllegalStateException if a concurrency level was already set
 */
@Override
public MapMaker concurrencyLevel(int concurrencyLevel) {
  // UNSET_INT marks "not configured yet"; this setter may only run once.
  checkState(
      this.concurrencyLevel == UNSET_INT,
      "concurrency level was already set to %s",
      this.concurrencyLevel);
  checkArgument(concurrencyLevel > 0);
  this.concurrencyLevel = concurrencyLevel;
  return this;
}

// Effective concurrency level: the explicit setting, or the default when unset.
int getConcurrencyLevel() {
  return (concurrencyLevel == UNSET_INT) ? DEFAULT_CONCURRENCY_LEVEL : concurrencyLevel;
}

/**
 * Specifies that each key (not value) stored in the map should be wrapped in a {@link
 * WeakReference} (by default, strong references are used).
 *
 * <p><b>Warning:</b> when this method is used, the resulting map will use identity ({@code ==})
 * comparison to determine equality of keys, which is a technical violation of the {@link Map}
 * specification, and may not be what you expect.
 *
 * @throws IllegalStateException if the key strength was already set
 * @see WeakReference
 */
@GwtIncompatible("java.lang.ref.WeakReference")
@Override
public MapMaker weakKeys() {
  return setKeyStrength(Strength.WEAK);
}

// Records the key reference strength. May only be called once; soft keys are rejected.
MapMaker setKeyStrength(Strength strength) {
  checkState(keyStrength == null, "Key strength was already set to %s", keyStrength);
  keyStrength = checkNotNull(strength);
  checkArgument(keyStrength != SOFT, "Soft keys are not supported");
  if (strength != Strength.STRONG) {
    // STRONG could be used during deserialization.
    useCustomMap = true;
  }
  return this;
}

// Configured key strength, defaulting to STRONG when none was set.
Strength getKeyStrength() {
  return MoreObjects.firstNonNull(keyStrength, Strength.STRONG);
}

/**
 * Specifies that each value (not key) stored in the map should be wrapped in a
 * {@link WeakReference} (by default, strong references are used).
 *
 * <p>Weak values will be garbage collected once they are weakly reachable. This makes them a poor
 * candidate for caching; consider {@link #softValues} instead.
 *
 * <p><b>Warning:</b> when this method is used, the resulting map will use identity ({@code ==})
 * comparison to determine equality of values. This technically violates the specifications of
 * the methods {@link Map#containsValue containsValue},
 * {@link ConcurrentMap#remove(Object, Object) remove(Object, Object)} and
 * {@link ConcurrentMap#replace(Object, Object, Object) replace(K, V, V)}, and may not be what you
 * expect.
 *
 * @throws IllegalStateException if the value strength was already set
 * @see WeakReference
 */
@GwtIncompatible("java.lang.ref.WeakReference")
@Override
public MapMaker weakValues() {
  return setValueStrength(Strength.WEAK);
}

/**
 * Specifies that each value (not key) stored in the map should be wrapped in a
 * {@link SoftReference} (by default, strong references are used). Softly-referenced objects will
 * be garbage-collected in a <i>globally</i> least-recently-used manner, in response to memory
 * demand.
 *
 * <p><b>Warning:</b> in most circumstances it is better to set a per-cache {@linkplain
 * #maximumSize maximum size} instead of using soft references. You should only use this method if
 * you are well familiar with the practical consequences of soft references.
 *
 * <p><b>Warning:</b> when this method is used, the resulting map will use identity ({@code ==})
 * comparison to determine equality of values. This technically violates the specifications of
 * the methods {@link Map#containsValue containsValue},
 * {@link ConcurrentMap#remove(Object, Object) remove(Object, Object)} and
 * {@link ConcurrentMap#replace(Object, Object, Object) replace(K, V, V)}, and may not be what you
 * expect.
 *
 * @throws IllegalStateException if the value strength was already set
 * @see SoftReference
 * @deprecated Caching functionality in {@code MapMaker} has been moved to {@link
 * io.trivium.dep.com.google.common.cache.CacheBuilder}, with {@link #softValues} being replaced by {@link
 * io.trivium.dep.com.google.common.cache.CacheBuilder#softValues}. Note that {@code CacheBuilder} is simply
 * an enhanced API for an implementation which was branched from {@code MapMaker}.
 */
@Deprecated
@GwtIncompatible("java.lang.ref.SoftReference")
@Override
MapMaker softValues() {
  return setValueStrength(Strength.SOFT);
}

// Records the value reference strength; may only be called once.
MapMaker setValueStrength(Strength strength) {
  checkState(valueStrength == null, "Value strength was already set to %s", valueStrength);
  valueStrength = checkNotNull(strength);
  if (strength != Strength.STRONG) {
    // STRONG could be used during deserialization.
    useCustomMap = true;
  }
  return this;
}

// Configured value strength, defaulting to STRONG when none was set.
Strength getValueStrength() {
  return MoreObjects.firstNonNull(valueStrength, Strength.STRONG);
}

/**
 * Specifies that each entry should be automatically removed from the map once a fixed duration
 * has elapsed after the entry's creation, or the most recent replacement of its value.
 *
 * <p>When {@code duration} is zero, elements can be successfully added to the map, but are
 * evicted immediately. This has a very similar effect to invoking {@link #maximumSize
 * maximumSize}{@code (0)}. It can be useful in testing, or to disable caching temporarily without
 * a code change.
 *
 * <p>Expired entries may be counted by {@link Map#size}, but will never be visible to read or
 * write operations. Expired entries are currently cleaned up during write operations, or during
 * occasional read operations in the absence of writes; though this behavior may change in the
 * future.
 *
 * @param duration the length of time after an entry is created that it should be automatically
 *     removed
 * @param unit the unit that {@code duration} is expressed in
 * @throws IllegalArgumentException if {@code duration} is negative
 * @throws IllegalStateException if the time to live or time to idle was already set
 * @deprecated Caching functionality in {@code MapMaker} has been moved to
 * {@link io.trivium.dep.com.google.common.cache.CacheBuilder}, with {@link #expireAfterWrite} being
 * replaced by {@link io.trivium.dep.com.google.common.cache.CacheBuilder#expireAfterWrite}. Note that {@code
 * CacheBuilder} is simply an enhanced API for an implementation which was branched from
 * {@code MapMaker}.
 */
@Deprecated
@Override
MapMaker expireAfterWrite(long duration, TimeUnit unit) {
  checkExpiration(duration, unit);
  this.expireAfterWriteNanos = unit.toNanos(duration);
  if (duration == 0 && this.nullRemovalCause == null) {
    // SIZE trumps EXPIRED
    this.nullRemovalCause = RemovalCause.EXPIRED;
  }
  useCustomMap = true;
  return this;
}

// Shared preconditions for both expiration settings: each may be set at most once
// (either setting blocks the other), and the duration must be non-negative.
private void checkExpiration(long duration, TimeUnit unit) {
  checkState(
      expireAfterWriteNanos == UNSET_INT,
      "expireAfterWrite was already set to %s ns",
      expireAfterWriteNanos);
  checkState(
      expireAfterAccessNanos == UNSET_INT,
      "expireAfterAccess was already set to %s ns",
      expireAfterAccessNanos);
  checkArgument(duration >= 0, "duration cannot be negative: %s %s", duration, unit);
}

// Configured write-expiration in nanoseconds, or the default when unset.
long getExpireAfterWriteNanos() {
  return (expireAfterWriteNanos == UNSET_INT) ? DEFAULT_EXPIRATION_NANOS : expireAfterWriteNanos;
}

/**
 * Specifies that each entry should be automatically removed from the map once a fixed duration
 * has elapsed after the entry's last read or write access.
 *
 * <p>When {@code duration} is zero, elements can be successfully added to the map, but are
 * evicted immediately. This has a very similar effect to invoking {@link #maximumSize
 * maximumSize}{@code (0)}. It can be useful in testing, or to disable caching temporarily without
 * a code change.
 *
 * <p>Expired entries may be counted by {@link Map#size}, but will never be visible to read or
 * write operations. Expired entries are currently cleaned up during write operations, or during
 * occasional read operations in the absence of writes; though this behavior may change in the
 * future.
 *
 * @param duration the length of time after an entry is last accessed that it should be
 *     automatically removed
 * @param unit the unit that {@code duration} is expressed in
 * @throws IllegalArgumentException if {@code duration} is negative
 * @throws IllegalStateException if the time to idle or time to live was already set
 * @deprecated Caching functionality in {@code MapMaker} has been moved to
 * {@link io.trivium.dep.com.google.common.cache.CacheBuilder}, with {@link #expireAfterAccess} being
 * replaced by {@link io.trivium.dep.com.google.common.cache.CacheBuilder#expireAfterAccess}. Note that
 * {@code CacheBuilder} is simply an enhanced API for an implementation which was branched
 * from {@code MapMaker}.
 */
@Deprecated
@GwtIncompatible("To be supported")
@Override
MapMaker expireAfterAccess(long duration, TimeUnit unit) {
  checkExpiration(duration, unit);
  this.expireAfterAccessNanos = unit.toNanos(duration);
  if (duration == 0 && this.nullRemovalCause == null) {
    // SIZE trumps EXPIRED
    this.nullRemovalCause = RemovalCause.EXPIRED;
  }
  useCustomMap = true;
  return this;
}

// Configured access-expiration in nanoseconds, or the default when unset.
long getExpireAfterAccessNanos() {
  return (expireAfterAccessNanos == UNSET_INT)
      ? DEFAULT_EXPIRATION_NANOS : expireAfterAccessNanos;
}

// Time source used for expiration; falls back to the system ticker.
Ticker getTicker() {
  return MoreObjects.firstNonNull(ticker, Ticker.systemTicker());
}

/**
 * Specifies a listener instance, which all maps built using this {@code MapMaker} will notify
 * each time an entry is removed from the map by any means.
 *
 * <p>Each map built by this map maker after this method is called invokes the supplied listener
 * after removing an element for any reason (see removal causes in {@link RemovalCause}). It will
 * invoke the listener during invocations of any of that map's public methods (even read-only
 * methods).
 *
 * <p><b>Important note:</b> Instead of returning <i>this</i> as a {@code MapMaker} instance,
 * this method returns {@code GenericMapMaker<K, V>}. From this point on, either the original
 * reference or the returned reference may be used to complete configuration and build the map,
 * but only the "generic" one is type-safe. That is, it will properly prevent you from building
 * maps whose key or value types are incompatible with the types accepted by the listener already
 * provided; the {@code MapMaker} type cannot do this. For best results, simply use the standard
 * method-chaining idiom, as illustrated in the documentation at top, configuring a {@code
 * MapMaker} and building your {@link Map} all in a single statement.
 *
 * <p><b>Warning:</b> if you ignore the above advice, and use this {@code MapMaker} to build a map
 * or cache whose key or value type is incompatible with the listener, you will likely experience
 * a {@link ClassCastException} at some <i>undefined</i> point in the future.
 *
 * @throws IllegalStateException if a removal listener was already set
 * @deprecated Caching functionality in {@code MapMaker} has been moved to
 * {@link io.trivium.dep.com.google.common.cache.CacheBuilder}, with {@link #removalListener} being
 * replaced by {@link io.trivium.dep.com.google.common.cache.CacheBuilder#removalListener}. Note that {@code
 * CacheBuilder} is simply an enhanced API for an implementation which was branched from
 * {@code MapMaker}.
 */
@Deprecated
@GwtIncompatible("To be supported")
<K, V> GenericMapMaker<K, V> removalListener(RemovalListener<K, V> listener) {
  checkState(this.removalListener == null);

  // safely limiting the kinds of maps this can produce
  @SuppressWarnings("unchecked")
  GenericMapMaker<K, V> me = (GenericMapMaker<K, V>) this;
  me.removalListener = checkNotNull(listener);
  useCustomMap = true;
  return me;
}

/**
 * Builds a thread-safe map. This method does not alter the state of this {@code MapMaker}
 * instance, so it can be invoked again to create multiple independent maps.
 *
 * <p>The bulk operations {@code putAll}, {@code equals}, and {@code clear} are not guaranteed to
 * be performed atomically on the returned map. Additionally, {@code size} and {@code
 * containsValue} are implemented as bulk read operations, and thus may fail to observe concurrent
 * writes.
 *
 * @return a serializable concurrent map having the requested features
 */
@Override
public <K, V> ConcurrentMap<K, V> makeMap() {
  // With no custom features configured, a plain ConcurrentHashMap suffices.
  if (!useCustomMap) {
    return new ConcurrentHashMap<K, V>(getInitialCapacity(), 0.75f, getConcurrencyLevel());
  }
  // nullRemovalCause != null means entries are evicted on insertion (e.g. zero expiration).
  return (nullRemovalCause == null)
      ? new MapMakerInternalMap<K, V>(this)
      : new NullConcurrentMap<K, V>(this);
}

/**
 * Returns a MapMakerInternalMap for the benefit of internal callers that use features of
 * that class not exposed through ConcurrentMap.
 */
@Override
@GwtIncompatible("MapMakerInternalMap")
<K, V> MapMakerInternalMap<K, V> makeCustomMap() {
  return new MapMakerInternalMap<K, V>(this);
}

/**
 * Builds a map that supports atomic, on-demand computation of values. {@link Map#get} either
 * returns an already-computed value for the given key, atomically computes it using the supplied
 * function, or, if another thread is currently computing the value for this key, simply waits for
 * that thread to finish and returns its computed value. Note that the function may be executed
 * concurrently by multiple threads, but only for distinct keys.
 *
 * <p>New code should use {@link io.trivium.dep.com.google.common.cache.CacheBuilder}, which supports
 * {@linkplain io.trivium.dep.com.google.common.cache.CacheStats statistics} collection, introduces the
 * {@link io.trivium.dep.com.google.common.cache.CacheLoader} interface for loading entries into the cache
 * (allowing checked exceptions to be thrown in the process), and more cleanly separates
 * computation from the cache's {@code Map} view.
 *
 * <p>If an entry's value has not finished computing yet, query methods besides {@code get} return
 * immediately as if an entry doesn't exist. In other words, an entry isn't externally visible
 * until the value's computation completes.
 *
 * <p>{@link Map#get} on the returned map will never return {@code null}. It may throw:
 *
 * <ul>
 * <li>{@link NullPointerException} if the key is null or the computing function returns a null
 * result
 * <li>{@link ComputationException} if an exception was thrown by the computing function. If that
 * exception is already of type {@link ComputationException} it is propagated directly; otherwise
 * it is wrapped.
 * </ul>
 *
 * <p><b>Note:</b> Callers of {@code get} <i>must</i> ensure that the key argument is of type
 * {@code K}. The {@code get} method accepts {@code Object}, so the key type is not checked at
 * compile time. Passing an object of a type other than {@code K} can result in that object being
 * unsafely passed to the computing function as type {@code K}, and unsafely stored in the map.
 *
 * <p>If {@link Map#put} is called before a computation completes, other threads waiting on the
 * computation will wake up and return the stored value.
 *
 * <p>This method does not alter the state of this {@code MapMaker} instance, so it can be invoked
 * again to create multiple independent maps.
 *
 * <p>Insertion, removal, update, and access operations on the returned map safely execute
 * concurrently by multiple threads. Iterators on the returned map are weakly consistent,
 * returning elements reflecting the state of the map at some point at or since the creation of
 * the iterator. They do not throw {@link ConcurrentModificationException}, and may proceed
 * concurrently with other operations.
 *
 * <p>The bulk operations {@code putAll}, {@code equals}, and {@code clear} are not guaranteed to
 * be performed atomically on the returned map. Additionally, {@code size} and {@code
 * containsValue} are implemented as bulk read operations, and thus may fail to observe concurrent
 * writes.
 *
 * @param computingFunction the function used to compute new values
 * @return a serializable concurrent map having the requested features
 * @deprecated Caching functionality in {@code MapMaker} has been moved to
 * {@link io.trivium.dep.com.google.common.cache.CacheBuilder}, with {@link #makeComputingMap} being replaced
 * by {@link io.trivium.dep.com.google.common.cache.CacheBuilder#build}. See the
 * <a href="https://github.com/google/guava/wiki/MapMakerMigration">MapMaker
 * Migration Guide</a> for more details.
 */
@Deprecated
@Override
<K, V> ConcurrentMap<K, V> makeComputingMap(Function<? super K, ? extends V> computingFunction) {
  // As in makeMap: a non-null nullRemovalCause means values are computed and
  // immediately evicted rather than stored.
  return (nullRemovalCause == null)
      ? new MapMaker.ComputingMapAdapter<K, V>(this, computingFunction)
      : new NullComputingConcurrentMap<K, V>(this, computingFunction);
}

/**
 * Returns a string representation for this MapMaker instance. The exact form of the returned
 * string is not specified.
 */
@Override
public String toString() {
  // Only explicitly-configured settings are included in the output.
  MoreObjects.ToStringHelper s = MoreObjects.toStringHelper(this);
  if (initialCapacity != UNSET_INT) {
    s.add("initialCapacity", initialCapacity);
  }
  if (concurrencyLevel != UNSET_INT) {
    s.add("concurrencyLevel", concurrencyLevel);
  }
  if (maximumSize != UNSET_INT) {
    s.add("maximumSize", maximumSize);
  }
  if (expireAfterWriteNanos != UNSET_INT) {
    s.add("expireAfterWrite", expireAfterWriteNanos + "ns");
  }
  if (expireAfterAccessNanos != UNSET_INT) {
    s.add("expireAfterAccess", expireAfterAccessNanos + "ns");
  }
  if (keyStrength != null) {
    s.add("keyStrength", Ascii.toLowerCase(keyStrength.toString()));
  }
  if (valueStrength != null) {
    s.add("valueStrength", Ascii.toLowerCase(valueStrength.toString()));
  }
  if (keyEquivalence != null) {
    s.addValue("keyEquivalence");
  }
  if (removalListener != null) {
    s.addValue("removalListener");
  }
  return s.toString();
}

/**
 * An object that can receive a notification when an entry is removed from a map. The removal
 * resulting in notification could have occurred to an entry being manually removed or replaced,
 * or due to eviction resulting from timed expiration, exceeding a maximum size, or garbage
 * collection.
 *
 * <p>An instance may be called concurrently by multiple threads to process different entries.
 * Implementations of this interface should avoid performing blocking calls or synchronizing on
 * shared resources.
 *
 * @param <K> the most general type of keys this listener can listen for; for
 *     example {@code Object} if any key is acceptable
 * @param <V> the most general type of values this listener can listen for; for
 *     example {@code Object} if any value is acceptable
 */
interface RemovalListener<K, V> {
  /**
   * Notifies the listener that a removal occurred at some point in the past.
   */
  void onRemoval(RemovalNotification<K, V> notification);
}

/**
 * A notification of the removal of a single entry. The key or value may be null if it was already
 * garbage collected.
 *
 * <p>Like other {@code Map.Entry} instances associated with MapMaker, this class holds strong
 * references to the key and value, regardless of the type of references the map may be using.
 */
static final class RemovalNotification<K, V> extends ImmutableEntry<K, V> {
  private static final long serialVersionUID = 0;

  private final RemovalCause cause;

  RemovalNotification(@Nullable K key, @Nullable V value, RemovalCause cause) {
    super(key, value);
    this.cause = cause;
  }

  /**
   * Returns the cause for which the entry was removed.
   */
  public RemovalCause getCause() {
    return cause;
  }

  /**
   * Returns {@code true} if there was an automatic removal due to eviction (the cause is neither
   * {@link RemovalCause#EXPLICIT} nor {@link RemovalCause#REPLACED}).
   */
  public boolean wasEvicted() {
    return cause.wasEvicted();
  }
}

/**
 * The reason why an entry was removed.
 */
enum RemovalCause {
  /**
   * The entry was manually removed by the user. This can result from the user invoking
   * {@link Map#remove}, {@link ConcurrentMap#remove}, or {@link java.util.Iterator#remove}.
   */
  EXPLICIT {
    @Override
    boolean wasEvicted() {
      return false;
    }
  },

  /**
   * The entry itself was not actually removed, but its value was replaced by the user. This can
   * result from the user invoking {@link Map#put}, {@link Map#putAll},
   * {@link ConcurrentMap#replace(Object, Object)}, or
   * {@link ConcurrentMap#replace(Object, Object, Object)}.
   */
  REPLACED {
    @Override
    boolean wasEvicted() {
      return false;
    }
  },

  /**
   * The entry was removed automatically because its key or value was garbage-collected. This can
   * occur when using {@link #softValues}, {@link #weakKeys}, or {@link #weakValues}.
   */
  COLLECTED {
    @Override
    boolean wasEvicted() {
      return true;
    }
  },

  /**
   * The entry's expiration timestamp has passed. This can occur when using {@link
   * #expireAfterWrite} or {@link #expireAfterAccess}.
   */
  EXPIRED {
    @Override
    boolean wasEvicted() {
      return true;
    }
  },

  /**
   * The entry was evicted due to size constraints. This can occur when using {@link
   * #maximumSize}.
   */
  SIZE {
    @Override
    boolean wasEvicted() {
      return true;
    }
  };

  /**
   * Returns {@code true} if there was an automatic removal due to eviction (the cause is neither
   * {@link #EXPLICIT} nor {@link #REPLACED}).
   */
  abstract boolean wasEvicted();
}

/** A map that is always empty and evicts on insertion. */
static class NullConcurrentMap<K, V> extends AbstractMap<K, V>
    implements ConcurrentMap<K, V>, Serializable {
  private static final long serialVersionUID = 0;

  private final RemovalListener<K, V> removalListener;
  private final RemovalCause removalCause;

  NullConcurrentMap(MapMaker mapMaker) {
    removalListener = mapMaker.getRemovalListener();
    removalCause = mapMaker.nullRemovalCause;
  }

  // implements ConcurrentMap

  @Override
  public boolean containsKey(@Nullable Object key) {
    return false;
  }

  @Override
  public boolean containsValue(@Nullable Object value) {
    return false;
  }

  @Override
  public V get(@Nullable Object key) {
    return null;
  }

  // Tells the listener that the (key, value) pair was "removed" — i.e. evicted on insertion.
  void notifyRemoval(K key, V value) {
    RemovalNotification<K, V> notification =
        new RemovalNotification<K, V>(key, value, removalCause);
    removalListener.onRemoval(notification);
  }

  @Override
  public V put(K key, V value) {
    checkNotNull(key);
    checkNotNull(value);
    notifyRemoval(key, value);
    return null;
  }

  @Override
  public V putIfAbsent(K key, V value) {
    return put(key, value);
  }

  @Override
  public V remove(@Nullable Object key) {
    return null;
  }

  @Override
  public boolean remove(@Nullable Object key, @Nullable Object value) {
    return false;
  }

  @Override
  public V replace(K key, V value) {
    checkNotNull(key);
    checkNotNull(value);
    return null;
  }

  @Override
  public boolean replace(K key, @Nullable V oldValue, V newValue) {
    checkNotNull(key);
    checkNotNull(newValue);
    return false;
  }

  @Override
  public Set<Entry<K, V>> entrySet() {
    return Collections.emptySet();
  }
}

/** Computes on retrieval and evicts the result.
 */
static final class NullComputingConcurrentMap<K, V> extends NullConcurrentMap<K, V> {
  private static final long serialVersionUID = 0;

  final Function<? super K, ? extends V> computingFunction;

  NullComputingConcurrentMap(
      MapMaker mapMaker, Function<? super K, ? extends V> computingFunction) {
    super(mapMaker);
    this.computingFunction = checkNotNull(computingFunction);
  }

  @SuppressWarnings("unchecked") // unsafe, which is why Cache is preferred
  @Override
  public V get(Object k) {
    K key = (K) k;
    V value = compute(key);
    checkNotNull(value, "%s returned null for key %s.", computingFunction, key);
    // The computed value is never stored: it is reported as removed immediately.
    notifyRemoval(key, value);
    return value;
  }

  // Applies the computing function, wrapping any non-ComputationException failure.
  private V compute(K key) {
    checkNotNull(key);
    try {
      return computingFunction.apply(key);
    } catch (ComputationException e) {
      throw e;
    } catch (Throwable t) {
      throw new ComputationException(t);
    }
  }
}

/**
 * Overrides get() to compute on demand. Also throws an exception when {@code null} is returned
 * from a computation.
 */
/*
 * This might make more sense in ComputingConcurrentHashMap, but it causes a javac crash in some
 * cases there: http://code.google.com/p/guava-libraries/issues/detail?id=950
 */
static final class ComputingMapAdapter<K, V> extends ComputingConcurrentHashMap<K, V>
    implements Serializable {
  private static final long serialVersionUID = 0;

  ComputingMapAdapter(MapMaker mapMaker, Function<? super K, ? extends V> computingFunction) {
    super(mapMaker, computingFunction);
  }

  @SuppressWarnings("unchecked") // unsafe, which is one advantage of Cache over Map
  @Override
  public V get(Object key) {
    V value;
    try {
      value = getOrCompute((K) key);
    } catch (ExecutionException e) {
      // Unwrap: propagate ComputationException directly, wrap everything else.
      Throwable cause = e.getCause();
      Throwables.propagateIfInstanceOf(cause, ComputationException.class);
      throw new ComputationException(cause);
    }

    if (value == null) {
      throw new NullPointerException(computingFunction + " returned null for key " + key + ".");
    }
    return value;
  }
}
}
package gov.va.med.lom.vistabroker.test;

/*
 * TestEncountersRpc2.java
 *
 * Author: Rob Durkin (rob.durkin@med.va.gov)
 * Version 1.0 (03/12/2007)
 */

import gov.va.med.lom.vistabroker.patient.data.Encounter;
import gov.va.med.lom.vistabroker.patient.data.EncounterAppointment;
import gov.va.med.lom.vistabroker.security.ISecurityContext;
import gov.va.med.lom.vistabroker.security.SecurityContextFactory;
import gov.va.med.lom.vistabroker.service.PatientVBService;
import gov.va.med.lom.vistabroker.util.VistaBrokerServiceFactory;

import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.List;
import java.util.ResourceBundle;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

/**
 * Command-line test driver that lists a patient's outpatient encounter
 * appointments (as CSV on stdout), fetches the encounter details for each one,
 * and finally reports the patient's most recent non-inpatient encounter.
 */
public class TestEncountersRpc2 {

  private static final Log log = LogFactory.getLog(TestEncountersRpc2.class);

  /** Prints command-line usage to stdout. */
  static void printUsage() {
    // FIX: the usage text previously named the wrong class (TestEncountersRpc).
    System.out.println("Usage: java TestEncountersRpc2 DFN AUTH_PROPS");
    System.out.println("where DFN is the DFN of the patient.");
    System.out.println("      AUTH_PROPS is the name of a properties file containing VistA connection info.");
  }

  /*
   * Prints the list of appointments for the patient and encounter data for each appointment.
   */
  public static void main(String[] args) {
    PatientVBService patientRpcsRemote = VistaBrokerServiceFactory.getPatientVBService();

    String division = null;
    String duz = null;
    String dfn = null;
    @SuppressWarnings("unused")
    String securityId = null;

    if (args.length != 2) {
      printUsage();
      System.exit(1);
    } else {
      dfn = args[0];
      // Connection properties: division, duz, securityID keys are required.
      ResourceBundle res = ResourceBundle.getBundle(args[1]);
      division = res.getString("division");
      duz = res.getString("duz");
      securityId = res.getString("securityID");
    }

    try {
      // Set security context
      ISecurityContext securityContext =
          SecurityContextFactory.createDuzSecurityContext(division, duz);

      // Query window: roughly three years back (30*36 days) to 30 days ahead.
      Date date = new Date();
      GregorianCalendar startTime = new GregorianCalendar();
      startTime.setTime(date);
      startTime.add(Calendar.DATE, -30 * 36);
      GregorianCalendar endTime = new GregorianCalendar();
      endTime.setTime(date);
      endTime.add(Calendar.DATE, 30);

      // The service returns a raw collection; the element type is documented as
      // EncounterAppointment, hence the suppressed unchecked cast.
      @SuppressWarnings("unchecked")
      List<EncounterAppointment> encounterAppointments =
          (List<EncounterAppointment>) patientRpcsRemote
              .getOutpatientEncounters(securityContext, dfn, startTime, endTime)
              .getCollection();

      // CSV header for the per-appointment output below.
      System.out.println("DatetimeStr,Datetime,Dfn,LocationIen,LocationName,Status,Title,Type,Standalone,LocationAbbr,LocationText,ProviderDuz,ProviderName,RoomBed,Type,TypeId,VisitCat,VisitStr");

      for (EncounterAppointment encounterAppointment : encounterAppointments) {
        // Appointment columns
        System.out.print("\"" + encounterAppointment.getDatetimeStr() + "\",");
        System.out.print("\"" + encounterAppointment.getDatetime() + "\",");
        System.out.print("\"" + encounterAppointment.getDfn() + "\",");
        System.out.print("\"" + encounterAppointment.getLocationIen() + "\",");
        System.out.print("\"" + encounterAppointment.getLocationName() + "\",");
        System.out.print("\"" + encounterAppointment.getStatus() + "\",");
        System.out.print("\"" + encounterAppointment.getTitle() + "\",");
        System.out.print("\"" + encounterAppointment.getType() + "\",");
        System.out.print("\"" + encounterAppointment.getStandalone() + "\",");

        // Retrieve encounter for the appointment
        Encounter encounter = new Encounter();
        encounter.setDfn(dfn);
        encounter.setLocationIen(encounterAppointment.getLocationIen());
        encounter.setDatetimeStr(encounterAppointment.getDatetimeStr());
        encounter = patientRpcsRemote
            .getEncounterDetails(securityContext, encounter, false)
            .getPayload();

        // Encounter-detail columns
        System.out.print("\"" + encounter.getLocationAbbr() + "\",");
        System.out.print("\"" + encounter.getLocationText() + "\",");
        System.out.print("\"" + encounter.getProviderDuz() + "\",");
        System.out.print("\"" + encounter.getProviderName() + "\",");
        System.out.print("\"" + encounter.getRoomBed() + "\",");
        System.out.print("\"" + encounter.getType() + "\",");
        System.out.print("\"" + encounter.getTypeId() + "\",");
        System.out.print("\"" + encounter.getVisitCat() + "\",");
        System.out.println("\"" + encounter.getVisitStr() + "\"");
      }

      EncounterAppointment latest =
          getLatestEncounter(securityContext, patientRpcsRemote, dfn);
      System.out.println();
      if (latest == null) {
        System.out.println("No encounters found!");
      } else {
        System.out.println("Latest encounter: " + latest.getDatetimeStr());
      }
      System.out.println();
    } catch (Exception e) {
      // FIX: print the exception itself rather than getMessage(), which may be null.
      System.err.println(e);
      log.error("Error occurred while calling RPC: ", e);
    }
  }

  /**
   * VistaBroker's gov.va.med.lom.vistabroker.patient.dao.PatientEncounterDao.filterEncounters
   * method is broken! So we emulate it here.
   *
   * @param encounterList appointments to filter
   * @param includeType   keep only this type when non-null
   * @param excludeType   drop this type when non-null
   * @return the appointments passing both filters
   */
  @SuppressWarnings("unused")
  private static List<EncounterAppointment> filterEncounters(
      List<EncounterAppointment> encounterList, String includeType, String excludeType) {
    List<EncounterAppointment> encounters = new ArrayList<EncounterAppointment>();
    for (EncounterAppointment e : encounterList) {
      if (((includeType == null) || (e.getType().equals(includeType)))
          && ((excludeType == null) || (!e.getType().equals(excludeType)))) {
        encounters.add(e);
      }
    }
    return encounters;
  }

  /**
   * Returns the patient's most recent non-inpatient (type != "I") encounter, or
   * {@code null} if none is found within the searched range.
   *
   * <p>There is no single RPC for "latest outpatient encounter", so we search
   * backwards in {@code daysInPeriod}-day windows (for performance's sake) until a
   * match is found or {@code maxPeriods} windows have been examined.
   */
  @SuppressWarnings("unchecked")
  public static EncounterAppointment getLatestEncounter(
      ISecurityContext securityContext, PatientVBService patientRpcs, String patientDfn) {
    final int daysInPeriod = 90;
    final int maxPeriods = 8;

    Date now = new Date();
    // First window: [now - daysInPeriod, now].
    GregorianCalendar startTime = new GregorianCalendar();
    startTime.setTime(now);
    startTime.add(Calendar.DATE, -daysInPeriod);
    GregorianCalendar endTime = new GregorianCalendar();
    endTime.setTime(now);

    EncounterAppointment encounter = null;
    int periodsSearched = 0;

    // BUG FIX: the original slid the window back *before* the first query, so the
    // most recent 90 days were never searched. Query first, then slide.
    while (encounter == null && periodsSearched < maxPeriods) {
      List<EncounterAppointment> encounters =
          (List<EncounterAppointment>) patientRpcs
              .getAllEncounters2(securityContext, patientDfn, startTime, endTime, true);

      // Track the latest qualifying encounter within this window; the sentinel
      // starts 120 years in the past so any real datetime beats it.
      GregorianCalendar latestDate = new GregorianCalendar();
      latestDate.setTime(now);
      latestDate.add(Calendar.DATE, -365 * 120);
      GregorianCalendar testDate = new GregorianCalendar();

      for (EncounterAppointment candidate : encounters) {
        testDate.setTime(candidate.getDatetime());
        // Skip inpatient ("I") encounters; keep the latest of the rest.
        if (testDate.after(latestDate) && !candidate.getType().equals("I")) {
          encounter = candidate;
          latestDate.setTime(candidate.getDatetime());
        }
      }

      // Slide the window one period further into the past.
      periodsSearched++;
      startTime.add(Calendar.DATE, -daysInPeriod);
      endTime.add(Calendar.DATE, -daysInPeriod);
    }
    return encounter;
  }
}
/*
 *
 *  *  Copyright 2014 Orient Technologies LTD (info(at)orientechnologies.com)
 *  *
 *  *  Licensed under the Apache License, Version 2.0 (the "License");
 *  *  you may not use this file except in compliance with the License.
 *  *  You may obtain a copy of the License at
 *  *
 *  *       http://www.apache.org/licenses/LICENSE-2.0
 *  *
 *  *  Unless required by applicable law or agreed to in writing, software
 *  *  distributed under the License is distributed on an "AS IS" BASIS,
 *  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  *  See the License for the specific language governing permissions and
 *  *  limitations under the License.
 *  *
 *  * For more information: http://www.orientechnologies.com
 *
 */
package com.orientechnologies.common.concur.lock;

import com.orientechnologies.common.hash.OMurmurHash3;
import com.orientechnologies.common.serialization.types.OLongSerializer;
import com.orientechnologies.orient.core.OOrientShutdownListener;
import com.orientechnologies.orient.core.OOrientStartupListener;
import com.orientechnologies.orient.core.Orient;

import java.util.Arrays;
import java.util.Random;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.locks.LockSupport;

/**
 * Lock-free hash table mapping threads to per-thread counters. Each thread
 * registers itself lazily on its first {@link #increment()} call; entries live
 * in a set of atomically-swappable sub-tables that can grow over time.
 *
 * <p>NOTE(review): the concurrency protocol (relocation marks, busy counters,
 * double-read with counter check) resembles a lock-free cuckoo-hashing scheme,
 * but the full algorithm is not visible in this fragment — confirm against the
 * complete class before relying on these comments.
 *
 * @author Andrey Lomakin (a.lomakin-at-orientechnologies.com)
 * @since 8/20/14
 */
public final class OThreadCountersHashTable implements OOrientStartupListener, OOrientShutdownListener {
  // Seed for the hash function (presumably fed to OMurmurHash3 — confirm in the
  // non-visible hashCodesByThreadId implementation).
  private static final int SEED      = 362498820;

  private static final int NCPU      = Runtime.getRuntime().availableProcessors();
  // Smallest power of two >= 4 * NCPU.
  private static final int DEFAULT_SIZE = 1 << (32 - Integer.numberOfLeadingZeros((NCPU << 2) - 1));

  public static final int  THRESHOLD = 10;

  // When true, entries of dead threads still count as occupied (see entryIsEmpty).
  private final boolean deadThreadsAreAllowed;

  // Per-thread cache of this thread's own entry, so increment() avoids a lookup.
  private volatile ThreadLocal<HashEntry> hashEntry = new ThreadLocal<HashEntry>();

  // Index of the newest (active) sub-table inside `tables`.
  private volatile int activeTableIndex = 0;

  private final AtomicReference<AtomicReference<EntryHolder>[]>[] tables;
  // One busy counter per sub-table slot; readers spin until a table is quiescent.
  private final AtomicInteger[]                                   busyCounters;
  private final AtomicBoolean                                     tablesAreBusy = new AtomicBoolean(false);

  public OThreadCountersHashTable() {
    this(DEFAULT_SIZE, false);
  }

  public OThreadCountersHashTable(int initialSize, boolean deadThreadsAreAllowed) {
    this.deadThreadsAreAllowed = deadThreadsAreAllowed;

    // The first sub-table holds 2 * initialSize slots; up to 32 sub-tables total.
    AtomicReference<EntryHolder>[] activeTable = new AtomicReference[initialSize << 1];
    AtomicReference<AtomicReference<EntryHolder>[]>[] tables = new AtomicReference[32];

    for (int i = 0; i < activeTable.length; i++)
      activeTable[i] = new AtomicReference<EntryHolder>(new EntryHolder(0, null, false));

    tables[0] = new AtomicReference<AtomicReference<EntryHolder>[]>(activeTable);
    for (int i = 1; i < tables.length; i++)
      tables[i] = new AtomicReference<AtomicReference<EntryHolder>[]>(null);

    AtomicInteger[] counters = new AtomicInteger[32];
    for (int i = 0; i < counters.length; i++)
      counters[i] = new AtomicInteger();

    busyCounters = counters;
    this.tables = tables;

    // Weak registration so this table does not pin the Orient engine in memory.
    Orient.instance().registerWeakOrientStartupListener(this);
    Orient.instance().registerWeakOrientShutdownListener(this);
  }

  /**
   * Increments the calling thread's counter, registering the thread in the
   * table on first use.
   */
  public void increment() {
    HashEntry entry = hashEntry.get();

    if (entry == null) {
      final Thread thread = Thread.currentThread();
      entry = new HashEntry(thread, hashCodesByThreadId(thread.getId()));

      assert search(entry.thread) == null;
      insert(entry);
      assert search(entry.thread).thread == thread;

      hashEntry.set(entry);
    }

    // Plain increment: only the owning thread writes this field.
    entry.threadCounter++;
  }

  /** Decrements the calling thread's counter; the thread must have incremented before. */
  public void decrement() {
    final HashEntry entry = hashEntry.get();
    assert entry != null;

    entry.threadCounter--;
  }

  /**
   * Returns {@code true} when no registered thread currently holds a positive
   * counter. Re-reads {@code activeTableIndex} to detect a concurrent table
   * switch and retries until a consistent snapshot is observed.
   */
  public boolean isEmpty() {
    int activeTableIndex;
    do {
      activeTableIndex = this.activeTableIndex;

      for (int i = 0; i <= activeTableIndex; i++) {
        if (i != activeTableIndex) {
          // Older tables may be mid-migration: spin until their busy counter drains.
          while (busyCounters[i].get() != 0)
            ;
        }

        final AtomicReference<EntryHolder>[] table = tables[i].get();
        if (tableCountersNotEmpty(table))
          return false;
      }
    } while (this.activeTableIndex != activeTableIndex);

    return true;
  }
/** True when any occupied slot of the given sub-table array has a counter above zero. */
private boolean tableCountersNotEmpty(AtomicReference<EntryHolder>[] table) {
  for (AtomicReference<EntryHolder> entryHolderRef : table) {
    final EntryHolder entryHolder = entryHolderRef.get();
    if (!entryIsEmpty(entryHolder) && entryHolder.entry.threadCounter > 0)
      return true;
  }
  return false;
}

/**
 * A slot counts as empty when it holds no entry, or — when dead threads are NOT
 * allowed — when its owning thread has terminated (slot is then reclaimable).
 */
private boolean entryIsEmpty(EntryHolder entryHolder) {
  if (deadThreadsAreAllowed)
    return entryHolder.entry == null;
  return entryHolder.entry == null || !entryHolder.entry.thread.isAlive();
}

/**
 * Looks the thread up across all published tables (oldest first). Waits for
 * in-flight inserts on frozen tables and retries if a rehash races the scan.
 * Returns {@code null} when the thread is not registered.
 */
HashEntry search(Thread thread) {
  int[] hashCodes = hashCodesByThreadId(thread.getId());
  int activeTableIndex;
  do {
    activeTableIndex = this.activeTableIndex;
    for (int i = 0; i <= activeTableIndex; i++) {
      final AtomicReference<EntryHolder>[] table = tables[i].get();
      if (i != activeTableIndex) {
        while (busyCounters[i].get() != 0)
          ;
      }
      final HashEntry entry = searchInTables(thread, hashCodes, table);
      if (entry != null)
        return entry;
    }
  } while (activeTableIndex != this.activeTableIndex);
  return null;
}

/**
 * Lock-free cuckoo lookup inside one table array: two candidate slots are read
 * in two rounds; per-slot version counters (checkCounter) decide whether a
 * concurrent relocation could have hidden the entry, forcing a re-read.
 */
private static HashEntry searchInTables(Thread thread, int[] hashCodes, AtomicReference<EntryHolder>[] tables) {
  while (true) {
    // Round 1: first then second candidate slot.
    final int firstTableIndex = firstSubTableIndex(hashCodes, tables.length);
    final EntryHolder firstEntryHolderRnd1 = tables[firstTableIndex].get();
    if (firstEntryHolderRnd1.entry != null && firstEntryHolderRnd1.entry.thread == thread)
      return firstEntryHolderRnd1.entry;
    final int secondTableIndex = secondSubTableIndex(hashCodes, tables.length);
    final EntryHolder secondEntryHolderRnd1 = tables[secondTableIndex].get();
    if (secondEntryHolderRnd1.entry != null && secondEntryHolderRnd1.entry.thread == thread)
      return secondEntryHolderRnd1.entry;
    // Round 2: re-read both slots in case a relocation moved the entry between reads.
    final EntryHolder firstEntryHolderRnd2 = tables[firstTableIndex].get();
    if (firstEntryHolderRnd2.entry != null && firstEntryHolderRnd2.entry.thread == thread)
      return firstEntryHolderRnd2.entry;
    final EntryHolder secondEntryHolderRnd2 = tables[secondTableIndex].get();
    if (secondEntryHolderRnd2.entry != null && secondEntryHolderRnd2.entry.thread == thread)
      return secondEntryHolderRnd2.entry;
    // If counters advanced enough between rounds, a relocation may have raced us: retry.
    if (!checkCounter(firstEntryHolderRnd1.counter, secondEntryHolderRnd1.counter,
        firstEntryHolderRnd2.counter, secondEntryHolderRnd2.counter)) {
      return null;
    }
  }
}

/**
 * Like {@link #searchInTables} but also helps pending relocations it encounters
 * and reports the pair of candidate holders so the caller can CAS an insert.
 * Returns a found=false result when the slot pair is stable and the thread is absent.
 */
private FindResult find(Thread thread, int[] hashCodes, AtomicReference<EntryHolder>[] tables) {
  while (true) {
    FindResult result = null;
    final int firstTableIndex = firstSubTableIndex(hashCodes, tables.length);
    final EntryHolder firstEntryHolderRnd1 = tables[firstTableIndex].get();
    final int secondTableIndex = secondSubTableIndex(hashCodes, tables.length);
    final EntryHolder secondEntryHolderRnd1 = tables[secondTableIndex].get();
    if (firstEntryHolderRnd1.markedForRelocation) {
      helpRelocate(firstTableIndex, false, tables);
      continue;
    }
    if (firstEntryHolderRnd1.entry != null && firstEntryHolderRnd1.entry.thread == thread)
      result = new FindResult(true, true, firstEntryHolderRnd1, secondEntryHolderRnd1);
    if (secondEntryHolderRnd1.markedForRelocation) {
      helpRelocate(secondTableIndex, false, tables);
      continue;
    }
    if (secondEntryHolderRnd1.entry != null && secondEntryHolderRnd1.entry.thread == thread) {
      assert result == null;
      result = new FindResult(true, false, firstEntryHolderRnd1, secondEntryHolderRnd1);
    }
    if (result != null)
      return result;
    // Round 2 re-read, again helping any relocation in progress.
    final EntryHolder firstEntryHolderRnd2 = tables[firstTableIndex].get();
    final EntryHolder secondEntryHolderRnd2 = tables[secondTableIndex].get();
    if (firstEntryHolderRnd2.markedForRelocation) {
      helpRelocate(firstTableIndex, false, tables);
      continue;
    }
    if (firstEntryHolderRnd2.entry != null && firstEntryHolderRnd2.entry.thread == thread)
      result = new FindResult(true, true, firstEntryHolderRnd2, secondEntryHolderRnd2);
    if (secondEntryHolderRnd2.markedForRelocation) {
      helpRelocate(secondTableIndex, false, tables);
      continue;
    }
    if (secondEntryHolderRnd2.entry != null && secondEntryHolderRnd2.entry.thread == thread) {
      assert result == null;
      // NOTE(review): this round-2 hit packages the ROUND-1 holders
      // (firstEntryHolderRnd1/secondEntryHolderRnd1) into the result, unlike the
      // symmetric round-2 branch above which uses the Rnd2 holders. Looks like a
      // possible copy-paste slip — confirm against the original algorithm before changing.
      result = new FindResult(true, false, firstEntryHolderRnd1, secondEntryHolderRnd1);
    }
    if (result != null)
      return result;
    if (!checkCounter(firstEntryHolderRnd1.counter, secondEntryHolderRnd1.counter,
        firstEntryHolderRnd2.counter, secondEntryHolderRnd2.counter))
      return new FindResult(false, false, firstEntryHolderRnd2, secondEntryHolderRnd2);
  }
}

/**
 * True when the per-slot version counters advanced enough between the two read
 * rounds that a concurrent relocation could have moved the sought entry
 * (i.e. the double-read was not a consistent snapshot) — caller must retry.
 */
private static boolean checkCounter(long firstEntryRnd1, long secondEntryRnd1, long firstEntryRnd2, long secondEntryRnd2) {
  return firstEntryRnd2 - firstEntryRnd1 >= 2 && secondEntryRnd2 - secondEntryRnd1 >= 2
      && secondEntryRnd2 - firstEntryRnd1 >= 3;
}

/** Registers an arbitrary thread (testing/administration entry point). */
void insert(Thread thread) {
  HashEntry entry = new HashEntry(thread, hashCodesByThreadId(thread.getId()));
  insert(entry);
}

/**
 * Inserts into the currently active table, announcing the operation via the
 * table's busy counter. On a full table, attempts a rehash; if another thread
 * is already rehashing, backs off briefly and retries.
 */
private void insert(final HashEntry newEntry) {
  while (true) {
    final int activeTableIndex = this.activeTableIndex;
    final AtomicReference<EntryHolder>[] table = tables[activeTableIndex].get();
    final AtomicInteger counter = busyCounters[activeTableIndex];
    counter.getAndIncrement();
    boolean result = insertInTables(newEntry, table);
    counter.getAndDecrement();
    if (!result) {
      if (!rehash())
        LockSupport.parkNanos(10);
    } else {
      return;
    }
  }
}

/**
 * Cuckoo insert within one table array: CAS the entry into either free
 * candidate slot, otherwise evict (relocate) along a cuckoo path; returns
 * false when no path can be freed (table effectively full).
 */
private boolean insertInTables(final HashEntry newEntry, AtomicReference<EntryHolder>[] tables) {
  while (true) {
    final FindResult result = find(newEntry.thread, newEntry.hashCodes, tables);
    assert !result.found;
    if (entryIsEmpty(result.firstEntryHolder)) {
      final int firstTableIndex = firstSubTableIndex(newEntry.hashCodes, tables.length);
      final EntryHolder holder = result.firstEntryHolder;
      if (tables[firstTableIndex].compareAndSet(holder, new EntryHolder(holder.counter, newEntry, false)))
        return true;
    }
    if (entryIsEmpty(result.secondEntryHolder)) {
      final int secondTableIndex = secondSubTableIndex(newEntry.hashCodes, tables.length);
      final EntryHolder holder = result.secondEntryHolder;
      if (tables[secondTableIndex].compareAndSet(holder, new EntryHolder(holder.counter, newEntry, false)))
        return true;
    }
    if (!relocate(firstSubTableIndex(newEntry.hashCodes, tables.length), tables))
      return false;
  }
}

/**
 * Publishes a new active table of double size in the next slot. Only one
 * thread wins the tablesAreBusy CAS; losers return false and must back off.
 * Old tables stay in place so concurrent readers remain valid.
 */
private boolean rehash() {
  if (!tablesAreBusy.compareAndSet(false, true))
    return false;
  AtomicReference<EntryHolder>[] activeTable = tables[activeTableIndex].get();
  AtomicReference<EntryHolder>[] newActiveTable = new AtomicReference[activeTable.length << 1];
  for (int i = 0; i < newActiveTable.length; i++)
    newActiveTable[i] = new AtomicReference<EntryHolder>(new EntryHolder(0, null, false));
  tables[activeTableIndex + 1].set(newActiveTable);
  activeTableIndex++;
  tablesAreBusy.set(false);
  return true;
}

/**
 * Frees the slot at entryIndex by discovering a cuckoo eviction path (bounded
 * by THRESHOLD hops) and then shifting entries back along it. Restarts path
 * discovery whenever a concurrent change invalidates a hop. Returns false when
 * no path of free slots exists.
 */
private boolean relocate(int entryIndex, AtomicReference<EntryHolder>[] tables) {
  int startLevel = 0;
  final int tableSize = tables.length >> 1;
  path_discovery:
  while (true) {
    if (startLevel >= THRESHOLD)
      startLevel = 0;
    boolean found = false;
    final int[] route = new int[10];
    int depth = startLevel;
    // Phase 1: walk alternating sub-tables recording the eviction route until
    // an empty slot is found or the path grows too long.
    do {
      EntryHolder entryHolder = tables[entryIndex].get();
      while (entryHolder.markedForRelocation) {
        helpRelocate(entryIndex, false, tables);
        entryHolder = tables[entryIndex].get();
      }
      if (!entryIsEmpty(entryHolder)) {
        route[depth] = entryIndex;
        if (entryIndex < tableSize)
          entryIndex = secondSubTableIndex(entryHolder.entry.hashCodes, tables.length);
        else
          entryIndex = firstSubTableIndex(entryHolder.entry.hashCodes, tables.length);
        depth++;
      } else
        found = true;
    } while (!found && depth < THRESHOLD);
    // Phase 2: walk the route backwards, moving each entry into its alternate
    // (now free) slot; restart from the failed hop if anything changed.
    if (found) {
      for (int i = depth - 1; i >= 0; i--) {
        final int index = route[i];
        EntryHolder entryHolder = tables[index].get();
        if (entryHolder.markedForRelocation) {
          helpRelocate(index, false, tables);
          entryHolder = tables[index].get();
        }
        if (entryIsEmpty(entryHolder))
          continue;
        final int destinationIndex = index < tableSize
            ? secondSubTableIndex(entryHolder.entry.hashCodes, tables.length)
            : firstSubTableIndex(entryHolder.entry.hashCodes, tables.length);
        EntryHolder destinationEntry = tables[destinationIndex].get();
        if (!entryIsEmpty(destinationEntry)) {
          startLevel = i + 1;
          entryIndex = destinationIndex;
          continue path_discovery;
        }
        if (!helpRelocate(index, true, tables)) {
          startLevel = i + 1;
          entryIndex = destinationIndex;
          continue path_discovery;
        }
      }
    }
    return found;
  }
}

/**
 * Moves the entry at entryIndex to its alternate slot. The initiator first
 * marks the source holder; any thread observing the mark can complete the move
 * (copy to destination with a bumped version counter, then clear the source).
 * Returns false when the destination was occupied and the mark had to be rolled back.
 */
private boolean helpRelocate(int entryIndex, boolean initiator, AtomicReference<EntryHolder>[] tables) {
  final int tableSize = tables.length >> 1;
  while (true) {
    EntryHolder src = tables[entryIndex].get();
    // Initiator: mark the source slot for relocation (or bail if it emptied).
    while (initiator && !src.markedForRelocation) {
      if (entryIsEmpty(src))
        return true;
      tables[entryIndex].compareAndSet(src, new EntryHolder(src.counter, src.entry, true));
      src = tables[entryIndex].get();
    }
    if (!src.markedForRelocation)
      return true;
    final int destinationIndex = entryIndex < tableSize
        ? secondSubTableIndex(src.entry.hashCodes, tables.length)
        : firstSubTableIndex(src.entry.hashCodes, tables.length);
    final EntryHolder destinationHolder = tables[destinationIndex].get();
    if (entryIsEmpty(destinationHolder)) {
      // New version counter must exceed both slots' counters (checkCounter relies on this).
      final long newCounter = destinationHolder.counter > src.counter
          ? destinationHolder.counter + 1 : src.counter + 1;
      if (src != tables[entryIndex].get())
        continue;
      if (tables[destinationIndex].compareAndSet(destinationHolder, new EntryHolder(newCounter, src.entry, false))) {
        tables[entryIndex].compareAndSet(src, new EntryHolder(src.counter + 1, null, false));
        return true;
      } else
        continue;
    }
    if (destinationHolder.entry == src.entry) {
      // Another helper already copied the entry: just clear the source.
      tables[entryIndex].compareAndSet(src, new EntryHolder(src.counter + 1, null, false));
      return true;
    }
    // Destination occupied by someone else: roll back the relocation mark.
    tables[entryIndex].compareAndSet(src, new EntryHolder(src.counter, src.entry, false));
    return false;
  }
}

/** Engine shutdown: drop the per-thread cache (entries become unreachable via thread-local). */
@Override
public void onShutdown() {
  hashEntry = null;
}

/** Engine startup: recreate the per-thread cache if shutdown cleared it. */
@Override
public void onStartup() {
  if (hashEntry == null)
    hashEntry = new ThreadLocal<HashEntry>();
}

/** Index of a thread's slot in the upper half of the table (second cuckoo hash). */
private static int secondSubTableIndex(int[] hashCodes, int size) {
  final int subTableSize = size >> 1;
  return (hashCodes[1] & (subTableSize - 1)) + subTableSize;
}

/** Index of a thread's slot in the lower half of the table (first cuckoo hash). */
private static int firstSubTableIndex(int[] hashCodes, int size) {
  return hashCodes[0] & ((size >> 1) - 1);
}

/** Derives the two cuckoo hash codes from a thread id via 64-bit MurmurHash3. */
private static int[] hashCodesByThreadId(final long threadId) {
  final byte[] serializedId = new byte[8];
  OLongSerializer.INSTANCE.serializeNative(threadId, serializedId, 0);
  final long hashCode = OMurmurHash3.murmurHash3_x64_64(serializedId, SEED);
  return new int[] { (int) (hashCode & 0xFFFFFFFFL), (int) (hashCode >>> 32) };
}

/**
 * Per-thread entry. The p0..p14 longs pad threadCounter onto its own cache
 * line to avoid false sharing between writer threads; modCounters/equals/
 * hashCode touch them only so the JIT cannot eliminate the padding fields.
 */
static final class HashEntry {
  private final Thread thread;
  private final int[] hashCodes;
  // Cache-line padding before the hot counter.
  private volatile long p0 = 0, p1 = 1, p2 = 2, p3 = 3, p4 = 4, p5 = 5, p6 = 6, p7 = 7;
  // Written only by the owning thread; read by isEmpty() scanners.
  private volatile long threadCounter = 0;
  // Cache-line padding after the hot counter.
  private volatile long p8 = 0, p9 = 1, p10 = 2, p11 = 3, p12 = 4, p13 = 5, p14 = 6;

  private HashEntry(Thread thread, int[] hashCodes) {
    this.thread = thread;
    this.hashCodes = hashCodes;
  }

  public Thread getThread() {
    return thread;
  }

  @Override
  public String toString() {
    // Randomizes padding so their reads below are not constant-folded away.
    modCounters();
    return "HashEntry{" + "thread=" + thread + ", hashCodes=" + Arrays.toString(hashCodes)
        + ", p0=" + p0 + ", p1=" + p1 + ", p2=" + p2 + ", p3=" + p3 + ", p4="
        + p4 + ", p5=" + p5 + ", p6=" + p6 + ", p7=" + p7 + ", threadCounter=" + threadCounter
        + ", p8=" + p8 + ", p9=" + p9 + ", p10=" + p10 + ", p11=" + p11 + ", p12=" + p12
        + ", p13=" + p13 + ", p14=" + p14 + '}';
  }

  @Override
  public boolean equals(Object o) {
    if (this == o)
      return true;
    if (o == null || getClass() != o.getClass())
      return false;
    HashEntry hashEntry = (HashEntry) o;
    // Padding fields participate deliberately (keeps them "used").
    if (p1 != hashEntry.p1)
      return false;
    if (p10 != hashEntry.p10)
      return false;
    if (p11 != hashEntry.p11)
      return false;
    if (p12 != hashEntry.p12)
      return false;
    if (p13 != hashEntry.p13)
      return false;
    if (p14 != hashEntry.p14)
      return false;
    if (p2 != hashEntry.p2)
      return false;
    if (p3 != hashEntry.p3)
      return false;
    if (p4 != hashEntry.p4)
      return false;
    if (p5 != hashEntry.p5)
      return false;
    if (p6 != hashEntry.p6)
      return false;
    if (p7 != hashEntry.p7)
      return false;
    if (p8 != hashEntry.p8)
      return false;
    if (p9 != hashEntry.p9)
      return false;
    if (p0 != hashEntry.p0)
      return false;
    if (threadCounter != hashEntry.threadCounter)
      return false;
    if (!Arrays.equals(hashCodes, hashEntry.hashCodes))
      return false;
    if (thread != null ? !thread.equals(hashEntry.thread) : hashEntry.thread != null)
      return false;
    return true;
  }

  /** Overwrites all padding fields with random values (anti-dead-code-elimination). */
  private void modCounters() {
    final Random random = new Random();
    p0 = random.nextLong();
    p1 = random.nextLong();
    p2 = random.nextLong();
    p3 = random.nextLong();
    p4 = random.nextLong();
    p5 = random.nextLong();
    p6 = random.nextLong();
    p7 = random.nextLong();
    p8 = random.nextLong();
    p9 = random.nextLong();
    p10 = random.nextLong();
    p11 = random.nextLong();
    p12 = random.nextLong();
    p13 = random.nextLong();
    p14 = random.nextLong();
  }

  @Override
  public int hashCode() {
    int result = thread != null ? thread.hashCode() : 0;
    result = 31 * result + (hashCodes != null ? Arrays.hashCode(hashCodes) : 0);
    result = 31 * result + (int) (p0 ^ (p0 >>> 32));
    result = 31 * result + (int) (p1 ^ (p1 >>> 32));
    result = 31 * result + (int) (p2 ^ (p2 >>> 32));
    result = 31 * result + (int) (p3 ^ (p3 >>> 32));
    result = 31 * result + (int) (p4 ^ (p4 >>> 32));
    result = 31 * result + (int) (p5 ^ (p5 >>> 32));
    result = 31 * result + (int) (p6 ^ (p6 >>> 32));
    result = 31 * result + (int) (p7 ^ (p7 >>> 32));
    result = 31 * result + (int) (threadCounter ^ (threadCounter >>> 32));
    result = 31 * result + (int) (p8 ^ (p8 >>> 32));
    result = 31 * result + (int) (p9 ^ (p9 >>> 32));
    result = 31 * result + (int) (p10 ^ (p10 >>> 32));
    result = 31 * result + (int) (p11 ^ (p11 >>> 32));
    result = 31 * result + (int) (p12 ^ (p12 >>> 32));
    result = 31 * result + (int) (p13 ^ (p13 >>> 32));
    result = 31 * result + (int) (p14 ^ (p14 >>> 32));
    return result;
  }
}

/**
 * Immutable slot state: version counter (bumped on every relocation),
 * the stored entry (null = empty) and the in-progress relocation mark.
 * Slots change only via CAS of the whole holder.
 */
private static final class EntryHolder {
  private final long counter;
  private final HashEntry entry;
  private final boolean markedForRelocation;

  private EntryHolder(long counter, HashEntry entry, boolean markedForRelocation) {
    this.counter = counter;
    this.entry = entry;
    this.markedForRelocation = markedForRelocation;
  }
}

/**
 * Result of {@link #find}: whether the thread was found, in which sub-table,
 * plus the snapshot of both candidate holders for subsequent CAS attempts.
 */
private static final class FindResult {
  private final boolean found;
  private final boolean firstTable;
  private final EntryHolder firstEntryHolder;
  private final EntryHolder secondEntryHolder;

  private FindResult(boolean found, boolean firstTable, EntryHolder firstEntryHolder, EntryHolder secondEntryHolder) {
    this.found = found;
    this.firstTable = firstTable;
    this.firstEntryHolder = firstEntryHolder;
    this.secondEntryHolder = secondEntryHolder;
  }
}
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.pig.backend.hadoop.executionengine.physicalLayer; import java.io.Serializable; import java.math.BigDecimal; import java.math.BigInteger; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; import org.joda.time.DateTime; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.pig.backend.executionengine.ExecException; import org.apache.pig.backend.hadoop.executionengine.physicalLayer.plans.PhyPlanVisitor; import org.apache.pig.backend.hadoop.executionengine.physicalLayer.plans.PhysicalPlan; import org.apache.pig.data.BagFactory; import org.apache.pig.data.DataBag; import org.apache.pig.data.DataByteArray; import org.apache.pig.data.DataType; import org.apache.pig.data.Tuple; import org.apache.pig.impl.plan.Operator; import org.apache.pig.impl.plan.OperatorKey; import org.apache.pig.impl.plan.VisitorException; import org.apache.pig.parser.SourceLocation; import org.apache.pig.pen.Illustrable; import org.apache.pig.pen.Illustrator; import org.apache.pig.pen.util.LineageTracer; /** * * This is the base class for all operators. 
This supports a generic way of * processing inputs which can be overridden by operators extending this class. * The input model assumes that it can either be taken from an operator or can * be attached directly to this operator. Also it is assumed that inputs to an * operator are always in the form of a tuple. * * For this pipeline rework, we assume a pull based model, i.e., the root * operator is going to call getNext with the appropriate type which initiates a * cascade of getNext calls that unroll to create input for the root operator to * work on. * * Any operator that extends the PhysicalOperator supports a getNext for each * of the different parameter types. The concrete implementation should * use the result type of its input operator to decide the type of getNext's * parameter. This is done to avoid switch/case based on the type as much as * possible. The default is assumed to return an erroneous Result corresponding * to an unsupported operation on that type. So the operators need to implement * only those types that are supported. 
* */ public abstract class PhysicalOperator extends Operator<PhyPlanVisitor> implements Illustrable, Cloneable { private static final Log log = LogFactory.getLog(PhysicalOperator.class); protected static final long serialVersionUID = 1L; // The degree of parallelism requested protected int requestedParallelism; // The inputs that this operator will read data from protected List<PhysicalOperator> inputs; // The outputs that this operator will write data to // Will be used to create Targeted tuples protected List<PhysicalOperator> outputs; // The data type for the results of this operator protected byte resultType = DataType.TUPLE; // The physical plan this operator is part of protected PhysicalPlan parentPlan; // Specifies if the input has been directly attached protected boolean inputAttached = false; // If inputAttached is true, input is set to the input tuple protected Tuple input = null; // The result of performing the operation along with the output protected Result res = null; // alias associated with this PhysicalOperator protected String alias = null; // Will be used by operators to report status or transmit heartbeat // Should be set by the backends to appropriate implementations that // wrap their own version of a reporter. private static ThreadLocal<PigProgressable> reporter = new ThreadLocal<PigProgressable>(); // Will be used by operators to aggregate warning messages // Should be set by the backends to appropriate implementations that // wrap their own version of a logger. protected static PigLogger pigLogger; // Dummy types used to access the getNext of appropriate // type. 
These will be null static final protected DataByteArray dummyDBA = null; static final protected String dummyString = null; static final protected Double dummyDouble = null; static final protected Float dummyFloat = null; static final protected Integer dummyInt = null; static final protected Long dummyLong = null; static final protected Boolean dummyBool = null; static final protected DateTime dummyDateTime = null; static final protected Tuple dummyTuple = null; static final protected DataBag dummyBag = null; static final protected BigInteger dummyBigInteger = null; static final protected BigInteger dummyBigDecimal = null; static final protected Map dummyMap = null; // TODO: This is not needed. But a lot of tests check serialized physical plans // that are sensitive to the serialized image of the contained physical operators. // So for now, just keep it. Later it'll be cleansed along with those test golden // files protected LineageTracer lineageTracer; protected transient Illustrator illustrator = null; private boolean accum; private transient boolean accumStart; private List<OriginalLocation> originalLocations = new ArrayList<OriginalLocation>(); public PhysicalOperator(OperatorKey k) { this(k, -1, null); } public PhysicalOperator(OperatorKey k, int rp) { this(k, rp, null); } public PhysicalOperator(OperatorKey k, List<PhysicalOperator> inp) { this(k, -1, inp); } public PhysicalOperator(OperatorKey k, int rp, List<PhysicalOperator> inp) { super(k); requestedParallelism = rp; inputs = inp; res = new Result(); } @Override public void setIllustrator(Illustrator illustrator) { this.illustrator = illustrator; } public Illustrator getIllustrator() { return illustrator; } public int getRequestedParallelism() { return requestedParallelism; } public void setRequestedParallelism(int requestedParallelism) { this.requestedParallelism = requestedParallelism; } public byte getResultType() { return resultType; } public String getAlias() { return alias; } protected String 
getAliasString() { return (alias == null) ? "" : (alias + ": "); } public void addOriginalLocation(String alias, SourceLocation sourceLocation) { this.alias = alias; this.originalLocations.add(new OriginalLocation(alias, sourceLocation.line(), sourceLocation.offset())); } public void addOriginalLocation(String alias, List<OriginalLocation> originalLocations) { this.alias = alias; this.originalLocations.addAll(originalLocations); } public List<OriginalLocation> getOriginalLocations() { return Collections.unmodifiableList(originalLocations); } public void setAccumulative() { accum = true; } public boolean isAccumulative() { return accum; } public void setAccumStart() { if (!accum) { throw new IllegalStateException("Accumulative is not turned on."); } accumStart = true; } public boolean isAccumStarted() { return accumStart; } public void setAccumEnd() { if (!accum){ throw new IllegalStateException("Accumulative is not turned on."); } accumStart = false; } public void setResultType(byte resultType) { this.resultType = resultType; } public List<PhysicalOperator> getInputs() { return inputs; } public void setInputs(List<PhysicalOperator> inputs) { this.inputs = inputs; } public boolean isInputAttached() { return inputAttached; } /** * Shorts the input path of this operator by providing the input tuple * directly * * @param t - * The tuple that should be used as input */ public void attachInput(Tuple t) { input = t; this.inputAttached = true; } /** * Detaches any tuples that are attached * */ public void detachInput() { input = null; this.inputAttached = false; } /** * A blocking operator should override this to return true. Blocking * operators are those that need the full bag before operate on the tuples * inside the bag. Example is the Global Rearrange. Non-blocking or pipeline * operators are those that work on a tuple by tuple basis. 
* * @return true if blocking and false otherwise */ public boolean isBlocking() { return false; } /** * A generic method for parsing input that either returns the attached input * if it exists or fetches it from its predecessor. If special processing is * required, this method should be overridden. * * @return The Result object that results from processing the input * @throws ExecException */ public Result processInput() throws ExecException { Result res = new Result(); if (input == null && (inputs == null || inputs.size()==0)) { // log.warn("No inputs found. Signaling End of Processing."); res.returnStatus = POStatus.STATUS_EOP; return res; } //Should be removed once the model is clear if(getReporter()!=null) { getReporter().progress(); } if (!isInputAttached()) { return inputs.get(0).getNext(dummyTuple); } else { res.result = input; res.returnStatus = (res.result == null ? POStatus.STATUS_NULL: POStatus.STATUS_OK); detachInput(); return res; } } @Override public abstract void visit(PhyPlanVisitor v) throws VisitorException; /** * Implementations that call into the different versions of getNext are often * identical, differing only in the signature of the getNext() call they make. * This method allows to cut down on some of the copy-and-paste. * * @param obj The object we are working with. Its class should correspond to DataType * @param dataType Describes the type of obj; a byte from DataType. * @return result Result of applying this Operator to the Object. * @throws ExecException */ @SuppressWarnings("rawtypes") // For legacy use of untemplatized Map. 
public Result getNext(Object obj, byte dataType) throws ExecException { try { switch (dataType) { case DataType.BAG: return getNext((DataBag) obj); case DataType.BOOLEAN: return getNext((Boolean) obj); case DataType.BYTEARRAY: return getNext((DataByteArray) obj); case DataType.CHARARRAY: return getNext((String) obj); case DataType.DOUBLE: return getNext((Double) obj); case DataType.FLOAT: return getNext((Float) obj); case DataType.INTEGER: return getNext((Integer) obj); case DataType.LONG: return getNext((Long) obj); case DataType.BIGINTEGER: return getNext((BigInteger) obj); case DataType.BIGDECIMAL: return getNext((BigDecimal) obj); case DataType.DATETIME: return getNext((DateTime) obj); case DataType.MAP: return getNext((Map) obj); case DataType.TUPLE: return getNext((Tuple) obj); default: throw new ExecException("Unsupported type for getNext: " + DataType.findTypeName(dataType)); } } catch (RuntimeException e) { throw new ExecException("Exception while executing " + this.toString() + ": " + e.toString(), e); } } public static Object getDummy(byte dataType) throws ExecException { switch (dataType) { case DataType.BAG: return dummyBag; case DataType.BOOLEAN: return dummyBool; case DataType.BYTEARRAY: return dummyDBA; case DataType.CHARARRAY: return dummyString; case DataType.DOUBLE: return dummyDouble; case DataType.FLOAT: return dummyFloat; case DataType.INTEGER: return dummyFloat; case DataType.LONG: return dummyLong; case DataType.BIGINTEGER: return dummyBigInteger; case DataType.BIGDECIMAL: return dummyBigDecimal; case DataType.DATETIME: return dummyDateTime; case DataType.MAP: return dummyMap; case DataType.TUPLE: return dummyTuple; default: throw new ExecException("Unsupported type for getDummy: " + DataType.findTypeName(dataType)); } } public Result getNext(Integer i) throws ExecException { return res; } public Result getNext(Long l) throws ExecException { return res; } public Result getNext(Double d) throws ExecException { return res; } public Result 
getNext(Float f) throws ExecException { return res; } public Result getNext(DateTime dt) throws ExecException { return res; } public Result getNext(String s) throws ExecException { return res; } public Result getNext(DataByteArray ba) throws ExecException { return res; } public Result getNext(Map m) throws ExecException { return res; } public Result getNext(Boolean b) throws ExecException { return res; } public Result getNext(Tuple t) throws ExecException { return res; } public Result getNext(DataBag db) throws ExecException { Result ret = null; DataBag tmpBag = BagFactory.getInstance().newDefaultBag(); for(ret = getNext(dummyTuple);ret.returnStatus!=POStatus.STATUS_EOP;ret=getNext(dummyTuple)){ if(ret.returnStatus == POStatus.STATUS_ERR) { return ret; } tmpBag.add((Tuple)ret.result); } ret.result = tmpBag; ret.returnStatus = (tmpBag.size() == 0)? POStatus.STATUS_EOP : POStatus.STATUS_OK; return ret; } public Result getNext(BigInteger t) throws ExecException { return res; } public Result getNext(BigDecimal t) throws ExecException { return res; } /** * Reset internal state in an operator. For use in nested pipelines * where operators like limit and sort may need to reset their state. * Limit needs it because it needs to know it's seeing a fresh set of * input. Blocking operators like sort and distinct need it because they * may not have drained their previous input due to a limit and thus need * to be told to drop their old input and start over. */ public void reset() { } /** * @return PigProgressable stored in threadlocal */ public static PigProgressable getReporter() { return PhysicalOperator.reporter.get(); } /** * @param reporter PigProgressable to be stored in threadlocal */ public static void setReporter(PigProgressable reporter) { PhysicalOperator.reporter.set(reporter); } /** * Make a deep copy of this operator. 
This function is blank, however, * we should leave a place holder so that the subclasses can clone * @throws CloneNotSupportedException */ @Override public PhysicalOperator clone() throws CloneNotSupportedException { return (PhysicalOperator)super.clone(); } protected void cloneHelper(PhysicalOperator op) { resultType = op.resultType; originalLocations.addAll(op.originalLocations); } /** * @param physicalPlan */ public void setParentPlan(PhysicalPlan physicalPlan) { parentPlan = physicalPlan; } public Log getLogger() { return log; } public static void setPigLogger(PigLogger logger) { pigLogger = logger; } public static PigLogger getPigLogger() { return pigLogger; } public static class OriginalLocation implements Serializable { private String alias; private int line; private int offset; public OriginalLocation(String alias, int line, int offset) { super(); this.alias = alias; this.line = line; this.offset = offset; } public String getAlias() { return alias; } public int getLine() { return line; } public int getOffset() { return offset; } @Override public String toString() { return alias+"["+line+","+offset+"]"; } } }
/* * This file is part of Sponge, licensed under the MIT License (MIT). * * Copyright (c) SpongePowered <https://www.spongepowered.org> * Copyright (c) contributors * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
 */
package org.spongepowered.lantern.guice;

import com.google.inject.AbstractModule;
import com.google.inject.Inject;
import com.google.inject.Provider;
import com.google.inject.Scopes;
import com.google.inject.TypeLiteral;
import ninja.leaping.configurate.commented.CommentedConfigurationNode;
import ninja.leaping.configurate.loader.ConfigurationLoader;
import org.slf4j.Logger;
import org.spongepowered.api.Game;
import org.spongepowered.api.config.ConfigDir;
import org.spongepowered.api.config.DefaultConfig;
import org.spongepowered.api.plugin.PluginContainer;
import org.spongepowered.api.scheduler.AsynchronousExecutor;
import org.spongepowered.api.scheduler.Scheduler;
import org.spongepowered.api.scheduler.SpongeExecutorService;
import org.spongepowered.api.scheduler.SynchronousExecutor;
import org.spongepowered.lantern.config.LanternConfigManager;

import java.io.File;
import java.nio.file.Path;

/**
 * Per-plugin Guice module: wires a plugin's injectable services — its container,
 * logger, config directories/files (as both {@link Path} and legacy {@link File}),
 * HOCON configuration loaders, and scheduler-backed executor services.
 */
public class LanternPluginGuiceModule extends AbstractModule {

    private final PluginContainer container;
    private final Class<?> pluginClass;

    /**
     * @param container   the plugin's container, bound as-is and used by providers
     * @param pluginClass the plugin main class, bound as a singleton
     */
    public LanternPluginGuiceModule(PluginContainer container, Class<?> pluginClass) {
        this.container = container;
        this.pluginClass = pluginClass;
    }

    @Override
    protected void configure() {
        // Runtime instances of the binding annotations; sharedRoot selects between the
        // global config dir and the plugin-private one.
        // NOTE(review): ConfigDirAnnotation/ConfigFileAnnotation are not imported here —
        // presumably declared in this package; confirm.
        ConfigDir privateConfigDir = new ConfigDirAnnotation(false);
        DefaultConfig sharedConfigFile = new ConfigFileAnnotation(true);
        DefaultConfig privateConfigFile = new ConfigFileAnnotation(false);

        bind(this.pluginClass).in(Scopes.SINGLETON);
        bind(PluginContainer.class).toInstance(this.container);
        bind(Logger.class).toInstance(this.container.getLogger());

        // Plugin-private config directory (shared dir is in the global guice module)
        bind(Path.class).annotatedWith(privateConfigDir).toProvider(PrivateConfigDirProvider.class);
        bind(File.class).annotatedWith(privateConfigDir).toProvider(FilePrivateConfigDirProvider.class);
        bind(Path.class).annotatedWith(sharedConfigFile).toProvider(SharedConfigFileProvider.class); // Shared-directory config file
        bind(File.class).annotatedWith(sharedConfigFile).toProvider(FileSharedConfigFileProvider.class);
        bind(Path.class).annotatedWith(privateConfigFile).toProvider(PrivateConfigFileProvider.class); // Plugin-private directory config file
        bind(File.class).annotatedWith(privateConfigFile).toProvider(FilePrivateConfigFileProvider.class);
        bind(new TypeLiteral<ConfigurationLoader<CommentedConfigurationNode>>() {
        }).annotatedWith(sharedConfigFile)
                .toProvider(SharedHoconConfigProvider.class); // Loader for shared-directory config file
        bind(new TypeLiteral<ConfigurationLoader<CommentedConfigurationNode>>() {
        }).annotatedWith(privateConfigFile)
                .toProvider(PrivateHoconConfigProvider.class); // Loader for plugin-private directory config file

        // SpongeExecutorServices
        bind(SpongeExecutorService.class).annotatedWith(SynchronousExecutor.class).toProvider(SynchronousExecutorProvider.class);
        bind(SpongeExecutorService.class).annotatedWith(AsynchronousExecutor.class).toProvider(AsynchronousExecutorProvider.class);
    }

    /** Provides the plugin-private configuration directory. */
    private static class PrivateConfigDirProvider implements Provider<Path> {

        private final PluginContainer container;

        @Inject
        private PrivateConfigDirProvider(PluginContainer container) {
            this.container = container;
        }

        @Override
        public Path get() {
            return LanternConfigManager.getPrivateRoot(this.container).getDirectory();
        }
    }

    /** Provides the config file path inside the plugin-private directory. */
    private static class PrivateConfigFileProvider implements Provider<Path> {

        private final PluginContainer container;

        @Inject
        private PrivateConfigFileProvider(PluginContainer container) {
            this.container = container;
        }

        @Override
        public Path get() {
            return LanternConfigManager.getPrivateRoot(this.container).getConfigPath();
        }
    }

    /** Provides the config file path inside the shared configuration directory. */
    private static class SharedConfigFileProvider implements Provider<Path> {

        private final PluginContainer container;

        @Inject
        private SharedConfigFileProvider(PluginContainer container) {
            this.container = container;
        }

        @Override
        public Path get() {
            return LanternConfigManager.getSharedRoot(this.container).getConfigPath();
        }
    }

    /** Provides the HOCON loader for the shared-directory config file. */
    private static class SharedHoconConfigProvider implements Provider<ConfigurationLoader<CommentedConfigurationNode>> {

        private final PluginContainer container;

        @Inject
        private SharedHoconConfigProvider(PluginContainer container) {
            this.container = container;
        }

        @Override
        public ConfigurationLoader<CommentedConfigurationNode> get() {
            return LanternConfigManager.getSharedRoot(this.container).getConfig();
        }
    }

    /** Provides the HOCON loader for the plugin-private config file. */
    private static class PrivateHoconConfigProvider implements Provider<ConfigurationLoader<CommentedConfigurationNode>> {

        private final PluginContainer container;

        @Inject
        private PrivateHoconConfigProvider(PluginContainer container) {
            this.container = container;
        }

        @Override
        public ConfigurationLoader<CommentedConfigurationNode> get() {
            return LanternConfigManager.getPrivateRoot(this.container).getConfig();
        }
    }

    // TODO: Support this without extra classes (basically it would be nice if Guice allowed something like an "alias" for File so we would only
    // need to add the conversion step Path -> File (Path.toFile()) once.

    /** File view of the plugin-private config directory (delegates to the Path binding). */
    private static class FilePrivateConfigDirProvider implements Provider<File> {

        private final Path configDir;

        @Inject
        private FilePrivateConfigDirProvider(@ConfigDir(sharedRoot = false) Path configDir) {
            this.configDir = configDir;
        }

        @Override
        public File get() {
            return configDir.toFile();
        }
    }

    /** File view of the plugin-private config file (delegates to the Path binding). */
    private static class FilePrivateConfigFileProvider implements Provider<File> {

        private final Path configPath;

        @Inject
        private FilePrivateConfigFileProvider(@DefaultConfig(sharedRoot = false) Path configPath) {
            this.configPath = configPath;
        }

        @Override
        public File get() {
            return configPath.toFile();
        }
    }

    /** File view of the shared-directory config file (delegates to the Path binding). */
    private static class FileSharedConfigFileProvider implements Provider<File> {

        private final Path configPath;

        @Inject
        private FileSharedConfigFileProvider(@DefaultConfig(sharedRoot = true) Path configPath) {
            this.configPath = configPath;
        }

        @Override
        public File get() {
            return configPath.toFile();
        }
    }

    /** Executor running tasks on the main (synchronous) scheduler for this plugin. */
    private static class SynchronousExecutorProvider implements Provider<SpongeExecutorService> {

        private final PluginContainer container;
        private final Scheduler schedulerService;

        @Inject
        private SynchronousExecutorProvider(PluginContainer container, Game game) {
            this.container = container;
            this.schedulerService = game.getScheduler();
        }

        @Override
        public SpongeExecutorService get() {
            return this.schedulerService.createSyncExecutor(this.container);
        }
    }

    /** Executor running tasks on the asynchronous scheduler for this plugin. */
    private static class AsynchronousExecutorProvider implements Provider<SpongeExecutorService> {

        private final PluginContainer container;
        private final Scheduler schedulerService;

        @Inject
        private AsynchronousExecutorProvider(PluginContainer container, Game game) {
            this.container = container;
            this.schedulerService = game.getScheduler();
        }

        @Override
        public SpongeExecutorService get() {
            return this.schedulerService.createAsyncExecutor(this.container);
        }
    }
}
/*
 * Copyright 2018 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.example;

import com.google.actions.api.ActionRequest;
import com.google.actions.api.ActionResponse;
import com.google.actions.api.ActionsSdkApp;
import com.google.actions.api.Capability;
import com.google.actions.api.ForIntent;
import com.google.actions.api.response.ResponseBuilder;
import com.google.actions.api.response.helperintent.SelectionCarousel;
import com.google.actions.api.response.helperintent.SelectionList;
import com.google.api.services.actions_fulfillment.v2.model.BasicCard;
import com.google.api.services.actions_fulfillment.v2.model.Button;
import com.google.api.services.actions_fulfillment.v2.model.CarouselBrowse;
import com.google.api.services.actions_fulfillment.v2.model.CarouselBrowseItem;
import com.google.api.services.actions_fulfillment.v2.model.CarouselSelectCarouselItem;
import com.google.api.services.actions_fulfillment.v2.model.Image;
import com.google.api.services.actions_fulfillment.v2.model.ListSelectListItem;
import com.google.api.services.actions_fulfillment.v2.model.MediaObject;
import com.google.api.services.actions_fulfillment.v2.model.MediaResponse;
import com.google.api.services.actions_fulfillment.v2.model.OpenUrlAction;
import com.google.api.services.actions_fulfillment.v2.model.OptionInfo;
import com.google.api.services.actions_fulfillment.v2.model.SimpleResponse;
import com.google.api.services.actions_fulfillment.v2.model.TableCard;
import com.google.api.services.actions_fulfillment.v2.model.TableCardCell;
import com.google.api.services.actions_fulfillment.v2.model.TableCardColumnProperties;
import com.google.api.services.actions_fulfillment.v2.model.TableCardRow;

import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.List;
import java.util.ResourceBundle;

/**
 * Actions-on-Google sample fulfillment demonstrating the visual/rich response
 * types (basic card, lists, carousels, media, table card). User-visible text is
 * pulled from a locale-specific "resources" {@link ResourceBundle}; each
 * handler first checks the surface's {@link Capability} and falls back to a
 * plain text answer when the surface cannot render the rich response.
 */
public class ConversationComponentsApp extends ActionsSdkApp {

  // Note: Do not store any state as an instance variable.
  // It is ok to have final variables where the variable is assigned a value in
  // the constructor but remains unchanged. This is required to ensure thread-
  // safety as the entry point (ActionServlet) instances may
  // be reused by the server.

  // Image assets used by the card/list/carousel demos.
  private static final String IMG_URL_AOG =
      "https://storage.googleapis.com/actionsresources/logo_assistant_2x_64dp.png";
  private static final String IMG_URL_GOOGLE_PAY =
      "https://storage.googleapis.com/actionsresources/logo_pay_64dp.png";
  private static final String IMG_URL_GOOGLE_HOME =
      "https://lh3.googleusercontent.com/Nu3a6F80WfixUqf_ec_vgXy_c0-0r4VLJRXjVFF_X_CIilEu8B9fT35qyTEj_PEsKw";
  private static final String IMG_URL_GOOGLE_PIXEL =
      "https://storage.googleapis.com/madebygoog/v1/Pixel/Pixel_ColorPicker/Pixel_Device_Angled_Black-720w.png";
  // Audio assets used by the media-response demo.
  private static final String IMG_URL_MEDIA =
      "http://storage.googleapis.com/automotive-media/album_art.jpg";
  private static final String MEDIA_SOURCE =
      "http://storage.googleapis.com/automotive-media/Jazz_In_Paris.mp3";

  // One image per generated list/carousel item (loops below assume length >= 4).
  private static final String[] IMAGES = new String[]{IMG_URL_AOG, IMG_URL_GOOGLE_PAY,
      IMG_URL_GOOGLE_HOME, IMG_URL_GOOGLE_PIXEL};
  // Suggestion chips offered after every response; each maps to a case in text().
  private static final String[] SUGGESTIONS = new String[]{"Basic Card", "Browse Carousel",
      "Carousel", "List", "Media", "Table Card"};

  /** Entry intent: greets the user with the welcome response. */
  @ForIntent("actions.intent.MAIN")
  public ActionResponse main(ActionRequest request) {
    return welcome(request);
  }

  /**
   * Free-text intent: dispatches on the lowercased raw query to the matching
   * demo handler; unknown input falls through to the generic prompt.
   */
  @ForIntent("actions.intent.TEXT")
  public ActionResponse text(ActionRequest request) {
    String rawText = request.getRawInput().getQuery().toLowerCase();
    System.out.println("TEXT intent: user said - " + rawText);
    switch (rawText) {
      case "basic card":
        return basicCard(request);
      case "list":
        return selectionList(request);
      case "carousel":
        return selectionCarousel(request);
      case "browse carousel":
        return browseCarousel(request);
      case "normal ask":
        return normalAsk(request);
      case "normal bye":
        return normalBye(request);
      case "bye response":
        return byeResponse(request);
      case "table card":
        return tableCard(request);
      case "media":
        return mediaResponse(request);
      default:
        return normalAsk(request);
    }
  }

  /** Handles the user's pick from a selection list/carousel. */
  @ForIntent("actions.intent.OPTION")
  public ActionResponse itemSelected(ActionRequest request) {
    ResponseBuilder responseBuilder = getResponseBuilder(request);
    ResourceBundle rb = ResourceBundle
        .getBundle("resources", request.getLocale());
    String selectedItem = request.getSelectedOption();
    responseBuilder.add(getMsg(rb, "item_selected", selectedItem))
        .addSuggestions(SUGGESTIONS);
    return responseBuilder.build();
  }

  /** Reports media playback status events back to the user. */
  @ForIntent("actions.intent.MEDIA_STATUS")
  public ActionResponse handleMediaStatusEvent(ActionRequest request) {
    ResponseBuilder responseBuilder = getResponseBuilder(request);
    ResourceBundle rb = ResourceBundle
        .getBundle("resources", request.getLocale());
    String status = request.getMediaStatus();
    if (status == null) {
      // The platform may omit the status; show a placeholder instead of "null".
      status = "Unknown";
    }
    responseBuilder.add(getMsg(rb, "media_status_received", status))
        .addSuggestions(SUGGESTIONS);
    return responseBuilder.build();
  }

  /** Welcome response: two SimpleResponses with separate display/TTS strings. */
  private ActionResponse welcome(ActionRequest request) {
    ResponseBuilder responseBuilder = getResponseBuilder(request);
    ResourceBundle rb = ResourceBundle
        .getBundle("resources", request.getLocale());
    responseBuilder
        .add(
            new SimpleResponse()
                .setDisplayText(rb.getString("welcome_msg_1"))
                .setTextToSpeech(rb.getString("welcome_msg_2")))
        .add(
            new SimpleResponse()
                .setTextToSpeech(rb.getString("welcome_more_1"))
                .setDisplayText(rb.getString("welcome_more_2")))
        .addSuggestions(SUGGESTIONS);
    return responseBuilder.build();
  }

  /** Generic "what next?" prompt; also the fallback for unrecognized input. */
  private ActionResponse normalAsk(ActionRequest request) {
    ResponseBuilder responseBuilder = getResponseBuilder(request);
    ResourceBundle rb = ResourceBundle
        .getBundle("resources", request.getLocale());
    ActionResponse response = responseBuilder
        .add(rb.getString("normal_ask_text")).build();
    return response;
  }

  /** Demonstrates a BasicCard (image, formatted text, link-out button). */
  private ActionResponse basicCard(ActionRequest request) {
    ResponseBuilder responseBuilder = getResponseBuilder(request);
    ResourceBundle rb = ResourceBundle
        .getBundle("resources", request.getLocale());
    if (!request.hasCapability(Capability.SCREEN_OUTPUT.getValue())) {
      // Cards need a screen; voice-only surfaces get a plain message.
      return responseBuilder.add(rb.getString("msg_no_screen")).build();
    }
    Button learnMoreButton = new Button()
        .setTitle(rb.getString("basic_card_button_text"))
        .setOpenUrlAction(
            new OpenUrlAction().setUrl("https://assistant.google.com"));
    List<Button> buttons = new ArrayList<>();
    buttons.add(learnMoreButton);
    String text = rb.getString("basic_card_text");
    responseBuilder
        .add(rb.getString("basic_card_response"))
        .add(
            new BasicCard()
                .setTitle(rb.getString("basic_card_title"))
                .setSubtitle(rb.getString("basic_card_sub_title"))
                .setFormattedText(text)
                .setImage(
                    new Image()
                        .setUrl(IMG_URL_AOG)
                        .setAccessibilityText(
                            rb.getString("basic_card_alt_text")))
                .setButtons(buttons))
        .addSuggestions(SUGGESTIONS);
    return responseBuilder.build();
  }

  /** Demonstrates a selection list with four generated items (keys "1".."4"). */
  private ActionResponse selectionList(ActionRequest request) {
    ResponseBuilder responseBuilder = getResponseBuilder(request);
    ResourceBundle rb = ResourceBundle
        .getBundle("resources", request.getLocale());
    if (!request.hasCapability(Capability.SCREEN_OUTPUT.getValue())) {
      return responseBuilder.add(rb.getString("msg_no_screen")).build();
    }
    List<ListSelectListItem> items = new ArrayList<>();
    ListSelectListItem item;
    for (int i = 0; i < 4; i++) {
      item = new ListSelectListItem();
      item.setTitle(getMsg(rb, "list_item_title", i + 1))
          .setDescription(getMsg(rb, "list_item_desc", i + 1))
          .setImage(
              new Image()
                  .setUrl(IMAGES[i])
                  .setAccessibilityText(rb.getString("list_image_alt_text")))
          // Option key is what itemSelected() receives when this item is picked.
          .setOptionInfo(new OptionInfo().setKey(String.valueOf(i + 1)));
      items.add(item);
    }
    responseBuilder
        .add(rb.getString("list_response_title"))
        .add(new SelectionList().setTitle(rb.getString("list_title"))
            .setItems(items))
        .addSuggestions(SUGGESTIONS);
    return responseBuilder.build();
  }

  /** Demonstrates a selection carousel with four generated items. */
  private ActionResponse selectionCarousel(ActionRequest request) {
    ResponseBuilder responseBuilder = getResponseBuilder(request);
    ResourceBundle rb = ResourceBundle
        .getBundle("resources", request.getLocale());
    if (!request.hasCapability(Capability.SCREEN_OUTPUT.getValue())) {
      return responseBuilder.add(rb.getString("msg_no_screen")).build();
    }
    List<CarouselSelectCarouselItem> items = new ArrayList<>();
    CarouselSelectCarouselItem item;
    for (int i = 0; i < 4; i++) {
      item = new CarouselSelectCarouselItem();
      item.setTitle(getMsg(rb, "list_item_title", i + 1))
          .setDescription(getMsg(rb, "list_item_desc", i + 1))
          .setImage(
              new Image()
                  .setUrl(IMAGES[i])
                  .setAccessibilityText(rb.getString("list_image_alt_text")))
          .setOptionInfo(new OptionInfo().setKey(String.valueOf(i + 1)));
      items.add(item);
    }
    responseBuilder
        .add(rb.getString("selection_carousel_response_title"))
        .addSuggestions(SUGGESTIONS)
        .add(new SelectionCarousel().setItems(items));
    return responseBuilder.build();
  }

  /** Demonstrates a browse carousel; requires both a screen and a web browser. */
  private ActionResponse browseCarousel(ActionRequest request) {
    ResponseBuilder responseBuilder = getResponseBuilder(request);
    ResourceBundle rb = ResourceBundle
        .getBundle("resources", request.getLocale());
    if (!request.hasCapability(Capability.SCREEN_OUTPUT.getValue())) {
      return responseBuilder.add(rb.getString("msg_no_screen")).build();
    }
    if (!request.hasCapability(Capability.WEB_BROWSER.getValue())) {
      responseBuilder
          .add(rb.getString("msg_no_browser"))
          .addSuggestions(SUGGESTIONS);
      return responseBuilder.build();
    }
    String url = "https://www.google.com";
    List<CarouselBrowseItem> items = new ArrayList<>();
    CarouselBrowseItem item;
    for (int i = 0; i < 4; i++) {
      item = new CarouselBrowseItem();
      item.setTitle(getMsg(rb, "list_item_title", i + 1));
      item.setDescription(getMsg(rb, "list_item_desc", i + 1));
      // All demo items link to the same URL.
      item.setOpenUrlAction(new OpenUrlAction().setUrl(url));
      item.setImage(
          new Image().setUrl(IMAGES[i])
              .setAccessibilityText(rb.getString("list_image_alt_text")));
      item.setFooter(getMsg(rb, "list_item_footer", i + 1));
      items.add(item);
    }
    responseBuilder
        .add(rb.getString("browse_carousel_response"))
        .addSuggestions(SUGGESTIONS)
        .add(new CarouselBrowse().setItems(items));
    return responseBuilder.build();
  }

  /** Demonstrates an audio MediaResponse; playback events arrive via MEDIA_STATUS. */
  private ActionResponse mediaResponse(ActionRequest request) {
    ResponseBuilder responseBuilder = getResponseBuilder(request);
    ResourceBundle rb = ResourceBundle
        .getBundle("resources", request.getLocale());
    if (!request.hasCapability(Capability.MEDIA_RESPONSE_AUDIO.getValue())) {
      return responseBuilder.add(rb.getString("msg_no_media")).build();
    }
    List<MediaObject> mediaObjects = new ArrayList<>();
    mediaObjects.add(
        new MediaObject()
            .setName(rb.getString("media_name"))
            .setDescription(rb.getString("media_desc"))
            .setContentUrl(MEDIA_SOURCE)
            .setIcon(
                new Image()
                    .setUrl(IMG_URL_MEDIA)
                    .setAccessibilityText(
                        rb.getString("media_image_alt_text"))));
    responseBuilder
        .add(rb.getString("media_response"))
        .addSuggestions(SUGGESTIONS)
        .add(new MediaResponse().setMediaObjects(mediaObjects)
            .setMediaType("AUDIO"));
    return responseBuilder.build();
  }

  /** Demonstrates a TableCard with 3 columns and 4 generated rows. */
  private ActionResponse tableCard(ActionRequest request) {
    ResponseBuilder responseBuilder = getResponseBuilder(request);
    ResourceBundle rb = ResourceBundle
        .getBundle("resources", request.getLocale());
    if (!request.hasCapability(Capability.SCREEN_OUTPUT.getValue())) {
      return responseBuilder.add(rb.getString("msg_no_screen")).build();
    }
    List<TableCardColumnProperties> columnProperties = new ArrayList<>();
    columnProperties.add(new TableCardColumnProperties().setHeader(rb.getString("table_col_1")));
    columnProperties.add(new TableCardColumnProperties().setHeader(rb.getString("table_col_2")));
    columnProperties.add(new TableCardColumnProperties().setHeader(rb.getString("table_col_3")));
    List<TableCardRow> rows = new ArrayList<>();
    for (int i = 0; i < 4; i++) {
      List<TableCardCell> cells = new ArrayList<>();
      for (int j = 0; j < 3; j++) {
        // NOTE(review): cell text varies only by row (i); the column index j is
        // unused, so all cells in a row show the same value — confirm intended.
        cells.add(new TableCardCell()
            .setText(getMsg(rb, "table_cell_value", (i + 1))));
      }
      rows.add(new TableCardRow().setCells(cells));
    }
    TableCard table = new TableCard()
        .setTitle(rb.getString("table_title"))
        .setSubtitle(rb.getString("table_subtitle"))
        .setColumnProperties(columnProperties)
        .setRows(rows);
    responseBuilder.add(rb.getString("table_response")).add(table)
        .addSuggestions(SUGGESTIONS);
    return responseBuilder.build();
  }

  /** Ends the conversation with a plain text goodbye. */
  private ActionResponse normalBye(ActionRequest request) {
    ResponseBuilder responseBuilder = getResponseBuilder(request);
    ResourceBundle rb = ResourceBundle
        .getBundle("resources", request.getLocale());
    responseBuilder.add(rb.getString("bye_display_text")).endConversation();
    return responseBuilder.build();
  }

  /** Ends the conversation with distinct display text and TTS. */
  private ActionResponse byeResponse(ActionRequest request) {
    ResponseBuilder responseBuilder = getResponseBuilder(request);
    ResourceBundle rb = ResourceBundle
        .getBundle("resources", request.getLocale());
    responseBuilder
        .add(
            new SimpleResponse()
                .setDisplayText(rb.getString("bye_display_text"))
                .setTextToSpeech(rb.getString("bye_tts")))
        .endConversation();
    return responseBuilder.build();
  }

  /**
   * Looks up {@code key} in the bundle and formats it with {@code args} via
   * {@link MessageFormat}.
   */
  private String getMsg(ResourceBundle rb, String key, Object... args) {
    return MessageFormat.format(rb.getString(key), args);
  }
}
package realmrelay;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.TimeZone;
import java.util.Vector;

import realmrelay.net.ListenSocket;
import realmrelay.packets.Packet;

/**
 * Singleton relay/proxy entry point. Loads its settings from
 * {@code settings.properties} (writing a defaults file on first run), accepts
 * local client connections via a {@link ListenSocket}, and pumps each
 * connected {@link User} on a small pool of worker threads in {@link #main}.
 */
public final class ROTMGRelay {

	public static final ROTMGRelay instance = new ROTMGRelay();

	// #settings — defaults; overwritten from settings.properties in the constructor.
	public String listenHost = "localhost";
	public int listenPort = 2050;
	public boolean bUseProxy = false;
	public String proxyHost = "socks4or5.someproxy.net";
	public int proxyPort = 1080;
	public String remoteHost = "54.226.214.216";
	public int remotePort = 2050;
	//public String key0 = "311f80691451c71d09a13a2a6e";
	//Spublic String key1 = "72c5583cafb6818995cdd74b80";
	public String key0 = "311f80691451c71b09a13a2a6e";
	public String key1 = "72c5583cafb6818995cbd74b80";
	// #settings end

	private final ListenSocket listenSocket;

	// Active users; newUsers is the thread-safe hand-off list filled by the
	// accept callback and drained by the main loop.
	private final List<User> users = new ArrayList<User>();
	private final List<User> newUsers = new Vector<User>();

	private final Map<Integer, InetSocketAddress> gameIdSocketAddressMap = new Hashtable<Integer, InetSocketAddress>();
	private final Map<String, Object> globalVarMap = new Hashtable<String, Object>();

	/**
	 * Writes a defaults file if none exists, loads the effective settings, and
	 * creates (but does not start) the listen socket.
	 */
	private ROTMGRelay() {
		Properties p = new Properties();
		p.setProperty("listenHost", this.listenHost);
		p.setProperty("listenPort", String.valueOf(this.listenPort));
		p.setProperty("bUseProxy", String.valueOf(this.bUseProxy));
		p.setProperty("proxyHost", this.proxyHost);
		p.setProperty("proxyPort", String.valueOf(this.proxyPort));
		p.setProperty("remoteHost", this.remoteHost);
		p.setProperty("remotePort", String.valueOf(this.remotePort));
		p.setProperty("key0", this.key0);
		p.setProperty("key1", this.key1);
		File file = new File("settings.properties");
		if (!file.isFile()) {
			// First run: persist the defaults so the user has a file to edit.
			OutputStream out = null;
			try {
				out = new FileOutputStream(file);
				p.store(out, null);
			} catch (Exception e) {
				e.printStackTrace();
			} finally {
				// Close in finally so the stream is not leaked if store() throws.
				if (out != null) {
					try {
						out.close();
					} catch (IOException e) {
						e.printStackTrace();
					}
				}
			}
		}
		// Layer the loaded values over the defaults.
		p = new Properties(p);
		InputStream in = null;
		try {
			in = new FileInputStream(file);
			p.load(in);
			this.listenHost = p.getProperty("listenHost");
			this.listenPort = Integer.parseInt(p.getProperty("listenPort"));
			this.bUseProxy = Boolean.parseBoolean(p.getProperty("bUseProxy"));
			this.proxyHost = p.getProperty("proxyHost");
			this.proxyPort = Integer.parseInt(p.getProperty("proxyPort"));
			this.remoteHost = p.getProperty("remoteHost");
			this.remotePort = Integer.parseInt(p.getProperty("remotePort"));
			this.key0 = p.getProperty("key0");
			this.key1 = p.getProperty("key1");
		} catch (Exception e) {
			e.printStackTrace();
		} finally {
			// Close in finally so the stream is not leaked if load()/parsing throws.
			if (in != null) {
				try {
					in.close();
				} catch (IOException e) {
					e.printStackTrace();
				}
			}
		}
		this.listenSocket = new ListenSocket(this.listenHost, this.listenPort) {
			@Override
			public void socketAccepted(Socket localSocket) {
				try {
					User user = new User(localSocket);
					ROTMGRelay.instance.newUsers.add(user);
				} catch (Exception e) {
					e.printStackTrace();
					try {
						localSocket.close();
					} catch (IOException e1) {
						e1.printStackTrace();
					}
				}
			}
		};
	}

	/**
	 * Formats the current time as a GMT "HH:mm:ss" log prefix.
	 */
	private static String timestamp() {
		SimpleDateFormat sdf = new SimpleDateFormat("HH:mm:ss");
		sdf.setTimeZone(TimeZone.getTimeZone("GMT"));
		return sdf.format(new Date());
	}

	/**
	 * Prints a timestamped error message to stderr.
	 *
	 * @param message the message to log
	 */
	public static void error(String message) {
		String raw = timestamp() + " " + message;
		System.err.println(raw);
	}

	/**
	 * Prints a timestamped message to stdout.
	 *
	 * @param message the message to log
	 */
	public static void echo(String message) {
		String raw = timestamp() + " " + message;
		System.out.println(raw);
	}

	/** Returns the shared global variable for {@code var}, or null if unset. */
	public Object getGlobal(String var) {
		return this.globalVarMap.get(var);
	}

	/**
	 * Returns the remote address mapped to {@code gameId}, falling back to the
	 * configured default remote host/port when no mapping exists.
	 */
	public InetSocketAddress getSocketAddress(int gameId) {
		InetSocketAddress socketAddress = this.gameIdSocketAddressMap.get(gameId);
		if (socketAddress == null) {
			return new InetSocketAddress(this.remoteHost, this.remotePort);
		}
		return socketAddress;
	}

	/** Stores a shared global variable. */
	public void setGlobal(String var, Object value) {
		this.globalVarMap.put(var, value);
	}

	/** Maps {@code gameId} to a specific remote host/port. */
	public void setSocketAddress(int gameId, String host, int port) {
		InetSocketAddress socketAddress = new InetSocketAddress(host, port);
		this.gameIdSocketAddressMap.put(gameId, socketAddress);
	}

	/**
	 * Entry point: loads XML/packet data, starts the listener, then loops —
	 * absorbing newly accepted users, dropping closed ones, and processing each
	 * user on a worker thread (one per CPU core, reused round-robin) until the
	 * listen socket closes; finally kicks any remaining users.
	 */
	public static void main(String[] args) {
		try {
			GETXmlParse.parseXMLData();
		} catch (Exception e) {
			e.printStackTrace();
		}
		Packet.init();
		if (ROTMGRelay.instance.listenSocket.start()) {
			ROTMGRelay.echo("| Welcome to Project RR |");
			while (!ROTMGRelay.instance.listenSocket.isClosed()) {
				// Move freshly accepted users into the active list.
				while (!ROTMGRelay.instance.newUsers.isEmpty()) {
					User user = ROTMGRelay.instance.newUsers.remove(0);
					ROTMGRelay.instance.users.add(user);
					ROTMGRelay.echo("Connected " + user.localSocket);
					user.scriptManager.trigger("onEnable");
				}
				int cores = Runtime.getRuntime().availableProcessors();
				Thread[] threads = new Thread[cores];
				int core = 0;
				Iterator<User> i = ROTMGRelay.instance.users.iterator();
				while (i.hasNext()) {
					final User user = i.next();
					if (user.localSocket.isClosed()) {
						i.remove();
						continue;
					}
					// Wait for the previous occupant of this slot before reusing it.
					if (threads[core] != null) {
						try {
							threads[core].join();
						} catch (InterruptedException e) {
							e.printStackTrace();
						}
					}
					(threads[core] = new Thread(new Runnable() {
						@Override
						public void run() {
							user.process();
						}
					})).start();
					core = (core + 1) % cores;
				}
				// Drain all workers before the next pass.
				for (Thread thread : threads) {
					if (thread == null) {
						continue;
					}
					try {
						thread.join();
					} catch (InterruptedException e) {
						e.printStackTrace();
					}
				}
				Thread.yield();
			}
			// Listener closed: disconnect everyone still attached.
			Iterator<User> i = ROTMGRelay.instance.users.iterator();
			while (i.hasNext()) {
				User user = i.next();
				user.kick();
			}
		} else {
			ROTMGRelay.echo("Realm Relay listener problem. Make sure there are no instances of Realm Relay already running.");
		}
	}
}
package com.airbnb.airpal.modules;

import com.airbnb.airlift.http.client.OldJettyHttpClient;
import com.airbnb.airpal.AirpalConfiguration;
import com.airbnb.airpal.api.output.PersistentJobOutputFactory;
import com.airbnb.airpal.api.output.builders.OutputBuilderFactory;
import com.airbnb.airpal.api.output.persistors.CSVPersistorFactory;
import com.airbnb.airpal.api.output.persistors.PersistorFactory;
import com.airbnb.airpal.core.AirpalUserFactory;
import com.airbnb.airpal.core.execution.ExecutionClient;
import com.airbnb.airpal.core.health.PrestoHealthCheck;
import com.airbnb.airpal.core.store.files.ExpiringFileStore;
import com.airbnb.airpal.core.store.history.JobHistoryStore;
import com.airbnb.airpal.core.store.history.JobHistoryStoreDAO;
import com.airbnb.airpal.core.store.jobs.ActiveJobsStore;
import com.airbnb.airpal.core.store.jobs.InMemoryActiveJobsStore;
import com.airbnb.airpal.core.store.queries.QueryStore;
import com.airbnb.airpal.core.store.queries.QueryStoreDAO;
import com.airbnb.airpal.core.store.usage.CachingUsageStore;
import com.airbnb.airpal.core.store.usage.SQLUsageStore;
import com.airbnb.airpal.core.store.usage.UsageStore;
import com.airbnb.airpal.presto.ClientSessionFactory;
import com.airbnb.airpal.presto.QueryInfoClient;
import com.airbnb.airpal.presto.metadata.ColumnCache;
import com.airbnb.airpal.presto.metadata.PreviewTableCache;
import com.airbnb.airpal.presto.metadata.SchemaCache;
import com.airbnb.airpal.resources.ExecuteResource;
import com.airbnb.airpal.resources.FilesResource;
import com.airbnb.airpal.resources.HealthResource;
import com.airbnb.airpal.resources.PingResource;
import com.airbnb.airpal.resources.QueryResource;
import com.airbnb.airpal.resources.SessionResource;
import com.airbnb.airpal.resources.TablesResource;
import com.airbnb.airpal.resources.sse.SSEEventSourceServlet;
import com.airbnb.airpal.sql.beans.TableRow;
import com.airbnb.airpal.sql.jdbi.QueryStoreMapper;
import com.airbnb.airpal.sql.jdbi.URIArgumentFactory;
import com.airbnb.airpal.sql.jdbi.UUIDArgumentFactory;
import com.amazonaws.auth.AWSCredentials;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3Client;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Strings;
import com.google.common.eventbus.AsyncEventBus;
import com.google.common.eventbus.EventBus;
import com.google.inject.AbstractModule;
import com.google.inject.Provider;
import com.google.inject.Provides;
import com.google.inject.Scopes;
import com.google.inject.Singleton;
import com.google.inject.name.Names;
import io.airlift.configuration.ConfigurationFactory;
import io.airlift.http.client.AsyncHttpClient;
import io.airlift.http.client.HttpClientConfig;
import io.airlift.units.DataSize;
import io.airlift.units.Duration;
import io.dropwizard.jdbi.DBIFactory;
import io.dropwizard.setup.Environment;
import lombok.extern.slf4j.Slf4j;
import org.apache.shiro.web.env.EnvironmentLoaderListener;
import org.skife.jdbi.v2.DBI;

import javax.inject.Named;
import java.net.URI;
import java.util.Collections;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

import static com.airbnb.airpal.presto.QueryRunner.QueryRunnerFactory;

/**
 * Main Guice module for the Airpal application. Binds the REST/SSE resources
 * as singletons and provides the application's infrastructure: the JDBI/DBI
 * database handle, Presto client plumbing (session factory, query runner,
 * metadata caches), executor services, the async event bus, S3 output
 * persistence, and the various job/query/usage stores — all driven by the
 * injected {@link AirpalConfiguration} and Dropwizard {@link Environment}.
 */
@Slf4j
public class AirpalModule extends AbstractModule
{
    private final AirpalConfiguration config;
    private final Environment environment;

    public AirpalModule(AirpalConfiguration config, Environment environment)
    {
        this.config = config;
        this.environment = environment;
    }

    @Override
    protected void configure()
    {
        // HTTP resources and servlets, one instance each.
        bind(TablesResource.class).in(Scopes.SINGLETON);
        bind(ExecuteResource.class).in(Scopes.SINGLETON);
        bind(QueryResource.class).in(Scopes.SINGLETON);
        bind(HealthResource.class).in(Scopes.SINGLETON);
        bind(PingResource.class).in(Scopes.SINGLETON);
        bind(SessionResource.class).in(Scopes.SINGLETON);
        bind(SSEEventSourceServlet.class).in(Scopes.SINGLETON);
        bind(FilesResource.class).in(Scopes.SINGLETON);
        bind(EnvironmentLoaderListener.class).in(Scopes.SINGLETON);

        // Named configuration constants. The S3 bucket may be unset, hence nullToEmpty.
        bind(String.class).annotatedWith(Names.named("createTableDestinationSchema")).toInstance(config.getCreateTableDestinationSchema());
        bind(String.class).annotatedWith(Names.named("s3Bucket")).toInstance(Strings.nullToEmpty(config.getS3Bucket()));

        bind(PrestoHealthCheck.class).in(Scopes.SINGLETON);
        bind(ExecutionClient.class).in(Scopes.SINGLETON);
        bind(PersistentJobOutputFactory.class).in(Scopes.SINGLETON);

        // Job history is backed by the SQL DAO implementation.
        bind(JobHistoryStore.class).to(JobHistoryStoreDAO.class).in(Scopes.SINGLETON);
    }

    /**
     * Builds the shared DBI handle from the Dropwizard data source and registers
     * the row mappers and argument factories Airpal's DAOs rely on.
     */
    @Singleton
    @Provides
    public DBI provideDBI(ObjectMapper objectMapper)
            throws ClassNotFoundException
    {
        final DBIFactory factory = new DBIFactory();
        final DBI dbi = factory.build(environment, config.getDataSourceFactory(), "mysql");
        dbi.registerMapper(new TableRow.TableRowMapper(objectMapper));
        dbi.registerMapper(new QueryStoreMapper(objectMapper));
        dbi.registerArgumentFactory(new UUIDArgumentFactory());
        dbi.registerArgumentFactory(new URIArgumentFactory());
        return dbi;
    }

    /** Airlift configuration factory with no property overrides. */
    @Singleton
    @Provides
    public ConfigurationFactory provideConfigurationFactory()
    {
        return new ConfigurationFactory(Collections.<String, String>emptyMap());
    }

    /** HTTP client used by the query runner; 10s connect timeout. */
    @Singleton
    @Named("query-runner-http-client")
    @Provides
    public AsyncHttpClient provideQueryRunnerHttpClient()
    {
        final HttpClientConfig httpClientConfig = new HttpClientConfig().setConnectTimeout(new Duration(10, TimeUnit.SECONDS));
        return new OldJettyHttpClient(httpClientConfig);
    }

    /** URI of the Presto coordinator, straight from configuration. */
    @Named("coordinator-uri")
    @Provides
    public URI providePrestoCoordinatorURI()
    {
        return config.getPrestoCoordinator();
    }

    /** Default Presto catalog name. */
    @Singleton
    @Named("default-catalog")
    @Provides
    public String provideDefaultCatalog()
    {
        return config.getPrestoCatalog();
    }

    /** Factory for Presto client sessions against the coordinator. */
    @Provides
    @Singleton
    public ClientSessionFactory provideClientSessionFactory(@Named("coordinator-uri") Provider<URI> uriProvider)
    {
        return new ClientSessionFactory(uriProvider,
                config.getPrestoUser(),
                config.getPrestoSource(),
                config.getPrestoCatalog(),
                config.getPrestoSchema(),
                config.isPrestoDebug());
    }

    /** Factory for query runners bound to the dedicated HTTP client. */
    @Provides
    public QueryRunnerFactory provideQueryRunner(ClientSessionFactory sessionFactory,
            @Named("query-runner-http-client") AsyncHttpClient httpClient)
    {
        return new QueryRunnerFactory(sessionFactory, httpClient);
    }

    /** Client for fetching info about in-flight/completed queries. */
    @Provides
    public QueryInfoClient provideQueryInfoClient()
    {
        return QueryInfoClient.create();
    }

    /**
     * Schema metadata cache, eagerly populated for the configured catalog at
     * startup.
     */
    @Singleton
    @Provides
    public SchemaCache provideSchemaCache(QueryRunnerFactory queryRunnerFactory,
            @Named("presto") ExecutorService executorService)
    {
        final SchemaCache cache = new SchemaCache(queryRunnerFactory, executorService);
        cache.populateCache(config.getPrestoCatalog());
        return cache;
    }

    /** Column metadata cache (5 min refresh, 60 min expiry). */
    @Singleton
    @Provides
    public ColumnCache provideColumnCache(QueryRunnerFactory queryRunnerFactory,
            @Named("presto") ExecutorService executorService)
    {
        return new ColumnCache(queryRunnerFactory,
                new Duration(5, TimeUnit.MINUTES),
                new Duration(60, TimeUnit.MINUTES),
                executorService);
    }

    /** Table preview cache (20 min expiry, 100 preview rows). */
    @Singleton
    @Provides
    public PreviewTableCache providePreviewTableCache(QueryRunnerFactory queryRunnerFactory,
            @Named("presto") ExecutorService executorService)
    {
        return new PreviewTableCache(queryRunnerFactory,
                new Duration(20, TimeUnit.MINUTES),
                executorService,
                100);
    }

    /** Daemon-thread pool backing the async event bus. */
    @Singleton
    @Named("event-bus")
    @Provides
    public ExecutorService provideEventBusExecutorService()
    {
        return Executors.newCachedThreadPool(SchemaCache.daemonThreadsNamed("event-bus-%d"));
    }

    /** Daemon-thread pool for Presto metadata/cache work. */
    @Singleton
    @Named("presto")
    @Provides
    public ExecutorService provideCompleterExecutorService()
    {
        return Executors.newCachedThreadPool(SchemaCache.daemonThreadsNamed("presto-%d"));
    }

    /** Single-threaded scheduler for periodic table-cache updates. */
    @Singleton
    @Named("hive")
    @Provides
    public ScheduledExecutorService provideTableCacheUpdater()
    {
        return Executors.newSingleThreadScheduledExecutor();
    }

    /** Daemon-thread pool serving SSE connections. */
    @Singleton
    @Named("sse")
    @Provides
    public ExecutorService provideSSEExecutorService()
    {
        return Executors.newCachedThreadPool(SchemaCache.daemonThreadsNamed("sse-%d"));
    }

    /** Async event bus running on the dedicated event-bus executor. */
    @Singleton
    @Provides
    public EventBus provideEventBus(@Named("event-bus") ExecutorService executor)
    {
        return new AsyncEventBus(executor);
    }

    /**
     * AWS credentials from configuration; returns null when either key is
     * missing so the S3 client falls back to its default credential chain.
     */
    @Provides
    public AWSCredentials provideAWSCredentials()
    {
        if ((config.getS3AccessKey() == null) || (config.getS3SecretKey() == null)) {
            return null;
        }
        else {
            return new BasicAWSCredentials(config.getS3AccessKey(),
                    config.getS3SecretKey());
        }
    }

    /** S3 client, using explicit credentials when configured. */
    @Singleton
    @Provides
    public AmazonS3 provideAmazonS3Client(AWSCredentials awsCredentials)
    {
        if (awsCredentials == null) {
            return new AmazonS3Client();
        }
        return new AmazonS3Client(awsCredentials);
    }

    /** SQL-backed usage store wrapped in a 6-minute cache. */
    @Singleton
    @Provides
    public UsageStore provideUsageCache(DBI dbi)
    {
        UsageStore delegate = new SQLUsageStore(config.getUsageWindow(), dbi);
        return new CachingUsageStore(delegate, io.dropwizard.util.Duration.minutes(6));
    }

    /** On-demand JDBI DAO for saved queries. */
    @Provides
    public QueryStore provideQueryStore(DBI dbi)
    {
        return dbi.onDemand(QueryStoreDAO.class);
    }

    /** User factory with a 15-minute session duration and "default" access level. */
    @Provides
    @Singleton
    public AirpalUserFactory provideAirpalUserFactory()
    {
        return new AirpalUserFactory(config.getPrestoSchema(), org.joda.time.Duration.standardMinutes(15), "default");
    }

    /** In-memory registry of currently running jobs. */
    @Provides
    @Singleton
    public ActiveJobsStore provideActiveJobsStore()
    {
        return new InMemoryActiveJobsStore();
    }

    /** In-memory file store capped at 100 MB for downloadable results. */
    @Provides
    @Singleton
    public ExpiringFileStore provideExpiringFileStore()
    {
        return new ExpiringFileStore(new DataSize(100, DataSize.Unit.MEGABYTE));
    }

    /** CSV persistor writing to S3 when enabled, else to the local file store. */
    @Provides
    @Singleton
    public CSVPersistorFactory provideCSVPersistorFactory(ExpiringFileStore fileStore,
            AmazonS3 s3Client,
            @Named("s3Bucket") String s3Bucket)
    {
        return new CSVPersistorFactory(config.isUseS3(), s3Client, s3Bucket, fileStore);
    }

    /** Aggregate persistor factory over the CSV persistor. */
    @Provides
    @Singleton
    public PersistorFactory providePersistorFactory(CSVPersistorFactory csvPersistorFactory)
    {
        return new PersistorFactory(csvPersistorFactory);
    }

    /** Output builder factory limited to the configured max output size in bytes. */
    @Provides
    @Singleton
    public OutputBuilderFactory provideOutputBuilderFactory()
    {
        long maxFileSizeInBytes = Math.round(Math.floor(config.getMaxOutputSize().getValue(DataSize.Unit.BYTE)));
        return new OutputBuilderFactory(maxFileSizeInBytes);
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.drill.exec.store.easy.json;

import java.io.IOException;
import java.io.InputStream;
import java.util.List;

import org.apache.drill.common.exceptions.ExecutionSetupException;
import org.apache.drill.common.exceptions.UserException;
import org.apache.drill.common.expression.SchemaPath;
import org.apache.drill.exec.ExecConstants;
import org.apache.drill.exec.exception.OutOfMemoryException;
import org.apache.drill.exec.ops.FragmentContext;
import org.apache.drill.exec.ops.OperatorContext;
import org.apache.drill.exec.physical.impl.OutputMutator;
import org.apache.drill.exec.store.AbstractRecordReader;
import org.apache.drill.exec.store.dfs.DrillFileSystem;
import org.apache.drill.exec.store.easy.json.JsonProcessor.ReadState;
import org.apache.drill.exec.store.easy.json.reader.CountingJsonReader;
import org.apache.drill.exec.vector.BaseValueVector;
import org.apache.drill.exec.vector.complex.fn.JsonReader;
import org.apache.drill.exec.vector.complex.impl.VectorContainerWriter;
import org.apache.hadoop.fs.Path;

import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.databind.JsonNode;
import org.apache.drill.shaded.guava.com.google.common.base.Preconditions;
import org.apache.drill.shaded.guava.com.google.common.collect.ImmutableList;

/**
 * Record reader that materializes JSON into Drill value vectors.
 *
 * <p>Reads either from a file-system path (possibly compressed) or from an
 * in-memory, already-parsed {@link JsonNode} ("embedded content" mode) —
 * exactly one of the two sources is set. Rows are produced in batches of up
 * to {@link #DEFAULT_ROWS_PER_BATCH} records per call to {@link #next()}.
 */
public class JSONRecordReader extends AbstractRecordReader {
  private static final org.slf4j.Logger logger =
      org.slf4j.LoggerFactory.getLogger(JSONRecordReader.class);

  public static final long DEFAULT_ROWS_PER_BATCH = BaseValueVector.INITIAL_VALUE_ALLOCATION;

  private VectorContainerWriter writer;

  // Data we're consuming: exactly one of hadoopPath/embeddedContent is non-null.
  private Path hadoopPath;
  private JsonNode embeddedContent;
  private InputStream stream;
  private final DrillFileSystem fileSystem;
  private JsonProcessor jsonReader;
  // Records written into the current batch.
  private int recordCount;
  // Records written in all previous batches (used for error context).
  private long runningRecordCount = 0;
  private final FragmentContext fragmentContext;
  private final boolean enableAllTextMode;
  private final boolean enableNanInf;
  private final boolean readNumbersAsDouble;
  private final boolean unionEnabled;
  private long parseErrorCount;
  private final boolean skipMalformedJSONRecords;
  private final boolean printSkippedMalformedJSONRecordLineNumber;
  // Result of the most recent jsonReader.write() call; EOF-error state is sticky
  // across batches so next() returns 0 immediately after a terminal parse error.
  ReadState write = null;

  /**
   * Create a JSON Record Reader that uses a file based input stream.
   *
   * @param fragmentContext the fragment context supplying options and buffers
   * @param inputPath path of the JSON file to read
   * @param fileSystem file system used to open {@code inputPath}
   * @param columns pathnames of columns/subfields to read
   * @throws OutOfMemoryException if allocation fails
   */
  public JSONRecordReader(final FragmentContext fragmentContext, final String inputPath,
      final DrillFileSystem fileSystem, final List<SchemaPath> columns)
      throws OutOfMemoryException {
    this(fragmentContext, inputPath, null, fileSystem, columns);
  }

  /**
   * Create a new JSON Record Reader that uses an in-memory materialized JSON stream.
   *
   * @param fragmentContext the fragment context supplying options and buffers
   * @param embeddedContent pre-parsed JSON content to read
   * @param fileSystem file system handle (unused for embedded content)
   * @param columns pathnames of columns/subfields to read
   * @throws OutOfMemoryException if allocation fails
   */
  public JSONRecordReader(final FragmentContext fragmentContext, final JsonNode embeddedContent,
      final DrillFileSystem fileSystem, final List<SchemaPath> columns)
      throws OutOfMemoryException {
    this(fragmentContext, null, embeddedContent, fileSystem, columns);
  }

  private JSONRecordReader(final FragmentContext fragmentContext, final String inputPath,
      final JsonNode embeddedContent, final DrillFileSystem fileSystem,
      final List<SchemaPath> columns) {
    Preconditions.checkArgument(
        (inputPath == null && embeddedContent != null) ||
        (inputPath != null && embeddedContent == null),
        "One of inputPath or embeddedContent must be set but not both."
        );

    if (inputPath != null) {
      this.hadoopPath = new Path(inputPath);
    } else {
      this.embeddedContent = embeddedContent;
    }

    this.fileSystem = fileSystem;
    this.fragmentContext = fragmentContext;

    // Only enable all-text mode / numbers-as-double / union types when reading
    // from a file; embedded content keeps its native JSON types.
    this.enableAllTextMode = embeddedContent == null
        && fragmentContext.getOptions().getOption(ExecConstants.JSON_READER_ALL_TEXT_MODE_VALIDATOR);
    this.enableNanInf =
        fragmentContext.getOptions().getOption(ExecConstants.JSON_READER_NAN_INF_NUMBERS_VALIDATOR);
    this.readNumbersAsDouble = embeddedContent == null
        && fragmentContext.getOptions().getOption(ExecConstants.JSON_READ_NUMBERS_AS_DOUBLE_VALIDATOR);
    this.unionEnabled = embeddedContent == null
        && fragmentContext.getOptions().getBoolean(ExecConstants.ENABLE_UNION_TYPE_KEY);
    this.skipMalformedJSONRecords =
        fragmentContext.getOptions().getOption(ExecConstants.JSON_SKIP_MALFORMED_RECORDS_VALIDATOR);
    this.printSkippedMalformedJSONRecordLineNumber =
        fragmentContext.getOptions().getOption(ExecConstants.JSON_READER_PRINT_INVALID_RECORDS_LINE_NOS_FLAG_VALIDATOR);
    setColumns(columns);
  }

  @Override
  public String toString() {
    return super.toString()
        + "[hadoopPath = " + hadoopPath
        + ", currentRecord=" + currentRecordNumberInFile()
        + ", jsonReader=" + jsonReader
        + ", recordCount = " + recordCount
        + ", parseErrorCount = " + parseErrorCount
        + ", runningRecordCount = " + runningRecordCount + ", ...]";
  }

  @Override
  public void setup(final OperatorContext context, final OutputMutator output)
      throws ExecutionSetupException {
    try {
      if (hadoopPath != null) {
        this.stream = fileSystem.openPossiblyCompressedStream(hadoopPath);
      }

      this.writer = new VectorContainerWriter(output, unionEnabled);
      if (isSkipQuery()) {
        // SELECT COUNT(*)-style query: only count records, don't build vectors.
        this.jsonReader = new CountingJsonReader(fragmentContext.getManagedBuffer(), enableNanInf);
      } else {
        this.jsonReader = new JsonReader.Builder(fragmentContext.getManagedBuffer())
            .schemaPathColumns(ImmutableList.copyOf(getColumns()))
            .allTextMode(enableAllTextMode)
            .skipOuterList(true)
            .readNumbersAsDouble(readNumbersAsDouble)
            .enableNanInf(enableNanInf)
            .build();
      }
      setupParser();
    } catch (final Exception e) {
      handleAndRaise("Failure reading JSON file", e);
    }
  }

  @Override
  protected List<SchemaPath> getDefaultColumnsToRead() {
    return ImmutableList.of();
  }

  private void setupParser() throws IOException {
    if (hadoopPath != null) {
      jsonReader.setSource(stream);
    } else {
      jsonReader.setSource(embeddedContent);
    }
    jsonReader.setIgnoreJSONParseErrors(skipMalformedJSONRecords);
  }

  /**
   * Wraps {@code e} in a {@link UserException} with file/record/column context
   * and throws it.
   *
   * @param suffix human-readable description of the failing operation
   * @param e the underlying cause
   */
  protected void handleAndRaise(String suffix, Exception e) throws UserException {
    String message = e.getMessage();
    int columnNr = -1;

    if (e instanceof JsonParseException) {
      final JsonParseException ex = (JsonParseException) e;
      message = ex.getOriginalMessage();
      columnNr = ex.getLocation().getColumnNr();
    }

    UserException.Builder exceptionBuilder = UserException.dataReadError(e)
        .message("%s - %s", suffix, message);
    if (columnNr > 0) {
      exceptionBuilder.pushContext("Column ", columnNr);
    }

    if (hadoopPath != null) {
      exceptionBuilder.pushContext("Record ", currentRecordNumberInFile())
          .pushContext("File ", hadoopPath.toUri().getPath());
    }

    throw exceptionBuilder.build(logger);
  }

  private long currentRecordNumberInFile() {
    return runningRecordCount + recordCount + 1;
  }

  @Override
  public int next() {
    writer.allocate();
    writer.reset();
    recordCount = 0;
    parseErrorCount = 0;
    if (write == ReadState.JSON_RECORD_PARSE_EOF_ERROR) {
      // A terminal parse error was hit in a previous batch; nothing more to read.
      return recordCount;
    }
    while (recordCount < DEFAULT_ROWS_PER_BATCH) {
      try {
        writer.setPosition(recordCount);
        write = jsonReader.write(writer);
        if (write == ReadState.WRITE_SUCCEED) {
          recordCount++;
        } else if (write == ReadState.JSON_RECORD_PARSE_ERROR
            || write == ReadState.JSON_RECORD_PARSE_EOF_ERROR) {
          if (!skipMalformedJSONRecords) {
            handleAndRaise("Error parsing JSON", new Exception());
          }
          ++parseErrorCount;
          if (printSkippedMalformedJSONRecordLineNumber) {
            // hadoopPath is null in embedded-content mode; guard against NPE
            // and use parameterized logging so the message is only built when
            // debug is enabled.
            final String source = (hadoopPath != null) ? hadoopPath.getName() : "<embedded content>";
            logger.debug("Error parsing JSON in {} : line nos :{}",
                source, recordCount + parseErrorCount);
          }
          if (write == ReadState.JSON_RECORD_PARSE_EOF_ERROR) {
            break;
          }
        } else {
          break;
        }
      } catch (IOException ex) {
        handleAndRaise("Error parsing JSON", ex);
      }
    }
    // Skip empty json file with 0 row.
    // Only when data source has > 0 row, ensure the batch has one field.
    if (recordCount > 0) {
      jsonReader.ensureAtLeastOneField(writer);
    }
    writer.setValueCount(recordCount);
    updateRunningCount();
    return recordCount;
  }

  private void updateRunningCount() {
    runningRecordCount += recordCount;
  }

  @Override
  public void close() throws Exception {
    if (stream != null) {
      stream.close();
    }
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.undertow.rest;

import java.util.List;
import java.util.Map;
import java.util.TreeMap;

import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.camel.Exchange;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mock.MockEndpoint;
import org.apache.camel.component.undertow.BaseUndertowTest;
import org.apache.camel.model.Model;
import org.apache.camel.model.rest.RestBindingMode;
import org.apache.camel.model.rest.RestDefinition;
import org.apache.camel.model.rest.VerbDefinition;
import org.junit.jupiter.api.Test;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;

/**
 * Tests REST DSL pojo binding over Undertow: verifies that {@code type}/{@code outType}
 * are recorded correctly on the model, that GET responses are JSON-serialized pojos,
 * and that PUT requests with a mismatched payload are rejected with HTTP 400.
 */
public class RestUndertowHttpPojoTypeTest extends BaseUndertowTest {

    private final ObjectMapper mapper = new ObjectMapper();

    @Test
    public void testUndertowPojoTypeValidateModel() throws Exception {
        // Wasn't clear if there's a way to put this test into camel-core just to test the model
        // perhaps without starting the Camel Context?
        List<RestDefinition> restDefinitions = context().getExtension(Model.class).getRestDefinitions();
        assertNotNull(restDefinitions);
        assertTrue(restDefinitions.size() > 0);

        RestDefinition restDefinition = restDefinitions.get(0);
        List<VerbDefinition> verbs = restDefinition.getVerbs();
        assertNotNull(verbs);

        // Index the verbs by id so each one can be checked regardless of declaration order.
        Map<String, VerbDefinition> mapVerb = new TreeMap<>();
        verbs.forEach(verb -> mapVerb.put(verb.getId(), verb));

        assertEquals(UserPojo[].class.getCanonicalName(), mapVerb.get("getUsers").getOutType());
        assertEquals(UserPojo[].class.getCanonicalName(), mapVerb.get("getUsersList").getOutType());
        assertEquals(UserPojo.class.getCanonicalName(), mapVerb.get("getUser").getOutType());
        assertEquals(UserPojo.class.getCanonicalName(), mapVerb.get("putUser").getType());
        assertEquals(UserPojo[].class.getCanonicalName(), mapVerb.get("putUsers").getType());
        assertEquals(UserPojo[].class.getCanonicalName(), mapVerb.get("putUsersList").getType());
    }

    @Test
    public void testUndertowPojoTypeGetUsers() throws Exception {
        Exchange outExchange = template.request("undertow:http://localhost:{{port}}/users", exchange -> {
            exchange.getIn().setHeader(Exchange.HTTP_METHOD, "GET");
            exchange.getIn().setHeader(Exchange.ACCEPT_CONTENT_TYPE, "application/json");
        });

        assertNotNull(outExchange);
        assertEquals("application/json", outExchange.getMessage().getHeader(Exchange.CONTENT_TYPE));

        String out = outExchange.getMessage().getBody(String.class);
        assertNotNull(out);

        UserPojo[] users = mapper.readValue(out, UserPojo[].class);
        assertEquals(2, users.length);
        assertEquals("Scott", users[0].getName());
        assertEquals("Claus", users[1].getName());
    }

    @Test
    public void testUndertowPojoTypePutUser() throws Exception {
        Exchange outExchange = template.request("undertow:http://localhost:{{port}}/users/1", exchange -> {
            exchange.getIn().setHeader(Exchange.HTTP_METHOD, "PUT");
            exchange.getIn().setHeader(Exchange.ACCEPT_CONTENT_TYPE, "application/json");
            exchange.getIn().setHeader(Exchange.CONTENT_TYPE, "application/json");

            UserPojo user = new UserPojo();
            user.setId(1);
            user.setName("Scott");
            String body = mapper.writeValueAsString(user);
            exchange.getIn().setBody(body);
        });

        assertNotNull(outExchange);
        assertEquals(200, outExchange.getMessage().getHeader(Exchange.HTTP_RESPONSE_CODE));
    }

    @Test
    public void testUndertowPojoTypePutUserFail() throws Exception {
        MockEndpoint mock = getMockEndpoint("mock:putUser");
        mock.expectedMessageCount(0);

        // Sending a CountryPojo where a UserPojo is expected must fail binding -> 400.
        Exchange outExchange = template.request("undertow:http://localhost:{{port}}/users/1", exchange -> {
            exchange.getIn().setHeader(Exchange.HTTP_METHOD, "PUT");
            exchange.getIn().setHeader(Exchange.ACCEPT_CONTENT_TYPE, "application/json");
            exchange.getIn().setHeader(Exchange.CONTENT_TYPE, "application/json");

            CountryPojo country = new CountryPojo();
            country.setIso("US");
            country.setCountry("United States");
            String body = mapper.writeValueAsString(country);
            exchange.getIn().setBody(body);
        });

        assertNotNull(outExchange);
        assertEquals(400, outExchange.getMessage().getHeader(Exchange.HTTP_RESPONSE_CODE));

        assertMockEndpointsSatisfied();
    }

    @Test
    public void testUndertowPojoTypePutUsers() throws Exception {
        UserPojo user1 = new UserPojo();
        user1.setId(1);
        user1.setName("Scott");
        UserPojo user2 = new UserPojo();
        user2.setId(2);
        user2.setName("Claus");
        final UserPojo[] users = new UserPojo[] { user1, user2 };

        MockEndpoint mock = getMockEndpoint("mock:putUsers");
        mock.expectedMessageCount(1);
        mock.message(0).body(UserPojo[].class);

        Exchange outExchange = template.request("undertow:http://localhost:{{port}}/users", exchange -> {
            exchange.getIn().setHeader(Exchange.HTTP_METHOD, "PUT");
            exchange.getIn().setHeader(Exchange.ACCEPT_CONTENT_TYPE, "application/json");
            exchange.getIn().setHeader(Exchange.CONTENT_TYPE, "application/json");

            String body = mapper.writeValueAsString(users);
            exchange.getIn().setBody(body);
        });

        assertNotNull(outExchange);
        assertEquals(200, outExchange.getMessage().getHeader(Exchange.HTTP_RESPONSE_CODE));

        assertMockEndpointsSatisfied();

        Exchange exchange = mock.assertExchangeReceived(0);
        UserPojo[] receivedUsers = exchange.getIn().getBody(UserPojo[].class);
        assertEquals(2, receivedUsers.length);
        assertEquals(user1.getName(), receivedUsers[0].getName());
        assertEquals(user2.getName(), receivedUsers[1].getName());
    }

    @Test
    public void testUndertowPojoTypePutUsersFail() throws Exception {
        MockEndpoint mock = getMockEndpoint("mock:putUsers");
        mock.expectedMessageCount(0);

        // Sending a single UserPojo where a UserPojo[] is expected must fail binding -> 400.
        Exchange outExchange = template.request("undertow:http://localhost:{{port}}/users", exchange -> {
            exchange.getIn().setHeader(Exchange.HTTP_METHOD, "PUT");
            exchange.getIn().setHeader(Exchange.ACCEPT_CONTENT_TYPE, "application/json");
            exchange.getIn().setHeader(Exchange.CONTENT_TYPE, "application/json");

            UserPojo user = new UserPojo();
            user.setId(1);
            user.setName("Scott");
            String body = mapper.writeValueAsString(user);
            exchange.getIn().setBody(body);
        });

        assertNotNull(outExchange);
        assertEquals(400, outExchange.getMessage().getHeader(Exchange.HTTP_RESPONSE_CODE));

        assertMockEndpointsSatisfied();
    }

    @Test
    public void testUndertowPojoTypePutUsersList() throws Exception {
        UserPojo user1 = new UserPojo();
        user1.setId(1);
        user1.setName("Scott");
        UserPojo user2 = new UserPojo();
        user2.setId(2);
        user2.setName("Claus");
        final UserPojo[] users = new UserPojo[] { user1, user2 };

        MockEndpoint mock = getMockEndpoint("mock:putUsersList");
        mock.expectedMessageCount(1);
        mock.message(0).body(UserPojo[].class);

        Exchange outExchange = template.request("undertow:http://localhost:{{port}}/users/list", exchange -> {
            exchange.getIn().setHeader(Exchange.HTTP_METHOD, "PUT");
            exchange.getIn().setHeader(Exchange.ACCEPT_CONTENT_TYPE, "application/json");
            exchange.getIn().setHeader(Exchange.CONTENT_TYPE, "application/json");

            String body = mapper.writeValueAsString(users);
            exchange.getIn().setBody(body);
        });

        assertNotNull(outExchange);
        assertEquals(200, outExchange.getMessage().getHeader(Exchange.HTTP_RESPONSE_CODE));

        assertMockEndpointsSatisfied();

        Exchange exchange = mock.assertExchangeReceived(0);
        UserPojo[] receivedUsers = exchange.getIn().getBody(UserPojo[].class);
        assertEquals(2, receivedUsers.length);
        assertEquals(user1.getName(), receivedUsers[0].getName());
        assertEquals(user2.getName(), receivedUsers[1].getName());
    }

    @Test
    public void testUndertowPojoTypePutUsersListFail() throws Exception {
        MockEndpoint mock = getMockEndpoint("mock:putUsersList");
        mock.expectedMessageCount(0);

        // Sending a single UserPojo where a UserPojo[] is expected must fail binding -> 400.
        Exchange outExchange = template.request("undertow:http://localhost:{{port}}/users/list", exchange -> {
            exchange.getIn().setHeader(Exchange.HTTP_METHOD, "PUT");
            exchange.getIn().setHeader(Exchange.ACCEPT_CONTENT_TYPE, "application/json");
            exchange.getIn().setHeader(Exchange.CONTENT_TYPE, "application/json");

            UserPojo user = new UserPojo();
            user.setId(1);
            user.setName("Scott");
            String body = mapper.writeValueAsString(user);
            exchange.getIn().setBody(body);
        });

        assertNotNull(outExchange);
        assertEquals(400, outExchange.getMessage().getHeader(Exchange.HTTP_RESPONSE_CODE));

        assertMockEndpointsSatisfied();
    }

    @Override
    protected RouteBuilder createRouteBuilder() throws Exception {
        return new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                onException(Exception.class)
                        .handled(true)
                        .setHeader(Exchange.HTTP_RESPONSE_CODE, constant(400))
                        .setHeader(Exchange.CONTENT_TYPE, constant("text/plain"))
                        .setBody().simple("${exchange.message}");

                // configure to use undertow on localhost with the given port
                restConfiguration().component("undertow").host("localhost").port(getPort())
                        .bindingMode(RestBindingMode.json);

                // use the rest DSL to define the rest services
                rest()
                        .get("/users").id("getUsers").outType(UserPojo[].class).to("direct:users")
                        .get("/users/list").id("getUsersList").outType(UserPojo[].class).to("direct:list")
                        .get("/users/{id}").id("getUser").outType(UserPojo.class).to("direct:id")
                        .put("/users/{id}").id("putUser").type(UserPojo.class).to("mock:putUser")
                        .put("/users").id("putUsers").type(UserPojo[].class).to("mock:putUsers")
                        .put("/users/list").id("putUsersList").type(UserPojo[].class).to("mock:putUsersList");

                from("direct:users")
                        .process(exchange -> {
                            UserPojo user1 = new UserPojo();
                            user1.setId(1);
                            user1.setName("Scott");
                            UserPojo user2 = new UserPojo();
                            user2.setId(2);
                            user2.setName("Claus");
                            // use getMessage() instead of the deprecated getOut(),
                            // consistent with the other routes below
                            exchange.getMessage().setBody(new UserPojo[] { user1, user2 });
                        });

                from("direct:list")
                        .process(exchange -> {
                            UserPojo user1 = new UserPojo();
                            user1.setId(1);
                            user1.setName("Scott");
                            UserPojo user2 = new UserPojo();
                            user2.setId(2);
                            user2.setName("Claus");
                            exchange.getMessage().setBody(new UserPojo[] { user1, user2 });
                        });

                from("direct:id")
                        .process(exchange -> {
                            UserPojo user1 = new UserPojo();
                            user1.setId(exchange.getIn().getHeader("id", int.class));
                            user1.setName("Scott");
                            exchange.getMessage().setBody(user1);
                        });
            }
        };
    }
}
/*
 * (c) Copyright IBM Corp 2001, 2006
 */

package com.ibm.wsdl.util;

import java.io.*;
import java.util.*;
import java.net.URL;
import java.net.MalformedURLException;

/**
 * Deals with strings (probably need to elaborate some more).
 *
 * @author Matthew J. Duftler
 */
public class StringUtils
{
  public static final String lineSeparator =
    System.getProperty("line.separator", "\n");
  public static final String lineSeparatorStr = cleanString(lineSeparator);

  /**
   * Ensure that escape sequences are passed through properly, by
   * escaping quotes, backslashes, newlines and carriage returns.
   *
   * @param str the string to clean, may be null
   * @return the escaped string, or null if str was null
   */
  public static String cleanString(String str)
  {
    if (str == null)
    {
      return null;
    }

    // StringBuilder: no shared state here, so the synchronized
    // StringBuffer is unnecessary.
    StringBuilder sBuf = new StringBuilder();

    for (char c : str.toCharArray())
    {
      switch (c)
      {
        case '\"' :
          sBuf.append("\\\"");
          break;
        case '\\' :
          sBuf.append("\\\\");
          break;
        case '\n' :
          sBuf.append("\\n");
          break;
        case '\r' :
          sBuf.append("\\r");
          break;
        default :
          sBuf.append(c);
          break;
      }
    }

    return sBuf.toString();
  }

  /*
    This method will return the correct name for a class object representing
    a primitive, a single instance of a class, as well as n-dimensional arrays
    of primitives or instances. This logic is needed to handle the string returned
    from Class.getName(). If the class object represents a single instance (or
    a primitive), Class.getName() returns the fully-qualified name of the class
    and no further work is needed. However, if the class object represents an
    array (of n dimensions), Class.getName() returns a Descriptor (the Descriptor
    grammar is defined in section 4.3 of the Java VM Spec). This method will
    parse the Descriptor if necessary.
  */
  public static String getClassName(Class targetClass)
  {
    String className = targetClass.getName();

    return targetClass.isArray() ? parseDescriptor(className) : className;
  }

  /*
    See the comment above for getClassName(targetClass)...
  */
  private static String parseDescriptor(String className)
  {
    char[] classNameChars = className.toCharArray();
    int arrayDim = 0;
    int i = 0;

    // Count leading '[' characters: one per array dimension.
    while (classNameChars[i] == '[')
    {
      arrayDim++;
      i++;
    }

    StringBuilder classNameBuf = new StringBuilder();

    // The element-type code follows the '[' run (JVM spec section 4.3).
    switch (classNameChars[i++])
    {
      case 'B' : classNameBuf.append("byte");
                 break;
      case 'C' : classNameBuf.append("char");
                 break;
      case 'D' : classNameBuf.append("double");
                 break;
      case 'F' : classNameBuf.append("float");
                 break;
      case 'I' : classNameBuf.append("int");
                 break;
      case 'J' : classNameBuf.append("long");
                 break;
      case 'S' : classNameBuf.append("short");
                 break;
      case 'Z' : classNameBuf.append("boolean");
                 break;
      case 'L' : // Object type: "Lfully.qualified.Name;" — strip 'L' and ';'.
                 classNameBuf.append(classNameChars, i, classNameChars.length - i - 1);
                 break;
    }

    for (i = 0; i < arrayDim; i++)
    {
      classNameBuf.append("[]");
    }

    return classNameBuf.toString();
  }

  /**
   * Resolve a spec against a context URL, falling back to interpreting
   * the spec as a file path.
   *
   * @param contextURL the context in which to attempt to resolve the spec.
   *                   Effectively a document base.
   * @param spec the location to resolve
   * @return the resolved URL
   * @throws MalformedURLException if the spec is a relative path and no
   *         context is available, or the spec cannot be turned into a URL
   */
  public static URL getURL(URL contextURL, String spec) throws MalformedURLException
  {
    try
    {
      return new URL(contextURL, spec);
    }
    catch (MalformedURLException e)
    {
      File tempFile = new File(spec);

      // contextURL != null was redundant inside the || short-circuit.
      if (contextURL == null || tempFile.isAbsolute())
      {
        // toURI().toURL() properly escapes characters that are illegal in
        // URLs; File.toURL() is deprecated because it does not.
        return tempFile.toURI().toURL();
      }

      // Only reach here if the contextURL != null, spec is a relative path
      // and MalformedURLException was thrown.
      throw e;
    }
  }

  /**
   * Returns an InputStream for reading from the specified resource, if the
   * resource points to a stream.
   *
   * @param url the resource to open; must not be null
   * @return the opened stream (caller is responsible for closing it)
   * @throws IllegalArgumentException if url is null or has no content
   * @throws SecurityException if the SecurityManager disallows the access
   * @throws IOException if the stream cannot be opened
   */
  public static InputStream getContentAsInputStream(URL url)
    throws SecurityException, IllegalArgumentException, IOException
  {
    if (url == null)
    {
      throw new IllegalArgumentException("URL cannot be null.");
    }

    try
    {
      InputStream content = url.openStream();

      if (content == null)
      {
        throw new IllegalArgumentException("No content.");
      }

      return content;
    }
    catch (SecurityException e)
    {
      // Preserve the original exception as the cause.
      throw new SecurityException("Your JVM's SecurityManager has "
                                  + "disallowed this.", e);
    }
    catch (FileNotFoundException e)
    {
      FileNotFoundException fnfe =
        new FileNotFoundException("This file was not found: " + url);

      // FileNotFoundException has no (message, cause) constructor.
      fnfe.initCause(e);

      throw fnfe;
    }
  }

  /**
   * Splits a space-separated NMTOKENS string into its individual tokens.
   *
   * @param nmTokens the space-separated token string
   * @return a List of the String tokens, in order
   */
  public static List parseNMTokens(String nmTokens)
  {
    StringTokenizer strTok = new StringTokenizer(nmTokens, " ");
    List tokens = new ArrayList();

    while (strTok.hasMoreTokens())
    {
      tokens.add(strTok.nextToken());
    }

    return tokens;
  }

  /**
   * Joins a List of String tokens back into a single space-separated string.
   *
   * @param list the tokens to join, may be null
   * @return the space-separated string, or null if list was null
   */
  public static String getNMTokens(List list)
  {
    if (list == null)
    {
      return null;
    }

    StringBuilder strBuf = new StringBuilder();
    int size = list.size();

    for (int i = 0; i < size; i++)
    {
      if (i > 0)
      {
        strBuf.append(' ');
      }

      strBuf.append((String)list.get(i));
    }

    return strBuf.toString();
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.beam.sdk.extensions.sql.impl.interpreter.operator.math;

import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.List;
import org.apache.beam.sdk.extensions.sql.impl.interpreter.BeamSqlFnExecutorTestBase;
import org.apache.beam.sdk.extensions.sql.impl.interpreter.operator.BeamSqlExpression;
import org.apache.beam.sdk.extensions.sql.impl.interpreter.operator.BeamSqlPrimitive;
import org.apache.calcite.sql.type.SqlTypeName;
import org.junit.Assert;
import org.junit.Test;

/**
 * Test for {@link BeamSqlMathUnaryExpression}.
 */
public class BeamSqlMathUnaryExpressionTest extends BeamSqlFnExecutorTestBase {

  @Test public void testForGreaterThanOneOperands() {
    List<BeamSqlExpression> operands = new ArrayList<>();

    // operands more than 1 not allowed
    operands.add(BeamSqlPrimitive.of(SqlTypeName.INTEGER, 2));
    operands.add(BeamSqlPrimitive.of(SqlTypeName.INTEGER, 4));
    Assert.assertFalse(new BeamSqlAbsExpression(operands).accept());
  }

  @Test public void testForOperandsType() {
    List<BeamSqlExpression> operands = new ArrayList<>();

    // varchar operand not allowed
    operands.add(BeamSqlPrimitive.of(SqlTypeName.VARCHAR, "2"));
    Assert.assertFalse(new BeamSqlAbsExpression(operands).accept());
  }

  @Test public void testForUnaryExpressions() {
    List<BeamSqlExpression> operands = new ArrayList<>();

    // test for abs function
    // (removed a dead SMALLINT operand that was added and then cleared
    // without ever being asserted on)
    operands.add(BeamSqlPrimitive.of(SqlTypeName.BIGINT, -28965734597L));
    Assert.assertEquals(28965734597L,
        new BeamSqlAbsExpression(operands).evaluate(record, null).getValue());
  }

  @Test public void testForLnExpression() {
    List<BeamSqlExpression> operands = new ArrayList<>();

    // test for LN function with operand type smallint
    operands.add(BeamSqlPrimitive.of(SqlTypeName.SMALLINT, Short.valueOf("2")));
    Assert.assertEquals(Math.log(2),
        new BeamSqlLnExpression(operands).evaluate(record, null).getValue());

    // test for LN function with operand type double
    operands.clear();
    operands.add(BeamSqlPrimitive.of(SqlTypeName.DOUBLE, 2.4));
    Assert.assertEquals(Math.log(2.4),
        new BeamSqlLnExpression(operands).evaluate(record, null).getValue());

    // test for LN function with operand type decimal
    operands.clear();
    operands.add(BeamSqlPrimitive.of(SqlTypeName.DECIMAL, BigDecimal.valueOf(2.56)));
    Assert.assertEquals(Math.log(2.56),
        new BeamSqlLnExpression(operands).evaluate(record, null).getValue());
  }

  @Test public void testForLog10Expression() {
    List<BeamSqlExpression> operands = new ArrayList<>();

    // test for log10 function with operand type smallint
    operands.add(BeamSqlPrimitive.of(SqlTypeName.SMALLINT, Short.valueOf("2")));
    Assert.assertEquals(Math.log10(2),
        new BeamSqlLogExpression(operands).evaluate(record, null).getValue());

    // test for log10 function with operand type double
    operands.clear();
    operands.add(BeamSqlPrimitive.of(SqlTypeName.DOUBLE, 2.4));
    Assert.assertEquals(Math.log10(2.4),
        new BeamSqlLogExpression(operands).evaluate(record, null).getValue());

    // test for log10 function with operand type decimal
    operands.clear();
    operands.add(BeamSqlPrimitive.of(SqlTypeName.DECIMAL, BigDecimal.valueOf(2.56)));
    Assert.assertEquals(Math.log10(2.56),
        new BeamSqlLogExpression(operands).evaluate(record, null).getValue());
  }

  @Test public void testForExpExpression() {
    List<BeamSqlExpression> operands = new ArrayList<>();

    // test for exp function with operand type smallint
    operands.add(BeamSqlPrimitive.of(SqlTypeName.SMALLINT, Short.valueOf("2")));
    Assert.assertEquals(Math.exp(2),
        new BeamSqlExpExpression(operands).evaluate(record, null).getValue());

    // test for exp function with operand type double
    operands.clear();
    operands.add(BeamSqlPrimitive.of(SqlTypeName.DOUBLE, 2.4));
    Assert.assertEquals(Math.exp(2.4),
        new BeamSqlExpExpression(operands).evaluate(record, null).getValue());

    // test for exp function with operand type decimal
    operands.clear();
    operands.add(BeamSqlPrimitive.of(SqlTypeName.DECIMAL, BigDecimal.valueOf(2.56)));
    Assert.assertEquals(Math.exp(2.56),
        new BeamSqlExpExpression(operands).evaluate(record, null).getValue());
  }

  @Test public void testForAcosExpression() {
    List<BeamSqlExpression> operands = new ArrayList<>();

    // test for acos function with operand type smallint (out of [-1, 1] -> NaN)
    operands.add(BeamSqlPrimitive.of(SqlTypeName.SMALLINT, Short.valueOf("2")));
    Assert.assertEquals(Double.NaN,
        new BeamSqlAcosExpression(operands).evaluate(record, null).getValue());

    // test for acos function with operand type double
    operands.clear();
    operands.add(BeamSqlPrimitive.of(SqlTypeName.DOUBLE, 0.45));
    Assert.assertEquals(Math.acos(0.45),
        new BeamSqlAcosExpression(operands).evaluate(record, null).getValue());

    // test for acos function with operand type decimal
    operands.clear();
    operands.add(BeamSqlPrimitive.of(SqlTypeName.DECIMAL, BigDecimal.valueOf(-0.367)));
    Assert.assertEquals(Math.acos(-0.367),
        new BeamSqlAcosExpression(operands).evaluate(record, null).getValue());
  }

  @Test public void testForAsinExpression() {
    List<BeamSqlExpression> operands = new ArrayList<>();

    // test for asin function with operand type double
    operands.add(BeamSqlPrimitive.of(SqlTypeName.DOUBLE, 0.45));
    Assert.assertEquals(Math.asin(0.45),
        new BeamSqlAsinExpression(operands).evaluate(record, null).getValue());

    // test for asin function with operand type decimal
    operands.clear();
    operands.add(BeamSqlPrimitive.of(SqlTypeName.DECIMAL, BigDecimal.valueOf(-0.367)));
    Assert.assertEquals(Math.asin(-0.367),
        new BeamSqlAsinExpression(operands).evaluate(record, null).getValue());
  }

  @Test public void testForAtanExpression() {
    List<BeamSqlExpression> operands = new ArrayList<>();

    // test for atan function with operand type double
    operands.add(BeamSqlPrimitive.of(SqlTypeName.DOUBLE, 0.45));
    Assert.assertEquals(Math.atan(0.45),
        new BeamSqlAtanExpression(operands).evaluate(record, null).getValue());

    // test for atan function with operand type decimal
    operands.clear();
    operands.add(BeamSqlPrimitive.of(SqlTypeName.DECIMAL, BigDecimal.valueOf(-0.367)));
    Assert.assertEquals(Math.atan(-0.367),
        new BeamSqlAtanExpression(operands).evaluate(record, null).getValue());
  }

  @Test public void testForCosExpression() {
    List<BeamSqlExpression> operands = new ArrayList<>();

    // test for cos function with operand type double
    operands.add(BeamSqlPrimitive.of(SqlTypeName.DOUBLE, 0.45));
    Assert.assertEquals(Math.cos(0.45),
        new BeamSqlCosExpression(operands).evaluate(record, null).getValue());

    // test for cos function with operand type decimal
    operands.clear();
    operands.add(BeamSqlPrimitive.of(SqlTypeName.DECIMAL, BigDecimal.valueOf(-0.367)));
    Assert.assertEquals(Math.cos(-0.367),
        new BeamSqlCosExpression(operands).evaluate(record, null).getValue());
  }

  @Test public void testForCotExpression() {
    List<BeamSqlExpression> operands = new ArrayList<>();

    // test for cot function with operand type double
    operands.add(BeamSqlPrimitive.of(SqlTypeName.DOUBLE, .45));
    Assert.assertEquals(1.0d / Math.tan(0.45),
        new BeamSqlCotExpression(operands).evaluate(record, null).getValue());

    // test for cot function with operand type decimal
    operands.clear();
    operands.add(BeamSqlPrimitive.of(SqlTypeName.DECIMAL, BigDecimal.valueOf(-.367)));
    Assert.assertEquals(1.0d / Math.tan(-0.367),
        new BeamSqlCotExpression(operands).evaluate(record, null).getValue());
  }

  @Test public void testForDegreesExpression() {
    List<BeamSqlExpression> operands = new ArrayList<>();

    // test for degrees function with operand type smallint
    operands.add(BeamSqlPrimitive.of(SqlTypeName.SMALLINT, Short.valueOf("2")));
    Assert.assertEquals(Math.toDegrees(2),
        new BeamSqlDegreesExpression(operands).evaluate(record, null).getValue());

    // test for degrees function with operand type double
    operands.clear();
    operands.add(BeamSqlPrimitive.of(SqlTypeName.DOUBLE, 2.4));
    Assert.assertEquals(Math.toDegrees(2.4),
        new BeamSqlDegreesExpression(operands).evaluate(record, null).getValue());

    // test for degrees function with operand type decimal
    operands.clear();
    operands.add(BeamSqlPrimitive.of(SqlTypeName.DECIMAL, BigDecimal.valueOf(2.56)));
    Assert.assertEquals(Math.toDegrees(2.56),
        new BeamSqlDegreesExpression(operands).evaluate(record, null).getValue());
  }

  @Test public void testForRadiansExpression() {
    List<BeamSqlExpression> operands = new ArrayList<>();

    // test for radians function with operand type smallint
    operands.add(BeamSqlPrimitive.of(SqlTypeName.SMALLINT, Short.valueOf("2")));
    Assert.assertEquals(Math.toRadians(2),
        new BeamSqlRadiansExpression(operands).evaluate(record, null).getValue());

    // test for radians function with operand type double
    operands.clear();
    operands.add(BeamSqlPrimitive.of(SqlTypeName.DOUBLE, 2.4));
    Assert.assertEquals(Math.toRadians(2.4),
        new BeamSqlRadiansExpression(operands).evaluate(record, null).getValue());

    // test for radians function with operand type decimal
    operands.clear();
    operands.add(BeamSqlPrimitive.of(SqlTypeName.DECIMAL, BigDecimal.valueOf(2.56)));
    Assert.assertEquals(Math.toRadians(2.56),
        new BeamSqlRadiansExpression(operands).evaluate(record, null).getValue());
  }

  @Test public void testForSinExpression() {
    List<BeamSqlExpression> operands = new ArrayList<>();

    // test for sin function with operand type smallint
    operands.add(BeamSqlPrimitive.of(SqlTypeName.SMALLINT, Short.valueOf("2")));
    Assert.assertEquals(Math.sin(2),
        new BeamSqlSinExpression(operands).evaluate(record, null).getValue());

    // test for sin function with operand type double
    operands.clear();
    operands.add(BeamSqlPrimitive.of(SqlTypeName.DOUBLE, 2.4));
    Assert.assertEquals(Math.sin(2.4),
        new BeamSqlSinExpression(operands).evaluate(record, null).getValue());

    // test for sin function with operand type decimal
    operands.clear();
    operands.add(BeamSqlPrimitive.of(SqlTypeName.DECIMAL, BigDecimal.valueOf(2.56)));
    Assert.assertEquals(Math.sin(2.56),
        new BeamSqlSinExpression(operands).evaluate(record, null).getValue());
  }

  @Test public void testForTanExpression() {
    List<BeamSqlExpression> operands = new ArrayList<>();

    // test for tan function with operand type smallint
    operands.add(BeamSqlPrimitive.of(SqlTypeName.SMALLINT, Short.valueOf("2")));
    Assert.assertEquals(Math.tan(2),
        new BeamSqlTanExpression(operands).evaluate(record, null).getValue());

    // test for tan function with operand type double
    operands.clear();
    operands.add(BeamSqlPrimitive.of(SqlTypeName.DOUBLE, 2.4));
    Assert.assertEquals(Math.tan(2.4),
        new BeamSqlTanExpression(operands).evaluate(record, null).getValue());

    // test for tan function with operand type decimal
    operands.clear();
    operands.add(BeamSqlPrimitive.of(SqlTypeName.DECIMAL, BigDecimal.valueOf(2.56)));
    Assert.assertEquals(Math.tan(2.56),
        new BeamSqlTanExpression(operands).evaluate(record, null).getValue());
  }

  @Test public void testForSignExpression() {
    List<BeamSqlExpression> operands = new ArrayList<>();

    // test for sign function with operand type smallint
    operands.add(BeamSqlPrimitive.of(SqlTypeName.SMALLINT, Short.valueOf("2")));
    Assert.assertEquals((short) 1 ,
        new BeamSqlSignExpression(operands).evaluate(record, null).getValue());

    // test for sign function with operand type double
    operands.clear();
    operands.add(BeamSqlPrimitive.of(SqlTypeName.DOUBLE, 2.4));
    Assert.assertEquals(1.0,
        new BeamSqlSignExpression(operands).evaluate(record, null).getValue());

    // test for sign function with operand type decimal
    operands.clear();
    operands.add(BeamSqlPrimitive.of(SqlTypeName.DECIMAL, BigDecimal.valueOf(2.56)));
    Assert.assertEquals(BigDecimal.ONE,
        new BeamSqlSignExpression(operands).evaluate(record, null).getValue());
  }

  @Test public void testForPi() {
    Assert.assertEquals(Math.PI, new BeamSqlPiExpression().evaluate(record, null).getValue());
  }

  @Test public void testForCeil() {
    List<BeamSqlExpression> operands = new ArrayList<>();

    operands.add(BeamSqlPrimitive.of(SqlTypeName.DOUBLE, 2.68687979));
    Assert.assertEquals(Math.ceil(2.68687979),
        new BeamSqlCeilExpression(operands).evaluate(record, null).getValue());
  }

  @Test public void testForFloor() {
    List<BeamSqlExpression> operands = new ArrayList<>();

    operands.add(BeamSqlPrimitive.of(SqlTypeName.DOUBLE, 2.68687979));
    Assert.assertEquals(Math.floor(2.68687979),
        new BeamSqlFloorExpression(operands).evaluate(record, null).getValue());
  }
}
/*
 * Copyright 2012-2016 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.boot.autoconfigure.orm.jpa;

import java.net.URL;
import java.util.List;
import java.util.Map;

import javax.persistence.EntityManagerFactory;
import javax.sql.DataSource;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import org.springframework.beans.BeansException;
import org.springframework.beans.factory.BeanFactory;
import org.springframework.beans.factory.BeanFactoryAware;
import org.springframework.beans.factory.ObjectProvider;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.boot.autoconfigure.AutoConfigurationPackages;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.boot.autoconfigure.condition.ConditionalOnWebApplication;
import org.springframework.boot.autoconfigure.domain.EntityScanPackages;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.boot.orm.jpa.EntityManagerFactoryBuilder;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.context.annotation.Primary;
import org.springframework.orm.jpa.JpaTransactionManager;
import org.springframework.orm.jpa.JpaVendorAdapter;
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
import org.springframework.orm.jpa.persistenceunit.PersistenceUnitManager;
import org.springframework.orm.jpa.support.OpenEntityManagerInViewFilter;
import org.springframework.orm.jpa.support.OpenEntityManagerInViewInterceptor;
import org.springframework.orm.jpa.vendor.AbstractJpaVendorAdapter;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.jta.JtaTransactionManager;
import org.springframework.util.ResourceUtils;
import org.springframework.web.servlet.config.annotation.InterceptorRegistry;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurerAdapter;

/**
 * Base {@link EnableAutoConfiguration Auto-configuration} for JPA.
 *
 * @author Phillip Webb
 * @author Dave Syer
 * @author Oliver Gierke
 * @author Andy Wilkinson
 */
@EnableConfigurationProperties(JpaProperties.class)
@Import(DataSourceInitializedPublisher.Registrar.class)
public abstract class JpaBaseConfiguration implements BeanFactoryAware {

	private static final Log logger = LogFactory.getLog(JpaBaseConfiguration.class);

	private final DataSource dataSource;

	private final JpaProperties properties;

	// Null when no JTA transaction manager bean is available (see constructor).
	private final JtaTransactionManager jtaTransactionManager;

	// Populated via setBeanFactory, used to resolve packages to scan.
	private ConfigurableListableBeanFactory beanFactory;

	/**
	 * Create a new {@link JpaBaseConfiguration} instance.
	 * @param dataSource the data source
	 * @param properties the JPA properties
	 * @param jtaTransactionManagerProvider provider for an (optional) JTA transaction
	 * manager; {@code getIfAvailable()} yields {@code null} when none is defined
	 */
	protected JpaBaseConfiguration(DataSource dataSource, JpaProperties properties,
			ObjectProvider<JtaTransactionManager> jtaTransactionManagerProvider) {
		this.dataSource = dataSource;
		this.properties = properties;
		this.jtaTransactionManager = jtaTransactionManagerProvider.getIfAvailable();
	}

	/**
	 * Default {@link JpaTransactionManager}, created only when no other
	 * {@link PlatformTransactionManager} bean has been defined.
	 * @return the transaction manager
	 */
	@Bean
	@ConditionalOnMissingBean(PlatformTransactionManager.class)
	public PlatformTransactionManager transactionManager() {
		return new JpaTransactionManager();
	}

	/**
	 * {@link JpaVendorAdapter} configured from {@link JpaProperties} (show-sql,
	 * database, database platform and generate-ddl settings).
	 * @return the vendor adapter
	 */
	@Bean
	@ConditionalOnMissingBean
	public JpaVendorAdapter jpaVendorAdapter() {
		AbstractJpaVendorAdapter adapter = createJpaVendorAdapter();
		adapter.setShowSql(this.properties.isShowSql());
		adapter.setDatabase(this.properties.getDatabase());
		adapter.setDatabasePlatform(this.properties.getDatabasePlatform());
		adapter.setGenerateDdl(this.properties.isGenerateDdl());
		return adapter;
	}

	/**
	 * {@link EntityManagerFactoryBuilder} combining the vendor adapter, the JPA
	 * properties, an optional {@link PersistenceUnitManager} and the detected
	 * persistence unit root location.
	 * @param jpaVendorAdapter the vendor adapter
	 * @param persistenceUnitManagerProvider provider for an optional persistence unit
	 * manager
	 * @return the builder
	 */
	@Bean
	@ConditionalOnMissingBean
	public EntityManagerFactoryBuilder entityManagerFactoryBuilder(
			JpaVendorAdapter jpaVendorAdapter,
			ObjectProvider<PersistenceUnitManager> persistenceUnitManagerProvider) {
		EntityManagerFactoryBuilder builder = new EntityManagerFactoryBuilder(
				jpaVendorAdapter, this.properties.getProperties(),
				persistenceUnitManagerProvider.getIfAvailable(),
				determinePersistenceUnitRootLocation());
		builder.setCallback(getVendorCallback());
		return builder;
	}

	/**
	 * Primary {@link LocalContainerEntityManagerFactoryBean}, created only when no
	 * entity manager factory bean of any kind has been defined. Applies the (possibly
	 * customized) vendor properties, the packages to scan and the JTA flag.
	 * @param factoryBuilder the builder to use
	 * @return the entity manager factory bean
	 */
	@Bean
	@Primary
	@ConditionalOnMissingBean({ LocalContainerEntityManagerFactoryBean.class,
			EntityManagerFactory.class })
	public LocalContainerEntityManagerFactoryBean entityManagerFactory(
			EntityManagerFactoryBuilder factoryBuilder) {
		Map<String, Object> vendorProperties = getVendorProperties();
		customizeVendorProperties(vendorProperties);
		return factoryBuilder.dataSource(this.dataSource).packages(getPackagesToScan())
				.properties(vendorProperties).jta(isJta()).build();
	}

	/**
	 * Create the vendor-specific {@link AbstractJpaVendorAdapter}.
	 * @return the vendor adapter
	 */
	protected abstract AbstractJpaVendorAdapter createJpaVendorAdapter();

	/**
	 * Return the vendor-specific properties to apply to the entity manager factory.
	 * @return the vendor properties
	 */
	protected abstract Map<String, Object> getVendorProperties();

	/**
	 * Customize vendor properties before they are used. Allows for post processing (for
	 * example to configure JTA specific settings).
	 * @param vendorProperties the vendor properties to customize
	 */
	protected void customizeVendorProperties(Map<String, Object> vendorProperties) {
	}

	/**
	 * Return the callback applied to the {@link EntityManagerFactoryBuilder}, or
	 * {@code null} for none (subclasses may override).
	 * @return the callback or {@code null}
	 */
	protected EntityManagerFactoryBuilder.EntityManagerFactoryBeanCallback getVendorCallback() {
		return null;
	}

	/**
	 * Return the packages to scan for entities: {@code @EntityScan} packages when
	 * present, falling back to the auto-configuration base packages.
	 * @return the packages to scan
	 */
	protected String[] getPackagesToScan() {
		List<String> packages = EntityScanPackages.get(this.beanFactory)
				.getPackageNames();
		if (packages.isEmpty() && AutoConfigurationPackages.has(this.beanFactory)) {
			packages = AutoConfigurationPackages.get(this.beanFactory);
		}
		return packages.toArray(new String[packages.size()]);
	}

	/**
	 * Return the JTA transaction manager.
	 * @return the transaction manager or {@code null}
	 */
	protected JtaTransactionManager getJtaTransactionManager() {
		return this.jtaTransactionManager;
	}

	/**
	 * Returns if a JTA {@link PlatformTransactionManager} is being used.
	 * @return if a JTA transaction manager is being used
	 */
	protected final boolean isJta() {
		return (this.jtaTransactionManager != null);
	}

	/**
	 * Return the {@link JpaProperties}.
	 * @return the properties
	 */
	protected final JpaProperties getProperties() {
		return this.properties;
	}

	/**
	 * Return the {@link DataSource}.
	 * @return the data source
	 */
	protected final DataSource getDataSource() {
		return this.dataSource;
	}

	@Override
	public void setBeanFactory(BeanFactory beanFactory) throws BeansException {
		this.beanFactory = (ConfigurableListableBeanFactory) beanFactory;
	}

	/**
	 * Determine the persistence unit root location from the code source of this class,
	 * logging (at info) and returning {@code null} when it cannot be resolved.
	 * @return the root location URL or {@code null}
	 */
	private URL determinePersistenceUnitRootLocation() {
		Class<?> source = getClass();
		try {
			URL url = source.getProtectionDomain().getCodeSource().getLocation();
			return ResourceUtils.extractJarFileURL(url);
		}
		catch (Exception ex) {
			logger.info("Could not determine persistence " + "unit root location from "
					+ source + " : " + ex);
		}
		return null;
	}

	/**
	 * Web-only configuration enabling the Open EntityManager in View pattern, active
	 * unless {@code spring.jpa.open-in-view=false} or an interceptor/filter bean is
	 * already defined.
	 */
	@Configuration
	@ConditionalOnWebApplication
	@ConditionalOnClass(WebMvcConfigurerAdapter.class)
	@ConditionalOnMissingBean({ OpenEntityManagerInViewInterceptor.class,
			OpenEntityManagerInViewFilter.class })
	@ConditionalOnProperty(prefix = "spring.jpa", name = "open-in-view", havingValue = "true", matchIfMissing = true)
	protected static class JpaWebConfiguration {

		// Defined as a nested config to ensure WebMvcConfigurerAdapter is not read when
		// not on the classpath
		@Configuration
		protected static class JpaWebMvcConfiguration extends WebMvcConfigurerAdapter {

			@Bean
			public OpenEntityManagerInViewInterceptor openEntityManagerInViewInterceptor() {
				return new OpenEntityManagerInViewInterceptor();
			}

			@Override
			public void addInterceptors(InterceptorRegistry registry) {
				registry.addWebRequestInterceptor(openEntityManagerInViewInterceptor());
			}

		}

	}

}
package ca.ericbannatyne.nfa;

import static org.junit.Assert.*;

import java.util.HashSet;
import java.util.Set;

import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;

/**
 * Unit tests for {@link NFA} over the binary alphabet {'0', '1'}.
 *
 * Note: JUnit's assertEquals takes (expected, actual) in that order; the
 * assertions below follow that convention so failure messages read correctly.
 */
public class NFATest {

	private static Set<Character> alphabet;
	private NFA nfa;

	@BeforeClass
	public static void setUpBeforeClass() throws Exception {
		alphabet = new HashSet<Character>();
		alphabet.add('1');
		alphabet.add('0');
	}

	@AfterClass
	public static void tearDownAfterClass() throws Exception {
		alphabet = null;
	}

	@Before
	public void setUp() throws Exception {
		nfa = new NFA(alphabet);
	}

	@After
	public void tearDown() throws Exception {
		nfa = null;
	}

	@Test
	public void testNFASetOfCharacter() {
		// Alphabet-only constructor: no states of any kind yet.
		nfa = new NFA(alphabet);
		assertSame(NFA.class, nfa.getClass());
		assertFalse(nfa.isRunning());
		assertEquals(0, nfa.getStates().size());
		assertEquals(0, nfa.getStartStates().size());
		assertEquals(0, nfa.getFinalStates().size());
	}

	@Test
	public void testNFASetOfCharacterSetOfState() {
		State state1 = new State(nfa, "state1");
		State state2 = new State(nfa, "state2");
		State state3 = new State(nfa, "state3");
		Set<State> states = new HashSet<State>();
		states.add(state1);
		states.add(state2);
		states.add(state3);

		nfa = new NFA(alphabet, states);
		assertFalse(nfa.isRunning());
		assertEquals(states, nfa.getStates());
		assertEquals(0, nfa.getStartStates().size());
		assertEquals(0, nfa.getFinalStates().size());
	}

	@Test
	public void testNFASetOfCharacterSetOfStateSetOfState() {
		State state1 = new State(nfa, "state1");
		State state2 = new State(nfa, "state2");
		State state3 = new State(nfa, "state3");
		Set<State> states = new HashSet<State>();
		states.add(state1);
		states.add(state2);
		states.add(state3);

		Set<State> startStates = new HashSet<State>();
		startStates.add(state1);

		nfa = new NFA(alphabet, states, startStates);
		assertFalse(nfa.isRunning());
		assertEquals(states, nfa.getStates());
		assertEquals(startStates, nfa.getStartStates());
		assertEquals(0, nfa.getFinalStates().size());

		// A start state that is not in the NFA's state set must be rejected.
		State state4 = new State(nfa, "state4");
		startStates.add(state4);
		try {
			new NFA(alphabet, states, startStates);
			fail();
		} catch (UnknownStateException e) {
			// expected: state4 is not a member of states
		}
	}

	@Test
	public void testNFASetOfCharacterSetOfStateSetOfStateSetOfState() {
		State state1 = new State(nfa, "state1");
		State state2 = new State(nfa, "state2");
		State state3 = new State(nfa, "state3");
		Set<State> states = new HashSet<State>();
		states.add(state1);
		states.add(state2);
		states.add(state3);

		Set<State> startStates = new HashSet<State>();
		startStates.add(state1);

		Set<State> finalStates = new HashSet<State>();
		finalStates.add(state3);

		nfa = new NFA(alphabet, states, startStates, finalStates);
		assertFalse(nfa.isRunning());
		assertEquals(states, nfa.getStates());
		assertEquals(startStates, nfa.getStartStates());
		// BUG FIX: previously asserted getFinalStates().size() == 0 (copied from the
		// 3-arg constructor test) even though finalStates was passed to the
		// constructor; the final states must equal the set that was supplied.
		assertEquals(finalStates, nfa.getFinalStates());

		// A final state that is not in the NFA's state set must be rejected.
		State state4 = new State(nfa, "state4");
		finalStates.add(state4);
		try {
			new NFA(alphabet, states, startStates, finalStates);
			fail();
		} catch (UnknownStateException e) {
			// expected: state4 is not a member of states
		}
	}

	@Test
	public void testNFANFA() {
		// Copy constructor: equal contents, but independent (defensively copied) sets.
		NFA nfa2 = new NFA(nfa);
		assertNotSame(nfa, nfa2);
		assertEquals(nfa.getStates(), nfa2.getStates());
		assertNotSame(nfa.getStates(), nfa2.getStates());
		assertEquals(nfa.getStartStates(), nfa2.getStartStates());
		assertNotSame(nfa.getStartStates(), nfa2.getStartStates());
		assertEquals(nfa.getFinalStates(), nfa2.getFinalStates());
		assertNotSame(nfa.getFinalStates(), nfa2.getFinalStates());
	}

	@Test
	public void testGetAlphabet() {
		assertEquals(alphabet, nfa.getAlphabet());
	}

	@Test
	public void testLetterInAlphabet() {
		assertTrue(nfa.letterInAlphabet('1'));
		assertTrue(nfa.letterInAlphabet('0'));
		assertFalse(nfa.letterInAlphabet('2'));
		// The epsilon marker is not itself a letter of the alphabet.
		assertFalse(nfa.letterInAlphabet(NFA.EMPTY_STR));
	}

	@Test
	public void testStringIsOverAlphabet() {
		assertTrue(nfa.stringIsOverAlphabet(""));
		assertTrue(nfa.stringIsOverAlphabet("0"));
		assertTrue(nfa.stringIsOverAlphabet("1"));
		assertTrue(nfa.stringIsOverAlphabet("10"));
		assertTrue(nfa.stringIsOverAlphabet("11"));
		assertTrue(nfa.stringIsOverAlphabet("101101"));
		assertTrue(nfa.stringIsOverAlphabet("110110101"));
		assertTrue(nfa.stringIsOverAlphabet("01"));
		assertTrue(nfa.stringIsOverAlphabet("00"));
		assertTrue(nfa.stringIsOverAlphabet("010010"));
		assertTrue(nfa.stringIsOverAlphabet("001001010"));
		assertFalse(nfa.stringIsOverAlphabet("2"));
		assertFalse(nfa.stringIsOverAlphabet("020"));
		assertFalse(nfa.stringIsOverAlphabet("01210"));
	}

	@Test
	public void testGetStates() {
		State state1 = new State(nfa, "state1");
		State state2 = new State(nfa, "state2");
		State state3 = new State(nfa, "state3");
		Set<State> states = new HashSet<State>();
		states.add(state1);
		states.add(state2);
		states.add(state3);

		nfa = new NFA(alphabet, states);
		Set<State> actual = nfa.getStates();
		// getStates must return an equal but independent copy of the state set.
		assertEquals(states, actual);
		assertNotSame(states, actual);
	}

	@Test
	public void testSetStates() {
		State state1 = new State(nfa, "state1");
		State state2 = new State(nfa, "state2");
		State state3 = new State(nfa, "state3");
		Set<State> states = new HashSet<State>();
		states.add(state1);
		states.add(state2);
		states.add(state3);

		nfa.setStates(states);
		// TODO: assert the states were actually stored (e.g. via getStates()).
	}

	@Test
	public void testHasState() {
		fail("Not yet implemented");
	}

	@Test
	public void testCheckStateIsValid() {
		fail("Not yet implemented");
	}

	@Test
	public void testNewStateStringMapOfCharacterSetOfState() {
		fail("Not yet implemented");
	}

	@Test
	public void testNewStateString() {
		fail("Not yet implemented");
	}

	@Test
	public void testRemoveState() {
		fail("Not yet implemented");
	}

	@Test
	public void testGetStartStates() {
		fail("Not yet implemented");
	}

	@Test
	public void testSetStartStates() {
		fail("Not yet implemented");
	}

	@Test
	public void testIsStartState() {
		fail("Not yet implemented");
	}

	@Test
	public void testAddStartState() {
		fail("Not yet implemented");
	}

	@Test
	public void testRemoveStartState() {
		fail("Not yet implemented");
	}

	@Test
	public void testGetFinalStates() {
		fail("Not yet implemented");
	}

	@Test
	public void testSetFinalStates() {
		fail("Not yet implemented");
	}

	@Test
	public void testIsFinalState() {
		fail("Not yet implemented");
	}

	@Test
	public void testAddFinalState() {
		fail("Not yet implemented");
	}

	@Test
	public void testRemoveFinalState() {
		fail("Not yet implemented");
	}

	@Test
	public void testEpsilonClosure() {
		fail("Not yet implemented");
	}

	@Test
	public void testAccepts() {
		fail("Not yet implemented");
	}

	@Test
	public void testIsRunning() {
		fail("Not yet implemented");
	}

	@Test
	public void testStart() {
		fail("Not yet implemented");
	}

	@Test
	public void testStop() {
		fail("Not yet implemented");
	}

	@Test
	public void testGetString() {
		fail("Not yet implemented");
	}

	@Test
	public void testGetPosition() {
		fail("Not yet implemented");
	}

	@Test
	public void testStep() {
		fail("Not yet implemented");
	}

	@Test
	public void testStepBack() {
		fail("Not yet implemented");
	}

	@Test
	public void testGoToStep() {
		fail("Not yet implemented");
	}

	@Test
	public void testGetCurrentStates() {
		fail("Not yet implemented");
	}

	@Test
	public void testCurrentlyOnFinalState() {
		fail("Not yet implemented");
	}
}
/*
 * Written by Doug Lea with assistance from members of JCP JSR-166
 * Expert Group and released to the public domain, as explained at
 * http://creativecommons.org/publicdomain/zero/1.0/
 *
 * Source: http://gee.cs.oswego.edu/cgi-bin/viewcvs.cgi/jsr166/src/jsr166e/Striped64.java?revision=1.10
 */

package io.prometheus.client;

import java.util.Random;

/**
 * A package-local class holding common representation and mechanics
 * for classes supporting dynamic striping on 64bit values. The class
 * extends Number so that concrete subclasses must publicly do so.
 */
abstract class Striped64 extends Number {
    /*
     * This class maintains a lazily-initialized table of atomically
     * updated variables, plus an extra "base" field. The table size
     * is a power of two. Indexing uses masked per-thread hash codes.
     * Nearly all declarations in this class are package-private,
     * accessed directly by subclasses.
     *
     * Table entries are of class Cell; a variant of AtomicLong padded
     * to reduce cache contention on most processors. Padding is
     * overkill for most Atomics because they are usually irregularly
     * scattered in memory and thus don't interfere much with each
     * other. But Atomic objects residing in arrays will tend to be
     * placed adjacent to each other, and so will most often share
     * cache lines (with a huge negative performance impact) without
     * this precaution.
     *
     * In part because Cells are relatively large, we avoid creating
     * them until they are needed. When there is no contention, all
     * updates are made to the base field. Upon first contention (a
     * failed CAS on base update), the table is initialized to size 2.
     * The table size is doubled upon further contention until
     * reaching the nearest power of two greater than or equal to the
     * number of CPUS. Table slots remain empty (null) until they are
     * needed.
     *
     * A single spinlock ("busy") is used for initializing and
     * resizing the table, as well as populating slots with new Cells.
     * There is no need for a blocking lock; when the lock is not
     * available, threads try other slots (or the base). During these
     * retries, there is increased contention and reduced locality,
     * which is still better than alternatives.
     *
     * Per-thread hash codes are initialized to random values.
     * Contention and/or table collisions are indicated by failed
     * CASes when performing an update operation (see method
     * retryUpdate). Upon a collision, if the table size is less than
     * the capacity, it is doubled in size unless some other thread
     * holds the lock. If a hashed slot is empty, and lock is
     * available, a new Cell is created. Otherwise, if the slot
     * exists, a CAS is tried. Retries proceed by "double hashing",
     * using a secondary hash (Marsaglia XorShift) to try to find a
     * free slot.
     *
     * The table size is capped because, when there are more threads
     * than CPUs, supposing that each thread were bound to a CPU,
     * there would exist a perfect hash function mapping threads to
     * slots that eliminates collisions. When we reach capacity, we
     * search for this mapping by randomly varying the hash codes of
     * colliding threads. Because search is random, and collisions
     * only become known via CAS failures, convergence can be slow,
     * and because threads are typically not bound to CPUS forever,
     * may not occur at all. However, despite these limitations,
     * observed contention rates are typically low in these cases.
     *
     * It is possible for a Cell to become unused when threads that
     * once hashed to it terminate, as well as in the case where
     * doubling the table causes no thread to hash to it under
     * expanded mask. We do not try to detect or remove such cells,
     * under the assumption that for long-running instances, observed
     * contention levels will recur, so the cells will eventually be
     * needed again; and for short-lived ones, it does not matter.
     */

    /**
     * Padded variant of AtomicLong supporting only raw accesses plus CAS.
     * The value field is placed between pads, hoping that the JVM doesn't
     * reorder them.
     *
     * JVM intrinsics note: It would be possible to use a release-only
     * form of CAS here, if it were provided.
     */
    static final class Cell {
        volatile long p0, p1, p2, p3, p4, p5, p6;
        volatile long value;
        volatile long q0, q1, q2, q3, q4, q5, q6;
        Cell(long x) { value = x; }

        final boolean cas(long cmp, long val) {
            return UNSAFE.compareAndSwapLong(this, valueOffset, cmp, val);
        }

        // Unsafe mechanics
        private static final sun.misc.Unsafe UNSAFE;
        private static final long valueOffset;
        static {
            try {
                UNSAFE = getUnsafe();
                Class<?> ak = Cell.class;
                valueOffset = UNSAFE.objectFieldOffset
                    (ak.getDeclaredField("value"));
            } catch (Exception e) {
                throw new Error(e);
            }
        }
    }

    /**
     * ThreadLocal holding a single-slot int array holding hash code.
     * Unlike the JDK8 version of this class, we use a suboptimal
     * int[] representation to avoid introducing a new type that can
     * impede class-unloading when ThreadLocals are not removed.
     */
    static final ThreadLocal<int[]> threadHashCode = new ThreadLocal<int[]>();

    /**
     * Generator of new random hash codes
     */
    static final Random rng = new Random();

    /** Number of CPUS, to place bound on table size */
    static final int NCPU = Runtime.getRuntime().availableProcessors();

    /**
     * Table of cells. When non-null, size is a power of 2.
     */
    transient volatile Cell[] cells;

    /**
     * Base value, used mainly when there is no contention, but also as
     * a fallback during table initialization races. Updated via CAS.
     */
    transient volatile long base;

    /**
     * Spinlock (locked via CAS) used when resizing and/or creating Cells.
     */
    transient volatile int busy;

    /**
     * Package-private default constructor
     */
    Striped64() {
    }

    /**
     * CASes the base field.
     */
    final boolean casBase(long cmp, long val) {
        return UNSAFE.compareAndSwapLong(this, baseOffset, cmp, val);
    }

    /**
     * CASes the busy field from 0 to 1 to acquire lock.
     */
    final boolean casBusy() {
        return UNSAFE.compareAndSwapInt(this, busyOffset, 0, 1);
    }

    /**
     * Computes the function of current and new value. Subclasses
     * should open-code this update function for most uses, but the
     * virtualized form is needed within retryUpdate.
     *
     * @param currentValue the current value (of either base or a cell)
     * @param newValue the argument from a user update call
     * @return result of the update function
     */
    abstract long fn(long currentValue, long newValue);

    /**
     * Handles cases of updates involving initialization, resizing,
     * creating new Cells, and/or contention. See above for
     * explanation. This method suffers the usual non-modularity
     * problems of optimistic retry code, relying on rechecked sets of
     * reads.
     *
     * @param x the value
     * @param hc the hash code holder
     * @param wasUncontended false if CAS failed before call
     */
    final void retryUpdate(long x, int[] hc, boolean wasUncontended) {
        int h;
        if (hc == null) {
            threadHashCode.set(hc = new int[1]); // Initialize randomly
            int r = rng.nextInt(); // Avoid zero to allow xorShift rehash
            h = hc[0] = (r == 0) ? 1 : r;
        }
        else
            h = hc[0];
        boolean collide = false;                // True if last slot nonempty
        for (;;) {
            Cell[] as; Cell a; int n; long v;
            if ((as = cells) != null && (n = as.length) > 0) {
                if ((a = as[(n - 1) & h]) == null) {
                    if (busy == 0) {            // Try to attach new Cell
                        Cell r = new Cell(x);   // Optimistically create
                        if (busy == 0 && casBusy()) {
                            boolean created = false;
                            try {               // Recheck under lock
                                Cell[] rs; int m, j;
                                if ((rs = cells) != null &&
                                    (m = rs.length) > 0 &&
                                    rs[j = (m - 1) & h] == null) {
                                    rs[j] = r;
                                    created = true;
                                }
                            } finally {
                                busy = 0;
                            }
                            if (created)
                                break;
                            continue;           // Slot is now non-empty
                        }
                    }
                    collide = false;
                }
                else if (!wasUncontended)       // CAS already known to fail
                    wasUncontended = true;      // Continue after rehash
                else if (a.cas(v = a.value, fn(v, x)))
                    break;
                else if (n >= NCPU || cells != as)
                    collide = false;            // At max size or stale
                else if (!collide)
                    collide = true;
                else if (busy == 0 && casBusy()) {
                    try {
                        if (cells == as) {      // Expand table unless stale
                            Cell[] rs = new Cell[n << 1];
                            for (int i = 0; i < n; ++i)
                                rs[i] = as[i];
                            cells = rs;
                        }
                    } finally {
                        busy = 0;
                    }
                    collide = false;
                    continue;                   // Retry with expanded table
                }
                h ^= h << 13;                   // Rehash
                h ^= h >>> 17;
                h ^= h << 5;
                hc[0] = h;                      // Record index for next time
            }
            else if (busy == 0 && cells == as && casBusy()) {
                boolean init = false;
                try {                           // Initialize table
                    if (cells == as) {
                        Cell[] rs = new Cell[2];
                        rs[h & 1] = new Cell(x);
                        cells = rs;
                        init = true;
                    }
                } finally {
                    busy = 0;
                }
                if (init)
                    break;
            }
            else if (casBase(v = base, fn(v, x)))
                break;                          // Fall back on using base
        }
    }

    /**
     * Sets base and all cells to the given value.
     *
     * NOTE(review): this writes base and each cell without holding the busy
     * lock, so a reset that races with concurrent updates may miss some of
     * them — presumably acceptable for the intended use; confirm with callers.
     */
    final void internalReset(long initialValue) {
        Cell[] as = cells;
        base = initialValue;
        if (as != null) {
            int n = as.length;
            for (int i = 0; i < n; ++i) {
                Cell a = as[i];
                if (a != null)
                    a.value = initialValue;
            }
        }
    }

    // Unsafe mechanics
    private static final sun.misc.Unsafe UNSAFE;
    private static final long baseOffset;
    private static final long busyOffset;
    static {
        try {
            UNSAFE = getUnsafe();
            Class<?> sk = Striped64.class;
            baseOffset = UNSAFE.objectFieldOffset
                (sk.getDeclaredField("base"));
            busyOffset = UNSAFE.objectFieldOffset
                (sk.getDeclaredField("busy"));
        } catch (Exception e) {
            throw new Error(e);
        }
    }

    /**
     * Returns a sun.misc.Unsafe.  Suitable for use in a 3rd party package.
     * Replace with a simple call to Unsafe.getUnsafe when integrating
     * into a jdk.
     *
     * @return a sun.misc.Unsafe
     */
    private static sun.misc.Unsafe getUnsafe() {
        try {
            return sun.misc.Unsafe.getUnsafe();
        } catch (SecurityException tryReflectionInstead) {}
        try {
            return java.security.AccessController.doPrivileged
            (new java.security.PrivilegedExceptionAction<sun.misc.Unsafe>() {
                public sun.misc.Unsafe run() throws Exception {
                    Class<sun.misc.Unsafe> k = sun.misc.Unsafe.class;
                    for (java.lang.reflect.Field f : k.getDeclaredFields()) {
                        f.setAccessible(true);
                        Object x = f.get(null);
                        if (k.isInstance(x))
                            return k.cast(x);
                    }
                    throw new NoSuchFieldError("the Unsafe");
                }});
        } catch (java.security.PrivilegedActionException e) {
            throw new RuntimeException("Could not initialize intrinsics",
                                       e.getCause());
        }
    }
}
package invtweaks;

import invtweaks.integration.ItemListSorter;
import net.minecraft.client.Minecraft;
import net.minecraft.client.resources.I18n;
import net.minecraft.util.ResourceLocation;
import org.apache.commons.io.FileUtils;
import org.apache.logging.log4j.Logger;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.util.List;

/**
 * Handles the (re)loading of the configuration, and all that is related to file extraction/moves.
 *
 * @author Jimeo Wan
 */
@SuppressWarnings("ResultOfMethodCallIgnored") // Lots of these from file creation
public class InvTweaksConfigManager {

    private static final Logger log = InvTweaks.log;

    private final Minecraft mc;

    /**
     * The mod's configuration.
     */
    @Nullable
    private InvTweaksConfig config = null;
    // Aggregate last-modified fingerprint of the config files at the time they were loaded.
    private long storedConfigLastModified = 0;
    @Nullable
    private InvTweaksHandlerAutoRefill autoRefillHandler = null;
    @Nullable
    private InvTweaksHandlerShortcuts shortcutsHandler = null;

    public InvTweaksConfigManager(Minecraft mc_) {
        mc = mc_;
    }

    /**
     * Computes an aggregate "last modified" fingerprint over the rules file, the tree
     * file and every *.tree file in the trees directory. A change to any of these files
     * changes the returned value, which is used to detect on-disk edits.
     */
    private static long computeConfigLastModified() {
        long sum = Long.MIN_VALUE;
        if(InvTweaksConst.INVTWEAKS_TREES_DIR.exists()) {
            File[] treeFiles = InvTweaksConst.INVTWEAKS_TREES_DIR.listFiles();
            // listFiles() returns null on I/O error — guard against NPE.
            if(treeFiles != null) {
                for(File tree : treeFiles) {
                    //Make sure it is the type of file we want.
                    if(tree.getName().endsWith(".tree")) {
                        sum += tree.lastModified();
                    }
                }
            }
        }

        return sum + InvTweaksConst.CONFIG_RULES_FILE.lastModified() +
                InvTweaksConst.CONFIG_TREE_FILE.lastModified();
    }

    /**
     * Renames the given file to "&lt;name&gt;.bak" in the same directory, deleting any
     * previous backup of the same name first.
     */
    private static void backupFile(@NotNull File file) {
        @NotNull File newFile = new File(file.getParentFile(), file.getName() + ".bak");
        // Log4j 2 substitutes {} placeholders; the previous %1$s/%2$s tokens were
        // never expanded and the paths did not appear in the log output.
        log.warn("Backing up file: {} to {}", file.getAbsolutePath(), newFile.getAbsolutePath());
        if(newFile.exists()) {
            log.warn("New file {} already exists, deleting old.", newFile.getAbsolutePath());
            newFile.delete();
        }

        file.renameTo(newFile);
    }

    /**
     * Reports any invalid sorting keywords found in the loaded configuration to the player.
     */
    private static void showConfigErrors(@NotNull @SuppressWarnings("ParameterHidesMemberVariable") InvTweaksConfig config) {
        List<String> invalid = config.getInvalidKeywords();
        if(!invalid.isEmpty()) {
            // Build the message with a StringBuilder instead of String concatenation
            // in a loop; also reuse the list instead of calling getInvalidKeywords() twice.
            StringBuilder error = new StringBuilder(I18n.format("invtweaks.loadconfig.invalidkeywords")).append(": ");
            for(String keyword : invalid) {
                error.append(keyword).append(" ");
            }
            InvTweaks.logInGameStatic(error.toString());
        }
    }

    // TODO Only reload modified file(s)

    /**
     * Ensures the configuration is loaded and up to date with the files on disk,
     * reloading it when any config file changed since the last load.
     *
     * @return true if a valid configuration is available after the call
     */
    public boolean makeSureConfigurationIsLoaded() {
        // Load properties
        try {
            if(config != null && config.refreshProperties()) {
                shortcutsHandler = new InvTweaksHandlerShortcuts(mc, config);
                if(config.getProperty(InvTweaksConfig.PROP_ENABLE_CONFIG_LOADED_MESSAGE).equals(InvTweaksConfig.VALUE_TRUE)) {
                    InvTweaks.logInGameStatic("invtweaks.propsfile.loaded");
                }
            }
        } catch(IOException e) {
            InvTweaks.logInGameErrorStatic("invtweaks.loadconfig.refresh.error", e);
        }

        // Load rules + tree files
        long configLastModified = computeConfigLastModified();
        if(config != null) {
            // Check time of last edit for both configuration files.
            return storedConfigLastModified == configLastModified || loadConfig();
        } else {
            storedConfigLastModified = configLastModified;
            return loadConfig();
        }
    }

    @Nullable
    public InvTweaksConfig getConfig() {
        return config;
    }

    @Nullable
    public InvTweaksHandlerAutoRefill getAutoRefillHandler() {
        return autoRefillHandler;
    }

    @Nullable
    public InvTweaksHandlerShortcuts getShortcutsHandler() {
        return shortcutsHandler;
    }

    /**
     * Tries to load mod configuration from file, with error handling. If it fails, the config attribute will remain
     * null.
     */
    private boolean loadConfig() {
        // Ensure the config folder exists
        @NotNull File configDir = InvTweaksConst.MINECRAFT_CONFIG_DIR;
        if(!configDir.exists()) {
            configDir.mkdir();
        }

        //Create the Config file directory.
        if(!InvTweaksConst.INVTWEAKS_CONFIG_DIR.exists()) {
            InvTweaksConst.INVTWEAKS_CONFIG_DIR.mkdir();
        }

        if(!InvTweaksConst.INVTWEAKS_TREES_DIR.exists()) {
            if(InvTweaksConst.INVTWEAKS_TREES_DIR.mkdir()) {
                extractFile(new ResourceLocation(InvTweaksConst.INVTWEAKS_RESOURCE_DOMAIN, "tree_readme.txt"),
                        new File(InvTweaksConst.INVTWEAKS_TREES_DIR, "readme.txt"));
            }
        }

        // Compatibility: Tree version check
        try {
            if(!(InvTweaksItemTreeLoader.isValidVersion(InvTweaksConst.CONFIG_TREE_FILE))) {
                backupFile(InvTweaksConst.CONFIG_TREE_FILE);
            }
        } catch(Exception e) {
            log.warn("Failed to check item tree version: " + e.getMessage());
        }

        // Compatibility: File names check
        if(InvTweaksConst.OLD_CONFIG_TREE_FILE.exists()) {
            if(InvTweaksConst.CONFIG_RULES_FILE.exists()) {
                backupFile(InvTweaksConst.CONFIG_TREE_FILE);
            }
            InvTweaksConst.OLD_CONFIG_TREE_FILE.renameTo(InvTweaksConst.CONFIG_TREE_FILE);
        } else if(InvTweaksConst.OLDER_CONFIG_RULES_FILE.exists()) {
            if(InvTweaksConst.CONFIG_RULES_FILE.exists()) {
                backupFile(InvTweaksConst.CONFIG_RULES_FILE);
            }
            InvTweaksConst.OLDER_CONFIG_RULES_FILE.renameTo(InvTweaksConst.CONFIG_RULES_FILE);
        }

        // Create missing files
        if(!InvTweaksConst.CONFIG_RULES_FILE.exists() && extractFile(InvTweaksConst.DEFAULT_CONFIG_FILE,
                InvTweaksConst.CONFIG_RULES_FILE)) {
            InvTweaks.logInGameStatic(
                    InvTweaksConst.CONFIG_RULES_FILE + " " + I18n.format("invtweaks.loadconfig.filemissing"));
        }
        if(!InvTweaksConst.CONFIG_TREE_FILE.exists() && extractFile(InvTweaksConst.DEFAULT_CONFIG_TREE_FILE,
                InvTweaksConst.CONFIG_TREE_FILE)) {
            InvTweaks.logInGameStatic(
                    InvTweaksConst.CONFIG_TREE_FILE + " " + I18n.format("invtweaks.loadconfig.filemissing"));
        }

        boolean treeBuilt = false;
        if(InvTweaksConst.INVTWEAKS_TREES_DIR.exists()) {
            treeBuilt = InvTweaksItemTreeBuilder.buildNewTree();
        }

        storedConfigLastModified = computeConfigLastModified();

        // Load
        @Nullable String error = null;
        @Nullable Exception errorException = null;

        try {
            // Configuration creation
            if(config == null) {
                // Use short-circuit && (the old non-short-circuit & still evaluated
                // the exists() call even when no tree had been built).
                if(treeBuilt && InvTweaksConst.MERGED_TREE_FILE.exists()) {
                    config = new InvTweaksConfig(InvTweaksConst.CONFIG_RULES_FILE, InvTweaksConst.MERGED_TREE_FILE);
                } else if(treeBuilt && InvTweaksConst.MERGED_TREE_FILE_ALT.exists()) {
                    config = new InvTweaksConfig(InvTweaksConst.CONFIG_RULES_FILE, InvTweaksConst.MERGED_TREE_FILE_ALT);
                } else {
                    config = new InvTweaksConfig(InvTweaksConst.CONFIG_RULES_FILE, InvTweaksConst.CONFIG_TREE_FILE);
                }
                autoRefillHandler = new InvTweaksHandlerAutoRefill(mc, config);
                shortcutsHandler = new InvTweaksHandlerShortcuts(mc, config);
            }

            // Configuration loading
            config.load();
            shortcutsHandler.loadShortcuts();

            if(config.getProperty(InvTweaksConfig.PROP_ENABLE_CONFIG_LOADED_MESSAGE).equals(InvTweaksConfig.VALUE_TRUE)) {
                InvTweaks.logInGameStatic("invtweaks.loadconfig.done");
            }
            showConfigErrors(config);

            ItemListSorter.ReloadItemList();
        } catch(FileNotFoundException e) {
            error = "Config file not found";
            errorException = e;
        } catch(Exception e) {
            error = "Error while loading config";
            errorException = e;
        }

        if(error != null) {
            log.error(error);
            InvTweaks.logInGameErrorStatic(error, errorException);
            try {
                // TODO: Refactor this so I'm not just copying the code from above.
                // The purpose of this is to try to deal with any errors in their config files
                // Because things crash if config is null
                backupFile(InvTweaksConst.CONFIG_TREE_FILE);
                backupFile(InvTweaksConst.CONFIG_RULES_FILE);
                backupFile(InvTweaksConst.CONFIG_PROPS_FILE);

                //Intentionally not trying to use the merged file.
                extractFile(InvTweaksConst.DEFAULT_CONFIG_FILE, InvTweaksConst.CONFIG_RULES_FILE);
                extractFile(InvTweaksConst.DEFAULT_CONFIG_TREE_FILE, InvTweaksConst.CONFIG_TREE_FILE);

                config = new InvTweaksConfig(InvTweaksConst.CONFIG_RULES_FILE, InvTweaksConst.CONFIG_TREE_FILE);
                autoRefillHandler = new InvTweaksHandlerAutoRefill(mc, config);
                shortcutsHandler = new InvTweaksHandlerShortcuts(mc, config);

                config.load();
                shortcutsHandler.loadShortcuts();
                ItemListSorter.ReloadItemList();
            } catch(Exception e) {
                // But if this fails too there's not much point in trying again
                config = null;
                autoRefillHandler = null;
                shortcutsHandler = null;
                if(e.getCause() == null) {
                    e.initCause(errorException);
                }
                throw new Error("InvTweaks config load failed", e);
            }
            return false;
        } else {
            return true;
        }
    }

    /**
     * Extracts a bundled resource to the given destination file.
     *
     * @return true on success, false (with an in-game error message) on failure
     */
    private boolean extractFile(@NotNull ResourceLocation resource, @NotNull File destination) {
        try(@NotNull InputStream input = mc.getResourceManager().getResource(resource).getInputStream()) {
            try {
                FileUtils.copyInputStreamToFile(input, destination);
                return true;
            } catch(IOException e) {
                InvTweaks.logInGameStatic("[16] The mod won't work, because " + destination + " creation failed!");
                log.error("Cannot create " + destination + " file: " + e.getMessage());
                return false;
            }
        } catch(IOException e) {
            InvTweaks.logInGameStatic("[15] The mod won't work, because " + resource + " extraction failed!");
            log.error("Cannot extract " + resource + " file: " + e.getMessage());
            return false;
        }
    }
}
/* * The MIT License (MIT) * * Copyright (c) 2007-2015 Broad Institute * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ /* * AttributeManager.java * * Everything to do with attributes. 
*/ package org.broad.igv.track; import htsjdk.tribble.readers.AsciiLineReader; import org.broad.igv.Globals; import org.broad.igv.exceptions.DataLoadException; import org.broad.igv.logging.LogManager; import org.broad.igv.logging.Logger; import org.broad.igv.prefs.Constants; import org.broad.igv.prefs.PreferencesManager; import org.broad.igv.renderer.AbstractColorScale; import org.broad.igv.renderer.ContinuousColorScale; import org.broad.igv.renderer.MonocolorScale; import org.broad.igv.ui.IGV; import org.broad.igv.ui.color.ColorPalette; import org.broad.igv.ui.color.ColorTable; import org.broad.igv.ui.color.ColorUtilities; import org.broad.igv.ui.color.PaletteColorTable; import org.broad.igv.util.FileUtils; import org.broad.igv.util.ParsingUtils; import org.broad.igv.util.ResourceLocator; import org.broad.igv.util.Utilities; import java.awt.*; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; import java.beans.PropertyChangeSupport; import java.io.BufferedReader; import java.io.IOException; import java.util.List; import java.util.*; /** * @author jrobinso */ public class AttributeManager { public static final String GROUP_AUTOSCALE = "AUTOSCALE GROUP"; public static List<String> defaultTrackAttributes = Arrays.asList(Globals.TRACK_NAME_ATTRIBUTE, Globals.TRACK_DATA_FILE_ATTRIBUTE, Globals.TRACK_DATA_TYPE_ATTRIBUTE); private static Logger log = LogManager.getLogger(AttributeManager.class); private static AttributeManager singleton; final public static String ATTRIBUTES_LOADED_PROPERTY = "ATTRIBUTES_LOADED_PROPERTY"; final public static String ATTRIBUTES_NARROWED_PROPERTY = "ATTRIBUTES_NARROWED_PROPERTY"; private PropertyChangeSupport propertyChangeSupport; /** * The set of currently loaded attribute resource files */ Set<ResourceLocator> loadedResources = Collections.synchronizedSet(new HashSet()); /** * Sample table. Key is sample name, identifying a "row" in the table. Value is a map of column name / value * pairs. (e.g. 
{TCGA-001 -> { (gender->male), (treated -> true), etc}} */ Map<String, Map<String, String>> attributeMap = Collections.synchronizedMap(new LinkedHashMap()); /** * Map of track id -> sample name. */ Map<String, String> trackSampleMappings = Collections.synchronizedMap(new HashMap<String, String>()); /** * List of attribute names. The list * is kept so the keys may be fetched in the order they were added. */ Map<String, String> attributeNames = Collections.synchronizedMap(new LinkedHashMap()); /** * Column meta data (column == attributeKey). */ Map<String, ColumnMetaData> columnMetaData = Collections.synchronizedMap(new HashMap()); /** * The complete set of unique attribute values per attribute key. This is useful in * assigning unique colors */ Map<String, Set<String>> uniqueAttributeValues = Collections.synchronizedMap(new HashMap()); /** * Maps symbolic (discrete) attribute values to colors. Key is a composite of attribute name and value */ Map<String, Color> colorMap = Collections.synchronizedMap(new HashMap()); /** * Map of attribute column name -> color scale. For numeric columns. */ Map<String, AbstractColorScale> colorScales = new HashMap(); Map<String, ColorTable> colorTables = new HashMap<String, ColorTable>(); private AttributeManager() { propertyChangeSupport = new PropertyChangeSupport(this); // The default attributes addAttributeName("NAME"); addAttributeName("DATA TYPE"); addAttributeName("DATA FILE"); } static synchronized public AttributeManager getInstance() { if (singleton == null) { singleton = new AttributeManager(); } return singleton; } public void addPropertyChangeListener(PropertyChangeListener listener) { propertyChangeSupport.addPropertyChangeListener(listener); } public void removePropertyChangeListener(PropertyChangeListener listener) { propertyChangeSupport.removePropertyChangeListener(listener); } /** * Return the attribute value for the given track (trackName) and key. 
*/
public String getAttribute(String trackName, String attributeName) {
    // Attribute keys are stored upper-cased; normalize the lookup key.
    Map<String, String> attributes = attributeMap.get(trackName);
    String key = attributeName.toUpperCase();
    String value = attributes == null ? null : attributes.get(key);
    // Fall back to the sample-level attributes when the track itself has no value
    // and the track is mapped to a sample.
    if (value == null && trackSampleMappings.containsKey(trackName)) {
        final String sample = trackSampleMappings.get(trackName);
        attributes = attributeMap.get(sample);
        if (attributes != null) {
            value = attributes.get(key);
        }
    }
    return value;
}

/**
 * Return the list of attribute names (keys) in the order they should
 * be displayed. (attributeNames is a LinkedHashMap, so insertion order is kept.)
 */
public List<String> getAttributeNames() {
    ArrayList<String> attNames = new ArrayList<String>(attributeNames.values());
    return attNames;
}

/**
 * Return true if the associated column contains all numeric values
 * (as tracked by the column's ColumnMetaData; false if the column is unknown).
 */
public boolean isNumeric(String attributeName) {
    String key = attributeName.toUpperCase();
    ColumnMetaData metaData = columnMetaData.get(key);
    return metaData != null && metaData.isNumeric();
}

/**
 * Return all attributes, except those that have been "hidden" in the attribute panel
 * TODO -- don't compute this every time (or at least profile to see if this is a problem).
* * @return */ public List<String> getVisibleAttributes() { List<String> visibleAttributes = getAttributeNames(); if (visibleAttributes == null) { Collections.emptyList(); } final Set<String> hiddenAttributes = IGV.getInstance().getSession().getHiddenAttributes(); if (hiddenAttributes != null) { visibleAttributes.removeAll(hiddenAttributes); } return visibleAttributes; } public void clearAllAttributes() { attributeMap.clear(); attributeNames.clear(); // The default attributes addAttributeName("NAME"); addAttributeName("DATA TYPE"); addAttributeName("DATA FILE"); uniqueAttributeValues.clear(); //hiddenAttributes.clear(); loadedResources = new HashSet(); } /** * Set an attribute value * * @param rowId -- track or sample identifier * @param attributeName * @param attributeValue */ public void addAttribute(String rowId, String attributeName, String attributeValue) { if (attributeValue == null || attributeValue.equals("")) { return; } // Add the 3 "special" attributes to ensure they are the first columns if (attributeNames.isEmpty()) { addAttributeName("NAME"); addAttributeName("DATA TYPE"); addAttributeName("DATA FILE"); } addAttributeName(attributeName); String key = attributeName.toUpperCase(); Set<String> uniqueSet = uniqueAttributeValues.get(key); if (uniqueSet == null) { uniqueSet = new HashSet<String>(); uniqueAttributeValues.put(key, uniqueSet); } uniqueSet.add(attributeValue); Map attributes = attributeMap.get(rowId); if (attributes == null) { attributes = new LinkedHashMap(); attributeMap.put(rowId, attributes); } // attributeKey = column header, attributeValue = value for header // and track name (trackIdentifier) row intersection attributes.put(key, attributeValue); updateMetaData(key, attributeValue); } public void removeAttribute(String rowId, String attributeName) { Map attributes = attributeMap.get(rowId); if (attributes != null) { attributes.remove(attributeName.toUpperCase()); } } private void addAttributeName(String name) { String key = 
name.toUpperCase(); if (!attributeNames.containsKey(key) && !name.startsWith("#")) { attributeNames.put(key, name); } } /** * Update the column meta data associated with the attribute key. * <p/> * Note: Currently the meta data only records if the column is numeric. * * @param attributeName * @param attributeValue */ private void updateMetaData(String attributeName, String attributeValue) { String key = attributeName.toUpperCase(); ColumnMetaData metaData = columnMetaData.get(key); if (metaData == null) { metaData = new ColumnMetaData(key); columnMetaData.put(key, metaData); } metaData.updateMetrics(attributeValue); } /** * Test to see if this file could be a sample information file. Some characteristics are (1) is tab delimited * with at least 2 columns * * @return */ public static boolean isSampleInfoFile(BufferedReader reader) throws IOException { return FileUtils.isTabDelimited(reader, 2); } /** * Load attributes from an ascii file in "Sample Info" format. */ public void loadSampleInfo(ResourceLocator locator) { AsciiLineReader reader = null; try { reader = ParsingUtils.openAsciiReader(locator); loadSampleTable(reader, locator.getPath()); loadedResources.add(locator); if (!Globals.isHeadless()) { IGV.getInstance().resetOverlayTracks(); IGV.getInstance().repaint(); } } catch (IOException ex) { log.error("Error loading attribute file", ex); throw new DataLoadException("Error reading attribute file", locator.getPath()); } finally { if (reader != null) { reader.close(); } firePropertyChange(this, ATTRIBUTES_LOADED_PROPERTY, null, null); } } static Set<String> nonGroupable = new HashSet<String>(Arrays.asList("DATA FILE", "DATA TYPE", "VITALSTATUS", "VITAL STATUS", "KARNSCORE", "CENSURED")); public List<String> getGroupableAttributes() { List<String> seriesNames = new ArrayList<String>(); for (Map.Entry<String, Set<String>> entry : uniqueAttributeValues.entrySet()) { int cnt = entry.getValue().size(); String att = entry.getKey(); if (cnt > 1 && cnt < 10 && 
!nonGroupable.contains(att)) { seriesNames.add(att); } } return seriesNames; } /** * Load sample table, which might optionally have 3 sections * #sampletable (default) * #samplemapping (track id -> sample mapping table) * #colors (color table) */ private void loadSampleTable(AsciiLineReader reader, String path) throws IOException { String[] colHeadings = null; List<String> sections = Arrays.asList("#sampletable", "#samplemapping", "#colors"); boolean foundAttributes = false; int nLines = 0; int lineLimit = 100000; String nextLine; String section = "#sampletable"; while ((nextLine = reader.readLine()) != null) { if (nLines++ > lineLimit) { break; } if (nextLine.toLowerCase().startsWith("#")) { String tmp = nextLine.toLowerCase().trim(); if (sections.contains(tmp)) { section = tmp; } continue; } String[] tokens = nextLine.split("\t"); if (section.equals("#sampletable")) { if (tokens.length >= 2) { if (colHeadings == null) { colHeadings = tokens; } else { String sampleName = tokens[0].trim(); // Loop through attribute columns //List<Attribute> attributes = new ArrayList(colHeadings.length); for (int i = 0; i < colHeadings.length; i++) { String attributeName = colHeadings[i].trim(); String attributeValue = (i < tokens.length ? tokens[i].trim() : ""); addAttribute(sampleName, attributeName, attributeValue); foundAttributes = true; } } } } else if (section.equals("#samplemapping")) { foundAttributes = true; if (tokens.length < 2) { continue; } String track = tokens[0]; String sample = tokens[1]; trackSampleMappings.put(track, sample); } else if (section.equals("#colors")) { foundAttributes = true; parseColors(tokens); } } if (!foundAttributes) { throw new DataLoadException("Could not determine file type. Does file have proper extension? 
", path); } } private void parseColors(String[] tokens) throws IOException { if (tokens.length >= 3) { String attKey = tokens[0].toUpperCase(); if (isNumeric(attKey)) { ColumnMetaData metaData = columnMetaData.get(attKey); String rangeString = tokens[1].trim(); float min = (float) metaData.min; float max = (float) metaData.max; if (!rangeString.equals("*") && rangeString.length() > 0) { String[] tmp = rangeString.split(":"); if (tmp.length > 1) { try { min = Float.parseFloat(tmp[0]); max = Float.parseFloat(tmp[1]); } catch (NumberFormatException e) { log.error("Error parsing range string: " + rangeString, e); } } } AbstractColorScale scale = null; if (tokens.length == 3) { Color baseColor = ColorUtilities.stringToColor(tokens[2]); scale = new MonocolorScale(min, max, baseColor); colorScales.put(attKey, scale); } else { Color color1 = ColorUtilities.stringToColor(tokens[2]); Color color2 = ColorUtilities.stringToColor(tokens[3]); if (min < 0) { scale = new ContinuousColorScale(min, 0, max, color1, Color.white, color2); } else { scale = new ContinuousColorScale(min, max, color1, color2); } } colorScales.put(attKey, scale); } else { String attValue = tokens[1]; Color color = ColorUtilities.stringToColor(tokens[2]); String key = (attKey + "_" + attValue).toUpperCase(); colorMap.put(key, color); } } } public void firePropertyChange(Object source, String propertyName, Object oldValue, Object newValue) { PropertyChangeEvent event = new PropertyChangeEvent( source, propertyName, oldValue, newValue); propertyChangeSupport.firePropertyChange(event); } public Comparator getAttributeComparator() { return Utilities.getNumericStringComparator(); } /** * @return set of curently loaded resources */ public Set<ResourceLocator> getLoadedResources() { return loadedResources; } public String getSampleFor(String track) { if (trackSampleMappings.containsKey(track)) { return trackSampleMappings.get(track); } else if (isTCGAName(track)) { String sample = track.substring(0, 12); 
addAttribute(track, "Sample", sample); trackSampleMappings.put(track, sample); return sample; } else { String key = PreferencesManager.getPreferences().get(Constants.OVERLAY_ATTRIBUTE_KEY); return key == null ? null : getAttribute(track, key); } } // TCGA identifers have the form TCGA-00-0000 public static boolean isTCGAName(String name) { return name.length() >= 12 && name.toUpperCase().startsWith("TCGA-") && name.charAt(7) == '-'; } public Color getColor(String attKey, String attValue) { if (attValue == null || attValue.length() == 0) { return Color.gray; } final ColumnMetaData metaData = columnMetaData.get(attKey.toUpperCase()); if (metaData == null) { return Color.gray; } if (metaData.isNumeric()) { AbstractColorScale cs = colorScales.get(attKey); { if (cs == null) { // Create color scale based loosely on Brewer diverging / sequential palletes // TODO -- use actual brewer palletes if # of values < 8 if (metaData.isDiverging()) { // reg-blue diverging Color minColor = new Color(198, 219, 239); Color midColor = Color.white; Color maxColor = new Color(33, 102, 172); cs = new ContinuousColorScale(metaData.getMin(), 0, metaData.getMax(), minColor, midColor, maxColor); colorScales.put(attKey, cs); } else { // Blues scale Color minColor = new Color(198, 219, 239); Color maxColor = new Color(8, 69, 148); cs = new ContinuousColorScale(metaData.getMin(), metaData.getMax(), minColor, maxColor); colorScales.put(attKey, cs); } } try { float x = Float.parseFloat(attValue); return cs.getColor(x); } catch (NumberFormatException e) { return Color.lightGray; } } } // Look for color in pre-loaded color map String key = (attKey + "_" + attValue).toUpperCase(); Color c = colorMap.get(key); if (c == null) { key = ("*_" + attValue).toUpperCase(); c = colorMap.get(key); if (c == null) { key = (attValue + "_*").toUpperCase(); c = colorMap.get(key); } } // Get color from palette if (c == null) { // Measure of "information content" added by using color, very crude //boolean useColor = 
(metaData.getUniqueCount() < 10 || metaData.getUniqueRatio() <= 0.5) && // !(attKey.equals("NAME") || attKey.equals("DATA FILE") || attKey.equals("DATA TYPE")); boolean useColor = true; if (useColor) { ColorTable ct = colorTables.get(attKey); if (ct == null) { ColorPalette palette = ColorUtilities.getNextPalette(); ct = new PaletteColorTable(palette); colorTables.put(attKey, ct); } c = ct.get(attValue); } else { c = ColorUtilities.randomDesaturatedColor(0.5f); colorMap.put(key, c); } } return c; } public ColumnMetaData getColumnMetaData(String key) { return columnMetaData.get(key.toUpperCase()); } public static class ColumnMetaData { String name; private double min = Double.MAX_VALUE; private double max = -min; int totalCount = 0; public HashSet<String> uniqueAlphaValues = new HashSet<String>(); public HashSet<String> uniqueNumericValues = new HashSet<String>(); ColumnMetaData(String name) { this.name = name; } public void updateMetrics(String attributeValue) { totalCount++; // Test if data is numeric. Skip null and blank values if (attributeValue != null && attributeValue.length() > 0) { try { double value = Double.parseDouble(attributeValue); uniqueNumericValues.add(attributeValue); min = Math.min(min, value); max = Math.max(max, value); } catch (NumberFormatException e) { uniqueAlphaValues.add(attributeValue); } } } /** * A column is considered numeric if it has at least 2 numeric values, and * no more than 1 non-numeric value. * * @return */ public boolean isNumeric() { return uniqueNumericValues.size() > 1 && uniqueAlphaValues.size() < 2; } public boolean isDiverging() { return min < 0; } public double getMin() { return min; } public double getMax() { return max; } public double getUniqueRatio() { double totalUnique = uniqueAlphaValues.size() + uniqueNumericValues.size(); return totalUnique / totalCount; } public int getUniqueCount() { return uniqueAlphaValues.size() + uniqueNumericValues.size(); } public int getTotalCount() { return totalCount; } } }
/* * * * Copyright 2010-2016 OrientDB LTD (http://orientdb.com) * * * * Licensed under the Apache License, Version 2.0 (the "License"); * * you may not use this file except in compliance with the License. * * You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * * limitations under the License. * * * * For more information: http://orientdb.com * */ package com.orientechnologies.orient.core.metadata.schema; import com.orientechnologies.common.listener.OProgressListener; import com.orientechnologies.orient.core.db.ODatabaseDocumentInternal; import com.orientechnologies.orient.core.db.ODatabaseRecordThreadLocal; import com.orientechnologies.orient.core.db.record.OClassTrigger; import com.orientechnologies.orient.core.index.OIndex; import com.orientechnologies.orient.core.index.OIndexManagerAbstract; import com.orientechnologies.orient.core.metadata.function.OFunctionLibraryImpl; import com.orientechnologies.orient.core.metadata.schema.clusterselection.OClusterSelectionStrategy; import com.orientechnologies.orient.core.metadata.security.ORole; import com.orientechnologies.orient.core.metadata.security.OSecurityShared; import com.orientechnologies.orient.core.metadata.security.OUser; import com.orientechnologies.orient.core.metadata.sequence.OSequence; import com.orientechnologies.orient.core.record.impl.ODocument; import com.orientechnologies.orient.core.schedule.OScheduledEvent; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; /** * 
@author Andrey Lomakin (a.lomakin-at-orientdb.com) * @since 10/21/14 */ public class OImmutableClass implements OClass { /** use OClass.EDGE_CLASS_NAME instead */ @Deprecated public static final String EDGE_CLASS_NAME = OClass.EDGE_CLASS_NAME; /** use OClass.EDGE_CLASS_NAME instead */ @Deprecated public static final String VERTEX_CLASS_NAME = OClass.VERTEX_CLASS_NAME; private boolean inited = false; private final boolean isAbstract; private final boolean strictMode; private final String name; private final String streamAbleName; private final Map<String, OProperty> properties; private Map<String, OProperty> allPropertiesMap; private Collection<OProperty> allProperties; private final OClusterSelectionStrategy clusterSelection; private final int defaultClusterId; private final int[] clusterIds; private final int[] polymorphicClusterIds; private final Collection<String> baseClassesNames; private final List<String> superClassesNames; private final float overSize; private final float classOverSize; private final String shortName; private final Map<String, String> customFields; private final String description; private final OImmutableSchema schema; // do not do it volatile it is already SAFE TO USE IT in MT mode. private final List<OImmutableClass> superClasses; // do not do it volatile it is already SAFE TO USE IT in MT mode. 
private Collection<OImmutableClass> subclasses; private boolean restricted; private boolean isVertexType; private boolean isEdgeType; private boolean triggered; private boolean function; private boolean scheduler; private boolean sequence; private boolean ouser; private boolean orole; private OIndex autoShardingIndex; private HashSet<OIndex> indexes; public OImmutableClass(final OClass oClass, final OImmutableSchema schema) { isAbstract = oClass.isAbstract(); strictMode = oClass.isStrictMode(); this.schema = schema; superClassesNames = oClass.getSuperClassesNames(); superClasses = new ArrayList<OImmutableClass>(superClassesNames.size()); name = oClass.getName(); streamAbleName = oClass.getStreamableName(); clusterSelection = oClass.getClusterSelection(); defaultClusterId = oClass.getDefaultClusterId(); clusterIds = oClass.getClusterIds(); polymorphicClusterIds = oClass.getPolymorphicClusterIds(); baseClassesNames = new ArrayList<String>(); for (OClass baseClass : oClass.getSubclasses()) baseClassesNames.add(baseClass.getName()); overSize = oClass.getOverSize(); classOverSize = oClass.getClassOverSize(); shortName = oClass.getShortName(); properties = new HashMap<String, OProperty>(); for (OProperty p : oClass.declaredProperties()) properties.put(p.getName(), new OImmutableProperty(p, this)); Map<String, String> customFields = new HashMap<String, String>(); for (String key : oClass.getCustomKeys()) customFields.put(key, oClass.getCustom(key)); this.customFields = Collections.unmodifiableMap(customFields); this.description = oClass.getDescription(); } public void init() { if (!inited) { initSuperClasses(); final Collection<OProperty> allProperties = new ArrayList<OProperty>(); final Map<String, OProperty> allPropsMap = new HashMap<String, OProperty>(20); for (int i = superClasses.size() - 1; i >= 0; i--) { allProperties.addAll(superClasses.get(i).allProperties); allPropsMap.putAll(superClasses.get(i).allPropertiesMap); } allProperties.addAll(properties.values()); for 
(OProperty p : properties.values()) { final String propName = p.getName(); if (!allPropsMap.containsKey(propName)) allPropsMap.put(propName, p); } this.allProperties = Collections.unmodifiableCollection(allProperties); this.allPropertiesMap = Collections.unmodifiableMap(allPropsMap); this.restricted = isSubClassOf(OSecurityShared.RESTRICTED_CLASSNAME); this.isVertexType = isSubClassOf(OClass.VERTEX_CLASS_NAME); this.isEdgeType = isSubClassOf(OClass.EDGE_CLASS_NAME); this.triggered = isSubClassOf(OClassTrigger.CLASSNAME); this.function = isSubClassOf(OFunctionLibraryImpl.CLASSNAME); this.scheduler = isSubClassOf(OScheduledEvent.CLASS_NAME); this.sequence = isSubClassOf(OSequence.CLASS_NAME); this.ouser = isSubClassOf(OUser.CLASS_NAME); this.orole = isSubClassOf(ORole.CLASS_NAME); this.indexes = new HashSet<>(); getRawIndexes(indexes); final ODatabaseDocumentInternal db = getDatabase(); if (db != null && db.getMetadata() != null && db.getMetadata().getIndexManagerInternal() != null) { this.autoShardingIndex = db.getMetadata().getIndexManagerInternal().getClassAutoShardingIndex(db, name); } else { this.autoShardingIndex = null; } } inited = true; } @Override public boolean isAbstract() { return isAbstract; } @Override public OClass setAbstract(boolean iAbstract) { throw new UnsupportedOperationException(); } @Override public boolean isStrictMode() { return strictMode; } @Override public OClass setStrictMode(boolean iMode) { throw new UnsupportedOperationException(); } @Override @Deprecated public OClass getSuperClass() { initSuperClasses(); return superClasses.isEmpty() ? null : superClasses.get(0); } @Override @Deprecated public OClass setSuperClass(OClass iSuperClass) { throw new UnsupportedOperationException(); } @Override public List<OClass> getSuperClasses() { return Collections.unmodifiableList((List<? 
extends OClass>) superClasses); } @Override public boolean hasSuperClasses() { return !superClasses.isEmpty(); } @Override public List<String> getSuperClassesNames() { return superClassesNames; } @Override public OClass setSuperClasses(List<? extends OClass> classes) { throw new UnsupportedOperationException(); } @Override public OClass addSuperClass(OClass superClass) { throw new UnsupportedOperationException(); } @Override public OClass removeSuperClass(OClass superClass) { throw new UnsupportedOperationException(); } @Override public String getName() { return name; } @Override public OClass setName(String iName) { throw new UnsupportedOperationException(); } @Override public String getStreamableName() { return streamAbleName; } @Override public Collection<OProperty> declaredProperties() { return Collections.unmodifiableCollection(properties.values()); } @Override public Collection<OProperty> properties() { return allProperties; } @Override public Map<String, OProperty> propertiesMap() { return allPropertiesMap; } public void getIndexedProperties(Collection<OProperty> indexedProperties) { for (OProperty p : properties.values()) if (areIndexed(p.getName())) indexedProperties.add(p); initSuperClasses(); for (OImmutableClass superClass : superClasses) { superClass.getIndexedProperties(indexedProperties); } } @Override public Collection<OProperty> getIndexedProperties() { Collection<OProperty> indexedProps = new HashSet<OProperty>(); getIndexedProperties(indexedProps); return indexedProps; } @Override public OProperty getProperty(String propertyName) { initSuperClasses(); OProperty p = properties.get(propertyName); if (p != null) return p; for (int i = 0; i < superClasses.size() && p == null; i++) { p = superClasses.get(i).getProperty(propertyName); } return p; } @Override public OProperty createProperty(String iPropertyName, OType iType) { throw new UnsupportedOperationException(); } @Override public OProperty createProperty(String iPropertyName, OType iType, OClass 
// NOTE(review): this span is the interior of OImmutableClass — an immutable,
// thread-safe snapshot of a schema class. Query methods read cached state or
// delegate to superclasses; every mutator throws UnsupportedOperationException.
// The class header and field declarations precede this chunk.

  // Tail of a createProperty(...) overload whose signature starts before this
  // chunk; like every mutator on this immutable view it is unsupported.
  iLinkedClass) { throw new UnsupportedOperationException(); }

  @Override
  public OProperty createProperty(
      String iPropertyName, OType iType, OClass iLinkedClass, boolean unsafe) {
    throw new UnsupportedOperationException();
  }

  @Override
  public OProperty createProperty(String iPropertyName, OType iType, OType iLinkedType) {
    throw new UnsupportedOperationException();
  }

  @Override
  public OProperty createProperty(
      String iPropertyName, OType iType, OType iLinkedType, boolean unsafe) {
    throw new UnsupportedOperationException();
  }

  @Override
  public void dropProperty(String iPropertyName) {
    throw new UnsupportedOperationException();
  }

  // True if the property exists on this class or, recursively, on any superclass.
  @Override
  public boolean existsProperty(String propertyName) {
    boolean result = properties.containsKey(propertyName);
    if (result) return true;
    for (OImmutableClass superClass : superClasses) {
      result = superClass.existsProperty(propertyName);
      if (result) return true;
    }
    return false;
  }

  // Delegates cluster choice for a new record to the configured strategy.
  @Override
  public int getClusterForNewInstance(final ODocument doc) {
    return clusterSelection.getCluster(this, doc);
  }

  @Override
  public int getDefaultClusterId() {
    return defaultClusterId;
  }

  @Override
  public void setDefaultClusterId(int iDefaultClusterId) {
    throw new UnsupportedOperationException();
  }

  // NOTE(review): returns the internal array without copying (unlike
  // getPolymorphicClusterIds below) — callers must not mutate it.
  @Override
  public int[] getClusterIds() {
    return clusterIds;
  }

  @Override
  public OClass addClusterId(int iId) {
    throw new UnsupportedOperationException();
  }

  @Override
  public OClusterSelectionStrategy getClusterSelection() {
    return clusterSelection;
  }

  @Override
  public OClass setClusterSelection(OClusterSelectionStrategy clusterSelection) {
    throw new UnsupportedOperationException();
  }

  @Override
  public OClass setClusterSelection(String iStrategyName) {
    throw new UnsupportedOperationException();
  }

  @Override
  public OClass addCluster(String iClusterName) {
    throw new UnsupportedOperationException();
  }

  @Override
  public OClass truncateCluster(String clusterName) {
    throw new UnsupportedOperationException();
  }

  @Override
  public OClass removeClusterId(int iId) {
    throw new UnsupportedOperationException();
  }

  // Defensive copy; polymorphic ids cover this class plus all subclasses.
  @Override
  public int[] getPolymorphicClusterIds() {
    return Arrays.copyOf(polymorphicClusterIds, polymorphicClusterIds.length);
  }

  public OImmutableSchema getSchema() {
    return schema;
  }

  // Direct subclasses only (lazily resolved from their names).
  @Override
  public Collection<OClass> getSubclasses() {
    initBaseClasses();
    ArrayList<OClass> result = new ArrayList<OClass>();
    for (OClass c : subclasses) result.add(c);
    return result;
  }

  // Transitive closure of subclasses (de-duplicated via a Set).
  @Override
  public Collection<OClass> getAllSubclasses() {
    initBaseClasses();
    final Set<OClass> set = new HashSet<OClass>();
    set.addAll(getSubclasses());
    for (OImmutableClass c : subclasses) set.addAll(c.getAllSubclasses());
    return set;
  }

  @Override
  @Deprecated
  public Collection<OClass> getBaseClasses() {
    return getSubclasses();
  }

  @Override
  @Deprecated
  public Collection<OClass> getAllBaseClasses() {
    return getAllSubclasses();
  }

  // Transitive closure of superclasses.
  @Override
  public Collection<OClass> getAllSuperClasses() {
    Set<OClass> ret = new HashSet<OClass>();
    getAllSuperClasses(ret);
    return ret;
  }

  // Recursive accumulator for getAllSuperClasses().
  private void getAllSuperClasses(Set<OClass> set) {
    set.addAll(superClasses);
    for (OImmutableClass superClass : superClasses) {
      superClass.getAllSuperClasses(set);
    }
  }

  // Sums the record sizes of the clusters owned directly by this class.
  @Override
  public long getSize() {
    long size = 0;
    for (int clusterId : clusterIds) size += getDatabase().getClusterRecordSizeById(clusterId);
    return size;
  }

  @Override
  public float getOverSize() {
    return overSize;
  }

  @Override
  public float getClassOverSize() {
    return classOverSize;
  }

  @Override
  public OClass setOverSize(float overSize) {
    throw new UnsupportedOperationException();
  }

  // Polymorphic count by default.
  @Override
  public long count() {
    return count(true);
  }

  // Counts records in this class's clusters; includes subclasses' clusters when
  // isPolymorphic is true. Only clusters readable by the current user are
  // considered (OClassImpl.readableClusters).
  @Override
  public long count(boolean isPolymorphic) {
    if (isPolymorphic)
      return getDatabase()
          .countClusterElements(
              OClassImpl.readableClusters(getDatabase(), polymorphicClusterIds, name));
    return getDatabase()
        .countClusterElements(OClassImpl.readableClusters(getDatabase(), clusterIds, name));
  }

  @Override
  public void truncate() throws IOException {
    throw new UnsupportedOperationException();
  }

  // Case-insensitive match on this class's name/short name, then walks up the
  // superclass chain.
  @Override
  public boolean isSubClassOf(final String iClassName) {
    if (iClassName == null) return false;
    if (iClassName.equalsIgnoreCase(getName()) || iClassName.equalsIgnoreCase(getShortName()))
      return true;
    final int s = superClasses.size();
    for (int i = 0; i < s; ++i) {
      if (superClasses.get(i).isSubClassOf(iClassName)) return true;
    }
    return false;
  }

  @Override
  public boolean isSubClassOf(final OClass clazz) {
    if (clazz == null) return false;
    if (equals(clazz)) return true;
    final int s = superClasses.size();
    for (int i = 0; i < s; ++i) {
      if (superClasses.get(i).isSubClassOf(clazz)) return true;
    }
    return false;
  }

  @Override
  public boolean isSuperClassOf(OClass clazz) {
    return clazz != null && clazz.isSubClassOf(this);
  }

  @Override
  public String getShortName() {
    return shortName;
  }

  @Override
  public OClass setShortName(String shortName) {
    throw new UnsupportedOperationException();
  }

  @Override
  public String getDescription() {
    return description;
  }

  @Override
  public OClass setDescription(String iDescription) {
    throw new UnsupportedOperationException();
  }

  // Generic attribute accessor; dispatches on the ATTRIBUTES enum.
  @Override
  public Object get(ATTRIBUTES iAttribute) {
    if (iAttribute == null) throw new IllegalArgumentException("attribute is null");
    switch (iAttribute) {
      case NAME:
        return getName();
      case SHORTNAME:
        return getShortName();
      case SUPERCLASS:
        return getSuperClass();
      case SUPERCLASSES:
        return getSuperClasses();
      case OVERSIZE:
        return getOverSize();
      case STRICTMODE:
        return isStrictMode();
      case ABSTRACT:
        return isAbstract();
      case CLUSTERSELECTION:
        return getClusterSelection();
      case CUSTOM:
        return getCustomInternal();
      case DESCRIPTION:
        return getDescription();
    }
    // Reached for ATTRIBUTES values not handled above.
    throw new IllegalArgumentException("Cannot find attribute '" + iAttribute + "'");
  }

  @Override
  public OClass set(ATTRIBUTES attribute, Object iValue) {
    throw new UnsupportedOperationException();
  }

  @Override
  public OIndex createIndex(String iName, INDEX_TYPE iType, String... fields) {
    throw new UnsupportedOperationException();
  }

  @Override
  public OIndex createIndex(String iName, String iType, String... fields) {
    throw new UnsupportedOperationException();
  }

  @Override
  public OIndex createIndex(
      String iName, INDEX_TYPE iType, OProgressListener iProgressListener, String... fields) {
    throw new UnsupportedOperationException();
  }

  @Override
  public OIndex createIndex(
      String iName,
      String iType,
      OProgressListener iProgressListener,
      ODocument metadata,
      String algorithm,
      String... fields) {
    throw new UnsupportedOperationException();
  }

  @Override
  public OIndex createIndex(
      String iName,
      String iType,
      OProgressListener iProgressListener,
      ODocument metadata,
      String... fields) {
    throw new UnsupportedOperationException();
  }

  // Indexes involving the given fields, declared on this class or any superclass.
  @Override
  public Set<OIndex> getInvolvedIndexes(Collection<String> fields) {
    initSuperClasses();
    final Set<OIndex> result = new HashSet<OIndex>(getClassInvolvedIndexes(fields));
    for (OImmutableClass superClass : superClasses) {
      result.addAll(superClass.getInvolvedIndexes(fields));
    }
    return result;
  }

  @Override
  public Set<OIndex> getInvolvedIndexes(String... fields) {
    return getInvolvedIndexes(Arrays.asList(fields));
  }

  // Indexes involving the given fields, declared on this class only.
  @Override
  public Set<OIndex> getClassInvolvedIndexes(Collection<String> fields) {
    final ODatabaseDocumentInternal database = getDatabase();
    final OIndexManagerAbstract indexManager = database.getMetadata().getIndexManagerInternal();
    return indexManager.getClassInvolvedIndexes(database, name, fields);
  }

  @Override
  public Set<OIndex> getClassInvolvedIndexes(String... fields) {
    return getClassInvolvedIndexes(Arrays.asList(fields));
  }

  // True when the fields are indexed on this class or any superclass.
  @Override
  public boolean areIndexed(Collection<String> fields) {
    final ODatabaseDocumentInternal database = getDatabase();
    final OIndexManagerAbstract indexManager = database.getMetadata().getIndexManagerInternal();
    final boolean currentClassResult = indexManager.areIndexed(name, fields);
    initSuperClasses();
    if (currentClassResult) return true;
    for (OImmutableClass superClass : superClasses) {
      if (superClass.areIndexed(fields)) return true;
    }
    return false;
  }

  @Override
  public boolean areIndexed(String... fields) {
    return areIndexed(Arrays.asList(fields));
  }

  @Override
  public OIndex getClassIndex(String iName) {
    final ODatabaseDocumentInternal database = getDatabase();
    return database
        .getMetadata()
        .getIndexManagerInternal()
        .getClassIndex(database, this.name, iName);
  }

  @Override
  public Set<OIndex> getClassIndexes() {
    final ODatabaseDocumentInternal database = getDatabase();
    return database.getMetadata().getIndexManagerInternal().getClassIndexes(database, name);
  }

  @Override
  public void getClassIndexes(final Collection<OIndex> indexes) {
    final ODatabaseDocumentInternal database = getDatabase();
    database.getMetadata().getIndexManagerInternal().getClassIndexes(database, name, indexes);
  }

  public void getRawClassIndexes(final Collection<OIndex> indexes) {
    getDatabase().getMetadata().getIndexManagerInternal().getClassRawIndexes(name, indexes);
  }

  // Collects this class's indexes plus those of every superclass.
  @Override
  public void getIndexes(final Collection<OIndex> indexes) {
    initSuperClasses();
    getClassIndexes(indexes);
    for (OClass superClass : superClasses) {
      superClass.getIndexes(indexes);
    }
  }

  public void getRawIndexes(final Collection<OIndex> indexes) {
    initSuperClasses();
    getRawClassIndexes(indexes);
    for (OImmutableClass superClass : superClasses) {
      superClass.getRawIndexes(indexes);
    }
  }

  @Override
  public Set<OIndex> getIndexes() {
    final Set<OIndex> indexes = new HashSet<OIndex>();
    getIndexes(indexes);
    return indexes;
  }

  // NOTE(review): returns the cached 'indexes' field directly, unlike
  // getIndexes() above which recomputes — confirm the field is kept current.
  public Set<OIndex> getRawIndexes() {
    return indexes;
  }

  @Override
  public OIndex getAutoShardingIndex() {
    return autoShardingIndex;
  }

  // NOTE(review): hashCode is just 31 * super.hashCode() and ignores 'name',
  // while equals() below compares by name only — the equals/hashCode contract
  // holds only if the superclass hash is name-based; verify.
  @Override
  public int hashCode() {
    final int prime = 31;
    int result = super.hashCode();
    result = prime * result;
    return result;
  }

  // Equality is by class name, against any OClass implementation.
  @Override
  public boolean equals(final Object obj) {
    if (this == obj) return true;
    if (obj == null) return false;
    if (!OClass.class.isAssignableFrom(obj.getClass())) return false;
    final OClass other = (OClass) obj;
    if (name == null) {
      if (other.getName() != null) return false;
    } else if (!name.equals(other.getName())) return false;
    return true;
  }

  @Override
  public String toString() {
    return name;
  }

  @Override
  public String getCustom(final String iName) {
    return customFields.get(iName);
  }

  @Override
  public OClass setCustom(String iName, String iValue) {
    throw new UnsupportedOperationException();
  }

  @Override
  public void removeCustom(String iName) {
    throw new UnsupportedOperationException();
  }

  @Override
  public void clearCustom() {
    throw new UnsupportedOperationException();
  }

  @Override
  public Set<String> getCustomKeys() {
    return Collections.unmodifiableSet(customFields.keySet());
  }

  // Assumes clusterIds is kept sorted ascending, as Arrays.binarySearch
  // requires — TODO confirm against the snapshot constructor.
  @Override
  public boolean hasClusterId(final int clusterId) {
    return Arrays.binarySearch(clusterIds, clusterId) >= 0;
  }

  @Override
  public boolean hasPolymorphicClusterId(final int clusterId) {
    return Arrays.binarySearch(polymorphicClusterIds, clusterId) >= 0;
  }

  // Natural ordering by class name; consistent with equals().
  @Override
  public int compareTo(final OClass other) {
    return name.compareTo(other.getName());
  }

  // The database instance bound to the current thread.
  private ODatabaseDocumentInternal getDatabase() {
    return ODatabaseRecordThreadLocal.instance().get();
  }

  private Map<String, String> getCustomInternal() {
    return customFields;
  }

  // Lazily resolves superclass names into OImmutableClass instances; re-runs
  // whenever the resolved list is out of sync with the name list.
  private void initSuperClasses() {
    if (superClassesNames != null && superClassesNames.size() != superClasses.size()) {
      superClasses.clear();
      for (String superClassName : superClassesNames) {
        OImmutableClass superClass = (OImmutableClass) schema.getClass(superClassName);
        superClass.init();
        superClasses.add(superClass);
      }
    }
  }

  // Lazily resolves direct subclass names (subclasses is null until first use).
  private void initBaseClasses() {
    if (subclasses == null) {
      final List<OImmutableClass> result =
          new ArrayList<OImmutableClass>(baseClassesNames.size());
      for (String clsName : baseClassesNames)
        result.add((OImmutableClass) schema.getClass(clsName));
      subclasses = result;
    }
  }

  // Cached flags describing the role of this class in the database.
  public boolean isRestricted() {
    return restricted;
  }

  public boolean isEdgeType() {
    return isEdgeType;
  }

  public boolean isVertexType() {
    return isVertexType;
  }

  public boolean isTriggered() {
    return triggered;
  }

  public boolean isFunction() {
    return function;
  }

  public boolean isScheduler() {
    return scheduler;
  }

  public boolean isOuser() {
    return ouser;
  }

  public boolean isOrole() {
    return orole;
  }

  public boolean isSequence() {
    return sequence;
  }
}
/*
 * Copyright 2015 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */

package com.google.googlejavaformat.java;

import static com.google.common.base.MoreObjects.firstNonNull;

import com.google.common.base.MoreObjects;
import com.google.common.base.Splitter;
import com.google.common.base.Verify;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSortedMap;
import com.google.common.collect.Range;
import com.google.googlejavaformat.Input;

import org.eclipse.jdt.core.ToolFactory;
import org.eclipse.jdt.core.compiler.IScanner;
import org.eclipse.jdt.core.compiler.ITerminalSymbols;
import org.eclipse.jdt.core.compiler.InvalidInputException;
import org.eclipse.jdt.core.dom.CompilationUnit;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.NavigableMap;

/**
 * {@code JavaInput} extends {@link Input} to represent a Java input document.
 */
public final class JavaInput extends Input {
  /**
   * A {@code JavaInput} is a sequence of {@link Tok}s that cover the Java input. A {@link Tok} is
   * either a token (if {@code isToken()}), or a non-token, which is a comment (if
   * {@code isComment()}) or a newline (if {@code isNewline()}) or a maximal sequence of other
   * whitespace characters (if {@code isSpaces()}). Each {@link Tok} contains a sequence of
   * characters, an index (sequential starting at {@code 0} for tokens and comments, else
   * {@code -1}), and an Eclipse-compatible ({@code 0}-origin) position in the input. The
   * concatenation of the texts of all the {@link Tok}s equals the input. Each Input ends with a
   * token EOF {@link Tok}, with empty text.
   *
   * <p>A {@code /*} comment possibly contains newlines; a {@code //} comment does not contain the
   * terminating newline character, but is followed by a newline {@link Tok}.
   */
  static final class Tok implements Input.Tok {
    private final int index;          // sequential index; -1 for non-numbered toks (whitespace)
    private final String originalText; // text as it appears in the input (Unicode escapes intact)
    private final String text;         // text with Unicode escapes resolved
    private final int position;        // 0-origin char offset in the input
    private final int columnI;         // 0-origin column of the tok's first char
    private final boolean isToken;     // true for real tokens, false for comments/whitespace

    /**
     * The {@code Tok} constructor.
     *
     * @param index its index
     * @param originalText its original text, before removing Unicode escapes
     * @param text its text after removing Unicode escapes
     * @param position its {@code 0}-origin position in the input
     * @param columnI its {@code 0}-origin column number in the input
     * @param isToken whether the {@code Tok} is a token
     */
    Tok(int index, String originalText, String text, int position, int columnI, boolean isToken) {
      this.index = index;
      this.originalText = originalText;
      this.text = text;
      this.position = position;
      this.columnI = columnI;
      this.isToken = isToken;
    }

    @Override
    public int getIndex() {
      return index;
    }

    @Override
    public String getText() {
      return text;
    }

    @Override
    public String getOriginalText() {
      return originalText;
    }

    @Override
    public int getPosition() {
      return position;
    }

    @Override
    public int getColumn() {
      return columnI;
    }

    boolean isToken() {
      return isToken;
    }

    @Override
    public boolean isNewline() {
      return "\n".equals(text);
    }

    @Override
    public boolean isSlashSlashComment() {
      return text.startsWith("//");
    }

    @Override
    public boolean isSlashStarComment() {
      return text.startsWith("/*");
    }

    @Override
    public boolean isComment() {
      return isSlashSlashComment() || isSlashStarComment();
    }

    @Override
    public String toString() {
      return MoreObjects.toStringHelper(this)
          .add("index", index)
          .add("text", text)
          .add("position", position)
          .add("columnI", columnI)
          .add("isToken", isToken)
          .toString();
    }
  }

  /**
   * A {@link Token} contains a token {@link Tok} and its associated non-tokens; each non-token
   * {@link Tok} belongs to one {@link Token}. Each {@link Token} has an immutable list of its
   * non-tokens that appear before it, and another list of its non-tokens that appear after it. The
   * concatenation of the texts of all the {@link Token}s' {@link Tok}s, each preceded by the texts
   * of its {@code toksBefore} and followed by the texts of its {@code toksAfter}, equals the input.
   */
  static final class Token implements Input.Token {
    private final Tok tok;
    private final ImmutableList<Tok> toksBefore;
    private final ImmutableList<Tok> toksAfter;

    /**
     * Token constructor.
     *
     * @param toksBefore the earlier non-token {link Tok}s assigned to this {@code Token}
     * @param tok this token {@link Tok}
     * @param toksAfter the later non-token {link Tok}s assigned to this {@code Token}
     */
    Token(List<Tok> toksBefore, Tok tok, List<Tok> toksAfter) {
      this.toksBefore = ImmutableList.copyOf(toksBefore);
      this.tok = tok;
      this.toksAfter = ImmutableList.copyOf(toksAfter);
    }

    /**
     * Get the token's {@link Tok}.
     *
     * @return the token's {@link Tok}
     */
    @Override
    public Tok getTok() {
      return tok;
    }

    /**
     * Get the earlier {@link Tok}s assigned to this {@code Token}.
     *
     * @return the earlier {@link Tok}s assigned to this {@code Token}
     */
    @Override
    public ImmutableList<? extends Input.Tok> getToksBefore() {
      return toksBefore;
    }

    /**
     * Get the later {@link Tok}s assigned to this {@code Token}.
     *
     * @return the later {@link Tok}s assigned to this {@code Token}
     */
    @Override
    public ImmutableList<? extends Input.Tok> getToksAfter() {
      return toksAfter;
    }

    @Override
    public String toString() {
      return MoreObjects.toStringHelper(this)
          .add("tok", tok)
          .add("toksBefore", toksBefore)
          .add("toksAfter", toksAfter)
          .toString();
    }
  }

  private static final Splitter NEWLINE_SPLITTER = Splitter.on('\n');

  private final String filename;
  private final String text; // The input.
  private int kN; // The number of numbered toks (tokens or comments), excluding the EOF.
  private Map<Integer, Range<Integer>> kToI = null; // Map from token indices to line numbers.

  /*
   * The following lists record the sequential indices of the {@code Tok}s on each input line. (Only
   * tokens and comments have sequential indices.) Tokens and {@code //} comments lie on just one
   * line; {@code /*} comments can lie on multiple lines. These data structures (along with
   * equivalent ones for the formatted output) let us compute correspondences between the input and
   * output.
   */
  private final ImmutableMap<Integer, Integer> positionToColumnMap; // Map Tok position to column.
  private final ImmutableList<Token> tokens; // The Tokens for this input.
  private final ImmutableSortedMap<Integer, Token> positionTokenMap; // Map position to Token.

  /** Map from Tok index to the associated Token. */
  private final Token[] kToToken;

  /**
   * Input constructor.
   *
   * @param text the input text
   * @throws FormatterException if the input cannot be parsed
   */
  public JavaInput(String filename, String text) throws FormatterException {
    this.filename = filename;
    this.text = text;
    char[] chars = text.toCharArray();
    List<String> lines = NEWLINE_SPLITTER.splitToList(text);
    setLines(ImmutableList.copyOf(lines));
    ImmutableList<Tok> toks = buildToks(text, chars);
    positionToColumnMap = makePositionToColumnMap(toks);
    tokens = buildTokens(toks);
    ImmutableSortedMap.Builder<Integer, Token> locationTokenMap = ImmutableSortedMap.naturalOrder();
    for (Token token : tokens) {
      // Keyed by the position of the token's first Tok (including toksBefore).
      locationTokenMap.put(JavaOutput.startTok(token).getPosition(), token);
    }
    positionTokenMap = locationTokenMap.build();

    // adjust kN for EOF
    kToToken = new Token[kN + 1];
    for (Token token : tokens) {
      // Index -1 marks unnumbered (whitespace) toks; skip them.
      for (Input.Tok tok : token.getToksBefore()) {
        if (tok.getIndex() < 0) {
          continue;
        }
        kToToken[tok.getIndex()] = token;
      }
      kToToken[token.getTok().getIndex()] = token;
      for (Input.Tok tok : token.getToksAfter()) {
        if (tok.getIndex() < 0) {
          continue;
        }
        kToToken[tok.getIndex()] = token;
      }
    }
  }

  // Builds the position -> column lookup used when emitting the output.
  private static ImmutableMap<Integer, Integer> makePositionToColumnMap(List<Tok> toks) {
    ImmutableMap.Builder<Integer, Integer> builder = ImmutableMap.builder();
    for (Tok tok : toks) {
      builder.put(tok.getPosition(), tok.getColumn());
    }
    return builder.build();
  }

  /**
   * Get the input text.
   *
   * @return the input text
   */
  @Override
  public String getText() {
    return text;
  }

  @Override
  public ImmutableMap<Integer, Integer> getPositionToColumnMap() {
    return positionToColumnMap;
  }

  /** Lex the input and build the list of toks. */
  private ImmutableList<Tok> buildToks(String text, char... chars) throws FormatterException {
    try {
      kN = 0;
      // Eclipse JDT scanner, comments and whitespace reported, source level "1.8".
      IScanner scanner = ToolFactory.createScanner(true, true, true, "1.8");
      scanner.setSource(chars);
      List<Tok> toks = new ArrayList<>();
      int charI = 0;   // running 0-origin char offset
      int columnI = 0; // running 0-origin column, reset at each '\n'
      while (scanner.getCurrentTokenEndPosition() < chars.length - 1
          && scanner.getNextToken() != ITerminalSymbols.TokenNameEOF) {
        int charI0 = scanner.getCurrentTokenStartPosition();
        // Get string, possibly with Unicode escapes.
        String originalTokText = text.substring(charI0, scanner.getCurrentTokenEndPosition() + 1);
        String tokText = new String(scanner.getCurrentTokenSource()); // Unicode escapes removed.
        char tokText0 = tokText.charAt(0); // The token's first character.
        final boolean isToken; // Is this tok a token?
        final boolean isNumbered; // Is this tok numbered? (tokens and comments)
        boolean extraNewline = false; // Extra newline at end?
        List<String> strings = new ArrayList<>();
        if (Character.isWhitespace(tokText0)) {
          // Whitespace run: split into alternating newline toks and space toks.
          isToken = false;
          isNumbered = false;
          boolean first = true;
          for (String spaces : NEWLINE_SPLITTER.split(originalTokText)) {
            if (!first) {
              strings.add("\n");
            }
            if (!spaces.isEmpty()) {
              strings.add(spaces);
            }
            first = false;
          }
        } else if (tokText.startsWith("'") || tokText.startsWith("\"")) {
          // Character/String literal: kept whole, escapes untouched.
          isToken = true;
          isNumbered = true;
          strings.add(originalTokText);
        } else if (tokText.startsWith("//") || tokText.startsWith("/*")) {
          // For compatibility with an earlier lexer, the newline after a
          // comment is its own tok.
          if (tokText.startsWith("//") && originalTokText.endsWith("\n")) {
            originalTokText = originalTokText.substring(0, originalTokText.length() - 1);
            tokText = tokText.substring(0, tokText.length() - 1);
            extraNewline = true;
          }
          isToken = false;
          isNumbered = true;
          strings.add(originalTokText);
        } else if (Character.isJavaIdentifierStart(tokText0)
            || Character.isDigit(tokText0)
            || tokText0 == '.' && tokText.length() > 1 && Character.isDigit(tokText.charAt(1))) {
          // Identifier, keyword, or numeric literal (a dot may begin a number, as in .2D).
          isToken = true;
          isNumbered = true;
          strings.add(tokText);
        } else {
          // Other tokens ("+" or "++" or ">>" are broken into one-character toks, because ">>"
          // cannot be lexed without syntactic knowledge. This implementation fails if the token
          // contains Unicode escapes.
          isToken = true;
          isNumbered = true;
          for (char c : tokText.toCharArray()) {
            strings.add(String.valueOf(c));
          }
        }
        if (strings.size() == 1) {
          toks.add(
              new Tok(isNumbered ? kN++ : -1, originalTokText, tokText, charI, columnI, isToken));
          for (char c : originalTokText.toCharArray()) {
            if (c == '\n') {
              columnI = 0;
            } else {
              ++columnI;
            }
            ++charI;
          }
        } else {
          // NOTE(review): the strings.size() != 1 conjunct is always true in this
          // branch; the real guard is the originalText/text mismatch.
          if (strings.size() != 1 && !tokText.equals(originalTokText)) {
            throw new FormatterException(
                "Unicode escapes not allowed in whitespace or multi-character operators");
          }
          for (String str : strings) {
            toks.add(new Tok(isNumbered ? kN++ : -1, str, str, charI, columnI, isToken));
            for (char c : str.toCharArray()) {
              if (c == '\n') {
                columnI = 0;
              } else {
                ++columnI;
              }
              ++charI;
            }
          }
        }
        if (extraNewline) {
          toks.add(new Tok(-1, "\n", "\n", charI, columnI, false));
          columnI = 0;
          ++charI;
        }
      }
      toks.add(new Tok(kN++, "", "", charI, columnI, true)); // EOF tok.
      --kN; // Don't count EOF tok.
      computeRanges(toks);
      return ImmutableList.copyOf(toks);
    } catch (InvalidInputException e) {
      throw new FormatterException(e.getMessage());
    }
  }

  // Groups the flat tok list into Tokens: each real token plus the non-tokens
  // before it and the non-tokens after it that start on the same line.
  private static ImmutableList<Token> buildTokens(List<Tok> toks) {
    ImmutableList.Builder<Token> tokens = ImmutableList.builder();
    int k = 0;
    int kN = toks.size();
    while (k < kN) {
      // Remaining non-tokens before the token go here.
      ImmutableList.Builder<Tok> toksBefore = ImmutableList.builder();
      while (!toks.get(k).isToken()) {
        toksBefore.add(toks.get(k++));
      }
      Tok tok = toks.get(k++);
      // Non-tokens starting on the same line go here too.
      ImmutableList.Builder<Tok> toksAfter = ImmutableList.builder();
      while (k < kN && !"\n".equals(toks.get(k).getText()) && !toks.get(k).isToken()) {
        Tok nonTokenAfter = toks.get(k++);
        toksAfter.add(nonTokenAfter);
        if (nonTokenAfter.getText().contains("\n")) {
          break;
        }
      }
      tokens.add(new Token(toksBefore.build(), tok, toksAfter.build()));
    }
    return tokens.build();
  }

  /**
   * Returns the lowest line number the {@link Token} or one of its {@code tokBefore}s lies on in
   * the {@code JavaInput}.
   *
   * @param token the {@link Token}
   * @return the {@code 0}-based line number
   */
  int getLineNumberLo(Token token) {
    int k = -1;
    for (Tok tok : token.toksBefore) {
      k = tok.getIndex();
      if (k >= 0) {
        break;
      }
    }
    if (k < 0) {
      k = token.tok.getIndex();
    }
    if (kToI == null) {
      kToI = makeKToIJ(this, kN); // lazily built on first use
    }
    return kToI.get(k).lowerEndpoint();
  }

  /**
   * Returns the highest line number the {@link Token} or one of its {@code tokAfter}s lies on in
   * the {@code JavaInput}.
   *
   * @param token the {@link Token}
   * @return the {@code 0}-based line number
   */
  int getLineNumberHi(Token token) {
    int k = -1;
    for (Tok tok : token.toksAfter.reverse()) {
      k = tok.getIndex();
      if (k >= 0) {
        break;
      }
    }
    if (k < 0) {
      k = token.tok.getIndex();
    }
    if (kToI == null) {
      kToI = makeKToIJ(this, kN);
    }
    return kToI.get(k).upperEndpoint() - 1;
  }

  /**
   * Convert from an offset and length flag pair to a token range.
   *
   * @param offset the {@code 0}-based offset in characters
   * @param length the length in characters
   * @return the {@code 0}-based {@link Range} of tokens
   * @throws FormatterException if the requested range lies outside the file
   */
  Range<Integer> characterRangeToTokenRange(int offset, int length) throws FormatterException {
    int requiredLength = offset + length;
    if (requiredLength > text.length()) {
      throw new FormatterException(
          String.format(
              "invalid length %d, offset + length (%d) is outside the file",
              requiredLength, requiredLength));
    }
    if (length <= 0) {
      return Formatter.EMPTY_RANGE;
    }
    NavigableMap<Integer, JavaInput.Token> map = getPositionTokenMap();
    Map.Entry<Integer, JavaInput.Token> tokenEntryLo =
        firstNonNull(map.floorEntry(offset), map.firstEntry());
    Map.Entry<Integer, JavaInput.Token> tokenEntryHi =
        firstNonNull(map.ceilingEntry(offset + length - 1), map.lastEntry());
    return Range.closedOpen(
        tokenEntryLo.getValue().getTok().getIndex(),
        tokenEntryHi.getValue().getTok().getIndex() + 1);
  }

  // Maps a 0-based line range to the half-open range of token indices it covers,
  // skipping leading/trailing lines that carry no tokens.
  Range<Integer> lineRangeToTokenRange(Range<Integer> lineRange) {
    Range<Integer> lines = Range.closedOpen(0, getLineCount());
    if (!lines.isConnected(lineRange)) {
      return EMPTY_RANGE;
    }
    lineRange = lines.intersection(lineRange);
    int startLine = Math.max(0, lineRange.lowerEndpoint());
    int start = getRange0s(startLine).lowerEndpoint();
    while (start < 0 && lines.contains(startLine)) {
      startLine++;
      start = getRange0s(startLine).lowerEndpoint();
    }
    int endLine = Math.min(lineRange.upperEndpoint() - 1, getLineCount() - 1);
    int end = getRange1s(endLine).upperEndpoint();
    while (end < 0 && lines.contains(endLine)) {
      endLine--;
      end = getRange1s(endLine).upperEndpoint();
    }
    Verify.verify(start >= 0);
    if (end <= start) {
      // If the file starts with blank lines, a request to format the first line
      // wont include any tokens, and 'end' will end up being -1. That issue can't
      // happen at the end of the file because there's an explicit EOF token.
      return EMPTY_RANGE;
    }
    Verify.verify(end >= 0);
    return Range.closedOpen(start, end);
  }

  /**
   * Get the number of toks.
   *
   * @return the number of numbered toks, excluding the EOF tok (see {@code --kN} in
   *     {@code buildToks})
   */
  int getkN() {
    return kN;
  }

  /**
   * Get the Token by index.
   *
   * @param k the token index
   */
  Token getToken(int k) {
    return kToToken[k];
  }

  /**
   * Get the input tokens.
   *
   * @return the input tokens
   */
  @Override
  public ImmutableList<? extends Input.Token> getTokens() {
    return tokens;
  }

  /**
   * Get the navigable map from position to {@link Token}. Used to look for tokens following a given
   * one, and to implement the --offset and --length flags to reformat a character range in the
   * input file.
   *
   * @return the navigable map from position to {@link Token}
   */
  @Override
  public NavigableMap<Integer, Token> getPositionTokenMap() {
    return positionTokenMap;
  }

  @Override
  public String toString() {
    return MoreObjects.toStringHelper(this)
        .add("tokens", tokens)
        .add("super", super.toString())
        .toString();
  }

  @Override
  public String filename() {
    return filename;
  }

  // The parsed AST for this input; set after construction, before getLineNumber is used.
  private CompilationUnit unit;

  @Override
  public int getLineNumber(int inputPosition) {
    Verify.verifyNotNull(unit, "Expected compilation unit to be set.");
    return unit.getLineNumber(inputPosition);
  }

  // TODO(cushon): refactor JavaInput so the CompilationUnit can be passed into
  // the constructor.
  public void setCompilationUnit(CompilationUnit unit) {
    this.unit = unit;
  }
}
/* * Copyright 2016, Yahoo! Inc. Licensed under the terms of the Apache License 2.0. See LICENSE file * at the project root for terms. */ package com.yahoo.sketches.frequencies; import org.testng.Assert; import org.testng.annotations.Test; /** * Tests CountMinFast class * * @author Justin8712 * */ public class CountMinFastTest { @SuppressWarnings("unused") @Test(expectedExceptions = IllegalArgumentException.class) public void construct() { int size = 100; double eps = 1.0 / size; double delta = .01; CountMinFast countmin = new CountMinFast(eps, delta); Assert.assertNotNull(countmin); // Should throw exception new CountMinFast(-134, delta); } //@Test public void updateOneTime() { int size = 100; double eps = 1.0 / size; double delta = .01; CountMinFast countmin = new CountMinFast(eps, delta); countmin.update(13L); Assert.assertEquals(countmin.getEstimate(13L), 1); } //@Test public void ErrorCorrect() { int size = 100; double eps = 1.0 / size; double delta = .01; CountMinFast countmin = new CountMinFast(eps, delta); for (long key = 0L; key < 10000L; key++) { countmin.update(key, 1); Assert.assertTrue(countmin.getMaxError() == (long) (Math.ceil((key + 1) * eps))); } } /** * @param prob the probability of success for the geometric distribution. * @return a random number generated from the geometric distribution. */ static private long randomGeometricDist(double prob) { assert (prob > 0.0 && prob < 1.0); return (long) (Math.log(Math.random()) / Math.log(1.0 - prob)); } //@Test public void testRandomGeometricDist() { long maxKey = 0L; double prob = .1; for (int i = 0; i < 100; i++) { long key = randomGeometricDist(prob); if (key > maxKey) maxKey = key; // If you succeed with probability p the probability // of failing 20/p times is smaller than 1/2^20. 
Assert.assertTrue(maxKey < 20.0 / prob); } } //@Test public void realCountsInBounds() { int n = 4213; int size = 50; long key; double prob = .04; double eps = 1.0 / size; double delta = .01; int bad = 0; CountMinFast countmin = new CountMinFast(eps, delta); PositiveCountersMap realCounts = new PositiveCountersMap(); for (int i = 0; i < n; i++) { key = randomGeometricDist(prob); countmin.update(key); realCounts.increment(key); long realCount = realCounts.get(key); long upperBound = countmin.getEstimateUpperBound(key); long lowerBound = countmin.getEstimateLowerBound(key); if (upperBound >= realCount && realCount >= lowerBound) { continue; } else { System.out.format("upperbound: %d, realCount: %d, lowerbound: %d \n", upperBound, realCount, lowerBound); bad += 1; } } // System.out.format("bad is: %d and eps * n is: %f \n", bad, eps*n); Assert.assertTrue(bad <= eps * n); } //@Test public void realCountsInBoundsCU() { int n = 4213; int size = 50; long key; double prob = .04; double eps = 1.0 / size; double delta = .01; int bad = 0; CountMinFast countmin = new CountMinFast(eps, delta); PositiveCountersMap realCounts = new PositiveCountersMap(); for (int i = 0; i < n; i++) { key = randomGeometricDist(prob); countmin.conservative_update(key); realCounts.increment(key); long realCount = realCounts.get(key); long upperBound = countmin.getEstimateUpperBound(key); long lowerBound = countmin.getEstimateLowerBound(key); if (upperBound >= realCount && realCount >= lowerBound) { continue; } else { System.out.format("upperbound: %d, realCount: %d, lowerbound: %d \n", upperBound, realCount, lowerBound); bad += 1; } } // System.out.format("bad is: %d and eps * n is: %f \n", bad, eps*n); Assert.assertTrue(bad <= eps * n); } //@Test public void ConservativeBetterThanNon() { int n = 4213; int size = 50; long key; double prob = .04; double eps = 1.0 / size; double delta = .01; CountMinFast countmin1 = new CountMinFast(eps, delta); CountMinFast countmin2 = new CountMinFast(eps, delta); 
PositiveCountersMap realCounts = new PositiveCountersMap(); for (int i = 0; i < n; i++) { key = randomGeometricDist(prob); countmin1.conservative_update(key); countmin2.update(key); realCounts.increment(key); long upperBound = countmin1.getEstimateUpperBound(key); Assert.assertTrue(upperBound <= countmin2.getEstimateUpperBound(key)); } } //@Test(expectedExceptions = IllegalArgumentException.class) public void UnionErrorCheck() { int size1 = 100; int size2 = 400; double delta = .01; double eps1 = 1.0 / size1; double eps2 = 1.0 / size2; CountMinFast countmin1 = new CountMinFast(eps1, delta); CountMinFast countmin2 = new CountMinFast(eps2, delta); // should throw an exception countmin1.merge(countmin2); } //@Test public void realCountsInBoundsAfterUnion() { int n = 1000; int size = 400; double delta = .01; double eps = 1.0 / size; double prob1 = .01; double prob2 = .005; PositiveCountersMap realCounts = new PositiveCountersMap(); CountMinFast countmin1 = new CountMinFast(eps, delta); CountMinFast countmin2 = new CountMinFast(eps, delta); for (int i = 0; i < n; i++) { long key1 = randomGeometricDist(prob1); long key2 = randomGeometricDist(prob2); countmin1.update(key1); countmin2.update(key2); // Updating the real counters realCounts.increment(key1); realCounts.increment(key2); } CountMinFast countmin = countmin1.merge(countmin2); int bad = 0; int i = 0; for (long key : realCounts.keys()) { i = i + 1; long realCount = realCounts.get(key); long upperBound = countmin.getEstimateUpperBound(key); long lowerBound = countmin.getEstimateLowerBound(key); if (upperBound < realCount || realCount < lowerBound) { bad = bad + 1; System.out.format("upperbound: %d, realCount: %d, lowerbound: %d \n", upperBound, realCount, lowerBound); } } Assert.assertTrue(bad <= delta * i); } //@Test public void realCountsInBoundsAfterUnionCU() { int n = 1000; int size = 400; double delta = .01; double eps = 1.0 / size; double prob1 = .01; double prob2 = .005; PositiveCountersMap realCounts = new 
PositiveCountersMap();
    CountMinFast countmin1 = new CountMinFast(eps, delta);
    CountMinFast countmin2 = new CountMinFast(eps, delta);
    // Feed two independent geometric streams into two separate sketches while
    // tracking the exact counts for later comparison.
    for (int i = 0; i < n; i++) {
      long key1 = randomGeometricDist(prob1);
      long key2 = randomGeometricDist(prob2);
      countmin1.conservative_update(key1);
      countmin2.conservative_update(key2);
      // Updating the real counters
      realCounts.increment(key1);
      realCounts.increment(key2);
    }
    // A merged sketch must still bound the exact combined counts.
    CountMinFast countmin = countmin1.merge(countmin2);
    int bad = 0;
    int i = 0;
    for (long key : realCounts.keys()) {
      i = i + 1;
      long realCount = realCounts.get(key);
      long upperBound = countmin.getEstimateUpperBound(key);
      long lowerBound = countmin.getEstimateLowerBound(key);
      if (upperBound < realCount || realCount < lowerBound) {
        bad = bad + 1;
        System.out.format("upperbound: %d, realCount: %d, lowerbound: %d \n",
            upperBound, realCount, lowerBound);
      }
    }
    // The sketch's bounds may fail with probability at most delta per key, so
    // at most delta * (number of keys) violations are tolerated.
    Assert.assertTrue(bad <= delta * i);
  }

  // Run only from the command line (deliberately not annotated with @Test):
  // measures amortized throughput of CountMinFast.update().
  public void stressTestUpdateTime() {
    int n = 2000000;
    int size = 100000;
    double eps = 1.0 / size;
    double delta = .1;
    int trials = 100;
    double total_updates_per_s = 0;
    for (int trial = 0; trial < trials; trial++) {
      CountMinFast countmin = new CountMinFast(eps, delta);
      int key = 0;
      double startTime = System.nanoTime();
      for (int i = 0; i < n; i++) {
        // long key = randomGeometricDist(prob);
        countmin.update(key++);
      }
      double endTime = System.nanoTime();
      // nanoTime delta / 1e6 gives milliseconds; dividing by n gives ms/update.
      double timePerUpdate = (endTime - startTime) / (1000000.0 * n);
      double updatesPerSecond = 1000.0 / timePerUpdate;
      total_updates_per_s += updatesPerSecond;
    }
    System.out.format("Amortized updates per second for update: %f\n",
        (total_updates_per_s / trials));
    Assert.assertTrue(total_updates_per_s / trials > 1000000);
  }

  // Run only from the command line (deliberately not annotated with @Test):
  // measures amortized throughput of CountMinFast.conservative_update().
  public void stressTestUpdateTimeCU() {
    int n = 2000000;
    int size = 100000;
    double eps = 1.0 / size;
    double delta = .1;
    int trials = 100;
    double total_updates_per_s = 0;
    for (int trial = 0; trial < trials; trial++) {
      CountMinFast countmin = new CountMinFast(eps, delta);
      int key = 0;
      double startTime = System.nanoTime();
      for (int i = 0; i < n; i++) {
        // long key = randomGeometricDist(prob);
        countmin.conservative_update(key++);
      }
      double endTime = System.nanoTime();
      double timePerUpdate = (endTime - startTime) / (1000000.0 * n);
      double updatesPerSecond = 1000.0 / timePerUpdate;
      total_updates_per_s += updatesPerSecond;
    }
    // Fixed typo in the printed label: "conservate_update" -> "conservative_update".
    System.out.format("Amortized updates per second for conservative_update: %f\n",
        (total_updates_per_s / trials));
    Assert.assertTrue(total_updates_per_s / trials > 1000000);
  }

  //@Test
  public void printlnTest() {
    println("PRINTING: " + this.getClass().getName());
  }

  /**
   * @param s value to print
   */
  static void println(String s) {
    // System.out.println(s); //disable here
  }
}
/*
 * Copyright 2008 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.common.css.compiler.ast;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.css.SourceCodeLocation;

import java.util.List;
import java.util.Map;

import javax.annotation.Nullable;

/**
 * A node representing a function.
 *
 * @author oana@google.com (Oana Florescu)
 */
public class CssFunctionNode extends CssValueNode implements ChunkAware {

  /**
   * Contains the list of recognized CSS functions.
   */
  public abstract static class Function {
    /** A map of function names to function objects. */
    private static final Map<String, Function> NAME_TO_FUNCTION_MAP;

    static {
      List<String> recognizedCssFunctions = ImmutableList.of(
          // CSS 2.1
          "attr",
          "counter",
          "rect",
          "rgb",
          "url",
          // Per-site user stylesheet rules
          // http://lists.w3.org/Archives/Public/www-style/2004Aug/0135
          "domain",
          "url-prefix",
          // IE8 and earlier:
          //   .fiftyPercentOpacity { filter: alpha(opacity=50); }
          "alpha",
          // CSS 3
          "cubic-bezier",
          "format", // used with @font-face
          "from",
          "hsl",
          "hsla",
          "local", // used with @font-face
          "perspective",
          "rgba",
          "rotate",
          "rotateX",
          "rotateY",
          "rotateZ",
          "rotate3d",
          "scale",
          "scaleX",
          "scaleY",
          "scaleZ",
          "scale3d",
          "steps",
          "to",
          "translate",
          "translateX",
          "translateY",
          "translateZ",
          "translate3d",
          // Filter
          // w3.org/TR/filter-effects-1/#FilterProperty
          // "url" already in list
          "blur",
          "brightness",
          "contrast",
          "drop-shadow",
          "grayscale",
          "hue-rotate",
          "invert",
          "opacity",
          "saturate",
          "sepia",
          // Image-Set
          "image-set",
          "-moz-image-set",
          "-ms-image-set",
          "-o-image-set",
          "-webkit-image-set",
          // These take the type of gradient (linear or radial) as a parameter.
          "-khtml-gradient",
          "-webkit-gradient", // Prefer -webkit-(linear|radial)-gradient
          // Linear gradients
          "linear-gradient",
          "-moz-linear-gradient",
          "-ms-linear-gradient",
          "-o-linear-gradient",
          "-webkit-linear-gradient",
          // Radial gradients
          "radial-gradient",
          "-moz-radial-gradient",
          "-ms-radial-gradient",
          "-o-radial-gradient",
          "-webkit-radial-gradient",
          // Calc
          "calc",
          "-webkit-calc",
          "-moz-calc",
          // CSS Shapes
          "inset",
          "circle",
          "ellipse",
          "polygon"
          );
      // Every recognized name maps to a Function whose isRecognized() is true;
      // anonymous subclassing keeps Function's constructor private.
      ImmutableMap.Builder<String, Function> builder = ImmutableMap.builder();
      for (String functionName : recognizedCssFunctions) {
        builder.put(
            functionName,
            new Function(functionName) {
              @Override
              public boolean isRecognized() {
                return true;
              }
            });
      }
      NAME_TO_FUNCTION_MAP = builder.build();
    }

    /**
     * Serves as a placeholder for custom functions.
     */
    public static final Function CUSTOM = new Function(null /* functionName */) {
      @Override
      public boolean isRecognized() {
        return false;
      }
    };

    /** The name of the function, as it appears in a CSS stylesheet. */
    private final String functionName;

    private Function(String functionName) {
      this.functionName = functionName;
    }

    /**
     * Returns the CSS {@link Function} with the specified name, or {@code null}
     * if the name is not in the list of recognized names. Multiple invocations
     * of this method with the same parameter will return the same object. For a
     * function that is not in the list of recognized names but should be
     * considered valid, use {@link Function#CUSTOM}.
     */
    public static Function byName(String name) {
      return NAME_TO_FUNCTION_MAP.get(name);
    }

    /**
     * Returns {@code true} when this function is in the list of
     * recognized names.
     */
    public abstract boolean isRecognized();

    /**
     * @return the name of the CSS function, such as "rgb" or "url"
     */
    public String getFunctionName() {
      return functionName;
    }

    /**
     * For debugging only.
     */
    @Override
    public String toString() {
      return getFunctionName();
    }
  }

  /** The function "called" by this node; CUSTOM (or null) if unrecognized. */
  private final Function function;
  /** Argument list; lazily left null by the proxy constructor. */
  private CssFunctionArgumentsNode arguments;
  /** Chunk assigned by chunk-aware passes (see {@link ChunkAware}). */
  private Object chunk;

  /**
   * Constructor of the class.
   *
   * TODO(oana): Deal with the situation that we have an unrecognized
   * function.
   *
   * @param function the function this node represents
   * @param sourceCodeLocation location of this node in the source
   */
  public CssFunctionNode(@Nullable Function function,
      @Nullable SourceCodeLocation sourceCodeLocation) {
    super(null, sourceCodeLocation);
    this.function = function;
    this.arguments = new CssFunctionArgumentsNode();
    becomeParentForNode(this.arguments);
  }

  /**
   * Copy constructor.
   *
   * @param function the node to deep-copy (function, arguments, chunk)
   */
  public CssFunctionNode(CssFunctionNode function) {
    super(function);
    this.function = function.getFunction();
    this.arguments = new CssFunctionArgumentsNode(function.getArguments());
    becomeParentForNode(this.arguments);
    this.chunk = function.getChunk();
  }

  /**
   * Constructor used by the proxy mechanism, avoids unnecessary arguments node
   * initialization.
   *
   * <p>NOTE(dgajda): The signature of this constructor only differs in argument
   * order from the main constructor of this class.
   *
   * @param function implementation of the function which is "called" by this
   *     node
   * @param sourceCodeLocation location of this node
   */
  protected CssFunctionNode(@Nullable SourceCodeLocation sourceCodeLocation,
      @Nullable Function function) {
    super(null, sourceCodeLocation);
    this.function = function;
  }

  @Override
  public CssFunctionNode deepCopy() {
    return new CssFunctionNode(this);
  }

  public Function getFunction() {
    return function;
  }

  public String getFunctionName() {
    return function.toString();
  }

  public CssFunctionArgumentsNode getArguments() {
    return arguments;
  }

  public void setArguments(CssFunctionArgumentsNode arguments) {
    removeAsParentOfNode(this.arguments);
    this.arguments = arguments;
    becomeParentForNode(this.arguments);
  }

  @Override
  public String toString() {
    // StringBuilder instead of the legacy synchronized StringBuffer: this is
    // purely local, single-threaded string assembly. Output is unchanged.
    StringBuilder output = new StringBuilder();
    if (function.getFunctionName() != null) {
      output.append(function.getFunctionName());
    }
    output.append("(");
    for (CssNode node : getArguments().childIterable()) {
      output.append(node.toString());
    }
    output.append(")");
    return output.toString();
  }

  @Override
  public Object getChunk() {
    return chunk;
  }

  @Override
  public void setChunk(Object chunk) {
    this.chunk = chunk;
  }

  /**
   * Print the node instead of null when this node is a parameter.
   */
  @Override
  public String getValue() {
    return toString();
  }
}
package galu.transform;

import galu.matrix.Matrix2;
import galu.matrix.Matrix3;
import galu.matrix.Matrix4;
import galu.vector.Vector2;
import galu.vector.Vector3;

/**
 * Static factory methods for 2-D ({@code Matrix2}), 3-D ({@code Matrix3}) and
 * homogeneous 4x4 ({@code Matrix4}) transformation matrices.
 *
 * @see <a href="http://en.wikipedia.org/wiki/Transformation_matrix">Transformation matrix on Wikipedia</a>
 * @see <a href="http://www.cs.princeton.edu/~gewang/projects/darth/stuff/quat_faq.html">The Matrix and Quaternions FAQ</a>
 * @see <a href="http://www.opengl-tutorial.org/beginners-tutorials/tutorial-3-matrices/">OpenGL Tutorial 3: Matrices</a>
 */
public final class Transformations {
    /*
     * 2-D transformations based on:
     * http://en.wikipedia.org/wiki/Transformation_matrix
     */

    // Utility class: not instantiable.
    private Transformations() {}

    /**
     * Rotate by an angle clockwise about the origin. This assumes that +x is right and +y is up.
     *
     * <p>NOTE(review): with +y up, {@code [cos -sin; sin cos]} is conventionally the
     * counter-clockwise rotation — confirm the "clockwise" wording against Matrix2's
     * row/column-major convention.
     *
     * @param radians the angle to rotate by
     * @return a transformation matrix
     */
    public static Matrix2 rotate(float radians) {
        float cos = (float) Math.cos(radians);
        float sin = (float) Math.sin(radians);
        return new Matrix2(cos, -sin, sin, cos);
    }

    /**
     * Scale by a factor of {@code scale.x} along the x-axis and {@code scale.y} along the y-axis.
     * @param scale the scaling factors
     * @return a transformation matrix
     */
    public static Matrix2 scale(Vector2 scale) {
        return new Matrix2(scale.x, 0, 0, scale.y);
    }

    /**
     * Reflect along a line where {@code direction} is along that line.
     * The division by the squared length means {@code direction} need not be normalized.
     * @param direction a vector in the direction of the line to reflect across
     * @return a transformation matrix
     */
    public static Matrix2 reflect(Vector2 direction) {
        float x = direction.x;
        float y = direction.y;
        return new Matrix2(x*x - y*y, 2*x*y, 2*x*y, y*y-x*x).multiply(1f / direction.lengthSquared());
    }

    /**
     * Combine a series of transformation matrices.
     * The order in which the matrices are given is the order in which they will be applied
     * @param matrices the matrices to compose
     * @return the combined transformation matrix
     */
    public static Matrix2 combine(Matrix2... matrices) {
        Matrix2 result = Matrix2.IDENTITY;
        // Combine them backwards: to apply a, then b, we want b of a = b * a
        for(int i = matrices.length-1; i >= 0; i--) {
            result = result.multiply(matrices[i]);
        }
        return result;
    }

    /**
     * Project orthogonally onto a line through the origin.
     * (The matrix built here is the orthogonal-projection matrix onto the span of
     * {@code direction}; the earlier "reflect" wording was a copy-paste error.)
     * @param direction a vector in the direction of the line to project onto
     * @return a transformation matrix
     */
    public static Matrix2 projectOrthogonal(Vector2 direction) {
        float x = direction.x;
        float y = direction.y;
        return new Matrix2(x*x, x*y, x*y, y*y).multiply(1f/direction.lengthSquared());
    }

    /**
     * Rotate about an arbitrary axis by {@code radians}.
     *
     * <p>NOTE(review): unlike {@link #rotate(float, Vector3)}, this does NOT normalize
     * {@code axis} — the formula assumes a unit axis; confirm callers pass one.
     *
     * @param radians the angle to rotate by
     * @param axis the axis to rotate about
     * @return a transformation matrix
     */
    public static Matrix3 rotation3(float radians, Vector3 axis) {
        float cos = (float) Math.cos(radians);
        float sin = (float) Math.sin(radians);
        float l = axis.x;
        float m = axis.y;
        float n = axis.z;
        return new Matrix3(
            l*l*(1f-cos) + cos,   m*l*(1f-cos) - n*sin, n*l*(1f-cos) + m*sin,
            l*m*(1f-cos) + n*sin, m*m*(1f-cos) + cos,   n*m*(1f-cos) - l*sin,
            l*n*(1f-cos) - m*sin, m*n*(1f-cos) + l*sin, n*n*(1f-cos) + cos
        );
    }

    /**
     * Scale by {@code scale.x}, {@code scale.y}, {@code scale.z} along the three axes.
     * @param scale the scaling factors
     * @return a transformation matrix
     */
    public static Matrix3 scale3(Vector3 scale) {
        return new Matrix3(
            scale.x, 0, 0,
            0, scale.y, 0,
            0, 0, scale.z
        );
    }

    /**
     * Combine a series of transformation matrices.
     * The order in which the matrices are given is the order in which they will be applied
     * @param matrices the matrices to compose
     * @return the combined transformation matrix
     */
    public static Matrix3 combine(Matrix3... matrices) {
        Matrix3 result = Matrix3.IDENTITY;
        // Combine them backwards: to apply a, then b, we want b of a = b * a
        for(int i = matrices.length-1; i >= 0; i--) {
            result = result.multiply(matrices[i]);
        }
        return result;
    }

    /**
     * Scale in three directions.
     * Specifically, {@code scale.x} along the x-axis, {@code scale.y} along the y-axis, {@code scale.z} along the z-axis.
     * @param scale the scaling factors
     * @return a transformation matrix
     */
    public static Matrix4 scale(Vector3 scale) {
        return new Matrix4(
            scale.x, 0, 0, 0,
            0, scale.y, 0, 0,
            0, 0, scale.z, 0,
            0, 0, 0, 1
        );
    }

    //TODO: check that these aren't defined in an incompatible column-major way

    /**
     * Rotate about the x-axis by {@code angle} radians.
     * @param angle an angle in radians
     * @return a transformation matrix
     */
    public static Matrix4 rotateX(float angle) {
        float cos = (float) Math.cos(angle);
        float sin = (float) Math.sin(angle);
        return new Matrix4(
            1, 0, 0, 0,
            0, cos, -sin, 0,
            0, sin, cos, 0,
            0, 0, 0, 1
        );
    }

    /**
     * Rotate about the y-axis by {@code angle} radians.
     * @param angle an angle in radians
     * @return a transformation matrix
     */
    public static Matrix4 rotateY(float angle) {
        float cos = (float) Math.cos(angle);
        float sin = (float) Math.sin(angle);
        return new Matrix4(
            cos, 0, sin, 0,
            0, 1, 0, 0,
            -sin, 0, cos, 0,
            0, 0, 0, 1
        );
    }

    /**
     * Rotate about the z-axis by {@code angle} radians.
     * @param angle an angle in radians
     * @return a transformation matrix
     */
    public static Matrix4 rotateZ(float angle) {
        float cos = (float) Math.cos(angle);
        float sin = (float) Math.sin(angle);
        return new Matrix4(
            cos, -sin, 0, 0,
            sin, cos, 0, 0,
            0, 0, 1, 0,
            0, 0, 0, 1
        );
    }

    /**
     * Rotate by Euler angles (along x-, y-, and z-axes).
     * The composed matrix follows the Matrix and Quaternions FAQ formulation
     * (see class javadoc); presumably X-then-Y-then-Z application order — TODO confirm.
     * @param angles the angles to rotate by (in radians)
     * @return a transformation matrix
     */
    public static Matrix4 rotate(Vector3 angles) {
        float a = (float) Math.cos(angles.x);
        float b = (float) Math.sin(angles.x);
        float c = (float) Math.cos(angles.y);
        float d = (float) Math.sin(angles.y);
        float e = (float) Math.cos(angles.z);
        float f = (float) Math.sin(angles.z);
        float ad = a * d;
        float bd = b * d;
        return new Matrix4(
            c*e, -c*f, d, 0,
            bd*e+a*f, -bd*f+a*e, -b*c, 0,
            -ad*e+b*f, ad*f+b*e, a*c, 0,
            0, 0, 0, 1
        );
    }

    /**
     * Rotate by an angle through an axis.
     * The axis is normalized here, so callers may pass a vector of any length.
     * @param angle the angle to rotate by (radians)
     * @param axis the axis to rotate through
     * @return a transformation matrix
     */
    public static Matrix4 rotate(float angle, Vector3 axis) {
        float rcos = (float) Math.cos(angle);
        float rsin = (float) Math.sin(angle);
        Vector3 normalized = axis.normalize(); // GLM normalizes the axis (maybe so there isn't scaling?)
        float u = normalized.x;
        float v = normalized.y;
        float w = normalized.z;
        float m00 = rcos + u * u * (1f-rcos);
        float m10 = w * rsin + v * u * (1f-rcos);
        float m20 = -v * rsin + w * u * (1f-rcos);
        float m01 = -w * rsin + u * v * (1f-rcos);
        float m11 = rcos + v * v * (1f-rcos);
        float m21 = u * rsin + w * v * (1f-rcos);
        float m02 = v * rsin + u * w * (1f-rcos);
        float m12 = -u * rsin + v * w * (1f-rcos);
        float m22 = rcos + w * w * (1f-rcos);
        return new Matrix4(
            m00, m01, m02, 0,
            m10, m11, m12, 0,
            m20, m21, m22, 0,
            0, 0, 0, 1
        );
    }

    /**
     * Translate by the values in {@code translation}
     * @param translation the amounts to translate by along each axis
     * @return a transformation matrix
     */
    public static Matrix4 translate(Vector3 translation) {
        return new Matrix4(
            1, 0, 0, translation.x,
            0, 1, 0, translation.y,
            0, 0, 1, translation.z,
            0, 0, 0, 1
        );
    }

    /**
     * Shear a model. Parameter {a}{b} shears a by b.
     * @param xy shear X by Y
     * @param xz shear X by Z
     * @param yx shear Y by X
     * @param yz shear Y by Z
     * @param zx shear Z by X
     * @param zy shear Z by Y
     * @return a transformation matrix
     */
    public static Matrix4 shear(float xy, float xz, float yx, float yz, float zx, float zy) {
        return new Matrix4(
            1, yx, zx, 0,
            xy, 1, zy, 0,
            xz, yz, 1, 0,
            0, 0, 0, 1
        );
    }

    /**
     * Combine a series of transformation matrices.
     * The order in which the matrices are given is the order in which they will be applied
     * @param matrices the matrices to compose
     * @return the combined transformation matrix
     */
    public static Matrix4 combine(Matrix4... matrices) {
        Matrix4 result = Matrix4.IDENTITY;
        // Combine them backwards: to apply a, then b, we want b of a = b * a
        for(int i = matrices.length-1; i >= 0; i--) {
            result = result.multiply(matrices[i]);
        }
        return result;
    }

    /**
     * Convert a 3x3 transformation matrix to a 4x4 transformation matrix that can be used with homogeneous coordinates.
     * The conversion is as follows:
     * <pre>
     * | m00 m01 m02 0 |
     * | m10 m11 m12 0 |
     * | m20 m21 m22 0 |
     * |  0   0   0  1 |
     * </pre>
     * @param mat a 3x3 transformation matrix
     * @return the corresponding 4x4 transformation matrix
     */
    public static Matrix4 toHomogeneous(Matrix3 mat) {
        return new Matrix4(
            mat.m00, mat.m01, mat.m02, 0,
            mat.m10, mat.m11, mat.m12, 0,
            mat.m20, mat.m21, mat.m22, 0,
            0, 0, 0, 1
        );
    }

    /**
     * Convert a 4x4 matrix for transforming homogeneous coordinates to the corresponding 3x3 matrix.
     * The last row and last column are removed.
     * @param mat a 4x4 transformation matrix
     * @return the corresponding 3x3 transformation matrix
     */
    public static Matrix3 fromHomogeneous(Matrix4 mat) {
        return new Matrix3(
            mat.m00, mat.m01, mat.m02,
            mat.m10, mat.m11, mat.m12,
            mat.m20, mat.m21, mat.m22
        );
    }
}
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/vision/v1/image_annotator.proto package com.google.cloud.vision.v1; /** * * * <pre> * Response to an async batch file annotation request. * </pre> * * Protobuf type {@code google.cloud.vision.v1.AsyncBatchAnnotateFilesResponse} */ public final class AsyncBatchAnnotateFilesResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.vision.v1.AsyncBatchAnnotateFilesResponse) AsyncBatchAnnotateFilesResponseOrBuilder { private static final long serialVersionUID = 0L; // Use AsyncBatchAnnotateFilesResponse.newBuilder() to construct. private AsyncBatchAnnotateFilesResponse( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private AsyncBatchAnnotateFilesResponse() { responses_ = java.util.Collections.emptyList(); } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private AsyncBatchAnnotateFilesResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { responses_ = new java.util.ArrayList<com.google.cloud.vision.v1.AsyncAnnotateFileResponse>(); mutable_bitField0_ |= 0x00000001; } responses_.add( input.readMessage( com.google.cloud.vision.v1.AsyncAnnotateFileResponse.parser(), extensionRegistry)); break; } default: { if (!parseUnknownFieldProto3(input, unknownFields, extensionRegistry, tag)) { done = true; } break; 
} } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { responses_ = java.util.Collections.unmodifiableList(responses_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.vision.v1.ImageAnnotatorProto .internal_static_google_cloud_vision_v1_AsyncBatchAnnotateFilesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.vision.v1.ImageAnnotatorProto .internal_static_google_cloud_vision_v1_AsyncBatchAnnotateFilesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.vision.v1.AsyncBatchAnnotateFilesResponse.class, com.google.cloud.vision.v1.AsyncBatchAnnotateFilesResponse.Builder.class); } public static final int RESPONSES_FIELD_NUMBER = 1; private java.util.List<com.google.cloud.vision.v1.AsyncAnnotateFileResponse> responses_; /** * * * <pre> * The list of file annotation responses, one for each request in * AsyncBatchAnnotateFilesRequest. * </pre> * * <code>repeated .google.cloud.vision.v1.AsyncAnnotateFileResponse responses = 1;</code> */ public java.util.List<com.google.cloud.vision.v1.AsyncAnnotateFileResponse> getResponsesList() { return responses_; } /** * * * <pre> * The list of file annotation responses, one for each request in * AsyncBatchAnnotateFilesRequest. * </pre> * * <code>repeated .google.cloud.vision.v1.AsyncAnnotateFileResponse responses = 1;</code> */ public java.util.List<? 
extends com.google.cloud.vision.v1.AsyncAnnotateFileResponseOrBuilder> getResponsesOrBuilderList() { return responses_; } /** * * * <pre> * The list of file annotation responses, one for each request in * AsyncBatchAnnotateFilesRequest. * </pre> * * <code>repeated .google.cloud.vision.v1.AsyncAnnotateFileResponse responses = 1;</code> */ public int getResponsesCount() { return responses_.size(); } /** * * * <pre> * The list of file annotation responses, one for each request in * AsyncBatchAnnotateFilesRequest. * </pre> * * <code>repeated .google.cloud.vision.v1.AsyncAnnotateFileResponse responses = 1;</code> */ public com.google.cloud.vision.v1.AsyncAnnotateFileResponse getResponses(int index) { return responses_.get(index); } /** * * * <pre> * The list of file annotation responses, one for each request in * AsyncBatchAnnotateFilesRequest. * </pre> * * <code>repeated .google.cloud.vision.v1.AsyncAnnotateFileResponse responses = 1;</code> */ public com.google.cloud.vision.v1.AsyncAnnotateFileResponseOrBuilder getResponsesOrBuilder( int index) { return responses_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < responses_.size(); i++) { output.writeMessage(1, responses_.get(i)); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < responses_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, responses_.get(i)); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if 
(obj == this) { return true; } if (!(obj instanceof com.google.cloud.vision.v1.AsyncBatchAnnotateFilesResponse)) { return super.equals(obj); } com.google.cloud.vision.v1.AsyncBatchAnnotateFilesResponse other = (com.google.cloud.vision.v1.AsyncBatchAnnotateFilesResponse) obj; boolean result = true; result = result && getResponsesList().equals(other.getResponsesList()); result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getResponsesCount() > 0) { hash = (37 * hash) + RESPONSES_FIELD_NUMBER; hash = (53 * hash) + getResponsesList().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.vision.v1.AsyncBatchAnnotateFilesResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.vision.v1.AsyncBatchAnnotateFilesResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.vision.v1.AsyncBatchAnnotateFilesResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.vision.v1.AsyncBatchAnnotateFilesResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.vision.v1.AsyncBatchAnnotateFilesResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return 
PARSER.parseFrom(data); } public static com.google.cloud.vision.v1.AsyncBatchAnnotateFilesResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.vision.v1.AsyncBatchAnnotateFilesResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.vision.v1.AsyncBatchAnnotateFilesResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.vision.v1.AsyncBatchAnnotateFilesResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.vision.v1.AsyncBatchAnnotateFilesResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.vision.v1.AsyncBatchAnnotateFilesResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.vision.v1.AsyncBatchAnnotateFilesResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder 
newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.vision.v1.AsyncBatchAnnotateFilesResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response to an async batch file annotation request. * </pre> * * Protobuf type {@code google.cloud.vision.v1.AsyncBatchAnnotateFilesResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.vision.v1.AsyncBatchAnnotateFilesResponse) com.google.cloud.vision.v1.AsyncBatchAnnotateFilesResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.vision.v1.ImageAnnotatorProto .internal_static_google_cloud_vision_v1_AsyncBatchAnnotateFilesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.vision.v1.ImageAnnotatorProto .internal_static_google_cloud_vision_v1_AsyncBatchAnnotateFilesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.vision.v1.AsyncBatchAnnotateFilesResponse.class, com.google.cloud.vision.v1.AsyncBatchAnnotateFilesResponse.Builder.class); } // Construct using com.google.cloud.vision.v1.AsyncBatchAnnotateFilesResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void 
maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getResponsesFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (responsesBuilder_ == null) { responses_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); } else { responsesBuilder_.clear(); } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.vision.v1.ImageAnnotatorProto .internal_static_google_cloud_vision_v1_AsyncBatchAnnotateFilesResponse_descriptor; } @java.lang.Override public com.google.cloud.vision.v1.AsyncBatchAnnotateFilesResponse getDefaultInstanceForType() { return com.google.cloud.vision.v1.AsyncBatchAnnotateFilesResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.vision.v1.AsyncBatchAnnotateFilesResponse build() { com.google.cloud.vision.v1.AsyncBatchAnnotateFilesResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.vision.v1.AsyncBatchAnnotateFilesResponse buildPartial() { com.google.cloud.vision.v1.AsyncBatchAnnotateFilesResponse result = new com.google.cloud.vision.v1.AsyncBatchAnnotateFilesResponse(this); int from_bitField0_ = bitField0_; if (responsesBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001)) { responses_ = java.util.Collections.unmodifiableList(responses_); bitField0_ = (bitField0_ & ~0x00000001); } result.responses_ = responses_; } else { result.responses_ = responsesBuilder_.build(); } onBuilt(); return result; } @java.lang.Override public Builder clone() { return (Builder) super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return (Builder) super.setField(field, value); } @java.lang.Override public Builder 
clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return (Builder) super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return (Builder) super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.vision.v1.AsyncBatchAnnotateFilesResponse) { return mergeFrom((com.google.cloud.vision.v1.AsyncBatchAnnotateFilesResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.vision.v1.AsyncBatchAnnotateFilesResponse other) { if (other == com.google.cloud.vision.v1.AsyncBatchAnnotateFilesResponse.getDefaultInstance()) return this; if (responsesBuilder_ == null) { if (!other.responses_.isEmpty()) { if (responses_.isEmpty()) { responses_ = other.responses_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureResponsesIsMutable(); responses_.addAll(other.responses_); } onChanged(); } } else { if (!other.responses_.isEmpty()) { if (responsesBuilder_.isEmpty()) { responsesBuilder_.dispose(); responsesBuilder_ = null; responses_ = other.responses_; bitField0_ = (bitField0_ & ~0x00000001); responsesBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getResponsesFieldBuilder() : null; } else { responsesBuilder_.addAllMessages(other.responses_); } } } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.cloud.vision.v1.AsyncBatchAnnotateFilesResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.google.cloud.vision.v1.AsyncBatchAnnotateFilesResponse) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.util.List<com.google.cloud.vision.v1.AsyncAnnotateFileResponse> responses_ = java.util.Collections.emptyList(); private void ensureResponsesIsMutable() { if (!((bitField0_ & 0x00000001) == 0x00000001)) { responses_ = new java.util.ArrayList<com.google.cloud.vision.v1.AsyncAnnotateFileResponse>( responses_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.vision.v1.AsyncAnnotateFileResponse, com.google.cloud.vision.v1.AsyncAnnotateFileResponse.Builder, com.google.cloud.vision.v1.AsyncAnnotateFileResponseOrBuilder> responsesBuilder_; /** * * * <pre> * The list of file annotation responses, one for each request in * AsyncBatchAnnotateFilesRequest. 
 * </pre>
 *
 * <code>repeated .google.cloud.vision.v1.AsyncAnnotateFileResponse responses = 1;</code>
 */
public java.util.List<com.google.cloud.vision.v1.AsyncAnnotateFileResponse> getResponsesList() {
  if (responsesBuilder_ == null) {
    return java.util.Collections.unmodifiableList(responses_);
  } else {
    return responsesBuilder_.getMessageList();
  }
}
/** Number of elements in {@code responses}. */
public int getResponsesCount() {
  if (responsesBuilder_ == null) {
    return responses_.size();
  } else {
    return responsesBuilder_.getCount();
  }
}
/** Element of {@code responses} at {@code index}. */
public com.google.cloud.vision.v1.AsyncAnnotateFileResponse getResponses(int index) {
  if (responsesBuilder_ == null) {
    return responses_.get(index);
  } else {
    return responsesBuilder_.getMessage(index);
  }
}
/** Replaces the {@code responses} element at {@code index}; rejects null. */
public Builder setResponses(
    int index, com.google.cloud.vision.v1.AsyncAnnotateFileResponse value) {
  if (responsesBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureResponsesIsMutable();
    responses_.set(index, value);
    onChanged();
  } else {
    responsesBuilder_.setMessage(index, value);
  }
  return this;
}
/** Replaces the {@code responses} element at {@code index} from a builder. */
public Builder setResponses(
    int index, com.google.cloud.vision.v1.AsyncAnnotateFileResponse.Builder builderForValue) {
  if (responsesBuilder_ == null) {
    ensureResponsesIsMutable();
    responses_.set(index, builderForValue.build());
    onChanged();
  } else {
    responsesBuilder_.setMessage(index, builderForValue.build());
  }
  return this;
}
/** Appends a {@code responses} element; rejects null. */
public Builder addResponses(com.google.cloud.vision.v1.AsyncAnnotateFileResponse value) {
  if (responsesBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureResponsesIsMutable();
    responses_.add(value);
    onChanged();
  } else {
    responsesBuilder_.addMessage(value);
  }
  return this;
}
/** Inserts a {@code responses} element at {@code index}; rejects null. */
public Builder addResponses(
    int index, com.google.cloud.vision.v1.AsyncAnnotateFileResponse value) {
  if (responsesBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureResponsesIsMutable();
    responses_.add(index, value);
    onChanged();
  } else {
    responsesBuilder_.addMessage(index, value);
  }
  return this;
}
/** Appends a {@code responses} element from a builder. */
public Builder addResponses(
    com.google.cloud.vision.v1.AsyncAnnotateFileResponse.Builder builderForValue) {
  if (responsesBuilder_ == null) {
    ensureResponsesIsMutable();
    responses_.add(builderForValue.build());
    onChanged();
  } else {
    responsesBuilder_.addMessage(builderForValue.build());
  }
  return this;
}
/** Inserts a {@code responses} element at {@code index} from a builder. */
public Builder addResponses(
    int index, com.google.cloud.vision.v1.AsyncAnnotateFileResponse.Builder builderForValue) {
  if (responsesBuilder_ == null) {
    ensureResponsesIsMutable();
    responses_.add(index, builderForValue.build());
    onChanged();
  } else {
    responsesBuilder_.addMessage(index, builderForValue.build());
  }
  return this;
}
/** Appends all of {@code values} to {@code responses}. */
public Builder addAllResponses(
    java.lang.Iterable<? extends com.google.cloud.vision.v1.AsyncAnnotateFileResponse> values) {
  if (responsesBuilder_ == null) {
    ensureResponsesIsMutable();
    com.google.protobuf.AbstractMessageLite.Builder.addAll(values, responses_);
    onChanged();
  } else {
    responsesBuilder_.addAllMessages(values);
  }
  return this;
}
/**
 *
 *
 * <pre>
 * The list of file annotation responses, one for each request in
 * AsyncBatchAnnotateFilesRequest.
 * </pre>
 *
 * <code>repeated .google.cloud.vision.v1.AsyncAnnotateFileResponse responses = 1;</code>
 */
public Builder clearResponses() {
  if (responsesBuilder_ == null) {
    responses_ = java.util.Collections.emptyList();
    bitField0_ = (bitField0_ & ~0x00000001);
    onChanged();
  } else {
    responsesBuilder_.clear();
  }
  return this;
}
/** Removes the {@code responses} element at {@code index}. */
public Builder removeResponses(int index) {
  if (responsesBuilder_ == null) {
    ensureResponsesIsMutable();
    responses_.remove(index);
    onChanged();
  } else {
    responsesBuilder_.remove(index);
  }
  return this;
}
/** Mutable builder view of the {@code responses} element at {@code index}. */
public com.google.cloud.vision.v1.AsyncAnnotateFileResponse.Builder getResponsesBuilder(
    int index) {
  return getResponsesFieldBuilder().getBuilder(index);
}
/** Read-only view of the {@code responses} element at {@code index}. */
public com.google.cloud.vision.v1.AsyncAnnotateFileResponseOrBuilder getResponsesOrBuilder(
    int index) {
  if (responsesBuilder_ == null) {
    return responses_.get(index);
  } else {
    return responsesBuilder_.getMessageOrBuilder(index);
  }
}
/** Read-only views of all {@code responses} elements. */
public java.util.List<? extends com.google.cloud.vision.v1.AsyncAnnotateFileResponseOrBuilder>
    getResponsesOrBuilderList() {
  if (responsesBuilder_ != null) {
    return responsesBuilder_.getMessageOrBuilderList();
  } else {
    return java.util.Collections.unmodifiableList(responses_);
  }
}
/** Appends a default-valued {@code responses} element and returns its builder. */
public com.google.cloud.vision.v1.AsyncAnnotateFileResponse.Builder addResponsesBuilder() {
  return getResponsesFieldBuilder()
      .addBuilder(com.google.cloud.vision.v1.AsyncAnnotateFileResponse.getDefaultInstance());
}
/** Inserts a default-valued {@code responses} element at {@code index} and returns its builder. */
public com.google.cloud.vision.v1.AsyncAnnotateFileResponse.Builder addResponsesBuilder(
    int index) {
  return getResponsesFieldBuilder()
      .addBuilder(
          index, com.google.cloud.vision.v1.AsyncAnnotateFileResponse.getDefaultInstance());
}
/**
 *
 *
 * <pre>
 * The list of file annotation responses, one for each request in
 * AsyncBatchAnnotateFilesRequest.
 * </pre>
 *
 * <code>repeated .google.cloud.vision.v1.AsyncAnnotateFileResponse responses = 1;</code>
 */
public java.util.List<com.google.cloud.vision.v1.AsyncAnnotateFileResponse.Builder>
    getResponsesBuilderList() {
  return getResponsesFieldBuilder().getBuilderList();
}
// Lazily switches the field into builder mode; afterwards responses_ is owned by the
// RepeatedFieldBuilderV3 and the local reference is nulled out.
private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.cloud.vision.v1.AsyncAnnotateFileResponse,
        com.google.cloud.vision.v1.AsyncAnnotateFileResponse.Builder,
        com.google.cloud.vision.v1.AsyncAnnotateFileResponseOrBuilder>
    getResponsesFieldBuilder() {
  if (responsesBuilder_ == null) {
    responsesBuilder_ =
        new com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.vision.v1.AsyncAnnotateFileResponse,
            com.google.cloud.vision.v1.AsyncAnnotateFileResponse.Builder,
            com.google.cloud.vision.v1.AsyncAnnotateFileResponseOrBuilder>(
            responses_,
            ((bitField0_ & 0x00000001) == 0x00000001),
            getParentForChildren(),
            isClean());
    responses_ = null;
  }
  return responsesBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFieldsProto3(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}

// @@protoc_insertion_point(builder_scope:google.cloud.vision.v1.AsyncBatchAnnotateFilesResponse)
}

// @@protoc_insertion_point(class_scope:google.cloud.vision.v1.AsyncBatchAnnotateFilesResponse)
private static final com.google.cloud.vision.v1.AsyncBatchAnnotateFilesResponse DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.vision.v1.AsyncBatchAnnotateFilesResponse();
}

public static com.google.cloud.vision.v1.AsyncBatchAnnotateFilesResponse getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

// Stateless parser shared by all instances; delegates to the parsing constructor.
private static final com.google.protobuf.Parser<AsyncBatchAnnotateFilesResponse> PARSER =
    new com.google.protobuf.AbstractParser<AsyncBatchAnnotateFilesResponse>() {
      @java.lang.Override
      public AsyncBatchAnnotateFilesResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new AsyncBatchAnnotateFilesResponse(input, extensionRegistry);
      }
    };

public static com.google.protobuf.Parser<AsyncBatchAnnotateFilesResponse> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<AsyncBatchAnnotateFilesResponse> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.vision.v1.AsyncBatchAnnotateFilesResponse getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
/* * Copyright 2011 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kie.pmml.pmml_4_2; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.io.UnsupportedEncodingException; import java.io.Writer; import java.math.BigInteger; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.stream.Collectors; import javax.xml.XMLConstants; import javax.xml.bind.JAXBContext; import javax.xml.bind.JAXBException; import javax.xml.bind.Marshaller; import javax.xml.bind.Unmarshaller; import javax.xml.validation.Schema; import javax.xml.validation.SchemaFactory; import org.kie.dmg.pmml.pmml_4_2.descr.ClusteringModel; import org.kie.dmg.pmml.pmml_4_2.descr.DataDictionary; import org.kie.dmg.pmml.pmml_4_2.descr.DataField; import org.kie.dmg.pmml.pmml_4_2.descr.NaiveBayesModel; import org.kie.dmg.pmml.pmml_4_2.descr.NeuralNetwork; import org.kie.dmg.pmml.pmml_4_2.descr.PMML; import org.kie.dmg.pmml.pmml_4_2.descr.RegressionModel; import org.kie.dmg.pmml.pmml_4_2.descr.Scorecard; import org.kie.dmg.pmml.pmml_4_2.descr.SupportVectorMachineModel; import org.kie.dmg.pmml.pmml_4_2.descr.TreeModel; import org.drools.compiler.compiler.PMMLCompiler; import org.drools.compiler.compiler.PMMLResource; import 
org.drools.core.io.impl.ByteArrayResource;
import org.drools.core.io.impl.ClassPathResource;
import org.drools.core.util.IoUtils;
import org.kie.api.KieBase;
import org.kie.api.KieServices;
import org.kie.api.builder.model.KieBaseModel;
import org.kie.api.builder.model.KieModuleModel;
import org.kie.api.builder.model.KieSessionModel;
import org.kie.api.conf.EventProcessingOption;
import org.kie.api.io.Resource;
import org.kie.api.io.ResourceType;
import org.kie.api.runtime.KieContainer;
import org.kie.api.runtime.KieSession;
import org.kie.internal.builder.KnowledgeBuilderResult;
import org.kie.internal.io.ResourceFactory;
import org.kie.pmml.pmml_4_2.model.Miningmodel;
import org.kie.pmml.pmml_4_2.model.PMML4ModelType;
import org.kie.pmml.pmml_4_2.model.PMML4UnitImpl;
import org.kie.pmml.pmml_4_2.model.PMMLMiningField;
import org.kie.pmml.pmml_4_2.model.PMMLOutputField;
import org.kie.pmml.pmml_4_2.model.Treemodel;
import org.kie.pmml.pmml_4_2.model.mining.MiningSegment;
import org.kie.pmml.pmml_4_2.model.mining.MiningSegmentation;
import org.mvel2.templates.SimpleTemplateRegistry;
import org.mvel2.templates.TemplateCompiler;
import org.mvel2.templates.TemplateRegistry;
import org.xml.sax.SAXException;

/**
 * Compiles PMML 4.2 documents into Drools resources: parses the XML descriptor via
 * JAXB, then generates DRL through MVEL templates driven by visitor rule sessions.
 */
public class PMML4Compiler implements PMMLCompiler {

    // JAXB package holding the generated PMML 4.2 descriptor classes.
    public static final String PMML_NAMESPACE = "org.kie.dmg.pmml.pmml_4_2";
    public static final String PMML_DROOLS = "org.kie.pmml.pmml_4_2";
    public static final String PMML = PMML_NAMESPACE + ".descr";
    public static final String SCHEMA_PATH = "xsd/org/dmg/pmml/pmml_4_2/pmml-4-2.xsd";
    public static final String BASE_PACK = PMML_DROOLS.replace('.','/');

    // NOTE(review): these static "loaded" flags (and the shared template registry below)
    // are mutated without synchronization; concurrent compilation could race — confirm
    // single-threaded use is assumed.
    protected static boolean globalLoaded = false;
    // Templates always required: header, imports, data definitions, validation.
    protected static final String[] GLOBAL_TEMPLATES = new String[] {
            "global/pmml_header.drlt",
            "global/pmml_import.drlt",
            "global/rule_meta.drlt",
            "global/modelMark.drlt",
            "global/dataDefinition/common.drlt",
            "global/dataDefinition/rootDataField.drlt",
            "global/dataDefinition/inputBinding.drlt",
            "global/dataDefinition/outputBinding.drlt",
            "global/dataDefinition/ioTypeDeclare.drlt",
            "global/dataDefinition/updateIOField.drlt",
            "global/dataDefinition/inputFromEP.drlt",
            "global/dataDefinition/inputBean.drlt",
            "global/dataDefinition/outputBean.drlt",
            "global/manipulation/confirm.drlt",
            "global/manipulation/mapMissingValues.drlt",
            "global/manipulation/propagateMissingValues.drlt",
            "global/validation/intervalsOnDomainRestriction.drlt",
            "global/validation/valuesNoRestriction.drlt",
            "global/validation/valuesOnDomainRestriction.drlt",
            "global/validation/valuesOnDomainRestrictionMissing.drlt",
            "global/validation/valuesOnDomainRestrictionInvalid.drlt",
    };

    protected static boolean transformationLoaded = false;
    // Field-transformation templates (normalization, discretization, mapping, functions).
    protected static final String[] TRANSFORMATION_TEMPLATES = new String[] {
            "transformations/normContinuous/boundedLowerOutliers.drlt",
            "transformations/normContinuous/boundedUpperOutliers.drlt",
            "transformations/normContinuous/normContOutliersAsMissing.drlt",
            "transformations/normContinuous/linearTractNormalization.drlt",
            "transformations/normContinuous/lowerExtrapolateLinearTractNormalization.drlt",
            "transformations/normContinuous/upperExtrapolateLinearTractNormalization.drlt",
            "transformations/aggregate/aggregate.drlt",
            "transformations/aggregate/collect.drlt",
            "transformations/simple/constantField.drlt",
            "transformations/simple/aliasedField.drlt",
            "transformations/normDiscrete/indicatorFieldYes.drlt",
            "transformations/normDiscrete/indicatorFieldNo.drlt",
            "transformations/normDiscrete/predicateField.drlt",
            "transformations/discretize/intervalBinning.drlt",
            "transformations/discretize/outOfBinningDefault.drlt",
            "transformations/discretize/outOfBinningMissing.drlt",
            "transformations/mapping/mapping.drlt",
            "transformations/functions/apply.drlt",
            "transformations/functions/function.drlt"
    };

    protected static boolean miningLoaded = false;
    // Mining-schema and target handling, shared by all model types.
    protected static final String[] MINING_TEMPLATES = new String[] {
            "models/common/mining/miningField.drlt",
            "models/common/mining/miningFieldInvalid.drlt",
            "models/common/mining/miningFieldMissing.drlt",
            "models/common/mining/miningFieldOutlierAsMissing.drlt",
            "models/common/mining/miningFieldOutlierAsExtremeLow.drlt",
            "models/common/mining/miningFieldOutlierAsExtremeUpp.drlt",
            "models/common/targets/targetReshape.drlt",
            "models/common/targets/aliasedOutput.drlt",
            "models/common/targets/addOutputFeature.drlt",
            "models/common/targets/addRelOutputFeature.drlt",
            "models/common/targets/outputQuery.drlt",
            "models/common/targets/outputQueryPredicate.drlt"
    };

    protected static boolean neuralLoaded = false;
    protected static final String[] NEURAL_TEMPLATES = new String[] {
            "models/neural/neuralBeans.drlt",
            "models/neural/neuralWireInput.drlt",
            "models/neural/neuralBuildSynapses.drlt",
            "models/neural/neuralBuildNeurons.drlt",
            "models/neural/neuralLinkSynapses.drlt",
            "models/neural/neuralFire.drlt",
            "models/neural/neuralLayerMaxNormalization.drlt",
            "models/neural/neuralLayerSoftMaxNormalization.drlt",
            "models/neural/neuralOutputField.drlt",
            "models/neural/neuralClean.drlt"
    };

    protected static boolean svmLoaded = false;
    protected static final String[] SVM_TEMPLATES = new String[] {
            "models/svm/svmParams.drlt",
            "models/svm/svmDeclare.drlt",
            "models/svm/svmFunctions.drlt",
            "models/svm/svmBuild.drlt",
            "models/svm/svmInitSupportVector.drlt",
            "models/svm/svmInitInputVector.drlt",
            "models/svm/svmKernelEval.drlt",
            "models/svm/svmOutputGeneration.drlt",
            "models/svm/svmOutputVoteDeclare.drlt",
            "models/svm/svmOutputVote1vN.drlt",
            "models/svm/svmOutputVote1v1.drlt",
    };

    protected static boolean naiveBayesLoaded = false;
    protected static final String[] NAIVE_BAYES_TEMPLATES = new String[] {
            "models/bayes/naiveBayesDeclare.drlt",
            "models/bayes/naiveBayesEvalDiscrete.drlt",
            "models/bayes/naiveBayesEvalContinuous.drlt",
            "models/bayes/naiveBayesBuildCounts.drlt",
            "models/bayes/naiveBayesBuildDistrs.drlt",
            "models/bayes/naiveBayesBuildOuts.drlt",
    };

    protected static boolean simpleRegLoaded = false;
    protected static final String[] SIMPLEREG_TEMPLATES = new String[] {
            "models/regression/regDeclare.drlt",
            "models/regression/regCommon.drlt",
            "models/regression/regParams.drlt",
            "models/regression/regEval.drlt",
            "models/regression/regClaxOutput.drlt",
            "models/regression/regNormalization.drlt",
            "models/regression/regDecumulation.drlt",
    };

    protected static boolean clusteringLoaded = false;
    protected static final String[] CLUSTERING_TEMPLATES = new String[] {
            "models/clustering/clusteringDeclare.drlt",
            "models/clustering/clusteringInit.drlt",
            "models/clustering/clusteringEvalDistance.drlt",
            "models/clustering/clusteringEvalSimilarity.drlt",
            "models/clustering/clusteringMatrixCompare.drlt"
    };

    protected static boolean treeLoaded = false;
    protected static final String[] TREE_TEMPLATES = new String[] {
            "models/tree/treeDeclare.drlt",
            "models/tree/treeCommon.drlt",
            "models/tree/treeInputDeclare.drlt",
            "models/tree/treeInit.drlt",
            "models/tree/treeAggregateEval.drlt",
            "models/tree/treeDefaultEval.drlt",
            "models/tree/treeEval.drlt",
            "models/tree/treeIOBinding.drlt",
            "models/tree/treeMissHandleAggregate.drlt",
            "models/tree/treeMissHandleWeighted.drlt",
            "models/tree/treeMissHandleLast.drlt",
            "models/tree/treeMissHandleNull.drlt",
            "models/tree/treeMissHandleNone.drlt"
    };

    protected static boolean scorecardLoaded = false;
    // Note: includes one raw MVEL snippet alongside the .drlt templates.
    protected static final String[] SCORECARD_TEMPLATES = new String[] {
            "models/scorecard/scorecardInit.drlt",
            "models/scorecard/scorecardParamsInit.drlt",
            "models/scorecard/scorecardDeclare.drlt",
            "models/scorecard/scorecardDataDeclare.drlt",
            "models/scorecard/scorecardPartialScore.drlt",
            "models/scorecard/scorecardScoring.drlt",
            "models/scorecard/scorecardOutputGeneration.drlt",
            "models/scorecard/scorecardOutputRankCode.drlt",
            "mvel/scorecard/complexPartialScore.mvel"
    };

    protected static final String RESOURCE_PATH = BASE_PACK;
    protected static final String TEMPLATE_PATH = "/" + RESOURCE_PATH + "/templates/";

    // Shared, lazily-initialized template registry (see NOTE(review) above on thread-safety).
    private static TemplateRegistry registry;
    private
static List<KnowledgeBuilderResult> visitorBuildResults = new ArrayList<KnowledgeBuilderResult>();
    private List<KnowledgeBuilderResult> results;
    private Schema schema;
    private PMML4Helper helper;

    /**
     * Builds a compiler with a default package name and eagerly loads the PMML 4.2
     * XML schema used to validate incoming documents.
     */
    public PMML4Compiler() {
        super();
        this.results = new ArrayList<KnowledgeBuilderResult>();
        helper = new PMML4Helper();
        helper.setPack( "org.kie.pmml.pmml_4_2.test" );
        SchemaFactory sf = SchemaFactory.newInstance( XMLConstants.W3C_XML_SCHEMA_NS_URI );
        try {
            schema = sf.newSchema( PMML4Compiler.class.getClassLoader().getResource( SCHEMA_PATH ) );
        } catch ( SAXException e ) {
            // NOTE(review): schema load failure is only printed; later parsing proceeds
            // unvalidated (loadModel emits a warning when schema == null).
            e.printStackTrace();
        }
    }

    public PMML4Helper getHelper() {
        return helper;
    }

    // The rule-unit class name is taken from the root model of the unit.
    private String getRuleUnitClass(PMML4Unit unit) {
        PMML4Model root = unit.getRootModel();
        return root.getRuleUnitClassName();
    }

    /**
     * Generates the DRL "theory" for the given PMML document by firing the
     * template-driven visitor rules against it; the rules append generated text to
     * the shared "theory" StringBuilder global. Returns null (and records an error)
     * when the visitor KieBase cannot be resolved.
     */
    public String generateTheory( PMML pmml ) {
        StringBuilder sb = new StringBuilder();
        PMML4Unit unit = new PMML4UnitImpl(pmml);
        KieBase visitor;
        try {
            visitor = checkBuildingResources( pmml );
        } catch ( IOException e ) {
            this.results.add( new PMMLError( e.getMessage() ) );
            return null;
        }
        KieSession visitorSession = visitor.newKieSession();
        helper.reset();
        visitorSession.setGlobal( "registry", registry );
        visitorSession.setGlobal( "fld2var", new HashMap() );
        visitorSession.setGlobal( "utils", helper );
        visitorSession.setGlobal("unitPackageName", helper.getPack());
        visitorSession.setGlobal("ruleUnitClassName", getRuleUnitClass(unit));
        visitorSession.setGlobal( "theory", sb );
        visitorSession.insert( pmml );
        visitorSession.fireAllRules();
        String modelEvaluatingRules = sb.toString();
        visitorSession.dispose();
        return modelEvaluatingRules;
    }

    // Loads the always-needed template groups exactly once, guarded by static flags.
    private static void initRegistry() {
        if ( registry == null ) {
            registry = new SimpleTemplateRegistry();
        }
        if ( ! globalLoaded ) {
            for ( String ntempl : GLOBAL_TEMPLATES ) {
                prepareTemplate( ntempl );
            }
            globalLoaded = true;
        }
        if ( ! transformationLoaded ) {
            for ( String ntempl : TRANSFORMATION_TEMPLATES ) {
                prepareTemplate( ntempl );
            }
            transformationLoaded = true;
        }
        if ( ! miningLoaded ) {
            for ( String ntempl : MINING_TEMPLATES ) {
                prepareTemplate( ntempl );
            }
            miningLoaded = true;
        }
    }

    // Picks the visitor KieBase matching the model types found in the document and
    // lazily loads the matching template groups. A document mixing several model
    // types falls back to the combined "KiePMML" kbase.
    private static KieBase checkBuildingResources( PMML pmml ) throws IOException {
        KieServices ks = KieServices.Factory.get();
        KieContainer kieContainer = ks.getKieClasspathContainer( PMML4Compiler.class.getClassLoader() );
        if ( registry == null ) {
            initRegistry();
        }
        String chosenKieBase = null;
        for ( Object o : pmml.getAssociationModelsAndBaselineModelsAndClusteringModels() ) {
            if ( o instanceof NaiveBayesModel ) {
                if ( ! naiveBayesLoaded ) {
                    for ( String ntempl : NAIVE_BAYES_TEMPLATES ) {
                        prepareTemplate( ntempl );
                    }
                    naiveBayesLoaded = true;
                }
                chosenKieBase = chosenKieBase == null ? "KiePMML-Bayes" : "KiePMML";
            }
            if ( o instanceof NeuralNetwork ) {
                if ( ! neuralLoaded ) {
                    for ( String ntempl : NEURAL_TEMPLATES ) {
                        prepareTemplate( ntempl );
                    }
                    neuralLoaded = true;
                }
                chosenKieBase = chosenKieBase == null ? "KiePMML-Neural" : "KiePMML";
            }
            if ( o instanceof ClusteringModel ) {
                if ( ! clusteringLoaded ) {
                    for ( String ntempl : CLUSTERING_TEMPLATES ) {
                        prepareTemplate( ntempl );
                    }
                    clusteringLoaded = true;
                }
                chosenKieBase = chosenKieBase == null ? "KiePMML-Cluster" : "KiePMML";
            }
            if ( o instanceof SupportVectorMachineModel ) {
                if ( ! svmLoaded ) {
                    for ( String ntempl : SVM_TEMPLATES ) {
                        prepareTemplate( ntempl );
                    }
                    svmLoaded = true;
                }
                chosenKieBase = chosenKieBase == null ? "KiePMML-SVM" : "KiePMML";
            }
            if ( o instanceof TreeModel ) {
                if ( ! treeLoaded ) {
                    for ( String ntempl : TREE_TEMPLATES ) {
                        prepareTemplate( ntempl );
                    }
                    treeLoaded = true;
                }
                chosenKieBase = chosenKieBase == null ? "KiePMML-Tree" : "KiePMML";
            }
            if ( o instanceof RegressionModel ) {
                if ( ! simpleRegLoaded ) {
                    for ( String ntempl : SIMPLEREG_TEMPLATES ) {
                        prepareTemplate( ntempl );
                    }
                    simpleRegLoaded = true;
                }
                chosenKieBase = chosenKieBase == null ? "KiePMML-Regression" : "KiePMML";
            }
            if ( o instanceof Scorecard ) {
                if ( !
scorecardLoaded ) {
                    for ( String ntempl : SCORECARD_TEMPLATES ) {
                        prepareTemplate( ntempl );
                    }
                    scorecardLoaded = true;
                }
                chosenKieBase = chosenKieBase == null ? "KiePMML-Scorecard" : "KiePMML";
            }
        }
        if ( chosenKieBase == null ) {
            chosenKieBase = "KiePMML-Base";
        }
        return kieContainer.getKieBase( chosenKieBase );
    }

    // Compiles one classpath template into the shared registry, keyed by its file name.
    private static void prepareTemplate( String ntempl ) {
        try {
            String path = TEMPLATE_PATH + ntempl;
            Resource res = ResourceFactory.newClassPathResource(path, org.kie.pmml.pmml_4_2.PMML4Compiler.class);
            if ( res != null ) {
                InputStream stream = res.getInputStream();
                if ( stream != null ) {
                    registry.addNamedTemplate( path.substring(path.lastIndexOf('/') + 1),
                            TemplateCompiler.compileTemplate(stream));
                }
            }
        } catch (IOException e) {
            // NOTE(review): a missing/unreadable template is only printed; compilation
            // continues without it and may fail later in the visitor session.
            e.printStackTrace();
        }
    }

    /** Compiles a classpath PMML resource into DRL; errors are recorded in {@link #getResults()}. */
    public String compile( String resource, ClassLoader classLoader ) {
        String theory = null;
        Resource cpr = new ClassPathResource( resource );
        try {
            theory = compile( cpr.getInputStream(), classLoader );
        } catch ( IOException e ) {
            results.add( new PMMLError( e.toString() ) );
            e.printStackTrace();
        }
        return theory;
    }

    /** Transforms a PMML resource into a single generated-DRL resource (empty array on failure). */
    public Resource[] transform( Resource resource, ClassLoader classLoader ) {
        String theory = null;
        try {
            theory = compile( resource.getInputStream(), classLoader );
        } catch ( IOException e ) {
            results.add( new PMMLError( e.toString() ) );
            e.printStackTrace();
            return new Resource[ 0 ];
        }
        return new Resource[] { buildOutputResource( resource, theory ) };
    }

    // Wraps generated DRL in a ByteArrayResource with a synthetic "generated-sources/" path.
    private Resource buildOutputResource( Resource resource, String theory ) {
        ByteArrayResource byteArrayResource = new ByteArrayResource( theory.getBytes( IoUtils.UTF8_CHARSET ) );
        byteArrayResource.setResourceType( ResourceType.PMML );
        if ( resource.getSourcePath() != null ) {
            String originalPath = resource.getSourcePath();
            int start = originalPath.lastIndexOf( File.separator );
            byteArrayResource.setSourcePath( "generated-sources/" + originalPath.substring( start ) + ".pmml" );
        } else {
            byteArrayResource.setSourcePath( "generated-sources/" + helper.getContext() + ".pmml" );
        }
        return byteArrayResource;
    }

    // Resolves a name first as a classpath resource, then as a file-system path.
    private InputStream getInputStreamByFileName(String fileName) {
        InputStream is = null;
        Resource res = ResourceFactory.newClassPathResource(fileName);
        try {
            is = res.getInputStream();
        } catch (Exception e) {
            // Deliberately ignored: fall through to the file-system lookup below.
        }
        if (is == null) {
            res = ResourceFactory.newFileResource(fileName);
        }
        // NOTE(review): this second getInputStream() also runs when the classpath lookup
        // already succeeded, re-opening the same resource — confirm this is intended.
        try {
            is = res.getInputStream();
        } catch (Exception e) {
            this.results.add(new PMMLError("Unable to retrieve file based resource: "+fileName));
        }
        return is;
    }

    @Override
    public Map<String,String> getJavaClasses(String fileName) {
        InputStream is = getInputStreamByFileName(fileName);
        if (is != null) {
            return getJavaClasses(is);
        }
        return new HashMap<>();
    }

    /** Collects the generated Java sources (mining POJO, rule unit, tree outcome class) per model. */
    @Override
    public Map<String,String> getJavaClasses(InputStream stream) {
        Map<String,String> javaClasses = new HashMap<>();
        PMML pmml = loadModel(PMML, stream);
        if (pmml != null) {
            PMML4Unit unit = new PMML4UnitImpl(pmml);
            if (unit != null) {
                List<PMML4Model> models = unit.getModels();
                models.forEach(model -> {
                    Map.Entry<String, String> inputPojo = model.getMappedMiningPojo();
                    Map.Entry<String, String> ruleUnit = model.getMappedRuleUnit();
                    Map.Entry<String, String> outcome = null;
                    // Tree models additionally generate a node-outcome class.
                    if (model.getModelType() == PMML4ModelType.TREE) {
                        outcome = ((Treemodel)model).getTreeNodeJava();
                    }
                    if (inputPojo != null) javaClasses.put(inputPojo.getKey(), inputPojo.getValue());
                    if (ruleUnit != null) javaClasses.put(ruleUnit.getKey(), ruleUnit.getValue());
                    if (outcome != null) javaClasses.put(outcome.getKey(), outcome.getValue());
                });
            }
        }
        return javaClasses;
    }

    /** Pre-compiles a PMML file (classpath or file system) into per-model resources. */
    public List<PMMLResource> precompile( String fileName, ClassLoader classLoader, KieBaseModel rootKieBaseModel) {
        InputStream is = getInputStreamByFileName(fileName);
        List<PMMLResource> resources = null;
        if (is != null) {
            try {
                resources = precompile(is,classLoader,rootKieBaseModel);
            } catch (Exception e) {
                PMMLError err = new PMMLError("Unable to retrieve pre-compiled resources for PMML: "+e.getMessage());
                this.results.add(err);
            }
        }
        return
(resources != null) ? resources:Collections.emptyList();
    }

    /**
     * Pre-compiles a PMML stream into per-model PMMLResources including the kbase
     * wiring; mining models are expanded segment-by-segment, single models get one
     * default kbase.
     */
    public List<PMMLResource> precompile( InputStream stream, ClassLoader classLoader, KieBaseModel rootKieBaseModel) {
        List<PMMLResource> resources = new ArrayList<>();
        KieServices services = KieServices.Factory.get();
        KieModuleModel module = services.newKieModuleModel();
        this.results = new ArrayList<KnowledgeBuilderResult>();
        PMML pmml = loadModel(PMML, stream);
        helper.setResolver(classLoader);
        PMML4Unit unit = new PMML4UnitImpl(pmml);
        if (unit.containsMiningModel()) {
            Miningmodel rootModel = unit.getRootMiningModel();
            resources = buildResourcesFromModel(pmml,rootModel,null,classLoader,module);
        } else {
            PMML4Model rootModel = unit.getRootModel();
            if (rootModel != null) {
                helper.setPack(rootModel.getModelPackageName());
                KieBaseModel kbm = module.newKieBaseModel(rootModel.getModelId());
                kbm.addPackage(helper.getPack())
                   .setDefault(true)
                   .setEventProcessingMode(EventProcessingOption.CLOUD);
                PMMLResource resource = new PMMLResource(helper.getPack());
                resource.setKieBaseModel(kbm);
                resource.addRules(rootModel.getModelId(), this.compile(pmml, classLoader));
                resources.add(resource);
            }
        }
        return resources;
    }

    // Copies output-field data definitions from earlier segments into the data
    // dictionary so later segments can reference fields produced upstream.
    private void addMissingFieldDefinition(PMML pmml, MiningSegmentation msm, MiningSegment seg) {
        // get the list of models that may contain the field definition
        List<PMML4Model> models = msm.getMiningSegments().stream()
                .filter(s -> s != seg && s.getSegmentIndex() < seg.getSegmentIndex())
                .map(iseg -> { return iseg.getModel(); })
                .collect(Collectors.toList());
        seg.getModel().getMiningFields().stream().filter(mf -> !mf.isInDictionary()).forEach(pmf -> {
            String fldName = pmf.getName();
            boolean fieldAdded = false;
            // Scan earlier segments until one provides a usable data field.
            for (Iterator<PMML4Model> iter = models.iterator(); iter.hasNext() && !fieldAdded;) {
                PMML4Model mdl = iter.next();
                PMMLOutputField outfield = mdl.findOutputField(fldName);
                PMMLMiningField target = (outfield != null && outfield.getTargetField() != null) ?
                        mdl.findMiningField(outfield.getTargetField()) : null;
                if (outfield != null) {
                    DataField e = null;
                    // Prefer the output field's own typed data field; otherwise fall back
                    // to the target mining field's definition.
                    if (outfield.getRawDataField() != null && outfield.getRawDataField().getDataType() != null) {
                        e = outfield.getRawDataField();
                    } else if (target != null ) {
                        e = target.getRawDataField();
                    }
                    if (e != null) {
                        // NOTE(review): mutates the shared DataField's name in place before
                        // adding it to the dictionary — confirm the source model tolerates this.
                        e.setName(fldName);
                        pmml.getDataDictionary().getDataFields().add(e);
                        BigInteger bi = pmml.getDataDictionary().getNumberOfFields();
                        pmml.getDataDictionary().setNumberOfFields(bi.add(BigInteger.ONE));
                        fieldAdded = true;
                    }
                }
            }
        });
    }

    // Builds the per-segment PMMLResource: a dedicated kbase/ksession pair plus the
    // DRL generated from a single-model PMML clone, with missing fields patched in.
    protected PMMLResource buildResourceFromSegment( PMML pmml_origin, MiningSegment segment, ClassLoader classLoader, KieModuleModel module) {
        PMML pmml = new PMML();
        DataDictionary dd = pmml_origin.getDataDictionary();
        pmml.setDataDictionary(dd);
        pmml.setHeader(pmml_origin.getHeader());
        pmml.getAssociationModelsAndBaselineModelsAndClusteringModels().add(segment.getModel().getRawModel());
        addMissingFieldDefinition(pmml,segment.getOwner(),segment);
        helper.setPack(segment.getModel().getModelPackageName());//PMML4Helper.pmmlDefaultPackageName()+".mining.segment_"+segment.getSegmentId());
        String rules = this.compile(pmml, classLoader);
        KieBaseModel kbModel = module.newKieBaseModel(segment.getOwner().getOwner().getModelId()+"_"+segment.getOwner().getSegmentationId()+"_SEGMENT_"+segment.getSegmentId());
        kbModel.addPackage(helper.getPack())
               .setDefault(false)
               .setEventProcessingMode(EventProcessingOption.CLOUD);
        KieSessionModel ksm = kbModel.newKieSessionModel("SEGMENT_"+segment.getSegmentId());
        ksm.setDefault(true);
        PMMLResource resource = new PMMLResource(helper.getPack());
        resource.setKieBaseModel(kbModel);
        resource.addRules(segment.getModel().getModelId(), rules);
        return resource;
    }

    // Recursively materializes resources for a mining model and all of its segments.
    protected List<PMMLResource> buildResourcesFromModel(PMML pmml, Miningmodel miningModel, List<PMMLResource> resourcesList, ClassLoader classLoader, KieModuleModel module) {
        if (resourcesList == null) {
            resourcesList = new ArrayList<>();
        }
        PMMLResource resource = new
PMMLResource(miningModel.getModelPackageName());//new PMMLResource(PMML_DROOLS+".mining.model_"+miningModel.getModelId()); KieBaseModel rootKieBaseModel = module.newKieBaseModel(resource.getPackageName()); rootKieBaseModel.addPackage(resource.getPackageName()); rootKieBaseModel.setDefault(true); resource.setKieBaseModel(rootKieBaseModel); resource.addRules(miningModel.getModelId(), miningModel.generateRules()); resourcesList.add(resource); getChildResources(pmml,miningModel, resourcesList, classLoader, module); return resourcesList; } protected List<PMMLResource> getChildResources(PMML pmml_origin, Miningmodel parent, List<PMMLResource> resourceList, ClassLoader classLoader, KieModuleModel module) { if (parent != null && parent.getSegmentation() != null) { MiningSegmentation segmentation = parent.getSegmentation(); if (segmentation.getMiningSegments() != null) { List<MiningSegment> segments = segmentation.getMiningSegments(); for (MiningSegment segment: segments) { if (segment.getModel() instanceof Miningmodel) { buildResourcesFromModel(pmml_origin,(Miningmodel)segment.getModel(), resourceList, classLoader, module); } else { resourceList.add(buildResourceFromSegment(pmml_origin,segment, classLoader, module)); } } } } return resourceList; } public String compile( PMML pmml, ClassLoader classLoader) { helper.setResolver( classLoader ); if ( getResults().isEmpty() ) { return generateTheory( pmml ); } else { return null; } } public String compile( InputStream source, ClassLoader classLoader ) { this.results = new ArrayList<KnowledgeBuilderResult>(); PMML pmml = loadModel( PMML, source ); return compile(pmml,classLoader); } public List<KnowledgeBuilderResult> getResults() { List<KnowledgeBuilderResult> combinedResults = new ArrayList<KnowledgeBuilderResult>( this.results ); combinedResults.addAll( visitorBuildResults ); return combinedResults; } @Override public void clearResults() { this.results.clear(); } public void dump( String s, OutputStream ostream ) { // write 
to outstream Writer writer = null; try { writer = new OutputStreamWriter( ostream, "UTF-8" ); writer.write(s); } catch (UnsupportedEncodingException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } finally { try { if (writer != null) { writer.flush(); } } catch (IOException e) { e.printStackTrace(); } } } /** * Imports a PMML source file, returning a Java descriptor * @param model the PMML package name (classes derived from a specific schema) * @param source the name of the PMML resource storing the predictive model * @return the Java Descriptor of the PMML resource */ public PMML loadModel( String model, InputStream source ) { try { if ( schema == null ) { visitorBuildResults.add( new PMMLWarning( ResourceFactory.newInputStreamResource( source ), "Could not validate PMML document, schema not available" ) ); } final JAXBContext jc; final ClassLoader ccl = Thread.currentThread().getContextClassLoader(); try { Thread.currentThread().setContextClassLoader( PMML4Compiler.class.getClassLoader() ); jc = JAXBContext.newInstance( model, PMML4Compiler.class.getClassLoader() ); } finally { Thread.currentThread().setContextClassLoader( ccl ); } Unmarshaller unmarshaller = jc.createUnmarshaller(); if ( schema != null ) { unmarshaller.setSchema( schema ); } return (PMML) unmarshaller.unmarshal( source ); } catch ( JAXBException e ) { this.results.add( new PMMLError( e.toString() ) ); return null; } } public static void dumpModel( PMML model, OutputStream target ) { try { final JAXBContext jc; final ClassLoader ccl = Thread.currentThread().getContextClassLoader(); try { Thread.currentThread().setContextClassLoader( PMML4Compiler.class.getClassLoader() ); jc = JAXBContext.newInstance( PMML.class.getPackage().getName(), PMML4Compiler.class.getClassLoader() ); } finally { Thread.currentThread().setContextClassLoader( ccl ); } Marshaller marshaller = jc.createMarshaller(); marshaller.setProperty( Marshaller.JAXB_FORMATTED_OUTPUT, Boolean.TRUE ); 
marshaller.marshal( model, target ); } catch ( JAXBException e ) { e.printStackTrace(); } } @Override public String getCompilerVersion() { return "KIE PMML v2"; } }
/* * Copyright (c) 2008-2016, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.replicatedmap; import com.hazelcast.config.Config; import com.hazelcast.config.InMemoryFormat; import com.hazelcast.core.DistributedObject; import com.hazelcast.core.EntryAdapter; import com.hazelcast.core.EntryEvent; import com.hazelcast.core.HazelcastInstance; import com.hazelcast.core.ReplicatedMap; import com.hazelcast.replicatedmap.impl.record.ReplicatedRecord; import com.hazelcast.test.AssertTask; import com.hazelcast.test.HazelcastSerialClassRunner; import com.hazelcast.test.TestHazelcastInstanceFactory; import com.hazelcast.test.annotation.QuickTest; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import java.util.AbstractMap.SimpleEntry; import java.util.ArrayList; import java.util.Collection; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; 
/**
 * Functional tests for {@link ReplicatedMap}: emptiness, put/putAll/clear,
 * TTL handling, removal, view collections (keySet/values/entrySet),
 * entry listeners, eviction and destroy — most scenarios exercised for both
 * OBJECT and BINARY in-memory formats on a two-member cluster.
 */
@RunWith(HazelcastSerialClassRunner.class)
@Category(QuickTest.class)
public class ReplicatedMapTest extends ReplicatedMapAbstractTest {

    @Test
    public void testEmptyMapIsEmpty() throws Exception {
        TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(1);
        HazelcastInstance instance = nodeFactory.newHazelcastInstance();
        ReplicatedMap<Integer, Integer> map = instance.getReplicatedMap(randomName());
        assertTrue("map should be empty", map.isEmpty());
    }

    @Test
    public void testNonEmptyMapIsNotEmpty() throws Exception {
        TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(1);
        HazelcastInstance instance = nodeFactory.newHazelcastInstance();
        ReplicatedMap<Integer, Integer> map = instance.getReplicatedMap(randomName());
        map.put(1, 1);
        assertFalse("map should not be empty", map.isEmpty());
    }

    @Test(expected = IllegalArgumentException.class)
    public void testNegativeTtlThrowsException() throws Exception {
        TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(1);
        HazelcastInstance instance = nodeFactory.newHazelcastInstance();
        ReplicatedMap<Integer, Integer> map = instance.getReplicatedMap(randomName());
        // negative TTL must be rejected
        map.put(1, 1, -1, TimeUnit.DAYS);
    }

    @Test
    public void testAddObject() throws Exception {
        testAdd(buildConfig(InMemoryFormat.OBJECT));
    }

    @Test
    public void testAddObjectSyncFillup() throws Exception {
        Config config = buildConfig(InMemoryFormat.OBJECT);
        config.getReplicatedMapConfig("default").setAsyncFillup(false);
        testFillUp(config);
    }

    @Test
    public void testAddObjectAsyncFillup() throws Exception {
        Config config = buildConfig(InMemoryFormat.OBJECT);
        config.getReplicatedMapConfig("default").setAsyncFillup(true);
        testFillUp(config);
    }

    @Test
    public void testAddBinary() throws Exception {
        testAdd(buildConfig(InMemoryFormat.BINARY));
    }

    @Test
    public void testAddBinarySyncFillup() throws Exception {
        Config config = buildConfig(InMemoryFormat.BINARY);
        config.getReplicatedMapConfig("default").setAsyncFillup(false);
        testFillUp(config);
    }

    @Test
    public void testAddBinaryAsyncFillup() throws Exception {
        Config config = buildConfig(InMemoryFormat.BINARY);
        config.getReplicatedMapConfig("default").setAsyncFillup(true);
        testFillUp(config);
    }

    // Puts entries on member 1 and asserts they eventually become visible on both members.
    private void testAdd(Config config) throws Exception {
        TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(2);
        HazelcastInstance instance1 = nodeFactory.newHazelcastInstance(config);
        HazelcastInstance instance2 = nodeFactory.newHazelcastInstance(config);
        final ReplicatedMap<String, String> map1 = instance1.getReplicatedMap("default");
        final ReplicatedMap<String, String> map2 = instance2.getReplicatedMap("default");
        final int partitionCount = getPartitionService(instance1).getPartitionCount();
        final Set<String> keys = generateRandomKeys(instance1, partitionCount);
        for (String key : keys) {
            map1.put(key, "bar");
        }
        assertTrueEventually(new AssertTask() {
            @Override
            public void run() throws Exception {
                for (String key : keys) {
                    assertEquals("bar", map1.get(key));
                    assertEquals("bar", map2.get(key));
                }
            }
        });
    }

    // Fills map1 first; member 2 obtains its map proxy only afterwards, so the data
    // must arrive there through the configured (sync or async) fill-up mechanism.
    private void testFillUp(Config config) {
        TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(2);
        HazelcastInstance instance1 = nodeFactory.newHazelcastInstance(config);
        HazelcastInstance instance2 = nodeFactory.newHazelcastInstance(config);
        final int partitionCount = getPartitionService(instance1).getPartitionCount();
        final Set<String> keys = generateRandomKeys(instance1, partitionCount);
        final ReplicatedMap<String, String> map1 = instance1.getReplicatedMap("default");
        for (String key : keys) {
            map1.put(key, "bar");
        }
        final ReplicatedMap<String, String> map2 = instance2.getReplicatedMap("default");
        assertTrueEventually(new AssertTask() {
            @Override
            public void run() throws Exception {
                for (String key : keys) {
                    assertEquals("bar", map2.get(key));
                }
            }
        });
    }

    @Test
    public void testPutAllObject() throws Exception {
        testPutAll(buildConfig(InMemoryFormat.OBJECT));
    }

    @Test
    public void testPutAllBinary() throws Exception {
        testPutAll(buildConfig(InMemoryFormat.BINARY));
    }

    // putAll on member 1 must eventually be observable from both members.
    private void testPutAll(Config config) throws TimeoutException {
        TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(2);
        HazelcastInstance instance1 = nodeFactory.newHazelcastInstance(config);
        HazelcastInstance instance2 = nodeFactory.newHazelcastInstance(config);
        final ReplicatedMap<String, String> map1 = instance1.getReplicatedMap("default");
        final ReplicatedMap<String, String> map2 = instance2.getReplicatedMap("default");
        final int partitionCount = getPartitionService(instance1).getPartitionCount();
        final Set<String> keys = generateRandomKeys(instance1, partitionCount);
        final Map<String, String> mapTest = new HashMap<String, String>();
        for (String key : keys) {
            mapTest.put(key, "bar");
        }
        map1.putAll(mapTest);
        assertTrueEventually(new AssertTask() {
            @Override
            public void run() throws Exception {
                for (String key : keys) {
                    assertEquals("bar", map1.get(key));
                    assertEquals("bar", map2.get(key));
                }
            }
        });
    }

    @Test
    public void testClearObject() throws Exception {
        testClear(buildConfig(InMemoryFormat.OBJECT));
    }

    @Test
    public void testClearBinary() throws Exception {
        testClear(buildConfig(InMemoryFormat.BINARY));
    }

    // After replication is confirmed, clear() on member 1 must empty both members.
    private void testClear(Config config) throws Exception {
        TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(2);
        HazelcastInstance instance1 = nodeFactory.newHazelcastInstance(config);
        HazelcastInstance instance2 = nodeFactory.newHazelcastInstance(config);
        final ReplicatedMap<String, String> map1 = instance1.getReplicatedMap("default");
        final ReplicatedMap<String, String> map2 = instance2.getReplicatedMap("default");
        final int partitionCount = getPartitionService(instance1).getPartitionCount();
        final Set<String> keys = generateRandomKeys(instance1, partitionCount);
        for (String key : keys) {
            map1.put(key, "bar");
        }
        assertTrueEventually(new AssertTask() {
            @Override
            public void run() throws Exception {
                for (String key : keys) {
                    assertEquals("bar", map1.get(key));
                    assertEquals("bar", map2.get(key));
                }
            }
        });
        map1.clear();
        assertTrueEventually(new AssertTask() {
            @Override
            public void run() throws Exception {
                assertEquals(0, map1.size());
                assertEquals(0, map2.size());
            }
        });
    }

    @Test
    public void testAddTtlObject() throws Exception {
        testAddTtl(buildConfig(InMemoryFormat.OBJECT));
    }

    @Test
    public void testAddTtlBinary() throws Exception {
        testAddTtl(buildConfig(InMemoryFormat.BINARY));
    }

    // Entries put with a TTL must carry a non-zero TTL in the replicated records
    // on both members.
    private void testAddTtl(Config config) throws Exception {
        TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(2);
        HazelcastInstance instance1 = nodeFactory.newHazelcastInstance(config);
        HazelcastInstance instance2 = nodeFactory.newHazelcastInstance(config);
        final ReplicatedMap<String, String> map1 = instance1.getReplicatedMap("default");
        final ReplicatedMap<String, String> map2 = instance2.getReplicatedMap("default");
        final int partitionCount = getPartitionService(instance1).getPartitionCount();
        final Set<String> keys = generateRandomKeys(instance1, partitionCount);
        for (String key : keys) {
            map1.put(key, "bar", 10, TimeUnit.MINUTES);
        }
        assertTrueEventually(new AssertTask() {
            @Override
            public void run() throws Exception {
                for (String key : keys) {
                    assertEquals("bar", map1.get(key));
                    ReplicatedRecord<String, String> record = getReplicatedRecord(map1, key);
                    assertNotNull(record);
                    assertNotEquals(0, record.getTtlMillis());
                }
            }
        });
        assertTrueEventually(new AssertTask() {
            @Override
            public void run() throws Exception {
                for (String key : keys) {
                    assertEquals("bar", map2.get(key));
                    ReplicatedRecord<String, String> record = getReplicatedRecord(map2, key);
                    assertNotNull(record);
                    assertNotEquals(0, record.getTtlMillis());
                }
            }
        });
    }

    @Test
    public void testUpdateObject() throws Exception {
        testUpdate(buildConfig(InMemoryFormat.OBJECT));
    }

    @Test
    public void testUpdateBinary() throws Exception {
        testUpdate(buildConfig(InMemoryFormat.BINARY));
    }

    // Updates made on member 2 must overwrite the values originally put on member 1,
    // on both members.
    private void testUpdate(Config config) throws Exception {
        TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(2);
        HazelcastInstance instance1 = nodeFactory.newHazelcastInstance(config);
        HazelcastInstance instance2 = nodeFactory.newHazelcastInstance(config);
        final ReplicatedMap<String, String> map1 = instance1.getReplicatedMap("default");
        final ReplicatedMap<String, String> map2 = instance2.getReplicatedMap("default");
        final int partitionCount = getPartitionService(instance1).getPartitionCount();
        final Set<String> keys = generateRandomKeys(instance1, partitionCount);
        for (String key : keys) {
            map1.put(key, "bar");
        }
        assertTrueEventually(new AssertTask() {
            @Override
            public void run() throws Exception {
                for (String key : keys) {
                    assertEquals("bar", map1.get(key));
                    assertEquals("bar", map2.get(key));
                }
            }
        });
        for (String key : keys) {
            map2.put(key, "bar2");
        }
        assertTrueEventually(new AssertTask() {
            @Override
            public void run() throws Exception {
                for (String key : keys) {
                    assertEquals("bar2", map1.get(key));
                    assertEquals("bar2", map2.get(key));
                }
            }
        });
    }

    @Test
    public void testUpdateTtlObject() throws Exception {
        testUpdateTtl(buildConfig(InMemoryFormat.OBJECT));
    }

    @Test
    public void testUpdateTtlBinary() throws Exception {
        testUpdateTtl(buildConfig(InMemoryFormat.BINARY));
    }

    // Updating entries with a TTL must both change the value and attach a positive
    // TTL to the records on each member.
    private void testUpdateTtl(Config config) throws Exception {
        TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(2);
        HazelcastInstance instance1 = nodeFactory.newHazelcastInstance(config);
        HazelcastInstance instance2 = nodeFactory.newHazelcastInstance(config);
        final ReplicatedMap<String, String> map1 = instance1.getReplicatedMap("default");
        final ReplicatedMap<String, String> map2 = instance2.getReplicatedMap("default");
        final int partitionCount = getPartitionService(instance1).getPartitionCount();
        final Set<String> keys = generateRandomKeys(instance1, partitionCount);
        for (String key : keys) {
            map1.put(key, "bar");
        }
        assertTrueEventually(new AssertTask() {
            @Override
            public void run() throws Exception {
                for (String key : keys) {
                    assertEquals("bar", map1.get(key));
                    assertEquals("bar", map2.get(key));
                }
            }
        });
        for (String key : keys) {
            map2.put(key, "bar2", 10, TimeUnit.MINUTES);
        }
        assertTrueEventually(new AssertTask() {
            @Override
            public void run() throws Exception {
                for (String key : keys) {
                    assertEquals("bar2", map1.get(key));
                    ReplicatedRecord<String, String> record = getReplicatedRecord(map1, key);
                    assertNotNull(record);
                    assertTrue(record.getTtlMillis() > 0);
                }
            }
        });
        assertTrueEventually(new AssertTask() {
            @Override
            public void run() throws Exception {
                for (String key : keys) {
                    assertEquals("bar2", map2.get(key));
                    ReplicatedRecord<String, String> record = getReplicatedRecord(map2, key);
                    assertNotNull(record);
                    assertTrue(record.getTtlMillis() > 0);
                }
            }
        });
    }

    @Test
    public void testRemoveObject() throws Exception {
        testRemove(buildConfig(InMemoryFormat.OBJECT));
    }

    @Test
    public void testRemoveBinary() throws Exception {
        testRemove(buildConfig(InMemoryFormat.BINARY));
    }

    // Removals issued on member 2 must eventually make the keys absent on both members.
    private void testRemove(Config config) throws Exception {
        TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(2);
        HazelcastInstance instance1 = nodeFactory.newHazelcastInstance(config);
        HazelcastInstance instance2 = nodeFactory.newHazelcastInstance(config);
        final ReplicatedMap<String, String> map1 = instance1.getReplicatedMap("default");
        final ReplicatedMap<String, String> map2 = instance2.getReplicatedMap("default");
        final int partitionCount = getPartitionService(instance1).getPartitionCount();
        final Set<String> keys = generateRandomKeys(instance1, partitionCount);
        for (String key : keys) {
            map1.put(key, "bar");
        }
        assertTrueEventually(new AssertTask() {
            @Override
            public void run() throws Exception {
                for (String key : keys) {
                    assertEquals("map1 should return value for key " + key, "bar", map1.get(key));
                    assertEquals("map2 should return value for key " + key, "bar", map2.get(key));
                }
            }
        });
        for (String key : keys) {
            map2.remove(key);
        }
        assertTrueEventually(new AssertTask() {
            @Override
            public void run() throws Exception {
                for (String key : keys) {
                    assertFalse("map1 should not contain key " + key, map1.containsKey(key));
                    assertFalse("map2 should not contain key " + key, map2.containsKey(key));
                }
            }
        });
    }

    @Test
    public void testContainsKey_returnsFalse_onRemovedKeys() throws Exception {
        HazelcastInstance node = createHazelcastInstance();
        ReplicatedMap<Integer, Integer> map = node.getReplicatedMap("default");
        map.put(1, Integer.MAX_VALUE);
        map.remove(1);
        assertFalse(map.containsKey(1));
    }

    @Test
    public void testContainsKey_returnsFalse_onNonexistentKeys() throws Exception {
        HazelcastInstance node = createHazelcastInstance();
        ReplicatedMap<Integer, Integer> map = node.getReplicatedMap("default");
        assertFalse(map.containsKey(1));
    }

    @Test
    public void testContainsKey_returnsTrue_onExistingKeys() throws Exception {
        HazelcastInstance node = createHazelcastInstance();
        ReplicatedMap<Integer, Integer> map = node.getReplicatedMap("default");
        map.put(1, Integer.MAX_VALUE);
        assertTrue(map.containsKey(1));
    }

    @Test
    public void testKeySet_notIncludes_removedKeys() throws Exception {
        HazelcastInstance node = createHazelcastInstance();
        final ReplicatedMap<Integer, Integer> map = node.getReplicatedMap("default");
        map.put(1, Integer.MAX_VALUE);
        map.put(2, Integer.MIN_VALUE);
        map.remove(1);
        // removed key may linger as a tombstone briefly, hence eventual assertion
        assertTrueEventually(new AssertTask() {
            @Override
            public void run() throws Exception {
                Set<Integer> keys = new HashSet<Integer>(map.keySet());
                assertFalse(keys.contains(1));
            }
        }, 20);
    }

    @Test
    public void testEntrySet_notIncludes_removedKeys() throws Exception {
        HazelcastInstance node = createHazelcastInstance();
        final ReplicatedMap<Integer, Integer> map = node.getReplicatedMap("default");
        map.put(1, Integer.MAX_VALUE);
        map.put(2, Integer.MIN_VALUE);
        map.remove(1);
        assertTrueEventually(new AssertTask() {
            @Override
            public void run() throws Exception {
                Set<Entry<Integer, Integer>> entries = map.entrySet();
                for (Entry<Integer, Integer> entry : entries) {
                    if (entry.getKey().equals(1)) {
                        fail(String.format("We do not expect an entry which's key equals to %d in entry set", 1));
                    }
                }
            }
        }, 20);
    }

    @Test
    public void testSizeObject() throws Exception {
        testSize(buildConfig(InMemoryFormat.OBJECT));
    }

    @Test
    public void testSizeBinary() throws Exception {
        testSize(buildConfig(InMemoryFormat.BINARY));
    }

    // Splits the puts between the two members; both must eventually report the full size.
    private void testSize(Config config) throws Exception {
        TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(2);
        HazelcastInstance instance1 = nodeFactory.newHazelcastInstance(config);
        HazelcastInstance instance2 = nodeFactory.newHazelcastInstance(config);
        final ReplicatedMap<String, String> map1 = instance1.getReplicatedMap("default");
        final ReplicatedMap<String, String> map2 = instance2.getReplicatedMap("default");
        final int partitionCount = getPartitionService(instance1).getPartitionCount();
        final Set<String> keys = generateRandomKeys(instance1, partitionCount);
        final SimpleEntry<String, String>[] testValues = buildTestValues(keys);
        int half = testValues.length / 2;
        for (int i = 0; i < testValues.length; i++) {
            // first half goes to member 1, second half to member 2
            final ReplicatedMap<String, String> map = i < half ? map1 : map2;
            final SimpleEntry<String, String> entry = testValues[i];
            map.put(entry.getKey(), entry.getValue());
        }
        assertTrueEventually(new AssertTask() {
            @Override
            public void run() throws Exception {
                assertEquals(keys.size(), map1.size());
                assertEquals(keys.size(), map2.size());
            }
        });
    }

    @Test
    public void testContainsKeyObject() throws Exception {
        testContainsKey(buildConfig(InMemoryFormat.OBJECT));
    }

    @Test
    public void testContainsKeyBinary() throws Exception {
        testContainsKey(buildConfig(InMemoryFormat.BINARY));
    }

    private void testContainsKey(Config config) throws Exception {
        TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(2);
        HazelcastInstance instance1 = nodeFactory.newHazelcastInstance(config);
        HazelcastInstance instance2 = nodeFactory.newHazelcastInstance(config);
        final ReplicatedMap<String, String> map1 = instance1.getReplicatedMap("default");
        final ReplicatedMap<String, String> map2 = instance2.getReplicatedMap("default");
        final int partitionCount = getPartitionService(instance1).getPartitionCount();
        final Set<String> keys = generateRandomKeys(instance1, partitionCount);
        for (String key : keys) {
            map1.put(key, "bar");
        }
        assertTrueEventually(new AssertTask() {
            @Override
            public void run() throws Exception {
                for (String key : keys) {
                    assertTrue(map1.containsKey(key));
                    assertTrue(map2.containsKey(key));
                }
            }
        });
    }

    @Test
    public void testContainsValue_returnsFalse_onNonexistentValue() throws Exception {
        HazelcastInstance node = createHazelcastInstance();
        ReplicatedMap<Integer, Integer> map = node.getReplicatedMap("default");
        assertFalse(map.containsValue(1));
    }

    @Test
    public void testContainsValueObject() throws Exception {
        testContainsValue(buildConfig(InMemoryFormat.OBJECT));
    }

    @Test
    public void testContainsValueBinary() throws Exception {
        testContainsValue(buildConfig(InMemoryFormat.BINARY));
    }

    // Each key is stored with itself as value, split across both members;
    // containsValue must eventually succeed on each member for every value.
    private void testContainsValue(Config config) throws Exception {
        TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(2);
        HazelcastInstance instance1 = nodeFactory.newHazelcastInstance(config);
        HazelcastInstance instance2 = nodeFactory.newHazelcastInstance(config);
        final ReplicatedMap<String, String> map1 = instance1.getReplicatedMap("default");
        final ReplicatedMap<String, String> map2 = instance2.getReplicatedMap("default");
        final int partitionCount = getPartitionService(instance1).getPartitionCount();
        final Set<String> keys = generateRandomKeys(instance1, partitionCount);
        int half = keys.size() / 2, i = 0;
        for (String key : keys) {
            final ReplicatedMap<String, String> map = i++ < half ? map1 : map2;
            map.put(key, key);
        }
        assertTrueEventually(new AssertTask() {
            @Override
            public void run() throws Exception {
                for (String key : keys) {
                    assertTrue(map1.containsValue(key));
                    assertTrue(map2.containsValue(key));
                }
            }
        });
    }

    @Test
    public void testValuesWithComparator() throws Exception {
        TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(1);
        HazelcastInstance instance = nodeFactory.newHazelcastInstance();
        ReplicatedMap<Integer, Integer> map = instance.getReplicatedMap(randomName());
        for (int i = 0; i < 100; i++) {
            map.put(i, i);
        }
        // values(comparator) must return the values sorted descending
        Collection<Integer> values = map.values(new DescendingComparator());
        int v = 100;
        for (Integer value : values) {
            assertEquals(--v, (int) value);
        }
    }

    @Test
    public void testValuesObject() throws Exception {
        testValues(buildConfig(InMemoryFormat.OBJECT));
    }

    @Test
    public void testValuesBinary() throws Exception {
        testValues(buildConfig(InMemoryFormat.BINARY));
    }

    private void testValues(Config config) throws Exception {
        TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(2);
        HazelcastInstance instance1 = nodeFactory.newHazelcastInstance(config);
        HazelcastInstance instance2 = nodeFactory.newHazelcastInstance(config);
        final ReplicatedMap<String, String> map1 = instance1.getReplicatedMap("default");
        final ReplicatedMap<String, String> map2 = instance2.getReplicatedMap("default");
        final int partitionCount = getPartitionService(instance1).getPartitionCount();
        final Set<String> keys = generateRandomKeys(instance1, partitionCount);
        int half = keys.size() / 2, i = 0;
        for (String key : keys) {
            // key doubles as value, so values() can be compared against the key set
            final ReplicatedMap<String, String> map = i++ < half ? map1 : map2;
            map.put(key, key);
        }
        assertTrueEventually(new AssertTask() {
            @Override
            public void run() throws Exception {
                assertEquals(keys, new HashSet<String>(map1.values()));
                assertEquals(keys, new HashSet<String>(map2.values()));
            }
        });
    }

    @Test
    public void testKeySetObject() throws Exception {
        testKeySet(buildConfig(InMemoryFormat.OBJECT));
    }

    @Test
    public void testKeySetBinary() throws Exception {
        testKeySet(buildConfig(InMemoryFormat.BINARY));
    }

    private void testKeySet(Config config) throws Exception {
        TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(2);
        HazelcastInstance instance1 = nodeFactory.newHazelcastInstance(config);
        HazelcastInstance instance2 = nodeFactory.newHazelcastInstance(config);
        final ReplicatedMap<String, String> map1 = instance1.getReplicatedMap("default");
        final ReplicatedMap<String, String> map2 = instance2.getReplicatedMap("default");
        final int partitionCount = getPartitionService(instance1).getPartitionCount();
        final Set<String> keys = generateRandomKeys(instance1, partitionCount);
        int half = keys.size() / 2, i = 0;
        for (String key : keys) {
            final ReplicatedMap<String, String> map = i++ < half ? map1 : map2;
            map.put(key, key);
        }
        assertTrueEventually(new AssertTask() {
            @Override
            public void run() throws Exception {
                assertEquals(keys, new HashSet<String>(map1.keySet()));
                assertEquals(keys, new HashSet<String>(map2.keySet()));
            }
        });
    }

    @Test
    public void testEntrySetObject() throws Exception {
        testEntrySet(buildConfig(InMemoryFormat.OBJECT));
    }

    @Test
    public void testEntrySetBinary() throws Exception {
        testEntrySet(buildConfig(InMemoryFormat.BINARY));
    }

    private void testEntrySet(Config config) throws Exception {
        TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(2);
        HazelcastInstance instance1 = nodeFactory.newHazelcastInstance(config);
        HazelcastInstance instance2 = nodeFactory.newHazelcastInstance(config);
        final ReplicatedMap<String, String> map1 = instance1.getReplicatedMap("default");
        final ReplicatedMap<String, String> map2 = instance2.getReplicatedMap("default");
        final int partitionCount = getPartitionService(instance1).getPartitionCount();
        final Set<String> keys = generateRandomKeys(instance1, partitionCount);
        int half = keys.size() / 2, i = 0;
        for (String key : keys) {
            final ReplicatedMap<String, String> map = i++ < half ? map1 : map2;
            map.put(key, key);
        }
        assertTrueEventually(new AssertTask() {
            @Override
            public void run() throws Exception {
                List<Entry<String, String>> entrySet1 = new ArrayList<Entry<String, String>>(map1.entrySet());
                List<Entry<String, String>> entrySet2 = new ArrayList<Entry<String, String>>(map2.entrySet());
                assertEquals(keys.size(), entrySet1.size());
                assertEquals(keys.size(), entrySet2.size());
                for (Entry<String, String> e : entrySet1) {
                    assertContains(keys, e.getKey());
                }
                for (Entry<String, String> e : entrySet2) {
                    assertContains(keys, e.getKey());
                }
            }
        });
    }

    @Test
    public void testAddListenerObject() throws Exception {
        testAddEntryListener(buildConfig(InMemoryFormat.OBJECT));
    }

    @Test
    public void testAddListenerBinary() throws Exception {
        testAddEntryListener(buildConfig(InMemoryFormat.BINARY));
    }

    // A key-scoped listener registered on member 2 must fire when member 1 puts that key.
    private void testAddEntryListener(Config config) throws TimeoutException {
        TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(2);
        HazelcastInstance instance1 = nodeFactory.newHazelcastInstance(config);
        HazelcastInstance instance2 = nodeFactory.newHazelcastInstance(config);
        final ReplicatedMap<String, String> map1 = instance1.getReplicatedMap("default");
        final ReplicatedMap<String, String> map2 = instance2.getReplicatedMap("default");
        final int partitionCount = getPartitionService(instance1).getPartitionCount();
        final Set<String> keys = generateRandomKeys(instance1, partitionCount);
        SimpleEntryListener listener = new SimpleEntryListener(1, 0);
        map2.addEntryListener(listener, keys.iterator().next());
        for (String key : keys) {
            map1.put(key, "bar");
        }
        assertOpenEventually(listener.addLatch);
    }

    @Test
    public void testEvictionObject() throws Exception {
        testEviction(buildConfig(InMemoryFormat.OBJECT));
    }

    @Test
    public void testEvictionBinary() throws Exception {
        testEviction(buildConfig(InMemoryFormat.BINARY));
    }

    // Entries put with a short TTL must trigger eviction events, both for a map-wide
    // listener and for a key-scoped one.
    private void testEviction(Config config) throws TimeoutException {
        TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(2);
        HazelcastInstance instance1 = nodeFactory.newHazelcastInstance(config);
        HazelcastInstance instance2 = nodeFactory.newHazelcastInstance(config);
        final ReplicatedMap<String, String> map1 = instance1.getReplicatedMap("default");
        final ReplicatedMap<String, String> map2 = instance2.getReplicatedMap("default");
        final int partitionCount = getPartitionService(instance1).getPartitionCount();
        final Set<String> keys = generateRandomKeys(instance1, partitionCount);
        SimpleEntryListener listener = new SimpleEntryListener(0, 100);
        map2.addEntryListener(listener);
        SimpleEntryListener listenerKey = new SimpleEntryListener(0, 1);
        map1.addEntryListener(listenerKey, keys.iterator().next());
        for (String key : keys) {
            map1.put(key, "bar", 3, TimeUnit.SECONDS);
        }
        assertOpenEventually(listener.evictLatch);
        assertOpenEventually(listenerKey.evictLatch);
    }

    /**
     * Listener that counts down one latch per added entry and another per evicted entry,
     * letting tests await a fixed number of events.
     */
    private class SimpleEntryListener extends EntryAdapter<String, String> {
        CountDownLatch addLatch;
        CountDownLatch evictLatch;

        SimpleEntryListener(int addCount, int evictCount) {
            addLatch = new CountDownLatch(addCount);
            evictLatch = new CountDownLatch(evictCount);
        }

        @Override
        public void entryAdded(EntryEvent event) {
            addLatch.countDown();
        }

        @Override
        public void entryEvicted(EntryEvent event) {
            evictLatch.countDown();
        }
    }

    @Test(expected = IllegalArgumentException.class)
    public void putNullKey() throws Exception {
        TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(1);
        HazelcastInstance instance1 = nodeFactory.newHazelcastInstance();
        ReplicatedMap<Object, Object> map1 = instance1.getReplicatedMap("default");
        map1.put(null, 1);
    }

    @Test(expected = IllegalArgumentException.class)
    public void removeNullKey() throws Exception {
        TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(1);
        HazelcastInstance instance1 = nodeFactory.newHazelcastInstance();
        ReplicatedMap<Object, Object> map1 = instance1.getReplicatedMap("default");
        map1.remove(null);
    }

    @Test
    public void removeEmptyListener() throws Exception {
        TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(1);
        HazelcastInstance instance1 = nodeFactory.newHazelcastInstance();
        ReplicatedMap<Object, Object> map1 = instance1.getReplicatedMap("default");
        // removing an unknown listener registration id must simply return false
        assertFalse(map1.removeEntryListener("2"));
    }

    @Test(expected = IllegalArgumentException.class)
    public void removeNullListener() throws Exception {
        TestHazelcastInstanceFactory nodeFactory = createHazelcastInstanceFactory(1);
        HazelcastInstance instance1 = nodeFactory.newHazelcastInstance();
        ReplicatedMap<Object, Object> map1 = instance1.getReplicatedMap("default");
        map1.removeEntryListener(null);
    }

    @Test
    public void testSizeAfterRemove() throws Exception {
        HazelcastInstance node = createHazelcastInstance();
        ReplicatedMap<Integer, Integer> map = node.getReplicatedMap("default");
        map.put(1, Integer.MAX_VALUE);
        map.remove(1);
        // removed entries (tombstones) must not count towards size()
        assertTrue(map.size() == 0);
    }

    @Test
    public void testDestroy() throws Exception {
        HazelcastInstance instance = createHazelcastInstance();
        ReplicatedMap<Object, Object> replicatedMap = instance.getReplicatedMap(randomName());
        replicatedMap.put(1, 1);
        replicatedMap.destroy();
        Collection<DistributedObject> objects = instance.getDistributedObjects();
        assertEquals(0, objects.size());
    }

    // Orders Integers descending for testValuesWithComparator.
    class DescendingComparator implements Comparator<Integer> {
        @Override
        public int compare(Integer o1, Integer o2) {
            // NOTE(review): o1 == o2 is a boxed reference comparison; equal values
            // outside the Integer cache would not compare as 0 — works here because
            // the test uses distinct values 0..99, but confirm before reuse
            return o1 == o2 ? 0 : o1 > o2 ? -1 : 1;
        }
    }
}
/*
 * Derby - Class org.apache.derbyTesting.functionTests.tests.memory.ClobMemTest
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.derbyTesting.functionTests.tests.memory;

import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.Arrays;
import java.util.Properties;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import junit.framework.Test;
import junit.framework.TestSuite;
import org.apache.derbyTesting.functionTests.util.streams.LoopingAlphabetReader;
import org.apache.derbyTesting.junit.BaseJDBCTestCase;
import org.apache.derbyTesting.junit.JDBC;
import org.apache.derbyTesting.junit.SystemPropertyTestSetup;
import org.apache.derbyTesting.junit.TestConfiguration;

/**
 * Memory-oriented CLOB tests: verifies that large CLOB values can be
 * inserted and read back without materializing them in memory, and that
 * LENGTH() works for long, empty, null and short (multi-byte) values.
 */
public class ClobMemTest extends BaseJDBCTestCase {

    /** Length of the "large" CLOB; bigger than the 16MB allotted to the test. */
    private static final int LONG_CLOB_LENGTH = 18000000;
    private static final String LONG_CLOB_LENGTH_STRING = "18000000";
    /** Short CLOB containing non-ASCII (Korean) characters. */
    private static final char[] SHORT_CLOB_CHARS = new char[] {
            '\uc911', '\uc5d0', 'a', '\uc608', '\uae30', '\uce58' };

    public ClobMemTest(String name) {
        super(name);
    }

    /**
     * Insert a clob and test length.
     *
     * @param lengthless if true use the lengthless setCharacterStream api
     *
     * @throws SQLException
     * @throws IOException
     * @throws InvocationTargetException
     * @throws IllegalAccessException
     * @throws IllegalArgumentException
     */
    private void testClobLength(boolean lengthless) throws SQLException,
            IOException, IllegalArgumentException, IllegalAccessException,
            InvocationTargetException {
        setAutoCommit(false);
        Statement s = createStatement();
        s.executeUpdate("CREATE TABLE CLOBTABLE (K INT CONSTRAINT PK PRIMARY KEY, C CLOB("
                + LONG_CLOB_LENGTH + "))");

        PreparedStatement ps = prepareStatement("INSERT INTO CLOBTABLE VALUES(?,?)");
        // We allocate 16MB for the test so use something bigger than that.
        ps.setInt(1, 1);
        LoopingAlphabetReader reader = new LoopingAlphabetReader(LONG_CLOB_LENGTH);
        if (lengthless) {
            // The lengthless JDBC 4.0 overload may not exist on older JDKs,
            // so look it up reflectively and skip the test if absent.
            Method m = null;
            try {
                Class<?> c = ps.getClass();
                m = c.getMethod("setCharacterStream",
                        new Class[] { Integer.TYPE, InputStream.class });
            } catch (NoSuchMethodException e) {
                // ignore method not found as method may not be present for
                // jdk's lower than 1.6.
                println("Skipping lengthless insert because method is not available");
                return;
            }
            m.invoke(ps, new Object[] { Integer.valueOf(2), reader });
        } else {
            ps.setCharacterStream(2, reader, LONG_CLOB_LENGTH);
        }
        ps.executeUpdate();

        // insert a zero length clob.
        ps.setInt(1, 2);
        ps.setString(2, "");
        ps.executeUpdate();

        // insert a null clob.
        ps.setInt(1, 3);
        ps.setString(2, null);
        ps.executeUpdate();

        // insert a short clob.
        ps.setInt(1, 4);
        ps.setString(2, new String(SHORT_CLOB_CHARS));
        ps.executeUpdate();

        // Currently need to use optimizer override to force use of the index.
        // Derby should use sort avoidance and do it automatically, but there
        // appears to be a bug.
        ResultSet rs = s.executeQuery("SELECT K, LENGTH(C), C FROM CLOBTABLE"
                + "-- DERBY-PROPERTIES constraint=pk\n ORDER BY K");
        rs.next();
        assertEquals(LONG_CLOB_LENGTH_STRING, rs.getString(2));
        // make sure we can still access the clob after getting length.
        // It should be ok because we reset the stream
        Reader rsReader = rs.getCharacterStream(3);
        int len = 0;
        char[] buf = new char[32672];
        for (;;) {
            int size = rsReader.read(buf);
            if (size == -1)
                break;
            len += size;
            // The looping alphabet stream cycles 'a'..'z', so the last char
            // read so far is fully determined by the running length.
            int expectedValue = ((len - 1) % 26) + 'a';
            if (size != 0)
                assertEquals(expectedValue, buf[size - 1]);
        }
        assertEquals(LONG_CLOB_LENGTH, len);

        // empty clob
        rs.next();
        assertEquals("0", rs.getString(2));
        String chars = rs.getString(3);
        assertEquals(0, chars.length());

        // null clob
        rs.next();
        assertEquals(null, rs.getString(2));
        chars = rs.getString(3);
        assertEquals(null, chars);

        // short clob
        rs.next();
        assertEquals("" + SHORT_CLOB_CHARS.length, rs.getString(2));
        chars = rs.getString(3);
        assertTrue(Arrays.equals(chars.toCharArray(), SHORT_CLOB_CHARS));
        rs.close();

        // Select just length without selecting the clob.
        rs = s.executeQuery("SELECT K, LENGTH(C) FROM CLOBTABLE " + "ORDER BY K");
        JDBC.assertFullResultSet(rs, new String[][] {
                { "1", LONG_CLOB_LENGTH_STRING }, { "2", "0" },
                { "3", null }, { "4", "6" } });
    }

    /**
     * Test the length after inserting with the setCharacterStream api
     * that takes length. In this case the length will be encoded at the
     * begining of the stream and the call should be fairly low overhead.
     *
     * @throws SQLException
     * @throws IOException
     * @throws InvocationTargetException
     * @throws IllegalAccessException
     * @throws IllegalArgumentException
     */
    public void testClobLength() throws SQLException, IOException,
            IllegalArgumentException, IllegalAccessException,
            InvocationTargetException {
        testClobLength(false);
    }

    /**
     * Test the length after inserting the clob value with the lengthless
     * setCharacterStream api. In this case we will have to read the whole
     * stream to get the length.
     *
     * @throws SQLException
     * @throws IOException
     * @throws InvocationTargetException
     * @throws IllegalAccessException
     * @throws IllegalArgumentException
     */
    public void testClobLengthWithLengthlessInsert() throws SQLException,
            IOException, IllegalArgumentException, IllegalAccessException,
            InvocationTargetException {
        testClobLength(true);
    }

    public static Test suite() {
        TestSuite suite = new TestSuite();
        // Just add Derby-6096 embedded as it takes time to run
        suite.addTest(new ClobMemTest("xtestderby6096ClobHashJoin"));
        suite.addTest(TestConfiguration.defaultSuite(ClobMemTest.class));
        Properties p = new Properties();
        // use small pageCacheSize so we don't run out of memory on the insert.
        p.setProperty("derby.storage.pageCacheSize", "100");
        return new SystemPropertyTestSetup(suite, p);
    }

    /**
     * Tests that a clob can be safely occur multiple times in a SQL
     * select and test that large objects streams are not being
     * materialized when cloned. Same as
     * testDerby4477_3645_3646_Repro_lowmem, but now using clob rather
     * than blob.
     * @see BlobMemTest#testDerby4477_3645_3646_Repro_lowmem
     */
    public void testDerby4477_3645_3646_Repro_lowmem_clob()
            throws SQLException, IOException {
        setAutoCommit(false);
        Statement s = createStatement();
        int clobsize = LONG_CLOB_LENGTH;

        s.executeUpdate("CREATE TABLE T_MAIN("
                + "ID INT GENERATED ALWAYS AS IDENTITY PRIMARY KEY, "
                + "V CLOB(" + clobsize + ") )");

        PreparedStatement ps = prepareStatement("INSERT INTO T_MAIN(V) VALUES (?)");
        int blobLen = clobsize;
        LoopingAlphabetReader stream = new LoopingAlphabetReader(blobLen);
        ps.setCharacterStream(1, stream, blobLen);
        ps.executeUpdate();
        ps.close();

        s.executeUpdate("CREATE TABLE T_COPY ( V1 CLOB(" + clobsize
                + "), V2 CLOB(" + clobsize + "))");

        // This failed in the repro for DERBY-3645 solved as part of
        // DERBY-4477:
        s.executeUpdate("INSERT INTO T_COPY SELECT V, V FROM T_MAIN");

        // Check that the two results are identical:
        ResultSet rs = s.executeQuery("SELECT * FROM T_COPY");
        rs.next();
        Reader is = rs.getCharacterStream(1);
        stream = new LoopingAlphabetReader(blobLen);
        assertEquals(stream, is);
        is = rs.getCharacterStream(2);
        stream = new LoopingAlphabetReader(blobLen);
        assertEquals(stream, is);
        rs.close();

        // This failed in the repro for DERBY-3646 solved as part of
        // DERBY-4477 (repro slightly rewoked here):
        rs = s.executeQuery("SELECT 'I', V, ID, V from T_MAIN");
        rs.next();
        is = rs.getCharacterStream(2);
        stream = new LoopingAlphabetReader(blobLen);
        assertEquals(stream, is);
        is = rs.getCharacterStream(4);
        stream = new LoopingAlphabetReader(blobLen);
        assertEquals(stream, is);

        // clean up
        stream.close();
        is.close();
        s.close();
        rs.close();
        rollback();
    }

    /**
     *
     * DERBY-6096 Make clob hash join does not run out of memory.
     * Prior to fix clobs were estimated at 0. We will test with
     * 32K clobs even though the estimatedUsage is at 10k. The default
     * max memory per table is only 1MB.
     *
     * @throws SQLException
     */
    public void xtestderby6096ClobHashJoin() throws SQLException {
        char[] c = new char[32000];
        Arrays.fill(c, 'a');
        // FIX: previously this was new String(new char[32000]), which ignored
        // the filled array and inserted 32000 NUL characters instead of the
        // intended 'a'-filled data.
        String cdata = new String(c);
        Statement s = createStatement();
        s.execute("create table d6096(i int, c clob)");
        PreparedStatement ps = prepareStatement("insert into d6096 values (?, ?)");
        ps.setString(2, cdata);
        for (int i = 0; i < 2000; i++) {
            ps.setInt(1, i);
            ps.execute();
        }
        ResultSet rs = s.executeQuery("select * from d6096 t1, d6096 t2 where t1.i=t2.i");
        // just a single fetch will build the hash table and consume the memory.
        assertTrue(rs.next());
        // derby.tests.debug prints memory usage
        if (TestConfiguration.getCurrent().isVerbose()) {
            System.gc();
            println("TotalMemory:" + Runtime.getRuntime().totalMemory()
                    + " " + "Free Memory:" + Runtime.getRuntime().freeMemory());
        }
        rs.close();
    }
}
package mil.nga.giat.geowave.core.index.simple;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import mil.nga.giat.geowave.core.index.ByteArrayId;
import mil.nga.giat.geowave.core.index.ByteArrayRange;
import mil.nga.giat.geowave.core.index.CompoundIndexStrategy;
import mil.nga.giat.geowave.core.index.MultiDimensionalCoordinates;
import mil.nga.giat.geowave.core.index.NumericIndexStrategy;
import mil.nga.giat.geowave.core.index.PersistenceUtils;
import mil.nga.giat.geowave.core.index.dimension.BasicDimensionDefinition;
import mil.nga.giat.geowave.core.index.dimension.NumericDimensionDefinition;
import mil.nga.giat.geowave.core.index.sfc.SFCFactory.SFCType;
import mil.nga.giat.geowave.core.index.sfc.data.BasicNumericDataset;
import mil.nga.giat.geowave.core.index.sfc.data.MultiDimensionalNumericData;
import mil.nga.giat.geowave.core.index.sfc.data.NumericData;
import mil.nga.giat.geowave.core.index.sfc.data.NumericRange;
import mil.nga.giat.geowave.core.index.sfc.tiered.TieredSFCIndexFactory;

import org.junit.Assert;
import org.junit.Test;

/**
 * Tests for {@link HashKeyIndexStrategy}, both stand-alone (key
 * distribution) and composed with a Hilbert SFC strategy inside a
 * {@link CompoundIndexStrategy}.
 */
public class HashKeyIndexStrategyTest {

    /** Simple lon/lat dimensions used to build the space filling curve. */
    private static final NumericDimensionDefinition[] SPATIAL_DIMENSIONS =
            new NumericDimensionDefinition[] {
                new BasicDimensionDefinition(-180, 180),
                new BasicDimensionDefinition(-90, 90)
            };

    private static final NumericIndexStrategy sfcIndexStrategy =
            TieredSFCIndexFactory.createSingleTierStrategy(
                    SPATIAL_DIMENSIONS,
                    new int[] { 16, 16 },
                    SFCType.HILBERT);

    /** Hash strategy with 3 buckets; prefixed in front of the SFC strategy. */
    private static final HashKeyIndexStrategy hashIdexStrategy =
            new HashKeyIndexStrategy(3);

    private static final CompoundIndexStrategy compoundIndexStrategy =
            new CompoundIndexStrategy(hashIdexStrategy, sfcIndexStrategy);

    private static final NumericRange dimension1Range =
            new NumericRange(50.0, 50.025);
    private static final NumericRange dimension2Range =
            new NumericRange(-20.5, -20.455);
    private static final MultiDimensionalNumericData sfcIndexedRange =
            new BasicNumericDataset(new NumericData[] {
                dimension1Range, dimension2Range });

    /**
     * Inserts a grid of small ranges and verifies the hash buckets receive a
     * reasonably even share of the insertions (standard deviation bounded
     * relative to the mean).
     */
    @Test
    public void testDistribution() {
        final Map<ByteArrayId, Long> counts = new HashMap<ByteArrayId, Long>();
        int total = 0;
        for (double x = 90; x < 180; x += 0.05) {
            for (double y = 50; y < 90; y += 0.5) {
                final NumericRange dimension1Range = new NumericRange(x, x + 0.002);
                final NumericRange dimension2Range = new NumericRange(y - 0.002, y);
                final MultiDimensionalNumericData sfcIndexedRange =
                        new BasicNumericDataset(new NumericData[] {
                            dimension1Range, dimension2Range });
                for (ByteArrayId id : hashIdexStrategy.getInsertionIds(sfcIndexedRange)) {
                    Long count = counts.get(id);
                    long nextcount = count == null ? 1 : count + 1;
                    counts.put(id, nextcount);
                    total++;
                }
            }
        }
        // FIX: cast before dividing; total / counts.size() was integer
        // division, truncating the mean and skewing the deviation bound.
        double mean = (double) total / counts.size();
        double diff = 0.0;
        for (Long count : counts.values()) {
            diff += Math.pow(mean - count, 2);
        }
        double sd = Math.sqrt(diff / counts.size());
        assertTrue(sd < mean * 0.18);
    }

    /** Round-trips the compound strategy through its binary encoding. */
    @Test
    public void testBinaryEncoding() {
        final byte[] bytes = PersistenceUtils.toBinary(compoundIndexStrategy);
        final CompoundIndexStrategy deserializedStrategy =
                PersistenceUtils.fromBinary(bytes, CompoundIndexStrategy.class);
        final byte[] bytes2 = PersistenceUtils.toBinary(deserializedStrategy);
        Assert.assertArrayEquals(bytes, bytes2);
    }

    /** Hash strategy contributes 0 dimensions; the SFC strategy 2. */
    @Test
    public void testNumberOfDimensionsPerIndexStrategy() {
        final int[] numDimensionsPerStrategy =
                compoundIndexStrategy.getNumberOfDimensionsPerIndexStrategy();
        Assert.assertEquals(0, numDimensionsPerStrategy[0]);
        Assert.assertEquals(2, numDimensionsPerStrategy[1]);
    }

    @Test
    public void testGetNumberOfDimensions() {
        final int numDimensions = compoundIndexStrategy.getNumberOfDimensions();
        Assert.assertEquals(2, numDimensions);
    }

    /**
     * For each insertion id, the decoded per-dimension coordinates must be
     * positive and the decoded range must bracket the inserted range within
     * the strategy's resolution.
     */
    @Test
    public void testGetCoordinatesPerDimension() {
        final NumericRange dimension1Range = new NumericRange(20.01, 20.02);
        final NumericRange dimension2Range = new NumericRange(30.51, 30.59);
        final MultiDimensionalNumericData sfcIndexedRange =
                new BasicNumericDataset(new NumericData[] {
                    dimension1Range, dimension2Range });
        for (ByteArrayId id : compoundIndexStrategy.getInsertionIds(sfcIndexedRange)) {
            MultiDimensionalCoordinates coords =
                    compoundIndexStrategy.getCoordinatesPerDimension(id);
            assertTrue(coords.getCoordinate(0).getCoordinate() > 0);
            assertTrue(coords.getCoordinate(1).getCoordinate() > 0);
            MultiDimensionalNumericData nd = compoundIndexStrategy.getRangeForId(id);
            assertEquals(20.02, nd.getMaxValuesPerDimension()[0], 0.1);
            assertEquals(30.59, nd.getMaxValuesPerDimension()[1], 0.2);
            assertEquals(20.01, nd.getMinValuesPerDimension()[0], 0.1);
            assertEquals(30.57, nd.getMinValuesPerDimension()[1], 0.2);
        }
    }

    /**
     * Compound query ranges must be exactly the SFC ranges replicated once
     * per hash bucket (3 buckets), each prefixed with its bucket byte.
     */
    @Test
    public void testGetQueryRangesWithMaximumNumberOfRanges() {
        final List<ByteArrayRange> sfcIndexRanges =
                sfcIndexStrategy.getQueryRanges(sfcIndexedRange);
        final List<ByteArrayRange> ranges = new ArrayList<>();
        for (int i = 0; i < 3; i++) {
            for (final ByteArrayRange r2 : sfcIndexRanges) {
                final ByteArrayId start = compoundIndexStrategy.composeByteArrayId(
                        new ByteArrayId(new byte[] { (byte) i }),
                        r2.getStart());
                final ByteArrayId end = compoundIndexStrategy.composeByteArrayId(
                        new ByteArrayId(new byte[] { (byte) i }),
                        r2.getEnd());
                ranges.add(new ByteArrayRange(start, end));
            }
        }
        final Set<ByteArrayRange> testRanges = new HashSet<>(ranges);
        final Set<ByteArrayRange> compoundIndexRanges =
                new HashSet<>(compoundIndexStrategy.getQueryRanges(sfcIndexedRange));
        Assert.assertTrue(testRanges.containsAll(compoundIndexRanges));
        Assert.assertTrue(compoundIndexRanges.containsAll(testRanges));
    }
}
package com.pi4j.gpio.extension.mcp;

import java.io.IOException;

import com.pi4j.io.gpio.GpioProvider;
import com.pi4j.io.gpio.GpioProviderBase;
import com.pi4j.io.gpio.Pin;
import com.pi4j.io.gpio.PinMode;
import com.pi4j.io.gpio.PinPullResistance;
import com.pi4j.io.gpio.PinState;
import com.pi4j.io.gpio.event.PinDigitalStateChangeEvent;
import com.pi4j.io.gpio.event.PinListener;
import com.pi4j.io.gpio.exception.InvalidPinException;
import com.pi4j.io.gpio.exception.InvalidPinModeException;
import com.pi4j.io.gpio.exception.UnsupportedPinModeException;
import com.pi4j.io.gpio.exception.UnsupportedPinPullResistanceException;
import com.pi4j.wiringpi.Spi;

/*
 * #%L
 * **********************************************************************
 * ORGANIZATION  :  Pi4J
 * PROJECT       :  Pi4J :: GPIO Extension
 * FILENAME      :  MCP23S17GpioProvider.java
 *
 * This file is part of the Pi4J project. More information about
 * this project can be found here:  http://www.pi4j.com/
 * **********************************************************************
 * %%
 * Copyright (C) 2012 - 2013 Pi4J
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License"); you
 * may not use this file except in compliance with the License. You may obtain a copy of the License
 * at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 * #L%
 */

/**
 * <p>
 * This GPIO provider implements the MCP23S17 SPI GPIO expansion board as native Pi4J GPIO pins.
 * More information about the board can be found here:
 *
 * http://ww1.microchip.com/downloads/en/DeviceDoc/21952b.pdf
 * </p>
 *
 * <p>
 * The MCP23S17 is connected via SPI connection to the Raspberry Pi and provides 16 GPIO pins that
 * can be used for either digital input or digital output pins.
 * </p>
 *
 * @author Robert Savage
 */
public class MCP23S17GpioProvider extends GpioProviderBase implements GpioProvider {

    public static final String NAME = "com.pi4j.gpio.extension.mcp.MCP23S17GpioProvider";
    public static final String DESCRIPTION = "MCP23S17 GPIO Provider";
    public static final byte DEFAULT_ADDRESS = 0b01000000; // 0x40

    // MCP23S17 register map (IOCON.BANK = 0 addressing; see datasheet).
    private static final byte REGISTER_IODIR_A = 0x00;
    private static final byte REGISTER_IODIR_B = 0x01;
    private static final byte REGISTER_GPINTEN_A = 0x04;
    private static final byte REGISTER_GPINTEN_B = 0x05;
    private static final byte REGISTER_DEFVAL_A = 0x06;
    private static final byte REGISTER_DEFVAL_B = 0x07;
    private static final byte REGISTER_INTCON_A = 0x08;
    private static final byte REGISTER_INTCON_B = 0x09;
    private static final byte REGISTER_GPPU_A = 0x0C;
    private static final byte REGISTER_GPPU_B = 0x0D;
    private static final byte REGISTER_INTF_A = 0x0E;
    private static final byte REGISTER_INTF_B = 0x0F;
    // private static final byte REGISTER_INTCAP_A = 0x10;
    // private static final byte REGISTER_INTCAP_B = 0x11;
    private static final byte REGISTER_GPIO_A = 0x12;
    private static final byte REGISTER_GPIO_B = 0x13;

    // Pin-address offsets distinguishing port A pins from port B pins.
    private static final int GPIO_A_OFFSET = 0;
    private static final int GPIO_B_OFFSET = 1000;

    // Cached register shadows for both ports (bit per pin).
    private int currentStatesA = 0;
    private int currentStatesB = 0;
    private int currentDirectionA = 0;
    private int currentDirectionB = 0;
    private int currentPullupA = 0;
    private int currentPullupB = 0;

    private byte address = DEFAULT_ADDRESS;
    // FIX: the SPI channel was previously hardcoded to 0 in read()/write(),
    // silently ignoring the constructor's spiChannel argument.
    private int spiChannel = 0;
    private GpioStateMonitor monitor = null;

    public static final int SPI_SPEED = 1000000;
    public static final byte WRITE_FLAG = 0b00000000; // 0x00
    public static final byte READ_FLAG = 0b00000001; // 0x01

    public MCP23S17GpioProvider(byte spiAddress, int spiChannel) throws IOException {
        this(spiAddress, spiChannel, SPI_SPEED);
    }

    /**
     * Opens the SPI channel and programs the chip's registers to their
     * default (all-output, all-low, no pull-ups) configuration.
     *
     * @param spiAddress hardware address byte of the chip (A2..A0 pins)
     * @param spiChannel SPI chip-select channel (0 or 1 on the Raspberry Pi)
     * @param spiSpeed   SPI clock speed in Hz
     * @throws IOException if the SPI port cannot be opened
     */
    public MCP23S17GpioProvider(byte spiAddress, int spiChannel, int spiSpeed) throws IOException {
        // setup SPI for communication
        int fd = Spi.wiringPiSPISetup(spiChannel, spiSpeed);
        if (fd <= -1) {
            throw new IOException("SPI port setup failed.");
        }

        // FIX: remember the constructor arguments; previously spiAddress was
        // never assigned (DEFAULT_ADDRESS was always used) and spiChannel was
        // only used for setup.
        this.address = spiAddress;
        this.spiChannel = spiChannel;

        // set all default pins directions
        write(REGISTER_IODIR_A, (byte) currentDirectionA);
        write(REGISTER_IODIR_B, (byte) currentDirectionB);

        // set all default pin interrupts
        write(REGISTER_GPINTEN_A, (byte) currentDirectionA);
        write(REGISTER_GPINTEN_B, (byte) currentDirectionB);

        // set all default pin interrupt default values
        write(REGISTER_DEFVAL_A, (byte) 0x00);
        write(REGISTER_DEFVAL_B, (byte) 0x00);

        // set all default pin interrupt comparison behaviors
        write(REGISTER_INTCON_A, (byte) 0x00);
        write(REGISTER_INTCON_B, (byte) 0x00);

        // set all default pin states
        write(REGISTER_GPIO_A, (byte) currentStatesA);
        write(REGISTER_GPIO_B, (byte) currentStatesB);

        // set all default pin pull up resistors
        write(REGISTER_GPPU_A, (byte) currentPullupA);
        write(REGISTER_GPPU_B, (byte) currentPullupB);
    }

    /** Writes one register over SPI: [address|W, register, data]. */
    protected void write(byte register, byte data) {
        // create packet in data buffer
        byte packet[] = new byte[3];
        packet[0] = (byte) (address | WRITE_FLAG); // address byte
        packet[1] = register;                      // register byte
        packet[2] = data;                          // data byte

        // send data packet (FIX: use the configured channel, not 0)
        Spi.wiringPiSPIDataRW(spiChannel, packet, 3);
    }

    /** Reads one register over SPI; the chip answers in the third byte. */
    protected byte read(byte register) {
        // create packet in data buffer
        byte packet[] = new byte[3];
        packet[0] = (byte) (address | READ_FLAG); // address byte
        packet[1] = register;                     // register byte
        packet[2] = 0b00000000;                   // data byte

        // (FIX: use the configured channel, not 0)
        int result = Spi.wiringPiSPIDataRW(spiChannel, packet, 3);
        if (result >= 0)
            return packet[2];
        else
            throw new RuntimeException("Invalid SPI read operation: " + result);
    }

    @Override
    public String getName() {
        return NAME;
    }

    @Override
    public void export(Pin pin, PinMode mode) {
        // make sure to set the pin mode
        super.export(pin, mode);
        setMode(pin, mode);
    }

    @Override
    public void unexport(Pin pin) {
        super.unexport(pin);
        setMode(pin, PinMode.DIGITAL_OUTPUT);
    }

    @Override
    public void setMode(Pin pin, PinMode mode) {
        // validate the requested mode is supported by this pin.
        // (FIX: removed a duplicate, unreachable second check that threw
        // UnsupportedPinModeException for the same condition.)
        if (!pin.getSupportedPinModes().contains(mode)) {
            throw new InvalidPinModeException(pin, "Invalid pin mode [" + mode.getName()
                    + "]; pin [" + pin.getName() + "] does not support this mode.");
        }

        // determine A or B port based on pin address
        try {
            if (pin.getAddress() < GPIO_B_OFFSET) {
                setModeA(pin, mode);
            } else {
                setModeB(pin, mode);
            }
        } catch (IOException ex) {
            throw new RuntimeException(ex);
        }

        // cache mode
        getPinCache(pin).setMode(mode);

        // if any pins are configured as input pins, then we need to start the
        // interrupt monitoring thread
        if (currentDirectionA > 0 || currentDirectionB > 0) {
            // if the monitor has not been started, then start it now
            if (monitor == null) {
                // start monitoring thread
                monitor = new GpioStateMonitor(this);
                monitor.start();
            }
        } else {
            // shutdown and destroy monitoring thread since there are no input pins configured
            if (monitor != null) {
                monitor.shutdown();
                monitor = null;
            }
        }
    }

    // NOTE(review): the pin address is used directly as a bit mask below;
    // this assumes MCP23S17Pin addresses are power-of-two values — confirm
    // against the pin definitions.
    private void setModeA(Pin pin, PinMode mode) throws IOException {
        // determine register and pin address
        int pinAddress = pin.getAddress() - GPIO_A_OFFSET;

        // determine update direction value based on mode
        if (mode == PinMode.DIGITAL_INPUT) {
            currentDirectionA |= pinAddress;
        } else if (mode == PinMode.DIGITAL_OUTPUT) {
            currentDirectionA &= ~pinAddress;
        }

        // next update direction value
        write(REGISTER_IODIR_A, (byte) currentDirectionA);

        // enable interrupts; interrupt on any change from previous state
        write(REGISTER_GPINTEN_A, (byte) currentDirectionA);
    }

    private void setModeB(Pin pin, PinMode mode) throws IOException {
        // determine register and pin address
        int pinAddress = pin.getAddress() - GPIO_B_OFFSET;

        // determine update direction value based on mode
        if (mode == PinMode.DIGITAL_INPUT) {
            currentDirectionB |= pinAddress;
        } else if (mode == PinMode.DIGITAL_OUTPUT) {
            currentDirectionB &= ~pinAddress;
        }

        // next update direction (mode) value
        write(REGISTER_IODIR_B, (byte) currentDirectionB);

        // enable interrupts; interrupt on any change from previous state
        write(REGISTER_GPINTEN_B, (byte) currentDirectionB);
    }

    @Override
    public PinMode getMode(Pin pin) {
        return super.getMode(pin);
    }

    @Override
    public void setState(Pin pin, PinState state) {
        // validate
        if (!hasPin(pin)) {
            throw new InvalidPinException(pin);
        }

        // only permit invocation on pins set to DIGITAL_OUTPUT modes
        if (getPinCache(pin).getMode() != PinMode.DIGITAL_OUTPUT) {
            throw new InvalidPinModeException(pin, "Invalid pin mode on pin [" + pin.getName()
                    + "]; cannot setState() when pin mode is ["
                    + getPinCache(pin).getMode().getName() + "]");
        }

        try {
            // determine A or B port based on pin address
            if (pin.getAddress() < GPIO_B_OFFSET) {
                setStateA(pin, state);
            } else {
                setStateB(pin, state);
            }
        } catch (IOException ex) {
            throw new RuntimeException(ex);
        }

        // cache pin state
        getPinCache(pin).setState(state);
    }

    private void setStateA(Pin pin, PinState state) throws IOException {
        // determine pin address
        int pinAddress = pin.getAddress() - GPIO_A_OFFSET;

        // determine state value for pin bit
        if (state.isHigh()) {
            currentStatesA |= pinAddress;
        } else {
            currentStatesA &= ~pinAddress;
        }

        // update state value
        write(REGISTER_GPIO_A, (byte) currentStatesA);
    }

    private void setStateB(Pin pin, PinState state) throws IOException {
        // determine pin address
        int pinAddress = pin.getAddress() - GPIO_B_OFFSET;

        // determine state value for pin bit
        if (state.isHigh()) {
            currentStatesB |= pinAddress;
        } else {
            currentStatesB &= ~pinAddress;
        }

        // update state value
        write(REGISTER_GPIO_B, (byte) currentStatesB);
    }

    @Override
    public PinState getState(Pin pin) {
        // call super method to perform validation on pin
        PinState result = super.getState(pin);

        // determine A or B port based on pin address
        if (pin.getAddress() < GPIO_B_OFFSET) {
            result = getStateA(pin); // get pin state
        } else {
            result = getStateB(pin); // get pin state
        }

        // return pin state
        return result;
    }

    private PinState getStateA(Pin pin) {
        // determine pin address
        int pinAddress = pin.getAddress() - GPIO_A_OFFSET;

        // determine pin state from the cached register shadow
        PinState state = (currentStatesA & pinAddress) == pinAddress ? PinState.HIGH : PinState.LOW;

        // cache state
        getPinCache(pin).setState(state);
        return state;
    }

    private PinState getStateB(Pin pin) {
        // determine pin address
        int pinAddress = pin.getAddress() - GPIO_B_OFFSET;

        // determine pin state from the cached register shadow
        PinState state = (currentStatesB & pinAddress) == pinAddress ? PinState.HIGH : PinState.LOW;

        // cache state
        getPinCache(pin).setState(state);
        return state;
    }

    @Override
    public void setPullResistance(Pin pin, PinPullResistance resistance) {
        // validate
        if (!hasPin(pin)) {
            throw new InvalidPinException(pin);
        }
        // validate
        if (!pin.getSupportedPinPullResistance().contains(resistance)) {
            throw new UnsupportedPinPullResistanceException(pin, resistance);
        }

        try {
            // determine A or B port based on pin address
            if (pin.getAddress() < GPIO_B_OFFSET) {
                setPullResistanceA(pin, resistance);
            } else {
                setPullResistanceB(pin, resistance);
            }
        } catch (IOException ex) {
            throw new RuntimeException(ex);
        }

        // cache resistance
        getPinCache(pin).setResistance(resistance);
    }

    private void setPullResistanceA(Pin pin, PinPullResistance resistance) throws IOException {
        // determine pin address
        int pinAddress = pin.getAddress() - GPIO_A_OFFSET;

        // determine pull up value for pin bit
        if (resistance == PinPullResistance.PULL_UP) {
            currentPullupA |= pinAddress;
        } else {
            currentPullupA &= ~pinAddress;
        }

        // next update pull up resistor value
        write(REGISTER_GPPU_A, (byte) currentPullupA);
    }

    private void setPullResistanceB(Pin pin, PinPullResistance resistance) throws IOException {
        // determine pin address
        int pinAddress = pin.getAddress() - GPIO_B_OFFSET;

        // determine pull up value for pin bit
        if (resistance == PinPullResistance.PULL_UP) {
            currentPullupB |= pinAddress;
        } else {
            currentPullupB &= ~pinAddress;
        }

        // next update pull up resistor value
        write(REGISTER_GPPU_B, (byte) currentPullupB);
    }

    @Override
    public PinPullResistance getPullResistance(Pin pin) {
        return super.getPullResistance(pin);
    }

    @Override
    public void shutdown() {
        // prevent reentrant invocation
        if (isShutdown())
            return;

        // perform shutdown login in base
        super.shutdown();

        // if a monitor is running, then shut it down now
        if (monitor != null) {
            // shutdown monitoring thread
            monitor.shutdown();
            monitor = null;
        }
    }

    /**
     * This class/thread is used to to actively monitor for GPIO interrupts
     *
     * @author Robert Savage
     */
    private class GpioStateMonitor extends Thread {
        private MCP23S17GpioProvider provider;
        private boolean shuttingDown = false;

        public GpioStateMonitor(MCP23S17GpioProvider provider) {
            this.provider = provider;
        }

        public void shutdown() {
            shuttingDown = true;
        }

        public void run() {
            while (!shuttingDown) {
                try {
                    // only process for interrupts if a pin on port A is configured as an input pin
                    if (currentDirectionA > 0) {
                        // process interrupts for port A.
                        // FIX: mask to an unsigned int; the raw byte is
                        // negative when bit 7 (pin A7) is set, so the old
                        // "> 0" test silently dropped interrupts on that pin.
                        int pinInterruptA = provider.read(REGISTER_INTF_A) & 0xFF;

                        // validate that there is at least one interrupt active on port A
                        if (pinInterruptA > 0) {
                            // read the current pin states on port A
                            int pinInterruptState = provider.read(REGISTER_GPIO_A) & 0xFF;

                            // loop over the available pins on port A
                            for (Pin pin : MCP23S17Pin.ALL_A_PINS) {
                                int pinAddressA = pin.getAddress() - GPIO_A_OFFSET;

                                // is there an interrupt flag on this pin?
                                if ((pinInterruptA & pinAddressA) > 0) {
                                    evaluatePinForChangeA(pin, pinInterruptState);
                                }
                            }
                        }
                    }

                    // only process for interrupts if a pin on port B is configured as an input pin
                    if (currentDirectionB > 0) {
                        // process interrupts for port B (FIX: unsigned mask, as above)
                        int pinInterruptB = provider.read(REGISTER_INTF_B) & 0xFF;

                        // validate that there is at least one interrupt active on port B
                        if (pinInterruptB > 0) {
                            // read the current pin states on port B
                            int pinInterruptState = provider.read(REGISTER_GPIO_B) & 0xFF;

                            // loop over the available pins on port B
                            for (Pin pin : MCP23S17Pin.ALL_B_PINS) {
                                int pinAddressB = pin.getAddress() - GPIO_B_OFFSET;

                                // is there an interrupt flag on this pin?
                                if ((pinInterruptB & pinAddressB) > 0) {
                                    evaluatePinForChangeB(pin, pinInterruptState);
                                }
                            }
                        }
                    }

                    // ... lets take a short breather ...
                    // (FIX: removed a stray no-op Thread.currentThread() call)
                    Thread.sleep(50);
                } catch (Exception ex) {
                    ex.printStackTrace();
                }
            }
        }

        private void evaluatePinForChangeA(Pin pin, int state) {
            if (getPinCache(pin).isExported()) {
                // determine pin address
                int pinAddress = pin.getAddress() - GPIO_A_OFFSET;

                if ((state & pinAddress) != (currentStatesA & pinAddress)) {
                    PinState newState = (state & pinAddress) == pinAddress ? PinState.HIGH
                            : PinState.LOW;

                    // cache state
                    getPinCache(pin).setState(newState);

                    // determine and cache state value for pin bit
                    if (newState.isHigh()) {
                        currentStatesA |= pinAddress;
                    } else {
                        currentStatesA &= ~pinAddress;
                    }

                    // change detected for INPUT PIN
                    dispatchPinChangeEvent(pin.getAddress(), newState);
                }
            }
        }

        private void evaluatePinForChangeB(Pin pin, int state) {
            if (getPinCache(pin).isExported()) {
                // determine pin address
                int pinAddress = pin.getAddress() - GPIO_B_OFFSET;

                if ((state & pinAddress) != (currentStatesB & pinAddress)) {
                    PinState newState = (state & pinAddress) == pinAddress ? PinState.HIGH
                            : PinState.LOW;

                    // cache state
                    getPinCache(pin).setState(newState);

                    // determine and cache state value for pin bit
                    if (newState.isHigh()) {
                        currentStatesB |= pinAddress;
                    } else {
                        currentStatesB &= ~pinAddress;
                    }

                    // change detected for INPUT PIN
                    dispatchPinChangeEvent(pin.getAddress(), newState);
                }
            }
        }

        private void dispatchPinChangeEvent(int pinAddress, PinState state) {
            // iterate over the pin listeners map
            for (Pin pin : listeners.keySet()) {
                // dispatch this event to the listener
                // if a matching pin address is found
                if (pin.getAddress() == pinAddress) {
                    // dispatch this event to all listener handlers
                    for (PinListener listener : listeners.get(pin)) {
                        listener.handlePinEvent(new PinDigitalStateChangeEvent(this, pin, state));
                    }
                }
            }
        }
    }
}
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.8-b130911.1802 
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a> 
// Any modifications to this file will be lost upon recompilation of the source schema. 
// Generated on: 2014.10.30 at 09:54:48 PM EST 
//

// NOTE(review): JAXB-generated binding class for the top-level <xs:schema> element.
// Per the generator banner above, hand edits are lost on regeneration — change the
// source schema instead of this file.

package staxgen.schema;

import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlElements;
import javax.xml.bind.annotation.XmlID;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;
import javax.xml.bind.annotation.adapters.CollapsedStringAdapter;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;


/**
 * <p>Java class for anonymous complex type.
 * 
 * <p>The following schema fragment specifies the expected content contained within this class.
 * 
 * <pre>
 * &lt;complexType>
 *   &lt;complexContent>
 *     &lt;extension base="{http://www.w3.org/2001/XMLSchema}openAttrs">
 *       &lt;sequence>
 *         &lt;choice maxOccurs="unbounded" minOccurs="0">
 *           &lt;element ref="{http://www.w3.org/2001/XMLSchema}include"/>
 *           &lt;element ref="{http://www.w3.org/2001/XMLSchema}import"/>
 *           &lt;element ref="{http://www.w3.org/2001/XMLSchema}redefine"/>
 *           &lt;element ref="{http://www.w3.org/2001/XMLSchema}annotation"/>
 *         &lt;/choice>
 *         &lt;sequence maxOccurs="unbounded" minOccurs="0">
 *           &lt;group ref="{http://www.w3.org/2001/XMLSchema}schemaTop"/>
 *           &lt;element ref="{http://www.w3.org/2001/XMLSchema}annotation" maxOccurs="unbounded" minOccurs="0"/>
 *         &lt;/sequence>
 *       &lt;/sequence>
 *       &lt;attribute name="targetNamespace" type="{http://www.w3.org/2001/XMLSchema}anyURI" />
 *       &lt;attribute name="version" type="{http://www.w3.org/2001/XMLSchema}token" />
 *       &lt;attribute name="finalDefault" type="{http://www.w3.org/2001/XMLSchema}fullDerivationSet" default="" />
 *       &lt;attribute name="blockDefault" type="{http://www.w3.org/2001/XMLSchema}blockSet" default="" />
 *       &lt;attribute name="attributeFormDefault" type="{http://www.w3.org/2001/XMLSchema}formChoice" default="unqualified" />
 *       &lt;attribute name="elementFormDefault" type="{http://www.w3.org/2001/XMLSchema}formChoice" default="unqualified" />
 *       &lt;attribute name="id" type="{http://www.w3.org/2001/XMLSchema}ID" />
 *       &lt;attribute ref="{http://www.w3.org/XML/1998/namespace}lang"/>
 *       &lt;anyAttribute processContents='lax' namespace='##other'/>
 *     &lt;/extension>
 *   &lt;/complexContent>
 * &lt;/complexType>
 * </pre>
 * 
 * 
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
    "includeOrImportOrRedefine",
    "simpleTypeOrComplexTypeOrGroup"
})
@XmlRootElement(name = "schema")
public class Schema
    extends OpenAttrs
{

    // Schema preamble content in document order: the repeated
    // include/import/redefine/annotation choice from the fragment above.
    @XmlElements({
        @XmlElement(name = "include", type = Include.class),
        @XmlElement(name = "import", type = Import.class),
        @XmlElement(name = "redefine", type = Redefine.class),
        @XmlElement(name = "annotation", type = Annotation.class)
    })
    protected List<OpenAttrs> includeOrImportOrRedefine;
    // Top-level schema components (the schemaTop group) with interleaved annotations,
    // again kept in document order in a single heterogeneous list.
    @XmlElements({
        @XmlElement(name = "simpleType", type = TopLevelSimpleType.class),
        @XmlElement(name = "complexType", type = TopLevelComplexType.class),
        @XmlElement(name = "group", type = NamedGroup.class),
        @XmlElement(name = "attributeGroup", type = NamedAttributeGroup.class),
        @XmlElement(name = "element", type = TopLevelElement.class),
        @XmlElement(name = "attribute", type = TopLevelAttribute.class),
        @XmlElement(name = "notation", type = Notation.class),
        @XmlElement(name = "annotation", type = Annotation.class)
    })
    protected List<OpenAttrs> simpleTypeOrComplexTypeOrGroup;
    // Attributes of <xs:schema>; all are optional and remain null/empty when absent.
    @XmlAttribute(name = "targetNamespace")
    @XmlSchemaType(name = "anyURI")
    protected String targetNamespace;
    @XmlAttribute(name = "version")
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    @XmlSchemaType(name = "token")
    protected String version;
    @XmlAttribute(name = "finalDefault")
    @XmlSchemaType(name = "fullDerivationSet")
    protected List<String> finalDefault;
    @XmlAttribute(name = "blockDefault")
    @XmlSchemaType(name = "blockSet")
    protected List<String> blockDefault;
    @XmlAttribute(name = "attributeFormDefault")
    protected FormChoice attributeFormDefault;
    @XmlAttribute(name = "elementFormDefault")
    protected FormChoice elementFormDefault;
    @XmlAttribute(name = "id")
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    @XmlID
    @XmlSchemaType(name = "ID")
    protected String id;
    @XmlAttribute(name = "lang", namespace = "http://www.w3.org/XML/1998/namespace")
    protected String lang;

    /**
     * Gets the value of the includeOrImportOrRedefine property.
     * 
     * <p>
     * This accessor method returns a reference to the live list,
     * not a snapshot. Therefore any modification you make to the
     * returned list will be present inside the JAXB object.
     * This is why there is not a <CODE>set</CODE> method for the includeOrImportOrRedefine property.
     * 
     * <p>
     * For example, to add a new item, do as follows:
     * <pre>
     *    getIncludeOrImportOrRedefine().add(newItem);
     * </pre>
     * 
     * 
     * <p>
     * Objects of the following type(s) are allowed in the list
     * {@link Include }
     * {@link Import }
     * {@link Redefine }
     * {@link Annotation }
     * 
     * 
     */
    public List<OpenAttrs> getIncludeOrImportOrRedefine() {
        // Lazily initialized so an unmarshalled document without preamble
        // content costs no allocation until the accessor is used.
        if (includeOrImportOrRedefine == null) {
            includeOrImportOrRedefine = new ArrayList<OpenAttrs>();
        }
        return this.includeOrImportOrRedefine;
    }

    /**
     * Gets the value of the simpleTypeOrComplexTypeOrGroup property.
     * 
     * <p>
     * This accessor method returns a reference to the live list,
     * not a snapshot. Therefore any modification you make to the
     * returned list will be present inside the JAXB object.
     * This is why there is not a <CODE>set</CODE> method for the simpleTypeOrComplexTypeOrGroup property.
     * 
     * <p>
     * For example, to add a new item, do as follows:
     * <pre>
     *    getSimpleTypeOrComplexTypeOrGroup().add(newItem);
     * </pre>
     * 
     * 
     * <p>
     * Objects of the following type(s) are allowed in the list
     * {@link TopLevelSimpleType }
     * {@link TopLevelComplexType }
     * {@link NamedGroup }
     * {@link NamedAttributeGroup }
     * {@link TopLevelElement }
     * {@link TopLevelAttribute }
     * {@link Notation }
     * {@link Annotation }
     * 
     * 
     */
    public List<OpenAttrs> getSimpleTypeOrComplexTypeOrGroup() {
        if (simpleTypeOrComplexTypeOrGroup == null) {
            simpleTypeOrComplexTypeOrGroup = new ArrayList<OpenAttrs>();
        }
        return this.simpleTypeOrComplexTypeOrGroup;
    }

    /**
     * Gets the value of the targetNamespace property.
     * 
     * @return
     *     possible object is
     *     {@link String }
     *     
     */
    public String getTargetNamespace() {
        return targetNamespace;
    }

    /**
     * Sets the value of the targetNamespace property.
     * 
     * @param value
     *     allowed object is
     *     {@link String }
     *     
     */
    public void setTargetNamespace(String value) {
        this.targetNamespace = value;
    }

    /**
     * Gets the value of the version property.
     * 
     * @return
     *     possible object is
     *     {@link String }
     *     
     */
    public String getVersion() {
        return version;
    }

    /**
     * Sets the value of the version property.
     * 
     * @param value
     *     allowed object is
     *     {@link String }
     *     
     */
    public void setVersion(String value) {
        this.version = value;
    }

    /**
     * Gets the value of the finalDefault property.
     * 
     * <p>
     * This accessor method returns a reference to the live list,
     * not a snapshot. Therefore any modification you make to the
     * returned list will be present inside the JAXB object.
     * This is why there is not a <CODE>set</CODE> method for the finalDefault property.
     * 
     * <p>
     * For example, to add a new item, do as follows:
     * <pre>
     *    getFinalDefault().add(newItem);
     * </pre>
     * 
     * 
     * <p>
     * Objects of the following type(s) are allowed in the list
     * {@link String }
     * 
     * 
     */
    public List<String> getFinalDefault() {
        if (finalDefault == null) {
            finalDefault = new ArrayList<String>();
        }
        return this.finalDefault;
    }

    /**
     * Gets the value of the blockDefault property.
     * 
     * <p>
     * This accessor method returns a reference to the live list,
     * not a snapshot. Therefore any modification you make to the
     * returned list will be present inside the JAXB object.
     * This is why there is not a <CODE>set</CODE> method for the blockDefault property.
     * 
     * <p>
     * For example, to add a new item, do as follows:
     * <pre>
     *    getBlockDefault().add(newItem);
     * </pre>
     * 
     * 
     * <p>
     * Objects of the following type(s) are allowed in the list
     * {@link String }
     * 
     * 
     */
    public List<String> getBlockDefault() {
        if (blockDefault == null) {
            blockDefault = new ArrayList<String>();
        }
        return this.blockDefault;
    }

    /**
     * Gets the value of the attributeFormDefault property.
     * 
     * @return
     *     possible object is
     *     {@link FormChoice }
     *     
     */
    public FormChoice getAttributeFormDefault() {
        // Substitutes the schema-declared default "unqualified" when the
        // attribute was absent from the document (field still null).
        if (attributeFormDefault == null) {
            return FormChoice.UNQUALIFIED;
        } else {
            return attributeFormDefault;
        }
    }

    /**
     * Sets the value of the attributeFormDefault property.
     * 
     * @param value
     *     allowed object is
     *     {@link FormChoice }
     *     
     */
    public void setAttributeFormDefault(FormChoice value) {
        this.attributeFormDefault = value;
    }

    /**
     * Gets the value of the elementFormDefault property.
     * 
     * @return
     *     possible object is
     *     {@link FormChoice }
     *     
     */
    public FormChoice getElementFormDefault() {
        // Same defaulting rule as getAttributeFormDefault(): schema default
        // "unqualified" is returned when the attribute was not present.
        if (elementFormDefault == null) {
            return FormChoice.UNQUALIFIED;
        } else {
            return elementFormDefault;
        }
    }

    /**
     * Sets the value of the elementFormDefault property.
     * 
     * @param value
     *     allowed object is
     *     {@link FormChoice }
     *     
     */
    public void setElementFormDefault(FormChoice value) {
        this.elementFormDefault = value;
    }

    /**
     * Gets the value of the id property.
     * 
     * @return
     *     possible object is
     *     {@link String }
     *     
     */
    public String getId() {
        return id;
    }

    /**
     * Sets the value of the id property.
     * 
     * @param value
     *     allowed object is
     *     {@link String }
     *     
     */
    public void setId(String value) {
        this.id = value;
    }

    /**
     * Gets the value of the lang property.
     * 
     * @return
     *     possible object is
     *     {@link String }
     *     
     */
    public String getLang() {
        return lang;
    }

    /**
     * Sets the value of the lang property.
     * 
     * @param value
     *     allowed object is
     *     {@link String }
     *     
     */
    public void setLang(String value) {
        this.lang = value;
    }

}
// SECTION-START[License Header]
// <editor-fold defaultstate="collapsed" desc=" Generated License ">
/*
 *   Java Object Management and Configuration
 *   Copyright (C) Christian Schulte, 2005-2006
 *   All rights reserved.
 *
 *   Redistribution and use in source and binary forms, with or without
 *   modification, are permitted provided that the following conditions
 *   are met:
 *
 *     o Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *
 *     o Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in
 *       the documentation and/or other materials provided with the
 *       distribution.
 *
 *   THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
 *   INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
 *   AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
 *   THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY DIRECT, INDIRECT,
 *   INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 *   NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 *   DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 *   THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 *   (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 *   THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 *   $JOMC$
 *
 */
// </editor-fold>
// SECTION-END
package org.jomc.standalone.ri.naming;

import java.util.Collections;
import java.util.HashMap;
import java.util.Hashtable;
import java.util.Map;
import javax.naming.Binding;
import javax.naming.CompositeName;
import javax.naming.Context;
import javax.naming.ContextNotEmptyException;
import javax.naming.Name;
import javax.naming.NameAlreadyBoundException;
import javax.naming.NameClassPair;
import javax.naming.NameNotFoundException;
import javax.naming.NameParser;
import javax.naming.NamingEnumeration;
import javax.naming.NamingException;
import javax.naming.NotContextException;
import javax.naming.OperationNotSupportedException;
import javax.naming.spi.NamingManager;

// SECTION-START[Documentation]
// <editor-fold defaultstate="collapsed" desc=" Generated Documentation ">
/**
 * Standalone {@code Context} implementation.
 *
 * <dl>
 *   <dt><b>Identifier:</b></dt><dd>org.jomc.standalone.ri.naming.StandaloneContext</dd>
 *   <dt><b>Name:</b></dt><dd>JOMC Standalone RI StandaloneContext</dd>
 *   <dt><b>Specifications:</b></dt>
 *     <dd>javax.naming.Context</dd>
 *   <dt><b>Abstract:</b></dt><dd>No</dd>
 *   <dt><b>Final:</b></dt><dd>No</dd>
 *   <dt><b>Stateless:</b></dt><dd>No</dd>
 * </dl>
 *
 * @author <a href="mailto:schulte2005@users.sourceforge.net">Christian Schulte</a> 1.0
 * @version 1.0-beta-3-SNAPSHOT
 */
// </editor-fold>
// SECTION-END
// SECTION-START[Annotations]
// <editor-fold defaultstate="collapsed" desc=" Generated Annotations ">
@javax.annotation.Generated( value = "org.jomc.tools.SourceFileProcessor 1.2",
                             comments = "See http://jomc.sourceforge.net/jomc/1.2/jomc-tools-1.2" )
// </editor-fold>
// SECTION-END
public class StandaloneContext implements Context
{
    // SECTION-START[Context]

    /**
     * Resolves {@code name} against the object map of this context.
     * <p>
     * An empty name yields a new {@code StandaloneContext} sharing this
     * context's object map and a copy of its environment; any other name is
     * resolved via {@link NamingManager#getObjectInstance}.
     * <p>
     * NOTE(review): only the object map is consulted here — subcontexts
     * created by {@link #createSubcontext(Name)} live in the separate context
     * map and are therefore not reachable through {@code lookup}; confirm
     * this asymmetry is intended.
     * NOTE(review): an unbound name is not rejected; {@code null} is handed
     * to {@code NamingManager.getObjectInstance} instead of throwing
     * {@code NameNotFoundException} — verify against the {@code Context}
     * contract.
     */
    public Object lookup( final Name name ) throws NamingException
    {
        if ( name.isEmpty() )
        {
            final StandaloneContext shared = new StandaloneContext( this.getObjectMap() );
            shared.getEnvironment().putAll( this.getEnvironment() );
            return shared;
        }

        try
        {
            return NamingManager.getObjectInstance( this.getObjectMap().get( name ), name, this, this.getEnvironment() );
        }
        catch ( final Exception e )
        {
            // Wrap any factory failure, preserving the original cause.
            final NamingException n = new NamingException( getMessage( e ) );
            n.setRootCause( e );
            throw n;
        }
    }

    /** String overload delegating to {@link #lookup(Name)}. */
    public Object lookup( final String name ) throws NamingException
    {
        return this.lookup( new CompositeName( name ) );
    }

    /**
     * Binds {@code obj} to {@code name}, rejecting names that are already
     * bound. The stored value is the state produced by
     * {@link NamingManager#getStateToBind}, not {@code obj} itself.
     */
    public void bind( final Name name, final Object obj ) throws NamingException
    {
        if ( this.getObjectMap().containsKey( name ) )
        {
            throw new NameAlreadyBoundException( name.toString() );
        }

        this.getObjectMap().put( name, NamingManager.getStateToBind( obj, name, this, this.getEnvironment() ) );
    }

    /** String overload delegating to {@link #bind(Name, Object)}. */
    public void bind( final String name, final Object obj ) throws NamingException
    {
        this.bind( new CompositeName( name ), obj );
    }

    /**
     * Binds {@code obj} to {@code name}, overwriting any existing binding.
     * Empty names are rejected.
     */
    public void rebind( final Name name, final Object obj ) throws NamingException
    {
        if ( name.isEmpty() )
        {
            throw new NamingException( name.toString() );
        }

        this.getObjectMap().put( name, NamingManager.getStateToBind( obj, name, this, this.getEnvironment() ) );
    }

    /** String overload delegating to {@link #rebind(Name, Object)}. */
    public void rebind( final String name, final Object obj ) throws NamingException
    {
        this.rebind( new CompositeName( name ), obj );
    }

    /**
     * Removes the binding for {@code name}.
     * NOTE(review): unbinding an unknown name is silently ignored rather than
     * raising {@code NameNotFoundException} — confirm intended.
     */
    public void unbind( final Name name ) throws NamingException
    {
        this.getObjectMap().remove( name );
    }

    /** String overload delegating to {@link #unbind(Name)}. */
    public void unbind( final String name ) throws NamingException
    {
        this.unbind( new CompositeName( name ) );
    }

    /**
     * Renames a binding by re-binding the looked-up value under the new name
     * and then unbinding the old one. Both names must be non-empty.
     * NOTE(review): the value passes through {@code getObjectInstance} (in
     * lookup) and then {@code getStateToBind} (in bind), so the stored state
     * may differ from the original binding — confirm acceptable.
     */
    public synchronized void rename( final Name oldName, final Name newName ) throws NamingException
    {
        if ( oldName.isEmpty() )
        {
            throw new NamingException( oldName.toString() );
        }
        if ( newName.isEmpty() )
        {
            throw new NamingException( newName.toString() );
        }

        this.bind( newName, this.lookup( oldName ) );
        this.unbind( oldName );
    }

    /** String overload delegating to {@link #rename(Name, Name)}. */
    public void rename( final String oldName, final String newName ) throws NamingException
    {
        this.rename( new CompositeName( oldName ), new CompositeName( newName ) );
    }

    /**
     * Enumerates name/class pairs for {@code name}.
     * NOTE(review): this yields a single pair describing the object bound at
     * {@code name} itself — not the bindings of a subcontext, which is what
     * {@code Context.list} conventionally returns. Confirm intended.
     */
    public NamingEnumeration<NameClassPair> list( final Name name ) throws NamingException
    {
        return new NamingEnumeration<NameClassPair>()
        {

            // Holds the single element; set to null once consumed or closed.
            private Object next = lookup( name );

            public NameClassPair next() throws NamingException
            {
                if ( this.next == null )
                {
                    throw new NamingException();
                }

                final NameClassPair nameClassPair = new NameClassPair( name.toString(), this.next.getClass().getName() );
                this.next = null;
                return nameClassPair;
            }

            public boolean hasMore() throws NamingException
            {
                return this.next != null;
            }

            public void close() throws NamingException
            {
                this.next = null;
            }

            public boolean hasMoreElements()
            {
                try
                {
                    return this.hasMore();
                }
                catch ( final NamingException e )
                {
                    // Enumeration methods cannot throw checked exceptions.
                    throw new AssertionError( e );
                }
            }

            public NameClassPair nextElement()
            {
                try
                {
                    return this.next();
                }
                catch ( final NamingException e )
                {
                    throw new AssertionError( e );
                }
            }

        };
    }

    /** String overload delegating to {@link #list(Name)}. */
    public NamingEnumeration<NameClassPair> list( final String name ) throws NamingException
    {
        return this.list( new CompositeName( name ) );
    }

    /**
     * Enumerates bindings for {@code name}; same single-element behaviour as
     * {@link #list(Name)} — see the review note there.
     */
    public NamingEnumeration<Binding> listBindings( final Name name ) throws NamingException
    {
        return new NamingEnumeration<Binding>()
        {

            // Holds the single element; set to null once consumed or closed.
            private Object next = lookup( name );

            public Binding next() throws NamingException
            {
                if ( this.next == null )
                {
                    throw new NamingException();
                }

                final Binding binding = new Binding( name.toString(), this.next );
                this.next = null;
                return binding;
            }

            public boolean hasMore() throws NamingException
            {
                return this.next != null;
            }

            public void close() throws NamingException
            {
                this.next = null;
            }

            public boolean hasMoreElements()
            {
                try
                {
                    return this.hasMore();
                }
                catch ( final NamingException e )
                {
                    throw new AssertionError( e );
                }
            }

            public Binding nextElement()
            {
                try
                {
                    return this.next();
                }
                catch ( final NamingException e )
                {
                    throw new AssertionError( e );
                }
            }

        };
    }

    /** String overload delegating to {@link #listBindings(Name)}. */
    public NamingEnumeration<Binding> listBindings( final String name ) throws NamingException
    {
        return this.listBindings( new CompositeName( name ) );
    }

    /**
     * Destroys the subcontext bound at {@code name} after verifying it is a
     * {@code StandaloneContext} with an empty object map.
     * NOTE(review): only the subcontext's object map is checked for emptiness;
     * nested subcontexts in its context map would not block destruction —
     * confirm intended.
     */
    public synchronized void destroySubcontext( final Name name ) throws NamingException
    {
        if ( name.isEmpty() )
        {
            throw new NamingException( name.toString() );
        }

        final Object o = this.getContextMap().get( name );

        if ( o == null )
        {
            throw new NameNotFoundException( name.toString() );
        }
        if ( !( o instanceof StandaloneContext ) )
        {
            throw new NotContextException( o.toString() );
        }
        if ( !( (StandaloneContext) o ).getObjectMap().isEmpty() )
        {
            throw new ContextNotEmptyException( name.toString() );
        }

        this.getContextMap().remove( name );
    }

    /** String overload delegating to {@link #destroySubcontext(Name)}. */
    public void destroySubcontext( final String name ) throws NamingException
    {
        this.destroySubcontext( new CompositeName( name ) );
    }

    /**
     * Creates a subcontext under {@code name}, inheriting this context's
     * environment. Stored in the context map (see the review note on
     * {@link #lookup(Name)} about reachability).
     */
    public synchronized Context createSubcontext( final Name name ) throws NamingException
    {
        if ( name.isEmpty() )
        {
            throw new NamingException( name.toString() );
        }
        if ( this.getObjectMap().containsKey( name ) )
        {
            throw new NameAlreadyBoundException( name.toString() );
        }

        final StandaloneContext subcontext = new StandaloneContext();
        subcontext.getEnvironment().putAll( this.getEnvironment() );
        this.getContextMap().put( name, subcontext );
        return subcontext;
    }

    /** String overload delegating to {@link #createSubcontext(Name)}. */
    public Context createSubcontext( final String name ) throws NamingException
    {
        return this.createSubcontext( new CompositeName( name ) );
    }

    /** Links are not treated specially; behaves exactly like {@link #lookup(Name)}. */
    public Object lookupLink( final Name name ) throws NamingException
    {
        return this.lookup( name );
    }

    /** String overload delegating to {@link #lookupLink(Name)}. */
    public Object lookupLink( final String name ) throws NamingException
    {
        return this.lookupLink( new CompositeName( name ) );
    }

    /** Returns a parser producing {@code CompositeName}s; the {@code name} argument is ignored. */
    public NameParser getNameParser( final Name name ) throws NamingException
    {
        return new NameParser()
        {

            public Name parse( final String name ) throws NamingException
            {
                return new CompositeName( name );
            }

        };
    }

    /** String overload delegating to {@link #getNameParser(Name)}. */
    public NameParser getNameParser( final String name ) throws NamingException
    {
        return this.getNameParser( new CompositeName( name ) );
    }

    /** Composes {@code name} relative to {@code prefix} as a {@code CompositeName}. */
    public Name composeName( final Name name, final Name prefix ) throws NamingException
    {
        return new CompositeName( prefix.toString() ).add( name.toString() );
    }

    /** String overload delegating to {@link #composeName(Name, Name)}. */
    public String composeName( final String name, final String prefix ) throws NamingException
    {
        return this.composeName( new CompositeName( name ), new CompositeName( prefix ) ).toString();
    }

    /**
     * Adds an environment property; both key and value must be non-null.
     * @return the previous value for {@code propName}, or {@code null}.
     */
    public Object addToEnvironment( final String propName, final Object propVal ) throws NamingException
    {
        if ( propName == null )
        {
            throw new NamingException();
        }
        if ( propVal == null )
        {
            throw new NamingException();
        }

        return this.getEnvironment().put( propName, propVal );
    }

    /**
     * Removes an environment property; {@code propName} must be non-null.
     * @return the removed value, or {@code null} if it was not present.
     */
    public Object removeFromEnvironment( final String propName ) throws NamingException
    {
        if ( propName == null )
        {
            throw new NamingException();
        }

        return this.getEnvironment().remove( propName );
    }

    /** Lazily creates and returns the (raw, mutable) environment table of this context. */
    public Hashtable getEnvironment() throws NamingException
    {
        if ( this.environment == null )
        {
            this.environment = new Hashtable();
        }

        return this.environment;
    }

    /** No resources to release; intentionally a no-op. */
    public void close() throws NamingException
    {
    }

    /** Full names are not supported by this standalone implementation. */
    public String getNameInNamespace() throws NamingException
    {
        throw new OperationNotSupportedException();
    }

    // SECTION-END
    // SECTION-START[StandaloneContext]

    /** Sub-contexts of the instance. */
    private Map<Name, Context> contextMap;

    /** Objects of the instance. */
    private Map<Name, Object> objectMap;

    /** Environment of the instance. */
    private Hashtable<?, ?> environment;

    /**
     * Creates a new {@code StandaloneContext} taking a map backing the instance.
     *
     * @param objectMap The map backing the instance. May be {@code null}, in
     * which case a synchronized map is created on first use.
     */
    public StandaloneContext( final Map<Name, Object> objectMap )
    {
        super();
        this.objectMap = objectMap;
    }

    /**
     * Gets the object map backing the instance.
     *
     * @return The object map backing the instance.
     */
    protected Map<Name, Object> getObjectMap()
    {
        if ( this.objectMap == null )
        {
            this.objectMap = Collections.synchronizedMap( new HashMap<Name, Object>() );
        }

        return this.objectMap;
    }

    /**
     * Gets the context map backing the instance.
     *
     * @return The context map backing the instance.
     */
    protected Map<Name, Context> getContextMap()
    {
        if ( this.contextMap == null )
        {
            this.contextMap = Collections.synchronizedMap( new HashMap<Name, Context>() );
        }

        return this.contextMap;
    }

    /**
     * Walks the cause chain of {@code t} for the first non-null message.
     *
     * @return The first non-null message found, or {@code null}.
     */
    private static String getMessage( final Throwable t )
    {
        return t != null ? t.getMessage() != null ? t.getMessage() : getMessage( t.getCause() ) : null;
    }

    // SECTION-END
    // SECTION-START[Constructors]
    // <editor-fold defaultstate="collapsed" desc=" Generated Constructors ">

    /** Creates a new {@code StandaloneContext} instance. */
    @javax.annotation.Generated( value = "org.jomc.tools.SourceFileProcessor 1.2",
                                 comments = "See http://jomc.sourceforge.net/jomc/1.2/jomc-tools-1.2" )
    public StandaloneContext()
    {
        // SECTION-START[Default Constructor]
        this( null );
        // SECTION-END
    }
    // </editor-fold>
    // SECTION-END
    // SECTION-START[Dependencies]
    // SECTION-END
    // SECTION-START[Properties]
    // SECTION-END
    // SECTION-START[Messages]
    // SECTION-END
}
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.xdebugger.impl.inline;

import com.intellij.icons.AllIcons;
import com.intellij.openapi.actionSystem.ActionGroup;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.EditorCustomElementRenderer;
import com.intellij.openapi.editor.Inlay;
import com.intellij.openapi.editor.colors.EditorColorsScheme;
import com.intellij.openapi.editor.colors.EditorFontType;
import com.intellij.openapi.editor.colors.TextAttributesKey;
import com.intellij.openapi.editor.event.EditorMouseEvent;
import com.intellij.openapi.editor.ex.EditorEx;
import com.intellij.openapi.editor.impl.EditorImpl;
import com.intellij.openapi.editor.impl.FontInfo;
import com.intellij.openapi.editor.markup.EffectType;
import com.intellij.openapi.editor.markup.TextAttributes;
import com.intellij.openapi.ui.GraphicsConfig;
import com.intellij.openapi.util.Pair;
import com.intellij.ui.SimpleColoredText;
import com.intellij.ui.SimpleTextAttributes;
import com.intellij.ui.paint.EffectPainter;
import com.intellij.util.ui.GraphicsUtil;
import com.intellij.util.ui.UIUtil;
import com.intellij.xdebugger.XDebugSession;
import com.intellij.xdebugger.XDebuggerManager;
import com.intellij.xdebugger.XSourcePosition;
import com.intellij.xdebugger.frame.XNamedValue;
import com.intellij.xdebugger.frame.XValue;
import com.intellij.xdebugger.impl.XDebugSessionImpl;
import com.intellij.xdebugger.impl.XDebuggerManagerImpl;
import com.intellij.xdebugger.impl.evaluate.XDebuggerEditorLinePainter;
import com.intellij.xdebugger.impl.evaluate.quick.XDebuggerTreeCreator;
import com.intellij.xdebugger.impl.ui.XDebugSessionTab;
import com.intellij.xdebugger.impl.ui.XDebuggerUIConstants;
import com.intellij.xdebugger.impl.ui.tree.nodes.XValueNodeImpl;
import com.intellij.xdebugger.ui.DebuggerColors;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import java.awt.*;
import java.util.Collections;

import static com.intellij.openapi.editor.colors.EditorColors.REFERENCE_HYPERLINK_COLOR;
import static com.intellij.xdebugger.XSourcePosition.isOnTheSameLine;

/**
 * Editor inlay renderer that paints an inline debugger value hint
 * ("name = value") at the end of a source line, and handles mouse
 * interaction with it (hover highlighting, click-to-expand popup, and —
 * for inline watches — a remove button).
 */
public final class InlineDebugRenderer implements EditorCustomElementRenderer {
  // Separator painted between the variable name and its value.
  // (INLINE_HINTS_DELIMETER is the upstream constant's spelling.)
  public static final String NAME_VALUE_SEPARATION = XDebuggerInlayUtil.INLINE_HINTS_DELIMETER + " ";
  public static final String INDENT = "  ";
  // True while the tree popup opened from this inlay is showing; prevents reopening.
  boolean myPopupIsShown = false;
  // True when the node is an inline watch (InlineWatchNodeImpl) rather than a regular value.
  private final boolean myCustomNode;
  private final XDebugSession mySession;
  private final XValueNodeImpl myValueNode;
  private final XDebuggerTreeCreator myTreeCreator;
  // Hover state; toggles hand cursor, link colors and the trailing icon.
  private boolean isHovered = false;
  // X coordinates captured during paint() and consulted in onClick()/onMouseMove().
  private int myRemoveXCoordinate = Integer.MAX_VALUE;
  private int myTextStartXCoordinate;
  private final XSourcePosition myPosition;
  // Colored "name = value" fragments; recomputed by updatePresentation().
  private SimpleColoredText myPresentation;

  InlineDebugRenderer(XValueNodeImpl valueNode, @NotNull XSourcePosition position, @NotNull XDebugSession session) {
    myPosition = position;
    mySession = session;
    myCustomNode = valueNode instanceof InlineWatchNodeImpl;
    myValueNode = valueNode;
    updatePresentation();
    myTreeCreator = new XDebuggerTreeCreator(session.getProject(), session.getDebugProcess().getEditorsProvider(),
                                             session.getCurrentPosition(), ((XDebugSessionImpl)session).getValueMarkers());
  }

  /** Recomputes the colored text fragments for this inlay from the current node value. */
  public void updatePresentation() {
    TextAttributes attributes = XDebuggerEditorLinePainter.getAttributes(myPosition.getLine(), myPosition.getFile(), mySession);
    SimpleColoredText valuePresentation = XDebuggerEditorLinePainter.createPresentation(myValueNode);
    myPresentation = XDebuggerEditorLinePainter
      .computeVariablePresentationWithChanges(myValueNode, myValueNode.getName(), valuePresentation, attributes, myPosition.getLine(),
                                              mySession.getProject());
  }

  /** True when this inlay's line is the current execution line and full-line highlighting is on. */
  private boolean isInExecutionPointHighlight() {
    XSourcePosition debuggerPosition = mySession.getCurrentPosition();
    if (debuggerPosition != null) {
      XDebuggerManagerImpl debuggerManager = (XDebuggerManagerImpl)XDebuggerManager.getInstance(mySession.getProject());
      return isOnTheSameLine(myPosition, debuggerPosition) && debuggerManager.isFullLineHighlighter();
    }
    return false;
  }

  /** Editor font styled per the INLINED_VALUES_EXECUTION_LINE attributes (plain when unset). */
  private static Font getFont(@NotNull Editor editor) {
    EditorColorsScheme colorsScheme = editor.getColorsScheme();
    TextAttributes attributes = editor.getColorsScheme().getAttributes(DebuggerColors.INLINED_VALUES_EXECUTION_LINE);
    int fontStyle = attributes == null ? Font.PLAIN : attributes.getFontType();
    return UIUtil.getFontWithFallback(colorsScheme.getFont(EditorFontType.forJavaStyle(fontStyle)));
  }

  /**
   * Mouse click dispatch: clicks at/right of the remove icon delete the watch;
   * clicks within the text open the value tree popup; clicks left of the text
   * (the padding area) are ignored.
   */
  public void onClick(Inlay inlay, @NotNull EditorMouseEvent event) {
    int x = event.getMouseEvent().getX();
    boolean isRemoveIconClick = myCustomNode && x >= myRemoveXCoordinate;
    if (isRemoveIconClick) {
      XDebugSessionTab tab = ((XDebugSessionImpl)mySession).getSessionTab();
      if (tab != null) {
        tab.getWatchesView().removeWatches(Collections.singletonList(myValueNode));
      }
      inlay.update();
    }
    else if (x >= myTextStartXCoordinate) {
      handleClick(inlay);
    }
  }

  /** Opens the debugger tree popup under the inlay; guarded so only one popup shows at a time. */
  private void handleClick(Inlay inlay) {
    InlineDebugRenderer inlayRenderer = (InlineDebugRenderer)inlay.getRenderer();
    if (inlayRenderer.myPopupIsShown) {
      return;
    }
    String name = "valueName";
    XValue container = myValueNode.getValueContainer();
    if (container instanceof XNamedValue) {
      name = ((XNamedValue)container).getName();
    }
    Pair<XValue, String> descriptor = Pair.create(container, name);
    Rectangle bounds = inlay.getBounds();
    Point point = new Point(bounds.x, bounds.y + bounds.height);
    inlayRenderer.myPopupIsShown = true;
    XDebuggerTreeInlayPopup.showTreePopup(myTreeCreator, descriptor, myValueNode, inlay.getEditor(), point, myPosition, mySession, () -> {
      // Clear the guard on the EDT once the popup is disposed.
      ApplicationManager.getApplication().invokeLater(() -> {
        inlayRenderer.myPopupIsShown = false;
      });
    });
  }

  public void onMouseExit(@NotNull Inlay inlay) {
    setHovered(false, inlay);
  }

  public void onMouseMove(@NotNull Inlay inlay, @NotNull EditorMouseEvent event) {
    // Only the text/icon portion (right of the leading padding) counts as hovered.
    setHovered(event.getMouseEvent().getX() >= myTextStartXCoordinate, inlay);
  }

  /** Updates hover state, switches the custom cursor, and repaints the inlay on change. */
  private void setHovered(boolean active, @NotNull Inlay inlay) {
    boolean oldState = isHovered;
    isHovered = active;
    Cursor cursor = active ? Cursor.getPredefinedCursor(Cursor.HAND_CURSOR) : null;
    ((EditorEx)inlay.getEditor()).setCustomCursor(InlineDebugRenderer.class, cursor);
    if (oldState != active) {
      inlay.update();
    }
  }

  @Override
  public @Nullable ActionGroup getContextMenuGroup(@NotNull Inlay inlay) {
    return null;
  }

  /** Total inlay width: text plus trailing icon (close or link-drop), plus the watch icon for watches. */
  @Override
  public int calcWidthInPixels(@NotNull Inlay inlay) {
    int width = getInlayTextWidth(inlay);
    width += myCustomNode ? AllIcons.Actions.Close.getIconWidth() : AllIcons.General.LinkDropTriangle.getIconWidth();
    if (myCustomNode) {
      width += AllIcons.Debugger.Watch.getIconWidth();
    }
    return width;
  }

  /** Pixel width of the rendered text; error presentations use only their first fragment. */
  private int getInlayTextWidth(@NotNull Inlay inlay) {
    Font font = getFont(inlay.getEditor());
    String text;
    if (isErrorMessage()) {
      text = myPresentation.getTexts().get(0);
    }
    else {
      text = myPresentation.toString() + NAME_VALUE_SEPARATION;
    }
    return getFontMetrics(font, inlay.getEditor()).stringWidth(text + INDENT);
  }

  @NotNull
  private static FontMetrics getFontMetrics(Font font, @NotNull Editor editor) {
    return FontInfo.getFontMetrics(font, FontInfo.getFontRenderContext(editor.getContentComponent()));
  }

  // Alpha used for the rounded background behind the hint text.
  private static final float BACKGROUND_ALPHA = 0.55f;

  /** Y coordinate that vertically centers {@code icon} within {@code r}. */
  private static int getIconY(Icon icon, Rectangle r) {
    return r.y + r.height / 2 - icon.getIconHeight() / 2;
  }

  /**
   * Paints the hint: optional translucent rounded background, optional watch
   * icon, the colored text fragments, a trailing icon when hovered, and any
   * text effects (underline/strikeout/…). Also records the X coordinates used
   * by the mouse handlers.
   */
  @Override
  public void paint(@NotNull Inlay inlay, @NotNull Graphics g, @NotNull Rectangle r, @NotNull TextAttributes textAttributes) {
    EditorImpl editor = (EditorImpl)inlay.getEditor();
    TextAttributes inlineAttributes = getAttributes(editor);
    if (inlineAttributes == null || inlineAttributes.getForegroundColor() == null) return;
    Font font = getFont(editor);
    g.setFont(font);
    FontMetrics metrics = getFontMetrics(font, editor);
    int gap = 1;//(r.height < fontMetrics.lineHeight + 2) ? 1 : 2;
    int margin = metrics.charWidth(' ') / 4;
    Color backgroundColor = inlineAttributes.getBackgroundColor();
    int curX = r.x + metrics.charWidth(' ');
    if (backgroundColor != null) {
      float alpha = BACKGROUND_ALPHA;
      GraphicsConfig config = GraphicsUtil.setupAAPainting(g);
      GraphicsUtil.paintWithAlpha(g, alpha);
      g.setColor(backgroundColor);
      g.fillRoundRect(curX + margin, r.y + gap, r.width - (2 * margin) - metrics.charWidth(' '), r.height - gap * 2, 6, 6);
      config.restore();
    }
    curX += (2 * margin);
    if (myCustomNode) {
      // Inline watches get a leading watch icon before the text.
      Icon watchIcon = AllIcons.Debugger.Watch;
      watchIcon.paintIcon(inlay.getEditor().getComponent(), g, curX, getIconY(watchIcon, r));
      curX += watchIcon.getIconWidth() + margin * 2;
    }
    myTextStartXCoordinate = curX;
    for (int i = 0; i < myPresentation.getTexts().size(); i++) {
      String curText = myPresentation.getTexts().get(i);
      if (i == 0 && !isErrorMessage()) {
        curText += NAME_VALUE_SEPARATION;
      }
      SimpleTextAttributes attr = myPresentation.getAttributes().get(i);
      // On hover the whole hint is drawn in the (link) foreground color.
      Color fgColor = isHovered ? inlineAttributes.getForegroundColor() : attr.getFgColor();
      g.setColor(fgColor);
      g.drawString(curText, curX, r.y + inlay.getEditor().getAscent());
      curX += metrics.stringWidth(curText);
      if (isErrorMessage()) {
        // Error presentations render only their first fragment.
        break;
      }
    }
    if (isHovered) {
      Icon icon;
      if (myCustomNode) {
        icon = AllIcons.Actions.Close;
        // Remember where the close icon starts so onClick() can hit-test it.
        myRemoveXCoordinate = curX;
      }
      else {
        icon = AllIcons.General.LinkDropTriangle;
      }
      icon.paintIcon(inlay.getEditor().getComponent(), g, curX, getIconY(icon, r));
    }

    paintEffects(g, r, editor, inlineAttributes, font, metrics);
  }

  /** True when the node currently shows the debugger's error-message icon. */
  private boolean isErrorMessage() {
    return XDebuggerUIConstants.ERROR_MESSAGE_ICON.equals(myValueNode.getIcon());
  }

  /** Paints the text effect (underscore/strikeout/wave/dotted) across the full inlay width, if any. */
  private static void paintEffects(@NotNull Graphics g,
                                   @NotNull Rectangle r,
                                   EditorImpl editor,
                                   TextAttributes inlineAttributes,
                                   Font font,
                                   FontMetrics metrics) {
    Color effectColor = inlineAttributes.getEffectColor();
    EffectType effectType = inlineAttributes.getEffectType();
    if (effectColor != null) {
      g.setColor(effectColor);
      Graphics2D g2d = (Graphics2D)g;
      int xStart = r.x;
      int xEnd = r.x + r.width;
      int y = r.y + metrics.getAscent();
      if (effectType == EffectType.LINE_UNDERSCORE) {
        EffectPainter.LINE_UNDERSCORE.paint(g2d, xStart, y, xEnd - xStart, metrics.getDescent(), font);
      }
      else if (effectType == EffectType.BOLD_LINE_UNDERSCORE) {
        EffectPainter.BOLD_LINE_UNDERSCORE.paint(g2d, xStart, y, xEnd - xStart, metrics.getDescent(), font);
      }
      else if (effectType == EffectType.STRIKEOUT) {
        EffectPainter.STRIKE_THROUGH.paint(g2d, xStart, y, xEnd - xStart, editor.getCharHeight(), font);
      }
      else if (effectType == EffectType.WAVE_UNDERSCORE) {
        EffectPainter.WAVE_UNDERSCORE.paint(g2d, xStart, y, xEnd - xStart, metrics.getDescent(), font);
      }
      else if (effectType == EffectType.BOLD_DOTTED_LINE) {
        EffectPainter.BOLD_DOTTED_UNDERSCORE.paint(g2d, xStart, y, xEnd - xStart, metrics.getDescent(), font);
      }
    }
  }

  /**
   * Resolves the attributes for the current state: execution-line vs regular
   * inlined-value colors, with a link-like foreground substituted on hover.
   */
  private TextAttributes getAttributes(Editor editor) {
    TextAttributesKey key = isInExecutionPointHighlight() ? DebuggerColors.INLINED_VALUES_EXECUTION_LINE : DebuggerColors.INLINED_VALUES;
    EditorColorsScheme scheme = editor.getColorsScheme();
    TextAttributes inlinedAttributes = scheme.getAttributes(key);
    if (isHovered) {
      TextAttributes hoveredInlineAttr = new TextAttributes();
      hoveredInlineAttr.copyFrom(inlinedAttributes);
      Color hoveredAndSelectedColor = scheme.getAttributes(DebuggerColors.EXECUTIONPOINT_ATTRIBUTES).getForegroundColor();
      Color foregroundColor = isInExecutionPointHighlight()
                              ? hoveredAndSelectedColor
                              : scheme.getAttributes(REFERENCE_HYPERLINK_COLOR).getForegroundColor();
      if (foregroundColor == null) foregroundColor = scheme.getDefaultForeground();
      hoveredInlineAttr.setForegroundColor(foregroundColor);
      return hoveredInlineAttr;
    }
    return inlinedAttributes;
  }

  boolean isCustomNode() {
    return myCustomNode;
  }

  XValueNodeImpl getValueNode() {
    return myValueNode;
  }

  XSourcePosition getPosition() {
    return myPosition;
  }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.security.authentication.util;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * This class implements parsing and handling of Kerberos principal names. In
 * particular, it splits them apart and translates them down into local
 * operating system names.
 */
@SuppressWarnings("all")
@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
@InterfaceStability.Evolving
public class KerberosName {
  private static final Logger LOG = LoggerFactory.getLogger(KerberosName.class);

  /** The first component of the name. */
  private final String serviceName;
  /** The second component of the name. It may be null. */
  private final String hostName;
  /** The realm of the name. */
  private final String realm;

  /**
   * A pattern that matches a Kerberos name with at most 2 components:
   * service(/host)?@realm.
   */
  private static final Pattern nameParser =
      Pattern.compile("([^/@]*)(/([^/@]*))?@([^/@]*)");

  /**
   * A pattern that matches a string without '$' and then a single
   * parameter with $n.
   */
  private static final Pattern parameterPattern =
      Pattern.compile("([^$]*)(\\$(\\d*))?");

  /**
   * A pattern for parsing an auth_to_local rule:
   * either the literal DEFAULT, or RULE:[n:format](match)?(s/from/to/g?)?.
   */
  private static final Pattern ruleParser =
      Pattern.compile("\\s*((DEFAULT)|(RULE:\\[(\\d*):([^\\]]*)](\\(([^)]*)\\))?"+
                      "(s/([^/]*)/([^/]*)/(g)?)?))");

  /**
   * A pattern that recognizes simple/non-simple names: any '/' or '@' left in
   * a translated name makes it non-simple.
   */
  private static final Pattern nonSimplePattern = Pattern.compile("[/@]");

  /** The list of translation rules. */
  private static List<Rule> rules;

  private static String defaultRealm;

  static {
    try {
      defaultRealm = KerberosUtil.getDefaultRealm();
    } catch (Exception ke) {
      // No krb5 configuration available; fall back to an empty default realm
      // so DEFAULT rules simply never match.
      LOG.debug("Kerberos krb5 configuration not found, setting default realm to empty");
      defaultRealm = "";
    }
  }

  /**
   * Create a name from the full Kerberos principal name.
   *
   * @param name the principal name, e.g. "nn/host@REALM" or a simple name
   * @throws IllegalArgumentException if the name contains '@' but does not
   *         parse as a Kerberos principal
   */
  public KerberosName(String name) {
    Matcher match = nameParser.matcher(name);
    if (!match.matches()) {
      if (name.contains("@")) {
        throw new IllegalArgumentException("Malformed Kerberos name: " + name);
      } else {
        // A bare short name: no host, no realm.
        serviceName = name;
        hostName = null;
        realm = null;
      }
    } else {
      serviceName = match.group(1);
      hostName = match.group(3);
      realm = match.group(4);
    }
  }

  /**
   * Get the configured default realm.
   * @return the default realm from the krb5.conf
   */
  public String getDefaultRealm() {
    return defaultRealm;
  }

  /**
   * Put the name back together from the parts.
   */
  @Override
  public String toString() {
    StringBuilder result = new StringBuilder();
    result.append(serviceName);
    if (hostName != null) {
      result.append('/');
      result.append(hostName);
    }
    if (realm != null) {
      result.append('@');
      result.append(realm);
    }
    return result.toString();
  }

  /**
   * Get the first component of the name.
   * @return the first section of the Kerberos principal name
   */
  public String getServiceName() {
    return serviceName;
  }

  /**
   * Get the second component of the name.
   * @return the second section of the Kerberos principal name, and may be null
   */
  public String getHostName() {
    return hostName;
  }

  /**
   * Get the realm of the name.
   * @return the realm of the name, may be null
   */
  public String getRealm() {
    return realm;
  }

  /**
   * An encoding of a rule for translating kerberos names.
   */
  private static class Rule {
    private final boolean isDefault;
    private final int numOfComponents;
    private final String format;
    private final Pattern match;
    private final Pattern fromPattern;
    private final String toPattern;
    private final boolean repeat;

    /** Builds the DEFAULT rule. */
    Rule() {
      isDefault = true;
      numOfComponents = 0;
      format = null;
      match = null;
      fromPattern = null;
      toPattern = null;
      repeat = false;
    }

    /** Builds a RULE:[n:format](match)(s/from/to/g?) rule. */
    Rule(int numOfComponents, String format, String match, String fromPattern,
         String toPattern, boolean repeat) {
      isDefault = false;
      this.numOfComponents = numOfComponents;
      this.format = format;
      this.match = match == null ? null : Pattern.compile(match);
      this.fromPattern =
          fromPattern == null ? null : Pattern.compile(fromPattern);
      this.toPattern = toPattern;
      this.repeat = repeat;
    }

    @Override
    public String toString() {
      StringBuilder buf = new StringBuilder();
      if (isDefault) {
        buf.append("DEFAULT");
      } else {
        buf.append("RULE:[");
        buf.append(numOfComponents);
        buf.append(':');
        buf.append(format);
        buf.append(']');
        if (match != null) {
          buf.append('(');
          buf.append(match);
          buf.append(')');
        }
        if (fromPattern != null) {
          buf.append("s/");
          buf.append(fromPattern);
          buf.append('/');
          buf.append(toPattern);
          buf.append('/');
          if (repeat) {
            buf.append('g');
          }
        }
      }
      return buf.toString();
    }

    /**
     * Replace the numbered parameters of the form $n where n is from 0 to
     * the length of params - 1. Normal text is copied directly and $n is
     * replaced by the corresponding parameter.
     * @param format the string to replace parameters in
     * @param params the list of parameters
     * @return the generated string with the parameter references replaced
     * @throws BadFormatString if a parameter index is out of range or unparsable
     */
    static String replaceParameters(String format,
                                    String[] params) throws BadFormatString {
      Matcher match = parameterPattern.matcher(format);
      int start = 0;
      StringBuilder result = new StringBuilder();
      while (start < format.length() && match.find(start)) {
        result.append(match.group(1));
        String paramNum = match.group(3);
        if (paramNum != null) {
          try {
            int num = Integer.parseInt(paramNum);
            // Bug fix: the check was 'num > params.length', which let
            // num == params.length through and caused an
            // ArrayIndexOutOfBoundsException below instead of the documented
            // BadFormatString. Valid indices are 0 .. params.length - 1.
            if (num < 0 || num >= params.length) {
              throw new BadFormatString("index " + num + " from " + format +
                                        " is outside of the valid range 0 to " +
                                        (params.length - 1));
            }
            result.append(params[num]);
          } catch (NumberFormatException nfe) {
            throw new BadFormatString("bad format in username mapping in " +
                                      paramNum, nfe);
          }
        }
        start = match.end();
      }
      return result.toString();
    }

    /**
     * Replace the matches of the from pattern in the base string with the value
     * of the to string.
     * @param base the string to transform
     * @param from the pattern to look for in the base string
     * @param to the string to replace matches of the pattern with
     * @param repeat whether the substitution should be repeated
     * @return the substituted string
     */
    static String replaceSubstitution(String base, Pattern from, String to,
                                      boolean repeat) {
      Matcher match = from.matcher(base);
      if (repeat) {
        return match.replaceAll(to);
      } else {
        return match.replaceFirst(to);
      }
    }

    /**
     * Try to apply this rule to the given name represented as a parameter
     * array.
     * @param params first element is the realm, second and later elements are
     *        the components of the name "a/b@FOO" -> {"FOO", "a", "b"}
     * @return the short name if this rule applies or null
     * @throws IOException throws if something is wrong with the rules
     */
    String apply(String[] params) throws IOException {
      String result = null;
      if (isDefault) {
        // DEFAULT only applies to principals in our own realm.
        if (defaultRealm.equals(params[0])) {
          result = params[1];
        }
      } else if (params.length - 1 == numOfComponents) {
        String base = replaceParameters(format, params);
        if (match == null || match.matcher(base).matches()) {
          if (fromPattern == null) {
            result = base;
          } else {
            result = replaceSubstitution(base, fromPattern, toPattern, repeat);
          }
        }
      }
      if (result != null && nonSimplePattern.matcher(result).find()) {
        throw new NoMatchingRule("Non-simple name " + result +
                                 " after auth_to_local rule " + this);
      }
      return result;
    }
  }

  /**
   * Parse an auth_to_local rules string into a list of Rule objects.
   * @param rules the raw rules string (whitespace-separated rules)
   * @throws IllegalArgumentException if any rule fails to parse
   */
  static List<Rule> parseRules(String rules) {
    List<Rule> result = new ArrayList<Rule>();
    String remaining = rules.trim();
    while (remaining.length() > 0) {
      Matcher matcher = ruleParser.matcher(remaining);
      if (!matcher.lookingAt()) {
        throw new IllegalArgumentException("Invalid rule: " + remaining);
      }
      if (matcher.group(2) != null) {
        result.add(new Rule());
      } else {
        result.add(new Rule(Integer.parseInt(matcher.group(4)),
                            matcher.group(5),
                            matcher.group(7),
                            matcher.group(9),
                            matcher.group(10),
                            "g".equals(matcher.group(11))));
      }
      remaining = remaining.substring(matcher.end());
    }
    return result;
  }

  /** Thrown when a rule's format string is malformed. */
  @SuppressWarnings("serial")
  public static class BadFormatString extends IOException {
    BadFormatString(String msg) {
      super(msg);
    }
    BadFormatString(String msg, Throwable err) {
      super(msg, err);
    }
  }

  /** Thrown when no configured rule translates a principal to a simple name. */
  @SuppressWarnings("serial")
  public static class NoMatchingRule extends IOException {
    NoMatchingRule(String msg) {
      super(msg);
    }
  }

  /**
   * Get the translation of the principal name into an operating system
   * user name.
   * @return the short name
   * @throws IOException if no rule applies, or a rule is malformed
   */
  public String getShortName() throws IOException {
    String[] params;
    if (hostName == null) {
      // if it is already simple, just return it
      if (realm == null) {
        return serviceName;
      }
      params = new String[]{realm, serviceName};
    } else {
      params = new String[]{realm, serviceName, hostName};
    }
    // First rule that produces a non-null result wins.
    for (Rule r : rules) {
      String result = r.apply(params);
      if (result != null) {
        return result;
      }
    }
    throw new NoMatchingRule("No rules applied to " + toString());
  }

  /**
   * Set the rules.
   * @param ruleString the rules string.
   */
  public static void setRules(String ruleString) {
    rules = (ruleString != null) ? parseRules(ruleString) : null;
  }

  /**
   * Get the rules.
   * @return String of configured rules, or null if not yet configured
   */
  public static String getRules() {
    String ruleString = null;
    if (rules != null) {
      StringBuilder sb = new StringBuilder();
      for (Rule rule : rules) {
        sb.append(rule.toString()).append("\n");
      }
      ruleString = sb.toString().trim();
    }
    return ruleString;
  }

  /**
   * Indicates if the name rules have been set.
   *
   * @return if the name rules have been set.
   */
  public static boolean hasRulesBeenSet() {
    return rules != null;
  }

  /** Debug helper: prints each configured rule with a 1-based index. */
  static void printRules() throws IOException {
    int i = 0;
    for (Rule r : rules) {
      System.out.println(++i + " " + r);
    }
  }
}
/**
 * Copyright 2017-2019 The GreyCat Authors. All rights reserved.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package greycat.base;

import greycat.*;
import greycat.chunk.StateChunk;
import greycat.chunk.WorldOrderChunk;
import greycat.plugin.*;
import greycat.struct.*;
import greycat.struct.proxy.*;
import greycat.utility.HashHelper;
import greycat.utility.Tuple;

import java.util.HashSet;
import java.util.Set;

/**
 * Base implementation to develop NodeFactory plugins without overriding every method.
 * A node is identified by the (world, time, id) triple; all attribute access is
 * delegated to the {@link Resolver}, with the {@code _index_*} fields acting as a
 * cache for the resolving process.
 */
public class BaseNode implements Node {

    protected final long _world;
    protected final long _time;
    protected final long _id;
    protected final Graph _graph;
    protected final Resolver _resolver;

    //cache to enhance the resolving process
    public volatile long _index_worldOrder = -1;
    public volatile long _index_superTimeTree = -1;
    public volatile long _index_timeTree = -1;
    public volatile int _index_timeTree_offset = -1;
    public volatile long _index_stateChunk = -1;
    public volatile long _world_magic = -1;
    public volatile long _super_time_magic = -1;
    public volatile long _time_magic = -1;
    public volatile boolean _dead = false;
    // simple spin-lock flag guarding the resolver cache (see cacheLock/cacheUnlock)
    private volatile int _lock;

    public BaseNode(long p_world, long p_time, long p_id, Graph p_graph) {
        this._world = p_world;
        this._time = p_time;
        this._id = p_id;
        this._graph = p_graph;
        this._resolver = p_graph.resolver();
    }

    /**
     * Acquires the node's spin lock by busy-waiting until {@code _lock} is free.
     * @native ts
     */
    public final void cacheLock() {
        while(this._lock != 0){
            //wait
        }
        this._lock = 1;
    }

    /**
     * Releases the node's spin lock.
     * @native ts
     */
    public final void cacheUnlock() {
        _lock = 0;
    }

    /**
     * This method should be overridden to init the object
     */
    public void init() {
        //noop
    }

    /** Resolves the node's declared type name, falling back to the raw hash string. */
    @Override
    public final String nodeTypeName() {
        int typeHash = this._resolver.typeCode(this);
        final NodeDeclaration declaration = this.graph().nodeRegistry().declarationByHash(typeHash);
        if (declaration != null) {
            return declaration.name();
        } else {
            return _resolver.hashToString(typeHash);
        }
    }

    // State accessors: unphased = read-only resolution, phased = aligned (writable) state.
    protected final NodeState unphasedState() {
        return this._resolver.resolveState(this);
    }

    protected final NodeState phasedState() {
        return this._resolver.alignState(this);
    }

    protected final NodeState newState(long relativeTime) {
        return this._resolver.newState(this, _world, relativeTime);
    }

    @Override
    public final Graph graph() {
        return _graph;
    }

    @Override
    public final long world() {
        return this._world;
    }

    @Override
    public final long time() {
        return this._time;
    }

    @Override
    public final long id() {
        return this._id;
    }

    @Override
    public Object get(String name) {
        return this.getAt(this._resolver.stringToHash(name, false));
    }

    /** Reads an attribute by hash, wrapping the raw value in a proxy when required. */
    @Override
    public Object getAt(int propIndex) {
        final NodeState resolved = this._resolver.resolveState(this);
        if (resolved != null) {
            Object rawObj = resolved.getAt(propIndex);
            if (rawObj != null) {
                return proxyIfNecessary(resolved, propIndex, rawObj);
            } else {
                return null;
            }
        }
        return null;
    }

    @Override
    public final Object getRawAt(int propIndex) {
        final NodeState resolved = this._resolver.resolveState(this);
        if (resolved != null) {
            return resolved.getRawAt(propIndex);
        }
        return null;
    }

    @Override
    public final Object getTypedRawAt(final int propIndex, final int type) {
        final NodeState resolved = this._resolver.resolveState(this);
        if (resolved != null) {
            return resolved.getTypedRawAt(propIndex, type);
        }
        return null;
    }

    /**
     * Wraps mutable struct values in copy-on-access proxies when the resolved
     * state belongs to a different (world, time) than this node, so writes go
     * through re-alignment. Primitive values and same-phase reads pass through.
     */
    private Object proxyIfNecessary(NodeState state, int index, Object elem) {
        if (!_graph.useProxies()) {
            return elem;
        }
        long resolvedTime = state.time();
        long resolvedWorld = state.world();
        if (resolvedTime == _time && resolvedWorld == _world) {
            //implement time sensitivity
            return elem;
        } else {
            int type = state.typeAt(index);
            //temporary proxy
            switch (type) {
                case Type.IMATRIX:
                    return new IMatrixProxy(index, this, (IMatrix) elem);
                case Type.LMATRIX:
                    return new LMatrixProxy(index, this, (LMatrix) elem);
                case Type.DMATRIX:
                    return new DMatrixProxy(index, this, (DMatrix) elem);
                case Type.RELATION:
                    return new RelationProxy(index, this, (Relation) elem);
                case Type.LONG_TO_LONG_MAP:
                    return new LongLongMapProxy(index, this, (LongLongMap) elem);
                case Type.LONG_TO_LONG_ARRAY_MAP:
                    return new LongLongArrayMapProxy(index, this, (LongLongArrayMap) elem);
                case Type.STRING_TO_INT_MAP:
                    return new StringIntMapProxy(index, this, (StringIntMap) elem);
                case Type.LONG_ARRAY:
                    return new LongArrayProxy(index, this, (LongArray) elem);
                case Type.INT_ARRAY:
                    return new IntArrayProxy(index, this, (IntArray) elem);
                case Type.DOUBLE_ARRAY:
                    return new DoubleArrayProxy(index, this, (DoubleArray) elem);
                case Type.STRING_ARRAY:
                    return new StringArrayProxy(index, this, (StringArray) elem);
                case Type.INT_TO_INT_MAP:
                    return new IntIntMapProxy(index, this, (IntIntMap) elem);
                case Type.INT_TO_STRING_MAP:
                    return new IntStringMapProxy(index, this, (IntStringMap) elem);
                case Type.ESTRUCT_ARRAY:
                    return new EStructArrayProxy(index, this, (EStructArray) elem);
                default:
                    if (Type.isCustom(type)) {
                        // Custom types are backed by an EStructArray; proxy the backend only.
                        final BaseCustomType ct = (BaseCustomType) elem;
                        ct._backend = new EStructArrayProxy(index, this, ct._backend);
                        return ct;
                    } else {
                        return elem;
                    }
            }
        }
    }

    @Override
    public final Object getOrCreate(String name, int type) {
        return this.getOrCreateAt(this._resolver.stringToHash(name, true), type);
    }

    /** Reads the attribute; creates it in the resolved state when absent. */
    @Override
    public Object getOrCreateAt(int index, int type) {
        final NodeState previousState = this._resolver.resolveState(this);
        final Object elem = previousState.getAt(index);
        if (elem != null) {
            return proxyIfNecessary(previousState, index, elem);
        } else {
            // NOTE(review): state is resolved a second time before creation —
            // presumably to pick up a state change since the first lookup; confirm.
            final NodeState preciseState = this._resolver.resolveState(this);
            if (preciseState != null) {
                return proxyIfNecessary(preciseState, index, preciseState.getOrCreateAt(index, type));
            } else {
                throw new RuntimeException(Constants.CACHE_MISS_ERROR);
            }
        }
    }

    @Override
    public final Object getOrCreateCustom(String name, String typeName) {
        return this.getOrCreateAt(this._resolver.stringToHash(name, true), HashHelper.hash(typeName));
    }

    @Override
    public final Object getOrCreateCustomAt(int index, String typeName) {
        return this.getOrCreateAt(index, HashHelper.hash(typeName));
    }

    @Override
    public <A> A getWithDefault(final String key, final A defaultValue) {
        return getAtWithDefault(this._resolver.stringToHash(key, false), defaultValue);
    }

    @Override
    public <A> A getAtWithDefault(final int key, final A defaultValue) {
        Object found = getAt(key);
        if (found != null) {
            return (A) found;
        } else {
            return defaultValue;
        }
    }

    @Override
    public Node forceSet(String name, int type, Object value) {
        return forceSetAt(this._resolver.stringToHash(name, true), type, value);
    }

    /** Writes unconditionally through an aligned (phased) state. */
    @Override
    public Node forceSetAt(int index, int type, Object value) {
        final NodeState preciseState = this._resolver.alignState(this);
        if (preciseState != null) {
            preciseState.setAt(index, type, value);
        } else {
            throw new RuntimeException(Constants.CACHE_MISS_ERROR);
        }
        return this;
    }

    /**
     * Writes only when the value or type actually differs from the currently
     * resolved state, avoiding a state alignment (and therefore a new phase)
     * for no-op writes.
     */
    @Override
    public Node setAt(int index, int type, Object value) {
        final NodeState unPhasedState = this._resolver.resolveState(this);
        boolean isDiff = (type != unPhasedState.typeAt(index));
        if (!isDiff) {
            isDiff = !isEquals(unPhasedState.getAt(index), value, type);
        }
        if (isDiff) {
            final NodeState preciseState = this._resolver.alignState(this);
            if (preciseState != null) {
                preciseState.setAt(index, type, value);
            } else {
                throw new RuntimeException(Constants.CACHE_MISS_ERROR);
            }
        }
        return this;
    }

    @Override
    public Node set(String name, int type, Object value) {
        //hash the property a single time
        final int hashed = this._resolver.stringToHash(name, true);
        return setAt(hashed, type, value);
    }

    /**
     * Type-directed equality used by {@link #setAt} to detect no-op writes.
     * Primitive types compare by value, array types element-wise; complex map
     * and relation types reject plain set() usage entirely.
     */
    private boolean isEquals(Object obj1, Object obj2, int type) {
        if (obj1 == null && obj2 == null) {
            return true;
        }
        if (obj1 == null || obj2 == null) {
            return false;
        }
        switch (type) {
            case Type.BOOL:
                return (((boolean) obj1) == ((boolean) obj2));
            case Type.DOUBLE:
                return (((double) obj1) == ((double) obj2));
            case Type.INT:
                return (((int) obj1) == ((int) obj2));
            case Type.LONG:
                return (((long) obj1) == ((long) obj2));
            case Type.STRING:
                return (((String) obj1).equals((String) obj2));
            case Type.DOUBLE_ARRAY:
                DoubleArray obj1_ar_d = (DoubleArray) obj1;
                DoubleArray obj2_ar_d = (DoubleArray) obj2;
                if (obj1_ar_d.size() != obj2_ar_d.size()) {
                    return false;
                } else {
                    for (int i = 0; i < obj1_ar_d.size(); i++) {
                        if (obj1_ar_d.get(i) != obj2_ar_d.get(i)) {
                            return false;
                        }
                    }
                }
                return true;
            case Type.INT_ARRAY:
                IntArray obj1_ar_i = (IntArray) obj1;
                IntArray obj2_ar_i = (IntArray) obj2;
                if (obj1_ar_i.size() != obj2_ar_i.size()) {
                    return false;
                } else {
                    for (int i = 0; i < obj1_ar_i.size(); i++) {
                        if (obj1_ar_i.get(i) != obj2_ar_i.get(i)) {
                            return false;
                        }
                    }
                }
                return true;
            case Type.LONG_ARRAY:
                LongArray obj1_ar_l = (LongArray) obj1;
                LongArray obj2_ar_l = (LongArray) obj2;
                if (obj1_ar_l.size() != obj2_ar_l.size()) {
                    return false;
                } else {
                    for (int i = 0; i < obj1_ar_l.size(); i++) {
                        if (obj1_ar_l.get(i) != obj2_ar_l.get(i)) {
                            return false;
                        }
                    }
                }
                return true;
            /* STRING_ARRAY element-wise comparison was commented out in the
               original source; STRING_ARRAY therefore falls through to the
               default branch. */
            case Type.RELATION:
            //case Type.RELATION_INDEXED:
            case Type.STRING_TO_INT_MAP:
            case Type.LONG_TO_LONG_MAP:
            case Type.LONG_TO_LONG_ARRAY_MAP:
            case Type.INT_TO_INT_MAP:
            case Type.INT_TO_STRING_MAP:
                throw new RuntimeException("Bad API usage: set can't be used with complex type, please use getOrCreate instead.");
            default:
                throw new RuntimeException("Not managed type " + type);
        }
    }

    @Override
    public int type(String name) {
        final NodeState resolved = this._resolver.resolveState(this);
        if (resolved != null) {
            return resolved.typeAt(this._resolver.stringToHash(name, false));
        }
        return -1;
    }

    @Override
    public int typeAt(final int index) {
        final NodeState resolved = this._resolver.resolveState(this);
        if (resolved != null) {
            return resolved.typeAt(index);
        }
        return -1;
    }

    // Removal is expressed as setting a null value (typed INT by convention).
    @Override
    public final Node remove(String name) {
        return set(name, Type.INT, null);
    }

    @Override
    public final Node removeAt(final int index) {
        return setAt(index, Type.INT, null);
    }

    /* Legacy traverse/relationAt/addToRelation/removeFromRelation
       implementations (callback-based relation traversal) were commented out
       at this point in the original source; they are superseded by the
       generic traverse/traverseAt and relation methods further below. */

    @Override
    public final void free() {
        this._resolver.freeNode(this);
    }

    /** Distance (in time) between this node's time and its resolved state's time. */
    @Override
    public final long timeDephasing() {
        final NodeState state = this._resolver.resolveState(this);
        if (state != null) {
            return this._time - state.time();
        } else {
            throw new RuntimeException(Constants.CACHE_MISS_ERROR);
        }
    }

    @Override
    public final long lastModification() {
        final NodeState state = this._resolver.resolveState(this);
        if (state != null) {
            return state.time();
        } else {
            throw new RuntimeException(Constants.CACHE_MISS_ERROR);
        }
    }

    @Override
    public final Node rephase() {
        this._resolver.alignState(this);
        return this;
    }

    @Override
    public final int[] attributeIndexes() {
        return this._resolver.resolveState(this).attributeIndexes();
    }

    @Override
    public final void timepoints(final long beginningOfSearch, final long endOfSearch, final Callback<long[]> callback) {
        this._resolver.resolveTimepoints(this, beginningOfSearch, endOfSearch, callback);
    }

    @Override
    public void countTimepoints(long beginningOfSearch, long endOfSearch, Callback<Long> callback) {
        this._resolver.countTimepoints(this, beginningOfSearch, endOfSearch, callback);
    }

    // travel* methods re-resolve this node id at another time and/or world.
    @Override
    public final <A extends Node> void travelInTime(final long targetTime, final Callback<A> callback) {
        _resolver.lookup(_world, targetTime, _id, callback);
    }

    @Override
    public <A extends Node> void travelInWorld(final long targetWorld, final Callback<A> callback) {
        _resolver.lookup(targetWorld, _time, _id, callback);
    }

    @Override
    public <A extends Node> void travel(long targetWorld, long targetTime, Callback<A> callback) {
        _resolver.lookup(targetWorld, targetTime, _id, callback);
    }

    @Override
    public final Node setTimeSensitivity(long deltaTime, long offset) {
        _resolver.setTimeSensitivity(this, deltaTime, offset);
        return this;
    }

    @Override
    public final Tuple<Long, Long> timeSensitivity() {
        return _resolver.getTimeSensitivity(this);
    }

    @Override
    public final void end() {
        _resolver.end(this);
    }

    @Override
    public final void drop(Callback callback) {
        _resolver.drop(this, callback);
    }

    /**
     * Renders the node as a JSON-like string: identity fields followed by each
     * attribute of the resolved state. Returns "locked" when the spin lock is
     * currently held.
     */
    @Override
    public String toString() {
        if (_lock == 1) {
            return "locked";
        }
        final StringBuilder builder = new StringBuilder();
        final boolean[] isFirst = {true};
        builder.append("{\"world\":");
        builder.append(world());
        builder.append(",\"time\":");
        builder.append(time());
        builder.append(",\"id\":");
        builder.append(id());
        builder.append(",\"group\":");
        builder.append(group());
        final NodeState state = this._resolver.resolveState(this);
        if (state != null) {
            state.each(new NodeStateCallback() {
                @Override
                public void on(int attributeKey, int elemType, Object elem) {
                    if (elem != null) {
                        String resolveName = _resolver.hashToString(attributeKey);
                        if (resolveName == null) {
                            resolveName = attributeKey + "";
                        }
                        switch (elemType) {
                            case Type.BOOL: {
                                builder.append(",\"");
                                builder.append(resolveName);
                                builder.append("\":");
                                if ((Boolean) elem) {
                                    builder.append("1");
                                } else {
                                    builder.append("0");
                                }
                                break;
                            }
                            case Type.STRING: {
                                builder.append(",\"");
                                builder.append(resolveName);
                                builder.append("\":");
                                builder.append("\"");
                                builder.append(elem);
                                builder.append("\"");
                                break;
                            }
                            case Type.LONG: {
                                builder.append(",\"");
                                builder.append(resolveName);
                                builder.append("\":");
                                builder.append(elem);
                                break;
                            }
                            case Type.INT: {
                                builder.append(",\"");
                                builder.append(resolveName);
                                builder.append("\":");
                                builder.append(elem);
                                break;
                            }
                            case Type.DOUBLE: {
                                // NaN doubles are skipped entirely.
                                if (!Constants.isNaN((double) elem)) {
                                    builder.append(",\"");
                                    builder.append(resolveName);
                                    builder.append("\":");
                                    builder.append(elem);
                                }
                                break;
                            }
                            case Type.DOUBLE_ARRAY: {
                                builder.append(",\"");
                                builder.append(resolveName);
                                builder.append("\":");
                                builder.append("[");
                                DoubleArray castedArr = ((DoubleArray) elem);
                                for (int j = 0; j < castedArr.size(); j++) {
                                    if (j != 0) {
                                        builder.append(",");
                                    }
                                    builder.append(castedArr.get(j));
                                }
                                builder.append("]");
                                break;
                            }
                            case Type.RELATION:
                                builder.append(",\"");
                                builder.append(resolveName);
                                builder.append("\":");
                                builder.append("[");
                                Relation castedRelArr = (Relation) elem;
                                for (int j = 0; j < castedRelArr.size(); j++) {
                                    if (j != 0) {
                                        builder.append(",");
                                    }
                                    builder.append(castedRelArr.get(j));
                                }
                                builder.append("]");
                                break;
                            case Type.LONG_ARRAY: {
                                builder.append(",\"");
                                builder.append(resolveName);
                                builder.append("\":");
                                builder.append("[");
                                LongArray castedArr2 = (LongArray) elem;
                                for (int j = 0; j < castedArr2.size(); j++) {
                                    if (j != 0) {
                                        builder.append(",");
                                    }
                                    builder.append(castedArr2.get(j));
                                }
                                builder.append("]");
                                break;
                            }
                            case Type.INT_ARRAY: {
                                builder.append(",\"");
                                builder.append(resolveName);
                                builder.append("\":");
                                builder.append("[");
                                IntArray castedArr3 = (IntArray) elem;
                                for (int j = 0; j < castedArr3.size(); j++) {
                                    if (j != 0) {
                                        builder.append(",");
                                    }
                                    builder.append(castedArr3.get(j));
                                }
                                builder.append("]");
                                break;
                            }
                            case Type.LONG_TO_LONG_MAP: {
                                builder.append(",\"");
                                builder.append(resolveName);
                                builder.append("\":");
                                builder.append("{");
                                LongLongMap castedMapL2L = (LongLongMap) elem;
                                isFirst[0] = true;
                                castedMapL2L.each(new LongLongMapCallBack() {
                                    @Override
                                    public void on(long key, long value) {
                                        if (!isFirst[0]) {
                                            builder.append(",");
                                        } else {
                                            isFirst[0] = false;
                                        }
                                        builder.append("\"");
                                        builder.append(key);
                                        builder.append("\":");
                                        builder.append(value);
                                    }
                                });
                                builder.append("}");
                                break;
                            }
                            case Type.INT_TO_INT_MAP: {
                                builder.append(",\"");
                                builder.append(resolveName);
                                builder.append("\":");
                                builder.append("{");
                                IntIntMap castedMapI2I = (IntIntMap) elem;
                                isFirst[0] = true;
                                castedMapI2I.each(new IntIntMapCallBack() {
                                    @Override
                                    public void on(int key, int value) {
                                        if (!isFirst[0]) {
                                            builder.append(",");
                                        } else {
                                            isFirst[0] = false;
                                        }
                                        builder.append("\"");
                                        builder.append(key);
                                        builder.append("\":");
                                        builder.append(value);
                                    }
                                });
                                builder.append("}");
                                break;
                            }
                            case Type.INT_TO_STRING_MAP: {
                                builder.append(",\"");
                                builder.append(resolveName);
                                builder.append("\":");
                                builder.append("{");
                                IntStringMap castedMapI2I = (IntStringMap) elem;
                                isFirst[0] = true;
                                castedMapI2I.each(new IntStringMapCallBack() {
                                    @Override
                                    public void on(int key, String value) {
                                        if (!isFirst[0]) {
                                            builder.append(",");
                                        } else {
                                            isFirst[0] = false;
                                        }
                                        builder.append("\"");
                                        builder.append(key);
                                        builder.append("\":");
                                        builder.append(value);
                                    }
                                });
                                builder.append("}");
                                break;
                            }
                            case Type.LONG_TO_LONG_ARRAY_MAP: {
                                builder.append(",\"");
                                builder.append(resolveName);
                                builder.append("\":");
                                builder.append("{");
                                LongLongArrayMap castedMapL2LA = (LongLongArrayMap) elem;
                                isFirst[0] = true;
                                // Collect the distinct keys first, then emit each key's value array.
                                Set<Long> keys = new HashSet<Long>();
                                castedMapL2LA.each(new LongLongArrayMapCallBack() {
                                    @Override
                                    public void on(long key, long value) {
                                        keys.add(key);
                                    }
                                });
                                final Long[] flatKeys = keys.toArray(new Long[keys.size()]);
                                for (int i = 0; i < flatKeys.length; i++) {
                                    long[] values = castedMapL2LA.get(flatKeys[i]);
                                    if (!isFirst[0]) {
                                        builder.append(",");
                                    } else {
                                        isFirst[0] = false;
                                    }
                                    builder.append("\"");
                                    builder.append(flatKeys[i]);
                                    builder.append("\":[");
                                    for (int j = 0; j < values.length; j++) {
                                        if (j != 0) {
                                            builder.append(",");
                                        }
                                        builder.append(values[j]);
                                    }
                                    builder.append("]");
                                }
                                builder.append("}");
                                break;
                            }
                            case Type.STRING_TO_INT_MAP: {
                                builder.append(",\"");
                                builder.append(resolveName);
                                builder.append("\":");
                                builder.append("{");
                                StringIntMap castedMapS2L = (StringIntMap) elem;
                                isFirst[0] = true;
                                castedMapS2L.each(new StringLongMapCallBack() {
                                    @Override
                                    public void on(String key, long value) {
                                        if (!isFirst[0]) {
                                            builder.append(",");
                                        } else {
                                            isFirst[0] = false;
                                        }
                                        builder.append("\"");
                                        builder.append(key);
                                        builder.append("\":");
                                        builder.append(value);
                                    }
                                });
                                builder.append("}");
                                break;
                            }
                        }
                    }
                }
            });
            builder.append("}");
        }
        return builder.toString();
    }

    // Typed convenience accessors over get(String).
    @Override
    public final Relation getRelation(String name) {
        return (Relation) get(name);
    }

    @Override
    public final Index getIndex(String name) {
        return (Index) get(name);
    }

    @Override
    public final DMatrix getDMatrix(String name) {
        return (DMatrix) get(name);
    }

    @Override
    public final LMatrix getLMatrix(String name) {
        return (LMatrix) get(name);
    }

    @Override
    public final IMatrix getIMatrix(String name) {
        return (IMatrix) get(name);
    }

    @Override
    public final EStructArray getEGraph(String name) {
        return (EStructArray) get(name);
    }

    @Override
    public final LongArray getLongArray(String name) {
        return (LongArray) get(name);
    }

    @Override
    public IntArray getIntArray(String name) {
        return (IntArray) get(name);
    }

    @Override
    public final DoubleArray getDoubleArray(String name) {
        return (DoubleArray) get(name);
    }

    @Override
    public final StringArray getStringArray(String name) {
        return (StringArray) get(name);
    }

    @Override
    public final StringIntMap getStringIntMap(String name) {
        return (StringIntMap) get(name);
    }

    @Override
    public final LongLongMap getLongLongMap(String name) {
        return (LongLongMap) get(name);
    }

    @Override
    public final IntIntMap getIntIntMap(String name) {
        return (IntIntMap) get(name);
    }

    @Override
    public final IntStringMap getIntStringMap(String name) {
        return (IntStringMap) get(name);
    }

    @Override
    public final LongLongArrayMap getLongLongArrayMap(String name) {
        return (LongLongArrayMap) get(name);
    }

    /**
     * Creates a new node of the same declared type at this node's (world, time)
     * and copies the current state chunk into it.
     */
    public final Node createClone() {
        final WorldOrderChunk worldOrderChunk = (WorldOrderChunk) _graph.space().get(_index_worldOrder);
        final long type = worldOrderChunk.type();
        final Node cloned;
        if (type == Constants.NULL_LONG) {
            cloned = _graph.newNode(_world, _time);
        } else {
            cloned = _graph.newTypedNodeFrom(_world, _time, (int) type);
        }
        final StateChunk clonedStateChunk = (StateChunk) _resolver.resolveState(cloned);
        final StateChunk currentStateChunk = (StateChunk) _resolver.resolveState(this);
        clonedStateChunk.loadFrom(currentStateChunk);
        return cloned;
    }

    /* TODO check after */
    @Override
    public final <A> void traverse(String relationName, final Callback<A> callback) {
        traverseAt(this._resolver.stringToHash(relationName, false), callback);
    }

    /**
     * Traverses the attribute at the given index: RELATION and INDEX values are
     * resolved to node arrays, TASK values are executed with this node as context;
     * anything else yields a null callback result.
     */
    @Override
    public final <A> void traverseAt(int indexToTraverse, Callback<A> callback) {
        if (callback == null) {
            return;
        }
        final int ftype = typeAt(indexToTraverse);
        switch (ftype) {
            case Type.RELATION:
                final Relation relation = (Relation) getAt(indexToTraverse);
                if (relation == null || relation.size() == 0) {
                    callback.on((A) (Object) new Node[0]);
                } else {
                    final int relSize = relation.size();
                    final long[] ids = new long[relSize];
                    for (int i = 0; i < relSize; i++) {
                        ids[i] = relation.get(i);
                    }
                    this._resolver.lookupAll(_world, _time, ids, new Callback<Node[]>() {
                        @Override
                        public void on(Node[] result) {
                            callback.on((A) (Object) result);
                        }
                    });
                }
                break;
            case Type.INDEX:
                final Index findex = (Index) getAt(indexToTraverse);
                final long[] ids = findex.all();
                this._resolver.lookupAll(_world, _time, ids, new Callback<Node[]>() {
                    @Override
                    public void on(Node[] result) {
                        callback.on((A) (Object) result);
                    }
                });
                break;
            case Type.TASK:
                final Task t = (Task) getAt(indexToTraverse);
                final TaskContext tc = t.prepare(_graph, this, new Callback<TaskResult>() {
                    @Override
                    public void on(TaskResult result) {
                        // Single-element results are unwrapped for convenience.
                        if (result.size() == 1) {
                            callback.on((A) result.get(0));
                        } else {
                            callback.on((A) (Object) result);
                        }
                    }
                });
                t.executeUsing(tc);
                break;
            default:
                callback.on(null);
        }
    }

    @Override
    public final Node addToRelation(String relationName, Node relatedNode) {
        return addToRelationAt(this._resolver.stringToHash(relationName, true), relatedNode);
    }

    @Override
    public Node addToRelationAt(int relationIndex, Node relatedNode) {
        if (relatedNode != null) {
            NodeState preciseState = this._resolver.alignState(this);
            if (preciseState != null) {
                Relation relationArray = (Relation) preciseState.getOrCreateAt(relationIndex, Type.RELATION);
                relationArray.add(relatedNode.id());
            } else {
                throw new RuntimeException(Constants.CACHE_MISS_ERROR);
            }
        }
        return this;
    }

    @Override
    public final Node removeFromRelation(String relationName, Node relatedNode) {
        return removeFromRelationAt(this._resolver.stringToHash(relationName, false), relatedNode);
    }

    @Override
    public final Node removeFromRelationAt(int relationIndex, Node relatedNode) {
        if (relatedNode != null) {
            final NodeState preciseState = this._resolver.alignState(this);
            if (preciseState != null) {
                final Relation relationObj = (Relation) preciseState.getAt(relationIndex);
                if (relationObj != null) {
                    relationObj.remove(relatedNode.id());
                }
            } else {
                throw new RuntimeException(Constants.CACHE_MISS_ERROR);
            }
        }
        return this;
    }

    // Listener registration is delegated to the node's world-order chunk.
    @Override
    public final int listen(final NodeListener listener) {
        return ((WorldOrderChunk) this._graph.space().get(_index_worldOrder)).listen(listener);
    }

    @Override
    public final void unlisten(final int registrationID) {
        ((WorldOrderChunk) this._graph.space().get(_index_worldOrder)).unlisten(registrationID);
    }

    /** Propagates the group id to all cached chunks of this node. */
    @Override
    public final Node setGroup(int group_id) {
        this._graph.space().get(_index_worldOrder).setGroup(group_id);
        this._graph.space().get(_index_superTimeTree).setGroup(group_id);
        this._graph.space().get(_index_timeTree).setGroup(group_id);
        if (_index_stateChunk != -1) {
            this._graph.space().get(_index_stateChunk).setGroup(group_id);
        }
        return this;
    }

    @Override
    public int group() {
        return this._graph.space().get(_index_worldOrder).group();
    }

    // Identity is the (world, time, id) triple; hashCode/equals stay consistent.
    @Override
    public final int hashCode() {
        return (int) (this._id ^ this._time ^ this._world);
    }

    @Override
    public boolean equals(Object obj) {
        if (obj instanceof BaseNode) {
            BaseNode other = (BaseNode) obj;
            return other._id == this._id && other._time == this._time && other._world == this._world;
        }
        return false;
    }
}
// Copyright 2007 Google Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.enterprise.policychecker; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import com.google.enterprise.policychecker.Ace.Right; import com.google.enterprise.supergsa.security.AclPrincipal; import com.google.enterprise.supergsa.security.GsaAcl; import com.google.enterprise.supergsa.security.GsaAclEntry; import com.google.enterprise.supergsa.security.GsaEntry; import java.util.Collection; import java.util.List; import java.util.TreeSet; /** * Acl is an immutable value class that represents an Access Control List. * An Access Control List here is simply a list of {@link Ace}s. * <p> * The signature of a Acl is created by putting the list in order by Ace, * according to natural {@link Ace#compareTo(Ace)} ordering. * <p> * Acls are immutable so that they can be canonicalized and compared with * one another. In a working system, it is expected that Acls will be stored in * an associative structure such as a HashMap, keyed on their signatures, so * that there will be no more than one instance of an Acl in an equivalence * class. 
*/ public class Acl implements Comparable<Acl> { // We use a TreeSet here so that the Aces are put in canonical sorted order private final TreeSet<Ace> aces; private final String signature; private static final Acl EMPTY_ACL = new Acl(new TreeSet<Ace>()); /** * Convenience method for building a Acl from a GsaAcl protocol buffer. * @param acl ACL protocol buffer describing ACL to return * @param group Group database with which to canonicalize Aces in the ACL so references are * consistent. */ public static Acl fromGsaAcl(GsaAcl acl, Group group) { if (acl.getEntriesCount() == 0) { return null; } List<Ace> aces = Lists.newArrayList(); for (GsaAclEntry entry : acl.getEntriesList()) { if (entry.hasGsaEntry()) { aces.add(processGsaEntry(entry.getGsaEntry(), group)); } } return new Acl(aces); } public static Acl fromGsaAcl(GsaAcl acl) { return fromGsaAcl(acl, null); } public static Acl emptyAcl() { return EMPTY_ACL; } // Build a GsaAcl from this Acl. Each Ace maps onto a GsaEntry as defined in // enterprise/supergsa/security/acl.proto. public GsaAcl toGsaAcl() { GsaAcl.Builder acl = GsaAcl.newBuilder(); for (Ace ace : aces) { GsaAclEntry.Builder aclEntry = acl.addEntriesBuilder(); GsaEntry.Builder entryBuilder = aclEntry.getGsaEntryBuilder(); entryBuilder.mergePrincipal(ace.getPrincipal().getAclPrincipal()); if (ace.getRight() == Ace.Right.READ) { entryBuilder.setAccess(GsaEntry.ACCESS.PERMIT); } else { entryBuilder.setAccess(GsaEntry.ACCESS.DENY); } entryBuilder.build(); aclEntry.build(); } return acl.build(); } /** * Handy {@code Comparator<Acl>}-style static function that can be used to build * compareTo's or Comparators for other Acl classes. Returns a negative * integer, zero, or a positive integer as the first Acl is less than, equal * to, or greater than the second. * * @param thisAcl * @param otherAcl * @return Returns a negative integer, zero, or a positive integer as the * first Acl is less than, equal to, or greater than the second. 
*/ public static int compareAcls(final Acl thisAcl, final Acl otherAcl) { return thisAcl.getSignature().compareTo(otherAcl.getSignature()); } private static Ace processGsaEntry(GsaEntry entry, Group group) { Principal p; Right r; switch (entry.getPrincipal().getScope()) { case USER: p = new User(entry.getPrincipal()); break; case GROUP: p = new Group(entry.getPrincipal()); break; default: throw new IllegalArgumentException( "Invalid scope value: " + entry.getPrincipal().getScope()); } switch (entry.getAccess()) { case PERMIT: r = Right.READ; break; case DENY: r = Right.NONE; break; default: throw new IllegalArgumentException("Invalid access value: " + entry.getAccess()); } // This will canonicalize the group reference if the group parameter is supplied. if (group != null) { p = group.addPrincipal(p); } return new Ace(p, r); } private static String createSignature(final Collection<Ace> sortedAces) { StringBuilder sb = new StringBuilder(); String separator = ""; for (Ace ace : sortedAces) { sb.append(separator); sb.append(ace.toString()); separator = " "; } return sb.toString(); } private Acl(final TreeSet<Ace> aces) { this.signature = createSignature(aces); this.aces = aces; } /** * Sole public constructor: makes an Acl from a Collection of Aces. * * @param aces a Collection of Aces */ public Acl(final Collection<Ace> aces) { this(new TreeSet<Ace>(aces)); } /** * Returns a signature String unique to this Acl equivalence class. All Acls * equal to this will have the same signature String. All Acls not equal to * this will have signatures different from this one's. The caller should * treat this String as an opaque identifier. * * @return a signature string representation of this Acl. */ public String getSignature() { return signature; } public String toString() { return getSignature(); } /** * Returns the identifiers for all the Aces in this ACL. 
*/ public ImmutableList<Principal> getPrincipals() { ImmutableList.Builder<Principal> builder = ImmutableList.builder(); for (Ace ace : aces) { builder.add(ace.getPrincipal()); } return builder.build(); } /** * Returns the identifiers for all the Aces in ACL. */ public ImmutableList<AclPrincipal> getAclPrincipals() { ImmutableList.Builder<AclPrincipal> builder = ImmutableList.builder(); for (Ace ace : aces) { builder.add(ace.getPrincipal().getAclPrincipal()); } return builder.build(); } /** * Returns all the Aces in this ACL. */ public ImmutableList<Ace> getAces() { ImmutableList.Builder<Ace> builder = ImmutableList.builder(); builder.addAll(aces); return builder.build(); } /** * Returns the identifiers for all the Aces which are users in this ACL. */ public ImmutableList<AclPrincipal> getUsers() { ImmutableList.Builder<AclPrincipal> builder = ImmutableList.builder(); for (Ace ace : aces) { Principal principal = ace.getPrincipal(); if (principal instanceof User) { builder.add(principal.getAclPrincipal()); } } return builder.build(); } /** * Returns the identifiers for all the Aces which are groups in this ACL. */ public ImmutableList<AclPrincipal> getGroups() { ImmutableList.Builder<AclPrincipal> builder = ImmutableList.builder(); for (Ace ace : aces) { Principal principal = ace.getPrincipal(); if (principal instanceof Group) { builder.add(principal.getAclPrincipal()); } } return builder.build(); } /** * Indicates whether some other object is "equal to" this one. Comparison * should be by signature only. * * @param obj * @return true if the two objects are equal */ public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (!(obj instanceof Acl)) { return false; } final Acl other = (Acl) obj; return signature.equals(other.getSignature()); } /** * Returns a hash code value for the object. The hash code should be based * solely on the signature. 
* * @return integer hash code */ public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((signature == null) ? 0 : signature.hashCode()); return result; } /** * Compares this Acl with the specified Acl for order. Returns a negative * integer, zero, or a positive integer as this Acl is less than, equal to, or * greater than the specified Acl. Comparison should be based solely on * signature. * * @param other * @return a negative integer, zero, or a positive integer as this Acl is less * than, equal to, or greater than the specified Acl. */ public int compareTo(Acl other) { return compareAcls(this, other); } /** * Test whether this Acl allows READ privilege to a named AclPrincipal * * @param principal a principal to test * @return true if the principal has READ privilege */ public boolean allowsRead(AclPrincipal principal) { return allowsRight(principal, Right.READ); } /** * Test whether this Acl allows READ privilege to any of a list of AclPrincipals * * @param aclPrincipals a list of principals to test * @return true if at least one principal has READ privilege */ public boolean allowsReadToAnyAclPrincipal(List<AclPrincipal> aclPrincipals) { return allowsRightToAnyAclPrincipal(aclPrincipals, Right.READ); } /** * Test whether this Acl explicitly forbids READ privilege to a named AclPrincipal * * @param principal a principal to test * @return true if the principal has READ privilege */ public boolean forbidsRead(AclPrincipal principal) { return allowsRight(principal, Right.NONE); } /** * Test whether this Acl explicitly forbids READ privilege to any of a list of * AclPrincipals. 
* * @param aclPrincipals a list of names of users to test * @return true if at least one names user has READ privilege */ public boolean forbidsReadToAnyAclPrincipal(List<AclPrincipal> aclPrincipals) { return allowsRightToAnyAclPrincipal(aclPrincipals, Right.NONE); } private boolean allowsRight(AclPrincipal principal, Right right) { if (principal == null) { throw new IllegalArgumentException(); } for (Ace ace : aces) { if (ace.allowsAclPrincipal(principal, right)) { return true; } } return false; } private boolean allowsRightToAnyAclPrincipal(List<AclPrincipal> aclPrincipals, Right right) { for (AclPrincipal principal : aclPrincipals) { if (allowsRight(principal, right)) { return true; } } return false; } }
/*
 * Copyright (C) Lennart Martens
 *
 * Contact: lennart.martens AT UGent.be (' AT ' to be replaced with '@')
 */

/*
 * Created by IntelliJ IDEA.
 * User: Lennart
 * Date: 10-okt-02
 * Time: 18:01:11
 */
package com.compomics.dbtoolkit.gui;

import com.compomics.dbtoolkit.gui.components.FileInputPanel;
import com.compomics.dbtoolkit.gui.components.StatusPanel;
import com.compomics.dbtoolkit.gui.interfaces.StatusView;

import javax.swing.*;
import java.awt.*;
import java.awt.event.*;
import java.net.URL;
import java.util.HashMap;

/*
 * CVS information:
 *
 * $Revision: 1.6 $
 * $Date: 2007/07/06 09:52:03 $
 */

/**
 * This class implements the view and basic GUI logic
 * (action- and other listeners and change of look&feel
 * functionality) for the DBTool GUI.
 *
 * @author Lennart Martens
 */
public class DBTool extends CursorModifiableJFrameImpl implements StatusView {

    /**
     * The StatusPanel component.
     */
    StatusPanel status = null;

    /**
     * The FileInput component.
     */
    FileInputPanel fip = null;

    /**
     * This component gives a preview of the
     * first two FASTA entries in a loaded DB.
     */
    JTextArea preview = null;

    /**
     * The scrollpane on which the preview TextArea is laid out.
     */
    JScrollPane previewPane = null;

    /**
     * HashMap with key = look&feel name, value = look&feel class name.
     */
    HashMap<String, String> lookAndFeels = null;

    /**
     * The controller for this view.
     */
    private DBToolController iController = null;

    /**
     * Classpath resource name for the window icon.
     */
    private static final String WINDOW_ICON = "DBIcon.gif";

    /**
     * Constructor with a name (title) for the JFrame.
     *
     * @param aName String with the name (title) for the JFrame.
     */
    public DBTool(String aName) {
        super(aName);
        this.iController = new DBToolController(this);
    }

    /**
     * This is the real working method that gathers the components
    * and displays the Frame.
     */
    public void openWindow() {
        // Enable window closing.
        this.addWindowListener(new WindowAdapter() {
            public void windowClosing(WindowEvent we) {
                iController.exitTriggered();
            }
        });

        // RUG icon for the window. Icon loading failure is non-fatal: the
        // frame simply keeps the default icon.
        try {
            URL url = this.getClass().getClassLoader().getResource(WINDOW_ICON);
            if(url != null) {
                this.setIconImage(new ImageIcon(url).getImage());
            }
        } catch(Exception e) {
            e.printStackTrace();
        }

        this.addComponentListener(new ComponentAdapter() {
            /**
             * Invoked when the component's size changes.
             */
            public void componentResized(ComponentEvent e) {
                // We should inform the controller,
                // so it can reformat the preview data and show it.
                iController.triggerResized();
            }
        });

        // The menubar.
        JMenuBar bar = this.getMenuBarForFrame();
        this.setJMenuBar(bar);

        // The main GUI.
        this.getContentPane().add(this.getScreen(), BorderLayout.CENTER);
        this.pack();
        this.setSize(this.getWidth()+5, this.getHeight()+5);

        // Show the frame.
        this.setLocationRelativeTo(null);
        this.setVisible(true);
    }

    /**
     * This method constructs the MenuBar for the application.
     *
     * @return JMenuBar the menubar for this application.
     */
    private JMenuBar getMenuBarForFrame() {
        // First menu is the file menu.
        JMenu file = new JMenu("File");
        file.setMnemonic('f');

        // We can either process the file that was loaded (if any),
        // or exit the program.
        JMenuItem open = new JMenuItem("Process", KeyEvent.VK_P);
        JMenuItem exit = new JMenuItem("Exit", KeyEvent.VK_X);
        open.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                // Inform the controller that a process request has been
                // triggered.
                iController.processDataTriggered();
            }
        });
        exit.addActionListener(new ActionListener() {
            /**
             * Invoked when an action occurs.
             */
            public void actionPerformed(ActionEvent e) {
                iController.exitTriggered();
            }
        });

        // Construct the file menu.
        file.add(open);
        file.add(exit);

        // Next up is the settings menu.
        JMenu settings = new JMenu("Settings");
        settings.setMnemonic(KeyEvent.VK_S);

        // We can set the number of entries to show in the preview pane.
        JMenuItem numFASTALines = new JMenuItem("Preview...");
        numFASTALines.setMnemonic(KeyEvent.VK_P);
        numFASTALines.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                // Inform the controller we want to reset the number of items to display
                // on the preview.
                iController.requestFASTALines();
            }
        });

        // Next item to the settings menu is the look and feel choice.
        JMenu lAndF = new JMenu("Look and feel...");
        lAndF.setMnemonic(KeyEvent.VK_L);

        // Get the installed look&feel list.
        UIManager.LookAndFeelInfo[] lfs = UIManager.getInstalledLookAndFeels();
        lookAndFeels = new HashMap<String, String>(lfs.length);

        // Look and feel handling is all taken care of here.
        // Notice the dynamically constructed set of sub-menuitems, based on the
        // installed look&feels for the current platform.
        JMenuItem[] lafs = new JMenuItem[lfs.length];
        // So, we make a MenuItem for each L&F, and add the corresponding
        // action to it.
        for(int i = 0; i < lfs.length; i++) {
            // For each L&F...
            final UIManager.LookAndFeelInfo lF = lfs[i];
            // ...display the name in the menu...
            lafs[i] = new JMenuItem(lF.getName());
            // .. and listen for clicks...
            lafs[i].addActionListener(new ActionListener() {
                public void actionPerformed(ActionEvent e) {
                    // .. and when clicked, set the corresponding L&F.
                    setLAF(lF.getClassName());
                }
            });
            // Store them in a HashMap for later reference.
            lAndF.add(lafs[i]);
            lookAndFeels.put(lF.getName(), lF.getClassName());
        }

        // Complete the settings menu.
        settings.add(lAndF);
        settings.add(numFASTALines);

        // Now for the tools menu.
        JMenu tools = new JMenu("Tools");
        tools.setMnemonic(KeyEvent.VK_T);

        // Count menuitem.
        JMenuItem tool1 = new JMenuItem("Count DB entries");
        tool1.setMnemonic(KeyEvent.VK_E);
        tool1.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                iController.countRequested();
            }
        });

        // To FASTA file menuitem.
        JMenuItem toFasta = new JMenuItem("Output as FASTA file...");
        toFasta.setMnemonic(KeyEvent.VK_O);
        toFasta.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                iController.FASTAOutputRequested();
            }
        });

        // To replaced FASTA file menuitem.
        JMenuItem toReplacedFasta = new JMenuItem("Output as FASTA file after replacing residues...");
        toReplacedFasta.setMnemonic(KeyEvent.VK_O);
        toReplacedFasta.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                iController.replacedOutputRequested();
            }
        });

        // Regular expression filter menuitem.
        JMenuItem regExpLimit = new JMenuItem("Filter proteins by regular expression...");
        regExpLimit.setMnemonic(KeyEvent.VK_M);
        regExpLimit.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                iController.processRegExpFilterTriggered();
            }
        });

        // To reversed FASTA file menuitem.
        JMenuItem toReversedFasta = new JMenuItem("Output as reversed FASTA file...");
        toReversedFasta.setMnemonic(KeyEvent.VK_V);
        toReversedFasta.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                iController.reversedOutputRequested();
            }
        });

        // To shuffled FASTA file menuitem.
        JMenuItem toShuffledFasta = new JMenuItem("Output as shuffled FASTA file...");
        toShuffledFasta.setMnemonic(KeyEvent.VK_S);
        toShuffledFasta.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                iController.shuffleOutputRequested();
            }
        });

        // Concat file menuitem.
        JMenuItem concat = new JMenuItem("Concatenate DB's (or copy file)...");
        concat.setMnemonic(KeyEvent.VK_C);
        concat.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                iController.processConcatTriggered();
            }
        });

        // Map peptides menuitem.
        JMenuItem pepMap = new JMenuItem("Map peptide list to database...");
        pepMap.setMnemonic(KeyEvent.VK_M);
        pepMap.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                iController.processPeptideMappingTriggered();
            }
        });

        // ClearRed file menuitem.
        JMenuItem clearRed = new JMenuItem("Clear redundancy in DB...");
        clearRed.setMnemonic(KeyEvent.VK_R);
        clearRed.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                iController.processClearRedTriggered();
            }
        });

        tools.add(tool1);
        tools.add(toFasta);
        tools.add(toReplacedFasta);
        tools.add(regExpLimit);
        tools.add(toReversedFasta);
        tools.add(toShuffledFasta);
        tools.add(concat);
        tools.add(pepMap);
        tools.add(clearRed);

        // The help menu contains only some info about the program.
        JMenu help = new JMenu("Help");
        help.setMnemonic(KeyEvent.VK_H);

        // The about menuitem will pop-up the about dialog.
        JMenuItem about = new JMenuItem("About");
        about.setMnemonic(KeyEvent.VK_A);
        about.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                iController.showAbout();
            }
        });

        // Complete the Help menu.
        help.add(about);

        // Menubar construction in progress...
        JMenuBar bar = new JMenuBar();
        bar.add(file);
        bar.add(settings);
        bar.add(tools);
        bar.add(help);

        // Voila.
        return bar;
    }

    /**
     * This method constructs the main screen in a JPanel.
     *
     * @return JPanel with the main screen.
     */
    private JPanel getScreen() {
        // The screen.
        JPanel screen = new JPanel(new BorderLayout());

        // The status panel.
        status = new StatusPanel(false);
        status.setStatus("No file loaded.");
        status.setError("None.");
        status.setBorder(BorderFactory.createTitledBorder("Status panel"));
        screen.add(status, BorderLayout.SOUTH);

        // FileInputPanel.
        fip = new FileInputPanel("Database file");
        fip.addReceiver(iController);
        screen.add(fip, BorderLayout.NORTH);

        // Preview area.
        preview = new JTextArea(20, 120);
        preview.setFont(new Font("monospaced", Font.PLAIN, 12));
        preview.setEditable(false);
        previewPane = new JScrollPane(preview);
        previewPane.setBorder(BorderFactory.createTitledBorder("Preview pane (" + iController.getNrOfEntries() + " FASTA entries)"));
        screen.add(previewPane, BorderLayout.CENTER);

        // Voila.
        return screen;
    }

    /**
     * This method attempts to set the Look and Feel.
     *
     * @param aClassName the classname for the look and feel.
     */
    private void setLAF(String aClassName) {
        try {
            UIManager.setLookAndFeel(aClassName);
            SwingUtilities.updateComponentTreeUI(this);
        } catch(Exception e) {
            // Class names come straight from UIManager's installed list, so a
            // failure here is unexpected — but log it instead of swallowing it
            // silently (setLookAndFeel can throw UnsupportedLookAndFeelException).
            e.printStackTrace();
        }
    }

    /**
     * Overridden paint method to work well with the changing of L&F.
     */
    @Override
    public void paint(Graphics g) {
        preview.setBackground(this.getBackground());
        super.paint(g);
    }

    /**
     * This method queries the previewpane for the number of characters it can display per line.
     *
     * @return int with the number of characters the previewpane can display per line
     */
    int getNrOfCharsOnPreviewPane() {
        FontMetrics fm = preview.getFontMetrics(preview.getFont());
        // 'W' is the widest glyph in most fonts; using it gives a safe estimate.
        int charWidth = fm.charWidth('W');
        return (previewPane.getSize().width - 5) / charWidth;
    }

    /**
     * The main method ensures that we can start the program at this point.
     * The real working method however, is the 'openWindow()' method -
     * this to allow integration from other Java programs.
     *
     * @param args String[] with the start-up arguments. These are not used here.
     */
    public static void main(String[] args) {
        DBTool tool = null;
        try {
            tool = new DBTool("Database Processing Tool");
            tool.openWindow();
        } catch(Throwable t) {
            // Fixed: the message used to open with a bare space but close with
            // a quote; now the exception message is consistently quoted.
            JOptionPane.showMessageDialog(tool, new String[]{"Fatal exception occurred in program!", "\n", "'" + t.getMessage() + "'!", "\n", "Please contact the author about this if the problem persists.", "\n", "\n"}, "Program unexpectedly terminated!", JOptionPane.ERROR_MESSAGE);
            t.printStackTrace();
            System.exit(1);
        }
    }

    /**
     * This method returns a Point to set as location for a child window
     * that is located at a fraction of the current location and size.
     *
     * @param aFractionWidth int with the fraction of the width to
     *                       put the child location at.
     * @param aFractionHeight int with the fraction of the height to
     *                        put the child location at.
     * @return Point with the requested location.
     */
    Point getPoint(int aFractionWidth, int aFractionHeight) {
        Point p = this.getLocation();
        // Integer division is intentional: the offset is a whole-pixel fraction
        // of the current width/height.
        Point result = new Point(p.x + this.getWidth()/aFractionWidth, p.y + this.getHeight()/aFractionHeight);
        return result;
    }

    /**
     * This method allows the caller to specify the status message
     * that is being displayed.
     *
     * @param aStatus String with the desired status message.
     */
    public void setStatus(String aStatus) {
        this.status.setStatus(aStatus);
    }

    /**
     * This method allows the caller to specify the error message
     * that is being displayed.
     *
     * @param aError String with the desired error message.
     */
    public void setError(String aError) {
        this.status.setError(aError);
    }

    /**
     * This method clears the visible data on the screen.
     */
    public void clear() {
        this.preview.setText("");
        this.fip.setTextFieldText("");
        this.setStatus("No file loaded.");
    }
}
/* * Copyright 2000-2013 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.codeInsight.completion; import com.intellij.codeInsight.ExpectedTypeInfo; import com.intellij.codeInsight.ExpectedTypeInfoImpl; import com.intellij.codeInsight.completion.impl.CompletionSorterImpl; import com.intellij.codeInsight.completion.impl.LiftShorterItemsClassifier; import com.intellij.codeInsight.lookup.*; import com.intellij.openapi.roots.ProjectFileIndex; import com.intellij.openapi.roots.ProjectRootManager; import com.intellij.openapi.util.Pair; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.psi.*; import com.intellij.psi.codeStyle.JavaCodeStyleManager; import com.intellij.psi.codeStyle.NameUtil; import com.intellij.psi.codeStyle.VariableKind; import com.intellij.psi.javadoc.PsiDocComment; import com.intellij.psi.util.PsiTreeUtil; import com.intellij.psi.util.PsiUtil; import com.intellij.psi.util.TypeConversionUtil; import com.intellij.util.SmartList; import com.intellij.util.containers.ContainerUtil; import gnu.trove.THashSet; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import static com.intellij.patterns.PsiJavaPatterns.psiElement; /** * @author peter */ public class JavaCompletionSorting { private JavaCompletionSorting() { } public static CompletionResultSet addJavaSorting(final 
CompletionParameters parameters, CompletionResultSet result) { PsiElement position = parameters.getPosition(); ExpectedTypeInfo[] expectedTypes = getExpectedTypesWithDfa(parameters, position); CompletionType type = parameters.getCompletionType(); boolean smart = type == CompletionType.SMART; boolean afterNew = JavaSmartCompletionContributor.AFTER_NEW.accepts(position); List<LookupElementWeigher> afterProximity = new ArrayList<>(); ContainerUtil.addIfNotNull(afterProximity, PreferMostUsedWeigher.create(position)); afterProximity.add(new PreferContainingSameWords(expectedTypes)); afterProximity.add(new PreferShorter(expectedTypes)); CompletionSorter sorter = CompletionSorter.defaultSorter(parameters, result.getPrefixMatcher()); if (!smart && afterNew) { sorter = sorter.weighBefore("liftShorter", new PreferExpected(true, expectedTypes, position)); } else if (PsiTreeUtil.getParentOfType(position, PsiReferenceList.class) == null) { sorter = ((CompletionSorterImpl)sorter).withClassifier("liftShorterClasses", true, new LiftShorterClasses(position)); } if (smart) { sorter = sorter.weighAfter("priority", new PreferDefaultTypeWeigher(expectedTypes, parameters, false)); } List<LookupElementWeigher> afterStats = ContainerUtil.newArrayList(); afterStats.add(new PreferByKindWeigher(type, position, expectedTypes)); if (smart) { afterStats.add(new PreferDefaultTypeWeigher(expectedTypes, parameters, true)); } else { ContainerUtil.addIfNotNull(afterStats, preferStatics(position, expectedTypes)); if (!afterNew) { afterStats.add(new PreferExpected(false, expectedTypes, position)); } } ContainerUtil.addIfNotNull(afterStats, recursion(parameters, expectedTypes)); afterStats.add(new PreferSimilarlyEnding(expectedTypes)); if (ContainerUtil.or(expectedTypes, info -> !info.getType().equals(PsiType.VOID))) { afterStats.add(new PreferNonGeneric()); } Collections.addAll(afterStats, new PreferAccessible(position), new PreferSimple()); sorter = sorter.weighAfter("stats", afterStats.toArray(new 
LookupElementWeigher[0])); sorter = sorter.weighAfter("proximity", afterProximity.toArray(new LookupElementWeigher[0])); return result.withRelevanceSorter(sorter); } @NotNull private static ExpectedTypeInfo[] getExpectedTypesWithDfa(CompletionParameters parameters, PsiElement position) { if (psiElement().beforeLeaf(psiElement().withText(".")).accepts(position)) { return ExpectedTypeInfo.EMPTY_ARRAY; } List<ExpectedTypeInfo> castExpectation = SmartCastProvider.getParenthesizedCastExpectationByOperandType(position); if (!castExpectation.isEmpty()) { return castExpectation.toArray(ExpectedTypeInfo.EMPTY_ARRAY); } return JavaSmartCompletionContributor.getExpectedTypes(parameters); } @Nullable private static LookupElementWeigher recursion(CompletionParameters parameters, final ExpectedTypeInfo[] expectedInfos) { final PsiElement position = parameters.getPosition(); final PsiMethodCallExpression expression = PsiTreeUtil.getParentOfType(position, PsiMethodCallExpression.class, true, PsiClass.class); final PsiReferenceExpression reference = expression != null ? 
expression.getMethodExpression() : PsiTreeUtil.getParentOfType(position, PsiReferenceExpression.class); if (reference == null) return null; return new RecursionWeigher(position, parameters.getCompletionType(), reference, expression, expectedInfos); } @Nullable private static LookupElementWeigher preferStatics(PsiElement position, final ExpectedTypeInfo[] infos) { if (PsiTreeUtil.getParentOfType(position, PsiDocComment.class) != null) { return null; } if (position.getParent() instanceof PsiReferenceExpression) { final PsiReferenceExpression refExpr = (PsiReferenceExpression)position.getParent(); final PsiElement qualifier = refExpr.getQualifier(); if (qualifier == null) { return null; } if (!(qualifier instanceof PsiJavaCodeReferenceElement) || !(((PsiJavaCodeReferenceElement)qualifier).resolve() instanceof PsiClass)) { return null; } } return new LookupElementWeigher("statics") { @NotNull @Override public Comparable weigh(@NotNull LookupElement element) { final Object o = element.getObject(); if (o instanceof PsiKeyword) return -3; if (!(o instanceof PsiMember) || element.getUserData(JavaGenerateMemberCompletionContributor.GENERATE_ELEMENT) != null) { return 0; } if (((PsiMember)o).hasModifierProperty(PsiModifier.STATIC) && !hasNonVoid(infos)) { if (o instanceof PsiMethod) return -5; if (o instanceof PsiField) return -4; } if (o instanceof PsiClass) return -3; //instance method or field return -5; } }; } private static ExpectedTypeMatching getExpectedTypeMatching(LookupElement item, ExpectedTypeInfo[] expectedInfos, @Nullable String expectedMemberName) { PsiType itemType = JavaCompletionUtil.getLookupElementType(item); if (itemType != null) { PsiUtil.ensureValidType(itemType); for (final ExpectedTypeInfo expectedInfo : expectedInfos) { PsiType expectedType = expectedInfo.getType(); if (expectedInfo.getKind() == ExpectedTypeInfo.TYPE_OR_SUPERTYPE) { if (itemType.isAssignableFrom(expectedType)) { return ExpectedTypeMatching.expected; } } else { PsiType defaultType = 
expectedInfo.getDefaultType(); if (defaultType != expectedType && defaultType.isAssignableFrom(itemType)) { return ExpectedTypeMatching.ofDefaultType; } if (expectedType.isAssignableFrom(itemType)) { return ExpectedTypeMatching.expected; } } } } if (hasNonVoid(expectedInfos)) { if (item.getObject() instanceof PsiKeyword) { String keyword = ((PsiKeyword)item.getObject()).getText(); if (PsiKeyword.NEW.equals(keyword) || PsiKeyword.NULL.equals(keyword)) { return ExpectedTypeMatching.maybeExpected; } } } else if (expectedInfos.length > 0) { return ExpectedTypeMatching.unexpected; } return preferByMemberName(expectedMemberName, itemType); } @NotNull private static ExpectedTypeMatching preferByMemberName(@Nullable String expectedMemberName, @Nullable PsiType itemType) { if (expectedMemberName != null) { PsiClass itemClass = PsiUtil.resolveClassInClassTypeOnly(itemType); if (itemClass != null) { if (itemClass.findMethodsByName(expectedMemberName, true).length > 0 || itemClass.findFieldByName(expectedMemberName, true) != null || itemClass.findInnerClassByName(expectedMemberName, true) != null) { return ExpectedTypeMatching.expected; } } } return ExpectedTypeMatching.normal; } private static boolean hasNonVoid(ExpectedTypeInfo[] expectedInfos) { boolean hasNonVoid = false; for (ExpectedTypeInfo info : expectedInfos) { if (!PsiType.VOID.equals(info.getType())) { hasNonVoid = true; } } return hasNonVoid; } @Nullable private static String getLookupObjectName(Object o) { if (o instanceof PsiVariable) { final PsiVariable variable = (PsiVariable)o; JavaCodeStyleManager codeStyleManager = JavaCodeStyleManager.getInstance(variable.getProject()); VariableKind variableKind = codeStyleManager.getVariableKind(variable); return codeStyleManager.variableNameToPropertyName(variable.getName(), variableKind); } if (o instanceof PsiMethod) { return ((PsiMethod)o).getName(); } return null; } private static int getNameEndMatchingDegree(final String name, ExpectedTypeInfo[] expectedInfos) { int 
res = 0; if (name != null && expectedInfos != null) { final List<String> words = NameUtil.nameToWordsLowerCase(name); final List<String> wordsNoDigits = NameUtil.nameToWordsLowerCase(truncDigits(name)); int max1 = calcMatch(words, 0, expectedInfos); max1 = calcMatch(wordsNoDigits, max1, expectedInfos); res = max1; } return res; } private static String truncDigits(String name){ int count = name.length() - 1; while (count >= 0) { char c = name.charAt(count); if (!Character.isDigit(c)) break; count--; } return name.substring(0, count + 1); } private static int calcMatch(final List<String> words, int max, ExpectedTypeInfo[] myExpectedInfos) { for (ExpectedTypeInfo myExpectedInfo : myExpectedInfos) { String expectedName = ((ExpectedTypeInfoImpl)myExpectedInfo).getExpectedName(); if (expectedName == null) continue; max = calcMatch(expectedName, words, max); max = calcMatch(truncDigits(expectedName), words, max); } return max; } private static int calcMatch(final String expectedName, final List<String> words, int max) { if (expectedName == null) return max; String[] expectedWords = NameUtil.nameToWords(expectedName); int limit = Math.min(words.size(), expectedWords.length); for (int i = 0; i < limit; i++) { String word = words.get(words.size() - i - 1); String expectedWord = expectedWords[expectedWords.length - i - 1]; if (word.equalsIgnoreCase(expectedWord)) { max = Math.max(max, i + 1); } else { break; } } return max; } private static class PreferDefaultTypeWeigher extends LookupElementWeigher { private final PsiTypeParameter myTypeParameter; private final ExpectedTypeInfo[] myExpectedTypes; private final CompletionParameters myParameters; private final boolean myPreferExact; private final CompletionLocation myLocation; PreferDefaultTypeWeigher(@NotNull ExpectedTypeInfo[] expectedTypes, CompletionParameters parameters, boolean preferExact) { super("defaultType" + (preferExact ? 
"Exact" : "")); myExpectedTypes = ContainerUtil.map2Array(expectedTypes, ExpectedTypeInfo.class, info -> { PsiType type = removeClassWildcard(info.getType()); PsiType defaultType = removeClassWildcard(info.getDefaultType()); if (type == info.getType() && defaultType == info.getDefaultType()) { return info; } return new ExpectedTypeInfoImpl(type, info.getKind(), defaultType, info.getTailType(), null, ExpectedTypeInfoImpl.NULL); }); myParameters = parameters; myPreferExact = preferExact; final Pair<PsiTypeParameterListOwner,Integer> pair = TypeArgumentCompletionProvider.getTypeParameterInfo(parameters.getPosition()); myTypeParameter = pair == null ? null : pair.first.getTypeParameters()[pair.second.intValue()]; myLocation = new CompletionLocation(myParameters); } @NotNull @Override public MyResult weigh(@NotNull LookupElement item) { final Object object = item.getObject(); if (object instanceof PsiClass) { if (object instanceof PsiTypeParameter) return MyResult.typeParameter; if (myTypeParameter != null && object.equals(PsiUtil.resolveClassInType(TypeConversionUtil.typeParameterErasure(myTypeParameter)))) { return MyResult.exactlyExpected; } } if (returnsUnboundType(item)) return MyResult.normal; PsiType itemType = JavaCompletionUtil.getLookupElementType(item); if ((myPreferExact || object instanceof PsiClass) && isExactlyExpected(item, itemType)) { return AbstractExpectedTypeSkipper.skips(item, myLocation) ? 
MyResult.expectedNoSelect : MyResult.exactlyExpected; } if (itemType == null) return MyResult.normal; for (final ExpectedTypeInfo expectedInfo : myExpectedTypes) { final PsiType defaultType = expectedInfo.getDefaultType(); final PsiType expectedType = expectedInfo.getType(); if (defaultType != expectedType) { if (myPreferExact && defaultType.equals(itemType)) { return MyResult.exactlyDefault; } if (defaultType.isAssignableFrom(itemType)) { return MyResult.ofDefaultType; } } if (PsiType.VOID.equals(itemType) && PsiType.VOID.equals(expectedType)) { return MyResult.exactlyExpected; } } return MyResult.normal; } private boolean isExactlyExpected(@NotNull LookupElement item, @Nullable PsiType itemType) { if (JavaCompletionUtil.SUPER_METHOD_PARAMETERS.get(item) != null) { return true; } if (itemType == null || itemType.equalsToText(CommonClassNames.JAVA_LANG_OBJECT)) { return false; } return ContainerUtil.exists(myExpectedTypes, info -> box(info.getType().getDeepComponentType()).equals(box(itemType))); } private boolean returnsUnboundType(@NotNull LookupElement item) { JavaMethodCallElement call = item.as(JavaMethodCallElement.CLASS_CONDITION_KEY); if (call != null && !call.getInferenceSubstitutor().equals(PsiSubstitutor.EMPTY)) { PsiType callType = TypeConversionUtil.erasure(call.getSubstitutor().substitute(call.getObject().getReturnType())); return callType == null || Arrays.stream(myExpectedTypes).noneMatch(i -> canBeExpected(callType, i)); } return false; } private static boolean canBeExpected(PsiType callType, ExpectedTypeInfo info) { PsiType expectedType = TypeConversionUtil.erasure(info.getType()); return expectedType != null && TypeConversionUtil.isAssignable(expectedType, callType); } private PsiType box(PsiType expectedType) { PsiClassType boxed = expectedType instanceof PsiPrimitiveType ? ((PsiPrimitiveType)expectedType).getBoxedType(myParameters.getPosition()) : null; return boxed != null ? 
boxed : expectedType; } private static PsiType removeClassWildcard(PsiType type) { if (type instanceof PsiClassType) { final PsiClass psiClass = ((PsiClassType)type).resolve(); if (psiClass != null && CommonClassNames.JAVA_LANG_CLASS.equals(psiClass.getQualifiedName())) { PsiClassType erased = (PsiClassType)GenericsUtil.eliminateWildcards(type); PsiType[] parameters = erased.getParameters(); if (parameters.length == 1 && !parameters[0].equalsToText(CommonClassNames.JAVA_LANG_OBJECT)) { return erased; } } } return type; } private enum MyResult { expectedNoSelect, typeParameter, exactlyDefault, ofDefaultType, exactlyExpected, normal, } } private enum ExpectedTypeMatching { ofDefaultType, expected, maybeExpected, normal, unexpected, } private static class PreferAccessible extends LookupElementWeigher { private final PsiElement myPosition; public PreferAccessible(PsiElement position) { super("accessible"); myPosition = position; } private enum MyEnum { NORMAL, DEPRECATED, INACCESSIBLE, } @NotNull @Override public Comparable weigh(@NotNull LookupElement element) { final Object object = element.getObject(); if (object instanceof PsiDocCommentOwner) { final PsiDocCommentOwner member = (PsiDocCommentOwner)object; if (!JavaPsiFacade.getInstance(member.getProject()).getResolveHelper().isAccessible(member, myPosition, null)) return MyEnum.INACCESSIBLE; if (JavaCompletionUtil.isEffectivelyDeprecated(member)) return MyEnum.DEPRECATED; } return MyEnum.NORMAL; } } private static class PreferNonGeneric extends LookupElementWeigher { public PreferNonGeneric() { super("nonGeneric"); } @NotNull @Override public Comparable weigh(@NotNull LookupElement element) { final Object object = element.getObject(); if (object instanceof PsiMethod && !FunctionalExpressionCompletionProvider.isFunExprItem(element)) { PsiType type = ((PsiMethod)object).getReturnType(); final JavaMethodCallElement callItem = element.as(JavaMethodCallElement.CLASS_CONDITION_KEY); if (callItem != null) { type = 
callItem.getSubstitutor().substitute(type); } if (type instanceof PsiClassType && ((PsiClassType) type).resolve() instanceof PsiTypeParameter) return 1; } return 0; } } private static class PreferSimple extends LookupElementWeigher { public PreferSimple() { super("simple"); } @NotNull @Override public Comparable weigh(@NotNull LookupElement element) { final PsiTypeLookupItem lookupItem = element.as(PsiTypeLookupItem.CLASS_CONDITION_KEY); if (lookupItem != null) { return lookupItem.getBracketsCount() * 10 + (lookupItem.isAddArrayInitializer() ? 1 : 0); } return 0; } } private static class PreferExpected extends LookupElementWeigher { private final boolean myConstructorPossible; private final ExpectedTypeInfo[] myExpectedTypes; private final List<PsiType> myExpectedClasses = new SmartList<>(); private final String myExpectedMemberName; public PreferExpected(boolean constructorPossible, ExpectedTypeInfo[] expectedTypes, PsiElement position) { super("expectedType"); myConstructorPossible = constructorPossible; myExpectedTypes = expectedTypes; for (ExpectedTypeInfo info : expectedTypes) { ContainerUtil.addIfNotNull(myExpectedClasses, PsiUtil.substituteTypeParameter(info.getDefaultType(), CommonClassNames.JAVA_LANG_CLASS, 0, false)); } myExpectedMemberName = calcExpectedMemberNameByParentCall(position); } @Nullable private static String calcExpectedMemberNameByParentCall(PsiElement position) { if (position.getParent() instanceof PsiJavaCodeReferenceElement) { PsiElement grand = position.getParent().getParent(); if (grand instanceof PsiJavaCodeReferenceElement && ((PsiJavaCodeReferenceElement)grand).getQualifier() == position.getParent()) { return ((PsiJavaCodeReferenceElement)grand).getReferenceName(); } } return null; } @NotNull @Override public ExpectedTypeMatching weigh(@NotNull LookupElement item) { if (item.getObject() instanceof PsiClass && !myConstructorPossible) { PsiType itemType = JavaCompletionUtil.getLookupElementType(item); if (itemType != null) { for 
(PsiType expectedClass : myExpectedClasses) { if (expectedClass.isAssignableFrom(itemType)) { return ExpectedTypeMatching.expected; } } } ExpectedTypeMatching byName = preferByMemberName(myExpectedMemberName, itemType); if (byName != ExpectedTypeMatching.normal) { return byName; } } return getExpectedTypeMatching(item, myExpectedTypes, myExpectedMemberName); } } private static class PreferSimilarlyEnding extends LookupElementWeigher { private final ExpectedTypeInfo[] myExpectedTypes; public PreferSimilarlyEnding(ExpectedTypeInfo[] expectedTypes) { super("nameEnd"); myExpectedTypes = expectedTypes; } @NotNull @Override public Comparable weigh(@NotNull LookupElement element) { final String name = getLookupObjectName(element.getObject()); return -getNameEndMatchingDegree(name, myExpectedTypes); } } private static class PreferContainingSameWords extends LookupElementWeigher { private final ExpectedTypeInfo[] myExpectedTypes; public PreferContainingSameWords(ExpectedTypeInfo[] expectedTypes) { super("sameWords"); myExpectedTypes = expectedTypes; } @NotNull @Override public Comparable weigh(@NotNull LookupElement element) { final Object object = element.getObject(); final String name = getLookupObjectName(object); if (name != null) { int max = 0; final List<String> wordsNoDigits = NameUtil.nameToWordsLowerCase(truncDigits(name)); for (ExpectedTypeInfo myExpectedInfo : myExpectedTypes) { String expectedName = ((ExpectedTypeInfoImpl)myExpectedInfo).getExpectedName(); if (expectedName != null) { final THashSet<String> set = new THashSet<>(NameUtil.nameToWordsLowerCase(truncDigits(expectedName))); set.retainAll(wordsNoDigits); max = Math.max(max, set.size()); } } return -max; } return 0; } } private static class PreferShorter extends LookupElementWeigher { private final ExpectedTypeInfo[] myExpectedTypes; public PreferShorter(ExpectedTypeInfo[] expectedTypes) { super("shorter"); myExpectedTypes = expectedTypes; } @NotNull @Override public Comparable weigh(@NotNull 
LookupElement element) { final Object object = element.getObject(); final String name = getLookupObjectName(object); if (name != null && getNameEndMatchingDegree(name, myExpectedTypes) != 0) { return NameUtil.nameToWords(name).length - 1000; } return 0; } } private static class LiftShorterClasses extends ClassifierFactory<LookupElement> { final ProjectFileIndex fileIndex; private final PsiElement myPosition; public LiftShorterClasses(PsiElement position) { super("liftShorterClasses"); myPosition = position; fileIndex = ProjectRootManager.getInstance(myPosition.getProject()).getFileIndex(); } @Override public Classifier<LookupElement> createClassifier(Classifier<LookupElement> next) { return new LiftShorterItemsClassifier("liftShorterClasses", next, new LiftShorterItemsClassifier.LiftingCondition() { @Override public boolean shouldLift(LookupElement shorterElement, LookupElement longerElement) { Object object = shorterElement.getObject(); if (!(object instanceof PsiClass)) return false; if (longerElement.getUserData(JavaGenerateMemberCompletionContributor.GENERATE_ELEMENT) != null) return true; if (longerElement.getObject() instanceof PsiClass) { PsiClass psiClass = (PsiClass)object; PsiFile file = psiClass.getContainingFile(); if (file != null) { VirtualFile vFile = file.getOriginalFile().getVirtualFile(); if (vFile != null && fileIndex.isInSource(vFile)) { return true; } } } return false; } }, true); } } }
/* * The MIT License * * Copyright (c) 2004-2009, Sun Microsystems, Inc. * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package hudson; import com.google.common.collect.Lists; import hudson.init.InitMilestone; import hudson.model.Hudson; import jenkins.ExtensionComponentSet; import jenkins.model.Jenkins; import hudson.util.AdaptedIterator; import hudson.util.DescriptorList; import hudson.util.Memoizer; import hudson.util.Iterators; import hudson.ExtensionPoint.LegacyInstancesAreScopedToHudson; import java.util.AbstractList; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Vector; import java.util.concurrent.CopyOnWriteArrayList; import java.util.logging.Level; import java.util.logging.Logger; import javax.annotation.CheckForNull; import javax.annotation.Nonnull; import jenkins.util.io.OnMaster; /** * Retains the known extension instances for the given type 'T'. 
*
 * <p>
 * Extensions are loaded lazily on demand and automatically by using {@link ExtensionFinder}, but this
 * class also provides a mechanism to provide compatibility with the older {@link DescriptorList}-based
 * manual registration.
 *
 * <p>
 * All {@link ExtensionList} instances should be owned by {@link jenkins.model.Jenkins}, even though
 * extension points can be defined by anyone on any type. Use {@link jenkins.model.Jenkins#getExtensionList(Class)}
 * and {@link jenkins.model.Jenkins#getDescriptorList(Class)} to obtain the instances.
 *
 * @param <T>
 *      Type of the extension point. This class holds instances of the subtypes of 'T'.
 *
 * @author Kohsuke Kawaguchi
 * @since 1.286
 * @see jenkins.model.Jenkins#getExtensionList(Class)
 * @see jenkins.model.Jenkins#getDescriptorList(Class)
 */
public class ExtensionList<T> extends AbstractList<T> implements OnMaster {
    /**
     * @deprecated as of 1.417
     *      Use {@link #jenkins}
     */
    @Deprecated
    public final Hudson hudson;
    public final @CheckForNull Jenkins jenkins;
    public final Class<T> extensionType;

    /**
     * Once discovered, extensions are retained here.
     * Treated as copy-on-write: mutators build a new list and swap this reference
     * (see {@code addSync}/{@code removeSync}/{@code refresh}), so readers never see
     * a list being mutated in place.
     */
    @CopyOnWrite
    private volatile List<ExtensionComponent<T>> extensions;

    // Listeners notified via fireOnChangeListeners() whenever the discovered set changes.
    private final List<ExtensionListListener> listeners = new CopyOnWriteArrayList<ExtensionListListener>();

    /**
     * Place to store manually registered instances with the per-Hudson scope.
     * {@link CopyOnWriteArrayList} is used here to support concurrent iterations and mutation.
     */
    private final CopyOnWriteArrayList<ExtensionComponent<T>> legacyInstances;

    /**
     * @deprecated as of 1.416
     *      Use {@link #ExtensionList(Jenkins, Class)}
     */
    @Deprecated
    protected ExtensionList(Hudson hudson, Class<T> extensionType) {
        this((Jenkins)hudson,extensionType);
    }

    protected ExtensionList(Jenkins jenkins, Class<T> extensionType) {
        this(jenkins,extensionType,new CopyOnWriteArrayList<ExtensionComponent<T>>());
    }

    /**
     * @deprecated as of 1.416
     *      Use {@link #ExtensionList(Jenkins, Class, CopyOnWriteArrayList)}
     */
    @Deprecated
    protected ExtensionList(Hudson hudson, Class<T> extensionType, CopyOnWriteArrayList<ExtensionComponent<T>> legacyStore) {
        this((Jenkins)hudson,extensionType,legacyStore);
    }

    /**
     *
     * @param legacyStore
     *      Place to store manually registered instances. The version of the constructor that
     *      omits this uses a new {@link Vector}, making the storage lifespan tied to the life of {@link ExtensionList}.
     *      If the manually registered instances are scoped to VM level, the caller should pass in a static list.
     */
    protected ExtensionList(Jenkins jenkins, Class<T> extensionType, CopyOnWriteArrayList<ExtensionComponent<T>> legacyStore) {
        this.hudson = (Hudson)jenkins;
        this.jenkins = jenkins;
        this.extensionType = extensionType;
        this.legacyInstances = legacyStore;
        if (jenkins == null) {
            // No Jenkins instance available: behave as a permanently-empty list
            // instead of attempting auto discovery later.
            extensions = Collections.emptyList();
        }
    }

    /**
     * Add a listener to the extension list.
     * @param listener The listener.
     */
    public void addListener(@Nonnull ExtensionListListener listener) {
        listeners.add(listener);
    }

    /**
     * Looks for the extension instance of the given type (subclasses excluded),
     * or return null.
     */
    public @CheckForNull <U extends T> U get(Class<U> type) {
        // Exact class match only (getClass()==type), deliberately not instanceof.
        for (T ext : this)
            if(ext.getClass()==type)
                return type.cast(ext);
        return null;
    }

    @Override
    public Iterator<T> iterator() {
        // we need to intercept mutation, so for now don't allow Iterator.remove
        return new AdaptedIterator<ExtensionComponent<T>,T>(Iterators.readOnly(ensureLoaded().iterator())) {
            protected T adapt(ExtensionComponent<T> item) {
                return item.getInstance();
            }
        };
    }

    /**
     * Gets the same thing as the 'this' list represents, except as {@link ExtensionComponent}s.
     */
    public List<ExtensionComponent<T>> getComponents() {
        return Collections.unmodifiableList(ensureLoaded());
    }

    public T get(int index) {
        return ensureLoaded().get(index).getInstance();
    }

    public int size() {
        return ensureLoaded().size();
    }

    /**
     * Gets the read-only view of this {@link ExtensionList} where components are reversed.
     */
    public List<T> reverseView() {
        return new AbstractList<T>() {
            @Override
            public T get(int index) {
                return ExtensionList.this.get(size()-index-1);
            }

            @Override
            public int size() {
                return ExtensionList.this.size();
            }
        };
    }

    @Override
    public boolean remove(Object o) {
        try {
            return removeSync(o);
        } finally {
            // Fire only once extensions have been materialized; a removal from
            // legacyInstances before loading is picked up at first load anyway.
            if(extensions!=null) {
                fireOnChangeListeners();
            }
        }
    }

    @Override
    public boolean removeAll(Collection<?> c) {
        boolean removed = false;
        try {
            for (Object o : c) {
                removed |= removeSync(o);
            }
            return removed;
        } finally {
            if (extensions != null && removed) {
                fireOnChangeListeners();
            }
        }
    }

    // Removes o from both the legacy store and (if loaded) the discovered snapshot,
    // swapping in a freshly sorted copy of the snapshot.
    private synchronized boolean removeSync(Object o) {
        boolean removed = removeComponent(legacyInstances, o);
        if(extensions!=null) {
            List<ExtensionComponent<T>> r = new ArrayList<ExtensionComponent<T>>(extensions);
            removed |= removeComponent(r,o);
            extensions = sort(r);
        }
        return removed;
    }

    // Removes the first component whose wrapped instance equals t.
    // NOTE(review): the method-level <T> shadows the class type parameter of the same
    // name; the method is effectively independent of the class's T.
    private <T> boolean removeComponent(Collection<ExtensionComponent<T>> collection, Object t) {
        for (Iterator<ExtensionComponent<T>> itr = collection.iterator(); itr.hasNext();) {
            ExtensionComponent<T> c = itr.next();
            if (c.getInstance().equals(t)) {
                return collection.remove(c);
            }
        }
        return false;
    }

    @Override
    public final synchronized T remove(int index) {
        T t = get(index);
        remove(t);
        return t;
    }

    /**
     * Write access will put the instance into a legacy store.
     *
     * @deprecated since 2009-02-23.
     *      Prefer automatic registration.
     */
    @Override
    @Deprecated
    public boolean add(T t) {
        try {
            return addSync(t);
        } finally {
            if(extensions!=null) {
                fireOnChangeListeners();
            }
        }
    }

    private synchronized boolean addSync(T t) {
        legacyInstances.add(new ExtensionComponent<T>(t));
        // if we've already filled extensions, add it
        if(extensions!=null) {
            List<ExtensionComponent<T>> r = new ArrayList<ExtensionComponent<T>>(extensions);
            r.add(new ExtensionComponent<T>(t));
            extensions = sort(r);
        }
        return true;
    }

    @Override
    public void add(int index, T element) {
        // The index is ignored; the final position is determined by sort().
        add(element);
    }

    /**
     * Used to bind extension to URLs by their class names.
     *
     * @since 1.349
     */
    public T getDynamic(String className) {
        for (T t : this)
            if (t.getClass().getName().equals(className))
                return t;
        return null;
    }

    // Returns the discovered components, performing the one-time auto discovery
    // under getLoadLock() on first use (double-checked against the volatile field).
    private List<ExtensionComponent<T>> ensureLoaded() {
        if(extensions!=null)
            return extensions; // already loaded
        if (jenkins.getInitLevel().compareTo(InitMilestone.PLUGINS_PREPARED)<0)
            return legacyInstances; // can't perform the auto discovery until all plugins are loaded, so just make the legacy instances visible

        synchronized (getLoadLock()) {
            if(extensions==null) {
                List<ExtensionComponent<T>> r = load();
                r.addAll(legacyInstances);
                extensions = sort(r);
            }
            return extensions;
        }
    }

    /**
     * Chooses the object that locks the loading of the extension instances.
     */
    protected Object getLoadLock() {
        // A single Lock instance is shared across all ExtensionLists via the
        // Jenkins lookup table; see the Lock class javadoc for why.
        return jenkins.lookup.setIfNull(Lock.class,new Lock());
    }

    /**
     * Used during {@link Jenkins#refreshExtensions()} to add new components into existing {@link ExtensionList}s.
     * Do not call from anywhere else.
     */
    public void refresh(ExtensionComponentSet delta) {
        boolean fireOnChangeListeners = false;
        synchronized (getLoadLock()) {
            if (extensions==null)
                return;     // not yet loaded. when we load it, we'll load everything visible by then, so no work needed

            Collection<ExtensionComponent<T>> found = load(delta);
            if (!found.isEmpty()) {
                List<ExtensionComponent<T>> l = Lists.newArrayList(extensions);
                l.addAll(found);
                extensions = sort(l);
                fireOnChangeListeners = true;
            }
        }
        // Listeners are deliberately notified outside the load lock.
        if (fireOnChangeListeners) {
            fireOnChangeListeners();
        }
    }

    private void fireOnChangeListeners() {
        for (ExtensionListListener listener : listeners) {
            try {
                listener.onChange();
            } catch (Exception e) {
                // One misbehaving listener must not prevent the others from being notified.
                LOGGER.log(Level.SEVERE, "Error firing ExtensionListListener.onChange().", e);
            }
        }
    }

    /**
     * Loading an {@link ExtensionList} can result in a nested loading of another {@link ExtensionList}.
     * What that means is that we need a single lock that spans across all the {@link ExtensionList}s,
     * or else we can end up in a dead lock.
     */
    private static final class Lock {}

    /**
     * Loads all the extensions.
     */
    protected List<ExtensionComponent<T>> load() {
        if (LOGGER.isLoggable(Level.FINE))
            LOGGER.log(Level.FINE,"Loading ExtensionList: "+extensionType, new Throwable());

        return jenkins.getPluginManager().getPluginStrategy().findComponents(extensionType, hudson);
    }

    /**
     * Picks up extensions that we care from the given list.
     */
    protected Collection<ExtensionComponent<T>> load(ExtensionComponentSet delta) {
        return delta.find(extensionType);
    }

    /**
     * If the {@link ExtensionList} implementation requires sorting extensions,
     * override this method to do so.
     *
     * <p>
     * The implementation should copy a list, do a sort, and return the new instance.
     */
    protected List<ExtensionComponent<T>> sort(List<ExtensionComponent<T>> r) {
        r = new ArrayList<ExtensionComponent<T>>(r);
        Collections.sort(r);
        return r;
    }

    /**
     * @deprecated as of 1.416
     *      Use {@link #create(Jenkins, Class)}
     */
    @Deprecated
    public static <T> ExtensionList<T> create(Hudson hudson, Class<T> type) {
        return create((Jenkins)hudson,type);
    }

    public static <T> ExtensionList<T> create(Jenkins jenkins, Class<T> type) {
        // Extension points annotated with LegacyInstancesAreScopedToHudson get a
        // per-instance legacy store; all others share a VM-wide static store.
        if(type.getAnnotation(LegacyInstancesAreScopedToHudson.class)!=null)
            return new ExtensionList<T>(jenkins,type);
        else {
            return new ExtensionList<T>(jenkins,type,staticLegacyInstances.get(type));
        }
    }

    /**
     * Gets the extension list for a given type.
     * Normally calls {@link Jenkins#getExtensionList(Class)} but falls back to an empty list
     * in case {@link Jenkins#getInstanceOrNull()} is null.
     * Thus it is useful to call from {@code all()} methods which need to behave gracefully during startup or shutdown.
     * @param type the extension point type
     * @return some list
     * @since 1.572
     */
    public static @Nonnull <T> ExtensionList<T> lookup(Class<T> type) {
        Jenkins j = Jenkins.getInstanceOrNull();
        return j == null ? create((Jenkins) null, type) : j.getExtensionList(type);
    }

    /**
     * Places to store static-scope legacy instances.
     */
    private static final Memoizer<Class,CopyOnWriteArrayList> staticLegacyInstances = new Memoizer<Class,CopyOnWriteArrayList>() {
        public CopyOnWriteArrayList compute(Class key) {
            return new CopyOnWriteArrayList();
        }
    };

    /**
     * Exposed for the test harness to clear all legacy extension instances.
     */
    public static void clearLegacyInstances() {
        staticLegacyInstances.clear();
    }

    private static final Logger LOGGER = Logger.getLogger(ExtensionList.class.getName());
}
/* * Copyright (c) 2005-2010 Grameen Foundation USA * All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. * * See also http://www.apache.org/licenses/LICENSE-2.0.html for an * explanation of the license and how it is applied. */ package org.mifos.customers.personnel.struts.action; import java.util.ArrayList; import java.util.Date; import java.util.List; import junit.framework.Assert; import org.mifos.application.master.business.CustomFieldType; import org.mifos.application.master.business.CustomFieldView; import org.mifos.application.util.helpers.ActionForwards; import org.mifos.application.util.helpers.Methods; import org.mifos.customers.office.business.OfficeBO; import org.mifos.customers.personnel.business.PersonnelBO; import org.mifos.customers.personnel.business.service.PersonnelBusinessService; import org.mifos.customers.personnel.util.helpers.PersonnelConstants; import org.mifos.customers.personnel.util.helpers.PersonnelLevel; import org.mifos.framework.MifosMockStrutsTestCase; import org.mifos.framework.TestUtils; import org.mifos.framework.business.service.ServiceFactory; import org.mifos.framework.business.util.Address; import org.mifos.framework.business.util.Name; import org.mifos.framework.components.batchjobs.MifosTask; import org.mifos.framework.components.fieldConfiguration.util.helpers.FieldConfig; import org.mifos.framework.hibernate.helper.StaticHibernateUtil; import org.mifos.framework.struts.plugin.helper.EntityMasterData; import 
// (continuation of an import statement that begins on the previous line of the file)
org.mifos.framework.util.helpers.BusinessServiceName;
import org.mifos.framework.util.helpers.Constants;
import org.mifos.framework.util.helpers.SessionUtils;
import org.mifos.framework.util.helpers.TestObjectFactory;
import org.mifos.security.rolesandpermission.business.RoleBO;
import org.mifos.security.util.ActivityContext;
import org.mifos.security.util.UserContext;

/**
 * Mock-Struts tests for {@code PersonnelSettingsAction} ("/yourSettings.do"): loading,
 * editing, validating and updating the logged-in user's own personnel settings.
 *
 * Each test drives the action through the Struts flow-key mechanism: the flow key created
 * in {@link #setUp()} is passed along on every request via {@code Constants.CURRENTFLOWKEY},
 * and subsequent requests in a multi-step test re-read the (possibly refreshed) key from
 * the previous request's attributes.
 */
public class PersonnelSettingsActionStrutsTest extends MifosMockStrutsTestCase {

    public PersonnelSettingsActionStrutsTest() throws Exception {
        super();
    }

    // Struts flow key for the current conversation; created fresh per test in setUp().
    private String flowKey;
    // Logged-in user simulated for the session.
    private UserContext userContext;
    // Personnel record created by createPersonnel(); cleaned up in tearDown().
    PersonnelBO personnel;

    @Override
    protected void setUp() throws Exception {
        super.setUp();
        // Simulate a logged-in user and the security ActivityContext the action expects.
        userContext = TestUtils.makeUser();
        request.getSession().setAttribute(Constants.USERCONTEXT, userContext);
        addRequestParameter("recordLoanOfficerId", "1");
        addRequestParameter("recordOfficeId", "1");
        ActivityContext ac = new ActivityContext((short) 0, userContext.getBranchId().shortValue(), userContext.getId()
                .shortValue());
        request.getSession(false).setAttribute("ActivityContext", ac);
        // Start a new Struts flow for this action; the key is echoed on every request.
        flowKey = createFlow(request, PersonnelSettingsAction.class);
        // Initialise entity metadata and mandatory-field configuration used by validation.
        EntityMasterData.getInstance().init();
        FieldConfig fieldConfig = FieldConfig.getInstance();
        fieldConfig.init();
        getActionServlet().getServletContext().setAttribute(Constants.FIELD_CONFIGURATION,
                fieldConfig.getEntityMandatoryFieldMap());
        request.setAttribute(Constants.CURRENTFLOWKEY, flowKey);
        addRequestParameter(Constants.CURRENTFLOWKEY, flowKey);
        addRequestParameter("input", "CreateUser");
        // Pre-populate session attributes (office, role lists) the settings pages read.
        PersonnelBusinessService personnelBusinessService = new PersonnelBusinessService();
        SessionUtils.setAttribute(PersonnelConstants.OFFICE, personnelBusinessService
                .getOffice(TestObjectFactory.HEAD_OFFICE), request);
        SessionUtils
                .setCollectionAttribute(PersonnelConstants.ROLES_LIST, personnelBusinessService.getRoles(), request);
        SessionUtils.setCollectionAttribute(PersonnelConstants.ROLEMASTERLIST, personnelBusinessService.getRoles(),
                request);
        personnelBusinessService = null;
    }

    @Override
    protected void tearDown() throws Exception {
        userContext = null;
        // Remove the personnel record created by the test, then release the Hibernate session.
        TestObjectFactory.cleanUp(personnel);
        StaticHibernateUtil.closeSession();
        super.tearDown();
    }

    // "get" loads the settings page and populates the master data lists.
    public void testGet() throws Exception {
        createPersonnel(getBranchOffice(), PersonnelLevel.LOAN_OFFICER);
        setRequestPathInfo("/yourSettings.do");
        addRequestParameter("method", Methods.get.toString());
        addRequestParameter("globalPersonnelNum", personnel.getGlobalPersonnelNum());
        performNoErrors();
        verifyMasterData();
        verifyForward(ActionForwards.get_success.toString());
    }

    // "manage" (edit) must follow a successful "get" within the same flow.
    public void testManage() throws Exception {
        createPersonnel(getBranchOffice(), PersonnelLevel.LOAN_OFFICER);
        setRequestPathInfo("/yourSettings.do");
        addRequestParameter("method", Methods.get.toString());
        addRequestParameter("globalPersonnelNum", personnel.getGlobalPersonnelNum());
        actionPerform();
        setRequestPathInfo("/yourSettings.do");
        addRequestParameter("method", Methods.manage.toString());
        addRequestParameter(Constants.CURRENTFLOWKEY, (String) request.getAttribute(Constants.CURRENTFLOWKEY));
        performNoErrors();
        verifyForward(ActionForwards.manage_success.toString());
    }

    // Preview with missing first/last name and gender, plus an invalid email ("1"),
    // must produce one validation error per field and forward back to the input page.
    public void testFailurePreview() throws Exception {
        createPersonnel(getBranchOffice(), PersonnelLevel.LOAN_OFFICER);
        setRequestPathInfo("/yourSettings.do");
        addRequestParameter("method", Methods.preview.toString());
        addRequestParameter("middleName", personnel.getPersonnelDetails().getName().getMiddleName());
        addRequestParameter("secondLastName", personnel.getPersonnelDetails().getName().getSecondLastName());
        addRequestParameter("maritalStatus", personnel.getPersonnelDetails().getMaritalStatus().toString());
        addRequestParameter("emailId", "1");
        addRequestParameter(Constants.CURRENTFLOWKEY, (String) request.getAttribute(Constants.CURRENTFLOWKEY));
        actionPerform();
        Assert.assertEquals(1, getErrorSize("firstName"));
        Assert.assertEquals(1, getErrorSize("lastName"));
        Assert.assertEquals(1, getErrorSize("gender"));
        Assert.assertEquals(1, getErrorSize(PersonnelConstants.ERROR_VALID_EMAIL));
        verifyInputForward();
    }

    // Omitting only the first name yields exactly one "firstName" error.
    public void testFailurePreviewNoFirstName() throws Exception {
        createPersonnel(getBranchOffice(), PersonnelLevel.LOAN_OFFICER);
        setRequestPathInfo("/yourSettings.do");
        addRequestParameter("method", Methods.preview.toString());
        addRequestParameter("middleName", personnel.getPersonnelDetails().getName().getMiddleName());
        addRequestParameter("secondLastName", personnel.getPersonnelDetails().getName().getSecondLastName());
        addRequestParameter("lastName", personnel.getPersonnelDetails().getName().getLastName());
        addRequestParameter("gender", personnel.getPersonnelDetails().getGender().toString());
        addRequestParameter("maritalStatus", personnel.getPersonnelDetails().getMaritalStatus().toString());
        addRequestParameter("emailId", personnel.getEmailId());
        addRequestParameter(Constants.CURRENTFLOWKEY, (String) request.getAttribute(Constants.CURRENTFLOWKEY));
        actionPerform();
        Assert.assertEquals(1, getErrorSize("firstName"));
        verifyInputForward();
    }

    // A first name longer than the allowed maximum is rejected with a "firstName" error.
    public void testFailurePreviewFirstNameLengthExceedsLimit() throws Exception {
        createPersonnel(getBranchOffice(), PersonnelLevel.LOAN_OFFICER);
        setRequestPathInfo("/yourSettings.do");
        addRequestParameter("method", Methods.preview.toString());
        addRequestParameter("firstName", "Testing for firstName length exceeding by 100 characters"
                + "Testing for firstName length exceeding by 100 characters"
                + "Testing for firstName length exceeding by 100 characters"
                + "Testing for firstName length exceeding by 100 characters"
                + "Testing for firstName length exceeding by 100 characters "
                + "Testing for firstName length exceeding by 100 characters "
                + "Testing for firstName length exceeding by 100 characters");
        addRequestParameter("middleName", personnel.getPersonnelDetails().getName().getMiddleName());
        addRequestParameter("secondLastName", personnel.getPersonnelDetails().getName().getSecondLastName());
        addRequestParameter("lastName", personnel.getPersonnelDetails().getName().getLastName());
        addRequestParameter("gender", personnel.getPersonnelDetails().getGender().toString());
        addRequestParameter("maritalStatus", personnel.getPersonnelDetails().getMaritalStatus().toString());
        addRequestParameter("emailId", personnel.getEmailId());
        addRequestParameter(Constants.CURRENTFLOWKEY, (String) request.getAttribute(Constants.CURRENTFLOWKEY));
        actionPerform();
        Assert.assertEquals(1, getErrorSize("firstName"));
        verifyInputForward();
    }

    // A last name longer than the allowed maximum is rejected with a "lastName" error.
    public void testFailurePreviewLastNameLengthExceedsLimit() throws Exception {
        createPersonnel(getBranchOffice(), PersonnelLevel.LOAN_OFFICER);
        setRequestPathInfo("/yourSettings.do");
        addRequestParameter("method", Methods.preview.toString());
        addRequestParameter("lastName", "Testing for lastName length exceeding by 100 characters"
                + "Testing for lastName length exceeding by 100 characters"
                + "Testing for lastName length exceeding by 100 characters"
                + "Testing for lastName length exceeding by 100 characters"
                + "Testing for lastName length exceeding by 100 characters "
                + "Testing for lastName length exceeding by 100 characters "
                + "Testing for lastName length exceeding by 100 characters");
        addRequestParameter("middleName", personnel.getPersonnelDetails().getName().getMiddleName());
        addRequestParameter("secondLastName", personnel.getPersonnelDetails().getName().getSecondLastName());
        addRequestParameter("firstName", personnel.getPersonnelDetails().getName().getFirstName());
        addRequestParameter("gender", personnel.getPersonnelDetails().getGender().toString());
        addRequestParameter("maritalStatus", personnel.getPersonnelDetails().getMaritalStatus().toString());
        addRequestParameter("emailId", personnel.getEmailId());
        addRequestParameter(Constants.CURRENTFLOWKEY, (String) request.getAttribute(Constants.CURRENTFLOWKEY));
        actionPerform();
        Assert.assertEquals(1, getErrorSize("lastName"));
        verifyInputForward();
    }

    // Omitting only the last name yields exactly one "lastName" error.
    public void testFailurePreviewNoLastName() throws Exception {
        createPersonnel(getBranchOffice(), PersonnelLevel.LOAN_OFFICER);
        setRequestPathInfo("/yourSettings.do");
        addRequestParameter("method", Methods.preview.toString());
        addRequestParameter("firstName", personnel.getPersonnelDetails().getName().getFirstName());
        addRequestParameter("middleName", personnel.getPersonnelDetails().getName().getMiddleName());
        addRequestParameter("secondLastName", personnel.getPersonnelDetails().getName().getSecondLastName());
        addRequestParameter("gender", personnel.getPersonnelDetails().getGender().toString());
        addRequestParameter("maritalStatus", personnel.getPersonnelDetails().getMaritalStatus().toString());
        addRequestParameter("emailId", personnel.getEmailId());
        addRequestParameter(Constants.CURRENTFLOWKEY, (String) request.getAttribute(Constants.CURRENTFLOWKEY));
        actionPerform();
        Assert.assertEquals(1, getErrorSize("lastName"));
        verifyInputForward();
    }

    // Omitting only the gender yields exactly one "gender" error.
    public void testFailurePreviewNoGenderSelected() throws Exception {
        createPersonnel(getBranchOffice(), PersonnelLevel.LOAN_OFFICER);
        setRequestPathInfo("/yourSettings.do");
        addRequestParameter("method", Methods.preview.toString());
        addRequestParameter("firstName", personnel.getPersonnelDetails().getName().getFirstName());
        addRequestParameter("middleName", personnel.getPersonnelDetails().getName().getMiddleName());
        addRequestParameter("secondLastName", personnel.getPersonnelDetails().getName().getSecondLastName());
        addRequestParameter("lastName", personnel.getPersonnelDetails().getName().getLastName());
        addRequestParameter("maritalStatus", personnel.getPersonnelDetails().getMaritalStatus().toString());
        addRequestParameter("emailId", personnel.getEmailId());
        addRequestParameter(Constants.CURRENTFLOWKEY, (String) request.getAttribute(Constants.CURRENTFLOWKEY));
        actionPerform();
        Assert.assertEquals(1, getErrorSize("gender"));
        verifyInputForward();
    }

    // Combined first+last name (the display name) over the 200-character limit is rejected,
    // even though each individual field passes its own length check.
    // Full flow: get (as the personnel being edited) -> manage (as system user) -> preview.
    public void testFailurePreviewDisplayLengthExceedsMaxLimit() throws Exception {
        createPersonnel(getBranchOffice(), PersonnelLevel.LOAN_OFFICER);
        userContext.setId(personnel.getPersonnelId());
        request.getSession().setAttribute(Constants.USERCONTEXT, userContext);
        setRequestPathInfo("/yourSettings.do");
        addRequestParameter("method", Methods.get.toString());
        addRequestParameter("globalPersonnelNum", personnel.getGlobalPersonnelNum());
        actionPerform();
        userContext.setId(PersonnelConstants.SYSTEM_USER);
        setRequestPathInfo("/yourSettings.do");
        addRequestParameter("method", Methods.manage.toString());
        addRequestParameter(Constants.CURRENTFLOWKEY, (String) request.getAttribute(Constants.CURRENTFLOWKEY));
        actionPerform();
        setRequestPathInfo("/yourSettings.do");
        addRequestParameter("method", Methods.preview.toString());
        addRequestParameter("firstName",
                "Testing for displayName length exceeding by 200 characters.It should be less than 200");
        addRequestParameter("middleName", "new middle name");
        addRequestParameter("secondLastName", "new second Last name");
        addRequestParameter("lastName",
                "Testing for displayName length exceeding by 200 characters.It should be less than 200");
        addRequestParameter("gender", "2");
        addRequestParameter("maritalStatus", "2");
        addRequestParameter("emailId", "XYZ@aditi.com");
        addRequestParameter("preferredLocale", "1");
        addRequestParameter(Constants.CURRENTFLOWKEY, (String) request.getAttribute(Constants.CURRENTFLOWKEY));
        actionPerform();
        Assert.assertEquals(1, getErrorSize("displayName"));
        verifyInputForward();
    }

    // Preview with a fully valid form forwards to preview_success.
    public void testSuccessPreview() throws Exception {
        createPersonnel(getBranchOffice(), PersonnelLevel.LOAN_OFFICER);
        setRequestPathInfo("/yourSettings.do");
        addRequestParameter("method", Methods.preview.toString());
        addRequestParameter("firstName", personnel.getPersonnelDetails().getName().getFirstName());
        addRequestParameter("middleName", personnel.getPersonnelDetails().getName().getMiddleName());
        addRequestParameter("secondLastName", personnel.getPersonnelDetails().getName().getSecondLastName());
        addRequestParameter("lastName", personnel.getPersonnelDetails().getName().getLastName());
        addRequestParameter("gender", personnel.getPersonnelDetails().getGender().toString());
        addRequestParameter("maritalStatus", personnel.getPersonnelDetails().getMaritalStatus().toString());
        addRequestParameter("emailId", personnel.getEmailId());
        addRequestParameter(Constants.CURRENTFLOWKEY, (String) request.getAttribute(Constants.CURRENTFLOWKEY));
        performNoErrors();
        verifyForward(ActionForwards.preview_success.toString());
    }

    // "previous" (back from preview) forwards to previous_success.
    public void testPrevious() throws Exception {
        setRequestPathInfo("/yourSettings.do");
        addRequestParameter("method", Methods.previous.toString());
        addRequestParameter(Constants.CURRENTFLOWKEY, (String) request.getAttribute(Constants.CURRENTFLOWKEY));
        performNoErrors();
        verifyForward(ActionForwards.previous_success.toString());
    }

    // Full get -> manage -> preview -> update cycle: the update must persist every edited
    // field, clear the flow key, and forward to updateSettings_success.
    public void testSuccessUpdate() throws Exception {
        createPersonnel(getBranchOffice(), PersonnelLevel.LOAN_OFFICER);
        userContext.setId(personnel.getPersonnelId());
        // NOTE(review): this test stores the user context under Constants.USER_CONTEXT_KEY,
        // while setUp() and testFailurePreviewDisplayLengthExceedsMaxLimit use
        // Constants.USERCONTEXT — verify whether both keys are intended or this is a drift.
        request.getSession().setAttribute(Constants.USER_CONTEXT_KEY, userContext);
        setRequestPathInfo("/yourSettings.do");
        addRequestParameter("method", Methods.get.toString());
        addRequestParameter("globalPersonnelNum", personnel.getGlobalPersonnelNum());
        actionPerform();
        userContext.setId(PersonnelConstants.SYSTEM_USER);
        setRequestPathInfo("/yourSettings.do");
        addRequestParameter("method", Methods.manage.toString());
        addRequestParameter(Constants.CURRENTFLOWKEY, (String) request.getAttribute(Constants.CURRENTFLOWKEY));
        actionPerform();
        setRequestPathInfo("/yourSettings.do");
        addRequestParameter("method", Methods.preview.toString());
        addRequestParameter("firstName", "new first name");
        addRequestParameter("middleName", "new middle name");
        addRequestParameter("secondLastName", "new second Last name");
        addRequestParameter("lastName", "new last name");
        addRequestParameter("gender", "2");
        addRequestParameter("maritalStatus", "2");
        addRequestParameter("emailId", "XYZ@aditi.com");
        addRequestParameter("preferredLocale", "1");
        addRequestParameter(Constants.CURRENTFLOWKEY, (String) request.getAttribute(Constants.CURRENTFLOWKEY));
        actionPerform();
        setRequestPathInfo("/yourSettings.do");
        addRequestParameter("method", Methods.update.toString());
        addRequestParameter(Constants.CURRENTFLOWKEY, (String) request.getAttribute(Constants.CURRENTFLOWKEY));
        performNoErrors();
        verifyForward(ActionForwards.updateSettings_success.toString());
        // The flow ends on update, so the flow key must be gone from the request.
        Assert.assertNull(request.getAttribute(Constants.CURRENTFLOWKEY));
        // Re-load the entity from Hibernate and verify every field was persisted.
        personnel = (PersonnelBO) StaticHibernateUtil.getSessionTL().get(PersonnelBO.class, personnel.getPersonnelId());
        Assert.assertEquals("new first name", personnel.getPersonnelDetails().getName().getFirstName());
        Assert.assertEquals("new middle name", personnel.getPersonnelDetails().getName().getMiddleName());
        Assert.assertEquals("new second Last name", personnel.getPersonnelDetails().getName().getSecondLastName());
        Assert.assertEquals("new last name", personnel.getPersonnelDetails().getName().getLastName());
        Assert.assertEquals("XYZ@aditi.com", personnel.getEmailId());
        Assert.assertEquals(2, personnel.getPersonnelDetails().getGender().intValue());
        Assert.assertEquals(1, personnel.getPreferredLocale().getLocaleId().intValue());
        Assert.assertEquals(2, personnel.getPersonnelDetails().getMaritalStatus().intValue());
    }

    // Loading the change-password page forwards to loadChangePassword_success.
    public void testLoadChangePassword() throws Exception {
        createPersonnel(getBranchOffice(), PersonnelLevel.LOAN_OFFICER);
        setRequestPathInfo("/yourSettings.do");
        addRequestParameter("method", Methods.loadChangePassword.toString());
        performNoErrors();
        verifyForward(ActionForwards.loadChangePassword_success.toString());
    }

    // While no batch job is running, "get" behaves normally.
    public void testGet_batchJobNotRunning() throws Exception {
        Assert.assertEquals(false, MifosTask.isBatchJobRunning());
        createPersonnel(getBranchOffice(), PersonnelLevel.LOAN_OFFICER);
        setRequestPathInfo("/yourSettings.do");
        addRequestParameter("method", Methods.get.toString());
        addRequestParameter("globalPersonnelNum", personnel.getGlobalPersonnelNum());
        performNoErrors();
        verifyMasterData();
        verifyForward(ActionForwards.get_success.toString());
    }

    // While a batch job is running, "get" is redirected to the main page.
    public void testGet_batchJobRunning() throws Exception {
        MifosTask.batchJobStarted();
        Assert.assertEquals(true, MifosTask.isBatchJobRunning());
        createPersonnel(getBranchOffice(), PersonnelLevel.LOAN_OFFICER);
        setRequestPathInfo("/yourSettings.do");
        addRequestParameter("method", Methods.get.toString());
        addRequestParameter("globalPersonnelNum", personnel.getGlobalPersonnelNum());
        actionPerform();
        verifyForward(ActionForwards.load_main_page.toString());
        // Restore global state so later tests are unaffected.
        MifosTask.batchJobFinished();
    }

    // Asserts the master-data drop-down lists were placed in the session by "get".
    private void verifyMasterData() throws Exception {
        Assert.assertNotNull(SessionUtils.getAttribute(PersonnelConstants.GENDER_LIST, request));
        Assert.assertNotNull(SessionUtils.getAttribute(PersonnelConstants.MARITAL_STATUS_LIST, request));
        Assert.assertNotNull(SessionUtils.getAttribute(PersonnelConstants.LANGUAGE_LIST, request));
    }

    // Creates and persists a personnel fixture in the given office, commits, and re-loads
    // it in a fresh Hibernate session so the test sees detached-then-reattached state.
    private PersonnelBO createPersonnel(OfficeBO office, PersonnelLevel personnelLevel) throws Exception {
        List<CustomFieldView> customFieldView = new ArrayList<CustomFieldView>();
        customFieldView.add(new CustomFieldView(Short.valueOf("9"), "123456", CustomFieldType.NUMERIC));
        Address address = new Address("abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd");
        Date date = new Date();
        personnel = new PersonnelBO(personnelLevel, office, Integer.valueOf("1"), Short.valueOf("1"), "ABCD", "XYZ",
                "xyz@yahoo.com", getRoles(), customFieldView, new Name("XYZ", null, null, "ABC"), "111111", date,
                Integer.valueOf("1"), Integer.valueOf("1"), date, date, address, userContext.getId());
        personnel.save();
        StaticHibernateUtil.commitTransaction();
        StaticHibernateUtil.closeSession();
        personnel = (PersonnelBO) StaticHibernateUtil.getSessionTL().get(PersonnelBO.class, personnel.getPersonnelId());
        return personnel;
    }

    // Fetches the role list via the business-service factory (same source setUp() uses).
    public List<RoleBO> getRoles() throws Exception {
        return ((PersonnelBusinessService) ServiceFactory.getInstance().getBusinessService(
                BusinessServiceName.Personnel)).getRoles();
    }

    // Despite the name this returns the head office, which the fixtures are attached to.
    private OfficeBO getBranchOffice() {
        return TestObjectFactory.getOffice(TestObjectFactory.HEAD_OFFICE);
    }
}
package vishnu.Indukuri.TextLater; import greendroid.app.GDActivity; import greendroid.graphics.drawable.ActionBarDrawable; import greendroid.widget.ActionBar; import greendroid.widget.ActionBar.Type; import greendroid.widget.ActionBarItem; import greendroid.widget.NormalActionBarItem; import greendroid.widget.PagedView; import java.io.BufferedReader; import java.io.File; import java.io.IOException; import java.io.InputStreamReader; import java.net.SocketException; import java.util.Calendar; import java.util.GregorianCalendar; import org.apache.http.HttpEntity; import org.apache.http.HttpResponse; import org.apache.http.client.HttpClient; import org.apache.http.client.methods.HttpPost; import org.apache.http.entity.FileEntity; import org.apache.http.impl.client.DefaultHttpClient; import org.simpleframework.xml.Serializer; import org.simpleframework.xml.core.Persister; import android.app.AlarmManager; import android.app.AlertDialog; import android.app.Dialog; import android.app.PendingIntent; import android.app.PendingIntent.CanceledException; import android.content.ContentResolver; import android.content.Context; import android.content.DialogInterface; import android.content.DialogInterface.OnClickListener; import android.content.Intent; import android.content.SharedPreferences; import android.database.Cursor; import android.net.Uri; import android.os.AsyncTask; import android.os.Bundle; import android.os.Handler; import android.os.Message; import android.os.RemoteException; import android.provider.BaseColumns; import android.provider.ContactsContract.CommonDataKinds.Phone; import android.provider.ContactsContract.Contacts; import android.telephony.TelephonyManager; import android.util.Log; import android.view.View; import android.view.Window; import android.widget.DatePicker; import android.widget.EditText; import android.widget.TimePicker; import android.widget.Toast; public class Home extends GDActivity implements Runnable{ public static int TOKENS ; // NEED TO 
KEEP THIS SAFE. MAKE SURE NOBODY CAN MESS WITH THIS. public static String packageName ; static Context context ; Intent inte ; // Intent for Billing Service PendingIntent text ; PagedView pagedView ; EditText message ; boolean one = true ; EditText phoneNumber ; TimePicker TP ; DatePicker DP ; String x ; ActionBar mActionBar ; AlarmManager manager ; // ArrayList<Call> calls ; // ProgressDialog m_ProgressDialog ; // ListView currentData ; @Override public void onCreate(Bundle savedInstanceState){ super.onCreate(savedInstanceState) ; requestWindowFeature(Window.FEATURE_NO_TITLE) ; // Just looks nicer setActionBarContentView(R.layout.scheduler) ; context = this ; packageName = getPackageName() ; TP = (TimePicker) findViewById(R.id.TimePicker01) ; phoneNumber = (EditText) findViewById(R.id.EditText01) ; DP = (DatePicker) findViewById(R.id.DatePicker01) ; message = (EditText) findViewById(R.id.message) ; // setting up UI SharedPreferences settings = getSharedPreferences("TOKENS", 0); TOKENS = settings.getInt("tokens", -5); if(TOKENS == -5){ // the first time the user uses the application, EULA shows up showDialog(0) ; TOKENS = 3 ; } // Setting up Scheduled // calls = new ArrayList<Call>() ; // Setting up the PagedView Home page mActionBar = getActionBar() ; mActionBar.setType(Type.Empty) ; mActionBar.setTitle("Your Tokens: "+TOKENS) ; mActionBar.setBackgroundColor(0x00000000) ; addActionBarItem(mActionBar.newActionBarItem(NormalActionBarItem.class).setDrawable(new ActionBarDrawable(this,R.drawable.cart))) ; // pagedView = (PagedView) findViewById(R.id.paged_view); // pagedView.setOnPageChangeListener(mOnPagedViewChangedListener); // pagedView.setAdapter(new PhotoSwipeAdapter()); // // pagedView.setVerticalFadingEdgeEnabled(true) ; } @Override public void onResume() { super.onResume(); inte = new Intent(this, BillingService.class) ; startService(inte) ; } @Override public void onPause(){ super.onPause() ; SharedPreferences settings = getSharedPreferences("TOKENS", 0); 
SharedPreferences.Editor editor = settings.edit(); editor.putInt("tokens", TOKENS); editor.commit(); } @Override public void onStop(){ super.onStop() ; stopService(inte) ; } @Override protected Dialog onCreateDialog(int i){ AlertDialog.Builder builder = new AlertDialog.Builder(this); builder.setTitle("Agreement").setCancelable(false).setNeutralButton("Ok", new OnClickListener(){ @Override public void onClick(DialogInterface dialog, int which) { dismissDialog(0) ; } }).setMessage("By using this application, you agree to the following: I am not responsible for any indirect, special, " + "incidental, or consequential damages of any character " + "including, without limitation to, damages for loss of goodwill, work stoppage, computer failure or malfunction, " + "or any and all other commercial damages or losses that your use of this application may cause. This app is for " + "recreational and entertainment use. Do not " + "depend upon it to perform any life saving tasks. The user is solely responsible for his or her use of this application." + "this agreement is in addition to any other agreements the user may have agreed to while using the android market, " + "not in replacement. All sales are final. No refunds.") ; return builder.create() ; } public static void showText(){ Toast.makeText(context, "Tokens delivered. Refresh to see them.", 200).show() ; } // onClick methods //------------------------- @Override public boolean onHandleActionBarItemClick(ActionBarItem item, int position) { try { Bundle resp = BillingService.purchase() ; if((resp.getInt("RESPONSE_CODE")) != 0){ Toast.makeText(Home.this, "Cannot reach Android Market. 
Sure you have the latest version?", 200).show() ; return true; } ((PendingIntent) resp.get("PURCHASE_INTENT" )).send() ; } catch (RemoteException e) { e.printStackTrace() ; Toast.makeText(Home.this, "I'm sorry but the purchase could not be started at this time.", 200).show() ; } catch (CanceledException e) { e.printStackTrace() ; Toast.makeText(Home.this, "Fatal Error", 200).show() ; // should never happen. } return true ; } // // PagedView // //----------------------------- // private OnPagedViewChangeListener mOnPagedViewChangedListener = new OnPagedViewChangeListener() { // @Override // public void onStopTracking(PagedView pagedView) { } // @Override // public void onStartTracking(PagedView pagedView) { } // @Override // public void onPageChanged(PagedView pagedView, int previousPage, int newPage) { } // }; // // private class PhotoSwipeAdapter extends PagedAdapter { // @Override // public int getCount() { // return 2; // } // @Override // public Object getItem(int position) { // return null; // } // @Override // public long getItemId(int position) { // return 0; // } // // public View getView(int position, View convertView, ViewGroup parent) { // if (position == 0) { // Toast.makeText(context, "Phone numbers must only have numbers. 
No spaces, dashes etc.", 500).show() ; // return getLayoutInflater().inflate(R.layout.scheduler, parent, false); // } // else if(position == 1){ // m_ProgressDialog = ProgressDialog.show(Home.this, "Please wait...", "Retrieving data ...", true); // calls = new ArrayList<Call>() ; // // Thread mThread = new Thread(new DataCheckThread()); // mThread.start(); // // return getLayoutInflater().inflate(R.layout.scheduled, parent, false) ; // } // return convertView; // } // } // Scheduler stuff //--------------------- public void scheduleCall(View view) { if(checkForMistakes()){ Toast.makeText(this, "Connecting to server...", 1000).show() ; Thread thread = new Thread(this) ; thread.start(); } else{ Toast.makeText(this, x, 200).show() ; } } public void scheduleText(View view){ if(checkForTMistakes()){ Intent textintent = new Intent(this, BillingReceiver.class) ; textintent.putExtra("vishnu.Indukuri.TextLater.message", message.getText().toString()) ; textintent.putExtra("vishnu.Indukuri.TextLater.number", phoneNumber.getText().toString()) ; textintent.putExtra("vishnu.Indukuri.TextLater.secureNo", 133459874602763999L) ; text = PendingIntent.getBroadcast(this, 001, textintent, 0) ; manager = (AlarmManager) getSystemService(Context.ALARM_SERVICE) ; manager.set(AlarmManager.RTC_WAKEUP, new GregorianCalendar(DP.getYear(),DP.getMonth(),DP.getDayOfMonth(), TP.getCurrentHour(),TP.getCurrentMinute()).getTimeInMillis(), text); Toast.makeText(context, "Message Scheduled", 200).show() ; } else{ Toast.makeText(this, x, 200).show() ; } } private boolean checkForMistakes() { if(Home.TOKENS <= 0){ x = "Not Enough tokens" ; return false ; } if(message.length() > 500 || message.length() < 1){ x = "Message must not empty or longer than 500 characters" ; return false ; } return checkForTMistakes() ; } private boolean checkForTMistakes(){ GregorianCalendar cal = new GregorianCalendar() ; if(DP.getMonth() != cal.get(Calendar.MONTH)){ x = "Call must be this month" ; return false ; } 
if(DP.getYear() != cal.get(Calendar.YEAR)) { x = "Call must be this year" ; return false ; } return true ; } //////////////////////// //// DATBASE OPERATIONS NO TOUCHING!!!! ///////////////////////// public void contacts(View view){ Intent intent = new Intent(Intent.ACTION_PICK, Contacts.CONTENT_URI); startActivityForResult(intent, 100) ; } @Override protected void onActivityResult(int requestCode, int resultCode, Intent data) { super.onActivityResult(requestCode, resultCode, data); if(requestCode == 100 && data != null){ // check that i am in fact the one who called the activity picker loadContactInfo(data.getData()) ; } } private void loadContactInfo(Uri contactUri) { AsyncTask<Uri, Void,String> task = new AsyncTask<Uri, Void, String>() { // a new thread for database operations @Override protected String doInBackground(Uri... uris) { return loadContact(getContentResolver(),uris[0]); } @Override protected void onPostExecute(String result) { phoneNumber.setText(result) ; }}; task.execute(contactUri); } public String loadContact(ContentResolver contentResolver, Uri contactUri) { long contactId = -1 ; Cursor cursor = contentResolver.query(contactUri,new String[]{BaseColumns._ID, Contacts.DISPLAY_NAME}, null, null, null); try { if (cursor.moveToFirst()) { contactId = cursor.getLong(0); } } finally { cursor.close(); } cursor = contentResolver.query(Phone.CONTENT_URI,new String[]{Phone.NUMBER},Phone.CONTACT_ID + "=" + contactId, null, Phone.IS_SUPER_PRIMARY + " DESC"); String[] number = new String[10] ; try { if (cursor.moveToFirst()) { number = cursor.getString(0).split("-") ; for(int i=1; number[i] != null; i++){ number[0] += number[i] ; } // getting rid of any "-" in the phone number } } catch(Exception e){} finally { cursor.close(); } return number[0]; } /////////////////////////////// /// END OF DATABASE OPERATIONS //////////////////////////////// // Contact server and schedule call //------------------------- public void run() { String id = ((TelephonyManager) 
getApplicationContext().getSystemService(Context.TELEPHONY_SERVICE)).getDeviceId() ; Call call = new Call() ; Serializer serializer = new Persister(); call.setDate(new GregorianCalendar(DP.getYear(),DP.getMonth(),DP.getDayOfMonth(),TP.getCurrentHour(),TP.getCurrentMinute()).getTimeInMillis()) ; call.setNumber(phoneNumber.getText().toString(), id) ; call.setActId(id+call.getDate()+call.getNumber()) ; String tel = ((TelephonyManager) getSystemService(Context.TELEPHONY_SERVICE)).getLine1Number() ; Log.e("Yowl",tel); char[] array = tel.toCharArray() ; String arrayspaces = "" ; for(int i=0;i < array.length;i++){ arrayspaces += array[i] ; arrayspaces += " " ; } call.setRecording("Yowl brings you this message from "+arrayspaces+", , , , , , , , , , , , , , , , , , , ,"+message.getText().toString()) ; File xml = new File(getFilesDir().getAbsoluteFile()+"/example.xml"); try { serializer.write(call, xml); } catch (Exception e) {e.printStackTrace(); } try{ HttpClient httpclient = new DefaultHttpClient(); HttpPost httppost = new HttpPost("http://vishnui.appspot.com/texthimlater"); FileEntity fe = new FileEntity(xml, "text/xml") ; httppost.setEntity(fe); x = "Talking to server..." ; mHandler.sendEmptyMessage(0) ; HttpResponse response = httpclient.execute(httppost); HttpEntity ent = response.getEntity() ; BufferedReader br = new BufferedReader(new InputStreamReader(ent.getContent())) ; x = br.readLine() ; mHandler.sendEmptyMessage(0) ; br.close() ; if(x.equals("SUCCESS")){ TOKENS-- ; } }catch (SocketException e) {e.printStackTrace(); x ="Network error. Please, try again." ; mHandler.sendEmptyMessage(0) ; }catch(IOException e) { e.printStackTrace() ; x ="Malformed HTTP response. Please, try again." 
; mHandler.sendEmptyMessage(0) ; }catch(Exception e){Log.e("YOWL",e.toString()) ;} } // A handler to give messages to the UI thread private Handler mHandler = new Handler(){ @Override public void handleMessage(Message msg){ Toast.makeText(context, x, 200).show() ; mActionBar.setTitle("Your Tokens: "+TOKENS) ; } } ; // // Setting Scheduled // //------------------------- // private class DataCheckThread implements Runnable{ // @Override // public void run() { // try{ // HttpClient httpclient = new DefaultHttpClient(); // HttpPost httpost = new HttpPost("http://vishnui.appspot.com/datacheck") ; // StringEntity se = new StringEntity(((TelephonyManager) getApplicationContext().getSystemService(Context.TELEPHONY_SERVICE)).getDeviceId()) ; // httpost.setEntity(se) ; // HttpResponse response = httpclient.execute(httpost); // HttpEntity ent = response.getEntity() ; // BufferedReader br = new BufferedReader(new InputStreamReader(ent.getContent())) ; // String x ; // while((x = br.readLine()) != null){ // Call call = new Call() ; // StringTokenizer st = new StringTokenizer(x) ; // String overQ = st.nextToken() ; // if(overQ.equals("Overquota")) break ; // call.setDate(Long.parseLong(overQ)) ; // call.setRecording(st.nextToken()) ; // call.setStatusCode(st.nextToken()) ; // call.setNumber(st.nextToken()) ; // try{ // call.setNumber(st.nextToken()) ; // } catch (NoSuchElementException e) {x = br. readLine() ; call.setNumber(st.nextToken()) ;} // calls.add(call) ; // } // br.close() ; // }catch(SocketException e) {e.printStackTrace() ; x = "Cannot connect to the internet" ; showText() ; // } catch (UnsupportedEncodingException e) {e.printStackTrace(); // } catch(ConnectTimeoutException e){ e.printStackTrace() ; x = "Connection timed out. Please try again" ; showText() ; // } catch(IOException e){e.printStackTrace() ; // } //// runOnUiThread(runs); // } // } }
package crazypants.enderio.machine.recipe;

import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.StringReader;
import java.util.Locale;

import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;

import net.minecraft.block.Block;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraftforge.fluids.Fluid;
import net.minecraftforge.fluids.FluidContainerRegistry;
import net.minecraftforge.fluids.FluidRegistry;
import net.minecraftforge.fluids.FluidStack;
import net.minecraftforge.oredict.OreDictionary;

import org.apache.commons.io.IOUtils;
import org.xml.sax.Attributes;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.SAXParseException;
import org.xml.sax.XMLReader;
import org.xml.sax.helpers.DefaultHandler;

import cpw.mods.fml.common.registry.GameRegistry;
import crazypants.enderio.Log;
import crazypants.enderio.machine.crusher.CrusherRecipeManager;
import crazypants.enderio.machine.recipe.RecipeConfig.RecipeElement;
import crazypants.enderio.machine.recipe.RecipeConfig.RecipeGroup;
import crazypants.enderio.material.OreDictionaryPreferences;
import crazypants.util.OreDictionaryHelper;

/**
 * SAX handler that parses machine recipe XML (recipe groups, recipes, item/fluid
 * inputs and outputs) into a {@link RecipeConfig}.
 *
 * <p>Parsing is streaming and stateful: {@link #startElement} / {@link #endElement}
 * track the currently open group, recipe and input/output sections. Unknown tags may
 * be delegated to an optional {@link CustomTagHandler}. Malformed elements are
 * logged and skipped rather than aborting the parse; only fatal XML errors abort.
 */
public class RecipeConfigParser extends DefaultHandler {

  // Recognised element names.
  public static final String ELEMENT_RECIPE_GROUP = "recipeGroup";
  public static final String ELEMENT_RECIPE = "recipe";
  public static final String ELEMENT_INPUT = "input";
  public static final String ELEMENT_OUTPUT = "output";
  public static final String ELEMENT_ITEM_STACK = "itemStack";
  public static final String ELEMENT_FLUID_STACK = "fluidStack";
  public static final String ELEMENT_DUMP_REGISTERY = "dumpRegistery";

  // Recognised attribute names.
  public static final String AT_NAME = "name";
  public static final String AT_ENABLED = "enabled";
  public static final String AT_DUMP_ITEMS = "modObjects";
  public static final String AT_ORE_DICT = "oreDictionary";
  public static final String AT_ENERGY_COST = "energyCost";
  public static final String AT_BONUS_TYPE = "bonusType";
  public static final String AT_ITEM_META = "itemMeta";
  public static final String AT_ITEM_NAME = "itemName";
  public static final String AT_MOD_ID = "modID";
  public static final String AT_NUMBER = "number";
  public static final String AT_AMOUNT = "amount";
  public static final String AT_MULTIPLIER = "multiplier";
  public static final String AT_SLOT = "slot";
  public static final String AT_CHANCE = "chance";
  public static final String AT_EXP = "exp";

  // Log prefix
  private static final String LP = "RecipeParser: ";

  /**
   * Parses recipe XML held in a string.
   *
   * @param str the XML document text
   * @param customHandler optional handler for non-standard tags, may be null
   * @return the parsed config
   * @throws Exception on fatal XML or I/O errors
   */
  public static RecipeConfig parse(String str, CustomTagHandler customHandler) throws Exception {
    StringReader reader = new StringReader(str);
    InputSource is = new InputSource(reader);
    try {
      return parse(is, customHandler);
    } finally {
      reader.close();
    }
  }

  /**
   * Parses recipe XML from a file.
   *
   * @param file the XML file to read
   * @param customHandler optional handler for non-standard tags, may be null
   * @return the parsed config
   * @throws Exception on fatal XML or I/O errors
   */
  public static RecipeConfig parse(File file, CustomTagHandler customHandler) throws Exception {
    BufferedInputStream bis = new BufferedInputStream(new FileInputStream(file));
    InputSource is = new InputSource(bis);
    try {
      return parse(is, customHandler);
    } finally {
      IOUtils.closeQuietly(bis);
    }
  }

  /**
   * Parses recipe XML from a SAX input source.
   *
   * @param is the input source
   * @param customHandler optional handler for non-standard tags, may be null
   * @return the parsed config
   * @throws Exception on fatal XML or I/O errors
   */
  public static RecipeConfig parse(InputSource is, CustomTagHandler customHandler) throws Exception {
    RecipeConfigParser parser = new RecipeConfigParser(customHandler);
    SAXParserFactory spf = SAXParserFactory.newInstance();
    spf.setNamespaceAware(true);
    SAXParser saxParser = spf.newSAXParser();
    XMLReader xmlReader = saxParser.getXMLReader();
    xmlReader.setContentHandler(parser);
    // Fix: without registering the error handler, the warning()/error()/fatalError()
    // overrides below were never invoked and non-fatal parse errors went unreported.
    xmlReader.setErrorHandler(parser);
    xmlReader.parse(is);
    return parser.getResult();
  }

  // --- parser state -------------------------------------------------------

  private RecipeConfig result = null;       // finished config, set when the root closes
  private RecipeConfig root = null;         // config being built while the root is open
  private RecipeGroup recipeGroup = null;   // currently open <recipeGroup>, or null
  private RecipeElement recipe = null;      // currently open <recipe>, or null
  private boolean outputTagOpen = false;    // inside an <output> section
  private boolean inputTagOpen = false;     // inside an <input> section
  private boolean debug = false;            // verbose element tracing
  private boolean inCustomHandler = false;  // an unknown tag was claimed by customHandler

  private CustomTagHandler customHandler = null;

  RecipeConfigParser(CustomTagHandler customHandler) {
    this.customHandler = customHandler;
  }

  /** Returns the parsed config: the completed result, or the partial root if parsing stopped early. */
  RecipeConfig getResult() {
    return result != null ? result : root;
  }

  @Override
  public void warning(SAXParseException e) throws SAXException {
    Log.warn("Warning parsing SAG Mill config file: " + e.getMessage());
  }

  @Override
  public void error(SAXParseException e) throws SAXException {
    Log.error("Error parsing SAG Mill config file: " + e.getMessage());
    e.printStackTrace();
  }

  @Override
  public void fatalError(SAXParseException e) throws SAXException {
    Log.error("Error parsing SAG Mill config file: " + e.getMessage());
    e.printStackTrace();
    // Fix: the SAX contract requires fatal errors to abort the parse; swallowing
    // the exception here would let parse() return a half-built config.
    throw e;
  }

  @Override
  public void endElement(String uri, String localName, String qName) throws SAXException {
    if(isElementRoot(localName)) {
      result = root;
      root = null;
      if(debug) {
        Log.debug(LP + "Closing root");
      }
      return;
    }
    if(ELEMENT_RECIPE_GROUP.equals(localName)) {
      if(debug) {
        Log.debug(LP + "Closing recipe group");
      }
      if(recipeGroup != null && root != null) {
        root.addRecipeGroup(recipeGroup);
      }
      recipeGroup = null;
      return;
    }
    if(ELEMENT_RECIPE.equals(localName)) {
      if(debug) {
        Log.debug(LP + "Closing recipe");
      }
      if(recipe != null && recipeGroup != null) {
        recipeGroup.addRecipe(recipe);
      } else if(recipe != null) {
        // Fix: only warn when a real recipe is being dropped; a null recipe was
        // already reported when its start tag was rejected, so warning again is noise.
        Log.warn(LP + "Could not add recipe " + recipe + " to group " + recipeGroup);
      }
      recipe = null;
      return;
    }
    if(ELEMENT_OUTPUT.equals(localName)) {
      outputTagOpen = false;
      if(debug) {
        Log.debug(LP + "Closing output");
      }
      return;
    }
    if(ELEMENT_INPUT.equals(localName)) {
      inputTagOpen = false;
      if(debug) {
        Log.debug(LP + "Closing input");
      }
      return;
    }
    // Custom tag handling
    if(customHandler != null) {
      if(customHandler.endElement(uri, localName, qName)) {
        inCustomHandler = false;
        return;
      }
    }
  }

  @Override
  public void startElement(String uri, String localName, String qName, Attributes attributes) throws SAXException {
    if(debug) {
      // Consistency: reuse the toString(Attributes) helper instead of duplicating its loop.
      Log.debug(LP + "RecipeConfigParser.startElement: localName:" + localName + " attrs:" + toString(attributes));
    }
    if(isElementRoot(localName)) {
      if(root != null) {
        Log.warn(LP + "Multiple root elements found.");
      } else {
        root = new RecipeConfig();
      }
      return;
    }
    if(root == null) {
      // Tolerate a missing root so a partially valid file still yields recipes.
      Log.warn(LP + " Root element not specified before element " + localName + ".");
      root = new RecipeConfig();
    }
    if(ELEMENT_DUMP_REGISTERY.equals(localName)) {
      root.setDumpOreDictionary(getBooleanValue(AT_ORE_DICT, attributes, false));
      root.setDumpItemRegistery(getBooleanValue(AT_DUMP_ITEMS, attributes, false));
      return;
    }
    if(ELEMENT_RECIPE_GROUP.equals(localName)) {
      if(recipeGroup != null) {
        Log.warn(LP + "Recipe group " + recipeGroup.getName() + " not closed before encountering a new recipe group.");
      }
      recipeGroup = root.createRecipeGroup(attributes.getValue(AT_NAME));
      recipeGroup.setEnabled(getBooleanValue(AT_ENABLED, attributes, true));
      if(!recipeGroup.isNameValid()) {
        Log.warn(LP + "A recipe group was found with an invalid name: " + attributes.getValue(AT_NAME));
        recipeGroup = null;
      }
      return;
    }
    if(ELEMENT_RECIPE.equals(localName)) {
      if(recipeGroup == null) {
        Log.warn(LP + "A recipe was found outside of a recipe groups tags.");
        return;
      }
      if(recipe != null) {
        Log.warn(LP + "A new recipe was started before the recipe was closed.");
      }
      String name = getStringValue(AT_NAME, attributes, null);
      if(name == null) {
        Log.warn(LP + "An unnamed recipe was found.");
        return;
      }
      recipe = recipeGroup.createRecipe(name);
      recipe.setEnergyRequired(getIntValue(AT_ENERGY_COST, attributes, CrusherRecipeManager.ORE_ENERGY_COST));
      recipe.setBonusType(getEnumValue(AT_BONUS_TYPE, attributes, RecipeBonusType.class, RecipeBonusType.MULTIPLY_OUTPUT));
      return;
    }
    // Custom tag handling
    if(customHandler != null) {
      if(customHandler.startElement(uri, localName, qName, attributes)) {
        inCustomHandler = true;
        return;
      }
    }
    if(recipe == null) {
      if(!inCustomHandler) {
        Log.warn(LP + "Found element <" + localName + "> with no recipe decleration.");
      }
      return;
    }
    if(ELEMENT_OUTPUT.equals(localName)) {
      if(inputTagOpen) {
        Log.warn(LP + "<output> encounterd before <input> closed.");
        inputTagOpen = false;
      }
      if(outputTagOpen) {
        Log.warn(LP + "<output> encounterd before previous <output> closed.");
      }
      outputTagOpen = true;
      return;
    }
    if(ELEMENT_INPUT.equals(localName)) {
      if(outputTagOpen) {
        Log.warn(LP + "<input> encounterd before <output> closed.");
        outputTagOpen = false;
      }
      if(inputTagOpen) {
        Log.warn(LP + "<input> encounterd before previous <input> closed.");
      }
      inputTagOpen = true;
      return;
    }
    boolean isFluidStack = ELEMENT_FLUID_STACK.equals(localName);
    if(ELEMENT_ITEM_STACK.equals(localName) || isFluidStack) {
      if(!inputTagOpen && !outputTagOpen) {
        Log.warn(LP + "Encounterd an item stack outside of either an <input> or <output> tag.");
        return;
      }
      if(inputTagOpen && outputTagOpen) {
        Log.warn(LP + "Encounterd an item stack within both an <input> and <output> tag.");
        return;
      }
      if(inputTagOpen) {
        if(isFluidStack) {
          addInputFluidStack(attributes);
        } else {
          addInputStack(attributes);
        }
      } else {
        if(isFluidStack) {
          addOutputFluidStack(attributes);
        } else {
          addOutputStack(attributes);
        }
      }
    }
  }

  //TODO: What a hack!
  // Recognises any of the supported machine root element names.
  private boolean isElementRoot(String str) {
    return "AlloySmelterRecipes".equals(str) || "SAGMillRecipes".equals(str) || "VatRecipes".equals(str)
        || "SliceAndSpliceRecipes".equals(str);
  }

  /** Adds an item-stack output (with optional chance and exp attributes) to the open recipe. */
  private void addOutputStack(Attributes attributes) {
    RecipeInput stack = getItemStack(attributes);
    if(stack == null) {
      return;
    }
    float exp = getFloatValue(AT_EXP, attributes, 0f);
    recipe.addOutput(new RecipeOutput(stack.getInput(), getFloatValue(AT_CHANCE, attributes, 1f), exp));
  }

  /** Adds an item-stack input to the open recipe. */
  private void addInputStack(Attributes attributes) {
    RecipeInput stack = getItemStack(attributes);
    if(stack == null) {
      return;
    }
    recipe.addInput(stack);
  }

  /** Adds a fluid-stack output to the open recipe. */
  private void addOutputFluidStack(Attributes attributes) {
    RecipeInput stack = getFluidStack(attributes);
    if(stack == null) {
      return;
    }
    recipe.addOutput(new RecipeOutput(stack.getFluidInput()));
  }

  /** Adds a fluid-stack input to the open recipe. */
  private void addInputFluidStack(Attributes attributes) {
    RecipeInput stack = getFluidStack(attributes);
    if(stack == null) {
      return;
    }
    recipe.addInput(stack);
  }

  /**
   * Builds a fluid RecipeInput from a fluidStack element; amount defaults to one bucket.
   * Returns null (after logging) when the fluid name is missing or unregistered.
   */
  private RecipeInput getFluidStack(Attributes attributes) {
    int amount = getIntValue(AT_AMOUNT, attributes, FluidContainerRegistry.BUCKET_VOLUME);
    String name = getStringValue(AT_NAME, attributes, null);
    if(name == null) {
      return null;
    }
    Fluid fluid = FluidRegistry.getFluid(name);
    if(fluid == null) {
      Log.warn("When parsing recipes could not find fluid with name: " + name);
      return null;
    }
    return new RecipeInput(new FluidStack(fluid, amount), getFloatValue(AT_MULTIPLIER, attributes, 1));
  }

  /**
   * Builds an item RecipeInput from an itemStack element. Resolution order: ore
   * dictionary entry (if the oreDictionary attribute is present), then modID/itemName
   * lookup against items and blocks. itemMeta="*" means "match any metadata".
   * Returns null (after logging) when nothing resolves.
   */
  public static RecipeInput getItemStack(Attributes attributes) {
    int stackSize = getIntValue(AT_NUMBER, attributes, 1);
    String oreDict = getStringValue(AT_ORE_DICT, attributes, null);
    if(oreDict != null) {
      if(!OreDictionaryHelper.isRegistered(oreDict)) {
        Log.debug(LP + "Could not find an entry in the ore dictionary for " + oreDict);
        return null;
      }
      ItemStack stack = OreDictionaryPreferences.instance.getPreferred(oreDict);
      if(stack == null) {
        Log.debug(LP + "Could not find a prefered item in the ore dictionary for " + oreDict);
        return null;
      }
      stack = stack.copy();
      stack.stackSize = stackSize;
      return new OreDictionaryRecipeInput(stack, OreDictionary.getOreID(oreDict), getFloatValue(AT_MULTIPLIER, attributes, 1),
          getIntValue(AT_SLOT, attributes, -1));
    }
    boolean useMeta = true;
    int itemMeta = 0;
    String metaString = getStringValue(AT_ITEM_META, attributes, "0");
    if("*".equals(metaString)) {
      useMeta = false;
    } else {
      itemMeta = getIntValue(AT_ITEM_META, attributes, 0);
    }
    ItemStack res = null;
    String modId = getStringValue(AT_MOD_ID, attributes, null);
    String name = getStringValue(AT_ITEM_NAME, attributes, null);
    if(modId != null && name != null) {
      Item i = GameRegistry.findItem(modId, name);
      if(i != null) {
        res = new ItemStack(i, stackSize, useMeta ? itemMeta : 0);
      } else {
        Block b = GameRegistry.findBlock(modId, name);
        if(b != null) {
          res = new ItemStack(b, stackSize, useMeta ? itemMeta : 0);
        }
      }
    }
    if(res == null) {
      Log.debug("Could not create an item stack from the attributes " + toString(attributes));
      return null;
    }
    return new RecipeInput(res, useMeta, getFloatValue(AT_MULTIPLIER, attributes, 1), getIntValue(AT_SLOT, attributes, -1));
  }

  /**
   * Reads a boolean attribute: "true"/"false" (case-insensitive) or the default for
   * anything else, including a missing attribute.
   */
  public static boolean getBooleanValue(String qName, Attributes attributes, boolean def) {
    String val = attributes.getValue(qName);
    if(val == null) {
      return def;
    }
    // Fix: locale-independent lower-casing, consistent with getEnumValue; the default
    // toLowerCase() mangles 'I' under the Turkish locale.
    val = val.toLowerCase(Locale.ENGLISH).trim();
    return val.equals("false") ? false : val.equals("true") ? true : def;
  }

  /** Reads an int attribute, falling back to the default on a missing or unparsable value. */
  public static int getIntValue(String qName, Attributes attributes, int def) {
    try {
      return Integer.parseInt(getStringValue(qName, attributes, def + ""));
    } catch (Exception e) {
      Log.warn(LP + "Could not parse a valid int for attribute " + qName + " with value " + getStringValue(qName, attributes, null));
      return def;
    }
  }

  /** Reads a float attribute, falling back to the default on a missing or unparsable value. */
  public static float getFloatValue(String qName, Attributes attributes, float def) {
    try {
      return Float.parseFloat(getStringValue(qName, attributes, def + ""));
    } catch (Exception e) {
      Log.warn(LP + "Could not parse a valid float for attribute " + qName + " with value " + getStringValue(qName, attributes, null));
      return def;
    }
  }

  /**
   * Reads a trimmed string attribute. Missing attributes yield the default;
   * NOTE: a present-but-blank attribute yields null, not the default (callers rely
   * on this to treat blank as absent).
   */
  public static String getStringValue(String qName, Attributes attributes, String def) {
    String val = attributes.getValue(qName);
    if(val == null) {
      return def;
    }
    val = val.trim();
    if(val.length() <= 0) {
      return null;
    }
    return val;
  }

  /** Reads an enum attribute by (case-insensitive) constant name, falling back to the default. */
  public static <E extends Enum<E>> E getEnumValue(String qName, Attributes attributes, Class<E> clazz, E def) {
    String val = attributes.getValue(qName);
    if(val == null) {
      return def;
    }
    val = val.trim();
    if(val.length() <= 0) {
      return def;
    }
    val = val.toUpperCase(Locale.ENGLISH);
    try {
      return Enum.valueOf(clazz, val);
    } catch(IllegalArgumentException ex) {
      return def;
    }
  }

  /** Returns true when the attribute is present (even if blank). */
  public static boolean hasAttribute(String att, Attributes attributes) {
    return attributes.getValue(att) != null;
  }

  /** Renders all attributes as "[name=value]" pairs for log messages. */
  public static String toString(Attributes attributes) {
    StringBuilder sb = new StringBuilder();
    for (int i = 0; i < attributes.getLength(); i++) {
      sb.append("[" + attributes.getQName(i) + "=" + attributes.getValue(i) + "]");
    }
    return sb.toString();
  }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.drill.exec.expr.fn.impl;

import io.netty.buffer.DrillBuf;
import io.netty.util.internal.PlatformDependent;

import org.apache.drill.exec.memory.BoundsChecking;

import com.google.common.primitives.UnsignedLongs;

/**
 * Implementation of the xxHash 64-bit hash algorithm over primitives and raw
 * off-heap buffer ranges, with 32-bit variants derived by truncation.
 *
 * <p>All byte access goes through {@code PlatformDependent}, so buffer variants read
 * unchecked native memory; callers pass absolute addresses via {@link DrillBuf}.
 *
 * <p>NOTE(review): this implementation deviates from the canonical xxHash64 reference
 * in two places (see inline notes below), so its output should not be assumed to
 * match other xxHash64 implementations — only to be self-consistent. Confirm before
 * comparing hashes across systems.
 */
public final class XXHash {
  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(XXHash.class);

  // Cached so the per-call branch in hash64(int,int,DrillBuf,long) is effectively free.
  private static final boolean BOUNDS_CHECKING_ENABLED = BoundsChecking.BOUNDS_CHECKING_ENABLED;

  // The five 64-bit xxHash prime constants; decoded from unsigned decimal strings
  // because the first two do not fit in a signed long literal.
  static final long PRIME64_1 = UnsignedLongs.decode("11400714785074694791");
  static final long PRIME64_2 = UnsignedLongs.decode("14029467366897019727");
  static final long PRIME64_3 = UnsignedLongs.decode("1609587929392839161");
  static final long PRIME64_4 = UnsignedLongs.decode("9650029242287828579");
  static final long PRIME64_5 = UnsignedLongs.decode("2870177450012600261");

  /**
   * Core xxHash64 loop over the half-open byte range [start, bEnd) of native memory.
   *
   * @param start absolute address of the first byte
   * @param bEnd  absolute address one past the last byte
   * @param seed  hash seed
   * @return the 64-bit hash of the range
   */
  private static long hash64bytes(long start, long bEnd, long seed) {
    long len = bEnd - start;
    long h64;
    long p = start;

    // for long strings (greater than 32 bytes): four parallel accumulators over
    // 32-byte stripes, merged afterwards.
    if (len >= 32) {
      final long limit = bEnd - 32;
      long v1 = seed + PRIME64_1 + PRIME64_2;
      long v2 = seed + PRIME64_2;
      long v3 = seed + 0;
      long v4 = seed - PRIME64_1;

      do {
        // Each accumulator consumes one 8-byte lane: mix in, rotate 31, scale.
        v1 += PlatformDependent.getLong(p) * PRIME64_2;
        p = p + 8;
        v1 = Long.rotateLeft(v1, 31);
        v1 *= PRIME64_1;

        v2 += PlatformDependent.getLong(p) * PRIME64_2;
        p = p + 8;
        v2 = Long.rotateLeft(v2, 31);
        v2 *= PRIME64_1;

        v3 += PlatformDependent.getLong(p) * PRIME64_2;
        p = p + 8;
        v3 = Long.rotateLeft(v3, 31);
        v3 *= PRIME64_1;

        v4 += PlatformDependent.getLong(p) * PRIME64_2;
        p = p + 8;
        v4 = Long.rotateLeft(v4, 31);
        v4 *= PRIME64_1;
      } while (p <= limit);

      // Converge the four accumulators into a single state.
      h64 = Long.rotateLeft(v1, 1) + Long.rotateLeft(v2, 7) + Long.rotateLeft(v3, 12) + Long.rotateLeft(v4, 18);

      // Fold each accumulator back into the converged state.
      v1 *= PRIME64_2;
      v1 = Long.rotateLeft(v1, 31);
      v1 *= PRIME64_1;
      h64 ^= v1;
      h64 = h64 * PRIME64_1 + PRIME64_4;

      v2 *= PRIME64_2;
      v2 = Long.rotateLeft(v2, 31);
      v2 *= PRIME64_1;
      h64 ^= v2;
      h64 = h64 * PRIME64_1 + PRIME64_4;

      v3 *= PRIME64_2;
      v3 = Long.rotateLeft(v3, 31);
      v3 *= PRIME64_1;
      h64 ^= v3;
      h64 = h64 * PRIME64_1 + PRIME64_4;

      v4 *= PRIME64_2;
      v4 = Long.rotateLeft(v4, 31);
      v4 *= PRIME64_1;
      h64 ^= v4;
      h64 = h64 * PRIME64_1 + PRIME64_4;
    } else {
      // Short input: skip the stripe loop entirely.
      h64 = seed + PRIME64_5;
    }

    h64 += len;

    // Tail: remaining 8-byte words...
    while (p + 8 <= bEnd) {
      long k1 = PlatformDependent.getLong(p);
      k1 *= PRIME64_2;
      k1 = Long.rotateLeft(k1, 31);
      k1 *= PRIME64_1;
      h64 ^= k1;
      h64 = Long.rotateLeft(h64, 27) * PRIME64_1 + PRIME64_4;
      p += 8;
    }

    // ...then at most one 4-byte word...
    // NOTE(review): getInt() sign-extends when promoted to long; the canonical
    // xxHash64 reference zero-extends the 32-bit read here. Output diverges from
    // reference xxHash64 for tails whose high bit is set — confirm this is intended.
    if (p + 4 <= bEnd) {
      h64 ^= PlatformDependent.getInt(p) * PRIME64_1;
      h64 = Long.rotateLeft(h64, 23) * PRIME64_2 + PRIME64_3;
      p += 4;
    }

    // ...then any remaining single bytes (also sign-extended via byte -> long promotion).
    while (p < bEnd) {
      h64 ^= PlatformDependent.getByte(p) * PRIME64_5;
      h64 = Long.rotateLeft(h64, 11) * PRIME64_1;
      p++;
    }
    return applyFinalHashComputation(h64);
  }

  /**
   * Final avalanche: shift-xor / multiply mixing so every input bit affects every
   * output bit.
   *
   * <p>NOTE(review): uses arithmetic shift {@code >>} where the xxHash64 reference
   * specifies unsigned shift {@code >>>}; for negative intermediate values the
   * results differ from canonical xxHash64. All hashes produced by this class use
   * the same finalization, so they remain internally consistent — verify before
   * "fixing", since changing it changes every hash value.
   */
  private static long applyFinalHashComputation(long h64) {
    h64 ^= h64 >> 33;
    h64 *= PRIME64_2;
    h64 ^= h64 >> 29;
    h64 *= PRIME64_3;
    h64 ^= h64 >> 32;
    return h64;
  }

  /* 64 bit variations */

  /** Hashes a single int as a 4-byte message (specialized short-input path). */
  public static long hash64(int val, long seed){
    long h64 = seed + PRIME64_5;
    h64 += 4; // add length (4 bytes) to hash value
    h64 ^= val * PRIME64_1;
    h64 = Long.rotateLeft(h64, 23) * PRIME64_2 + PRIME64_3;
    return applyFinalHashComputation(h64);
  }

  /** Hashes a single long as an 8-byte message (specialized short-input path). */
  public static long hash64(long val, long seed){
    long h64 = seed + PRIME64_5;
    h64 += 8; // add length (8 bytes) to hash value
    long k1 = val* PRIME64_2;
    k1 = Long.rotateLeft(k1, 31);
    k1 *= PRIME64_1;
    h64 ^= k1;
    h64 = Long.rotateLeft(h64, 27) * PRIME64_1 + PRIME64_4;
    return applyFinalHashComputation(h64);
  }

  /** Hashes a float via its raw IEEE-754 bit pattern. */
  public static long hash64(float val, long seed){
    return hash64(Float.floatToIntBits(val), seed);
  }

  /** Hashes a double via its raw IEEE-754 bit pattern. */
  public static long hash64(double val, long seed){
    return hash64(Double.doubleToLongBits(val), seed);
  }

  /**
   * Hashes the bytes at offsets [start, end) of the buffer's backing native memory.
   * Bounds are validated only when bounds checking is globally enabled.
   */
  public static long hash64(int start, int end, DrillBuf buffer, long seed){
    if(BOUNDS_CHECKING_ENABLED){
      buffer.checkBytes(start, end);
    }
    long s = buffer.memoryAddress() + start;
    long e = buffer.memoryAddress() + end;
    return hash64bytes(s, e, seed);
  }

  /* 32 bit variations: truncate the 64-bit hash to its low 32 bits. */

  public static int hash32(int val, long seed){
    return (int) hash64(val, seed);
  }

  public static int hash32(long val, long seed){
    return (int) hash64(val, seed);
  }

  public static int hash32(float val, long seed){
    return (int) hash64(val, seed);
  }

  public static int hash32(double val, long seed){
    return (int) hash64(val, seed);
  }

  public static int hash32(int start, int end, DrillBuf buffer, long seed){
    return (int) hash64(start, end, buffer, seed);
  }
}
package hu.interconnect.hr.backend.api.dto;

import java.math.BigDecimal;
import java.util.Date;

import hu.interconnect.hr.backend.api.enumeration.Allomanymod;
import hu.interconnect.hr.backend.api.enumeration.CsaladiAllapot;
import hu.interconnect.hr.backend.api.enumeration.JogviszonyMegszunesenekModja;
import hu.interconnect.hr.backend.api.enumeration.KozteruletTipus;
import hu.interconnect.hr.backend.api.enumeration.LakcimAktualis;
import hu.interconnect.hr.backend.api.enumeration.MunkakorJellege;
import hu.interconnect.hr.backend.api.enumeration.Nem;

/**
 * Data-transfer object for editing a personnel master record ("szemelyitorzs").
 *
 * <p>Pure mutable data carrier: no validation or behavior, only fields with
 * getters/setters. Grouped into nested DTO sections: personal data, job
 * classification, employment relationship, family, medical examination and
 * leave tracking. Field names are Hungarian HR domain terms; the English
 * glosses in comments below are translations of those names —
 * TODO confirm exact semantics against the backing entity.
 */
public class SzemelyitorzsEditDTO {

	// tsz: personnel/payroll number identifying the record being edited.
	private int tsz;
	private SzemelyiAdatokDTO szemelyiAdatok;                 // personal data section
	private MunkakoriBesorolasDTO munkakoriBesorolas;         // job classification section
	private JogviszonyAdatokDTO jogviszonyAdatok;             // employment-relationship section
	private CsaladDTO csalad;                                 // family section
	private OrvosiVizsgalatDTO orvosiVizsgalat;               // medical-examination section
	private SzabadsagnyilvantartasDTO szabadsagnyilvantartas; // leave-tracking section

	public int getTsz() {
		return tsz;
	}

	public void setTsz(int tsz) {
		this.tsz = tsz;
	}

	public SzemelyiAdatokDTO getSzemelyiAdatok() {
		return szemelyiAdatok;
	}

	public void setSzemelyiAdatok(SzemelyiAdatokDTO szemelyiAdatok) {
		this.szemelyiAdatok = szemelyiAdatok;
	}

	public MunkakoriBesorolasDTO getMunkakoriBesorolas() {
		return munkakoriBesorolas;
	}

	public void setMunkakoriBesorolas(MunkakoriBesorolasDTO munkakoriBesorolas) {
		this.munkakoriBesorolas = munkakoriBesorolas;
	}

	public JogviszonyAdatokDTO getJogviszonyAdatok() {
		return jogviszonyAdatok;
	}

	public void setJogviszonyAdatok(JogviszonyAdatokDTO jogviszonyAdatok) {
		this.jogviszonyAdatok = jogviszonyAdatok;
	}

	public CsaladDTO getCsalad() {
		return csalad;
	}

	public void setCsalad(CsaladDTO csalad) {
		this.csalad = csalad;
	}

	public OrvosiVizsgalatDTO getOrvosiVizsgalat() {
		return orvosiVizsgalat;
	}

	public void setOrvosiVizsgalat(OrvosiVizsgalatDTO orvosiVizsgalat) {
		this.orvosiVizsgalat = orvosiVizsgalat;
	}

	public SzabadsagnyilvantartasDTO getSzabadsagnyilvantartas() {
		return szabadsagnyilvantartas;
	}

	public void setSzabadsagnyilvantartas(SzabadsagnyilvantartasDTO szabadsagnyilvantartas) {
		this.szabadsagnyilvantartas = szabadsagnyilvantartas;
	}

	/**
	 * Personal data: name, citizenship, birth data, identity documents (with
	 * expiry dates), contact details and addresses (permanent/temporary, with a
	 * flag selecting which one is current).
	 */
	public static class SzemelyiAdatokDTO {

		private String vezeteknev;                    // surname
		private String keresztnev;                    // given name
		private Nem nem;                              // sex
		private Integer allampolgarsag;               // citizenship (code)
		private Date szuletesiDatum;                  // date of birth
		private String szuletesiHely;                 // place of birth
		private String szuletesiOrszag;               // country of birth
		private String szuletesiNev;                  // birth name
		private String szuletesiNevAnyja;             // mother's birth name
		private String adoazonositoJel;               // tax identification number
		private String taj;                           // social security (TAJ) number
		private String szemelyiIgazolvanySzam;        // identity card number
		private Date szemelyiIgazolvanySzamLejarat;   // identity card expiry
		private String utlevelSzam;                   // passport number
		private Date utlevelSzamLejarat;              // passport expiry
		private String jogositvanySzam;               // driving licence number
		private Date jogositvanySzamLejarat;          // driving licence expiry
		private String telefon;                       // landline phone
		private String mobil;                         // mobile phone
		private String email;
		private LakcimAktualis lakcimAktualis;        // which address is the current one
		private LakcimDTO lakcimAllando;              // permanent address
		private LakcimDTO lakcimIdeiglenes;           // temporary address

		public String getVezeteknev() {
			return vezeteknev;
		}

		public void setVezeteknev(String vezeteknev) {
			this.vezeteknev = vezeteknev;
		}

		public String getKeresztnev() {
			return keresztnev;
		}

		public void setKeresztnev(String keresztnev) {
			this.keresztnev = keresztnev;
		}

		public Nem getNem() {
			return nem;
		}

		public void setNem(Nem nem) {
			this.nem = nem;
		}

		public Integer getAllampolgarsag() {
			return allampolgarsag;
		}

		public void setAllampolgarsag(Integer allampolgarsag) {
			this.allampolgarsag = allampolgarsag;
		}

		public Date getSzuletesiDatum() {
			return szuletesiDatum;
		}

		public void setSzuletesiDatum(Date szuletesiDatum) {
			this.szuletesiDatum = szuletesiDatum;
		}

		public String getSzuletesiHely() {
			return szuletesiHely;
		}

		public void setSzuletesiHely(String szuletesiHely) {
			this.szuletesiHely = szuletesiHely;
		}

		public String getSzuletesiOrszag() {
			return szuletesiOrszag;
		}

		public void setSzuletesiOrszag(String szuletesiOrszag) {
			this.szuletesiOrszag = szuletesiOrszag;
		}

		public String getSzuletesiNev() {
			return szuletesiNev;
		}

		public void setSzuletesiNev(String szuletesiNev) {
			this.szuletesiNev = szuletesiNev;
		}

		public String getSzuletesiNevAnyja() {
			return szuletesiNevAnyja;
		}

		public void setSzuletesiNevAnyja(String szuletesiNevAnyja) {
			this.szuletesiNevAnyja = szuletesiNevAnyja;
		}

		public String getAdoazonositoJel() {
			return adoazonositoJel;
		}

		public void setAdoazonositoJel(String adoazonositoJel) {
			this.adoazonositoJel = adoazonositoJel;
		}

		public String getTaj() {
			return taj;
		}

		public void setTaj(String taj) {
			this.taj = taj;
		}

		public String getSzemelyiIgazolvanySzam() {
			return szemelyiIgazolvanySzam;
		}

		public void setSzemelyiIgazolvanySzam(String szemelyiIgazolvanySzam) {
			this.szemelyiIgazolvanySzam = szemelyiIgazolvanySzam;
		}

		public Date getSzemelyiIgazolvanySzamLejarat() {
			return szemelyiIgazolvanySzamLejarat;
		}

		public void setSzemelyiIgazolvanySzamLejarat(Date szemelyiIgazolvanySzamLejarat) {
			this.szemelyiIgazolvanySzamLejarat = szemelyiIgazolvanySzamLejarat;
		}

		public String getUtlevelSzam() {
			return utlevelSzam;
		}

		public void setUtlevelSzam(String utlevelSzam) {
			this.utlevelSzam = utlevelSzam;
		}

		public Date getUtlevelSzamLejarat() {
			return utlevelSzamLejarat;
		}

		public void setUtlevelSzamLejarat(Date utlevelSzamLejarat) {
			this.utlevelSzamLejarat = utlevelSzamLejarat;
		}

		public String getJogositvanySzam() {
			return jogositvanySzam;
		}

		public void setJogositvanySzam(String jogositvanySzam) {
			this.jogositvanySzam = jogositvanySzam;
		}

		public Date getJogositvanySzamLejarat() {
			return jogositvanySzamLejarat;
		}

		public void setJogositvanySzamLejarat(Date jogositvanySzamLejarat) {
			this.jogositvanySzamLejarat = jogositvanySzamLejarat;
		}

		public String getTelefon() {
			return telefon;
		}

		public void setTelefon(String telefon) {
			this.telefon = telefon;
		}

		public String getMobil() {
			return mobil;
		}

		public void setMobil(String mobil) {
			this.mobil = mobil;
		}

		public String getEmail() {
			return email;
		}

		public void setEmail(String email) {
			this.email = email;
		}

		public LakcimAktualis getLakcimAktualis() {
			return lakcimAktualis;
		}

		public void setLakcimAktualis(LakcimAktualis lakcimAktualis) {
			this.lakcimAktualis = lakcimAktualis;
		}

		public LakcimDTO getLakcimAllando() {
			return lakcimAllando;
		}

		public void setLakcimAllando(LakcimDTO lakcimAllando) {
			this.lakcimAllando = lakcimAllando;
		}

		public LakcimDTO getLakcimIdeiglenes() {
			return lakcimIdeiglenes;
		}

		public void setLakcimIdeiglenes(LakcimDTO lakcimIdeiglenes) {
			this.lakcimIdeiglenes = lakcimIdeiglenes;
		}

		/** Postal address broken into Hungarian address components. */
		public static class LakcimDTO {

			private String iranyitoszam;              // postal code
			private String telepules;                 // town/settlement
			private String kerulet;                   // district
			private String kozteruletNev;             // public-area (street) name
			private KozteruletTipus kozteruletTipus;  // public-area type (street, square, ...)
			private String kozteruletSzam;            // street number
			private String epulet;                    // building
			private String lepcsohaz;                 // staircase
			private String emelet;                    // floor
			private String ajto;                      // door

			public String getIranyitoszam() {
				return iranyitoszam;
			}

			public void setIranyitoszam(String iranyitoszam) {
				this.iranyitoszam = iranyitoszam;
			}

			public String getTelepules() {
				return telepules;
			}

			public void setTelepules(String telepules) {
				this.telepules = telepules;
			}

			public String getKerulet() {
				return kerulet;
			}

			public void setKerulet(String kerulet) {
				this.kerulet = kerulet;
			}

			public String getKozteruletNev() {
				return kozteruletNev;
			}

			public void setKozteruletNev(String kozteruletNev) {
				this.kozteruletNev = kozteruletNev;
			}

			public KozteruletTipus getKozteruletTipus() {
				return kozteruletTipus;
			}

			public void setKozteruletTipus(KozteruletTipus kozteruletTipus) {
				this.kozteruletTipus = kozteruletTipus;
			}

			public String getKozteruletSzam() {
				return kozteruletSzam;
			}

			public void setKozteruletSzam(String kozteruletSzam) {
				this.kozteruletSzam = kozteruletSzam;
			}

			public String getEpulet() {
				return epulet;
			}

			public void setEpulet(String epulet) {
				this.epulet = epulet;
			}

			public String getLepcsohaz() {
				return lepcsohaz;
			}

			public void setLepcsohaz(String lepcsohaz) {
				this.lepcsohaz = lepcsohaz;
			}

			public String getEmelet() {
				return emelet;
			}

			public void setEmelet(String emelet) {
				this.emelet = emelet;
			}

			public String getAjto() {
				return ajto;
			}

			public void setAjto(String ajto) {
				this.ajto = ajto;
			}
		}
	}

	/**
	 * Job classification: organizational unit, cost center, occupation codes
	 * (FEOR), employment type and daily/weekly working hours. Integer fields
	 * appear to be code-table references — TODO confirm against the backend.
	 */
	public static class MunkakoriBesorolasDTO {

		private Integer szervezetiEgyseg;        // organizational unit (code)
		private MunkakorJellege munkakorJellege; // nature of the job
		private Integer koltseghely;             // cost center (code)
		private Integer foglalkozasiViszony;     // employment relation type (code)
		private Integer foglalkoztatasJellege;   // nature of employment (code)
		// Note the unconventional casing: getter/setter are getfEOR/setfEOR.
		private Integer fEOR;                    // FEOR occupation code
		private Integer munkakor;                // job position (code)
		private boolean uzemanyagElszamolas;     // fuel-expense accounting flag
		private BigDecimal munkaidoNapi;         // daily working hours
		private BigDecimal munkaidoHeti;         // weekly working hours

		public Integer getSzervezetiEgyseg() {
			return szervezetiEgyseg;
		}

		public void setSzervezetiEgyseg(Integer szervezetiEgyseg) {
			this.szervezetiEgyseg = szervezetiEgyseg;
		}

		public MunkakorJellege getMunkakorJellege() {
			return munkakorJellege;
		}

		public void setMunkakorJellege(MunkakorJellege munkakorJellege) {
			this.munkakorJellege = munkakorJellege;
		}

		public Integer getKoltseghely() {
			return koltseghely;
		}

		public void setKoltseghely(Integer koltseghely) {
			this.koltseghely = koltseghely;
		}

		public Integer getFoglalkozasiViszony() {
			return foglalkozasiViszony;
		}

		public void setFoglalkozasiViszony(Integer foglalkozasiViszony) {
			this.foglalkozasiViszony = foglalkozasiViszony;
		}

		public Integer getFoglalkoztatasJellege() {
			return foglalkoztatasJellege;
		}

		public void setFoglalkoztatasJellege(Integer foglalkoztatasJellege) {
			this.foglalkoztatasJellege = foglalkoztatasJellege;
		}

		public Integer getfEOR() {
			return fEOR;
		}

		public void setfEOR(Integer fEOR) {
			this.fEOR = fEOR;
		}

		public Integer getMunkakor() {
			return munkakor;
		}

		public void setMunkakor(Integer munkakor) {
			this.munkakor = munkakor;
		}

		public boolean isUzemanyagElszamolas() {
			return uzemanyagElszamolas;
		}

		public void setUzemanyagElszamolas(boolean uzemanyagElszamolas) {
			this.uzemanyagElszamolas = uzemanyagElszamolas;
		}

		public BigDecimal getMunkaidoNapi() {
			return munkaidoNapi;
		}

		public void setMunkaidoNapi(BigDecimal munkaidoNapi) {
			this.munkaidoNapi = munkaidoNapi;
		}

		public BigDecimal getMunkaidoHeti() {
			return munkaidoHeti;
		}

		public void setMunkaidoHeti(BigDecimal munkaidoHeti) {
			this.munkaidoHeti = munkaidoHeti;
		}
	}

	/**
	 * Employment relationship: start/end dates, termination mode, probation end,
	 * contract expiry and headcount-change code.
	 */
	public static class JogviszonyAdatokDTO {

		private Date jogviszonyKezdete;  // employment start date
		private Date jogviszonyVege;     // employment end date
		private JogviszonyMegszunesenekModja jogviszonyMegszunesenekModja; // mode of termination
		private Date probaidoVege;       // end of probation period
		private Date munkaszerzodesLejar; // employment contract expiry
		private Allomanymod allomanymod; // headcount/status change code

		public Date getJogviszonyKezdete() {
			return jogviszonyKezdete;
		}

		public void setJogviszonyKezdete(Date jogviszonyKezdete) {
			this.jogviszonyKezdete = jogviszonyKezdete;
		}

		public Date getJogviszonyVege() {
			return jogviszonyVege;
		}

		public void setJogviszonyVege(Date jogviszonyVege) {
			this.jogviszonyVege = jogviszonyVege;
		}

		public JogviszonyMegszunesenekModja getJogviszonyMegszunesenekModja() {
			return jogviszonyMegszunesenekModja;
		}

		public void setJogviszonyMegszunesenekModja(JogviszonyMegszunesenekModja jogviszonyMegszunesenekModja) {
			this.jogviszonyMegszunesenekModja = jogviszonyMegszunesenekModja;
		}

		public Date getProbaidoVege() {
			return probaidoVege;
		}

		public void setProbaidoVege(Date probaidoVege) {
			this.probaidoVege = probaidoVege;
		}

		public Date getMunkaszerzodesLejar() {
			return munkaszerzodesLejar;
		}

		public void setMunkaszerzodesLejar(Date munkaszerzodesLejar) {
			this.munkaszerzodesLejar = munkaszerzodesLejar;
		}

		public Allomanymod getAllomanymod() {
			return allomanymod;
		}

		public void setAllomanymod(Allomanymod allomanymod) {
			this.allomanymod = allomanymod;
		}
	}

	/** Family section: currently only marital status. */
	public static class CsaladDTO {

		private CsaladiAllapot csaladiAllapot; // marital status

		public CsaladiAllapot getCsaladiAllapot() {
			return csaladiAllapot;
		}

		public void setCsaladiAllapot(CsaladiAllapot csaladiAllapot) {
			this.csaladiAllapot = csaladiAllapot;
		}
	}

	/** Medical examination: check frequency and date of the last examination. */
	public static class OrvosiVizsgalatDTO {

		private int gyakorisag; // examination frequency (unit not visible here — TODO confirm)
		private Date utolsoOrvosiVizsgalatIdopontja; // date of last medical examination

		public int getGyakorisag() {
			return gyakorisag;
		}

		public void setGyakorisag(int gyakorisag) {
			this.gyakorisag = gyakorisag;
		}

		public Date getUtolsoOrvosiVizsgalatIdopontja() {
			return utolsoOrvosiVizsgalatIdopontja;
		}

		public void setUtolsoOrvosiVizsgalatIdopontja(Date utolsoOrvosiVizsgalatIdopontja) {
			this.utolsoOrvosiVizsgalatIdopontja = utolsoOrvosiVizsgalatIdopontja;
		}
	}

	/** Leave tracking: currently only the amount of redeemed/paid-out leave. */
	public static class SzabadsagnyilvantartasDTO {

		private int megvaltottSzabadsag; // redeemed leave (days)

		public int getMegvaltottSzabadsag() {
			return megvaltottSzabadsag;
		}

		public void setMegvaltottSzabadsag(int megvaltottSzabadsag) {
			this.megvaltottSzabadsag = megvaltottSzabadsag;
		}
	}
}
package com.eucalyptus.tests.awssdk; import static com.eucalyptus.tests.awssdk.Eutester4j.assertThat; import static com.eucalyptus.tests.awssdk.Eutester4j.eucaUUID; import static com.eucalyptus.tests.awssdk.Eutester4j.initS3ClientWithNewAccount; import static com.eucalyptus.tests.awssdk.Eutester4j.print; import static com.eucalyptus.tests.awssdk.Eutester4j.testInfo; import static org.testng.AssertJUnit.assertTrue; import java.io.File; import java.io.FileInputStream; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; import java.util.List; import org.testng.annotations.AfterClass; import org.testng.annotations.AfterMethod; import org.testng.annotations.BeforeClass; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; import com.amazonaws.AmazonServiceException; import com.amazonaws.services.s3.AmazonS3; import com.amazonaws.services.s3.model.AccessControlList; import com.amazonaws.services.s3.model.Bucket; import com.amazonaws.services.s3.model.CannedAccessControlList; import com.amazonaws.services.s3.model.CanonicalGrantee; import com.amazonaws.services.s3.model.Grant; import com.amazonaws.services.s3.model.GroupGrantee; import com.amazonaws.services.s3.model.ObjectMetadata; import com.amazonaws.services.s3.model.Owner; import com.amazonaws.services.s3.model.Permission; import com.amazonaws.services.s3.model.PutObjectRequest; import com.amazonaws.services.s3.model.PutObjectResult; import com.amazonaws.util.BinaryUtils; import com.amazonaws.util.Md5Utils; /** * </p>Amazon S3 supports a set of predefined grants, known as canned ACLs. Each canned ACL has a predefined a set of grantees and permissions. This * class contains tests for creating buckets with canned ACLs. 
After a bucket is successfully created, the bucket ACL is fetched and verified against * the canned ACL definition.</p> * * @see <a href="http://docs.aws.amazon.com/AmazonS3/latest/dev/ACLOverview.html">S3 Access Control Lists</a> * @author Swathi Gangisetty * */ public class S3ObjectACLAcrossAccountsTests { private static String bucketName = null; private static String key = null; private static List<Runnable> cleanupTasks = null; private static final File fileToPut = new File("test.dat"); private static AmazonS3 s3ClientA = null; private static AmazonS3 s3ClientB = null; private static String accountA = null; private static String accountB = null; private static String ownerNameA = null; private static String ownerNameB = null; private static String ownerIdA = null; private static String ownerIdB = null; private static String md5_orig = null; @BeforeClass public void init() throws Exception { print("### PRE SUITE SETUP - " + this.getClass().getSimpleName()); try { accountA = this.getClass().getSimpleName().toLowerCase() + "a"; accountB = this.getClass().getSimpleName().toLowerCase() + "b"; s3ClientA = initS3ClientWithNewAccount(accountA, "admin"); s3ClientB = initS3ClientWithNewAccount(accountB, "admin"); } catch (Exception e) { try { teardown(); } catch (Exception ie) { } throw e; } Owner ownerA = s3ClientA.getS3AccountOwner(); Owner ownerB = s3ClientB.getS3AccountOwner(); ownerNameA = ownerA.getDisplayName(); ownerNameB = ownerB.getDisplayName(); ownerIdA = ownerA.getId(); ownerIdB = ownerB.getId(); md5_orig = BinaryUtils.toHex(Md5Utils.computeMD5Hash(new FileInputStream(fileToPut))); } @AfterClass public void teardown() throws Exception { print("### POST SUITE CLEANUP - " + this.getClass().getSimpleName()); Eutester4j.deleteAccount(accountA); Eutester4j.deleteAccount(accountB); s3ClientA = null; s3ClientB = null; } @BeforeMethod public void setup() throws Exception { print("Initializing bucket name, key name and clean up tasks"); bucketName = eucaUUID(); key = 
eucaUUID(); cleanupTasks = new ArrayList<Runnable>(); } @AfterMethod public void cleanup() throws Exception { Collections.reverse(cleanupTasks); for (final Runnable cleanupTask : cleanupTasks) { try { cleanupTask.run(); } catch (Exception e) { print("Unable to run clean up task: " + e); } } } /** * <p> * Test for <code>public-read-write</code> canned ACL on bucket and <code>bucket-owner-full-control</code> canned ACL on object * </p> * * <p> * S3: Initially, the bucket owner has FULL_CONTROL permission on both the bucket and the object. Object owner has FULL_CONTROL permission on the * object. As the bucket owner, change the canned ACL on the object to <code>private</code>. This reduces the number of grants on the object to 1. * Bucket owner continues to have FULL_CONTROL permission on the object, object owner has no listed permissions. However the object owner seems to * have READ_ACP permission since he/she can get the ACL for the object. Object owner cannot get the object i.e. object owner does not have READ * permission on the object * </p> * * <p> * Walrus: Initially, the bucket owner has FULL_CONTROL permission on both the bucket and the object. Though not listed in the object ACL, object * owner has FULL_CONTROL permission on the object. As the bucket owner, change the canned ACL on the object to <code>private</code>. This reduces * the number of grants on the object to 1. Object owner continues to have FULL_CONTROL permission on the object, and is listed in the object ACL. 
 * Bucket owner has no listed permissions in the ACL and does not seem to have READ_ACP, WRITE_ACP or READ permissions
 * </p>
 *
 * @see <a href="https://eucalyptus.atlassian.net/browse/EUCA-7712">EUCA-7712</a>
 */
// NOTE(review): test deliberately disabled (@Test commented out) — see EUCA-7712 above.
// @Test
public void bucket_PublicReadWrite_object_BucketOwnerFullControl_1() throws Exception {
  testInfo(this.getClass().getSimpleName() + " - bucket_PublicReadWrite_object_BucketOwnerFullControl_1");
  try {
    /* Create bucket with Canned ACL PublicReadWrite as account A admin */
    createBucket(s3ClientA, ownerNameA, bucketName, CannedAccessControlList.PublicReadWrite, ownerIdA);
    /* Put object with Canned ACL BucketOwnerFullControl as account B admin */
    putObjectWithCannedACL(s3ClientB, ownerNameB, bucketName, key, CannedAccessControlList.BucketOwnerFullControl);
    /* Get object ACL as account B admin */
    print(ownerNameB + ": Getting ACL for object " + key);
    AccessControlList objectACL = s3ClientB.getObjectAcl(bucketName, key);
    // Expect exactly two grants: object owner (B) and bucket owner (A), both FULL_CONTROL
    assertTrue("Mismatch in number of ACLs associated with the object. Expected 2 but got " + objectACL.getGrants().size(), objectACL.getGrants()
        .size() == 2);
    Iterator<Grant> iterator = objectACL.getGrants().iterator();
    while (iterator.hasNext()) {
      Grant grant = iterator.next();
      assertTrue("Grantee is not of type CanonicalGrantee", grant.getGrantee() instanceof CanonicalGrantee);
      assertTrue(
          "Expected grantee to be object owner " + objectACL.getOwner().getId() + " or bucket owner " + s3ClientA.getS3AccountOwner().getId()
              + ", but found " + grant.getGrantee().getIdentifier(),
          grant.getGrantee().getIdentifier().equals(objectACL.getOwner().getId())
              || grant.getGrantee().getIdentifier().equals(s3ClientA.getS3AccountOwner().getId()));
      assertTrue("Expected object/bucket owner to have " + Permission.FullControl + " privilege, but found " + grant.getPermission(), grant
          .getPermission().equals(Permission.FullControl));
    }
    /* Verify that account A admin has READ, READ_ACP and WRITE_ACP permissions */
    assertTrue("Expected bucket owner " + ownerNameA + " to have READ permission over the object", canReadObject(ownerNameA, s3ClientA, bucketName, key));
    assertTrue("Expected bucket owner " + ownerNameA + " to have READ_ACP permission over the object", canReadObjectACP(ownerNameA, s3ClientA, bucketName, key));
    assertTrue("Expected bucket owner " + ownerNameA + " to have WRITE_ACP permission over the object", canWriteObjectACP(ownerNameA, s3ClientA, bucketName, key));
    /* Verify that account B admin has READ, READ_ACP and WRITE_ACP permissions */
    assertTrue("Expected object owner " + ownerNameB + " to have READ permission over the object", canReadObject(ownerNameB, s3ClientB, bucketName, key));
    assertTrue("Expected object owner " + ownerNameB + " to have READ_ACP permission over the object", canReadObjectACP(ownerNameB, s3ClientB, bucketName, key));
    assertTrue("Expected object owner " + ownerNameB + " to have WRITE_ACP permission over the object", canWriteObjectACP(ownerNameB, s3ClientB, bucketName, key));
    /* Set canned ACL Private for object as account A admin */
    print(ownerNameA + ": Setting canned ACL " + CannedAccessControlList.Private + " for object " + key);
    s3ClientA.setObjectAcl(bucketName, key, CannedAccessControlList.Private);
    /* Verify that account A admin has READ, READ_ACP and WRITE_ACP permissions */
    assertTrue("Expected bucket owner " + ownerNameA + " to have READ permission over the object", canReadObject(ownerNameA, s3ClientA, bucketName, key));
    assertTrue("Expected bucket owner " + ownerNameA + " to have READ_ACP permission over the object", canReadObjectACP(ownerNameA, s3ClientA, bucketName, key));
    assertTrue("Expected bucket owner " + ownerNameA + " to have WRITE_ACP permission over the object", canWriteObjectACP(ownerNameA, s3ClientA, bucketName, key));
    /* Verify that account B admin has READ_ACP permission */
    assertTrue("Expected object owner " + ownerNameB + " to have READ_ACP permission over the object", canReadObjectACP(ownerNameB, s3ClientB, bucketName, key));
    /* Verify that account B admin does not have READ or WRITE_ACP permissions */
    assertTrue("Expected object owner " + ownerNameB + " to not have READ permission over the object", !canReadObject(ownerNameB, s3ClientB, bucketName, key));
    assertTrue("Expected object owner " + ownerNameB + " to not have WRITE_ACP permission over the object", !canWriteObjectACP(ownerNameB, s3ClientB, bucketName, key));
  } catch (AmazonServiceException ase) {
    printException(ase);
    assertThat(false, "Failed to run bucket_PublicReadWrite_object_BucketOwnerFullControl_1");
  }
}

/**
 * <p>
 * Test for <code>public-read-write</code> canned ACL on bucket and <code>bucket-owner-full-control</code> canned ACL on object
 * </p>
 *
 * <p>
 * S3: Initially, the bucket owner has FULL_CONTROL permission on both the bucket and the object. Object owner has FULL_CONTROL permission on the
 * object. As the object owner, change the canned ACL on the object to <code>private</code>. This reduces the number of grants on the object to 1.
 * Object owner continues to have FULL_CONTROL permission on the object, bucket owner has no listed permissions. Bucket owner cannot get the object
 * or the ACL for the object i.e. bucket owner does not have READ and READ_ACP permissions on the object
 * </p>
 *
 * @see <a href="https://eucalyptus.atlassian.net/browse/EUCA-7712">EUCA-7712</a>
 */
@Test
public void bucket_PublicReadWrite_object_BucketOwnerFullControl_2() throws Exception {
  testInfo(this.getClass().getSimpleName() + " - bucket_PublicReadWrite_object_BucketOwnerFullControl_2");
  try {
    /* Create bucket with Canned ACL PublicReadWrite as account A admin */
    createBucket(s3ClientA, ownerNameA, bucketName, CannedAccessControlList.PublicReadWrite, ownerIdA);
    /* Put object with Canned ACL BucketOwnerFullControl as account B admin */
    putObjectWithCannedACL(s3ClientB, ownerNameB, bucketName, key, CannedAccessControlList.BucketOwnerFullControl);
    /* Get object ACL as account B admin */
    print(ownerNameB + ": Getting ACL for object " + key);
    AccessControlList objectACL = s3ClientB.getObjectAcl(bucketName, key);
    // Expect exactly two grants: object owner (B) and bucket owner (A), both FULL_CONTROL
    assertTrue("Mismatch in number of ACLs associated with the object. Expected 2 but got " + objectACL.getGrants().size(), objectACL.getGrants()
        .size() == 2);
    Iterator<Grant> iterator = objectACL.getGrants().iterator();
    while (iterator.hasNext()) {
      Grant grant = iterator.next();
      assertTrue("Grantee is not of type CanonicalGrantee", grant.getGrantee() instanceof CanonicalGrantee);
      assertTrue(
          "Expected grantee to be object owner " + objectACL.getOwner().getId() + " or bucket owner " + s3ClientA.getS3AccountOwner().getId()
              + ", but found " + grant.getGrantee().getIdentifier(),
          grant.getGrantee().getIdentifier().equals(objectACL.getOwner().getId())
              || grant.getGrantee().getIdentifier().equals(s3ClientA.getS3AccountOwner().getId()));
      assertTrue("Expected object/bucket owner to have " + Permission.FullControl + " privilege, but found " + grant.getPermission(), grant
          .getPermission().equals(Permission.FullControl));
    }
    /* Verify that account A admin has READ, READ_ACP and WRITE_ACP permissions */
    assertTrue("Expected bucket owner " + ownerNameA + " to have READ permission over the object", canReadObject(ownerNameA, s3ClientA, bucketName, key));
    assertTrue("Expected bucket owner " + ownerNameA + " to have READ_ACP permission over the object", canReadObjectACP(ownerNameA, s3ClientA, bucketName, key));
    assertTrue("Expected bucket owner " + ownerNameA + " to have WRITE_ACP permission over the object", canWriteObjectACP(ownerNameA, s3ClientA, bucketName, key));
    /* Verify that account B admin has READ, READ_ACP and WRITE_ACP permissions */
    assertTrue("Expected object owner " + ownerNameB + " to have READ permission over the object", canReadObject(ownerNameB, s3ClientB, bucketName, key));
    assertTrue("Expected object owner " + ownerNameB + " to have READ_ACP permission over the object", canReadObjectACP(ownerNameB, s3ClientB, bucketName, key));
    assertTrue("Expected object owner " + ownerNameB + " to have WRITE_ACP permission over the object", canWriteObjectACP(ownerNameB, s3ClientB, bucketName, key));
    /* Set canned ACL Private for object as account B admin */
    print(ownerNameB + ": Setting canned ACL " + CannedAccessControlList.Private + " for object " + key);
    s3ClientB.setObjectAcl(bucketName, key, CannedAccessControlList.Private);
    /* Verify that account A admin does not have READ, READ_ACP and WRITE_ACP permissions */
    assertTrue("Expected bucket owner " + ownerNameA + " to not have READ permission over the object", !canReadObject(ownerNameA, s3ClientA, bucketName, key));
    assertTrue("Expected bucket owner " + ownerNameA + " to not have READ_ACP permission over the object", !canReadObjectACP(ownerNameA, s3ClientA, bucketName, key));
    assertTrue("Expected bucket owner " + ownerNameA + " to not have WRITE_ACP permission over the object", !canWriteObjectACP(ownerNameA, s3ClientA, bucketName, key));
    /* Verify that account B admin has READ, READ_ACP and WRITE_ACP permissions */
    assertTrue("Expected object owner " + ownerNameB + " to have READ permission over the object", canReadObject(ownerNameB, s3ClientB, bucketName, key));
    assertTrue("Expected object owner " + ownerNameB + " to have READ_ACP permission over the object", canReadObjectACP(ownerNameB, s3ClientB, bucketName, key));
    assertTrue("Expected object owner " + ownerNameB + " to have WRITE_ACP permission over the object", canWriteObjectACP(ownerNameB, s3ClientB, bucketName, key));
  } catch (AmazonServiceException ase) {
    printException(ase);
    assertThat(false, "Failed to run bucket_PublicReadWrite_object_BucketOwnerFullControl_2");
  }
}

/**
 * <p>
 * Test for <code>public-read-write</code> canned ACL on bucket and <code>bucket-owner-read</code> canned ACL on object
 * </p>
 *
 * @see <a href="https://eucalyptus.atlassian.net/browse/EUCA-7724">EUCA-7724</a>
 */
@Test
public void bucket_PublicReadWrite_object_BucketOwnerRead() throws Exception {
  testInfo(this.getClass().getSimpleName() + " - bucket_PublicReadWrite_object_BucketOwnerRead");
  try {
    /* Create bucket with Canned ACL PublicReadWrite as account A admin */
    createBucket(s3ClientA, ownerNameA, bucketName,
        CannedAccessControlList.PublicReadWrite, ownerIdA);
    /* Put object with Canned ACL BucketOwnerRead as account B admin */
    putObjectWithCannedACL(s3ClientB, ownerNameB, bucketName, key, CannedAccessControlList.BucketOwnerRead);
    /* Get object ACL as account B admin */
    print(ownerNameB + ": Getting ACL for object " + key);
    AccessControlList objectACL = s3ClientB.getObjectAcl(bucketName, key);
    assertTrue("Mismatch in number of ACLs associated with the object. Expected 2 but got " + objectACL.getGrants().size(), objectACL.getGrants()
        .size() == 2);
    Iterator<Grant> iterator = objectACL.getGrants().iterator();
    while (iterator.hasNext()) {
      Grant grant = iterator.next();
      assertTrue("Grantee is not of type CanonicalGrantee", grant.getGrantee() instanceof CanonicalGrantee);
      assertTrue(
          "Expected grantee to be object owner " + objectACL.getOwner().getId() + " or bucket owner " + s3ClientA.getS3AccountOwner().getId()
              + ", but found " + grant.getGrantee().getIdentifier(),
          grant.getGrantee().getIdentifier().equals(objectACL.getOwner().getId())
              || grant.getGrantee().getIdentifier().equals(s3ClientA.getS3AccountOwner().getId()));
      // bucket-owner-read: object owner keeps FULL_CONTROL, bucket owner is granted READ
      if (grant.getGrantee().getIdentifier().equals(objectACL.getOwner().getId())) {
        assertTrue("Expected object owner to have " + Permission.FullControl.toString() + " privileges, but found " + grant.getPermission(), grant
            .getPermission().equals(Permission.FullControl));
      } else {
        assertTrue("Expected bucket owner to have " + Permission.Read.toString() + " privileges, but found " + grant.getPermission(), grant
            .getPermission().equals(Permission.Read));
      }
    }
    /* Verify that account A admin has READ permission */
    assertTrue("Expected bucket owner " + ownerNameA + " to have READ permission over the object", canReadObject(ownerNameA, s3ClientA, bucketName, key));
    /* Verify that account A admin does not have READ_ACP and WRITE_ACP permissions */
    assertTrue("Expected bucket owner " + ownerNameA + " to not have READ_ACP permission over the object", !canReadObjectACP(ownerNameA, s3ClientA, bucketName, key));
    assertTrue("Expected bucket owner " + ownerNameA + " to not have WRITE_ACP permission over the object", !canWriteObjectACP(ownerNameA, s3ClientA, bucketName, key));
    /* Verify that account B admin has READ, READ_ACP and WRITE_ACP permissions */
    assertTrue("Expected object owner " + ownerNameB + " to have READ permission over the object", canReadObject(ownerNameB, s3ClientB, bucketName, key));
    assertTrue("Expected object owner " + ownerNameB + " to have READ_ACP permission over the object", canReadObjectACP(ownerNameB, s3ClientB, bucketName, key));
    assertTrue("Expected object owner " + ownerNameB + " to have WRITE_ACP permission over the object", canWriteObjectACP(ownerNameB, s3ClientB, bucketName, key));
  } catch (AmazonServiceException ase) {
    printException(ase);
    assertThat(false, "Failed to run bucket_PublicReadWrite_object_BucketOwnerRead");
  }
}

/**
 * <p>
 * Test for <code>public-read-write</code> canned ACL on bucket and <code>authenticated-read</code> canned ACL on object
 * </p>
 *
 * @see <a href="https://eucalyptus.atlassian.net/browse/EUCA-7728">EUCA-7728</a>
 */
@Test
public void bucket_PublicReadWrite_object_AuthenticatedRead() throws Exception {
  testInfo(this.getClass().getSimpleName() + " - bucket_PublicReadWrite_object_AuthenticatedRead");
  try {
    /* Create bucket with Canned ACL PublicReadWrite as account A admin */
    createBucket(s3ClientA, ownerNameA, bucketName, CannedAccessControlList.PublicReadWrite, ownerIdA);
    /* Put object with Canned ACL AuthenticatedRead as account B admin */
    putObjectWithCannedACL(s3ClientB, ownerNameB, bucketName, key, CannedAccessControlList.AuthenticatedRead);
    /* Get object ACL as account B admin */
    print(ownerNameB + ": Getting ACL for object " + key);
    AccessControlList objectACL = s3ClientB.getObjectAcl(bucketName, key);
    assertTrue("Mismatch in number of ACLs associated with the object. 
Expected 2 but got " + objectACL.getGrants().size(), objectACL.getGrants()
        .size() == 2);
    Iterator<Grant> iterator = objectACL.getGrants().iterator();
    while (iterator.hasNext()) {
      Grant grant = iterator.next();
      // Expected grants: object owner (canonical, FULL_CONTROL) and AuthenticatedUsers group (READ)
      if (grant.getGrantee() instanceof CanonicalGrantee) {
        assertTrue("Expected grantee to be object owner " + objectACL.getOwner().getId() + ", but found " + grant.getGrantee().getIdentifier(),
            grant.getGrantee().getIdentifier().equals(objectACL.getOwner().getId()));
        assertTrue("Expected object owner to have " + Permission.FullControl.toString() + " privileges, but found " + grant.getPermission(), grant
            .getPermission().equals(Permission.FullControl));
      } else {
        assertTrue("Grantee of type GroupGrantee not found", grant.getGrantee() instanceof GroupGrantee);
        assertTrue("Expected grantee to be " + GroupGrantee.AuthenticatedUsers + ", but found " + ((GroupGrantee) grant.getGrantee()),
            ((GroupGrantee) grant.getGrantee()).equals(GroupGrantee.AuthenticatedUsers));
        assertTrue(
            "Expected " + GroupGrantee.AuthenticatedUsers + " to have " + Permission.Read.toString() + " privilege, but found " + grant.getPermission(),
            grant.getPermission().equals(Permission.Read));
      }
    }
    /* Verify that account A admin has READ permission */
    assertTrue("Expected bucket owner " + ownerNameA + " to have READ permission over the object", canReadObject(ownerNameA, s3ClientA, bucketName, key));
    /* Verify that account A admin does not have READ_ACP and WRITE_ACP permissions */
    assertTrue("Expected bucket owner " + ownerNameA + " to not have READ_ACP permission over the object", !canReadObjectACP(ownerNameA, s3ClientA, bucketName, key));
    assertTrue("Expected bucket owner " + ownerNameA + " to not have WRITE_ACP permission over the object", !canWriteObjectACP(ownerNameA, s3ClientA, bucketName, key));
    /* Verify that account B admin has READ, READ_ACP and WRITE_ACP permissions */
    assertTrue("Expected object owner " + ownerNameB + " to have READ permission over the object", canReadObject(ownerNameB, s3ClientB, bucketName, key));
    assertTrue("Expected object owner " + ownerNameB + " to have READ_ACP permission over the object", canReadObjectACP(ownerNameB, s3ClientB, bucketName, key));
    assertTrue("Expected object owner " + ownerNameB + " to have WRITE_ACP permission over the object", canWriteObjectACP(ownerNameB, s3ClientB, bucketName, key));
  } catch (AmazonServiceException ase) {
    printException(ase);
    assertThat(false, "Failed to run bucket_PublicReadWrite_object_AuthenticatedRead");
  }
}

/**
 * <p>
 * Test for <code>public-read-write</code> canned ACL on bucket and <code>public-read</code> canned ACL on object
 * </p>
 *
 * bug only in case of eucalyptus admin account
 */
@Test
public void bucket_PublicReadWrite_object_PublicRead() throws Exception {
  testInfo(this.getClass().getSimpleName() + " - bucket_PublicReadWrite_object_PublicRead");
  try {
    /* Create bucket with Canned ACL PublicReadWrite as account A admin */
    createBucket(s3ClientA, ownerNameA, bucketName, CannedAccessControlList.PublicReadWrite, ownerIdA);
    /* Put object with Canned ACL PublicRead as account B admin */
    putObjectWithCannedACL(s3ClientB, ownerNameB, bucketName, key, CannedAccessControlList.PublicRead);
    /* Get object ACL as account B admin */
    print(ownerNameB + ": Getting ACL for object " + key);
    AccessControlList objectACL = s3ClientB.getObjectAcl(bucketName, key);
    assertTrue("Mismatch in number of ACLs associated with the object. 
Expected 2 but got " + objectACL.getGrants().size(), objectACL.getGrants()
        .size() == 2);
    Iterator<Grant> iterator = objectACL.getGrants().iterator();
    while (iterator.hasNext()) {
      Grant grant = iterator.next();
      // Expected grants: object owner (canonical, FULL_CONTROL) and AllUsers group (READ)
      if (grant.getGrantee() instanceof CanonicalGrantee) {
        assertTrue("Expected grantee to be object owner " + objectACL.getOwner().getId() + ", but found " + grant.getGrantee().getIdentifier(),
            grant.getGrantee().getIdentifier().equals(objectACL.getOwner().getId()));
        assertTrue("Expected object owner to have " + Permission.FullControl + " privilege, but found " + grant.getPermission(), grant
            .getPermission().equals(Permission.FullControl));
      } else {
        assertTrue("Grantee of type GroupGrantee not found", grant.getGrantee() instanceof GroupGrantee);
        assertTrue("Expected grantee to be " + GroupGrantee.AllUsers + ", but found " + ((GroupGrantee) grant.getGrantee()),
            ((GroupGrantee) grant.getGrantee()).equals(GroupGrantee.AllUsers));
        assertTrue(
            "Expected " + GroupGrantee.AllUsers + " to have " + Permission.Read.toString() + " privilege, but found " + grant.getPermission(),
            grant.getPermission().equals(Permission.Read));
      }
    }
    /* Verify that account A admin has READ permission */
    assertTrue("Expected bucket owner " + ownerNameA + " to have READ permission over the object", canReadObject(ownerNameA, s3ClientA, bucketName, key));
    /* Verify that account A admin does not have READ_ACP and WRITE_ACP permissions */
    assertTrue("Expected bucket owner " + ownerNameA + " to not have READ_ACP permission over the object", !canReadObjectACP(ownerNameA, s3ClientA, bucketName, key));
    assertTrue("Expected bucket owner " + ownerNameA + " to not have WRITE_ACP permission over the object", !canWriteObjectACP(ownerNameA, s3ClientA, bucketName, key));
    /* Verify that account B admin has READ, READ_ACP and WRITE_ACP permissions */
    assertTrue("Expected object owner " + ownerNameB + " to have READ permission over the object", canReadObject(ownerNameB, s3ClientB, bucketName, key));
    assertTrue("Expected object owner " + ownerNameB + " to have READ_ACP permission over the object", canReadObjectACP(ownerNameB, s3ClientB, bucketName, key));
    assertTrue("Expected object owner " + ownerNameB + " to have WRITE_ACP permission over the object", canWriteObjectACP(ownerNameB, s3ClientB, bucketName, key));
  } catch (AmazonServiceException ase) {
    printException(ase);
    assertThat(false, "Failed to run bucket_PublicReadWrite_object_PublicRead");
  }
}

/**
 * <p>
 * Test for <code>public-read-write</code> canned ACL on bucket and <code>public-read-write</code> canned ACL on object
 * </p>
 *
 * bug only in case of eucalyptus admin account
 */
@Test
public void bucket_PublicReadWrite_object_PublicReadWrite() throws Exception {
  testInfo(this.getClass().getSimpleName() + " - bucket_PublicReadWrite_object_PublicReadWrite");
  try {
    /* Create bucket with Canned ACL PublicReadWrite as account A admin */
    createBucket(s3ClientA, ownerNameA, bucketName, CannedAccessControlList.PublicReadWrite, ownerIdA);
    /* Put object with Canned ACL PublicReadWrite as account B admin */
    putObjectWithCannedACL(s3ClientB, ownerNameB, bucketName, key, CannedAccessControlList.PublicReadWrite);
    /* Get object ACL as account B admin */
    print(ownerNameB + ": Getting ACL for object " + key);
    AccessControlList objectACL = s3ClientB.getObjectAcl(bucketName, key);
    assertTrue("Mismatch in number of ACLs associated with the object. 
Expected 3 but got " + objectACL.getGrants().size(), objectACL.getGrants() .size() == 3); Iterator<Grant> iterator = objectACL.getGrants().iterator(); while (iterator.hasNext()) { Grant grant = iterator.next(); if (grant.getGrantee() instanceof CanonicalGrantee) { assertTrue("Expected grantee to be object owner " + objectACL.getOwner().getId() + ", but found " + grant.getGrantee().getIdentifier(), grant.getGrantee().getIdentifier().equals(objectACL.getOwner().getId())); assertTrue("Expected object owner to have " + Permission.FullControl + " privilege, but found " + grant.getPermission(), grant .getPermission().equals(Permission.FullControl)); } else { assertTrue("Grantee of type GroupGrantee not found", grant.getGrantee() instanceof GroupGrantee); assertTrue("Expected grantee to be " + GroupGrantee.AllUsers + ", but found " + ((GroupGrantee) grant.getGrantee()), ((GroupGrantee) grant.getGrantee()).equals(GroupGrantee.AllUsers)); assertTrue("Expected " + GroupGrantee.AllUsers + " to have " + Permission.Read.toString() + " or " + Permission.Write.toString() + " privileges, but found " + grant.getPermission(), grant.getPermission().equals(Permission.Read) || grant.getPermission().equals(Permission.Write)); } } /* Verify that account A admin has READ permission */ assertTrue("Expected bucket owner " + ownerNameA + " to have READ permission over the object", canReadObject(ownerNameA, s3ClientA, bucketName, key)); /* Verify that account A admin does not have READ_ACP and WRITE_ACP permissions */ assertTrue("Expected bucket owner " + ownerNameA + " to not have READ_ACP permission over the object", !canReadObjectACP(ownerNameA, s3ClientA, bucketName, key)); assertTrue("Expected bucket owner " + ownerNameA + " to not have WRITE_ACP permission over the object", !canWriteObjectACP(ownerNameA, s3ClientA, bucketName, key)); /* Verify that account B admin has READ, READ_ACP and WRITE_ACP permissions */ assertTrue("Expected object owner " + ownerNameB + " to have READ permission 
over the object", canReadObject(ownerNameB, s3ClientB, bucketName, key)); assertTrue("Expected object owner " + ownerNameB + " to have READ_ACP permission over the object", canReadObjectACP(ownerNameB, s3ClientB, bucketName, key)); assertTrue("Expected object owner " + ownerNameB + " to have WRITE_ACP permission over the object", canWriteObjectACP(ownerNameB, s3ClientB, bucketName, key)); } catch (AmazonServiceException ase) { printException(ase); assertThat(false, "Failed to run bucket_PublicReadWrite_object_PublicReadWrite"); } } /** * <p> * Test for <code>public-read-write</code> canned ACL on bucket and <code>private</code> canned ACL on object * </p> * * bug only in case of eucalyptus admin account */ @Test public void bucket_PublicReadWrite_object_Private() throws Exception { testInfo(this.getClass().getSimpleName() + " - bucket_PublicReadWrite_object_PublicReadWrite"); try { /* Create bucket with Canned ACL PublicReadWrite as account A admin */ createBucket(s3ClientA, ownerNameA, bucketName, CannedAccessControlList.PublicReadWrite, ownerIdA); /* Put object with Canned ACL BucketOwnerFullControl as account B admin */ putObjectWithCannedACL(s3ClientB, ownerNameB, bucketName, key, CannedAccessControlList.Private); /* Get object ACL as account B admin */ print(ownerNameB + ": Getting ACL for object " + key); AccessControlList objectACL = s3ClientB.getObjectAcl(bucketName, key); assertTrue("Mismatch in number of ACLs associated with the object. 
Expected 1 but got " + objectACL.getGrants().size(), objectACL.getGrants() .size() == 1); Iterator<Grant> iterator = objectACL.getGrants().iterator(); while (iterator.hasNext()) { Grant grant = iterator.next(); assertTrue("Grantee is not of type CanonicalGrantee", grant.getGrantee() instanceof CanonicalGrantee); assertTrue("Expected grantee to be object owner " + objectACL.getOwner().getId() + ", but found " + grant.getGrantee().getIdentifier(), grant .getGrantee().getIdentifier().equals(objectACL.getOwner().getId())); assertTrue("Expected object owner to have " + Permission.FullControl + " privilege, but found " + grant.getPermission(), grant .getPermission().equals(Permission.FullControl)); } /* Verify that account A admin does not have READ, READ_ACP and WRITE_ACP permissions */ assertTrue("Expected bucket owner " + ownerNameA + " to not have READ permission over the object", !canReadObject(ownerNameA, s3ClientA, bucketName, key)); assertTrue("Expected bucket owner " + ownerNameA + " to not have READ_ACP permission over the object", !canReadObjectACP(ownerNameA, s3ClientA, bucketName, key)); assertTrue("Expected bucket owner " + ownerNameA + " to not have WRITE_ACP permission over the object", !canWriteObjectACP(ownerNameA, s3ClientA, bucketName, key)); /* Verify that account B admin has READ, READ_ACP and WRITE_ACP permissions */ assertTrue("Expected object owner " + ownerNameB + " to have READ permission over the object", canReadObject(ownerNameB, s3ClientB, bucketName, key)); assertTrue("Expected object owner " + ownerNameB + " to have READ_ACP permission over the object", canReadObjectACP(ownerNameB, s3ClientB, bucketName, key)); assertTrue("Expected object owner " + ownerNameB + " to have WRITE_ACP permission over the object", canWriteObjectACP(ownerNameB, s3ClientB, bucketName, key)); } catch (AmazonServiceException ase) { printException(ase); assertThat(false, "Failed to run bucket_PublicReadWrite_object_PublicReadWrite"); } } @Test public void 
ACL_Headers() throws Exception { testInfo(this.getClass().getSimpleName() + " - ACL_Headers"); try { /* Create bucket as account B admin with read-write permission for account A */ AccessControlList acl = new AccessControlList(); acl.getGrants().add(new Grant(new CanonicalGrantee(ownerIdA), Permission.Read)); acl.getGrants().add(new Grant(new CanonicalGrantee(ownerIdA), Permission.Write)); acl.getGrants().add(new Grant(new CanonicalGrantee(ownerIdB), Permission.FullControl)); createBucket(s3ClientB, ownerNameB, bucketName, acl, ownerIdB); /* Put object with as account A admin */ acl = new AccessControlList(); acl.getGrants().add(new Grant(GroupGrantee.LogDelivery, Permission.ReadAcp)); acl.getGrants().add(new Grant(GroupGrantee.AuthenticatedUsers, Permission.Read)); acl.getGrants().add(new Grant(new CanonicalGrantee(ownerIdB), Permission.Read)); acl.getGrants().add(new Grant(new CanonicalGrantee(ownerIdA), Permission.FullControl)); putObjectWithACL(s3ClientA, ownerNameA, bucketName, key, acl); /* Verify object ACLs */ S3Utils.verifyObjectACL(s3ClientA, accountA, bucketName, key, acl, ownerIdA); /* Verify that account B admin has READ permission */ assertTrue("Expected object owner " + ownerNameB + " to have READ permission over the object", canReadObject(ownerNameB, s3ClientB, bucketName, key)); /* Verify that account A admin has READ, READ_ACP and WRITE_ACP permissions */ assertTrue("Expected bucket owner " + ownerNameA + " to not have READ permission over the object", canReadObject(ownerNameA, s3ClientA, bucketName, key)); assertTrue("Expected bucket owner " + ownerNameA + " to not have READ_ACP permission over the object", canReadObjectACP(ownerNameA, s3ClientA, bucketName, key)); assertTrue("Expected bucket owner " + ownerNameA + " to not have WRITE_ACP permission over the object", canWriteObjectACP(ownerNameA, s3ClientA, bucketName, key)); } catch (AmazonServiceException ase) { printException(ase); assertThat(false, "Failed to run ACL_Headers"); } } private 
void printException(AmazonServiceException ase) { ase.printStackTrace(); print("Caught Exception: " + ase.getMessage()); print("HTTP Status Code: " + ase.getStatusCode()); print("Amazon Error Code: " + ase.getErrorCode()); } private void createBucket(final AmazonS3 s3, final String accountName, final String bucketName, CannedAccessControlList cannedACL, String bucketOwnerId) throws Exception { Bucket bucket = S3Utils.createBucketWithCannedACL(s3, accountName, bucketName, cannedACL, S3Utils.BUCKET_CREATION_RETRIES); cleanupTasks.add(new Runnable() { @Override public void run() { print(accountName + ": Deleting bucket " + bucketName); s3.deleteBucket(bucketName); } }); assertTrue("Invalid reference to bucket", bucket != null); assertTrue("Mismatch in bucket names. Expected bucket name to be " + bucketName + ", but got " + bucket.getName(), bucketName.equals(bucket.getName())); S3Utils.verifyBucketACL(s3, accountName, bucketName, cannedACL, bucketOwnerId); } private void createBucket(final AmazonS3 s3, final String accountName, final String bucketName, AccessControlList acl, String bucketOwnerId) throws Exception { Bucket bucket = S3Utils.createBucketWithACL(s3, accountName, bucketName, acl, S3Utils.BUCKET_CREATION_RETRIES); cleanupTasks.add(new Runnable() { @Override public void run() { print(accountName + ": Deleting bucket " + bucketName); s3.deleteBucket(bucketName); } }); assertTrue("Invalid reference to bucket", bucket != null); assertTrue("Mismatch in bucket names. 
Expected bucket name to be " + bucketName + ", but got " + bucket.getName(), bucketName.equals(bucket.getName())); S3Utils.verifyBucketACL(s3, accountName, bucketName, acl, bucketOwnerId); } private void putObjectWithCannedACL(final AmazonS3 s3, final String accountName, final String bucketName, final String key, CannedAccessControlList cannedACL) throws Exception { print(accountName + ": Putting object " + key + " with canned ACL " + cannedACL + " in bucket " + bucketName); PutObjectResult putObj = s3.putObject(new PutObjectRequest(bucketName, key, fileToPut).withCannedAcl(cannedACL)); cleanupTasks.add(new Runnable() { @Override public void run() { print(accountName + ": Deleting object " + key + " from bucket " + bucketName); s3.deleteObject(bucketName, key); } }); assertTrue("Invalid put object result", putObj != null); assertTrue("Mimatch in md5sums between original object and PUT result. Expected " + md5_orig + ", but got " + putObj.getETag(), putObj.getETag() != null && putObj.getETag().equals(md5_orig)); } private void putObjectWithACL(final AmazonS3 s3, final String accountName, final String bucketName, final String key, AccessControlList acl) throws Exception { print(accountName + ": Putting object " + key + " with " + acl + " in bucket " + bucketName); PutObjectResult putObj = s3.putObject(new PutObjectRequest(bucketName, key, fileToPut).withAccessControlList(acl)); cleanupTasks.add(new Runnable() { @Override public void run() { print(accountName + ": Deleting object " + key + " from bucket " + bucketName); s3.deleteObject(bucketName, key); } }); assertTrue("Invalid put object result", putObj != null); assertTrue("Mimatch in md5sums between original object and PUT result. 
Expected " + md5_orig + ", but got " + putObj.getETag(), putObj.getETag() != null && putObj.getETag().equals(md5_orig)); } private boolean canReadObject(String accountName, AmazonS3 s3, String bucketName, String key) { boolean canDo = false; try { print(accountName + ": Getting object metadata for " + key + " from bucket " + bucketName); ObjectMetadata metadata = s3.getObjectMetadata(bucketName, key); assertTrue("Invalid metadata for object " + key, metadata != null); canDo = true; } catch (AmazonServiceException ex) { assertTrue("Expected status code to be 403, but got " + ex.getStatusCode(), ex.getStatusCode() == 403); print(accountName + ": Not authorized to READ on " + key); } return canDo; } private boolean canReadObjectACP(String accountName, AmazonS3 s3, String bucketName, String key) { boolean canDo = false; try { print(accountName + ": Getting ACL for object " + key); AccessControlList acl = s3.getObjectAcl(bucketName, key); assertTrue("Invalid ACL for object " + key, acl != null); canDo = true; } catch (AmazonServiceException ex) { assertTrue("Expected status code to be 403, but got " + ex.getStatusCode(), ex.getStatusCode() == 403); print(accountName + ": Not authorized to READ_ACP on " + key); } return canDo; } private boolean canWriteObjectACP(String accountName, AmazonS3 s3, String bucketName, String key) { boolean canDo = false; try { print(accountName + ": Setting ACL for object " + key); s3.setObjectAcl(bucketName, key, CannedAccessControlList.Private); canDo = true; } catch (AmazonServiceException ex) { assertTrue("Expected status code to be 403, but got " + ex.getStatusCode(), ex.getStatusCode() == 403); print(accountName + ": Not authorized to WRITE_ACP on " + key); } return canDo; } }
/** * Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.web.exchange; import java.net.URI; import java.util.Iterator; import java.util.List; import javax.ws.rs.Consumes; import javax.ws.rs.FormParam; import javax.ws.rs.GET; import javax.ws.rs.POST; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.core.Context; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.MultivaluedMap; import javax.ws.rs.core.Response; import javax.ws.rs.core.Response.Status; import javax.ws.rs.core.UriBuilder; import javax.ws.rs.core.UriInfo; import org.apache.commons.lang.StringUtils; import org.joda.beans.impl.flexi.FlexiBean; import com.opengamma.DataNotFoundException; import com.opengamma.id.ExternalId; import com.opengamma.id.ExternalIdBundle; import com.opengamma.id.ObjectId; import com.opengamma.id.UniqueId; import com.opengamma.master.exchange.ExchangeDocument; import com.opengamma.master.exchange.ExchangeHistoryRequest; import com.opengamma.master.exchange.ExchangeHistoryResult; import com.opengamma.master.exchange.ExchangeMaster; import com.opengamma.master.exchange.ExchangeSearchRequest; import com.opengamma.master.exchange.ExchangeSearchResult; import com.opengamma.master.exchange.ExchangeSearchSortOrder; import com.opengamma.master.exchange.ManageableExchange; import com.opengamma.util.paging.PagingRequest; import com.opengamma.web.WebPaging; /** * RESTful resource for all exchanges. * <p> * The exchanges resource represents the whole of a exchange master. */ @Path("/exchanges") public class WebExchangesResource extends AbstractWebExchangeResource { /** * Creates the resource. 
* @param exchangeMaster the exchange master, not null */ public WebExchangesResource(final ExchangeMaster exchangeMaster) { super(exchangeMaster); } //------------------------------------------------------------------------- @GET @Produces(MediaType.TEXT_HTML) public String getHTML( @QueryParam("pgIdx") final Integer pgIdx, @QueryParam("pgNum") final Integer pgNum, @QueryParam("pgSze") final Integer pgSze, @QueryParam("sort") final String sort, @QueryParam("name") final String name, @QueryParam("exchangeId") final List<String> exchangeIdStrs, @Context final UriInfo uriInfo) { final PagingRequest pr = buildPagingRequest(pgIdx, pgNum, pgSze); final ExchangeSearchSortOrder so = buildSortOrder(sort, ExchangeSearchSortOrder.NAME_ASC); final FlexiBean out = createSearchResultData(pr, so, name, exchangeIdStrs, uriInfo); return getFreemarker().build(HTML_DIR + "exchanges.ftl", out); } @GET @Produces(MediaType.APPLICATION_JSON) public String getJSON( @QueryParam("pgIdx") final Integer pgIdx, @QueryParam("pgNum") final Integer pgNum, @QueryParam("pgSze") final Integer pgSze, @QueryParam("sort") final String sort, @QueryParam("name") final String name, @QueryParam("exchangeId") final List<String> exchangeIdStrs, @Context final UriInfo uriInfo) { final PagingRequest pr = buildPagingRequest(pgIdx, pgNum, pgSze); final ExchangeSearchSortOrder so = buildSortOrder(sort, ExchangeSearchSortOrder.NAME_ASC); final FlexiBean out = createSearchResultData(pr, so, name, exchangeIdStrs, uriInfo); return getFreemarker().build(JSON_DIR + "exchanges.ftl", out); } private FlexiBean createSearchResultData(final PagingRequest pr, final ExchangeSearchSortOrder so, final String name, final List<String> exchangeIdStrs, final UriInfo uriInfo) { final FlexiBean out = createRootData(); final ExchangeSearchRequest searchRequest = new ExchangeSearchRequest(); searchRequest.setPagingRequest(pr); searchRequest.setSortOrder(so); searchRequest.setName(StringUtils.trimToNull(name)); final 
MultivaluedMap<String, String> query = uriInfo.getQueryParameters(); for (int i = 0; query.containsKey("idscheme." + i) && query.containsKey("idvalue." + i); i++) { final ExternalId id = ExternalId.of(query.getFirst("idscheme." + i), query.getFirst("idvalue." + i)); searchRequest.addExternalId(id); } for (final String exchangeIdStr : exchangeIdStrs) { searchRequest.addObjectId(ObjectId.parse(exchangeIdStr)); } out.put("searchRequest", searchRequest); if (data().getUriInfo().getQueryParameters().size() > 0) { final ExchangeSearchResult searchResult = data().getExchangeMaster().search(searchRequest); out.put("searchResult", searchResult); out.put("paging", new WebPaging(searchResult.getPaging(), uriInfo)); } return out; } //------------------------------------------------------------------------- @POST @Consumes(MediaType.APPLICATION_FORM_URLENCODED) @Produces(MediaType.TEXT_HTML) public Response postHTML( @FormParam("name") final String name, @FormParam("idscheme") final String idScheme, @FormParam("idvalue") final String idValue, @FormParam("regionscheme") final String regionScheme, @FormParam("regionvalue") final String regionValue) { final String trimmedName = StringUtils.trimToNull(name); final String trimmedIdScheme = StringUtils.trimToNull(idScheme); final String trimmedIdValue = StringUtils.trimToNull(idValue); final String trimmedRegionScheme = StringUtils.trimToNull(regionScheme); final String trimmedRegionValue = StringUtils.trimToNull(regionValue); if (trimmedName == null || idScheme == null || idValue == null) { final FlexiBean out = createRootData(); if (trimmedName == null) { out.put("err_nameMissing", true); } if (trimmedIdScheme == null) { out.put("err_idschemeMissing", true); } if (trimmedIdValue == null) { out.put("err_idvalueMissing", true); } if (trimmedRegionScheme == null) { out.put("err_regionschemeMissing", true); } if (trimmedRegionValue == null) { out.put("err_regionvalueMissing", true); } final String html = getFreemarker().build(HTML_DIR 
+ "exchanges-add.ftl", out); return Response.ok(html).build(); } final URI uri = createExchange(trimmedName, trimmedIdScheme, trimmedIdValue, trimmedRegionScheme, trimmedRegionValue); return Response.seeOther(uri).build(); } @POST @Consumes(MediaType.APPLICATION_FORM_URLENCODED) @Produces(MediaType.APPLICATION_JSON) public Response postJSON( @FormParam("name") final String name, @FormParam("idscheme") final String idScheme, @FormParam("idvalue") final String idValue, @FormParam("regionscheme") final String regionScheme, @FormParam("regionvalue") final String regionValue) { final String trimmedName = StringUtils.trimToNull(name); final String trimmedIdScheme = StringUtils.trimToNull(idScheme); final String trimmedIdValue = StringUtils.trimToNull(idValue); final String trimmedRegionScheme = StringUtils.trimToNull(regionScheme); final String trimmedRegionValue = StringUtils.trimToNull(regionValue); if (trimmedName == null || trimmedIdScheme == null || trimmedIdValue == null) { return Response.status(Status.BAD_REQUEST).build(); } final URI uri = createExchange(trimmedName, trimmedIdScheme, trimmedIdValue, trimmedRegionScheme, trimmedRegionValue); return Response.created(uri).build(); } private URI createExchange(final String name, final String idScheme, final String idValue, final String regionScheme, final String regionValue) { final ExternalId id = ExternalId.of(idScheme, idValue); final ExternalId region = ExternalId.of(regionScheme, regionValue); final ManageableExchange exchange = new ManageableExchange(ExternalIdBundle.of(id), name, ExternalIdBundle.of(region), null); final ExchangeDocument doc = new ExchangeDocument(exchange); final ExchangeDocument added = data().getExchangeMaster().add(doc); final URI uri = data().getUriInfo().getAbsolutePathBuilder().path(added.getUniqueId().toLatest().toString()).build(); return uri; } //------------------------------------------------------------------------- @Path("{exchangeId}") public WebExchangeResource 
findExchange(@PathParam("exchangeId") final String idStr) { data().setUriExchangeId(idStr); final UniqueId oid = UniqueId.parse(idStr); try { final ExchangeDocument doc = data().getExchangeMaster().get(oid); data().setExchange(doc); } catch (final DataNotFoundException ex) { final ExchangeHistoryRequest historyRequest = new ExchangeHistoryRequest(oid); historyRequest.setPagingRequest(PagingRequest.ONE); final ExchangeHistoryResult historyResult = data().getExchangeMaster().history(historyRequest); if (historyResult.getDocuments().size() == 0) { throw ex; } data().setExchange(historyResult.getFirstDocument()); } return new WebExchangeResource(this); } //------------------------------------------------------------------------- /** * Creates the output root data. * @return the output root data, not null */ @Override protected FlexiBean createRootData() { final FlexiBean out = super.createRootData(); final ExchangeSearchRequest searchRequest = new ExchangeSearchRequest(); out.put("searchRequest", searchRequest); return out; } //------------------------------------------------------------------------- /** * Builds a URI for exchanges. * @param data the data, not null * @return the URI, not null */ public static URI uri(final WebExchangeData data) { return uri(data, null); } /** * Builds a URI for exchanges. * @param data the data, not null * @param identifiers the identifiers to search for, may be null * @return the URI, not null */ public static URI uri(final WebExchangeData data, final ExternalIdBundle identifiers) { final UriBuilder builder = data.getUriInfo().getBaseUriBuilder().path(WebExchangesResource.class); if (identifiers != null) { final Iterator<ExternalId> it = identifiers.iterator(); for (int i = 0; it.hasNext(); i++) { final ExternalId id = it.next(); builder.queryParam("idscheme." + i, id.getScheme().getName()); builder.queryParam("idvalue." + i, id.getValue()); } } return builder.build(); } }
/* * ARX: Powerful Data Anonymization * Copyright 2012 - 2015 Florian Kohlmayer, Fabian Prasser * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.deidentifier.arx.gui.view.impl.common; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.deidentifier.arx.gui.Controller; import org.deidentifier.arx.gui.resources.Resources; import org.deidentifier.arx.gui.view.SWTUtil; import org.eclipse.swt.SWT; import org.eclipse.swt.events.DisposeEvent; import org.eclipse.swt.events.DisposeListener; import org.eclipse.swt.events.MouseAdapter; import org.eclipse.swt.events.MouseEvent; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.events.SelectionListener; import org.eclipse.swt.graphics.Image; import org.eclipse.swt.graphics.Point; import org.eclipse.swt.graphics.Rectangle; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Event; import org.eclipse.swt.widgets.Table; import org.eclipse.swt.widgets.TableColumn; import org.eclipse.swt.widgets.TableItem; /** * This class implements a table, in which properties can be filtered. * * @author Fabian Prasser */ public class ComponentFilterTable { /** Constant. */ private static final int LABEL_WIDTH = 100; /** Constant. */ private static final int CHECKBOX_WIDTH = 20; /** Image. */ private final Image IMAGE_ENABLED; /** Image. */ private final Image IMAGE_DISABLED; /** Widget. */ private final Table table; /** Widgets. 
*/ private Map<String, TableItem> items; /** The registered listeners. */ private List<SelectionListener> listeners; /** The selection map. */ private Map<String, Map<String, Boolean>> selected; /** The list of properties. */ private Map<String, List<String>> itemProperties; /** The list of properties. */ private List<String> properties; /** The list of entries. */ private List<String> entries; /** Selected entry. */ private String selectedEntry = null; /** Selected property. */ private String selectedProperty = null; /** * Creates a new instance. * * @param parent * @param controller * @param properties */ public ComponentFilterTable(Composite parent, Controller controller, List<String> properties) { IMAGE_ENABLED = controller.getResources().getImage("tick.png"); //$NON-NLS-1$ IMAGE_DISABLED = controller.getResources().getImage("cross.png"); //$NON-NLS-1$ this.listeners = new ArrayList<SelectionListener>(); this.selected = new HashMap<String, Map<String, Boolean>>(); this.properties = new ArrayList<String>(properties); this.entries = new ArrayList<String>(); this.items = new HashMap<String, TableItem>(); this.itemProperties = new HashMap<String, List<String>>(); this.table = SWTUtil.createTable(parent, SWT.BORDER | SWT.H_SCROLL | SWT.V_SCROLL); this.table.setHeaderVisible(true); this.setProperties(properties); table.addMouseListener(new MouseAdapter(){ public void mouseDown(MouseEvent arg0) { int row = getItemRowAt(arg0.x, arg0.y); int column = getItemColumnAt(arg0.x, arg0.y); if (row != -1 && column > 0 && column <= ComponentFilterTable.this.properties.size()) { String property = ComponentFilterTable.this.properties.get(column-1); String entry = ComponentFilterTable.this.entries.get(row); if (itemProperties.get(entry).contains(property)) { selectedProperty = property; selectedEntry = entry; } else { selectedProperty = null; selectedEntry = null; } } else { selectedProperty = null; selectedEntry = null; } } }); table.addMouseListener(new MouseAdapter(){ public 
void mouseDown(MouseEvent arg0) { if (selectedProperty != null && selectedEntry != null) { boolean selected = isSelected(selectedEntry, selectedProperty); setSelected(selectedEntry, selectedProperty, !selected); fireSelectionEvent(); } } }); table.addDisposeListener(new DisposeListener(){ public void widgetDisposed(DisposeEvent arg0) { IMAGE_ENABLED.dispose(); IMAGE_DISABLED.dispose(); } }); } /** * Adds a new entry, i.e., a row in the table * * @param entry * @param properties */ public void addEntry(String entry, List<String> properties) { if (!this.properties.containsAll(properties)) { throw new RuntimeException(Resources.getMessage("ComponentFilterTable.2")); //$NON-NLS-1$ } TableItem item = new TableItem(table, SWT.NONE); for (int i = 0; i < this.properties.size(); i++) { if (properties.contains(this.properties.get(i))) { item.setImage(i + 1, IMAGE_DISABLED); } } item.setImage(0, null); item.setText(0, entry); this.items.put(entry, item); this.itemProperties.put(entry, properties); this.entries.add(entry); table.redraw(); for (TableColumn c : table.getColumns()) { c.pack(); } } /** * Adds a selection listener. * * @param listener */ public void addSelectionListener(SelectionListener listener) { this.listeners.add(listener); } /** * Clears the table. */ public void clear() { this.table.setRedraw(false); for (TableItem item : table.getItems()) { item.dispose(); } for (TableColumn column : table.getColumns()) { column.dispose(); } this.table.removeAll(); this.table.setRedraw(true); this.table.redraw(); this.items.clear(); this.itemProperties.clear(); this.properties.clear(); this.entries.clear(); this.selected.clear(); } /** * Disposes this widget. */ public void dispose() { this.table.dispose(); } /** * Returns the entries. * * @return */ public List<String> getEntries() { return entries; } /** * Returns the properties. * * @return */ public List<String> getProperties() { return properties; } /** * Returns the currently selected entry. 
* * @return */ public String getSelectedEntry() { return selectedEntry; } /** * Returns the currently selected property. * * @return */ public String getSelectedProperty() { return selectedProperty; } /** * Returns whether the given property is selected for the given entry. * * @param entry * @param property * @return */ public boolean isSelected(String entry, String property) { if (!this.entries.contains(entry)) { throw new RuntimeException(Resources.getMessage("ComponentFilterTable.3")); //$NON-NLS-1$ } if (!this.properties.contains(property)) { throw new RuntimeException(Resources.getMessage("ComponentFilterTable.4")); //$NON-NLS-1$ } Map<String, Boolean> map = selected.get(entry); if (map == null) { return false; } else { Boolean b = map.get(property); return b == null ? false : b; } } /** * Removes a selection listener. * * @param listener */ public void removeSelectionListener(SelectionListener listener) { this.listeners.remove(listener); } /** * Enable/disable. * * @param enabled */ public void setEnabled(boolean enabled) { this.table.setEnabled(enabled); } /** * Sets layout data. * * @param layoutData */ public void setLayoutData(Object layoutData) { this.table.setLayoutData(layoutData); } /** * Sets new properties. Clears the table * @param properties */ public void setProperties(List<String> properties) { this.clear(); this.properties = new ArrayList<String>(properties); TableColumn column = new TableColumn(table, SWT.LEFT); column.setWidth(LABEL_WIDTH); column.setText(""); //$NON-NLS-1$ for (String property : properties) { column = new TableColumn(table, SWT.CENTER); column.setText(property); column.setWidth(CHECKBOX_WIDTH); } column = new TableColumn(table, SWT.LEFT); column.setText(""); //$NON-NLS-1$ } /** * Sets the given property selected for the given entry . 
* * @param entry * @param property * @param selected */ public void setSelected(String entry, String property, boolean selected) { if (!this.entries.contains(entry)) { throw new RuntimeException(Resources.getMessage("ComponentFilterTable.7")); //$NON-NLS-1$ } if (!this.properties.contains(property)) { throw new RuntimeException(Resources.getMessage("ComponentFilterTable.8")); //$NON-NLS-1$ } if (!this.selected.containsKey(entry)) { this.selected.put(entry, new HashMap<String, Boolean>()); } if (this.itemProperties.get(entry).contains(property)) { this.selected.get(entry).put(property, selected); int index = properties.indexOf(property); this.items.get(entry).setImage(index + 1, selected ? IMAGE_ENABLED : IMAGE_DISABLED); table.redraw(); } } /** * Fires a new event. */ private void fireSelectionEvent() { Event event = new Event(); event.display = table.getDisplay(); event.item = table; event.widget = table; SelectionEvent sEvent = new SelectionEvent(event); for (SelectionListener listener : listeners) { listener.widgetSelected(sEvent); } } /** * Returns the item at the given location. * * @param x * @param y * @return */ private int getItemColumnAt(int x, int y) { Point pt = new Point(x, y); int index = table.getTopIndex(); while (index < table.getItemCount()) { final TableItem item = table.getItem(index); for (int i = 0; i < table.getColumns().length; i++) { final Rectangle rect = item.getBounds(i); if (rect.contains(pt)) { return i; } } index++; } return -1; } /** * Returns the item at the given location. * * @param x * @param y * @return */ private int getItemRowAt(int x, int y) { Point pt = new Point(x, y); int index = table.getTopIndex(); while (index < table.getItemCount()) { final TableItem item = table.getItem(index); for (int i = 0; i < table.getColumns().length; i++) { final Rectangle rect = item.getBounds(i); if (rect.contains(pt)) { return index; } } index++; } return -1; } }
/* Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.flowable.rest.service.api.runtime; import java.util.ArrayList; import java.util.Calendar; import java.util.Collections; import java.util.HashMap; import java.util.List; import org.apache.http.HttpStatus; import org.flowable.engine.runtime.Execution; import org.flowable.engine.runtime.ProcessInstance; import org.flowable.engine.test.Deployment; import org.flowable.identitylink.service.IdentityLinkType; import org.flowable.rest.service.BaseSpringRestTestCase; import org.flowable.rest.service.api.RestUrls; import org.flowable.task.api.DelegationState; import org.flowable.task.api.Task; import com.fasterxml.jackson.databind.node.ArrayNode; import com.fasterxml.jackson.databind.node.ObjectNode; /** * Test for all REST-operations related to the Task collection resource. * * @author Frederik Heremans */ public class TaskQueryResourceTest extends BaseSpringRestTestCase { /** * Test querying tasks. 
GET runtime/tasks */
@Deployment
public void testQueryTasks() throws Exception {
    try {
        // Three timestamps: adhoc task created "now", process task two hours
        // later, and a point in between used by the before/after range filters.
        Calendar adhocTaskCreate = Calendar.getInstance();
        adhocTaskCreate.set(Calendar.MILLISECOND, 0);

        Calendar processTaskCreate = Calendar.getInstance();
        processTaskCreate.add(Calendar.HOUR, 2);
        processTaskCreate.set(Calendar.MILLISECOND, 0);

        Calendar inBetweenTaskCreation = Calendar.getInstance();
        inBetweenTaskCreation.add(Calendar.HOUR, 1);

        // Standalone (adhoc) task with every filterable attribute set
        processEngineConfiguration.getClock().setCurrentTime(adhocTaskCreate.getTime());
        Task adhocTask = taskService.newTask();
        adhocTask.setAssignee("gonzo");
        adhocTask.setOwner("owner");
        adhocTask.setDelegationState(DelegationState.PENDING);
        adhocTask.setDescription("Description one");
        adhocTask.setName("Name one");
        adhocTask.setDueDate(adhocTaskCreate.getTime());
        adhocTask.setPriority(100);
        adhocTask.setFormKey("myForm.json");
        adhocTask.setCategory("some-category");
        taskService.saveTask(adhocTask);
        taskService.addUserIdentityLink(adhocTask.getId(), "misspiggy", IdentityLinkType.PARTICIPANT);

        // Process task, created two hours later, as a subtask of the adhoc task
        processEngineConfiguration.getClock().setCurrentTime(processTaskCreate.getTime());
        ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("oneTaskProcess", "myBusinessKey");
        Task processTask = taskService.createTaskQuery().processInstanceId(processInstance.getId()).singleResult();
        processTask.setParentTaskId(adhocTask.getId());
        processTask.setPriority(50);
        processTask.setDueDate(processTaskCreate.getTime());
        taskService.saveTask(processTask);

        // Check filter-less to fetch all tasks
        String url = RestUrls.createRelativeResourceUrl(RestUrls.URL_TASK_QUERY);
        ObjectNode requestNode = objectMapper.createObjectNode();
        assertResultsPresentInPostDataResponse(url, requestNode, processTask.getId(), adhocTask.getId());

        // Name filtering
        requestNode.removeAll();
        requestNode.put("name", "Name one");
        assertResultsPresentInPostDataResponse(url, requestNode, adhocTask.getId());

        // Name like filtering
        requestNode.removeAll();
        requestNode.put("nameLike", "%one");
        assertResultsPresentInPostDataResponse(url, requestNode, adhocTask.getId());

        // Description filtering
        requestNode.removeAll();
        requestNode.put("description", "Description one");
        assertResultsPresentInPostDataResponse(url, requestNode, adhocTask.getId());

        // Description like filtering
        requestNode.removeAll();
        requestNode.put("descriptionLike", "%one");
        assertResultsPresentInPostDataResponse(url, requestNode, adhocTask.getId());

        // Priority filtering
        requestNode.removeAll();
        requestNode.put("priority", 100);
        assertResultsPresentInPostDataResponse(url, requestNode, adhocTask.getId());

        // Minimum priority filtering
        requestNode.removeAll();
        requestNode.put("minimumPriority", 70);
        assertResultsPresentInPostDataResponse(url, requestNode, adhocTask.getId());

        // Maximum priority filtering
        requestNode.removeAll();
        requestNode.put("maximumPriority", 70);
        assertResultsPresentInPostDataResponse(url, requestNode, processTask.getId());

        // Owner filtering
        requestNode.removeAll();
        requestNode.put("owner", "owner");
        assertResultsPresentInPostDataResponse(url, requestNode, adhocTask.getId());

        // Assignee filtering
        requestNode.removeAll();
        requestNode.put("assignee", "gonzo");
        assertResultsPresentInPostDataResponse(url, requestNode, adhocTask.getId());

        // Owner like filtering
        requestNode.removeAll();
        requestNode.put("ownerLike", "owne%");
        assertResultsPresentInPostDataResponse(url, requestNode, adhocTask.getId());

        // Assignee like filtering
        requestNode.removeAll();
        requestNode.put("assigneeLike", "%onzo");
        assertResultsPresentInPostDataResponse(url, requestNode, adhocTask.getId());

        // Unassigned filtering
        requestNode.removeAll();
        requestNode.put("unassigned", true);
        assertResultsPresentInPostDataResponse(url, requestNode, processTask.getId());

        // Delegation state filtering
        requestNode.removeAll();
        requestNode.put("delegationState", "pending");
        assertResultsPresentInPostDataResponse(url, requestNode, adhocTask.getId());

        // Candidate user filtering
        requestNode.removeAll();
        requestNode.put("candidateUser", "kermit");
        assertResultsPresentInPostDataResponse(url, requestNode, processTask.getId());

        // Candidate group filtering
        requestNode.removeAll();
        requestNode.put("candidateGroup", "sales");
        assertResultsPresentInPostDataResponse(url, requestNode, processTask.getId());

        // Candidate group In filtering
        requestNode.removeAll();
        ArrayNode arrayNode = requestNode.arrayNode();
        arrayNode.add("sales");
        arrayNode.add("someOtherGroup");
        requestNode.set("candidateGroupIn", arrayNode);
        assertResultsPresentInPostDataResponse(url, requestNode, processTask.getId());

        // Involved user filtering
        requestNode.removeAll();
        requestNode.put("involvedUser", "misspiggy");
        assertResultsPresentInPostDataResponse(url, requestNode, adhocTask.getId());

        // Process instance filtering
        requestNode.removeAll();
        requestNode.put("processInstanceId", processInstance.getId());
        assertResultsPresentInPostDataResponse(url, requestNode, processTask.getId());

        // Execution filtering
        requestNode.removeAll();
        Execution taskExecution = runtimeService.createExecutionQuery().activityId("processTask").singleResult();
        requestNode.put("executionId", taskExecution.getId());
        assertResultsPresentInPostDataResponse(url, requestNode, processTask.getId());

        // Process instance businesskey filtering
        requestNode.removeAll();
        requestNode.put("processInstanceBusinessKey", "myBusinessKey");
        assertResultsPresentInPostDataResponse(url, requestNode, processTask.getId());

        // Process instance businesskey like filtering
        requestNode.removeAll();
        requestNode.put("processInstanceBusinessKeyLike", "myBusiness%");
        assertResultsPresentInPostDataResponse(url, requestNode, processTask.getId());

        // Process definition key filtering
        requestNode.removeAll();
        requestNode.put("processDefinitionKey", "oneTaskProcess");
        assertResultsPresentInPostDataResponse(url, requestNode, processTask.getId());

        // Process definition key like filtering
        requestNode.removeAll();
        requestNode.put("processDefinitionKeyLike", "%TaskProcess");
        assertResultsPresentInPostDataResponse(url, requestNode, processTask.getId());

        // Process definition name filtering
        requestNode.removeAll();
        requestNode.put("processDefinitionName", "The One Task Process");
        assertResultsPresentInPostDataResponse(url, requestNode, processTask.getId());

        // Process definition name like filtering
        requestNode.removeAll();
        requestNode.put("processDefinitionNameLike", "The One %");
        assertResultsPresentInPostDataResponse(url, requestNode, processTask.getId());

        // CreatedOn filtering
        requestNode.removeAll();
        requestNode.put("createdOn", getISODateString(adhocTaskCreate.getTime()));
        assertResultsPresentInPostDataResponse(url, requestNode, adhocTask.getId());

        // CreatedAfter filtering
        requestNode.removeAll();
        requestNode.put("createdAfter", getISODateString(inBetweenTaskCreation.getTime()));
        assertResultsPresentInPostDataResponse(url, requestNode, processTask.getId());

        // CreatedBefore filtering
        requestNode.removeAll();
        requestNode.put("createdBefore", getISODateString(inBetweenTaskCreation.getTime()));
        assertResultsPresentInPostDataResponse(url, requestNode, adhocTask.getId());

        // Subtask exclusion (processTask is a subtask of adhocTask)
        requestNode.removeAll();
        requestNode.put("excludeSubTasks", true);
        assertResultsPresentInPostDataResponse(url, requestNode, adhocTask.getId());

        // Task definition key filtering
        requestNode.removeAll();
        requestNode.put("taskDefinitionKey", "processTask");
        assertResultsPresentInPostDataResponse(url, requestNode, processTask.getId());

        // Task definition key like filtering
        requestNode.removeAll();
        requestNode.put("taskDefinitionKeyLike", "process%");
        assertResultsPresentInPostDataResponse(url, requestNode, processTask.getId());

        // Duedate filtering
        requestNode.removeAll();
        requestNode.put("dueDate", getISODateString(adhocTaskCreate.getTime()));
        assertResultsPresentInPostDataResponse(url, requestNode, adhocTask.getId());

        // Due after filtering
        requestNode.removeAll();
        requestNode.put("dueAfter", getISODateString(inBetweenTaskCreation.getTime()));
        assertResultsPresentInPostDataResponse(url, requestNode, processTask.getId());

        // Due before filtering
        requestNode.removeAll();
        requestNode.put("dueBefore", getISODateString(inBetweenTaskCreation.getTime()));
        assertResultsPresentInPostDataResponse(url, requestNode, adhocTask.getId());

        // Suspend process-instance to have a suspended task
        runtimeService.suspendProcessInstanceById(processInstance.getId());

        // Suspended filtering
        requestNode.removeAll();
        requestNode.put("active", false);
        assertResultsPresentInPostDataResponse(url, requestNode, processTask.getId());

        // Active filtering
        requestNode.removeAll();
        requestNode.put("active", true);
        assertResultsPresentInPostDataResponse(url, requestNode, adhocTask.getId());

        // Filtering by category
        requestNode.removeAll();
        requestNode.put("category", "some-category");
        assertResultsPresentInPostDataResponse(url, requestNode, adhocTask.getId());

        // Filtering without duedate
        requestNode.removeAll();
        requestNode.put("withoutDueDate", true);
        // No response should be returned, no tasks without a duedate yet
        assertResultsPresentInPostDataResponse(url, requestNode);

        // Clear the due date on the process task, then the filter should match it
        processTask = taskService.createTaskQuery().processInstanceId(processInstance.getId()).singleResult();
        processTask.setDueDate(null);
        taskService.saveTask(processTask);
        assertResultsPresentInPostDataResponse(url, requestNode, processTask.getId());

    } finally {
        // Clean adhoc-tasks even if test fails
        List<Task> tasks = taskService.createTaskQuery().list();
        for (Task task : tasks) {
            if (task.getExecutionId() == null) {
                taskService.deleteTask(task.getId(), true);
            }
        }
    }
}

/**
 * Test querying tasks using task and process variables.
GET runtime/tasks */ @Deployment public void testQueryTasksWithVariables() throws Exception { HashMap<String, Object> processVariables = new HashMap<>(); processVariables.put("stringVar", "Azerty"); processVariables.put("intVar", 67890); processVariables.put("booleanVar", false); ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("oneTaskProcess", processVariables); Task processTask = taskService.createTaskQuery().processInstanceId(processInstance.getId()).singleResult(); HashMap<String, Object> variables = new HashMap<>(); variables.put("stringVar", "Abcdef"); variables.put("intVar", 12345); variables.put("booleanVar", true); taskService.setVariablesLocal(processTask.getId(), variables); // Additional tasks to confirm it's filtered out runtimeService.startProcessInstanceByKey("oneTaskProcess"); ObjectNode requestNode = objectMapper.createObjectNode(); ArrayNode variableArray = objectMapper.createArrayNode(); ObjectNode variableNode = objectMapper.createObjectNode(); variableArray.add(variableNode); requestNode.set("taskVariables", variableArray); String url = RestUrls.createRelativeResourceUrl(RestUrls.URL_TASK_QUERY); // String equals variableNode.put("name", "stringVar"); variableNode.put("value", "Abcdef"); variableNode.put("operation", "equals"); assertResultsPresentInPostDataResponse(url, requestNode, processTask.getId()); // Integer equals variableNode.removeAll(); variableNode.put("name", "intVar"); variableNode.put("value", 12345); variableNode.put("operation", "equals"); assertResultsPresentInPostDataResponse(url, requestNode, processTask.getId()); // Boolean equals variableNode.removeAll(); variableNode.put("name", "booleanVar"); variableNode.put("value", true); variableNode.put("operation", "equals"); assertResultsPresentInPostDataResponse(url, requestNode, processTask.getId()); // String not equals variableNode.removeAll(); variableNode.put("name", "stringVar"); variableNode.put("value", "ghijkl"); variableNode.put("operation", 
"notEquals"); assertResultsPresentInPostDataResponse(url, requestNode, processTask.getId()); // Integer not equals variableNode.removeAll(); variableNode.put("name", "intVar"); variableNode.put("value", 45678); variableNode.put("operation", "notEquals"); assertResultsPresentInPostDataResponse(url, requestNode, processTask.getId()); // Boolean not equals variableNode.removeAll(); variableNode.put("name", "booleanVar"); variableNode.put("value", false); variableNode.put("operation", "notEquals"); assertResultsPresentInPostDataResponse(url, requestNode, processTask.getId()); // String equals ignore case variableNode.removeAll(); variableNode.put("name", "stringVar"); variableNode.put("value", "abCDEF"); variableNode.put("operation", "equalsIgnoreCase"); assertResultsPresentInPostDataResponse(url, requestNode, processTask.getId()); // String not equals ignore case variableNode.removeAll(); variableNode.put("name", "stringVar"); variableNode.put("value", "HIJKLm"); variableNode.put("operation", "notEqualsIgnoreCase"); assertResultsPresentInPostDataResponse(url, requestNode, processTask.getId()); // String equals without value variableNode.removeAll(); variableNode.put("value", "Abcdef"); variableNode.put("operation", "equals"); assertResultsPresentInPostDataResponse(url, requestNode, processTask.getId()); // Greater than variableNode.removeAll(); variableNode.put("name", "intVar"); variableNode.put("value", 12300); variableNode.put("operation", "greaterThan"); assertResultsPresentInPostDataResponse(url, requestNode, processTask.getId()); variableNode.put("value", 12345); variableNode.put("operation", "greaterThan"); assertResultsPresentInPostDataResponse(url, requestNode); // Greater than or equal variableNode.removeAll(); variableNode.put("name", "intVar"); variableNode.put("value", 12300); variableNode.put("operation", "greaterThanOrEquals"); assertResultsPresentInPostDataResponse(url, requestNode, processTask.getId()); variableNode.put("value", 12345); 
assertResultsPresentInPostDataResponse(url, requestNode, processTask.getId()); // Less than variableNode.removeAll(); variableNode.put("name", "intVar"); variableNode.put("value", 12400); variableNode.put("operation", "lessThan"); assertResultsPresentInPostDataResponse(url, requestNode, processTask.getId()); variableNode.put("value", 12345); variableNode.put("operation", "lessThan"); assertResultsPresentInPostDataResponse(url, requestNode); // Less than or equal variableNode.removeAll(); variableNode.put("name", "intVar"); variableNode.put("value", 12400); variableNode.put("operation", "lessThanOrEquals"); assertResultsPresentInPostDataResponse(url, requestNode, processTask.getId()); variableNode.put("value", 12345); assertResultsPresentInPostDataResponse(url, requestNode, processTask.getId()); // Like variableNode.removeAll(); variableNode.put("name", "stringVar"); variableNode.put("value", "Abcde%"); variableNode.put("operation", "like"); // Any other operation but equals without value variableNode.removeAll(); variableNode.put("value", "abcdef"); variableNode.put("operation", "notEquals"); assertResultsPresentInPostDataResponseWithStatusCheck(url, requestNode, HttpStatus.SC_BAD_REQUEST); // Illegal (but existing) operation variableNode.removeAll(); variableNode.put("name", "stringVar"); variableNode.put("value", "abcdef"); variableNode.put("operation", "operationX"); assertResultsPresentInPostDataResponseWithStatusCheck(url, requestNode, HttpStatus.SC_BAD_REQUEST); // Process variables requestNode = objectMapper.createObjectNode(); variableArray = objectMapper.createArrayNode(); variableNode = objectMapper.createObjectNode(); variableArray.add(variableNode); requestNode.set("processInstanceVariables", variableArray); // String equals variableNode.put("name", "stringVar"); variableNode.put("value", "Azerty"); variableNode.put("operation", "equals"); assertResultsPresentInPostDataResponse(url, requestNode, processTask.getId()); // Integer equals 
variableNode.removeAll(); variableNode.put("name", "intVar"); variableNode.put("value", 67890); variableNode.put("operation", "equals"); assertResultsPresentInPostDataResponse(url, requestNode, processTask.getId()); // Boolean equals variableNode.removeAll(); variableNode.put("name", "booleanVar"); variableNode.put("value", false); variableNode.put("operation", "equals"); assertResultsPresentInPostDataResponse(url, requestNode, processTask.getId()); // String not equals variableNode.removeAll(); variableNode.put("name", "stringVar"); variableNode.put("value", "ghijkl"); variableNode.put("operation", "notEquals"); assertResultsPresentInPostDataResponse(url, requestNode, processTask.getId()); // Integer not equals variableNode.removeAll(); variableNode.put("name", "intVar"); variableNode.put("value", 45678); variableNode.put("operation", "notEquals"); assertResultsPresentInPostDataResponse(url, requestNode, processTask.getId()); // Boolean not equals variableNode.removeAll(); variableNode.put("name", "booleanVar"); variableNode.put("value", true); variableNode.put("operation", "notEquals"); assertResultsPresentInPostDataResponse(url, requestNode, processTask.getId()); // String equals ignore case variableNode.removeAll(); variableNode.put("name", "stringVar"); variableNode.put("value", "azeRTY"); variableNode.put("operation", "equalsIgnoreCase"); assertResultsPresentInPostDataResponse(url, requestNode, processTask.getId()); // String not equals ignore case variableNode.removeAll(); variableNode.put("name", "stringVar"); variableNode.put("value", "HIJKLm"); variableNode.put("operation", "notEqualsIgnoreCase"); assertResultsPresentInPostDataResponse(url, requestNode, processTask.getId()); // String equals without value variableNode.removeAll(); variableNode.put("value", "Azerty"); variableNode.put("operation", "equals"); assertResultsPresentInPostDataResponse(url, requestNode, processTask.getId()); // Greater than variableNode.removeAll(); variableNode.put("name", 
"intVar"); variableNode.put("value", 67800); variableNode.put("operation", "greaterThan"); assertResultsPresentInPostDataResponse(url, requestNode, processTask.getId()); variableNode.put("value", 67890); variableNode.put("operation", "greaterThan"); assertResultsPresentInPostDataResponse(url, requestNode); // Greater than or equal variableNode.removeAll(); variableNode.put("name", "intVar"); variableNode.put("value", 67800); variableNode.put("operation", "greaterThanOrEquals"); assertResultsPresentInPostDataResponse(url, requestNode, processTask.getId()); variableNode.put("value", 67890); assertResultsPresentInPostDataResponse(url, requestNode, processTask.getId()); // Less than variableNode.removeAll(); variableNode.put("name", "intVar"); variableNode.put("value", 67900); variableNode.put("operation", "lessThan"); assertResultsPresentInPostDataResponse(url, requestNode, processTask.getId()); variableNode.put("value", 67890); variableNode.put("operation", "lessThan"); assertResultsPresentInPostDataResponse(url, requestNode); // Less than or equal variableNode.removeAll(); variableNode.put("name", "intVar"); variableNode.put("value", 67900); variableNode.put("operation", "lessThanOrEquals"); assertResultsPresentInPostDataResponse(url, requestNode, processTask.getId()); variableNode.put("value", 67890); assertResultsPresentInPostDataResponse(url, requestNode, processTask.getId()); // Like variableNode.removeAll(); variableNode.put("name", "stringVar"); variableNode.put("value", "Azert%"); variableNode.put("operation", "like"); } /** * Test querying tasks. 
GET runtime/tasks */ public void testQueryTasksWithPaging() throws Exception { try { Calendar adhocTaskCreate = Calendar.getInstance(); adhocTaskCreate.set(Calendar.MILLISECOND, 0); processEngineConfiguration.getClock().setCurrentTime(adhocTaskCreate.getTime()); List<String> taskIdList = new ArrayList<>(); for (int i = 0; i < 10; i++) { Task adhocTask = taskService.newTask(); adhocTask.setAssignee("gonzo"); adhocTask.setOwner("owner"); adhocTask.setDelegationState(DelegationState.PENDING); adhocTask.setDescription("Description one"); adhocTask.setName("Name one"); adhocTask.setDueDate(adhocTaskCreate.getTime()); adhocTask.setPriority(100); taskService.saveTask(adhocTask); taskService.addUserIdentityLink(adhocTask.getId(), "misspiggy", IdentityLinkType.PARTICIPANT); taskIdList.add(adhocTask.getId()); } Collections.sort(taskIdList); // Check filter-less to fetch all tasks String url = RestUrls.createRelativeResourceUrl(RestUrls.URL_TASK_QUERY); ObjectNode requestNode = objectMapper.createObjectNode(); String[] taskIds = new String[] { taskIdList.get(0), taskIdList.get(1), taskIdList.get(2) }; assertResultsPresentInPostDataResponse(url + "?size=3&sort=id&order=asc", requestNode, taskIds); taskIds = new String[] { taskIdList.get(4), taskIdList.get(5), taskIdList.get(6), taskIdList.get(7) }; assertResultsPresentInPostDataResponse(url + "?start=4&size=4&sort=id&order=asc", requestNode, taskIds); taskIds = new String[] { taskIdList.get(8), taskIdList.get(9) }; assertResultsPresentInPostDataResponse(url + "?start=8&size=10&sort=id&order=asc", requestNode, taskIds); } finally { // Clean adhoc-tasks even if test fails List<Task> tasks = taskService.createTaskQuery().list(); for (Task task : tasks) { if (task.getExecutionId() == null) { taskService.deleteTask(task.getId(), true); } } } } }
package seedu.taskmanager.logic;

import static seedu.taskmanager.commons.core.Messages.MESSAGE_DUPLICATE_TASK;
import static seedu.taskmanager.commons.core.Messages.MESSAGE_INVALID_COMMAND_FORMAT;

import java.util.List;

import org.junit.Test;

import seedu.taskmanager.commons.core.Messages;
import seedu.taskmanager.logic.commands.EditCommand;
import seedu.taskmanager.logic.parser.DateTimeUtil;
import seedu.taskmanager.model.TaskManager;
import seedu.taskmanager.model.task.Task;

/**
 * Exercises the "edit" command: malformed input, invalid indexes and dates,
 * and successful edits of names, start dates and end dates.
 */
public class LogicEditCommandTest extends LogicManagerTest {

    // @@author A0140538J
    @Test
    public void execute_edit_blank() throws Exception {
        // A bare "edit" with no arguments at all is a format error.
        String command = "edit";
        String expectedMessage = String.format(MESSAGE_INVALID_COMMAND_FORMAT, EditCommand.MESSAGE_USAGE);
        assertCommandFailure(command, expectedMessage);
    }

    @Test
    public void execute_edit_missingIndex() throws Exception {
        // A name without a leading task index is a format error.
        String command = "edit meeting";
        String expectedMessage = String.format(MESSAGE_INVALID_COMMAND_FORMAT, EditCommand.MESSAGE_USAGE);
        assertCommandFailure(command, expectedMessage);
    }

    @Test
    public void execute_edit_noArguments() throws Exception {
        // An index with nothing to change is rejected with a dedicated message.
        String command = "edit 1";
        assertCommandFailure(command, EditCommand.MESSAGE_NOT_EDITED);
    }

    @Test
    public void execute_edit_duplicatedName() throws Exception {
        // setup expectations
        TestDataHelper helper = new TestDataHelper();
        Task task1 = helper.confession();
        Task task2 = helper.homework();

        // setup starting state
        model.addTask(task1); // task already in internal task manager
        model.addTask(task2);

        // Renaming task 1 to an existing task's name must be rejected.
        String command = "edit 1 CS Homework";
        assertCommandFailure(command, MESSAGE_DUPLICATE_TASK);
    }

    @Test
    public void execute_edit_invalidIndex() throws Exception {
        // setup expectations
        TestDataHelper helper = new TestDataHelper();
        Task task1 = helper.meeting();

        // setup starting state is empty task manager
        String expectedMessage = Messages.MESSAGE_INVALID_TASK_DISPLAYED_INDEX;

        // No tasks yet, so index 1 is out of range.
        assertCommandFailure("edit 1 birthday", expectedMessage);

        model.addTask(task1); // task already in internal task manager

        // One task present, so index 2 is still out of range.
        assertCommandFailure("edit 2 birthday", expectedMessage);
    }

    @Test
    public void execute_edit_invalidDateFormat() throws Exception {
        // setup expectations
        TestDataHelper helper = new TestDataHelper();
        Task task1 = helper.meeting();

        // setup starting state
        model.addTask(task1); // task already in internal task manager

        // Unparseable dates in either the start or end field must fail.
        String expectedMessage = DateTimeUtil.INVALID_DATE_FORMAT;
        assertCommandFailure("edit 1 s/now e/potato", expectedMessage);
        assertCommandFailure("edit 1 s/!@#$%^", expectedMessage);
    }

    @Test
    public void execute_edit_startAfterEndDate() throws Exception {
        // setup expectations
        TestDataHelper helper = new TestDataHelper();
        Task task = helper.meeting();

        // setup starting state
        model.addTask(task); // task already in internal task manager

        // Moving the end date before the existing start date is rejected.
        String command = "edit 1 e/1/1/2017";
        assertCommandFailure(command, Messages.MESSAGE_START_AFTER_END);
    }

    @Test
    public void execute_edit_invalidDemotion() throws Exception {
        // setup expectations
        TestDataHelper helper = new TestDataHelper();
        Task task = helper.meeting();

        // setup starting state
        model.addTask(task); // task already in internal task manager

        // Removing only the end date of an event would leave an invalid task.
        String command = "edit 1 e/remove";
        assertCommandFailure(command, EditCommand.MESSAGE_INVALID_EDITTED_TASK);
    }

    @Test
    public void execute_edit_validName() throws Exception {
        TestDataHelper helper = new TestDataHelper();
        Task task1 = helper.generateTaskWithName("potato");
        Task task2 = helper.generateTaskWithName("pineapple");
        Task editedTask2 = helper.generateTaskWithName("lol");

        List<Task> sampleTasks = helper.generateTaskList(task1, task2);
        TaskManager expectedTM = helper.generateTaskManager(sampleTasks);
        List<Task> expectedList = helper.generateTaskList(editedTask2, task1);
        helper.addToModel(model, sampleTasks);

        String expectedMessage = String.format(EditCommand.MESSAGE_SUCCESS, editedTask2);
        assertCommandSuccess("edit 2 lol", expectedMessage, expectedTM, expectedList);
    }

    @Test
    public void execute_edit_validStartDate() throws Exception {
        TestDataHelper helper = new TestDataHelper();
        Task task = helper.generateTaskWithAll("potato", "10/10/2010 10am", "12/12/2012 12am");
        Task editedTask = helper.generateTaskWithAll("potato", "11/11/2011 11am", "12/12/2012 12am");

        List<Task> sampleTasks = helper.generateTaskList(task);
        TaskManager expectedTM = helper.generateTaskManager(sampleTasks);
        List<Task> expectedList = helper.generateTaskList(editedTask);
        helper.addToModel(model, sampleTasks);

        String expectedMessage = String.format(EditCommand.MESSAGE_SUCCESS, editedTask);
        assertCommandSuccess("edit 1 s/11/11/2011 11am", expectedMessage, expectedTM, expectedList);
    }

    @Test
    public void execute_edit_validEndDate() throws Exception {
        TestDataHelper helper = new TestDataHelper();
        Task task = helper.generateTaskWithName("potato");
        Task editedTask = helper.generateTaskWithDueDate("potato", "17 july 2017 1:34pm");

        List<Task> sampleTasks = helper.generateTaskList(task);
        TaskManager expectedTM = helper.generateTaskManager(sampleTasks);
        List<Task> expectedList = helper.generateTaskList(editedTask);
        helper.addToModel(model, sampleTasks);

        String expectedMessage = String.format(EditCommand.MESSAGE_SUCCESS, editedTask);
        assertCommandSuccess("edit 1 e/17 july 2017 1:34pm", expectedMessage, expectedTM, expectedList);
    }

    // @@author A0130277L
    @Test
    public void execute_edit_validAll() throws Exception {
        TestDataHelper helper = new TestDataHelper();
        Task task = helper.generateTaskWithName("kantang");
        Task editedTask = helper.generateTaskWithAll("potato", "next hour", "1/2/2023 4.56pm");

        List<Task> sampleTasks = helper.generateTaskList(task);
        TaskManager expectedTM = helper.generateTaskManager(sampleTasks);
        List<Task> expectedList = helper.generateTaskList(editedTask);
        helper.addToModel(model, sampleTasks);

        String expectedMessage = String.format(EditCommand.MESSAGE_SUCCESS, editedTask);
        assertCommandSuccess("edit 1 potato from next hour to 1/2/2023 4.56pm", expectedMessage, expectedTM, expectedList);
    }

    // @@author A0140538J
    @Test
    public void execute_edit_validRemoveStartDate() throws Exception {
        TestDataHelper helper = new TestDataHelper();
        Task task = helper.generateTaskWithAll("potato", "17 july 2016", "17 july 2017 1:34pm");
        Task editedTask = helper.generateTaskWithDueDate("potato", "17 july 2017 1:34pm");

        List<Task> sampleTasks = helper.generateTaskList(task);
        TaskManager expectedTM = helper.generateTaskManager(sampleTasks);
        List<Task> expectedList = helper.generateTaskList(editedTask);
        helper.addToModel(model, sampleTasks);

        String expectedMessage = String.format(EditCommand.MESSAGE_SUCCESS, editedTask);
        assertCommandSuccess("edit 1 s/remove", expectedMessage, expectedTM, expectedList);
    }

    @Test
    public void execute_edit_validRemoveStartAndEndDate() throws Exception {
        TestDataHelper helper = new TestDataHelper();
        Task task = helper.generateTaskWithAll("potato", "17 july 2016", "17 july 2017 1:34pm");
        Task editedTask = helper.generateTaskWithName("potato");

        List<Task> sampleTasks = helper.generateTaskList(task);
        TaskManager expectedTM = helper.generateTaskManager(sampleTasks);
        List<Task> expectedList = helper.generateTaskList(editedTask);
        helper.addToModel(model, sampleTasks);

        String expectedMessage = String.format(EditCommand.MESSAGE_SUCCESS, editedTask);
        assertCommandSuccess("edit 1 s/remove e/remove", expectedMessage, expectedTM, expectedList);
    }
}
package org.ovirt.engine.core.vdsbroker.vdsbroker;

import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.TimeZone;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.commons.lang.StringUtils;
import org.ovirt.engine.core.common.businessentities.Disk;
import org.ovirt.engine.core.common.businessentities.DisplayType;
import org.ovirt.engine.core.common.businessentities.Network;
import org.ovirt.engine.core.common.businessentities.VM;
import org.ovirt.engine.core.common.businessentities.VMStatus;
import org.ovirt.engine.core.common.businessentities.VmBase;
import org.ovirt.engine.core.common.businessentities.VmType;
import org.ovirt.engine.core.common.businessentities.network_cluster;
import org.ovirt.engine.core.common.config.Config;
import org.ovirt.engine.core.common.config.ConfigValues;
import org.ovirt.engine.core.compat.TimeZoneInfo;
import org.ovirt.engine.core.compat.Version;
import org.ovirt.engine.core.compat.WindowsJavaTimezoneMapping;
import org.ovirt.engine.core.dal.comparators.DiskImageByBootComparator;
import org.ovirt.engine.core.dal.comparators.DiskImageByDiskAliasComparator;
import org.ovirt.engine.core.dal.dbbroker.DbFacade;
import org.ovirt.engine.core.utils.log.Log;
import org.ovirt.engine.core.utils.log.LogFactory;
import org.ovirt.engine.core.utils.vmproperties.VmPropertiesUtils;
import org.ovirt.engine.core.vdsbroker.xmlrpc.XmlRpcStruct;

/**
 * Base class that assembles the VDSM "create VM" parameter struct
 * ({@link #createInfo}) from a {@link VM} business entity. Concrete subclasses
 * supply the device-specific pieces via the abstract build* hooks.
 */
public abstract class VmInfoBuilderBase {

    protected static final Log log = LogFactory.getLog(VmInfoBuilderBase.class);
    protected XmlRpcStruct createInfo;
    protected VM vm;
    // IDE supports only 4 slots , slot 2 is preserved by VDSM to the CDROM
    protected int[] ideIndexSlots = new int[] { 0, 1, 3 };

    /**
     * Fills {@link #createInfo} with the general VM properties: identity,
     * memory, CPU topology, display/SSL settings, boot state and misc flags.
     */
    protected void buildVmProperties() {
        createInfo.add(VdsProperties.vm_guid, vm.getId().toString());
        createInfo.add(VdsProperties.vm_name, vm.getvm_name());
        createInfo.add(VdsProperties.mem_size_mb, vm.getvm_mem_size_mb());
        // Integer.toString avoids the deprecated new Integer(..) boxing.
        createInfo.add(VdsProperties.num_of_cpus, Integer.toString(vm.getnum_of_cpus()));
        if (Config.<Boolean> GetValue(ConfigValues.SendSMPOnRunVm)) {
            createInfo.add(VdsProperties.cores_per_socket,
                    (Integer.toString(vm.getcpu_per_socket())));
        }
        final String compatibilityVersion = vm.getvds_group_compatibility_version().toString();
        addCpuPinning(compatibilityVersion);
        createInfo.add(VdsProperties.emulatedMachine, Config.<String> GetValue(
                ConfigValues.EmulatedMachine, compatibilityVersion));
        // send cipher suite and spice secure channels parameters only if ssl
        // enabled.
        if (Config.<Boolean> GetValue(ConfigValues.SSLEnabled)) {
            createInfo.add(VdsProperties.spiceSslCipherSuite,
                    Config.<String> GetValue(ConfigValues.CipherSuite));
            createInfo.add(VdsProperties.SpiceSecureChannels, Config.<String> GetValue(
                    ConfigValues.SpiceSecureChannels, compatibilityVersion));
        }
        createInfo.add(VdsProperties.kvmEnable, vm.getkvm_enable().toString()
                .toLowerCase());
        createInfo.add(VdsProperties.acpiEnable, vm.getacpi_enable().toString()
                .toLowerCase());
        createInfo.add(VdsProperties.Custom,
                VmPropertiesUtils.getInstance().getVMProperties(vm.getvds_group_compatibility_version(),
                        vm.getStaticData()));
        createInfo.add(VdsProperties.vm_type, "kvm"); // "qemu", "kvm"
        if (vm.getRunAndPause()) {
            createInfo.add(VdsProperties.launch_paused_param, "true");
        }
        if (vm.getvds_group_cpu_flags_data() != null) {
            createInfo.add(VdsProperties.cpuType,
                    vm.getvds_group_cpu_flags_data());
        }
        createInfo.add(VdsProperties.niceLevel, Integer.toString(vm.getnice_level()));
        if (vm.getstatus() == VMStatus.Suspended
                && !StringUtils.isEmpty(vm.gethibernation_vol_handle())) {
            createInfo.add(VdsProperties.hiberVolHandle,
                    vm.gethibernation_vol_handle());
        }
        createInfo.add(VdsProperties.KeyboardLayout,
                Config.<String> GetValue(ConfigValues.VncKeyboardLayout));
        if (vm.getvm_os().isLinux()) {
            createInfo.add(VdsProperties.PitReinjection, "false");
        }
        if (vm.getdisplay_type() == DisplayType.vnc) {
            createInfo.add(VdsProperties.TabletEnable, "true");
        }
        createInfo.add(VdsProperties.transparent_huge_pages,
                vm.getTransparentHugePages() ? "true" : "false");
    }

    /**
     * Adds the CPU-pinning dictionary if pinning is configured and enabled for
     * the given cluster compatibility version. The persisted format is
     * "vcpu#cpuset" pairs joined by '_', e.g. "0#1_1#2-3".
     */
    private void addCpuPinning(final String compatibilityVersion) {
        final String cpuPinning = vm.getCpuPinning();
        if (StringUtils.isNotEmpty(cpuPinning)
                && Boolean.TRUE.equals(Config.<Boolean> GetValue(ConfigValues.CpuPinningEnabled,
                        compatibilityVersion))) {
            final XmlRpcStruct pinDict = new XmlRpcStruct();
            for (String pin : cpuPinning.split("_")) {
                final String[] split = pin.split("#");
                pinDict.add(split[0], split[1]);
            }
            createInfo.add(VdsProperties.cpuPinning, pinDict);
        }
    }

    /**
     * Resolves the cluster's display network (if one is flagged) and passes its
     * name to VDSM so the display connection uses that network.
     */
    protected void buildVmNetworkCluster() {
        // set Display network
        List<network_cluster> all = DbFacade.getInstance()
                .getNetworkClusterDao().getAllForCluster(vm.getvds_group_id());
        network_cluster networkCluster = null;
        for (network_cluster tempNetworkCluster : all) {
            if (tempNetworkCluster.getis_display()) {
                networkCluster = tempNetworkCluster;
                break;
            }
        }
        if (networkCluster != null) {
            Network net = null;
            List<Network> allNetworks = DbFacade.getInstance().getNetworkDao()
                    .getAll();
            for (Network tempNetwork : allNetworks) {
                if (tempNetwork.getId().equals(networkCluster.getnetwork_id())) {
                    net = tempNetwork;
                    break;
                }
            }
            if (net != null) {
                createInfo.add(VdsProperties.displaynetwork, net.getname());
            }
        }
    }

    /**
     * Adds initrd/kernel boot options. Kernel parameters are only meaningful
     * when a kernel URL is present, hence the nesting.
     */
    protected void buildVmBootOptions() {
        // Boot Options
        if (!StringUtils.isEmpty(vm.getinitrd_url())) {
            createInfo.add(VdsProperties.InitrdUrl, vm.getinitrd_url());
        }
        if (!StringUtils.isEmpty(vm.getkernel_url())) {
            createInfo.add(VdsProperties.KernelUrl, vm.getkernel_url());
            if (!StringUtils.isEmpty(vm.getkernel_params())) {
                createInfo.add(VdsProperties.KernelParams,
                        vm.getkernel_params());
            }
        }
    }

    /**
     * Sends the VM's UTC offset: the dynamic utc_diff when known, otherwise an
     * offset computed from the static time zone (Windows zone names are mapped
     * to Java zone ids first).
     */
    protected void buildVmTimeZone() {
        // send vm_dynamic.utc_diff if exist, if not send vm_static.time_zone
        if (vm.getutc_diff() != null) {
            createInfo.add(VdsProperties.utc_diff, vm.getutc_diff().toString());
        } else {
            // get vm timezone
            String timeZone = TimeZoneInfo.Local.getId();
            if (!StringUtils.isEmpty(vm.gettime_zone())) {
                timeZone = vm.gettime_zone();
            }

            int offset = 0;
            String javaZoneId = null;
            if (vm.getos().isWindows()) {
                // convert to java & calculate offset
                javaZoneId = WindowsJavaTimezoneMapping.windowsToJava.get(timeZone);
            } else {
                javaZoneId = timeZone;
            }
            // An unmapped Windows zone yields null; keep offset 0 in that case.
            if (javaZoneId != null) {
                offset = (TimeZone.getTimeZone(javaZoneId).getOffset(
                        new Date().getTime()) / 1000);
            }
            createInfo.add(VdsProperties.utc_diff, "" + offset);
        }
    }

    /**
     * @return the VM's disks ordered by alias, then with bootable disks first
     *         (important for IDE to be index 0).
     */
    protected List<Disk> getSortedDisks() {
        // order first by drive numbers and then order by boot for the bootable
        // drive to be first (important for IDE to be index 0) !
        List<Disk> diskImages = new ArrayList<Disk>(vm.getDiskMap()
                .values());
        Collections.sort(diskImages, new DiskImageByDiskAliasComparator());
        Collections.sort(diskImages,
                Collections.reverseOrder(new DiskImageByBootComparator()));
        return diskImages;
    }

    /**
     * gets the vm sound device type
     *
     * @param vm
     *            The VM
     * @param compatibilityVersion
     * @return String, the sound card device type
     */
    public static String getSoundDevice(VmBase vm, Version compatibilityVersion) {
        // FIX: was "%1s" (a width-1 flag that only worked by accident for
        // non-empty names); "%1$s" is the intended positional argument.
        final String OS_REGEX = "^.*%1$s,([^,]*).*$";
        final String DEFAULT_TYPE = "default";
        String ret = DEFAULT_TYPE;

        if (vm.getvm_type() == VmType.Desktop) {
            String soundDeviceTypeConfig = Config.<String> GetValue(
                    ConfigValues.DesktopAudioDeviceType, compatibilityVersion.toString());
            String vmOS = vm.getos().name();

            // Look up the OS-specific entry first, then the "default" entry.
            Pattern regexPattern = Pattern.compile(String
                    .format(OS_REGEX, vmOS));
            Matcher regexMatcher = regexPattern.matcher(soundDeviceTypeConfig);
            if (regexMatcher.find()) {
                ret = regexMatcher.group(1);
            } else {
                regexPattern = Pattern.compile(String.format(OS_REGEX,
                        DEFAULT_TYPE));
                regexMatcher = regexPattern.matcher(soundDeviceTypeConfig);
                if (regexMatcher.find()) {
                    ret = regexMatcher.group(1);
                }
            }
        }
        return ret;
    }

    protected void logUnsupportedInterfaceType() {
        log.error("Unsupported interface type, ISCSI interface type is not supported.");
    }

    protected abstract void buildVmVideoCards();

    protected abstract void buildVmCD();

    protected abstract void buildVmFloppy();

    protected abstract void buildVmDrives();

    protected abstract void buildVmNetworkInterfaces();

    protected abstract void buildVmSoundDevices();

    protected abstract void buildUnmanagedDevices();

    protected abstract void buildVmBootSequence();

    protected abstract void buildSysprepVmPayload(String strSysPrepContent);

    protected abstract void buildVmUsbDevices();

    protected abstract void buildVmMemoryBalloon();
}
/*
 * Copyright (c) 2013, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
 *
 * WSO2 Inc. licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.wso2.carbon.identity.application.authentication.framework.config.builder;

import org.wso2.carbon.identity.application.authentication.framework.ApplicationAuthenticator;
import org.wso2.carbon.identity.application.authentication.framework.config.model.ApplicationConfig;
import org.wso2.carbon.identity.application.authentication.framework.config.model.AuthenticatorConfig;
import org.wso2.carbon.identity.application.authentication.framework.config.model.SequenceConfig;
import org.wso2.carbon.identity.application.authentication.framework.config.model.StepConfig;
import org.wso2.carbon.identity.application.authentication.framework.exception.FrameworkException;
import org.wso2.carbon.identity.application.authentication.framework.internal.FrameworkServiceComponent;
import org.wso2.carbon.identity.application.authentication.framework.util.FrameworkConstants;
import org.wso2.carbon.identity.application.common.IdentityApplicationManagementException;
import org.wso2.carbon.identity.application.common.model.AuthenticationStep;
import org.wso2.carbon.identity.application.common.model.FederatedAuthenticatorConfig;
import org.wso2.carbon.identity.application.common.model.IdentityProvider;
import org.wso2.carbon.identity.application.common.model.LocalAuthenticatorConfig;
import org.wso2.carbon.identity.application.common.model.RequestPathAuthenticatorConfig;
import org.wso2.carbon.identity.application.common.model.ServiceProvider;
import org.wso2.carbon.identity.application.mgt.ApplicationInfoProvider;

import java.util.ArrayList;
import java.util.List;

/**
 * Builds the authentication {@link SequenceConfig} for a service provider from the
 * authentication steps configured through the management UI. Thread-safe singleton.
 */
public class UIBasedConfigurationBuilder {

    private static volatile UIBasedConfigurationBuilder instance;

    /**
     * Returns the shared builder instance, lazily created with double-checked locking
     * (safe because {@code instance} is volatile).
     */
    public static UIBasedConfigurationBuilder getInstance() {
        if (instance == null) {
            synchronized (UIBasedConfigurationBuilder.class) {
                if (instance == null) {
                    instance = new UIBasedConfigurationBuilder();
                }
            }
        }
        return instance;
    }

    /**
     * Builds the authentication sequence for the service provider registered against the
     * given client id and inbound request type.
     *
     * @param reqType      inbound protocol type (e.g. "oauth2", "oidc", "samlsso")
     * @param clientId     client identifier of the service provider
     * @param tenantDomain tenant the service provider belongs to
     * @return a populated {@link SequenceConfig}; steps are keyed by step order
     * @throws FrameworkException if the service provider cannot be resolved or is null
     */
    public SequenceConfig getSequence(String reqType, String clientId, String tenantDomain)
            throws FrameworkException {

        ApplicationInfoProvider appInfo = ApplicationInfoProvider.getInstance();

        // special case for OpenID Connect, these clients are stored as OAuth2 clients
        if ("oidc".equals(reqType)) {
            reqType = "oauth2";
        }

        ServiceProvider serviceProvider;
        try {
            // NOTE(review): "ClienId" is a typo in the upstream ApplicationInfoProvider API;
            // it cannot be corrected here without breaking against that dependency.
            serviceProvider = appInfo.getServiceProviderByClienId(clientId, reqType, tenantDomain);
        } catch (IdentityApplicationManagementException e) {
            throw new FrameworkException(e.getMessage(), e);
        }

        if (serviceProvider == null) {
            throw new FrameworkException("ServiceProvider cannot be null");
        }

        SequenceConfig sequenceConfig = new SequenceConfig();
        sequenceConfig.setApplicationId(serviceProvider.getApplicationName());
        sequenceConfig.setApplicationConfig(new ApplicationConfig(serviceProvider));

        // setting request path authenticators
        loadRequestPathAuthenticators(sequenceConfig, serviceProvider);

        AuthenticationStep[] authenticationSteps = serviceProvider
                .getLocalAndOutBoundAuthenticationConfig().getAuthenticationSteps();

        int stepOrder = 0;

        if (authenticationSteps == null) {
            return sequenceConfig;
        }

        // for each configured step
        for (AuthenticationStep authenticationStep : authenticationSteps) {

            try {
                stepOrder = authenticationStep.getStepOrder();
            } catch (NumberFormatException e) {
                // defensive fallback: assign sequential order if the configured order is unusable
                stepOrder++;
            }

            // create a step configuration object
            StepConfig stepConfig = createStepConfigurationObject(stepOrder, authenticationStep);

            // loading Federated Authenticators
            loadFederatedAuthenticators(authenticationStep, stepConfig);

            // load local authenticators
            loadLocalAuthenticators(authenticationStep, stepConfig);

            sequenceConfig.getStepMap().put(stepOrder, stepConfig);
        }

        return sequenceConfig;
    }

    /** Creates a {@link StepConfig} carrying the step order and subject/attribute flags. */
    private StepConfig createStepConfigurationObject(int stepOrder, AuthenticationStep authenticationStep) {
        StepConfig stepConfig = new StepConfig();
        stepConfig.setOrder(stepOrder);
        stepConfig.setSubjectAttributeStep(authenticationStep.isAttributeStep());
        stepConfig.setSubjectIdentifierStep(authenticationStep.isSubjectStep());
        return stepConfig;
    }

    /**
     * Copies the service provider's request-path authenticators into the sequence,
     * resolving each to its registered {@link ApplicationAuthenticator} by name.
     * (Renamed from the misspelled {@code loadRequestPathAuthenticatos}; private, so no
     * external callers are affected.)
     */
    private void loadRequestPathAuthenticators(SequenceConfig sequenceConfig, ServiceProvider serviceProvider) {

        if (serviceProvider.getRequestPathAuthenticatorConfigs() != null
                && serviceProvider.getRequestPathAuthenticatorConfigs().length > 0) {

            List<AuthenticatorConfig> requestPathAuthenticators = new ArrayList<AuthenticatorConfig>();

            RequestPathAuthenticatorConfig[] reqAuths = serviceProvider
                    .getRequestPathAuthenticatorConfigs();

            // for each request path authenticator
            for (RequestPathAuthenticatorConfig reqAuth : reqAuths) {

                AuthenticatorConfig authConfig = new AuthenticatorConfig();
                String authenticatorName = reqAuth.getName();
                authConfig.setName(authenticatorName);
                authConfig.setEnabled(true);

                // iterate through each system authentication config
                for (ApplicationAuthenticator appAuthenticator : FrameworkServiceComponent.getAuthenticators()) {
                    if (authenticatorName.equalsIgnoreCase(appAuthenticator.getName())) {
                        authConfig.setApplicationAuthenticator(appAuthenticator);
                        break;
                    }
                }

                requestPathAuthenticators.add(authConfig);
            }

            sequenceConfig.setReqPathAuthenticators(requestPathAuthenticators);
        }
    }

    /**
     * Adds each federated IdP's default authenticator of the given step to the step
     * configuration.
     */
    private void loadFederatedAuthenticators(AuthenticationStep authenticationStep, StepConfig stepConfig) {

        IdentityProvider[] federatedIDPs = authenticationStep.getFederatedIdentityProviders();

        if (federatedIDPs != null) {
            // for each idp in the step
            for (IdentityProvider federatedIDP : federatedIDPs) {

                FederatedAuthenticatorConfig federatedAuthenticator = federatedIDP
                        .getDefaultAuthenticatorConfig();

                // for each authenticator in the idp
                String actualAuthenticatorName = federatedAuthenticator.getName();

                // assign it to the step
                loadStepAuthenticator(stepConfig, federatedIDP, actualAuthenticatorName);
            }
        }
    }

    /**
     * Adds the step's local authenticators to the step configuration under the synthetic
     * local identity provider ({@link FrameworkConstants#LOCAL_IDP_NAME}).
     */
    private void loadLocalAuthenticators(AuthenticationStep authenticationStep, StepConfig stepConfig) {

        LocalAuthenticatorConfig[] localAuthenticators = authenticationStep
                .getLocalAuthenticatorConfigs();

        if (localAuthenticators != null) {

            IdentityProvider localIdp = new IdentityProvider();
            localIdp.setIdentityProviderName(FrameworkConstants.LOCAL_IDP_NAME);

            // assign it to the step
            for (LocalAuthenticatorConfig localAuthenticator : localAuthenticators) {
                String actualAuthenticatorName = localAuthenticator.getName();
                loadStepAuthenticator(stepConfig, localIdp, actualAuthenticatorName);
            }
        }
    }

    /**
     * Attaches the named authenticator (creating its config if it is not already in the
     * step) and registers the IdP against it; flags the step as multi-option once it
     * offers more than one authenticator or IdP.
     */
    private void loadStepAuthenticator(StepConfig stepConfig, IdentityProvider idp, String authenticatorName) {

        AuthenticatorConfig authenticatorConfig = null;

        // check if authenticator already exists
        for (AuthenticatorConfig authConfig : stepConfig.getAuthenticatorList()) {
            if (authenticatorName.equals(authConfig.getName())) {
                authenticatorConfig = authConfig;
                break;
            }
        }

        if (authenticatorConfig == null) {
            authenticatorConfig = new AuthenticatorConfig();
            authenticatorConfig.setName(authenticatorName);

            // resolve the runtime ApplicationAuthenticator registered under this name
            for (ApplicationAuthenticator appAuthenticator : FrameworkServiceComponent.getAuthenticators()) {
                if (authenticatorName.equalsIgnoreCase(appAuthenticator.getName())) {
                    authenticatorConfig.setApplicationAuthenticator(appAuthenticator);
                    break;
                }
            }

            stepConfig.getAuthenticatorList().add(authenticatorConfig);
        }

        if (idp != null) {
            authenticatorConfig.getIdpNames().add(idp.getIdentityProviderName());
            authenticatorConfig.getIdps().put(idp.getIdentityProviderName(), idp);
        }

        if (!stepConfig.isMultiOption() && (stepConfig.getAuthenticatorList().size() > 1
                || authenticatorConfig.getIdps().size() > 1)) {
            stepConfig.setMultiOption(true);
        }
    }
}
/*
 * Copyright 2000-2017 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.java.psi;

import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ex.PathManagerEx;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.psi.impl.JavaPsiFacadeEx;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.util.PsiUtil;
import com.intellij.testFramework.PsiTestUtil;

import java.io.File;

/**
 * Tests PSI type construction, resolution and substitution for generics: raw types,
 * wildcards, bounded parameters, varargs and numeric promotion. Each test method reads
 * fixture classes from testdata under {@code psi/types/<testName>}.
 *
 * @author dsl
 */
public class TypesTest extends GenericsTestCase {
  @Override
  protected void setUp() throws Exception {
    super.setUp();
    setupGenericSampleClasses();
    // Attach the per-test data directory (psi/types/<testName>) as a source root, if present.
    final String testPath = PathManagerEx.getTestDataPath().replace(File.separatorChar, '/') + "/psi/types/" + getTestName(true);
    final VirtualFile[] testRoot = { null };
    ApplicationManager.getApplication().runWriteAction(() -> {
      testRoot[0] = LocalFileSystem.getInstance().refreshAndFindFileByPath(testPath);
    });
    if (testRoot[0] != null) {
      PsiTestUtil.addSourceRoot(myModule, testRoot[0]);
    }
  }

  // Parameterized type equality and substitution: List<String> add() parameter substitutes to String.
  public void testSimpleStuff() throws Exception {
    final JavaPsiFacadeEx psiManager = getJavaFacade();
    final PsiElementFactory factory = psiManager.getElementFactory();
    final PsiClass classA = psiManager.findClass("A");
    assertNotNull(classA);
    final PsiMethod method = classA.getMethods()[0];
    final PsiStatement[] methodStatements = method.getBody().getStatements();
    final PsiDeclarationStatement declarationStatement = (PsiDeclarationStatement) methodStatements[0];
    final PsiVariable varList = (PsiVariable) declarationStatement.getDeclaredElements()[0];
    final PsiType typeListOfA = factory.createTypeFromText("test.List<java.lang.String>", null);
    assertEquals(varList.getType(), typeListOfA);
    final PsiType typeListOfObject = factory.createTypeFromText("test.List<java.lang.Object>", null);
    assertFalse(varList.getType().equals(typeListOfObject));
    final PsiReferenceExpression methodExpression =
        ((PsiMethodCallExpression) ((PsiExpressionStatement) methodStatements[1]).getExpression()).getMethodExpression();
    final JavaResolveResult resolveResult = methodExpression.advancedResolve(false);
    assertTrue(resolveResult.getElement() instanceof PsiMethod);
    final PsiMethod methodFromList = (PsiMethod) resolveResult.getElement();
    final PsiType typeOfFirstParameterOfAdd = methodFromList.getParameterList().getParameters()[0].getType();
    final PsiType substitutedType = resolveResult.getSubstitutor().substitute(typeOfFirstParameterOfAdd);
    final PsiClassType typeA = factory.createTypeByFQClassName("java.lang.String");
    assertEquals(typeA, substitutedType);
    assertTrue(typeA.equalsToText("java.lang.String"));
    final PsiType aListIteratorType = ((PsiExpressionStatement) methodStatements[2]).getExpression().getType();
    final PsiType aIteratorType = factory.createTypeFromText("test.Iterator<java.lang.String>", null);
    assertEquals(aIteratorType, aListIteratorType);
    final PsiType objectIteratorType = factory.createTypeFromText("test.Iterator<java.lang.Object>", null);
    assertFalse(objectIteratorType.equals(aListIteratorType));
  }

  // Raw-type usage: type parameters erase to java.lang.Object during substitution.
  public void testRawTypes() throws Exception {
    final JavaPsiFacadeEx psiManager = getJavaFacade();
    final PsiElementFactory factory = psiManager.getElementFactory();
    final PsiClass classA = psiManager.findClass("A");
    assertNotNull(classA);
    final PsiMethod method = classA.getMethods()[0];
    final PsiStatement[] methodStatements = method.getBody().getStatements();
    final PsiDeclarationStatement declarationStatement = (PsiDeclarationStatement) methodStatements[0];
    final PsiVariable varList = (PsiVariable) declarationStatement.getDeclaredElements()[0];
    final PsiType typeFromText = factory.createTypeFromText("test.List", null);
    assertEquals(varList.getType(), typeFromText);
    final PsiReferenceExpression methodExpression =
        ((PsiMethodCallExpression) ((PsiExpressionStatement) methodStatements[1]).getExpression()).getMethodExpression();
    final JavaResolveResult resolveResult = methodExpression.advancedResolve(false);
    assertTrue(resolveResult.getElement() instanceof PsiMethod);
    final PsiMethod methodFromList = (PsiMethod) resolveResult.getElement();
    final PsiType typeOfFirstParameterOfAdd = methodFromList.getParameterList().getParameters()[0].getType();
    final PsiType substitutedType = resolveResult.getSubstitutor().substitute(typeOfFirstParameterOfAdd);
    assertEquals(PsiType.getJavaLangObject(getPsiManager(), method.getResolveScope()), substitutedType);
    final PsiType methodCallType = ((PsiExpressionStatement) methodStatements[2]).getExpression().getType();
    final PsiType rawIteratorType = factory.createTypeFromText("test.Iterator", null);
    assertEquals(rawIteratorType, methodCallType);
  }

  // Substitution through an inheritor (IntList extends List<Integer>): T maps to Integer.
  public void testSubstWithInheritor() throws Exception {
    final JavaPsiFacadeEx psiManager = getJavaFacade();
    final PsiElementFactory factory = psiManager.getElementFactory();
    final PsiClass classA = psiManager.findClass("A");
    assertNotNull(classA);
    final PsiMethod method = classA.getMethods()[0];
    final PsiStatement[] methodStatements = method.getBody().getStatements();
    final PsiDeclarationStatement declarationStatement = (PsiDeclarationStatement) methodStatements[0];
    final PsiVariable varList = (PsiVariable) declarationStatement.getDeclaredElements()[0];
    final PsiType typeFromText = factory.createTypeFromText("test.IntList", null);
    assertEquals(varList.getType(), typeFromText);
    final PsiReferenceExpression methodExpression =
        ((PsiMethodCallExpression) ((PsiExpressionStatement) methodStatements[1]).getExpression()).getMethodExpression();
    final JavaResolveResult resolveResult = methodExpression.advancedResolve(false);
    assertTrue(resolveResult.getElement() instanceof PsiMethod);
    final PsiMethod methodFromList = (PsiMethod) resolveResult.getElement();
    final PsiType typeOfFirstParameterOfAdd = methodFromList.getParameterList().getParameters()[0].getType();
    final PsiType substitutedType = resolveResult.getSubstitutor().substitute(typeOfFirstParameterOfAdd);
    final PsiType javaLangInteger = factory.createTypeFromText("java.lang.Integer", null);
    assertEquals(javaLangInteger, substitutedType);
    final PsiType intListIteratorReturnType = ((PsiExpressionStatement) methodStatements[2]).getExpression().getType();
    final PsiType integerIteratorType = factory.createTypeFromText("test.Iterator<java.lang.Integer>", null);
    assertEquals(integerIteratorType, intListIteratorReturnType);
    final PsiType objectIteratorType = factory.createTypeFromText("test.Iterator<java.lang.Object>", null);
    assertFalse(objectIteratorType.equals(integerIteratorType));
  }

  // A raw-typed qualifier still resolves calls to the generic declaration.
  public void testSimpleRawTypeInMethodArg() throws Exception {
    final JavaPsiFacadeEx psiManager = getJavaFacade();
    final PsiElementFactory factory = psiManager.getElementFactory();
    final PsiClass classA = psiManager.findClass("A");
    assertNotNull(classA);
    final PsiMethod method = classA.getMethods()[0];
    final PsiStatement[] methodStatements = method.getBody().getStatements();
    final PsiVariable variable = (PsiVariable) ((PsiDeclarationStatement) methodStatements[0]).getDeclaredElements()[0];
    final PsiClassType type = (PsiClassType) variable.getType();
    final PsiClassType.ClassResolveResult resolveClassTypeResult = type.resolveGenerics();
    assertNotNull(resolveClassTypeResult.getElement());
    final PsiReferenceExpression methodExpression =
        ((PsiMethodCallExpression) ((PsiExpressionStatement) methodStatements[2]).getExpression()).getMethodExpression();
    final PsiExpression qualifierExpression = methodExpression.getQualifierExpression();
    final PsiClassType qualifierType = (PsiClassType) qualifierExpression.getType();
    assertFalse(qualifierType.hasParameters());
    final PsiType typeFromText = factory.createTypeFromText("test.List", null);
    assertEquals(qualifierType, typeFromText);
    final PsiElement psiElement = ((PsiReferenceExpression) qualifierExpression).resolve();
    assertTrue(psiElement instanceof PsiVariable);
    final JavaResolveResult resolveResult = methodExpression.advancedResolve(false);
    assertTrue(resolveResult.getElement() instanceof PsiMethod);
    final PsiMethod methodFromList = (PsiMethod) resolveResult.getElement();
    assertEquals("add", methodFromList.getName());
    assertEquals("test.List", methodFromList.getContainingClass().getQualifiedName());
  }

  // Overload resolution with a raw-typed argument resolves to test.List#putAll.
  public void testRawTypeInMethodArg() throws Exception {
    final PsiClass classA = getJavaFacade().findClass("A");
    assertNotNull(classA);
    final PsiMethod method = classA.getMethods()[0];
    final PsiStatement[] methodStatements = method.getBody().getStatements();
    final PsiReferenceExpression methodExpression =
        ((PsiMethodCallExpression) ((PsiExpressionStatement) methodStatements[2]).getExpression()).getMethodExpression();
    final JavaResolveResult resolveResult = methodExpression.advancedResolve(false);
    assertTrue(resolveResult.getElement() instanceof PsiMethod);
    final PsiMethod methodFromList = (PsiMethod) resolveResult.getElement();
    assertEquals("putAll", methodFromList.getName());
    assertEquals("test.List", methodFromList.getContainingClass().getQualifiedName());
  }

  // Raw type from a bounded type parameter: declared and initializer types agree and are assignable.
  public void testBoundedParams() throws Exception {
    final JavaPsiFacadeEx psiManager = getJavaFacade();
    final PsiElementFactory factory = psiManager.getElementFactory();
    final PsiClass classA = psiManager.findClass("A");
    assertNotNull(classA);
    final PsiMethod method = classA.getMethods()[0];
    final PsiStatement[] statements = method.getBody().getStatements();
    final PsiVariable var = (PsiVariable) ((PsiDeclarationStatement) statements[0]).getDeclaredElements()[0];
    final PsiType varType = var.getType();
    final PsiType typeRawIterator = factory.createTypeFromText("test.Iterator", null);
    assertEquals(varType, typeRawIterator);
    final PsiType initializerType = var.getInitializer().getType();
    assertEquals(initializerType, typeRawIterator);
    assertTrue(varType.isAssignableFrom(initializerType));
  }

  // Same contract as testBoundedParams, exercised through a raw extension of a generic class.
  public void testRawTypeExtension() throws Exception {
    final JavaPsiFacadeEx psiManager = getJavaFacade();
    final PsiElementFactory factory = psiManager.getElementFactory();
    final PsiClass classA = psiManager.findClass("A");
    assertNotNull(classA);
    final PsiMethod method = classA.getMethods()[0];
    final PsiStatement[] statements = method.getBody().getStatements();
    final PsiVariable var = (PsiVariable) ((PsiDeclarationStatement) statements[0]).getDeclaredElements()[0];
    final PsiType varType = var.getType();
    final PsiType typeRawIterator = factory.createTypeFromText("test.Iterator", null);
    assertEquals(varType, typeRawIterator);
    final PsiType initializerType = var.getInitializer().getType();
    assertEquals(initializerType, typeRawIterator);
    assertTrue(varType.isAssignableFrom(initializerType));
  }

  // Inside a generic class A<T>, an expression of the type parameter has type T.
  public void testTypesInGenericClass() {
    final JavaPsiFacadeEx psiManager = getJavaFacade();
    final PsiElementFactory factory = psiManager.getElementFactory();
    final PsiClass classA = psiManager.findClass("A");
    assertNotNull(classA);
    final PsiTypeParameter parameterT = classA.getTypeParameters()[0];
    assertEquals("T", parameterT.getName());
    final PsiMethod method = classA.findMethodsByName("method", false)[0];
    final PsiType type = ((PsiExpressionStatement) method.getBody().getStatements()[0]).getExpression().getType();
    final PsiClassType typeT = factory.createType(parameterT);
    assertEquals("T", typeT.getPresentableText());
    assertEquals(typeT, type);
  }

  // Assignability through inheritance: Collection<String> accepts List<String>.
  public void testAssignableSubInheritor() throws Exception {
    final JavaPsiFacadeEx psiManager = getJavaFacade();
    final PsiElementFactory factory = psiManager.getElementFactory();
    final PsiClass classCollection = psiManager.findClass("test.Collection");
    final PsiClass classList = psiManager.findClass("test.List");
    final PsiType collectionType = factory.createType(classCollection, PsiSubstitutor.EMPTY);
    final PsiType listType = factory.createType(classList, PsiSubstitutor.EMPTY);
    assertEquals(collectionType.getCanonicalText(), "test.Collection<E>");
    assertEquals(listType.getCanonicalText(), "test.List<T>");
    final PsiType typeListOfString = factory.createTypeFromText("test.List<java.lang.String>", null);
    final PsiType typeCollectionOfString = factory.createTypeFromText("test.Collection<java.lang.String>", null);
    assertTrue(typeCollectionOfString.isAssignableFrom(typeListOfString));
  }

  // Element-type inference through a multi-level generic hierarchy.
  public void testComplexInheritance() throws Exception {
    final JavaPsiFacadeEx psiManager = getJavaFacade();
    final PsiElementFactory factory = psiManager.getElementFactory();
    final PsiClass classA = psiManager.findClass("A");
    assertNotNull(classA);
    final PsiMethod method = classA.findMethodsByName("method", false)[0];
    final PsiExpression expression = ((PsiExpressionStatement) method.getBody().getStatements()[1]).getExpression();
    assertEquals("l.get(0)", expression.getText());
    final PsiType type = expression.getType();
    final PsiType listOfInteger = factory.createTypeFromText("test.List<java.lang.Integer>", null);
    assertEquals(listOfInteger, type);
    final PsiType collectionOfInteger = factory.createTypeFromText("test.Collection<java.lang.Integer>", null);
    assertTrue(collectionOfInteger.isAssignableFrom(type));
  }

  // Nested generic element types: get(0) on List<List<Integer>>-like structures.
  public void testListListInheritance() throws Exception {
    final JavaPsiFacadeEx psiManager = getJavaFacade();
    final PsiElementFactory factory = psiManager.getElementFactory();
    final PsiClass classA = psiManager.findClass("A");
    assertNotNull(classA);
    final PsiMethod method = classA.findMethodsByName("method", false)[0];
    final PsiExpression expression1 = ((PsiExpressionStatement) method.getBody().getStatements()[1]).getExpression();
    assertEquals("l.get(0)", expression1.getText());
    final PsiType type1 = expression1.getType();
    final PsiType typeListOfInteger = factory.createTypeFromText("test.List<java.lang.Integer>", null);
    assertEquals(typeListOfInteger, type1);
    assertTrue(typeListOfInteger.isAssignableFrom(type1));
    final PsiExpression expression2 = ((PsiExpressionStatement) method.getBody().getStatements()[3]).getExpression();
    assertEquals("b.get(0)", expression2.getText());
    final PsiType type2 = expression2.getType();
    assertEquals(typeListOfInteger, type2);
  }

  // Whitespace inside a type-parameter list must not affect the parsed type.
  public void testSpaceInTypeParameterList() throws Exception {
    final JavaPsiFacadeEx psiManager = getJavaFacade();
    final PsiElementFactory factory = psiManager.getElementFactory();
    final PsiClass classA = psiManager.findClass("A");
    assertNotNull(classA);
    final PsiMethod method = classA.findMethodsByName("method", false)[0];
    final PsiVariable variable = (PsiVariable) ((PsiDeclarationStatement) method.getBody().getStatements()[0]).getDeclaredElements()[0];
    final PsiType type = variable.getType();
    final PsiType typeListOfListOfInteger = factory.createTypeFromText("test.List<test.List<java.lang.Integer>>", null);
    assertEquals(typeListOfListOfInteger, type);
  }

  // Method-level type parameter inference: CollectionUtil.sort infers Integer, or nothing.
  public void testMethodTypeParameter() throws Exception {
    final JavaPsiFacadeEx psiManager = getJavaFacade();
    final PsiElementFactory factory = psiManager.getElementFactory();
    final PsiClass classA = psiManager.findClass("A");
    assertNotNull(classA);
    final PsiMethod method = classA.findMethodsByName("method", false)[0];
    final PsiStatement[] statements = method.getBody().getStatements();
    final PsiMethodCallExpression methodCallExpression = (PsiMethodCallExpression) ((PsiExpressionStatement) statements[1]).getExpression();
    isCollectionUtilSort(methodCallExpression, factory.createTypeFromText("java.lang.Integer", null));
    final PsiMethodCallExpression methodCallExpression1 = (PsiMethodCallExpression) ((PsiExpressionStatement) statements[3]).getExpression();
    isCollectionUtilSort(methodCallExpression1, null);
  }

  // Asserts the call resolves to test.CollectionUtil#sort with the expected inferred type argument.
  private static void isCollectionUtilSort(final PsiMethodCallExpression methodCallExpression, final PsiType typeParameterValue) {
    final PsiReferenceExpression methodExpression = methodCallExpression.getMethodExpression();
    final JavaResolveResult methodResolve = methodExpression.advancedResolve(false);
    assertTrue(methodResolve.getElement() instanceof PsiMethod);
    final PsiMethod methodSort = (PsiMethod) methodResolve.getElement();
    assertEquals("sort", methodSort.getName());
    assertEquals("test.CollectionUtil", methodSort.getContainingClass().getQualifiedName());
    final PsiTypeParameter methodSortTypeParameter = methodSort.getTypeParameters()[0];
    final PsiType sortParameterActualType = methodResolve.getSubstitutor().substitute(methodSortTypeParameter);
    assertTrue(Comparing.equal(sortParameterActualType, typeParameterValue));
    assertTrue(
      PsiUtil.isApplicable(methodSort, methodResolve.getSubstitutor(), methodCallExpression.getArgumentList()));
  }

  // Array-returning members of a raw type erase to Object[] / raw element types.
  public void testRawArrayTypes() throws Exception {
    final JavaPsiFacadeEx psiManager = getJavaFacade();
    final PsiElementFactory factory = psiManager.getElementFactory();
    final PsiClass classA = psiManager.findClass("A");
    assertNotNull(classA);
    final PsiMethod method = classA.findMethodsByName("method", false)[0];
    final PsiStatement[] statements = method.getBody().getStatements();
    final PsiDeclarationStatement declarationStatement = (PsiDeclarationStatement) statements[0];
    final PsiClassType typeOfL = (PsiClassType) ((PsiVariable) declarationStatement.getDeclaredElements()[0]).getType();
    final PsiType typeRawList = factory.createTypeFromText("test.List", null);
    assertTrue(Comparing.equal(typeOfL, typeRawList));
    final PsiSubstitutor typeOfLSubstitutor = typeOfL.resolveGenerics().getSubstitutor();
    final PsiMethodCallExpression exprGetArray = (PsiMethodCallExpression) ((PsiExpressionStatement) statements[1]).getExpression();
    final PsiType typeOfGetArrayCall = exprGetArray.getType();
    final PsiType objectArrayType = factory.createTypeFromText("java.lang.Object[]", null);
    assertTrue(Comparing.equal(typeOfGetArrayCall, objectArrayType));
    final PsiMethod methodGetArray = (PsiMethod) exprGetArray.getMethodExpression().resolve();
    final PsiType subtitutedGetArrayReturnType = typeOfLSubstitutor.substitute(methodGetArray.getReturnType());
    assertTrue(Comparing.equal(subtitutedGetArrayReturnType, objectArrayType));
    final PsiMethodCallExpression exprGetListOfArray = (PsiMethodCallExpression) ((PsiExpressionStatement) statements[2]).getExpression();
    final PsiMethod methodGetListOfArray = (PsiMethod) exprGetListOfArray.getMethodExpression().resolve();
    final PsiType returnType = methodGetListOfArray.getReturnType();
    final PsiType substitutedReturnType = typeOfLSubstitutor.substitute(returnType);
    assertTrue(Comparing.equal(substitutedReturnType, typeRawList));
    final PsiType typeOfGetListOfArrayCall = exprGetListOfArray.getType();
    assertTrue(Comparing.equal(typeOfGetListOfArrayCall, typeRawList));
  }

  // Parsing of extends/super/unbounded wildcards and their bound accessors.
  public void testWildcardTypeParsing() throws Exception{
    final GlobalSearchScope scope = GlobalSearchScope.moduleWithDependenciesAndLibrariesScope(myModule);
    final PsiClassType javaLangObject = PsiType.getJavaLangObject(myPsiManager, scope);
    PsiElement element = ((PsiDeclarationStatement)myJavaFacade.getElementFactory().createStatementFromText("X<? extends Y, ? super Z<A,B>, ?> x;", null)).getDeclaredElements()[0];
    PsiJavaCodeReferenceElement referenceElement = ((PsiVariable) element).getTypeElement().getInnermostComponentReferenceElement();
    PsiType[] typeArguments = referenceElement.getTypeParameters();
    assertEquals(3, typeArguments.length);
    assertTrue(typeArguments[0] instanceof PsiWildcardType);
    assertTrue(typeArguments[1] instanceof PsiWildcardType);
    assertTrue(typeArguments[2] instanceof PsiWildcardType);
    PsiWildcardType extendsWildcard = (PsiWildcardType)typeArguments[0];
    PsiWildcardType superWildcard = (PsiWildcardType)typeArguments[1];
    PsiWildcardType unboundedWildcard = (PsiWildcardType)typeArguments[2];
    // extends wildcard test
    assertTrue(extendsWildcard.isExtends());
    assertFalse(extendsWildcard.isSuper());
    assertEquals("Y", extendsWildcard.getBound().getCanonicalText());
    assertEquals("Y", extendsWildcard.getExtendsBound().getCanonicalText());
    assertEquals(extendsWildcard.getSuperBound(), PsiType.NULL);
    // super wildcard test
    assertFalse(superWildcard.isExtends());
    assertTrue(superWildcard.isSuper());
    assertEquals("Z<A,B>", superWildcard.getBound().getCanonicalText());
    assertEquals(superWildcard.getExtendsBound(), javaLangObject);
    assertEquals("Z<A,B>", superWildcard.getSuperBound().getCanonicalText());
    // unbounded wildcard test
    assertFalse(unboundedWildcard.isExtends());
    assertFalse(unboundedWildcard.isSuper());
    assertNull(unboundedWildcard.getBound());
    assertEquals(unboundedWildcard.getExtendsBound(), javaLangObject);
    assertEquals(unboundedWildcard.getSuperBound(), PsiType.NULL);
  }

  // List<? extends Base> accepts List<Intermediate>.
  public void testWildcardTypesAssignable() throws Exception {
    PsiClassType listOfExtendsBase = (PsiClassType)myJavaFacade.getElementFactory().createTypeFromText("test.List<? extends usages.Base>", null);
    PsiClassType.ClassResolveResult classResolveResult = listOfExtendsBase.resolveGenerics();
    PsiClass listClass = classResolveResult.getElement();
    assertNotNull(listClass);
    PsiTypeParameter listTypeParameter = PsiUtil.typeParametersIterator(listClass).next();
    PsiType listParameterTypeValue = classResolveResult.getSubstitutor().substitute(listTypeParameter);
    assertTrue(listParameterTypeValue instanceof PsiWildcardType);
    assertTrue(((PsiWildcardType)listParameterTypeValue).isExtends());
    assertEquals("usages.Base", ((PsiWildcardType)listParameterTypeValue).getBound().getCanonicalText());
    PsiClassType listOfIntermediate = (PsiClassType)myJavaFacade.getElementFactory().createTypeFromText("test.List<usages.Intermediate>", null);
    assertNotNull(listOfIntermediate.resolve());
    assertTrue(listOfExtendsBase.isAssignableFrom(listOfIntermediate));
  }

  // Varargs parameter type: PsiEllipsisType is mutually assignable with its array form.
  public void testEllipsisType() throws Exception {
    PsiElementFactory factory = myJavaFacade.getElementFactory();
    PsiMethod method = factory.createMethodFromText("void foo (int ... args) {}", null);
    PsiType paramType = method.getParameterList().getParameters()[0].getType();
    assertTrue(paramType instanceof PsiEllipsisType);
    PsiType arrayType = ((PsiEllipsisType)paramType).getComponentType().createArrayType();
    assertTrue(paramType.isAssignableFrom(arrayType));
    assertTrue(arrayType.isAssignableFrom(paramType));
    PsiType typeFromText = factory.createTypeFromText("int ...", null);
    assertTrue(typeFromText instanceof PsiEllipsisType);
  }

  // Conditional yields the promoted type (double); shift unboxes to int.
  public void testBinaryNumericPromotion() throws Exception {
    PsiElementFactory factory = myJavaFacade.getElementFactory();
    final PsiExpression conditional = factory.createExpressionFromText("b ? new Integer (0) : new Double(0.0)", null);
    assertEquals(PsiType.DOUBLE, conditional.getType());
    final PsiExpression shift = factory.createExpressionFromText("Integer.valueOf(0) << 2", null);
    assertEquals(PsiType.INT, shift.getType());
  }

  // Unary plus promotes boxed operands to int; postfix ++ keeps the operand's (boxed) type.
  public void testUnaryExpressionType() throws Exception {
    final PsiElementFactory factory = myJavaFacade.getElementFactory();
    final PsiExpression plusPrefix = factory.createExpressionFromText("+Integer.valueOf(1)", null);
    assertEquals(PsiType.INT, plusPrefix.getType());
    final PsiExpression plusBytePrefix = factory.createExpressionFromText("+Byte.valueOf(1)", null);
    assertEquals(PsiType.INT, plusBytePrefix.getType());
    final PsiStatement declaration = factory.createStatementFromText("Byte b = 1;", null);
    final PsiExpression plusPlusPostfix = factory.createExpressionFromText("b++", declaration);
    assertEquals(PsiType.BYTE.getBoxedType(declaration), plusPlusPostfix.getType());
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.gemstone.gemfire.management.internal.web.controllers; import java.io.PrintWriter; import java.io.StringWriter; import java.lang.management.ManagementFactory; import java.net.URI; import java.util.Collection; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.Set; import javax.management.JMX; import javax.management.MBeanServer; import javax.management.MalformedObjectNameException; import javax.management.ObjectName; import javax.management.Query; import javax.management.QueryExp; import com.gemstone.gemfire.internal.lang.StringUtils; import com.gemstone.gemfire.internal.logging.LogService; import com.gemstone.gemfire.internal.logging.log4j.LogMarker; import com.gemstone.gemfire.internal.util.ArrayUtils; import com.gemstone.gemfire.management.DistributedSystemMXBean; import com.gemstone.gemfire.management.MemberMXBean; import com.gemstone.gemfire.management.internal.MBeanJMXAdapter; import com.gemstone.gemfire.management.internal.ManagementConstants; import com.gemstone.gemfire.management.internal.cli.shell.Gfsh; import com.gemstone.gemfire.management.internal.cli.util.CommandStringBuilder; import 
com.gemstone.gemfire.management.internal.web.controllers.support.EnvironmentVariablesHandlerInterceptor;
import com.gemstone.gemfire.management.internal.web.controllers.support.MemberMXBeanAdapter;
import com.gemstone.gemfire.management.internal.web.util.UriUtils;

import org.apache.logging.log4j.Logger;

import org.springframework.beans.propertyeditors.StringArrayPropertyEditor;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.WebDataBinder;
import org.springframework.web.bind.annotation.ExceptionHandler;
import org.springframework.web.bind.annotation.InitBinder;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.context.request.WebRequest;
import org.springframework.web.servlet.support.ServletUriComponentsBuilder;
import org.springframework.web.util.UriComponentsBuilder;

/**
 * The AbstractCommandsController class is the abstract base class encapsulating common functionality across all
 * Management Controller classes that expose REST API web service endpoints (URLs/URIs) for GemFire shell (Gfsh)
 * commands.
 *
 * @author John Blum
 * @see com.gemstone.gemfire.management.MemberMXBean
 * @see com.gemstone.gemfire.management.internal.cli.shell.Gfsh
 * @see org.springframework.stereotype.Controller
 * @see org.springframework.web.bind.annotation.ExceptionHandler
 * @see org.springframework.web.bind.annotation.InitBinder
 * @since 8.0
 */
@SuppressWarnings("unused")
public abstract class AbstractCommandsController {

  private static final Logger logger = LogService.getLogger();

  // Character encoding used by the encode/decode helpers below.
  protected static final String DEFAULT_ENCODING = UriUtils.DEFAULT_ENCODING;

  // Version prefix prepended to every REST API endpoint path (see toUri(..)).
  protected static final String REST_API_VERSION = "/v1";

  // Lazily-initialized JMX proxy to the GemFire Manager's MemberMXBean;
  // created on first use by getManagingMemberMXBean() (synchronized).
  private MemberMXBean managingMemberMXBeanProxy;

  /**
   * Asserts the argument is valid, as determined by the caller passing the result of an evaluated expression to this
   * assertion.
   *
   * @param validArg a boolean value indicating the evaluation of the expression validating the argument.
   * @param message a String value used as the message when constructing an IllegalArgumentException.
   * @param args Object arguments used to populate placeholder's in the message.
   * @throws IllegalArgumentException if the argument is not valid.
   * @see java.lang.String#format(String, Object...)
   */
  protected static void assertArgument(final boolean validArg, final String message, final Object... args) {
    if (!validArg) {
      throw new IllegalArgumentException(String.format(message, args));
    }
  }

  /**
   * Asserts the Object reference is not null!
   *
   * @param obj the reference to the Object.
   * @param message the String value used as the message when constructing and throwing a NullPointerException.
   * @param args Object arguments used to populate placeholder's in the message.
   * @throws NullPointerException if the Object reference is null.
   * @see java.lang.String#format(String, Object...)
   */
  protected static void assertNotNull(final Object obj, final String message, final Object... args) {
    if (obj == null) {
      throw new NullPointerException(String.format(message, args));
    }
  }

  /**
   * Asserts whether state, based on the evaluation of a conditional expression, passed to this assertion is valid.
   *
   * @param validState a boolean value indicating the evaluation of the expression from which the conditional state
   * is based. For example, a caller might use an expression of the form (initableObj.isInitialized()).
   * @param message a String values used as the message when constructing an IllegalStateException.
   * @param args Object arguments used to populate placeholder's in the message.
   * @throws IllegalStateException if the conditional state is not valid.
   * @see java.lang.String#format(String, Object...)
   */
  protected static void assertState(final boolean validState, final String message, final Object... args) {
    if (!validState) {
      throw new IllegalStateException(String.format(message, args));
    }
  }

  /**
   * Decodes the encoded String value using the default encoding UTF-8. It is assumed the String value was encoded
   * with the URLEncoder using the UTF-8 encoding. This method handles UnsupportedEncodingException by just returning
   * the encodedValue.
   *
   * @param encodedValue the encoded String value to decode.
   * @return the decoded value of the String or encodedValue if the UTF-8 encoding is unsupported.
   * @see com.gemstone.gemfire.management.internal.web.util.UriUtils#decode(String)
   */
  protected static String decode(final String encodedValue) {
    return UriUtils.decode(encodedValue);
  }

  /**
   * Decodes the encoded String value using the specified encoding (such as UTF-8). It is assumed the String value
   * was encoded with the URLEncoder using the specified encoding. This method handles UnsupportedEncodingException
   * by just returning the encodedValue.
   *
   * @param encodedValue a String value encoded in the encoding.
   * @param encoding a String value specifying the encoding.
   * @return the decoded value of the String or encodedValue if the specified encoding is unsupported.
   * @see com.gemstone.gemfire.management.internal.web.util.UriUtils#decode(String, String)
   */
  protected static String decode(final String encodedValue, final String encoding) {
    return UriUtils.decode(encodedValue, encoding);
  }

  /**
   * Gets the specified value if not null or empty, otherwise returns the default value.
   *
   * @param value the String value being evaluated for having value (not null and not empty).
   * @param defaultValue the default String value returned if 'value' has no value.
   * @return 'value' if not null or empty, otherwise returns the default value.
   * @see #hasValue(String)
   */
  protected static String defaultIfNoValue(final String value, final String defaultValue) {
    return (hasValue(value) ? value : defaultValue);
  }

  /**
   * Encodes the String value using the default encoding UTF-8.
   *
   * @param value the String value to encode.
   * @return an encoded value of the String using the default encoding UTF-8 or value if the UTF-8 encoding
   * is unsupported.
   * @see com.gemstone.gemfire.management.internal.web.util.UriUtils#encode(String)
   */
  protected static String encode(final String value) {
    return UriUtils.encode(value);
  }

  /**
   * Encodes the String value using the specified encoding (such as UTF-8).
   *
   * @param value the String value to encode.
   * @param encoding a String value indicating the encoding.
   * @return an encoded value of the String using the specified encoding or value if the specified encoding
   * is unsupported.
   * @see com.gemstone.gemfire.management.internal.web.util.UriUtils#encode(String, String)
   */
  protected static String encode(final String value, final String encoding) {
    return UriUtils.encode(value, encoding);
  }

  /**
   * Determines whether the specified Object has value, which is determined by a non-null Object reference.
   * String and String[] values are delegated to the more specific overloads below.
   *
   * @param value the Object value being evaluated for value.
   * @return a boolean value indicating whether the specified Object has value.
   * @see java.lang.Object
   */
  protected static boolean hasValue(final Object value) {
    return (value instanceof String[] ? hasValue((String[]) value)
      : (value instanceof String ? hasValue((String) value) : value != null));
  }

  /**
   * Determines whether the specified String has value, determined by whether the String is non-null, not empty
   * and not blank.
   *
   * @param value the String being evaluated for value.
   * @return a boolean indicating whether the specified String has value or not.
   * @see java.lang.String
   */
  protected static boolean hasValue(final String value) {
    return !StringUtils.isBlank(value);
  }

  /**
   * Determines whether the specified String array has any value, which is determined by a non-null String array
   * reference along with containing at least 1 non-null, not empty and not blank element.
   *
   * @param array an String array being evaluated for value.
   * @return a boolean indicating whether the specified String array has any value.
   * @see #hasValue(String)
   * @see java.lang.String
   */
  protected static boolean hasValue(final String[] array) {
    if (array != null && array.length > 0) {
      for (final String element : array) {
        if (hasValue(element)) {
          return true;
        }
      }
    }
    return false;
  }

  /**
   * Writes the stack trace of the Throwable to a String.
   *
   * @param t a Throwable object who's stack trace will be written to a String.
   * @return a String containing the stack trace of the Throwable.
   * @see java.io.StringWriter
   * @see java.lang.Throwable#printStackTrace(java.io.PrintWriter)
   */
  protected static String printStackTrace(final Throwable t) {
    final StringWriter stackTraceWriter = new StringWriter();
    t.printStackTrace(new PrintWriter(stackTraceWriter));
    return stackTraceWriter.toString();
  }

  /**
   * Converts the URI relative path to an absolute path based on the Servlet context information.
   * Non-static because ServletUriComponentsBuilder reads the current request context.
   *
   * @param path the URI relative path to append to the Servlet context path.
   * @param scheme the scheme to use for the URI
   * @return a URI constructed with all component path information.
   * @see java.net.URI
   * @see org.springframework.web.servlet.support.ServletUriComponentsBuilder
   */
  protected /*static*/ URI toUri(final String path, final String scheme) {
    return ServletUriComponentsBuilder.fromCurrentContextPath()
      .path(REST_API_VERSION).path(path).scheme(scheme).build().toUri();
  }

  /**
   * Handles any Exception thrown by a REST API web service endpoint, HTTP request handler method during the invocation
   * and processing of a command.
   *
   * @param cause the Exception causing the error.
   * @return a ResponseEntity with an appropriate HTTP status code (500 - Internal Server Error) and HTTP response body
   * containing the stack trace of the Exception.
   * @see java.lang.Exception
   * @see org.springframework.http.ResponseEntity
   * @see org.springframework.web.bind.annotation.ExceptionHandler
   * @see org.springframework.web.bind.annotation.ResponseBody
   */
  @ExceptionHandler(Exception.class)
  @ResponseBody
  public ResponseEntity<String> handleException(final Exception cause) {
    final String stackTrace = printStackTrace(cause);
    // NOTE(review): the full stack trace is returned to the HTTP client; confirm this is
    // intentional, as it may disclose internals to untrusted callers.
    logger.fatal(stackTrace);
    return new ResponseEntity<String>(stackTrace, HttpStatus.INTERNAL_SERVER_ERROR);
  }

  /**
   * Initializes data bindings for various HTTP request handler method parameter Java class types.
   *
   * @param dataBinder the DataBinder implementation used for Web transactions.
   * @see org.springframework.web.bind.WebDataBinder
   * @see org.springframework.web.bind.annotation.InitBinder
   */
  @InitBinder
  public void initBinder(final WebDataBinder dataBinder) {
    // Bind comma-separated request parameter values to String[]; 'false' disables trimming.
    dataBinder.registerCustomEditor(String[].class, new StringArrayPropertyEditor(
      StringArrayPropertyEditor.DEFAULT_SEPARATOR, false));
  }

  /**
   * Logs the client's HTTP (web) request including details of the HTTP headers and request parameters along with the
   * web request context and description.
   *
   * @param request the object encapsulating the details of the client's HTTP (web) request.
   * @see org.springframework.web.context.request.WebRequest
   */
  protected void logRequest(final WebRequest request) {
    if (request != null) {
      // Snapshot headers: multi-valued headers are flattened to a single String.
      final Map<String, String> headers = new HashMap<java.lang.String, java.lang.String>();
      for (Iterator<String> it = request.getHeaderNames(); it.hasNext(); ) {
        final String headerName = it.next();
        headers.put(headerName, ArrayUtils.toString((Object[]) request.getHeaderValues(headerName)));
      }
      // Snapshot request parameters the same way.
      final Map<String, String> parameters = new HashMap<String, String>(request.getParameterMap().size());
      for (Iterator<String> it = request.getParameterNames(); it.hasNext(); ) {
        final String parameterName = it.next();
        parameters.put(parameterName, ArrayUtils.toString((Object[]) request.getParameterValues(parameterName)));
      }
      logger.info("HTTP-request: description ({}), context ({}), headers ({}), parameters ({})",
        request.getDescription(false), request.getContextPath(), headers, parameters);
    }
  }

  /**
   * Gets a reference to the platform MBeanServer running in this JVM process. The MBeanServer instance constitutes
   * a connection to the MBeanServer.
   *
   * @return a reference to the platform MBeanServer for this JVM process.
   * @see java.lang.management.ManagementFactory#getPlatformMBeanServer()
   * @see javax.management.MBeanServer
   */
  protected MBeanServer getMBeanServer() {
    return ManagementFactory.getPlatformMBeanServer();
  }

  /**
   * Gets the MemberMXBean from the JVM Platform MBeanServer for the specified member, identified by name or ID
   * in the GemFire cluster.
   *
   * @param memberNameId a String indicating the name or ID of the GemFire member.
   * @return a proxy to the GemFire member's MemberMXBean.
   * @throws IllegalStateException if no MemberMXBean could be found for GemFire member with ID or name.
   * @throws RuntimeException wrapping the MalformedObjectNameException if the ObjectName pattern is malformed.
   * @see #getMBeanServer()
   * @see #isMemberMXBeanFound(java.util.Collection)
   * @see javax.management.ObjectName
   * @see javax.management.QueryExp
   * @see javax.management.MBeanServer#queryNames(javax.management.ObjectName, javax.management.QueryExp)
   * @see javax.management.JMX#newMXBeanProxy(javax.management.MBeanServerConnection, javax.management.ObjectName, Class)
   * @see com.gemstone.gemfire.management.MemberMXBean
   */
  protected MemberMXBean getMemberMXBean(final String memberNameId) {
    try {
      final MBeanServer connection = getMBeanServer();

      final String objectNamePattern = ManagementConstants.OBJECTNAME__PREFIX.concat("type=Member,*");

      // NOTE throws a MalformedObjectNameException, but this should not happen since we constructed the ObjectName above
      final ObjectName objectName = ObjectName.getInstance(objectNamePattern);

      // Match the member by either its Name or its Id attribute.
      final QueryExp query = Query.or(
        Query.eq(Query.attr("Name"), Query.value(memberNameId)),
        Query.eq(Query.attr("Id"), Query.value(memberNameId))
      );

      final Set<ObjectName> objectNames = connection.queryNames(objectName, query);

      assertState(isMemberMXBeanFound(objectNames),
        "No MemberMXBean with ObjectName (%1$s) based on Query (%2$s) was found in the Platform MBeanServer for member (%3$s)!",
        objectName, query, memberNameId);

      // If multiple matches exist, the first one from the (unordered) result set is used.
      return JMX.newMXBeanProxy(connection, objectNames.iterator().next(), MemberMXBean.class);
    }
    catch (MalformedObjectNameException e) {
      throw new RuntimeException(e);
    }
  }

  /**
   * Determines whether the desired MemberMXBean, identified by name or ID, was found in the platform MBeanServer
   * of this JVM process.
   *
   * @param objectNames a Collection of ObjectNames possibly referring to the desired MemberMXBean.
   * @return a boolean value indicating whether the desired MemberMXBean was found.
   * @see javax.management.ObjectName
   */
  private boolean isMemberMXBeanFound(final Collection<ObjectName> objectNames) {
    return !(objectNames == null || objectNames.isEmpty());
  }

  /**
   * Lookup operation for the MemberMXBean representing the Manager in the GemFire cluster. This method gets
   * an instance fo the Platform MBeanServer for this JVM process and uses it to lookup the MemberMXBean for the
   * GemFire Manager based on the ObjectName declared in the DistributedSystemMXBean.getManagerObjectName() operation.
   *
   * @return a proxy instance to the MemberMXBean of the GemFire Manager.
   * @see #getMBeanServer()
   * @see #createMemberMXBeanForManagerUsingAdapter(javax.management.MBeanServer, javax.management.ObjectName)
   * @see #createMemberMXBeanForManagerUsingProxy(javax.management.MBeanServer, javax.management.ObjectName)
   * @see com.gemstone.gemfire.management.DistributedSystemMXBean
   * @see com.gemstone.gemfire.management.MemberMXBean
   */
  protected synchronized MemberMXBean getManagingMemberMXBean() {
    if (managingMemberMXBeanProxy == null) {
      final MBeanServer platformMBeanServer = getMBeanServer();

      final DistributedSystemMXBean distributedSystemMXBean = JMX.newMXBeanProxy(platformMBeanServer,
        MBeanJMXAdapter.getDistributedSystemName(), DistributedSystemMXBean.class);

      // The Adapter-based implementation is retained but currently disabled in favor of the JMX proxy.
      //managingMemberMXBeanProxy = createMemberMXBeanForManagerUsingAdapter(platformMBeanServer,
      //  distributedSystemMXBean.getMemberObjectName());
      managingMemberMXBeanProxy = createMemberMXBeanForManagerUsingProxy(platformMBeanServer,
        distributedSystemMXBean.getMemberObjectName());
    }

    return managingMemberMXBeanProxy;
  }

  /**
   * Creates an Adapter using the Platform MBeanServer and ObjectName to invoke operations on the GemFire Manager's
   * MemberMXBean.
   *
   * @param server a reference to this JVM's Platform MBeanServer.
   * @param managingMemberObjectName the ObjectName of the GemFire Manager's MemberMXBean registered in
   * the Platform MBeanServer.
   * @return an Adapter for invoking operations on the GemFire Manager's MemberMXBean.
   * @see com.gemstone.gemfire.management.internal.web.controllers.AbstractCommandsController.MemberMXBeanProxy
   * @see #createMemberMXBeanForManagerUsingProxy(javax.management.MBeanServer, javax.management.ObjectName)
   */
  private MemberMXBean createMemberMXBeanForManagerUsingAdapter(final MBeanServer server,
                                                                final ObjectName managingMemberObjectName) {
    return new MemberMXBeanProxy(server, managingMemberObjectName);
  }

  /**
   * Creates a Proxy using the Platform MBeanServer and ObjectName in order to access attributes and invoke operations
   * on the GemFire Manager's MemberMXBean.
   *
   * @param server a reference to this JVM's Platform MBeanServer.
   * @param managingMemberObjectName the ObjectName of the GemFire Manager's MemberMXBean registered in
   * the Platform MBeanServer.
   * @return a Proxy for accessing attributes and invoking operations on the GemFire Manager's MemberMXBean.
   * @see #createMemberMXBeanForManagerUsingAdapter(javax.management.MBeanServer, javax.management.ObjectName)
   * @see javax.management.JMX#newMXBeanProxy(javax.management.MBeanServerConnection, javax.management.ObjectName, Class)
   */
  private MemberMXBean createMemberMXBeanForManagerUsingProxy(final MBeanServer server,
                                                              final ObjectName managingMemberObjectName) {
    return JMX.newMXBeanProxy(server, managingMemberObjectName, MemberMXBean.class);
  }

  /**
   * Gets the environment setup during this HTTP/command request for the current command process execution.
   *
   * @return a mapping of environment variables to values.
   * @see com.gemstone.gemfire.management.internal.web.controllers.support.EnvironmentVariablesHandlerInterceptor#getEnvironment()
   */
  protected Map<String, String> getEnvironment() {
    final Map<String, String> environment = new HashMap<String, String>();

    environment.putAll(EnvironmentVariablesHandlerInterceptor.getEnvironment());
    // Identify this request as originating from Gfsh for the Manager's benefit.
    environment.put(Gfsh.ENV_APP_NAME, Gfsh.GFSH_APP_NAME);

    return environment;
  }

  /**
   * Adds the named option to the command String to be processed if the named option has value or the named option is
   * present in the HTTP request.
   *
   * @param request the WebRequest object encapsulating the details (headers, request parameters and message body)
   * of the user HTTP request.
   * @param command the Gfsh command String to append options and process.
   * @param optionName the name of the command option.
   * @param optionValue the value for the named command option.
   * @see #hasValue(Object)
   * @see #hasValue(String[])
   * @see com.gemstone.gemfire.management.internal.cli.util.CommandStringBuilder
   * @see org.springframework.web.context.request.WebRequest
   */
  protected void addCommandOption(final WebRequest request,
                                  final CommandStringBuilder command,
                                  final String optionName,
                                  final Object optionValue) {
    assertNotNull(command, "The command to append options to cannot be null!");
    assertNotNull(optionName, "The name of the option to add to the command cannot be null!");

    if (hasValue(optionValue)) {
      // String[] values are joined into a single comma-delimited option value.
      final String optionValueString = (optionValue instanceof String[] ?
        StringUtils.concat((String[]) optionValue, StringUtils.COMMA_DELIMITER)
        : String.valueOf(optionValue));

      command.addOption(optionName, optionValueString);
    }
    else if (request != null && request.getParameterMap().containsKey(optionName)) {
      // Parameter present without a value: add it as a flag-style option.
      command.addOption(optionName);
    }
    else {
      // do nothing!
    }
  }

  /**
   * Executes the specified command as entered by the user using the GemFire Shell (Gfsh). Note, Gfsh performs
   * validation of the command during parsing before sending the command to the Manager for processing.
   *
   * @param command a String value containing a valid command String as would be entered by the user in Gfsh.
   * @return a result of the command execution as a String, typically marshalled in JSON to be serialized back to Gfsh.
   * @see com.gemstone.gemfire.management.internal.cli.shell.Gfsh
   * @see com.gemstone.gemfire.management.internal.web.controllers.support.EnvironmentVariablesHandlerInterceptor#getEnvironment()
   * @see #getEnvironment()
   * @see #processCommand(String, java.util.Map, byte[][])
   */
  protected String processCommand(final String command) {
    return processCommand(command, getEnvironment(), null);
  }

  /**
   * Executes the specified command as entered by the user using the GemFire Shell (Gfsh). Note, Gfsh performs
   * validation of the command during parsing before sending the command to the Manager for processing.
   *
   * @param command a String value containing a valid command String as would be entered by the user in Gfsh.
   * @param fileData is a two-dimensional byte array containing the pathnames and contents of file data streamed to
   * the Manager, usually for the 'deploy' Gfsh command.
   * @return a result of the command execution as a String, typically marshalled in JSON to be serialized back to Gfsh.
   * @see com.gemstone.gemfire.management.internal.cli.shell.Gfsh
   * @see com.gemstone.gemfire.management.internal.web.controllers.support.EnvironmentVariablesHandlerInterceptor#getEnvironment()
   * @see #getEnvironment()
   * @see #processCommand(String, java.util.Map, byte[][])
   */
  protected String processCommand(final String command, final byte[][] fileData) {
    return processCommand(command, getEnvironment(), fileData);
  }

  /**
   * Executes the specified command as entered by the user using the GemFire Shell (Gfsh). Note, Gfsh performs
   * validation of the command during parsing before sending the command to the Manager for processing.
   *
   * @param command a String value containing a valid command String as would be entered by the user in Gfsh.
   * @param environment a Map containing any environment configuration settings to be used by the Manager during
   * command execution. For example, when executing commands originating from Gfsh, the key/value pair (APP_NAME=gfsh)
   * is a specified mapping in the "environment. Note, it is common for the REST API to act as a bridge, or an adapter
   * between Gfsh and the Manager, and thus need to specify this key/value pair mapping.
   * @return a result of the command execution as a String, typically marshalled in JSON to be serialized back to Gfsh.
   * @see com.gemstone.gemfire.management.internal.cli.shell.Gfsh
   * @see com.gemstone.gemfire.management.internal.web.controllers.support.EnvironmentVariablesHandlerInterceptor#getEnvironment()
   * @see #processCommand(String, java.util.Map, byte[][])
   */
  protected String processCommand(final String command, final Map<String, String> environment) {
    return processCommand(command, environment, null);
  }

  /**
   * Executes the specified command as entered by the user using the GemFire Shell (Gfsh). Note, Gfsh performs
   * validation of the command during parsing before sending the command to the Manager for processing.
   *
   * @param command a String value containing a valid command String as would be entered by the user in Gfsh.
   * @param environment a Map containing any environment configuration settings to be used by the Manager during
   * command execution. For example, when executing commands originating from Gfsh, the key/value pair (APP_NAME=gfsh)
   * is a specified mapping in the "environment. Note, it is common for the REST API to act as a bridge, or an adapter
   * between Gfsh and the Manager, and thus need to specify this key/value pair mapping.
   * @param fileData is a two-dimensional byte array containing the pathnames and contents of file data streamed to
   * the Manager, usually for the 'deploy' Gfsh command.
   * @return a result of the command execution as a String, typically marshalled in JSON to be serialized back to Gfsh.
   * @see com.gemstone.gemfire.management.MemberMXBean#processCommand(String, java.util.Map, Byte[][])
   */
  protected String processCommand(final String command, final Map<String, String> environment, final byte[][] fileData) {
    logger.info(LogMarker.CONFIG, "Processing Command ({}) with Environment ({}) having File Data ({})...",
      command, environment, (fileData != null));
    return getManagingMemberMXBean().processCommand(command, environment, ArrayUtils.toByteArray(fileData));
  }

  /**
   * The MemberMXBeanProxy class is a proxy for the MemberMXBean interface transforming an operation on the member
   * MBean into a invocation on the MBeanServer, invoke method.
   *
   * @see com.gemstone.gemfire.management.internal.web.controllers.support.MemberMXBeanAdapter
   */
  private static class MemberMXBeanProxy extends MemberMXBeanAdapter {

    private final MBeanServer server;

    private final ObjectName objectName;

    public MemberMXBeanProxy(final MBeanServer server, final ObjectName objectName) {
      assertNotNull(server, "The connection or reference to the Platform MBeanServer cannot be null!");
      assertNotNull(objectName, "The JMX ObjectName for the GemFire Manager MemberMXBean cannot be null!");
      this.server = server;
      this.objectName = objectName;
    }

    protected MBeanServer getMBeanServer() {
      return server;
    }

    protected ObjectName getObjectName() {
      return objectName;
    }

    // Translates processCommand into a reflective MBeanServer.invoke call on the target MBean.
    @Override
    public String processCommand(final String commandString, final Map<String, String> env) {
      try {
        return String.valueOf(getMBeanServer().invoke(getObjectName(), "processCommand",
          new Object[] { commandString, env }, new String[] { String.class.getName(), Map.class.getName() }));
      }
      catch (Exception e) {
        throw new RuntimeException(String.format(
          "An error occurred while executing processCommand with command String (%1$s) on the MemberMXBean (%2$s) of the GemFire Manager using environment (%3$s)!",
          commandString, getObjectName(), env), e);
      }
    }
  }

}
package org.nybatis.core.reflection; import org.adrianwalker.multilinestring.Multiline; import org.nybatis.core.clone.Cloner; import org.nybatis.core.db.constant.NullValue; import org.nybatis.core.log.NLogger; import org.nybatis.core.model.NDate; import org.nybatis.core.model.NMap; import org.nybatis.core.reflection.vo.*; import org.nybatis.core.testModel.Link; import org.testng.annotations.Test; import java.io.File; import java.io.IOException; import java.lang.reflect.InvocationTargetException; import java.text.ParseException; import java.util.Arrays; import java.util.Date; import java.util.List; import java.util.Map; import static org.testng.Assert.*; public class ReflectorTest { @Test public void simpleTest() { NLogger.debug( Reflector.toJson( makeTestPerson(), true ) ); String json = Reflector.toJson( makeTestPerson(), false ); assertEquals( "{\"firstName\":\"Hwasu\",\"lastName\":\"Jung\",\"phone\":{\"code\":2,\"number\":\"322-3493\"},\"fax\":{\"code\":9999,\"number\":\"00100\"},\"phoneList\":[]}", json ); Map map1 = Reflector.toMapFrom( json ); NLogger.debug( map1 ); Person p1 = Reflector.toBeanFrom( json, Person.class ); NLogger.debug( Reflector.toString( p1 ) ); Person p2 = Reflector.toBeanFrom( map1, Person.class ); NLogger.debug( Reflector.toString( p2 ) ); Map map2 = Reflector.toMapFrom( p2 ); NLogger.debug( map2 ); } private Person makeTestPerson() { Person p = new Person(); p.firstName = "Hwasu"; p.lastName = "Jung"; p.phone.code = 2; p.phone.number = "322-3493"; p.fax.code = 9999; p.fax.number = "00100"; return p; } @Test public void cloneTest() { Link link = new Link( new File("//NAS/emul/SuperFamicom/emulator/snes9x1.45 NK Custom/snes9x.exe") ); link.setId( 123456 ); link.setTitle( "Merong" ); link.setGroupName( "Samurai Showdown" ); NLogger.debug( link ); link = link.clone(); link.setId( null ); link.setTitle( "Modified Merong !!" 
); NLogger.debug( link ); } @Test public void singleQuoteTesst() { String jsonText = String.format( "{'path':'%s/target/classes/ibatis_config/sql'}", "MERONG"); NLogger.debug( jsonText ); Map<String, Object> map = Reflector.toMapFrom( jsonText ); NLogger.debug( map ); } @Test public void objectMapperTest() throws ParseException { FromVo fromVo = new FromVo( "Hwasu", 39, "1977-01-22" ); NLogger.debug( "fromVo : {}", Reflector.toJson( fromVo ) ); Map map = Reflector.toMapFrom( fromVo ); NLogger.debug( new NMap( map ).toDebugString() ); NMap expectedMap = new NMap(); expectedMap.put( "name", "Hwasu" ); expectedMap.put( "age", 39 ); expectedMap.put( "birth", "1977-01-22 00:00:00" ); assertEquals( expectedMap, map, "convert bean to map" ); ToVo bean = Reflector.toBeanFrom( map, ToVo.class ); NLogger.debug( bean ); ToVo expectedBean = new ToVo( "Hwasu", "1977-01-22" ); expectedBean.setAge( 39 ); assertEquals( expectedBean.toString(), bean.toString(), "convert map to bean" ); } @Test public void mergeMapTest() { NMap fromNMap = new NMap(); fromNMap.put( "name", "hwasu" ); fromNMap.put( "age", "20" ); ToVo toVo = new ToVo(); NLogger.debug( "before\n{}", fromNMap ); Reflector.merge( fromNMap, toVo ); NLogger.debug( "after\n{}", toVo ); assertEquals( toVo.age, 40 ); assertEquals( toVo.name, "hwasu" ); assertNotNull( toVo.birth ); } @Test public void simpleArrayTest() { List array = Arrays.asList( "A", "B", "C", "D", "E", 99 ); String json = Reflector.toJson( array ); assertEquals( json, "[\"A\",\"B\",\"C\",\"D\",\"E\",99]" ); List arrayFromJson = Reflector.toListFromJson( json ); assertEquals( arrayFromJson, array ); } @Test public void copyTest() { Person person = new Person(); person.firstName = "Hwasu"; person.lastName = "Jung"; person.phone = new PhoneNumber( 0, "Phone-111-222-333" ); person.fax = new PhoneNumber( 0, "Fax-77948-22328" ); person.phoneList.add( person.phone ); person.phoneList.add( person.fax ); System.out.println( person ); Person clone = new Person(); 
Reflector.copy( person, clone ); System.out.println( clone ); PersonAnother another = new PersonAnother(); another.prefix = "testPrefix"; Reflector.copy( person, another ); System.out.println( another ); } @Test public void mergeBeanTest() { Person person = new Person(); person.firstName = "Hwasu"; person.lastName = "Jung"; person.phone = new PhoneNumber( 0, "Phone-111-222-333" ); person.fax = new PhoneNumber( 0, "Fax-77948-22328" ); PersonAnother another = new PersonAnother(); another.prefix = "testPrefix"; Reflector.merge( person, another ); System.out.println( another ); assertEquals( another.lastName, "Jung" ); assertEquals( another.prefix, "testPrefix" ); assertTrue( another.fax.equals( person.fax ) ); } @Test public void nullTest() { NDate date = Reflector.toBeanFrom( null, NDate.class ); NLogger.debug( date ); Person person = Reflector.toBeanFrom( null, Person.class ); NLogger.debug( person ); } @Test public void variableNamedWithCharacterAndNumber() { String json = "{\"S01\":\"Y\",\"S02\":\"N\"}"; TestVo testVo = Reflector.toBeanFrom( json, TestVo.class ); assertEquals( testVo.toString(), json ); } @Test public void nullValueTest() { int a = 0; assertTrue( a == NullValue.INTEGER ); Integer b = 0; assertFalse( b == NullValue.INTEGER ); } @Test public void setNybatisDbNullValueTest() { Person person = new Person(); person.firstName = "HWASU"; person.lastName = "JUNG"; person.age = 0; person.weight = 25L; person.birthDate = new Date(); person.birthNDate = new NDate(); person.profileNList.add( "key1", "val1" ); person.profileNList.add( "key2", "val2" ); assertTrue( person.age != NullValue.INTEGER, "age must be defined as Integer class." 
); NLogger.debug( Reflector.toJson( person ) ); person.firstName = NullValue.STRING; person.lastName = NullValue.STRING; person.age = NullValue.INTEGER; person.weight = NullValue.LONG; person.phoneList = NullValue.LIST; person.previousAddresses = NullValue.ARRAY_STRING; person.profileNList = NullValue.NLIST; person.profileSet = NullValue.SET; person.birthDate = NullValue.DATE; person.birthNDate = NullValue.NDATE; for( int i = 0; i < 10; i++ ) { NLogger.debug( "---------------------" ); NLogger.debug( Reflector.toJson( person ) ); } NLogger.debug( "---------------------" ); Map<String, Object> map = Reflector.toMapFrom( person ); assertEquals( map.get( "firstName" ).toString(), NullValue.STRING ); assertEquals( map.get( "lastName" ).toString(), NullValue.STRING ); assertEquals( map.get( "age" ).toString(), NullValue.STRING ); assertEquals( map.get( "weight" ).toString(), NullValue.STRING ); assertEquals( map.get( "phoneList" ).toString(), NullValue.STRING ); assertEquals( map.get( "previousAddresses" ).toString(), NullValue.STRING ); assertEquals( map.get( "profileNList" ).toString(), NullValue.STRING ); assertEquals( map.get( "profileSet" ).toString(), NullValue.STRING ); assertEquals( map.get( "birthDate" ).toString(), NullValue.STRING ); assertEquals( map.get( "birthNDate" ).toString(), NullValue.STRING ); } @Test public void parseJson() throws IOException { TestRes res = Reflector.toBeanFrom( validJson, TestRes.class ); NLogger.debug( Reflector.toJson( res, true ) ); try { Reflector.toBeanFrom( invalidJson, TestRes.class ); assertFalse( true ); } catch( Exception e ) { NLogger.debug( e ); } } @Test public void populate() throws InvocationTargetException, IllegalAccessException { System.out.println( invalidJson ); TestRes res = Reflector.toBeanFrom( validJson, TestRes.class ); System.out.println( Reflector.toJson( res, true ) ); Map<String, Object> map = Reflector.toMapFrom( populationJson ); // BeanUtils.populate( res, map ); Cloner cloner = new Cloner(); // 
cloner.shallowClone( ) Reflector.copy( map, res ); System.out.println( Reflector.toJson( res, true ) ); } /** { "name": "test", "count": 5, "date": "20140101T000000+0900", "etcProp": {}, "subList": [{ "name": "sub-1", "value": "sub-value-1" }, { "name": "sub-2", "value": "sub-value-2" }], "sub" : { "name": "sub-3", "value": "sub-value-3" } } */ @Multiline private String validJson; /** { "count": 7, "sub": { "value": "populated-3" } } */ @Multiline private String populationJson; /** { "name": "test", "count": 5, "date": { "type": "date/reg", "text": "20140101T000000+0900" }, "etcProp": {}, "subList": [{ "name": "sub-1", "value": "sub-value-1" }, { "name": "sub-2", "value": "sub-value-2" }] } */ @Multiline private String invalidJson; }
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.appsync.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.AmazonWebServiceRequest; /** * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/appsync-2017-07-25/CreateGraphqlApi" target="_top">AWS API * Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class CreateGraphqlApiRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable { /** * <p> * A user-supplied name for the <code>GraphqlApi</code>. * </p> */ private String name; /** * <p> * The Amazon CloudWatch Logs configuration. * </p> */ private LogConfig logConfig; /** * <p> * The authentication type: API key, Identity and Access Management (IAM), OpenID Connect (OIDC), Amazon Cognito * user pools, or Lambda. * </p> */ private String authenticationType; /** * <p> * The Amazon Cognito user pool configuration. * </p> */ private UserPoolConfig userPoolConfig; /** * <p> * The OIDC configuration. * </p> */ private OpenIDConnectConfig openIDConnectConfig; /** * <p> * A <code>TagMap</code> object. * </p> */ private java.util.Map<String, String> tags; /** * <p> * A list of additional authentication providers for the <code>GraphqlApi</code> API. 
* </p> */ private java.util.List<AdditionalAuthenticationProvider> additionalAuthenticationProviders; /** * <p> * A flag indicating whether to use X-Ray tracing for the <code>GraphqlApi</code>. * </p> */ private Boolean xrayEnabled; /** * <p> * Configuration for Lambda function authorization. * </p> */ private LambdaAuthorizerConfig lambdaAuthorizerConfig; /** * <p> * A user-supplied name for the <code>GraphqlApi</code>. * </p> * * @param name * A user-supplied name for the <code>GraphqlApi</code>. */ public void setName(String name) { this.name = name; } /** * <p> * A user-supplied name for the <code>GraphqlApi</code>. * </p> * * @return A user-supplied name for the <code>GraphqlApi</code>. */ public String getName() { return this.name; } /** * <p> * A user-supplied name for the <code>GraphqlApi</code>. * </p> * * @param name * A user-supplied name for the <code>GraphqlApi</code>. * @return Returns a reference to this object so that method calls can be chained together. */ public CreateGraphqlApiRequest withName(String name) { setName(name); return this; } /** * <p> * The Amazon CloudWatch Logs configuration. * </p> * * @param logConfig * The Amazon CloudWatch Logs configuration. */ public void setLogConfig(LogConfig logConfig) { this.logConfig = logConfig; } /** * <p> * The Amazon CloudWatch Logs configuration. * </p> * * @return The Amazon CloudWatch Logs configuration. */ public LogConfig getLogConfig() { return this.logConfig; } /** * <p> * The Amazon CloudWatch Logs configuration. * </p> * * @param logConfig * The Amazon CloudWatch Logs configuration. * @return Returns a reference to this object so that method calls can be chained together. */ public CreateGraphqlApiRequest withLogConfig(LogConfig logConfig) { setLogConfig(logConfig); return this; } /** * <p> * The authentication type: API key, Identity and Access Management (IAM), OpenID Connect (OIDC), Amazon Cognito * user pools, or Lambda. 
* </p> * * @param authenticationType * The authentication type: API key, Identity and Access Management (IAM), OpenID Connect (OIDC), Amazon * Cognito user pools, or Lambda. * @see AuthenticationType */ public void setAuthenticationType(String authenticationType) { this.authenticationType = authenticationType; } /** * <p> * The authentication type: API key, Identity and Access Management (IAM), OpenID Connect (OIDC), Amazon Cognito * user pools, or Lambda. * </p> * * @return The authentication type: API key, Identity and Access Management (IAM), OpenID Connect (OIDC), Amazon * Cognito user pools, or Lambda. * @see AuthenticationType */ public String getAuthenticationType() { return this.authenticationType; } /** * <p> * The authentication type: API key, Identity and Access Management (IAM), OpenID Connect (OIDC), Amazon Cognito * user pools, or Lambda. * </p> * * @param authenticationType * The authentication type: API key, Identity and Access Management (IAM), OpenID Connect (OIDC), Amazon * Cognito user pools, or Lambda. * @return Returns a reference to this object so that method calls can be chained together. * @see AuthenticationType */ public CreateGraphqlApiRequest withAuthenticationType(String authenticationType) { setAuthenticationType(authenticationType); return this; } /** * <p> * The authentication type: API key, Identity and Access Management (IAM), OpenID Connect (OIDC), Amazon Cognito * user pools, or Lambda. * </p> * * @param authenticationType * The authentication type: API key, Identity and Access Management (IAM), OpenID Connect (OIDC), Amazon * Cognito user pools, or Lambda. * @return Returns a reference to this object so that method calls can be chained together. * @see AuthenticationType */ public CreateGraphqlApiRequest withAuthenticationType(AuthenticationType authenticationType) { this.authenticationType = authenticationType.toString(); return this; } /** * <p> * The Amazon Cognito user pool configuration. 
* </p> * * @param userPoolConfig * The Amazon Cognito user pool configuration. */ public void setUserPoolConfig(UserPoolConfig userPoolConfig) { this.userPoolConfig = userPoolConfig; } /** * <p> * The Amazon Cognito user pool configuration. * </p> * * @return The Amazon Cognito user pool configuration. */ public UserPoolConfig getUserPoolConfig() { return this.userPoolConfig; } /** * <p> * The Amazon Cognito user pool configuration. * </p> * * @param userPoolConfig * The Amazon Cognito user pool configuration. * @return Returns a reference to this object so that method calls can be chained together. */ public CreateGraphqlApiRequest withUserPoolConfig(UserPoolConfig userPoolConfig) { setUserPoolConfig(userPoolConfig); return this; } /** * <p> * The OIDC configuration. * </p> * * @param openIDConnectConfig * The OIDC configuration. */ public void setOpenIDConnectConfig(OpenIDConnectConfig openIDConnectConfig) { this.openIDConnectConfig = openIDConnectConfig; } /** * <p> * The OIDC configuration. * </p> * * @return The OIDC configuration. */ public OpenIDConnectConfig getOpenIDConnectConfig() { return this.openIDConnectConfig; } /** * <p> * The OIDC configuration. * </p> * * @param openIDConnectConfig * The OIDC configuration. * @return Returns a reference to this object so that method calls can be chained together. */ public CreateGraphqlApiRequest withOpenIDConnectConfig(OpenIDConnectConfig openIDConnectConfig) { setOpenIDConnectConfig(openIDConnectConfig); return this; } /** * <p> * A <code>TagMap</code> object. * </p> * * @return A <code>TagMap</code> object. */ public java.util.Map<String, String> getTags() { return tags; } /** * <p> * A <code>TagMap</code> object. * </p> * * @param tags * A <code>TagMap</code> object. */ public void setTags(java.util.Map<String, String> tags) { this.tags = tags; } /** * <p> * A <code>TagMap</code> object. * </p> * * @param tags * A <code>TagMap</code> object. 
 * @return Returns a reference to this object so that method calls can be chained together.
 */

public CreateGraphqlApiRequest withTags(java.util.Map<String, String> tags) {
    setTags(tags);
    return this;
}

/**
 * Add a single Tags entry
 *
 * @see CreateGraphqlApiRequest#withTags
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public CreateGraphqlApiRequest addTagsEntry(String key, String value) {
    if (null == this.tags) {
        this.tags = new java.util.HashMap<String, String>();
    }
    if (this.tags.containsKey(key))
        throw new IllegalArgumentException("Duplicated keys (" + key.toString() + ") are provided.");
    this.tags.put(key, value);
    return this;
}

/**
 * Removes all the entries added into Tags.
 *
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public CreateGraphqlApiRequest clearTagsEntries() {
    this.tags = null;
    return this;
}

/**
 * <p>
 * A list of additional authentication providers for the <code>GraphqlApi</code> API.
 * </p>
 *
 * @return A list of additional authentication providers for the <code>GraphqlApi</code> API.
 */
public java.util.List<AdditionalAuthenticationProvider> getAdditionalAuthenticationProviders() {
    return additionalAuthenticationProviders;
}

/**
 * <p>
 * A list of additional authentication providers for the <code>GraphqlApi</code> API.
 * </p>
 *
 * @param additionalAuthenticationProviders
 *        A list of additional authentication providers for the <code>GraphqlApi</code> API.
 */
public void setAdditionalAuthenticationProviders(java.util.Collection<AdditionalAuthenticationProvider> additionalAuthenticationProviders) {
    if (additionalAuthenticationProviders == null) {
        this.additionalAuthenticationProviders = null;
        return;
    }

    this.additionalAuthenticationProviders = new java.util.ArrayList<AdditionalAuthenticationProvider>(additionalAuthenticationProviders);
}

/**
 * <p>
 * A list of additional authentication providers for the <code>GraphqlApi</code> API.
* </p> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if any). Use * {@link #setAdditionalAuthenticationProviders(java.util.Collection)} or * {@link #withAdditionalAuthenticationProviders(java.util.Collection)} if you want to override the existing values. * </p> * * @param additionalAuthenticationProviders * A list of additional authentication providers for the <code>GraphqlApi</code> API. * @return Returns a reference to this object so that method calls can be chained together. */ public CreateGraphqlApiRequest withAdditionalAuthenticationProviders(AdditionalAuthenticationProvider... additionalAuthenticationProviders) { if (this.additionalAuthenticationProviders == null) { setAdditionalAuthenticationProviders(new java.util.ArrayList<AdditionalAuthenticationProvider>(additionalAuthenticationProviders.length)); } for (AdditionalAuthenticationProvider ele : additionalAuthenticationProviders) { this.additionalAuthenticationProviders.add(ele); } return this; } /** * <p> * A list of additional authentication providers for the <code>GraphqlApi</code> API. * </p> * * @param additionalAuthenticationProviders * A list of additional authentication providers for the <code>GraphqlApi</code> API. * @return Returns a reference to this object so that method calls can be chained together. */ public CreateGraphqlApiRequest withAdditionalAuthenticationProviders( java.util.Collection<AdditionalAuthenticationProvider> additionalAuthenticationProviders) { setAdditionalAuthenticationProviders(additionalAuthenticationProviders); return this; } /** * <p> * A flag indicating whether to use X-Ray tracing for the <code>GraphqlApi</code>. * </p> * * @param xrayEnabled * A flag indicating whether to use X-Ray tracing for the <code>GraphqlApi</code>. */ public void setXrayEnabled(Boolean xrayEnabled) { this.xrayEnabled = xrayEnabled; } /** * <p> * A flag indicating whether to use X-Ray tracing for the <code>GraphqlApi</code>. 
* </p> * * @return A flag indicating whether to use X-Ray tracing for the <code>GraphqlApi</code>. */ public Boolean getXrayEnabled() { return this.xrayEnabled; } /** * <p> * A flag indicating whether to use X-Ray tracing for the <code>GraphqlApi</code>. * </p> * * @param xrayEnabled * A flag indicating whether to use X-Ray tracing for the <code>GraphqlApi</code>. * @return Returns a reference to this object so that method calls can be chained together. */ public CreateGraphqlApiRequest withXrayEnabled(Boolean xrayEnabled) { setXrayEnabled(xrayEnabled); return this; } /** * <p> * A flag indicating whether to use X-Ray tracing for the <code>GraphqlApi</code>. * </p> * * @return A flag indicating whether to use X-Ray tracing for the <code>GraphqlApi</code>. */ public Boolean isXrayEnabled() { return this.xrayEnabled; } /** * <p> * Configuration for Lambda function authorization. * </p> * * @param lambdaAuthorizerConfig * Configuration for Lambda function authorization. */ public void setLambdaAuthorizerConfig(LambdaAuthorizerConfig lambdaAuthorizerConfig) { this.lambdaAuthorizerConfig = lambdaAuthorizerConfig; } /** * <p> * Configuration for Lambda function authorization. * </p> * * @return Configuration for Lambda function authorization. */ public LambdaAuthorizerConfig getLambdaAuthorizerConfig() { return this.lambdaAuthorizerConfig; } /** * <p> * Configuration for Lambda function authorization. * </p> * * @param lambdaAuthorizerConfig * Configuration for Lambda function authorization. * @return Returns a reference to this object so that method calls can be chained together. */ public CreateGraphqlApiRequest withLambdaAuthorizerConfig(LambdaAuthorizerConfig lambdaAuthorizerConfig) { setLambdaAuthorizerConfig(lambdaAuthorizerConfig); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. 
* * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getName() != null) sb.append("Name: ").append(getName()).append(","); if (getLogConfig() != null) sb.append("LogConfig: ").append(getLogConfig()).append(","); if (getAuthenticationType() != null) sb.append("AuthenticationType: ").append(getAuthenticationType()).append(","); if (getUserPoolConfig() != null) sb.append("UserPoolConfig: ").append(getUserPoolConfig()).append(","); if (getOpenIDConnectConfig() != null) sb.append("OpenIDConnectConfig: ").append(getOpenIDConnectConfig()).append(","); if (getTags() != null) sb.append("Tags: ").append(getTags()).append(","); if (getAdditionalAuthenticationProviders() != null) sb.append("AdditionalAuthenticationProviders: ").append(getAdditionalAuthenticationProviders()).append(","); if (getXrayEnabled() != null) sb.append("XrayEnabled: ").append(getXrayEnabled()).append(","); if (getLambdaAuthorizerConfig() != null) sb.append("LambdaAuthorizerConfig: ").append(getLambdaAuthorizerConfig()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof CreateGraphqlApiRequest == false) return false; CreateGraphqlApiRequest other = (CreateGraphqlApiRequest) obj; if (other.getName() == null ^ this.getName() == null) return false; if (other.getName() != null && other.getName().equals(this.getName()) == false) return false; if (other.getLogConfig() == null ^ this.getLogConfig() == null) return false; if (other.getLogConfig() != null && other.getLogConfig().equals(this.getLogConfig()) == false) return false; if (other.getAuthenticationType() == null ^ this.getAuthenticationType() == null) return false; if (other.getAuthenticationType() != null && other.getAuthenticationType().equals(this.getAuthenticationType()) == false) return false; if 
(other.getUserPoolConfig() == null ^ this.getUserPoolConfig() == null) return false; if (other.getUserPoolConfig() != null && other.getUserPoolConfig().equals(this.getUserPoolConfig()) == false) return false; if (other.getOpenIDConnectConfig() == null ^ this.getOpenIDConnectConfig() == null) return false; if (other.getOpenIDConnectConfig() != null && other.getOpenIDConnectConfig().equals(this.getOpenIDConnectConfig()) == false) return false; if (other.getTags() == null ^ this.getTags() == null) return false; if (other.getTags() != null && other.getTags().equals(this.getTags()) == false) return false; if (other.getAdditionalAuthenticationProviders() == null ^ this.getAdditionalAuthenticationProviders() == null) return false; if (other.getAdditionalAuthenticationProviders() != null && other.getAdditionalAuthenticationProviders().equals(this.getAdditionalAuthenticationProviders()) == false) return false; if (other.getXrayEnabled() == null ^ this.getXrayEnabled() == null) return false; if (other.getXrayEnabled() != null && other.getXrayEnabled().equals(this.getXrayEnabled()) == false) return false; if (other.getLambdaAuthorizerConfig() == null ^ this.getLambdaAuthorizerConfig() == null) return false; if (other.getLambdaAuthorizerConfig() != null && other.getLambdaAuthorizerConfig().equals(this.getLambdaAuthorizerConfig()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getName() == null) ? 0 : getName().hashCode()); hashCode = prime * hashCode + ((getLogConfig() == null) ? 0 : getLogConfig().hashCode()); hashCode = prime * hashCode + ((getAuthenticationType() == null) ? 0 : getAuthenticationType().hashCode()); hashCode = prime * hashCode + ((getUserPoolConfig() == null) ? 0 : getUserPoolConfig().hashCode()); hashCode = prime * hashCode + ((getOpenIDConnectConfig() == null) ? 0 : getOpenIDConnectConfig().hashCode()); hashCode = prime * hashCode + ((getTags() == null) ? 
0 : getTags().hashCode()); hashCode = prime * hashCode + ((getAdditionalAuthenticationProviders() == null) ? 0 : getAdditionalAuthenticationProviders().hashCode()); hashCode = prime * hashCode + ((getXrayEnabled() == null) ? 0 : getXrayEnabled().hashCode()); hashCode = prime * hashCode + ((getLambdaAuthorizerConfig() == null) ? 0 : getLambdaAuthorizerConfig().hashCode()); return hashCode; } @Override public CreateGraphqlApiRequest clone() { return (CreateGraphqlApiRequest) super.clone(); } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.druid.sql.avatica; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; import com.google.inject.Binder; import com.google.inject.Injector; import com.google.inject.Module; import com.google.inject.name.Names; import org.apache.calcite.avatica.AvaticaClientRuntimeException; import org.apache.calcite.avatica.Meta; import org.apache.calcite.avatica.MissingResultsException; import org.apache.calcite.avatica.NoSuchStatementException; import org.apache.druid.guice.GuiceInjectors; import org.apache.druid.initialization.Initialization; import org.apache.druid.java.util.common.DateTimes; import org.apache.druid.java.util.common.Pair; import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.java.util.common.io.Closer; import org.apache.druid.math.expr.ExprMacroTable; import 
org.apache.druid.query.QueryRunnerFactoryConglomerate; import org.apache.druid.server.DruidNode; import org.apache.druid.server.RequestLogLine; import org.apache.druid.server.log.TestRequestLogger; import org.apache.druid.server.metrics.NoopServiceEmitter; import org.apache.druid.server.security.AuthTestUtils; import org.apache.druid.server.security.AuthenticatorMapper; import org.apache.druid.server.security.AuthorizerMapper; import org.apache.druid.server.security.Escalator; import org.apache.druid.sql.SqlLifecycleFactory; import org.apache.druid.sql.calcite.planner.Calcites; import org.apache.druid.sql.calcite.planner.DruidOperatorTable; import org.apache.druid.sql.calcite.planner.PlannerConfig; import org.apache.druid.sql.calcite.planner.PlannerFactory; import org.apache.druid.sql.calcite.schema.DruidSchema; import org.apache.druid.sql.calcite.schema.SystemSchema; import org.apache.druid.sql.calcite.util.CalciteTestBase; import org.apache.druid.sql.calcite.util.CalciteTests; import org.apache.druid.sql.calcite.util.QueryLogHook; import org.apache.druid.sql.calcite.util.SpecificSegmentsQuerySegmentWalker; import org.eclipse.jetty.server.Server; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.junit.After; import org.junit.AfterClass; import org.junit.Assert; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.junit.rules.TemporaryFolder; import java.io.IOException; import java.net.InetSocketAddress; import java.sql.Connection; import java.sql.DatabaseMetaData; import java.sql.Date; import java.sql.DriverManager; import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; import java.sql.Statement; import java.sql.Timestamp; import java.sql.Types; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Set; import 
java.util.concurrent.Executors;
import java.util.concurrent.ThreadLocalRandom;

/**
 * Integration-style tests for {@link DruidAvaticaHandler}: starts a real Jetty server
 * hosting the Avatica JDBC endpoint backed by a mock Druid query walker, then exercises
 * it through plain JDBC ({@link DriverManager}) connections.
 */
public class DruidAvaticaHandlerTest extends CalciteTestBase
{
  private static final AvaticaServerConfig AVATICA_CONFIG = new AvaticaServerConfig()
  {
    @Override
    public int getMaxConnections()
    {
      // This must match the number of Connection objects created in setUp()
      return 3;
    }

    @Override
    public int getMaxStatementsPerConnection()
    {
      return 4;
    }
  };

  private static final String DUMMY_SQL_QUERY_ID = "dummy";

  private static QueryRunnerFactoryConglomerate conglomerate;
  private static Closer resourceCloser;

  @BeforeClass
  public static void setUpClass()
  {
    final Pair<QueryRunnerFactoryConglomerate, Closer> conglomerateCloserPair = CalciteTests
        .createQueryRunnerFactoryConglomerate();
    conglomerate = conglomerateCloserPair.lhs;
    resourceCloser = conglomerateCloserPair.rhs;
  }

  @AfterClass
  public static void tearDownClass() throws IOException
  {
    resourceCloser.close();
  }

  @Rule
  public ExpectedException expectedException = ExpectedException.none();

  @Rule
  public TemporaryFolder temporaryFolder = new TemporaryFolder();

  @Rule
  public QueryLogHook queryLogHook = QueryLogHook.create();

  private SpecificSegmentsQuerySegmentWalker walker;
  private Server server;
  private Connection client;
  private Connection superuserClient;
  private Connection clientLosAngeles;
  private DruidMeta druidMeta;
  private String url;
  private Injector injector;
  private TestRequestLogger testRequestLogger;

  /**
   * Builds the full serving stack (planner, DruidMeta, Avatica handler, Jetty server on a
   * random localhost port) and opens the three JDBC connections used by the tests.
   */
  @Before
  public void setUp() throws Exception
  {
    walker = CalciteTests.createMockWalker(conglomerate, temporaryFolder.newFolder());
    final PlannerConfig plannerConfig = new PlannerConfig();
    final DruidSchema druidSchema = CalciteTests.createMockSchema(conglomerate, walker, plannerConfig);
    final SystemSchema systemSchema = CalciteTests.createMockSystemSchema(druidSchema, walker, plannerConfig);
    final DruidOperatorTable operatorTable = CalciteTests.createOperatorTable();
    final ExprMacroTable macroTable = CalciteTests.createExprMacroTable();
    injector = Initialization.makeInjectorWithModules(
        GuiceInjectors.makeStartupInjector(),
        ImmutableList.of(
            new Module()
            {
              @Override
              public void configure(Binder binder)
              {
                binder.bindConstant().annotatedWith(Names.named("serviceName")).to("test");
                binder.bindConstant().annotatedWith(Names.named("servicePort")).to(0);
                binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(-1);
                binder.bind(AuthenticatorMapper.class).toInstance(CalciteTests.TEST_AUTHENTICATOR_MAPPER);
                binder.bind(AuthorizerMapper.class).toInstance(CalciteTests.TEST_AUTHORIZER_MAPPER);
                binder.bind(Escalator.class).toInstance(CalciteTests.TEST_AUTHENTICATOR_ESCALATOR);
              }
            }
        )
    );
    testRequestLogger = new TestRequestLogger();
    final PlannerFactory plannerFactory = new PlannerFactory(
        druidSchema,
        systemSchema,
        CalciteTests.createMockQueryLifecycleFactory(walker, conglomerate),
        operatorTable,
        macroTable,
        plannerConfig,
        CalciteTests.TEST_AUTHORIZER_MAPPER,
        CalciteTests.getJsonMapper()
    );
    druidMeta = new DruidMeta(
        new SqlLifecycleFactory(
            plannerFactory,
            new NoopServiceEmitter(),
            testRequestLogger
        ),
        AVATICA_CONFIG,
        injector
    );
    final DruidAvaticaHandler handler = new DruidAvaticaHandler(
        druidMeta,
        new DruidNode("dummy", "dummy", false, 1, null, true, false),
        new AvaticaMonitor()
    );
    // Random port in [10000, 19999) to avoid collisions between concurrently running tests.
    final int port = ThreadLocalRandom.current().nextInt(9999) + 10000;
    server = new Server(new InetSocketAddress("127.0.0.1", port));
    server.setHandler(handler);
    server.start();
    url = StringUtils.format(
        "jdbc:avatica:remote:url=http://127.0.0.1:%d%s",
        port,
        DruidAvaticaHandler.AVATICA_PATH
    );
    client = DriverManager.getConnection(url, "regularUser", "druid");
    superuserClient = DriverManager.getConnection(url, CalciteTests.TEST_SUPERUSER_NAME, "druid");

    final Properties propertiesLosAngeles = new Properties();
    propertiesLosAngeles.setProperty("sqlTimeZone", "America/Los_Angeles");
    propertiesLosAngeles.setProperty("user", "regularUserLA");
    propertiesLosAngeles.setProperty("sqlQueryId", DUMMY_SQL_QUERY_ID);
    clientLosAngeles = DriverManager.getConnection(url, propertiesLosAngeles);
  }

  @After
  public void tearDown() throws Exception
  {
    client.close();
    // FIX: superuserClient was opened in setUp() but never closed, leaking a server-side
    // connection slot after every test.
    superuserClient.close();
    clientLosAngeles.close();
    server.stop();
    walker.close();
    walker = null;
    client = null;
    superuserClient = null;
    clientLosAngeles = null;
    server = null;
  }

  @Test
  public void testSelectCount() throws Exception
  {
    final ResultSet resultSet = client.createStatement().executeQuery("SELECT COUNT(*) AS cnt FROM druid.foo");
    final List<Map<String, Object>> rows = getRows(resultSet);
    Assert.assertEquals(
        ImmutableList.of(
            ImmutableMap.of("cnt", 6L)
        ),
        rows
    );
  }

  @Test
  public void testSelectCountAlternateStyle() throws Exception
  {
    final ResultSet resultSet = client.prepareStatement("SELECT COUNT(*) AS cnt FROM druid.foo").executeQuery();
    final List<Map<String, Object>> rows = getRows(resultSet);
    Assert.assertEquals(
        ImmutableList.of(
            ImmutableMap.of("cnt", 6L)
        ),
        rows
    );
  }

  @Test
  public void testTimestampsInResponse() throws Exception
  {
    final ResultSet resultSet = client.createStatement().executeQuery(
        "SELECT __time, CAST(__time AS DATE) AS t2 FROM druid.foo LIMIT 1"
    );
    Assert.assertEquals(
        ImmutableList.of(
            ImmutableMap.of(
                "__time", new Timestamp(DateTimes.of("2000-01-01T00:00:00.000Z").getMillis()),
                "t2", new Date(DateTimes.of("2000-01-01").getMillis())
            )
        ),
        getRows(resultSet)
    );
  }

  @Test
  public void testTimestampsInResponseLosAngelesTimeZone() throws Exception
  {
    final ResultSet resultSet = clientLosAngeles.createStatement().executeQuery(
        "SELECT __time, CAST(__time AS DATE) AS t2 FROM druid.foo LIMIT 1"
    );
    final DateTimeZone timeZone = DateTimes.inferTzFromString("America/Los_Angeles");
    final DateTime localDateTime = new DateTime("2000-01-01T00Z", timeZone);
    final List<Map<String, Object>> resultRows = getRows(resultSet);
    Assert.assertEquals(
        ImmutableList.of(
            ImmutableMap.of(
                "__time", new Timestamp(Calcites.jodaToCalciteTimestamp(localDateTime, timeZone)),
                "t2", new Date(Calcites.jodaToCalciteTimestamp(localDateTime.dayOfMonth().roundFloorCopy(), timeZone))
            )
        ),
        resultRows
    );
  }

  @Test
  public void testFieldAliasingSelect() throws Exception
  {
    final ResultSet resultSet = client.createStatement().executeQuery(
        "SELECT dim2 AS \"x\", dim2 AS \"y\" FROM druid.foo LIMIT 1"
    );
    Assert.assertEquals(
        ImmutableList.of(
            ImmutableMap.of("x", "a", "y", "a")
        ),
        getRows(resultSet)
    );
  }

  @Test
  public void testSelectBoolean() throws Exception
  {
    final ResultSet resultSet = client.createStatement().executeQuery(
        "SELECT dim2, dim2 IS NULL AS isnull FROM druid.foo LIMIT 1"
    );
    Assert.assertEquals(
        ImmutableList.of(
            ImmutableMap.of("dim2", "a", "isnull", false)
        ),
        getRows(resultSet)
    );
  }

  @Test
  public void testExplainSelectCount() throws Exception
  {
    final ResultSet resultSet = clientLosAngeles.createStatement().executeQuery(
        "EXPLAIN PLAN FOR SELECT COUNT(*) AS cnt FROM druid.foo"
    );
    Assert.assertEquals(
        ImmutableList.of(
            ImmutableMap.of(
                "PLAN",
                StringUtils.format("DruidQueryRel(query=[{\"queryType\":\"timeseries\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"descending\":false,\"virtualColumns\":[],\"filter\":null,\"granularity\":{\"type\":\"all\"},\"aggregations\":[{\"type\":\"count\",\"name\":\"a0\"}],\"postAggregations\":[],\"limit\":2147483647,\"context\":{\"skipEmptyBuckets\":true,\"sqlQueryId\":\"%s\",\"sqlTimeZone\":\"America/Los_Angeles\"}}], signature=[{a0:LONG}])\n",
                                   DUMMY_SQL_QUERY_ID
                )
            )
        ),
        getRows(resultSet)
    );
  }

  @Test
  public void testDatabaseMetaDataCatalogs() throws Exception
  {
    final DatabaseMetaData metaData = client.getMetaData();
    Assert.assertEquals(
        ImmutableList.of(
            row(Pair.of("TABLE_CAT", "druid"))
        ),
        getRows(metaData.getCatalogs())
    );
  }

  @Test
  public void testDatabaseMetaDataSchemas() throws Exception
  {
    final DatabaseMetaData metaData = client.getMetaData();
    Assert.assertEquals(
        ImmutableList.of(
            row(Pair.of("TABLE_CATALOG", "druid"), Pair.of("TABLE_SCHEM", "druid"))
        ),
        getRows(metaData.getSchemas(null, "druid"))
    );
  }

  @Test
  public void testDatabaseMetaDataTables() throws Exception
  {
    final DatabaseMetaData metaData = client.getMetaData();
    Assert.assertEquals(
        ImmutableList.of(
            row(
                Pair.of("TABLE_CAT", "druid"),
                Pair.of("TABLE_NAME", CalciteTests.DATASOURCE1),
                Pair.of("TABLE_SCHEM", "druid"),
                Pair.of("TABLE_TYPE", "TABLE")
            ),
            row(
                Pair.of("TABLE_CAT", "druid"),
                Pair.of("TABLE_NAME", CalciteTests.DATASOURCE2),
                Pair.of("TABLE_SCHEM", "druid"),
                Pair.of("TABLE_TYPE", "TABLE")
            ),
            row(
                Pair.of("TABLE_CAT", "druid"),
                Pair.of("TABLE_NAME", CalciteTests.DATASOURCE3),
                Pair.of("TABLE_SCHEM", "druid"),
                Pair.of("TABLE_TYPE", "TABLE")
            )
        ),
        getRows(
            metaData.getTables(null, "druid", "%", null),
            ImmutableSet.of("TABLE_CAT", "TABLE_NAME", "TABLE_SCHEM", "TABLE_TYPE")
        )
    );
  }

  @Test
  public void testDatabaseMetaDataTablesAsSuperuser() throws Exception
  {
    final DatabaseMetaData metaData = superuserClient.getMetaData();
    Assert.assertEquals(
        ImmutableList.of(
            row(
                Pair.of("TABLE_CAT", "druid"),
                Pair.of("TABLE_NAME", CalciteTests.DATASOURCE1),
                Pair.of("TABLE_SCHEM", "druid"),
                Pair.of("TABLE_TYPE", "TABLE")
            ),
            row(
                Pair.of("TABLE_CAT", "druid"),
                Pair.of("TABLE_NAME", CalciteTests.DATASOURCE2),
                Pair.of("TABLE_SCHEM", "druid"),
                Pair.of("TABLE_TYPE", "TABLE")
            ),
            row(
                Pair.of("TABLE_CAT", "druid"),
                Pair.of("TABLE_NAME", CalciteTests.FORBIDDEN_DATASOURCE),
                Pair.of("TABLE_SCHEM", "druid"),
                Pair.of("TABLE_TYPE", "TABLE")
            ),
            row(
                Pair.of("TABLE_CAT", "druid"),
                Pair.of("TABLE_NAME", CalciteTests.DATASOURCE3),
                Pair.of("TABLE_SCHEM", "druid"),
                Pair.of("TABLE_TYPE", "TABLE")
            )
        ),
        getRows(
            metaData.getTables(null, "druid", "%", null),
            ImmutableSet.of("TABLE_CAT", "TABLE_NAME", "TABLE_SCHEM", "TABLE_TYPE")
        )
    );
  }

  @Test
  public void testDatabaseMetaDataColumns() throws Exception
  {
    final DatabaseMetaData metaData = client.getMetaData();
    Assert.assertEquals(
        ImmutableList.of(
            row(
                Pair.of("TABLE_SCHEM", "druid"),
                Pair.of("TABLE_NAME", "foo"),
                Pair.of("COLUMN_NAME", "__time"),
                Pair.of("DATA_TYPE", Types.TIMESTAMP),
                Pair.of("TYPE_NAME", "TIMESTAMP"),
                Pair.of("IS_NULLABLE", "NO")
            ),
            row(
                Pair.of("TABLE_SCHEM", "druid"),
                Pair.of("TABLE_NAME", "foo"),
                Pair.of("COLUMN_NAME", "cnt"),
                Pair.of("DATA_TYPE", Types.BIGINT),
                Pair.of("TYPE_NAME", "BIGINT"),
                Pair.of("IS_NULLABLE", "NO")
            ),
            row(
                Pair.of("TABLE_SCHEM", "druid"),
                Pair.of("TABLE_NAME", "foo"),
                Pair.of("COLUMN_NAME", "dim1"),
                Pair.of("DATA_TYPE", Types.VARCHAR),
                Pair.of("TYPE_NAME", "VARCHAR"),
                Pair.of("IS_NULLABLE", "YES")
            ),
            row(
                Pair.of("TABLE_SCHEM", "druid"),
                Pair.of("TABLE_NAME", "foo"),
                Pair.of("COLUMN_NAME", "dim2"),
                Pair.of("DATA_TYPE", Types.VARCHAR),
                Pair.of("TYPE_NAME", "VARCHAR"),
                Pair.of("IS_NULLABLE", "YES")
            ),
            row(
                Pair.of("TABLE_SCHEM", "druid"),
                Pair.of("TABLE_NAME", "foo"),
                Pair.of("COLUMN_NAME", "dim3"),
                Pair.of("DATA_TYPE", Types.VARCHAR),
                Pair.of("TYPE_NAME", "VARCHAR"),
                Pair.of("IS_NULLABLE", "YES")
            ),
            row(
                Pair.of("TABLE_SCHEM", "druid"),
                Pair.of("TABLE_NAME", "foo"),
                Pair.of("COLUMN_NAME", "m1"),
                Pair.of("DATA_TYPE", Types.FLOAT),
                Pair.of("TYPE_NAME", "FLOAT"),
                Pair.of("IS_NULLABLE", "NO")
            ),
            row(
                Pair.of("TABLE_SCHEM", "druid"),
                Pair.of("TABLE_NAME", "foo"),
                Pair.of("COLUMN_NAME", "m2"),
                Pair.of("DATA_TYPE", Types.DOUBLE),
                Pair.of("TYPE_NAME", "DOUBLE"),
                Pair.of("IS_NULLABLE", "NO")
            ),
            row(
                Pair.of("TABLE_SCHEM", "druid"),
                Pair.of("TABLE_NAME", "foo"),
                Pair.of("COLUMN_NAME", "unique_dim1"),
                Pair.of("DATA_TYPE", Types.OTHER),
                Pair.of("TYPE_NAME", "OTHER"),
                Pair.of("IS_NULLABLE", "YES")
            )
        ),
        getRows(
            metaData.getColumns(null, "dr_id", "foo", null),
            ImmutableSet.of("IS_NULLABLE", "TABLE_NAME", "TABLE_SCHEM", "COLUMN_NAME", "DATA_TYPE", "TYPE_NAME")
        )
    );
  }

  @Test
  public void testDatabaseMetaDataColumnsOnForbiddenDatasource() throws Exception
  {
    final DatabaseMetaData metaData = client.getMetaData();
    Assert.assertEquals(
        ImmutableList.of(),
        getRows(
            metaData.getColumns(null, "dr_id", CalciteTests.FORBIDDEN_DATASOURCE, null),
            ImmutableSet.of("IS_NULLABLE", "TABLE_NAME", "TABLE_SCHEM", "COLUMN_NAME", "DATA_TYPE", "TYPE_NAME")
        )
    );
  }

  @Test
  public void testDatabaseMetaDataColumnsWithSuperuser() throws Exception
  {
    final DatabaseMetaData metaData = superuserClient.getMetaData();
    Assert.assertEquals(
        ImmutableList.of(
            row(
                Pair.of("TABLE_SCHEM", "druid"),
                Pair.of("TABLE_NAME", CalciteTests.FORBIDDEN_DATASOURCE),
                Pair.of("COLUMN_NAME", "__time"),
                Pair.of("DATA_TYPE", Types.TIMESTAMP),
                Pair.of("TYPE_NAME", "TIMESTAMP"),
                Pair.of("IS_NULLABLE", "NO")
            ),
            row(
                Pair.of("TABLE_SCHEM", "druid"),
                Pair.of("TABLE_NAME", CalciteTests.FORBIDDEN_DATASOURCE),
                Pair.of("COLUMN_NAME", "cnt"),
                Pair.of("DATA_TYPE", Types.BIGINT),
                Pair.of("TYPE_NAME", "BIGINT"),
                Pair.of("IS_NULLABLE", "NO")
            ),
            row(
                Pair.of("TABLE_SCHEM", "druid"),
                Pair.of("TABLE_NAME", CalciteTests.FORBIDDEN_DATASOURCE),
                Pair.of("COLUMN_NAME", "dim1"),
                Pair.of("DATA_TYPE", Types.VARCHAR),
                Pair.of("TYPE_NAME", "VARCHAR"),
                Pair.of("IS_NULLABLE", "YES")
            ),
            row(
                Pair.of("TABLE_SCHEM", "druid"),
                Pair.of("TABLE_NAME", CalciteTests.FORBIDDEN_DATASOURCE),
                Pair.of("COLUMN_NAME", "dim2"),
                Pair.of("DATA_TYPE", Types.VARCHAR),
                Pair.of("TYPE_NAME", "VARCHAR"),
                Pair.of("IS_NULLABLE", "YES")
            ),
            row(
                Pair.of("TABLE_SCHEM", "druid"),
                Pair.of("TABLE_NAME", CalciteTests.FORBIDDEN_DATASOURCE),
                Pair.of("COLUMN_NAME", "m1"),
                Pair.of("DATA_TYPE", Types.FLOAT),
                Pair.of("TYPE_NAME", "FLOAT"),
                Pair.of("IS_NULLABLE", "NO")
            ),
            row(
                Pair.of("TABLE_SCHEM", "druid"),
                Pair.of("TABLE_NAME", CalciteTests.FORBIDDEN_DATASOURCE),
                Pair.of("COLUMN_NAME", "m2"),
                Pair.of("DATA_TYPE", Types.DOUBLE),
                Pair.of("TYPE_NAME", "DOUBLE"),
                Pair.of("IS_NULLABLE", "NO")
            ),
            row(
                Pair.of("TABLE_SCHEM", "druid"),
                Pair.of("TABLE_NAME", CalciteTests.FORBIDDEN_DATASOURCE),
                Pair.of("COLUMN_NAME", "unique_dim1"),
                Pair.of("DATA_TYPE", Types.OTHER),
                Pair.of("TYPE_NAME", "OTHER"),
                Pair.of("IS_NULLABLE", "YES")
            )
        ),
        getRows(
            metaData.getColumns(null, "dr_id", CalciteTests.FORBIDDEN_DATASOURCE, null),
            ImmutableSet.of("IS_NULLABLE", "TABLE_NAME", "TABLE_SCHEM", "COLUMN_NAME", "DATA_TYPE", "TYPE_NAME")
        )
    );
  }

  @Test(timeout = 90_000L)
  public void testConcurrentQueries() throws Exception
  {
    final List<ListenableFuture<Integer>> futures = new ArrayList<>();
    final ListeningExecutorService exec = MoreExecutors.listeningDecorator(
        Executors.newFixedThreadPool(AVATICA_CONFIG.getMaxStatementsPerConnection())
    );
    for (int i = 0; i < 2000; i++) {
      final String query = StringUtils.format("SELECT COUNT(*) + %s AS ci FROM foo", i);
      futures.add(
          exec.submit(() -> {
            try (
                final Statement statement = client.createStatement();
                final ResultSet resultSet = statement.executeQuery(query)
            ) {
              final List<Map<String, Object>> rows = getRows(resultSet);
              return ((Number) Iterables.getOnlyElement(rows).get("ci")).intValue();
            }
            catch (SQLException e) {
              throw new RuntimeException(e);
            }
          })
      );
    }

    final List<Integer> integers = Futures.allAsList(futures).get();
    for (int i = 0; i < 2000; i++) {
      Assert.assertEquals(i + 6, (int) integers.get(i));
    }
    // FIX: release the pool's threads; previously the executor was never shut down.
    exec.shutdown();
  }

  @Test
  public void testTooManyStatements() throws Exception
  {
    final Statement statement1 = client.createStatement();
    final Statement statement2 = client.createStatement();
    final Statement statement3 = client.createStatement();
    final Statement statement4 = client.createStatement();

    expectedException.expect(AvaticaClientRuntimeException.class);
    expectedException.expectMessage("Too many open statements, limit is[4]");
    final Statement statement5 = client.createStatement();
  }

  @Test
  public void testNotTooManyStatementsWhenYouCloseThem() throws Exception
  {
    // Repeatedly opening and closing statements must never hit the per-connection limit.
    client.createStatement().close();
    client.createStatement().close();
    client.createStatement().close();
    client.createStatement().close();
    client.createStatement().close();
    client.createStatement().close();
    client.createStatement().close();
    client.createStatement().close();
    client.createStatement().close();
    client.createStatement().close();

    Assert.assertTrue(true);
  }

  @Test
  public void testNotTooManyStatementsWhenYouFullyIterateThem() throws Exception
  {
    // Fully-consumed result sets should close their statements server-side.
    for (int i = 0; i < 50; i++) {
      final ResultSet resultSet = client.createStatement().executeQuery(
          "SELECT COUNT(*) AS cnt FROM druid.foo"
      );
      Assert.assertEquals(
          ImmutableList.of(
              ImmutableMap.of("cnt", 6L)
          ),
          getRows(resultSet)
      );
    }

    Assert.assertTrue(true);
  }

  @Test
  public void testNotTooManyStatementsWhenTheyThrowErrors() throws Exception
  {
    // Failed queries should not leak statement slots either.
    for (int i = 0; i < 50; i++) {
      Exception thrown = null;
      try {
        client.createStatement().executeQuery("SELECT SUM(nonexistent) FROM druid.foo");
      }
      catch (Exception e) {
        thrown = e;
      }
      Assert.assertNotNull(thrown);

      final ResultSet resultSet = client.createStatement().executeQuery("SELECT COUNT(*) AS cnt FROM druid.foo");
      Assert.assertEquals(
          ImmutableList.of(ImmutableMap.of("cnt", 6L)),
          getRows(resultSet)
      );
    }

    Assert.assertTrue(true);
  }

  @Test
  public void testAutoReconnectOnNoSuchConnection() throws Exception
  {
    // The Avatica client should transparently reconnect after the server drops its connection.
    for (int i = 0; i < 50; i++) {
      final ResultSet resultSet = client.createStatement().executeQuery("SELECT COUNT(*) AS cnt FROM druid.foo");
      Assert.assertEquals(
          ImmutableList.of(ImmutableMap.of("cnt", 6L)),
          getRows(resultSet)
      );
      druidMeta.closeAllConnections();
    }

    Assert.assertTrue(true);
  }

  @Test
  public void testTooManyConnections() throws Exception
  {
    final Connection connection1 = DriverManager.getConnection(url);
    final Statement statement1 = connection1.createStatement();

    final Connection connection2 = DriverManager.getConnection(url);
    final Statement statement2 = connection2.createStatement();

    final Connection connection3 = DriverManager.getConnection(url);
    final Statement statement3 = connection3.createStatement();

    expectedException.expect(AvaticaClientRuntimeException.class);
    expectedException.expectMessage("Too many connections, limit is[3]");

    final Connection connection4 = DriverManager.getConnection(url);
  }

  @Test
  public void testNotTooManyConnectionsWhenTheyAreEmpty() throws Exception
  {
    // Connections with no open statements should not count against the connection limit.
    final Connection connection1 = DriverManager.getConnection(url);
    connection1.createStatement().close();

    final Connection connection2 = DriverManager.getConnection(url);
    connection2.createStatement().close();

    final Connection connection3 = DriverManager.getConnection(url);
    connection3.createStatement().close();

    final Connection connection4 = DriverManager.getConnection(url);
    Assert.assertTrue(true);
  }

  @Test
  public void testMaxRowsPerFrame() throws Exception
  {
    final AvaticaServerConfig smallFrameConfig = new AvaticaServerConfig()
    {
      @Override
      public int getMaxConnections()
      {
        return 2;
      }

      @Override
      public int getMaxStatementsPerConnection()
      {
        return 4;
      }

      @Override
      public int getMaxRowsPerFrame()
      {
        return 2;
      }
    };

    final PlannerConfig plannerConfig = new PlannerConfig();
    final DruidSchema druidSchema = CalciteTests.createMockSchema(conglomerate, walker, plannerConfig);
    final SystemSchema systemSchema = CalciteTests.createMockSystemSchema(druidSchema, walker, plannerConfig);
    final DruidOperatorTable operatorTable = CalciteTests.createOperatorTable();
    final ExprMacroTable macroTable = CalciteTests.createExprMacroTable();
    final List<Meta.Frame> frames = new ArrayList<>();
    DruidMeta smallFrameDruidMeta = new DruidMeta(
        CalciteTests.createSqlLifecycleFactory(
            new PlannerFactory(
                druidSchema,
                systemSchema,
                CalciteTests.createMockQueryLifecycleFactory(walker, conglomerate),
                operatorTable,
                macroTable,
                plannerConfig,
                AuthTestUtils.TEST_AUTHORIZER_MAPPER,
                CalciteTests.getJsonMapper()
            )
        ),
        smallFrameConfig,
        injector
    )
    {
      @Override
      public Frame fetch(
          final StatementHandle statement,
          final long offset,
          final int fetchMaxRowCount
      ) throws NoSuchStatementException, MissingResultsException
      {
        // overriding fetch allows us to track how many frames are processed after the first frame
        Frame frame = super.fetch(statement, offset, fetchMaxRowCount);
        frames.add(frame);
        return frame;
      }
    };

    final DruidAvaticaHandler handler = new DruidAvaticaHandler(
        smallFrameDruidMeta,
        new DruidNode("dummy", "dummy", false, 1, null, true, false),
        new AvaticaMonitor()
    );
    final int port = ThreadLocalRandom.current().nextInt(9999) + 20000;
    Server smallFrameServer = new Server(new InetSocketAddress("127.0.0.1", port));
    smallFrameServer.setHandler(handler);
    smallFrameServer.start();
    String smallFrameUrl = StringUtils.format(
        "jdbc:avatica:remote:url=http://127.0.0.1:%d%s",
        port,
        DruidAvaticaHandler.AVATICA_PATH
    );
    Connection smallFrameClient = DriverManager.getConnection(smallFrameUrl, "regularUser", "druid");

    final ResultSet resultSet = smallFrameClient.createStatement().executeQuery(
        "SELECT dim1 FROM druid.foo"
    );
    List<Map<String, Object>> rows = getRows(resultSet);
    Assert.assertEquals(2, frames.size());
    Assert.assertEquals(
        ImmutableList.of(
            ImmutableMap.of("dim1", ""),
            ImmutableMap.of("dim1", "10.1"),
            ImmutableMap.of("dim1", "2"),
            ImmutableMap.of("dim1", "1"),
            ImmutableMap.of("dim1", "def"),
            ImmutableMap.of("dim1", "abc")
        ),
        rows
    );

    // FIX: the secondary client and server were previously leaked; tearDown() only
    // stops the primary server built in setUp().
    smallFrameClient.close();
    smallFrameServer.stop();
  }

  @Test
  @SuppressWarnings("unchecked")
  public void testSqlRequestLog() throws Exception
  {
    // valid sql
    for (int i = 0; i < 3; i++) {
      client.createStatement().executeQuery("SELECT COUNT(*) AS cnt FROM druid.foo");
    }
    Assert.assertEquals(3, testRequestLogger.getSqlQueryLogs().size());
    for (RequestLogLine logLine : testRequestLogger.getSqlQueryLogs()) {
      final Map<String, Object> stats = logLine.getQueryStats().getStats();
      Assert.assertEquals(true, stats.get("success"));
      Assert.assertEquals("regularUser", stats.get("identity"));
      Assert.assertTrue(stats.containsKey("sqlQuery/time"));
      Assert.assertTrue(stats.containsKey("sqlQuery/bytes"));
    }

    // invalid sql
    testRequestLogger.clear();
    try {
      client.createStatement().executeQuery("SELECT notexist FROM druid.foo");
      Assert.fail("invalid sql should throw SQLException");
    }
    catch (SQLException ignored) {
      // expected: invalid column reference
    }
    Assert.assertEquals(1, testRequestLogger.getSqlQueryLogs().size());
    final Map<String, Object> stats = testRequestLogger.getSqlQueryLogs().get(0).getQueryStats().getStats();
    Assert.assertEquals(false, stats.get("success"));
    Assert.assertEquals("regularUser", stats.get("identity"));
    Assert.assertTrue(stats.containsKey("exception"));

    // unauthorized sql
    testRequestLogger.clear();
    try {
      client.createStatement().executeQuery("SELECT count(*) FROM druid.forbiddenDatasource");
      Assert.fail("unauthorzed sql should throw SQLException");
    }
    catch (SQLException ignored) {
      // expected: no authorization on forbiddenDatasource
    }
    Assert.assertEquals(0, testRequestLogger.getSqlQueryLogs().size());
  }

  /**
   * Drains a result set into a list of column-label -> value maps, closing it afterwards.
   */
  private static List<Map<String, Object>> getRows(final ResultSet resultSet) throws SQLException
  {
    return getRows(resultSet, null);
  }

  /**
   * Drains a result set, keeping only columns in {@code returnKeys} (all columns if null).
   * Always closes the result set.
   */
  private static List<Map<String, Object>> getRows(final ResultSet resultSet, final Set<String> returnKeys)
      throws SQLException
  {
    try {
      final ResultSetMetaData metaData = resultSet.getMetaData();
      final List<Map<String, Object>> rows = new ArrayList<>();
      while (resultSet.next()) {
        final Map<String, Object> row = new HashMap<>();
        for (int i = 0; i < metaData.getColumnCount(); i++) {
          if (returnKeys == null || returnKeys.contains(metaData.getColumnLabel(i + 1))) {
            row.put(metaData.getColumnLabel(i + 1), resultSet.getObject(i + 1));
          }
        }
        rows.add(row);
      }
      return rows;
    }
    finally {
      resultSet.close();
    }
  }

  /**
   * Builds a row map from key/value pairs; null-tolerant (unlike ImmutableMap).
   */
  private static Map<String, Object> row(final Pair<String, ?>... entries)
  {
    final Map<String, Object> m = new HashMap<>();
    for (Pair<String, ?> entry : entries) {
      m.put(entry.lhs, entry.rhs);
    }
    return m;
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.codec.language.bm; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.EnumMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashSet; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.TreeMap; import org.apache.commons.codec.language.bm.Languages.LanguageSet; import org.apache.commons.codec.language.bm.Rule.Phoneme; /** * Converts words into potential phonetic representations. * <p> * This is a two-stage process. Firstly, the word is converted into a phonetic representation that takes * into account the likely source language. Next, this phonetic representation is converted into a * pan-European 'average' representation, allowing comparison between different versions of essentially * the same word from different languages. * <p> * This class is intentionally immutable and thread-safe. * If you wish to alter the settings for a PhoneticEngine, you * must make a new one with the updated settings. 
 * <p>
 * Ported from phoneticengine.php
 *
 * @since 1.6
 * @version $Id: PhoneticEngine.java 1634418 2014-10-27 00:43:25Z ggregory $
 */
public class PhoneticEngine {

    /**
     * Utility for manipulating a set of phonemes as they are being built up. Not intended for use outside
     * this package, and probably not outside the {@link PhoneticEngine} class.
     *
     * @since 1.6
     */
    static final class PhonemeBuilder {

        /**
         * An empty builder where all phonemes must come from some set of languages. This will contain a single
         * phoneme of zero characters. This can then be appended to. This should be the only way to create a new
         * phoneme from scratch.
         *
         * @param languages the set of languages
         * @return a new, empty phoneme builder
         */
        public static PhonemeBuilder empty(final Languages.LanguageSet languages) {
            return new PhonemeBuilder(new Rule.Phoneme("", languages));
        }

        // Insertion-ordered set of candidate phonemes; mutated in place by append() and apply().
        private final Set<Rule.Phoneme> phonemes;

        private PhonemeBuilder(final Rule.Phoneme phoneme) {
            this.phonemes = new LinkedHashSet<Rule.Phoneme>();
            this.phonemes.add(phoneme);
        }

        // Adopts (does not copy) the given set; used by applyFinalRules to rebuild a builder.
        private PhonemeBuilder(final Set<Rule.Phoneme> phonemes) {
            this.phonemes = phonemes;
        }

        /**
         * Creates a new phoneme builder containing all phonemes in this one extended by <code>str</code>.
         * Despite the javadoc wording, this mutates the existing phonemes in place rather than
         * returning a new builder.
         *
         * @param str the characters to append to the phonemes
         */
        public void append(final CharSequence str) {
            for (final Rule.Phoneme ph : this.phonemes) {
                ph.append(str);
            }
        }

        /**
         * Applies the given phoneme expression to all phonemes in this phoneme builder.
         * <p>
         * This will lengthen phonemes that have compatible language sets to the expression, and drop those that are
         * incompatible.
         *
         * @param phonemeExpr the expression to apply
         * @param maxPhonemes the maximum number of phonemes to build up
         */
        public void apply(final Rule.PhonemeExpr phonemeExpr, final int maxPhonemes) {
            final Set<Rule.Phoneme> newPhonemes = new LinkedHashSet<Rule.Phoneme>(maxPhonemes);

            // Cross-product of current phonemes with the expression's alternatives,
            // capped at maxPhonemes; the label lets us bail out of both loops at once.
            EXPR: for (final Rule.Phoneme left : this.phonemes) {
                for (final Rule.Phoneme right : phonemeExpr.getPhonemes()) {
                    final LanguageSet languages = left.getLanguages().restrictTo(right.getLanguages());
                    if (!languages.isEmpty()) {
                        final Rule.Phoneme join = new Phoneme(left, right, languages);
                        if (newPhonemes.size() < maxPhonemes) {
                            newPhonemes.add(join);
                            if (newPhonemes.size() >= maxPhonemes) {
                                break EXPR;
                            }
                        }
                    }
                }
            }

            // Replace the contents in place so callers holding this builder see the update.
            this.phonemes.clear();
            this.phonemes.addAll(newPhonemes);
        }

        /**
         * Gets underlying phoneme set. Please don't mutate.
         *
         * @return the phoneme set
         */
        public Set<Rule.Phoneme> getPhonemes() {
            return this.phonemes;
        }

        /**
         * Stringifies the phoneme set. This produces a single string of the strings of each phoneme,
         * joined with a pipe. This is explicitly provided in place of toString as it is a potentially
         * expensive operation, which should be avoided when debugging.
         *
         * @return the stringified phoneme set
         */
        public String makeString() {
            final StringBuilder sb = new StringBuilder();

            for (final Rule.Phoneme ph : this.phonemes) {
                if (sb.length() > 0) {
                    sb.append("|");
                }
                sb.append(ph.getPhonemeText());
            }

            return sb.toString();
        }
    }

    /**
     * A function closure capturing the application of a list of rules to an input sequence at a particular offset.
     * After invocation, the values <code>i</code> and <code>found</code> are updated. <code>i</code> points to the
     * index of the next char in <code>input</code> that must be processed next (the input up to that index having been
     * processed already), and <code>found</code> indicates if a matching rule was found or not.
In the case where a * matching rule was found, <code>phonemeBuilder</code> is replaced with a new builder containing the phonemes * updated by the matching rule. * * Although this class is not thread-safe (it has mutable unprotected fields), it is not shared between threads * as it is constructed as needed by the calling methods. * @since 1.6 */ private static final class RulesApplication { private final Map<String, List<Rule>> finalRules; private final CharSequence input; private PhonemeBuilder phonemeBuilder; private int i; private final int maxPhonemes; private boolean found; public RulesApplication(final Map<String, List<Rule>> finalRules, final CharSequence input, final PhonemeBuilder phonemeBuilder, final int i, final int maxPhonemes) { if (finalRules == null) { throw new NullPointerException("The finalRules argument must not be null"); } this.finalRules = finalRules; this.phonemeBuilder = phonemeBuilder; this.input = input; this.i = i; this.maxPhonemes = maxPhonemes; } public int getI() { return this.i; } public PhonemeBuilder getPhonemeBuilder() { return this.phonemeBuilder; } /** * Invokes the rules. Loops over the rules list, stopping at the first one that has a matching context * and pattern. Then applies this rule to the phoneme builder to produce updated phonemes. If there was no * match, <code>i</code> is advanced one and the character is silently dropped from the phonetic spelling. 
* * @return <code>this</code> */ public RulesApplication invoke() { this.found = false; int patternLength = 1; final List<Rule> rules = this.finalRules.get(input.subSequence(i, i+patternLength)); if (rules != null) { for (final Rule rule : rules) { final String pattern = rule.getPattern(); patternLength = pattern.length(); if (rule.patternAndContextMatches(this.input, this.i)) { this.phonemeBuilder.apply(rule.getPhoneme(), maxPhonemes); this.found = true; break; } } } if (!this.found) { patternLength = 1; } this.i += patternLength; return this; } public boolean isFound() { return this.found; } } private static final Map<NameType, Set<String>> NAME_PREFIXES = new EnumMap<NameType, Set<String>>(NameType.class); static { NAME_PREFIXES.put(NameType.ASHKENAZI, Collections.unmodifiableSet( new HashSet<String>(Arrays.asList("bar", "ben", "da", "de", "van", "von")))); NAME_PREFIXES.put(NameType.SEPHARDIC, Collections.unmodifiableSet( new HashSet<String>(Arrays.asList("al", "el", "da", "dal", "de", "del", "dela", "de la", "della", "des", "di", "do", "dos", "du", "van", "von")))); NAME_PREFIXES.put(NameType.GENERIC, Collections.unmodifiableSet( new HashSet<String>(Arrays.asList("da", "dal", "de", "del", "dela", "de la", "della", "des", "di", "do", "dos", "du", "van", "von")))); } /** * Joins some strings with an internal separator. 
     * @param strings Strings to join
     * @param sep String to separate them with
     * @return a single String consisting of each element of <code>strings</code> interleaved by <code>sep</code>
     */
    private static String join(final Iterable<String> strings, final String sep) {
        final StringBuilder sb = new StringBuilder();
        final Iterator<String> si = strings.iterator();
        if (si.hasNext()) {
            sb.append(si.next());
        }
        while (si.hasNext()) {
            sb.append(sep).append(si.next());
        }

        return sb.toString();
    }

    // Default cap on the number of candidate phonemes carried through encoding.
    private static final int DEFAULT_MAX_PHONEMES = 20;

    private final Lang lang;

    private final NameType nameType;

    private final RuleType ruleType;

    private final boolean concat;

    private final int maxPhonemes;

    /**
     * Generates a new, fully-configured phonetic engine.
     *
     * @param nameType
     *            the type of names it will use
     * @param ruleType
     *            the type of rules it will apply
     * @param concat
     *            if it will concatenate multiple encodings
     */
    public PhoneticEngine(final NameType nameType, final RuleType ruleType, final boolean concat) {
        this(nameType, ruleType, concat, DEFAULT_MAX_PHONEMES);
    }

    /**
     * Generates a new, fully-configured phonetic engine.
     *
     * @param nameType
     *            the type of names it will use
     * @param ruleType
     *            the type of rules it will apply; must not be {@link RuleType#RULES}
     * @param concat
     *            if it will concatenate multiple encodings
     * @param maxPhonemes
     *            the maximum number of phonemes that will be handled
     * @since 1.7
     */
    public PhoneticEngine(final NameType nameType, final RuleType ruleType, final boolean concat,
                          final int maxPhonemes) {
        if (ruleType == RuleType.RULES) {
            throw new IllegalArgumentException("ruleType must not be " + RuleType.RULES);
        }
        this.nameType = nameType;
        this.ruleType = ruleType;
        this.concat = concat;
        this.lang = Lang.instance(nameType);
        this.maxPhonemes = maxPhonemes;
    }

    /**
     * Applies the final rules to convert from a language-specific phonetic representation to a
     * language-independent representation.
     *
     * @param phonemeBuilder the current phonemes
     * @param finalRules the final rules to apply
     * @return the resulting phonemes
     */
    private PhonemeBuilder applyFinalRules(final PhonemeBuilder phonemeBuilder,
                                           final Map<String, List<Rule>> finalRules) {
        if (finalRules == null) {
            throw new NullPointerException("finalRules can not be null");
        }
        if (finalRules.isEmpty()) {
            return phonemeBuilder;
        }

        // Ordered by phoneme text only (COMPARATOR ignores language sets); used to de-duplicate below.
        final Map<Rule.Phoneme, Rule.Phoneme> phonemes =
                new TreeMap<Rule.Phoneme, Rule.Phoneme>(Rule.Phoneme.COMPARATOR);

        for (final Rule.Phoneme phoneme : phonemeBuilder.getPhonemes()) {
            PhonemeBuilder subBuilder = PhonemeBuilder.empty(phoneme.getLanguages());
            final String phonemeText = phoneme.getPhonemeText().toString();

            // i is advanced by RulesApplication, not by the loop header.
            for (int i = 0; i < phonemeText.length();) {
                final RulesApplication rulesApplication =
                        new RulesApplication(finalRules, phonemeText, subBuilder, i, maxPhonemes).invoke();
                final boolean found = rulesApplication.isFound();
                subBuilder = rulesApplication.getPhonemeBuilder();

                if (!found) {
                    // not found, appending as-is
                    subBuilder.append(phonemeText.subSequence(i, i + 1));
                }

                i = rulesApplication.getI();
            }

            // the phonemes map orders the phonemes only based on their text, but ignores the language set
            // when adding new phonemes, check for equal phonemes and merge their language set, otherwise
            // phonemes with the same text but different language set get lost
            for (final Rule.Phoneme newPhoneme : subBuilder.getPhonemes()) {
                if (phonemes.containsKey(newPhoneme)) {
                    final Rule.Phoneme oldPhoneme = phonemes.remove(newPhoneme);
                    final Rule.Phoneme mergedPhoneme = oldPhoneme.mergeWithLanguage(newPhoneme.getLanguages());
                    phonemes.put(mergedPhoneme, mergedPhoneme);
                } else {
                    phonemes.put(newPhoneme, newPhoneme);
                }
            }
        }

        return new PhonemeBuilder(phonemes.keySet());
    }

    /**
     * Encodes a string to its phonetic representation.
* * @param input * the String to encode * @return the encoding of the input */ public String encode(final String input) { final Languages.LanguageSet languageSet = this.lang.guessLanguages(input); return encode(input, languageSet); } /** * Encodes an input string into an output phonetic representation, given a set of possible origin languages. * * @param input * String to phoneticise; a String with dashes or spaces separating each word * @param languageSet * set of possible origin languages * @return a phonetic representation of the input; a String containing '-'-separated phonetic representations of the * input */ public String encode(String input, final Languages.LanguageSet languageSet) { final Map<String, List<Rule>> rules = Rule.getInstanceMap(this.nameType, RuleType.RULES, languageSet); // rules common across many (all) languages final Map<String, List<Rule>> finalRules1 = Rule.getInstanceMap(this.nameType, this.ruleType, "common"); // rules that apply to a specific language that may be ambiguous or wrong if applied to other languages final Map<String, List<Rule>> finalRules2 = Rule.getInstanceMap(this.nameType, this.ruleType, languageSet); // tidy the input // lower case is a locale-dependent operation input = input.toLowerCase(Locale.ENGLISH).replace('-', ' ').trim(); if (this.nameType == NameType.GENERIC) { if (input.length() >= 2 && input.substring(0, 2).equals("d'")) { // check for d' final String remainder = input.substring(2); final String combined = "d" + remainder; return "(" + encode(remainder) + ")-(" + encode(combined) + ")"; } for (final String l : NAME_PREFIXES.get(this.nameType)) { // handle generic prefixes if (input.startsWith(l + " ")) { // check for any prefix in the words list final String remainder = input.substring(l.length() + 1); // input without the prefix final String combined = l + remainder; // input with prefix without space return "(" + encode(remainder) + ")-(" + encode(combined) + ")"; } } } final List<String> words = 
Arrays.asList(input.split("\\s+")); final List<String> words2 = new ArrayList<String>(); // special-case handling of word prefixes based upon the name type switch (this.nameType) { case SEPHARDIC: for (final String aWord : words) { final String[] parts = aWord.split("'"); final String lastPart = parts[parts.length - 1]; words2.add(lastPart); } words2.removeAll(NAME_PREFIXES.get(this.nameType)); break; case ASHKENAZI: words2.addAll(words); words2.removeAll(NAME_PREFIXES.get(this.nameType)); break; case GENERIC: words2.addAll(words); break; default: throw new IllegalStateException("Unreachable case: " + this.nameType); } if (this.concat) { // concat mode enabled input = join(words2, " "); } else if (words2.size() == 1) { // not a multi-word name input = words.iterator().next(); } else { // encode each word in a multi-word name separately (normally used for approx matches) final StringBuilder result = new StringBuilder(); for (final String word : words2) { result.append("-").append(encode(word)); } // return the result without the leading "-" return result.substring(1); } PhonemeBuilder phonemeBuilder = PhonemeBuilder.empty(languageSet); // loop over each char in the input - we will handle the increment manually for (int i = 0; i < input.length();) { final RulesApplication rulesApplication = new RulesApplication(rules, input, phonemeBuilder, i, maxPhonemes).invoke(); i = rulesApplication.getI(); phonemeBuilder = rulesApplication.getPhonemeBuilder(); } // Apply the general rules phonemeBuilder = applyFinalRules(phonemeBuilder, finalRules1); // Apply the language-specific rules phonemeBuilder = applyFinalRules(phonemeBuilder, finalRules2); return phonemeBuilder.makeString(); } /** * Gets the Lang language guessing rules being used. * * @return the Lang in use */ public Lang getLang() { return this.lang; } /** * Gets the NameType being used. * * @return the NameType in use */ public NameType getNameType() { return this.nameType; } /** * Gets the RuleType being used. 
* * @return the RuleType in use */ public RuleType getRuleType() { return this.ruleType; } /** * Gets if multiple phonetic encodings are concatenated or if just the first one is kept. * * @return true if multiple phonetic encodings are returned, false if just the first is */ public boolean isConcat() { return this.concat; } /** * Gets the maximum number of phonemes the engine will calculate for a given input. * * @return the maximum number of phonemes * @since 1.7 */ public int getMaxPhonemes() { return this.maxPhonemes; } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.processor.aggregate.zipfile; import java.io.File; import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; import java.nio.file.FileSystem; import java.nio.file.FileSystems; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.StandardCopyOption; import java.nio.file.StandardOpenOption; import java.util.HashMap; import java.util.Map; import org.apache.camel.AggregationStrategy; import org.apache.camel.Exchange; import org.apache.camel.ExtendedExchange; import org.apache.camel.WrappedFile; import org.apache.camel.component.file.FileConsumer; import org.apache.camel.component.file.GenericFile; import org.apache.camel.component.file.GenericFileMessage; import org.apache.camel.component.file.GenericFileOperationFailedException; import org.apache.camel.spi.Synchronization; import org.apache.camel.support.ExchangeHelper; import org.apache.camel.util.FileUtil; /** * This aggregation strategy will aggregate all incoming messages into a ZIP file. 
* <p> * If the incoming exchanges contain {@link GenericFileMessage} file name will be taken from the body otherwise the body * content will be treated as a byte array and the ZIP entry will be named using the message id (unless the flag * useFilenameHeader is set to true. * </p> * <p> * <b>Note:</b> Please note that this aggregation strategy requires eager completion check to work properly. * </p> */ public class ZipAggregationStrategy implements AggregationStrategy { private String filePrefix; private String fileSuffix = ".zip"; private boolean preserveFolderStructure; private boolean useFilenameHeader; private boolean useTempFile; private File parentDir = new File(System.getProperty("java.io.tmpdir")); public ZipAggregationStrategy() { this(false); } /** * @param preserveFolderStructure if true, the folder structure is preserved when the source is a type of * {@link GenericFileMessage}. If used with a file, use recursive=true. */ public ZipAggregationStrategy(boolean preserveFolderStructure) { this(preserveFolderStructure, false); } /** * @param preserveFolderStructure if true, the folder structure is preserved when the source is a type of * {@link GenericFileMessage}. If used with a file, use recursive=true. * @param useFilenameHeader if true, the filename header will be used to name aggregated byte arrays within * the ZIP file. */ public ZipAggregationStrategy(boolean preserveFolderStructure, boolean useFilenameHeader) { this(preserveFolderStructure, useFilenameHeader, false); } /** * @param preserveFolderStructure if true, the folder structure is preserved when the source is a type of * {@link GenericFileMessage}. If used with a file, use recursive=true. * @param useFilenameHeader if true, the filename header will be used to name aggregated byte arrays within * the ZIP file. * @param useTempFile if true, the ZipFileSystem will use temporary files for zip manipulations instead * of memory. 
*/ public ZipAggregationStrategy(boolean preserveFolderStructure, boolean useFilenameHeader, boolean useTempFile) { this.preserveFolderStructure = preserveFolderStructure; this.useFilenameHeader = useFilenameHeader; this.useTempFile = useTempFile; } /** * Gets the prefix used when creating the ZIP file name. * * @return the prefix */ public String getFilePrefix() { return filePrefix; } /** * Sets the prefix that will be used when creating the ZIP filename. * * @param filePrefix prefix to use on ZIP file. */ public void setFilePrefix(String filePrefix) { this.filePrefix = filePrefix; } /** * Gets the suffix used when creating the ZIP file name. * * @return the suffix */ public String getFileSuffix() { return fileSuffix; } /** * Sets the suffix that will be used when creating the ZIP filename. * * @param fileSuffix suffix to use on ZIP file. */ public void setFileSuffix(String fileSuffix) { this.fileSuffix = fileSuffix; } public File getParentDir() { return parentDir; } /** * Sets the parent directory to use for writing temporary files. */ public void setParentDir(File parentDir) { this.parentDir = parentDir; } /** * Sets the parent directory to use for writing temporary files. 
*/ public void setParentDir(String parentDir) { this.parentDir = new File(parentDir); } @Override public Exchange aggregate(Exchange oldExchange, Exchange newExchange) { File zipFile; Exchange answer = oldExchange; // Guard against empty new exchanges if (newExchange == null) { return oldExchange; } // First time for this aggregation if (oldExchange == null) { try { zipFile = FileUtil.createTempFile(this.filePrefix, this.fileSuffix, this.parentDir); newZipFile(zipFile); } catch (IOException | URISyntaxException e) { throw new GenericFileOperationFailedException(e.getMessage(), e); } answer = newExchange; answer.adapt(ExtendedExchange.class).addOnCompletion(new DeleteZipFileOnCompletion(zipFile)); } else { zipFile = oldExchange.getIn().getBody(File.class); } Object body = newExchange.getIn().getBody(); if (body instanceof WrappedFile) { body = ((WrappedFile) body).getFile(); } String charset = ExchangeHelper.getCharsetName(newExchange, true); if (body instanceof File) { try { File appendFile = (File) body; // do not try to append empty files if (appendFile.length() > 0) { String entryName = preserveFolderStructure ? newExchange.getIn().getHeader(Exchange.FILE_NAME, String.class) : newExchange.getIn().getMessageId(); addFileToZip(zipFile, appendFile, this.preserveFolderStructure ? entryName : null); } } catch (Exception e) { throw new GenericFileOperationFailedException(e.getMessage(), e); } } else { // Handle all other messages try { byte[] buffer = newExchange.getIn().getMandatoryBody(byte[].class); // do not try to append empty data if (buffer.length > 0) { String entryName = useFilenameHeader ? 
newExchange.getIn().getHeader(Exchange.FILE_NAME, String.class) : newExchange.getIn().getMessageId(); addEntryToZip(zipFile, entryName, buffer, charset); } } catch (Exception e) { throw new GenericFileOperationFailedException(e.getMessage(), e); } } GenericFile<File> genericFile = FileConsumer.asGenericFile(zipFile.getParent(), zipFile, charset, false); genericFile.bindToExchange(answer); return answer; } private static void newZipFile(File zipFile) throws URISyntaxException, IOException { if (zipFile.exists() && !zipFile.delete()) { //Delete, because ZipFileSystem needs to create file on its own (with correct END bytes in the file) throw new IOException("Cannot delete file " + zipFile); } Map<String, Object> env = new HashMap<>(); env.put("create", Boolean.TRUE.toString()); //Intentionally String, it is implemented this way in ZipFileSystem try (FileSystem ignored = FileSystems.newFileSystem(getZipURI(zipFile), env)) { //noop, just open and close FileSystem to initialize correct headers in file } } private void addFileToZip(File zipFile, File file, String fileName) throws IOException, URISyntaxException { String entryName = fileName == null ? 
file.getName() : fileName; Map<String, Object> env = new HashMap<>(); env.put("useTempFile", this.useTempFile); //Intentionally boolean, it is implemented this way in ZipFileSystem try (FileSystem fs = FileSystems.newFileSystem(getZipURI(zipFile), env)) { Path dest = fs.getPath("/", entryName); Path parent = dest.getParent(); if (parent != null) { Files.createDirectories(parent); Files.copy(file.toPath(), dest, StandardCopyOption.REPLACE_EXISTING); } else { // TODO do some logging } } } private void addEntryToZip(File zipFile, String entryName, byte[] buffer, String charset) throws IOException, URISyntaxException { Map<String, Object> env = new HashMap<>(); env.put("encoding", charset); env.put("useTempFile", this.useTempFile); //Intentionally boolean, it is implemented this way in ZipFileSystem try (FileSystem fs = FileSystems.newFileSystem(getZipURI(zipFile), env)) { Path dest = fs.getPath("/", entryName); Path parent = dest.getParent(); if (parent != null) { Files.createDirectories(parent); Files.write(dest, buffer, StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING); } else { // TODO do some logging } } } private static URI getZipURI(File zipFile) throws URISyntaxException { return new URI("jar", zipFile.toURI().toString(), null); } /** * This callback class is used to clean up the temporary ZIP file once the exchange has completed. */ private static class DeleteZipFileOnCompletion implements Synchronization { private final File fileToDelete; DeleteZipFileOnCompletion(File fileToDelete) { this.fileToDelete = fileToDelete; } @Override public void onFailure(Exchange exchange) { // Keep the file if something gone a miss. } @Override public void onComplete(Exchange exchange) { FileUtil.deleteFile(this.fileToDelete); } } }
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.byteCode; import com.facebook.presto.byteCode.debug.LineNumberNode; import com.facebook.presto.byteCode.instruction.Constant; import com.facebook.presto.byteCode.instruction.InvokeInstruction; import com.facebook.presto.byteCode.instruction.JumpInstruction; import com.facebook.presto.byteCode.instruction.LabelNode; import com.facebook.presto.byteCode.instruction.TypeInstruction; import com.facebook.presto.byteCode.instruction.VariableInstruction; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; import org.objectweb.asm.MethodVisitor; import javax.annotation.concurrent.NotThreadSafe; import java.lang.invoke.MethodType; import java.lang.reflect.Constructor; import java.lang.reflect.Field; import java.lang.reflect.Method; import java.lang.reflect.Modifier; import java.util.ArrayList; import java.util.List; import static com.facebook.presto.byteCode.Access.STATIC; import static com.facebook.presto.byteCode.ParameterizedType.type; import static com.facebook.presto.byteCode.instruction.Constant.loadBoolean; import static com.facebook.presto.byteCode.instruction.Constant.loadClass; import static com.facebook.presto.byteCode.instruction.Constant.loadDouble; import static com.facebook.presto.byteCode.instruction.Constant.loadFloat; import static com.facebook.presto.byteCode.instruction.Constant.loadInt; import static 
com.facebook.presto.byteCode.instruction.Constant.loadLong; import static com.facebook.presto.byteCode.instruction.Constant.loadNumber; import static com.facebook.presto.byteCode.instruction.FieldInstruction.getFieldInstruction; import static com.facebook.presto.byteCode.instruction.FieldInstruction.getStaticInstruction; import static com.facebook.presto.byteCode.instruction.FieldInstruction.putFieldInstruction; import static com.facebook.presto.byteCode.instruction.FieldInstruction.putStaticInstruction; import static com.facebook.presto.byteCode.instruction.TypeInstruction.cast; import static com.facebook.presto.byteCode.instruction.TypeInstruction.instanceOf; import static com.google.common.base.Preconditions.checkArgument; import static java.lang.invoke.MethodType.methodType; @NotThreadSafe public class Block implements ByteCodeNode { private final CompilerContext context; private final List<ByteCodeNode> nodes = new ArrayList<>(); // private final List<TryCatchBlockNode> tryCatchBlocks = new ArrayList<>(); private String description; public Block(CompilerContext context) { this.context = context; } public String getDescription() { return description; } public Block setDescription(String description) { this.description = description; return this; } @Override public List<ByteCodeNode> getChildNodes() { return ImmutableList.copyOf(nodes); } public Block append(ByteCodeNode node) { if (node != OpCodes.NOP && !(node instanceof Block && ((Block) node).isEmpty())) { nodes.add(node); } return this; } public Block comment(String comment) { nodes.add(new Comment(comment)); return this; } public Block comment(String comment, Object... 
args) { nodes.add(new Comment(String.format(comment, args))); return this; } public boolean isEmpty() { return nodes.size() == 0; } // public List<TryCatchBlockNode> getTryCatchBlocks() // { // return tryCatchBlocks; // } // // public BlockDefinition tryCatch(BlockDefinition tryBlock, BlockDefinition handlerBlock, ParameterizedType exceptionType) // { // LabelNode tryStart = new LabelNode(); // LabelNode tryEnd = new LabelNode(); // LabelNode handler = new LabelNode(); // LabelNode done = new LabelNode(); // // String exceptionName = null; // if (exceptionType != null) { // exceptionName = exceptionType.getClassName(); // } // tryCatchBlocks.add(new TryCatchBlockNode(tryStart, tryEnd, handler, exceptionName)); // // // // // try block // visitLabel(tryStart); // append(tryBlock); // visitLabel(tryEnd); // gotoLabel(done); // // // // // handler block // // visitLabel(handler); // // // store exception // Variable exception = context.createTempVariable(); // storeVariable(exception.getLocalVariableDefinition()); // // // execute handler code // append(handlerBlock); // // // load and rethrow exception // loadVariable(exception.getLocalVariableDefinition()); // context.dropTempVariable(exception); // throwObject(); // // // all done // visitLabel(done); // // return this; // } public Block visitLabel(LabelNode label) { nodes.add(label); return this; } public Block gotoLabel(LabelNode label) { nodes.add(JumpInstruction.jump(label)); return this; } public Block ifFalseGoto(LabelNode label) { return ifZeroGoto(label); } public Block ifTrueGoto(LabelNode label) { return ifNotZeroGoto(label); } public Block ifZeroGoto(LabelNode label) { nodes.add(JumpInstruction.jumpIfEqualZero(label)); return this; } public Block ifNotZeroGoto(LabelNode label) { nodes.add(JumpInstruction.jumpIfNotEqualZero(label)); return this; } public Block ifNullGoto(LabelNode label) { nodes.add(JumpInstruction.jumpIfNull(label)); return this; } public Block ifNotNullGoto(LabelNode label) { 
nodes.add(JumpInstruction.jumpIfNotNull(label)); return this; } public Block intLeftShift() { nodes.add(OpCodes.ISHL); return this; } public Block intRightShift() { nodes.add(OpCodes.ISHR); return this; } public Block longLeftShift() { nodes.add(OpCodes.LSHL); return this; } public Block longRightShift() { nodes.add(OpCodes.LSHR); return this; } public Block unsignedIntRightShift() { nodes.add(OpCodes.IUSHR); return this; } public Block unsignedLongRightShift() { nodes.add(OpCodes.LUSHR); return this; } public Block intBitAnd() { nodes.add(OpCodes.IAND); return this; } public Block intBitOr() { nodes.add(OpCodes.IOR); return this; } public Block intBitXor() { nodes.add(OpCodes.IXOR); return this; } public Block longBitAnd() { nodes.add(OpCodes.LAND); return this; } public Block longBitOr() { nodes.add(OpCodes.LOR); return this; } public Block longBitXor() { nodes.add(OpCodes.LXOR); return this; } public Block intNegate() { nodes.add(OpCodes.INEG); return this; } public Block longNegate() { nodes.add(OpCodes.LNEG); return this; } public Block longToInt() { nodes.add(OpCodes.L2I); return this; } public Block isInstanceOf(Class<?> type) { nodes.add(instanceOf(type)); return this; } public Block isInstanceOf(ParameterizedType type) { nodes.add(instanceOf(type)); return this; } public Block checkCast(Class<?> type) { nodes.add(cast(type)); return this; } public Block checkCast(ParameterizedType type) { nodes.add(cast(type)); return this; } public Block invokeStatic(Method method) { nodes.add(InvokeInstruction.invokeStatic(method)); return this; } public Block invokeStatic(MethodDefinition method) { nodes.add(InvokeInstruction.invokeStatic(method)); return this; } public Block invokeStatic(Class<?> type, String name, Class<?> returnType, Class<?>... 
parameterTypes) { nodes.add(InvokeInstruction.invokeStatic(type, name, returnType, parameterTypes)); return this; } public Block invokeStatic(Class<?> type, String name, Class<?> returnType, Iterable<Class<?>> parameterTypes) { nodes.add(InvokeInstruction.invokeStatic(type, name, returnType, parameterTypes)); return this; } public Block invokeStatic(ParameterizedType type, String name, ParameterizedType returnType, ParameterizedType... parameterTypes) { nodes.add(InvokeInstruction.invokeStatic(type, name, returnType, parameterTypes)); return this; } public Block invokeStatic(ParameterizedType type, String name, ParameterizedType returnType, Iterable<ParameterizedType> parameterTypes) { nodes.add(InvokeInstruction.invokeStatic(type, name, returnType, parameterTypes)); return this; } public Block invokeVirtual(Method method) { nodes.add(InvokeInstruction.invokeVirtual(method)); return this; } public Block invokeVirtual(MethodDefinition method) { nodes.add(InvokeInstruction.invokeVirtual(method)); return this; } public Block invokeVirtual(Class<?> type, String name, Class<?> returnType, Class<?>... parameterTypes) { nodes.add(InvokeInstruction.invokeVirtual(type, name, returnType, parameterTypes)); return this; } public Block invokeVirtual(Class<?> type, String name, Class<?> returnType, Iterable<Class<?>> parameterTypes) { nodes.add(InvokeInstruction.invokeVirtual(type, name, returnType, parameterTypes)); return this; } public Block invokeVirtual(ParameterizedType type, String name, ParameterizedType returnType, ParameterizedType... 
parameterTypes) { nodes.add(InvokeInstruction.invokeVirtual(type, name, returnType, parameterTypes)); return this; } public Block invokeVirtual(ParameterizedType type, String name, ParameterizedType returnType, Iterable<ParameterizedType> parameterTypes) { nodes.add(InvokeInstruction.invokeVirtual(type, name, returnType, parameterTypes)); return this; } public Block invokeInterface(Method method) { nodes.add(InvokeInstruction.invokeInterface(method)); return this; } public Block invokeInterface(MethodDefinition method) { nodes.add(InvokeInstruction.invokeInterface(method)); return this; } public Block invokeInterface(Class<?> type, String name, Class<?> returnType, Class<?>... parameterTypes) { nodes.add(InvokeInstruction.invokeInterface(type, name, returnType, parameterTypes)); return this; } public Block invokeInterface(Class<?> type, String name, Class<?> returnType, Iterable<Class<?>> parameterTypes) { nodes.add(InvokeInstruction.invokeInterface(type, name, returnType, parameterTypes)); return this; } public Block invokeInterface(ParameterizedType type, String name, ParameterizedType returnType, ParameterizedType... parameterTypes) { nodes.add(InvokeInstruction.invokeInterface(type, name, returnType, parameterTypes)); return this; } public Block invokeInterface(ParameterizedType type, String name, ParameterizedType returnType, Iterable<ParameterizedType> parameterTypes) { nodes.add(InvokeInstruction.invokeInterface(type, name, returnType, parameterTypes)); return this; } public Block invokeConstructor(Constructor<?> constructor) { nodes.add(InvokeInstruction.invokeConstructor(constructor)); return this; } public Block invokeConstructor(Class<?> type, Class<?>... 
parameterTypes) { nodes.add(InvokeInstruction.invokeConstructor(type, parameterTypes)); return this; } public Block invokeConstructor(Class<?> type, Iterable<Class<?>> parameterTypes) { nodes.add(InvokeInstruction.invokeConstructor(type, parameterTypes)); return this; } public Block invokeConstructor(ParameterizedType type, ParameterizedType... parameterTypes) { nodes.add(InvokeInstruction.invokeConstructor(type, parameterTypes)); return this; } public Block invokeConstructor(ParameterizedType type, Iterable<ParameterizedType> parameterTypes) { nodes.add(InvokeInstruction.invokeConstructor(type, parameterTypes)); return this; } public Block invokeSpecial(Method method) { nodes.add(InvokeInstruction.invokeSpecial(method)); return this; } public Block invokeSpecial(MethodDefinition method) { nodes.add(InvokeInstruction.invokeSpecial(method)); return this; } public Block invokeSpecial(Class<?> type, String name, Class<?> returnType, Class<?>... parameterTypes) { nodes.add(InvokeInstruction.invokeSpecial(type, name, returnType, parameterTypes)); return this; } public Block invokeSpecial(Class<?> type, String name, Class<?> returnType, Iterable<Class<?>> parameterTypes) { nodes.add(InvokeInstruction.invokeSpecial(type, name, returnType, parameterTypes)); return this; } public Block invokeSpecial(ParameterizedType type, String name, ParameterizedType returnType, ParameterizedType... parameterTypes) { nodes.add(InvokeInstruction.invokeSpecial(type, name, returnType, parameterTypes)); return this; } public Block invokeSpecial(ParameterizedType type, String name, ParameterizedType returnType, Iterable<ParameterizedType> parameterTypes) { nodes.add(InvokeInstruction.invokeSpecial(type, name, returnType, parameterTypes)); return this; } public Block invokeDynamic(String name, Class<?> returnType, List<Class<?>> parameterTypes) { return invokeDynamic(name, methodType(returnType, parameterTypes)); } public Block invokeDynamic(String name, Class<?> returnType, Class<?>... 
parameterTypes) { return invokeDynamic(name, methodType(returnType, ImmutableList.copyOf(parameterTypes))); } public Block invokeDynamic(String name, MethodType methodType) { return invokeDynamic(name, methodType, context.getDefaultBootstrapMethod(), context.getDefaultBootstrapArguments()); } public Block invokeDynamic(String name, MethodType methodType, Object... defaultBootstrapArguments) { nodes.add(InvokeInstruction.invokeDynamic(name, methodType, context.getDefaultBootstrapMethod(), defaultBootstrapArguments)); return this; } public Block invokeDynamic(String name, MethodType methodType, Method bootstrapMethod, Object... defaultBootstrapArguments) { nodes.add(InvokeInstruction.invokeDynamic(name, methodType, bootstrapMethod, defaultBootstrapArguments)); return this; } public Block ret() { nodes.add(OpCodes.RETURN); return this; } public Block retObject() { nodes.add(OpCodes.ARETURN); return this; } public Block retBoolean() { nodes.add(OpCodes.IRETURN); return this; } public Block retLong() { nodes.add(OpCodes.LRETURN); return this; } public Block retInt() { nodes.add(OpCodes.IRETURN); return this; } public Block throwObject() { nodes.add(OpCodes.ATHROW); return this; } public Block newObject(Class<?> type) { nodes.add(TypeInstruction.newObject(type)); return this; } public Block newObject(ParameterizedType type) { nodes.add(TypeInstruction.newObject(type)); return this; } public Block newArray(Class<?> type) { nodes.add(TypeInstruction.newObjectArray(type)); return this; } public Block dup() { nodes.add(OpCodes.DUP); return this; } public Block dup(Class<?> type) { if (type == long.class || type == double.class) { nodes.add(OpCodes.DUP2); } else { nodes.add(OpCodes.DUP); } return this; } public Block pop() { nodes.add(OpCodes.POP); return this; } public Block pop(Class<?> type) { if (type == long.class || type == double.class) { nodes.add(OpCodes.POP2); } else if (type != void.class) { nodes.add(OpCodes.POP); } return this; } public Block swap() { 
// NOTE(review): tail of a stack-manipulation method begun before this chunk
// (presumably swap() - confirm against the preceding lines); emits a SWAP
// opcode and returns this block for fluent chaining.
nodes.add(OpCodes.SWAP);
return this;
}

//
// Fields (non-static)
//

/** Loads the value of the given reflective {@link Field} onto the stack. */
public Block getField(Field field)
{
    return getField(field.getDeclaringClass(), field.getName(), field.getType());
}

/** Loads the value of the given field definition onto the stack. */
public Block getField(FieldDefinition field)
{
    getField(field.getDeclaringClass().getType(), field.getName(), field.getType());
    return this;
}

/** Loads the named field of {@code target} onto the stack. */
public Block getField(Class<?> target, String fieldName, Class<?> fieldType)
{
    getField(type(target), fieldName, type(fieldType));
    return this;
}

/** Emits a GETFIELD instruction for the named field of {@code target}. */
public Block getField(ParameterizedType target, String fieldName, ParameterizedType fieldType)
{
    nodes.add(getFieldInstruction(target, fieldName, fieldType));
    return this;
}

/** Stores the top of the stack into the given reflective {@link Field}. */
public Block putField(Field field)
{
    return putField(field.getDeclaringClass(), field.getName(), field.getType());
}

/** Stores the top of the stack into the named field of {@code target}. */
public Block putField(Class<?> target, String fieldName, Class<?> fieldType)
{
    putField(type(target), fieldName, type(fieldType));
    return this;
}

/**
 * Stores the top of the stack into the given (non-static) field definition.
 *
 * @throws IllegalArgumentException if the field is static
 */
public Block putField(FieldDefinition field)
{
    checkArgument(!field.getAccess().contains(STATIC), "Field is static: %s", field);
    putField(field.getDeclaringClass().getType(), field.getName(), field.getType());
    return this;
}

/** Emits a PUTFIELD instruction for the named field of {@code target}. */
public Block putField(ParameterizedType target, String fieldName, ParameterizedType fieldType)
{
    nodes.add(putFieldInstruction(target, fieldName, fieldType));
    return this;
}

//
// Static fields
//

/** Loads the value of the given static field definition onto the stack. */
public Block getStaticField(FieldDefinition field)
{
    getStaticField(field.getDeclaringClass().getType(), field.getName(), field.getType());
    return this;
}

/**
 * Loads the value of the given reflective static {@link Field} onto the stack.
 *
 * @throws IllegalArgumentException if the field is not static
 */
public Block getStaticField(Field field)
{
    checkArgument(Modifier.isStatic(field.getModifiers()), "Field is not static: %s", field);
    getStaticField(type(field.getDeclaringClass()), field.getName(), type(field.getType()));
    return this;
}

/** Emits a GETSTATIC instruction for the named field of {@code target}. */
public Block getStaticField(Class<?> target, String fieldName, Class<?> fieldType)
{
    nodes.add(getStaticInstruction(target, fieldName, fieldType));
    return this;
}

/** Emits a GETSTATIC instruction for the named field of {@code target}. */
public Block getStaticField(ParameterizedType target, String fieldName, ParameterizedType fieldType)
{
    nodes.add(getStaticInstruction(target, fieldName, fieldType));
    return this;
}

/** Emits a GETSTATIC instruction for {@code field} as declared on {@code target}. */
public Block getStaticField(ParameterizedType target, FieldDefinition field)
{
    nodes.add(getStaticInstruction(target, field.getName(), field.getType()));
    return this;
}

/** Stores the top of the stack into the given static field definition. */
public Block putStaticField(FieldDefinition field)
{
    putStaticField(field.getDeclaringClass().getType(), field.getName(), field.getType());
    return this;
}

/**
 * Stores the top of the stack into {@code field} as declared on {@code target}.
 *
 * @throws IllegalArgumentException if the field is not static
 */
public Block putStaticField(ParameterizedType target, FieldDefinition field)
{
    checkArgument(field.getAccess().contains(STATIC), "Field is not static: %s", field);
    putStaticField(target, field.getName(), field.getType());
    return this;
}

/** Emits a PUTSTATIC instruction for the named field of {@code target}. */
public Block putStaticField(ParameterizedType target, String fieldName, ParameterizedType fieldType)
{
    nodes.add(putStaticInstruction(target, fieldName, fieldType));
    return this;
}

//
// Load constants
//

/** Pushes the {@code this} reference onto the stack. */
public Block pushThis()
{
    getVariable("this");
    return this;
}

/** Pushes the {@code null} constant (ACONST_NULL) onto the stack. */
public Block pushNull()
{
    nodes.add(OpCodes.ACONST_NULL);
    return this;
}

/** Pushes the class constant for {@code type} onto the stack. */
public Block push(Class<?> type)
{
    nodes.add(loadClass(type));
    return this;
}

/** Pushes the class constant for {@code type} onto the stack. */
public Block push(ParameterizedType type)
{
    nodes.add(loadClass(type));
    return this;
}

/** Pushes a string constant onto the stack. */
public Block push(String value)
{
    nodes.add(Constant.loadString(value));
    return this;
}

/** Pushes a numeric constant onto the stack. */
public Block push(Number value)
{
    nodes.add(loadNumber(value));
    return this;
}

/** Pushes an int constant onto the stack. */
public Block push(int value)
{
    nodes.add(loadInt(value));
    return this;
}

/** Pushes a boolean constant onto the stack. */
public Block push(boolean value)
{
    nodes.add(loadBoolean(value));
    return this;
}

/**
 * Pushes the Java default value for {@code type}: nothing for {@code void},
 * zero for every primitive type, and {@code null} for reference types.
 */
public Block pushJavaDefault(Class<?> type)
{
    if (type == void.class) {
        return this;
    }
    if (type == boolean.class || type == byte.class || type == char.class || type == short.class || type == int.class) {
        return push(0);
    }
    if (type == long.class) {
        return push(0L);
    }
    if (type == float.class) {
        return push(0.0f);
    }
    if (type == double.class) {
        return push(0.0d);
    }
    return pushNull();
}

/** Loads the named local variable (resolved through the compiler context) onto the stack. */
public Block getVariable(String name)
{
    append(context.getVariable(name).getValue());
    return this;
}

/** Loads the named local variable and emits a CHECKCAST to {@code type}. */
public Block getVariable(String name, ParameterizedType type)
{
    getVariable(name);
    checkCast(type);
    return this;
}

/**
 * Stores the type-appropriate default value (zero or null) into
 * {@code variable}. A length-1 type string is interpreted as a JVM
 * primitive descriptor (B, Z, S, C, I, F, D, J); anything else is treated
 * as a reference type.
 */
public Block initializeVariable(LocalVariableDefinition variable)
{
    ParameterizedType type = variable.getType();
    if (type.getType().length() == 1) {
        switch (type.getType().charAt(0)) {
            case 'B':
            case 'Z':
            case 'S':
            case 'C':
            case 'I':
                // all sub-int primitives are int-sized on the operand stack
                nodes.add(loadInt(0));
                break;
            case 'F':
                nodes.add(loadFloat(0));
                break;
            case 'D':
                nodes.add(loadDouble(0));
                break;
            case 'J':
                nodes.add(loadLong(0));
                break;
            default:
                checkArgument(false, "Unknown type '%s'", variable.getType());
        }
    }
    else {
        nodes.add(Constant.loadNull());
    }
    nodes.add(VariableInstruction.storeVariable(variable));
    return this;
}

/** Loads the given local variable onto the stack. */
public Block getVariable(LocalVariableDefinition variable)
{
    nodes.add(VariableInstruction.loadVariable(variable));
    return this;
}

/** Stores the top of the stack into the named local variable. */
public Block putVariable(String name)
{
    append(context.getVariable(name).setValue());
    return this;
}

/** Pushes the class constant for {@code type} and stores it into {@code name}. */
public Block putVariable(String name, Class<?> type)
{
    nodes.add(loadClass(type));
    putVariable(name);
    return this;
}

/** Pushes the class constant for {@code type} and stores it into {@code name}. */
public Block putVariable(String name, ParameterizedType type)
{
    nodes.add(loadClass(type));
    putVariable(name);
    return this;
}

/** Pushes the string constant {@code value} and stores it into {@code name}. */
public Block putVariable(String name, String value)
{
    nodes.add(Constant.loadString(value));
    putVariable(name);
    return this;
}

/** Pushes the numeric constant {@code value} and stores it into {@code name}. */
public Block putVariable(String name, Number value)
{
    nodes.add(loadNumber(value));
    putVariable(name);
    return this;
}

/** Pushes the int constant {@code value} and stores it into {@code name}. */
public Block putVariable(String name, int value)
{
    nodes.add(loadInt(value));
    putVariable(name);
    return this;
}

/** Pushes the boolean constant {@code value} and stores it into {@code name}. */
public Block putVariable(String name, boolean value)
{
    nodes.add(loadBoolean(value));
    putVariable(name);
    return this;
}

/** Stores the top of the stack into the given local variable. */
public Block putVariable(LocalVariableDefinition variable)
{
    nodes.add(VariableInstruction.storeVariable(variable));
    return this;
}

/** Pushes the class constant for {@code type} and stores it into {@code variable}. */
public Block putVariable(LocalVariableDefinition variable, Class<?> type)
{
    nodes.add(loadClass(type));
    putVariable(variable);
    return this;
}

/** Pushes the class constant for {@code type} and stores it into {@code variable}. */
public Block putVariable(LocalVariableDefinition variable, ParameterizedType type)
{
    nodes.add(loadClass(type));
    putVariable(variable);
    return this;
}

/** Pushes the string constant {@code value} and stores it into {@code variable}. */
public Block putVariable(LocalVariableDefinition variable, String value)
{
    nodes.add(Constant.loadString(value));
    putVariable(variable);
    return this;
}

/** Pushes the numeric constant {@code value} and stores it into {@code variable}. */
public Block putVariable(LocalVariableDefinition variable, Number value)
{
    nodes.add(loadNumber(value));
    putVariable(variable);
    return this;
}

/** Pushes the int constant {@code value} and stores it into {@code variable}. */
public Block putVariable(LocalVariableDefinition variable, int value)
{
    nodes.add(loadInt(value));
    putVariable(variable);
    return this;
}

/** Pushes the boolean constant {@code value} and stores it into {@code variable}. */
public Block putVariable(LocalVariableDefinition variable, boolean value)
{
    nodes.add(loadBoolean(value));
    putVariable(variable);
    return this;
}

/**
 * Increments an integer-typed local variable in place (IINC).
 *
 * @throws IllegalArgumentException if the variable type is not byte, short or int
 */
public Block incrementVariable(LocalVariableDefinition variable, byte increment)
{
    String type = variable.getType().getClassName();
    Preconditions.checkArgument(ImmutableList.of("byte", "short", "int").contains(type), "variable must be an byte, short or int, but is %s", type);
    nodes.add(VariableInstruction.incrementVariable(variable, increment));
    return this;
}

/** Emits AALOAD: pops index and array reference, pushes the element. */
public Block getObjectArrayElement()
{
    nodes.add(OpCodes.AALOAD);
    return this;
}

/** Emits AASTORE: pops value, index and array reference, stores the value. */
public Block putObjectArrayElement()
{
    nodes.add(OpCodes.AASTORE);
    return this;
}

/**
 * Records a source line number for subsequent instructions. Non-positive
 * line numbers clear the current line; a line already visited in this
 * context is not emitted again (avoids duplicate LineNumberNode entries).
 */
public Block visitLineNumber(int line)
{
    if (line <= 0) {
        context.cleanLineNumber();
    }
    else if (!context.hasVisitedLine(line)) {
        nodes.add(new LineNumberNode(line));
        context.visitLine(line);
    }
    return this;
}

/** Emits every buffered node into the ASM method visitor, in order. */
@Override
public void accept(MethodVisitor visitor)
{
    for (ByteCodeNode node : nodes) {
        node.accept(visitor);
    }
}

@Override
public <T> T accept(ByteCodeNode parent, ByteCodeVisitor<T> visitor)
{
    return visitor.visitBlock(parent, this);
}
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to you under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.calcite.plan; import org.apache.calcite.DataContext; import org.apache.calcite.rel.type.RelDataType; import org.apache.calcite.rex.RexBuilder; import org.apache.calcite.rex.RexCall; import org.apache.calcite.rex.RexExecutable; import org.apache.calcite.rex.RexExecutorImpl; import org.apache.calcite.rex.RexInputRef; import org.apache.calcite.rex.RexNode; import org.apache.calcite.rex.RexUtil; import org.apache.calcite.rex.RexVisitorImpl; import org.apache.calcite.sql.SqlKind; import org.apache.calcite.sql.SqlOperator; import org.apache.calcite.sql.fun.SqlCastFunction; import org.apache.calcite.util.Pair; import com.google.common.collect.ImmutableList; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; /** * Checks whether one condition logically implies another. * * <p>If A &rArr; B, whenever A is true, B will be true also. 
* * <p>For example: * <ul> * <li>(x &gt; 10) &rArr; (x &gt; 5) * <li>(y = 10) &rArr; (y &lt; 30 OR x &gt; 30) * </ul> */ public class RexImplicationChecker { final RexBuilder builder; final RexExecutorImpl executor; final RelDataType rowType; public RexImplicationChecker( RexBuilder builder, RexExecutorImpl executor, RelDataType rowType) { this.builder = builder; this.executor = executor; this.rowType = rowType; } /** * Checks if condition first implies (&rArr;) condition second. * * <p>This reduces to SAT problem which is NP-Complete. * When this method says first implies second then it is definitely true. * But it cannot prove that first does not imply second. * * @param first first condition * @param second second condition * @return true if it can prove first &rArr; second; otherwise false i.e., * it doesn't know if implication holds */ public boolean implies(RexNode first, RexNode second) { // Validation if (!validate(first, second)) { return false; } RexCall firstCond = (RexCall) first; RexCall secondCond = (RexCall) second; // Get DNF RexNode firstDnf = RexUtil.toDnf(builder, first); RexNode secondDnf = RexUtil.toDnf(builder, second); // Check Trivial Cases if (firstDnf.isAlwaysFalse() || secondDnf.isAlwaysTrue()) { return true; } /** Decomposes DNF into List of Conjunctions. 
* * <p>For example, * {@code x > 10 AND y > 30) OR (z > 90)} * will be converted to * list of 2 conditions: * * <ul> * <li>(x > 10 AND y > 30)</li> * <li>z > 90</li> * </ul> */ List<RexNode> firstDnfs = RelOptUtil.disjunctions(firstDnf); List<RexNode> secondDnfs = RelOptUtil.disjunctions(secondDnf); for (RexNode f : firstDnfs) { if (!f.isAlwaysFalse()) { // Check if f implies at least // one of the conjunctions in list secondDnfs boolean implyOneConjunction = false; for (RexNode s : secondDnfs) { if (s.isAlwaysFalse()) { // f cannot imply s continue; } if (impliesConjunction(f, s)) { // Satisfies one of the condition, so lets // move to next conjunction in firstDnfs implyOneConjunction = true; break; } } // If f could not imply even one conjunction in // secondDnfs, then final implication may be false if (!implyOneConjunction) { return false; } } } return true; } /** Returns whether first implies second (both are conjunctions). */ private boolean impliesConjunction(RexNode first, RexNode second) { final InputUsageFinder firstUsageFinder = new InputUsageFinder(); final InputUsageFinder secondUsageFinder = new InputUsageFinder(); RexUtil.apply(firstUsageFinder, new ArrayList<RexNode>(), first); RexUtil.apply(secondUsageFinder, new ArrayList<RexNode>(), second); // Check Support if (!checkSupport(firstUsageFinder, secondUsageFinder)) { return false; } List<Pair<RexInputRef, RexNode>> usageList = new ArrayList<>(); for (Map.Entry<RexInputRef, InputRefUsage<SqlOperator, RexNode>> entry : firstUsageFinder.usageMap.entrySet()) { final Pair<SqlOperator, RexNode> pair = entry.getValue().usageList.get(0); usageList.add(Pair.of(entry.getKey(), pair.getValue())); } // Get the literals from first conjunction and executes second conjunction // using them. // // E.g., for // x > 30 &rArr; x > 10, // we will replace x by 30 in second expression and execute it i.e., // 30 > 10 // // If it's true then we infer implication. 
final DataContext dataValues = VisitorDataContext.of(rowType, usageList); if (dataValues == null) { return false; } ImmutableList<RexNode> constExps = ImmutableList.of(second); final RexExecutable exec = executor.getExecutable(builder, constExps, rowType); Object[] result; exec.setDataContext(dataValues); try { result = exec.execute(); } catch (Exception e) { // TODO: CheckSupport should not allow this exception to be thrown // Need to monitor it and handle all the cases raising them. return false; } return result != null && result.length == 1 && result[0] instanceof Boolean && (Boolean) result[0]; } /** * Looks at the usage of variables in first and second conjunction to decide * whether this kind of expression is currently supported for proving first * implies second. * * <ol> * <li>Variables should be used only once in both the conjunction against * given set of operations only: >, <, <=, >=, =, != * * <li>All the variables used in second condition should be used even in the * first. 
* * <li>If operator used for variable in first is op1 and op2 for second, then * we support these combination for conjunction (op1, op2) then op1, op2 * belongs to one of the following sets: * * <ul> * <li>(<, <=) X (<, <=) <i>note: X represents cartesian product</i> * <li>(> / >=) X (>, >=) * <li>(=) X (>, >=, <, <=, =, !=) * <li>(!=, =) * </ul> * </ol> * * @return whether input usage pattern is supported */ private boolean checkSupport(InputUsageFinder firstUsageFinder, InputUsageFinder secondUsageFinder) { final Map<RexInputRef, InputRefUsage<SqlOperator, RexNode>> firstUsageMap = firstUsageFinder.usageMap; final Map<RexInputRef, InputRefUsage<SqlOperator, RexNode>> secondUsageMap = secondUsageFinder.usageMap; for (Map.Entry<RexInputRef, InputRefUsage<SqlOperator, RexNode>> entry : firstUsageMap.entrySet()) { if (entry.getValue().usageCount > 1) { return false; } } for (Map.Entry<RexInputRef, InputRefUsage<SqlOperator, RexNode>> entry : secondUsageMap.entrySet()) { final InputRefUsage<SqlOperator, RexNode> secondUsage = entry.getValue(); if (secondUsage.usageCount > 1 || secondUsage.usageList.size() != 1) { return false; } final InputRefUsage<SqlOperator, RexNode> firstUsage = firstUsageMap.get(entry.getKey()); if (firstUsage == null || firstUsage.usageList.size() != 1) { return false; } final Pair<SqlOperator, RexNode> fUse = firstUsage.usageList.get(0); final Pair<SqlOperator, RexNode> sUse = secondUsage.usageList.get(0); final SqlKind fKind = fUse.getKey().getKind(); if (fKind != SqlKind.EQUALS) { switch (sUse.getKey().getKind()) { case GREATER_THAN: case GREATER_THAN_OR_EQUAL: if (!(fKind == SqlKind.GREATER_THAN) && !(fKind == SqlKind.GREATER_THAN_OR_EQUAL)) { return false; } break; case LESS_THAN: case LESS_THAN_OR_EQUAL: if (!(fKind == SqlKind.LESS_THAN) && !(fKind == SqlKind.LESS_THAN_OR_EQUAL)) { return false; } break; default: return false; } } } return true; } private boolean validate(RexNode first, RexNode second) { return first instanceof RexCall && 
second instanceof RexCall; } /** * Visitor that builds a usage map of inputs used by an expression. * * <p>E.g: for x > 10 AND y < 20 AND x = 40, usage map is as follows: * <ul> * <li>key: x value: {(>, 10),(=, 40), usageCount = 2} * <li>key: y value: {(>, 20), usageCount = 1} * </ul> */ private static class InputUsageFinder extends RexVisitorImpl<Void> { public final Map<RexInputRef, InputRefUsage<SqlOperator, RexNode>> usageMap = new HashMap<>(); public InputUsageFinder() { super(true); } public Void visitInputRef(RexInputRef inputRef) { InputRefUsage<SqlOperator, RexNode> inputRefUse = getUsageMap(inputRef); inputRefUse.usageCount++; return null; } @Override public Void visitCall(RexCall call) { switch (call.getOperator().getKind()) { case GREATER_THAN: case GREATER_THAN_OR_EQUAL: case LESS_THAN: case LESS_THAN_OR_EQUAL: case EQUALS: case NOT_EQUALS: updateUsage(call); break; default: } return super.visitCall(call); } private void updateUsage(RexCall call) { final List<RexNode> operands = call.getOperands(); RexNode first = removeCast(operands.get(0)); RexNode second = removeCast(operands.get(1)); if (first.isA(SqlKind.INPUT_REF) && second.isA(SqlKind.LITERAL)) { updateUsage(call.getOperator(), (RexInputRef) first, second); } if (first.isA(SqlKind.LITERAL) && second.isA(SqlKind.INPUT_REF)) { updateUsage(reverse(call.getOperator()), (RexInputRef) second, first); } } private SqlOperator reverse(SqlOperator op) { return RelOptUtil.op(op.getKind().reverse(), op); } private static RexNode removeCast(RexNode inputRef) { if (inputRef instanceof RexCall) { final RexCall castedRef = (RexCall) inputRef; final SqlOperator operator = castedRef.getOperator(); if (operator instanceof SqlCastFunction) { inputRef = castedRef.getOperands().get(0); } } return inputRef; } private void updateUsage(SqlOperator op, RexInputRef inputRef, RexNode literal) { final InputRefUsage<SqlOperator, RexNode> inputRefUse = getUsageMap(inputRef); Pair<SqlOperator, RexNode> use = Pair.of(op, 
literal); inputRefUse.usageList.add(use); } private InputRefUsage<SqlOperator, RexNode> getUsageMap(RexInputRef rex) { InputRefUsage<SqlOperator, RexNode> inputRefUse = usageMap.get(rex); if (inputRefUse == null) { inputRefUse = new InputRefUsage<>(); usageMap.put(rex, inputRefUse); } return inputRefUse; } } /** * Usage of a {@link RexInputRef} in an expression. */ private static class InputRefUsage<T1, T2> { private final List<Pair<T1, T2>> usageList = new ArrayList<>(); private int usageCount = 0; } } // End RexImplicationChecker.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.sling.launchpad.app; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.File; import java.io.FileReader; import java.io.FileWriter; import java.io.IOException; import java.io.InputStreamReader; import java.io.LineNumberReader; import java.io.OutputStreamWriter; import java.lang.management.LockInfo; import java.lang.management.ManagementFactory; import java.lang.management.MonitorInfo; import java.lang.management.ThreadInfo; import java.lang.management.ThreadMXBean; import java.math.BigInteger; import java.net.ConnectException; import java.net.InetSocketAddress; import java.net.ServerSocket; import java.net.Socket; import java.security.SecureRandom; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; import java.util.Set; /** * The <code>ControlListener</code> class is a helper class for the {@link Main} * class to support in Sling standalone application process communication. This * class implements the client and server sides of a TCP/IP based communication * channel to control a running Sling application. 
* <p> * The server side listens for commands on a configurable host and port &endash; * <code>localhost:63000</code> by default &endash; supporting the following * commands: * <table> * <tr> * <th>Command</th> * <th>Description</th> * </tr> * <tr> * <td><code>status</code></td> * <td>Request status information. Currently only <i>OK</i> is sent back. If no * connection can be created to the server the client assumes Sling is not * running.</td> * </tr> * <tr> * <td><code>stop</code></td> * <td>Requests Sling to shutdown.</td> * </tr> * </table> */ class ControlListener implements Runnable { // command sent by the client to cause Sling to shutdown static final String COMMAND_STOP = "stop"; // command sent by the client to check for the status of the server static final String COMMAND_STATUS = "status"; // command sent by the client to request a thread dump static final String COMMAND_THREADS = "threads"; // the response sent by the server if the command executed successfully private static final String RESPONSE_OK = "OK"; // the status response sent by the server when shutting down private static final String RESPONSE_STOPPING = "STOPPING"; // The default interface to listen on private static final String DEFAULT_LISTEN_INTERFACE = "127.0.0.1"; // The default port to listen on and to connect to - we select it randomly private static final int DEFAULT_LISTEN_PORT = 0; // The reference to the Main class to shutdown on request private final Main slingMain; private final String listenSpec; private String secretKey; private InetSocketAddress socketAddress; private volatile Thread shutdownThread = null; /** * Creates an instance of this control support class. * <p> * The host (name or address) and port number of the socket is defined by * the <code>listenSpec</code> parameter. This parameter is defined as * <code>[ host ":" ] port</code>. If the parameter is empty or * <code>null</code> it defaults to <i>localhost:0</i>. 
If the host name * is missing it defaults to <i>localhost</i>. * * @param slingMain The Main class reference. This is only required if this * instance is used for the server side to listen for remote stop * commands. Otherwise this argument may be <code>null</code>. * @param listenSpec The specification for the host and port for the socket * connection. See above for the format of this parameter. */ ControlListener(final Main slingMain, final String listenSpec) { this.slingMain = slingMain; this.listenSpec = listenSpec; // socketAddress = this.getSocketAddress(listenSpec, selectNewPort); } /** * Implements the server side of the control connection starting a thread * listening on the host and port configured on setup of this instance. */ boolean listen() { final File configFile = getConfigFile(); if (configFile.canRead() && statusServer() == 0) { // server already running, fail Main.error("Sling already active in " + this.slingMain.getSlingHome(), null); return false; } configFile.delete(); final Thread listener = new Thread(this); listener.setDaemon(true); listener.setName("Apache Sling Control Listener (inactive)"); listener.start(); return true; } /** * Implements the client side of the control connection sending the command * to shutdown Sling. */ int shutdownServer() { return sendCommand(COMMAND_STOP); } /** * Implements the client side of the control connection sending the command * to check whether Sling is active. */ int statusServer() { return sendCommand(COMMAND_STATUS); } /** * Implements the client side of the control connection sending the command * to retrieve a thread dump. */ int dumpThreads() { return sendCommand(COMMAND_THREADS); } // ---------- Runnable interface /** * Implements the server thread receiving commands from clients and acting * upon them. 
*/ @Override public void run() { this.configure(false); final ServerSocket server; try { server = new ServerSocket(); server.bind(this.socketAddress); writePortToConfigFile(getConfigFile(), new InetSocketAddress(server.getInetAddress(), server.getLocalPort()), this.secretKey); Thread.currentThread().setName( "Apache Sling Control Listener@" + server.getInetAddress() + ":" + server.getLocalPort()); Main.info("Apache Sling Control Listener started", null); } catch (final IOException ioe) { Main.error("Failed to start Apache Sling Control Listener", ioe); return; } long delay = 0; try { while (true) { final Socket s; try { s = server.accept(); } catch (IOException ioe) { // accept terminated, most probably due to Socket.close() // just end the loop and exit break; } // delay processing after unsuccessful attempts if (delay > 0) { Main.info(s.getRemoteSocketAddress() + ": Delay: " + (delay / 1000), null); try { Thread.sleep(delay); } catch (InterruptedException e) { } } try { final String commandLine = readLine(s); if (commandLine == null) { final String msg = "ERR: missing command"; writeLine(s, msg); continue; } final int blank = commandLine.indexOf(' '); if (blank < 0) { final String msg = "ERR: missing key"; writeLine(s, msg); continue; } if (!secretKey.equals(commandLine.substring(0, blank))) { final String msg = "ERR: wrong key"; writeLine(s, msg); delay = (delay > 0) ? delay * 2 : 1000L; continue; } final String command = commandLine.substring(blank + 1); Main.info(s.getRemoteSocketAddress() + ">" + command, null); if (COMMAND_STOP.equals(command)) { if (this.shutdownThread != null) { writeLine(s, RESPONSE_STOPPING); } else { this.shutdownThread = new Thread("Apache Sling Control Listener: Shutdown") { @Override public void run() { slingMain.doStop(); try { server.close(); } catch (final IOException ignore) { } } }; this.shutdownThread.start(); writeLine(s, RESPONSE_OK); } } else if (COMMAND_STATUS.equals(command)) { writeLine(s, (this.shutdownThread == null) ? 
RESPONSE_OK : RESPONSE_STOPPING); } else if (COMMAND_THREADS.equals(command)) { dumpThreads(s); } else { final String msg = "ERR:" + command; writeLine(s, msg); } } finally { try { s.close(); } catch (IOException ignore) { } } } } catch (final IOException ioe) { Main.error("Failure reading from client", ioe); } finally { try { server.close(); } catch (final IOException ignore) { } } getConfigFile().delete(); // everything has stopped and when this thread terminates, // the VM should stop. If there are still some non-daemon threads // active, this will not happen, so we force this here ... Main.info("Apache Sling terminated, exiting Java VM", null); this.slingMain.terminateVM(0); } // ---------- socket support private void dumpThreads(final Socket socket) throws IOException { final ThreadMXBean threadBean = ManagementFactory.getThreadMXBean(); final ThreadInfo[] threadInfos = threadBean.dumpAllThreads(true, true); for (ThreadInfo thread : threadInfos) { printThread(socket, thread); // add locked synchronizers final LockInfo[] locks = thread.getLockedSynchronizers(); writeLine(socket, "-"); writeLine(socket, "- Locked ownable synchronizers:"); if (locks.length > 0) { for (LockInfo li : locks) { writeLine(socket, String.format("- - locked %s", formatLockInfo( li.getClassName(), li.getIdentityHashCode() ) )); } } else { writeLine(socket, "- - None"); } // empty separator line writeLine(socket, "-"); } final long[] deadLocked; if (threadBean.isSynchronizerUsageSupported()) { deadLocked = threadBean.findDeadlockedThreads(); } else { deadLocked = threadBean.findMonitorDeadlockedThreads(); } if (deadLocked != null) { final ThreadInfo[] dl = threadBean.getThreadInfo(deadLocked, true, true); final Set<ThreadInfo> dlSet = new HashSet<ThreadInfo>(Arrays.asList(dl)); int deadlockCount = 0; for (ThreadInfo current : dl) { if (dlSet.remove(current)) { // find and record a single deadlock ArrayList<ThreadInfo> loop = new ArrayList<ThreadInfo>(); do { loop.add(current); for 
(ThreadInfo cand : dl) { if (cand.getThreadId() == current.getLockOwnerId()) { current = (dlSet.remove(cand)) ? cand : null; break; } } } while (current != null); deadlockCount++; // print the deadlock writeLine(socket, "-Found one Java-level deadlock:"); writeLine(socket, "-============================="); for (ThreadInfo thread : loop) { writeLine(socket, String.format("-\"%s\" #%d", thread.getThreadName(), thread.getThreadId() )); writeLine(socket, String.format("- waiting on %s", formatLockInfo( thread.getLockInfo().getClassName(), thread.getLockInfo().getIdentityHashCode() ) )); writeLine(socket, String.format("- which is held by \"%s\" #%d", thread.getLockOwnerName(), thread.getLockOwnerId() )); } writeLine(socket, "-"); writeLine(socket, "-Java stack information for the threads listed above:"); writeLine(socket, "-==================================================="); for (ThreadInfo thread : loop) { printThread(socket, thread); } writeLine(socket, "-"); } } // "Thread-8": // waiting to lock monitor 7f89fb80da08 (object 7f37a0968, a java.lang.Object), // which is held by "Thread-7" // "Thread-7": // waiting to lock monitor 7f89fb80b0b0 (object 7f37a0958, a java.lang.Object), // which is held by "Thread-8" writeLine(socket, String.format("-Found %d deadlocks.", deadlockCount )); } writeLine(socket, RESPONSE_OK); } private String formatLockInfo(final String className, final int objectId) { return String.format("<%08x> (a %s)", objectId, className); } private void printThread(final Socket socket, final ThreadInfo thread) throws IOException { writeLine(socket, String.format("-\"%s\" #%d", thread.getThreadName(), thread.getThreadId() )); writeLine(socket, String.format("- java.lang.Thread.State: %s", thread.getThreadState() )); final MonitorInfo[] monitors = thread.getLockedMonitors(); final StackTraceElement[] trace = thread.getStackTrace(); for (int i=0; i < trace.length; i++) { StackTraceElement ste = trace[i]; if (ste.isNativeMethod()) { writeLine(socket, 
String.format("- at %s.%s(Native Method)", ste.getClassName(), ste.getMethodName() )); } else { writeLine(socket, String.format("- at %s.%s(%s:%d)", ste.getClassName(), ste.getMethodName(), ste.getFileName(), ste.getLineNumber() )); } if (i == 0 && thread.getLockInfo() != null) { writeLine(socket, String.format("- - waiting on %s%s", formatLockInfo( thread.getLockInfo().getClassName(), thread.getLockInfo().getIdentityHashCode() ), (thread.getLockOwnerId() >= 0) ? String.format(" owned by \"%s\" #%d", thread.getLockOwnerName(), thread.getLockOwnerId() ):"" )); } for (MonitorInfo mi : monitors) { if (i == mi.getLockedStackDepth()) { writeLine(socket, String.format("- - locked %s", formatLockInfo( mi.getClassName(), mi.getIdentityHashCode() ) )); } } } } /** * Sends the given command to the server indicated by the configured * socket address and logs the reply. * * @param command The command to send * * @return A code indicating success of sending the command. */ private int sendCommand(final String command) { if (configure(true)) { if (this.secretKey == null) { Main.info("Missing secret key to protect sending '" + command + "' to " + this.socketAddress, null); return 4; // LSB code for unknown status } Socket socket = null; try { socket = new Socket(); socket.connect(this.socketAddress); writeLine0(socket, this.secretKey + " " + command); final String result = readLine(socket); Main.info("Sent '" + command + "' to " + this.socketAddress + ": " + result, null); return 0; // LSB code for everything's fine } catch (final ConnectException ce) { Main.info("No Apache Sling running at " + this.socketAddress, null); return 3; // LSB code for programm not running } catch (final IOException ioe) { Main.error("Failed sending '" + command + "' to " + this.socketAddress, ioe); return 1; // LSB code for programm dead } finally { if (socket != null) { try { socket.close(); } catch (IOException ignore) { } } } } Main.info("No socket address to send '" + command + "' to", null); 
return 4; // LSB code for unknown status } private String readLine(final Socket socket) throws IOException { final BufferedReader br = new BufferedReader(new InputStreamReader( socket.getInputStream(), "UTF-8")); StringBuilder b = new StringBuilder(); boolean more = true; while (more) { String s = br.readLine(); if (s != null && s.startsWith("-")) { s = s.substring(1); } else { more = false; } if (b.length() > 0) { b.append("\r\n"); } b.append(s); } return b.toString(); } private void writeLine(final Socket socket, final String line) throws IOException { Main.info(socket.getRemoteSocketAddress() + "<" + line, null); this.writeLine0(socket, line); } private void writeLine0(final Socket socket, final String line) throws IOException { final BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(socket.getOutputStream(), "UTF-8")); bw.write(line); bw.write("\r\n"); bw.flush(); } /** * Read the port from the config file * @return The port or null */ private boolean configure(final boolean fromConfigFile) { boolean result = false; if (fromConfigFile) { final File configFile = this.getConfigFile(); if (configFile.canRead()) { try ( final LineNumberReader lnr = new LineNumberReader(new FileReader(configFile))) { this.socketAddress = getSocketAddress(lnr.readLine()); this.secretKey = lnr.readLine(); result = true; } catch (final IOException ignore) { // ignore } } } else { this.socketAddress = getSocketAddress(this.listenSpec); this.secretKey = generateKey(); result = true; } return result; } private static String generateKey() { return new BigInteger(165, new SecureRandom()).toString(32); } /** * Return the control port file */ private File getConfigFile() { final File configDir = new File(this.slingMain.getSlingHome(), "conf"); return new File(configDir, "controlport"); } private static InetSocketAddress getSocketAddress(String listenSpec) { try { final String address; final int port; if (listenSpec == null) { address = DEFAULT_LISTEN_INTERFACE; port = 
DEFAULT_LISTEN_PORT; } else { final int colon = listenSpec.indexOf(':'); if (colon < 0) { address = DEFAULT_LISTEN_INTERFACE; port = Integer.parseInt(listenSpec); } else { address = listenSpec.substring(0, colon); port = Integer.parseInt(listenSpec.substring(colon + 1)); } } final InetSocketAddress addr = new InetSocketAddress(address, port); if (!addr.isUnresolved()) { return addr; } Main.error("Unknown host in '" + listenSpec, null); } catch (final NumberFormatException nfe) { Main.error("Cannot parse port number from '" + listenSpec + "'", null); } return null; } private static void writePortToConfigFile(final File configFile, final InetSocketAddress socketAddress, final String secretKey) { configFile.getParentFile().mkdirs(); FileWriter fw = null; try { fw = new FileWriter(configFile); fw.write(socketAddress.getAddress().getHostAddress()); fw.write(':'); fw.write(String.valueOf(socketAddress.getPort())); fw.write('\n'); fw.write(secretKey); fw.write('\n'); } catch (final IOException ignore) { // ignore } finally { if (fw != null) { try { fw.close(); } catch (final IOException ignore) { } } } } }
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ package org.elasticsearch.xpack.watcher.test; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponse; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.admin.indices.get.GetIndexResponse; import org.elasticsearch.action.admin.indices.template.get.GetComposableIndexTemplateAction; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.analysis.common.CommonAnalysisPlugin; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MappingMetadata; import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.license.LicenseService; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.MockMustacheScriptEngine; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.test.ESIntegTestCase; import 
org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.InternalTestCluster; import org.elasticsearch.test.disruption.ServiceDisruptionScheme; import org.elasticsearch.test.store.MockFSIndexStore; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.ssl.SSLService; import org.elasticsearch.xpack.core.watcher.WatcherState; import org.elasticsearch.xpack.core.watcher.execution.ExecutionState; import org.elasticsearch.xpack.core.watcher.execution.TriggeredWatchStoreField; import org.elasticsearch.xpack.core.watcher.history.HistoryStoreField; import org.elasticsearch.xpack.core.watcher.support.WatcherIndexTemplateRegistryField; import org.elasticsearch.xpack.core.watcher.support.xcontent.XContentSource; import org.elasticsearch.xpack.core.watcher.transport.actions.service.WatcherServiceRequestBuilder; import org.elasticsearch.xpack.core.watcher.transport.actions.stats.WatcherStatsRequestBuilder; import org.elasticsearch.xpack.core.watcher.transport.actions.stats.WatcherStatsResponse; import org.elasticsearch.xpack.core.watcher.watch.ClockMock; import org.elasticsearch.xpack.core.watcher.watch.Watch; import org.elasticsearch.xpack.ilm.IndexLifecycle; import org.elasticsearch.xpack.watcher.ClockHolder; import org.elasticsearch.xpack.watcher.notification.email.Authentication; import org.elasticsearch.xpack.watcher.notification.email.Email; import org.elasticsearch.xpack.watcher.notification.email.EmailService; import org.elasticsearch.xpack.watcher.notification.email.Profile; import org.elasticsearch.xpack.watcher.trigger.ScheduleTriggerEngineMock; import org.elasticsearch.xpack.watcher.watch.WatchParser; import org.hamcrest.Matcher; import org.junit.After; import org.junit.Before; import java.time.ZoneOffset; import java.time.ZonedDateTime; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; 
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Consumer;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;

import static org.elasticsearch.index.query.QueryBuilders.boolQuery;
import static org.elasticsearch.index.query.QueryBuilders.matchQuery;
import static org.elasticsearch.test.ESIntegTestCase.Scope.SUITE;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.xpack.core.watcher.support.WatcherIndexTemplateRegistryField.HISTORY_TEMPLATE_NAME;
import static org.elasticsearch.xpack.core.watcher.support.WatcherIndexTemplateRegistryField.TRIGGERED_TEMPLATE_NAME;
import static org.elasticsearch.xpack.core.watcher.support.WatcherIndexTemplateRegistryField.WATCHES_TEMPLATE_NAME;
import static org.hamcrest.Matchers.emptyArray;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.core.Is.is;
import static org.hamcrest.core.IsNot.not;
import static org.mockito.Mockito.mock;

/**
 * Base class for Watcher integration tests.
 *
 * Provides per-test setup/teardown of the Watcher service, optional "time warp" mode
 * (frozen, manually-advanced clocks plus mock trigger engines), randomized creation of the
 * {@code .watches} / {@code .triggered_watches} indices (sometimes behind aliases), and a
 * collection of assertion helpers over the watch history index.
 *
 * Note that SLM has been observed to cause timing issues during testsuite teardown:
 * https://github.com/elastic/elasticsearch/issues/50302
 */
@ClusterScope(scope = SUITE, numClientNodes = 0, maxNumDataNodes = 3)
public abstract class AbstractWatcherIntegrationTestCase extends ESIntegTestCase {

    // Set in _setup() only when timeWarped() is true; accessed via timeWarp().
    private TimeWarp timeWarp;

    @Override
    protected Settings nodeSettings(int nodeOrdinal) {
        return Settings.builder()
            .put(super.nodeSettings(nodeOrdinal))
            // Watcher tests run without security and with a trial (self-generated) license.
            .put(XPackSettings.SECURITY_ENABLED.getKey(), false)
            .put(LicenseService.SELF_GENERATED_LICENSE_TYPE.getKey(), "trial")
            // we do this by default in core, but for watcher this isn't needed and only adds noise.
            .put("index.store.mock.check_index_on_close", false)
            // watcher settings that should work despite randomization
            .put("xpack.watcher.execution.scroll.size", randomIntBetween(1, 100))
            .put("xpack.watcher.watch.scroll.size", randomIntBetween(1, 100))
            .put("indices.lifecycle.history_index_enabled", false)
            .build();
    }

    /**
     * Excludes Watcher's own index templates from the base test framework's template handling,
     * since this suite manages (and asserts on) those templates itself.
     */
    @Override
    protected Set<String> excludeTemplates() {
        Set<String> excludes = new HashSet<>();
        excludes.addAll(Arrays.asList(WatcherIndexTemplateRegistryField.TEMPLATE_NAMES));
        return Collections.unmodifiableSet(excludes);
    }

    @Override
    protected Collection<Class<? extends Plugin>> getMockPlugins() {
        Set<Class<? extends Plugin>> plugins = new HashSet<>(super.getMockPlugins());
        // security has its own transport service, so drop the mock one
        plugins.remove(MockTransportService.TestPlugin.class);
        // we have to explicitly add it otherwise we will fail to set the check_index_on_close setting
        plugins.add(MockFSIndexStore.TestPlugin.class);
        plugins.add(MockMustacheScriptEngine.TestPlugin.class);
        return plugins;
    }

    @Override
    protected Collection<Class<? extends Plugin>> nodePlugins() {
        return pluginTypes();
    }

    /**
     * Plugins to install on each node: the time-warped or regular Watcher plugin depending on
     * {@link #timeWarped()}, plus common-analysis and ILM support.
     */
    protected List<Class<? extends Plugin>> pluginTypes() {
        List<Class<? extends Plugin>> types = new ArrayList<>();
        if (timeWarped()) {
            types.add(TimeWarpedWatcher.class);
        } else {
            types.add(LocalStateWatcher.class);
        }
        types.add(CommonAnalysisPlugin.class);
        // ILM is required for watcher template index settings
        types.add(IndexLifecycle.class);
        return types;
    }

    /**
     * @return whether the test suite should run in time warp mode. By default this will be determined globally
     * to all test suites based on {@code -Dtests.timewarp} system property (when missing, defaults to
     * {@code true}). If a test suite requires to force the mode or force not running under this mode
     * this method can be overridden.
     */
    protected boolean timeWarped() {
        return true;
    }

    @Before
    public void _setup() throws Exception {
        if (timeWarped()) {
            timeWarp = new TimeWarp(internalCluster().getInstances(ScheduleTriggerEngineMock.class),
                (ClockMock)getInstanceFromMaster(ClockHolder.class).clock);
        }

        if (internalCluster().size() > 0) {
            ensureLicenseEnabled();

            if (timeWarped()) {
                // now that the license is enabled and valid we can freeze all nodes clocks
                logger.info("[{}#{}]: freezing time on nodes", getTestClass().getSimpleName(), getTestName());
                TimeFreezeDisruption ice = new TimeFreezeDisruption();
                internalCluster().setDisruptionScheme(ice);
                ice.startDisrupting();
            }
            // restart watcher so it picks up the (possibly aliased) indices created below
            stopWatcher();
            createWatcherIndicesOrAliases();
            startWatcher();
        }
    }

    @After
    public void _cleanup() throws Exception {
        // Clear all internal watcher state for the next test method:
        logger.info("[#{}]: clearing watcher state", getTestName());
        stopWatcher();
    }

    /**
     * In order to test, that .watches and .triggered-watches indices can also point to an alias, we will rarely create those
     * after starting watcher
     *
     * The idea behind this is the possible use of the migration helper for upgrades, see
     * https://github.com/elastic/elasticsearch-migration/
     *
     */
    private void createWatcherIndicesOrAliases() throws Exception {
        if (internalCluster().size() > 0) {
            ensureWatcherTemplatesAdded();
            // alias for .watches, setting the index template to the same as well
            String watchIndexName;
            String triggeredWatchIndexName;
            if (randomBoolean()) {
                // Create an index to get the template
                String tempIndex = ".watches" + randomAlphaOfLength(5).toLowerCase(Locale.ROOT);
                CreateIndexResponse response = client().admin().indices().prepareCreate(tempIndex)
                    .setCause("Index to test aliases with .watches index")
                    .addAlias(new Alias(Watch.INDEX))
                    .get();
                assertAcked(response);

                // Now replace it with a randomly named index
                watchIndexName = randomAlphaOfLengthBetween(5,10).toLowerCase(Locale.ROOT);
                replaceWatcherIndexWithRandomlyNamedIndex(Watch.INDEX, watchIndexName);

                logger.info("set alias for .watches index to [{}]", watchIndexName);
            } else {
                watchIndexName = Watch.INDEX;
                Settings.Builder builder = Settings.builder();
                if (randomBoolean()) {
                    builder.put("index.number_of_shards", scaledRandomIntBetween(1, 5));
                }
                assertAcked(client().admin().indices().prepareCreate(watchIndexName).setSettings(builder));
            }

            // alias for .triggered-watches, ensuring the index template is set appropriately
            if (randomBoolean()) {
                String tempIndex = ".triggered_watches-alias-index";
                CreateIndexResponse response = client().admin().indices().prepareCreate(tempIndex)
                    .setCause("Index to test aliases with .triggered-watches index")
                    .addAlias(new Alias(TriggeredWatchStoreField.INDEX_NAME))
                    .get();
                assertAcked(response);

                // Now replace it with a randomly-named index (never the same name as the watch index)
                triggeredWatchIndexName = randomValueOtherThan(watchIndexName,
                    () -> randomAlphaOfLengthBetween(5,10).toLowerCase(Locale.ROOT));
                replaceWatcherIndexWithRandomlyNamedIndex(TriggeredWatchStoreField.INDEX_NAME, triggeredWatchIndexName);
                logger.info("set alias for .triggered-watches index to [{}]", triggeredWatchIndexName);
            } else {
                triggeredWatchIndexName = TriggeredWatchStoreField.INDEX_NAME;
                assertAcked(client().admin().indices().prepareCreate(triggeredWatchIndexName));
            }

            String historyIndex = HistoryStoreField.getHistoryIndexNameForTime(ZonedDateTime.now(ZoneOffset.UTC));
            assertAcked(client().admin().indices().prepareCreate(historyIndex));
            logger.info("creating watch history index [{}]", historyIndex);
            ensureGreen(historyIndex, watchIndexName, triggeredWatchIndexName);
        }
    }

    /**
     * Clones the index behind {@code originalIndexOrAlias} (mapping plus settings, minus the
     * per-index metadata settings) into a new index named {@code to}, deletes the original
     * concrete index, and points an alias with the original name at the new index.
     */
    public void replaceWatcherIndexWithRandomlyNamedIndex(String originalIndexOrAlias, String to) {
        GetIndexResponse index = client().admin().indices().prepareGetIndex().setIndices(originalIndexOrAlias).get();
        MappingMetadata mapping = index.getMappings().get(index.getIndices()[0]);

        Settings settings = index.getSettings().get(index.getIndices()[0]);
        Settings.Builder newSettings = Settings.builder().put(settings);
        // strip per-index metadata that cannot be applied to a newly created index
        newSettings.remove("index.provided_name");
        newSettings.remove("index.uuid");
        newSettings.remove("index.creation_date");
        newSettings.remove("index.version.created");

        CreateIndexResponse createIndexResponse = client().admin().indices().prepareCreate(to)
            .setMapping(mapping.sourceAsMap())
            .setSettings(newSettings)
            .get();
        assertTrue(createIndexResponse.isAcknowledged());
        ensureGreen(to);

        // if the original name was itself an alias, resolve it to its single concrete index first
        AtomicReference<String> originalIndex = new AtomicReference<>(originalIndexOrAlias);
        boolean watchesIsAlias = client().admin().indices().prepareGetAliases(originalIndexOrAlias).get().getAliases().isEmpty() == false;
        if (watchesIsAlias) {
            GetAliasesResponse aliasesResponse = client().admin().indices().prepareGetAliases(originalIndexOrAlias).get();
            assertEquals(1, aliasesResponse.getAliases().size());
            aliasesResponse.getAliases().forEach((aliasRecord) -> {
                assertEquals(1, aliasRecord.value.size());
                originalIndex.set(aliasRecord.key);
            });
        }
        client().admin().indices().prepareDelete(originalIndex.get()).get();
        client().admin().indices().prepareAliases().addAlias(to, originalIndexOrAlias).get();
    }

    protected TimeWarp timeWarp() {
        assert timeWarped() : "cannot access TimeWarp when test context is not time warped";
        return timeWarp;
    }

    public boolean randomizeNumberOfShardsAndReplicas() {
        return false;
    }

    // Number of documents in {@code index} matching {@code query}, after a refresh.
    protected long docCount(String index, QueryBuilder query) {
        refresh();
        return docCount(index, SearchSourceBuilder.searchSource().query(query));
    }

    // Number of watch history records matching {@code query}, after a refresh.
    protected long watchRecordCount(QueryBuilder query) {
        refresh();
        return docCount(HistoryStoreField.INDEX_PREFIX_WITH_TEMPLATE + "*", SearchSourceBuilder.searchSource().query(query));
    }

    protected long docCount(String index, SearchSourceBuilder source) {
        SearchRequestBuilder builder = client().prepareSearch(index).setSource(source).setSize(0);
        return builder.get().getHits().getTotalHits().value;
    }

    protected SearchResponse searchHistory(SearchSourceBuilder builder) {
        return client().prepareSearch(HistoryStoreField.INDEX_PREFIX_WITH_TEMPLATE + "*").setSource(builder).get();
    }

    protected <T> T getInstanceFromMaster(Class<T> type) {
        return internalCluster().getInstance(type, internalCluster().getMasterName());
    }

    protected WatchParser watchParser() {
        return getInstanceFromMaster(WatchParser.class);
    }

    private IndexNameExpressionResolver indexNameExpressionResolver() {
        return internalCluster().getInstance(IndexNameExpressionResolver.class);
    }

    protected void assertValue(XContentSource source, String path, Matcher<?> matcher) {
        assertThat(source.getValue(path), (Matcher<Object>) matcher);
    }

    protected void assertWatchWithMinimumPerformedActionsCount(final String watchName,
                                                               final long minimumExpectedWatchActionsWithActionPerformed) throws Exception {
        assertWatchWithMinimumPerformedActionsCount(watchName, minimumExpectedWatchActionsWithActionPerformed, true);
    }

    // TODO simplify this method; the `assertConditionMet` flag checks an unrelated payload field and should be removed
    protected void assertWatchWithMinimumPerformedActionsCount(final String watchName,
                                                               final long minimumExpectedWatchActionsWithActionPerformed,
                                                               final boolean assertConditionMet) throws Exception {
        final AtomicReference<SearchResponse> lastResponse = new AtomicReference<>();
        try {
            assertBusy(() -> {
                // wait for all watch-history shards to be active before searching them
                ClusterState state = client().admin().cluster().prepareState().get().getState();
                String[] watchHistoryIndices = indexNameExpressionResolver().concreteIndexNames(state,
                    IndicesOptions.lenientExpandOpen(), HistoryStoreField.INDEX_PREFIX_WITH_TEMPLATE + "*");
                assertThat(watchHistoryIndices, not(emptyArray()));
                for (String index : watchHistoryIndices) {
                    IndexRoutingTable routingTable = state.getRoutingTable().index(index);
                    assertThat(routingTable, notNullValue());
                    assertThat(routingTable.allPrimaryShardsActive(), is(true));
                }

                refresh();
                SearchResponse searchResponse = client().prepareSearch(HistoryStoreField.INDEX_PREFIX_WITH_TEMPLATE + "*")
                    .setIndicesOptions(IndicesOptions.lenientExpandOpen())
                    .setQuery(boolQuery().must(matchQuery("watch_id", watchName)).must(matchQuery("state",
                        ExecutionState.EXECUTED.id())))
                    .get();
                lastResponse.set(searchResponse);
                assertThat("could not find executed watch record for watch " + watchName,
                    searchResponse.getHits().getTotalHits().value,
                    greaterThanOrEqualTo(minimumExpectedWatchActionsWithActionPerformed));
                if (assertConditionMet) {
                    assertThat((Integer) XContentMapValues.extractValue("result.input.payload.hits.total",
                        searchResponse.getHits().getAt(0).getSourceAsMap()), greaterThanOrEqualTo(1));
                }
            });
        } catch (AssertionError error) {
            // on failure, dump the records found in the last attempt before re-throwing
            SearchResponse searchResponse = lastResponse.get();
            logger.info("Found [{}] records for watch [{}]", searchResponse.getHits().getTotalHits().value, watchName);
            int counter = 1;
            for (SearchHit hit : searchResponse.getHits().getHits()) {
                logger.info("hit [{}]=\n {}", counter++, XContentHelper.convertToJson(hit.getSourceRef(), true, true));
            }
            throw error;
        }
    }

    protected SearchResponse searchWatchRecords(Consumer<SearchRequestBuilder> requestBuilderCallback) {
        SearchRequestBuilder builder = client().prepareSearch(HistoryStoreField.INDEX_PREFIX_WITH_TEMPLATE + "*");
        requestBuilderCallback.accept(builder);
        return builder.get();
    }

    protected long findNumberOfPerformedActions(String watchName) {
        refresh();
        SearchResponse searchResponse = client().prepareSearch(HistoryStoreField.INDEX_PREFIX_WITH_TEMPLATE + "*")
            .setIndicesOptions(IndicesOptions.lenientExpandOpen())
            .setQuery(boolQuery().must(matchQuery("watch_id", watchName)).must(matchQuery("state", ExecutionState.EXECUTED.id())))
            .get();
        return searchResponse.getHits().getTotalHits().value;
    }

    protected void assertWatchWithNoActionNeeded(final String watchName,
                                                 final long expectedWatchActionsWithNoActionNeeded) throws Exception {
        final AtomicReference<SearchResponse> lastResponse = new AtomicReference<>();
        try {
            assertBusy(() -> {
                // The watch_history index gets created in the background when the first watch is triggered,
                // so we first check whether this index is created and its shards are started
                ClusterState state = client().admin().cluster().prepareState().get().getState();
                String[] watchHistoryIndices = indexNameExpressionResolver().concreteIndexNames(state,
                    IndicesOptions.lenientExpandOpen(), HistoryStoreField.INDEX_PREFIX_WITH_TEMPLATE + "*");
                assertThat(watchHistoryIndices, not(emptyArray()));
                for (String index : watchHistoryIndices) {
                    IndexRoutingTable routingTable = state.getRoutingTable().index(index);
                    assertThat(routingTable, notNullValue());
                    assertThat(routingTable.allPrimaryShardsActive(), is(true));
                }

                refresh();
                SearchResponse searchResponse = client().prepareSearch(HistoryStoreField.INDEX_PREFIX_WITH_TEMPLATE + "*")
                    .setIndicesOptions(IndicesOptions.lenientExpandOpen())
                    .setQuery(boolQuery().must(matchQuery("watch_id", watchName)).must(matchQuery("state",
                        ExecutionState.EXECUTION_NOT_NEEDED.id())))
                    .get();
                lastResponse.set(searchResponse);
                assertThat(searchResponse.getHits().getTotalHits().value,
                    greaterThanOrEqualTo(expectedWatchActionsWithNoActionNeeded));
            });
        } catch (AssertionError error) {
            // on failure, dump the records found in the last attempt before re-throwing
            SearchResponse searchResponse = lastResponse.get();
            logger.info("Found [{}] records for watch [{}]", searchResponse.getHits().getTotalHits().value, watchName);
            int counter = 1;
            for (SearchHit hit : searchResponse.getHits().getHits()) {
                logger.info("hit [{}]=\n {}", counter++, XContentHelper.convertToJson(hit.getSourceRef(), true, true));
            }
            throw error;
        }
    }

    protected void assertWatchWithMinimumActionsCount(final String watchName, final ExecutionState recordState,
                                                      final long recordCount) throws Exception {
        assertBusy(() -> {
            // wait for all watch-history shards to be active before searching them
            ClusterState state = client().admin().cluster().prepareState().get().getState();
            String[] watchHistoryIndices = indexNameExpressionResolver().concreteIndexNames(state,
                IndicesOptions.lenientExpandOpen(), HistoryStoreField.INDEX_PREFIX_WITH_TEMPLATE + "*");
            assertThat(watchHistoryIndices, not(emptyArray()));
            for (String index : watchHistoryIndices) {
                IndexRoutingTable routingTable = state.getRoutingTable().index(index);
                assertThat(routingTable, notNullValue());
                assertThat(routingTable.allPrimaryShardsActive(), is(true));
            }

            refresh();
            SearchResponse searchResponse = client().prepareSearch(HistoryStoreField.INDEX_PREFIX_WITH_TEMPLATE + "*")
                .setIndicesOptions(IndicesOptions.lenientExpandOpen())
                .setQuery(boolQuery().must(matchQuery("watch_id", watchName)).must(matchQuery("state", recordState.id())))
                .get();
            assertThat("could not find executed watch record", searchResponse.getHits().getTotalHits().value,
                greaterThanOrEqualTo(recordCount));
        });
    }

    private void ensureWatcherTemplatesAdded() throws Exception {
        // Verify that the index templates exist:
        assertBusy(() -> {
            GetComposableIndexTemplateAction.Response response = client().execute(GetComposableIndexTemplateAction.INSTANCE,
                new GetComposableIndexTemplateAction.Request(HISTORY_TEMPLATE_NAME)).get();
            assertThat("[" + HISTORY_TEMPLATE_NAME + "] is missing", response.indexTemplates().size(), equalTo(1));
            response = client().execute(GetComposableIndexTemplateAction.INSTANCE,
                new GetComposableIndexTemplateAction.Request(TRIGGERED_TEMPLATE_NAME)).get();
            assertThat("[" + TRIGGERED_TEMPLATE_NAME + "] is missing", response.indexTemplates().size(), equalTo(1));
            response = client().execute(GetComposableIndexTemplateAction.INSTANCE,
                new GetComposableIndexTemplateAction.Request(WATCHES_TEMPLATE_NAME)).get();
            assertThat("[" + WATCHES_TEMPLATE_NAME + "] is missing", response.indexTemplates().size(), equalTo(1));
        });
    }

    /**
     * Blocks (up to 30s) until every node reports {@link WatcherState#STARTED}. Issues a start
     * request when all nodes are stopped; the thrown AssertionErrors cause assertBusy to retry.
     */
    protected void startWatcher() throws Exception {
        assertBusy(() -> {
            WatcherStatsResponse watcherStatsResponse = new WatcherStatsRequestBuilder(client()).get();
            assertThat(watcherStatsResponse.hasFailures(), is(false));
            List<Tuple<String, WatcherState>> currentStatesFromStatsRequest = watcherStatsResponse.getNodes().stream()
                .map(response -> Tuple.tuple(response.getNode().getName(), response.getWatcherState()))
                .collect(Collectors.toList());
            List<WatcherState> states = currentStatesFromStatsRequest.stream().map(Tuple::v2).collect(Collectors.toList());

            logger.info("waiting to start watcher, current states {}", currentStatesFromStatsRequest);

            boolean isAllStateStarted = states.stream().allMatch(w -> w == WatcherState.STARTED);
            if (isAllStateStarted) {
                return;
            }

            boolean isAnyStopping = states.stream().anyMatch(w -> w == WatcherState.STOPPING);
            if (isAnyStopping) {
                throw new AssertionError("at least one node is in state stopping, waiting to be stopped");
            }

            boolean isAllStateStopped = states.stream().allMatch(w -> w == WatcherState.STOPPED);
            if (isAllStateStopped) {
                assertAcked(new WatcherServiceRequestBuilder(client()).start().get());
                throw new AssertionError("all nodes are stopped, restarting");
            }

            boolean isAnyStarting = states.stream().anyMatch(w -> w == WatcherState.STARTING);
            if (isAnyStarting) {
                throw new AssertionError("at least one node is in state starting, waiting to be stopped");
            }

            throw new AssertionError("unexpected state, retrying with next run");
        }, 30, TimeUnit.SECONDS);
    }

    protected void ensureLicenseEnabled() throws Exception {
        assertBusy(() -> {
            for (XPackLicenseState licenseState : internalCluster().getInstances(XPackLicenseState.class)) {
                assertThat(licenseState.checkFeature(XPackLicenseState.Feature.WATCHER), is(true));
            }
        });
    }

    /**
     * Blocks (up to 60s) until every node reports {@link WatcherState#STOPPED}. Issues a stop
     * request when all nodes are started; the thrown AssertionErrors cause assertBusy to retry.
     */
    protected void stopWatcher() throws Exception {
        assertBusy(() -> {
            WatcherStatsResponse watcherStatsResponse = new WatcherStatsRequestBuilder(client()).get();
            assertThat(watcherStatsResponse.hasFailures(), is(false));
            List<Tuple<String, WatcherState>> currentStatesFromStatsRequest = watcherStatsResponse.getNodes().stream()
                .map(response -> Tuple.tuple(response.getNode().getName() + " (" + response.getThreadPoolQueueSize() + ")",
                    response.getWatcherState())).collect(Collectors.toList());
            List<WatcherState> states = currentStatesFromStatsRequest.stream().map(Tuple::v2).collect(Collectors.toList());

            logger.info("waiting to stop watcher, current states {}", currentStatesFromStatsRequest);

            boolean isAllStateStarted = states.stream().allMatch(w -> w == WatcherState.STARTED);
            if (isAllStateStarted) {
                assertAcked(new WatcherServiceRequestBuilder(client()).stop().get());
                throw new AssertionError("all nodes are started, stopping");
            }

            boolean isAnyStopping = states.stream().anyMatch(w -> w == WatcherState.STOPPING);
            if (isAnyStopping) {
                throw new AssertionError("at least one node is in state stopping, waiting to be stopped");
            }

            boolean isAllStateStopped = states.stream().allMatch(w -> w == WatcherState.STOPPED);
            if (isAllStateStopped) {
                return;
            }

            boolean isAnyStarting = states.stream().anyMatch(w -> w == WatcherState.STARTING);
            if (isAnyStarting) {
                throw new AssertionError("at least one node is in state starting, waiting to be started before stopping");
            }

            throw new AssertionError("unexpected state, retrying with next run");
        }, 60, TimeUnit.SECONDS);
    }

    /**
     * An {@link EmailService} that performs no I/O: {@code send} simply reports the email as sent.
     */
    public static class NoopEmailService extends EmailService {

        public NoopEmailService() {
            super(Settings.EMPTY, null, mock(SSLService.class),
                new ClusterSettings(Settings.EMPTY, new HashSet<>(EmailService.getSettings())));
        }

        @Override
        public EmailSent send(Email email, Authentication auth, Profile profile, String accountName) {
            return new EmailSent(accountName, email);
        }
    }

    /**
     * Handle for manually controlling time in time-warped tests: exposes the frozen
     * {@link ClockMock} and triggers watches on the mock schedule trigger engines.
     */
    protected static class TimeWarp {
        private static final Logger logger = LogManager.getLogger(TimeWarp.class);

        private final List<ScheduleTriggerEngineMock> schedulers;
        private final ClockMock clock;

        TimeWarp(Iterable<ScheduleTriggerEngineMock> schedulers, ClockMock clock) {
            this.schedulers = StreamSupport.stream(schedulers.spliterator(), false).collect(Collectors.toList());
            this.clock = clock;
        }

        public void trigger(String jobName) {
            trigger(jobName, 1, null);
        }

        public ClockMock clock() {
            return clock;
        }

        /**
         * Triggers the given watch on every mock scheduler that knows it; asserts that at least
         * one scheduler fired, and only warns when more than one did.
         */
        public void trigger(String watchId, int times, TimeValue timeValue) {
            long triggeredCount = schedulers.stream()
                .filter(scheduler -> scheduler.trigger(watchId, times, timeValue))
                .count();
            String msg = String.format(Locale.ROOT, "watch was triggered on [%d] schedulers, expected [1]", triggeredCount);
            if (triggeredCount > 1) {
                logger.warn(msg);
            }
            assertThat(msg, triggeredCount, greaterThanOrEqualTo(1L));
        }
    }

    /**
     * A disruption that prevents time from advancing on nodes. This is needed to allow time sensitive tests
     * to have full control of time. This disruption requires {@link ClockMock} being available on the nodes.
     */
    private static class TimeFreezeDisruption implements ServiceDisruptionScheme {

        private InternalTestCluster cluster;
        private boolean frozen;

        @Override
        public void applyToCluster(InternalTestCluster cluster) {
            this.cluster = cluster;
        }

        @Override
        public void removeFromCluster(InternalTestCluster cluster) {
            stopDisrupting();
        }

        @Override
        public void removeAndEnsureHealthy(InternalTestCluster cluster) {
            stopDisrupting();
        }

        @Override
        public synchronized void applyToNode(String node, InternalTestCluster cluster) {
            // only freeze when the disruption is active, so late-joining nodes match cluster state
            if (frozen) {
                ((ClockMock)cluster.getInstance(ClockHolder.class, node).clock).freeze();
            }
        }

        @Override
        public void removeFromNode(String node, InternalTestCluster cluster) {
            ((ClockMock)cluster.getInstance(ClockHolder.class, node).clock).unfreeze();
        }

        @Override
        public synchronized void startDisrupting() {
            frozen = true;
            for (String node: cluster.getNodeNames()) {
                applyToNode(node, cluster);
            }
        }

        @Override
        public void stopDisrupting() {
            frozen = false;
            for (String node: cluster.getNodeNames()) {
                removeFromNode(node, cluster);
            }
        }

        @Override
        public void testClusterClosed() {
        }

        @Override
        public TimeValue expectedTimeToHeal() {
            return TimeValue.ZERO;
        }

        @Override
        public String toString() {
            return "time frozen";
        }
    }
}
/*
 * Copyright 2017, Yahoo! Inc. Licensed under the terms of the
 * Apache License 2.0. See LICENSE file at the project root for terms.
 */

package com.yahoo.memory;

import static com.yahoo.memory.Util.nativeOrder;
import static com.yahoo.memory.Util.negativeCheck;
import static com.yahoo.memory.Util.nullCheck;
import static com.yahoo.memory.Util.zeroCheck;

import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;

/**
 * Provides read and write primitive and primitive array access to any of the four resources
 * mentioned at the package level.
 *
 * @author Roman Leventov
 * @author Lee Rhodes
 */
public abstract class WritableMemory extends Memory {

  //Pass-through ctor: all state is held by the Memory superclass.
  WritableMemory(final Object unsafeObj, final long nativeBaseOffset, final long regionOffset,
      final long capacityBytes) {
    super(unsafeObj, nativeBaseOffset, regionOffset, capacityBytes);
  }

  //BYTE BUFFER

  /**
   * Accesses the given ByteBuffer for write operations. The returned WritableMemory object has
   * the same byte order, as the given ByteBuffer, unless the capacity of the given ByteBuffer is
   * zero, then byte order of the returned WritableMemory object, as well as backing storage and
   * read-only status are unspecified.
   *
   * <p><b>Note:</b> Always qualify this method with the class name, e.g.,
   * <i>WritableMemory.wrap(...)</i>.
   * @param byteBuf the given ByteBuffer
   * @return a new WritableMemory for write operations on the given ByteBuffer.
   */
  public static WritableMemory wrap(final ByteBuffer byteBuf) {
    return BaseWritableMemoryImpl.wrapByteBuffer(byteBuf, false, byteBuf.order());
  }

  /**
   * Accesses the given ByteBuffer for write operations. The returned WritableMemory object has
   * the given byte order, ignoring the byte order of the given ByteBuffer. If the capacity of
   * the given ByteBuffer is zero the byte order of the returned WritableMemory object
   * (as well as backing storage) is unspecified.
   *
   * <p><b>Note:</b> Always qualify this method with the class name, e.g.,
   * <i>WritableMemory.wrap(...)</i>.
   * @param byteBuf the given ByteBuffer, must not be null
   * @param byteOrder the byte order to be used, which may be independent of the byte order
   * state of the given ByteBuffer
   * @return a new WritableMemory for write operations on the given ByteBuffer.
   */
  public static WritableMemory wrap(final ByteBuffer byteBuf, final ByteOrder byteOrder) {
    return BaseWritableMemoryImpl.wrapByteBuffer(byteBuf, false, byteOrder);
  }

  //MAP

  /**
   * Maps the entire given file into native-ordered Memory for write operations
   * (including those &gt; 2GB). Calling this method is equivalent to calling
   * {@link #map(File, long, long, ByteOrder) map(file, 0, file.length(), ByteOrder.nativeOrder())}.
   *
   * <p><b>Note:</b> Always qualify this method with the class name, e.g.,
   * <i>WritableMemory.map(...)</i>.
   * @param file the given file to map
   * @return WritableMapHandle for managing the mapped Memory.
   * Please read Javadocs for {@link Handle}.
   * @throws IOException file not found or a RuntimeException.
   */
  public static WritableMapHandle map(final File file) throws IOException {
    return map(file, 0, file.length(), nativeOrder);
  }

  /**
   * Maps the specified portion of the given file into Memory for write operations
   * (including those &gt; 2GB).
   *
   * <p><b>Note:</b> Always qualify this method with the class name, e.g.,
   * <i>WritableMemory.map(...)</i>.
   * @param file the given file to map. It may not be null.
   * @param fileOffsetBytes the position in the given file in bytes. It may not be negative.
   * @param capacityBytes the size of the mapped Memory. It may not be negative or zero.
   * @param byteOrder the byte order to be used for the given file. It may not be null.
   * @return WritableMapHandle for managing the mapped Memory.
   * Please read Javadocs for {@link Handle}.
   * @throws IOException file not found or RuntimeException, etc.
   */
  public static WritableMapHandle map(final File file, final long fileOffsetBytes,
      final long capacityBytes, final ByteOrder byteOrder) throws IOException {
    // validate arguments before touching the file system
    zeroCheck(capacityBytes, "Capacity");
    nullCheck(file, "file is null");
    negativeCheck(fileOffsetBytes, "File offset is negative");
    return BaseWritableMemoryImpl
        .wrapMap(file, fileOffsetBytes, capacityBytes, false, byteOrder);
  }

  //ALLOCATE DIRECT

  /**
   * Allocates and provides access to capacityBytes directly in native (off-heap) memory
   * leveraging the WritableMemory API. Native byte order is assumed.
   * The allocated memory will be 8-byte aligned, but may not be page aligned.
   * If capacityBytes is zero, byte order, backing storage and read-only status
   * of the WritableMemory object, returned from {@link WritableHandle#get()} are unspecified.
   *
   * <p>The default MemoryRequestServer, which allocates any request for memory onto the heap,
   * will be used.</p>
   *
   * <p><b>NOTE:</b> Native/Direct memory acquired using Unsafe may have garbage in it.
   * It is the responsibility of the using class to clear this memory, if required,
   * and to call <i>close()</i> when done.</p>
   *
   * @param capacityBytes the size of the desired memory in bytes.
   * @return WritableDirectHandle for this off-heap resource.
   * Please read Javadocs for {@link Handle}.
   */
  public static WritableDirectHandle allocateDirect(final long capacityBytes) {
    // null selects the default MemoryRequestServer (see the two-argument overload)
    return allocateDirect(capacityBytes, null);
  }

  /**
   * Allocates and provides access to capacityBytes directly in native (off-heap) memory
   * leveraging the WritableMemory API. The allocated memory will be 8-byte aligned, but may not
   * be page aligned. If capacityBytes is zero, byte order, backing storage and read-only status
   * of the WritableMemory object, returned from {@link WritableHandle#get()} are unspecified.
   *
   * <p><b>NOTE:</b> Native/Direct memory acquired using Unsafe may have garbage in it.
   * It is the responsibility of the using class to clear this memory, if required,
   * and to call <i>close()</i> when done.</p>
   *
   * @param capacityBytes the size of the desired memory in bytes.
   * @param memReqSvr A user-specified MemoryRequestServer.
   * This is a callback mechanism for a user client of direct memory to request more memory.
   * @return WritableHandle for this off-heap resource.
   * Please read Javadocs for {@link Handle}.
   */
  public static WritableDirectHandle allocateDirect(final long capacityBytes,
      final MemoryRequestServer memReqSvr) {
    return BaseWritableMemoryImpl.wrapDirect(capacityBytes, nativeOrder, memReqSvr);
  }

  //REGIONS

  /**
   * A writable region is a writable view of this object.
   * This returns a new <i>WritableMemory</i> representing the defined writable region with the
   * given offsetBytes and capacityBytes.
   * <ul>
   * <li>Returned object's origin = this objects' origin + <i>offsetBytes</i></li>
   * <li>Returned object's capacity = <i>capacityBytes</i></li>
   * </ul>
   * If the given capacityBytes is zero, the returned object is effectively immutable and
   * the backing storage and byte order are unspecified.
   *
   * @param offsetBytes the starting offset with respect to this object.
   * @param capacityBytes the capacity of the returned object in bytes.
   * @return a new <i>WritableMemory</i> representing the defined writable region.
   */
  public abstract WritableMemory writableRegion(long offsetBytes, long capacityBytes);

  /**
   * A writable region is a writable view of this object.
   * This returns a new <i>WritableMemory</i> representing the defined writable region with the
   * given offsetBytes, capacityBytes and byte order.
   * <ul>
   * <li>Returned object's origin = this objects' origin + <i>offsetBytes</i></li>
   * <li>Returned object's capacity = <i>capacityBytes</i></li>
   * <li>Returned object's byte order = <i>byteOrder</i></li>
   * </ul>
   * If the given capacityBytes is zero, the returned object is effectively immutable and
   * the backing storage and byte order are unspecified.
   *
   * @param offsetBytes the starting offset with respect to this object.
   * @param capacityBytes the capacity of the returned object in bytes.
   * @param byteOrder the given byte order
   * @return a new <i>WritableMemory</i> representing the defined writable region.
   */
  public abstract WritableMemory writableRegion(long offsetBytes, long capacityBytes,
      ByteOrder byteOrder);

  //AS BUFFER

  /**
   * Returns a new <i>WritableBuffer</i> with a writable view of this object.
   * <ul>
   * <li>Returned object's origin = this object's origin</li>
   * <li>Returned object's <i>start</i> = 0</li>
   * <li>Returned object's <i>position</i> = 0</li>
   * <li>Returned object's <i>end</i> = this object's capacity</li>
   * <li>Returned object's <i>capacity</i> = this object's capacity</li>
   * <li>Returned object's <i>start</i>, <i>position</i> and <i>end</i> are mutable</li>
   * </ul>
   * If this object's capacity is zero, the returned object is effectively immutable and
   * the backing storage and byte order are unspecified.
   * @return a new <i>WritableBuffer</i> with a view of this WritableMemory
   */
  public abstract WritableBuffer asWritableBuffer();

  /**
   * Returns a new <i>WritableBuffer</i> with a writable view of this object
   * with the given byte order.
   * <ul>
   * <li>Returned object's origin = this object's origin</li>
   * <li>Returned object's <i>start</i> = 0</li>
   * <li>Returned object's <i>position</i> = 0</li>
   * <li>Returned object's <i>end</i> = this object's capacity</li>
   * <li>Returned object's <i>capacity</i> = this object's capacity</li>
   * <li>Returned object's <i>start</i>, <i>position</i> and <i>end</i> are mutable</li>
   * </ul>
   * If this object's capacity is zero, the returned object is effectively immutable and
   * the backing storage and byte order are unspecified.
   * @param byteOrder the given byte order
   * @return a new <i>WritableBuffer</i> with a view of this WritableMemory
   */
  public abstract WritableBuffer asWritableBuffer(ByteOrder byteOrder);

  //ALLOCATE HEAP VIA AUTOMATIC BYTE ARRAY

  /**
   * Creates on-heap WritableMemory with the given capacity and the native byte order. If the given
   * capacityBytes is zero, backing storage, byte order and read-only status of the returned
   * WritableMemory object are unspecified.
   * @param capacityBytes the given capacity in bytes.
   * @return a new WritableMemory for write operations on a new byte array.
   */
  public static WritableMemory allocate(final int capacityBytes) {
    final byte[] arr = new byte[capacityBytes];
    return wrap(arr, nativeOrder);
  }

  /**
   * Creates on-heap WritableMemory with the given capacity and the given byte order. If the given
   * capacityBytes is zero, backing storage, byte order and read-only status of the returned
   * WritableMemory object are unspecified.
   * @param capacityBytes the given capacity in bytes.
   * @param byteOrder the given byte order to allocate new Memory object with.
   * @return a new WritableMemory for write operations on a new byte array.
   */
  public static WritableMemory allocate(final int capacityBytes, final ByteOrder byteOrder) {
    final byte[] arr = new byte[capacityBytes];
    return wrap(arr, byteOrder);
  }

  //ACCESS PRIMITIVE HEAP ARRAYS for write

  /**
   * Wraps the given primitive array for write operations assuming native byte order. If the array
   * size is zero, backing storage, byte order and read-only status of the returned WritableMemory
   * object are unspecified.
   *
   * <p><b>Note:</b> Always qualify this method with the class name, e.g.,
   * <i>WritableMemory.wrap(...)</i>.
   * @param arr the given primitive array.
   * @return a new WritableMemory for write operations on the given primitive array.
   */
  public static WritableMemory wrap(final boolean[] arr) {
    // length in bytes is the element count scaled by the element size (as a power-of-two shift)
    final long lengthBytes = arr.length << Prim.BOOLEAN.shift();
    return BaseWritableMemoryImpl.wrapHeapArray(arr, 0L, lengthBytes, false, nativeOrder);
  }

  /**
   * Wraps the given primitive array for write operations assuming native byte order. If the array
   * size is zero, backing storage, byte order and read-only status of the returned WritableMemory
   * object are unspecified.
   *
   * <p><b>Note:</b> Always qualify this method with the class name, e.g.,
   * <i>WritableMemory.wrap(...)</i>.
   * @param arr the given primitive array.
   * @return a new WritableMemory for write operations on the given primitive array.
   */
  public static WritableMemory wrap(final byte[] arr) {
    return WritableMemory.wrap(arr, 0, arr.length, nativeOrder);
  }

  /**
   * Wraps the given primitive array for write operations with the given byte order. If the array
   * size is zero, backing storage, byte order and read-only status of the returned WritableMemory
   * object are unspecified.
   *
   * <p><b>Note:</b> Always qualify this method with the class name, e.g.,
   * <i>WritableMemory.wrap(...)</i>.
   * @param arr the given primitive array.
   * @param byteOrder the byte order to be used
   * @return a new WritableMemory for write operations on the given primitive array.
   */
  public static WritableMemory wrap(final byte[] arr, final ByteOrder byteOrder) {
    return WritableMemory.wrap(arr, 0, arr.length, byteOrder);
  }

  /**
   * Wraps the given primitive array for write operations with the given byte order.
If the given * lengthBytes is zero, backing storage, byte order and read-only status of the returned * WritableMemory object are unspecified. * * <p><b>Note:</b> Always qualify this method with the class name, e.g., * <i>WritableMemory.wrap(...)</i>. * @param arr the given primitive array. * @param offsetBytes the byte offset into the given array * @param lengthBytes the number of bytes to include from the given array * @param byteOrder the byte order to be used * @return a new WritableMemory for write operations on the given primitive array. */ public static WritableMemory wrap(final byte[] arr, final int offsetBytes, final int lengthBytes, final ByteOrder byteOrder) { UnsafeUtil.checkBounds(offsetBytes, lengthBytes, arr.length); return BaseWritableMemoryImpl.wrapHeapArray(arr, 0L, lengthBytes, false, byteOrder); } /** * Wraps the given primitive array for write operations assuming native byte order. If the array * size is zero, backing storage, byte order and read-only status of the returned WritableMemory * object are unspecified. * * <p><b>Note:</b> Always qualify this method with the class name, e.g., * <i>WritableMemory.wrap(...)</i>. * @param arr the given primitive array. * @return a new WritableMemory for write operations on the given primitive array. */ public static WritableMemory wrap(final char[] arr) { final long lengthBytes = arr.length << Prim.CHAR.shift(); return BaseWritableMemoryImpl.wrapHeapArray(arr, 0L, lengthBytes, false, nativeOrder); } /** * Wraps the given primitive array for write operations assuming native byte order. If the array * size is zero, backing storage, byte order and read-only status of the returned WritableMemory * object are unspecified. * * <p><b>Note:</b> Always qualify this method with the class name, e.g., * <i>WritableMemory.wrap(...)</i>. * @param arr the given primitive array. * @return a new WritableMemory for write operations on the given primitive array. 
*/ public static WritableMemory wrap(final short[] arr) { final long lengthBytes = arr.length << Prim.SHORT.shift(); return BaseWritableMemoryImpl.wrapHeapArray(arr, 0L, lengthBytes, false, nativeOrder); } /** * Wraps the given primitive array for write operations assuming native byte order. If the array * size is zero, backing storage, byte order and read-only status of the returned WritableMemory * object are unspecified. * * <p><b>Note:</b> Always qualify this method with the class name, e.g., * <i>WritableMemory.wrap(...)</i>. * @param arr the given primitive array. * @return a new WritableMemory for write operations on the given primitive array. */ public static WritableMemory wrap(final int[] arr) { final long lengthBytes = arr.length << Prim.INT.shift(); return BaseWritableMemoryImpl.wrapHeapArray(arr, 0L, lengthBytes, false, nativeOrder); } /** * Wraps the given primitive array for write operations assuming native byte order. If the array * size is zero, backing storage, byte order and read-only status of the returned WritableMemory * object are unspecified. * * <p><b>Note:</b> Always qualify this method with the class name, e.g., * <i>WritableMemory.wrap(...)</i>. * @param arr the given primitive array. * @return a new WritableMemory for write operations on the given primitive array. */ public static WritableMemory wrap(final long[] arr) { final long lengthBytes = arr.length << Prim.LONG.shift(); return BaseWritableMemoryImpl.wrapHeapArray(arr, 0L, lengthBytes, false, nativeOrder); } /** * Wraps the given primitive array for write operations assuming native byte order. If the array * size is zero, backing storage, byte order and read-only status of the returned WritableMemory * object are unspecified. * * <p><b>Note:</b> Always qualify this method with the class name, e.g., * <i>WritableMemory.wrap(...)</i>. * @param arr the given primitive array. * @return a new WritableMemory for write operations on the given primitive array. 
*/ public static WritableMemory wrap(final float[] arr) { final long lengthBytes = arr.length << Prim.FLOAT.shift(); return BaseWritableMemoryImpl.wrapHeapArray(arr, 0L, lengthBytes, false, nativeOrder); } /** * Wraps the given primitive array for write operations assuming native byte order. If the array * size is zero, backing storage, byte order and read-only status of the returned WritableMemory * object are unspecified. * * <p><b>Note:</b> Always qualify this method with the class name, e.g., * <i>WritableMemory.wrap(...)</i>. * @param arr the given primitive array. * @return a new WritableMemory for write operations on the given primitive array. */ public static WritableMemory wrap(final double[] arr) { final long lengthBytes = arr.length << Prim.DOUBLE.shift(); return BaseWritableMemoryImpl.wrapHeapArray(arr, 0L, lengthBytes, false, nativeOrder); } //END OF CONSTRUCTOR-TYPE METHODS //PRIMITIVE putX() and putXArray() /** * Puts the boolean value at the given offset * @param offsetBytes offset bytes relative to this <i>WritableMemory</i> start * @param value the value to put */ public abstract void putBoolean(long offsetBytes, boolean value); /** * Puts the boolean array at the given offset * @param offsetBytes offset bytes relative to this <i>WritableMemory</i> start * @param srcArray The source array. * @param srcOffsetBooleans offset in array units * @param lengthBooleans number of array units to transfer */ public abstract void putBooleanArray(long offsetBytes, boolean[] srcArray, int srcOffsetBooleans, int lengthBooleans); /** * Puts the byte value at the given offset * @param offsetBytes offset bytes relative to this <i>WritableMemory</i> start * @param value the value to put */ public abstract void putByte(long offsetBytes, byte value); /** * Puts the byte array at the given offset * @param offsetBytes offset bytes relative to this <i>WritableMemory</i> start * @param srcArray The source array. 
 * @param srcOffsetBytes offset in array units
 * @param lengthBytes number of array units to transfer
 */
public abstract void putByteArray(long offsetBytes, byte[] srcArray,
    int srcOffsetBytes, int lengthBytes);

/**
 * Puts the char value at the given offset
 * @param offsetBytes offset bytes relative to this <i>WritableMemory</i> start
 * @param value the value to put
 */
public abstract void putChar(long offsetBytes, char value);

/**
 * Puts the char array at the given offset
 * @param offsetBytes offset bytes relative to this <i>WritableMemory</i> start
 * @param srcArray The source array.
 * @param srcOffsetChars offset in array units
 * @param lengthChars number of array units to transfer
 */
public abstract void putCharArray(long offsetBytes, char[] srcArray,
    int srcOffsetChars, int lengthChars);

/**
 * Encodes characters from the given CharSequence into UTF-8 bytes and puts them into this
 * <i>WritableMemory</i> beginning at the given offsetBytes.
 * This is specifically designed to reduce the production of intermediate objects (garbage),
 * thus significantly reducing pressure on the JVM Garbage Collector.
 * @param offsetBytes offset bytes relative to this <i>WritableMemory</i> start
 * @param src The source CharSequence to be encoded and put into this WritableMemory. It is
 * the responsibility of the caller to provide sufficient capacity in this
 * <i>WritableMemory</i> for the encoded Utf8 bytes. Characters outside the ASCII range can
 * require 2, 3 or 4 bytes per character to encode.
 * @return the number of bytes encoded
 */
public abstract long putCharsToUtf8(long offsetBytes, CharSequence src);

/**
 * Puts the double value at the given offset
 * @param offsetBytes offset bytes relative to this <i>WritableMemory</i> start
 * @param value the value to put
 */
public abstract void putDouble(long offsetBytes, double value);

/**
 * Puts the double array at the given offset
 * @param offsetBytes offset bytes relative to this <i>WritableMemory</i> start
 * @param srcArray The source array.
 * @param srcOffsetDoubles offset in array units
 * @param lengthDoubles number of array units to transfer
 */
public abstract void putDoubleArray(long offsetBytes, double[] srcArray,
    final int srcOffsetDoubles, final int lengthDoubles);

/**
 * Puts the float value at the given offset
 * @param offsetBytes offset bytes relative to this <i>WritableMemory</i> start
 * @param value the value to put
 */
public abstract void putFloat(long offsetBytes, float value);

/**
 * Puts the float array at the given offset
 * @param offsetBytes offset bytes relative to this <i>WritableMemory</i> start
 * @param srcArray The source array.
 * @param srcOffsetFloats offset in array units
 * @param lengthFloats number of array units to transfer
 */
public abstract void putFloatArray(long offsetBytes, float[] srcArray,
    final int srcOffsetFloats, final int lengthFloats);

/**
 * Puts the int value at the given offset
 * @param offsetBytes offset bytes relative to this <i>WritableMemory</i> start
 * @param value the value to put
 */
public abstract void putInt(long offsetBytes, int value);

/**
 * Puts the int array at the given offset
 * @param offsetBytes offset bytes relative to this <i>WritableMemory</i> start
 * @param srcArray The source array.
 * @param srcOffsetInts offset in array units
 * @param lengthInts number of array units to transfer
 */
public abstract void putIntArray(long offsetBytes, int[] srcArray,
    final int srcOffsetInts, final int lengthInts);

/**
 * Puts the long value at the given offset
 * @param offsetBytes offset bytes relative to this <i>WritableMemory</i> start
 * @param value the value to put
 */
public abstract void putLong(long offsetBytes, long value);

/**
 * Puts the long array at the given offset
 * @param offsetBytes offset bytes relative to this <i>WritableMemory</i> start
 * @param srcArray The source array.
 * @param srcOffsetLongs offset in array units
 * @param lengthLongs number of array units to transfer
 */
public abstract void putLongArray(long offsetBytes, long[] srcArray,
    final int srcOffsetLongs, final int lengthLongs);

/**
 * Puts the short value at the given offset
 * @param offsetBytes offset bytes relative to this <i>WritableMemory</i> start
 * @param value the value to put
 */
public abstract void putShort(long offsetBytes, short value);

/**
 * Puts the short array at the given offset
 * @param offsetBytes offset bytes relative to this <i>WritableMemory</i> start
 * @param srcArray The source array.
 * @param srcOffsetShorts offset in array units
 * @param lengthShorts number of array units to transfer
 */
public abstract void putShortArray(long offsetBytes, short[] srcArray,
    final int srcOffsetShorts, final int lengthShorts);

//Atomic Methods

/**
 * Atomically adds the given value to the long located at offsetBytes.
 * @param offsetBytes offset bytes relative to this Memory start
 * @param delta the amount to add
 * @return the previous value
 */
public abstract long getAndAddLong(long offsetBytes, long delta);

/**
 * Atomically sets the current value at the memory location to the given updated value
 * if and only if the current value {@code ==} the expected value.
 * @param offsetBytes offset bytes relative to this Memory start
 * @param expect the expected value
 * @param update the new value
 * @return {@code true} if successful. False return indicates that
 * the current value at the memory location was not equal to the expected value.
 */
public abstract boolean compareAndSwapLong(long offsetBytes, long expect, long update);

/**
 * Atomically exchanges the given value with the current value located at offsetBytes.
 * @param offsetBytes offset bytes relative to this Memory start
 * @param newValue new value
 * @return the previous value
 */
public abstract long getAndSetLong(long offsetBytes, long newValue);

//OTHER WRITE METHODS

/**
 * Returns the primitive backing array, otherwise null.
 * @return the primitive backing array, otherwise null.
 */
public abstract Object getArray();

/**
 * Clears all bytes of this Memory to zero
 */
public abstract void clear();

/**
 * Clears a portion of this Memory to zero.
 * @param offsetBytes offset bytes relative to this Memory start
 * @param lengthBytes the length in bytes
 */
public abstract void clear(long offsetBytes, long lengthBytes);

/**
 * Clears the bits defined by the bitMask
 * @param offsetBytes offset bytes relative to this Memory start.
 * @param bitMask the bits set to one will be cleared
 */
public abstract void clearBits(long offsetBytes, byte bitMask);

/**
 * Fills all bytes of this Memory region to the given byte value.
 * @param value the given byte value
 */
public abstract void fill(byte value);

/**
 * Fills a portion of this Memory region to the given byte value.
 * @param offsetBytes offset bytes relative to this Memory start
 * @param lengthBytes the length in bytes
 * @param value the given byte value
 */
public abstract void fill(long offsetBytes, long lengthBytes, byte value);

/**
 * Sets the bits defined by the bitMask
 * @param offsetBytes offset bytes relative to this Memory start
 * @param bitMask the bits set to one will be set
 */
public abstract void setBits(long offsetBytes, byte bitMask);

//OTHER WRITABLE API METHODS

/**
 * For Direct Memory only. Other types of backing resources will return null.
 * Gets the MemoryRequestServer object used by dynamic off-heap (Direct) memory objects
 * to request additional memory.
 * Set using {@link WritableMemory#allocateDirect(long, MemoryRequestServer)}.
 * If not explicitly set, this returns the {@link DefaultMemoryRequestServer}.
 * @return the MemoryRequestServer object (if direct memory) or null.
 */
@Override
public MemoryRequestServer getMemoryRequestServer() {
  // Base implementation: only direct-memory subclasses carry a request server.
  return null;
}

}
package net.galaxygaming.dispenser.database;

import java.util.UUID;

import net.galaxygaming.dispenser.GameDispenser;

import org.bukkit.Bukkit;

/**
 * Abstract base for the plugin's stat-storage backends (MySQL or YAML).
 * Per-minigame stats are keyed by (minigame, player); cross-minigame totals are
 * stored under the global (player-only) overloads with "_total"-suffixed keys.
 */
public abstract class Database {
    private static Database currentDatabase;

    // TODO: Make a table that automatically sorts all of the stats.

    /**
     * Gets a player's experience points
     * @param minigame the minigame to get the points from (@see {@link #getTotalEXP(String)} for getting a player's total exp)
     * @param playerName the name of the player to get the exp from
     * @return the player's experience points
     * @deprecated use {@link #getEXP(String, UUID)}
     */
    @Deprecated
    public double getEXP(String minigame, String playerName) {
        // Delegate to the UUID-based accessor to keep the two code paths identical.
        return getEXP(minigame, Bukkit.getOfflinePlayer(playerName).getUniqueId());
    }

    /**
     * Sets the amount of experience points a player has
     * @param minigame the minigame that the player has the points in (@see {@link #setTotalEXP(String, double)} to set the player's total exp)
     * @param playerName the name of the player to set the experience points of
     * @param exp the experience points the player will now have
     * @deprecated use {@link #setEXP(String, UUID, double)}
     */
    @Deprecated
    public void setEXP(String minigame, String playerName, double exp) {
        UUID uuid = Bukkit.getOfflinePlayer(playerName).getUniqueId();
        set(minigame, uuid, playerName, "exp", "double", exp);
        // Totals live in the global (no-minigame) table; the new exp is added to them.
        set(uuid, playerName, "exp_total", "double",
                (double) get(uuid, "exp_total", "double", 0) + exp);
    }

    /**
     * Gets the amount of times a player has won a minigame
     * @param minigame the minigame the player has won (@see {@link #getTotalGamesWon(String)} for getting a player's total wins)
     * @param playerName the name of the player to get the win count of
     * @return how many times the player has won the minigame given
     * @deprecated use {@link #getGamesWon(String, UUID)}
     */
    @Deprecated
    public int getGamesWon(String minigame, String playerName) {
        return getGamesWon(minigame, Bukkit.getOfflinePlayer(playerName).getUniqueId());
    }

    /**
     * Sets how many times a player has won a game
     * @param minigame the game that the player has won
     * @param playerName the name of the player who won the game
     * @param gamesWon the amount of games the player has won
     * @deprecated use {@link #setGamesWon(String, UUID, int)}
     */
    @Deprecated
    public void setGamesWon(String minigame, String playerName, int gamesWon) {
        UUID uuid = Bukkit.getOfflinePlayer(playerName).getUniqueId();
        set(minigame, uuid, playerName, "games won", "int", gamesWon);
        // Fix: the total was previously written through the per-minigame overload while
        // getTotalGamesWon reads it through the global overload, so totals never matched.
        set(uuid, playerName, "games won_total", "int",
                (int) get(uuid, "games won_total", "int", 0) + gamesWon);
    }

    /**
     * Gets the amount of times a player has lost a minigame
     * @param minigame the minigame the player has lost (@see {@link #getTotalGamesLost(String)} for getting a player's total losses)
     * @param playerName the name of the player to get the loss count of
     * @return how many times the player has lost the minigame given
     * @deprecated use {@link #getGamesLost(String, UUID)}
     */
    @Deprecated
    public int getGamesLost(String minigame, String playerName) {
        return getGamesLost(minigame, Bukkit.getOfflinePlayer(playerName).getUniqueId());
    }

    /**
     * Sets how many times a player has lost a game
     * @param minigame the minigame the player has lost
     * @param playerName the name of the player to set the games lost of
     * @param gamesLost the amount of times the player has lost the game
     * @deprecated use {@link #setGamesLost(String, UUID, int)}
     */
    @Deprecated
    public void setGamesLost(String minigame, String playerName, int gamesLost) {
        UUID uuid = Bukkit.getOfflinePlayer(playerName).getUniqueId();
        set(minigame, uuid, playerName, "games lost", "int", gamesLost);
        // Fix: write the total through the global overload (see setGamesWon).
        set(uuid, playerName, "games lost_total", "int",
                (int) get(uuid, "games lost_total", "int", 0) + gamesLost);
    }

    /**
     * Gets the total amount of experience points a player has
     * @param playerName the name of the player to get the experience points of
     * @return the total experience points for all games combined of a player
     * @deprecated use {@link #getTotalEXP(UUID)}
     */
    @Deprecated
    public double getTotalEXP(String playerName) {
        return getTotalEXP(Bukkit.getOfflinePlayer(playerName).getUniqueId());
    }

    /**
     * Sets the total amount of experience points a player has
     * @param playerName the name of the player to set the total experience points of
     * @param exp the amount of experience points the player has
     * @deprecated use {@link #setTotalEXP(UUID, double)}
     */
    @Deprecated
    public void setTotalEXP(String playerName, double exp) {
        set(Bukkit.getOfflinePlayer(playerName).getUniqueId(), playerName,
                "exp_total", "double", exp);
    }

    /**
     * Gets the total amount of times a player has won a game
     * @param playerName the name of the player to get the total win amount of
     * @return the total amount of times the player has won a game
     * @deprecated use {@link #getTotalGamesWon(UUID)}
     */
    @Deprecated
    public int getTotalGamesWon(String playerName) {
        // Fix: this previously asked for defaultType "double" while every other
        // games-won accessor uses "int".
        return getTotalGamesWon(Bukkit.getOfflinePlayer(playerName).getUniqueId());
    }

    /**
     * Sets the total amount of times a player has won a game
     * @param playerName the name of the player to set the total wins amount of
     * @param gamesWon the total amount of times the player has won a game
     * @deprecated use {@link #setTotalGamesWon(UUID, int)}
     */
    @Deprecated
    public void setTotalGamesWon(String playerName, int gamesWon) {
        set(Bukkit.getOfflinePlayer(playerName).getUniqueId(), playerName,
                "games won_total", "int", gamesWon);
    }

    /**
     * Gets the total amount of times a player has lost a game.
     * @param playerName the name of the player to get the total loss amount of
     * @return the total amount of times the player has lost a game
     * @deprecated use {@link #getTotalGamesLost(UUID)}
     */
    @Deprecated
    public int getTotalGamesLost(String playerName) {
        return getTotalGamesLost(Bukkit.getOfflinePlayer(playerName).getUniqueId());
    }

    /**
     * Sets the total amount of times a player has lost a game
     * @param playerName the name of the player to set the total losses amount of
     * @param gamesLost the total amount of times the player has lost a game
     * @deprecated use {@link #setTotalGamesLost(UUID, int)}
     */
    @Deprecated
    public void setTotalGamesLost(String playerName, int gamesLost) {
        set(Bukkit.getOfflinePlayer(playerName).getUniqueId(), playerName,
                "games lost_total", "int", gamesLost);
    }

    /**
     * Gets a player's experience points
     * @param minigame the minigame to get the points from (@see {@link #getTotalEXP(UUID)} for getting a player's total exp)
     * @param uuid the unique id of the player to get the exp of
     * @return the player's experience points
     */
    public double getEXP(String minigame, UUID uuid) {
        return (double) get(minigame, uuid, "exp", "double", 0);
    }

    /**
     * Sets the amount of experience points a player has
     * @param minigame the minigame that the player has the experience points in (@see {@link #setTotalEXP(UUID, double)} to set the player's total exp)
     * @param uuid the unique id of the player to set the experience points of
     * @param exp the experience points the player will now have
     */
    public void setEXP(String minigame, UUID uuid, double exp) {
        String name = Bukkit.getOfflinePlayer(uuid).getName();
        set(minigame, uuid, name, "exp", "double", exp);
        set(uuid, name, "exp_total", "double",
                (double) get(uuid, "exp_total", "double", 0) + exp);
    }

    /**
     * Gets the amount of times a player has won a minigame
     * @param minigame the minigame the player has won (@see {@link #getTotalGamesWon(UUID)} for getting a player's total wins)
     * @param uuid the unique id of the player to get the win count of
     * @return how many times the player has won the minigame given
     */
    public int getGamesWon(String minigame, UUID uuid) {
        return (int) get(minigame, uuid, "games won", "int", 0);
    }

    /**
     * Sets how many times a player has won a game
     * @param minigame the game that the player has won
     * @param uuid the unique id of the player who won the game
     * @param gamesWon the amount of games the player has won
     */
    public void setGamesWon(String minigame, UUID uuid, int gamesWon) {
        String name = Bukkit.getOfflinePlayer(uuid).getName();
        set(minigame, uuid, name, "games won", "int", gamesWon);
        // Fix: write the total through the global overload so it matches what
        // getTotalGamesWon(UUID) reads.
        set(uuid, name, "games won_total", "int",
                (int) get(uuid, "games won_total", "int", 0) + gamesWon);
    }

    /**
     * Gets the amount of times a player has lost a minigame
     * @param minigame the minigame the player has lost (@see {@link #getTotalGamesLost(UUID)} for getting a player's total losses)
     * @param uuid the unique id of the player to get the loss count of
     * @return how many times the player has lost the minigame given
     */
    public int getGamesLost(String minigame, UUID uuid) {
        return (int) get(minigame, uuid, "games lost", "int", 0);
    }

    /**
     * Sets how many times a player has lost a game
     * @param minigame the minigame the player has lost
     * @param uuid the unique id of the player to set the games lost of
     * @param gamesLost the amount of times the player has lost the game
     */
    public void setGamesLost(String minigame, UUID uuid, int gamesLost) {
        String name = Bukkit.getOfflinePlayer(uuid).getName();
        set(minigame, uuid, name, "games lost", "int", gamesLost);
        // Fix: write the total through the global overload (see setGamesWon).
        set(uuid, name, "games lost_total", "int",
                (int) get(uuid, "games lost_total", "int", 0) + gamesLost);
    }

    /**
     * Gets the total amount of experience points a player has
     * @param uuid the unique id of the player to get the experience points of
     * @return the total experience points for all games combined of a player
     */
    public double getTotalEXP(UUID uuid) {
        return (double) get(uuid, "exp_total", "double", 0);
    }

    /**
     * Sets the total amount of experience points a player has
     * @param uuid the unique id of the player to set the experience points of
     * @param exp the amount of experience points the player has
     */
    public void setTotalEXP(UUID uuid, double exp) {
        set(uuid, Bukkit.getOfflinePlayer(uuid).getName(), "exp_total", "double", exp);
    }

    /**
     * Gets the total amount of times a player has won a game
     * @param uuid the unique id of the player to get the total win amount of
     * @return the total amount of times the player has won a game
     */
    public int getTotalGamesWon(UUID uuid) {
        return (int) get(uuid, "games won_total", "int", 0);
    }

    /**
     * Sets the total amount of times a player has won a game
     * @param uuid the unique id of the player to set the total win amount of
     * @param gamesWon the total amount of times the player has won a game
     */
    public void setTotalGamesWon(UUID uuid, int gamesWon) {
        set(uuid, Bukkit.getOfflinePlayer(uuid).getName(), "games won_total", "int", gamesWon);
    }

    /**
     * Gets the total amount of times a player has lost a game
     * @param uuid the unique id of the player to get the total loss amount of
     * @return the total amount of times the player has lost a game
     */
    public int getTotalGamesLost(UUID uuid) {
        return (int) get(uuid, "games lost_total", "int", 0);
    }

    /**
     * Sets the total amount of times a player has lost a game
     * @param uuid the unique id of the player to set the total loss amount of
     * @param gamesLost the total amount of times the player has lost a game
     */
    public void setTotalGamesLost(UUID uuid, int gamesLost) {
        set(uuid, Bukkit.getOfflinePlayer(uuid).getName(), "games lost_total", "int", gamesLost);
    }

    /**
     * Gets something from the database
     * @param minigame the minigame to get from
     * @param playerUUID the unique id of the player associated with what to get
     * @param key what to get, such as kills, deaths, wins, etc...
     * @return the value paired with the key
     */
    protected abstract Object get(String minigame, UUID playerUUID, String key);

    /**
     * Gets something from the database
     * @param minigame the minigame to get from
     * @param playerUUID the unique id of the player associated with what to get
     * @param key what to get, such as kills, deaths, wins, etc...
     * @param defaultType the type of the default value
     * @param defaultValue the default value that the key should be
     * @return the value paired with the key
     */
    protected abstract Object get(String minigame, UUID playerUUID, String key,
            String defaultType, Object defaultValue);

    /**
     * Sets something in the database
     * @param minigame the minigame to set
     * @param playerUUID the unique id of the player associated with what to set
     * @param playerName the name of the player associated with what to set
     * @param key what to set, such as kills, deaths, wins, etc...
     * @param value the value that the key will be set to
     */
    protected abstract void set(String minigame, UUID playerUUID, String playerName,
            String key, Object value);

    /**
     * Sets something in the database
     * @param minigame the minigame to set
     * @param playerUUID the unique id of the player associated with what to set
     * @param playerName the name of the player associated with what to set
     * @param key what to set, such as kills, deaths, wins, etc...
     * @param valueType the type of the value
     * @param value the value that the key will be set to
     */
    protected abstract void set(String minigame, UUID playerUUID, String playerName,
            String key, String valueType, Object value);

    /**
     * Gets something from the database
     * @param playerUUID the unique id of the player associated with what to get
     * @param key what to get, such as kills, deaths, wins, etc...
     * @return the value paired with the key
     */
    protected abstract Object get(UUID playerUUID, String key);

    /**
     * Gets something from the database
     * @param playerUUID the unique id of the player associated with what to get
     * @param key what to get, such as kills, deaths, wins, etc...
     * @param defaultType the type of the default value
     * @param defaultValue the default value that the key should be
     * @return the value paired with the key
     */
    protected abstract Object get(UUID playerUUID, String key, String defaultType,
            Object defaultValue);

    /**
     * Sets something in the database
     * @param playerUUID the unique id of the player associated with what to set
     * @param playerName the player associated with what to set
     * @param key what to set, such as kills, deaths, wins, etc...
     * @param value the value that the key will be set to
     */
    protected abstract void set(UUID playerUUID, String playerName, String key, Object value);

    /**
     * Sets something in the database
     * @param playerUUID the unique id of the player associated with what to set
     * @param playerName the player associated with what to set
     * @param key what to set, such as kills, deaths, wins, etc...
     * @param valueType the type of the value
     * @param value the value that the key will be set to
     */
    protected abstract void set(UUID playerUUID, String playerName, String key,
            String valueType, Object value);

    /**
     * Executes any tasks required to unload the database
     */
    public abstract void unload();

    /**
     * Gets the database type that is being used, as set in the config.
     * Falls back to YAML for any unrecognized (or missing) value.
     * @return the current database being used
     */
    public static Database getDatabase() {
        // Guard against a missing "database" config entry (getString may return null).
        String configured = GameDispenser.getInstance().getConfig().getString("database");
        String normalized = configured == null ? "" : configured.toLowerCase().replaceAll(" ", "");
        switch (normalized) {
            case "mysql":
            case "sql":
                if (!(currentDatabase instanceof MySQL))
                    currentDatabase = new MySQL();
                // Fix: without this break the mysql/sql case fell through to default
                // and the freshly created MySQL was immediately replaced by YAML.
                break;
            default:
                if (!(currentDatabase instanceof YAML))
                    currentDatabase = new YAML();
        }
        return currentDatabase;
    }
}
/*
 * Copyright (C) 2017 Genymobile
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.genymobile.relay;

import java.nio.ByteBuffer;

/**
 * Mutable view over a TCP header stored in a {@link ByteBuffer}.
 *
 * <p>Field accessors read from cached values parsed in the constructor; setters update both the
 * cached value and the backing buffer, so the raw bytes stay in sync with the getters.
 *
 * <p>NOTE(review): the constructor calls {@code raw.limit(headerLength)}, mutating the buffer
 * passed in — callers presumably hand over ownership of the buffer; confirm at call sites.
 */
@SuppressWarnings("checkstyle:MagicNumber")
public class TCPHeader implements TransportHeader {

    // TCP flag bits as laid out in the low 9 bits of the 13th/14th header bytes.
    public static final int FLAG_FIN = 1 << 0;
    public static final int FLAG_SYN = 1 << 1;
    public static final int FLAG_RST = 1 << 2;
    public static final int FLAG_PSH = 1 << 3;
    public static final int FLAG_ACK = 1 << 4;
    public static final int FLAG_URG = 1 << 5;

    private final ByteBuffer raw;       // backing bytes of the header (positions are absolute offsets)
    private int sourcePort;             // unsigned 16-bit, offset 0
    private int destinationPort;        // unsigned 16-bit, offset 2
    private int headerLength;           // header length in bytes (data offset * 4)
    private int sequenceNumber;         // 32-bit, offset 4
    private int acknowledgementNumber;  // 32-bit, offset 8
    private int flags;                  // low 9 bits of the short at offset 12
    private int window;                 // unsigned 16-bit, offset 14

    /**
     * Parses the fixed TCP fields from {@code raw} and limits the buffer to the header length.
     */
    public TCPHeader(ByteBuffer raw) {
        this.raw = raw;
        sourcePort = Short.toUnsignedInt(raw.getShort(0));
        destinationPort = Short.toUnsignedInt(raw.getShort(2));
        sequenceNumber = raw.getInt(4);
        acknowledgementNumber = raw.getInt(8);
        short dataOffsetAndFlags = raw.getShort(12);
        // Data offset is the top nibble, counted in 32-bit words:
        // (x & 0xf000) >> 12 gives words, << 2 converts to bytes — hence >> 10 overall.
        headerLength = (dataOffsetAndFlags & 0xf000) >> 10;
        flags = dataOffsetAndFlags & 0x1ff;
        window = Short.toUnsignedInt(raw.getShort(14));
        raw.limit(headerLength);
    }

    public int getWindow() {
        return window;
    }

    @Override
    public int getSourcePort() {
        return sourcePort;
    }

    @Override
    public int getDestinationPort() {
        return destinationPort;
    }

    @Override
    public void setSourcePort(int sourcePort) {
        this.sourcePort = sourcePort;
        raw.putShort(0, (short) sourcePort);
    }

    @Override
    public void setDestinationPort(int destinationPort) {
        this.destinationPort = destinationPort;
        raw.putShort(2, (short) destinationPort);
    }

    public int getSequenceNumber() {
        return sequenceNumber;
    }

    public void setSequenceNumber(int sequenceNumber) {
        this.sequenceNumber = sequenceNumber;
        raw.putInt(4, sequenceNumber);
    }

    public int getAcknowledgementNumber() {
        return acknowledgementNumber;
    }

    public void setAcknowledgementNumber(int acknowledgementNumber) {
        this.acknowledgementNumber = acknowledgementNumber;
        raw.putInt(8, acknowledgementNumber);
    }

    @Override
    public int getHeaderLength() {
        return headerLength;
    }

    @Override
    public void setPayloadLength(int payloadLength) {
        // do nothing — TCP carries no payload-length field (unlike UDP); total length lives in IPv4.
    }

    public int getFlags() {
        return flags;
    }

    /**
     * Replaces the 9 flag bits, preserving the data-offset nibble and reserved bits.
     */
    public void setFlags(int flags) {
        this.flags = flags;
        short dataOffsetAndFlags = raw.getShort(12);
        dataOffsetAndFlags = (short) (dataOffsetAndFlags & 0xfe00 | flags & 0x1ff);
        raw.putShort(12, dataOffsetAndFlags);
    }

    /**
     * Drops any TCP options: resets the data offset to the minimum 5 words (20 bytes)
     * and truncates the buffer accordingly.
     */
    public void shrinkOptions() {
        setDataOffset(5);
        raw.limit(20);
    }

    // dataOffset is in 32-bit words; also refreshes the cached headerLength (bytes).
    private void setDataOffset(int dataOffset) {
        short dataOffsetAndFlags = raw.getShort(12);
        dataOffsetAndFlags = (short) (dataOffsetAndFlags & 0x0fff | (dataOffset << 12));
        raw.putShort(12, dataOffsetAndFlags);
        headerLength = dataOffset << 2;
    }

    public boolean isFin() {
        return (flags & FLAG_FIN) != 0;
    }

    public boolean isSyn() {
        return (flags & FLAG_SYN) != 0;
    }

    public boolean isRst() {
        return (flags & FLAG_RST) != 0;
    }

    public boolean isPsh() {
        return (flags & FLAG_PSH) != 0;
    }

    public boolean isAck() {
        return (flags & FLAG_ACK) != 0;
    }

    public boolean isUrg() {
        return (flags & FLAG_URG) != 0;
    }

    @Override
    public ByteBuffer getRaw() {
        raw.rewind();
        return raw.slice();
    }

    /**
     * Writes this header into {@code target} at its current position and returns a new
     * {@link TCPHeader} backed by that freshly written region.
     */
    @Override
    public TCPHeader copyTo(ByteBuffer target) {
        raw.rewind();
        ByteBuffer slice = Binary.slice(target, target.position(), getHeaderLength());
        target.put(raw);
        return new TCPHeader(slice);
    }

    @Override
    public TCPHeader copy() {
        return new TCPHeader(Binary.copy(raw));
    }

    /**
     * Computes and stores the TCP checksum over the pseudo-header, this header, and the payload
     * (cf RFC 793 section 3.1). Rewinds both buffers; the checksum field is zeroed before summing.
     */
    @Override
    public void computeChecksum(IPv4Header ipv4Header, ByteBuffer payload) {
        raw.rewind();
        payload.rewind();

        // pseudo-header checksum (cf rfc793 section 3.1)
        int source = ipv4Header.getSource();
        int destination = ipv4Header.getDestination();
        int length = getHeaderLength() + payload.remaining();
        assert (length & ~0xffff) == 0 : "Length cannot take more than 16 bits"; // by design

        int sum = source >>> 16;
        sum += source & 0xffff;
        sum += destination >>> 16;
        sum += destination & 0xffff;
        sum += IPv4Header.Protocol.TCP.getNumber();
        sum += length;

        // reset checksum field
        setChecksum((short) 0);

        // sum 16-bit words of the header, then of the payload
        while (raw.hasRemaining()) {
            sum += Short.toUnsignedInt(raw.getShort());
        }
        while (payload.remaining() > 1) {
            sum += Short.toUnsignedInt(payload.getShort());
        }
        // if payload length is odd, pad last short with 0
        if (payload.hasRemaining()) {
            sum += Byte.toUnsignedInt(payload.get()) << 8;
        }

        // fold the carries back into the low 16 bits (end-around carry)
        while ((sum & ~0xffff) != 0) {
            sum = (sum & 0xffff) + (sum >> 16);
        }

        setChecksum((short) ~sum);
    }

    private void setChecksum(short checksum) {
        raw.putShort(16, checksum);
    }

    public short getChecksum() {
        return raw.getShort(16);
    }
}
/*
 *  Copyright 2015 LG CNS.
 *
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 *
 *  The initial idea for this class is from "org.apache.commons.lang.IntHashMap";
 *  http://commons.apache.org/commons-lang-2.6-src.zip
 */
package scouter.util;

import java.util.Enumeration;
import java.util.NoSuchElementException;

/**
 * An int-to-int hash map that additionally keeps entries on a doubly-linked list
 * in insertion order, supporting first/last access, ordered iteration, and an
 * optional {@code max} size with oldest-first eviction.
 *
 * <p>Individual operations are {@code synchronized}; compound call sequences are not atomic.
 */
public class IntIntLinkedMap {

    private static final int DEFAULT_CAPACITY = 101;
    private static final float DEFAULT_LOAD_FACTOR = 0.75f;

    private ENTRY table[];   // hash buckets, chained via ENTRY.hash_next
    private ENTRY header;    // sentinel node of the circular insertion-order list
    private int count;       // number of live entries
    private int threshold;   // resize when count reaches this
    private float loadFactor;

    // Sentinel value returned for missing keys; configurable via setNullValue().
    private int NONE = 0;

    /** Sets the value reported for absent keys (default 0). Returns this map for chaining. */
    public IntIntLinkedMap setNullValue(int none) {
        this.NONE = none;
        return this;
    }

    /**
     * @param initCapacity initial bucket count (0 is promoted to 1)
     * @param loadFactor   resize trigger ratio; must be positive
     */
    public IntIntLinkedMap(int initCapacity, float loadFactor) {
        if (initCapacity < 0)
            throw new RuntimeException("Capacity Error: " + initCapacity);
        if (loadFactor <= 0)
            throw new RuntimeException("Load Count Error: " + loadFactor);
        if (initCapacity == 0)
            initCapacity = 1;
        this.loadFactor = loadFactor;
        this.table = new ENTRY[initCapacity];
        this.header = new ENTRY(0, 0, null);
        this.header.link_next = header.link_prev = header;
        threshold = (int) (initCapacity * loadFactor);
    }

    public IntIntLinkedMap() {
        this(DEFAULT_CAPACITY, DEFAULT_LOAD_FACTOR);
    }

    public int size() {
        return count;
    }

    /** Returns the keys in insertion order as a new array. */
    public int[] keyArray() {
        int[] _keys = new int[this.size()];
        IntEnumer en = this.keys();
        for (int i = 0; i < _keys.length; i++)
            _keys[i] = en.nextInt();
        return _keys;
    }

    public synchronized IntEnumer keys() {
        return new Enumer(TYPE.KEYS);
    }

    public synchronized IntEnumer values() {
        return new Enumer(TYPE.VALUES);
    }

    public synchronized Enumeration<ENTRY> entries() {
        return new Enumer<ENTRY>(TYPE.ENTRIES);
    }

    /** Linear scan over all buckets; O(n). */
    public synchronized boolean containsValue(int value) {
        ENTRY tab[] = table;
        for (int i = tab.length; i-- > 0;) {
            for (ENTRY e = tab[i]; e != null; e = e.hash_next) {
                if (CompareUtil.equals(e.value, value)) {
                    return true;
                }
            }
        }
        return false;
    }

    public synchronized boolean containsKey(int key) {
        ENTRY tab[] = table;
        int index = hash(key) % tab.length;
        ENTRY e = tab[index];
        while (e != null) {
            if (CompareUtil.equals(e.key, key)) {
                return true;
            }
            e = e.hash_next;
        }
        return false;
    }

    /** Returns the mapped value, or {@code NONE} when the key is absent. */
    public synchronized int get(int key) {
        ENTRY tab[] = table;
        int index = hash(key) % tab.length;
        for (ENTRY e = tab[index]; e != null; e = e.hash_next) {
            if (CompareUtil.equals(e.key, key)) {
                return e.value;
            }
        }
        return NONE;
    }

    // NOTE: on an empty map these return the sentinel header's key/value (0).
    public synchronized int getFirstKey() {
        return this.header.link_next.key;
    }

    public synchronized int getLastKey() {
        return this.header.link_prev.key;
    }

    public synchronized int getFirstValue() {
        return this.header.link_next.value;
    }

    public synchronized int getLastValue() {
        return this.header.link_prev.value;
    }

    // Strip the sign bit so the bucket index is non-negative.
    private int hash(int key) {
        return key & Integer.MAX_VALUE;
    }

    /** Doubles the bucket array and redistributes entries; link order is untouched. */
    protected void rehash() {
        int oldCapacity = table.length;
        ENTRY oldMap[] = table;
        int newCapacity = oldCapacity * 2 + 1;
        ENTRY newMap[] = new ENTRY[newCapacity];
        threshold = (int) (newCapacity * loadFactor);
        table = newMap;
        for (int i = oldCapacity; i-- > 0;) {
            ENTRY old = oldMap[i];
            while (old != null) {
                ENTRY e = old;
                old = old.hash_next;
                int key = e.key;
                int index = hash(key) % newCapacity;
                e.hash_next = newMap[index];
                newMap[index] = e;
            }
        }
    }

    // When > 0, _put evicts oldest entries to keep size below this bound.
    private int max;

    public IntIntLinkedMap setMax(int max) {
        this.max = max;
        return this;
    }

    // FORCE_* reposition an existing entry; FIRST/LAST only apply to new entries.
    private static enum MODE {
        FORCE_FIRST, FORCE_LAST, FIRST, LAST
    };

    public int put(int key, int value) {
        return _put(key, value, MODE.LAST);
    }

    public int putLast(int key, int value) {
        return _put(key, value, MODE.FORCE_LAST);
    }

    public int putFirst(int key, int value) {
        return _put(key, value, MODE.FORCE_FIRST);
    }

    /**
     * Inserts or updates a mapping.
     * @return the previous value, or {@code NONE} if the key was new
     */
    private synchronized int _put(int key, int value, MODE m) {
        ENTRY tab[] = table;
        int index = hash(key) % tab.length;
        for (ENTRY e = tab[index]; e != null; e = e.hash_next) {
            if (CompareUtil.equals(e.key, key)) {
                int old = e.value;
                e.value = value;
                // FORCE_* modes move an existing entry to the requested end of the list.
                switch (m) {
                case FORCE_FIRST:
                    if (header.link_next != e) {
                        unchain(e);
                        chain(header, header.link_next, e);
                    }
                    break;
                case FORCE_LAST:
                    if (header.link_prev != e) {
                        unchain(e);
                        chain(header.link_prev, header, e);
                    }
                    break;
                }
                return old;
            }
        }
        // Evict oldest entries while at capacity (requires remove() to unchain,
        // otherwise removeFirst() would keep seeing the same stale head and spin).
        if (max > 0) {
            while (count >= max) {
                removeFirst();
            }
        }
        if (count >= threshold) {
            rehash();
            tab = table;
            index = hash(key) % tab.length;
        }
        ENTRY e = new ENTRY(key, value, tab[index]);
        tab[index] = e;
        switch (m) {
        case FORCE_FIRST:
        case FIRST:
            chain(header, header.link_next, e);
            break;
        case FORCE_LAST:
        case LAST:
            chain(header.link_prev, header, e);
            break;
        }
        count++;
        return NONE;
    }

    /**
     * Removes a mapping from both the hash table and the insertion-order list.
     * @return the removed value, or {@code NONE} if the key was absent
     */
    public synchronized int remove(int key) {
        ENTRY tab[] = table;
        int index = hash(key) % tab.length;
        ENTRY e = tab[index];
        ENTRY prev = null;
        while (e != null) {
            if (CompareUtil.equals(e.key, key)) {
                if (prev != null) {
                    prev.hash_next = e.hash_next;
                } else {
                    tab[index] = e.hash_next;
                }
                count--;
                int oldValue = e.value;
                e.value = NONE;
                // BUG FIX: this call was commented out, which left removed entries on the
                // linked list — stale keys appeared in iteration/getFirstKey(), and the
                // max-eviction loop in _put() could spin forever on a stale head.
                unchain(e);
                return oldValue;
            }
            prev = e;
            e = e.hash_next;
        }
        return NONE;
    }

    public synchronized int removeFirst() {
        if (isEmpty())
            return 0;
        return remove(header.link_next.key);
    }

    public synchronized int removeLast() {
        if (isEmpty())
            return 0;
        return remove(header.link_prev.key);
    }

    public boolean isEmpty() {
        return size() == 0;
    }

    public synchronized void clear() {
        ENTRY tab[] = table;
        for (int index = tab.length; --index >= 0;)
            tab[index] = null;
        this.header.link_next = header;
        this.header.link_prev = header;
        count = 0;
    }

    public String toString() {
        StringBuffer buf = new StringBuffer();
        Enumeration it = entries();
        buf.append("{");
        for (int i = 0; it.hasMoreElements(); i++) {
            ENTRY e = (ENTRY) (it.nextElement());
            if (i > 0)
                buf.append(", ");
            buf.append(e.getKey() + "=" + e.getValue());
        }
        buf.append("}");
        return buf.toString();
    }

    public String toFormatString() {
        StringBuffer buf = new StringBuffer();
        Enumeration it = entries();
        buf.append("{\n");
        while (it.hasMoreElements()) {
            ENTRY e = (ENTRY) it.nextElement();
            buf.append("\t").append(e.getKey() + "=" + e.getValue()).append("\n");
        }
        buf.append("}");
        return buf.toString();
    }

    /** A map node; lives simultaneously in a hash bucket chain and the order list. */
    public static class ENTRY {
        int key;
        int value;
        ENTRY hash_next;            // next node in the same hash bucket
        ENTRY link_next, link_prev; // neighbors in insertion order

        protected ENTRY(int key, int value, ENTRY next) {
            this.key = key;
            this.value = value;
            this.hash_next = next;
        }

        protected Object clone() {
            return new ENTRY(key, value, (hash_next == null ? null : (ENTRY) hash_next.clone()));
        }

        public int getKey() {
            return key;
        }

        public int getValue() {
            return value;
        }

        public int setValue(int value) {
            int oldValue = this.value;
            this.value = value;
            return oldValue;
        }

        public boolean equals(Object o) {
            if (!(o instanceof ENTRY))
                return false;
            ENTRY e = (ENTRY) o;
            return CompareUtil.equals(e.key, key) && CompareUtil.equals(e.value, value);
        }

        public int hashCode() {
            return key ^ value;
        }

        public String toString() {
            return key + "=" + value;
        }
    }

    private enum TYPE {
        KEYS, VALUES, ENTRIES
    }

    /** Walks the insertion-order list; serves keys, values, or entries depending on TYPE. */
    private class Enumer<V> implements Enumeration, IntEnumer {
        TYPE type;
        ENTRY entry = IntIntLinkedMap.this.header.link_next;

        Enumer(TYPE type) {
            this.type = type;
        }

        public boolean hasMoreElements() {
            return entry != null && header != entry;
        }

        public Object nextElement() {
            if (hasMoreElements()) {
                ENTRY e = entry;
                entry = e.link_next;
                switch (type) {
                case KEYS:
                    return e.key;
                case VALUES:
                    return e.value;
                default:
                    return e;
                }
            }
            throw new NoSuchElementException("no more next");
        }

        public int nextInt() {
            if (hasMoreElements()) {
                ENTRY e = entry;
                entry = e.link_next;
                switch (type) {
                case KEYS:
                    return e.key;
                case VALUES:
                    return e.value;
                }
            }
            throw new NoSuchElementException("no more next");
        }
    }

    // Insert e between link_prev and link_next in the order list.
    private void chain(ENTRY link_prev, ENTRY link_next, ENTRY e) {
        e.link_prev = link_prev;
        e.link_next = link_next;
        link_prev.link_next = e;
        link_next.link_prev = e;
    }

    // Detach e from the order list and clear its links.
    private void unchain(ENTRY e) {
        e.link_prev.link_next = e.link_next;
        e.link_next.link_prev = e.link_prev;
        e.link_prev = null;
        e.link_next = null;
    }

    public static void main(String[] args) {
        IntIntLinkedMap m = new IntIntLinkedMap();
        System.out.println(m.getFirstValue());
        System.out.println(m.getLastKey());
        for (int i = 0; i < 10; i++) {
            m.put(i, i);
            System.out.println(m);
        }
        System.out.println("==================================");
        for (int i = 0; i < 10; i++) {
            m.putLast(i, i);
            System.out.println(m);
        }
        System.out.println("==================================");
        for (int i = 0; i < 10; i++) {
            m.putFirst(i, i);
            System.out.println(m);
        }
        System.out.println("==================================");
        for (int i = 0; i < 10; i++) {
            m.removeFirst();
            System.out.println(m);
        }
    }

    private static void print(Object e) {
        System.out.println(e);
    }
}
// Copyright 2016 Google Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // //////////////////////////////////////////////////////////////////////////////// package tiger; import static tiger.ProvisionType.SET; import static tiger.ProvisionType.SET_VALUES; import com.google.common.base.Optional; import com.google.common.base.Pair; import com.google.common.base.Preconditions; import com.google.common.collect.HashMultimap; import com.google.common.collect.Iterables; import com.google.common.collect.Maps; import com.google.common.collect.SetMultimap; import com.google.common.collect.Sets; import com.squareup.javapoet.AnnotationSpec; import com.squareup.javapoet.ClassName; import com.squareup.javapoet.FieldSpec; import com.squareup.javapoet.JavaFile; import com.squareup.javapoet.MethodSpec; import com.squareup.javapoet.ParameterizedTypeName; import com.squareup.javapoet.TypeName; import com.squareup.javapoet.TypeSpec; import com.squareup.javapoet.TypeSpec.Builder; import dagger.Lazy; import dagger.MapKey; import java.io.IOException; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import javax.annotation.Generated; import javax.annotation.Nullable; import javax.annotation.processing.Messager; import javax.annotation.processing.ProcessingEnvironment; import javax.lang.model.element.AnnotationMirror; import javax.lang.model.element.AnnotationValue; import javax.lang.model.element.Element; import 
javax.lang.model.element.ExecutableElement; import javax.lang.model.element.Modifier; import javax.lang.model.element.TypeElement; import javax.lang.model.element.VariableElement; import javax.lang.model.type.DeclaredType; import javax.lang.model.type.ExecutableType; import javax.lang.model.type.TypeMirror; import javax.lang.model.util.Elements; import javax.lang.model.util.Types; import javax.tools.Diagnostic.Kind; /** * `Shared function between hub and packaged injector of tiger 3. Below might be obsoleted and need * review. * * <p>TODO: handle generic everywhere properly without assuming it is not generic, maybe using * {@link TypeName} always, with utilities to convert it back to {@link * Element} and {@link TypeMirror}. * * <p>Generates packaged injectors, multi-binding injectors and top level injectors. The former is * to get around accessibility limitation. The second is unscoped injector dedicated for * multi-bindings. There is only one class for each scope, though could/will be multiple instances. * Top level injectors orchestrate everything. * * <p>PackagedInjector for a {@link BindingKey} is decided this way. * * <ul> * <li>Provided Set(potentially all multi) binding: unscoped dedicated global multi-binding * injector. All multi-bindings are unscoped. If scoped, ignored(or disallowed at all). For * each binding of a multi-binding, they will have a non-null dependencyInfo from which the * packaged injector can be deduced. Contributors of multi-bindings providing non-set binding * and handled normally as below. * <li>Built-in binding, i.e., some_qualifier Provider<Foo> or Lazy<Foo>: binding provider package * of some_qualifier Foo. * <li>Provided non-set binding: binding provider package. * <li>Other(MUST be resolved by generic binding): go to referencing package. if Referencing class * is generic, then continue. This way we could run into to access problem if the referenced * generic class is not public. 
But better chance is that the type variable is not public but
 *       accessible by the original non-generic class. Let's see how it works. This therefore could
 *       be created in multiple PackagedInjectors. It MUST be unscoped.
 * </ul>
 *
 * Generated injectors are not thread safe for performance reason. Callers must guarantee thread
 * safe if needed.
 *
 * <p>All types are provided by packaged injectors and referred by other packaged injectors and
 * top-level injectors. The ONLY exception is Subcomponents. In packaged injectors the subcomponent
 * in context is returned. But the top level provision method will construct the component by its
 * ctor.
 *
 * <p>TODO: revisit all the asMemberOf to make sure only direct container is used.
 */
abstract class GeneralInjectorGenerator {

  private static final String TAG = "GeneralInjectorGenerator";

  /**
   * Used for value of @Generated(). It starts with "dagger." so that it will be exempted from
   * strict java deps check. TODO(freeman): change it to tiger.
   */
  private static final String GENERATOR_NAME = "dagger." + TAG;

  private static final String INITIALIZE_METHOD_NAME = "initialize";
  private static final String INJECT_METHOD_NAME = "inject";
  protected final String TIGER_PROXY_NAME = "TigerProxy";
  private static final boolean LOG_PROVISION_METHOD_ENABLED = false;
  protected final static String LOCK_HOLDER_PACKAGE_STRING = "lock.holder";
  protected final static String LOCK_HOLDER_CLASS_STRING = "LockHolder";
  // NOTE(review): constant name is missing the trailing 'G' ("STRIN") — looks like a typo,
  // but renaming would touch all usages; left as-is.
  protected final static String LOCK_HOLDER_FIELD_STRIN = "theLock";
  static final String MULTI_BINDING_INJECTOR_NAME = "MultiBindingInjector";
  protected static final String TOP_LEVEL_INJECTOR_FIELD = "topLevelInjector";
  // Refers the packaged injector for parent scope.
  private static final String CONTAINING_PACKAGED_INJECTOR_FIELD = "containingPackagedInjector";
  private static final String UNSCOPED_SUFFIX = "_unscoped";

  // This does not include key for injected class, but does include its injected memebers.
  protected SetMultimap<BindingKey, DependencyInfo> dependencies;
  protected final Set<TypeElement> componentDependencies;
  protected final Utils utils;

  /**
   * Includes multi-binding package. We use name instead of key because {@link
   * PackagedInjectorGenerator} will create local and global version for public types provided.
   */
  protected final Set<String> generatedBindings = new HashSet<>();

  // From packaged injector to spec builder.
  protected final Map<ClassName, Builder> packagedInjectorBuilders = Maps.newHashMap();

  // From packaged injector to injected ClassName.
  private final SetMultimap<ClassName, ClassName> injectedClassNamesForPackagedInjector =
      HashMultimap.create();

  // Annotation-processing environment handles, captured in the ctor.
  protected final ProcessingEnvironment processingEnv;
  protected final Messager messager;
  protected final Elements elements;
  protected final Types types;

  // Builders for the injector class / its initialize() method, created in generate().
  protected Builder injectorBuilder;
  protected MethodSpec.Builder initializeBuilder;

  // De-duplication sets so each proxy getter / type getter / field is generated once.
  private final Set<String> proxiesWhoseGetterGenerated = new HashSet<>();
  private final Set<ClassName> typesWhoseGetterGenerated = new HashSet<>();
  private final Set<String> fieldsGenerated = new HashSet<>();

  // method simple name and type.
  protected final Set<Pair<String, TypeName>> injectionMethodsDone = new HashSet<>();
  protected final Logger logger;
  protected final Set<TypeElement> modules;
  protected Set<TypeElement> nonNullaryCtorModules;
  private String initializeMethodName;

  /**
   * Captures processing-environment utilities and splits {@code modules} into those needing
   * non-nullary construction (via {@code utils.getNonNullaryCtorOnes}).
   */
  public GeneralInjectorGenerator(
      SetMultimap<BindingKey, DependencyInfo> dependencies,
      Set<TypeElement> modules,
      Set<TypeElement> componentDependencies,
      ProcessingEnvironment env,
      Utils utils) {
    this.processingEnv = env;
    this.messager = env.getMessager();
    this.elements = env.getElementUtils();
    this.types = env.getTypeUtils();
    this.dependencies = dependencies;
    this.componentDependencies = componentDependencies;
    this.utils = utils;
    logger = new Logger(messager, Kind.WARNING);
    this.modules = modules;
    nonNullaryCtorModules = utils.getNonNullaryCtorOnes(modules);
    logger.n("modules: \n%s\n nonnullaryctor: \n%s\n", modules, nonNullaryCtorModules);
  }

  /** These are for the class header. */
  protected abstract String getPackageString();

  protected abstract String getInjectorSimpleName();

  // external dependencies
  protected abstract Set<TypeName> getSuperInterfaces();

  // dependency map
  protected abstract Set<BindingKey> getAllCtorParameters();

  // Those can be either provided(first) or injected(second) by this injector.
  protected abstract Pair<Set<BindingKey>, Set<BindingKey>> getProduced();

  protected abstract String getProvisionMethodName(BindingKey key);

  /** used for the part after "var = " */
  protected abstract void addNewStatementToMethodSpec(
      MethodSpec.Builder methodSpecBuilder, DependencyInfo dependencyInfo, String newVarName);

  // Do implementation specific stuff.
  protected abstract void preGenerateProduced();

  // Do implementation specific stuff.
protected abstract void postGenerateProduced();

  /**
   * Entry point: builds the injector TypeSpec (ctor, provision/injection methods) and writes the
   * generated Java file through the annotation-processing Filer.
   */
  public void generate() {
    injectorBuilder = createInjectorBuilder();
    initializeBuilder =
        MethodSpec.methodBuilder(INITIALIZE_METHOD_NAME).addModifiers(Modifier.PUBLIC);
    // messager.printMessage(Kind.NOTE,
    // "generatedBindings: " + generatedBindings);
    // logger.n("" + keyToPackagedInjectorMap);
    generateCtor();
    preGenerateProduced();
    generateProduced();
    postGenerateProduced();
    // injectorBuilder.addMethod(initializeBuilder.build());

    // Write
    JavaFile javaFile = JavaFile.builder(getPackageString(), injectorBuilder.build()).build();
    try {
      // logger.n(
      //     " package:%s\n%s",
      //     getPackageString(),
      //     new StringBuilder().append(javaFile.toJavaFileObject().getCharContent(true)).toString());
      javaFile.writeTo(processingEnv.getFiler());
    } catch (IOException e) {
      // IO failure on Filer output is only logged, not rethrown.
      logger.e(e.toString());
    }
  }

  /**
   * Generates provision methods for the first set of {@link #getProduced()} and injection methods
   * for the second set.
   * NOTE(review): mixes {@code getFirst()}/{@code getSecond()} accessors with direct
   * {@code first}/{@code second} field access on the same Pair type — confirm both are exposed.
   */
  private void generateProduced() {
    logger.w("provision count: %d", getProduced().getFirst().size());
    logger.w("injection count: %d", getProduced().getSecond().size());
    for (BindingKey key : getProduced().first) {
      generateProvisionMethodIfNeeded(key);
    }
    for (BindingKey key : getProduced().second) {
      generateInjectionMethod(utils.getTypeElement(key), "inject");
    }
  }

  /**
   * Generates the injector constructor: one parameter + backing field + assignment per ctor
   * binding key (component dependencies, @BindsInstance instances, passed modules), sorted for a
   * deterministic signature.
   */
  private void generateCtor() {
    // Ctor
    MethodSpec.Builder ctorBuilder =
        MethodSpec.constructorBuilder().addModifiers(Modifier.PUBLIC);
    // ctorBuilder.addStatement(
    //     "$T.out.printf($S, $L)", ClassName.get(System.class), "This is tiger: %s\n", "this");

    // TODO: maybe remove this.
    // Ctor - ancester top level injectors.
    // CoreInjectorInfo tmp = coreInjectorInfo;
    // while (componentTree.get(tmp) != null) {
    //   tmp = componentTree.get(tmp);
    //   ClassName className = ClassName.get(topLevelPackageString,
    //       getTopLevelInjectorName(tmp, topLevelInjectorPrefix, topLevelInjectorSuffix));
    //   String sourceCodeName = utils.getSourceCodeName(className);
    //   injectorBuilder.addField(className, sourceCodeName);
    //   if (tmp.equals(componentTree.get(coreInjectorInfo))) {
    //     ctorBuilder.addStatement(
    //         "this.$L = $L", sourceCodeName, containingInjectorName);
    //   } else {
    //     ctorBuilder.addStatement(
    //         "this.$L = $L.$L", sourceCodeName, containingInjectorName, sourceCodeName);
    //   }
    // }

    /**
     * Ctor - Component dependencies, @BindsInstance and Passed modules. All sorted together so that
     * it is easier to implement {@link #generateGetSubcomponentMethod(ExecutableElement, Builder)}
     */
    Set<BindingKey> allParameters = getAllCtorParameters();
    for (BindingKey key : utils.sortBindingKeys(allParameters)) {
      TypeName typeName = key.getTypeName();
      String sourceCodeName = utils.getSourceCodeName(key);
      generateFieldIfNeeded(typeName, sourceCodeName);
      ctorBuilder
          .addParameter(typeName, sourceCodeName)
          .addStatement("this.$L = $L", sourceCodeName, sourceCodeName);
    }

    // TODO: handle ApplicationCompoent in a consistent way.
    if (!Sets.newHashSet(
            "DaggerApplicationComponent",
            "DaggerRootApplicationComponent",
            "DaggerPlatformApplicationComponentImpl")
        .contains(getInjectorSimpleName())) {
      // ctorBuilder.addStatement("$L()", INITIALIZE_METHOD_NAME);
    }
    injectorBuilder.addMethod(ctorBuilder.build());
  }

  protected final static String getPackageFromInjectorClassName(ClassName injectorClassName) {
    return injectorClassName.packageName();
  }

  /** Creates the top-level class builder: public, @Generated, implementing the super interfaces. */
  protected final Builder createInjectorBuilder() {
    String injectorSimpleName = getInjectorSimpleName();
    logger.n("generated component " + injectorSimpleName);

    Builder result =
        TypeSpec.classBuilder(injectorSimpleName)
            .addModifiers(Modifier.PUBLIC)
            .addAnnotation(
                AnnotationSpec.builder(Generated.class)
                    .addMember("value", "$S", GENERATOR_NAME)
                    .build());
    for (TypeName typeName : getSuperInterfaces()) {
      result.addSuperinterface(typeName);
    }
    return result;
  }

  /**
   * For all bindings but single contributor of a multi-binding, which is handled by {@link
   * #getPackagedInjectorNameForDependencyInfo(BindingKey, DependencyInfo)} . For generic binding,
   * package of referencing class has access to both raw type and parameter types, though the
   * provision method generated for it will be duplicated in each such package.
   */
  protected final ClassName getInjectorNameFor(BindingKey key, TypeElement referencingClass) {
    ClassName result = null;
    TypeElement scope = null; // scopeCalculator.calculate(key);
    DependencyInfo dependencyInfo =
        Iterables.getFirst(utils.getDependencyInfo(dependencies, key), null);
    // logger.n("" + key + " dependencyInfo: "
    // + dependencyInfo);
    String packageString = null;
    boolean isMultiBinding = false;
    if (dependencyInfo != null) {
      isMultiBinding = dependencyInfo.isMultiBinding();
      Preconditions.checkNotNull(
          dependencyInfo.getSourceClassElement(),
          "DependencyInfo without source class? " + dependencyInfo);
      packageString =
          utils.getPackage(dependencyInfo.getSourceClassElement()).getQualifiedName().toString();
    } else if (utils.isProviderOrLazy(key)) {
      // Built-in Provider<T>/Lazy<T>: resolve recursively using the element key T.
      result =
          getInjectorNameFor(utils.getElementKeyForParameterizedBinding(key), referencingClass);
    } else {
      // TODO: clean this.
      logger.n("not found for key: " + key);
      DependencyInfo genericDependencyInfo = utils.getDependencyInfoByGeneric(dependencies, key);
      if (genericDependencyInfo != null) {
        // Generic binding: generate into the referencing class's package.
        packageString = utils.getPackageString(referencingClass);
      } else {
        logger.e(String.format("Cannot resolve %s.", key));
      }
    }
    if (result == null) {
      String simpleName =
          isMultiBinding
              ? Utils.getMultiBindingInjectorSimpleName(scope)
              : Utils.getPackagedInjectorSimpleName(scope);
      result = ClassName.get(isMultiBinding ? "" /*topLevelPackageString*/ : packageString, simpleName);
    }
    return result;
  }

  private String getInjectorSimpleName(TypeElement eitherComponent) {
    return utils.getComponentImplementationSimpleNameFromInterface(eitherComponent);
  }

  /** From the given dependencyInfo contributes to given key. */
  protected final ClassName getPackagedInjectorNameForDependencyInfo(
      BindingKey key, DependencyInfo dependencyInfo) {
    TypeElement scope = null; // scopeCalculator.calculate(key);
    return getPackagedInjectorNameForDependencyInfo(scope, dependencyInfo);
  }

  /** From the given dependencyInfo contributes to given key. */
  protected final ClassName getPackagedInjectorNameForDependencyInfo(
      TypeElement scope, DependencyInfo dependencyInfo) {
    return ClassName.get(
        utils.getPackageString(dependencyInfo.getSourceClassElement()),
        Utils.getPackagedInjectorSimpleName(scope));
  }

  /**
   * Central dispatch: generates the provision method for {@code key} exactly once
   * (guarded by {@link #generatedBindings}), routing on the dependency's type and source,
   * then wraps it with a scoped (double-field) variant when the binding is scoped.
   */
  protected final void generateProvisionMethodIfNeeded(BindingKey key) {
    logger.n("key: %s", key.toString());
    // TODO: put all the dependency handling logic in one place
    Set<DependencyInfo> dependencyInfos = Utils.getDependencyInfosHandlingBox(dependencies, key);
    DependencyInfo dependencyInfo =
        dependencyInfos == null ? null : Iterables.getFirst(dependencyInfos, null);
    // TODO: handle boxing better.
    if (dependencyInfo != null) {
      key = dependencyInfo.getDependant();
    }
    // Already generated for this provision-method name: nothing to do.
    if (!generatedBindings.add(getProvisionMethodName(key))) {
      return;
    }
    logger.n("dI: " + dependencyInfo);
    // logger.n("DependencyInfo: " +
    // dependencyInfo);
    // logger.n("scope: " +
    // scopeCalculator.calculate(key));
    boolean scoped = utils.isScoped(dependencyInfo); // explicitScopes.contains(key);
    String suffix = scoped ? UNSCOPED_SUFFIX : "";
    /**
     * TODO: revist this and handle it in a consistent way with the ones below. This is related with
     * {@link Utils#getDependencyInfo(SetMultimap, BindingKey)}.
     */
    if (utils.isOptional(key)
        && utils.isBindsOptionalOf(utils.getDependencyInfo(dependencies, key))) {
      generateProvisionMethodForBindsOptionalOf(key, suffix);
    } else if (dependencyInfo != null) {
      switch (dependencyInfo.getType()) {
        case SET:
        case SET_VALUES:
          // TODO: revisit scoped
          // TODO: Multi-bindings are handled multiple time for each package if there more multiple
          // contributors from
          // that package. But that's fine because the nature of Set and Map.
          scoped = false;
          generateProvisionMethodForSet(key, "");
          break;
        case MAP:
          // TODO: refactor here and below.
          // TODO: revisit scoped
          scoped = false;
          generateProvisionMethodForMap(key, "");
          break;
        case UNIQUE:
          switch (dependencyInfo.getDependencySourceType()) {
            case MODULE:
              generateProvisionMethodFromModuleUniqueBinding(key, suffix);
              break;
            case CTOR_INJECTED_CLASS:
              generateProvisionMethodFromClass(key, suffix);
              break;
            case DAGGER_MEMBERS_INJECTOR:
              generateProvisionMethodForDaggerMembersInjector(key, suffix);
              break;
            case COMPONENT_DEPENDENCIES_METHOD:
              generateProvisionMethodFromComponentDependency(key);
              break;
            case BINDS_INTANCE:
            case COMPONENT_DEPENDENCIES_ITSELF:
              /** TODO: move hub related stuff away to {@link Tiger2InjectorGenerator} */
              generateProvisionMethodFromBindsInstance(key);
              break;
            case EITHER_COMPONENT:
              generateProvisionMethodForEitherComponent(key);
              break;
            case EITHER_COMPONENT_BUILDER:
              generateProvisionMethodForEitherComponentBuilder(key);
              break;
            default:
              throw new RuntimeException(
                  "Shouln't be here. dependencyInfo.dependencySourceType: "
                      + dependencyInfo.getDependencySourceType());
          }
          break;
        default:
          throw new RuntimeException("Unknown dependencyInfo.type: " + dependencyInfo.getType());
      }
    } else if (utils.isProviderOrLazy(key)) {
      generateProvisionMethodForProviderOrLazy(key, suffix);
    } else if (utils.isMap(key)) {
      Preconditions.checkState(
          utils.isMapWithBuiltinValueType(key), "Expect map with builtin type but got: " + key);
      generateProvisionMethodForMap(key, suffix);
    } else {
      // Fallback: no direct dependency info — must be resolvable via a generic binding,
      // otherwise emit a debug method instead of failing the build.
      logger.w("really!? we have handling generic: %s", key);
      logger.n("stack:");
      for (StackTraceElement e : new Exception("").getStackTrace()) {
        logger.n("%s", e);
      }
      logger.n("dependencies:\n%s", dependencies);
      DependencyInfo genericDependencyInfo = utils.getDependencyInfoByGeneric(dependencies, key);
      if (genericDependencyInfo != null) {
        if (genericDependencyInfo.getProvisionMethodElement() == null) {
          generateProvisionMethodFromClass(key, suffix);
        } else {
          logger.e(
              "Generic provision method not supported yet: %s -> %s", key, genericDependencyInfo);
        }
      } else {
        utils.generateDebugInfoMethod(
            injectorBuilder,
            "diNotFound",
            String.format(
                "(sub)component: %s, Cannot resolve %s\nstack: %s\ndependencies: %s",
                getCurrentEitherComponent(), key, utils.getStack(), dependencies));
        logger.w(
            "(sub)component: %s, Cannot resolve %s\nstack: %s\ndependencies: %s",
            getCurrentEitherComponent(), key, utils.getStack(), dependencies);
        // throw new RuntimeException("cannot resolve " + key);
      }
    }
    if (scoped) {
      // Scoped binding: cache field + lock field + a caching wrapper around the _unscoped method.
      generateFieldIfNeeded(key.getTypeName(), getFieldName(key));
      generateFieldIfNeededAndInitialize(ClassName.get(Object.class), getLockFieldName(key));
      generateScopedProvisionMethod(injectorBuilder, key);
    }
  }

  private void generateProvisionMethodForEitherComponent(BindingKey key) {
    generateProvisionMethodForThoseFromTopLevel(key);
  }

  protected final void generateProvisionMethodFromComponentDependency(BindingKey key) {
    generateProvisionMethodForThoseFromTopLevel(key);
  }

  /**
   * for {@link DependencySourceType#COMPONENT_DEPENDENCIES_ITSELF}, {link {@link
   * DependencySourceType#COMPONENT_DEPENDENCIES_METHOD} and {@link
   * DependencySourceType#BINDS_INTANCE}, {@link DependencySourceType#EITHER_COMPONENT}, {@link
   * DependencySourceType#EITHER_COMPONENT_BUILDER}
   */
  protected final void generateProvisionMethodForThoseFromTopLevel(BindingKey key) {
    DependencyInfo dependencyInfo =
        Iterables.getOnlyElement(Utils.getDependencyInfosHandlingBox(dependencies, key));
    DependencySourceType dependencySourceType =
        dependencyInfo.getDependencySourceType();
    MethodSpec.Builder methodSpecBuilder =
        MethodSpec.methodBuilder(getProvisionMethodName(key))
            .addModifiers(Modifier.PUBLIC)
            .returns(key.getTypeName());
    onProvisionMethodStart(methodSpecBuilder, key);
    methodSpecBuilder.addStatement("$T result", key.getTypeName());
    StringBuilder builder = new StringBuilder("result = ");
    switch (dependencySourceType) {
      case COMPONENT_DEPENDENCIES_METHOD:
        // Delegate to the provision method on the component-dependency instance.
        builder
            .append(utils.getSourceCodeName(dependencyInfo.getSourceClassElement()))
            .append(".")
            .append(dependencyInfo.getProvisionMethodElement().getSimpleName())
            .append("()");
        break;
      case COMPONENT_DEPENDENCIES_ITSELF:
        // fall through
      case BINDS_INTANCE:
        // The instance was handed to the injector up front; read it from its field.
        builder.append(Utils.getSourceCodeNameHandlingBox(key, dependencies));
        break;
      case EITHER_COMPONENT:
        /** TODO: see {@link #generateGetSubcomponentMethod(ExecutableElement, Builder)} */
        // NOTE(review): when the key is not a component, nothing is appended and the generated
        // statement degenerates to "result = " — confirm this case cannot occur here.
        if (utils.isComponent(utils.getTypeElement(key))) {
          builder.append("this");
        }
        break;
      case EITHER_COMPONENT_BUILDER:
        generateImplicitProvisionMethodForEitherComponentBuilder(
            injectorBuilder, utils.getTypeElement(dependencyInfo.getDependant()));
        TypeElement eitherComponentBuilder =
            utils.getTypeElementForClassName((ClassName) key.getTypeName());
        builder.append(utils.getGetMethodName(eitherComponentBuilder)).append("()");
        break;
      default:
        throw new RuntimeException("Unexpected dependencySourceType from dI: " + dependencyInfo);
    }
    methodSpecBuilder.addStatement(builder.toString());
    methodSpecBuilder.addStatement("return result");
    onProvisionMethodEnd(methodSpecBuilder, key);
    injectorBuilder.addMethod(methodSpecBuilder.build());
  }

  protected final void generateProvisionMethodFromBindsInstance(BindingKey key) {
    generateProvisionMethodForThoseFromTopLevel(key);
  }

  /**
   * Generates the provision method for a BindsOptionalOf binding: returns Optional.of(value)
   * when a binding for the element type exists, Optional.absent() otherwise.
   */
  protected final void generateProvisionMethodForBindsOptionalOf(BindingKey key, String suffix) {
    BindingKey elementKey = utils.getElementKeyForParameterizedBinding(key);
    Set<DependencyInfo> dependencyInfos = utils.getDependencyInfo(dependencies, elementKey);
    boolean present = dependencyInfos != null;
    MethodSpec.Builder methodSpecBuilder =
        MethodSpec.methodBuilder(getProvisionMethodName(key) + suffix)
            .addModifiers(suffix.isEmpty() ? Modifier.PUBLIC : Modifier.PRIVATE)
            .returns(key.getTypeName());
    onProvisionMethodStart(methodSpecBuilder, key);
    if (present) {
      methodSpecBuilder.addStatement("$T value", elementKey.getTypeName());
      // Side effect only: throws if there is more than one binding for the element key.
      DependencyInfo dependencyInfo = Iterables.getOnlyElement(dependencyInfos);
      generateProvisionMethodIfNeeded(elementKey);
      StringBuilder stringBuilder = new StringBuilder("value = ");
      addCallingProvisionMethod(stringBuilder, elementKey);
      methodSpecBuilder.addStatement(stringBuilder.toString());
      methodSpecBuilder.addStatement("return $T.of(value)", ClassName.get(Optional.class));
    } else {
      methodSpecBuilder.addStatement("return $T.absent()", ClassName.get(Optional.class));
    }
    onProvisionMethodEnd(methodSpecBuilder, key);
    injectorBuilder.addMethod(methodSpecBuilder.build());
  }

  /** Opens a try block and logs entry into the generated provision method, when enabled. */
  protected final void onProvisionMethodStart(MethodSpec.Builder methodSpecBuilder, BindingKey key) {
    if (!LOG_PROVISION_METHOD_ENABLED) {
      return;
    }
    // methodSpecBuilder.addStatement(
    //     "logger.atInfo().log($S)", "providing starts before lock: " + key);
    // methodSpecBuilder
    //     .addStatement(
    //         "$L.$L.$L.lock()",
    //         LOCK_HOLDER_PACKAGE_STRING,
    //         LOCK_HOLDER_CLASS_STRING,
    //         LOCK_HOLDER_FIELD_STRIN);
    methodSpecBuilder.beginControlFlow("try");
    methodSpecBuilder.addStatement("logger.atInfo().log($S)", "{ providing starts: " + key);
  }

  /** Closes the try opened by {@link #onProvisionMethodStart} with a logging finally block. */
  protected final void onProvisionMethodEnd(MethodSpec.Builder methodSpecBuilder, BindingKey key) {
    if (!LOG_PROVISION_METHOD_ENABLED) {
      return;
    }
    methodSpecBuilder
        .nextControlFlow("finally")
        .addStatement("logger.atInfo().log($S)", "} providing ends: " + key);
    // methodSpecBuilder.addStatement(
    //     "$L.$L.$L.unlock()",
    //     LOCK_HOLDER_PACKAGE_STRING,
    //     LOCK_HOLDER_CLASS_STRING,
    //     LOCK_HOLDER_FIELD_STRIN);
    methodSpecBuilder.endControlFlow();
  }

  // TODO: Refactor, so far this happens to work for provision
  // methods from component dependencies.
  protected final void generateProvisionMethodFromModuleUniqueBinding(BindingKey key, String suffix) {
    // DependencyInfo dependencyInfo =
    //     Iterables.getOnlyElement(Utils.getDependencyInfosHandlingBox(dependencies, key));
    // TODO: this is a hack to make it build for same type bound in one package to different types.
    Set<DependencyInfo> dependencyInfos = Utils.getDependencyInfosHandlingBox(dependencies, key);
    if (dependencyInfos.size() > 1) {
      // Multiple "unique" bindings: warn and arbitrarily pick the first one (see hack above).
      logger.w("multiple unique bindings found for key: %s, bindings: %s", key, dependencyInfos);
    }
    DependencyInfo dependencyInfo =
        Preconditions.checkNotNull(
            Iterables.getFirst(dependencyInfos, null), "binding not found for key " + key);
    generateProvisionMethodFromModuleBinding(dependencyInfo, suffix, "");
    // logger.n(String.format(
    //     "generateUniqueTypeProvisionMethodFromModule: \n key: %s, \n injector: %s, \n method: %s.",
    //     key, injectorClassName, methodSpecBuilder.build()));
  }

  /**
   * empty methodName for default behavior, otherwise for special case like multi-binding
   * contributors from {@link PackagedInjectorGenerator}}
   */
  protected final void generateProvisionMethodFromModuleBinding(
      DependencyInfo dependencyInfo, String suffix, String methodName) {
    Preconditions.checkNotNull(dependencyInfo.getProvisionMethodElement());
    BindingKey key = dependencyInfo.getDependant();
    TypeMirror returnType = dependencyInfo.getProvisionMethodElement().getReturnType();
    BindingKey returnKey = BindingKey.get(returnType, key.getQualifier());
    MethodSpec.Builder methodSpecBuilder =
        MethodSpec.methodBuilder(
            !methodName.isEmpty() ? methodName : getProvisionMethodName(returnKey) + suffix);
    methodSpecBuilder
        .addModifiers(suffix.isEmpty() ? Modifier.PUBLIC : Modifier.PRIVATE)
        .returns(TypeName.get(returnType));
    onProvisionMethodStart(methodSpecBuilder, key);
    /** TODO: unitfy this with {@link #generateProvisionMethodFromClass(BindingKey, String)} */
    methodSpecBuilder.addStatement("$T result", returnKey.getTypeName());
    if (methodName.isEmpty()) {
      addNewStatementToMethodSpec(methodSpecBuilder, dependencyInfo, "result");
    } else {
      addNewStatementToMethodSpecByModuleOrCtor(methodSpecBuilder, dependencyInfo, "result");
    }
    methodSpecBuilder.addStatement("return result");
    onProvisionMethodEnd(methodSpecBuilder, key);
    injectorBuilder.addMethod(methodSpecBuilder.build());
  }

  /** Generates, once per package, a getter for that package's proxy class. */
  protected final void generateGetProxyMethodIfNeeded(String packageString) {
    if (!proxiesWhoseGetterGenerated.add(packageString)) {
      return;
    }
    ClassName proxyClassName = ClassName.get(packageString, TIGER_PROXY_NAME);
    generateGetTypeMethod(proxyClassName);
  }

  /** generate a getXXX() that returns xxx field, initialize it if needed. */
  protected final void generateGetTypeMethod(ClassName proxyClassName) {
    String proxySourceName = Utils.getSourceCodeName(proxyClassName);
    generateFieldIfNeeded(proxyClassName, proxySourceName);
    injectorBuilder.addMethod(
        MethodSpec.methodBuilder(utils.getGetMethodName(proxyClassName))
            .addModifiers(Modifier.PRIVATE)
            .returns(proxyClassName)
            .beginControlFlow("if ($L == null)", proxySourceName)
            .addStatement("$L = new $T()", proxySourceName, proxyClassName)
            .endControlFlow()
            .addStatement("return $L", proxySourceName)
            .build());
  }

  protected final void generateFieldIfNeededAndInitialize(TypeName typeName, String fieldName) {
    generateFieldIfNeededCommon(typeName, fieldName, true, "");
  }

  protected final void generateFieldIfNeeded(TypeName typeName, String fieldName) {
    generateFieldIfNeededCommon(typeName, fieldName, false, "");
  }

  protected final void generateFieldIfNeeded(
      TypeName typeName, String fieldName, String fmt, Object... args) {
    generateFieldIfNeededCommon(typeName, fieldName, true, fmt, args);
  }

  /**
   * Adds a private field once per name. When {@code initialize} is set, uses {@code fmt}/{@code
   * args} as the initializer, falling back to "new T()" if {@code fmt} is empty.
   */
  protected final void generateFieldIfNeededCommon(
      TypeName typeName, String fieldName, boolean initialize, String fmt, Object... args) {
    if (!fieldsGenerated.add(fieldName)) {
      return;
    }
    FieldSpec.Builder fieldBuilder = FieldSpec.builder(typeName, fieldName, Modifier.PRIVATE);
    if (initialize) {
      if (fmt.isEmpty()) {
        fieldBuilder.initializer("new $T()", typeName);
      } else {
        fieldBuilder.initializer(fmt, args);
      }
    }
    injectorBuilder.addField(fieldBuilder.build());
  }

  protected final void generateGetTypeMethodIfNeeded(TypeElement module) {
    generateGetTypeMethodIfNeeded(ClassName.get(module));
  }

  protected final void generateGetTypeMethodIfNeeded(ClassName className) {
    if (!typesWhoseGetterGenerated.add(className)) {
      return;
    }
    generateGetTypeMethod(className);
  }

  /**
   * For key like javax.inject.Provider<Foo> and dagger.Lazy<Foo>. Qualifier, if presented, will
   * also apply to element binding.
   */
  protected final void generateProvisionMethodForProviderOrLazy(BindingKey key, String suffix) {
    // logger.n(String.format(
    //     "generateProvisionMethodForProviderOrLazy: key %s, referencingClass: %s, suffix : %s.",
    //     key, referencingClass, suffix));
    // TypeSpec anonymousTypeSpec = createAnonymousBuiltinTypeForUniqueBinding(key);
    BindingKey elementKey = utils.getElementKeyForParameterizedBinding(key);
    generateProvisionMethodIfNeeded(elementKey);
    MethodSpec.Builder methodSpecBuilder =
        MethodSpec.methodBuilder(getProvisionMethodName(key) + suffix)
            .addModifiers(suffix.isEmpty() ?
                Modifier.PUBLIC : Modifier.PRIVATE)
            .returns(key.getTypeName());
    onProvisionMethodStart(methodSpecBuilder, key);
    // methodSpecBuilder.addStatement("$T result = $L", key.getTypeName(), anonymousTypeSpec);
    methodSpecBuilder.addStatement(
        "$T result = $T.create(()->$L())",
        key.getTypeName(),
        ClassName.get(DoubleCheckLazyProvider.class),
        getProvisionMethodName(elementKey));
    methodSpecBuilder.addStatement("return result");
    onProvisionMethodEnd(methodSpecBuilder, key);
    injectorBuilder.addMethod(methodSpecBuilder.build());
  }

  protected final TypeSpec createAnonymousBuiltinTypeForUniqueBinding(BindingKey key) {
    return createAnonymousBuiltinType(key, null);
  }

  protected final TypeSpec createAnonymousBuiltinTypeForMultiBinding(
      BindingKey key, DependencyInfo dependency) {
    return createAnonymousBuiltinType(key, dependency);
  }

  /**
   * Generate for either unique a binding or a contributor to a multi-binding. DependencyInfo is
   * null for unique one and non-null or multi-binding one. ReferencingClass is the opposite. For
   * multi-binding, referencing class is the module in dependency. Scope is null for unique binding.
   */
  protected final TypeSpec createAnonymousBuiltinType(
      BindingKey key, @Nullable DependencyInfo dependencyInfo) {
    Preconditions.checkArgument(key.getTypeName() instanceof ParameterizedTypeName);
    boolean isMultiBinding = dependencyInfo != null;
    TypeName rawTypeName = ((ParameterizedTypeName) key.getTypeName()).rawType;
    Preconditions.checkArgument(
        utils.isProviderOrLazy(key),
        String.format("Built-in binding expected(Provider or Lazy), but get %s", key));
    boolean isLazy = rawTypeName.equals(ClassName.get(Lazy.class));
    BindingKey elementKey = utils.getElementKeyForParameterizedBinding(key);
    Preconditions.checkNotNull(elementKey);
    if (!isMultiBinding) {
      String provisionMethodName = getProvisionMethodName(elementKey);
      // utils.generateDebugInfoMethod(injectorBuilder,
      //     "before" + provisionMethodName + generatedBindings.contains(provisionMethodName));
      generateProvisionMethodIfNeeded(elementKey);
    }
    // TODO: multi-threading.
    MethodSpec.Builder builderForGet =
        MethodSpec.methodBuilder("get")
            .returns(elementKey.getTypeName())
            .addAnnotation(Override.class)
            .addModifiers(Modifier.SYNCHRONIZED)
            .addModifiers(Modifier.PUBLIC);
    if (isLazy) {
      // Lazy caches its value: only compute while the cached var is still null.
      builderForGet.beginControlFlow("if (var == null)");
    }
    if (!isMultiBinding) {
      Set<DependencyInfo> dIs = Utils.getDependencyInfosHandlingBox(dependencies, elementKey);
      if (dIs != null) {
        // Box handling may have redirected the key; use the dependant's key for the call.
        elementKey =
            Preconditions.checkNotNull(
                    Iterables.getFirst(dIs, null), "key: " + elementKey + " dI: " + dIs)
                .getDependant();
      }
      builderForGet.addStatement("var = $N()", getProvisionMethodName(elementKey));
    } else {
      /**
       * TODO: revisit the logic here, current, for Provide, Lazy and Optional, the key != {@link
       * DependencyInfo#getDependant()}.
       */
      addNewStatementToMethodSpec(builderForGet, dependencyInfo, "var");
    }
    if (isLazy) {
      builderForGet.endControlFlow();
    }
    builderForGet.addStatement("return var");
    return TypeSpec.anonymousClassBuilder("")
        .addSuperinterface(key.getTypeName())
        .addField(elementKey.getTypeName(), "var", Modifier.PRIVATE)
        .addMethod(builderForGet.build())
        .build();
  }

  /**
   * Generates the public scoped provision method: a double-checked-locking wrapper that caches
   * the result of the private unscoped method in the key's field.
   */
  protected final void generateScopedProvisionMethod(Builder componentSpecBuilder, BindingKey key) {
    MethodSpec.Builder builder =
        MethodSpec.methodBuilder(getProvisionMethodName(key))
            .returns(key.getTypeName())
            .addModifiers(Modifier.PUBLIC);
    builder
        .addStatement("$T result = $N", key.getTypeName().box(), getFieldName(key))
        .beginControlFlow("if (result == null)")
        .beginControlFlow("synchronized($L)", getLockFieldName(key))
        .addStatement("result = $N", getFieldName(key))
        .beginControlFlow("if (result == null)")
        .addStatement(
            "result = $L = $L()", getFieldName(key), getProvisionMethodName(key) + UNSCOPED_SUFFIX)
        .endControlFlow() // if
        .endControlFlow() // synchronized
        .endControlFlow() // if
        .addStatement("return result");
    componentSpecBuilder.addMethod(builder.build());
    // Also eagerly populate the cache from the generated initialize() code.
    initializeBuilder.addStatement("$L = $L()", getFieldName(key), getProvisionMethodName(key));
  }

  /** Cache-field name for the given key (box-aware). */
  protected final String getFieldName(BindingKey key) {
    return Utils.getSourceCodeNameHandlingBox(key, dependencies);
  }

  /** Lock-field name guarding the cache field of the given key. */
  protected final String getLockFieldName(BindingKey key) {
    return "lock_" + Utils.getSourceCodeNameHandlingBox(key, dependencies);
  }

  /** TODO: support set of builtin types.
*/ protected final void generateProvisionMethodForSet(BindingKey key, String suffix) { // logger.n("" + key + // " PackagedInjector: " // + getInjectorFor(key, referencingClass) + " SpecBuilder: " + componentSpecBuilder); ParameterizedTypeName type = (ParameterizedTypeName) key.getTypeName(); Preconditions.checkArgument(type.rawType.equals(ClassName.get(Set.class))); TypeName elementType = Iterables.getOnlyElement(type.typeArguments); MethodSpec.Builder methodSpecBuilder = MethodSpec.methodBuilder(getProvisionMethodName(key) + suffix) .addModifiers(suffix.isEmpty() ? Modifier.PUBLIC : Modifier.PRIVATE) .returns(type); onProvisionMethodStart(methodSpecBuilder, key); methodSpecBuilder.addStatement("$T result = new $T<>()", type, HashSet.class); generateSetContributors(key, methodSpecBuilder); methodSpecBuilder.addStatement("return result"); onProvisionMethodEnd(methodSpecBuilder, key); injectorBuilder.addMethod(methodSpecBuilder.build()); } protected void generateSetContributors(BindingKey key, MethodSpec.Builder methodSpecBuilder) { Set<DependencyInfo> dependencyInfos = utils.getDependencyInfo(dependencies, key); for (DependencyInfo dependencyInfo : dependencyInfos) { // logger.n("for %s from" // + // " %s", key, packageToDependencyInfoMap.get(pkg))); ExecutableElement provisionMethodElement = dependencyInfo.getProvisionMethodElement(); boolean isSetValues = dependencyInfo.getType().equals(SET_VALUES); if (utils.isMultibindsMethod(provisionMethodElement)) { continue; } methodSpecBuilder.beginControlFlow(""); TypeName contributorType = TypeName.get(dependencyInfo.getProvisionMethodElement().getReturnType()); methodSpecBuilder.addStatement("$T contributor", contributorType); addNewStatementToMethodSpec(methodSpecBuilder, dependencyInfo, "contributor"); if (dependencyInfo.getType().equals(SET)) { methodSpecBuilder.addStatement("result.add(contributor)"); } else { Preconditions.checkState(dependencyInfo.getType().equals(SET_VALUES)); 
methodSpecBuilder.addStatement("result.addAll(contributor)"); } methodSpecBuilder.endControlFlow(); } } protected final void generateProvisionMethodForMap(final BindingKey key, String suffix) { // logger.n("" + key + // " PackagedInjector: " // + getInjectorFor(key, referencingClass) + " SpecBuilder: " + componentSpecBuilder); ParameterizedTypeName returnType = (ParameterizedTypeName) key.getTypeName(); Preconditions.checkArgument(returnType.rawType.equals(ClassName.get(Map.class))); MethodSpec.Builder methodSpecBuilder = MethodSpec.methodBuilder(getProvisionMethodName(key) + suffix) .addModifiers(suffix.isEmpty() ? Modifier.PUBLIC : Modifier.PRIVATE) .returns(returnType); onProvisionMethodStart(methodSpecBuilder, key); methodSpecBuilder.addStatement("$T result = new $T<>()", returnType, HashMap.class); generateMapContributors(key, returnType, methodSpecBuilder); methodSpecBuilder.addStatement("return result"); onProvisionMethodEnd(methodSpecBuilder, key); injectorBuilder.addMethod(methodSpecBuilder.build()); } protected final void addNewStatementToMethodSpecByModuleOrCtor( MethodSpec.Builder methodSpecBuilder, DependencyInfo dependencyInfo, String newVarName) { logger.n(" dependencyInfo : " + dependencyInfo); ExecutableElement provisionMethodElement = dependencyInfo.getProvisionMethodElement(); if (provisionMethodElement == null) { StringBuilder builder = new StringBuilder("$L = new $T("); List<BindingKey> dependenciesFromExecutableElement = utils.getDependenciesFromExecutableElement( utils.findInjectedCtor(utils.getTypeElement(dependencyInfo.getDependant()))); for (BindingKey dependentKey : dependenciesFromExecutableElement) { generateProvisionMethodAndAppendAsParameter(dependentKey, builder); } if (builder.substring(builder.length() - 2).equals(", ")) { builder.delete(builder.length() - 2, builder.length()); } builder.append(")"); methodSpecBuilder.addStatement( builder.toString(), newVarName, ClassName.get(dependencyInfo.getSourceClassElement())); } else if 
(utils.isBindsMethod(provisionMethodElement)) { // for @Binds StringBuilder stringBuilder = new StringBuilder(); addCallingProvisionMethod( stringBuilder, Iterables.getOnlyElement(dependencyInfo.getDependencies())); methodSpecBuilder.addStatement("$L = $L", newVarName, stringBuilder.toString()); } else { boolean isStaticMethod = dependencyInfo.getProvisionMethodElement().getModifiers().contains(Modifier.STATIC); StringBuilder builder = new StringBuilder("$L = "); if (!utils.isStatic(provisionMethodElement)) { TypeElement sourceClassElement = dependencyInfo.getSourceClassElement(); if (nonNullaryCtorModules.contains(sourceClassElement)) { builder.append(utils.getSourceCodeName(sourceClassElement)); } else { generateGetTypeMethodIfNeeded(sourceClassElement); builder.append(utils.getGetMethodName(sourceClassElement)).append("()"); } } else { builder.append(utils.getQualifiedName(dependencyInfo.getSourceClassElement())); } builder.append(".$N("); List<BindingKey> dependenciesFromExecutableElement = utils.getDependenciesFromExecutableElement(provisionMethodElement); for (BindingKey dependentKey : dependenciesFromExecutableElement) { generateProvisionMethodAndAppendAsParameter(dependentKey, builder); } if (builder.substring(builder.length() - 2).equals(", ")) { builder.delete(builder.length() - 2, builder.length()); } builder.append(")"); methodSpecBuilder.addStatement( builder.toString(), newVarName, provisionMethodElement.getSimpleName()); } } protected final void generateMapContributors( BindingKey key, ParameterizedTypeName returnType, MethodSpec.Builder methodSpecBuilder) { Set<DependencyInfo> dependencyInfos = utils.getDependencyInfo(dependencies, key); // TODO: remove this hack if (dependencyInfos == null) { dependencyInfos = new HashSet<>(); logger.w("no dI for key: " + key); } Preconditions.checkNotNull( dependencyInfos, String.format("dependencyInfo not found for key: %s", key)); TypeName mapKeyType = returnType.typeArguments.get(0); TypeName mapValueType = 
        returnType.typeArguments.get(1);
    BindingKey mapValueKey = BindingKey.get(mapValueType);
    methodSpecBuilder.addStatement("$T mapKey", mapKeyType);
    methodSpecBuilder.addStatement("$T mapValue", mapValueType);
    for (DependencyInfo di : dependencyInfos) {
      if (utils.isMultibindsMethod(di.getProvisionMethodElement())) {
        // @Multibinds methods only declare the map; they contribute no entry.
        continue;
      }
      AnnotationMirror mapKeyMirror =
          Utils.getAnnotationMirrorWithMetaAnnotation(di.getProvisionMethodElement(), MapKey.class);
      AnnotationValue unwrapValueAnnotationValue =
          Utils.getAnnotationValue(elements, mapKeyMirror, "unwrapValue");
      if (unwrapValueAnnotationValue != null
          && !((boolean) unwrapValueAnnotationValue.getValue())) {
        logger.e("unwrapValue = false not supported yet. Consider using set binding.");
        return;
      }
      AnnotationValue mapKey = Utils.getAnnotationValue(elements, mapKeyMirror, "value");
      logger.l(Kind.NOTE, "mapKey: %s", mapKey.toString());
      methodSpecBuilder.addStatement("mapKey = ($T) $L", mapKeyType, mapKey);
      if (utils.isMapWithBuiltinValueType(key)) {
        // Map with Provider/Lazy values: wrap the contributor in an anonymous built-in type.
        methodSpecBuilder.addStatement(
            "mapValue = $L", createAnonymousBuiltinTypeForMultiBinding(mapValueKey, di));
      } else {
        addNewStatementToMethodSpec(methodSpecBuilder, di, "mapValue");
      }
      methodSpecBuilder.addStatement("result.put(mapKey, mapValue)");
    }
  }

  protected final void generateInjectionMethod(BindingKey key) {
    generateInjectionMethod(utils.getClassFromKey(key), "inject");
  }

  /** Generates an injection method for {@code cls}, once per (methodName, erased type) pair. */
  protected final void generateInjectionMethod(TypeElement cls, String methodName) {
    // logger.n("cls: %s, injector: %s, method: %s", cls,
    //     packagedInjectorClassName, methodName));
    if (!injectionMethodsDone.add(
        Pair.of(methodName, TypeName.get(types.erasure(cls.asType()))))) {
      logger.w("duplicate injection method: " + methodName + " for type: " + cls);
      return;
    }
    MethodSpec.Builder methodSpecBuilder =
        MethodSpec.methodBuilder(methodName)
            .addModifiers(Modifier.PUBLIC)
            .addParameter(ClassName.get(cls), "arg");
    generateInjectionMethodBody(cls, methodSpecBuilder);
    injectorBuilder.addMethod(methodSpecBuilder.build());
  }

  // TODO: move this to PackagedInjectorGenerator.
  protected void generateInjectionMethodBody(
      TypeElement cls, MethodSpec.Builder methodSpecBuilder) {
    // Inject closest ancestor first.
    TypeElement clsClosestInjectAncestor = utils.getClosestInjectedAncestor(cls);
    if (clsClosestInjectAncestor != null) {
      if (clsClosestInjectAncestor.getModifiers().contains(Modifier.PUBLIC)) {
        // NOTE(review): public ancestors are injected via the package hub — presumably to handle
        // cross-package visibility; confirm.
        String packageString = utils.getPackageString(cls);
        ClassName hub = ClassName.get(packageString, PackagedHubInterfaceGenerator.HUB_INTERFACE);
        methodSpecBuilder.addStatement(
            "$L.inject(($T) arg)",
            utils.getSourceCodeName(hub),
            ClassName.get(clsClosestInjectAncestor));
      } else {
        methodSpecBuilder.addStatement(
            "inject(($T) arg)", ClassName.get(clsClosestInjectAncestor));
      }
    }
    // Field injection: arg.field = <provision>().
    for (VariableElement field : utils.getInjectedFields(cls, processingEnv)) {
      // logger.n("field: " + field);
      TypeMirror fieldType = field.asType();
      AnnotationMirror fieldQualifier = Utils.getQualifier(field);
      BindingKey fieldKey = BindingKey.get(fieldType, fieldQualifier);
      generateProvisionMethodIfNeeded(fieldKey);
      StringBuilder stringBuilder =
          new StringBuilder("arg.").append(field.getSimpleName()).append(" = ");
      addCallingProvisionMethod(stringBuilder, fieldKey);
      methodSpecBuilder.addStatement(stringBuilder.toString());
    }
    // Method injection: arg.method(<provision>(), ...).
    for (ExecutableElement method : utils.getInjectedMethods(cls, processingEnv)) {
      StringBuilder builder =
          new StringBuilder("arg.").append(method.getSimpleName()).append("(");
      List<BindingKey> methodArgs = utils.getDependenciesFromExecutableElement(method);
      if (methodArgs.size() > 0) {
        for (BindingKey dependentKey : methodArgs) {
          addCallingProvisionMethod(builder, dependentKey);
          builder.append(", ");
        }
        builder.delete(builder.length() - 2, builder.length());
      }
      builder.append(")");
      methodSpecBuilder.addStatement(builder.toString());
    }
  }

  /** Adds "getxxx()" to the builder. */
  protected final void addCallingProvisionMethod(StringBuilder stringBuilder, BindingKey key) {
    generateProvisionMethodIfNeeded(key);
    stringBuilder.append(getProvisionMethodName(key)).append("()");
  }

  /** Generic is handled. */
  protected final void generateProvisionMethodFromClass(BindingKey key, String suffix) {
    // logger.n("key: " + key + " referencingClass: " +
    //     referencingClass);
    TypeElement cls = utils.getClassFromKey(key);
    DeclaredType clsType = (DeclaredType) utils.getTypeFromKey(key);
    ExecutableElement ctor = utils.findInjectedCtor(cls);
    Preconditions.checkNotNull(ctor, String.format("Did not find ctor for %s", cls));
    ExecutableType ctorType = (ExecutableType) types.asMemberOf(clsType, ctor);
    // NOTE(review): dependencyKeys is never read below (see the commented-out specialization
    // code) — candidate for cleanup once confirmed side-effect free.
    List<BindingKey> dependencyKeys = utils.getDependenciesFromMethod(ctorType, ctor);
    // TODO: clean this.
    // if (key.getTypeName() instanceof ParameterizedTypeName) {
    //   logger.n("be here :" + key);
    //   List<BindingKey> specializedKeys = new ArrayList<>();
    //   Map<TypeVariableName, TypeName> map =
    //       utils.getMapFromTypeVariableToSpecialized((ParameterizedTypeName) key.getTypeName(),
    //           (ParameterizedTypeName) TypeName.get(cls.asType()));
    //   for (BindingKey k : dependencyKeys) {
    //     specializedKeys.add(utils.specializeIfNeeded(k, map));
    //   }
    //   dependencyKeys = specializedKeys;
    // }
    // logger.n("dependencyKeys: " +
    //     dependencyKeys);
    MethodSpec.Builder methodSpecBuilder =
        MethodSpec.methodBuilder(getProvisionMethodName(key) + suffix);
    methodSpecBuilder
        .addModifiers(suffix.isEmpty() ?
            Modifier.PUBLIC : Modifier.PRIVATE)
        .returns(key.getTypeName());
    onProvisionMethodStart(methodSpecBuilder, key);
    methodSpecBuilder.addStatement("$T result", key.getTypeName());
    addNewStatementToMethodSpec(
        methodSpecBuilder, Iterables.getOnlyElement(dependencies.get(key)), "result");
    if (shouldInjectAfterCreation()
        && !utils.isGenericNotSpecialized(cls.asType())
        && utils.hasInjectedFieldsOrMethodsRecursively(cls, processingEnv)) {
      // logger.n("hasInjected");
      // The created instance has @Inject members: generate and call the injection method.
      generateInjectionMethod(key);
      methodSpecBuilder.addStatement("inject(result)");
    }
    methodSpecBuilder.addStatement("return result");
    onProvisionMethodEnd(methodSpecBuilder, key);
    injectorBuilder.addMethod(methodSpecBuilder.build());
  }

  /**
   * True for package injector and false for hub ones.
   */
  protected boolean shouldInjectAfterCreation() {
    return true;
  }

  /** Generates a provision method returning an anonymous MembersInjector for the element type. */
  protected final void generateProvisionMethodForDaggerMembersInjector(BindingKey key, String suffix) {
    // logger.n("key: " + key + " referencingClass: " +
    //     referencingClass);
    BindingKey childKey = utils.getElementKeyForParameterizedBinding(key);
    TypeName childTypeName = childKey.getTypeName();
    String parameterSourceCodeName = Utils.getSourceCodeName(childTypeName);
    MethodSpec.Builder injectMethodSpecBuilder =
        MethodSpec.methodBuilder("injectMembers")
            .addModifiers(Modifier.PUBLIC)
            .addParameter(childTypeName, parameterSourceCodeName)
            .addAnnotation(Override.class);
    generateInjectionMethod(childKey);
    injectMethodSpecBuilder.addStatement("$L($L)", "inject", parameterSourceCodeName);
    TypeSpec injectorType =
        TypeSpec.anonymousClassBuilder("")
            .addSuperinterface(key.getTypeName())
            .addMethod(injectMethodSpecBuilder.build())
            .build();
    MethodSpec.Builder provisionMethodSpecBuilder =
        MethodSpec.methodBuilder(getProvisionMethodName(key) + suffix)
            .addModifiers(suffix.isEmpty() ? Modifier.PUBLIC : Modifier.PRIVATE)
            .returns(key.getTypeName());
    onProvisionMethodStart(provisionMethodSpecBuilder, key);
    provisionMethodSpecBuilder.addStatement("$T result = $L", key.getTypeName(), injectorType);
    provisionMethodSpecBuilder.addStatement("return result");
    onProvisionMethodEnd(provisionMethodSpecBuilder, key);
    // logger.n("" + provisionMethodSpecBuilder.build());
    injectorBuilder.addMethod(provisionMethodSpecBuilder.build());
  }

  protected final void generateProvisionMethodForEitherComponentBuilder(BindingKey key) {
    // The top level injector must be the one that provides the wanted builder.
    generateProvisionMethodForThoseFromTopLevel(key);
  }

  protected final void generateProvisionMethodAndAppendAsParameter(
      BindingKey key, StringBuilder builder) {
    builder.append(generateProvisionMethodAndReturnCallingString(key)).append(", ");
  }

  /** Ensures the provision method exists and returns the "methodName()" call expression. */
  protected final String generateProvisionMethodAndReturnCallingString(BindingKey key) {
    generateProvisionMethodIfNeeded(key);
    return getProvisionMethodName(key) + "()";
  }

  // NOTE(review): "Provsion" is a typo, but the name is part of the API surface; renaming would
  // break callers.
  protected final boolean isEitherComponentBuilderProvsionMethodProvidedByModule(Element method) {
    TypeElement element = Utils.getReturnTypeElement((ExecutableElement) method);
    return utils.isEitherComponentBuilderProvisionMethod(method)
        && Iterables.getOnlyElement(utils.getDependencyInfo(dependencies, BindingKey.get(element)))
            .getDependencySourceType()
            .equals(DependencySourceType.MODULE);
  }

  /**
   * (sub)component builder provision method that has not been explicitly specific in the parent
   * (sub)component.
   */
  protected final void generateImplicitProvisionMethodForEitherComponentBuilder(
      Builder injectorBuilder, TypeElement componentBuilder) {
    Preconditions.checkArgument(utils.isEitherComponentBuilder(componentBuilder));
    TypeElement enclosingElement = (TypeElement) componentBuilder.getEnclosingElement();
    boolean isSubcomponent = utils.isSubcomponent(enclosingElement);
    ClassName builderImplementationName = getClassNameForEitherComponentBuilder(componentBuilder);
    MethodSpec.Builder methodBuilder =
        MethodSpec.methodBuilder(utils.getGetMethodName(componentBuilder))
            .addModifiers(Modifier.PUBLIC)
            .returns(builderImplementationName);
    // Subcomponent builders receive the parent injector; component builders take no argument.
    methodBuilder.addCode(
        "return new $T($L);", builderImplementationName, isSubcomponent ? "this" : "");
    injectorBuilder.addMethod(methodBuilder.build());
  }

  /** Nested class name: package.InjectorSimpleName.BuilderSimpleName. */
  protected final ClassName getClassNameForEitherComponentBuilder(TypeElement componentBuilder) {
    TypeElement enclosingElement = (TypeElement) componentBuilder.getEnclosingElement();
    return ClassName.get(
        utils.getPackageString(enclosingElement),
        getInjectorSimpleName(enclosingElement),
        componentBuilder.getSimpleName().toString());
  }

  /**
   * Generates the override of a parent-component method that returns a subcomponent, forwarding
   * this injector and the method parameters (sorted by binding key) to the subcomponent's ctor.
   */
  protected final void generateGetSubcomponentMethod(ExecutableElement method, Builder injectorBuilder) {
    TypeElement returnType = Utils.getReturnTypeElement(method);
    logger.n("returnType: " + returnType + " method: " + method);
    // Method head
    MethodSpec.Builder buildMethodBuilder =
        MethodSpec.methodBuilder(method.getSimpleName().toString())
            .addModifiers(Modifier.PUBLIC)
            .addAnnotation(Override.class)
            .returns(TypeName.get(returnType.asType()));
    Map<BindingKey, String> keyNameMap = new HashMap<>();
    for (VariableElement parameter : method.getParameters()) {
      keyNameMap.put(
          utils.getBindingKeyForMethodParameter(parameter), parameter.getSimpleName().toString());
    }
    // method parameters
    for (VariableElement parameter : method.getParameters()) {
      buildMethodBuilder.addParameter(
          TypeName.get(parameter.asType()), parameter.getSimpleName().toString());
    }
    // return statement
StringBuilder returnCodeBuilder = new StringBuilder("return new $T(this, "); // return statement deps List<BindingKey> sortedParams = utils.sortBindingKeys(keyNameMap.keySet()); for (BindingKey key : sortedParams) { String name = keyNameMap.get(key); returnCodeBuilder.append(name).append(", "); } int size = returnCodeBuilder.length(); returnCodeBuilder.delete(size - 2, size); returnCodeBuilder.append(");"); buildMethodBuilder.addCode( returnCodeBuilder.toString(), ClassName.get(utils.getPackageString(returnType), getInjectorSimpleName(returnType))); injectorBuilder.addMethod(buildMethodBuilder.build()); } protected final void generateExplicitProvisionMethodForEitherComponentBuilder( ExecutableElement method, Builder injectorBuilder) { TypeElement returnType = Utils.getReturnTypeElement(method); logger.n("" + returnType + " method: " + method); Preconditions.checkArgument( Utils.getQualifier(returnType) == null, "Qualifier found for (sub)component builder: " + returnType); MethodSpec.Builder buildMethodBuilder = MethodSpec.methodBuilder(method.getSimpleName().toString()) .addModifiers(Modifier.PUBLIC) .addAnnotation(Override.class) .returns(TypeName.get(returnType.asType())); buildMethodBuilder.addCode("return $L();", utils.getGetMethodName(returnType)); injectorBuilder.addMethod(buildMethodBuilder.build()); } ClassName getTopLevelInjectorBuilderClassName(CoreInjectorInfo component) { return null; // ClassName.get(topLevelPackageString, getTopLevelInjectorName(component, // topLevelInjectorPrefix, topLevelInjectorSuffix), "Builder"); } protected final boolean isInjectorOfScope(ClassName injectorClassName, TypeElement scope) { return injectorClassName .simpleName() .contains(scope.getQualifiedName().toString().replace(".", "_")); } public String getTopLevelInjectorName( CoreInjectorInfo component, String topLevelInjectorPrefix, String topLevelInjectorSuffix) { return null; // this.topLevelInjectorPrefix + component.getName() + this.topLevelInjectorSuffix; } private 
String getCurrentEitherComponent() { return getPackageString() + "." + getInjectorSimpleName(); } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.sql.tests; import org.apache.flink.api.common.functions.MapFunction; import org.apache.flink.api.common.restartstrategy.RestartStrategies; import org.apache.flink.api.common.serialization.Encoder; import org.apache.flink.api.common.state.ListState; import org.apache.flink.api.common.state.ListStateDescriptor; import org.apache.flink.api.common.time.Time; import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.api.common.typeinfo.Types; import org.apache.flink.api.common.typeutils.base.IntSerializer; import org.apache.flink.api.common.typeutils.base.LongSerializer; import org.apache.flink.api.java.typeutils.ResultTypeQueryable; import org.apache.flink.api.java.utils.ParameterTool; import org.apache.flink.core.fs.Path; import org.apache.flink.core.io.SimpleVersionedSerializer; import org.apache.flink.runtime.state.FunctionInitializationContext; import org.apache.flink.runtime.state.FunctionSnapshotContext; import org.apache.flink.streaming.api.TimeCharacteristic; import org.apache.flink.streaming.api.checkpoint.CheckpointedFunction; import org.apache.flink.streaming.api.datastream.DataStream; import 
org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; import org.apache.flink.streaming.api.functions.sink.filesystem.BucketAssigner; import org.apache.flink.streaming.api.functions.sink.filesystem.StreamingFileSink; import org.apache.flink.streaming.api.functions.sink.filesystem.bucketassigners.SimpleVersionedStringSerializer; import org.apache.flink.streaming.api.functions.sink.filesystem.rollingpolicies.OnCheckpointRollingPolicy; import org.apache.flink.streaming.api.functions.source.SourceFunction; import org.apache.flink.table.api.EnvironmentSettings; import org.apache.flink.table.api.Table; import org.apache.flink.table.api.TableSchema; import org.apache.flink.table.api.internal.TableEnvironmentInternal; import org.apache.flink.table.api.java.StreamTableEnvironment; import org.apache.flink.table.sources.DefinedFieldMapping; import org.apache.flink.table.sources.DefinedRowtimeAttributes; import org.apache.flink.table.sources.RowtimeAttributeDescriptor; import org.apache.flink.table.sources.StreamTableSource; import org.apache.flink.table.sources.tsextractors.ExistingField; import org.apache.flink.table.sources.wmstrategies.BoundedOutOfOrderTimestamps; import org.apache.flink.types.Row; import java.io.PrintStream; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; /** * End-to-end test for Stream SQL queries. * * <p>Includes the following SQL features: * - OVER window aggregation * - keyed and non-keyed GROUP BY TUMBLE aggregation * - windowed INNER JOIN * - TableSource with event-time attribute * * <p>The stream is bounded and will complete after about a minute. * The result is always constant. * The job is killed on the first attempt and restarted. * * <p>Parameters: * -outputPath Sets the path to where the result data is written. 
*/
public class StreamSQLTestProgram {

  /**
   * Builds and runs the end-to-end streaming SQL job.
   *
   * <p>Pipeline: generated source tables -> OVER window -> TUMBLE aggregation ->
   * windowed join -> final TUMBLE aggregation -> StreamingFileSink, with a
   * {@link KillMapper} in front of the sink that deliberately fails the first
   * execution attempt to exercise restart/recovery.
   *
   * @param args requires {@code -outputPath}; optional {@code -planner} ("old" or
   *     "blink", default "blink")
   */
  public static void main(String[] args) throws Exception {
    ParameterTool params = ParameterTool.fromArgs(args);
    String outputPath = params.getRequired("outputPath");
    String planner = params.get("planner", "blink");

    // Select the planner; any other value silently keeps the EnvironmentSettings default.
    final EnvironmentSettings.Builder builder = EnvironmentSettings.newInstance();
    builder.inStreamingMode();
    if (planner.equals("old")) {
      builder.useOldPlanner();
    } else if (planner.equals("blink")) {
      builder.useBlinkPlanner();
    }
    final EnvironmentSettings settings = builder.build();

    final StreamExecutionEnvironment sEnv = StreamExecutionEnvironment.getExecutionEnvironment();
    // The job is expected to fail once (KillMapper), so allow a few restarts.
    sEnv.setRestartStrategy(RestartStrategies.fixedDelayRestart(
      3,
      Time.of(10, TimeUnit.SECONDS)
    ));
    sEnv.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
    sEnv.enableCheckpointing(4000);
    sEnv.getConfig().setAutoWatermarkInterval(1000);

    final StreamTableEnvironment tEnv = StreamTableEnvironment.create(sEnv, settings);

    // Two bounded generator tables: (numKeys, recordsPerKeyAndSecond, durationSeconds, offsetSeconds).
    ((TableEnvironmentInternal) tEnv).registerTableSourceInternal("table1", new GeneratorTableSource(10, 100, 60, 0));
    ((TableEnvironmentInternal) tEnv).registerTableSourceInternal("table2", new GeneratorTableSource(5, 0.2f, 60, 5));

    int overWindowSizeSeconds = 1;
    int tumbleWindowSizeSeconds = 10;

    // Per-key record count over a sliding 1s event-time range.
    String overQuery = String.format(
      "SELECT " +
      " key, " +
      " rowtime, " +
      " COUNT(*) OVER (PARTITION BY key ORDER BY rowtime RANGE BETWEEN INTERVAL '%d' SECOND PRECEDING AND CURRENT ROW) AS cnt " +
      "FROM table1",
      overWindowSizeSeconds);

    // Tumbling window over the OVER result; the CASE encodes a correctness check
    // (expected average count 101) into the data itself.
    String tumbleQuery = String.format(
      "SELECT " +
      " key, " +
      " CASE SUM(cnt) / COUNT(*) WHEN 101 THEN 1 ELSE 99 END AS correct, " +
      " TUMBLE_START(rowtime, INTERVAL '%d' SECOND) AS wStart, " +
      " TUMBLE_ROWTIME(rowtime, INTERVAL '%d' SECOND) AS rowtime " +
      "FROM (%s) " +
      "WHERE rowtime > TIMESTAMP '1970-01-01 00:00:01' " +
      "GROUP BY key, TUMBLE(rowtime, INTERVAL '%d' SECOND)",
      tumbleWindowSizeSeconds,
      tumbleWindowSizeSeconds,
      overQuery,
      tumbleWindowSizeSeconds);

    // Event-time windowed join of table2 against the tumble result.
    String joinQuery = String.format(
      "SELECT " +
      " t1.key, " +
      " t2.rowtime AS rowtime, " +
      " t2.correct," +
      " t2.wStart " +
      "FROM table2 t1, (%s) t2 " +
      "WHERE " +
      " t1.key = t2.key AND " +
      " t1.rowtime BETWEEN t2.rowtime AND t2.rowtime + INTERVAL '%d' SECOND",
      tumbleQuery,
      tumbleWindowSizeSeconds);

    // Final non-keyed tumbling aggregation producing the constant expected result.
    String finalAgg = String.format(
      "SELECT " +
      " SUM(correct) AS correct, " +
      " TUMBLE_START(rowtime, INTERVAL '20' SECOND) AS rowtime " +
      "FROM (%s) " +
      "GROUP BY TUMBLE(rowtime, INTERVAL '20' SECOND)",
      joinQuery);

    // get Table for SQL query
    Table result = tEnv.sqlQuery(finalAgg);
    // convert Table into append-only DataStream
    DataStream<Row> resultStream =
      tEnv.toAppendStream(result, Types.ROW(Types.INT, Types.SQL_TIMESTAMP));

    final StreamingFileSink<Row> sink = StreamingFileSink
      .forRowFormat(new Path(outputPath),
        (Encoder<Row>) (element, stream) -> {
          PrintStream out = new PrintStream(stream);
          out.println(element.toString());
        })
      .withBucketAssigner(new KeyBucketAssigner())
      // roll on checkpoint so output files are finalized exactly-once
      .withRollingPolicy(OnCheckpointRollingPolicy.build())
      .build();

    resultStream
      // inject a KillMapper that forwards all records but terminates the first execution attempt
      .map(new KillMapper()).setParallelism(1)
      // add sink function
      .addSink(sink).setParallelism(1);

    sEnv.execute();
  }

  /**
   * Use first field for buckets.
   *
   * <p>Each distinct value of field 0 of the result row becomes its own output bucket
   * (sub-directory) of the StreamingFileSink.
   */
  public static final class KeyBucketAssigner implements BucketAssigner<Row, String> {

    private static final long serialVersionUID = 987325769970523326L;

    @Override
    public String getBucketId(final Row element, final Context context) {
      return String.valueOf(element.getField(0));
    }

    @Override
    public SimpleVersionedSerializer<String> getSerializer() {
      return SimpleVersionedStringSerializer.INSTANCE;
    }
  }

  /**
   * TableSource for generated data.
*/ public static class GeneratorTableSource implements StreamTableSource<Row>, DefinedRowtimeAttributes, DefinedFieldMapping { private final int numKeys; private final float recordsPerKeyAndSecond; private final int durationSeconds; private final int offsetSeconds; public GeneratorTableSource(int numKeys, float recordsPerKeyAndSecond, int durationSeconds, int offsetSeconds) { this.numKeys = numKeys; this.recordsPerKeyAndSecond = recordsPerKeyAndSecond; this.durationSeconds = durationSeconds; this.offsetSeconds = offsetSeconds; } @Override public DataStream<Row> getDataStream(StreamExecutionEnvironment execEnv) { return execEnv.addSource(new Generator(numKeys, recordsPerKeyAndSecond, durationSeconds, offsetSeconds)); } @Override public TypeInformation<Row> getReturnType() { return Types.ROW(Types.INT, Types.LONG, Types.STRING); } @Override public TableSchema getTableSchema() { return new TableSchema( new String[] {"key", "rowtime", "payload"}, new TypeInformation[] {Types.INT, Types.SQL_TIMESTAMP, Types.STRING}); } @Override public String explainSource() { return "GeneratorTableSource"; } @Override public List<RowtimeAttributeDescriptor> getRowtimeAttributeDescriptors() { return Collections.singletonList( new RowtimeAttributeDescriptor( "rowtime", new ExistingField("ts"), new BoundedOutOfOrderTimestamps(100))); } @Override public Map<String, String> getFieldMapping() { Map<String, String> mapping = new HashMap<>(); mapping.put("key", "f0"); mapping.put("ts", "f1"); mapping.put("payload", "f2"); return mapping; } } /** * Data-generating source function. 
*/ public static class Generator implements SourceFunction<Row>, ResultTypeQueryable<Row>, CheckpointedFunction { private final int numKeys; private final int offsetSeconds; private final int sleepMs; private final int durationMs; private long ms = 0; private ListState<Long> state = null; public Generator(int numKeys, float rowsPerKeyAndSecond, int durationSeconds, int offsetSeconds) { this.numKeys = numKeys; this.durationMs = durationSeconds * 1000; this.offsetSeconds = offsetSeconds; this.sleepMs = (int) (1000 / rowsPerKeyAndSecond); } @Override public void run(SourceContext<Row> ctx) throws Exception { long offsetMS = offsetSeconds * 2000L; while (ms < durationMs) { synchronized (ctx.getCheckpointLock()) { for (int i = 0; i < numKeys; i++) { ctx.collect(Row.of(i, ms + offsetMS, "Some payload...")); } ms += sleepMs; } Thread.sleep(sleepMs); } } @Override public void cancel() { } @Override public TypeInformation<Row> getProducedType() { return Types.ROW(Types.INT, Types.LONG, Types.STRING); } @Override public void initializeState(FunctionInitializationContext context) throws Exception { state = context.getOperatorStateStore().getListState( new ListStateDescriptor<Long>("state", LongSerializer.INSTANCE)); for (Long l : state.get()) { ms += l; } } @Override public void snapshotState(FunctionSnapshotContext context) throws Exception { state.clear(); state.add(ms); } } /** * Kills the first execution attempt of an application when it receives the second record. 
*/ public static class KillMapper implements MapFunction<Row, Row>, CheckpointedFunction, ResultTypeQueryable { // counts all processed records of all previous execution attempts private int saveRecordCnt = 0; // counts all processed records of this execution attempt private int lostRecordCnt = 0; private ListState<Integer> state = null; @Override public Row map(Row value) { // the both counts are the same only in the first execution attempt if (saveRecordCnt == 1 && lostRecordCnt == 1) { throw new RuntimeException("Kill this Job!"); } // update checkpointed counter saveRecordCnt++; // update non-checkpointed counter lostRecordCnt++; // forward record return value; } @Override public TypeInformation getProducedType() { return Types.ROW(Types.INT, Types.SQL_TIMESTAMP); } @Override public void initializeState(FunctionInitializationContext context) throws Exception { state = context.getOperatorStateStore().getListState( new ListStateDescriptor<Integer>("state", IntSerializer.INSTANCE)); for (Integer i : state.get()) { saveRecordCnt += i; } } @Override public void snapshotState(FunctionSnapshotContext context) throws Exception { state.clear(); state.add(saveRecordCnt); } } }
package me.gurpreetsk.textchangeobserve.model;

import com.google.gson.annotations.Expose;
import com.google.gson.annotations.SerializedName;

/**
 * Gson-deserialized model of one repository entry.
 *
 * <p>Field names mirror the JSON keys of the GitHub repository payload (the
 * {@code @SerializedName} values match that API's schema — presumably the GitHub
 * search/repos endpoint; confirm against the caller). All fields are boxed/nullable:
 * absent JSON keys leave them {@code null}.
 *
 * <p>Created by Gurpreet on 18/08/17.
 */
public class Project {

  // --- identity ---
  @SerializedName("id") @Expose private Integer id;
  @SerializedName("name") @Expose private String name;
  @SerializedName("full_name") @Expose private String fullName;
  @SerializedName("owner") @Expose private Owner owner;
  // "private" is a Java keyword, hence the underscore prefix; JSON key is "private".
  @SerializedName("private") @Expose private Boolean _private;
  @SerializedName("html_url") @Expose private String htmlUrl;
  @SerializedName("description") @Expose private String description;
  @SerializedName("fork") @Expose private Boolean fork;

  // --- API resource URLs ---
  @SerializedName("url") @Expose private String url;
  @SerializedName("forks_url") @Expose private String forksUrl;
  @SerializedName("keys_url") @Expose private String keysUrl;
  @SerializedName("collaborators_url") @Expose private String collaboratorsUrl;
  @SerializedName("teams_url") @Expose private String teamsUrl;
  @SerializedName("hooks_url") @Expose private String hooksUrl;
  @SerializedName("issue_events_url") @Expose private String issueEventsUrl;
  @SerializedName("events_url") @Expose private String eventsUrl;
  @SerializedName("assignees_url") @Expose private String assigneesUrl;
  @SerializedName("branches_url") @Expose private String branchesUrl;
  @SerializedName("tags_url") @Expose private String tagsUrl;
  @SerializedName("blobs_url") @Expose private String blobsUrl;
  @SerializedName("git_tags_url") @Expose private String gitTagsUrl;
  @SerializedName("git_refs_url") @Expose private String gitRefsUrl;
  @SerializedName("trees_url") @Expose private String treesUrl;
  @SerializedName("statuses_url") @Expose private String statusesUrl;
  @SerializedName("languages_url") @Expose private String languagesUrl;
  @SerializedName("stargazers_url") @Expose private String stargazersUrl;
  @SerializedName("contributors_url") @Expose private String contributorsUrl;
  @SerializedName("subscribers_url") @Expose private String subscribersUrl;
  @SerializedName("subscription_url") @Expose private String subscriptionUrl;
  @SerializedName("commits_url") @Expose private String commitsUrl;
  @SerializedName("git_commits_url") @Expose private String gitCommitsUrl;
  @SerializedName("comments_url") @Expose private String commentsUrl;
  @SerializedName("issue_comment_url") @Expose private String issueCommentUrl;
  @SerializedName("contents_url") @Expose private String contentsUrl;
  @SerializedName("compare_url") @Expose private String compareUrl;
  @SerializedName("merges_url") @Expose private String mergesUrl;
  @SerializedName("archive_url") @Expose private String archiveUrl;
  @SerializedName("downloads_url") @Expose private String downloadsUrl;
  @SerializedName("issues_url") @Expose private String issuesUrl;
  @SerializedName("pulls_url") @Expose private String pullsUrl;
  @SerializedName("milestones_url") @Expose private String milestonesUrl;
  @SerializedName("notifications_url") @Expose private String notificationsUrl;
  @SerializedName("labels_url") @Expose private String labelsUrl;
  @SerializedName("releases_url") @Expose private String releasesUrl;
  @SerializedName("deployments_url") @Expose private String deploymentsUrl;

  // --- timestamps (kept as raw strings; not parsed here) ---
  @SerializedName("created_at") @Expose private String createdAt;
  @SerializedName("updated_at") @Expose private String updatedAt;
  @SerializedName("pushed_at") @Expose private String pushedAt;

  // --- clone/browse URLs ---
  @SerializedName("git_url") @Expose private String gitUrl;
  @SerializedName("ssh_url") @Expose private String sshUrl;
  @SerializedName("clone_url") @Expose private String cloneUrl;
  @SerializedName("svn_url") @Expose private String svnUrl;
  @SerializedName("homepage") @Expose private String homepage;

  // --- counters and flags ---
  @SerializedName("size") @Expose private Integer size;
  @SerializedName("stargazers_count") @Expose private Integer stargazersCount;
  @SerializedName("watchers_count") @Expose private Integer watchersCount;
  @SerializedName("language") @Expose private String language;
  @SerializedName("has_issues") @Expose private Boolean hasIssues;
  @SerializedName("has_projects") @Expose private Boolean hasProjects;
  @SerializedName("has_downloads") @Expose private Boolean hasDownloads;
  @SerializedName("has_wiki") @Expose private Boolean hasWiki;
  @SerializedName("has_pages") @Expose private Boolean hasPages;
  @SerializedName("forks_count") @Expose private Integer forksCount;
  // Object because the API returns null here for non-mirrored repos; actual non-null
  // shape unverified from this file.
  @SerializedName("mirror_url") @Expose private Object mirrorUrl;
  @SerializedName("open_issues_count") @Expose private Integer openIssuesCount;
  @SerializedName("forks") @Expose private Integer forks;
  @SerializedName("open_issues") @Expose private Integer openIssues;
  @SerializedName("watchers") @Expose private Integer watchers;
  @SerializedName("default_branch") @Expose private String defaultBranch;
  @SerializedName("score") @Expose private Double score;

  // Plain boilerplate accessors below: each getter returns the field as-is and each
  // setter assigns it without validation or defensive copying.

  public Integer getId() { return id; }

  public void setId(Integer id) { this.id = id; }

  public String getName() { return name; }

  public void setName(String name) { this.name = name; }

  public String getFullName() { return fullName; }

  public void setFullName(String fullName) { this.fullName = fullName; }

  public Owner getOwner() { return owner; }

  public void setOwner(Owner owner) { this.owner = owner; }

  public Boolean getPrivate() { return _private; }

  public void setPrivate(Boolean _private) { this._private = _private; }

  public String getHtmlUrl() { return htmlUrl; }

  public void setHtmlUrl(String htmlUrl) { this.htmlUrl = htmlUrl; }

  public String getDescription() { return description; }

  public void setDescription(String description) { this.description = description; }

  public Boolean getFork() { return fork; }

  public void setFork(Boolean fork) { this.fork = fork; }

  public String getUrl() { return url; }

  public void setUrl(String url) { this.url = url; }

  public String getForksUrl() { return forksUrl; }

  public void setForksUrl(String forksUrl) { this.forksUrl = forksUrl; }

  public String getKeysUrl() { return keysUrl; }

  public void setKeysUrl(String keysUrl) { this.keysUrl = keysUrl; }

  public String getCollaboratorsUrl() { return collaboratorsUrl; }

  public void setCollaboratorsUrl(String collaboratorsUrl) { this.collaboratorsUrl = collaboratorsUrl; }

  public String getTeamsUrl() { return teamsUrl; }

  public void setTeamsUrl(String teamsUrl) { this.teamsUrl = teamsUrl; }

  public String getHooksUrl() { return hooksUrl; }

  public void setHooksUrl(String hooksUrl) { this.hooksUrl = hooksUrl; }

  public String getIssueEventsUrl() { return issueEventsUrl; }

  public void setIssueEventsUrl(String issueEventsUrl) { this.issueEventsUrl = issueEventsUrl; }

  public String getEventsUrl() { return eventsUrl; }

  public void setEventsUrl(String eventsUrl) { this.eventsUrl = eventsUrl; }

  public String getAssigneesUrl() { return assigneesUrl; }

  public void setAssigneesUrl(String assigneesUrl) { this.assigneesUrl = assigneesUrl; }

  public String getBranchesUrl() { return branchesUrl; }

  public void setBranchesUrl(String branchesUrl) { this.branchesUrl = branchesUrl; }

  public String getTagsUrl() { return tagsUrl; }

  public void setTagsUrl(String tagsUrl) { this.tagsUrl = tagsUrl; }

  public String getBlobsUrl() { return blobsUrl; }

  public void setBlobsUrl(String blobsUrl) { this.blobsUrl = blobsUrl; }

  public String getGitTagsUrl() { return gitTagsUrl; }

  public void setGitTagsUrl(String gitTagsUrl) { this.gitTagsUrl = gitTagsUrl; }

  public String getGitRefsUrl() { return gitRefsUrl; }

  public void setGitRefsUrl(String gitRefsUrl) { this.gitRefsUrl = gitRefsUrl; }

  public String getTreesUrl() { return treesUrl; }

  public void setTreesUrl(String treesUrl) { this.treesUrl = treesUrl; }

  public String getStatusesUrl() { return statusesUrl; }

  public void setStatusesUrl(String statusesUrl) { this.statusesUrl = statusesUrl; }

  public String getLanguagesUrl() { return languagesUrl; }

  public void setLanguagesUrl(String languagesUrl) { this.languagesUrl = languagesUrl; }

  public String getStargazersUrl() { return stargazersUrl; }

  public void setStargazersUrl(String stargazersUrl) { this.stargazersUrl = stargazersUrl; }

  public String getContributorsUrl() { return contributorsUrl; }

  public void setContributorsUrl(String contributorsUrl) { this.contributorsUrl = contributorsUrl; }

  public String getSubscribersUrl() { return subscribersUrl; }

  public void setSubscribersUrl(String subscribersUrl) { this.subscribersUrl = subscribersUrl; }

  public String getSubscriptionUrl() { return subscriptionUrl; }

  public void setSubscriptionUrl(String subscriptionUrl) { this.subscriptionUrl = subscriptionUrl; }

  public String getCommitsUrl() { return commitsUrl; }

  public void setCommitsUrl(String commitsUrl) { this.commitsUrl = commitsUrl; }

  public String getGitCommitsUrl() { return gitCommitsUrl; }

  public void setGitCommitsUrl(String gitCommitsUrl) { this.gitCommitsUrl = gitCommitsUrl; }

  public String getCommentsUrl() { return commentsUrl; }

  public void setCommentsUrl(String commentsUrl) { this.commentsUrl = commentsUrl; }

  public String getIssueCommentUrl() { return issueCommentUrl; }

  public void setIssueCommentUrl(String issueCommentUrl) { this.issueCommentUrl = issueCommentUrl; }

  public String getContentsUrl() { return contentsUrl; }

  public void setContentsUrl(String contentsUrl) { this.contentsUrl = contentsUrl; }

  public String getCompareUrl() { return compareUrl; }

  public void setCompareUrl(String compareUrl) { this.compareUrl = compareUrl; }

  public String getMergesUrl() { return mergesUrl; }

  public void setMergesUrl(String mergesUrl) { this.mergesUrl = mergesUrl; }

  public String getArchiveUrl() { return archiveUrl; }

  public void setArchiveUrl(String archiveUrl) { this.archiveUrl = archiveUrl; }

  public String getDownloadsUrl() { return downloadsUrl; }

  public void setDownloadsUrl(String downloadsUrl) { this.downloadsUrl = downloadsUrl; }

  public String getIssuesUrl() { return issuesUrl; }

  public void setIssuesUrl(String issuesUrl) { this.issuesUrl = issuesUrl; }

  public String getPullsUrl() { return pullsUrl; }

  public void setPullsUrl(String pullsUrl) { this.pullsUrl = pullsUrl; }

  public String getMilestonesUrl() { return milestonesUrl; }

  public void setMilestonesUrl(String milestonesUrl) { this.milestonesUrl = milestonesUrl; }

  public String getNotificationsUrl() { return notificationsUrl; }

  public void setNotificationsUrl(String notificationsUrl) { this.notificationsUrl = notificationsUrl; }

  public String getLabelsUrl() { return labelsUrl; }

  public void setLabelsUrl(String labelsUrl) { this.labelsUrl = labelsUrl; }

  public String getReleasesUrl() { return releasesUrl; }

  public void setReleasesUrl(String releasesUrl) { this.releasesUrl = releasesUrl; }

  public String getDeploymentsUrl() { return deploymentsUrl; }

  public void setDeploymentsUrl(String deploymentsUrl) { this.deploymentsUrl = deploymentsUrl; }

  public String getCreatedAt() { return createdAt; }

  public void setCreatedAt(String createdAt) { this.createdAt = createdAt; }

  public String getUpdatedAt() { return updatedAt; }

  public void setUpdatedAt(String updatedAt) { this.updatedAt = updatedAt; }

  public String getPushedAt() { return pushedAt; }

  public void setPushedAt(String pushedAt) { this.pushedAt = pushedAt; }

  public String getGitUrl() { return gitUrl; }

  public void setGitUrl(String gitUrl) { this.gitUrl = gitUrl; }

  public String getSshUrl() { return sshUrl; }

  public void setSshUrl(String sshUrl) { this.sshUrl = sshUrl; }

  public String getCloneUrl() { return cloneUrl; }

  public void setCloneUrl(String cloneUrl) { this.cloneUrl = cloneUrl; }

  public String getSvnUrl() { return svnUrl; }

  public void setSvnUrl(String svnUrl) { this.svnUrl = svnUrl; }

  public String getHomepage() { return homepage; }

  public void setHomepage(String homepage) { this.homepage = homepage; }

  public Integer getSize() { return size; }

  public void setSize(Integer size) { this.size = size; }

  public Integer getStargazersCount() { return stargazersCount; }

  public void setStargazersCount(Integer stargazersCount) { this.stargazersCount = stargazersCount; }

  public Integer getWatchersCount() { return watchersCount; }

  public void setWatchersCount(Integer watchersCount) { this.watchersCount = watchersCount; }

  public String getLanguage() { return language; }

  public void setLanguage(String language) { this.language = language; }

  public Boolean getHasIssues() { return hasIssues; }

  public void setHasIssues(Boolean hasIssues) { this.hasIssues = hasIssues; }

  public Boolean getHasProjects() { return hasProjects; }

  public void setHasProjects(Boolean hasProjects) { this.hasProjects = hasProjects; }

  public Boolean getHasDownloads() { return hasDownloads; }

  public void setHasDownloads(Boolean hasDownloads) { this.hasDownloads = hasDownloads; }

  public Boolean getHasWiki() { return hasWiki; }

  public void setHasWiki(Boolean hasWiki) { this.hasWiki = hasWiki; }

  public Boolean getHasPages() { return hasPages; }

  public void setHasPages(Boolean hasPages) { this.hasPages = hasPages; }

  public Integer getForksCount() { return forksCount; }

  public void setForksCount(Integer forksCount) { this.forksCount = forksCount; }

  public Object getMirrorUrl() { return mirrorUrl; }

  public void setMirrorUrl(Object mirrorUrl) { this.mirrorUrl = mirrorUrl; }

  public Integer getOpenIssuesCount() { return openIssuesCount; }

  public void setOpenIssuesCount(Integer openIssuesCount) { this.openIssuesCount = openIssuesCount; }

  public Integer getForks() { return forks; }

  public void setForks(Integer forks) { this.forks = forks; }

  public Integer getOpenIssues() { return openIssues; }

  public void setOpenIssues(Integer openIssues) { this.openIssues = openIssues; }

  public Integer getWatchers() { return watchers; }

  public void setWatchers(Integer watchers) { this.watchers = watchers; }

  public String getDefaultBranch() { return defaultBranch; }

  public void setDefaultBranch(String defaultBranch) { this.defaultBranch = defaultBranch; }

  public Double getScore() { return score; }

  public void setScore(Double score) { this.score = score; }
}
/** * Copyright (C) 2011 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.analytics.financial.instrument.annuity; import java.util.ArrayList; import java.util.List; import org.apache.commons.lang.ObjectUtils; import org.threeten.bp.Period; import org.threeten.bp.ZonedDateTime; import com.opengamma.analytics.financial.instrument.index.IborIndex; import com.opengamma.analytics.financial.instrument.payment.CouponIborSpreadDefinition; import com.opengamma.analytics.financial.interestrate.annuity.derivative.Annuity; import com.opengamma.analytics.financial.interestrate.payments.derivative.Coupon; import com.opengamma.analytics.financial.schedule.ScheduleCalculator; import com.opengamma.financial.convention.StubType; import com.opengamma.financial.convention.businessday.BusinessDayConvention; import com.opengamma.financial.convention.calendar.Calendar; import com.opengamma.financial.convention.daycount.DayCount; import com.opengamma.timeseries.DoubleTimeSeries; import com.opengamma.util.ArgumentChecker; /** * A wrapper class for an AnnuityDefinition containing CouponIborSpreadDefinition. */ public class AnnuityCouponIborSpreadDefinition extends AnnuityCouponDefinition<CouponIborSpreadDefinition> { /** * The underlying Ibor index. */ private final IborIndex _iborIndex; /** * The holiday calendar for the ibor index * */ private final Calendar _calendar; /** * Constructor from a list of Ibor-like coupons. * @param payments The Ibor coupons. * @param calendar The calendar */ public AnnuityCouponIborSpreadDefinition(final CouponIborSpreadDefinition[] payments, final Calendar calendar) { super(payments, calendar); _iborIndex = payments[0].getIndex(); _calendar = payments[0].getCalendar(); } /** * Annuity builder from the conventions and common characteristics. * @param settlementDate The settlement date. * @param tenor The tenor. * @param notional The notional. * @param index The Ibor index. 
* @param spread The common spread. * @param isPayer The payer flag. * @param calendar The holiday calendar for the ibor leg. * @return The Ibor annuity. */ public static AnnuityCouponIborSpreadDefinition from(final ZonedDateTime settlementDate, final Period tenor, final double notional, final IborIndex index, final double spread, final boolean isPayer, final Calendar calendar) { ArgumentChecker.notNull(settlementDate, "settlement date"); ArgumentChecker.notNull(index, "index"); ArgumentChecker.notNull(tenor, "tenor"); final AnnuityCouponIborDefinition iborAnnuity = AnnuityCouponIborDefinition.from(settlementDate, tenor, notional, index, isPayer, calendar); final CouponIborSpreadDefinition[] coupons = new CouponIborSpreadDefinition[iborAnnuity.getPayments().length]; for (int loopcpn = 0; loopcpn < iborAnnuity.getPayments().length; loopcpn++) { coupons[loopcpn] = CouponIborSpreadDefinition.from(iborAnnuity.getNthPayment(loopcpn), spread); } return new AnnuityCouponIborSpreadDefinition(coupons, calendar); } /** * Annuity builder from the conventions and common characteristics. * @param settlementDate The settlement date. * @param maturityDate The annuity maturity date. * @param paymentPeriod The payment period. * @param notional The notional. * @param index The Ibor index. * @param isPayer The payer flag. * @param businessDayConvention The leg business day convention. * @param endOfMonth The leg end-of-month convention. * @param dayCount The coupons day count. * @param spread The spread rate. * @param calendar The holiday calendar for the ibor leg. * @return The Ibor annuity. 
*/ public static AnnuityCouponIborSpreadDefinition from(final ZonedDateTime settlementDate, final ZonedDateTime maturityDate, final Period paymentPeriod, final double notional, final IborIndex index, final boolean isPayer, final BusinessDayConvention businessDayConvention, final boolean endOfMonth, final DayCount dayCount, final double spread, final Calendar calendar) { ArgumentChecker.notNull(settlementDate, "settlement date"); ArgumentChecker.notNull(maturityDate, "maturity date"); ArgumentChecker.notNull(paymentPeriod, "payment period"); ArgumentChecker.notNull(index, "index"); ArgumentChecker.notNull(businessDayConvention, "Business day convention"); ArgumentChecker.notNull(dayCount, "Day count convention"); ArgumentChecker.isTrue(notional > 0, "notional <= 0"); final double sign = isPayer ? -1.0 : 1.0; final ZonedDateTime[] paymentDates = ScheduleCalculator.getAdjustedDateSchedule(settlementDate, maturityDate, paymentPeriod, true, false, businessDayConvention, calendar, endOfMonth); final CouponIborSpreadDefinition[] coupons = new CouponIborSpreadDefinition[paymentDates.length]; ZonedDateTime fixingDate = ScheduleCalculator.getAdjustedDate(settlementDate, -index.getSpotLag(), calendar); coupons[0] = new CouponIborSpreadDefinition(index.getCurrency(), paymentDates[0], settlementDate, paymentDates[0], dayCount.getDayCountFraction(settlementDate, paymentDates[0], calendar), sign * notional, fixingDate, index, spread, calendar); for (int loopcpn = 1; loopcpn < paymentDates.length; loopcpn++) { fixingDate = ScheduleCalculator.getAdjustedDate(paymentDates[loopcpn - 1], -index.getSpotLag(), calendar); coupons[loopcpn] = new CouponIborSpreadDefinition(index.getCurrency(), paymentDates[loopcpn], paymentDates[loopcpn - 1], paymentDates[loopcpn], dayCount.getDayCountFraction(paymentDates[loopcpn - 1], paymentDates[loopcpn], calendar), sign * notional, fixingDate, index, spread, calendar); } return new AnnuityCouponIborSpreadDefinition(coupons, calendar); } /** * Annuity 
builder from the conventions and common characteristics. * @param settlementDate The settlement date. * @param maturityDate The annuity maturity date. * @param paymentPeriod The payment period. * @param notional The notional. * @param spread The spread rate. * @param index The Ibor index. * @param isPayer The payer flag. * @param businessDayConvention The leg business day convention. * @param endOfMonth The leg end-of-month convention. * @param dayCount The coupons day count. * @param calendar The holiday calendar for the ibor leg. * @param stub The stub type. * @return The Ibor annuity. */ public static AnnuityCouponIborSpreadDefinition from(final ZonedDateTime settlementDate, final ZonedDateTime maturityDate, final Period paymentPeriod, final double notional, final double spread, final IborIndex index, final boolean isPayer, final BusinessDayConvention businessDayConvention, final boolean endOfMonth, final DayCount dayCount, final Calendar calendar, final StubType stub) { ArgumentChecker.notNull(settlementDate, "settlement date"); ArgumentChecker.notNull(maturityDate, "maturity date"); ArgumentChecker.notNull(paymentPeriod, "payment period"); ArgumentChecker.notNull(index, "index"); ArgumentChecker.notNull(businessDayConvention, "Business day convention"); ArgumentChecker.notNull(dayCount, "Day count convention"); ArgumentChecker.isTrue(notional > 0, "notional <= 0"); final boolean isStubShort = stub.equals(StubType.SHORT_END) || stub.equals(StubType.SHORT_START); final boolean isStubStart = stub.equals(StubType.LONG_START) || stub.equals(StubType.SHORT_START); // Implementation note: dates computed from the end. final ZonedDateTime[] paymentDates = ScheduleCalculator.getAdjustedDateSchedule(settlementDate, maturityDate, paymentPeriod, isStubShort, isStubStart, businessDayConvention, calendar, endOfMonth); final double sign = isPayer ? 
-1.0 : 1.0; final CouponIborSpreadDefinition[] coupons = new CouponIborSpreadDefinition[paymentDates.length]; ZonedDateTime fixingDate = ScheduleCalculator.getAdjustedDate(settlementDate, -index.getSpotLag(), calendar); coupons[0] = new CouponIborSpreadDefinition(index.getCurrency(), paymentDates[0], settlementDate, paymentDates[0], dayCount.getDayCountFraction(settlementDate, paymentDates[0], calendar), sign * notional, fixingDate, index, spread, calendar); for (int loopcpn = 1; loopcpn < paymentDates.length; loopcpn++) { fixingDate = ScheduleCalculator.getAdjustedDate(paymentDates[loopcpn - 1], -index.getSpotLag(), calendar); coupons[loopcpn] = new CouponIborSpreadDefinition(index.getCurrency(), paymentDates[loopcpn], paymentDates[loopcpn - 1], paymentDates[loopcpn], dayCount.getDayCountFraction(paymentDates[loopcpn - 1], paymentDates[loopcpn], calendar), sign * notional, fixingDate, index, spread, calendar); } return new AnnuityCouponIborSpreadDefinition(coupons, calendar); } /** * Annuity builder from the conventions and common characteristics. * @param settlementDate The settlement date. * @param maturityDate The annuity maturity date. * @param notional The notional. * @param index The Ibor index. * @param isPayer The payer flag. * @param spread The common spread. * @param calendar The holiday calendar for the ibor leg. * @return The Ibor annuity. 
*/
public static AnnuityCouponIborSpreadDefinition from(final ZonedDateTime settlementDate, final ZonedDateTime maturityDate, final double notional,
    final IborIndex index, final double spread, final boolean isPayer, final Calendar calendar) {
  ArgumentChecker.notNull(settlementDate, "settlement date");
  ArgumentChecker.notNull(index, "index");
  ArgumentChecker.notNull(maturityDate, "maturity date");
  // Build the plain ibor annuity with the index conventions, then overlay the spread on every coupon.
  final AnnuityCouponIborDefinition iborAnnuity = AnnuityCouponIborDefinition.from(settlementDate, maturityDate, notional, index, isPayer, calendar);
  final int nbCoupons = iborAnnuity.getPayments().length;
  final CouponIborSpreadDefinition[] coupons = new CouponIborSpreadDefinition[nbCoupons];
  for (int i = 0; i < nbCoupons; i++) {
    coupons[i] = CouponIborSpreadDefinition.from(iborAnnuity.getNthPayment(i), spread);
  }
  return new AnnuityCouponIborSpreadDefinition(coupons, calendar);
}

/**
 * Creates an annuity with zero spread from an {@link AnnuityCouponIborDefinition}.
 * @param iborAnnuity The annuity, not null
 * @return An ibor annuity with spread
 */
public static AnnuityCouponIborSpreadDefinition from(final AnnuityCouponIborDefinition iborAnnuity) {
  ArgumentChecker.notNull(iborAnnuity, "ibor annuity");
  final int nbCoupons = iborAnnuity.getPayments().length;
  final CouponIborSpreadDefinition[] coupons = new CouponIborSpreadDefinition[nbCoupons];
  for (int i = 0; i < nbCoupons; i++) {
    coupons[i] = CouponIborSpreadDefinition.from(iborAnnuity.getNthPayment(i), 0);
  }
  return new AnnuityCouponIborSpreadDefinition(coupons, iborAnnuity.getCalendar());
}

/**
 * Returns the underlying ibor index.
 * @return The underlying ibor index
 */
public IborIndex getIborIndex() {
  return _iborIndex;
}

/**
 * Gets the holiday calendar for the ibor index.
* @return The calendar
 */
public Calendar getIborCalendar() {
  return _calendar;
}

@Override
public Annuity<Coupon> toDerivative(final ZonedDateTime date, final DoubleTimeSeries<ZonedDateTime> indexFixingTS) {
  ArgumentChecker.notNull(date, "date");
  final CouponIborSpreadDefinition[] payments = getPayments();
  final List<Coupon> resultList = new ArrayList<>();
  // Keep only the coupons whose payment date is not strictly before the valuation date.
  for (final CouponIborSpreadDefinition payment : payments) {
    if (!date.isAfter(payment.getPaymentDate())) {
      resultList.add(payment.toDerivative(date, indexFixingTS));
    }
  }
  return new Annuity<>(resultList.toArray(new Coupon[resultList.size()]));
}

@Override
public Annuity<Coupon> toDerivative(final ZonedDateTime date) {
  ArgumentChecker.notNull(date, "date");
  final CouponIborSpreadDefinition[] payments = getPayments();
  final List<Coupon> resultList = new ArrayList<>();
  // Keep only the coupons whose payment date is not strictly before the valuation date.
  for (final CouponIborSpreadDefinition payment : payments) {
    if (!date.isAfter(payment.getPaymentDate())) {
      resultList.add(payment.toDerivative(date));
    }
  }
  return new Annuity<>(resultList.toArray(new Coupon[resultList.size()]));
}

@Override
public int hashCode() {
  final int prime = 31;
  return prime * super.hashCode() + _iborIndex.hashCode();
}

@Override
public boolean equals(final Object obj) {
  if (this == obj) {
    return true;
  }
  if (!super.equals(obj) || getClass() != obj.getClass()) {
    return false;
  }
  final AnnuityCouponIborSpreadDefinition other = (AnnuityCouponIborSpreadDefinition) obj;
  return ObjectUtils.equals(_iborIndex, other._iborIndex);
}
}
// // This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, vJAXB 2.1.10 in JDK 6 // See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a> // Any modifications to this file will be lost upon recompilation of the source schema. // Generated on: 2012.06.29 at 10:15:17 AM BST // package org.w3._1999.xhtml; import java.util.ArrayList; import java.util.List; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlAttribute; import javax.xml.bind.annotation.XmlID; import javax.xml.bind.annotation.XmlRootElement; import javax.xml.bind.annotation.XmlSchemaType; import javax.xml.bind.annotation.XmlType; import javax.xml.bind.annotation.adapters.CollapsedStringAdapter; import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter; /** * <p>Java class for anonymous complex type. * * <p>The following schema fragment specifies the expected content contained within this class. 
* * <pre> * &lt;complexType> * &lt;complexContent> * &lt;extension base="{http://www.w3.org/1999/xhtml}Inline"> * &lt;attGroup ref="{http://www.w3.org/1999/xhtml}attrs"/> * &lt;/extension> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "") @XmlRootElement(name = "acronym") public class Acronym extends Inline { @XmlAttribute protected String onclick; @XmlAttribute protected String ondblclick; @XmlAttribute protected String onmousedown; @XmlAttribute protected String onmouseup; @XmlAttribute protected String onmouseover; @XmlAttribute protected String onmousemove; @XmlAttribute protected String onmouseout; @XmlAttribute protected String onkeypress; @XmlAttribute protected String onkeydown; @XmlAttribute protected String onkeyup; @XmlAttribute(name = "lang") @XmlJavaTypeAdapter(CollapsedStringAdapter.class) protected String langCode; @XmlAttribute(namespace = "http://www.w3.org/XML/1998/namespace") protected String lang; @XmlAttribute @XmlJavaTypeAdapter(CollapsedStringAdapter.class) protected String dir; @XmlAttribute @XmlJavaTypeAdapter(CollapsedStringAdapter.class) @XmlID @XmlSchemaType(name = "ID") protected String id; @XmlAttribute(name = "class") @XmlSchemaType(name = "NMTOKENS") protected List<String> clazz; @XmlAttribute protected String style; @XmlAttribute protected String title; /** * Gets the value of the onclick property. * * @return * possible object is * {@link String } * */ public String getOnclick() { return onclick; } /** * Sets the value of the onclick property. * * @param value * allowed object is * {@link String } * */ public void setOnclick(String value) { this.onclick = value; } /** * Gets the value of the ondblclick property. * * @return * possible object is * {@link String } * */ public String getOndblclick() { return ondblclick; } /** * Sets the value of the ondblclick property. 
* * @param value * allowed object is * {@link String } * */ public void setOndblclick(String value) { this.ondblclick = value; } /** * Gets the value of the onmousedown property. * * @return * possible object is * {@link String } * */ public String getOnmousedown() { return onmousedown; } /** * Sets the value of the onmousedown property. * * @param value * allowed object is * {@link String } * */ public void setOnmousedown(String value) { this.onmousedown = value; } /** * Gets the value of the onmouseup property. * * @return * possible object is * {@link String } * */ public String getOnmouseup() { return onmouseup; } /** * Sets the value of the onmouseup property. * * @param value * allowed object is * {@link String } * */ public void setOnmouseup(String value) { this.onmouseup = value; } /** * Gets the value of the onmouseover property. * * @return * possible object is * {@link String } * */ public String getOnmouseover() { return onmouseover; } /** * Sets the value of the onmouseover property. * * @param value * allowed object is * {@link String } * */ public void setOnmouseover(String value) { this.onmouseover = value; } /** * Gets the value of the onmousemove property. * * @return * possible object is * {@link String } * */ public String getOnmousemove() { return onmousemove; } /** * Sets the value of the onmousemove property. * * @param value * allowed object is * {@link String } * */ public void setOnmousemove(String value) { this.onmousemove = value; } /** * Gets the value of the onmouseout property. * * @return * possible object is * {@link String } * */ public String getOnmouseout() { return onmouseout; } /** * Sets the value of the onmouseout property. * * @param value * allowed object is * {@link String } * */ public void setOnmouseout(String value) { this.onmouseout = value; } /** * Gets the value of the onkeypress property. 
* * @return * possible object is * {@link String } * */ public String getOnkeypress() { return onkeypress; } /** * Sets the value of the onkeypress property. * * @param value * allowed object is * {@link String } * */ public void setOnkeypress(String value) { this.onkeypress = value; } /** * Gets the value of the onkeydown property. * * @return * possible object is * {@link String } * */ public String getOnkeydown() { return onkeydown; } /** * Sets the value of the onkeydown property. * * @param value * allowed object is * {@link String } * */ public void setOnkeydown(String value) { this.onkeydown = value; } /** * Gets the value of the onkeyup property. * * @return * possible object is * {@link String } * */ public String getOnkeyup() { return onkeyup; } /** * Sets the value of the onkeyup property. * * @param value * allowed object is * {@link String } * */ public void setOnkeyup(String value) { this.onkeyup = value; } /** * Gets the value of the langCode property. * * @return * possible object is * {@link String } * */ public String getLangCode() { return langCode; } /** * Sets the value of the langCode property. * * @param value * allowed object is * {@link String } * */ public void setLangCode(String value) { this.langCode = value; } /** * Gets the value of the lang property. * * @return * possible object is * {@link String } * */ public String getLang() { return lang; } /** * Sets the value of the lang property. * * @param value * allowed object is * {@link String } * */ public void setLang(String value) { this.lang = value; } /** * Gets the value of the dir property. * * @return * possible object is * {@link String } * */ public String getDir() { return dir; } /** * Sets the value of the dir property. * * @param value * allowed object is * {@link String } * */ public void setDir(String value) { this.dir = value; } /** * Gets the value of the id property. 
* * @return * possible object is * {@link String } * */ public String getId() { return id; } /** * Sets the value of the id property. * * @param value * allowed object is * {@link String } * */ public void setId(String value) { this.id = value; } /** * Gets the value of the clazz property. * * <p> * This accessor method returns a reference to the live list, * not a snapshot. Therefore any modification you make to the * returned list will be present inside the JAXB object. * This is why there is not a <CODE>set</CODE> method for the clazz property. * * <p> * For example, to add a new item, do as follows: * <pre> * getClazz().add(newItem); * </pre> * * * <p> * Objects of the following type(s) are allowed in the list * {@link String } * * */ public List<String> getClazz() { if (clazz == null) { clazz = new ArrayList<String>(); } return this.clazz; } /** * Gets the value of the style property. * * @return * possible object is * {@link String } * */ public String getStyle() { return style; } /** * Sets the value of the style property. * * @param value * allowed object is * {@link String } * */ public void setStyle(String value) { this.style = value; } /** * Gets the value of the title property. * * @return * possible object is * {@link String } * */ public String getTitle() { return title; } /** * Sets the value of the title property. * * @param value * allowed object is * {@link String } * */ public void setTitle(String value) { this.title = value; } }