gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/** * Copyright (C) 2014 JBoss Inc * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.dashbuilder.dataset.backend; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.Calendar; import java.util.Collections; import java.util.Date; import java.util.GregorianCalendar; import java.util.List; import java.util.Locale; import javax.enterprise.context.ApplicationScoped; import org.apache.commons.lang.StringUtils; import org.dashbuilder.dataset.DataSet; import org.dashbuilder.dataset.engine.DataSetHandler; import org.dashbuilder.dataset.engine.group.Interval; import org.dashbuilder.dataset.engine.group.IntervalBuilder; import org.dashbuilder.dataset.engine.group.IntervalList; import org.dashbuilder.dataset.group.DateIntervalType; import org.dashbuilder.dataset.group.ColumnGroup; import org.dashbuilder.dataset.sort.DataSetSort; import org.dashbuilder.dataset.sort.ColumnSort; import org.dashbuilder.dataset.sort.SortOrder; import org.dashbuilder.dataset.sort.SortedList; import org.dashbuilder.dataset.date.Quarter; import static org.dashbuilder.dataset.group.DateIntervalType.*; /** * Interval builder for date columns which generates intervals depending on the underlying data available. 
*/ @ApplicationScoped public class BackendIntervalBuilderDynamicDate implements IntervalBuilder { public IntervalList build(DataSetHandler handler, ColumnGroup columnGroup) { IntervalDateRangeList results = new IntervalDateRangeList(columnGroup); DataSet dataSet = handler.getDataSet(); List values = dataSet.getColumnById(columnGroup.getSourceId()).getValues(); if (values.isEmpty()) { return results; } // Sort the column dates. DataSetSort sortOp = new DataSetSort(); sortOp.addSortColumn(new ColumnSort(columnGroup.getSourceId(), SortOrder.ASCENDING)); DataSetHandler sortResults = handler.sort(sortOp); List<Integer> sortedRows = sortResults.getRows(); if (sortedRows == null || sortedRows.isEmpty()) { return results; } // Get the lower & upper limits. SortedList sortedValues = new SortedList(values, sortedRows); Date minDate = (Date) sortedValues.get(0); Date maxDate = (Date) sortedValues.get(sortedValues.size()-1); // If min/max are equals then create a single interval. if (minDate.compareTo(maxDate) == 0) { IntervalDateRange interval = new IntervalDateRange(DAY, minDate, maxDate); for (int row = 0; row < sortedValues.size(); row++) interval.rows.add(row); results.add(interval); return results; } // Calculate the interval type used according to the constraints set. int maxIntervals = columnGroup.getMaxIntervals(); if (maxIntervals < 1) maxIntervals = 15; DateIntervalType intervalType = YEAR; long millis = (maxDate.getTime() - minDate.getTime()); for (DateIntervalType type : values()) { long nintervals = millis / getDurationInMillis(type); if (nintervals < maxIntervals) { intervalType = type; break; } } // Ensure the interval mode obtained is always greater or equals than the preferred interval size. 
DateIntervalType intervalSize = null; if (!StringUtils.isBlank(columnGroup.getIntervalSize())) { intervalSize = getByName(columnGroup.getIntervalSize()); } if (intervalSize != null && compare(intervalType, intervalSize) == -1) { intervalType = intervalSize; } // Adjust the minDate according to the interval type. Calendar gc = GregorianCalendar.getInstance(); gc.setLenient(false); gc.setTime(minDate); if (YEAR.equals(intervalType)) { gc.set(Calendar.MONTH, 0); gc.set(Calendar.DAY_OF_MONTH, 1); gc.set(Calendar.HOUR, 0); gc.set(Calendar.MINUTE, 0); gc.set(Calendar.SECOND, 0); gc.set(Calendar.MILLISECOND, 0); } if (QUARTER.equals(intervalType)) { int currentMonth = gc.get(Calendar.MONTH); int firstMonthYear = columnGroup.getFirstMonthOfYear().getIndex(); int rest = Quarter.getPositionInQuarter(firstMonthYear, currentMonth); gc.add(Calendar.MONTH, rest * -1); gc.set(Calendar.DAY_OF_MONTH, 1); gc.set(Calendar.HOUR, 0); gc.set(Calendar.MINUTE, 0); gc.set(Calendar.SECOND, 0); gc.set(Calendar.MILLISECOND, 0); } if (MONTH.equals(intervalType)) { gc.set(Calendar.DAY_OF_MONTH, 1); gc.set(Calendar.HOUR, 0); gc.set(Calendar.MINUTE, 0); gc.set(Calendar.SECOND, 0); gc.set(Calendar.MILLISECOND, 0); } if (DAY.equals(intervalType) || DAY_OF_WEEK.equals(intervalType)) { gc.set(Calendar.HOUR, 0); gc.set(Calendar.MINUTE, 0); gc.set(Calendar.SECOND, 0); gc.set(Calendar.MILLISECOND, 0); } if (HOUR.equals(intervalType)) { gc.set(Calendar.MINUTE, 0); gc.set(Calendar.SECOND, 0); gc.set(Calendar.MILLISECOND, 0); } if (MINUTE.equals(intervalType)) { gc.set(Calendar.SECOND, 0); gc.set(Calendar.MILLISECOND, 0); } if (SECOND.equals(intervalType)) { gc.set(Calendar.MILLISECOND, 0); } // Create the intervals according to the min/max dates. 
int index = 0; while (gc.getTime().compareTo(maxDate) <= 0) { Date intervalMinDate = gc.getTime(); // Go to the next interval if (MILLENIUM.equals(intervalType)) { gc.add(Calendar.YEAR, 1000); } if (CENTURY.equals(intervalType)) { gc.add(Calendar.YEAR, 100); } if (DECADE.equals(intervalType)) { gc.add(Calendar.YEAR, 10); } if (YEAR.equals(intervalType)) { gc.add(Calendar.YEAR, 1); } if (QUARTER.equals(intervalType)) { gc.add(Calendar.MONTH, 3); } if (MONTH.equals(intervalType)) { gc.add(Calendar.MONTH, 1); } if (WEEK.equals(intervalType)) { gc.add(Calendar.DAY_OF_MONTH, 7); } if (DAY.equals(intervalType) || DAY_OF_WEEK.equals(intervalType)) { gc.add(Calendar.DAY_OF_MONTH, 1); } if (HOUR.equals(intervalType)) { gc.add(Calendar.HOUR_OF_DAY, 1); } if (MINUTE.equals(intervalType)) { gc.add(Calendar.MINUTE, 1); } if (SECOND.equals(intervalType)) { gc.add(Calendar.SECOND, 1); } // Create the interval. Date intervalMaxDate = gc.getTime(); IntervalDateRange interval = new IntervalDateRange(intervalType, intervalMinDate, intervalMaxDate); results.add(interval); // Add the target rows to the interval. boolean stop = false; while (!stop) { if (index >= sortedValues.size()) { stop = true; } else { Date dateValue = (Date) sortedValues.get(index); Integer row = sortedRows.get(index); if (dateValue.before(intervalMaxDate)){ interval.rows.add(row); index++; } else { stop = true; } } } } // Reverse intervals if requested boolean asc = columnGroup.isAscendingOrder(); if (!asc) Collections.reverse( results ); return results; } private static SimpleDateFormat format = new SimpleDateFormat("dd-MM-yyyy hh:mm:ss"); /** * A list containing date range intervals. 
*/ public class IntervalDateRangeList extends IntervalList { public IntervalDateRangeList(ColumnGroup columnGroup) { super(columnGroup); } public Interval locateInterval(Object value) { Date d = (Date) value; for (Interval interval : this) { IntervalDateRange dateRange = (IntervalDateRange) interval; if (d.equals(dateRange.minDate) || (d.after(dateRange.minDate) && d.before(dateRange.maxDate))) { return interval; } } return null; } } /** * A date interval holding dates belonging to a given range. */ public class IntervalDateRange extends Interval { protected DateIntervalType intervalType; protected Date minDate; protected Date maxDate; public IntervalDateRange(DateIntervalType intervalType, Date minDate, Date maxDate) { super(); this.name = calculateName(intervalType, minDate); this.intervalType = intervalType; this.minDate = minDate; this.maxDate = maxDate; } public String calculateName(DateIntervalType intervalType, Date d) { Locale l = Locale.getDefault(); if (MILLENIUM.equals(intervalType)) { SimpleDateFormat formatYear = new SimpleDateFormat("yyyy", l); return formatYear.format(d); } if (CENTURY.equals(intervalType)) { SimpleDateFormat formatYear = new SimpleDateFormat("yyyy", l); return formatYear.format(d); } if (DECADE.equals(intervalType)) { SimpleDateFormat formatYear = new SimpleDateFormat("yyyy", l); return formatYear.format(d); } if (YEAR.equals(intervalType)) { SimpleDateFormat format = new SimpleDateFormat("yyyy", l); return format.format(d); } if (QUARTER.equals(intervalType)) { SimpleDateFormat format = new SimpleDateFormat("MMM yyyy", l); return format.format(d); } if (MONTH.equals(intervalType)) { SimpleDateFormat format = new SimpleDateFormat("MMMM yyyy", l); return format.format(d); } if (WEEK.equals(intervalType)) { return DateFormat.getDateInstance(DateFormat.SHORT, l).format(d); } if (DAY.equals(intervalType) || DAY_OF_WEEK.equals(intervalType)) { SimpleDateFormat format = new SimpleDateFormat("EEE", l); return format.format(d) + " " + 
DateFormat.getDateInstance(DateFormat.SHORT, l).format(d); } if (HOUR.equals(intervalType)) { SimpleDateFormat format = new SimpleDateFormat("HH", l); return format.format(d) + "h"; } if (MINUTE.equals(intervalType)) { SimpleDateFormat format = new SimpleDateFormat("mm", l); return format.format(d); } if (SECOND.equals(intervalType)) { SimpleDateFormat format = new SimpleDateFormat("ss", l); return format.format(d); } return format.format(d); } } }
/*
 * Copyright (c) Microsoft. All rights reserved.
 * Licensed under the MIT license. See LICENSE file in the project root for full license information.
 */
package com.microsoft.azure.eventhubs.ext.impl;

import com.microsoft.azure.eventhubs.ext.Base64Util;

import java.nio.charset.StandardCharsets;
import java.security.InvalidParameterException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.security.SecureRandom;
import java.util.Locale;
import java.util.Map;
import java.util.Scanner;

/**
 * Builds the HTTP request used to upgrade a connection to the WebSocket protocol
 * (RFC 6455) and validates the server's "101 Switching Protocols" reply, including
 * the Sec-WebSocket-Accept challenge.
 */
public class WebSocketUpgrade {
    public static final String RFC_GUID = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11";

    // BUGFIX: the Sec-WebSocket-Key is security-relevant (it feeds the server's
    // SHA-1 accept challenge); use SecureRandom instead of Math.random().
    private static final SecureRandom RANDOM = new SecureRandom();

    private final char _slash = '/';
    private String _host = "";
    private String _path = "";
    // NOTE(review): _port is stored and shown in toString() but never appended to the
    // Host header in createUpgradeRequest(); preserved as-is — confirm against callers
    // before changing the request format.
    private String _port = "";
    private String _protocol = "";
    private String _webSocketKey = "";
    private Map<String, String> _additionalHeaders = null;
    private boolean _certAvailability = false;

    public WebSocketUpgrade(String hostName, String webSocketPath, int webSocketPort,
                            String webSocketProtocol, Map<String, String> additionalHeaders) {
        setHost(hostName);
        setPath(webSocketPath);
        setPort(webSocketPort);
        setProtocol(webSocketProtocol);
        setAdditionalHeaders(additionalHeaders);
    }

    /**
     * Set host value in host header
     *
     * @param host The host header field value.
     */
    public void setHost(String host) {
        this._host = host;
    }

    /**
     * Set port value in host header. A port of 0 means "unspecified" and clears the value.
     *
     * @param port The port header field value.
     */
    public void setPort(int port) {
        this._port = "";
        if (port != 0) {
            this._port = String.valueOf(port);
        }
    }

    /**
     * Set path value in handshake. A leading '/' is prepended when missing.
     *
     * @param path The path field value.
     */
    public void setPath(String path) {
        this._path = path;
        if (!this._path.isEmpty()) {
            if (this._path.charAt(0) != this._slash) {
                this._path = this._slash + this._path;
            }
        }
    }

    /**
     * Set protocol value in protocol header
     *
     * @param protocol The protocol header field value.
     */
    public void setProtocol(String protocol) {
        this._protocol = protocol;
    }

    /**
     * Add field-value pairs to HTTP header
     *
     * @param additionalHeaders The Map containing the additional headers.
     */
    public void setAdditionalHeaders(Map<String, String> additionalHeaders) {
        _additionalHeaders = additionalHeaders;
    }

    /**
     * Utility function to clear all additional headers.
     * BUGFIX: guarded against NPE when no headers were ever set.
     */
    public void clearAdditionalHeaders() {
        if (_additionalHeaders != null) {
            _additionalHeaders.clear();
        }
    }

    /**
     * Marks that a client certificate is available for the handshake.
     */
    public void setClientCertAvailable() {
        _certAvailability = true;
    }

    /**
     * Creates a random, Base64-encoded 16-byte key for the Sec-WebSocket-Key header.
     */
    private String createWebSocketKey() {
        byte[] key = new byte[16];
        RANDOM.nextBytes(key);
        return Base64Util.encodeBase64StringLocal(key).trim();
    }

    /**
     * Builds the full HTTP upgrade request text.
     *
     * @return The request including status line, upgrade headers and any additional headers.
     * @throws InvalidParameterException if host or protocol have not been set.
     */
    public String createUpgradeRequest() {
        if (this._host.isEmpty()) {
            throw new InvalidParameterException("host header has no value");
        }
        if (this._protocol.isEmpty()) {
            throw new InvalidParameterException("protocol header has no value");
        }
        this._webSocketKey = createWebSocketKey();
        String _endOfLine = "\r\n";
        StringBuilder stringBuilder = new StringBuilder()
                .append("GET https://").append(this._host).append(this._path)
                .append("?").append("iothub-no-client-cert=").append(!this._certAvailability)
                .append(" HTTP/1.1").append(_endOfLine)
                .append("Connection: Upgrade,Keep-Alive").append(_endOfLine)
                .append("Upgrade: websocket").append(_endOfLine)
                .append("Sec-WebSocket-Version: 13").append(_endOfLine)
                .append("Sec-WebSocket-Key: ").append(this._webSocketKey).append(_endOfLine)
                .append("Sec-WebSocket-Protocol: ").append(this._protocol).append(_endOfLine)
                .append("Host: ").append(this._host).append(_endOfLine);
        if (_additionalHeaders != null) {
            for (Map.Entry<String, String> entry : _additionalHeaders.entrySet()) {
                stringBuilder.append(entry.getKey()).append(": ").append(entry.getValue()).append(_endOfLine);
            }
        }
        stringBuilder.append(_endOfLine);
        return stringBuilder.toString();
    }

    /**
     * Validates the server's handshake reply: status line, Upgrade/Connection headers,
     * the negotiated sub-protocol and the Sec-WebSocket-Accept challenge.
     *
     * @param responseBytes Raw bytes of the server's HTTP response.
     * @return true only when every required element of the reply is present and correct.
     */
    public Boolean validateUpgradeReply(byte[] responseBytes) {
        String httpString = new String(responseBytes, StandardCharsets.UTF_8);
        boolean isStatusLineOk = false;
        boolean isUpgradeHeaderOk = false;
        boolean isConnectionHeaderOk = false;
        boolean isProtocolHeaderOk = false;
        boolean isAcceptHeaderOk = false;
        Scanner scanner = new Scanner(httpString);
        while (scanner.hasNextLine()) {
            // BUGFIX: lower-case with Locale.ROOT — default-locale toLowerCase() breaks
            // ASCII matching under locales with special casing rules (e.g. Turkish dotless i).
            String line = scanner.nextLine();
            String lowerLine = line.toLowerCase(Locale.ROOT);
            if (lowerLine.contains("http/1.1") && line.contains("101")
                    && lowerLine.contains("switching protocols")) {
                isStatusLineOk = true;
                continue;
            }
            if (lowerLine.contains("upgrade") && lowerLine.contains("websocket")) {
                isUpgradeHeaderOk = true;
                continue;
            }
            if (lowerLine.contains("connection") && lowerLine.contains("upgrade")) {
                isConnectionHeaderOk = true;
                continue;
            }
            if (lowerLine.contains("sec-websocket-protocol")
                    && lowerLine.contains(this._protocol.toLowerCase(Locale.ROOT))) {
                isProtocolHeaderOk = true;
                continue;
            }
            if (lowerLine.contains("sec-websocket-accept")) {
                MessageDigest messageDigest;
                try {
                    messageDigest = MessageDigest.getInstance("SHA-1");
                } catch (NoSuchAlgorithmException e) {
                    // can't happen since SHA-1 is a known digest
                    break;
                }
                // BUGFIX: encode the key+GUID with an explicit charset instead of the
                // platform default, per RFC 6455 section 4.2.2.
                String expectedKey = Base64Util.encodeBase64StringLocal(
                        messageDigest.digest((this._webSocketKey + RFC_GUID).getBytes(StandardCharsets.UTF_8))).trim();
                if (line.contains(expectedKey)) {
                    isAcceptHeaderOk = true;
                }
                continue;
            }
        }
        scanner.close();
        return isStatusLineOk && isUpgradeHeaderOk && isConnectionHeaderOk
                && isProtocolHeaderOk && isAcceptHeaderOk;
    }

    @Override
    public String toString() {
        StringBuilder builder = new StringBuilder();
        builder.append("WebSocketUpgrade [host=").append(_host)
                .append(", path=").append(_path)
                .append(", port=").append(_port)
                .append(", protocol=").append(_protocol)
                .append(", webSocketKey=").append(_webSocketKey);
        if ((_additionalHeaders != null) && (!_additionalHeaders.isEmpty())) {
            builder.append(", additionalHeaders=");
            for (Map.Entry<String, String> entry : _additionalHeaders.entrySet()) {
                builder.append(entry.getKey()).append(":").append(entry.getValue()).append(", ");
            }
            int lastIndex = builder.lastIndexOf(", ");
            builder.delete(lastIndex, lastIndex + 2);
        }
        builder.append("]");
        return builder.toString();
    }
}
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: grpc/job_master.proto package alluxio.grpc; /** * Protobuf type {@code alluxio.grpc.job.TaskInfo} */ public final class TaskInfo extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:alluxio.grpc.job.TaskInfo) TaskInfoOrBuilder { private static final long serialVersionUID = 0L; // Use TaskInfo.newBuilder() to construct. private TaskInfo(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private TaskInfo() { jobId_ = 0L; taskId_ = 0; status_ = 0; errorMessage_ = ""; result_ = com.google.protobuf.ByteString.EMPTY; } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private TaskInfo( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 8: { bitField0_ |= 0x00000001; jobId_ = input.readInt64(); break; } case 16: { bitField0_ |= 0x00000002; taskId_ = input.readInt32(); break; } case 24: { int rawValue = input.readEnum(); alluxio.grpc.Status value = alluxio.grpc.Status.valueOf(rawValue); if (value == null) { unknownFields.mergeVarintField(3, rawValue); } else { bitField0_ |= 0x00000004; status_ = rawValue; } break; } case 34: { com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000008; errorMessage_ = bs; break; } case 42: { bitField0_ |= 0x00000010; result_ = input.readBytes(); 
break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return alluxio.grpc.JobMasterProto.internal_static_alluxio_grpc_job_TaskInfo_descriptor; } protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return alluxio.grpc.JobMasterProto.internal_static_alluxio_grpc_job_TaskInfo_fieldAccessorTable .ensureFieldAccessorsInitialized( alluxio.grpc.TaskInfo.class, alluxio.grpc.TaskInfo.Builder.class); } private int bitField0_; public static final int JOBID_FIELD_NUMBER = 1; private long jobId_; /** * <code>optional int64 jobId = 1;</code> */ public boolean hasJobId() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional int64 jobId = 1;</code> */ public long getJobId() { return jobId_; } public static final int TASKID_FIELD_NUMBER = 2; private int taskId_; /** * <code>optional int32 taskId = 2;</code> */ public boolean hasTaskId() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional int32 taskId = 2;</code> */ public int getTaskId() { return taskId_; } public static final int STATUS_FIELD_NUMBER = 3; private int status_; /** * <code>optional .alluxio.grpc.job.Status status = 3;</code> */ public boolean hasStatus() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional .alluxio.grpc.job.Status status = 3;</code> */ public alluxio.grpc.Status getStatus() { alluxio.grpc.Status result = alluxio.grpc.Status.valueOf(status_); return result == null ? 
alluxio.grpc.Status.UNKNOWN : result; } public static final int ERRORMESSAGE_FIELD_NUMBER = 4; private volatile java.lang.Object errorMessage_; /** * <code>optional string errorMessage = 4;</code> */ public boolean hasErrorMessage() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>optional string errorMessage = 4;</code> */ public java.lang.String getErrorMessage() { java.lang.Object ref = errorMessage_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { errorMessage_ = s; } return s; } } /** * <code>optional string errorMessage = 4;</code> */ public com.google.protobuf.ByteString getErrorMessageBytes() { java.lang.Object ref = errorMessage_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); errorMessage_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int RESULT_FIELD_NUMBER = 5; private com.google.protobuf.ByteString result_; /** * <code>optional bytes result = 5;</code> */ public boolean hasResult() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <code>optional bytes result = 5;</code> */ public com.google.protobuf.ByteString getResult() { return result_; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeInt64(1, jobId_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeInt32(2, taskId_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeEnum(3, status_); } if 
(((bitField0_ & 0x00000008) == 0x00000008)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, errorMessage_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { output.writeBytes(5, result_); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeInt64Size(1, jobId_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeInt32Size(2, taskId_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream .computeEnumSize(3, status_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, errorMessage_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(5, result_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof alluxio.grpc.TaskInfo)) { return super.equals(obj); } alluxio.grpc.TaskInfo other = (alluxio.grpc.TaskInfo) obj; boolean result = true; result = result && (hasJobId() == other.hasJobId()); if (hasJobId()) { result = result && (getJobId() == other.getJobId()); } result = result && (hasTaskId() == other.hasTaskId()); if (hasTaskId()) { result = result && (getTaskId() == other.getTaskId()); } result = result && (hasStatus() == other.hasStatus()); if (hasStatus()) { result = result && status_ == other.status_; } result = result && (hasErrorMessage() == other.hasErrorMessage()); if (hasErrorMessage()) { result = result && getErrorMessage() .equals(other.getErrorMessage()); } result = result && (hasResult() == other.hasResult()); if (hasResult()) { result = result && getResult() 
.equals(other.getResult()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasJobId()) { hash = (37 * hash) + JOBID_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashLong( getJobId()); } if (hasTaskId()) { hash = (37 * hash) + TASKID_FIELD_NUMBER; hash = (53 * hash) + getTaskId(); } if (hasStatus()) { hash = (37 * hash) + STATUS_FIELD_NUMBER; hash = (53 * hash) + status_; } if (hasErrorMessage()) { hash = (37 * hash) + ERRORMESSAGE_FIELD_NUMBER; hash = (53 * hash) + getErrorMessage().hashCode(); } if (hasResult()) { hash = (37 * hash) + RESULT_FIELD_NUMBER; hash = (53 * hash) + getResult().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static alluxio.grpc.TaskInfo parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static alluxio.grpc.TaskInfo parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static alluxio.grpc.TaskInfo parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static alluxio.grpc.TaskInfo parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static alluxio.grpc.TaskInfo parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static alluxio.grpc.TaskInfo parseFrom( byte[] data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static alluxio.grpc.TaskInfo parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static alluxio.grpc.TaskInfo parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static alluxio.grpc.TaskInfo parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static alluxio.grpc.TaskInfo parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static alluxio.grpc.TaskInfo parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static alluxio.grpc.TaskInfo parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(alluxio.grpc.TaskInfo prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code alluxio.grpc.job.TaskInfo} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:alluxio.grpc.job.TaskInfo) alluxio.grpc.TaskInfoOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return alluxio.grpc.JobMasterProto.internal_static_alluxio_grpc_job_TaskInfo_descriptor; } protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return alluxio.grpc.JobMasterProto.internal_static_alluxio_grpc_job_TaskInfo_fieldAccessorTable .ensureFieldAccessorsInitialized( alluxio.grpc.TaskInfo.class, alluxio.grpc.TaskInfo.Builder.class); } // Construct using alluxio.grpc.TaskInfo.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } public Builder clear() { super.clear(); jobId_ = 0L; bitField0_ = (bitField0_ & ~0x00000001); taskId_ = 0; bitField0_ = (bitField0_ & ~0x00000002); status_ = 0; bitField0_ = (bitField0_ & ~0x00000004); errorMessage_ = ""; bitField0_ = (bitField0_ & ~0x00000008); result_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000010); return this; } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return alluxio.grpc.JobMasterProto.internal_static_alluxio_grpc_job_TaskInfo_descriptor; } public alluxio.grpc.TaskInfo getDefaultInstanceForType() { return alluxio.grpc.TaskInfo.getDefaultInstance(); } public 
// NOTE(review): protoc-generated code for alluxio.grpc.TaskInfo (see the
// @@protoc_insertion_point markers below). Do not edit by hand; regenerate from
// the .proto definition instead. Formatting restored; no code tokens changed.
alluxio.grpc.TaskInfo build() {
  alluxio.grpc.TaskInfo result = buildPartial();
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(result);
  }
  return result;
}

// Copies the builder's set fields into a new message; the bit field tracks
// which optional fields are present.
public alluxio.grpc.TaskInfo buildPartial() {
  alluxio.grpc.TaskInfo result = new alluxio.grpc.TaskInfo(this);
  int from_bitField0_ = bitField0_;
  int to_bitField0_ = 0;
  if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
    to_bitField0_ |= 0x00000001;
  }
  result.jobId_ = jobId_;
  if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
    to_bitField0_ |= 0x00000002;
  }
  result.taskId_ = taskId_;
  if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
    to_bitField0_ |= 0x00000004;
  }
  result.status_ = status_;
  if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
    to_bitField0_ |= 0x00000008;
  }
  result.errorMessage_ = errorMessage_;
  if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
    to_bitField0_ |= 0x00000010;
  }
  result.result_ = result_;
  result.bitField0_ = to_bitField0_;
  onBuilt();
  return result;
}

public Builder clone() {
  return (Builder) super.clone();
}

public Builder setField(
    com.google.protobuf.Descriptors.FieldDescriptor field,
    java.lang.Object value) {
  return (Builder) super.setField(field, value);
}

public Builder clearField(
    com.google.protobuf.Descriptors.FieldDescriptor field) {
  return (Builder) super.clearField(field);
}

public Builder clearOneof(
    com.google.protobuf.Descriptors.OneofDescriptor oneof) {
  return (Builder) super.clearOneof(oneof);
}

public Builder setRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field,
    int index, java.lang.Object value) {
  return (Builder) super.setRepeatedField(field, index, value);
}

public Builder addRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field,
    java.lang.Object value) {
  return (Builder) super.addRepeatedField(field, value);
}

public Builder mergeFrom(com.google.protobuf.Message other) {
  if (other instanceof alluxio.grpc.TaskInfo) {
    return mergeFrom((alluxio.grpc.TaskInfo) other);
  } else {
    super.mergeFrom(other);
    return this;
  }
}

// Field-by-field merge; set fields in 'other' overwrite this builder's values.
public Builder mergeFrom(alluxio.grpc.TaskInfo other) {
  if (other == alluxio.grpc.TaskInfo.getDefaultInstance()) return this;
  if (other.hasJobId()) {
    setJobId(other.getJobId());
  }
  if (other.hasTaskId()) {
    setTaskId(other.getTaskId());
  }
  if (other.hasStatus()) {
    setStatus(other.getStatus());
  }
  if (other.hasErrorMessage()) {
    bitField0_ |= 0x00000008;
    errorMessage_ = other.errorMessage_;
    onChanged();
  }
  if (other.hasResult()) {
    setResult(other.getResult());
  }
  this.mergeUnknownFields(other.unknownFields);
  onChanged();
  return this;
}

public final boolean isInitialized() {
  return true;
}

public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  alluxio.grpc.TaskInfo parsedMessage = null;
  try {
    parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    parsedMessage = (alluxio.grpc.TaskInfo) e.getUnfinishedMessage();
    throw e.unwrapIOException();
  } finally {
    // Merge whatever was parsed before the failure, per protobuf contract.
    if (parsedMessage != null) {
      mergeFrom(parsedMessage);
    }
  }
  return this;
}

private int bitField0_;

private long jobId_ ;
/**
 * <code>optional int64 jobId = 1;</code>
 */
public boolean hasJobId() {
  return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>optional int64 jobId = 1;</code>
 */
public long getJobId() {
  return jobId_;
}
/**
 * <code>optional int64 jobId = 1;</code>
 */
public Builder setJobId(long value) {
  bitField0_ |= 0x00000001;
  jobId_ = value;
  onChanged();
  return this;
}
/**
 * <code>optional int64 jobId = 1;</code>
 */
public Builder clearJobId() {
  bitField0_ = (bitField0_ & ~0x00000001);
  jobId_ = 0L;
  onChanged();
  return this;
}

private int taskId_ ;
/**
 * <code>optional int32 taskId = 2;</code>
 */
public boolean hasTaskId() {
  return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * <code>optional int32 taskId = 2;</code>
 */
public int getTaskId() {
  return taskId_;
}
/**
 * <code>optional int32 taskId = 2;</code>
 */
public Builder setTaskId(int value) {
  bitField0_ |= 0x00000002;
  taskId_ = value;
  onChanged();
  return this;
}
/**
 * <code>optional int32 taskId = 2;</code>
 */
public Builder clearTaskId() {
  bitField0_ = (bitField0_ & ~0x00000002);
  taskId_ = 0;
  onChanged();
  return this;
}

private int status_ = 0;
/**
 * <code>optional .alluxio.grpc.job.Status status = 3;</code>
 */
public boolean hasStatus() {
  return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
 * <code>optional .alluxio.grpc.job.Status status = 3;</code>
 */
public alluxio.grpc.Status getStatus() {
  alluxio.grpc.Status result = alluxio.grpc.Status.valueOf(status_);
  return result == null ? alluxio.grpc.Status.UNKNOWN : result;
}
/**
 * <code>optional .alluxio.grpc.job.Status status = 3;</code>
 */
public Builder setStatus(alluxio.grpc.Status value) {
  if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000004;
  status_ = value.getNumber();
  onChanged();
  return this;
}
/**
 * <code>optional .alluxio.grpc.job.Status status = 3;</code>
 */
public Builder clearStatus() {
  bitField0_ = (bitField0_ & ~0x00000004);
  status_ = 0;
  onChanged();
  return this;
}

private java.lang.Object errorMessage_ = "";
/**
 * <code>optional string errorMessage = 4;</code>
 */
public boolean hasErrorMessage() {
  return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
 * <code>optional string errorMessage = 4;</code>
 */
public java.lang.String getErrorMessage() {
  java.lang.Object ref = errorMessage_;
  if (!(ref instanceof java.lang.String)) {
    com.google.protobuf.ByteString bs =
        (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    // Cache the decoded String only when the bytes are valid UTF-8.
    if (bs.isValidUtf8()) {
      errorMessage_ = s;
    }
    return s;
  } else {
    return (java.lang.String) ref;
  }
}
/**
 * <code>optional string errorMessage = 4;</code>
 */
public com.google.protobuf.ByteString getErrorMessageBytes() {
  java.lang.Object ref = errorMessage_;
  if (ref instanceof String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8(
            (java.lang.String) ref);
    errorMessage_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
/**
 * <code>optional string errorMessage = 4;</code>
 */
public Builder setErrorMessage(
    java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000008;
  errorMessage_ = value;
  onChanged();
  return this;
}
/**
 * <code>optional string errorMessage = 4;</code>
 */
public Builder clearErrorMessage() {
  bitField0_ = (bitField0_ & ~0x00000008);
  errorMessage_ = getDefaultInstance().getErrorMessage();
  onChanged();
  return this;
}
/**
 * <code>optional string errorMessage = 4;</code>
 */
public Builder setErrorMessageBytes(
    com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000008;
  errorMessage_ = value;
  onChanged();
  return this;
}

private com.google.protobuf.ByteString result_ = com.google.protobuf.ByteString.EMPTY;
/**
 * <code>optional bytes result = 5;</code>
 */
public boolean hasResult() {
  return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
 * <code>optional bytes result = 5;</code>
 */
public com.google.protobuf.ByteString getResult() {
  return result_;
}
/**
 * <code>optional bytes result = 5;</code>
 */
public Builder setResult(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000010;
  result_ = value;
  onChanged();
  return this;
}
/**
 * <code>optional bytes result = 5;</code>
 */
public Builder clearResult() {
  bitField0_ = (bitField0_ & ~0x00000010);
  result_ = getDefaultInstance().getResult();
  onChanged();
  return this;
}

public final Builder setUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}

public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}

// @@protoc_insertion_point(builder_scope:alluxio.grpc.job.TaskInfo)
}

// @@protoc_insertion_point(class_scope:alluxio.grpc.job.TaskInfo)
private static final alluxio.grpc.TaskInfo DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new alluxio.grpc.TaskInfo();
}

public static alluxio.grpc.TaskInfo getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

@java.lang.Deprecated public static final com.google.protobuf.Parser<TaskInfo>
    PARSER = new com.google.protobuf.AbstractParser<TaskInfo>() {
  public TaskInfo parsePartialFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return new TaskInfo(input, extensionRegistry);
  }
};

public static com.google.protobuf.Parser<TaskInfo> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<TaskInfo> getParserForType() {
  return PARSER;
}

public alluxio.grpc.TaskInfo getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}

}
package com.ubiqlog.vis.ui;

import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.Locale;

import android.app.Activity;
import android.app.AlertDialog;
import android.app.ProgressDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.DialogInterface.OnClickListener;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Point;
import android.graphics.Rect;
import android.graphics.Typeface;
import android.graphics.Paint.Style;
import android.graphics.drawable.ShapeDrawable;
import android.graphics.drawable.shapes.RoundRectShape;
import android.location.Address;
import android.location.Geocoder;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.view.ViewGroup.LayoutParams;
import android.widget.LinearLayout;
import android.widget.SeekBar;

import com.google.android.gms.maps.CameraUpdate;
import com.google.android.gms.maps.CameraUpdateFactory;
import com.google.android.gms.maps.GoogleMap;
import com.google.android.gms.maps.MapFragment;
import com.google.android.gms.maps.MapView;
import com.google.android.gms.maps.OnMapReadyCallback;
//import com.google.android.maps.GeoPoint;
//import com.google.android.maps.MapActivity;
//import com.google.android.maps.MapController;
//import com.google.android.maps.MapView;
import com.google.android.gms.maps.model.LatLng;
import com.google.android.gms.maps.model.MarkerOptions;

import com.ubiqlog.ui.R;
import com.ubiqlog.vis.common.GeoPoint;
import com.ubiqlog.vis.common.Settings;
import com.ubiqlog.vis.ui.extras.ControlBar;
import com.ubiqlog.vis.extras.search.Searcher;
import com.ubiqlog.vis.utils.UserFriendlyException;
import com.ubiqlog.vis.utils.Utils;

/**
 * LocationLog activity: visualizes logged GPS positions on a Google Map,
 * stepping through them with a {@link ControlBar} slider.
 *
 * @author Victor Gugonatu
 * @date 10.2010
 * @version 1.0
 */
// MapActivity is deprecated and is now being replaced with
// MapFragment - AP
public class LocationLog extends Activity implements OnMapReadyCallback {
    // Location points loaded for the selected time range (sorted by time).
    List<UbiqGeoPoint> _points = null;
    //MapView _mapView = null;
    MapFragment _mapView = null;
    Boolean _needToLoadData = true;
    Date _start = null;
    Date _end = null;
    // Playback/step control bar shown under the map.
    ControlBar _cBar = null;
    Context _context = null;
    ProgressDialog _progressDialog = null;
    private GoogleMap googleMap;

    // Closes the activity when the "no internet" dialog is dismissed.
    private DialogInterface.OnClickListener ic_clicked = new DialogInterface.OnClickListener() {
        public void onClick(DialogInterface arg0, int arg1) {
            // finish application
            LocationLog.this.finish();
        }
    };

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.layout_locationlog);
        // check for internet connection; if none, show a dialog to the user;
        // else continue application
        if (!Utils.Instance().hasInternetConnection(this)) {
            AlertDialog dlg = new AlertDialog.Builder(this).create();
            dlg.setButton(DialogInterface.BUTTON_NEUTRAL, getResources()
                    .getText(R.string.Vis_location_ic_dialog_ok), ic_clicked);
            dlg.setMessage(getResources().getText(
                    R.string.Vis_location_ic_dialog_message));
            dlg.show();
        } else {
            _cBar = new ControlBar(this, 0, Settings.location_timeFrame, stateChanged, null, false, true);
            /*
            _mapView = new MapView(this, Settings.googleMapKey);
            // disable user interaction
            _mapView.setClickable(false);
            _mapView.setEnabled(false);
            _context = this;
            // set weight=1 to cover the space left on the screen
            _mapView.setLayoutParams(new LinearLayout.LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT, 1));
            */
            // Added by AP
            _mapView = (MapFragment) getFragmentManager().findFragmentById(R.id.map);
            if (_mapView.getView() != null) {
                _mapView.getView().setClickable(false);
                _mapView.getView().setEnabled(false);
            }
            _context = this;
            // set weight=1 to cover the space left on the screen
            // NOTE(review): getView() was null-checked just above, but here it is
            // dereferenced unconditionally -- possible NullPointerException if the
            // fragment's view is not created yet. TODO confirm and guard.
            _mapView.getView().setLayoutParams(new LinearLayout.LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT, 1));
            _mapView.getMapAsync(this);
            _cBar.setLayoutParams(new LinearLayout.LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.WRAP_CONTENT));
            //LinearLayout locationLayout = new LinearLayout(this);
            LinearLayout locationLayout = (LinearLayout) findViewById(R.id.lin_lay_maps);
            locationLayout.setOrientation(LinearLayout.VERTICAL);
            //locationLayout.addView(_mapView);
            //locationLayout.addView(_mapView.getView());
            locationLayout.addView(_cBar);
            //setContentView(locationLayout);
        }
    }

    /*
    @Override
    protected boolean isRouteDisplayed() {
        // no route information are displayed
        return false;
    }
    */

    // Callback from getMapAsync(); stores the map handle used by stateChanged.
    @Override
    public void onMapReady(GoogleMap googleMap) {
        this.googleMap = googleMap;
        this.googleMap.setMyLocationEnabled(true);
    }

    /*
    // custom map overlay -> display the marker and the location
    class UbiqMapOverlay extends com.google.android.maps.Overlay {
        private UbiqGeoPoint _point = null;
        private UbiqGeoPoint _pointBefore1 = null;
        private UbiqGeoPoint _pointBefore2 = null;

        public UbiqMapOverlay(UbiqGeoPoint point, UbiqGeoPoint pointBefore1, UbiqGeoPoint pointBefore2) {
            super();
            _point = point.initialise();
            _pointBefore1 = pointBefore1 != null ? pointBefore1.initialise() : null;
            _pointBefore2 = pointBefore2 != null ? pointBefore2.initialise() : null;
        }

        @Override
        public boolean draw(Canvas canvas, MapView mapView, boolean shadow, long when) {
            super.draw(canvas, mapView, shadow);
            // translate the GeoPoint to screen pixels---
            Point screenPts = new Point();
            mapView.getProjection().toPixels(_point.get_geoPoint(), screenPts);
            Point screenPtsBefore1 = new Point();
            if (_pointBefore1 != null)
                mapView.getProjection().toPixels(_pointBefore1.get_geoPoint(), screenPtsBefore1);
            Point screenPtsBefore2 = new Point();
            if (_pointBefore2 != null)
                mapView.getProjection().toPixels(_pointBefore2.get_geoPoint(), screenPtsBefore2);
            // add the marker
            Bitmap bmp = BitmapFactory.decodeResource(getResources(), R.drawable.bluepoint);
            canvas.drawBitmap(bmp, screenPts.x - (bmp.getWidth() / 2), screenPts.y - (bmp.getHeight() / 2), null);
            Paint paint = new Paint();
            paint.setAlpha(90);
            if (_pointBefore1 != null)
                canvas.drawBitmap(bmp, screenPtsBefore1.x - (bmp.getWidth() / 2), screenPtsBefore1.y - (bmp.getHeight() / 2), paint);
            paint.setAlpha(40);
            if (_pointBefore2 != null)
                canvas.drawBitmap(bmp, screenPtsBefore2.x - (bmp.getWidth() / 2), screenPtsBefore2.y - (bmp.getHeight() / 2), paint);
            // configure paint for the location
            Paint p = new Paint();
            p.setAntiAlias(true);
            p.setColor(Color.BLACK);
            p.setFakeBoldText(true);
            p.setTypeface(Typeface.DEFAULT_BOLD);
            p.setTextAlign(Paint.Align.CENTER);
            p.setTextScaleX(1.2f);
            // add the location information
            // draw text cannot draw newline (\n)
            // text is split and drawn separately
            int yInitOffset = 0;
            int yLineOffset = 5;
            Rect bounds = new Rect();
            Rect totalbounds = new Rect();
            String[] phrases = (_point.getAddress()).split("\n");
            for (String phrase : phrases) {
                p.getTextBounds(phrase, 0, phrase.length(), bounds);
                if (totalbounds.bottom == 0) {
                    totalbounds.bottom = yInitOffset + yLineOffset + bounds.bottom - bounds.top;
                    totalbounds.top = yInitOffset + yLineOffset;
                    totalbounds.left = 0;
                    totalbounds.right = bounds.right;
                } else {
                    totalbounds.bottom += yLineOffset + bounds.bottom - bounds.top;
                    totalbounds.right = Math.max(bounds.right, totalbounds.right);
                }
            }
            int yTextOffset = 20;
            int xTextOffset = 20;
            int yRectOffset = 10;
            int xRectOffset = 5;
            int yOrigLineOffset = yLineOffset;
            int diff = bounds.bottom - bounds.top;
            int diffOrig = diff;
            for (String phrase : phrases) {
                canvas.drawText(phrase, screenPts.x, screenPts.y + bmp.getHeight() + yInitOffset + yLineOffset + diff, p);
                yLineOffset += yOrigLineOffset;
                diff += diffOrig;
            }
            String dateTime = _point.get_dateTime().toLocaleString();
            p.setTextAlign(Paint.Align.RIGHT);
            p.setColor(Color.WHITE);
            ShapeDrawable sp = new ShapeDrawable(new RoundRectShape(
                    new float[] { 12, 12, 12, 12, 12, 12, 12, 12 }, null, null));
            bounds = new Rect();
            p.getTextBounds(dateTime, 0, dateTime.length(), bounds);
            sp.setBounds(bounds.left + (mapView.getRight() - bounds.right - yTextOffset) - yRectOffset,
                    bounds.top + (mapView.getTop() + xTextOffset) - xRectOffset,
                    bounds.right + (mapView.getRight() - bounds.right - yTextOffset) + yRectOffset,
                    bounds.bottom + (mapView.getTop() + xTextOffset) + xRectOffset);
            sp.getPaint().setColor(Color.argb(90, 0, 0, 0));
            sp.getPaint().setStyle(Style.FILL_AND_STROKE);
            sp.draw(canvas);
            canvas.drawText(dateTime, mapView.getRight() - yTextOffset, mapView.getTop() + xTextOffset, p);
            return false;
        }
    }
    */

    // ubiqlog GeoPoint
    // contains the needed data: coordinates, timestamp, reverse-geocoded address
    public class UbiqGeoPoint implements Comparable<UbiqGeoPoint> {
        private String _address = null;
        private Date _dateTime = null;
        private String _rowData = null;
        // set to true once parsing _rowData fails, so we never retry
        private Boolean isCorrupt = false;
        //private GeoPoint _geoPoint = null;
        private LatLng _geoPoint = null;

        public UbiqGeoPoint(String rowData) {
            _rowData = rowData;
        }

        /*
         * Lazily parses the raw log line (quote-delimited JSON-ish format) into
         * coordinates, timestamp, and a reverse-geocoded address. Idempotent.
         */
        public UbiqGeoPoint initialise() {
            if (!isCorrupt && _rowData != null && _geoPoint == null) {
                try {
                    // entities[5] = latitude, entities[9] = longitude,
                    // entities[17] = timestamp (see sample line in getLocationLog)
                    String[] entities = _rowData.split("\\\"");
                    //int latitude = (int) (Double.parseDouble(entities[5]) * 1E6);
                    double latitude = (Double.parseDouble(entities[5]));
                    //int longitude = (int) (Double.parseDouble(entities[9]) * 1E6);
                    double longitude = (Double.parseDouble(entities[9]));
                    this._geoPoint = new LatLng(latitude, longitude);
                    // Mar 6 2010 2:49:16 AM -> unknown locale; US locale: Mar 6,
                    // 2010 2:49:16 AM
                    // NOTE(review): Date.parse is deprecated and locale-sensitive.
                    _dateTime = new Date(Date.parse(entities[17]));
                    Geocoder geoCoder = new Geocoder(getBaseContext(), Locale
                            .getDefault());
                    List<Address> addresses;
                    try {
                        /*
                        addresses = geoCoder.getFromLocation(this._geoPoint
                                .getLatitudeE6() / 1E6, this._geoPoint
                                .getLongitudeE6() / 1E6, 1);
                        */
                        addresses = geoCoder.getFromLocation(this._geoPoint.latitude,
                                this._geoPoint.longitude, 1);
                        _address = "";
                        if (addresses.size() > 0) {
                            for (int i = 0; i < addresses.get(0)
                                    .getMaxAddressLineIndex(); i++) {
                                _address += addresses.get(0).getAddressLine(i) + "\n";
                            }
                        }
                    } catch (IOException e) {
                        // nothing -- geocoding is best-effort
                    }
                } catch (Exception ex) {
                    isCorrupt = true;
                }
            }
            return this;
        }

        public UbiqGeoPoint(double latitudeE6, double longitudeE6, Date dateTime) {
            _geoPoint = new LatLng(latitudeE6, longitudeE6);
            _dateTime = dateTime;
        }

        /*
         * try to get the address from gps coordinates (lazy, cached in _address)
         */
        public String getAddress() {
            if (_address == null) {
                _address = "";
                Geocoder geoCoder = new Geocoder(getBaseContext(), Locale.getDefault());
                List<Address> addresses;
                try {
                    /*
                    addresses = geoCoder.getFromLocation(this._geoPoint
                            .getLatitudeE6() / 1E6, this._geoPoint
                            .getLongitudeE6() / 1E6, 1);
                    */
                    addresses = geoCoder.getFromLocation(this._geoPoint.latitude,
                            this._geoPoint.longitude, 1);
                    if (addresses.size() > 0) {
                        for (int i = 0; i < addresses.get(0)
                                .getMaxAddressLineIndex(); i++) {
                            _address += addresses.get(0).getAddressLine(i) + "\n";
                        }
                    }
                } catch (IOException e) {
                    // nothing -- geocoding is best-effort
                }
            }
            return _address;
        }

        public Date get_dateTime() {
            return _dateTime;
        }

        public String get_rowData() {
            return _rowData;
        }

        public LatLng get_geoPoint() {
            this.initialise();
            return _geoPoint;
        }

        // Orders points chronologically.
        public int compareTo(UbiqGeoPoint another) {
            return this._dateTime.compareTo(another._dateTime);
        }
    }

    // Slider callback: step == -1 triggers a (re)search of the logs; any other
    // step shows the marker for _points[step] and moves the camera to it.
    ControlBar.OnStateChangedListener stateChanged = new ControlBar.OnStateChangedListener() {
        public void onStateChanged(SeekBar seekBar, int step) {
            //MapController mc = _mapView.getController();
            //_mapView.getOverlays().clear();
            googleMap.clear();
            if (step == -1) {
                _progressDialog = ProgressDialog.show(_context, getResources()
                        .getText(R.string.Vis_loading), getResources().getText(
                        R.string.Vis_Searching), true, false);
                SearchThread searchThread = new SearchThread();
                searchThread.start();
            } else {
                /*
                _mapView.getgetOverlays().add(
                        new UbiqMapOverlay(_points.get(step),
                                (step > 0 ? _points.get(step - 1) : null),
                                (step > 1 ? _points.get(step - 2) : null)));
                mc.animateTo(_points.get(step).get_geoPoint());
                mc.setZoom(_mapView.getMaxZoomLevel());
                */
                // Added by AP
                // Get Current GeoPoint
                UbiqGeoPoint _point = _points.get(step).initialise();
                // Get Text ready for marker
                String[] phrases = (_points.get(step).getAddress()).split("\n");
                String dateTime = _points.get(step).get_dateTime().toLocaleString();
                StringBuilder markerString = new StringBuilder();
                for (String phrase : phrases) {
                    markerString.append(phrase);
                    markerString.append('\n');
                }
                markerString.append(dateTime);
                // Add a Marker to the Map
                googleMap.addMarker(new MarkerOptions()
                        .position(_point.get_geoPoint()) // latlng
                        .title(markerString.toString())); // marker text
                // Move camera
                CameraUpdate cameraUpdate = CameraUpdateFactory.newLatLngZoom(
                        _points.get(step).get_geoPoint(), googleMap.getMaxZoomLevel());
                googleMap.animateCamera(cameraUpdate);
            }
        }
    };

    /*
     * the search is done in another Thread so that the UI won't be blocked;
     * android closes the application if the UI is not responding for a period
     * of time
     */
    private class SearchThread extends Thread {
        @Override
        public void run() {
            // getData
            _start = _cBar.getStartDate();
            _end = _cBar.getEndDate();
            Boolean play = false;
            try {
                _points = getLocationLog(_start, _end);
                _progressDialog.dismiss();
                if (_points.size() > 0) {
                    play = true;
                } else {
                    play = false;
                    // no data found -> show a dialog via the UI-thread handler
                    Message msg = Message.obtain();
                    msg.obj = _context.getResources().getString(
                            R.string.Vis_noData);
                    handler.sendMessage(msg);
                }
                if (play) {
                    // player is valid
                    handlerChangeData.sendEmptyMessage(0);
                }
            } catch (UserFriendlyException e) {
                Message msg = Message.obtain();
                msg.obj = e.getMessage();
                handler.sendMessage(msg);
            }
        }

        // Shows an error/info dialog (msg.obj is the text) and resets the bar.
        private Handler handler = new Handler() {
            @Override
            public void handleMessage(Message msg) {
                _progressDialog.dismiss();
                AlertDialog aldlg = new AlertDialog.Builder(_context).create();
                aldlg.setMessage((String) msg.obj);
                aldlg.setButton(AlertDialog.BUTTON_NEUTRAL, _context
                        .getResources().getString(R.string.Vis_Ok),
                        (OnClickListener) null);
                aldlg.show();
                _cBar.setProgress(0);
                _cBar.setMax(0);
            }
        };

        // Rescales the control bar to the freshly loaded points and starts playback.
        private Handler handlerChangeData = new Handler() {
            @Override
            public void handleMessage(Message msg) {
                if (_points.size() > 0) {
                    _cBar.setProgress(0);
                    _cBar.setMax(_points.size() - 1);
                    _cBar.setValidAndPlay();
                }
            }
        };
    }

    /*
     * search the log files for data between from and to Date
     */
    public ArrayList<UbiqGeoPoint> getLocationLog(Date from, Date to)
            throws UserFriendlyException {
        ArrayList<UbiqGeoPoint> points = new ArrayList<UbiqGeoPoint>();
        Searcher searcher = new Searcher();
        // NOTE(review): SimpleDateFormat is not thread-safe; these instances are
        // local, so that is fine here.
        SimpleDateFormat dateformat = new SimpleDateFormat("M-d-yyyy");
        String dateFrom = dateformat.format(from);
        String dateTo = dateformat.format(to);
        ArrayList<String> lines = searcher.searchFolder(
                Settings.SensorLocation, dateFrom, dateTo, this);
        SimpleDateFormat dateformatLog = new SimpleDateFormat(
                "M-d-yyyy HH:mm:ss");
        if (lines != null) {
            for (String line : lines) {
                // Sample line ("Longtitude" spelling is what the logger writes):
                // {"Location":{"Latitude":"48.23316693305829","Longtitude":"16.377139091491223","Altitude":"220.0","time":"Mar 6 2010 2:49:16 AM","Accuracy":"9.487171","Provider":"gps"}}
                try {
                    String[] entities = line.split("\\\"");
                    //int latitude = (int) (Double.parseDouble(entities[5]) * 1E6);
                    double latitude = (Double.parseDouble(entities[5]));
                    //int longitude = (int) (Double.parseDouble(entities[9]) * 1E6);
                    double longitude = (Double.parseDouble(entities[9]));
                    // Mar 6 2010 2:49:16 AM -> unknown locale; US locale: Mar 6,
                    // 2010 2:49:16 AM
                    Date dateTime = null;
                    try {
                        dateTime = dateformatLog.parse(entities[17]);
                    } catch (Exception ex) {
                        // fallback: deprecated Date.parse after stripping commas
                        dateTime = new Date(Date.parse(entities[17].replace(
                                ",", "")));
                    }
                    if (dateTime != null && dateTime.compareTo(from) >= 0
                            && dateTime.compareTo(to) <= 0) {
                        UbiqGeoPoint point = new UbiqGeoPoint(latitude,
                                longitude, dateTime);
                        points.add(point);
                    }
                } catch (Exception exc) {
                    // ignore corrupted lines
                }
            }
        }
        Collections.sort(points);
        return points;
    }
}
/* * Copyright 2012 Decebal Suiu * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this work except in compliance with * the License. You may obtain a copy of the License in the LICENSE file, or at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package ro.fortsoft.pf4j; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import ro.fortsoft.pf4j.util.*; import java.io.File; import java.io.FileFilter; import java.io.IOException; import java.util.*; import java.util.regex.Matcher; import java.util.regex.Pattern; /** * Default implementation of the PluginManager interface. * * @author Decebal Suiu */ public class DefaultPluginManager implements PluginManager { private static final Logger log = LoggerFactory.getLogger(DefaultPluginManager.class); public static final String DEFAULT_PLUGINS_DIRECTORY = "plugins"; public static final String DEVELOPMENT_PLUGINS_DIRECTORY = "../plugins"; private static final String PLUGIN_PACKAGE_PREFIX_IZOU_SDK = "org.intellimate.izou.sdk"; /** * The plugins repository. */ private File pluginsDirectory; private ExtensionFinder extensionFinder; private PluginDescriptorFinder pluginDescriptorFinder; private PluginClasspath pluginClasspath; /** * A map of plugins this manager is responsible for (the key is the 'pluginId'). */ private Map<String, PluginWrapper> plugins; /** * A map of plugin class loaders (he key is the 'pluginId'). 
*/ private Map<String, IzouPluginClassLoader> pluginClassLoaders; /** * A map of sdks (also plugins) paired with their class loaders */ private Map<String, IzouPluginClassLoader> sdkClassLoaders; /** * A relation between 'pluginPath' and 'pluginId' */ private Map<String, String> pathToIdMap; /** * A list with unresolved plugins (unresolved dependency). */ private List<PluginWrapper> unresolvedPlugins; /** * A list with resolved plugins (resolved dependency). */ private List<PluginWrapper> resolvedPlugins; /** * A list with started plugins. */ private List<PluginWrapper> startedPlugins; private List<String> enabledPlugins; private List<String> disabledPlugins; /** * The registered {@link PluginStateListener}s. */ private List<PluginStateListener> pluginStateListeners; /** * Cache value for the runtime mode. No need to re-read it because it wont change at * runtime. */ private RuntimeMode runtimeMode; /** * The system version used for comparisons to the plugin requires attribute. */ private Version systemVersion = Version.ZERO; private PluginFactory pluginFactory; private final List<AspectOrAffected> aspectOrAffectedList; private final IzouWeavingClassLoader weavingClassloader; private ExtensionFactory extensionFactory; /** * Constructs DefaultPluginManager which the given plugins directory. 
* * @param pluginsDirectory * the directory to search for plugins * @param aspectOrAffectedList the list of aspects or affected classes */ public DefaultPluginManager(File pluginsDirectory, List<AspectOrAffected> aspectOrAffectedList) { this.pluginsDirectory = pluginsDirectory; this.aspectOrAffectedList = aspectOrAffectedList; ClassLoader parent = this.getClass().getClassLoader(); this.weavingClassloader = new IzouWeavingClassLoader(parent, aspectOrAffectedList); initialize(); } @Override public void setSystemVersion(Version version) { systemVersion = version; } @Override public Version getSystemVersion() { return systemVersion; } /** * returns the directory the plugins are loaded from * * @return a file */ @Override public File getPluginDirectory() { return pluginsDirectory; } @Override public List<PluginWrapper> getPlugins() { return new ArrayList<PluginWrapper>(plugins.values()); } @Override public List<PluginWrapper> getPlugins(PluginState pluginState) { List<PluginWrapper> plugins= new ArrayList<PluginWrapper>(); for (PluginWrapper plugin : getPlugins()) { if (pluginState.equals(plugin.getPluginState())) { plugins.add(plugin); } } return plugins; } @Override public List<PluginWrapper> getResolvedPlugins() { return resolvedPlugins; } @Override public List<PluginWrapper> getUnresolvedPlugins() { return unresolvedPlugins; } @Override public List<PluginWrapper> getStartedPlugins() { return startedPlugins; } @Override public PluginWrapper getPlugin(String pluginId) { return plugins.get(pluginId); } @Override public String loadPlugin(File pluginArchiveFile) { if ((pluginArchiveFile == null) || !pluginArchiveFile.exists()) { throw new IllegalArgumentException(String.format("Specified plugin %s does not exist!", pluginArchiveFile)); } log.debug("Loading plugin from '{}'", pluginArchiveFile); File pluginDirectory = null; try { pluginDirectory = expandPluginArchive(pluginArchiveFile); } catch (IOException e) { log.error(e.getMessage(), e); } if ((pluginDirectory == 
null) || !pluginDirectory.exists()) { throw new IllegalArgumentException(String.format("Failed to expand %s", pluginArchiveFile)); } try { PluginWrapper pluginWrapper = loadPluginDirectory(pluginDirectory); // TODO uninstalled plugin dependencies? unresolvedPlugins.remove(pluginWrapper); resolvedPlugins.add(pluginWrapper); firePluginStateEvent(new PluginStateEvent(this, pluginWrapper, null)); return pluginWrapper.getDescriptor().getPluginId(); } catch (PluginException e) { log.error(e.getMessage(), e); } return null; } /** * Start all active plugins. */ @Override public void startPlugins() { for (PluginWrapper pluginWrapper : resolvedPlugins) { PluginState pluginState = pluginWrapper.getPluginState(); if ((PluginState.DISABLED != pluginState) && (PluginState.STARTED != pluginState)) { try { PluginDescriptor pluginDescriptor = pluginWrapper.getDescriptor(); log.info("Start plugin '{}:{}'", pluginDescriptor.getPluginId(), pluginDescriptor.getVersion()); pluginWrapper.getPlugin().start(); pluginWrapper.setPluginState(PluginState.STARTED); startedPlugins.add(pluginWrapper); firePluginStateEvent(new PluginStateEvent(this, pluginWrapper, pluginState)); } catch (PluginException e) { log.error(e.getMessage(), e); } } } } /** * Start the specified plugin and it's dependencies. 
*/ @Override public PluginState startPlugin(String pluginId) { if (!plugins.containsKey(pluginId)) { throw new IllegalArgumentException(String.format("Unknown pluginId %s", pluginId)); } PluginWrapper pluginWrapper = getPlugin(pluginId); PluginDescriptor pluginDescriptor = pluginWrapper.getDescriptor(); PluginState pluginState = pluginWrapper.getPluginState(); if (PluginState.STARTED == pluginState) { log.debug("Already started plugin '{}:{}'", pluginDescriptor.getPluginId(), pluginDescriptor.getVersion()); return PluginState.STARTED; } if (PluginState.DISABLED == pluginState) { // automatically enable plugin on manual plugin start if (!enablePlugin(pluginId)) { return pluginState; } } for (PluginDependency dependency : pluginDescriptor.getDependencies()) { startPlugin(dependency.getPluginId()); } try { log.info("Start plugin '{}:{}'", pluginDescriptor.getPluginId(), pluginDescriptor.getVersion()); pluginWrapper.getPlugin().start(); pluginWrapper.setPluginState(PluginState.STARTED); startedPlugins.add(pluginWrapper); firePluginStateEvent(new PluginStateEvent(this, pluginWrapper, pluginState)); } catch (PluginException e) { log.error(e.getMessage(), e); } return pluginWrapper.getPluginState(); } /** * Stop all active plugins. 
*/ @Override public void stopPlugins() { // stop started plugins in reverse order Collections.reverse(startedPlugins); Iterator<PluginWrapper> itr = startedPlugins.iterator(); while (itr.hasNext()) { PluginWrapper pluginWrapper = itr.next(); PluginState pluginState = pluginWrapper.getPluginState(); if (PluginState.STARTED == pluginState) { try { PluginDescriptor pluginDescriptor = pluginWrapper.getDescriptor(); log.info("Stop plugin '{}:{}'", pluginDescriptor.getPluginId(), pluginDescriptor.getVersion()); pluginWrapper.getPlugin().stop(); pluginWrapper.setPluginState(PluginState.STOPPED); itr.remove(); firePluginStateEvent(new PluginStateEvent(this, pluginWrapper, pluginState)); } catch (PluginException e) { log.error(e.getMessage(), e); } } } } /** * Stop the specified plugin and it's dependencies. */ @Override public PluginState stopPlugin(String pluginId) { if (!plugins.containsKey(pluginId)) { throw new IllegalArgumentException(String.format("Unknown pluginId %s", pluginId)); } PluginWrapper pluginWrapper = getPlugin(pluginId); PluginDescriptor pluginDescriptor = pluginWrapper.getDescriptor(); PluginState pluginState = pluginWrapper.getPluginState(); if (PluginState.STOPPED == pluginState) { log.debug("Already stopped plugin '{}:{}'", pluginDescriptor.getPluginId(), pluginDescriptor.getVersion()); return PluginState.STOPPED; } // test for disabled plugin if (PluginState.DISABLED == pluginState) { // do nothing return pluginState; } for (PluginDependency dependency : pluginDescriptor.getDependencies()) { stopPlugin(dependency.getPluginId()); } try { log.info("Stop plugin '{}:{}'", pluginDescriptor.getPluginId(), pluginDescriptor.getVersion()); pluginWrapper.getPlugin().stop(); pluginWrapper.setPluginState(PluginState.STOPPED); startedPlugins.remove(pluginWrapper); firePluginStateEvent(new PluginStateEvent(this, pluginWrapper, pluginState)); } catch (PluginException e) { log.error(e.getMessage(), e); } return pluginWrapper.getPluginState(); } /** * Load plugins. 
*/
@Override
public void loadPlugins() {
    log.debug("Lookup plugins in '{}'", pluginsDirectory.getAbsolutePath());
    // check for plugins directory
    if (!pluginsDirectory.exists() || !pluginsDirectory.isDirectory()) {
        log.error("No '{}' directory", pluginsDirectory.getAbsolutePath());
        return;
    }
    // expand all plugin archives (*.zip) into sibling directories
    FileFilter zipFilter = new ZipFileFilter();
    File[] zipFiles = pluginsDirectory.listFiles(zipFilter);
    if (zipFiles != null) {
        for (File zipFile : zipFiles) {
            try {
                expandPluginArchive(zipFile);
            } catch (IOException e) {
                // a broken archive is logged and skipped
                log.error(e.getMessage(), e);
            }
        }
    }
    // check for no plugins: candidate dirs = non-hidden subdirectories
    List<FileFilter> filterList = new ArrayList<FileFilter>();
    filterList.add(new DirectoryFileFilter());
    filterList.add(new NotFileFilter(createHiddenPluginFilter()));
    FileFilter pluginsFilter = new AndFileFilter(filterList);
    File[] directories = pluginsDirectory.listFiles(pluginsFilter);
    if (directories == null) {
        directories = new File[0];
    }
    log.debug("Found {} possible plugins: {}", directories.length, directories);
    if (directories.length == 0) {
        log.info("No plugins");
        return;
    }
    // load any plugin from plugins directory
    for (File directory : directories) {
        try {
            loadPluginDirectory(directory);
        } catch (PluginException e) {
            log.error(e.getMessage(), e);
        }
    }
    // resolve 'unresolvedPlugins'
    try {
        resolvePlugins();
    } catch (PluginException e) {
        log.error(e.getMessage(), e);
    }
}

/**
 * Unload the plugin with the given id together with its dependencies.
 * The plugin is stopped first; a plugin that remains STARTED cannot be unloaded.
 *
 * @param pluginId identifier of the plugin to unload
 * @return {@code true} if the plugin (and every dependency) was unloaded
 */
@Override
public boolean unloadPlugin(String pluginId) {
    try {
        PluginState pluginState = stopPlugin(pluginId);
        if (PluginState.STARTED == pluginState) {
            return false;
        }
        PluginWrapper pluginWrapper = getPlugin(pluginId);
        PluginDescriptor descriptor = pluginWrapper.getDescriptor();
        List<PluginDependency> dependencies = descriptor.getDependencies();
        for (PluginDependency dependency : dependencies) {
            if (!unloadPlugin(dependency.getPluginId())) {
                return false;
            }
        }
        // remove the plugin from all bookkeeping collections
        plugins.remove(pluginId);
        resolvedPlugins.remove(pluginWrapper);
        pathToIdMap.remove(pluginWrapper.getPluginPath());
        firePluginStateEvent(new PluginStateEvent(this, pluginWrapper, pluginState));
        // remove the classloader
        if (pluginClassLoaders.containsKey(pluginId)) {
            IzouPluginClassLoader classLoader = pluginClassLoaders.remove(pluginId);
            classLoader.dispose();
        }
        return true;
    } catch (IllegalArgumentException e) {
        // ignore not found exceptions because this method is recursive
    }
    return false;
}

/**
 * Disable the plugin with the given id: stop it, mark it DISABLED and
 * persist the id in the plugins directory's disabled.txt.
 *
 * @param pluginId identifier of the plugin to disable
 * @return {@code true} if the plugin is (now) disabled
 * @throws IllegalArgumentException if no plugin with the given id is registered
 */
@Override
public boolean disablePlugin(String pluginId) {
    if (!plugins.containsKey(pluginId)) {
        throw new IllegalArgumentException(String.format("Unknown pluginId %s", pluginId));
    }
    PluginWrapper pluginWrapper = getPlugin(pluginId);
    PluginDescriptor pluginDescriptor = pluginWrapper.getDescriptor();
    PluginState pluginState = pluginWrapper.getPluginState();
    if (PluginState.DISABLED == pluginState) {
        log.debug("Already disabled plugin '{}:{}'", pluginDescriptor.getPluginId(), pluginDescriptor.getVersion());
        return true;
    }
    if (PluginState.STOPPED == stopPlugin(pluginId)) {
        pluginWrapper.setPluginState(PluginState.DISABLED);
        // old state passed to listeners is STOPPED (the state just left)
        firePluginStateEvent(new PluginStateEvent(this, pluginWrapper, PluginState.STOPPED));
        if (disabledPlugins.add(pluginId)) {
            try {
                // persist the blacklist so the plugin stays disabled across restarts
                FileUtils.writeLines(disabledPlugins, new File(pluginsDirectory, "disabled.txt"));
            } catch (IOException e) {
                log.error("Failed to disable plugin {}", pluginId, e);
                return false;
            }
        }
        log.info("Disabled plugin '{}:{}'", pluginDescriptor.getPluginId(), pluginDescriptor.getVersion());
        return true;
    }
    return false;
}

/**
 * Re-enable a previously disabled plugin. The plugin is put back into the
 * CREATED state (it is NOT started here) and removed from disabled.txt.
 *
 * @param pluginId identifier of the plugin to enable
 * @return {@code true} if the plugin is not disabled afterwards
 * @throws IllegalArgumentException if no plugin with the given id is registered
 */
@Override
public boolean enablePlugin(String pluginId) {
    if (!plugins.containsKey(pluginId)) {
        throw new IllegalArgumentException(String.format("Unknown pluginId %s", pluginId));
    }
    PluginWrapper pluginWrapper = getPlugin(pluginId);
    if (!isPluginValid(pluginWrapper)) {
        log.warn("Plugin '{}:{}' can not be enabled", pluginWrapper.getPluginId(), pluginWrapper.getDescriptor().getVersion());
        return false;
    }
    PluginDescriptor pluginDescriptor = pluginWrapper.getDescriptor();
    PluginState pluginState = pluginWrapper.getPluginState();
    if (PluginState.DISABLED != pluginState) {
        // not disabled: nothing to do, report success
        log.debug("Plugin '{}:{}' is not disabled", pluginDescriptor.getPluginId(), pluginDescriptor.getVersion());
        return true;
    }
    try {
        if (disabledPlugins.remove(pluginId)) {
            FileUtils.writeLines(disabledPlugins, new File(pluginsDirectory, "disabled.txt"));
        }
    } catch (IOException e) {
        log.error("Failed to enable plugin {}", pluginId, e);
        return false;
    }
    pluginWrapper.setPluginState(PluginState.CREATED);
    firePluginStateEvent(new PluginStateEvent(this, pluginWrapper, pluginState));
    log.info("Enabled plugin '{}:{}'", pluginDescriptor.getPluginId(), pluginDescriptor.getVersion());
    return true;
}

/**
 * Delete the plugin with the given id: stop it, unload it, then remove its
 * expanded directory and (if found) the matching zip archive from disk.
 *
 * @param pluginId identifier of the plugin to delete
 * @return {@code true} on success
 * @throws IllegalArgumentException if no plugin with the given id is registered
 */
@Override
public boolean deletePlugin(String pluginId) {
    if (!plugins.containsKey(pluginId)) {
        throw new IllegalArgumentException(String.format("Unknown pluginId %s", pluginId));
    }
    PluginWrapper pluginWrapper = getPlugin(pluginId);
    PluginState pluginState = stopPlugin(pluginId);
    if (PluginState.STARTED == pluginState) {
        log.error("Failed to stop plugin {} on delete", pluginId);
        return false;
    }
    if (!unloadPlugin(pluginId)) {
        log.error("Failed to unload plugin {} on delete", pluginId);
        return false;
    }
    File pluginFolder = new File(pluginsDirectory, pluginWrapper.getPluginPath());
    File pluginZip = null;
    FileFilter zipFilter = new ZipFileFilter();
    File[] zipFiles = pluginsDirectory.listFiles(zipFilter);
    if (zipFiles != null) {
        // strip prepended / from the plugin path
        String dirName = pluginWrapper.getPluginPath().substring(1);
        // find the zip file that matches the plugin path
        for (File zipFile : zipFiles) {
            String name = zipFile.getName().substring(0, zipFile.getName().lastIndexOf('.'));
            if (name.equals(dirName)) {
                pluginZip = zipFile;
                break;
            }
        }
    }
    if (pluginFolder.exists()) {
        FileUtils.delete(pluginFolder);
    }
    if (pluginZip != null && pluginZip.exists()) {
        FileUtils.delete(pluginZip);
    }
    return true;
}

/**
 * Get plugin class loader for this path.
*/ @Override public IzouPluginClassLoader getPluginClassLoader(String pluginId) { return pluginClassLoaders.get(pluginId); } @Override public <T> List<T> getExtensions(Class<T> type) { List<ExtensionWrapper<T>> extensionsWrapper = extensionFinder.find(type); List<T> extensions = new ArrayList<T>(extensionsWrapper.size()); for (ExtensionWrapper<T> extensionWrapper : extensionsWrapper) { extensions.add(extensionWrapper.getExtension()); } return extensions; } @Override public Set<String> getExtensionClassNames(String pluginId) { return extensionFinder.findClassNames(pluginId); } @Override public RuntimeMode getRuntimeMode() { if (runtimeMode == null) { // retrieves the runtime mode from system String modeAsString = System.getProperty("pf4j.mode", RuntimeMode.DEPLOYMENT.toString()); runtimeMode = RuntimeMode.byName(modeAsString); } return runtimeMode; } /** * Retrieves the {@link PluginWrapper} that loaded the given class 'clazz'. */ public PluginWrapper whichPlugin(Class<?> clazz) { ClassLoader classLoader = clazz.getClassLoader(); for (PluginWrapper plugin : resolvedPlugins) { if (plugin.getPluginClassLoader() == classLoader) { return plugin; } } log.warn("Failed to find the plugin for {}", clazz); return null; } @Override public synchronized void addPluginStateListener(PluginStateListener listener) { pluginStateListeners.add(listener); } @Override public synchronized void removePluginStateListener(PluginStateListener listener) { pluginStateListeners.remove(listener); } public Version getVersion() { String version = null; Package pf4jPackage = getClass().getPackage(); if (pf4jPackage != null) { version = pf4jPackage.getImplementationVersion(); if (version == null) { version = pf4jPackage.getSpecificationVersion(); } } return (version != null) ? Version.createVersion(version) : Version.ZERO; } /** * Add the possibility to override the PluginDescriptorFinder. 
* By default if getRuntimeMode() returns RuntimeMode.DEVELOPMENT than a * PropertiesPluginDescriptorFinder is returned else this method returns * DefaultPluginDescriptorFinder. */ protected PluginDescriptorFinder createPluginDescriptorFinder() { if (RuntimeMode.DEVELOPMENT.equals(getRuntimeMode())) { return new PropertiesPluginDescriptorFinder(this); } return new DefaultPluginDescriptorFinder(pluginClasspath, this); } /** * Add the possibility to override the ExtensionFinder. */ protected ExtensionFinder createExtensionFinder() { DefaultExtensionFinder extensionFinder = new DefaultExtensionFinder(this, extensionFactory); addPluginStateListener(extensionFinder); return extensionFinder; } /** * Add the possibility to override the PluginClassPath. * By default if getRuntimeMode() returns RuntimeMode.DEVELOPMENT than a * DevelopmentPluginClasspath is returned else this method returns * PluginClasspath. */ protected PluginClasspath createPluginClasspath() { if (RuntimeMode.DEVELOPMENT.equals(getRuntimeMode())) { return new DevelopmentPluginClasspath(); } return new PluginClasspath(); } protected boolean isPluginDisabled(String pluginId) { if (enabledPlugins.isEmpty()) { return disabledPlugins.contains(pluginId); } return !enabledPlugins.contains(pluginId); } protected boolean isPluginValid(PluginWrapper pluginWrapper) { Version requires = pluginWrapper.getDescriptor().getRequires(); Version system = getSystemVersion(); if (system.isZero() || system.atLeast(requires)) { return true; } log.warn("Plugin '{}:{}' requires a minimum system version of {}", pluginWrapper.getPluginId(), pluginWrapper.getDescriptor().getVersion(), requires); return false; } protected FileFilter createHiddenPluginFilter() { return new HiddenFilter(); } /** * Add the possibility to override the plugins directory. * If a "pf4j.pluginsDir" system property is defined than this method returns * that directory. 
* If getRuntimeMode() returns RuntimeMode.DEVELOPMENT than a
* DEVELOPMENT_PLUGINS_DIRECTORY ("../plugins") is returned else this method returns
* DEFAULT_PLUGINS_DIRECTORY ("plugins").
* @return the plugins directory to use
*/
protected File createPluginsDirectory() {
    String pluginsDir = System.getProperty("pf4j.pluginsDir");
    if (pluginsDir == null) {
        if (RuntimeMode.DEVELOPMENT.equals(getRuntimeMode())) {
            pluginsDir = DEVELOPMENT_PLUGINS_DIRECTORY;
        } else {
            pluginsDir = DEFAULT_PLUGINS_DIRECTORY;
        }
    }
    return new File(pluginsDir);
}

/**
 * Add the possibility to override the PluginFactory..
 */
protected PluginFactory createPluginFactory() {
    return new DefaultPluginFactory();
}

/**
 * Add the possibility to override the ExtensionFactory.
 */
protected ExtensionFactory createExtensionFactory() {
    return new DefaultExtensionFactory();
}

// One-time setup of all bookkeeping collections and collaborators; reads the
// enabled/disabled plugin lists from the plugins directory and exports the
// plugins directory as the "pf4j.pluginsDir" system property.
private void initialize() {
    plugins = new HashMap<String, PluginWrapper>();
    pluginClassLoaders = new HashMap<String, IzouPluginClassLoader>();
    pathToIdMap = new HashMap<String, String>();
    unresolvedPlugins = new ArrayList<PluginWrapper>();
    resolvedPlugins = new ArrayList<PluginWrapper>();
    startedPlugins = new ArrayList<PluginWrapper>();
    disabledPlugins = new ArrayList<String>();
    this.sdkClassLoaders = new HashMap<>();
    pluginStateListeners = new ArrayList<PluginStateListener>();
    log.info("PF4J version {} in '{}' mode", getVersion(), getRuntimeMode());
    pluginClasspath = createPluginClasspath();
    pluginFactory = createPluginFactory();
    extensionFactory = createExtensionFactory();
    pluginDescriptorFinder = createPluginDescriptorFinder();
    extensionFinder = createExtensionFinder();
    try {
        // create a list with plugin identifiers that should be only accepted by this manager (whitelist from plugins/enabled.txt file)
        enabledPlugins = FileUtils.readLines(new File(pluginsDirectory, "enabled.txt"), true);
        log.info("Enabled plugins: {}", enabledPlugins);
        // create a list with plugin identifiers that should not be accepted by this manager (blacklist from plugins/disabled.txt file)
        disabledPlugins = FileUtils.readLines(new File(pluginsDirectory, "disabled.txt"), true);
        log.info("Disabled plugins: {}", disabledPlugins);
    } catch (IOException e) {
        // missing/unreadable lists are logged; the defaults (empty lists) remain
        log.error(e.getMessage(), e);
    }
    System.setProperty("pf4j.pluginsDir", pluginsDirectory.getAbsolutePath());
}

// Load a single expanded plugin directory: find its descriptor, build its
// class loader, wrap it, apply disabled/invalid checks and register it as
// unresolved. Returns null when the path is already registered.
private PluginWrapper loadPluginDirectory(File pluginDirectory) throws PluginException {
    // try to load the plugin
    String pluginName = pluginDirectory.getName();
    String pluginPath = "/".concat(pluginName);
    // test for plugin duplication
    if (plugins.get(pathToIdMap.get(pluginPath)) != null) {
        return null;
    }
    // retrieves the plugin descriptor
    log.debug("Find plugin descriptor '{}'", pluginPath);
    PluginDescriptor pluginDescriptor = pluginDescriptorFinder.find(pluginDirectory);
    log.debug("Descriptor " + pluginDescriptor);
    String pluginClassName = pluginDescriptor.getPluginClass();
    log.debug("Class '{}' for plugin '{}'", pluginClassName, pluginPath);
    // load plugin
    log.debug("Loading plugin '{}'", pluginPath);
    PluginLoader pluginLoader = new PluginLoader(this, pluginDescriptor, pluginDirectory, pluginClasspath, weavingClassloader);
    pluginLoader.load();
    log.debug("Loaded plugin '{}'", pluginPath);
    // create the plugin wrapper
    log.debug("Creating wrapper for plugin '{}'", pluginPath);
    PluginWrapper pluginWrapper = new PluginWrapper(pluginDescriptor, pluginPath, pluginLoader.getPluginClassLoader());
    pluginWrapper.setPluginFactory(pluginFactory);
    pluginWrapper.setRuntimeMode(getRuntimeMode());
    // test for disabled plugin
    if (isPluginDisabled(pluginDescriptor.getPluginId())) {
        log.info("Plugin '{}' is disabled", pluginPath);
        pluginWrapper.setPluginState(PluginState.DISABLED);
    }
    // validate the plugin (system-version requirement); invalid plugins are disabled too
    if (!isPluginValid(pluginWrapper)) {
        log.info("Plugin '{}' is disabled", pluginPath);
        pluginWrapper.setPluginState(PluginState.DISABLED);
    }
    log.debug("Created wrapper '{}' for plugin '{}'", pluginWrapper, pluginPath);
    String pluginId = pluginDescriptor.getPluginId();
    // add plugin to the list with plugins
    plugins.put(pluginId, pluginWrapper);
    unresolvedPlugins.add(pluginWrapper);
    // add plugin class loader to the list with class loaders
    IzouPluginClassLoader pluginClassLoader = pluginLoader.getPluginClassLoader();
    if (pluginId.startsWith(PLUGIN_PACKAGE_PREFIX_IZOU_SDK)) {
        // NOTE(review): extracts a dotted version ("1.2.3") from a dash-separated
        // directory name (e.g. "sdk-1.2.3[-qualifier]") and keys the SDK class
        // loader by "pluginId:version"; sdkVersion stays "" when no part matches.
        String sdkVersion = "";
        Pattern pattern = Pattern.compile("([a-zA-Z]+\\-)(([0-9]+\\.)*[0-9]+)(\\-[a-zA-Z]+)?");
        Matcher matcher = pattern.matcher(pluginName);
        if (matcher.matches()) {
            String nameParts[] = pluginName.split("\\-");
            for (String part : nameParts) {
                pattern = Pattern.compile("(([0-9]+\\.)*[0-9]+)");
                matcher = pattern.matcher(part);
                if (matcher.matches()) {
                    sdkVersion = part;
                    break;
                }
            }
        }
        String pluginIDWithVersion = pluginId + ":" + sdkVersion;
        sdkClassLoaders.put(pluginIDWithVersion, pluginClassLoader);
    }
    pluginClassLoaders.put(pluginId, pluginClassLoader);
    return pluginWrapper;
}

// Expand a plugin '.zip' archive into a directory of the same base name.
// NOTE(review): assumes a 3-letter extension — the base name is taken by
// stripping the last 4 characters.
private File expandPluginArchive(File pluginArchiveFile) throws IOException {
    String fileName = pluginArchiveFile.getName();
    long pluginArchiveDate = pluginArchiveFile.lastModified();
    String pluginName = fileName.substring(0, fileName.length() - 4);
    File pluginDirectory = new File(pluginsDirectory, pluginName);
    // check if exists directory or the '.zip' file is "newer" than directory
    if (!pluginDirectory.exists() || (pluginArchiveDate > pluginDirectory.lastModified())) {
        log.debug("Expand plugin archive '{}' in '{}'", pluginArchiveFile, pluginDirectory);
        // do not overwrite an old version, remove it
        if (pluginDirectory.exists()) {
            FileUtils.delete(pluginDirectory);
        }
        // create directory for plugin
        pluginDirectory.mkdirs();
        // expand '.zip' file
        Unzip unzip = new Unzip();
        unzip.setSource(pluginArchiveFile);
        unzip.setDestination(pluginDirectory);
        unzip.extract();
    }
    return pluginDirectory;
}

private void resolvePlugins() throws PluginException {
    resolveDependencies();
}

// Topologically sort the unresolved plugins by their dependencies and move
// every sorted plugin into resolvedPlugins.
private void resolveDependencies() throws PluginException {
    DependencyResolver dependencyResolver = new DependencyResolver(unresolvedPlugins);
    resolvedPlugins = dependencyResolver.getSortedPlugins();
    for (PluginWrapper pluginWrapper : resolvedPlugins) {
        unresolvedPlugins.remove(pluginWrapper);
        log.info("Plugin '{}' resolved", pluginWrapper.getDescriptor().getPluginId());
    }
}

// Deliver a state-change event to every registered listener; synchronized to
// serialize with add/removePluginStateListener.
private synchronized void firePluginStateEvent(PluginStateEvent event) {
    for (PluginStateListener listener : pluginStateListeners) {
        log.debug("Fire '{}' to '{}'", event, listener);
        listener.pluginStateChanged(event);
    }
}
}
/* * $Id$ * This file is a part of the Arakhne Foundation Classes, http://www.arakhne.org/afc * * Copyright (c) 2000-2012 Stephane GALLAND. * Copyright (c) 2005-10, Multiagent Team, Laboratoire Systemes et Transports, * Universite de Technologie de Belfort-Montbeliard. * Copyright (c) 2013-2020 The original authors, and other authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.arakhne.afc.math.test.geometry; import static org.junit.jupiter.api.Assertions.*; import static org.junit.jupiter.api.Assumptions.*; import org.junit.jupiter.api.Test; import org.arakhne.afc.math.geometry.d3.Point3D; import org.arakhne.afc.math.geometry.d3.Tuple3D; import org.arakhne.afc.math.geometry.d3.Vector3D; import org.arakhne.afc.math.geometry.d3.d.Tuple3d;
/**
 * Abstract test suite for unmodifiable 3D vectors: every inherited mutator
 * test is overridden to assert that the mutating operation throws
 * {@link UnsupportedOperationException} instead of changing the vector.
 * Read-only behaviour remains covered by the inherited tests.
 */
@SuppressWarnings("all") public abstract class AbstractUnmodifiableVector3DTest<V extends Vector3D<? super V, ? super P>, P extends Point3D<? super P, ?
// Plain mutators (absolute/add/negate/scale/set/sub and their int/double variants):
// each must throw before any state change.
super V>> extends AbstractVector3DTest<V, P, Vector3D> { @Override @Test public final void absolute() { assertThrows(UnsupportedOperationException.class, () -> getT().absolute()); } @Override @Test public final void absoluteT() { assertThrows(UnsupportedOperationException.class, () -> { Tuple3D c = new Tuple3d(); getT().absolute(c); }); } @Override @Test public final void addIntIntInt() { assertThrows(UnsupportedOperationException.class, () -> getT().add(6, 7, 0)); } @Test public final void addDoubleDouble() { assertThrows(UnsupportedOperationException.class, () -> getT().add(6.5, 7.5, 0)); } @Override @Test public final void addXInt() { assertThrows(UnsupportedOperationException.class, () -> getT().addX(6)); } @Test public final void addXDouble() { assertThrows(UnsupportedOperationException.class, () -> getT().addX(6.5)); } @Override @Test public final void addYInt() { assertThrows(UnsupportedOperationException.class, () -> getT().addY(6)); } @Test public final void addYDouble() { assertThrows(UnsupportedOperationException.class, () -> getT().addY(6.5)); } @Override @Test public final void addZInt() { assertThrows(UnsupportedOperationException.class, () -> getT().addZ(6)); } @Test public final void addZDouble() { assertThrows(UnsupportedOperationException.class, () -> getT().addZ(6.5)); } @Override @Test public final void negateT() { assertThrows(UnsupportedOperationException.class, () -> { Tuple3D c = new Tuple3d(); getT().negate(c); }); } @Override @Test public final void negate() { assertThrows(UnsupportedOperationException.class, () -> getT().negate()); } @Override @Test public final void scaleIntT() { assertThrows(UnsupportedOperationException.class, () -> { Tuple3D c = new Tuple3d(2, -1, 0); getT().scale(4, c); }); } @Test public final void scaleDoubleT() { assertThrows(UnsupportedOperationException.class, () -> { Tuple3D c = new Tuple3d(2, -1, 0); getT().scale(4.5, c); }); } @Override @Test public final void scaleInt() {
assertThrows(UnsupportedOperationException.class, () -> getT().scale(4)); } @Test public final void scaleDouble() { assertThrows(UnsupportedOperationException.class, () -> getT().scale(4.5)); } @Override @Test public final void setTuple3D() { assertThrows(UnsupportedOperationException.class, () -> { Tuple3D c = new Tuple3d(-45, 78, 0); getT().set(c); }); } @Override @Test public final void setIntIntInt() { assertThrows(UnsupportedOperationException.class, () -> getT().set(-45, 78, 0)); } @Test public final void setDoubleDouble() { assertThrows(UnsupportedOperationException.class, () -> getT().set(-45.5, 78.5, 0)); } @Override @Test public final void setIntArray() { assertThrows(UnsupportedOperationException.class, () -> getT().set(new int[]{-45, 78, 0})); } @Test public final void setDoubleArray() { assertThrows(UnsupportedOperationException.class, () -> getT().set(new double[]{-45.5, 78.5, 0})); } @Override @Test public final void setXInt() { assertThrows(UnsupportedOperationException.class, () -> getT().setX(45)); } @Test public final void setXDouble() { assertThrows(UnsupportedOperationException.class, () -> getT().setX(45.5)); } @Override @Test public final void setYInt() { assertThrows(UnsupportedOperationException.class, () -> getT().setY(45)); } @Test public final void setYDouble() { assertThrows(UnsupportedOperationException.class, () -> getT().setY(45.5)); } @Override @Test public final void subIntIntInt() { assertThrows(UnsupportedOperationException.class, () -> getT().sub(45, 78, 0)); } @Override @Test public final void subXInt() { assertThrows(UnsupportedOperationException.class, () -> getT().subX(45)); } @Override @Test public final void subYInt() { assertThrows(UnsupportedOperationException.class, () -> getT().subY(78)); } @Test public final void subDoubleDouble() { assertThrows(UnsupportedOperationException.class, () -> getT().sub(45.5, 78.5, 0)); } @Test public final void subXDouble() { assertThrows(UnsupportedOperationException.class, () ->
getT().subX(45.5)); } @Test public final void subYDouble() { assertThrows(UnsupportedOperationException.class, () -> getT().subY(78.5)); }
// The "_iffp"/"_ifi" variants gate on the coordinate representation
// (floating-point vs integer) via assumeFalse/assumeTrue(isIntCoordinates()).
@Override @Test public final void addDoubleDouble_iffp() { assertThrows(UnsupportedOperationException.class, () -> { assumeFalse(isIntCoordinates()); getT().add(12.3, 4.56, 0); }); } @Override @Test public final void addDoubleDouble_ifi() { assertThrows(UnsupportedOperationException.class, () -> { assumeTrue(isIntCoordinates()); getT().add(12.3, 4.56, 0); }); } @Override @Test public final void addXDouble_iffp() { assertThrows(UnsupportedOperationException.class, () -> { assumeFalse(isIntCoordinates()); getT().addX(12.3); }); } @Override @Test public final void addXDouble_ifi() { assertThrows(UnsupportedOperationException.class, () -> { assumeTrue(isIntCoordinates()); getT().addX(12.3); }); } @Override @Test public final void addYDouble_iffp() { assertThrows(UnsupportedOperationException.class, () -> { assumeFalse(isIntCoordinates()); getT().addY(12.3); }); } @Override @Test public final void addYDouble_ifi() { assertThrows(UnsupportedOperationException.class, () -> { assumeTrue(isIntCoordinates()); getT().addY(12.3); }); } @Override @Test public final void addZDouble_iffp() { assertThrows(UnsupportedOperationException.class, () -> { assumeFalse(isIntCoordinates()); getT().addZ(12.3); }); } @Override @Test public final void addZDouble_ifi() { assertThrows(UnsupportedOperationException.class, () -> { assumeTrue(isIntCoordinates()); getT().addZ(12.3); }); } @Override @Test public final void scaleDoubleT_iffp() { assertThrows(UnsupportedOperationException.class, () -> { assumeFalse(isIntCoordinates()); getT().scale(12.3, createTuple(1,2, 0)); }); } @Override @Test public final void scaleDoubleT_ifi() { assertThrows(UnsupportedOperationException.class, () -> { assumeTrue(isIntCoordinates()); getT().scale(12.3, createTuple(1,2, 0)); }); } @Override @Test public final void scaleDouble_iffp() {
assertThrows(UnsupportedOperationException.class, () -> { assumeFalse(isIntCoordinates()); getT().scale(12.3); }); } @Override @Test public final void scaleDouble_ifi() { assertThrows(UnsupportedOperationException.class, () -> { assumeTrue(isIntCoordinates()); getT().scale(12.3); }); } @Override @Test public final void setDoubleDouble_iffp() { assertThrows(UnsupportedOperationException.class, () -> { assumeFalse(isIntCoordinates()); getT().set(12.3, 4.56, 0); }); } @Override @Test public final void setDoubleDouble_ifi() { assertThrows(UnsupportedOperationException.class, () -> { assumeTrue(isIntCoordinates()); getT().set(12.3, 4.56, 0); }); } @Override @Test public final void setDoubleArray_iffp() { assertThrows(UnsupportedOperationException.class, () -> { assumeFalse(isIntCoordinates()); getT().set(new double[] {12.3, 4.56, 0}); }); } @Override @Test public final void setDoubleArray_ifi() { assertThrows(UnsupportedOperationException.class, () -> { assumeTrue(isIntCoordinates()); getT().set(new double[] {12.3, 4.56, 0}); }); } @Override @Test public final void setXDouble_iffp() { assertThrows(UnsupportedOperationException.class, () -> { assumeFalse(isIntCoordinates()); getT().setX(12.3); }); } @Override @Test public final void setXDouble_ifi() { assertThrows(UnsupportedOperationException.class, () -> { assumeTrue(isIntCoordinates()); getT().setX(12.3); }); } @Override @Test public final void setYDouble_ifi() { assertThrows(UnsupportedOperationException.class, () -> { assumeTrue(isIntCoordinates()); getT().setY(12.3); }); } @Override @Test public final void setYDouble_iffp() { assertThrows(UnsupportedOperationException.class, () -> { assumeFalse(isIntCoordinates()); getT().setY(12.3); }); } @Test public final void setZDouble_ifi() { assertThrows(UnsupportedOperationException.class, () -> { assumeTrue(isIntCoordinates()); getT().setZ(12.3); }); } @Test public final void setZDouble_iffp() { assertThrows(UnsupportedOperationException.class, () -> {
assumeFalse(isIntCoordinates()); getT().setZ(12.3); }); } @Override @Test public final void subDoubleDouble_iffp() { assertThrows(UnsupportedOperationException.class, () -> { assumeFalse(isIntCoordinates()); getT().sub(12.3, 4.56, 0); }); } @Override @Test public final void subDoubleDouble_ifi() { assertThrows(UnsupportedOperationException.class, () -> { assumeTrue(isIntCoordinates()); getT().sub(12.3, 4.56, 0); }); } @Override @Test public final void subXDouble_iffp() { assertThrows(UnsupportedOperationException.class, () -> { assumeFalse(isIntCoordinates()); getT().subX(12.3); }); } @Override @Test public final void subXDouble_ifi() { assertThrows(UnsupportedOperationException.class, () -> { assumeTrue(isIntCoordinates()); getT().subX(12.3); }); } @Override @Test public final void subYDouble_iffp() { assertThrows(UnsupportedOperationException.class, () -> { assumeFalse(isIntCoordinates()); getT().subY(12.3); }); } @Override @Test public final void subYDouble_ifi() { assertThrows(UnsupportedOperationException.class, () -> { assumeTrue(isIntCoordinates()); getT().subY(12.3); }); } @Test public final void subZDouble_iffp() { assertThrows(UnsupportedOperationException.class, () -> { assumeFalse(isIntCoordinates()); getT().subZ(12.3); }); } @Test public final void subZDouble_ifi() { assertThrows(UnsupportedOperationException.class, () -> { assumeTrue(isIntCoordinates()); getT().subZ(12.3); }); }
// Vector-algebra mutators (add/sub/scaleAdd/normalize/setLength and the
// operator_ aliases): same contract, exercised with helper-created operands.
@Override @Test public final void addVector3DVector3D_iffp() { assertThrows(UnsupportedOperationException.class, () -> { assumeFalse(isIntCoordinates()); Vector3D vector = createVector(0, 0, 0); Vector3D vector3 = createVector(1.2, 1.2, 0); Vector3D vector5 = createTuple(0.0, 0.0, 0); vector5.add(vector3,vector); }); } @Override @Test public final void addVector3DVector3D_ifi() { assertThrows(UnsupportedOperationException.class, () -> { assumeTrue(isIntCoordinates()); Vector3D vector = createVector(0, 0, 0); Vector3D vector3 = createVector(1.2, 1.2, 0); Vector3D vector5 =
createTuple(0.0, 0.0, 0); vector5.add(vector3,vector); }); } @Override @Test public final void addVector3D_iffp() { assertThrows(UnsupportedOperationException.class, () -> { assumeFalse(isIntCoordinates()); Vector3D vector = createTuple(0,0, 0); Vector3D vector3 = createVector(1.2,1.2, 0); vector.add(vector3); }); } @Override @Test public final void addVector3D_ifi() { assertThrows(UnsupportedOperationException.class, () -> { assumeTrue(isIntCoordinates()); Vector3D vector = createTuple(0,0, 0); Vector3D vector3 = createVector(1.2,1.2, 0); vector.add(vector3); }); } @Override @Test public final void scaleAddIntVector3DVector3D_iffp() { assertThrows(UnsupportedOperationException.class, () -> { assumeFalse(isIntCoordinates()); Vector3D vector = createVector(-1,0, 0); Vector3D vector2 = createVector(1.0,1.2, 0); Vector3D vector3 = createTuple(0.0,0.0, 0); vector3.scaleAdd(0,vector2,vector); }); } @Override @Test public final void scaleAddIntVector3DVector3D_ifi() { assertThrows(UnsupportedOperationException.class, () -> { assumeTrue(isIntCoordinates()); Vector3D vector = createVector(-1,0, 0); Vector3D vector2 = createVector(1.0,1.2, 0); Vector3D vector3 = createTuple(0.0,0.0, 0); vector3.scaleAdd(0,vector2,vector); }); } @Override @Test public final void scaleAddDoubleVector3DVector3D_iffp() { assertThrows(UnsupportedOperationException.class, () -> { assumeFalse(isIntCoordinates()); Vector3D vect = createVector(1,0, 0); Vector3D vector = createVector(-1,1, 0); Vector3D newVector = createTuple(0.0,0.0, 0); newVector.scaleAdd(0.0, vector, vect); }); } @Override @Test public final void scaleAddDoubleVector3DVector3D_ifi() { assertThrows(UnsupportedOperationException.class, () -> { assumeTrue(isIntCoordinates()); Vector3D vect = createVector(1,0, 0); Vector3D vector = createVector(-1,1, 0); Vector3D newVector = createTuple(0.0,0.0, 0); newVector.scaleAdd(0.0, vector, vect); }); } @Override @Test public final void scaleAddIntVector3D() {
assertThrows(UnsupportedOperationException.class, () -> { Vector3D vector = createVector(1,0, 0); Vector3D newVector = createTuple(0,0, 0); newVector.scaleAdd(0,vector); }); } @Override @Test public final void scaleAddDoubleVector3D_iffp() { assertThrows(UnsupportedOperationException.class, () -> { assumeFalse(isIntCoordinates()); Vector3D vector = createVector(1,0, 0); Vector3D newVector = createTuple(0.0,0.0, 0); newVector.scaleAdd(0.5,vector); }); } @Override @Test public final void scaleAddDoubleVector3D_ifi() { assertThrows(UnsupportedOperationException.class, () -> { assumeTrue(isIntCoordinates()); Vector3D vector = createVector(1,0, 0); Vector3D newTuple = createTuple(0.0,0.0, 0); newTuple.scaleAdd(0.5,vector); }); } @Override @Test public final void subVector3DVector3D_iffp() { assertThrows(UnsupportedOperationException.class, () -> { assumeFalse(isIntCoordinates()); Vector3D vect = createVector(0, 0, 0); Vector3D vector = createVector(-1.2, -1.2, 0); Vector3D newVector = createTuple(0.0, 0.0, 0); newVector.sub(vect,vector); }); } @Override @Test public final void subVector3DVector3D_ifi() { assertThrows(UnsupportedOperationException.class, () -> { assumeTrue(isIntCoordinates()); Vector3D vect = createVector(0, 0, 0); Vector3D vector = createVector(-1.2, -1.2, 0); Vector3D newVector = createTuple(0.0, 0.0, 0); newVector.sub(vect,vector); }); } @Override @Test public final void subPoint3DPoint3D_iffp() { assertThrows(UnsupportedOperationException.class, () -> { assumeFalse(isIntCoordinates()); Point3D point = createPoint(0, 0, 0); Point3D vector = createPoint(-1.2, -1.2, 0); Vector3D newVector = createTuple(0.0, 0.0, 0); newVector.sub(point,vector); }); } @Override @Test public final void subPoint3DPoint3D_ifi() { assertThrows(UnsupportedOperationException.class, () -> { assumeTrue(isIntCoordinates()); Point3D point = createPoint(0, 0, 0); Point3D vector = createPoint(-1.2, -1.2, 0); Vector3D newPoint = createTuple(0.0, 0.0, 0); newPoint.sub(point,vector);
}); } @Override @Test public final void subVector3D_iffp() { assertThrows(UnsupportedOperationException.class, () -> { assumeFalse(isIntCoordinates()); Vector3D vect = createTuple(0, 0, 0); Vector3D vector = createVector(-1.2, -1.2, 0); vect.sub(vector); }); } @Override @Test public final void subVector3D_ifi() { assertThrows(UnsupportedOperationException.class, () -> { assumeTrue(isIntCoordinates()); Vector3D vect = createTuple(0, 0, 0); Vector3D vector = createVector(-1.2, -1.2, 0); vect.sub(vector); }); } @Override @Test public final void normalize_iffp() { assertThrows(UnsupportedOperationException.class, () -> { assumeFalse(isIntCoordinates()); Vector3D vector = createTuple(1,2, 0); vector.normalize(); }); } @Override @Test public final void normalize_ifi() { assertThrows(UnsupportedOperationException.class, () -> { assumeTrue(isIntCoordinates()); Vector3D vector = createTuple(1,2, 0); vector.normalize(); }); } @Override @Test public final void normalizeVector3D_iffp() { assertThrows(UnsupportedOperationException.class, () -> { assumeFalse(isIntCoordinates()); Vector3D vector = createTuple(0,0, 0); vector.normalize(createVector(1,2, 0)); }); } @Override @Test public final void normalizeVector3D_ifi() { assertThrows(UnsupportedOperationException.class, () -> { assumeTrue(isIntCoordinates()); Vector3D vector = createTuple(0,0, 0); vector.normalize(createVector(1,2, 0)); }); } @Override @Test public final void setLength_iffp() { assertThrows(UnsupportedOperationException.class, () -> { assumeFalse(isIntCoordinates()); Vector3D vector = createTuple(getRandom().nextDouble(), getRandom().nextDouble(), getRandom().nextDouble()); double newLength = getRandom().nextDouble(); vector.setLength(newLength); }); } @Override @Test public final void setLength_ifi() { assertThrows(UnsupportedOperationException.class, () -> { assumeTrue(isIntCoordinates()); Vector3D vector = createTuple(0, 2, 0); int newLength = 5; vector.setLength(newLength); }); } @Override @Test public final
void operator_addVector3D_iffp() { assertThrows(UnsupportedOperationException.class, () -> { assumeFalse(isIntCoordinates()); Vector3D vector = createTuple(0,0, 0); Vector3D vector3 = createVector(1.2,1.2, 0); vector.operator_add(vector3); }); } @Override @Test public final void operator_addVector3D_ifi() { assertThrows(UnsupportedOperationException.class, () -> { assumeTrue(isIntCoordinates()); Vector3D vector = createTuple(0,0, 0); Vector3D vector3 = createVector(1.2,1.2, 0); vector.operator_add(vector3); }); } @Override @Test public final void operator_removeVector3D_iffp() { assertThrows(UnsupportedOperationException.class, () -> { assumeFalse(isIntCoordinates()); Vector3D vect = createTuple(0, 0, 0); Vector3D vector = createVector(-1.2, -1.2, 0); vect.operator_remove(vector); }); } @Override @Test public final void operator_removeVector3D_ifi() { assertThrows(UnsupportedOperationException.class, () -> { assumeTrue(isIntCoordinates()); Vector3D vect = createTuple(0, 0, 0); Vector3D vector = createVector(-1.2, -1.2, 0); vect.operator_remove(vector); }); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.codehaus.groovy.vmplugin.v9;

import groovy.lang.GroovyClassLoader;
import groovy.lang.GroovyRuntimeException;
import groovy.lang.MetaClass;
import groovy.lang.MetaMethod;
import org.codehaus.groovy.GroovyBugError;
import org.codehaus.groovy.reflection.CachedClass;
import org.codehaus.groovy.reflection.CachedMethod;
import org.codehaus.groovy.reflection.ReflectionUtils;
import org.codehaus.groovy.runtime.DefaultGroovyMethods;
import org.codehaus.groovy.vmplugin.v8.Java8;

import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodHandles;
import java.lang.module.ModuleDescriptor;
import java.lang.module.ModuleFinder;
import java.lang.module.ModuleReference;
import java.lang.reflect.AccessibleObject;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Member;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.math.BigInteger;
import java.net.URI;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.stream.Collectors;

/**
 * VM plugin for Java 9 and above.  Extends the Java 8 plugin with
 * module-system-aware accessibility checks (via {@link Module} export/open
 * queries) and meta-method transformation that routes calls through visible
 * declarations when the original declaring class is not accessible.
 */
public class Java9 extends Java8 {

    /** @return 9, the minimum JDK major version this plugin targets. */
    @Override
    public int getVersion() {
        return 9;
    }

    /**
     * Returns the Java 8 plugin's DGM classes plus this package's
     * {@code PluginDefaultGroovyMethods}, appended at the end.
     */
    @Override
    public Class<?>[] getPluginDefaultGroovyMethods() {
        Class<?>[] answer = super.getPluginDefaultGroovyMethods();
        final int n = answer.length;
        // Grow by one slot and append the v9-specific DGM holder class.
        answer = Arrays.copyOf(answer, n + 1);
        answer[n] = PluginDefaultGroovyMethods.class;
        return answer;
    }

    /**
     * Collects the classes contained in the default-import packages.
     * Package prefixes ending in '.' are split into "java/..." and
     * "groovy/..." groups; the java.base module image and the groovy
     * jar location(s) are then scanned concurrently.
     *
     * @param packageNames default-import package prefixes, each ending with '.'
     * @return map of class name to the set of default-import package prefixes it belongs to;
     *         empty entries are simply absent. Failures are swallowed (logged at FINEST only).
     */
    @Override
    public Map<String, Set<String>> getDefaultImportClasses(final String[] packageNames) {
        List<String> javaPackages = new ArrayList<>(4);
        List<String> groovyPackages = new ArrayList<>(4);
        for (String prefix : packageNames) {
            // Strip the trailing '.' and switch to resource-path form ("java/util").
            String pn = prefix.substring(0, prefix.length() - 1).replace('.', '/');
            if (pn.startsWith("java/")) {
                javaPackages.add(pn);
            } else if (pn.startsWith("groovy/")) {
                groovyPackages.add(pn);
            } else {
                throw new GroovyBugError("unexpected package: " + pn);
            }
        }

        Map<String, Set<String>> result = new LinkedHashMap<>(2048);
        try (GroovyClassLoader gcl = new GroovyClassLoader(this.getClass().getClassLoader())) {
            // Scan the java.base module image in parallel with the groovy scans below.
            CompletableFuture<Map<String, Set<String>>> javaDefaultImportsFuture =
                    CompletableFuture.supplyAsync(() -> doFindClasses(URI.create("jrt:/modules/java.base/"), "java", javaPackages));
            try {
                URI gsLocation = DefaultGroovyMethods.getLocation(gcl.loadClass("groovy.lang.GroovySystem")).toURI();
                CompletableFuture<Map<String, Set<String>>> groovyDefaultImportsFuture1 =
                        CompletableFuture.supplyAsync(() -> doFindClasses(gsLocation, "groovy", groovyPackages));
                // in production environment, groovy-core classes, e.g. `GroovySystem`(java class) and `GrapeIvy`(groovy class) are all packaged in the groovy-core jar file,
                // but in Groovy development environment, groovy-core classes are distributed in different directories
                URI giLocation = DefaultGroovyMethods.getLocation(gcl.loadClass("groovy.grape.GrapeIvy")).toURI();
                CompletableFuture<Map<String, Set<String>>> groovyDefaultImportsFuture2 =
                        gsLocation.equals(giLocation)
                                ? CompletableFuture.completedFuture(Collections.emptyMap())
                                : CompletableFuture.supplyAsync(() -> doFindClasses(giLocation, "groovy", groovyPackages));
                result.putAll(groovyDefaultImportsFuture1.get());
                result.putAll(groovyDefaultImportsFuture2.get());
            } finally {
                // Always merge the java.base result, even if the groovy scans failed.
                result.putAll(javaDefaultImportsFuture.get());
            }
        } catch (Exception ignore) {
            // Best effort: default imports are an optimization; log and return what we have.
            Logger logger = Logger.getLogger(getClass().getName());
            if (logger.isLoggable(Level.FINEST)) {
                logger.finest("[WARNING] Failed to find default imported classes:\n" + DefaultGroovyMethods.asString(ignore));
            }
        }

        return result;
    }

    /**
     * Scans {@code uri} with {@code ClassFinder}, keeping only classes that live in one of
     * {@code defaultPackageNames} (resource-path form), and maps each class name to its
     * matching package prefixes converted back to dotted form with a trailing '.'.
     */
    private static Map<String, Set<String>> doFindClasses(final URI uri, final String packageName, final List<String> defaultPackageNames) {
        Map<String, Set<String>> result = ClassFinder.find(uri, packageName, true)
                .entrySet().stream()
                .filter(e -> e.getValue().stream().anyMatch(defaultPackageNames::contains))
                .collect(
                        Collectors.toMap(
                                Map.Entry::getKey,
                                entry -> entry.getValue().stream()
                                        .filter(e -> defaultPackageNames.contains(e))
                                        .map(e -> e.replace('/', '.') + ".")
                                        .collect(Collectors.toSet())
                        )
                );

        return result;
    }

    /**
     * Creates a private lookup for {@code declaringClass}: prefers the Java 9+
     * {@code MethodHandles.privateLookupIn} (resolved reflectively in {@link LookupHolder}),
     * falling back to the package-private {@code Lookup(Class, int)} constructor.
     */
    @Override
    protected MethodHandles.Lookup newLookup(final Class<?> declaringClass) {
        try {
            final Method privateLookup = getPrivateLookup();
            if (privateLookup != null) {
                return (MethodHandles.Lookup) privateLookup.invoke(null, declaringClass, MethodHandles.lookup());
            }
            return getLookupConstructor().newInstance(declaringClass, MethodHandles.Lookup.PRIVATE).in(declaringClass);
        } catch (final IllegalAccessException | InstantiationException e) {
            throw new IllegalArgumentException(e);
        } catch (final InvocationTargetException e) {
            throw new GroovyRuntimeException(e);
        }
    }

    /** Lazily resolved {@code Lookup(Class,int)} constructor; null when {@code privateLookupIn} exists. */
    protected static Constructor<MethodHandles.Lookup> getLookupConstructor() {
        return LookupHolder.LOOKUP_Constructor;
    }

    /** Lazily resolved {@code MethodHandles.privateLookupIn} method, or null on pre-9 JVMs. */
    protected static Method getPrivateLookup() {
        return LookupHolder.PRIVATE_LOOKUP;
    }

    /** Initialization-on-demand holder: resolves the lookup mechanism exactly once. */
    private static class LookupHolder {
        private static final Method PRIVATE_LOOKUP;
        private static final Constructor<MethodHandles.Lookup> LOOKUP_Constructor;

        static {
            Constructor<MethodHandles.Lookup> lookup = null;
            Method privateLookup = null;
            try { // java 9
                privateLookup = MethodHandles.class.getMethod("privateLookupIn", Class.class, MethodHandles.Lookup.class);
            } catch (final NoSuchMethodException | RuntimeException e) { // java 8 or fallback if anything else goes wrong
                try {
                    lookup = MethodHandles.Lookup.class.getDeclaredConstructor(Class.class, Integer.TYPE);
                    if (!lookup.isAccessible()) {
                        ReflectionUtils.trySetAccessible(lookup);
                    }
                } catch (final NoSuchMethodException ex) {
                    // NOTE(review): wraps the OUTER exception `e`, not `ex` — the inner
                    // NoSuchMethodException is dropped as the cause. Verify this is intentional.
                    throw new IllegalStateException("Incompatible JVM", e);
                }
            }
            PRIVATE_LOOKUP = privateLookup;
            LOOKUP_Constructor = lookup;
        }
    }

    /**
     * This method may be used by a caller in class C to check whether to enable access to a member of declaring class D successfully
     * if {@link Java8#checkCanSetAccessible(java.lang.reflect.AccessibleObject, java.lang.Class)} returns true and any of the following hold:
     * <p>
     * 1) C and D are in the same module.
     * 2) The member is public and D is public in a package that the module containing D exports to at least the module containing C.
     * 3) The member is protected static, D is public in a package that the module containing D exports to at least the module containing C, and C is a subclass of D.
     * 4) D is in a package that the module containing D opens to at least the module containing C. All packages in unnamed and open modules are open to all modules and so this method always succeeds when D is in an unnamed or open module.
     *
     * @param accessibleObject the accessible object to check
     * @param callerClass the class wishing to invoke {@code setAccessible}
     * @return the check result
     */
    @Override
    public boolean checkCanSetAccessible(final AccessibleObject accessibleObject, final Class<?> callerClass) {
        if (!super.checkCanSetAccessible(accessibleObject, callerClass)) return false;

        if (callerClass == MethodHandle.class) {
            throw new IllegalCallerException(); // should not happen
        }

        if (!(accessibleObject instanceof Member)) {
            throw new IllegalArgumentException("accessibleObject should be a member of type: " + accessibleObject); // should not happen
        }

        Member member = (Member) accessibleObject;
        Class<?> declaringClass = member.getDeclaringClass();

        Module declaringModule = declaringClass.getModule();
        Module callerModule = callerClass.getModule();

        // Unnamed declaring module: everything is open.
        if (!declaringModule.isNamed()) return true;
        if (callerModule == declaringModule) return true;
        if (callerModule == Object.class.getModule()) return true;

        return checkAccessible(callerClass, declaringClass, member.getModifiers(), true);
    }

    /** Delegates to the JDK 9+ {@link AccessibleObject#trySetAccessible()}. */
    @Override
    public boolean trySetAccessible(final AccessibleObject ao) {
        return ao.trySetAccessible();
    }

    /**
     * Replaces {@code metaMethod} with an equivalent, caller-accessible method when the
     * module system would deny access to the original declaration; otherwise returns it unchanged.
     * Only {@link CachedMethod} instances with a known declaring class are considered.
     */
    @Override
    public MetaMethod transformMetaMethod(final MetaClass metaClass, final MetaMethod metaMethod, Class<?> caller) {
        if (!(metaMethod instanceof CachedMethod)) {
            return metaMethod;
        }

        CachedMethod cachedMethod = (CachedMethod) metaMethod;
        CachedClass methodDeclaringClass = cachedMethod.getDeclaringClass();
        if (null == methodDeclaringClass) {
            return metaMethod;
        }

        if (null == caller) {
            caller = ReflectionUtils.class; // "set accessible" are done via `org.codehaus.groovy.reflection.ReflectionUtils` as shown in warnings
        }

        return getOrTransformMetaMethod(metaClass, caller, cachedMethod);
    }

    /** Returns the cached transformation result, computing and caching it on first use. */
    private CachedMethod getOrTransformMetaMethod(final MetaClass metaClass, final Class<?> caller, final CachedMethod cachedMethod) {
        CachedMethod transformedMethod = cachedMethod.getTransformedMethod();
        if (null != transformedMethod) {
            return transformedMethod;
        }

        transformedMethod = doTransformMetaMethod(metaClass, cachedMethod, caller);
        cachedMethod.setTransformedMethod(transformedMethod);
        return transformedMethod;
    }

    /**
     * Actual transformation: when {@code caller} cannot access the declaring class's method,
     * searches for an accessible equivalent — on superclasses when the meta class IS the
     * declaring class, or on the (more derived) meta class when it merely inherits the method.
     * Falls back to returning {@code metaMethod} unchanged.
     */
    private CachedMethod doTransformMetaMethod(final MetaClass metaClass, final CachedMethod metaMethod, final Class<?> caller) {
        Class<?> declaringClass = metaMethod.getDeclaringClass().getTheClass();
        int methodModifiers = metaMethod.getModifiers();

        // if caller can access the method, no need to transform the meta method
        if (!checkAccessible(caller, declaringClass, methodModifiers, false)) {
            Class<?>[] paramTypes = metaMethod.getPT();
            Class<?> theClass = metaClass.getTheClass();

            if (declaringClass == theClass) {
                if (BigInteger.class == theClass) {
                    // Special case: normalize BigInteger.multiply(long/int/short) to multiply(BigInteger).
                    CachedMethod bigIntegerMetaMethod = transformBigIntegerMetaMethod(metaMethod, paramTypes);
                    if (bigIntegerMetaMethod != metaMethod) {
                        return bigIntegerMetaMethod;
                    }
                }

                // GROOVY-9081: Access public members of private class, e.g. Collections.unmodifiableMap([:]).toString()
                // try to find the visible method from its superclasses
                for (Class<?> c = theClass; c != null; c = c.getSuperclass()) {
                    Optional<CachedMethod> cachedMethod = getAccessibleMetaMethod(metaMethod, paramTypes, caller, c, true);
                    if (cachedMethod.isPresent()) {
                        return cachedMethod.get();
                    }
                }
            } else if (declaringClass.isAssignableFrom(theClass)) {
                // if caller can not access the method,
                // try to find the corresponding method in its derived class
                // GROOVY-9081: Sub-class derives the protected members from public class, "Invoke the members on the sub class instances"
                // e.g. StringBuilder sb = new StringBuilder(); sb.setLength(0); // `setLength` is the method of `AbstractStringBuilder`, which is `package-private`
                Optional<CachedMethod> cachedMethod = getAccessibleMetaMethod(metaMethod, paramTypes, caller, theClass, false);
                if (cachedMethod.isPresent()) {
                    return cachedMethod.get();
                }
            }
        }

        return metaMethod;
    }

    /**
     * Maps {@code BigInteger.multiply(long|int|short)} (boxed or primitive parameter) to the
     * public {@code multiply(BigInteger)} overload; returns {@code metaMethod} for anything else.
     */
    private static CachedMethod transformBigIntegerMetaMethod(final CachedMethod metaMethod, final Class<?>[] paramTypes) {
        if (paramTypes.length == 1 && metaMethod.getName().equals("multiply")) {
            Class<?> type = paramTypes[0];
            if (type == Long.class || type == long.class
                    || type == Integer.class || type == int.class
                    || type == Short.class || type == short.class) {
                return BigIntegerMultiplyMethodHolder.MULTIPLY_METHOD;
            }
        }

        return metaMethod;
    }

    /** Holder for the lazily resolved {@code BigInteger.multiply(BigInteger)} meta method. */
    private static class BigIntegerMultiplyMethodHolder {
        private static final CachedMethod MULTIPLY_METHOD;
        static {
            try {
                MULTIPLY_METHOD = new CachedMethod(BigInteger.class.getDeclaredMethod("multiply", BigInteger.class));
            } catch (NoSuchMethodException | SecurityException e) {
                throw new GroovyBugError("Failed to find multiply method of BigInteger", e);
            }
        }
    }

    /**
     * Finds, among the candidate methods on {@code sc} matching name and parameters,
     * the first one whose declaring class {@code caller} may access.
     *
     * @param declared true to search declared methods of {@code sc} only, false for public/inherited ones
     */
    private Optional<CachedMethod> getAccessibleMetaMethod(final CachedMethod metaMethod, final Class<?>[] params, final Class<?> caller, final Class<?> sc, final boolean declared) {
        List<CachedMethod> metaMethodList = getMetaMethods(metaMethod, params, sc, declared);
        for (CachedMethod mm : metaMethodList) {
            if (checkAccessible(caller, mm.getDeclaringClass().getTheClass(), mm.getModifiers(), false)) {
                return Optional.of(mm);
            }
        }
        return Optional.empty();
    }

    /** Collects methods on {@code sc} matching {@code metaMethod}'s name/parameters as {@link CachedMethod}s. */
    private static List<CachedMethod> getMetaMethods(final CachedMethod metaMethod, final Class<?>[] params, final Class<?> sc, final boolean declared) {
        String metaMethodName = metaMethod.getName();
        List<Method> optionalMethodList = declared
                ? ReflectionUtils.getDeclaredMethods(sc, metaMethodName, params)
                : ReflectionUtils.getMethods(sc, metaMethodName, params);
        return optionalMethodList.stream().map(CachedMethod::new).collect(Collectors.toList());
    }

    /**
     * Module-system accessibility check: a member is accessible when its package is exported
     * (public/protected members of a public class) or opened to {@code accessingClass}'s module,
     * optionally also requiring that the package is not in the "to open" bookkeeping sets
     * unless {@code allowIllegalAccess} (or named/unnamed module rules) make that sufficient.
     */
    @Override
    public boolean checkAccessible(final Class<?> accessingClass, final Class<?> declaringClass, final int memberModifiers, final boolean allowIllegalAccess) {
        Module accessingModule = accessingClass.getModule();
        Module declaringModule = declaringClass.getModule();
        String packageName = declaringClass.getPackageName();

        boolean exportedOrOpenIsSufficient = allowIllegalAccess || accessingModule.isNamed() || !declaringModule.isNamed();

        if (Modifier.isPublic(declaringClass.getModifiers()) && declaringModule.isExported(packageName, accessingModule)) {
            // member is public
            // member is protected and accessingClass is a subclass of declaringClass
            if (Modifier.isPublic(memberModifiers)
                    || Modifier.isProtected(memberModifiers) && declaringClass.isAssignableFrom(accessingClass)) {
                return exportedOrOpenIsSufficient || !concealedPackageList(declaringModule).contains(packageName);
            }
        }

        if (declaringModule.isOpen(packageName, accessingModule)) {
            return exportedOrOpenIsSufficient
                    || !(concealedPackageList(declaringModule).contains(packageName)
                            || exportedPackageList(declaringModule).contains(packageName));
        }

        return false;
    }

    /** Concealed JDK packages (per module) that would need "--add-opens"; empty set when unknown. */
    private static Set<String> concealedPackageList(final Module module) {
        return CONCEALED_PACKAGES_TO_OPEN.computeIfAbsent(module.getName(), m -> new HashSet<>());
    }

    /** Exported-but-not-open JDK packages (per module); empty set when unknown. */
    private static Set<String> exportedPackageList(final Module module) {
        return EXPORTED_PACKAGES_TO_OPEN.computeIfAbsent(module.getName(), m -> new HashSet<>());
    }

    private static final Map<String, Set<String>> CONCEALED_PACKAGES_TO_OPEN;
    private static final Map<String, Set<String>> EXPORTED_PACKAGES_TO_OPEN;
    static {
        // Classify every legacy (Java 8 era) package by its owning system module:
        // not-open packages are split into "exported" vs "concealed" buckets.
        ModuleFinder finder = ModuleFinder.ofSystem();

        Map<String, ModuleDescriptor> map = new HashMap<>();
        finder.findAll().stream()
                .map(ModuleReference::descriptor)
                .forEach(md -> md.packages().forEach(pn -> map.putIfAbsent(pn, md)));

        Map<String, Set<String>> concealedPackagesToOpen = new ConcurrentHashMap<>();
        Map<String, Set<String>> exportedPackagesToOpen = new ConcurrentHashMap<>();
        Arrays.stream(JAVA8_PACKAGES())
                .forEach(pn -> {
                    ModuleDescriptor descriptor = map.get(pn);
                    if (descriptor != null && !isOpen(descriptor, pn)) {
                        if (isExported(descriptor, pn)) {
                            exportedPackagesToOpen.computeIfAbsent(descriptor.name(), k -> new HashSet<>()).add(pn);
                        } else {
                            concealedPackagesToOpen.computeIfAbsent(descriptor.name(), k -> new HashSet<>()).add(pn);
                        }
                    }
                });
        CONCEALED_PACKAGES_TO_OPEN = concealedPackagesToOpen;
        EXPORTED_PACKAGES_TO_OPEN = exportedPackagesToOpen;
    }

    /** True when {@code pn} is unconditionally (unqualified) exported by {@code descriptor}. */
    private static boolean isExported(final ModuleDescriptor descriptor, final String pn) {
        return descriptor.exports()
                .stream()
                .anyMatch(e -> e.source().equals(pn) && !e.isQualified());
    }

    /** True when {@code pn} is unconditionally (unqualified) opened by {@code descriptor}. */
    private static boolean isOpen(final ModuleDescriptor descriptor, final String pn) {
        return descriptor.opens()
                .stream()
                .anyMatch(e -> e.source().equals(pn) && !e.isQualified());
    }

    private static String[] JAVA8_PACKAGES() {
        // The following package list should NOT be changed!
return new String[] { "apple.applescript", "apple.laf", "apple.launcher", "apple.security", "com.apple.concurrent", "com.apple.eawt", "com.apple.eawt.event", "com.apple.eio", "com.apple.laf", "com.apple.laf.resources", "com.oracle.jrockit.jfr", "com.oracle.jrockit.jfr.client", "com.oracle.jrockit.jfr.management", "com.oracle.security.ucrypto", "com.oracle.util", "com.oracle.webservices.internal.api", "com.oracle.webservices.internal.api.databinding", "com.oracle.webservices.internal.api.message", "com.oracle.webservices.internal.impl.encoding", "com.oracle.webservices.internal.impl.internalspi.encoding", "com.oracle.xmlns.internal.webservices.jaxws_databinding", "com.sun.accessibility.internal.resources", "com.sun.activation.registries", "com.sun.awt", "com.sun.beans", "com.sun.beans.decoder", "com.sun.beans.editors", "com.sun.beans.finder", "com.sun.beans.infos", "com.sun.beans.util", "com.sun.codemodel.internal", "com.sun.codemodel.internal.fmt", "com.sun.codemodel.internal.util", "com.sun.codemodel.internal.writer", "com.sun.corba.se.impl.activation", "com.sun.corba.se.impl.copyobject", "com.sun.corba.se.impl.corba", "com.sun.corba.se.impl.dynamicany", "com.sun.corba.se.impl.encoding", "com.sun.corba.se.impl.interceptors", "com.sun.corba.se.impl.io", "com.sun.corba.se.impl.ior", "com.sun.corba.se.impl.ior.iiop", "com.sun.corba.se.impl.javax.rmi", "com.sun.corba.se.impl.javax.rmi.CORBA", "com.sun.corba.se.impl.legacy.connection", "com.sun.corba.se.impl.logging", "com.sun.corba.se.impl.monitoring", "com.sun.corba.se.impl.naming.cosnaming", "com.sun.corba.se.impl.naming.namingutil", "com.sun.corba.se.impl.naming.pcosnaming", "com.sun.corba.se.impl.oa", "com.sun.corba.se.impl.oa.poa", "com.sun.corba.se.impl.oa.toa", "com.sun.corba.se.impl.orb", "com.sun.corba.se.impl.orbutil", "com.sun.corba.se.impl.orbutil.closure", "com.sun.corba.se.impl.orbutil.concurrent", "com.sun.corba.se.impl.orbutil.fsm", "com.sun.corba.se.impl.orbutil.graph", 
"com.sun.corba.se.impl.orbutil.threadpool", "com.sun.corba.se.impl.presentation.rmi", "com.sun.corba.se.impl.protocol", "com.sun.corba.se.impl.protocol.giopmsgheaders", "com.sun.corba.se.impl.resolver", "com.sun.corba.se.impl.transport", "com.sun.corba.se.impl.util", "com.sun.corba.se.internal.CosNaming", "com.sun.corba.se.internal.Interceptors", "com.sun.corba.se.internal.POA", "com.sun.corba.se.internal.corba", "com.sun.corba.se.internal.iiop", "com.sun.corba.se.org.omg.CORBA", "com.sun.corba.se.pept.broker", "com.sun.corba.se.pept.encoding", "com.sun.corba.se.pept.protocol", "com.sun.corba.se.pept.transport", "com.sun.corba.se.spi.activation", "com.sun.corba.se.spi.activation.InitialNameServicePackage", "com.sun.corba.se.spi.activation.LocatorPackage", "com.sun.corba.se.spi.activation.RepositoryPackage", "com.sun.corba.se.spi.copyobject", "com.sun.corba.se.spi.encoding", "com.sun.corba.se.spi.extension", "com.sun.corba.se.spi.ior", "com.sun.corba.se.spi.ior.iiop", "com.sun.corba.se.spi.legacy.connection", "com.sun.corba.se.spi.legacy.interceptor", "com.sun.corba.se.spi.logging", "com.sun.corba.se.spi.monitoring", "com.sun.corba.se.spi.oa", "com.sun.corba.se.spi.orb", "com.sun.corba.se.spi.orbutil.closure", "com.sun.corba.se.spi.orbutil.fsm", "com.sun.corba.se.spi.orbutil.proxy", "com.sun.corba.se.spi.orbutil.threadpool", "com.sun.corba.se.spi.presentation.rmi", "com.sun.corba.se.spi.protocol", "com.sun.corba.se.spi.resolver", "com.sun.corba.se.spi.servicecontext", "com.sun.corba.se.spi.transport", "com.sun.crypto.provider", "com.sun.demo.jvmti.hprof", "com.sun.deploy.uitoolkit.impl.fx", "com.sun.deploy.uitoolkit.impl.fx.ui", "com.sun.deploy.uitoolkit.impl.fx.ui.resources", "com.sun.glass.events", "com.sun.glass.events.mac", "com.sun.glass.ui", "com.sun.glass.ui.delegate", "com.sun.glass.ui.gtk", "com.sun.glass.ui.mac", "com.sun.glass.ui.win", "com.sun.glass.utils", "com.sun.image.codec.jpeg", "com.sun.imageio.plugins.bmp", "com.sun.imageio.plugins.common", 
"com.sun.imageio.plugins.gif", "com.sun.imageio.plugins.jpeg", "com.sun.imageio.plugins.png", "com.sun.imageio.plugins.wbmp", "com.sun.imageio.spi", "com.sun.imageio.stream", "com.sun.istack.internal", "com.sun.istack.internal.localization", "com.sun.istack.internal.logging", "com.sun.istack.internal.tools", "com.sun.jarsigner", "com.sun.java.accessibility", "com.sun.java.accessibility.util", "com.sun.java.accessibility.util.java.awt", "com.sun.java.browser.dom", "com.sun.java.browser.net", "com.sun.java.swing", "com.sun.java.swing.plaf.gtk", "com.sun.java.swing.plaf.gtk.resources", "com.sun.java.swing.plaf.motif", "com.sun.java.swing.plaf.motif.resources", "com.sun.java.swing.plaf.nimbus", "com.sun.java.swing.plaf.windows", "com.sun.java.swing.plaf.windows.resources", "com.sun.java.util.jar.pack", "com.sun.java_cup.internal.runtime", "com.sun.javadoc", "com.sun.javafx", "com.sun.javafx.animation", "com.sun.javafx.applet", "com.sun.javafx.application", "com.sun.javafx.beans", "com.sun.javafx.beans.event", "com.sun.javafx.binding", "com.sun.javafx.charts", "com.sun.javafx.collections", "com.sun.javafx.css", "com.sun.javafx.css.converters", "com.sun.javafx.css.parser", "com.sun.javafx.cursor", "com.sun.javafx.effect", "com.sun.javafx.embed", "com.sun.javafx.event", "com.sun.javafx.font", "com.sun.javafx.font.coretext", "com.sun.javafx.font.directwrite", "com.sun.javafx.font.freetype", "com.sun.javafx.font.t2k", "com.sun.javafx.fxml", "com.sun.javafx.fxml.builder", "com.sun.javafx.fxml.expression", "com.sun.javafx.geom", "com.sun.javafx.geom.transform", "com.sun.javafx.geometry", "com.sun.javafx.iio", "com.sun.javafx.iio.bmp", "com.sun.javafx.iio.common", "com.sun.javafx.iio.gif", "com.sun.javafx.iio.ios", "com.sun.javafx.iio.jpeg", "com.sun.javafx.iio.png", "com.sun.javafx.image", "com.sun.javafx.image.impl", "com.sun.javafx.jmx", "com.sun.javafx.logging", "com.sun.javafx.media", "com.sun.javafx.menu", "com.sun.javafx.perf", "com.sun.javafx.print", 
"com.sun.javafx.property", "com.sun.javafx.property.adapter", "com.sun.javafx.robot", "com.sun.javafx.robot.impl", "com.sun.javafx.runtime", "com.sun.javafx.runtime.async", "com.sun.javafx.runtime.eula", "com.sun.javafx.scene", "com.sun.javafx.scene.control", "com.sun.javafx.scene.control.behavior", "com.sun.javafx.scene.control.skin", "com.sun.javafx.scene.control.skin.resources", "com.sun.javafx.scene.input", "com.sun.javafx.scene.layout.region", "com.sun.javafx.scene.paint", "com.sun.javafx.scene.shape", "com.sun.javafx.scene.text", "com.sun.javafx.scene.transform", "com.sun.javafx.scene.traversal", "com.sun.javafx.scene.web", "com.sun.javafx.scene.web.behavior", "com.sun.javafx.scene.web.skin", "com.sun.javafx.sg.prism", "com.sun.javafx.sg.prism.web", "com.sun.javafx.stage", "com.sun.javafx.text", "com.sun.javafx.tk", "com.sun.javafx.tk.quantum", "com.sun.javafx.util", "com.sun.javafx.webkit", "com.sun.javafx.webkit.drt", "com.sun.javafx.webkit.prism", "com.sun.javafx.webkit.prism.theme", "com.sun.javafx.webkit.theme", "com.sun.jdi", "com.sun.jdi.connect", "com.sun.jdi.connect.spi", "com.sun.jdi.event", "com.sun.jdi.request", "com.sun.jmx.defaults", "com.sun.jmx.interceptor", "com.sun.jmx.mbeanserver", "com.sun.jmx.remote.internal", "com.sun.jmx.remote.protocol.iiop", "com.sun.jmx.remote.protocol.rmi", "com.sun.jmx.remote.security", "com.sun.jmx.remote.util", "com.sun.jmx.snmp", "com.sun.jmx.snmp.IPAcl", "com.sun.jmx.snmp.agent", "com.sun.jmx.snmp.daemon", "com.sun.jmx.snmp.defaults", "com.sun.jmx.snmp.internal", "com.sun.jmx.snmp.mpm", "com.sun.jmx.snmp.tasks", "com.sun.jndi.cosnaming", "com.sun.jndi.dns", "com.sun.jndi.ldap", "com.sun.jndi.ldap.ext", "com.sun.jndi.ldap.pool", "com.sun.jndi.ldap.sasl", "com.sun.jndi.rmi.registry", "com.sun.jndi.toolkit.corba", "com.sun.jndi.toolkit.ctx", "com.sun.jndi.toolkit.dir", "com.sun.jndi.toolkit.url", "com.sun.jndi.url.corbaname", "com.sun.jndi.url.dns", "com.sun.jndi.url.iiop", "com.sun.jndi.url.iiopname", 
"com.sun.jndi.url.ldap", "com.sun.jndi.url.ldaps", "com.sun.jndi.url.rmi", "com.sun.management", "com.sun.management.jmx", "com.sun.media.jfxmedia", "com.sun.media.jfxmedia.control", "com.sun.media.jfxmedia.effects", "com.sun.media.jfxmedia.events", "com.sun.media.jfxmedia.locator", "com.sun.media.jfxmedia.logging", "com.sun.media.jfxmedia.track", "com.sun.media.jfxmediaimpl", "com.sun.media.jfxmediaimpl.platform", "com.sun.media.jfxmediaimpl.platform.gstreamer", "com.sun.media.jfxmediaimpl.platform.ios", "com.sun.media.jfxmediaimpl.platform.java", "com.sun.media.jfxmediaimpl.platform.osx", "com.sun.media.sound", "com.sun.naming.internal", "com.sun.net.httpserver", "com.sun.net.httpserver.spi", "com.sun.net.ssl", "com.sun.net.ssl.internal.ssl", "com.sun.net.ssl.internal.www.protocol.https", "com.sun.nio.file", "com.sun.nio.sctp", "com.sun.nio.zipfs", "com.sun.openpisces", "com.sun.org.apache.bcel.internal", "com.sun.org.apache.bcel.internal.classfile", "com.sun.org.apache.bcel.internal.generic", "com.sun.org.apache.bcel.internal.util", "com.sun.org.apache.regexp.internal", "com.sun.org.apache.xalan.internal", "com.sun.org.apache.xalan.internal.extensions", "com.sun.org.apache.xalan.internal.lib", "com.sun.org.apache.xalan.internal.res", "com.sun.org.apache.xalan.internal.templates", "com.sun.org.apache.xalan.internal.utils", "com.sun.org.apache.xalan.internal.xslt", "com.sun.org.apache.xalan.internal.xsltc", "com.sun.org.apache.xalan.internal.xsltc.cmdline", "com.sun.org.apache.xalan.internal.xsltc.cmdline.getopt", "com.sun.org.apache.xalan.internal.xsltc.compiler", "com.sun.org.apache.xalan.internal.xsltc.compiler.util", "com.sun.org.apache.xalan.internal.xsltc.dom", "com.sun.org.apache.xalan.internal.xsltc.runtime", "com.sun.org.apache.xalan.internal.xsltc.runtime.output", "com.sun.org.apache.xalan.internal.xsltc.trax", "com.sun.org.apache.xalan.internal.xsltc.util", "com.sun.org.apache.xerces.internal.dom", "com.sun.org.apache.xerces.internal.dom.events", 
"com.sun.org.apache.xerces.internal.impl", "com.sun.org.apache.xerces.internal.impl.dtd", "com.sun.org.apache.xerces.internal.impl.dtd.models", "com.sun.org.apache.xerces.internal.impl.dv", "com.sun.org.apache.xerces.internal.impl.dv.dtd", "com.sun.org.apache.xerces.internal.impl.dv.util", "com.sun.org.apache.xerces.internal.impl.dv.xs", "com.sun.org.apache.xerces.internal.impl.io", "com.sun.org.apache.xerces.internal.impl.msg", "com.sun.org.apache.xerces.internal.impl.validation", "com.sun.org.apache.xerces.internal.impl.xpath", "com.sun.org.apache.xerces.internal.impl.xpath.regex", "com.sun.org.apache.xerces.internal.impl.xs", "com.sun.org.apache.xerces.internal.impl.xs.identity", "com.sun.org.apache.xerces.internal.impl.xs.models", "com.sun.org.apache.xerces.internal.impl.xs.opti", "com.sun.org.apache.xerces.internal.impl.xs.traversers", "com.sun.org.apache.xerces.internal.impl.xs.util", "com.sun.org.apache.xerces.internal.jaxp", "com.sun.org.apache.xerces.internal.jaxp.datatype", "com.sun.org.apache.xerces.internal.jaxp.validation", "com.sun.org.apache.xerces.internal.parsers", "com.sun.org.apache.xerces.internal.util", "com.sun.org.apache.xerces.internal.utils", "com.sun.org.apache.xerces.internal.xinclude", "com.sun.org.apache.xerces.internal.xni", "com.sun.org.apache.xerces.internal.xni.grammars", "com.sun.org.apache.xerces.internal.xni.parser", "com.sun.org.apache.xerces.internal.xpointer", "com.sun.org.apache.xerces.internal.xs", "com.sun.org.apache.xerces.internal.xs.datatypes", "com.sun.org.apache.xml.internal.dtm", "com.sun.org.apache.xml.internal.dtm.ref", "com.sun.org.apache.xml.internal.dtm.ref.dom2dtm", "com.sun.org.apache.xml.internal.dtm.ref.sax2dtm", "com.sun.org.apache.xml.internal.res", "com.sun.org.apache.xml.internal.resolver", "com.sun.org.apache.xml.internal.resolver.helpers", "com.sun.org.apache.xml.internal.resolver.readers", "com.sun.org.apache.xml.internal.resolver.tools", "com.sun.org.apache.xml.internal.security", 
"com.sun.org.apache.xml.internal.security.algorithms", "com.sun.org.apache.xml.internal.security.algorithms.implementations", "com.sun.org.apache.xml.internal.security.c14n", "com.sun.org.apache.xml.internal.security.c14n.helper", "com.sun.org.apache.xml.internal.security.c14n.implementations", "com.sun.org.apache.xml.internal.security.encryption", "com.sun.org.apache.xml.internal.security.exceptions", "com.sun.org.apache.xml.internal.security.keys", "com.sun.org.apache.xml.internal.security.keys.content", "com.sun.org.apache.xml.internal.security.keys.content.keyvalues", "com.sun.org.apache.xml.internal.security.keys.content.x509", "com.sun.org.apache.xml.internal.security.keys.keyresolver", "com.sun.org.apache.xml.internal.security.keys.keyresolver.implementations", "com.sun.org.apache.xml.internal.security.keys.storage", "com.sun.org.apache.xml.internal.security.keys.storage.implementations", "com.sun.org.apache.xml.internal.security.signature", "com.sun.org.apache.xml.internal.security.signature.reference", "com.sun.org.apache.xml.internal.security.transforms", "com.sun.org.apache.xml.internal.security.transforms.implementations", "com.sun.org.apache.xml.internal.security.transforms.params", "com.sun.org.apache.xml.internal.security.utils", "com.sun.org.apache.xml.internal.security.utils.resolver", "com.sun.org.apache.xml.internal.security.utils.resolver.implementations", "com.sun.org.apache.xml.internal.serialize", "com.sun.org.apache.xml.internal.serializer", "com.sun.org.apache.xml.internal.serializer.utils", "com.sun.org.apache.xml.internal.utils", "com.sun.org.apache.xml.internal.utils.res", "com.sun.org.apache.xpath.internal", "com.sun.org.apache.xpath.internal.axes", "com.sun.org.apache.xpath.internal.compiler", "com.sun.org.apache.xpath.internal.domapi", "com.sun.org.apache.xpath.internal.functions", "com.sun.org.apache.xpath.internal.jaxp", "com.sun.org.apache.xpath.internal.objects", "com.sun.org.apache.xpath.internal.operations", 
"com.sun.org.apache.xpath.internal.patterns", "com.sun.org.apache.xpath.internal.res", "com.sun.org.glassfish.external.amx", "com.sun.org.glassfish.external.arc", "com.sun.org.glassfish.external.probe.provider", "com.sun.org.glassfish.external.probe.provider.annotations", "com.sun.org.glassfish.external.statistics", "com.sun.org.glassfish.external.statistics.annotations", "com.sun.org.glassfish.external.statistics.impl", "com.sun.org.glassfish.gmbal", "com.sun.org.glassfish.gmbal.util", "com.sun.org.omg.CORBA", "com.sun.org.omg.CORBA.ValueDefPackage", "com.sun.org.omg.CORBA.portable", "com.sun.org.omg.SendingContext", "com.sun.org.omg.SendingContext.CodeBasePackage", "com.sun.pisces", "com.sun.prism", "com.sun.prism.d3d", "com.sun.prism.es2", "com.sun.prism.image", "com.sun.prism.impl", "com.sun.prism.impl.packrect", "com.sun.prism.impl.paint", "com.sun.prism.impl.ps", "com.sun.prism.impl.shape", "com.sun.prism.j2d", "com.sun.prism.j2d.paint", "com.sun.prism.j2d.print", "com.sun.prism.paint", "com.sun.prism.ps", "com.sun.prism.shader", "com.sun.prism.shape", "com.sun.prism.sw", "com.sun.rmi.rmid", "com.sun.rowset", "com.sun.rowset.internal", "com.sun.rowset.providers", "com.sun.scenario", "com.sun.scenario.animation", "com.sun.scenario.animation.shared", "com.sun.scenario.effect", "com.sun.scenario.effect.impl", "com.sun.scenario.effect.impl.es2", "com.sun.scenario.effect.impl.hw", "com.sun.scenario.effect.impl.hw.d3d", "com.sun.scenario.effect.impl.prism", "com.sun.scenario.effect.impl.prism.ps", "com.sun.scenario.effect.impl.prism.sw", "com.sun.scenario.effect.impl.state", "com.sun.scenario.effect.impl.sw", "com.sun.scenario.effect.impl.sw.java", "com.sun.scenario.effect.impl.sw.sse", "com.sun.scenario.effect.light", "com.sun.security.auth", "com.sun.security.auth.callback", "com.sun.security.auth.login", "com.sun.security.auth.module", "com.sun.security.cert.internal.x509", "com.sun.security.jgss", "com.sun.security.ntlm", "com.sun.security.sasl", 
"com.sun.security.sasl.digest", "com.sun.security.sasl.gsskerb", "com.sun.security.sasl.ntlm", "com.sun.security.sasl.util", "com.sun.source.doctree", "com.sun.source.tree", "com.sun.source.util", "com.sun.swing.internal.plaf.basic.resources", "com.sun.swing.internal.plaf.metal.resources", "com.sun.swing.internal.plaf.synth.resources", "com.sun.tools.attach", "com.sun.tools.attach.spi", "com.sun.tools.classfile", "com.sun.tools.corba.se.idl", "com.sun.tools.corba.se.idl.constExpr", "com.sun.tools.corba.se.idl.som.cff", "com.sun.tools.corba.se.idl.som.idlemit", "com.sun.tools.corba.se.idl.toJavaPortable", "com.sun.tools.doclets", "com.sun.tools.doclets.formats.html", "com.sun.tools.doclets.formats.html.markup", "com.sun.tools.doclets.formats.html.resources", "com.sun.tools.doclets.internal.toolkit", "com.sun.tools.doclets.internal.toolkit.builders", "com.sun.tools.doclets.internal.toolkit.resources", "com.sun.tools.doclets.internal.toolkit.taglets", "com.sun.tools.doclets.internal.toolkit.util", "com.sun.tools.doclets.internal.toolkit.util.links", "com.sun.tools.doclets.standard", "com.sun.tools.doclint", "com.sun.tools.doclint.resources", "com.sun.tools.example.debug.expr", "com.sun.tools.example.debug.tty", "com.sun.tools.extcheck", "com.sun.tools.hat", "com.sun.tools.hat.internal.model", "com.sun.tools.hat.internal.oql", "com.sun.tools.hat.internal.parser", "com.sun.tools.hat.internal.server", "com.sun.tools.hat.internal.util", "com.sun.tools.internal.jxc", "com.sun.tools.internal.jxc.ap", "com.sun.tools.internal.jxc.api", "com.sun.tools.internal.jxc.api.impl.j2s", "com.sun.tools.internal.jxc.gen.config", "com.sun.tools.internal.jxc.model.nav", "com.sun.tools.internal.ws", "com.sun.tools.internal.ws.api", "com.sun.tools.internal.ws.api.wsdl", "com.sun.tools.internal.ws.processor", "com.sun.tools.internal.ws.processor.generator", "com.sun.tools.internal.ws.processor.model", "com.sun.tools.internal.ws.processor.model.exporter", 
"com.sun.tools.internal.ws.processor.model.java", "com.sun.tools.internal.ws.processor.model.jaxb", "com.sun.tools.internal.ws.processor.modeler", "com.sun.tools.internal.ws.processor.modeler.annotation", "com.sun.tools.internal.ws.processor.modeler.wsdl", "com.sun.tools.internal.ws.processor.util", "com.sun.tools.internal.ws.resources", "com.sun.tools.internal.ws.spi", "com.sun.tools.internal.ws.util", "com.sun.tools.internal.ws.util.xml", "com.sun.tools.internal.ws.wscompile", "com.sun.tools.internal.ws.wscompile.plugin.at_generated", "com.sun.tools.internal.ws.wsdl.document", "com.sun.tools.internal.ws.wsdl.document.http", "com.sun.tools.internal.ws.wsdl.document.jaxws", "com.sun.tools.internal.ws.wsdl.document.mime", "com.sun.tools.internal.ws.wsdl.document.schema", "com.sun.tools.internal.ws.wsdl.document.soap", "com.sun.tools.internal.ws.wsdl.framework", "com.sun.tools.internal.ws.wsdl.parser", "com.sun.tools.internal.xjc", "com.sun.tools.internal.xjc.addon.accessors", "com.sun.tools.internal.xjc.addon.at_generated", "com.sun.tools.internal.xjc.addon.code_injector", "com.sun.tools.internal.xjc.addon.episode", "com.sun.tools.internal.xjc.addon.locator", "com.sun.tools.internal.xjc.addon.sync", "com.sun.tools.internal.xjc.api", "com.sun.tools.internal.xjc.api.impl.s2j", "com.sun.tools.internal.xjc.api.util", "com.sun.tools.internal.xjc.generator.annotation.spec", "com.sun.tools.internal.xjc.generator.bean", "com.sun.tools.internal.xjc.generator.bean.field", "com.sun.tools.internal.xjc.generator.util", "com.sun.tools.internal.xjc.model", "com.sun.tools.internal.xjc.model.nav", "com.sun.tools.internal.xjc.outline", "com.sun.tools.internal.xjc.reader", "com.sun.tools.internal.xjc.reader.dtd", "com.sun.tools.internal.xjc.reader.dtd.bindinfo", "com.sun.tools.internal.xjc.reader.gbind", "com.sun.tools.internal.xjc.reader.internalizer", "com.sun.tools.internal.xjc.reader.relaxng", "com.sun.tools.internal.xjc.reader.xmlschema", 
"com.sun.tools.internal.xjc.reader.xmlschema.bindinfo", "com.sun.tools.internal.xjc.reader.xmlschema.ct", "com.sun.tools.internal.xjc.reader.xmlschema.parser", "com.sun.tools.internal.xjc.runtime", "com.sun.tools.internal.xjc.util", "com.sun.tools.internal.xjc.writer", "com.sun.tools.javac", "com.sun.tools.javac.api", "com.sun.tools.javac.code", "com.sun.tools.javac.comp", "com.sun.tools.javac.file", "com.sun.tools.javac.jvm", "com.sun.tools.javac.main", "com.sun.tools.javac.model", "com.sun.tools.javac.nio", "com.sun.tools.javac.parser", "com.sun.tools.javac.processing", "com.sun.tools.javac.resources", "com.sun.tools.javac.sym", "com.sun.tools.javac.tree", "com.sun.tools.javac.util", "com.sun.tools.javadoc", "com.sun.tools.javadoc.api", "com.sun.tools.javadoc.resources", "com.sun.tools.javah", "com.sun.tools.javah.resources", "com.sun.tools.javap", "com.sun.tools.javap.resources", "com.sun.tools.jconsole", "com.sun.tools.jdeps", "com.sun.tools.jdeps.resources", "com.sun.tools.jdi", "com.sun.tools.jdi.resources", "com.sun.tools.script.shell", "com.sun.tracing", "com.sun.tracing.dtrace", "com.sun.webkit", "com.sun.webkit.dom", "com.sun.webkit.event", "com.sun.webkit.graphics", "com.sun.webkit.network", "com.sun.webkit.network.about", "com.sun.webkit.network.data", "com.sun.webkit.perf", "com.sun.webkit.plugin", "com.sun.webkit.text", "com.sun.xml.internal.bind", "com.sun.xml.internal.bind.annotation", "com.sun.xml.internal.bind.api", "com.sun.xml.internal.bind.api.impl", "com.sun.xml.internal.bind.marshaller", "com.sun.xml.internal.bind.unmarshaller", "com.sun.xml.internal.bind.util", "com.sun.xml.internal.bind.v2", "com.sun.xml.internal.bind.v2.bytecode", "com.sun.xml.internal.bind.v2.model.annotation", "com.sun.xml.internal.bind.v2.model.core", "com.sun.xml.internal.bind.v2.model.impl", "com.sun.xml.internal.bind.v2.model.nav", "com.sun.xml.internal.bind.v2.model.runtime", "com.sun.xml.internal.bind.v2.model.util", "com.sun.xml.internal.bind.v2.runtime", 
"com.sun.xml.internal.bind.v2.runtime.output", "com.sun.xml.internal.bind.v2.runtime.property", "com.sun.xml.internal.bind.v2.runtime.reflect", "com.sun.xml.internal.bind.v2.runtime.reflect.opt", "com.sun.xml.internal.bind.v2.runtime.unmarshaller", "com.sun.xml.internal.bind.v2.schemagen", "com.sun.xml.internal.bind.v2.schemagen.episode", "com.sun.xml.internal.bind.v2.schemagen.xmlschema", "com.sun.xml.internal.bind.v2.util", "com.sun.xml.internal.dtdparser", "com.sun.xml.internal.fastinfoset", "com.sun.xml.internal.fastinfoset.algorithm", "com.sun.xml.internal.fastinfoset.alphabet", "com.sun.xml.internal.fastinfoset.dom", "com.sun.xml.internal.fastinfoset.org.apache.xerces.util", "com.sun.xml.internal.fastinfoset.sax", "com.sun.xml.internal.fastinfoset.stax", "com.sun.xml.internal.fastinfoset.stax.events", "com.sun.xml.internal.fastinfoset.stax.factory", "com.sun.xml.internal.fastinfoset.stax.util", "com.sun.xml.internal.fastinfoset.tools", "com.sun.xml.internal.fastinfoset.util", "com.sun.xml.internal.fastinfoset.vocab", "com.sun.xml.internal.messaging.saaj", "com.sun.xml.internal.messaging.saaj.client.p2p", "com.sun.xml.internal.messaging.saaj.packaging.mime", "com.sun.xml.internal.messaging.saaj.packaging.mime.internet", "com.sun.xml.internal.messaging.saaj.packaging.mime.util", "com.sun.xml.internal.messaging.saaj.soap", "com.sun.xml.internal.messaging.saaj.soap.dynamic", "com.sun.xml.internal.messaging.saaj.soap.impl", "com.sun.xml.internal.messaging.saaj.soap.name", "com.sun.xml.internal.messaging.saaj.soap.ver1_1", "com.sun.xml.internal.messaging.saaj.soap.ver1_2", "com.sun.xml.internal.messaging.saaj.util", "com.sun.xml.internal.messaging.saaj.util.transform", "com.sun.xml.internal.org.jvnet.fastinfoset", "com.sun.xml.internal.org.jvnet.fastinfoset.sax", "com.sun.xml.internal.org.jvnet.fastinfoset.sax.helpers", "com.sun.xml.internal.org.jvnet.fastinfoset.stax", "com.sun.xml.internal.org.jvnet.mimepull", "com.sun.xml.internal.org.jvnet.staxex", 
"com.sun.xml.internal.rngom.ast.builder", "com.sun.xml.internal.rngom.ast.om", "com.sun.xml.internal.rngom.ast.util", "com.sun.xml.internal.rngom.binary", "com.sun.xml.internal.rngom.binary.visitor", "com.sun.xml.internal.rngom.digested", "com.sun.xml.internal.rngom.dt", "com.sun.xml.internal.rngom.dt.builtin", "com.sun.xml.internal.rngom.nc", "com.sun.xml.internal.rngom.parse", "com.sun.xml.internal.rngom.parse.compact", "com.sun.xml.internal.rngom.parse.host", "com.sun.xml.internal.rngom.parse.xml", "com.sun.xml.internal.rngom.util", "com.sun.xml.internal.rngom.xml.sax", "com.sun.xml.internal.rngom.xml.util", "com.sun.xml.internal.stream", "com.sun.xml.internal.stream.buffer", "com.sun.xml.internal.stream.buffer.sax", "com.sun.xml.internal.stream.buffer.stax", "com.sun.xml.internal.stream.dtd", "com.sun.xml.internal.stream.dtd.nonvalidating", "com.sun.xml.internal.stream.events", "com.sun.xml.internal.stream.util", "com.sun.xml.internal.stream.writers", "com.sun.xml.internal.txw2", "com.sun.xml.internal.txw2.annotation", "com.sun.xml.internal.txw2.output", "com.sun.xml.internal.ws", "com.sun.xml.internal.ws.addressing", "com.sun.xml.internal.ws.addressing.model", "com.sun.xml.internal.ws.addressing.policy", "com.sun.xml.internal.ws.addressing.v200408", "com.sun.xml.internal.ws.api", "com.sun.xml.internal.ws.api.addressing", "com.sun.xml.internal.ws.api.client", "com.sun.xml.internal.ws.api.config.management", "com.sun.xml.internal.ws.api.config.management.policy", "com.sun.xml.internal.ws.api.databinding", "com.sun.xml.internal.ws.api.fastinfoset", "com.sun.xml.internal.ws.api.ha", "com.sun.xml.internal.ws.api.handler", "com.sun.xml.internal.ws.api.message", "com.sun.xml.internal.ws.api.message.saaj", "com.sun.xml.internal.ws.api.message.stream", "com.sun.xml.internal.ws.api.model", "com.sun.xml.internal.ws.api.model.soap", "com.sun.xml.internal.ws.api.model.wsdl", "com.sun.xml.internal.ws.api.model.wsdl.editable", "com.sun.xml.internal.ws.api.pipe", 
"com.sun.xml.internal.ws.api.pipe.helper", "com.sun.xml.internal.ws.api.policy", "com.sun.xml.internal.ws.api.policy.subject", "com.sun.xml.internal.ws.api.server", "com.sun.xml.internal.ws.api.streaming", "com.sun.xml.internal.ws.api.wsdl.parser", "com.sun.xml.internal.ws.api.wsdl.writer", "com.sun.xml.internal.ws.assembler", "com.sun.xml.internal.ws.assembler.dev", "com.sun.xml.internal.ws.assembler.jaxws", "com.sun.xml.internal.ws.binding", "com.sun.xml.internal.ws.client", "com.sun.xml.internal.ws.client.dispatch", "com.sun.xml.internal.ws.client.sei", "com.sun.xml.internal.ws.commons.xmlutil", "com.sun.xml.internal.ws.config.management.policy", "com.sun.xml.internal.ws.config.metro.dev", "com.sun.xml.internal.ws.config.metro.util", "com.sun.xml.internal.ws.db", "com.sun.xml.internal.ws.db.glassfish", "com.sun.xml.internal.ws.developer", "com.sun.xml.internal.ws.dump", "com.sun.xml.internal.ws.encoding", "com.sun.xml.internal.ws.encoding.fastinfoset", "com.sun.xml.internal.ws.encoding.policy", "com.sun.xml.internal.ws.encoding.soap", "com.sun.xml.internal.ws.encoding.soap.streaming", "com.sun.xml.internal.ws.encoding.xml", "com.sun.xml.internal.ws.fault", "com.sun.xml.internal.ws.handler", "com.sun.xml.internal.ws.message", "com.sun.xml.internal.ws.message.jaxb", "com.sun.xml.internal.ws.message.saaj", "com.sun.xml.internal.ws.message.source", "com.sun.xml.internal.ws.message.stream", "com.sun.xml.internal.ws.model", "com.sun.xml.internal.ws.model.soap", "com.sun.xml.internal.ws.model.wsdl", "com.sun.xml.internal.ws.org.objectweb.asm", "com.sun.xml.internal.ws.policy", "com.sun.xml.internal.ws.policy.jaxws", "com.sun.xml.internal.ws.policy.jaxws.spi", "com.sun.xml.internal.ws.policy.privateutil", "com.sun.xml.internal.ws.policy.sourcemodel", "com.sun.xml.internal.ws.policy.sourcemodel.attach", "com.sun.xml.internal.ws.policy.sourcemodel.wspolicy", "com.sun.xml.internal.ws.policy.spi", "com.sun.xml.internal.ws.policy.subject", 
"com.sun.xml.internal.ws.protocol.soap", "com.sun.xml.internal.ws.protocol.xml", "com.sun.xml.internal.ws.resources", "com.sun.xml.internal.ws.runtime.config", "com.sun.xml.internal.ws.server", "com.sun.xml.internal.ws.server.provider", "com.sun.xml.internal.ws.server.sei", "com.sun.xml.internal.ws.spi", "com.sun.xml.internal.ws.spi.db", "com.sun.xml.internal.ws.streaming", "com.sun.xml.internal.ws.transport", "com.sun.xml.internal.ws.transport.http", "com.sun.xml.internal.ws.transport.http.client", "com.sun.xml.internal.ws.transport.http.server", "com.sun.xml.internal.ws.util", "com.sun.xml.internal.ws.util.exception", "com.sun.xml.internal.ws.util.pipe", "com.sun.xml.internal.ws.util.xml", "com.sun.xml.internal.ws.wsdl", "com.sun.xml.internal.ws.wsdl.parser", "com.sun.xml.internal.ws.wsdl.writer", "com.sun.xml.internal.ws.wsdl.writer.document", "com.sun.xml.internal.ws.wsdl.writer.document.http", "com.sun.xml.internal.ws.wsdl.writer.document.soap", "com.sun.xml.internal.ws.wsdl.writer.document.soap12", "com.sun.xml.internal.ws.wsdl.writer.document.xsd", "com.sun.xml.internal.xsom", "com.sun.xml.internal.xsom.impl", "com.sun.xml.internal.xsom.impl.parser", "com.sun.xml.internal.xsom.impl.parser.state", "com.sun.xml.internal.xsom.impl.scd", "com.sun.xml.internal.xsom.impl.util", "com.sun.xml.internal.xsom.parser", "com.sun.xml.internal.xsom.util", "com.sun.xml.internal.xsom.visitor", "java.applet", "java.awt", "java.awt.color", "java.awt.datatransfer", "java.awt.dnd", "java.awt.dnd.peer", "java.awt.event", "java.awt.font", "java.awt.geom", "java.awt.im", "java.awt.im.spi", "java.awt.image", "java.awt.image.renderable", "java.awt.peer", "java.awt.print", "java.beans", "java.beans.beancontext", "java.io", "java.lang", "java.lang.annotation", "java.lang.instrument", "java.lang.invoke", "java.lang.management", "java.lang.ref", "java.lang.reflect", "java.math", "java.net", "java.nio", "java.nio.channels", "java.nio.channels.spi", "java.nio.charset", 
"java.nio.charset.spi", "java.nio.file", "java.nio.file.attribute", "java.nio.file.spi", "java.rmi", "java.rmi.activation", "java.rmi.dgc", "java.rmi.registry", "java.rmi.server", "java.security", "java.security.acl", "java.security.cert", "java.security.interfaces", "java.security.spec", "java.sql", "java.text", "java.text.spi", "java.time", "java.time.chrono", "java.time.format", "java.time.temporal", "java.time.zone", "java.util", "java.util.concurrent", "java.util.concurrent.atomic", "java.util.concurrent.locks", "java.util.function", "java.util.jar", "java.util.logging", "java.util.prefs", "java.util.regex", "java.util.spi", "java.util.stream", "java.util.zip", "javafx.animation", "javafx.application", "javafx.beans", "javafx.beans.binding", "javafx.beans.property", "javafx.beans.property.adapter", "javafx.beans.value", "javafx.collections", "javafx.collections.transformation", "javafx.concurrent", "javafx.css", "javafx.embed.swing", "javafx.embed.swt", "javafx.event", "javafx.fxml", "javafx.geometry", "javafx.print", "javafx.scene", "javafx.scene.canvas", "javafx.scene.chart", "javafx.scene.control", "javafx.scene.control.cell", "javafx.scene.effect", "javafx.scene.image", "javafx.scene.input", "javafx.scene.layout", "javafx.scene.media", "javafx.scene.paint", "javafx.scene.shape", "javafx.scene.text", "javafx.scene.transform", "javafx.scene.web", "javafx.stage", "javafx.util", "javafx.util.converter", "javax.accessibility", "javax.activation", "javax.activity", "javax.annotation", "javax.annotation.processing", "javax.crypto", "javax.crypto.interfaces", "javax.crypto.spec", "javax.imageio", "javax.imageio.event", "javax.imageio.metadata", "javax.imageio.plugins.bmp", "javax.imageio.plugins.jpeg", "javax.imageio.spi", "javax.imageio.stream", "javax.jws", "javax.jws.soap", "javax.lang.model", "javax.lang.model.element", "javax.lang.model.type", "javax.lang.model.util", "javax.management", "javax.management.loading", "javax.management.modelmbean", 
"javax.management.monitor", "javax.management.openmbean", "javax.management.relation", "javax.management.remote", "javax.management.remote.rmi", "javax.management.timer", "javax.naming", "javax.naming.directory", "javax.naming.event", "javax.naming.ldap", "javax.naming.spi", "javax.net", "javax.net.ssl", "javax.print", "javax.print.attribute", "javax.print.attribute.standard", "javax.print.event", "javax.rmi", "javax.rmi.CORBA", "javax.rmi.ssl", "javax.script", "javax.security.auth", "javax.security.auth.callback", "javax.security.auth.kerberos", "javax.security.auth.login", "javax.security.auth.spi", "javax.security.auth.x500", "javax.security.cert", "javax.security.sasl", "javax.smartcardio", "javax.sound.midi", "javax.sound.midi.spi", "javax.sound.sampled", "javax.sound.sampled.spi", "javax.sql", "javax.sql.rowset", "javax.sql.rowset.serial", "javax.sql.rowset.spi", "javax.swing", "javax.swing.border", "javax.swing.colorchooser", "javax.swing.event", "javax.swing.filechooser", "javax.swing.plaf", "javax.swing.plaf.basic", "javax.swing.plaf.metal", "javax.swing.plaf.multi", "javax.swing.plaf.nimbus", "javax.swing.plaf.synth", "javax.swing.table", "javax.swing.text", "javax.swing.text.html", "javax.swing.text.html.parser", "javax.swing.text.rtf", "javax.swing.tree", "javax.swing.undo", "javax.tools", "javax.transaction", "javax.transaction.xa", "javax.xml", "javax.xml.bind", "javax.xml.bind.annotation", "javax.xml.bind.annotation.adapters", "javax.xml.bind.attachment", "javax.xml.bind.helpers", "javax.xml.bind.util", "javax.xml.crypto", "javax.xml.crypto.dom", "javax.xml.crypto.dsig", "javax.xml.crypto.dsig.dom", "javax.xml.crypto.dsig.keyinfo", "javax.xml.crypto.dsig.spec", "javax.xml.datatype", "javax.xml.namespace", "javax.xml.parsers", "javax.xml.soap", "javax.xml.stream", "javax.xml.stream.events", "javax.xml.stream.util", "javax.xml.transform", "javax.xml.transform.dom", "javax.xml.transform.sax", "javax.xml.transform.stax", "javax.xml.transform.stream", 
"javax.xml.validation", "javax.xml.ws", "javax.xml.ws.handler", "javax.xml.ws.handler.soap", "javax.xml.ws.http", "javax.xml.ws.soap", "javax.xml.ws.spi", "javax.xml.ws.spi.http", "javax.xml.ws.wsaddressing", "javax.xml.xpath", "jdk", "jdk.internal.cmm", "jdk.internal.dynalink", "jdk.internal.dynalink.beans", "jdk.internal.dynalink.linker", "jdk.internal.dynalink.support", "jdk.internal.instrumentation", "jdk.internal.org.objectweb.asm", "jdk.internal.org.objectweb.asm.commons", "jdk.internal.org.objectweb.asm.signature", "jdk.internal.org.objectweb.asm.tree", "jdk.internal.org.objectweb.asm.tree.analysis", "jdk.internal.org.objectweb.asm.util", "jdk.internal.org.xml.sax", "jdk.internal.org.xml.sax.helpers", "jdk.internal.util.xml", "jdk.internal.util.xml.impl", "jdk.jfr.events", "jdk.management.cmm", "jdk.management.resource", "jdk.management.resource.internal", "jdk.management.resource.internal.inst", "jdk.nashorn.api.scripting", "jdk.nashorn.internal", "jdk.nashorn.internal.codegen", "jdk.nashorn.internal.codegen.types", "jdk.nashorn.internal.ir", "jdk.nashorn.internal.ir.annotations", "jdk.nashorn.internal.ir.debug", "jdk.nashorn.internal.ir.visitor", "jdk.nashorn.internal.lookup", "jdk.nashorn.internal.objects", "jdk.nashorn.internal.objects.annotations", "jdk.nashorn.internal.parser", "jdk.nashorn.internal.runtime", "jdk.nashorn.internal.runtime.arrays", "jdk.nashorn.internal.runtime.events", "jdk.nashorn.internal.runtime.linker", "jdk.nashorn.internal.runtime.logging", "jdk.nashorn.internal.runtime.options", "jdk.nashorn.internal.runtime.regexp", "jdk.nashorn.internal.runtime.regexp.joni", "jdk.nashorn.internal.runtime.regexp.joni.ast", "jdk.nashorn.internal.runtime.regexp.joni.constants", "jdk.nashorn.internal.runtime.regexp.joni.encoding", "jdk.nashorn.internal.runtime.regexp.joni.exception", "jdk.nashorn.internal.scripts", "jdk.nashorn.tools", "jdk.net", "netscape.javascript", "oracle.jrockit.jfr", "oracle.jrockit.jfr.events", 
"oracle.jrockit.jfr.jdkevents", "oracle.jrockit.jfr.jdkevents.throwabletransform", "oracle.jrockit.jfr.openmbean", "oracle.jrockit.jfr.parser", "oracle.jrockit.jfr.settings", "oracle.jrockit.jfr.tools", "org.ietf.jgss", "org.jcp.xml.dsig.internal", "org.jcp.xml.dsig.internal.dom", "org.omg.CORBA", "org.omg.CORBA.DynAnyPackage", "org.omg.CORBA.ORBPackage", "org.omg.CORBA.TypeCodePackage", "org.omg.CORBA.portable", "org.omg.CORBA_2_3", "org.omg.CORBA_2_3.portable", "org.omg.CosNaming", "org.omg.CosNaming.NamingContextExtPackage", "org.omg.CosNaming.NamingContextPackage", "org.omg.Dynamic", "org.omg.DynamicAny", "org.omg.DynamicAny.DynAnyFactoryPackage", "org.omg.DynamicAny.DynAnyPackage", "org.omg.IOP", "org.omg.IOP.CodecFactoryPackage", "org.omg.IOP.CodecPackage", "org.omg.Messaging", "org.omg.PortableInterceptor", "org.omg.PortableInterceptor.ORBInitInfoPackage", "org.omg.PortableServer", "org.omg.PortableServer.CurrentPackage", "org.omg.PortableServer.POAManagerPackage", "org.omg.PortableServer.POAPackage", "org.omg.PortableServer.ServantLocatorPackage", "org.omg.PortableServer.portable", "org.omg.SendingContext", "org.omg.stub.java.rmi", "org.omg.stub.javax.management.remote.rmi", "org.relaxng.datatype", "org.relaxng.datatype.helpers", "org.w3c.dom", "org.w3c.dom.bootstrap", "org.w3c.dom.css", "org.w3c.dom.events", "org.w3c.dom.html", "org.w3c.dom.ls", "org.w3c.dom.ranges", "org.w3c.dom.stylesheets", "org.w3c.dom.traversal", "org.w3c.dom.views", "org.w3c.dom.xpath", "org.xml.sax", "org.xml.sax.ext", "org.xml.sax.helpers", "sun.applet", "sun.applet.resources", "sun.audio", "sun.awt", "sun.awt.X11", "sun.awt.datatransfer", "sun.awt.dnd", "sun.awt.event", "sun.awt.geom", "sun.awt.im", "sun.awt.image", "sun.awt.image.codec", "sun.awt.motif", "sun.awt.resources", "sun.awt.shell", "sun.awt.util", "sun.awt.windows", "sun.corba", "sun.dc", "sun.dc.path", "sun.dc.pr", "sun.font", "sun.instrument", "sun.invoke", "sun.invoke.empty", "sun.invoke.util", "sun.io", 
"sun.java2d", "sun.java2d.cmm", "sun.java2d.cmm.kcms", "sun.java2d.cmm.lcms", "sun.java2d.d3d", "sun.java2d.jules", "sun.java2d.loops", "sun.java2d.opengl", "sun.java2d.pipe", "sun.java2d.pipe.hw", "sun.java2d.pisces", "sun.java2d.windows", "sun.java2d.x11", "sun.java2d.xr", "sun.jvmstat.monitor", "sun.jvmstat.monitor.event", "sun.jvmstat.monitor.remote", "sun.jvmstat.perfdata.monitor", "sun.jvmstat.perfdata.monitor.protocol.file", "sun.jvmstat.perfdata.monitor.protocol.local", "sun.jvmstat.perfdata.monitor.protocol.rmi", "sun.jvmstat.perfdata.monitor.v1_0", "sun.jvmstat.perfdata.monitor.v2_0", "sun.launcher", "sun.launcher.resources", "sun.lwawt", "sun.lwawt.macosx", "sun.management", "sun.management.counter", "sun.management.counter.perf", "sun.management.jdp", "sun.management.jmxremote", "sun.management.resources", "sun.management.snmp", "sun.management.snmp.jvminstr", "sun.management.snmp.jvmmib", "sun.management.snmp.util", "sun.misc", "sun.misc.resources", "sun.net", "sun.net.dns", "sun.net.ftp", "sun.net.ftp.impl", "sun.net.httpserver", "sun.net.idn", "sun.net.sdp", "sun.net.smtp", "sun.net.spi", "sun.net.spi.nameservice", "sun.net.spi.nameservice.dns", "sun.net.util", "sun.net.www", "sun.net.www.content.audio", "sun.net.www.content.image", "sun.net.www.content.text", "sun.net.www.http", "sun.net.www.protocol.file", "sun.net.www.protocol.ftp", "sun.net.www.protocol.http", "sun.net.www.protocol.http.logging", "sun.net.www.protocol.http.ntlm", "sun.net.www.protocol.http.spnego", "sun.net.www.protocol.https", "sun.net.www.protocol.jar", "sun.net.www.protocol.mailto", "sun.net.www.protocol.netdoc", "sun.nio", "sun.nio.ch", "sun.nio.ch.sctp", "sun.nio.cs", "sun.nio.cs.ext", "sun.nio.fs", "sun.print", "sun.print.resources", "sun.reflect", "sun.reflect.annotation", "sun.reflect.generics.factory", "sun.reflect.generics.parser", "sun.reflect.generics.reflectiveObjects", "sun.reflect.generics.repository", "sun.reflect.generics.scope", "sun.reflect.generics.tree", 
"sun.reflect.generics.visitor", "sun.reflect.misc", "sun.rmi.log", "sun.rmi.registry", "sun.rmi.rmic", "sun.rmi.rmic.iiop", "sun.rmi.rmic.newrmic", "sun.rmi.rmic.newrmic.jrmp", "sun.rmi.runtime", "sun.rmi.server", "sun.rmi.transport", "sun.rmi.transport.proxy", "sun.rmi.transport.tcp", "sun.security.acl", "sun.security.action", "sun.security.ec", "sun.security.internal.interfaces", "sun.security.internal.spec", "sun.security.jca", "sun.security.jgss", "sun.security.jgss.krb5", "sun.security.jgss.spi", "sun.security.jgss.spnego", "sun.security.jgss.wrapper", "sun.security.krb5", "sun.security.krb5.internal", "sun.security.krb5.internal.ccache", "sun.security.krb5.internal.crypto", "sun.security.krb5.internal.crypto.dk", "sun.security.krb5.internal.ktab", "sun.security.krb5.internal.rcache", "sun.security.krb5.internal.tools", "sun.security.krb5.internal.util", "sun.security.mscapi", "sun.security.pkcs", "sun.security.pkcs10", "sun.security.pkcs11", "sun.security.pkcs11.wrapper", "sun.security.pkcs12", "sun.security.provider", "sun.security.provider.certpath", "sun.security.provider.certpath.ldap", "sun.security.provider.certpath.ssl", "sun.security.rsa", "sun.security.smartcardio", "sun.security.ssl", "sun.security.ssl.krb5", "sun.security.timestamp", "sun.security.tools", "sun.security.tools.jarsigner", "sun.security.tools.keytool", "sun.security.tools.policytool", "sun.security.util", "sun.security.validator", "sun.security.x509", "sun.swing", "sun.swing.icon", "sun.swing.plaf", "sun.swing.plaf.synth", "sun.swing.plaf.windows", "sun.swing.table", "sun.swing.text", "sun.swing.text.html", "sun.text", "sun.text.bidi", "sun.text.normalizer", "sun.text.resources", "sun.text.resources.en", "sun.tools.asm", "sun.tools.attach", "sun.tools.jar", "sun.tools.jar.resources", "sun.tools.java", "sun.tools.javac", "sun.tools.jcmd", "sun.tools.jconsole", "sun.tools.jconsole.inspector", "sun.tools.jinfo", "sun.tools.jmap", "sun.tools.jps", "sun.tools.jstack", "sun.tools.jstat", 
"sun.tools.jstatd", "sun.tools.native2ascii", "sun.tools.native2ascii.resources", "sun.tools.serialver", "sun.tools.tree", "sun.tools.util", "sun.tracing", "sun.tracing.dtrace", "sun.usagetracker", "sun.util", "sun.util.calendar", "sun.util.cldr", "sun.util.locale", "sun.util.locale.provider", "sun.util.logging", "sun.util.logging.resources", "sun.util.resources", "sun.util.resources.en", "sun.util.spi", "sun.util.xml" }; } }
package us.corenetwork.moblimiter;
import org.bukkit.Chunk;
import org.bukkit.Location;
import org.bukkit.Material;
import org.bukkit.World;
import org.bukkit.entity.LivingEntity;
import org.bukkit.entity.Entity;
import org.bukkit.entity.EntityType;
import org.bukkit.entity.Player;
import us.corenetwork.moblimiter.CreatureGroupSettings;
import org.bukkit.event.entity.CreatureSpawnEvent.SpawnReason;
import org.bukkit.metadata.FixedMetadataValue;
import java.util.ArrayDeque;
import java.util.HashMap;

/**
 * Static helpers for the MobLimiter plugin: collecting creatures around a chunk,
 * purging over-limit creatures from a chunk, and deciding whether a spawn should
 * be cancelled based on distance and per-type/per-group population limits.
 */
public class CreatureUtil {

    /**
     * Collects every {@link LivingEntity} in the square of loaded chunks centered on
     * {@code start}, extending {@code VIEW_DISTANCE_CHUNKS} chunks in each direction.
     *
     * @param start center chunk of the search square
     * @return all living entities found in the loaded chunks of that square
     */
    public static Iterable<LivingEntity> getCreaturesInRange(Chunk start) {
        ArrayDeque<LivingEntity> creatures = new ArrayDeque<LivingEntity>();
        World world = start.getWorld();
        // Radius (in chunks) comes from plugin configuration.
        int viewDistance = Settings.getInt(Setting.VIEW_DISTANCE_CHUNKS);
        for (int x = -viewDistance; x <= viewDistance; x++) {
            for (int z = -viewDistance; z <= viewDistance; z++) {
                int newX = start.getX() + x;
                int newZ = start.getZ() + z;
                // Only inspect chunks that are already loaded; never force-load.
                if (world.isChunkLoaded(newX, newZ)) {
                    Chunk chunk = world.getChunkAt(newX, newZ);
                    for (Entity e : chunk.getEntities()) {
                        if (e instanceof LivingEntity) creatures.add((LivingEntity) e);
                    }
                }
            }
        }
        return creatures;
    }

    /**
     * Removes living entities from {@code chunk} that exceed their configured
     * per-creature or per-group chunk limits. Entities are counted in iteration
     * order; once a limit is reached, each further matching entity is removed.
     *
     * @param chunk the chunk to purge
     */
    public static void purgeCreatures(Chunk chunk) {
        HashMap<CreatureGroupSettings, Integer> perGroupCounts = new HashMap<CreatureGroupSettings, Integer>();
        HashMap<CreatureSettings, Integer> perCreatureCounts = new HashMap<CreatureSettings, Integer>();
        int count = 0;
        // Clone the array so Entity.remove() during iteration cannot affect it.
        Entity[] entities = chunk.getEntities().clone();
        for (Entity e : entities) {
            if (e instanceof LivingEntity) {
                LivingEntity c = (LivingEntity) e;
                // Entities with no configured settings are never purged.
                CreatureSettings creatureSettings = CreatureGroupSettings.getAnyCreatureSettings(c.getType());
                if (creatureSettings == null) continue;
                Integer creatureCount = perCreatureCounts.get(creatureSettings);
                if (creatureCount == null) creatureCount = 0;
                Integer groupCount = perGroupCounts.get(creatureSettings.getGroup());
                if (groupCount == null) groupCount = 0;
                // Group limit is only enforced when non-negative (negative presumably
                // means "unlimited"). NOTE(review): the per-creature chunk limit below
                // has no matching ">= 0" guard — if getChunkLimit() can return a
                // negative "unlimited" value, every such creature would be purged;
                // confirm against CreatureSettings.
                if ( (creatureSettings.getGroup().getChunkLimit() >= 0 && groupCount >= creatureSettings.getGroup().getChunkLimit()) || creatureCount >= creatureSettings.getChunkLimit()) {
                    e.remove();
                    count++;
                } else {
                    // Under the limits: keep the entity and count it.
                    perCreatureCounts.put(creatureSettings, creatureCount + 1);
                    perGroupCounts.put(creatureSettings.getGroup(), groupCount + 1);
                }
            }
        }
        MLLog.debug("purged " + count + " from chunk " + chunk.getX() + "," + chunk.getZ());
    }

    /**
     * Returns whether {@code food} is a valid breeding item for the given entity type.
     * Hard-coded per vanilla Minecraft breeding rules (legacy pre-1.13 Material names).
     *
     * @param type entity type being fed
     * @param food item held by the player
     * @return true if the item breeds that entity type; false for unknown types
     */
    public static boolean isBreedingFood(EntityType type, Material food) {
        switch (type) {
            case SHEEP:
            case COW:
                return food == Material.WHEAT;
            case PIG:
                return food == Material.CARROT_ITEM;
            case CHICKEN:
                return food == Material.SEEDS || food == Material.PUMPKIN_SEEDS || food == Material.NETHER_STALK || food == Material.MELON_SEEDS;
            case HORSE:
                return food == Material.GOLDEN_APPLE || food == Material.GOLDEN_CARROT;
            case OCELOT:
                return food == Material.RAW_FISH;
            case WOLF:
                return food == Material.RAW_CHICKEN || food == Material.COOKED_CHICKEN || food == Material.RAW_BEEF || food == Material.COOKED_BEEF || food == Material.ROTTEN_FLESH || food == Material.PORK || food == Material.GRILLED_PORK;
            default:
                return false;
        }
    }

    /**
     * Checks player-distance constraints for a spawn at {@code loc}. Only applies to
     * NATURAL, CUSTOM and DEFAULT spawn reasons of range-limited creature types.
     *
     * @param type   entity type being spawned
     * @param loc    proposed spawn location
     * @param reason the spawn reason
     * @return {@code TOO_FAR} / {@code TOO_CLOSE} when the nearest player violates the
     *         configured min/max distance or max height difference, otherwise {@code OK}
     */
    public static LimitStatus getSpawnDistanceLimitStatus(EntityType type, Location loc, SpawnReason reason) {
        CreatureSettings creatureSettings = CreatureGroupSettings.getAnyCreatureSettings(type);
        if (creatureSettings == null) return LimitStatus.OK;
        if (!creatureSettings.isRangeLimited()) return LimitStatus.OK;
        if (reason == SpawnReason.NATURAL || reason == SpawnReason.CUSTOM || reason == SpawnReason.DEFAULT) {
            // find closest player
            // 9999 acts as "no player found" sentinel; with no players in the world
            // both values stay 9999 and the TOO_FAR branch fires.
            double closestDist = 9999;
            double closestHeight = 9999;
            for (Player p : loc.getWorld().getPlayers()) {
                closestDist = Math.min(closestDist, p.getLocation().distance(loc));
                // Vertical distance is tracked separately from full 3D distance.
                closestHeight = Math.min(closestHeight, Math.abs(p.getLocation().getY() - loc.getY()));
            }
            if (closestHeight > creatureSettings.getSpawnHeightMax() || closestDist > creatureSettings.getSpawnDistanceMax()) {
                MLLog.debug("Cancelling " + reason + " Spawn of " + type + " because of " + LimitStatus.TOO_FAR + " " + Math.round(closestDist));
                return LimitStatus.TOO_FAR;
            }
            if (closestDist < creatureSettings.getSpawnDistanceMin()) {
                MLLog.debug("Cancelling " + reason + " Spawn of " + type + " because of " + LimitStatus.TOO_CLOSE + " " + Math.round(closestDist));
                return LimitStatus.TOO_CLOSE;
            }
        }
        return LimitStatus.OK;
    }

    /**
     * Marks an entity as spawner-born via the "ML-Spawner" metadata key, so later
     * limit checks can distinguish spawner mobs from naturally spawned ones.
     *
     * @param entity the freshly spawner-spawned entity to tag
     */
    public static void flagSpawnerMob(LivingEntity entity) {
        entity.setMetadata("ML-Spawner", new FixedMetadataValue(MobLimiter.instance, "true"));
    }

    /**
     * Checks view-distance population limits for a proposed spawn in {@code chunk}.
     * Spawner spawns are compared only against other spawner-flagged mobs of the same
     * type; all other spawns are compared against non-spawner mobs, both per-group
     * and per-type.
     *
     * @param type   entity type being spawned
     * @param chunk  chunk where the spawn would occur
     * @param reason the spawn reason
     * @return {@code TOO_MANY_ONE} / {@code TOO_MANY_ALL} when a limit is reached,
     *         otherwise {@code OK}
     */
    public static LimitStatus getViewDistanceLimitStatus(EntityType type, Chunk chunk, SpawnReason reason) {
        // No settings configured for this type -> no limit to enforce.
        CreatureGroupSettings groupSettings = CreatureGroupSettings.getGroupSettings(type);
        if (groupSettings == null) return LimitStatus.OK;
        CreatureSettings creatureSettings = CreatureGroupSettings.getAnyCreatureSettings(type);
        if (creatureSettings == null) return LimitStatus.OK;
        int oneCountViewDistance = 0;
        int allCountViewDistance = 0;
        Iterable<LivingEntity> viewDistanceCreatures = getCreaturesInRange(chunk);
        for (LivingEntity c : viewDistanceCreatures) {
            // compare spawner mobs against spawner limit and count only spawner mobs
            if (reason == SpawnReason.SPAWNER) {
                if (c.getType() == type && c.hasMetadata("ML-Spawner")) {
                    oneCountViewDistance++;
                    // ">=" means the spawn is refused once the existing population
                    // reaches the limit.
                    if (oneCountViewDistance >= creatureSettings.getSpawnerLimit()) {
                        MLLog.debug("Cancelling " + reason + " Spawn of " + type + " because of too many of type");
                        return LimitStatus.TOO_MANY_ONE;
                    }
                }
            }
            // ignore spawner mobs on other kinds of spawn counting
            else if (creatureSettings.IsSameGroup(c.getType()) && !c.hasMetadata("ML-Spawner")) {
                // Group-wide limit; negative limit presumably means unlimited.
                if (groupSettings.getViewDistanceLimit() >= 0) {
                    allCountViewDistance++;
                    if (allCountViewDistance >= groupSettings.getViewDistanceLimit()) {
                        MLLog.debug("Cancelling " + reason + " Spawn of " + type + " because of too many in group");
                        return LimitStatus.TOO_MANY_ALL;
                    }
                }
                // Per-type limit within the same scan.
                if (c.getType() == type) {
                    oneCountViewDistance++;
                    if (oneCountViewDistance >= creatureSettings.getViewDistanceLimit()) {
                        MLLog.debug("Cancelling " + reason + " Spawn of " + type + " because of too many of type");
                        return LimitStatus.TOO_MANY_ONE;
                    }
                }
            }
        }
        return LimitStatus.OK;
    }

    /** Outcome of a spawn-limit check; any value other than OK cancels the spawn. */
    public static enum LimitStatus {
        OK, TOO_MANY_ONE, TOO_MANY_ALL, TOO_FAR, TOO_CLOSE,
    }
}
// Copyright 2018 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. /** * DataUploadResult.java * * This file was auto-generated from WSDL * by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter. */ package com.google.api.ads.adwords.axis.v201809.rm; /** * A class represents the data upload result for CRM based lists. */ public class DataUploadResult implements java.io.Serializable { /* Indicates status of the upload operation. * Upload operation is triggered when {@link MutateMembersOperand#removeAll * removeAll} is not set * to true and {@link Operator operator} is "ADD" or * "REMOVE". */ private com.google.api.ads.adwords.axis.v201809.rm.UserListUploadStatus uploadStatus; /* Indicates status of the remove-all operation. * Remove-all operation is triggered when {@link MutateMembersOperand#removeAll * removeAll} is set * to true and {@link Operator operator} is "REMOVE". 
*/ private com.google.api.ads.adwords.axis.v201809.rm.UserListUploadStatus removeAllStatus; public DataUploadResult() { } public DataUploadResult( com.google.api.ads.adwords.axis.v201809.rm.UserListUploadStatus uploadStatus, com.google.api.ads.adwords.axis.v201809.rm.UserListUploadStatus removeAllStatus) { this.uploadStatus = uploadStatus; this.removeAllStatus = removeAllStatus; } @Override public String toString() { return com.google.common.base.MoreObjects.toStringHelper(this.getClass()) .omitNullValues() .add("removeAllStatus", getRemoveAllStatus()) .add("uploadStatus", getUploadStatus()) .toString(); } /** * Gets the uploadStatus value for this DataUploadResult. * * @return uploadStatus * Indicates status of the upload operation. * Upload operation is triggered when {@link MutateMembersOperand#removeAll * removeAll} is not set * to true and {@link Operator operator} is "ADD" or * "REMOVE". */ public com.google.api.ads.adwords.axis.v201809.rm.UserListUploadStatus getUploadStatus() { return uploadStatus; } /** * Sets the uploadStatus value for this DataUploadResult. * * @param uploadStatus * Indicates status of the upload operation. * Upload operation is triggered when {@link MutateMembersOperand#removeAll * removeAll} is not set * to true and {@link Operator operator} is "ADD" or * "REMOVE". */ public void setUploadStatus(com.google.api.ads.adwords.axis.v201809.rm.UserListUploadStatus uploadStatus) { this.uploadStatus = uploadStatus; } /** * Gets the removeAllStatus value for this DataUploadResult. * * @return removeAllStatus * Indicates status of the remove-all operation. * Remove-all operation is triggered when {@link MutateMembersOperand#removeAll * removeAll} is set * to true and {@link Operator operator} is "REMOVE". */ public com.google.api.ads.adwords.axis.v201809.rm.UserListUploadStatus getRemoveAllStatus() { return removeAllStatus; } /** * Sets the removeAllStatus value for this DataUploadResult. 
* * @param removeAllStatus * Indicates status of the remove-all operation. * Remove-all operation is triggered when {@link MutateMembersOperand#removeAll * removeAll} is set * to true and {@link Operator operator} is "REMOVE". */ public void setRemoveAllStatus(com.google.api.ads.adwords.axis.v201809.rm.UserListUploadStatus removeAllStatus) { this.removeAllStatus = removeAllStatus; } private java.lang.Object __equalsCalc = null; public synchronized boolean equals(java.lang.Object obj) { if (!(obj instanceof DataUploadResult)) return false; DataUploadResult other = (DataUploadResult) obj; if (obj == null) return false; if (this == obj) return true; if (__equalsCalc != null) { return (__equalsCalc == obj); } __equalsCalc = obj; boolean _equals; _equals = true && ((this.uploadStatus==null && other.getUploadStatus()==null) || (this.uploadStatus!=null && this.uploadStatus.equals(other.getUploadStatus()))) && ((this.removeAllStatus==null && other.getRemoveAllStatus()==null) || (this.removeAllStatus!=null && this.removeAllStatus.equals(other.getRemoveAllStatus()))); __equalsCalc = null; return _equals; } private boolean __hashCodeCalc = false; public synchronized int hashCode() { if (__hashCodeCalc) { return 0; } __hashCodeCalc = true; int _hashCode = 1; if (getUploadStatus() != null) { _hashCode += getUploadStatus().hashCode(); } if (getRemoveAllStatus() != null) { _hashCode += getRemoveAllStatus().hashCode(); } __hashCodeCalc = false; return _hashCode; } // Type metadata private static org.apache.axis.description.TypeDesc typeDesc = new org.apache.axis.description.TypeDesc(DataUploadResult.class, true); static { typeDesc.setXmlType(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/rm/v201809", "DataUploadResult")); org.apache.axis.description.ElementDesc elemField = new org.apache.axis.description.ElementDesc(); elemField.setFieldName("uploadStatus"); elemField.setXmlName(new 
javax.xml.namespace.QName("https://adwords.google.com/api/adwords/rm/v201809", "uploadStatus")); elemField.setXmlType(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/rm/v201809", "UserListUploadStatus")); elemField.setMinOccurs(0); elemField.setNillable(false); typeDesc.addFieldDesc(elemField); elemField = new org.apache.axis.description.ElementDesc(); elemField.setFieldName("removeAllStatus"); elemField.setXmlName(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/rm/v201809", "removeAllStatus")); elemField.setXmlType(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/rm/v201809", "UserListUploadStatus")); elemField.setMinOccurs(0); elemField.setNillable(false); typeDesc.addFieldDesc(elemField); } /** * Return type metadata object */ public static org.apache.axis.description.TypeDesc getTypeDesc() { return typeDesc; } /** * Get Custom Serializer */ public static org.apache.axis.encoding.Serializer getSerializer( java.lang.String mechType, java.lang.Class _javaType, javax.xml.namespace.QName _xmlType) { return new org.apache.axis.encoding.ser.BeanSerializer( _javaType, _xmlType, typeDesc); } /** * Get Custom Deserializer */ public static org.apache.axis.encoding.Deserializer getDeserializer( java.lang.String mechType, java.lang.Class _javaType, javax.xml.namespace.QName _xmlType) { return new org.apache.axis.encoding.ser.BeanDeserializer( _javaType, _xmlType, typeDesc); } }
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.

package com.azure.resourcemanager.cdn.fluent;

import com.azure.core.annotation.ReturnType;
import com.azure.core.annotation.ServiceMethod;
import com.azure.core.http.rest.PagedFlux;
import com.azure.core.http.rest.PagedIterable;
import com.azure.core.http.rest.Response;
import com.azure.core.management.polling.PollResult;
import com.azure.core.util.Context;
import com.azure.core.util.polling.PollerFlux;
import com.azure.core.util.polling.SyncPoller;
import com.azure.resourcemanager.cdn.fluent.models.RouteInner;
import com.azure.resourcemanager.cdn.models.RouteUpdateParameters;
import java.nio.ByteBuffer;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

/** An instance of this class provides access to all the operations defined in RoutesClient. */
public interface RoutesClient {
    /**
     * Lists all of the existing routes within a profile.
     *
     * @param resourceGroupName Name of the Resource group within the Azure subscription.
     * @param profileName Name of the Azure Front Door Standard or Azure Front Door Premium profile which is unique
     *     within the resource group.
     * @param endpointName Name of the endpoint under the profile which is unique globally.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return result of the request to list routes as paginated response with {@link PagedFlux}.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    PagedFlux<RouteInner> listByEndpointAsync(String resourceGroupName, String profileName, String endpointName);

    /**
     * Lists all of the existing routes within a profile.
     *
     * @param resourceGroupName Name of the Resource group within the Azure subscription.
     * @param profileName Name of the Azure Front Door Standard or Azure Front Door Premium profile which is unique
     *     within the resource group.
     * @param endpointName Name of the endpoint under the profile which is unique globally.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return result of the request to list routes as paginated response with {@link PagedIterable}.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    PagedIterable<RouteInner> listByEndpoint(String resourceGroupName, String profileName, String endpointName);

    /**
     * Lists all of the existing routes within a profile.
     *
     * @param resourceGroupName Name of the Resource group within the Azure subscription.
     * @param profileName Name of the Azure Front Door Standard or Azure Front Door Premium profile which is unique
     *     within the resource group.
     * @param endpointName Name of the endpoint under the profile which is unique globally.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return result of the request to list routes as paginated response with {@link PagedIterable}.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    PagedIterable<RouteInner> listByEndpoint(
        String resourceGroupName, String profileName, String endpointName, Context context);

    /**
     * Gets an existing route with the specified route name under the specified subscription, resource group, profile,
     * and AzureFrontDoor endpoint.
* * @param resourceGroupName Name of the Resource group within the Azure subscription. * @param profileName Name of the Azure Front Door Standard or Azure Front Door Premium profile which is unique * within the resource group. * @param endpointName Name of the endpoint under the profile which is unique globally. * @param routeName Name of the routing rule. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return an existing route with the specified route name under the specified subscription, resource group, * profile, and AzureFrontDoor endpoint along with {@link Response} on successful completion of {@link Mono}. */ @ServiceMethod(returns = ReturnType.SINGLE) Mono<Response<RouteInner>> getWithResponseAsync( String resourceGroupName, String profileName, String endpointName, String routeName); /** * Gets an existing route with the specified route name under the specified subscription, resource group, profile, * and AzureFrontDoor endpoint. * * @param resourceGroupName Name of the Resource group within the Azure subscription. * @param profileName Name of the Azure Front Door Standard or Azure Front Door Premium profile which is unique * within the resource group. * @param endpointName Name of the endpoint under the profile which is unique globally. * @param routeName Name of the routing rule. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. 
* @return an existing route with the specified route name under the specified subscription, resource group, * profile, and AzureFrontDoor endpoint on successful completion of {@link Mono}. */ @ServiceMethod(returns = ReturnType.SINGLE) Mono<RouteInner> getAsync(String resourceGroupName, String profileName, String endpointName, String routeName); /** * Gets an existing route with the specified route name under the specified subscription, resource group, profile, * and AzureFrontDoor endpoint. * * @param resourceGroupName Name of the Resource group within the Azure subscription. * @param profileName Name of the Azure Front Door Standard or Azure Front Door Premium profile which is unique * within the resource group. * @param endpointName Name of the endpoint under the profile which is unique globally. * @param routeName Name of the routing rule. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return an existing route with the specified route name under the specified subscription, resource group, * profile, and AzureFrontDoor endpoint. */ @ServiceMethod(returns = ReturnType.SINGLE) RouteInner get(String resourceGroupName, String profileName, String endpointName, String routeName); /** * Gets an existing route with the specified route name under the specified subscription, resource group, profile, * and AzureFrontDoor endpoint. * * @param resourceGroupName Name of the Resource group within the Azure subscription. * @param profileName Name of the Azure Front Door Standard or Azure Front Door Premium profile which is unique * within the resource group. * @param endpointName Name of the endpoint under the profile which is unique globally. * @param routeName Name of the routing rule. 
* @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return an existing route with the specified route name under the specified subscription, resource group, * profile, and AzureFrontDoor endpoint along with {@link Response}. */ @ServiceMethod(returns = ReturnType.SINGLE) Response<RouteInner> getWithResponse( String resourceGroupName, String profileName, String endpointName, String routeName, Context context); /** * Creates a new route with the specified route name under the specified subscription, resource group, profile, and * AzureFrontDoor endpoint. * * @param resourceGroupName Name of the Resource group within the Azure subscription. * @param profileName Name of the Azure Front Door Standard or Azure Front Door Premium profile which is unique * within the resource group. * @param endpointName Name of the endpoint under the profile which is unique globally. * @param routeName Name of the routing rule. * @param route Route properties. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return friendly Routes name mapping to the any Routes or secret related information along with {@link Response} * on successful completion of {@link Mono}. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) Mono<Response<Flux<ByteBuffer>>> createWithResponseAsync( String resourceGroupName, String profileName, String endpointName, String routeName, RouteInner route); /** * Creates a new route with the specified route name under the specified subscription, resource group, profile, and * AzureFrontDoor endpoint. * * @param resourceGroupName Name of the Resource group within the Azure subscription. * @param profileName Name of the Azure Front Door Standard or Azure Front Door Premium profile which is unique * within the resource group. * @param endpointName Name of the endpoint under the profile which is unique globally. * @param routeName Name of the routing rule. * @param route Route properties. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the {@link PollerFlux} for polling of friendly Routes name mapping to the any Routes or secret related * information. */ @ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION) PollerFlux<PollResult<RouteInner>, RouteInner> beginCreateAsync( String resourceGroupName, String profileName, String endpointName, String routeName, RouteInner route); /** * Creates a new route with the specified route name under the specified subscription, resource group, profile, and * AzureFrontDoor endpoint. * * @param resourceGroupName Name of the Resource group within the Azure subscription. * @param profileName Name of the Azure Front Door Standard or Azure Front Door Premium profile which is unique * within the resource group. * @param endpointName Name of the endpoint under the profile which is unique globally. * @param routeName Name of the routing rule. * @param route Route properties. 
* @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the {@link SyncPoller} for polling of friendly Routes name mapping to the any Routes or secret related * information. */ @ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION) SyncPoller<PollResult<RouteInner>, RouteInner> beginCreate( String resourceGroupName, String profileName, String endpointName, String routeName, RouteInner route); /** * Creates a new route with the specified route name under the specified subscription, resource group, profile, and * AzureFrontDoor endpoint. * * @param resourceGroupName Name of the Resource group within the Azure subscription. * @param profileName Name of the Azure Front Door Standard or Azure Front Door Premium profile which is unique * within the resource group. * @param endpointName Name of the endpoint under the profile which is unique globally. * @param routeName Name of the routing rule. * @param route Route properties. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the {@link SyncPoller} for polling of friendly Routes name mapping to the any Routes or secret related * information. 
*/ @ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION) SyncPoller<PollResult<RouteInner>, RouteInner> beginCreate( String resourceGroupName, String profileName, String endpointName, String routeName, RouteInner route, Context context); /** * Creates a new route with the specified route name under the specified subscription, resource group, profile, and * AzureFrontDoor endpoint. * * @param resourceGroupName Name of the Resource group within the Azure subscription. * @param profileName Name of the Azure Front Door Standard or Azure Front Door Premium profile which is unique * within the resource group. * @param endpointName Name of the endpoint under the profile which is unique globally. * @param routeName Name of the routing rule. * @param route Route properties. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return friendly Routes name mapping to the any Routes or secret related information on successful completion of * {@link Mono}. */ @ServiceMethod(returns = ReturnType.SINGLE) Mono<RouteInner> createAsync( String resourceGroupName, String profileName, String endpointName, String routeName, RouteInner route); /** * Creates a new route with the specified route name under the specified subscription, resource group, profile, and * AzureFrontDoor endpoint. * * @param resourceGroupName Name of the Resource group within the Azure subscription. * @param profileName Name of the Azure Front Door Standard or Azure Front Door Premium profile which is unique * within the resource group. * @param endpointName Name of the endpoint under the profile which is unique globally. * @param routeName Name of the routing rule. * @param route Route properties. * @throws IllegalArgumentException thrown if parameters fail the validation. 
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return friendly Routes name mapping to the any Routes or secret related information. */ @ServiceMethod(returns = ReturnType.SINGLE) RouteInner create( String resourceGroupName, String profileName, String endpointName, String routeName, RouteInner route); /** * Creates a new route with the specified route name under the specified subscription, resource group, profile, and * AzureFrontDoor endpoint. * * @param resourceGroupName Name of the Resource group within the Azure subscription. * @param profileName Name of the Azure Front Door Standard or Azure Front Door Premium profile which is unique * within the resource group. * @param endpointName Name of the endpoint under the profile which is unique globally. * @param routeName Name of the routing rule. * @param route Route properties. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return friendly Routes name mapping to the any Routes or secret related information. */ @ServiceMethod(returns = ReturnType.SINGLE) RouteInner create( String resourceGroupName, String profileName, String endpointName, String routeName, RouteInner route, Context context); /** * Updates an existing route with the specified route name under the specified subscription, resource group, * profile, and AzureFrontDoor endpoint. * * @param resourceGroupName Name of the Resource group within the Azure subscription. 
* @param profileName Name of the Azure Front Door Standard or Azure Front Door Premium profile which is unique * within the resource group. * @param endpointName Name of the endpoint under the profile which is unique globally. * @param routeName Name of the routing rule. * @param routeUpdateProperties Route update properties. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return friendly Routes name mapping to the any Routes or secret related information along with {@link Response} * on successful completion of {@link Mono}. */ @ServiceMethod(returns = ReturnType.SINGLE) Mono<Response<Flux<ByteBuffer>>> updateWithResponseAsync( String resourceGroupName, String profileName, String endpointName, String routeName, RouteUpdateParameters routeUpdateProperties); /** * Updates an existing route with the specified route name under the specified subscription, resource group, * profile, and AzureFrontDoor endpoint. * * @param resourceGroupName Name of the Resource group within the Azure subscription. * @param profileName Name of the Azure Front Door Standard or Azure Front Door Premium profile which is unique * within the resource group. * @param endpointName Name of the endpoint under the profile which is unique globally. * @param routeName Name of the routing rule. * @param routeUpdateProperties Route update properties. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the {@link PollerFlux} for polling of friendly Routes name mapping to the any Routes or secret related * information. 
*/ @ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION) PollerFlux<PollResult<RouteInner>, RouteInner> beginUpdateAsync( String resourceGroupName, String profileName, String endpointName, String routeName, RouteUpdateParameters routeUpdateProperties); /** * Updates an existing route with the specified route name under the specified subscription, resource group, * profile, and AzureFrontDoor endpoint. * * @param resourceGroupName Name of the Resource group within the Azure subscription. * @param profileName Name of the Azure Front Door Standard or Azure Front Door Premium profile which is unique * within the resource group. * @param endpointName Name of the endpoint under the profile which is unique globally. * @param routeName Name of the routing rule. * @param routeUpdateProperties Route update properties. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the {@link SyncPoller} for polling of friendly Routes name mapping to the any Routes or secret related * information. */ @ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION) SyncPoller<PollResult<RouteInner>, RouteInner> beginUpdate( String resourceGroupName, String profileName, String endpointName, String routeName, RouteUpdateParameters routeUpdateProperties); /** * Updates an existing route with the specified route name under the specified subscription, resource group, * profile, and AzureFrontDoor endpoint. * * @param resourceGroupName Name of the Resource group within the Azure subscription. * @param profileName Name of the Azure Front Door Standard or Azure Front Door Premium profile which is unique * within the resource group. * @param endpointName Name of the endpoint under the profile which is unique globally. 
* @param routeName Name of the routing rule. * @param routeUpdateProperties Route update properties. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the {@link SyncPoller} for polling of friendly Routes name mapping to the any Routes or secret related * information. */ @ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION) SyncPoller<PollResult<RouteInner>, RouteInner> beginUpdate( String resourceGroupName, String profileName, String endpointName, String routeName, RouteUpdateParameters routeUpdateProperties, Context context); /** * Updates an existing route with the specified route name under the specified subscription, resource group, * profile, and AzureFrontDoor endpoint. * * @param resourceGroupName Name of the Resource group within the Azure subscription. * @param profileName Name of the Azure Front Door Standard or Azure Front Door Premium profile which is unique * within the resource group. * @param endpointName Name of the endpoint under the profile which is unique globally. * @param routeName Name of the routing rule. * @param routeUpdateProperties Route update properties. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return friendly Routes name mapping to the any Routes or secret related information on successful completion of * {@link Mono}. 
 */
    @ServiceMethod(returns = ReturnType.SINGLE)
    Mono<RouteInner> updateAsync(
        String resourceGroupName,
        String profileName,
        String endpointName,
        String routeName,
        RouteUpdateParameters routeUpdateProperties);

    /**
     * Updates an existing route with the specified route name under the specified subscription, resource group,
     * profile, and AzureFrontDoor endpoint.
     *
     * @param resourceGroupName Name of the Resource group within the Azure subscription.
     * @param profileName Name of the Azure Front Door Standard or Azure Front Door Premium profile which is unique
     *     within the resource group.
     * @param endpointName Name of the endpoint under the profile which is unique globally.
     * @param routeName Name of the routing rule.
     * @param routeUpdateProperties Route update properties.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return friendly Routes name mapping to any Routes or secret related information.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    RouteInner update(
        String resourceGroupName,
        String profileName,
        String endpointName,
        String routeName,
        RouteUpdateParameters routeUpdateProperties);

    /**
     * Updates an existing route with the specified route name under the specified subscription, resource group,
     * profile, and AzureFrontDoor endpoint.
     *
     * @param resourceGroupName Name of the Resource group within the Azure subscription.
     * @param profileName Name of the Azure Front Door Standard or Azure Front Door Premium profile which is unique
     *     within the resource group.
     * @param endpointName Name of the endpoint under the profile which is unique globally.
     * @param routeName Name of the routing rule.
     * @param routeUpdateProperties Route update properties.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return friendly Routes name mapping to any Routes or secret related information.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    RouteInner update(
        String resourceGroupName,
        String profileName,
        String endpointName,
        String routeName,
        RouteUpdateParameters routeUpdateProperties,
        Context context);

    /**
     * Deletes an existing route with the specified route name under the specified subscription, resource group,
     * profile, and AzureFrontDoor endpoint.
     *
     * @param resourceGroupName Name of the Resource group within the Azure subscription.
     * @param profileName Name of the Azure Front Door Standard or Azure Front Door Premium profile which is unique
     *     within the resource group.
     * @param endpointName Name of the endpoint under the profile which is unique globally.
     * @param routeName Name of the routing rule.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the {@link Response} on successful completion of {@link Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    Mono<Response<Flux<ByteBuffer>>> deleteWithResponseAsync(
        String resourceGroupName, String profileName, String endpointName, String routeName);

    /**
     * Deletes an existing route with the specified route name under the specified subscription, resource group,
     * profile, and AzureFrontDoor endpoint.
     *
     * @param resourceGroupName Name of the Resource group within the Azure subscription.
     * @param profileName Name of the Azure Front Door Standard or Azure Front Door Premium profile which is unique
     *     within the resource group.
     * @param endpointName Name of the endpoint under the profile which is unique globally.
     * @param routeName Name of the routing rule.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the {@link PollerFlux} for polling of long-running operation.
     */
    @ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION)
    PollerFlux<PollResult<Void>, Void> beginDeleteAsync(
        String resourceGroupName, String profileName, String endpointName, String routeName);

    /**
     * Deletes an existing route with the specified route name under the specified subscription, resource group,
     * profile, and AzureFrontDoor endpoint.
     *
     * @param resourceGroupName Name of the Resource group within the Azure subscription.
     * @param profileName Name of the Azure Front Door Standard or Azure Front Door Premium profile which is unique
     *     within the resource group.
     * @param endpointName Name of the endpoint under the profile which is unique globally.
     * @param routeName Name of the routing rule.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the {@link SyncPoller} for polling of long-running operation.
     */
    @ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION)
    SyncPoller<PollResult<Void>, Void> beginDelete(
        String resourceGroupName, String profileName, String endpointName, String routeName);

    /**
     * Deletes an existing route with the specified route name under the specified subscription, resource group,
     * profile, and AzureFrontDoor endpoint.
     *
     * @param resourceGroupName Name of the Resource group within the Azure subscription.
     * @param profileName Name of the Azure Front Door Standard or Azure Front Door Premium profile which is unique
     *     within the resource group.
     * @param endpointName Name of the endpoint under the profile which is unique globally.
     * @param routeName Name of the routing rule.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the {@link SyncPoller} for polling of long-running operation.
     */
    @ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION)
    SyncPoller<PollResult<Void>, Void> beginDelete(
        String resourceGroupName, String profileName, String endpointName, String routeName, Context context);

    /**
     * Deletes an existing route with the specified route name under the specified subscription, resource group,
     * profile, and AzureFrontDoor endpoint.
     *
     * @param resourceGroupName Name of the Resource group within the Azure subscription.
     * @param profileName Name of the Azure Front Door Standard or Azure Front Door Premium profile which is unique
     *     within the resource group.
     * @param endpointName Name of the endpoint under the profile which is unique globally.
     * @param routeName Name of the routing rule.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return A {@link Mono} that completes when a successful response is received.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    Mono<Void> deleteAsync(String resourceGroupName, String profileName, String endpointName, String routeName);

    /**
     * Deletes an existing route with the specified route name under the specified subscription, resource group,
     * profile, and AzureFrontDoor endpoint.
     *
     * @param resourceGroupName Name of the Resource group within the Azure subscription.
     * @param profileName Name of the Azure Front Door Standard or Azure Front Door Premium profile which is unique
     *     within the resource group.
     * @param endpointName Name of the endpoint under the profile which is unique globally.
     * @param routeName Name of the routing rule.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    void delete(String resourceGroupName, String profileName, String endpointName, String routeName);

    /**
     * Deletes an existing route with the specified route name under the specified subscription, resource group,
     * profile, and AzureFrontDoor endpoint.
     *
     * @param resourceGroupName Name of the Resource group within the Azure subscription.
     * @param profileName Name of the Azure Front Door Standard or Azure Front Door Premium profile which is unique
     *     within the resource group.
     * @param endpointName Name of the endpoint under the profile which is unique globally.
     * @param routeName Name of the routing rule.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    void delete(String resourceGroupName, String profileName, String endpointName, String routeName, Context context);
}
package converter; import core.MetadataParser; import cz.zcu.kiv.signal.ChannelInfo; import cz.zcu.kiv.signal.EEGMarker; import cz.zcu.kiv.signal.VhdrReader; import odml.core.Reader; import org.g_node.nix.Block; import org.g_node.nix.Property; import org.g_node.nix.Section; import org.g_node.nix.Value; import org.apache.log4j.*; import java.io.*; import java.util.*; /** * Created by ipsita on 23/6/16. */ public class MetadataParserImpl implements MetadataParser { final static Logger logger = Logger.getLogger(MetadataParserImpl.class); /** * Initialises the odML file reader * @param metadataFile : the metadata odML file to read * @return : the rootSextion of the the odML file */ public odml.core.Section initializeODMLReader(String metadataFile) throws Exception { logger.info("Entering initializeODMLReader"); Reader reader = new Reader(); odml.core.Section rootSection; InputStream inputstream = null; try { inputstream = new FileInputStream(metadataFile); rootSection = reader.load(inputstream, true); inputstream.close(); } catch (Exception e) { logger.error("Exception occurred while loading and reading odML input file. Please check if correct metadata file is provided "+e); inputstream = null; throw new Exception("Exception occurred while loading and reading odML input file. Please check if correct metadata file is provided ",e); } logger.info("leaving initializeODMLReader"); return rootSection; } /** * * This method takes in a nix file, a nix Block and a odml metadata file, parses the odml * metadata file and creates a metadata for the Block in the Nix file. 
* * @param metadataFile : File name of the metadata.xml odml file * @param block : Block whose metadata will be set * @param file : HDF5 (Specifically nix) file in which this metadata will be written */ public void setMetadata(String metadataFile, Block block, org.g_node.nix.File file, String headerFile, String markerFile, boolean metadataExists, boolean vhdrExists, boolean vmrkExists) throws Exception { logger.info("entering setMetadata"); // create section and add a property Section rootSectionMetadata = file.createSection("metadataSection", "metadata"); block.setMetadata(rootSectionMetadata); logger.info("created a metadataSection and added it to the block."); if(metadataExists){ odml.core.Section rootSection = initializeODMLReader(metadataFile); logger.info("rootSection initialized"); //for parsing if(rootSection!=null) { Vector<odml.core.Section> sectionVector = rootSection.getSections(); if (sectionVector != null && sectionVector.size() > 0) { setSection(rootSectionMetadata, sectionVector); } } } else{ logger.warn("no metadata.xml exists for this experiment."); } addHeaderAndMarkerInfo(rootSectionMetadata, headerFile, markerFile, vhdrExists, vmrkExists); logger.info("added header and marker file info"); rootSectionMetadata.setNull(); logger.info("leaving setMetadata method"); } /** * Supports adding of channel and marker information to the metadata file * @param parentSection : the setion in which header and marker info is to be added * @param headerFile : the header file which contains channel info * @param markerFile : the marker file which contains marker info * @throws IOException */ public void addHeaderAndMarkerInfo(Section parentSection, String headerFile, String markerFile, boolean vhdrExists, boolean vmrkExists) throws IOException { logger.info("entering addHeaderAndMarkerInfo"); if(vhdrExists) { List<ChannelInfo> channelInfo = getChannelInfo(headerFile); if(channelInfo!=null) setChannelInfo(parentSection, channelInfo); } if(vmrkExists) { 
HashMap<String, EEGMarker> markerInfo = getMarkerInfo(markerFile); if(markerInfo!=null) setMarkerInfo(parentSection, markerInfo); } logger.info("leaving addHeaderAndMarkerInfo"); } /** * Sets channel info in the HDF5 file metadata * @param parentSection : parent section in which metadata is set * @param channelInfo : the channel info to be set */ public void setChannelInfo( Section parentSection, List<ChannelInfo> channelInfo){ logger.info("entering setChannelInfo"); if(parentSection!=null) { Section channelSection = parentSection.createSection("Channel Infos", "Channel"); Iterator itr = channelInfo.iterator(); int i = 0; while (itr.hasNext()) { i++; ChannelInfo channelInfoItem = (ChannelInfo) itr.next(); Section channelItemSection = channelSection.createSection("Channel" + channelInfoItem.getNumber(), "Channel"); Value channelNumber = new Value(""); channelNumber.setInt(channelInfoItem.getNumber()); channelItemSection.createProperty("Number", channelNumber); Value channelName = new Value(""); channelName.setString(channelInfoItem.getName()); channelItemSection.createProperty("Name", channelName); Value channelUnits = new Value(""); channelUnits.setString(channelInfoItem.getUnits()); channelItemSection.createProperty("Units", channelUnits); Value channelResolution = new Value(""); channelResolution.setDouble(channelInfoItem.getResolution()); channelItemSection.createProperty("Resolution", channelResolution); channelNumber.setNull(); channelName.setNull(); channelUnits.setNull(); channelResolution.setNull(); channelItemSection.setNull(); logger.info(i + " Channel: " + channelInfoItem.getNumber() + " " + channelInfoItem.getName() + " " + channelInfoItem.getUnits() + " " + channelInfoItem.getResolution()); } channelSection.setNull(); logger.info("leaving setChannelInfo"); } } /** * Sets marker info in the HDF5 file metadata * @param parentSection : parent section in which metadata is set * @param markerInfo : the marker info to be set */ public void 
setMarkerInfo(Section parentSection, HashMap<String, EEGMarker> markerInfo){ logger.info("entering setMarkerInfo"); Section markerSection = parentSection.createSection("Marker Infos", "Marker"); logger.info("\nMarkers Info: "); Iterator markerItr = markerInfo.entrySet().iterator(); int i=0; Set<String> markerKey = markerInfo.keySet(); Iterator keyitr = markerKey.iterator(); while(markerItr.hasNext() && keyitr.hasNext()){ i++; String key = keyitr.next().toString(); EEGMarker markerItem = markerInfo.get(key); Section markerItemSection = markerSection.createSection(key, "Marker"+i); Value markerName = new Value(""); markerName.setString(markerItem.getName()); markerItemSection.createProperty("Name", markerName); Value markerPosition = new Value(""); markerPosition.setInt(markerItem.getPosition()); markerItemSection.createProperty("Position", markerPosition); Value markerStimulus = new Value(""); markerStimulus.setString(markerItem.getName()); markerItemSection.createProperty("Stimulus", markerStimulus); markerName.setNull(); markerPosition.setNull(); markerStimulus.setNull(); markerItemSection.setNull(); logger.info(i+" Marker: "+ markerItem.getName()+" "+markerItem.getPosition()+" "+markerItem.getStimulus()); } markerSection.setNull(); logger.info("leaving setMarkerInfo"); } /** * Sets section related information in metdata of HDF5 file * @param parentSection : the parent section in shich sub sections are to be added * @param sectionVector : the subsections to be added in the parent section */ public void setSection(Section parentSection, Vector<odml.core.Section> sectionVector){ logger.info("entering setSection"); if(sectionVector!=null) { for (int currentSectionIndex = 0; currentSectionIndex < sectionVector.size(); currentSectionIndex++) { odml.core.Section thisSection = sectionVector.get(currentSectionIndex); logger.debug("currentSectionIndex : " + currentSectionIndex + " | Current section name : " + thisSection.getName()); String typeOfSection = ""; String 
nameOfSection = ""; Vector<odml.core.Property> propertiesList = new Vector<>(); if (thisSection.getType() != null) { typeOfSection = thisSection.getType(); } if (thisSection.getName() != null) { nameOfSection = thisSection.getName(); } // hdf5 creating subsection of metadata section (root) Section secChild = parentSection.createSection(nameOfSection, typeOfSection); //this is the recursive function since a section may have a section inside it, and so on. if (thisSection.getSections() != null && thisSection.getSections().size() > 0) { setSection(secChild, thisSection.getSections()); } if (thisSection.getProperties() != null) { propertiesList = thisSection.getProperties(); } if(secChild!=null) { setProperties(secChild, propertiesList); } } } logger.info("leaving setSection"); } /** * Sets Property related information in metdata of HDF5 file * @param parentSec : the parent section in shich sub sections are to be added * @param propertiesList : the properties to be added in the parent section */ public void setProperties(Section parentSec, Vector<odml.core.Property> propertiesList){ logger.info("entering setProperties"); //for each child of section (for property) if(propertiesList!=null && !propertiesList.isEmpty()){ String nameOfProperty = ""; Value valueOfProperty = new Value(""); for(int tempProperty = 0; tempProperty < propertiesList.size(); tempProperty++) { odml.core.Property thisProperty = propertiesList.get(tempProperty); logger.debug(" tempProperty : " + tempProperty + " | Current property : " + thisProperty.getName()); if(thisProperty.getName()!=null){ nameOfProperty = thisProperty.getName(); } //logger.info("-----------type: "+ thisProperty.getWholeValue().getMap().get("type") + " | value " + thisProperty.getValue()); if(thisProperty.getValue()!=null && thisProperty.valueCount()>0){ odml.core.Value wholeValue = thisProperty.getWholeValue(); String value = thisProperty.getValue(0).toString(); String valueType= wholeValue.getMap().get("type").toString(); 
switch (valueType) { case "datetime": valueOfProperty.setString(value); break; case "int": valueOfProperty.setInt(Integer.parseInt(value)); break; case "float": valueOfProperty.setDouble(Double.parseDouble(value)); break; case "boolean": valueOfProperty.setBoolean(Boolean.parseBoolean(value)); break; case "string": valueOfProperty.setString(value); break; case "long": valueOfProperty.setLong(Long.parseLong(value)); break; default: logger.error("ERROR. Some wrong valueType. valueType : " + valueType); } } processGUINamespaces(parentSec, thisProperty); Property prop = parentSec.createProperty(nameOfProperty, valueOfProperty); } } logger.info("leaving setProperties"); } /** * Extracts channel info from vhdr file * @param vhdrFile : th vhdr file from which channel info is to be extracted * @return : the channel info extracted from vhdr file * @throws IOException */ public List<ChannelInfo> getChannelInfo(String vhdrFile) throws IOException { logger.info("entering getChannelInfo"); List<ChannelInfo> channelInfo = new ArrayList<ChannelInfo>(); byte[] inputHeaderFIle = convertToByteArray(vhdrFile); VhdrReader vhdrReader = new VhdrReader(); if(inputHeaderFIle!=null) vhdrReader.readVhdr(inputHeaderFIle); channelInfo = vhdrReader.getChannels(); logger.info("leaving getChannelInfo"); return channelInfo; } /** * Extracts marker info from vhdr file * @param vmrkFile : th vmrk file from which marker info is to be extracted * @return : the marker info extracted from vmrk file * @throws IOException */ public HashMap<String, EEGMarker> getMarkerInfo(String vmrkFile) throws IOException { logger.info("entering getMarkerInfo"); byte[] inputMarkerFIle = convertToByteArray(vmrkFile); VhdrReader vhdrReader = new VhdrReader(); if(inputMarkerFIle!=null) vhdrReader.readVmrk(inputMarkerFIle); HashMap<String, EEGMarker> markers = vhdrReader.getMarkers(); logger.info("leaving getMarkerInfo"); return markers; } /** * Converts stream to byte array to be written into metadata file * @param 
inputFile : file to convert to byte array * @return : the byte array which is input file * @throws IOException */ public byte[] convertToByteArray(String inputFile) throws IOException { logger.info("entering convertToByteArray"); FileInputStream fileInputStream=null; File file = new File(inputFile); byte[] bFile = new byte[(int) file.length()]; try { //convert file into array of bytes fileInputStream = new FileInputStream(file); fileInputStream.read(bFile); fileInputStream.close(); logger.info("File converted to Byte Array"); }catch(IOException e){ logger.error("Error while opening/reading file.",e); throw new IOException("Error while opening/reading file."); } logger.info("leaving convertToByteArray"); return bFile; } /** * Processes and adds GUI namespaces into the metadata file * @param parentSec : The section to which GUI namespaces are to be added * @param property : The property for which GUI namespaces are to be added */ public void processGUINamespaces(Section parentSec, odml.core.Property property){ logger.info("entering processGUINamespaces"); Section guiSection = parentSec.createSection(property.getName(), "GUI:Namespace"); List list = property.getGuiHelper().getGUINamespaceTags(); if(list!=null && !list.isEmpty()) { for (int i = 0; i < list.size(); i++) { org.jdom.Element guiElement = (org.jdom.Element) list.get(i); String elementName = new String("gui_" + guiElement.getName()); String elementValue = guiElement.getValue(); logger.debug("->" + list.get(i)); Value value = new Value(elementValue); guiSection.createProperty(elementName, value); value.setNull(); } } guiSection.setNull(); logger.info("leaving processGUINamespaces"); } }
/* Copyright 2006 by Sean Luke Licensed under the Academic Free License version 3.0 See the file "LICENSE" for more information */ package ec.gp.koza; import ec.steadystate.*; import ec.*; import ec.simple.*; import ec.gp.*; import ec.util.*; import java.io.*; /* * KozaStatistics.java * * Created: Fri Nov 5 16:03:44 1999 * By: Sean Luke */ /** * A simple Koza-style statistics generator. Prints the mean fitness * (raw,adjusted,hits) and best individual of each generation. * At the end, prints the best individual of the run and the number of * individuals processed. * * <p>If gather-full is true, then final timing information, number of nodes * and depths of trees, approximate final memory utilization, etc. are also given. * * <p>Compressed files will be overridden on restart from checkpoint; uncompressed files will be * appended on restart. * * <p>KozaStatistics implements a simple version of steady-state statistics in the * same fashion that SimpleStatistics does: if it quits before a generation boundary, * it will include the best individual discovered, even if the individual was discovered * after the last boundary. This is done by using individualsEvaluatedStatistics(...) * to update best-individual-of-generation in addition to doing it in * postEvaluationStatistics(...). 
<p><b>Parameters</b><br> <table> <tr><td valign=top><i>base.</i><tt>gzip</tt><br> <font size=-1>boolean</font></td> <td valign=top>(whether or not to compress the file (.gz suffix added)</td></tr> <tr><td valign=top><i>base.</i><tt>file</tt><br> <font size=-1>String (a filename), or nonexistant (signifies stdout)</font></td> <td valign=top>(the log for statistics)</td></tr> <tr><td valign=top><i>base</i>.<tt>gather-full</tt><br> <font size=-1>bool = <tt>true</tt> or <tt>false</tt> (default)</font></td> <td valign=top>(should we full statistics on individuals (will run slower, though the slowness is due to off-line processing that won't mess up timings)</td></tr> </table> * @author Sean Luke * @deprecated use SimpleStatistics instead */ public class KozaStatistics extends Statistics implements SteadyStateStatisticsForm { public Individual[] getBestSoFar() { return best_of_run; } /** log file parameter */ public static final String P_STATISTICS_FILE = "file"; /** The Statistics' log */ public int statisticslog; /** The best individual we've found so far */ public Individual[] best_of_run; /** compress? */ public static final String P_COMPRESS = "gzip"; public static final String P_FULL = "gather-full"; boolean doFull; // total number of individuals long numInds; // timings long lastTime; long initializationTime; long breedingTime; long evaluationTime; long nodesInitialized; long nodesEvaluated; long nodesBred; // memory usage info long lastUsage = 0; long initializationUsage = 0; long breedingUsage = 0; long evaluationUsage = 0; public KozaStatistics() { best_of_run = null; statisticslog = 0; /* stdout */ } public void setup(final EvolutionState state, final Parameter base) { super.setup(state,base); state.output.warnOnce("KozaStatistics is deprecated and will soon be deleted. 
Use SimpleStatistics instead."); File statisticsFile = state.parameters.getFile( base.push(P_STATISTICS_FILE),null); if (statisticsFile!=null) try { statisticslog = state.output.addLog(statisticsFile, !state.parameters.getBoolean(base.push(P_COMPRESS),null,false), state.parameters.getBoolean(base.push(P_COMPRESS),null,false)); } catch (IOException i) { state.output.fatal("An IOException occurred while trying to create the log " + statisticsFile + ":\n" + i); } doFull = state.parameters.getBoolean(base.push(P_FULL),null,false); nodesInitialized = nodesEvaluated = nodesBred = 0; breedingTime=evaluationTime=0; } public void preInitializationStatistics(final EvolutionState state) { super.preInitializationStatistics(state); if (doFull) { Runtime r = Runtime.getRuntime(); lastTime = System.currentTimeMillis(); lastUsage = r.totalMemory() - r.freeMemory(); } } public void postInitializationStatistics(final EvolutionState state) { super.postInitializationStatistics(state); // set up our best_of_run array -- can't do this in setup, because // we don't know if the number of subpopulations has been determined yet best_of_run = new Individual[state.population.subpops.length]; // gather timings if (doFull) { Runtime r = Runtime.getRuntime(); long curU = r.totalMemory() - r.freeMemory(); if (curU > lastUsage) initializationUsage = curU - lastUsage; initializationTime = System.currentTimeMillis()-lastTime; // Determine how many nodes we have for(int x=0;x<state.population.subpops.length;x++) { // check to make sure they're the right class if ( !(state.population.subpops[x].species instanceof GPSpecies )) state.output.fatal("Subpopulation " + x + " is not of the species form GPSpecies." 
+ " Cannot do timing statistics with KozaStatistics."); for(int y=0;y<state.population.subpops[x].individuals.length;y++) { GPIndividual i = (GPIndividual)(state.population.subpops[x].individuals[y]); for(int z=0;z<i.trees.length;z++) nodesInitialized += i.trees[z].child.numNodes(GPNode.NODESEARCH_ALL); } } } } public void preBreedingStatistics(final EvolutionState state) { super.preBreedingStatistics(state); if (doFull) { Runtime r = Runtime.getRuntime(); lastTime = System.currentTimeMillis(); lastUsage = r.totalMemory() - r.freeMemory(); } } public void postBreedingStatistics(final EvolutionState state) { super.postBreedingStatistics(state); // gather timings if (doFull) { Runtime r = Runtime.getRuntime(); long curU = r.totalMemory() - r.freeMemory(); if (curU > lastUsage) breedingUsage += curU - lastUsage; breedingTime += System.currentTimeMillis()-lastTime; // Determine how many nodes we have for(int x=0;x<state.population.subpops.length;x++) { // check to make sure they're the right class if ( !(state.population.subpops[x].species instanceof GPSpecies )) state.output.fatal("Subpopulation " + x + " is not of the species form GPSpecies." 
+ " Cannot do timing statistics with KozaStatistics."); for(int y=0;y<state.population.subpops[x].individuals.length;y++) { GPIndividual i = (GPIndividual)(state.population.subpops[x].individuals[y]); for(int z=0;z<i.trees.length;z++) nodesBred += i.trees[z].child.numNodes(GPNode.NODESEARCH_ALL); } } } } public void preEvaluationStatistics(final EvolutionState state) { super.preEvaluationStatistics(state); if (doFull) { Runtime r = Runtime.getRuntime(); lastTime = System.currentTimeMillis(); lastUsage = r.totalMemory() - r.freeMemory(); } } public void postEvaluationStatistics(final EvolutionState state) { super.postEvaluationStatistics(state); // Gather statistics Runtime r = Runtime.getRuntime(); long curU = r.totalMemory() - r.freeMemory(); if (curU > lastUsage) evaluationUsage += curU - lastUsage; if (doFull) evaluationTime += System.currentTimeMillis()-lastTime; state.output.println("\n\n\nGeneration " + state.generation + "\n================",statisticslog); Individual[] best_i = new Individual[state.population.subpops.length]; for(int x=0;x<state.population.subpops.length;x++) { state.output.println("\nSubpopulation " + x + "\n----------------",statisticslog); // gather timings if (doFull) { long totNodesPerGen = 0; long totDepthPerGen = 0; // check to make sure they're the right class if ( !(state.population.subpops[x].species instanceof GPSpecies )) state.output.fatal("Subpopulation " + x + " is not of the species form GPSpecies." 
+ " Cannot do timing statistics with KozaStatistics."); long[] numNodes = new long[((GPIndividual)(state.population.subpops[x].species.i_prototype)).trees.length]; long[] numDepth = new long[((GPIndividual)(state.population.subpops[x].species.i_prototype)).trees.length]; for(int y=0;y<state.population.subpops[x].individuals.length;y++) { GPIndividual i = (GPIndividual)(state.population.subpops[x].individuals[y]); for(int z=0;z<i.trees.length;z++) { nodesEvaluated += i.trees[z].child.numNodes(GPNode.NODESEARCH_ALL); numNodes[z] += i.trees[z].child.numNodes(GPNode.NODESEARCH_ALL); numDepth[z] += i.trees[z].child.depth(); } } for(int tr=0;tr<numNodes.length;tr++) totNodesPerGen += numNodes[tr]; state.output.println("Avg Nodes: " + ((double)totNodesPerGen)/state.population.subpops[x].individuals.length, statisticslog); state.output.print("Nodes/tree: [", statisticslog); for(int tr=0;tr<numNodes.length;tr++) { if (tr>0) state.output.print("|", statisticslog); state.output.print(""+((double)numNodes[tr])/state.population.subpops[x].individuals.length, statisticslog); } state.output.println("]", statisticslog); for(int tr=0;tr<numDepth.length;tr++) totDepthPerGen += numDepth[tr]; state.output.println("Avg Depth: " + ((double)totDepthPerGen)/ (state.population.subpops[x].individuals.length * numDepth.length), statisticslog); state.output.print("Depth/tree: [", statisticslog); for(int tr=0;tr<numDepth.length;tr++) { if (tr>0) state.output.print("|", statisticslog); state.output.print(""+((double)numDepth[tr])/state.population.subpops[x].individuals.length, statisticslog); } state.output.println("]", statisticslog); } float meanStandardized = 0.0f; float meanAdjusted = 0.0f; long hits = 0; if (!(state.population.subpops[x].species.f_prototype instanceof KozaFitness)) state.output.fatal("Subpopulation " + x + " is not of the fitness KozaFitness. 
Cannot do timing statistics with KozaStatistics."); best_i[x] = state.population.subpops[x].individuals[0]; for(int y=0;y<state.population.subpops[x].individuals.length;y++) { // best individual if (state.population.subpops[x].individuals[y].fitness.betterThan(best_i[x].fitness)) best_i[x] = state.population.subpops[x].individuals[y]; // mean for population meanStandardized += ((KozaFitness)(state.population.subpops[x].individuals[y].fitness)).standardizedFitness(); meanAdjusted += ((KozaFitness)(state.population.subpops[x].individuals[y].fitness)).adjustedFitness(); hits += ((KozaFitness)(state.population.subpops[x].individuals[y].fitness)).hits; } // compute fitness stats meanStandardized /= state.population.subpops[x].individuals.length; meanAdjusted /= state.population.subpops[x].individuals.length; state.output.print("Mean fitness raw: " + meanStandardized + " adjusted: " + meanAdjusted + " hits: " + ((double)hits)/state.population.subpops[x].individuals.length, statisticslog); state.output.println("", statisticslog); // compute inds stats numInds += state.population.subpops[x].individuals.length; } // now test to see if it's the new best_of_run for(int x=0;x<state.population.subpops.length;x++) { if (best_of_run[x]==null || best_i[x].fitness.betterThan(best_of_run[x].fitness)) best_of_run[x] = (Individual)(best_i[x].clone()); // print the best-of-generation individual state.output.println("\nBest Individual of Generation:",statisticslog); best_i[x].printIndividualForHumans(state,statisticslog); state.output.message("Subpop " + x + " best fitness of generation: " + best_i[x].fitness.fitnessToStringForHumans()); } } /** Logs the best individual of the run. 
*/
    public void finalStatistics(final EvolutionState state, final int result) {
        // Let the superclass emit its own end-of-run statistics first.
        super.finalStatistics(state,result);

        // Summary header and total evaluation count, written to the statistics log.
        state.output.println("\n\n\nFinal Statistics\n================",statisticslog);
        state.output.println("Total Individuals Evaluated: " + numInds,statisticslog);
        // for now we just print the best fitness
        state.output.println("\nBest Individual of Run:",statisticslog);
        // One best-of-run individual is tracked per subpopulation.
        for(int x=0;x<state.population.subpops.length;x++) {
            best_of_run[x].printIndividualForHumans(state,statisticslog);
            state.output.message("Subpop " + x + " best fitness of run: " + best_of_run[x].fitness.fitnessToStringForHumans());

            // finally describe the winner if there is a description
            // A fresh clone of the problem is used so describe() cannot disturb
            // any state the shared problem instance may hold.
            ((SimpleProblemForm)(state.evaluator.p_problem.clone())).describe(state, best_of_run[x], x, 0, statisticslog);
        }

        // Output timings
        // doFull presumably gates the optional detailed time/memory accounting
        // collected elsewhere in this class -- TODO confirm against the setup code.
        if (doFull) {
            // Times are accumulated in milliseconds, hence the /1000 to seconds;
            // NOTE(review): a phase that took 0 ms yields a division by zero
            // producing Infinity/NaN in the nodes/sec figures.
            state.output.println("\n\n\nTimings\n=======",statisticslog);
            state.output.println("Initialization: " + ((float)initializationTime)/1000 + " secs total, " + nodesInitialized + " nodes, " + nodesInitialized/(((float)initializationTime)/1000) + " nodes/sec",statisticslog);
            state.output.println("Evaluating: " + ((float)evaluationTime)/1000 + " secs total, " + nodesEvaluated + " nodes, " + nodesEvaluated/(((float)evaluationTime)/1000) + " nodes/sec",statisticslog);
            state.output.println("Breeding: " + ((float)breedingTime)/1000 + " secs total, " + nodesBred + " nodes, " + nodesBred/(((float)breedingTime)/1000) + " nodes/sec",statisticslog);
            // Memory usage is accumulated in bytes, hence the /1024 to KB.
            state.output.println("\n\n\nMemory Usage\n==============",statisticslog);
            state.output.println("Initialization: " + ((float)initializationUsage)/1024 + " KB total, " + nodesInitialized + " nodes, " + nodesInitialized/(((float)initializationUsage)/1024) + " nodes/KB",statisticslog);
            state.output.println("Evaluating: " + ((float)evaluationUsage)/1024 + " KB total, " + nodesEvaluated + " nodes, " + nodesEvaluated/(((float)evaluationUsage)/1024) + " nodes/KB",statisticslog);
            state.output.println("Breeding: " + ((float)breedingUsage)/1024 + " KB total, " + nodesBred + " nodes, " + nodesBred/(((float)breedingUsage)/1024) + " nodes/KB",statisticslog);
        }
    }
}
/*
 * Copyright (C) 2014 Michell Bak
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.nmj.nmjmanager.fragments;

import android.animation.Animator;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.Dialog;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.IntentFilter;
import android.database.Cursor;
import android.graphics.Color;
import android.graphics.Typeface;
import android.net.Uri;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.support.design.widget.FloatingActionButton;
import android.support.v4.app.Fragment;
import android.support.v4.content.LocalBroadcastManager;
import android.text.Html;
import android.text.TextUtils;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.view.ViewTreeObserver;
import android.widget.CheckBox;
import android.widget.ImageView;
import android.widget.ScrollView;
import android.widget.TextView;
import android.widget.Toast;

import com.nmj.apis.trakt.Trakt;
import com.nmj.db.DbAdapterTvShowEpisodes;
import com.nmj.functions.BlurTransformation;
import com.nmj.functions.FileSource;
import com.nmj.functions.Filepath;
import com.nmj.functions.NMJLib;
import com.nmj.functions.PaletteLoader;
import com.nmj.functions.SimpleAnimatorListener;
import com.nmj.nmjmanager.EditTvShowEpisode;
import com.nmj.nmjmanager.IdentifyTvShowEpisode;
import com.nmj.nmjmanager.Main;
import com.nmj.nmjmanager.NMJManagerApplication;
import com.nmj.nmjmanager.R;
import com.nmj.nmjmanager.TvShowEpisode;
import com.nmj.remoteplayback.RemotePlayback;
import com.nmj.service.DeleteFile;
import com.nmj.service.MakeAvailableOffline;
import com.nmj.utils.LocalBroadcastUtils;
import com.nmj.utils.TvShowDatabaseUtils;
import com.nmj.utils.TypefaceUtils;
import com.nmj.utils.VideoUtils;
import com.nmj.utils.ViewUtils;
import com.nmj.views.ObservableScrollView;
import com.nmj.views.ObservableScrollView.OnScrollChangedListener;
import com.squareup.otto.Bus;
import com.squareup.picasso.Callback;
import com.squareup.picasso.Picasso;

import java.util.ArrayList;

import static com.nmj.functions.PreferenceKeys.ALWAYS_DELETE_FILE;
import static com.nmj.functions.PreferenceKeys.CHROMECAST_BETA_SUPPORT;
import static com.nmj.functions.PreferenceKeys.SHOW_FILE_LOCATION;

/**
 * Detail screen for a single TV show episode.
 *
 * Loads the episode identified by the ("showId", "season", "episode") fragment
 * arguments from the local episode database, binds its metadata to the layout,
 * and offers playback (local, offline copy, or Chromecast remote playback),
 * watched-status toggling (synced to Trakt on a background thread), episode
 * identification/editing, offline-copy management and deletion.
 *
 * The fragment re-loads itself when it receives the
 * UPDATE_TV_SHOW_EPISODE_DETAILS_OVERVIEW local broadcast.
 */
@SuppressLint("InflateParams")
public class TvShowEpisodeDetailsFragment extends Fragment {

    // Host activity, cached in onCreate().
    private Activity mContext;
    // Episode currently displayed; (re)loaded from the database in loadEpisode().
    private TvShowEpisode mEpisode;
    private ImageView mBackdrop, mEpisodePhoto;
    private TextView mTitle, mDescription, mFileSource, mAirDate, mRating, mDirector, mWriter, mGuestStars, mSeasonEpisodeNumber;
    private View mDetailsArea;
    private Picasso mPicasso;
    private Typeface mMediumItalic, mMedium, mCondensedRegular;
    private DbAdapterTvShowEpisodes mDatabaseHelper;
    // Timestamps used in onResume() to infer that a started playback lasted long
    // enough (> 5 minutes) to auto-mark the episode as watched.
    private long mVideoPlaybackStarted, mVideoPlaybackEnded;
    private boolean mShowFileLocation;
    // Otto event bus; used to publish toolbar color changes and episode updates.
    private Bus mBus;
    // Toolbar color extracted from the episode photo palette (0 until loaded).
    private int mToolbarColor = 0;
    private FloatingActionButton mFab;
    private PaletteLoader mPaletteLoader;
    private ObservableScrollView mScrollView;

    // Refreshes the episode data and re-binds the views when the episode is
    // modified elsewhere in the app.
    private BroadcastReceiver mBroadcastReceiver = new BroadcastReceiver() {
        @Override
        public void onReceive(Context context, Intent intent) {
            loadEpisode();
            loadData();
        }
    };

    /**
     * Empty constructor as per the Fragment documentation
     */
    public TvShowEpisodeDetailsFragment() {}

    /**
     * Factory method: creates a fragment showing the given episode.
     *
     * @param showId  internal show identifier
     * @param season  season number (>= 0)
     * @param episode episode number (>= 0)
     */
    public static TvShowEpisodeDetailsFragment newInstance(String showId, int season, int episode) {
        TvShowEpisodeDetailsFragment pageFragment = new TvShowEpisodeDetailsFragment();
        Bundle bundle = new Bundle();
        bundle.putString("showId", showId);
        bundle.putInt("season", season);
        bundle.putInt("episode", episode);
        pageFragment.setArguments(bundle);
        return pageFragment;
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        // Instance is retained across configuration changes; fields therefore
        // survive rotation (see the mPaletteLoader reuse in onViewCreated()).
        setRetainInstance(true);
        setHasOptionsMenu(true);

        mContext = getActivity();
        mBus = NMJManagerApplication.getBus();

        mShowFileLocation = PreferenceManager.getDefaultSharedPreferences(getActivity()).getBoolean(SHOW_FILE_LOCATION, true);

        mPicasso = NMJManagerApplication.getPicassoDetailsView(getActivity());

        mMediumItalic = TypefaceUtils.getRobotoMediumItalic(mContext);
        mMedium = TypefaceUtils.getRobotoMedium(mContext);
        mCondensedRegular = TypefaceUtils.getRobotoCondensedRegular(mContext);

        mDatabaseHelper = NMJManagerApplication.getTvEpisodeDbAdapter();

        // Unregistered in onDestroy().
        LocalBroadcastManager.getInstance(mContext).registerReceiver(mBroadcastReceiver, new IntentFilter(LocalBroadcastUtils.UPDATE_TV_SHOW_EPISODE_DETAILS_OVERVIEW));

        loadEpisode();
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
        LocalBroadcastManager.getInstance(mContext).unregisterReceiver(mBroadcastReceiver);
    }

    /**
     * Loads mEpisode (including its file paths) from the database using the
     * fragment arguments. Leaves mEpisode untouched if the arguments are
     * invalid or no matching row exists.
     */
    private void loadEpisode() {
        if (!getArguments().getString("showId").isEmpty() && getArguments().getInt("season") >= 0 && getArguments().getInt("episode") >= 0) {
            Cursor cursor = mDatabaseHelper.getEpisode(getArguments().getString("showId"), getArguments().getInt("season"), getArguments().getInt("episode"));

            if (cursor.moveToFirst()) {
                mEpisode = new TvShowEpisode(getActivity(), cursor.getString(cursor.getColumnIndex(DbAdapterTvShowEpisodes.KEY_SHOW_ID)),
                        cursor.getString(cursor.getColumnIndex(DbAdapterTvShowEpisodes.KEY_EPISODE_TITLE)),
                        cursor.getString(cursor.getColumnIndex(DbAdapterTvShowEpisodes.KEY_EPISODE_PLOT)),
                        cursor.getString(cursor.getColumnIndex(DbAdapterTvShowEpisodes.KEY_SEASON)),
                        cursor.getString(cursor.getColumnIndex(DbAdapterTvShowEpisodes.KEY_EPISODE)),
                        cursor.getString(cursor.getColumnIndex(DbAdapterTvShowEpisodes.KEY_EPISODE_AIRDATE)),
                        cursor.getString(cursor.getColumnIndex(DbAdapterTvShowEpisodes.KEY_EPISODE_DIRECTOR)),
                        cursor.getString(cursor.getColumnIndex(DbAdapterTvShowEpisodes.KEY_EPISODE_WRITER)),
                        cursor.getString(cursor.getColumnIndex(DbAdapterTvShowEpisodes.KEY_EPISODE_GUESTSTARS)),
                        cursor.getString(cursor.getColumnIndex(DbAdapterTvShowEpisodes.KEY_EPISODE_RATING)),
                        cursor.getString(cursor.getColumnIndex(DbAdapterTvShowEpisodes.KEY_HAS_WATCHED)),
                        cursor.getString(cursor.getColumnIndex(DbAdapterTvShowEpisodes.KEY_FAVOURITE))
                );
                mEpisode.setFilepaths(NMJManagerApplication.getTvShowEpisodeMappingsDbAdapter().getFilepathsForEpisode(
                        cursor.getString(cursor.getColumnIndex(DbAdapterTvShowEpisodes.KEY_SHOW_ID)),
                        cursor.getString(cursor.getColumnIndex(DbAdapterTvShowEpisodes.KEY_SEASON)),
                        cursor.getString(cursor.getColumnIndex(DbAdapterTvShowEpisodes.KEY_EPISODE))
                ));
            }
            cursor.close();
        }
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        return inflater.inflate(R.layout.episode_details, container, false);
    }

    /**
     * Wires up all views: FAB playback trigger, parallax scrolling with
     * toolbar-color fading, episode photo / backdrop loading via Picasso, and
     * palette-based coloring of the details area.
     */
    @Override
    public void onViewCreated(final View view, Bundle savedInstanceState) {
        super.onViewCreated(view, savedInstanceState);

        mBackdrop = (ImageView) view.findViewById(R.id.imageBackground);
        mEpisodePhoto = (ImageView) view.findViewById(R.id.episodePhoto);
        mDetailsArea = view.findViewById(R.id.details_area);
        mTitle = (TextView) view.findViewById(R.id.movieTitle);
        mSeasonEpisodeNumber = (TextView) view.findViewById(R.id.textView7);
        mDescription = (TextView) view.findViewById(R.id.textView2);
        mFileSource = (TextView) view.findViewById(R.id.textView3);
        mAirDate = (TextView) view.findViewById(R.id.textReleaseDate);
        mRating = (TextView) view.findViewById(R.id.textView12);
        mDirector = (TextView) view.findViewById(R.id.director);
        mWriter = (TextView) view.findViewById(R.id.writer);
        mGuestStars = (TextView) view.findViewById(R.id.guest_stars);
        mScrollView = (ObservableScrollView) view.findViewById(R.id.observableScrollView);

        // Tapping the FAB plays the episode after a small jump animation.
        mFab = (FloatingActionButton) view.findViewById(R.id.fab);
        mFab.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                ViewUtils.animateFabJump(v, new SimpleAnimatorListener() {
                    @Override
                    public void onAnimationEnd(Animator animation) {
                        play();
                    }
                });
            }
        });
        mFab.setSize(FloatingActionButton.SIZE_AUTO);

        final int height = NMJLib.getActionBarAndStatusBarHeight(getActivity());

        // NOTE(review): mScrollView is looked up a second time here; the
        // assignment above already set it, so this lookup is redundant.
        mScrollView = (ObservableScrollView) view.findViewById(R.id.observableScrollView);
        mScrollView.setOnScrollChangedListener(new OnScrollChangedListener() {
            @Override
            public void onScrollChanged(ScrollView who, int l, int t, int oldl, int oldt) {
                // Fade the toolbar in proportionally to how far the header
                // image has been scrolled off screen.
                final int headerHeight = mEpisodePhoto.getHeight() - height;
                final float ratio = (float) Math.min(Math.max(t, 0), headerHeight) / headerHeight;
                final int newAlpha = (int) (ratio * 255);

                mBus.post(new BusToolbarColorObject(mToolbarColor, newAlpha));

                if (NMJLib.isPortrait(mContext)) {
                    // Such parallax, much wow
                    mEpisodePhoto.setPadding(0, (int) (t / 1.5), 0, 0);
                }
            }
        });

        mScrollView.getViewTreeObserver().addOnGlobalLayoutListener(new ViewTreeObserver.OnGlobalLayoutListener() {
            @Override
            public void onGlobalLayout() {
                ViewUtils.setLayoutParamsForDetailsEmptyView(mContext, view, mBackdrop, mScrollView, this);
            }
        });

        loadData();

        // Episode photo; on failure, falls back to the show backdrop. On
        // success, a palette is extracted to color the details area and FAB.
        mPicasso.load(mEpisode.getEpisodePhoto()).placeholder(R.drawable.bg).config(NMJManagerApplication.getBitmapConfig()).into(mEpisodePhoto, new Callback() {
            @Override
            public void onError() {
                if (!isAdded())
                    return;
                int width = getActivity().getResources().getDimensionPixelSize(R.dimen.episode_details_background_overlay_width);
                int height = getActivity().getResources().getDimensionPixelSize(R.dimen.episode_details_background_overlay_height);
                mPicasso.load(mEpisode.getTvShowBackdrop()).placeholder(R.drawable.bg).error(R.drawable.nobackdrop).resize(width, height).config(NMJManagerApplication.getBitmapConfig()).into(mEpisodePhoto);
            }

            @Override
            public void onSuccess() {
                // mPaletteLoader survives rotation (retained instance), so it
                // is only created once.
                if (mPaletteLoader == null) {
                    mPaletteLoader = new PaletteLoader(mPicasso, Uri.fromFile(mEpisode.getEpisodePhoto()), new PaletteLoader.OnPaletteLoadedCallback() {
                        @Override
                        public void onPaletteLoaded(int swatchColor) {
                            mToolbarColor = swatchColor;
                        }
                    });
                    mPaletteLoader.addView(mDetailsArea);
                    mPaletteLoader.setFab(mFab);
                    mPaletteLoader.execute();
                } else {
                    // Clear old views after configuration change
                    mPaletteLoader.clearViews();
                    // Add views after configuration change
                    mPaletteLoader.addView(mDetailsArea);
                    mPaletteLoader.setFab(mFab);
                    // Re-color the views
                    mPaletteLoader.colorViews();
                }
            }
        });

        // Landscape only: blurred, darkened backdrop behind the details.
        if (!NMJLib.isPortrait(getActivity()))
            mPicasso.load(mEpisode.getEpisodePhoto()).placeholder(R.drawable.bg).error(R.drawable.bg).transform(new BlurTransformation(getActivity().getApplicationContext(), mEpisode.getEpisodePhoto().getAbsolutePath() + "-blur", 4)).into(mBackdrop, new Callback() {
                @Override
                public void onError() {
                    if (!isAdded())
                        return;
                    // Episode photo failed; blur the show backdrop instead.
                    mPicasso.load(mEpisode.getTvShowBackdrop()).placeholder(R.drawable.bg).error(R.drawable.nobackdrop).transform(new BlurTransformation(getActivity().getApplicationContext(), mEpisode.getTvShowBackdrop().getAbsolutePath() + "-blur", 4)).into(mBackdrop, new Callback() {
                        @Override
                        public void onError() {}

                        @Override
                        public void onSuccess() {
                            if (!isAdded())
                                return;
                            mBackdrop.setColorFilter(Color.parseColor("#aa181818"), android.graphics.PorterDuff.Mode.SRC_OVER);
                        }
                    });
                }

                @Override
                public void onSuccess() {
                    if (!isAdded())
                        return;
                    mBackdrop.setColorFilter(Color.parseColor("#aa181818"), android.graphics.PorterDuff.Mode.SRC_OVER);
                }
            });
    }

    /**
     * Binds mEpisode's metadata (title, plot, file source, air date, rating,
     * director, writer, guest stars) to the views. Fields that are empty or
     * "N/A" are hidden.
     */
    private void loadData() {
        // Set the episode title
        mTitle.setVisibility(View.VISIBLE);
        mTitle.setText(mEpisode.getTitle());
        mTitle.setTypeface(mCondensedRegular);

        mDescription.setTypeface(mCondensedRegular);
        mFileSource.setTypeface(mCondensedRegular);
        mDirector.setTypeface(mCondensedRegular);
        mWriter.setTypeface(mCondensedRegular);
        mGuestStars.setTypeface(mCondensedRegular);

        mAirDate.setTypeface(mMedium);
        mRating.setTypeface(mMedium);

        mSeasonEpisodeNumber.setTypeface(mMediumItalic);
        mSeasonEpisodeNumber.setText(getString(R.string.showSeason) + " " + mEpisode.getSeason() + ", " + getString(R.string.showEpisode) + " " + mEpisode.getEpisode());

        // Set the movie plot
        if (!NMJLib.isPortrait(getActivity())) {
            // Landscape: the plot is collapsible; tapping toggles between the
            // configured max line count and (effectively) unlimited lines.
            mDescription.setBackgroundResource(R.drawable.selectable_background);
            mDescription.setMaxLines(getActivity().getResources().getInteger(R.integer.episode_details_max_lines));
            mDescription.setTag(true); // true = collapsed
            mDescription.setOnClickListener(new OnClickListener() {
                @Override
                public void onClick(View v) {
                    if (((Boolean) mDescription.getTag())) {
                        mDescription.setMaxLines(1000);
                        mDescription.setTag(false);
                    } else {
                        mDescription.setMaxLines(getActivity().getResources().getInteger(R.integer.episode_details_max_lines));
                        mDescription.setTag(true);
                    }
                }
            });
            mDescription.setEllipsize(TextUtils.TruncateAt.END);
            mDescription.setFocusable(true);
        } else {
            if (NMJLib.isTablet(getActivity()))
                mDescription.setLineSpacing(0, 1.15f);
        }
        mDescription.setText(mEpisode.getDescription());

        if (mShowFileLocation) {
            mFileSource.setText(mEpisode.getAllFilepaths());
        } else {
            mFileSource.setVisibility(View.GONE);
        }

        // Set the episode air date
        mAirDate.setText(NMJLib.getPrettyDatePrecise(getActivity(), mEpisode.getReleasedate()));

        // Set the movie rating
        if (!mEpisode.getRating().equals("0.0")) {
            try {
                // Rating is stored on a 0-10 scale; shown as a percentage.
                int rating = (int) (Double.parseDouble(mEpisode.getRating()) * 10);
                mRating.setText(Html.fromHtml(rating + "<small> %</small>"));
            } catch (NumberFormatException e) {
                // Non-numeric rating strings are shown verbatim.
                mRating.setText(mEpisode.getRating());
            }
        } else {
            mRating.setText(R.string.stringNA);
        }

        if (TextUtils.isEmpty(mEpisode.getDirector()) || mEpisode.getDirector().equals(getString(R.string.stringNA))) {
            mDirector.setVisibility(View.GONE);
        } else {
            mDirector.setText(mEpisode.getDirector());
        }

        if (TextUtils.isEmpty(mEpisode.getWriter()) || mEpisode.getWriter().equals(getString(R.string.stringNA))) {
            mWriter.setVisibility(View.GONE);
        } else {
            mWriter.setText(mEpisode.getWriter());
        }

        if (TextUtils.isEmpty(mEpisode.getGuestStars()) || mEpisode.getGuestStars().equals(getString(R.string.stringNA))) {
            mGuestStars.setVisibility(View.GONE);
        } else {
            mGuestStars.setText(mEpisode.getGuestStars());
        }
    }

    /**
     * Plays the episode, preferring an offline copy when one exists. With a
     * single file path it plays directly; with several it plays the first
     * offline copy found, or asks the user to pick a file. On successful
     * playback start the timestamp is recorded and a Trakt check-in is made.
     */
    private void play() {
        ArrayList<Filepath> paths = mEpisode.getFilepaths();
        if (paths.size() == 1) {
            Filepath path = paths.get(0);
            if (mEpisode.hasOfflineCopy(path)) {
                boolean playbackStarted = VideoUtils.playVideo(getActivity(), mEpisode.getOfflineCopyUri(path), FileSource.FILE, mEpisode);
                if (playbackStarted) {
                    mVideoPlaybackStarted = System.currentTimeMillis();
                    checkIn();
                }
            } else {
                boolean playbackStarted = VideoUtils.playVideo(getActivity(), path.getFilepath(), path.getType(), mEpisode);
                if (playbackStarted) {
                    mVideoPlaybackStarted = System.currentTimeMillis();
                    checkIn();
                }
            }
        } else {
            boolean hasOfflineCopy = false;
            for (Filepath path : paths) {
                if (mEpisode.hasOfflineCopy(path)) {
                    boolean playbackStarted = VideoUtils.playVideo(getActivity(), mEpisode.getOfflineCopyUri(path), FileSource.FILE, mEpisode);
                    if (playbackStarted) {
                        mVideoPlaybackStarted = System.currentTimeMillis();
                        checkIn();
                    }
                    hasOfflineCopy = true;
                    break;
                }
            }
            if (!hasOfflineCopy) {
                NMJLib.showSelectFileDialog(getActivity(), mEpisode.getFilepaths(), new Dialog.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int which) {
                        Filepath path = mEpisode.getFilepaths().get(which);
                        boolean playbackStarted = VideoUtils.playVideo(getActivity(), path.getFilepath(), path.getType(), mEpisode);
                        if (playbackStarted) {
                            mVideoPlaybackStarted = System.currentTimeMillis();
                            checkIn();
                        }
                    }
                });
            }
        }
    }

    // NOTE(review): missing @Override annotation (this does override
    // Fragment.onResume). Also registers on the bus every resume without a
    // matching unregister -- verify whether that is handled elsewhere.
    public void onResume() {
        super.onResume();

        mBus.register(getActivity());

        // If we returned from a playback that lasted more than five minutes,
        // assume the episode was actually watched and mark it so (quietly).
        mVideoPlaybackEnded = System.currentTimeMillis();
        if (mVideoPlaybackStarted > 0 && mVideoPlaybackEnded - mVideoPlaybackStarted > (1000 * 60 * 5)) {
            if (!mEpisode.hasWatched())
                watched(false); // Mark it as watched
        }
    }

    /**
     * Inflates the episode menu, optionally adds a "Remote play" item when
     * Chromecast beta support is enabled and a network file exists, and
     * adjusts the watched / offline-copy item titles to the current state.
     */
    @Override
    public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
        inflater.inflate(R.menu.episode_details, menu);

        if (PreferenceManager.getDefaultSharedPreferences(getActivity()).getBoolean(CHROMECAST_BETA_SUPPORT, false)) {
            boolean add = false;
            for (Filepath path : mEpisode.getFilepaths()) {
                if (path.isNetworkFile()) {
                    add = true;
                    break;
                }
            }
            if (add) {
                menu.add("Remote play").setOnMenuItemClickListener(new MenuItem.OnMenuItemClickListener() {
                    @Override
                    public boolean onMenuItemClick(MenuItem item) {
                        final ArrayList<Filepath> networkFiles = new ArrayList<Filepath>();
                        for (Filepath path : mEpisode.getFilepaths()) {
                            if (path.isNetworkFile()) {
                                networkFiles.add(path);
                            }
                        }
                        NMJLib.showSelectFileDialog(getActivity(), networkFiles, new DialogInterface.OnClickListener() {
                            @Override
                            public void onClick(DialogInterface dialog, int which) {
                                String showName = NMJManagerApplication.getTvDbAdapter().getShowTitle(mEpisode.getShowId());

                                Intent i = new Intent(getActivity(), RemotePlayback.class);
                                i.putExtra("coverUrl", "");
                                i.putExtra("title", showName + " (S" + NMJLib.addIndexZero(mEpisode.getSeason()) + "E" + NMJLib.addIndexZero(mEpisode.getEpisode()) + "): " + mEpisode.getTitle());
                                i.putExtra("id", mEpisode.getShowId());
                                i.putExtra("type", "tv");

                                // SMB sources are bridged through a local HTTP
                                // server before being handed to remote playback.
                                if (networkFiles.get(which).getType() == FileSource.SMB) {
                                    String url = VideoUtils.startSmbServer(getActivity(), networkFiles.get(which).getFilepath(), mEpisode);
                                    i.putExtra("videoUrl", url);
                                } else {
                                    i.putExtra("videoUrl", networkFiles.get(which).getFilepath());
                                }

                                startActivity(i);
                            }
                        });
                        return false;
                    }
                });
            }
        }

        try {
            if (mEpisode.hasWatched()) {
                menu.findItem(R.id.watched).setTitle(R.string.stringMarkAsUnwatched);
            } else {
                menu.findItem(R.id.watched).setTitle(R.string.stringMarkAsWatched);
            }

            for (Filepath path : mEpisode.getFilepaths()) {
                if (path.isNetworkFile()) {
                    // Set the menu item visibility
                    menu.findItem(R.id.watchOffline).setVisible(true);

                    if (mEpisode.hasOfflineCopy(path))
                        // There's already an offline copy, so let's allow the user to remove it
                        menu.findItem(R.id.watchOffline).setTitle(R.string.removeOfflineCopy);
                    else
                        // There's no offline copy, so let the user download one
                        menu.findItem(R.id.watchOffline).setTitle(R.string.watchOffline);

                    break;
                }
            }
        } catch (Exception e) {} // best-effort: menu items may be absent
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
            case R.id.menuDeleteEpisode:
                deleteEpisode();
                break;
            case R.id.watched:
                watched(true);
                break;
            case R.id.identify:
                identifyEpisode();
                break;
            case R.id.watchOffline:
                watchOffline();
                break;
            case R.id.editTvShowEpisode:
                editEpisode();
                break;
        }
        return false;
    }

    /** Opens the episode editor for the current episode. */
    private void editEpisode() {
        Intent intent = new Intent(getActivity(), EditTvShowEpisode.class);
        intent.putExtra("showId", mEpisode.getShowId());
        intent.putExtra("season", NMJLib.getInteger(mEpisode.getSeason()));
        intent.putExtra("episode", NMJLib.getInteger(mEpisode.getEpisode()));
        startActivityForResult(intent, 0);
    }

    /**
     * Entry point for the offline-copy menu action: picks the single file path
     * directly, or shows a chooser dialog when there are several.
     */
    public void watchOffline() {
        if (mEpisode.getFilepaths().size() == 1) {
            watchOffline(mEpisode.getFilepaths().get(0));
        } else {
            NMJLib.showSelectFileDialog(getActivity(), mEpisode.getFilepaths(), new Dialog.OnClickListener() {
                @Override
                public void onClick(DialogInterface dialog, int which) {
                    watchOffline(mEpisode.getFilepaths().get(which));

                    // Dismiss the dialog
                    dialog.dismiss();
                }
            });
        }
    }

    /**
     * Asks for confirmation and then either removes an existing offline copy
     * of the given file path or queues a download of one via the
     * MakeAvailableOffline service.
     */
    private void watchOffline(final Filepath path) {
        if (mEpisode.hasOfflineCopy(path)) {
            AlertDialog.Builder builder = new AlertDialog.Builder(getActivity());
            builder.setMessage(getString(R.string.areYouSure))
                    .setTitle(getString(R.string.removeOfflineCopy))
                    .setCancelable(false)
                    .setPositiveButton(getString(android.R.string.yes), new DialogInterface.OnClickListener() {
                        public void onClick(DialogInterface dialog, int id) {
                            // Deletion is retried once on failure.
                            boolean success = mEpisode.getOfflineCopyFile(path).delete();
                            if (!success)
                                mEpisode.getOfflineCopyFile(path).delete();
                            getActivity().invalidateOptionsMenu();
                        }
                    })
                    .setNegativeButton(getString(android.R.string.no), new DialogInterface.OnClickListener() {
                        public void onClick(DialogInterface dialog, int id) {
                            dialog.cancel();
                        }
                    })
                    .create().show();
        } else {
            AlertDialog.Builder builder = new AlertDialog.Builder(getActivity());
            builder.setMessage(getString(R.string.downloadOfflineCopy))
                    .setTitle(getString(R.string.watchOffline))
                    .setCancelable(false)
                    .setPositiveButton(getString(android.R.string.yes), new DialogInterface.OnClickListener() {
                        public void onClick(DialogInterface dialog, int id) {
                            if (NMJLib.isLocalCopyBeingDownloaded(getActivity()))
                                Toast.makeText(getActivity(), R.string.addedToDownloadQueue, Toast.LENGTH_SHORT).show();

                            Intent i = new Intent(getActivity(), MakeAvailableOffline.class);
                            i.putExtra(MakeAvailableOffline.FILEPATH, path.getFilepath());
                            i.putExtra(MakeAvailableOffline.TYPE, NMJLib.TYPE_SHOWS);
                            i.putExtra("thumb", mEpisode.getThumbnail().getAbsolutePath());
                            i.putExtra("backdrop", mEpisode.getEpisodePhoto().getAbsolutePath());
                            getActivity().startService(i);
                        }
                    })
                    .setNegativeButton(getString(android.R.string.no), new DialogInterface.OnClickListener() {
                        public void onClick(DialogInterface dialog, int id) {
                            dialog.cancel();
                        }
                    })
                    .create().show();
        }
    }

    /**
     * Launches the identification flow for the episode's file, letting the
     * user pick one first when multiple file paths exist.
     */
    private void identifyEpisode() {
        if (mEpisode.getFilepaths().size() == 1) {
            getActivity().startActivityForResult(getIdentifyIntent(mEpisode.getFilepaths().get(0).getFullFilepath()), 0);
        } else {
            NMJLib.showSelectFileDialog(getActivity(), mEpisode.getFilepaths(), new Dialog.OnClickListener() {
                @Override
                public void onClick(DialogInterface dialog, int which) {
                    getActivity().startActivityForResult(getIdentifyIntent(mEpisode.getFilepaths().get(which).getFullFilepath()), 0);

                    // Dismiss the dialog
                    dialog.dismiss();
                }
            });
        }
    }

    /** Builds the Intent used to launch IdentifyTvShowEpisode for one file path. */
    private Intent getIdentifyIntent(String filepath) {
        Intent i = new Intent(getActivity(), IdentifyTvShowEpisode.class);
        ArrayList<String> filepaths = new ArrayList<String>();
        filepaths.add(filepath);
        i.putExtra("filepaths", filepaths);
        i.putExtra("showId", mEpisode.getShowId());
        i.putExtra("showTitle", NMJManagerApplication.getTvDbAdapter().getShowTitle(mEpisode.getShowId()));
        return i;
    }

    /**
     * Confirmation dialog for episode deletion. Removes the episode from the
     * database, optionally deletes the underlying files, then either jumps
     * back to the show library (last episode removed) or refreshes the
     * season/episode overviews, and finishes the activity.
     */
    private void deleteEpisode() {
        AlertDialog.Builder builder = new AlertDialog.Builder(getActivity());

        View dialogLayout = getActivity().getLayoutInflater().inflate(R.layout.delete_file_dialog_layout, null);
        final CheckBox cb = (CheckBox) dialogLayout.findViewById(R.id.deleteFile);
        cb.setChecked(PreferenceManager.getDefaultSharedPreferences(getActivity()).getBoolean(ALWAYS_DELETE_FILE, true));

        builder.setTitle(getString(R.string.removeEpisode) + " S" + mEpisode.getSeason() + "E" + mEpisode.getEpisode())
                .setView(dialogLayout)
                .setCancelable(false)
                .setPositiveButton(getString(android.R.string.yes), new DialogInterface.OnClickListener() {
                    public void onClick(DialogInterface dialog, int id) {
                        TvShowDatabaseUtils.deleteEpisode(mContext, mEpisode.getShowId(), NMJLib.getInteger(mEpisode.getSeason()), NMJLib.getInteger(mEpisode.getEpisode()));

                        if (cb.isChecked()) {
                            for (Filepath path : mEpisode.getFilepaths()) {
                                Intent deleteIntent = new Intent(getActivity(), DeleteFile.class);
                                deleteIntent.putExtra("filepath", path.getFilepath());
                                getActivity().startService(deleteIntent);
                            }
                        }

                        if (NMJManagerApplication.getTvEpisodeDbAdapter().getEpisodeCount(mEpisode.getShowId()) == 0) {
                            // The show has been deleted! Let's show the TV show library overview
                            Intent i = new Intent(mContext, Main.class);
                            i.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TASK | Intent.FLAG_ACTIVITY_NEW_TASK);
                            i.putExtra("startup", String.valueOf(Main.SHOWS));
                            startActivity(i);
                        } else {
                            LocalBroadcastUtils.updateTvShowSeasonsOverview(mContext);
                            LocalBroadcastUtils.updateTvShowEpisodesOverview(mContext);
                        }

                        notifyDatasetChanges();
                        getActivity().finish();
                    }
                })
                .setNegativeButton(getString(android.R.string.no), new DialogInterface.OnClickListener() {
                    public void onClick(DialogInterface dialog, int id) {
                        dialog.cancel();
                    }
                })
                .show();
    }

    /**
     * Toggles the watched flag, persists it, refreshes the menu, posts the
     * updated episode on the bus and syncs to Trakt on a background thread.
     *
     * @param showToast whether to show a confirmation / error toast
     */
    private void watched(boolean showToast) {
        // Create and open database
        mDatabaseHelper = NMJManagerApplication.getTvEpisodeDbAdapter();

        mEpisode.setHasWatched(!mEpisode.hasWatched()); // Reverse the hasWatched boolean

        if (mDatabaseHelper.setEpisodeWatchStatus(mEpisode.getShowId(), mEpisode.getSeason(), mEpisode.getEpisode(), mEpisode.hasWatched())) {
            getActivity().invalidateOptionsMenu();

            if (showToast)
                if (mEpisode.hasWatched()) {
                    Toast.makeText(getActivity(), getString(R.string.markedAsWatched), Toast.LENGTH_SHORT).show();
                } else {
                    Toast.makeText(getActivity(), getString(R.string.markedAsUnwatched), Toast.LENGTH_SHORT).show();
                }
        } else {
            if (showToast)
                Toast.makeText(getActivity(), getString(R.string.errorOccured), Toast.LENGTH_SHORT).show();
        }

        mBus.post(mEpisode);

        // Trakt sync is network I/O, so it runs off the main thread.
        new Thread() {
            @Override
            public void run() {
                ArrayList<com.nmj.functions.TvShowEpisode> episode = new ArrayList<com.nmj.functions.TvShowEpisode>();
                episode.add(new com.nmj.functions.TvShowEpisode(mEpisode.getShowId(), Integer.valueOf(mEpisode.getEpisode()), Integer.valueOf(mEpisode.getSeason())));
                Trakt.markEpisodeAsWatched(mEpisode.getShowId(), episode, getActivity(), false);
            }
        }.start();
    }

    /** Broadcasts that the TV show library changed. */
    private void notifyDatasetChanges() {
        LocalBroadcastUtils.updateTvShowLibrary(getActivity());
    }

    /** Performs a Trakt episode check-in on a background thread. */
    private void checkIn() {
        new Thread() {
            @Override
            public void run() {
                Trakt.performEpisodeCheckin(mEpisode, getActivity());
            }
        }.start();
    }

    @Override
    public void onActivityResult(int requestCode, int resultCode, Intent data) {
        // Returning from the edit / identify flows (request code 0): reload.
        if (requestCode == 0) {
            if (resultCode == Activity.RESULT_OK) {
                loadEpisode();
                loadData();
            }
        }
    }

    /**
     * Bus event carrying the palette-derived toolbar color and the alpha the
     * toolbar should currently use (driven by scroll position).
     */
    public class BusToolbarColorObject {

        private final int mToolbarColor, mAlpha;

        public BusToolbarColorObject(int toolbarColor, int alpha) {
            mToolbarColor = toolbarColor;
            mAlpha = alpha;
        }

        public int getToolbarColor() {
            return mToolbarColor;
        }

        public int getAlpha() {
            return mAlpha;
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.chinamobile.bcbsp.bspstaff;

import com.chinamobile.bcbsp.BSPConfiguration;
import com.chinamobile.bcbsp.Constants;
import com.chinamobile.bcbsp.util.BSPJob;
import com.chinamobile.bcbsp.workermanager.WorkerManager;

import java.io.File;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.Vector;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.util.RunJar;

/**
 * StaffRunner Base class that runs a staff in a child process.
 *
 * The runner assembles the child JVM's classpath and arguments in {@link #run()},
 * launches the child in {@link #runChild(String[], File)} and waits for it to
 * exit, and can terminate it via {@link #kill()}.
 * @author
 * @version
 */
public class StaffRunner extends Thread {
  /**
   * The log in log4j,to write logs.
   */
  public static final Log LOG = LogFactory.getLog(StaffRunner.class);
  /**
   * if the staff should be killed.
   */
  private boolean killed = false;

  /**
   * Get if the staff should be killed.
   * @return true:the staff should be killed
   */
  public boolean isKilled() {
    return killed;
  }

  /**
   * Set if the staff should be killed.
   * @param killed set if the staff should be killed
   */
  public void setKilled(boolean killed) {
    this.killed = killed;
  }

  /**
   * The staff process.
   */
  private Process process;
  /**
   * The current BSP staff.
   */
  private Staff staff;
  /**
   * The current BSP Job configuration.
   */
  private BSPJob conf;
  /**
   * The current BSP worker manager.
   */
  private WorkerManager workerManager;
  /**
   * The fault step.
   */
  private int faultSSStep = 0;

  /**
   * Constructor of Staff runner.
   * @param bspStaff the current BSP staff
   * @param workerManager the current worker manager
   * @param conf the current BSP job configuration
   */
  public StaffRunner(BSPStaff bspStaff, WorkerManager workerManager,
      BSPJob conf) {
    this.staff = bspStaff;
    this.conf = conf;
    this.workerManager = workerManager;
  }

  /**
   * Get the current BSP staff.
   * @return the current BSP staff.
   */
  public Staff getStaff() {
    return staff;
  }

  /**
   * Get the fault step.
   * @return the fault step
   */
  public int getFaultSSStep() {
    return faultSSStep;
  }

  /**
   * Set the fault step.
   * @param faultSSStep the fault step.
   */
  public void setFaultSSStep(int faultSSStep) {
    this.faultSSStep = faultSSStep;
  }

  /**
   * Called to assemble this staff's input. This method is run in the parent
   * process before the child is spawned. It should not execute user code, only
   * system code.
   * @return true: the staff is prepared
   * @throws IOException e
   */
  public boolean prepare() throws IOException {
    return true;
  }

  /**
   * Start to run a BSP staff: build the child classpath (unpacking the job jar
   * for Java jobs), assemble the child JVM arguments and launch the child.
   * Any failure is logged; this thread never propagates it.
   */
  @Override
  public void run() {
    try {
      String sep = System.getProperty("path.separator");
      File workDir = new File(new File(staff.getJobFile()).getParent(), "work");
      boolean isCreated = workDir.mkdirs();
      if (!isCreated) {
        LOG.debug("StaffRunner.workDir : " + workDir);
      }
      // Start from the parent's classpath; job-specific entries are appended
      // below. (The trailing separator before the job-type branch produces a
      // harmless empty classpath entry and is kept for behavioral parity.)
      StringBuilder classPath = new StringBuilder();
      classPath.append(System.getProperty("java.class.path"));
      classPath.append(sep);
      if (Constants.USER_BC_BSP_JOB_TYPE_C.equals(this.conf.getJobType())) {
        // C jobs: the executable path and the work dir go on the classpath.
        String exe = conf.getJobExe();
        if (exe != null) {
          classPath.append(sep);
          classPath.append(exe);
          classPath.append(sep);
          classPath.append(workDir);
        }
      } else {
        String jar = conf.getJar();
        // if the job jar exists, unpack it into workDir
        if (jar != null) {
          RunJar.unJar(new File(jar), workDir);
          File[] libs = new File(workDir, "lib").listFiles();
          if (libs != null) {
            for (int i = 0; i < libs.length; i++) {
              // add libs from jar to classpath
              classPath.append(sep);
              classPath.append(libs[i]);
            }
          }
          classPath.append(sep);
          classPath.append(new File(workDir, "classes"));
          classPath.append(sep);
          classPath.append(workDir);
        }
      }
      // Build exec child jvm args.
      Vector<String> vargs = new Vector<String>();
      File jvm = new File(new File(System.getProperty("java.home"), "bin"),
          "java");
      vargs.add(jvm.toString());
      // bsp.child.java.opts, with @taskid@ substituted by the staff id.
      String javaOpts = conf.getConf().get("bsp.child.java.opts", "-Xmx200m");
      javaOpts = javaOpts.replace("@taskid@", staff.getStaffID().toString());
      String[] javaOptsSplit = javaOpts.split(" ");
      for (int i = 0; i < javaOptsSplit.length; i++) {
        vargs.add(javaOptsSplit[i]);
      }
      // Add classpath.
      vargs.add("-classpath");
      vargs.add(classPath.toString());
      // Setup the log4j prop
      long logSize = StaffLog.getStaffLogLength(((BSPConfiguration) conf
          .getConf()));
      vargs.add("-Dbcbsp.log.dir=" +
          new File(System.getProperty("bcbsp.log.dir")).getAbsolutePath());
      vargs.add("-Dbcbsp.root.logger=INFO,TLA");
      LOG.info("debug: staff ID is " + staff.getStaffID());
      vargs.add("-Dbcbsp.tasklog.taskid=" + staff.getStaffID());
      vargs.add("-Dbcbsp.tasklog.totalLogFileSize=" + logSize);
      // Add main class and its arguments
      vargs.add(WorkerManager.Child.class.getName());
      InetSocketAddress addr = workerManager.getStaffTrackerReportAddress();
      vargs.add(addr.getHostName());
      vargs.add(Integer.toString(addr.getPort()));
      vargs.add(staff.getStaffID().toString());
      vargs.add(Integer.toString(getFaultSSStep()));
      vargs.add(workerManager.getHostName());
      vargs.add(this.conf.getJobType());
      // Run java
      runChild(vargs.toArray(new String[0]), workDir);
    } catch (Exception e) {
      LOG.error("[run]", e);
    }
  }

  /**
   * Run the child process and wait for it to exit.
   * @param args the full child command line (JVM binary first)
   * @param dir the working directory for the child process
   * @throws Exception if the child exits with a nonzero status (and was not
   *         deliberately killed) or the wait is interrupted
   */
  private void runChild(String[] args, File dir) throws Exception {
    this.process = Runtime.getRuntime().exec(args, null, dir);
    try {
      int exitCode = process.waitFor();
      if (!killed && exitCode != 0) {
        throw new Exception("Staff process exit with nonzero status of " +
            exitCode + ".");
      }
    } catch (InterruptedException e) {
      // Restore the interrupt status and preserve the original exception as
      // the cause (previously only e.toString() was kept, losing the stack).
      Thread.currentThread().interrupt();
      throw new IOException("Interrupted while waiting for the staff process",
          e);
    } finally {
      // Always tear the child down so it cannot outlive the runner.
      kill();
    }
  }

  /**
   * Kill the child process.
   */
  public void kill() {
    if (process != null) {
      process.destroy();
    }
    killed = true;
  }
}
<%#
 Copyright 2013-2017 the original author or authors from the JHipster project.

 This file is part of the JHipster project, see http://www.jhipster.tech/
 for more information.

 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
 You may obtain a copy of the License at

      http://www.apache.org/licenses/LICENSE-2.0

 Unless required by applicable law or agreed to in writing, software
 distributed under the License is distributed on an "AS IS" BASIS,
 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
-%>
package <%=packageName%>.domain;
<%# NOTE(review): oauth2 + non-monolith generates a plain immutable class with
    no persistence annotations; every other combination generates the
    persistent entity in the else-branch below. -%>
<%_ if (authenticationType === 'oauth2' && applicationType !== 'monolith') { _%>

import java.util.Set;

public class User {

    private final String login;

    private final String firstName;

    private final String lastName;

    private final String email;

    private final String langKey;

    private final String imageUrl;

    private final boolean activated;

    private final Set<String> authorities;

    public User(String login, String firstName, String lastName, String email, String langKey, String imageUrl, boolean activated, Set<String> authorities) {
        this.login = login;
        this.firstName = firstName;
        this.lastName = lastName;
        this.email = email;
        this.langKey = langKey;
        this.imageUrl = imageUrl;
        this.activated = activated;
        this.authorities = authorities;
    }

    public String getLogin() {
        return login;
    }

    public String getFirstName() {
        return firstName;
    }

    public String getLastName() {
        return lastName;
    }

    public String getEmail() {
        return email;
    }

    public String getLangKey() {
        return langKey;
    }

    public String getImageUrl() {
        return imageUrl;
    }

    public boolean isActivated() {
        return activated;
    }

    public Set<String> getAuthorities() {
        return authorities;
    }
}
<%_ } else { _%>
<%# NOTE(review): import list below is conditional on databaseType /
    hibernateCache; keep the inline <% %> tags exactly as they are, they
    control which imports end up in the generated file. -%>
import <%=packageName%>.config.Constants;
<% if (databaseType === 'cassandra') { %>
import com.datastax.driver.mapping.annotations.*;<% } %>
import com.fasterxml.jackson.annotation.JsonIgnore;
import org.apache.commons.lang3.StringUtils;<% if (databaseType === 'sql') { %>
import org.hibernate.annotations.BatchSize;<% } %><% if (hibernateCache !== 'no' && databaseType === 'sql') { %>
import org.hibernate.annotations.Cache;
import org.hibernate.annotations.CacheConcurrencyStrategy;<% } %>
import org.hibernate.validator.constraints.Email;
<%_ if (databaseType === 'mongodb') { _%>
import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.index.Indexed;
import org.springframework.data.mongodb.core.mapping.Field;
<%_ } _%>
<%_ if (databaseType === 'couchbase') { _%>
import org.springframework.data.annotation.Id;
import com.couchbase.client.java.repository.annotation.Field;
import org.springframework.data.couchbase.core.mapping.Document;
import org.springframework.data.couchbase.core.mapping.id.GeneratedValue;
import org.springframework.data.couchbase.core.mapping.id.IdAttribute;
import org.springframework.data.couchbase.core.mapping.id.IdPrefix;
<%_ } _%>
<%_ if (databaseType === 'sql') { _%>
import javax.persistence.*;
<%_ } _%>
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Pattern;
import javax.validation.constraints.Size;

import java.io.Serializable;
import java.util.HashSet;
import java.util.Locale;
import java.util.Objects;
import java.util.Set;
import java.time.Instant;
<%_ if (databaseType === 'couchbase') { _%>

import static <%=packageName%>.config.Constants.ID_DELIMITER;
import static org.springframework.data.couchbase.core.mapping.id.GenerationStrategy.USE_ATTRIBUTES;
<%_ } _%>

/**
 * A user.
 */
<% if (databaseType === 'sql') { %>@Entity
@Table(name = "<%= jhiTablePrefix %>_user")<% } %>
<%_ if (hibernateCache !== 'no' && databaseType === 'sql') {
    if (hibernateCache === 'infinispan') { _%>
@Cache(usage = CacheConcurrencyStrategy.READ_WRITE)
<%_ } else { _%>
@Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE)
<%_ } } _%><% if (databaseType === 'mongodb') { %>
@org.springframework.data.mongodb.core.mapping.Document(collection = "<%= jhiTablePrefix %>_user")<% } %><% if (databaseType === 'couchbase') { %>
@Document<% } %><% if (databaseType === 'cassandra') { %>
@Table(name = "user")<% } %><% if (searchEngine === 'elasticsearch') { %>
@org.springframework.data.elasticsearch.annotations.Document(indexName = "user")<% } %>
public class User<% if (databaseType === 'sql' || databaseType === 'mongodb' || databaseType === 'couchbase') { %> extends AbstractAuditingEntity<% } %> implements Serializable {

    private static final long serialVersionUID = 1L;
<% if (databaseType === 'sql') { %>
    @Id
    <%_ if (prodDatabaseType === 'mysql' || prodDatabaseType === 'mariadb') { _%>
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    <%_ } else { _%>
    @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "sequenceGenerator")
    @SequenceGenerator(name = "sequenceGenerator")
    <%_ } _%>
    private Long id;<% } else { %><% if (databaseType === 'couchbase') { %>
    public static final String PREFIX = "user";

    @SuppressWarnings("unused")
    @IdPrefix
    private String prefix = PREFIX;<% } %>
<% if (databaseType === 'mongodb' || databaseType === 'couchbase') { %>    @Id<% } %><% if (databaseType === 'couchbase') { %>
    @GeneratedValue(strategy = USE_ATTRIBUTES, delimiter = ID_DELIMITER)<% } %><% if (databaseType === 'cassandra') { %>    @PartitionKey<% } %>
    private String id;<% } %>

    <%# login column is widened to 100 when social sign-in is enabled -%>
    <%_ let columnMax = 50;
        if (enableSocialSignIn) {
            columnMax = 100;
        } _%>
    @NotNull
    @Pattern(regexp = Constants.LOGIN_REGEX)
    @Size(min = 1, max = <%=columnMax %>)<% if (databaseType === 'sql') { %>
    @Column(length = <%=columnMax %>, unique = true, nullable = false)<% } %><% if (databaseType === 'mongodb') { %>
    @Indexed<% } %><% if (databaseType === 'couchbase') { %>
    @IdAttribute<% } %>
    private String login;
<%_ if (authenticationType !== 'oauth2') { _%>

    @JsonIgnore
    @NotNull
    @Size(min = 60, max = 60)<% if (databaseType === 'sql') { %>
    @Column(name = "password_hash", length = 60)<% } %>
    private String password;
<%_ } _%>

    @Size(max = 50)<% if (databaseType === 'sql') { %>
    @Column(name = "first_name", length = 50)<% } %><% if (databaseType === 'mongodb' || databaseType === 'couchbase') { %>
    @Field("first_name")<% } %>
    private String firstName;

    @Size(max = 50)<% if (databaseType === 'sql') { %>
    @Column(name = "last_name", length = 50)<% } %><% if (databaseType === 'mongodb' || databaseType === 'couchbase') { %>
    @Field("last_name")<% } %>
    private String lastName;

    @Email
    @Size(min = 5, max = 100)<% if (databaseType === 'sql') { %>
    @Column(length = 100, unique = true)<% } %><% if (databaseType === 'mongodb') { %>
    @Indexed<% } %>
    private String email;

    <%_ if (databaseType === 'sql') { _%>
    @NotNull
    @Column(nullable = false)
    <%_ } _%>
    private boolean activated = false;

    @Size(min = 2, max = 6)<% if (databaseType === 'sql') { %>
    @Column(name = "lang_key", length = 6)<% } %><% if (databaseType === 'mongodb' || databaseType === 'couchbase') { %>
    @Field("lang_key")<% } %><% if (databaseType === 'cassandra') { %>
    @Column(name = "lang_key")<% } %>
    private String langKey;
<%_ if (databaseType === 'mongodb' || databaseType === 'couchbase' || databaseType === 'sql') { _%>

    @Size(max = 256)<% if (databaseType === 'sql') { %>
    @Column(name = "image_url", length = 256)<% } %><% if (databaseType === 'mongodb' || databaseType === 'couchbase') { %>
    @Field("image_url")<% } %>
    private String imageUrl;
<%_ } _%>
<%_ if (authenticationType !== 'oauth2') { _%>

    @Size(max = 20)<% if (databaseType === 'sql') { %>
    @Column(name = "activation_key", length = 20)<% } %><% if (databaseType === 'mongodb' || databaseType === 'couchbase') { %>
    @Field("activation_key")<% } %><% if (databaseType === 'cassandra') { %>
    @Column(name = "activation_key")<% } %>
    @JsonIgnore
    private String activationKey;

    @Size(max = 20)<% if (databaseType === 'sql') { %>
    @Column(name = "reset_key", length = 20)<% } %><% if (databaseType === 'mongodb' || databaseType === 'couchbase') { %>
    @Field("reset_key")<% } %><% if (databaseType === 'cassandra') { %>
    @Column(name = "reset_key")<% } %>
    @JsonIgnore
    private String resetKey;

    <%_ if (databaseType === 'sql' || databaseType === 'cassandra') { _%>
    @Column(name = "reset_date")
    <%_ } else if (databaseType === 'mongodb' || databaseType === 'couchbase') { _%>
    @Field("reset_date")
    <%_ } _%>
    private Instant resetDate = null;
<%_ } _%>

    <%# authorities is Set<Authority> for sql/mongodb, Set<String> for cassandra/couchbase -%>
    @JsonIgnore<% if (databaseType === 'sql') { %>
    @ManyToMany
    @JoinTable(
        name = "<%= jhiTablePrefix %>_user_authority",
        joinColumns = {@JoinColumn(name = "user_id", referencedColumnName = "id")},
        inverseJoinColumns = {@JoinColumn(name = "authority_name", referencedColumnName = "name")})
    <%_ if (hibernateCache !== 'no') {
        if (hibernateCache === 'infinispan') { _%>
    @Cache(usage = CacheConcurrencyStrategy.READ_WRITE)
    <%_ } else { _%>
    @Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE)
    <%_ } } _%><% if (databaseType === 'sql') { %>
    @BatchSize(size = 20)<% } %><% } %><% if (databaseType === 'sql' || databaseType === 'mongodb') { %>
    private Set<Authority> authorities = new HashSet<>();<% } %><% if (databaseType === 'cassandra' || databaseType === 'couchbase') { %>
    private Set<String> authorities = new HashSet<>();<% } %><% if (authenticationType === 'session' && databaseType === 'sql') { %>

    @JsonIgnore
    @OneToMany(cascade = CascadeType.ALL, orphanRemoval = true, mappedBy = "user")
    <%_ if (hibernateCache !== 'no') {
        if (hibernateCache === 'infinispan') { _%>
    @Cache(usage = CacheConcurrencyStrategy.READ_WRITE)
    <%_ } else { _%>
    @Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE)
    <%_ } } _%>
    private Set<PersistentToken> persistentTokens = new HashSet<>();<% } %>

    public <% if (databaseType === 'sql') { %>Long<% } else { %>String<% } %> getId() {
        return id;
    }

    public void setId(<% if (databaseType === 'sql') { %>Long<% } else { %>String<% } %> id) {
        this.id = id;
    }

    public String getLogin() {
        return login;
    }

    // Lowercase the login before saving it in database
    public void setLogin(String login) {
        this.login = StringUtils.lowerCase(login, Locale.ENGLISH);
    }
<%_ if (authenticationType !== 'oauth2') { _%>

    public String getPassword() {
        return password;
    }

    public void setPassword(String password) {
        this.password = password;
    }
<%_ } _%>

    public String getFirstName() {
        return firstName;
    }

    public void setFirstName(String firstName) {
        this.firstName = firstName;
    }

    public String getLastName() {
        return lastName;
    }

    public void setLastName(String lastName) {
        this.lastName = lastName;
    }

    public String getEmail() {
        return email;
    }

    public void setEmail(String email) {
        this.email = email;
    }
<%_ if (databaseType === 'mongodb' || databaseType === 'couchbase' || databaseType === 'sql') { _%>

    public String getImageUrl() {
        return imageUrl;
    }

    public void setImageUrl(String imageUrl) {
        this.imageUrl = imageUrl;
    }
<%_ } _%>

    public boolean getActivated() {
        return activated;
    }

    public void setActivated(boolean activated) {
        this.activated = activated;
    }
<%_ if (authenticationType !== 'oauth2') { _%>

    public String getActivationKey() {
        return activationKey;
    }

    public void setActivationKey(String activationKey) {
        this.activationKey = activationKey;
    }

    public String getResetKey() {
        return resetKey;
    }

    public void setResetKey(String resetKey) {
        this.resetKey = resetKey;
    }

    public Instant getResetDate() {
        return resetDate;
    }

    public void setResetDate(Instant resetDate) {
        this.resetDate = resetDate;
    }
<%_ } _%>

    public String getLangKey() {
        return langKey;
    }

    public void setLangKey(String langKey) {
        this.langKey = langKey;
    }

    public Set<<% if (databaseType === 'sql' || databaseType === 'mongodb') { %>Authority<% } %><% if (databaseType === 'cassandra' || databaseType === 'couchbase') { %>String<% } %>> getAuthorities() {
        return authorities;
    }

    public void setAuthorities(Set<<% if (databaseType === 'sql' || databaseType === 'mongodb') { %>Authority<% } %><% if (databaseType === 'cassandra' || databaseType === 'couchbase') { %>String<% } %>> authorities) {
        this.authorities = authorities;
    }<% if ((authenticationType === 'session') && (databaseType === 'sql')) { %>

    public Set<PersistentToken> getPersistentTokens() {
        return persistentTokens;
    }

    public void setPersistentTokens(Set<PersistentToken> persistentTokens) {
        this.persistentTokens = persistentTokens;
    }<% } %>

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }

        User user = (User) o;
        return !(user.getId() == null || getId() == null) && Objects.equals(getId(), user.getId());
    }

    @Override
    public int hashCode() {
        return Objects.hashCode(getId());
    }

    @Override
    public String toString() {
        return "User{" +
            "login='" + login + '\'' +
            ", firstName='" + firstName + '\'' +
            ", lastName='" + lastName + '\'' +
            ", email='" + email + '\'' +<% if (databaseType === 'mongodb' || databaseType === 'couchbase' || databaseType === 'sql') { %>
            ", imageUrl='" + imageUrl + '\'' +<% } %>
            ", activated='" + activated + '\'' +
            ", langKey='" + langKey + '\'' +
            <%_ if (authenticationType !== 'oauth2') { _%>
            ", activationKey='" + activationKey + '\'' +
            <%_ } _%>
            "}";
    }
}
<%_ } _%>
/**
 * Copyright 2015 Santhosh Kumar Tekuri
 *
 * The JLibs authors license this file to you under the Apache License,
 * version 2.0 (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package jlibs.xml.sax.dog.expr.nodset;

import jlibs.core.lang.ImpossibleException;
import jlibs.core.lang.NotImplementedException;
import jlibs.core.util.LongTreeMap;
import jlibs.xml.sax.dog.DataType;
import jlibs.xml.sax.dog.Scope;
import jlibs.xml.sax.dog.expr.Evaluation;
import jlibs.xml.sax.dog.expr.Expression;
import jlibs.xml.sax.dog.expr.Literal;
import jlibs.xml.sax.dog.path.LocationPath;
import jlibs.xml.sax.dog.sniff.Event;

import java.util.ArrayList;
import java.util.List;

/**
 * Document-scope expression that evaluates {@code relativeExpression} against
 * the nodes delivered by the contexts of {@code union}. The contexts are
 * type-cast to NODESET in the constructor.
 *
 * TODO: simplify has to be implemented
 *
 * @author Santhosh Kumar T
 */
public class PathExpression extends Expression{
    public final LocationPath union;
    public final Expression contexts[];
    public final Expression relativeExpression;
    // when true, one result item is produced per context entry; otherwise the
    // per-node results are merged into a single result (see PathEvaluation.computeResult)
    public final boolean forEach;

    public PathExpression(LocationPath union, Expression relativeExpression, boolean forEach){
        super(Scope.DOCUMENT, relativeExpression.resultType);
        assert relativeExpression.scope()!=Scope.DOCUMENT;
        this.union = union;
        contexts = new Expression[union.contexts.size()];
        for(int i=0; i<contexts.length; i++)
            contexts[i] = union.contexts.get(i).typeCast(DataType.NODESET);
        this.relativeExpression = relativeExpression;
        // ask the relative expression for raw (unconverted) results; it is
        // either a LocationExpression or a Literal — nothing else reaches here
        if(relativeExpression instanceof LocationExpression)
            ((LocationExpression)relativeExpression).rawResult = true;
        else
            ((Literal)relativeExpression).rawResultRequired();
        this.forEach = forEach;
        // let the hit expression route lookups back to this path expression
        if(union.hitExpression!=null)
            union.hitExpression.pathExpression = this;
    }

    // Document-scope expressions have no constant result.
    @Override
    public Object getResult(){
        return null;
    }

    // A fresh evaluation per document event.
    @Override
    public Object getResult(Event event){
        return new PathEvaluation(this, event);
    }

    @Override
    public String toString(){
        StringBuilder buff = new StringBuilder();
        for(Expression context: contexts){
            if(buff.length()>0)
                buff.append(", ");
            buff.append(context);
        }
        if(union.predicateSet.getPredicate()!=null){
            buff.insert(0, '(');
            buff.append(')');
            buff.append('[');
            buff.append(union.predicateSet.getPredicate());
            buff.append(']');
        }
        return String.format("path-expression(context(%s), %s, %s)", buff, relativeExpression, forEach);
    }

    /**
     * Local boolean expression answering "was the current event's node hit by
     * this path expression?"; resolved via the per-event PathEvaluation.
     */
    public static class HitExpression extends Expression{
        public PathExpression pathExpression;

        public HitExpression(){
            super(Scope.LOCAL, DataType.BOOLEAN);
        }

        // never called for local scope
        @Override
        public Object getResult(){
            throw new ImpossibleException();
        }

        // returns the EvaluationInfo registered for the current event order
        // (an Evaluation, i.e. a pending answer), or null if none
        @Override
        public Object getResult(Event event){
            PathEvaluation pathEvaluation = (PathEvaluation)event.result(pathExpression);
            return pathEvaluation.evaluations.get(event.order());
        }
    }
}

/**
 * Per-document evaluation of a PathExpression: listens to the context
 * node-sets, spawns one EvaluationInfo per hit node, and combines their
 * results once all contexts and child evaluations have finished.
 */
final class PathEvaluation extends Evaluation<PathExpression> implements NodeSetListener, NodeSetListener.Support{
    private Event event;
    private PositionTracker positionTracker;

    public PathEvaluation(PathExpression expression, Event event){
        super(expression, event.order());
        this.event = event;
        contextsPending = expression.contexts.length;
        // only needed when some predicate uses position()/last()
        if(expression.union.predicateSet.hasPosition)
            positionTracker = new PositionTracker(expression.union.predicateSet.headPositionalPredicate);
    }

    @Override
    public void start(){
        // register as node-set listener on every context evaluation; a
        // non-null immediate result is not supported here
        for(Expression context: expression.contexts){
            Object result = event.evaluate(context);
            if(result==null){
                Object eval = event.result(context);
                if(eval instanceof LocationEvaluation)
                    ((LocationEvaluation)eval).nodeSetListener = this;
                else
                    ((PathEvaluation)eval).nodeSetListener = this;
            }else
                throw new NotImplementedException();
        }
    }

    // one entry per hit node, keyed by event order
    protected LongTreeMap<EvaluationInfo> evaluations = new LongTreeMap<EvaluationInfo>();

    /**
     * Called when a context node may belong to the node-set. On first sight of
     * an order, evaluates the union predicate and kicks off (or records) the
     * relative-expression result; repeated sightings only bump hitCount.
     */
    @Override
    public void mayHit(){
        long order = event.order();
        EvaluationInfo evalInfo = evaluations.get(order);
        if(evalInfo==null){
            evaluations.put(order, evalInfo=new EvaluationInfo(event, expression.union.hitExpression, order, nodeSetListener));
            if(positionTracker!=null){
                event.positionTrackerStack.addFirst(positionTracker);
                positionTracker.addEvaluation(event);
            }
            Expression predicate = expression.union.predicateSet.getPredicate();
            Object predicateResult = predicate==null ? Boolean.TRUE : event.evaluate(predicate);
            if(predicateResult==Boolean.TRUE){
                // predicate already known to pass: evaluate relative expression
                Object r = event.evaluate(expression.relativeExpression);
                if(r==null){
                    // result pending: track the child evaluation
                    event.evaluation.addListener(this);
                    event.evaluation.start();
                    evalInfo.eval = event.evaluation;
                    pendingCount++;
                    if(nodeSetListener!=null){
                        if(event.evaluation instanceof LocationEvaluation)
                            ((LocationEvaluation)event.evaluation).nodeSetListener = evalInfo;
                        else
                            ((PathEvaluation)event.evaluation).nodeSetListener = evalInfo;
                    }
                }else{
                    if(nodeSetListener!=null)
                        nodeSetListener.mayHit();
                    evalInfo.setResult(r);
                }
            }else if(predicateResult==null){
                // predicate itself pending: wrap both in a PredicateEvaluation
                Evaluation predicateEvaluation = event.evaluation;
                Object resultItem = expression.relativeExpression.getResult(event);
                if(nodeSetListener!=null && !(nodeSetListener instanceof Event)){ // nodeSetListener will be event if xmlBuilder is set
                    if(resultItem instanceof LocationEvaluation)
                        ((LocationEvaluation)resultItem).nodeSetListener = evalInfo;
                    else
                        ((PathEvaluation)resultItem).nodeSetListener = evalInfo;
                }
                Evaluation childEval = new PredicateEvaluation(expression.relativeExpression, event.order(), resultItem, event, predicate, predicateEvaluation);
                childEval.addListener(this);
                childEval.start();
                evalInfo.eval = childEval;
                pendingCount++;
            }else
                throw new ImpossibleException();
        }
        evalInfo.hitCount++;
        // NOTE(review): the tracker pushed above is popped again on the first
        // hit — presumably it must only be visible while the predicate above
        // is evaluated; confirm against PositionTracker's contract.
        if(evalInfo.hitCount==1 && positionTracker!=null){
            positionTracker.startEvaluation();
            event.positionTrackerStack.pollFirst();
        }
    }

    /**
     * Called when a previously announced node turns out not to be in the
     * node-set; drops the entry once its hitCount reaches zero.
     */
    @Override
    public void discard(long order){
        LongTreeMap.Entry<EvaluationInfo> entry = evaluations.getEntry(order);
        if(entry!=null){
            if(entry.value.discard()==0){
                evaluations.deleteEntry(entry);
                if(entry.value.eval!=null){
                    pendingCount--;
                    entry.value.eval.removeListener(this);
                }
            }
        }
    }

    // number of context evaluations that have not yet reported finished()
    private int contextsPending;

    @Override
    public void finished(){
        contextsPending--;
        if(contextsPending==0){
            // all contexts done: every surviving entry is a definite hit
            if(expression.union.hitExpression!=null){
                // copy: doFinish() may mutate 'evaluations' via listeners
                for(EvaluationInfo evalInfo: new ArrayList<EvaluationInfo>(evaluations.values()))
                    evalInfo.doFinish();
                if(positionTracker!=null)
                    positionTracker.expired();
            }
        }
        tryToFinish();
    }

    // running count of entries with a pending child evaluation
    private int pendingCount;

    // recomputes pendingCount from scratch; used only in the assert below
    private int pendingCount(){
        int count = 0;
        for(LongTreeMap.Entry<EvaluationInfo> entry = evaluations.firstEntry(); entry!=null; entry=entry.next()){
            if(entry.value.eval!=null)
                count++;
        }
        return count;
    }

    private Object finalResult;

    // finishes exactly once, when no contexts and no child evaluations remain
    private void tryToFinish(){
        if(finalResult==null){
            if(contextsPending>0)
                return;
            assert pendingCount==pendingCount();
            if(pendingCount==0){
                finalResult = computeResult();
                if(nodeSetListener!=null)
                    nodeSetListener.finished();
                fireFinished();
            }
        }
    }

    /**
     * Combines the per-node results: one result item per entry when forEach,
     * otherwise all entries merged (by document order) into a single item.
     */
    @SuppressWarnings({"unchecked"})
    public Object computeResult(){
        if(expression.forEach){
            List<Object> result = new ArrayList<Object>(evaluations.size());
            for(LongTreeMap.Entry entry = evaluations.firstEntry(); entry!=null; entry=entry.next())
                result.add(computeResultItem(((EvaluationInfo)entry.value).result));
            return result;
        }else{
            LongTreeMap result = new LongTreeMap();
            for(LongTreeMap.Entry<EvaluationInfo> entry = evaluations.firstEntry(); entry!=null; entry=entry.next())
                result.putAll(entry.value.result);
            return computeResultItem(result);
        }
    }

    // converts an order->value map into the expression's declared result type
    @SuppressWarnings({"unchecked", "UnnecessaryBoxing"})
    private Object computeResultItem(LongTreeMap result){
        switch(expression.resultType){
            case NODESET:
            case STRINGS:
            case NUMBERS:
                return new ArrayList(result.values());
            case NUMBER:
                if(expression.relativeExpression instanceof Count)
                    return new Double(result.size());
                else{
                    // sum of the per-node numeric values
                    double d = 0;
                    for(LongTreeMap.Entry entry=result.firstEntry(); entry!=null; entry=entry.next())
                        d += (Double)entry.value;
                    return d;
                }
            case BOOLEAN:
                return !result.isEmpty();
            default:
                if(result.isEmpty())
                    return expression.resultType.defaultValue;
                else
                    return result.firstEntry().value;
        }
    }

    @Override
    public Object getResult(){
        return finalResult;
    }

    /**
     * Child evaluation completed: record its result (or, for a failed
     * predicate, drop the entry) and re-check whether we can finish.
     */
    @Override
    @SuppressWarnings({"unchecked"})
    public void finished(Evaluation evaluation){
        LongTreeMap.Entry<EvaluationInfo> entry = evaluations.getEntry(evaluation.order);
        assert entry.value.eval==evaluation;
        if(evaluation instanceof PredicateEvaluation){
            PredicateEvaluation predicateEvaluation = (PredicateEvaluation)evaluation;
            if(predicateEvaluation.result!=null){
                if(predicateEvaluation.result instanceof Evaluation){
                    // predicate passed but the value is still pending:
                    // swap in the inner evaluation and keep waiting
                    entry.value.eval = (Evaluation)predicateEvaluation.result;
                    entry.value.eval.addListener(this);
                    return;
                }else{
                    entry.value.setResult(predicateEvaluation.result);
                    entry.value.eval = null;
                    pendingCount--;
                }
            }else{
                // predicate failed: discard this node entirely
                entry.value.doDiscards();
                evaluations.deleteEntry(entry);
                if(entry.value.eval!=null)
                    pendingCount--;
            }
        }else{
            entry.value.setResult(evaluation.getResult());
            entry.value.eval = null;
            pendingCount--;
        }
        tryToFinish();
    }

    private NodeSetListener nodeSetListener;

    @Override
    public void setNodeSetListener(NodeSetListener nodeSetListener){
        this.nodeSetListener = nodeSetListener;
    }
}

/**
 * Bookkeeping for one candidate node of a PathEvaluation: reference-counts
 * mayHit/discard announcements, holds the (possibly pending) per-node result,
 * and doubles as the Evaluation answering HitExpression (TRUE once confirmed,
 * FALSE once fully discarded).
 */
final class EvaluationInfo extends Evaluation<PathExpression.HitExpression> implements NodeSetListener{
    Event event;
    Evaluation eval;        // pending child evaluation, null once resolved
    LongTreeMap result;     // order -> result value(s) for this node

    EvaluationInfo(Event event, PathExpression.HitExpression expression, long order, NodeSetListener nodeSetListener){
        super(expression, order);
        this.event = event;
        this.nodeSetListener = nodeSetListener;
        if(nodeSetListener!=null)
            mayHits = new ArrayList<Long>();
    }

    @SuppressWarnings({"unchecked"})
    public void setResult(Object result){
        if(result instanceof LongTreeMap)
            this.result = (LongTreeMap)result;
        else{
            // single value: wrap it under this node's order
            this.result = new LongTreeMap();
            this.result.put(order, result);
        }
    }

    public int hitCount;    // outstanding mayHit announcements
    private Boolean hit;    // TRUE = confirmed, FALSE = discarded, null = undecided

    // decrements the hit reference count; at zero the node is definitely out
    public int discard(){
        if(--hitCount==0){
            hit = Boolean.FALSE;
            doDiscards();
            if(listener!=null)
                fireFinished();
        }
        return hitCount;
    }

    // node confirmed as a hit (all contexts finished with hitCount > 0)
    public void doFinish(){
        hit = Boolean.TRUE;
        if(listener!=null)
            fireFinished();
    }

    @Override
    public void start(){}

    @Override
    public Object getResult(){
        return hit;
    }

    @Override
    public void finished(Evaluation evaluation){}

    /*-------------------------------------------------[ NodeSetListener ]---------------------------------------------------*/

    public NodeSetListener nodeSetListener;
    // orders forwarded to nodeSetListener, replayed as discards if this node
    // is dropped; NOTE(review): only allocated when nodeSetListener != null —
    // mayHit()/discard() appear to assume a listener is present in that case
    private List<Long> mayHits;

    @Override
    public void mayHit(){
        mayHits.add(event.order());
        nodeSetListener.mayHit();
    }

    // undo every mayHit previously forwarded downstream
    public void doDiscards(){
        if(nodeSetListener!=null){
            for(long order: mayHits)
                nodeSetListener.discard(order);
        }
    }

    @Override
    public void discard(long order){
        mayHits.remove(order);
        nodeSetListener.discard(order);
    }

    @Override
    public void finished(){}
}
/*
 * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
 * Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.cognitoidentity.model;

import java.io.Serializable;

/**
 * The response to a ListIdentities request.
 */
public class ListIdentitiesResult implements Serializable, Cloneable {

    /** An identity pool ID in the format REGION:GUID. */
    private String identityPoolId;

    /** An object containing a set of identities and associated mappings. */
    private java.util.List<IdentityDescription> identities;

    /** A pagination token. */
    private String nextToken;

    /**
     * An identity pool ID in the format REGION:GUID.
     *
     * @param identityPoolId
     *        An identity pool ID in the format REGION:GUID.
     */
    public void setIdentityPoolId(String identityPoolId) {
        this.identityPoolId = identityPoolId;
    }

    /**
     * An identity pool ID in the format REGION:GUID.
     *
     * @return An identity pool ID in the format REGION:GUID.
     */
    public String getIdentityPoolId() {
        return identityPoolId;
    }

    /**
     * An identity pool ID in the format REGION:GUID.
     *
     * @param identityPoolId
     *        An identity pool ID in the format REGION:GUID.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public ListIdentitiesResult withIdentityPoolId(String identityPoolId) {
        setIdentityPoolId(identityPoolId);
        return this;
    }

    /**
     * An object containing a set of identities and associated mappings.
     *
     * @return An object containing a set of identities and associated mappings.
     */
    public java.util.List<IdentityDescription> getIdentities() {
        return identities;
    }

    /**
     * An object containing a set of identities and associated mappings.
     * A defensive copy of the supplied collection is stored.
     *
     * @param identities
     *        An object containing a set of identities and associated mappings.
     */
    public void setIdentities(
            java.util.Collection<IdentityDescription> identities) {
        this.identities = (identities == null)
                ? null
                : new java.util.ArrayList<IdentityDescription>(identities);
    }

    /**
     * An object containing a set of identities and associated mappings.
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if
     * any). Use {@link #setIdentities(java.util.Collection)} or
     * {@link #withIdentities(java.util.Collection)} if you want to override the
     * existing values.
     * </p>
     *
     * @param identities
     *        An object containing a set of identities and associated mappings.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public ListIdentitiesResult withIdentities(
            IdentityDescription... identities) {
        if (this.identities == null) {
            this.identities = new java.util.ArrayList<IdentityDescription>(
                    identities.length);
        }
        java.util.Collections.addAll(this.identities, identities);
        return this;
    }

    /**
     * An object containing a set of identities and associated mappings.
     *
     * @param identities
     *        An object containing a set of identities and associated mappings.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public ListIdentitiesResult withIdentities(
            java.util.Collection<IdentityDescription> identities) {
        setIdentities(identities);
        return this;
    }

    /**
     * A pagination token.
     *
     * @param nextToken
     *        A pagination token.
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }

    /**
     * A pagination token.
     *
     * @return A pagination token.
     */
    public String getNextToken() {
        return nextToken;
    }

    /**
     * A pagination token.
     *
     * @param nextToken
     *        A pagination token.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public ListIdentitiesResult withNextToken(String nextToken) {
        setNextToken(nextToken);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging. Only non-null members are included.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getIdentityPoolId() != null) {
            sb.append("IdentityPoolId: ").append(getIdentityPoolId()).append(",");
        }
        if (getIdentities() != null) {
            sb.append("Identities: ").append(getIdentities()).append(",");
        }
        if (getNextToken() != null) {
            sb.append("NextToken: ").append(getNextToken());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof is false for null, so no separate null check is needed
        if (!(obj instanceof ListIdentitiesResult)) {
            return false;
        }
        ListIdentitiesResult that = (ListIdentitiesResult) obj;
        return java.util.Objects.equals(getIdentityPoolId(), that.getIdentityPoolId())
                && java.util.Objects.equals(getIdentities(), that.getIdentities())
                && java.util.Objects.equals(getNextToken(), that.getNextToken());
    }

    @Override
    public int hashCode() {
        // Objects.hash uses the same 31-based chain (seed 1, null -> 0) as the
        // hand-rolled version it replaces, so hash values are unchanged.
        return java.util.Objects.hash(getIdentityPoolId(), getIdentities(),
                getNextToken());
    }

    @Override
    public ListIdentitiesResult clone() {
        try {
            return (ListIdentitiesResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() "
                            + "even though we're Cloneable!", e);
        }
    }
}
package com.untzuntz.ustack.data; import java.awt.Color; import java.net.URL; import java.util.Date; import org.apache.log4j.Logger; import org.bson.BasicBSONObject; import org.bson.types.ObjectId; import com.mongodb.BasicDBObject; import com.mongodb.BasicDBObjectBuilder; import com.mongodb.DBCollection; import com.mongodb.DBObject; import com.untzuntz.ustack.exceptions.AccountExistsException; import com.untzuntz.ustack.main.UOpts; /** * Object to support a branding per URL / URI * * @author jdanner * */ public class Branding extends UntzDBObject { private static final long serialVersionUID = 1L; @SuppressWarnings("unused") private static Logger logger = Logger.getLogger(Branding.class); public String getCollectionName() { return "branding"; } private Branding() { put("created", new Date()); } public Branding(DBObject obj) { putAll(obj); } public String getId() { return get("_id") + ""; } /** Gets the DB Collection for the UserAccount object */ public static DBCollection getDBCollection() { return new Branding().getCollection(); } public BrandingObject getBrandingByType(Class name) { if (name == null) throw new IllegalArgumentException("You must provide a branding type"); DBObject o = (DBObject)get(name.getSimpleName()); if (o == null) return new BrandingObject(); return new BrandingObject(o); } public static interface ColorBranding { public int getForegroundRGB(); public int getBackgroundRGB(); } /** * A section of the application branding * * @author jdanner * */ public static class BrandingObject { private BasicBSONObject data; public BrandingObject() { data = new BasicBSONObject(); } public String toString() { return data.toString(); } public BrandingObject(DBObject o) { this(); data.putAll(o); } public int getInt(Enum itemName) { return data.getInt(itemName.name(), 0); } public String getText(Enum itemName) { return data.getString(itemName.name()); } public boolean isHidden(Enum itemName) { return data.getBoolean(itemName.name(), false); } public boolean 
isVisible(Enum itemName) { return data.getBoolean(itemName.name(), true); } public String getImageUrl(Enum itemName) { return data.getString(itemName.name()); } public Color getColor(Enum itemName) { String value = data.getString(itemName.name()); if (value == null) return null; int r = 0; int g = 0; int b = 0; String[] rgb = value.split(","); try { r = Integer.valueOf(rgb[0]); g = Integer.valueOf(rgb[1]); b = Integer.valueOf(rgb[2]); } catch (NumberFormatException nfe) { return null; } System.out.println(String.format("r,g,b = %d,%d,%d", r, g, b)); return new Color(r, g, b); } } public static Branding createBranding(String appName, String host, String file) throws AccountExistsException { String[] fileArray = file.split(","); for (String fileItem : fileArray) { DBObject search = new BasicDBObject("host", host).append("file", fileItem); DBCollection col = new Branding().getCollection(); DBObject obj = col.findOne( search ); if (obj != null) throw new AccountExistsException("URL"); } Branding branding = new Branding(); branding.put("applicationName", appName); branding.put("host", host); branding.put("file", fileArray); return branding; } public static Branding getByAppName(String name) { if (name == null) return null; DBObject book = new Branding().getCollection().findOne(BasicDBObjectBuilder.start("applicationName", name).get()); if (book == null) return null; return new Branding(book); } public static Branding getById(String id) { if (id == null) return null; DBObject book = new Branding().getCollection().findOne(BasicDBObjectBuilder.start("_id", new ObjectId(id)).get()); if (book == null) return null; return new Branding(book); } public static Branding getByURL(URL url) { // String key = url.toString(); if (UOpts.getCacheEnabled()) { //Branding curCache = (Branding)UDataCache.getInstance().get(key); //if (curCache != null) // return curCache; } DBObject search = new BasicDBObject("host", url.getHost()).append("file", url.getFile()); DBCollection col = new 
Branding().getCollection(); DBObject obj = col.findOne( search ); if (obj != null) { Branding ret = new Branding(obj); //if (UOpts.getCacheEnabled()) // UDataCache.getInstance().set(key, 900, ret); return ret; } return null; } }
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.simplesystemsmanagement.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.protocol.StructuredPojo; import com.amazonaws.protocol.ProtocolMarshaller; /** * <p> * This data type is deprecated. Instead, use <a>ParameterStringFilter</a>. * </p> * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/ssm-2014-11-06/ParametersFilter" target="_top">AWS API * Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class ParametersFilter implements Serializable, Cloneable, StructuredPojo { /** * <p> * The name of the filter. * </p> */ private String key; /** * <p> * The filter values. * </p> */ private com.amazonaws.internal.SdkInternalList<String> values; /** * <p> * The name of the filter. * </p> * * @param key * The name of the filter. * @see ParametersFilterKey */ public void setKey(String key) { this.key = key; } /** * <p> * The name of the filter. * </p> * * @return The name of the filter. * @see ParametersFilterKey */ public String getKey() { return this.key; } /** * <p> * The name of the filter. * </p> * * @param key * The name of the filter. * @return Returns a reference to this object so that method calls can be chained together. * @see ParametersFilterKey */ public ParametersFilter withKey(String key) { setKey(key); return this; } /** * <p> * The name of the filter. 
* </p> * * @param key * The name of the filter. * @see ParametersFilterKey */ public void setKey(ParametersFilterKey key) { withKey(key); } /** * <p> * The name of the filter. * </p> * * @param key * The name of the filter. * @return Returns a reference to this object so that method calls can be chained together. * @see ParametersFilterKey */ public ParametersFilter withKey(ParametersFilterKey key) { this.key = key.toString(); return this; } /** * <p> * The filter values. * </p> * * @return The filter values. */ public java.util.List<String> getValues() { if (values == null) { values = new com.amazonaws.internal.SdkInternalList<String>(); } return values; } /** * <p> * The filter values. * </p> * * @param values * The filter values. */ public void setValues(java.util.Collection<String> values) { if (values == null) { this.values = null; return; } this.values = new com.amazonaws.internal.SdkInternalList<String>(values); } /** * <p> * The filter values. * </p> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if any). Use * {@link #setValues(java.util.Collection)} or {@link #withValues(java.util.Collection)} if you want to override the * existing values. * </p> * * @param values * The filter values. * @return Returns a reference to this object so that method calls can be chained together. */ public ParametersFilter withValues(String... values) { if (this.values == null) { setValues(new com.amazonaws.internal.SdkInternalList<String>(values.length)); } for (String ele : values) { this.values.add(ele); } return this; } /** * <p> * The filter values. * </p> * * @param values * The filter values. * @return Returns a reference to this object so that method calls can be chained together. */ public ParametersFilter withValues(java.util.Collection<String> values) { setValues(values); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. 
Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getKey() != null) sb.append("Key: ").append(getKey()).append(","); if (getValues() != null) sb.append("Values: ").append(getValues()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof ParametersFilter == false) return false; ParametersFilter other = (ParametersFilter) obj; if (other.getKey() == null ^ this.getKey() == null) return false; if (other.getKey() != null && other.getKey().equals(this.getKey()) == false) return false; if (other.getValues() == null ^ this.getValues() == null) return false; if (other.getValues() != null && other.getValues().equals(this.getValues()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getKey() == null) ? 0 : getKey().hashCode()); hashCode = prime * hashCode + ((getValues() == null) ? 0 : getValues().hashCode()); return hashCode; } @Override public ParametersFilter clone() { try { return (ParametersFilter) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } @com.amazonaws.annotation.SdkInternalApi @Override public void marshall(ProtocolMarshaller protocolMarshaller) { com.amazonaws.services.simplesystemsmanagement.model.transform.ParametersFilterMarshaller.getInstance().marshall(this, protocolMarshaller); } }
/* * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.iot.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.protocol.StructuredPojo; import com.amazonaws.protocol.ProtocolMarshaller; /** * <p> * Information that identifies the noncompliant resource. * </p> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class ResourceIdentifier implements Serializable, Cloneable, StructuredPojo { /** * <p> * The ID of the certificate attached to the resource. * </p> */ private String deviceCertificateId; /** * <p> * The ID of the CA certificate used to authorize the certificate. * </p> */ private String caCertificateId; /** * <p> * The ID of the Amazon Cognito identity pool. * </p> */ private String cognitoIdentityPoolId; /** * <p> * The client ID. * </p> */ private String clientId; /** * <p> * The version of the policy associated with the resource. * </p> */ private PolicyVersionIdentifier policyVersionIdentifier; /** * <p> * The account with which the resource is associated. * </p> */ private String account; /** * <p> * The ID of the certificate attached to the resource. * </p> * * @param deviceCertificateId * The ID of the certificate attached to the resource. */ public void setDeviceCertificateId(String deviceCertificateId) { this.deviceCertificateId = deviceCertificateId; } /** * <p> * The ID of the certificate attached to the resource. 
* </p> * * @return The ID of the certificate attached to the resource. */ public String getDeviceCertificateId() { return this.deviceCertificateId; } /** * <p> * The ID of the certificate attached to the resource. * </p> * * @param deviceCertificateId * The ID of the certificate attached to the resource. * @return Returns a reference to this object so that method calls can be chained together. */ public ResourceIdentifier withDeviceCertificateId(String deviceCertificateId) { setDeviceCertificateId(deviceCertificateId); return this; } /** * <p> * The ID of the CA certificate used to authorize the certificate. * </p> * * @param caCertificateId * The ID of the CA certificate used to authorize the certificate. */ public void setCaCertificateId(String caCertificateId) { this.caCertificateId = caCertificateId; } /** * <p> * The ID of the CA certificate used to authorize the certificate. * </p> * * @return The ID of the CA certificate used to authorize the certificate. */ public String getCaCertificateId() { return this.caCertificateId; } /** * <p> * The ID of the CA certificate used to authorize the certificate. * </p> * * @param caCertificateId * The ID of the CA certificate used to authorize the certificate. * @return Returns a reference to this object so that method calls can be chained together. */ public ResourceIdentifier withCaCertificateId(String caCertificateId) { setCaCertificateId(caCertificateId); return this; } /** * <p> * The ID of the Amazon Cognito identity pool. * </p> * * @param cognitoIdentityPoolId * The ID of the Amazon Cognito identity pool. */ public void setCognitoIdentityPoolId(String cognitoIdentityPoolId) { this.cognitoIdentityPoolId = cognitoIdentityPoolId; } /** * <p> * The ID of the Amazon Cognito identity pool. * </p> * * @return The ID of the Amazon Cognito identity pool. */ public String getCognitoIdentityPoolId() { return this.cognitoIdentityPoolId; } /** * <p> * The ID of the Amazon Cognito identity pool. 
* </p> * * @param cognitoIdentityPoolId * The ID of the Amazon Cognito identity pool. * @return Returns a reference to this object so that method calls can be chained together. */ public ResourceIdentifier withCognitoIdentityPoolId(String cognitoIdentityPoolId) { setCognitoIdentityPoolId(cognitoIdentityPoolId); return this; } /** * <p> * The client ID. * </p> * * @param clientId * The client ID. */ public void setClientId(String clientId) { this.clientId = clientId; } /** * <p> * The client ID. * </p> * * @return The client ID. */ public String getClientId() { return this.clientId; } /** * <p> * The client ID. * </p> * * @param clientId * The client ID. * @return Returns a reference to this object so that method calls can be chained together. */ public ResourceIdentifier withClientId(String clientId) { setClientId(clientId); return this; } /** * <p> * The version of the policy associated with the resource. * </p> * * @param policyVersionIdentifier * The version of the policy associated with the resource. */ public void setPolicyVersionIdentifier(PolicyVersionIdentifier policyVersionIdentifier) { this.policyVersionIdentifier = policyVersionIdentifier; } /** * <p> * The version of the policy associated with the resource. * </p> * * @return The version of the policy associated with the resource. */ public PolicyVersionIdentifier getPolicyVersionIdentifier() { return this.policyVersionIdentifier; } /** * <p> * The version of the policy associated with the resource. * </p> * * @param policyVersionIdentifier * The version of the policy associated with the resource. * @return Returns a reference to this object so that method calls can be chained together. */ public ResourceIdentifier withPolicyVersionIdentifier(PolicyVersionIdentifier policyVersionIdentifier) { setPolicyVersionIdentifier(policyVersionIdentifier); return this; } /** * <p> * The account with which the resource is associated. * </p> * * @param account * The account with which the resource is associated. 
*/ public void setAccount(String account) { this.account = account; } /** * <p> * The account with which the resource is associated. * </p> * * @return The account with which the resource is associated. */ public String getAccount() { return this.account; } /** * <p> * The account with which the resource is associated. * </p> * * @param account * The account with which the resource is associated. * @return Returns a reference to this object so that method calls can be chained together. */ public ResourceIdentifier withAccount(String account) { setAccount(account); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getDeviceCertificateId() != null) sb.append("DeviceCertificateId: ").append(getDeviceCertificateId()).append(","); if (getCaCertificateId() != null) sb.append("CaCertificateId: ").append(getCaCertificateId()).append(","); if (getCognitoIdentityPoolId() != null) sb.append("CognitoIdentityPoolId: ").append(getCognitoIdentityPoolId()).append(","); if (getClientId() != null) sb.append("ClientId: ").append(getClientId()).append(","); if (getPolicyVersionIdentifier() != null) sb.append("PolicyVersionIdentifier: ").append(getPolicyVersionIdentifier()).append(","); if (getAccount() != null) sb.append("Account: ").append(getAccount()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof ResourceIdentifier == false) return false; ResourceIdentifier other = (ResourceIdentifier) obj; if (other.getDeviceCertificateId() == null ^ this.getDeviceCertificateId() == null) return false; if (other.getDeviceCertificateId() != null && 
other.getDeviceCertificateId().equals(this.getDeviceCertificateId()) == false) return false; if (other.getCaCertificateId() == null ^ this.getCaCertificateId() == null) return false; if (other.getCaCertificateId() != null && other.getCaCertificateId().equals(this.getCaCertificateId()) == false) return false; if (other.getCognitoIdentityPoolId() == null ^ this.getCognitoIdentityPoolId() == null) return false; if (other.getCognitoIdentityPoolId() != null && other.getCognitoIdentityPoolId().equals(this.getCognitoIdentityPoolId()) == false) return false; if (other.getClientId() == null ^ this.getClientId() == null) return false; if (other.getClientId() != null && other.getClientId().equals(this.getClientId()) == false) return false; if (other.getPolicyVersionIdentifier() == null ^ this.getPolicyVersionIdentifier() == null) return false; if (other.getPolicyVersionIdentifier() != null && other.getPolicyVersionIdentifier().equals(this.getPolicyVersionIdentifier()) == false) return false; if (other.getAccount() == null ^ this.getAccount() == null) return false; if (other.getAccount() != null && other.getAccount().equals(this.getAccount()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getDeviceCertificateId() == null) ? 0 : getDeviceCertificateId().hashCode()); hashCode = prime * hashCode + ((getCaCertificateId() == null) ? 0 : getCaCertificateId().hashCode()); hashCode = prime * hashCode + ((getCognitoIdentityPoolId() == null) ? 0 : getCognitoIdentityPoolId().hashCode()); hashCode = prime * hashCode + ((getClientId() == null) ? 0 : getClientId().hashCode()); hashCode = prime * hashCode + ((getPolicyVersionIdentifier() == null) ? 0 : getPolicyVersionIdentifier().hashCode()); hashCode = prime * hashCode + ((getAccount() == null) ? 
0 : getAccount().hashCode()); return hashCode; } @Override public ResourceIdentifier clone() { try { return (ResourceIdentifier) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } @com.amazonaws.annotation.SdkInternalApi @Override public void marshall(ProtocolMarshaller protocolMarshaller) { com.amazonaws.services.iot.model.transform.ResourceIdentifierMarshaller.getInstance().marshall(this, protocolMarshaller); } }
/* * Copyright (C) 2014 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.silsglass.coachingviews; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import android.app.Activity; import android.content.Context; import android.content.Intent; import android.content.res.AssetManager; import android.graphics.Color; import android.media.AudioManager; import android.media.MediaPlayer; import android.os.Bundle; import android.os.Environment; import android.os.Handler; import android.text.SpannableStringBuilder; import android.text.Spanned; import android.text.style.ForegroundColorSpan; import android.util.Log; import android.view.Menu; import android.view.MenuItem; import android.view.MotionEvent; import android.view.animation.Animation; import android.view.animation.AnimationUtils; import android.widget.ImageView; import android.widget.TextView; import android.widget.ViewFlipper; import com.google.android.glass.media.Sounds; import com.google.android.glass.touchpad.Gesture; import com.google.android.glass.touchpad.GestureDetector; import com.google.android.glass.view.WindowUtils; import com.silsglass.taskmanager.R; /** * An abstract implementation of the user interface. 
This handles functionality shared between * the tutorial, the demo runs and the actual coaching runs, such as displaying the current * activity bar at the bottom of the screen and animations between activities when they are executed. * It is up to subclasses to provide the data model and map gestures to the appropriate * score/pass logic. */ public abstract class BaseTemplateTask extends Activity { public static String TAG = "coaching"; // added to test audio files playback // to be deleted? protected static String mFileName = null; protected MediaPlayer mPlayer = null; protected TextView mTextView; protected TextView mTextViewCard = null; protected ImageView mImageview; protected GestureDetector mGestureActivity; static final int RESULT_ACTIV = 1; /** The amount of time to leave the correctly guessed phrase on screen before advancing. */ private static final long SCORED_PHRASE_DELAY_MILLIS = 500; /** The Unicode character for the hollow circle representing a phrase not yet guessed. */ private static final char HOLLOW_CIRCLE = '\u25cb'; /** The Unicode character for the filled circle representing a correctly guessed phrase. */ private static final char FILLED_CIRCLE = '\u25cf'; /** A light blue color applied to the circle representing the current phrase. */ private static final int CURRENT_PHRASE_COLOR = Color.rgb(0x34, 0xa7, 0xff); /** A light green color applied briefly to a phrase when it is guessed correctly. */ private static final int SCORED_PHRASE_COLOR = Color.rgb(0x99, 0xcc, 0x33); /** Handler used to post a delayed animation when a phrase is scored. */ private final Handler mHandler = new Handler(); /** Listener for tap and swipe gestures during the game. 
*/ private final GestureDetector.BaseListener mBaseListener = new GestureDetector.BaseListener() { @Override public boolean onGesture(Gesture gesture) { if (areGesturesEnabled()) { switch (gesture) { case SWIPE_LEFT: // Swipe left (backward) is always handled here to provide a brief // "disallowed" tug animation. //tugPhrase(); //return true; case TAP: case TWO_TAP: case SWIPE_RIGHT: // Delegate tap and swipe right(forward) / left(backward) to the subclass so that the // tutorial and actual game can handle them differently. handleRunGesture(gesture); return true; default: return false; } } return false; } }; /** Audio manager used to play system sound effects. */ private AudioManager mAudioManager; /** Detects gestures during the game. */ private GestureDetector mGestureDetector; /** Model that stores the state of the Coaching run. */ private TaskCoachingModel mModel; /** * Value that can be updated to enable/disable gesture handling in the game. For example, * gestures are disabled briefly when a phrase is scored so that the user cannot score or * pass again until the animation has completed. */ private boolean mGesturesEnabled; /** View flipper with two views used to provide the flinging animations between phrases. */ protected ViewFlipper mPhraseFlipper; /** TextView containing the dots that represent the scored/unscored phrases in the game. */ private TextView mGameState; /** Animation used to briefly tug a phrase when the user swipes left. 
*/ private Animation mTugRightAnimation; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); getWindow().requestFeature(WindowUtils.FEATURE_VOICE_COMMANDS); setUpScreen(); // moved in a method to be referenced when a new view is placed on top of it //setContentView(R.layout.activity_taskcoaching); //setGesturesEnabled(true); //mAudioManager = (AudioManager) getSystemService(Context.AUDIO_SERVICE); //mGestureDetector = new GestureDetector(this).setBaseListener(mBaseListener); //mPhraseFlipper = (ViewFlipper) findViewById(R.id.phrase_flipper); //mGameState = (TextView) findViewById(R.id.game_state); //mTugRightAnimation = AnimationUtils.loadAnimation(this, R.anim.tug_right); mModel = createTaskCoachingModel(); updateDisplay(); } protected void setUpScreen() { setContentView(R.layout.activity_taskcoaching); setGesturesEnabled(true); mAudioManager = (AudioManager) getSystemService(Context.AUDIO_SERVICE); mGestureDetector = new GestureDetector(this).setBaseListener(mBaseListener); mPhraseFlipper = (ViewFlipper) findViewById(R.id.phrase_flipper); mGameState = (TextView) findViewById(R.id.game_state); mTugRightAnimation = AnimationUtils.loadAnimation(this, R.anim.tug_right); } @Override public boolean onGenericMotionEvent(MotionEvent event) { return mGestureDetector.onMotionEvent(event); } /** * Subclasses must override this method to create and return the data model that will be used * by the game. */ protected abstract TaskCoachingModel createTaskCoachingModel(); /** * Subclasses must override this method to handle {@link Gesture#TAP} and * {@link Gesture#SWIPE_RIGHT} gestures that occur during game play. Typically they should * call the {@link #score()} method on a tap and the {@link #pass()} method on a swipe, but * the tutorial overrides these in certain cases to make the game flow in a predetermined way. 
*/ protected abstract void handleRunGesture(Gesture gesture); /** Returns the data model used by this instance of the game. */ protected TaskCoachingModel getTaskCoachingModel() { return mModel; } /** Plays the sound effect of the specified type. */ protected void playSoundEffect(int effectType) { mAudioManager.playSoundEffect(effectType); } /** * Marks the currently visible phrase as correctly guessed. This method changes the phrase's * color to green, flings it off the screen, advances the game model to the next phrase, and * flings the new phrase into view. */ protected void score() { // Disable gesture handling so that the user can't tap or swipe during the animation. setGesturesEnabled(false); mModel.markGuessed(); playSoundEffect(Sounds.SUCCESS); getCurrentTextView().setTextColor(SCORED_PHRASE_COLOR); mHandler.postDelayed(new Runnable() { @Override public void run() { if (!mModel.areAllPhrasesGuessedCorrectly()) { mPhraseFlipper.showNext(); updateDisplay(); // Re-enable gesture handling after the delay has passed. setGesturesEnabled(true); } } }, SCORED_PHRASE_DELAY_MILLIS); } /** Passes on the current phrase and advances to the next one. */ protected void pass() { mModel.pass(); mPhraseFlipper.showNext(); updateDisplay(); } protected void goBack() { mModel.goBack(); mPhraseFlipper.showPrevious(); updateDisplay(); } /** Updates the main phrase label and score bar with the current state of the game. */ protected void updateDisplay() { getCurrentTextView().setText(mModel.getCurrentPhrase()); getCurrentTextView().setTextColor(Color.WHITE); mGameState.setText(buildScoreBar()); } /** * Builds and returns a spanned string containing hollow and filled circles that represent the * current state and score of the game. 
*/ private CharSequence buildScoreBar() { SpannableStringBuilder builder = new SpannableStringBuilder(); for (int i = 0; i < mModel.getPhraseCount(); i++) { if (i > 0) { builder.append(' '); } if (i == mModel.getCurrentPhraseIndex()) { builder.append(HOLLOW_CIRCLE); builder.setSpan(new ForegroundColorSpan(CURRENT_PHRASE_COLOR), builder.length() - 1, builder.length(), Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); } else if (mModel.isPhraseGuessedCorrectly(i)) { builder.append(FILLED_CIRCLE); } else { builder.append(HOLLOW_CIRCLE); } } return builder; } /** Returns the {@code TextView} inside the flipper that is currently on-screen. */ protected TextView getCurrentTextView() { return (TextView) mPhraseFlipper.getCurrentView(); } /** Returns true if gestures should be processed or false if they should be ignored. */ private boolean areGesturesEnabled() { return mGesturesEnabled; } /** * Enables gesture handling if {@code enabled} is true, otherwise disables gesture handling. * Gestures are temporarily disabled when a phrase is scored so that extraneous taps and * swipes are ignored during the animation. */ private void setGesturesEnabled(boolean enabled) { mGesturesEnabled = enabled; } /** Plays a tugging animation that provides feedback when the user tries to swipe backward. */ private void tugPhrase() { mPhraseFlipper.startAnimation(mTugRightAnimation); } // adding a new menu to see if we speak to the glass for commands @Override public boolean onCreatePanelMenu(int featureId, Menu menu) { if (featureId == WindowUtils.FEATURE_VOICE_COMMANDS) { getMenuInflater().inflate(R.menu.voice_menu_activities, menu); return true; } // Pass through to super to setup touch menu. 
return super.onCreatePanelMenu(featureId, menu); } @Override public boolean onCreateOptionsMenu(Menu menu) { getMenuInflater().inflate(R.menu.voice_menu_activities, menu); return true; } @Override public boolean onMenuItemSelected(int featureId, MenuItem item) { if (featureId == WindowUtils.FEATURE_VOICE_COMMANDS) { switch (item.getItemId()) { case R.id.show_me: Log.v(TAG, "this is a show me"); break; case R.id.next: Log.v(TAG, "this is a next"); pass(); break; case R.id.go_back: Log.v(TAG, "this is a go back"); goBack(); break; case R.id.execute: Log.v(TAG, "I want to Try"); userAction(); break; default: return true; } return true; } // Good practice to pass through to super if not handled return super.onMenuItemSelected(featureId, item); } /** * Called to record user actions */ protected void userAction() { startActivityForResult((new Intent(this, UserActionActivity.class)), RESULT_ACTIV); // finish(); } String copyAsset(String filename) { final String PATH = Environment.getExternalStorageDirectory().toString() + "/silsglass/"; File dir = new File(PATH); if (!dir.exists()) { if (!dir.mkdirs()) { Log.v(TAG, "ERROR: Creation of directory " + PATH + " on sdcard failed"); return null; } else { Log.v(TAG, "Created directory " + PATH + " on sdcard"); } } if (!(new File( PATH + filename).exists())) { Log.v(TAG, "copying file " + filename); try { AssetManager assetManager = getAssets(); InputStream in = assetManager.open(filename); OutputStream out = new FileOutputStream(PATH + filename); // Transfer bytes from in to out byte[] buf = new byte[1024]; int len; while ((len = in.read(buf)) > 0) { out.write(buf, 0, len); } in.close(); out.close(); } catch (IOException e) { Log.e(TAG, "Was unable to copy " + filename + e.toString()); return null; } } return PATH + filename; } }
package com.chipsetsv.multipaint;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.InetAddress;
import java.net.NetworkInterface;
import java.net.ServerSocket;
import java.net.Socket;
import java.net.SocketException;
import java.util.Enumeration;
import java.util.logging.Level;

import org.apache.http.conn.util.InetAddressUtils;
import org.ice4j.ice.Agent;
import org.ice4j.ice.IceMediaStream;
import org.ice4j.ice.NominationStrategy;

import com.chipsetsv.multipaint.IceConnection.LocalPseudoTcpJob;
import com.chipsetsv.multipaint.connection.Connection;
import com.chipsetsv.multipaint.connection.OnReceiveEvent;

import android.app.Activity;
import android.app.Application;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.util.Log;
import android.view.Menu;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.TextView;

/**
 * Host ("server") side of a MultiPaint session.
 *
 * Accepts an incoming connection on creation, shows the connection state in
 * {@code serverStatus}, and exposes ICE session setup via {@link #runServer()}
 * and {@link #runConnect(String)}.
 */
public class ServerActivity extends Activity {

    /** Label showing the current connection state / session id. */
    private TextView serverStatus;
    /** Button that reports whether the peer is connected. */
    private Button initiateConnection;

    // designate a port
    public static final int SERVERPORT = 8080;

    /** Handler bound to the thread that created this activity; used to touch the UI. */
    private final Handler handler = new Handler();

    private String remoteSdp;
    // NOTE(review): sessionId is written by runServer() (potentially off the UI
    // thread) and read by the posted Runnable — confirm the intended threading.
    private String sessionId;

    private static Thread iceInitial;
    private static Thread iceConnect;
    private static Agent localAgent;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_server);

        serverStatus = (TextView) findViewById(R.id.server_status);
        initiateConnection = (Button) findViewById(R.id.button_connect);
        initiateConnection.setOnClickListener(connectListener);

        // Start accepting the incoming peer connection immediately.
        Connection.getServer().accept();
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        getMenuInflater().inflate(R.menu.activity_server, menu);
        return true;
    }

    /** Reports the current connection state when the connect button is pressed. */
    private final OnClickListener connectListener = new OnClickListener() {
        public void onClick(View v) {
            if (Connection.getServer().getConnected())
                serverStatus.setText("Connected");
            else
                serverStatus.setText("Non connected");
        }
    };

    @Override
    protected void onStop() {
        super.onStop();
        // Stop receiving status updates while not visible.
        Connection.getServer().getStatusHandler().removeOnReceive();
    }

    @Override
    protected void onPause() {
        super.onPause();
        Connection.getServer().getStatusHandler().removeOnReceive();
    }

    @Override
    protected void onResume() {
        super.onResume();
        // Re-register for status updates and mirror them into the status label.
        // NOTE(review): onReceive may be invoked off the UI thread — confirm that
        // the status handler delivers on the main thread before touching the view.
        Connection.getServer().getStatusHandler().setOnReceive(new OnReceiveEvent() {
            @Override
            public void onReceive(String msg) {
                serverStatus.setText(msg);
            }
        });
    }

    /**
     * Gathers local ICE candidates, publishes the local SDP as a new session,
     * and shows the resulting session id in the UI.
     *
     * @throws Throwable propagated from agent creation / session publication
     */
    public void runServer() throws Throwable {
        IceConnection.setStartTime(System.currentTimeMillis());

        int localPort = 7999;

        localAgent = IceConnection.createAgent(localPort);
        localAgent.setNominationStrategy(NominationStrategy.NOMINATE_HIGHEST_PRIO);

        String localSDP = SdpUtils.createSDPDescription(localAgent);
        sessionId = SessionUtils.createSession(localSDP);

        // Post to the UI thread; setText must not run on this worker.
        handler.post(new Runnable() {
            public void run() {
                serverStatus.setText(sessionId);
            }
        });
    }

    /**
     * Fetches the remote (slave) SDP for the given session, feeds it to the local
     * agent, and drives ICE connectivity establishment to completion.
     *
     * @param id session identifier previously created by {@link #runServer()}
     * @throws Throwable propagated from SDP parsing / connectivity establishment
     */
    public void runConnect(String id) throws Throwable {
        remoteSdp = SessionUtils.getSlaveSdpInSession(id);
        // BUG FIX: the original used `remoteSdp == ""`, which compares String
        // references and is (effectively) never true. Use a null-safe emptiness
        // check so an absent remote SDP actually short-circuits.
        if (remoteSdp == null || remoteSdp.isEmpty())
            return;

        SdpUtils.parseSDP(localAgent, remoteSdp);
        localAgent.addStateChangeListener(new IceConnection.LocalIceProcessingListener());

        // This side is the controlling agent in the ICE negotiation.
        localAgent.setControlling(true);

        long endTime = System.currentTimeMillis();
        IceConnection.getLogger().log(Level.INFO,
                "Total candidate gathering time: {0} ms",
                (endTime - IceConnection.getStartTime()));
        IceConnection.getLogger().log(Level.INFO, "LocalAgent: {0}", localAgent);

        localAgent.startConnectivityEstablishment();

        IceMediaStream dataStream = localAgent.getStream("data");
        if (dataStream != null) {
            IceConnection.getLogger().log(Level.INFO,
                    "Local data clist:" + dataStream.getCheckList());
        }

        // Wait (bounded) for the local agent to finish its job.
        synchronized (IceConnection.getLocalAgentMonitor()) {
            IceConnection.getLocalAgentMonitor().wait(IceConnection.agentJobTimeout);
        }

        if (IceConnection.getRemoteJob() != null) {
            IceConnection.getLogger().log(Level.FINEST, "Remote thread join started");
            IceConnection.getRemoteJob().join();
            IceConnection.getLogger().log(Level.FINEST, "Remote thread joined");
        }
        if (IceConnection.getLocalJob() != null) {
            IceConnection.getLogger().log(Level.FINEST, "Local thread join started");
            IceConnection.getLocalJob().join();
            IceConnection.getLogger().log(Level.FINEST, "Local thread joined");
        }
    }
}
/**
 * Licensed to Cloudera, Inc. under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  Cloudera, Inc. licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.cloudera.flume.agent.durability;

import java.io.File;
import java.io.IOException;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;

import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

import com.cloudera.flume.core.EventSource;
import com.cloudera.flume.handlers.hdfs.SeqfileEventSource;
import com.cloudera.util.Clock;
import com.cloudera.util.FileUtil;

/**
 * This tests the write ahead log source against some trouble conditions --
 * empty file, truncated file.
 */
public class TestNaiveFileWALSource {
  // Fixture seqfile with exactly 5 good entries.
  final static String WAL_OK = "src/data/hadoop_logs_5.hdfs";

  // This file has been prematurely truncated and is thus corrupt.
  final static String WAL_CORRUPT = "src/data/hadoop_logs_5.hdfs.aa";

  /** Turns on DEBUG logging for the WAL manager so test runs are traceable. */
  @Before
  public void setUp() {
    System.out.println("====================================================");
    Logger LOG = Logger.getLogger(NaiveFileWALManager.class.getName());
    LOG.setLevel(Level.DEBUG);
  }

  /**
   * Seqfile should fail on open when reading an empty file.
   */
  @Test
  public void testSeqfileErrorOnOpen() throws IOException, InterruptedException {
    System.out.println("Exception on open empty file with seqfile");
    File tmpdir = FileUtil.mktempdir();
    tmpdir.deleteOnExit();

    // Create an empty (and therefore unreadable) seqfile.
    File corrupt = new File(tmpdir, "walempty.00000000.20091104-101213997-0800.seq");
    System.out.println("corrupt file is named: " + corrupt.getAbsolutePath());
    corrupt.createNewFile();
    corrupt.deleteOnExit();

    // check now, and any age is too old.
    File commit = new File(tmpdir, "committed");
    commit.deleteOnExit();

    EventSource src = new SeqfileEventSource(corrupt.getAbsolutePath());
    try {
      src.open();
    } catch (IOException e) {
      // Expected: opening an empty seqfile must throw.
      return;
    }
    Assert.fail("should have failed with io exception");
  }

  /**
   * WAL should succeed on open even if its internal opens fail. It will block
   * on next() while continuing to try get a valid source of events.
   *
   * This test demonstrates this by starting the WALSource, calling next in a
   * separate thread, and waits a little. Nothing should have happened.
   */
  @Test
  public void testSurviveErrorOnOpen() throws IOException, InterruptedException {
    System.out.println("Survive error on open with WALSource");
    File basedir = FileUtil.mktempdir();
    basedir.deleteOnExit();

    // Create an empty file inside the WAL's "logged" directory.
    File logDir = new File(basedir, NaiveFileWALManager.LOGGEDDIR);
    logDir.mkdirs();
    File corrupt = new File(logDir, "walempty.00000000.20091104-101213997-0800.seq");
    System.out.println("corrupt file is named: " + corrupt.getAbsolutePath());
    corrupt.createNewFile();
    corrupt.deleteOnExit();

    NaiveFileWALManager walman = new NaiveFileWALManager(basedir);
    final WALSource src = new WALSource(walman);
    // open would normally fail, but because this is wrapped, this is ok.
    src.open();
    src.recover();

    // Flag flipped by the poller thread if next() ever returns or throws.
    final AtomicBoolean okstate = new AtomicBoolean(true);
    Thread t = new Thread() {
      public void run() {
        try {
          // This should block and never make progress.
          src.next();
        } catch (Exception e) {
          e.printStackTrace();
        } finally {
          // This should never execute.
          okstate.set(false);
        }
      }
    };
    t.start();

    // TODO (jon) remove sleeps
    // yeah, I know you don't like sleeps.. getting into the DirWatcher is more
    // trouble than I want right now.
    Clock.sleep(3000);
    src.close();
    Assert.assertTrue(okstate.get()); // no unexpected exceptions or fall-throughs.
  }

  /**
   * In this situation WAL is open and has a file that starts off being ok. It
   * should then encounter a file with zero size (which fails to open). It should
   * continue if there is more data, or block if there is not.
   */
  @Test
  public void testSurviveEmptyFile() throws IOException, InterruptedException {
    System.out.println("Survive empty file with walsource");
    File basedir = FileUtil.mktempdir();
    basedir.deleteOnExit();

    // Create an empty file in the wal/logged dir.
    File logdir = new File(basedir, NaiveFileWALManager.LOGGEDDIR);
    logdir.mkdirs();
    File emptyfile = new File(logdir, "walempty.0000000.20091104-101213997-0800.seq");
    System.out.println("zero file is named: " + emptyfile.getAbsolutePath());
    emptyfile.createNewFile();
    emptyfile.deleteOnExit();

    // Copy an ok file that has exactly 5 entries into the wal/logged dir.
    File orig = new File(WAL_OK);
    File ok = new File(logdir, "ok.0000000.20091104-101213997-0800.seq");
    FileUtil.dumbfilecopy(orig, ok);

    // check now, and any age is too old.
    NaiveFileWALManager walman = new NaiveFileWALManager(basedir);
    final WALSource src = new WALSource(walman);

    // Inject data using recovery mode.
    src.open();
    src.recover();

    final AtomicInteger count = new AtomicInteger();
    final AtomicBoolean okstate = new AtomicBoolean(true);
    Thread t = new Thread("poller") {
      public void run() {
        try {
          for (int i = 0; i < 10; i++) {
            // This eventually blocks and never makes progress.
            // It will always read the good entries and skip over the bad file.
            src.next();
            count.getAndIncrement();
          }
        } catch (Exception e) {
          System.out.println("about to fail because of " + e);
          e.printStackTrace();
          okstate.set(false);
        }
      }
    };
    t.start();

    // TODO (jon) remove sleeps
    // yeah, I know you don't like sleeps.. getting into the DirWatcher is more
    // trouble than I want right now.
    Clock.sleep(3000);
    src.close();
    Assert.assertTrue(okstate.get()); // no unexpected exceptions
    Assert.assertEquals(5, count.get()); // only the 5 good entries were read

    // After this call okstate will be false because IOException is thrown on
    // close.
  }

  /**
   * In this situation WAL is open and has a file that starts off being ok. It
   * should then encounter a file with zero size (which fails to open). It should
   * continue if there is more data, or block if there is not.
   */
  @Test
  public void testSurviveTwoEmptyFiles() throws IOException,
      InterruptedException {
    System.out.println("Survive two empty files with walsource");
    File basedir = FileUtil.mktempdir();
    basedir.deleteOnExit();

    // Create a few empty files.
    File logdir = new File(basedir, NaiveFileWALManager.LOGGEDDIR);
    logdir.mkdirs();
    File emptyfile = new File(logdir, "walempty.0000000.20091104-101213997-0800.seq");
    System.out.println("zero file is named: " + emptyfile.getAbsolutePath());
    emptyfile.createNewFile();
    emptyfile.deleteOnExit();

    File emptyfile2 = new File(logdir, "walempty2.0000000.20091104-101213997-0800.seq");
    System.out.println("zero file is named: " + emptyfile2.getAbsolutePath());
    emptyfile2.createNewFile();
    emptyfile2.deleteOnExit();

    // Copy an ok file that has exactly 5 entries.
    File orig = new File(WAL_OK);
    File ok = new File(logdir, "ok.0000000.20091104-101213997-0800.seq");
    FileUtil.dumbfilecopy(orig, ok);

    // check now, and any age is too old.
    // final WriteAheadLogSource src = new WriteAheadLogSource(tmpdir
    // .getAbsolutePath(), 0, 0);
    NaiveFileWALManager walman = new NaiveFileWALManager(basedir);
    final WALSource src = new WALSource(walman);

    src.open();
    src.recover();

    final AtomicInteger count = new AtomicInteger();
    final AtomicBoolean okstate = new AtomicBoolean(true);
    Thread t = new Thread("poller") {
      public void run() {
        try {
          for (int i = 0; i < 10; i++) {
            // This eventually blocks and never makes progress.
            // It will always read the good entries and skip over the bad file.
            src.next();
            count.getAndIncrement();
          }
        } catch (Exception e) {
          e.printStackTrace();
          okstate.set(false);
        }
      }
    };
    t.start();

    // TODO (jon) remove sleeps
    // yeah, I know you don't like sleeps.. getting into the DirWatcher is more
    // trouble than I want right now.
    Clock.sleep(3000);
    src.close();
    Assert.assertTrue(okstate.get()); // no unexpected exceptions
    Assert.assertEquals(5, count.get()); // only the 5 good entries were read

    // After this call okstate will be false because IOException is thrown on
    // close.
  }

  /**
   * In this situation we initially open a file that starts off ok. However, at
   * some point it runs into an unexpected end of file (due to a program /
   * machine / write failure).
   *
   * We want this to send all entries it can, fire some event with the bad wal
   * file, and then continue on with other ok files.
   */
  @Test
  public void testSurviveCorruptFile() throws IOException, InterruptedException {
    System.out.println("Survive zero file with walsource");
    File basedir = FileUtil.mktempdir();
    basedir.deleteOnExit();

    File logdir = new File(basedir, NaiveFileWALManager.LOGGEDDIR);
    logdir.mkdirs();

    // Copy the prematurely-truncated fixture into the logged dir.
    File corrupt = new File(logdir, "walcorrupt.0000000.20091104-101213997-0800.seq");
    System.out.println("corrupt file is named: " + corrupt.getAbsolutePath());
    FileUtil.dumbfilecopy(new File(WAL_CORRUPT), corrupt);
    corrupt.deleteOnExit();

    // check now, and any age is too old.
    NaiveFileWALManager walman = new NaiveFileWALManager(basedir);
    final WALSource src = new WALSource(walman);

    src.open();
    src.recover();

    final AtomicInteger count = new AtomicInteger();
    final AtomicBoolean okstate = new AtomicBoolean(true);
    Thread t = new Thread() {
      public void run() {
        try {
          for (int i = 0; true; i++) {
            // This eventually blocks and never makes progress.
            // It will always read the good entries and skip over the bad file.
            src.next();
            count.getAndIncrement();
          }
        } catch (Exception e) {
          e.printStackTrace();
          okstate.set(false);
        }
      }
    };
    t.start();

    // TODO (jon) remove sleeps
    // yeah, I know you don't like sleeps.. getting into the DirWatcher is more
    // trouble than I want right now.
    Clock.sleep(3000);
    src.close();
    System.out.println("Outputted " + count.get() + " events");
    Assert.assertTrue(okstate.get()); // no unexpected exceptions
    // Only the entries before the truncation point are recoverable.
    Assert.assertEquals(3, count.get());
  }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.search.aggregations.pipeline.moving.avg; import com.google.common.collect.EvictingQueue; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.histogram.InternalHistogram; import org.elasticsearch.search.aggregations.bucket.histogram.InternalHistogram.Bucket; import org.elasticsearch.search.aggregations.metrics.ValuesSourceMetricsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.avg.Avg; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregationHelperTests; import org.elasticsearch.search.aggregations.pipeline.SimpleValue; import org.elasticsearch.search.aggregations.pipeline.derivative.Derivative; import org.elasticsearch.search.aggregations.pipeline.movavg.models.*; import org.elasticsearch.test.ESIntegTestCase; import org.hamcrest.Matchers; import org.junit.Test; import java.util.*; import static 
org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.search.aggregations.AggregationBuilders.avg; import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; import static org.elasticsearch.search.aggregations.AggregationBuilders.max; import static org.elasticsearch.search.aggregations.AggregationBuilders.min; import static org.elasticsearch.search.aggregations.AggregationBuilders.range; import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.derivative; import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.movingAvg; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.core.IsNull.notNullValue; import static org.hamcrest.core.IsNull.nullValue; @ESIntegTestCase.SuiteScopeTestCase public class MovAvgIT extends ESIntegTestCase { private static final String INTERVAL_FIELD = "l_value"; private static final String VALUE_FIELD = "v_value"; private static final String GAP_FIELD = "g_value"; static int interval; static int numBuckets; static int windowSize; static double alpha; static double beta; static double gamma; static int period; static HoltWintersModel.SeasonalityType seasonalityType; static BucketHelpers.GapPolicy gapPolicy; static ValuesSourceMetricsAggregationBuilder metric; static List<PipelineAggregationHelperTests.MockBucket> mockHisto; static Map<String, ArrayList<Double>> testValues; enum MovAvgType { SIMPLE ("simple"), LINEAR("linear"), EWMA("ewma"), HOLT("holt"), HOLT_WINTERS("holt_winters"), HOLT_BIG_MINIMIZE("holt"); private final String name; MovAvgType(String s) { name = s; } public String toString(){ return name; } } enum MetricTarget { VALUE ("value"), COUNT("count"), METRIC("metric"); private final String name; MetricTarget(String s) { name = s; } public String toString(){ return name; } } @Override public void 
setupSuiteScopeCluster() throws Exception { createIndex("idx"); createIndex("idx_unmapped"); List<IndexRequestBuilder> builders = new ArrayList<>(); interval = 5; numBuckets = randomIntBetween(6, 80); period = randomIntBetween(1, 5); windowSize = randomIntBetween(period * 2, 10); // start must be 2*period to play nice with HW alpha = randomDouble(); beta = randomDouble(); gamma = randomDouble(); seasonalityType = randomBoolean() ? HoltWintersModel.SeasonalityType.ADDITIVE : HoltWintersModel.SeasonalityType.MULTIPLICATIVE; gapPolicy = randomBoolean() ? BucketHelpers.GapPolicy.SKIP : BucketHelpers.GapPolicy.INSERT_ZEROS; metric = randomMetric("the_metric", VALUE_FIELD); mockHisto = PipelineAggregationHelperTests.generateHistogram(interval, numBuckets, randomDouble(), randomDouble()); testValues = new HashMap<>(8); for (MovAvgType type : MovAvgType.values()) { for (MetricTarget target : MetricTarget.values()) { if (type.equals(MovAvgType.HOLT_BIG_MINIMIZE)) { setupExpected(type, target, numBuckets); } else { setupExpected(type, target, windowSize); } } } for (PipelineAggregationHelperTests.MockBucket mockBucket : mockHisto) { for (double value : mockBucket.docValues) { builders.add(client().prepareIndex("idx", "type").setSource(jsonBuilder().startObject() .field(INTERVAL_FIELD, mockBucket.key) .field(VALUE_FIELD, value).endObject())); } } for (int i = -10; i < 10; i++) { builders.add(client().prepareIndex("neg_idx", "type").setSource( jsonBuilder().startObject().field(INTERVAL_FIELD, i).field(VALUE_FIELD, 10).endObject())); } for (int i = 0; i < 12; i++) { builders.add(client().prepareIndex("double_predict", "type").setSource( jsonBuilder().startObject().field(INTERVAL_FIELD, i).field(VALUE_FIELD, 10).endObject())); } indexRandom(true, builders); ensureSearchable(); } /** * Calculates the moving averages for a specific (model, target) tuple based on the previously generated mock histogram. * Computed values are stored in the testValues map. 
* * @param type The moving average model to use * @param target The document field "target", e.g. _count or a field value */ private void setupExpected(MovAvgType type, MetricTarget target, int windowSize) { ArrayList<Double> values = new ArrayList<>(numBuckets); EvictingQueue<Double> window = EvictingQueue.create(windowSize); for (PipelineAggregationHelperTests.MockBucket mockBucket : mockHisto) { double metricValue; double[] docValues = mockBucket.docValues; // Gaps only apply to metric values, not doc _counts if (mockBucket.count == 0 && target.equals(MetricTarget.VALUE)) { // If there was a gap in doc counts and we are ignoring, just skip this bucket if (gapPolicy.equals(BucketHelpers.GapPolicy.SKIP)) { values.add(null); continue; } else if (gapPolicy.equals(BucketHelpers.GapPolicy.INSERT_ZEROS)) { // otherwise insert a zero instead of the true value metricValue = 0.0; } else { metricValue = PipelineAggregationHelperTests.calculateMetric(docValues, metric); } } else { // If this isn't a gap, or is a _count, just insert the value metricValue = target.equals(MetricTarget.VALUE) ? 
PipelineAggregationHelperTests.calculateMetric(docValues, metric) : mockBucket.count; } if (window.size() > 0) { switch (type) { case SIMPLE: values.add(simple(window)); break; case LINEAR: values.add(linear(window)); break; case EWMA: values.add(ewma(window)); break; case HOLT: values.add(holt(window)); break; case HOLT_BIG_MINIMIZE: values.add(holt(window)); break; case HOLT_WINTERS: // HW needs at least 2 periods of data to start if (window.size() >= period * 2) { values.add(holtWinters(window)); } else { values.add(null); } break; } } else { values.add(null); } window.offer(metricValue); } testValues.put(type.name() + "_" + target.name(), values); } /** * Simple, unweighted moving average * * @param window Window of values to compute movavg for * @return */ private double simple(Collection<Double> window) { double movAvg = 0; for (double value : window) { movAvg += value; } movAvg /= window.size(); return movAvg; } /** * Linearly weighted moving avg * * @param window Window of values to compute movavg for * @return */ private double linear(Collection<Double> window) { double avg = 0; long totalWeight = 1; long current = 1; for (double value : window) { avg += value * current; totalWeight += current; current += 1; } return avg / totalWeight; } /** * Exponentionally weighted (EWMA, Single exponential) moving avg * * @param window Window of values to compute movavg for * @return */ private double ewma(Collection<Double> window) { double avg = 0; boolean first = true; for (double value : window) { if (first) { avg = value; first = false; } else { avg = (value * alpha) + (avg * (1 - alpha)); } } return avg; } /** * Holt-Linear (Double exponential) moving avg * @param window Window of values to compute movavg for * @return */ private double holt(Collection<Double> window) { double s = 0; double last_s = 0; // Trend value double b = 0; double last_b = 0; int counter = 0; double last; for (double value : window) { last = value; if (counter == 1) { s = value; b = value 
- last; } else { s = alpha * value + (1.0d - alpha) * (last_s + last_b); b = beta * (s - last_s) + (1 - beta) * last_b; } counter += 1; last_s = s; last_b = b; } return s + (0 * b) ; } /** * Holt winters (triple exponential) moving avg * @param window Window of values to compute movavg for * @return */ private double holtWinters(Collection<Double> window) { // Smoothed value double s = 0; double last_s = 0; // Trend value double b = 0; double last_b = 0; // Seasonal value double[] seasonal = new double[window.size()]; double padding = seasonalityType.equals(HoltWintersModel.SeasonalityType.MULTIPLICATIVE) ? 0.0000000001 : 0; int counter = 0; double[] vs = new double[window.size()]; for (double v : window) { vs[counter] = v + padding; counter += 1; } // Initial level value is average of first season // Calculate the slopes between first and second season for each period for (int i = 0; i < period; i++) { s += vs[i]; b += (vs[i + period] - vs[i]) / period; } s /= (double) period; b /= (double) period; last_s = s; // Calculate first seasonal if (Double.compare(s, 0.0) == 0 || Double.compare(s, -0.0) == 0) { Arrays.fill(seasonal, 0.0); } else { for (int i = 0; i < period; i++) { seasonal[i] = vs[i] / s; } } for (int i = period; i < vs.length; i++) { if (seasonalityType.equals(HoltWintersModel.SeasonalityType.MULTIPLICATIVE)) { s = alpha * (vs[i] / seasonal[i - period]) + (1.0d - alpha) * (last_s + last_b); } else { s = alpha * (vs[i] - seasonal[i - period]) + (1.0d - alpha) * (last_s + last_b); } b = beta * (s - last_s) + (1 - beta) * last_b; if (seasonalityType.equals(HoltWintersModel.SeasonalityType.MULTIPLICATIVE)) { seasonal[i] = gamma * (vs[i] / (last_s + last_b )) + (1 - gamma) * seasonal[i - period]; } else { seasonal[i] = gamma * (vs[i] - (last_s - last_b )) + (1 - gamma) * seasonal[i - period]; } last_s = s; last_b = b; } int idx = window.size() - period + (0 % period); // TODO perhaps pad out seasonal to a power of 2 and use a mask instead of modulo? 
if (seasonalityType.equals(HoltWintersModel.SeasonalityType.MULTIPLICATIVE)) { return (s + (1 * b)) * seasonal[idx]; } else { return s + (1 * b) + seasonal[idx]; } } /** * test simple moving average on single value field */ @Test public void simpleSingleValuedField() { SearchResponse response = client() .prepareSearch("idx").setTypes("type") .addAggregation( histogram("histo").field(INTERVAL_FIELD).interval(interval) .extendedBounds(0L, (long) (interval * (numBuckets - 1))) .subAggregation(metric) .subAggregation(movingAvg("movavg_counts") .window(windowSize) .modelBuilder(new SimpleModel.SimpleModelBuilder()) .gapPolicy(gapPolicy) .setBucketsPaths("_count")) .subAggregation(movingAvg("movavg_values") .window(windowSize) .modelBuilder(new SimpleModel.SimpleModelBuilder()) .gapPolicy(gapPolicy) .setBucketsPaths("the_metric")) ).execute().actionGet(); assertSearchResponse(response); InternalHistogram<Bucket> histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); List<? extends Bucket> buckets = histo.getBuckets(); assertThat("Size of buckets array is not correct.", buckets.size(), equalTo(mockHisto.size())); List<Double> expectedCounts = testValues.get(MovAvgType.SIMPLE.name() + "_" + MetricTarget.COUNT.name()); List<Double> expectedValues = testValues.get(MovAvgType.SIMPLE.name() + "_" + MetricTarget.VALUE.name()); Iterator<? 
extends Histogram.Bucket> actualIter = buckets.iterator(); Iterator<PipelineAggregationHelperTests.MockBucket> expectedBucketIter = mockHisto.iterator(); Iterator<Double> expectedCountsIter = expectedCounts.iterator(); Iterator<Double> expectedValuesIter = expectedValues.iterator(); while (actualIter.hasNext()) { assertValidIterators(expectedBucketIter, expectedCountsIter, expectedValuesIter); Histogram.Bucket actual = actualIter.next(); PipelineAggregationHelperTests.MockBucket expected = expectedBucketIter.next(); Double expectedCount = expectedCountsIter.next(); Double expectedValue = expectedValuesIter.next(); assertThat("keys do not match", ((Number) actual.getKey()).longValue(), equalTo(expected.key)); assertThat("doc counts do not match", actual.getDocCount(), equalTo((long)expected.count)); assertBucketContents(actual, expectedCount, expectedValue); } } @Test public void linearSingleValuedField() { SearchResponse response = client() .prepareSearch("idx").setTypes("type") .addAggregation( histogram("histo").field(INTERVAL_FIELD).interval(interval) .extendedBounds(0L, (long) (interval * (numBuckets - 1))) .subAggregation(metric) .subAggregation(movingAvg("movavg_counts") .window(windowSize) .modelBuilder(new LinearModel.LinearModelBuilder()) .gapPolicy(gapPolicy) .setBucketsPaths("_count")) .subAggregation(movingAvg("movavg_values") .window(windowSize) .modelBuilder(new LinearModel.LinearModelBuilder()) .gapPolicy(gapPolicy) .setBucketsPaths("the_metric")) ).execute().actionGet(); assertSearchResponse(response); InternalHistogram<Bucket> histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); List<? 
extends Bucket> buckets = histo.getBuckets();
// NOTE(review): the lines above/below up to the first closing braces are the tail of a
// test method whose header lies outside this chunk; they follow the same
// bucket-by-bucket comparison pattern as the tests below.
assertThat("Size of buckets array is not correct.", buckets.size(), equalTo(mockHisto.size()));

// Precomputed expectations for the LINEAR model, keyed by model + target.
List<Double> expectedCounts = testValues.get(MovAvgType.LINEAR.name() + "_" + MetricTarget.COUNT.name());
List<Double> expectedValues = testValues.get(MovAvgType.LINEAR.name() + "_" + MetricTarget.VALUE.name());

Iterator<? extends Histogram.Bucket> actualIter = buckets.iterator();
Iterator<PipelineAggregationHelperTests.MockBucket> expectedBucketIter = mockHisto.iterator();
Iterator<Double> expectedCountsIter = expectedCounts.iterator();
Iterator<Double> expectedValuesIter = expectedValues.iterator();

// Walk actual and expected buckets in lock-step: key, doc count, then both movavg values.
while (actualIter.hasNext()) {
    assertValidIterators(expectedBucketIter, expectedCountsIter, expectedValuesIter);

    Histogram.Bucket actual = actualIter.next();
    PipelineAggregationHelperTests.MockBucket expected = expectedBucketIter.next();
    Double expectedCount = expectedCountsIter.next();
    Double expectedValue = expectedValuesIter.next();

    assertThat("keys do not match", ((Number) actual.getKey()).longValue(), equalTo(expected.key));
    assertThat("doc counts do not match", actual.getDocCount(), equalTo((long)expected.count));
    assertBucketContents(actual, expectedCount, expectedValue);
}
}

/**
 * Runs a histogram with two EWMA moving averages (one over {@code _count}, one over
 * {@code the_metric}) and checks every bucket against the precomputed EWMA expectations.
 */
@Test
public void ewmaSingleValuedField() {
    SearchResponse response = client()
            .prepareSearch("idx").setTypes("type")
            .addAggregation(
                    histogram("histo").field(INTERVAL_FIELD).interval(interval)
                            .extendedBounds(0L, (long) (interval * (numBuckets - 1)))
                            .subAggregation(metric)
                            .subAggregation(movingAvg("movavg_counts")
                                    .window(windowSize)
                                    .modelBuilder(new EwmaModel.EWMAModelBuilder().alpha(alpha))
                                    .gapPolicy(gapPolicy)
                                    .setBucketsPaths("_count"))
                            .subAggregation(movingAvg("movavg_values")
                                    .window(windowSize)
                                    .modelBuilder(new EwmaModel.EWMAModelBuilder().alpha(alpha))
                                    .gapPolicy(gapPolicy)
                                    .setBucketsPaths("the_metric"))
            ).execute().actionGet();

    assertSearchResponse(response);

    InternalHistogram<Bucket> histo = response.getAggregations().get("histo");
    assertThat(histo, notNullValue());
    assertThat(histo.getName(), equalTo("histo"));
    List<? extends Bucket> buckets = histo.getBuckets();
    assertThat("Size of buckets array is not correct.", buckets.size(), equalTo(mockHisto.size()));

    // Expectations for the EWMA model.
    List<Double> expectedCounts = testValues.get(MovAvgType.EWMA.name() + "_" + MetricTarget.COUNT.name());
    List<Double> expectedValues = testValues.get(MovAvgType.EWMA.name() + "_" + MetricTarget.VALUE.name());

    Iterator<? extends Histogram.Bucket> actualIter = buckets.iterator();
    Iterator<PipelineAggregationHelperTests.MockBucket> expectedBucketIter = mockHisto.iterator();
    Iterator<Double> expectedCountsIter = expectedCounts.iterator();
    Iterator<Double> expectedValuesIter = expectedValues.iterator();

    while (actualIter.hasNext()) {
        assertValidIterators(expectedBucketIter, expectedCountsIter, expectedValuesIter);

        Histogram.Bucket actual = actualIter.next();
        PipelineAggregationHelperTests.MockBucket expected = expectedBucketIter.next();
        Double expectedCount = expectedCountsIter.next();
        Double expectedValue = expectedValuesIter.next();

        assertThat("keys do not match", ((Number) actual.getKey()).longValue(), equalTo(expected.key));
        assertThat("doc counts do not match", actual.getDocCount(), equalTo((long)expected.count));
        assertBucketContents(actual, expectedCount, expectedValue);
    }
}

/**
 * Same lock-step check for the Holt linear (double exponential) model.
 */
@Test
public void holtSingleValuedField() {
    SearchResponse response = client()
            .prepareSearch("idx").setTypes("type")
            .addAggregation(
                    histogram("histo").field(INTERVAL_FIELD).interval(interval)
                            .extendedBounds(0L, (long) (interval * (numBuckets - 1)))
                            .subAggregation(metric)
                            .subAggregation(movingAvg("movavg_counts")
                                    .window(windowSize)
                                    .modelBuilder(new HoltLinearModel.HoltLinearModelBuilder().alpha(alpha).beta(beta))
                                    .gapPolicy(gapPolicy)
                                    .setBucketsPaths("_count"))
                            .subAggregation(movingAvg("movavg_values")
                                    .window(windowSize)
                                    .modelBuilder(new HoltLinearModel.HoltLinearModelBuilder().alpha(alpha).beta(beta))
                                    .gapPolicy(gapPolicy)
                                    .setBucketsPaths("the_metric"))
            ).execute().actionGet();

    assertSearchResponse(response);

    InternalHistogram<Bucket> histo = response.getAggregations().get("histo");
    assertThat(histo, notNullValue());
    assertThat(histo.getName(), equalTo("histo"));
    List<? extends Bucket> buckets = histo.getBuckets();
    assertThat("Size of buckets array is not correct.", buckets.size(), equalTo(mockHisto.size()));

    // Expectations for the HOLT model.
    List<Double> expectedCounts = testValues.get(MovAvgType.HOLT.name() + "_" + MetricTarget.COUNT.name());
    List<Double> expectedValues = testValues.get(MovAvgType.HOLT.name() + "_" + MetricTarget.VALUE.name());

    Iterator<? extends Histogram.Bucket> actualIter = buckets.iterator();
    Iterator<PipelineAggregationHelperTests.MockBucket> expectedBucketIter = mockHisto.iterator();
    Iterator<Double> expectedCountsIter = expectedCounts.iterator();
    Iterator<Double> expectedValuesIter = expectedValues.iterator();

    while (actualIter.hasNext()) {
        assertValidIterators(expectedBucketIter, expectedCountsIter, expectedValuesIter);

        Histogram.Bucket actual = actualIter.next();
        PipelineAggregationHelperTests.MockBucket expected = expectedBucketIter.next();
        Double expectedCount = expectedCountsIter.next();
        Double expectedValue = expectedValuesIter.next();

        assertThat("keys do not match", ((Number) actual.getKey()).longValue(), equalTo(expected.key));
        assertThat("doc counts do not match", actual.getDocCount(), equalTo((long)expected.count));
        assertBucketContents(actual, expectedCount, expectedValue);
    }
}

/**
 * Holt-Winters (triple exponential) model with explicit alpha/beta/gamma/period and
 * minimization disabled, compared against precomputed HOLT_WINTERS expectations.
 */
@Test
public void HoltWintersValuedField() {
    SearchResponse response = client()
            .prepareSearch("idx").setTypes("type")
            .addAggregation(
                    histogram("histo").field(INTERVAL_FIELD).interval(interval)
                            .extendedBounds(0L, (long) (interval * (numBuckets - 1)))
                            .subAggregation(metric)
                            .subAggregation(movingAvg("movavg_counts")
                                    .window(windowSize)
                                    .modelBuilder(new HoltWintersModel.HoltWintersModelBuilder()
                                            .alpha(alpha).beta(beta).gamma(gamma).period(period).seasonalityType(seasonalityType))
                                    .gapPolicy(gapPolicy)
                                    .minimize(false)
                                    .setBucketsPaths("_count"))
                            .subAggregation(movingAvg("movavg_values")
                                    .window(windowSize)
                                    .modelBuilder(new HoltWintersModel.HoltWintersModelBuilder()
                                            .alpha(alpha).beta(beta).gamma(gamma).period(period).seasonalityType(seasonalityType))
                                    .gapPolicy(gapPolicy)
                                    .minimize(false)
                                    .setBucketsPaths("the_metric"))
            ).execute().actionGet();

    assertSearchResponse(response);

    InternalHistogram<Bucket> histo = response.getAggregations().get("histo");
    assertThat(histo, notNullValue());
    assertThat(histo.getName(), equalTo("histo"));
    List<? extends Bucket> buckets = histo.getBuckets();
    assertThat("Size of buckets array is not correct.", buckets.size(), equalTo(mockHisto.size()));

    // Expectations for the HOLT_WINTERS model.
    List<Double> expectedCounts = testValues.get(MovAvgType.HOLT_WINTERS.name() + "_" + MetricTarget.COUNT.name());
    List<Double> expectedValues = testValues.get(MovAvgType.HOLT_WINTERS.name() + "_" + MetricTarget.VALUE.name());

    Iterator<? extends Histogram.Bucket> actualIter = buckets.iterator();
    Iterator<PipelineAggregationHelperTests.MockBucket> expectedBucketIter = mockHisto.iterator();
    Iterator<Double> expectedCountsIter = expectedCounts.iterator();
    Iterator<Double> expectedValuesIter = expectedValues.iterator();

    while (actualIter.hasNext()) {
        assertValidIterators(expectedBucketIter, expectedCountsIter, expectedValuesIter);

        Histogram.Bucket actual = actualIter.next();
        PipelineAggregationHelperTests.MockBucket expected = expectedBucketIter.next();
        Double expectedCount = expectedCountsIter.next();
        Double expectedValue = expectedValuesIter.next();

        assertThat("keys do not match", ((Number) actual.getKey()).longValue(), equalTo(expected.key));
        assertThat("doc counts do not match", actual.getDocCount(), equalTo((long)expected.count));
        assertBucketContents(actual, expectedCount, expectedValue);
    }
}

/**
 * Histogram over "neg_idx" whose keys start at -10; asserts the moving average is null
 * only in the very first bucket, is 10 for the populated buckets, and that the 5
 * predicted buckets (doc count 0, no "avg" agg) still carry a movavg value of 10.
 */
@Test
public void testPredictNegativeKeysAtStart() {
    SearchResponse response = client()
            .prepareSearch("neg_idx")
            .setTypes("type")
            .addAggregation(
                    histogram("histo")
                            .field(INTERVAL_FIELD)
                            .interval(1)
                            .subAggregation(avg("avg").field(VALUE_FIELD))
                            .subAggregation(
                                    movingAvg("movavg_values").window(windowSize).modelBuilder(new SimpleModel.SimpleModelBuilder())
                                            .gapPolicy(gapPolicy).predict(5).setBucketsPaths("avg"))).execute().actionGet();

    assertSearchResponse(response);

    InternalHistogram<Bucket> histo = response.getAggregations().get("histo");
    assertThat(histo, notNullValue());
    assertThat(histo.getName(), equalTo("histo"));
    List<? extends Bucket> buckets = histo.getBuckets();
    // 20 real buckets + 5 predictions.
    assertThat("Size of buckets array is not correct.", buckets.size(), equalTo(25));

    // First bucket: no preceding data, so the moving average is absent.
    SimpleValue current = buckets.get(0).getAggregations().get("movavg_values");
    assertThat(current, nullValue());

    // Populated buckets: keys run from -9 upward, value and movavg both 10.
    for (int i = 1; i < 20; i++) {
        Bucket bucket = buckets.get(i);
        assertThat(bucket, notNullValue());
        assertThat((long) bucket.getKey(), equalTo((long) i - 10));
        assertThat(bucket.getDocCount(), equalTo(1l));
        Avg avgAgg = bucket.getAggregations().get("avg");
        assertThat(avgAgg, notNullValue());
        assertThat(avgAgg.value(), equalTo(10d));
        SimpleValue movAvgAgg = bucket.getAggregations().get("movavg_values");
        assertThat(movAvgAgg, notNullValue());
        assertThat(movAvgAgg.value(), equalTo(10d));
    }

    // Predicted buckets: empty (no docs, no "avg"), but the movavg prediction is present.
    for (int i = 20; i < 25; i++) {
        Bucket bucket = buckets.get(i);
        assertThat(bucket, notNullValue());
        assertThat((long) bucket.getKey(), equalTo((long) i - 10));
        assertThat(bucket.getDocCount(), equalTo(0l));
        Avg avgAgg = bucket.getAggregations().get("avg");
        assertThat(avgAgg, nullValue());
        SimpleValue movAvgAgg = bucket.getAggregations().get("movavg_values");
        assertThat(movAvgAgg, notNullValue());
        assertThat(movAvgAgg.value(), equalTo(10d));
    }
}

/**
 * A window size of zero must be rejected with a SearchPhaseExecutionException.
 */
@Test
public void testSizeZeroWindow() {
    try {
        client()
                .prepareSearch("idx").setTypes("type")
                .addAggregation(
                        histogram("histo").field(INTERVAL_FIELD).interval(interval)
                                .extendedBounds(0L, (long) (interval * (numBuckets - 1)))
                                .subAggregation(randomMetric("the_metric", VALUE_FIELD))
                                .subAggregation(movingAvg("movavg_counts")
                                        .window(0)
                                        .modelBuilder(new
                                                SimpleModel.SimpleModelBuilder())
                                        .gapPolicy(gapPolicy)
                                        .setBucketsPaths("the_metric"))
                ).execute().actionGet();
        fail("MovingAvg should not accept a window that is zero");
    } catch (SearchPhaseExecutionException exception) {
        // All good
    }
}

/**
 * A moving average under a non-histogram parent (here a range agg) must be rejected.
 */
@Test
public void testBadParent() {
    try {
        client()
                .prepareSearch("idx").setTypes("type")
                .addAggregation(
                        range("histo").field(INTERVAL_FIELD).addRange(0, 10)
                                .subAggregation(randomMetric("the_metric", VALUE_FIELD))
                                .subAggregation(movingAvg("movavg_counts")
                                        .window(0)
                                        .modelBuilder(new SimpleModel.SimpleModelBuilder())
                                        .gapPolicy(gapPolicy)
                                        .setBucketsPaths("the_metric"))
                ).execute().actionGet();
        fail("MovingAvg should not accept non-histogram as parent");
    } catch (SearchPhaseExecutionException exception) {
        // All good
    }
}

/**
 * A negative window size must be rejected with a SearchPhaseExecutionException.
 */
@Test
public void testNegativeWindow() {
    try {
        client()
                .prepareSearch("idx").setTypes("type")
                .addAggregation(
                        histogram("histo").field(INTERVAL_FIELD).interval(interval)
                                .extendedBounds(0L, (long) (interval * (numBuckets - 1)))
                                .subAggregation(randomMetric("the_metric", VALUE_FIELD))
                                .subAggregation(movingAvg("movavg_counts")
                                        .window(-10)
                                        .modelBuilder(new SimpleModel.SimpleModelBuilder())
                                        .gapPolicy(gapPolicy)
                                        .setBucketsPaths("_count"))
                ).execute().actionGet();
        fail("MovingAvg should not accept a window that is negative");
    } catch (SearchPhaseExecutionException exception) {
        // Commented-out checks kept from the original for reference:
        //Throwable rootCause = exception.unwrapCause();
        //assertThat(rootCause, instanceOf(SearchParseException.class));
        //assertThat("[window] value must be a positive, non-zero integer. Value supplied was [0] in [movingAvg].", equalTo(exception.getMessage()));
    }
}

/**
 * A histogram over a field with no values produces zero buckets; the moving average
 * must tolerate that and simply produce nothing.
 */
@Test
public void testNoBucketsInHistogram() {
    SearchResponse response = client()
            .prepareSearch("idx").setTypes("type")
            .addAggregation(
                    histogram("histo").field("test").interval(interval)
                            .subAggregation(randomMetric("the_metric", VALUE_FIELD))
                            .subAggregation(movingAvg("movavg_counts")
                                    .window(windowSize)
                                    .modelBuilder(new SimpleModel.SimpleModelBuilder())
                                    .gapPolicy(gapPolicy)
                                    .setBucketsPaths("the_metric"))
            ).execute().actionGet();

    assertSearchResponse(response);

    InternalHistogram<Bucket> histo = response.getAggregations().get("histo");
    assertThat(histo, notNullValue());
    assertThat(histo.getName(), equalTo("histo"));
    List<? extends Bucket> buckets = histo.getBuckets();
    assertThat(buckets.size(), equalTo(0));
}

/**
 * Same as above but with predictions enabled: an empty histogram must still yield zero
 * buckets (no predicted buckets can be appended to nothing).
 */
@Test
public void testNoBucketsInHistogramWithPredict() {
    int numPredictions = randomIntBetween(1,10);
    SearchResponse response = client()
            .prepareSearch("idx").setTypes("type")
            .addAggregation(
                    histogram("histo").field("test").interval(interval)
                            .subAggregation(randomMetric("the_metric", VALUE_FIELD))
                            .subAggregation(movingAvg("movavg_counts")
                                    .window(windowSize)
                                    .modelBuilder(new SimpleModel.SimpleModelBuilder())
                                    .gapPolicy(gapPolicy)
                                    .setBucketsPaths("the_metric")
                                    .predict(numPredictions))
            ).execute().actionGet();

    assertSearchResponse(response);

    InternalHistogram<Bucket> histo = response.getAggregations().get("histo");
    assertThat(histo, notNullValue());
    assertThat(histo.getName(), equalTo("histo"));
    List<?
            extends Bucket> buckets = histo.getBuckets();
    assertThat(buckets.size(), equalTo(0));
}

/**
 * A prediction count of zero must be rejected.
 */
@Test
public void testZeroPrediction() {
    try {
        client()
                .prepareSearch("idx").setTypes("type")
                .addAggregation(
                        histogram("histo").field(INTERVAL_FIELD).interval(interval)
                                .extendedBounds(0L, (long) (interval * (numBuckets - 1)))
                                .subAggregation(randomMetric("the_metric", VALUE_FIELD))
                                .subAggregation(movingAvg("movavg_counts")
                                        .window(windowSize)
                                        .modelBuilder(randomModelBuilder())
                                        .gapPolicy(gapPolicy)
                                        .predict(0)
                                        .setBucketsPaths("the_metric"))
                ).execute().actionGet();
        fail("MovingAvg should not accept a prediction size that is zero");
    } catch (SearchPhaseExecutionException exception) {
        // All Good
    }
}

/**
 * A negative prediction count must be rejected.
 */
@Test
public void testNegativePrediction() {
    try {
        client()
                .prepareSearch("idx").setTypes("type")
                .addAggregation(
                        histogram("histo").field(INTERVAL_FIELD).interval(interval)
                                .extendedBounds(0L, (long) (interval * (numBuckets - 1)))
                                .subAggregation(randomMetric("the_metric", VALUE_FIELD))
                                .subAggregation(movingAvg("movavg_counts")
                                        .window(windowSize)
                                        .modelBuilder(randomModelBuilder())
                                        .gapPolicy(gapPolicy)
                                        .predict(-10)
                                        .setBucketsPaths("the_metric"))
                ).execute().actionGet();
        fail("MovingAvg should not accept a prediction size that is negative");
    } catch (SearchPhaseExecutionException exception) {
        // All Good
    }
}

/**
 * Holt-Winters with a period (20) larger than the window (10): there is not enough data
 * to seed the model, and the request is expected to fail.
 */
@Test
public void testHoltWintersNotEnoughData() {
    try {
        SearchResponse response = client()
                .prepareSearch("idx").setTypes("type")
                .addAggregation(
                        histogram("histo").field(INTERVAL_FIELD).interval(interval)
                                .extendedBounds(0L, (long) (interval * (numBuckets - 1)))
                                .subAggregation(metric)
                                .subAggregation(movingAvg("movavg_counts")
                                        .window(10)
                                        .modelBuilder(new HoltWintersModel.HoltWintersModelBuilder()
                                                .alpha(alpha).beta(beta).gamma(gamma).period(20).seasonalityType(seasonalityType))
                                        .gapPolicy(gapPolicy)
                                        .setBucketsPaths("_count"))
                                .subAggregation(movingAvg("movavg_values")
                                        .window(windowSize)
                                        .modelBuilder(new HoltWintersModel.HoltWintersModelBuilder()
                                                .alpha(alpha).beta(beta).gamma(gamma).period(20).seasonalityType(seasonalityType))
                                        .gapPolicy(gapPolicy)
                                        .setBucketsPaths("the_metric"))
                ).execute().actionGet();
    } catch (SearchPhaseExecutionException e) {
        // All good
    }
}

/**
 * Two moving averages in one histogram — one over an avg metric, one over a derivative
 * of that metric — both with 12 predicted buckets. Bucket-by-bucket assertions follow.
 */
@Test
public void testTwoMovAvgsWithPredictions() {
    SearchResponse response = client()
            .prepareSearch("double_predict")
            .setTypes("type")
            .addAggregation(
                    histogram("histo")
                            .field(INTERVAL_FIELD)
                            .interval(1)
                            .subAggregation(avg("avg").field(VALUE_FIELD))
                            .subAggregation(derivative("deriv")
                                    .setBucketsPaths("avg").gapPolicy(gapPolicy))
                            .subAggregation(
                                    movingAvg("avg_movavg").window(windowSize).modelBuilder(new SimpleModel.SimpleModelBuilder())
                                            .gapPolicy(gapPolicy).predict(12).setBucketsPaths("avg"))
                            .subAggregation(
                                    movingAvg("deriv_movavg").window(windowSize).modelBuilder(new SimpleModel.SimpleModelBuilder())
                                            .gapPolicy(gapPolicy).predict(12).setBucketsPaths("deriv"))
            ).execute().actionGet();

    assertSearchResponse(response);

    InternalHistogram<Bucket> histo = response.getAggregations().get("histo");
    assertThat(histo, notNullValue());
    assertThat(histo.getName(), equalTo("histo"));
    List<?
            extends Bucket> buckets = histo.getBuckets();
    // 12 real buckets + 12 predictions.
    assertThat("Size of buckets array is not correct.", buckets.size(), equalTo(24));

    // First bucket: avg present, but derivative and both moving averages not yet available.
    Bucket bucket = buckets.get(0);
    assertThat(bucket, notNullValue());
    assertThat((long) bucket.getKey(), equalTo((long) 0));
    assertThat(bucket.getDocCount(), equalTo(1l));

    Avg avgAgg = bucket.getAggregations().get("avg");
    assertThat(avgAgg, notNullValue());
    assertThat(avgAgg.value(), equalTo(10d));

    SimpleValue movAvgAgg = bucket.getAggregations().get("avg_movavg");
    assertThat(movAvgAgg, nullValue());

    Derivative deriv = bucket.getAggregations().get("deriv");
    assertThat(deriv, nullValue());

    SimpleValue derivMovAvg = bucket.getAggregations().get("deriv_movavg");
    assertThat(derivMovAvg, nullValue());

    // Second bucket
    bucket = buckets.get(1);
    assertThat(bucket, notNullValue());
    assertThat((long) bucket.getKey(), equalTo(1L));
    assertThat(bucket.getDocCount(), equalTo(1l));

    avgAgg = bucket.getAggregations().get("avg");
    assertThat(avgAgg, notNullValue());
    assertThat(avgAgg.value(), equalTo(10d));

    deriv = bucket.getAggregations().get("deriv");
    assertThat(deriv, notNullValue());
    assertThat(deriv.value(), equalTo(0d));

    movAvgAgg = bucket.getAggregations().get("avg_movavg");
    assertThat(movAvgAgg, notNullValue());
    assertThat(movAvgAgg.value(), equalTo(10d));

    derivMovAvg = bucket.getAggregations().get("deriv_movavg");
    assertThat(derivMovAvg, Matchers.nullValue());                 // still null because of movavg delay

    // Remaining real buckets: flat data, so avg=10, deriv=0, and both movavgs settled.
    for (int i = 2; i < 12; i++) {
        bucket = buckets.get(i);
        assertThat(bucket, notNullValue());
        assertThat((long) bucket.getKey(), equalTo((long) i));
        assertThat(bucket.getDocCount(), equalTo(1l));

        avgAgg = bucket.getAggregations().get("avg");
        assertThat(avgAgg, notNullValue());
        assertThat(avgAgg.value(), equalTo(10d));

        deriv = bucket.getAggregations().get("deriv");
        assertThat(deriv, notNullValue());
        assertThat(deriv.value(), equalTo(0d));

        movAvgAgg = bucket.getAggregations().get("avg_movavg");
        assertThat(movAvgAgg, notNullValue());
        assertThat(movAvgAgg.value(), equalTo(10d));

        derivMovAvg = bucket.getAggregations().get("deriv_movavg");
        assertThat(derivMovAvg, notNullValue());
        assertThat(derivMovAvg.value(), equalTo(0d));
    }

    // Predictions: empty buckets carrying only the predicted movavg values.
    for (int i = 12; i < 24; i++) {
        bucket = buckets.get(i);
        assertThat(bucket, notNullValue());
        assertThat((long) bucket.getKey(), equalTo((long) i));
        assertThat(bucket.getDocCount(), equalTo(0l));

        avgAgg = bucket.getAggregations().get("avg");
        assertThat(avgAgg, nullValue());

        deriv = bucket.getAggregations().get("deriv");
        assertThat(deriv, nullValue());

        movAvgAgg = bucket.getAggregations().get("avg_movavg");
        assertThat(movAvgAgg, notNullValue());
        assertThat(movAvgAgg.value(), equalTo(10d));

        derivMovAvg = bucket.getAggregations().get("deriv_movavg");
        assertThat(derivMovAvg, notNullValue());
        assertThat(derivMovAvg.value(), equalTo(0d));
    }
}

/**
 * Out-of-range model parameters (randomModelBuilder(100) pads alpha/beta by 100) are
 * expected to be rejected at parse time.
 */
@Test
public void testBadModelParams() {
    try {
        SearchResponse response = client()
                .prepareSearch("idx").setTypes("type")
                .addAggregation(
                        histogram("histo").field(INTERVAL_FIELD).interval(interval)
                                .extendedBounds(0L, (long) (interval * (numBuckets - 1)))
                                .subAggregation(metric)
                                .subAggregation(movingAvg("movavg_counts")
                                        .window(10)
                                        .modelBuilder(randomModelBuilder(100))
                                        .gapPolicy(gapPolicy)
                                        .setBucketsPaths("_count"))
                ).execute().actionGet();
    } catch (SearchPhaseExecutionException e) {
        // All good
    }
}

/**
 * Holt-Winters with coefficient minimization enabled: no explicit alpha/beta/gamma,
 * only period and seasonality are fixed.
 */
@Test
public void HoltWintersMinimization() {
    SearchResponse response = client()
            .prepareSearch("idx").setTypes("type")
            .addAggregation(
                    histogram("histo").field(INTERVAL_FIELD).interval(interval)
                            .extendedBounds(0L, (long) (interval * (numBuckets - 1)))
                            .subAggregation(metric)
                            .subAggregation(movingAvg("movavg_counts")
                                    .window(windowSize)
                                    .modelBuilder(new HoltWintersModel.HoltWintersModelBuilder()
                                            .period(period).seasonalityType(seasonalityType))
                                    .gapPolicy(gapPolicy)
                                    .minimize(true)
                                    .setBucketsPaths("_count"))
                            .subAggregation(movingAvg("movavg_values")
                                    .window(windowSize)
                                    .modelBuilder(new HoltWintersModel.HoltWintersModelBuilder()
                                            .period(period).seasonalityType(seasonalityType))
                                    .gapPolicy(gapPolicy)
                                    .minimize(true)
                                    .setBucketsPaths("the_metric"))
            ).execute().actionGet();

    assertSearchResponse(response);

    InternalHistogram<Bucket> histo = response.getAggregations().get("histo");
    assertThat(histo, notNullValue());
    assertThat(histo.getName(), equalTo("histo"));
    List<? extends Bucket> buckets = histo.getBuckets();
    assertThat("Size of buckets array is not correct.", buckets.size(), equalTo(mockHisto.size()));

    List<Double> expectedCounts = testValues.get(MovAvgType.HOLT_WINTERS.name() + "_" + MetricTarget.COUNT.name());
    List<Double> expectedValues = testValues.get(MovAvgType.HOLT_WINTERS.name() + "_" + MetricTarget.VALUE.name());

    Iterator<? extends Histogram.Bucket> actualIter = buckets.iterator();
    Iterator<PipelineAggregationHelperTests.MockBucket> expectedBucketIter = mockHisto.iterator();
    Iterator<Double> expectedCountsIter = expectedCounts.iterator();
    Iterator<Double> expectedValueIter = expectedValues.iterator();

    // The minimizer is stochastic, so just make sure all the values coming back aren't null
    while (actualIter.hasNext()) {
        Histogram.Bucket actual = actualIter.next();
        PipelineAggregationHelperTests.MockBucket expected = expectedBucketIter.next();
        Double expectedCount = expectedCountsIter.next();
        Double expectedValue = expectedValueIter.next();

        assertThat("keys do not match", ((Number) actual.getKey()).longValue(), equalTo(expected.key));
        assertThat("doc counts do not match", actual.getDocCount(), equalTo((long)expected.count));

        SimpleValue countMovAvg = actual.getAggregations().get("movavg_counts");
        SimpleValue valuesMovAvg = actual.getAggregations().get("movavg_values");

        if (expectedCount == null) {
            //this bucket wasn't supposed to have a value (empty, skipped, etc), so
            //movavg should be null too
            assertThat(countMovAvg, nullValue());
        } else {
            // Note that we don't compare against the mock values, since those are assuming
            // a non-minimized set of coefficients. Just check for not-nullness
            assertThat(countMovAvg, notNullValue());
        }

        if (expectedValue == null) {
            //this bucket wasn't supposed to have a value (empty, skipped, etc), so
            //movavg should be null too
            assertThat(valuesMovAvg, nullValue());
        } else {
            // Note that we don't compare against the mock values, since those are assuming
            // a non-minimized set of coefficients. Just check for not-nullness
            assertThat(valuesMovAvg, notNullValue());
        }
    }
}

/**
 * If the minimizer is turned on, but there isn't enough data to minimize with, it will simply use
 * the default settings. Which means our mock histo will match the generated result (which it won't
 * if the minimizer is actually working, since the coefficients will be different and thus generate different
 * data)
 *
 * We can simulate this by setting the window size == size of histo
 */
@Test
public void minimizeNotEnoughData() {
    SearchResponse response = client()
            .prepareSearch("idx").setTypes("type")
            .addAggregation(
                    histogram("histo").field(INTERVAL_FIELD).interval(interval)
                            .extendedBounds(0L, (long) (interval * (numBuckets - 1)))
                            .subAggregation(metric)
                            .subAggregation(movingAvg("movavg_counts")
                                    .window(numBuckets)
                                    .modelBuilder(new HoltLinearModel.HoltLinearModelBuilder().alpha(alpha).beta(beta))
                                    .gapPolicy(gapPolicy)
                                    .minimize(true)
                                    .setBucketsPaths("_count"))
                            .subAggregation(movingAvg("movavg_values")
                                    .window(numBuckets)
                                    .modelBuilder(new HoltLinearModel.HoltLinearModelBuilder().alpha(alpha).beta(beta))
                                    .gapPolicy(gapPolicy)
                                    .minimize(true)
                                    .setBucketsPaths("the_metric"))
            ).execute().actionGet();

    assertSearchResponse(response);

    InternalHistogram<Bucket> histo = response.getAggregations().get("histo");
    assertThat(histo, notNullValue());
    assertThat(histo.getName(), equalTo("histo"));
    List<?
            extends Bucket> buckets = histo.getBuckets();
    assertThat("Size of buckets array is not correct.", buckets.size(), equalTo(mockHisto.size()));

    // HOLT_BIG_MINIMIZE expectations assume default coefficients (see javadoc above).
    List<Double> expectedCounts = testValues.get(MovAvgType.HOLT_BIG_MINIMIZE.name() + "_" + MetricTarget.COUNT.name());
    List<Double> expectedValues = testValues.get(MovAvgType.HOLT_BIG_MINIMIZE.name() + "_" + MetricTarget.VALUE.name());

    Iterator<? extends Histogram.Bucket> actualIter = buckets.iterator();
    Iterator<PipelineAggregationHelperTests.MockBucket> expectedBucketIter = mockHisto.iterator();
    Iterator<Double> expectedCountsIter = expectedCounts.iterator();
    Iterator<Double> expectedValuesIter = expectedValues.iterator();

    while (actualIter.hasNext()) {
        assertValidIterators(expectedBucketIter, expectedCountsIter, expectedValuesIter);

        Histogram.Bucket actual = actualIter.next();
        PipelineAggregationHelperTests.MockBucket expected = expectedBucketIter.next();
        Double expectedCount = expectedCountsIter.next();
        Double expectedValue = expectedValuesIter.next();

        assertThat("keys do not match", ((Number) actual.getKey()).longValue(), equalTo(expected.key));
        assertThat("doc counts do not match", actual.getDocCount(), equalTo((long)expected.count));
        assertBucketContents(actual, expectedCount, expectedValue);
    }
}

/**
 * Only some models can be minimized, should throw exception for: simple, linear
 */
@Test
public void checkIfNonTunableCanBeMinimized() {
    try {
        client()
                .prepareSearch("idx").setTypes("type")
                .addAggregation(
                        histogram("histo").field(INTERVAL_FIELD).interval(interval)
                                .extendedBounds(0L, (long) (interval * (numBuckets - 1)))
                                .subAggregation(metric)
                                .subAggregation(movingAvg("movavg_counts")
                                        .window(numBuckets)
                                        .modelBuilder(new SimpleModel.SimpleModelBuilder())
                                        .gapPolicy(gapPolicy)
                                        .minimize(true)
                                        .setBucketsPaths("_count"))
                ).execute().actionGet();
        fail("Simple Model cannot be minimized, but an exception was not thrown");
    } catch (SearchPhaseExecutionException e) {
        // All good
    }

    try {
        client()
                .prepareSearch("idx").setTypes("type")
                .addAggregation(
                        histogram("histo").field(INTERVAL_FIELD).interval(interval)
                                .extendedBounds(0L, (long) (interval * (numBuckets - 1)))
                                .subAggregation(metric)
                                .subAggregation(movingAvg("movavg_counts")
                                        .window(numBuckets)
                                        .modelBuilder(new LinearModel.LinearModelBuilder())
                                        .gapPolicy(gapPolicy)
                                        .minimize(true)
                                        .setBucketsPaths("_count"))
                ).execute().actionGet();
        fail("Linear Model cannot be minimized, but an exception was not thrown");
    } catch (SearchPhaseExecutionException e) {
        // all good
    }
}

/**
 * These models are all minimizable, so they should not throw exceptions
 */
@Test
public void checkIfTunableCanBeMinimized() {
    MovAvgModelBuilder[] builders = new MovAvgModelBuilder[]{
            new EwmaModel.EWMAModelBuilder(),
            new HoltLinearModel.HoltLinearModelBuilder(),
            new HoltWintersModel.HoltWintersModelBuilder()
    };

    for (MovAvgModelBuilder builder : builders) {
        try {
            client()
                    .prepareSearch("idx").setTypes("type")
                    .addAggregation(
                            histogram("histo").field(INTERVAL_FIELD).interval(interval)
                                    .extendedBounds(0L, (long) (interval * (numBuckets - 1)))
                                    .subAggregation(metric)
                                    .subAggregation(movingAvg("movavg_counts")
                                            .window(numBuckets)
                                            .modelBuilder(builder)
                                            .gapPolicy(gapPolicy)
                                            .minimize(true)
                                            .setBucketsPaths("_count"))
                    ).execute().actionGet();
        } catch (SearchPhaseExecutionException e) {
            fail("Model [" + builder.toString() + "] can be minimized, but an exception was thrown");
        }
    }
}

/**
 * Unknown settings keys passed to any model must be tolerated/rejected gracefully —
 * every model builder is exercised with a bogus "abc" setting.
 */
@Test
public void testUnrecognizedParams() {
    MovAvgModelBuilder[] builders = new MovAvgModelBuilder[]{
            new SimpleModel.SimpleModelBuilder(),
            new LinearModel.LinearModelBuilder(),
            new EwmaModel.EWMAModelBuilder(),
            new HoltLinearModel.HoltLinearModelBuilder(),
            new HoltWintersModel.HoltWintersModelBuilder()
    };
    Map<String, Object> badSettings = new HashMap<>(1);
    badSettings.put("abc", 1.2);

    for (MovAvgModelBuilder builder : builders) {
        try {
            SearchResponse response = client()
                    .prepareSearch("idx").setTypes("type")
                    .addAggregation(
                            histogram("histo").field(INTERVAL_FIELD).interval(interval)
                                    .extendedBounds(0L, (long) (interval * (numBuckets - 1)))
                                    .subAggregation(metric)
                                    .subAggregation(movingAvg("movavg_counts")
                                            .window(10)
                                            .modelBuilder(builder)
                                            .gapPolicy(gapPolicy)
                                            .settings(badSettings)
                                            .setBucketsPaths("_count"))
                    ).execute().actionGet();
        } catch (SearchPhaseExecutionException e) {
            // All good
        }
    }
}

// Guards the lock-step iteration in the tests above: fails fast with a size-mismatch
// message if any expectation iterator runs out before the actual buckets do.
private void assertValidIterators(Iterator expectedBucketIter, Iterator expectedCountsIter, Iterator expectedValuesIter) {
    if (!expectedBucketIter.hasNext()) {
        fail("`expectedBucketIter` iterator ended before `actual` iterator, size mismatch");
    }
    if (!expectedCountsIter.hasNext()) {
        fail("`expectedCountsIter` iterator ended before `actual` iterator, size mismatch");
    }
    if (!expectedValuesIter.hasNext()) {
        fail("`expectedValuesIter` iterator ended before `actual` iterator, size mismatch");
    }
}

// Compares one bucket's "movavg_counts" and "movavg_values" against the expectations.
// A null expectation means the movavg must be absent; NaN must match exactly; otherwise
// values are compared with nearlyEqual() at epsilon 0.1.
private void assertBucketContents(Histogram.Bucket actual, Double expectedCount, Double expectedValue) {
    // This is a gap bucket
    SimpleValue countMovAvg = actual.getAggregations().get("movavg_counts");
    if (expectedCount == null) {
        assertThat("[_count] movavg is not null", countMovAvg, nullValue());
    } else if (Double.isNaN(expectedCount)) {
        assertThat("[_count] movavg should be NaN, but is ["+countMovAvg.value()+"] instead", countMovAvg.value(), equalTo(Double.NaN));
    } else {
        assertThat("[_count] movavg is null", countMovAvg, notNullValue());
        assertTrue("[_count] movavg does not match expected [" + countMovAvg.value() + " vs " + expectedCount + "]",
                nearlyEqual(countMovAvg.value(), expectedCount, 0.1));
    }

    // This is a gap bucket
    SimpleValue valuesMovAvg = actual.getAggregations().get("movavg_values");
    if (expectedValue == null) {
        assertThat("[value] movavg is not null", valuesMovAvg, Matchers.nullValue());
    } else if (Double.isNaN(expectedValue)) {
        assertThat("[value] movavg should be NaN, but is ["+valuesMovAvg.value()+"] instead", valuesMovAvg.value(), equalTo(Double.NaN));
    } else {
        assertThat("[value] movavg is null", valuesMovAvg, notNullValue());
        assertTrue("[value] movavg does not match expected [" + valuesMovAvg.value() + " vs " + expectedValue + "]",
                nearlyEqual(valuesMovAvg.value(), expectedValue, 0.1));
    }
}

/**
 * Better floating point comparisons courtesy of https://github.com/brazzy/floating-point-gui.de
 *
 * Snippet adapted to use doubles instead of floats
 *
 * @param a       first value
 * @param b       second value
 * @param epsilon relative tolerance
 * @return true if a and b are equal within the tolerance
 */
private static boolean nearlyEqual(double a, double b, double epsilon) {
    final double absA = Math.abs(a);
    final double absB = Math.abs(b);
    final double diff = Math.abs(a - b);

    if (a == b) {
        // shortcut, handles infinities
        return true;
    } else if (a == 0 || b == 0 || diff < Double.MIN_NORMAL) {
        // a or b is zero or both are extremely close to it
        // relative error is less meaningful here
        return diff < (epsilon * Double.MIN_NORMAL);
    } else {
        // use relative error
        return diff / Math.min((absA + absB), Double.MAX_VALUE) < epsilon;
    }
}

// Random model with default (0) padding on its coefficients.
private MovAvgModelBuilder randomModelBuilder() {
    return randomModelBuilder(0);
}

// Picks a random model; `padding` is added to alpha/beta so that a large padding
// produces deliberately invalid coefficients (see testBadModelParams).
private MovAvgModelBuilder randomModelBuilder(double padding) {
    int rand = randomIntBetween(0,3);

    // HoltWinters is excluded from random generation, because it's "cold start" behavior makes
    // randomized testing too tricky. Should probably add dedicated, randomized tests just for HoltWinters,
    // which can compensate for the idiosyncrasies
    switch (rand) {
        case 0:
            return new SimpleModel.SimpleModelBuilder();
        case 1:
            return new LinearModel.LinearModelBuilder();
        case 2:
            return new EwmaModel.EWMAModelBuilder().alpha(alpha + padding);
        case 3:
            return new HoltLinearModel.HoltLinearModelBuilder().alpha(alpha + padding).beta(beta + padding);
        default:
            return new SimpleModel.SimpleModelBuilder();
    }
}

// Picks a random metric aggregation over `field`.
// NOTE(review): `case 1` is missing, so rand==1 falls through to the default (avg) —
// presumably intentional weighting toward avg, but worth confirming.
private ValuesSourceMetricsAggregationBuilder randomMetric(String name, String field) {
    int rand = randomIntBetween(0,3);

    switch (rand) {
        case 0:
            return min(name).field(field);
        case 2:
            return max(name).field(field);
        case 3:
            return avg(name).field(field);
        default:
            return avg(name).field(field);
    }
}
}
/*
 * Copyright 2000-2009 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/*
 * Class RemoteConfigurable
 * @author Jeka
 */
package com.intellij.execution.remote;

import com.intellij.execution.ExecutionBundle;
import com.intellij.execution.configurations.RemoteConnection;
import com.intellij.execution.ui.ConfigurationArgumentsHelpArea;
import com.intellij.execution.ui.ConfigurationModuleSelector;
import com.intellij.openapi.options.ConfigurationException;
import com.intellij.openapi.options.SettingsEditor;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.LabeledComponent;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.ui.DocumentAdapter;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;

import javax.swing.*;
import javax.swing.event.DocumentEvent;
import javax.swing.event.DocumentListener;
import java.awt.event.*;

/**
 * Settings editor UI for a remote-debug run configuration: transport choice
 * (socket vs. shared memory), connection mode (listen vs. attach), host/port or
 * shmem address, plus help areas showing the JVM command-line flags to use.
 * Form components are bound from the associated .form file.
 */
public class RemoteConfigurable extends SettingsEditor<RemoteConfiguration> {
  JPanel myPanel;
  private JRadioButton myRbSocket;
  private JRadioButton myRbShmem;
  private JRadioButton myRbListen;
  private JRadioButton myRbAttach;
  private JTextField myAddressField;
  private JTextField myHostField;
  private JTextField myPortField;
  private JPanel myShmemPanel;
  private JPanel mySocketPanel;
  private ConfigurationArgumentsHelpArea myHelpArea;
  @NonNls private ConfigurationArgumentsHelpArea myJDK13HelpArea;
  private ConfigurationArgumentsHelpArea myJDK14HelpArea;
  private LabeledComponent<JComboBox> myModule;
  // Last host the user typed while in attach mode; restored when switching back.
  private String myHostName = "";
  @NonNls
  protected static final String LOCALHOST = "localhost";
  private final ConfigurationModuleSelector myModuleSelector;

  public RemoteConfigurable(final Project project) {
    myHelpArea.setLabelText(ExecutionBundle.message("remote.configuration.remote.debugging.allows.you.to.connect.idea.to.a.running.jvm.label"));
    myHelpArea.setToolbarVisible();

    myJDK13HelpArea.setLabelText(ExecutionBundle.message("environment.variables.helper.use.arguments.jdk13.label"));
    myJDK13HelpArea.setToolbarVisible();
    myJDK14HelpArea.setLabelText(ExecutionBundle.message("environment.variables.helper.use.arguments.jdk14.label"));
    myJDK14HelpArea.setToolbarVisible();

    // Two independent radio groups: transport (socket/shmem) and mode (listen/attach).
    final ButtonGroup transportGroup = new ButtonGroup();
    transportGroup.add(myRbSocket);
    transportGroup.add(myRbShmem);

    final ButtonGroup connectionGroup = new ButtonGroup();
    connectionGroup.add(myRbListen);
    connectionGroup.add(myRbAttach);

    // Any edit to address/host/port refreshes the generated command-line help.
    final DocumentListener helpTextUpdater = new DocumentAdapter() {
      public void textChanged(DocumentEvent event) {
        updateHelpText();
      }
    };
    myAddressField.getDocument().addDocumentListener(helpTextUpdater);
    myHostField.getDocument().addDocumentListener(helpTextUpdater);
    myPortField.getDocument().addDocumentListener(helpTextUpdater);
    myRbSocket.setSelected(true);

    // Toggling the transport swaps the shmem/socket panels.
    final ActionListener listener = new ActionListener() {
      public void actionPerformed(final ActionEvent e) {
        final Object source = e.getSource();
        if (source.equals(myRbSocket)) {
          myShmemPanel.setVisible(false);
          mySocketPanel.setVisible(true);
        }
        else if (source.equals(myRbShmem)) {
          myShmemPanel.setVisible(true);
          mySocketPanel.setVisible(false);
        }
        myPanel.repaint();
        updateHelpText();
      }
    };
    myRbShmem.addActionListener(listener);
    myRbSocket.addActionListener(listener);

    // Listen mode forces host to localhost; attach mode restores the user's host.
    final ItemListener updateListener = new ItemListener() {
      public void itemStateChanged(final ItemEvent e) {
        final boolean isAttach = myRbAttach.isSelected();

        // Remember what the user typed before the field gets overwritten below.
        if(!isAttach && myHostField.isEditable()) {
          myHostName = myHostField.getText();
        }

        myHostField.setEditable(isAttach);
        myHostField.setEnabled(isAttach);

        myHostField.setText(isAttach ? myHostName : LOCALHOST);
        updateHelpText();
      }
    };
    myRbAttach.addItemListener(updateListener);
    myRbListen.addItemListener(updateListener);

    final FocusListener fieldFocusListener = new FocusAdapter() {
      public void focusLost(final FocusEvent e) {
        updateHelpText();
      }
    };
    myAddressField.addFocusListener(fieldFocusListener);
    myPortField.addFocusListener(fieldFocusListener);

    myModuleSelector = new ConfigurationModuleSelector(project, myModule.getComponent(), "<whole project>");
  }

  /**
   * Pushes the UI state into the configuration; blank host/port/address become null.
   */
  public void applyEditorTo(@NotNull final RemoteConfiguration configuration) throws ConfigurationException {
    // In listen mode the host field shows "localhost"; the real host is kept in myHostName.
    configuration.HOST = (myHostField.isEditable() ? myHostField.getText() : myHostName).trim();
    if (configuration.HOST != null && configuration.HOST.isEmpty()) {
      configuration.HOST = null;
    }
    configuration.PORT = myPortField.getText().trim();
    if (configuration.PORT != null && configuration.PORT.isEmpty()) {
      configuration.PORT = null;
    }
    configuration.SHMEM_ADDRESS = myAddressField.getText().trim();
    if (configuration.SHMEM_ADDRESS != null && configuration.SHMEM_ADDRESS.isEmpty()) {
      configuration.SHMEM_ADDRESS = null;
    }
    configuration.USE_SOCKET_TRANSPORT = myRbSocket.isSelected();
    configuration.SERVER_MODE = myRbListen.isSelected();
    myModuleSelector.applyTo(configuration);
  }

  /**
   * Loads the configuration into the UI.
   * NOTE(review): on non-Windows platforms this mutates the passed-in configuration
   * (forces socket transport) because shared memory is Windows-only — presumably
   * intentional normalization, but it is a side effect of a "reset" method.
   */
  public void resetEditorFrom(final RemoteConfiguration configuration) {
    if (!SystemInfo.isWindows) {
      configuration.USE_SOCKET_TRANSPORT = true;
      myRbShmem.setEnabled(false);
      myAddressField.setEditable(false);
    }
    myAddressField.setText(configuration.SHMEM_ADDRESS);
    myHostName = configuration.HOST;
    myHostField.setText(configuration.HOST);
    myPortField.setText(configuration.PORT);
    // doClick() (rather than setSelected) fires the listeners wired in the constructor
    // so panel visibility and the help text stay in sync.
    if (configuration.USE_SOCKET_TRANSPORT) {
      myRbSocket.doClick();
    }
    else {
      myRbShmem.doClick();
    }
    if (configuration.SERVER_MODE) {
      myRbListen.doClick();
    }
    else {
      myRbAttach.doClick();
    }
    myRbShmem.setEnabled(SystemInfo.isWindows);
    myModuleSelector.reset(configuration);
  }

  @NotNull
  public JComponent createEditor() {
    return myPanel;
  }

  /**
   * Regenerates the three help areas from the current transport/mode/address state.
   */
  private void updateHelpText() {
    boolean useSockets = !myRbShmem.isSelected();

    final RemoteConnection connection = new RemoteConnection(
      useSockets,
      myHostName,
      useSockets ? myPortField.getText().trim() : myAddressField.getText().trim(),
      myRbListen.isSelected()
    );
    final String cmdLine = connection.getLaunchCommandLine();
    // -Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=7007
    // Modern JVMTI form: drop -Xdebug and rewrite -Xrunjdwp: as -agentlib:jdwp=.
    final String jvmtiCmdLine = cmdLine.replace("-Xdebug", "").replace("-Xrunjdwp:", "-agentlib:jdwp=").trim();
    myHelpArea.updateText(jvmtiCmdLine);
    myJDK14HelpArea.updateText(cmdLine);
    myJDK13HelpArea.updateText("-Xnoagent -Djava.compiler=NONE " + cmdLine);
  }
}
/*
 * Autopsy Forensic Browser
 *
 * Copyright 2015-2019 Basis Technology Corp.
 * Contact: carrier <at> sleuthkit <dot> org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.sleuthkit.autopsy.experimental.autoingest;

import java.io.Serializable;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.NavigableMap;
import java.util.Objects;
import java.util.TreeMap;
import javax.annotation.concurrent.Immutable;
import org.apache.commons.codec.DecoderException;
import org.joda.time.DateTime;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
import org.apache.commons.codec.binary.Hex;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;

/**
 * Uniquely named file export rules organized into uniquely named rule sets.
 */
final class FileExportRuleSet implements Serializable, Comparable<FileExportRuleSet> {

    private static final long serialVersionUID = 1L;
    private String name;
    private final TreeMap<String, Rule> rules;

    /**
     * Constructs an empty named set of uniquely named rules.
     *
     * @param name The name of the set.
     */
    FileExportRuleSet(String name) {
        this.name = name;
        rules = new TreeMap<>();
    }

    /**
     * Gets the name of the rule set.
     *
     * @return The rules set name.
     */
    String getName() {
        return name;
    }

    /**
     * Sets the name of the rule set.
     *
     * @param setName The name of the rule set
     */
    public void setName(String setName) {
        this.name = setName;
    }

    /**
     * Gets the uniquely named rules in the rule set.
     *
     * @return A map of rules with name keys, sorted by name.
     */
    NavigableMap<String, Rule> getRules() {
        return Collections.unmodifiableNavigableMap(rules);
    }

    /**
     * Gets a rule by name.
     *
     * @return A rule if found, null otherwise.
     */
    Rule getRule(String ruleName) {
        return rules.get(ruleName);
    }

    /**
     * Adds a rule to this set. If there is a rule in the set with the same
     * name, the existing rule is replaced by the new rule.
     *
     * @param rule The rule to be added to the set.
     */
    void addRule(Rule rule) {
        this.rules.put(rule.getName(), rule);
    }

    /**
     * Removes a rule from a set, if it is present.
     *
     * @param rule The rule to be removed from the set.
     */
    void removeRule(Rule rule) {
        this.rules.remove(rule.getName());
    }

    /**
     * Removes a rule from a set, if it is present.
     *
     * @param ruleName The rule to be removed from the set.
     */
    void removeRule(String ruleName) {
        this.rules.remove(ruleName);
    }

    /**
     * @inheritDoc
     *
     * Rule sets are identified by name only; rule contents do not
     * participate in equality.
     */
    @Override
    public boolean equals(Object that) {
        if (this == that) {
            return true;
        } else if (!(that instanceof FileExportRuleSet)) {
            return false;
        } else {
            FileExportRuleSet thatSet = (FileExportRuleSet) that;
            return this.name.equals(thatSet.getName());
        }
    }

    /**
     * @inheritDoc
     */
    @Override
    public int hashCode() {
        return this.name.hashCode();
    }

    /**
     * @inheritDoc
     */
    @Override
    public int compareTo(FileExportRuleSet that) {
        return this.name.compareTo(that.getName());
    }

    /**
     * A named file export rule consisting of zero to many conditions.
     */
    static final class Rule implements Serializable, Comparable<Rule> {

        private static final long serialVersionUID = 1L;
        private final String name;
        private FileMIMETypeCondition fileTypeCondition;
        private final List<FileSizeCondition> fileSizeConditions;
        private final List<ArtifactCondition> artifactConditions;

        /**
         * Constructs a named file export rule consisting of zero to many
         * conditions.
         *
         * @param name The name of the rule.
         */
        Rule(String name) {
            this.name = name;
            this.fileSizeConditions = new ArrayList<>();
            this.artifactConditions = new ArrayList<>();
        }

        /**
         * Sort the file size conditions of this rule.
         */
        void sortFileSizeConditions() {
            this.fileSizeConditions.sort(null);
        }

        /**
         * Sort the artifact conditions of this rule.
         */
        void sortArtifactConditions() {
            this.artifactConditions.sort(null);
        }

        /**
         * Gets the name of the rule.
         *
         * @return The rule name.
         */
        String getName() {
            return this.name;
        }

        /**
         * Adds a file MIME type condition to the rule. If the rule already has
         * a file MIME type condition, the existing condition is replaced by the
         * new condition.
         *
         * @param condition The new file MIME type condition.
         */
        void addFileMIMETypeCondition(FileMIMETypeCondition condition) {
            this.fileTypeCondition = condition;
        }

        /**
         * Removes the file MIME type condition from the rule, if present.
         */
        void removeFileMIMETypeCondition() {
            this.fileTypeCondition = null;
        }

        /**
         * Gets the file MIME type condition of a rule.
         *
         * @return The file MIME type condition, possibly null.
         */
        FileMIMETypeCondition getFileMIMETypeCondition() {
            return this.fileTypeCondition;
        }

        /**
         * Adds a file size condition to the rule. If the rule already has a
         * file size or file size range condition, the existing condition is
         * replaced by the new condition.
         *
         * A rule may have either a file size condition or a file size range
         * condition, but not both.
         *
         * @param condition The new file size condition.
         */
        void addFileSizeCondition(FileSizeCondition condition) {
            this.fileSizeConditions.clear();
            this.fileSizeConditions.add(condition);
        }

        /**
         * Removes a file size condition from the rule. A rule may have either a
         * file size condition or a file size range condition, but not both.
         */
        void removeFileSizeCondition() {
            this.fileSizeConditions.clear();
        }

        /**
         * Adds a file size range condition to the rule. If the rule already has
         * a file size or file size range condition, the existing condition is
         * replaced by the new condition.
         *
         * The file size conditions that make up the file size range condition
         * are not validated.
         *
         * A rule may have either a file size condition or a file size range
         * condition, but not both.
         *
         * @param conditionOne One part of the new size range condition.
         * @param conditionTwo The other part of the new size range condition.
         */
        void addFileSizeRangeCondition(FileSizeCondition conditionOne, FileSizeCondition conditionTwo) {
            this.fileSizeConditions.clear();
            this.fileSizeConditions.add(conditionOne);
            this.fileSizeConditions.add(conditionTwo);
        }

        /**
         * Gets the file size conditions of a rule.
         *
         * @return A list of zero to two file size conditions.
         */
        List<FileSizeCondition> getFileSizeConditions() {
            return Collections.unmodifiableList(this.fileSizeConditions);
        }

        /**
         * Adds a condition that requires a file to have an artifact of a given
         * type with an attribute of a given type with a value comparable to a
         * specified value. Duplicate conditions are ignored.
         *
         * NOTE: method name typo ("Artfact") is kept for source compatibility
         * with existing callers.
         *
         * @param condition The new artifact condition.
         */
        void addArtfactCondition(ArtifactCondition condition) {
            // List.contains uses ArtifactCondition.equals, which is exactly
            // the duplicate test the previous explicit loop performed.
            if (!this.artifactConditions.contains(condition)) {
                this.artifactConditions.add(condition);
            }
        }

        /**
         * Removes a condition that requires a file to have an artifact of a
         * given type with an attribute of a given type with a value comparable
         * to a specified value.
         *
         * @param condition The artifact condition to remove.
         */
        void removeArtifactCondition(ArtifactCondition condition) {
            this.artifactConditions.remove(condition);
        }

        /**
         * Removes all artifact conditions from this rule.
         */
        void removeArtifactConditions() {
            this.artifactConditions.clear();
        }

        /**
         * Gets the artifact conditions of a rule.
         *
         * @return A list of artifact conditions, possibly empty.
         */
        List<ArtifactCondition> getArtifactConditions() {
            return Collections.unmodifiableList(this.artifactConditions);
        }

        /**
         * @inheritDoc
         */
        @Override
        public boolean equals(Object that) {
            if (this == that) {
                return true;
            } else if (!(that instanceof Rule)) {
                return false;
            } else {
                Rule thatRule = (Rule) that;
                return this.name.equals(thatRule.getName())
                        && conditionsAreEqual(thatRule);
            }
        }

        /**
         * Compares the conditions of this rule with another rule's. As a side
         * effect, sorts the size and artifact condition lists of both rules so
         * that comparison is order-insensitive.
         */
        boolean conditionsAreEqual(Rule that) {
            if (!Objects.equals(this.fileTypeCondition, that.getFileMIMETypeCondition())) {
                return false;
            }

            this.sortFileSizeConditions();
            that.sortFileSizeConditions();
            if (!this.fileSizeConditions.equals(that.getFileSizeConditions())) {
                return false;
            }

            this.sortArtifactConditions();
            that.sortArtifactConditions();
            return this.artifactConditions.equals(that.getArtifactConditions());
        }

        /**
         * @inheritDoc
         */
        @Override
        public int hashCode() {
            // Name-only hash is consistent with equals (equal rules have equal
            // names) even though equals also inspects the conditions.
            return this.name.hashCode();
        }

        /**
         * @inheritDoc
         */
        @Override
        public int compareTo(Rule that) {
            return this.name.compareTo(that.getName());
        }

        /**
         * Evaluates a rule to determine if there are any files that satisfy the
         * rule.
         *
         * @param dataSourceId The data source id of the files.
         *
         * @return A list of file ids, possibly empty.
         *
         * @throws
         * org.sleuthkit.autopsy.autoingest.fileexporter.ExportRuleSet.ExportRulesException
         */
        List<Long> evaluate(long dataSourceId) throws ExportRulesException {
            try {
                SleuthkitCase db = Case.getCurrentCaseThrows().getSleuthkitCase();
                try (SleuthkitCase.CaseDbQuery queryResult = db.executeQuery(getQuery(dataSourceId));
                        ResultSet resultSet = queryResult.getResultSet()) {
                    List<Long> fileIds = new ArrayList<>();
                    while (resultSet.next()) {
                        fileIds.add(resultSet.getLong("obj_id"));
                    }
                    return fileIds;
                }
            } catch (NoCurrentCaseException ex) {
                throw new ExportRulesException("No current case", ex);
            } catch (TskCoreException ex) {
                throw new ExportRulesException("Error querying case database", ex);
            } catch (SQLException ex) {
                throw new ExportRulesException("Error processing result set", ex);
            }
        }

        /**
         * Gets an SQL query statement that returns the object ids (column name
         * is files.obj_id) of the files that satisfy the rule.
         *
         * @param dataSourceId The data source id of the files.
         *
         * @return The SQL query.
         *
         * @throws ExportRulesException If the artifact type or attribute type
         *                              for a condition does not exist.
         */
        private String getQuery(long dataSourceId) throws ExportRulesException {
            StringBuilder query = new StringBuilder("SELECT DISTINCT files.obj_id FROM tsk_files AS files");
            // One aliased artifact/attribute table pair per artifact condition.
            for (int i = 0; i < this.artifactConditions.size(); ++i) {
                query.append(String.format(", blackboard_artifacts AS arts%d, blackboard_attributes AS attrs%d", i, i));
            }
            query.append(" WHERE meta_type=1 AND mime_type IS NOT NULL AND md5 IS NOT NULL AND files.data_source_obj_id = ").append(dataSourceId);
            for (String condition : this.getConditionClauses()) {
                query.append(" AND ").append(condition);
            }
            return query.toString();
        }

        /**
         * Gets the SQL condition clauses for all the conditions.
         *
         * @return A collection of SQL condition clauses.
         *
         * @throws ExportRulesException If the artifact type or attribute type
         *                              for a condition does not exist.
         */
        private List<String> getConditionClauses() throws ExportRulesException {
            List<String> conditions = new ArrayList<>();
            if (null != this.fileTypeCondition) {
                conditions.add(fileTypeCondition.getConditionClause());
            }
            for (FileSizeCondition condition : this.fileSizeConditions) {
                conditions.add(condition.getConditionClause());
            }
            // The index is part of each clause's table aliases (artsN/attrsN).
            for (int i = 0; i < this.artifactConditions.size(); ++i) {
                conditions.add(this.artifactConditions.get(i).getConditionClause(i));
            }
            return conditions;
        }

        /**
         * Relational operators that can be used to define rule conditions.
         */
        enum RelationalOp {

            Equals("="),
            LessThanEquals("<="),
            LessThan("<"),
            GreaterThanEquals(">="),
            GreaterThan(">"),
            NotEquals("!=");

            private final String symbol;
            private static final Map<String, RelationalOp> symbolToEnum = new HashMap<>();

            static {
                for (RelationalOp op : RelationalOp.values()) {
                    symbolToEnum.put(op.getSymbol(), op);
                }
            }

            /**
             * Constructs a relational operator enum member that can are used to
             * define rule conditions.
             *
             * @param symbol The symbolic form of the operator.
             */
            private RelationalOp(String symbol) {
                this.symbol = symbol;
            }

            /**
             * Gets the symbolic form of the operator.
             *
             * @return The operator symbol.
             */
            String getSymbol() {
                return this.symbol;
            }

            /**
             * Looks up the relational operator with a given symbol.
             *
             * @return The relational operator or null if there is no operator
             *         for the symbol.
             */
            static RelationalOp fromSymbol(String symbol) {
                return symbolToEnum.get(symbol);
            }
        }

        /**
         * A condition that requires a file to be of a specified MIME type.
         */
        @Immutable
        static final class FileMIMETypeCondition implements Serializable, Comparable<FileMIMETypeCondition> {

            private static final long serialVersionUID = 1L;
            private final String mimeType;
            private final RelationalOp operator;

            /**
             * Constructs a condition that requires a file to be of a specified
             * MIME type.
             *
             * @param mimeType The MIME type.
             * @param operator The relational operator for the comparison.
             */
            FileMIMETypeCondition(String mimeType, RelationalOp operator) {
                this.mimeType = mimeType;
                this.operator = operator;
            }

            /**
             * Gets the MIME type required by the condition.
             *
             * @return The MIME type.
             */
            String getMIMEType() {
                return mimeType;
            }

            /**
             * Gets the operator required by the condition.
             *
             * @return the operator.
             */
            public RelationalOp getRelationalOp() {
                return operator;
            }

            /**
             * @inheritDoc
             */
            @Override
            public boolean equals(Object that) {
                if (this == that) {
                    return true;
                } else if (!(that instanceof FileMIMETypeCondition)) {
                    return false;
                } else {
                    FileMIMETypeCondition thatCondition = (FileMIMETypeCondition) that;
                    return ((this.mimeType.equals(thatCondition.getMIMEType()))
                            && (this.operator == thatCondition.getRelationalOp()));
                }
            }

            /**
             * @inheritDoc
             */
            @Override
            public int hashCode() {
                return this.mimeType.hashCode();
            }

            @Override
            public int compareTo(FileMIMETypeCondition that) {
                return this.mimeType.compareTo(that.getMIMEType());
            }

            /**
             * Gets an SQL condition clause for the condition.
             *
             * @return The SQL condition clause.
             */
            private String getConditionClause() {
                // Honor the condition's relational operator; the previous
                // implementation hard-coded "=" and silently ignored it.
                // NOTE(review): mimeType is interpolated into SQL unescaped;
                // callers must ensure it comes from a trusted source.
                return String.format("files.mime_type %s '%s'", this.operator.getSymbol(), this.mimeType);
            }
        }

        /**
         * A condition that requires a file to have a size in bytes comparable
         * to a specified size.
         */
        @Immutable
        static final class FileSizeCondition implements Serializable, Comparable<FileSizeCondition> {

            private static final long serialVersionUID = 1L;
            private final int size;
            private final SizeUnit unit;
            private final Rule.RelationalOp op;

            /**
             * Constructs a condition that requires a file to have a size in
             * bytes comparable to a specified size.
             *
             * @param size The specified size.
             * @param unit The unit the size is expressed in.
             * @param op   The relational operator for the comparison.
             */
            FileSizeCondition(int size, SizeUnit unit, Rule.RelationalOp op) {
                this.size = size;
                this.unit = unit;
                this.op = op;
            }

            /**
             * Gets the size required by the condition.
             *
             * @return The size.
             */
            int getSize() {
                return size;
            }

            /**
             * Gets the size unit for the size required by the condition.
             *
             * @return The size unit.
             */
            SizeUnit getUnit() {
                return unit;
            }

            /**
             * Gets the relational operator for the condition.
             *
             * @return The operator.
             */
            RelationalOp getRelationalOperator() {
                return this.op;
            }

            /**
             * @inheritDoc
             */
            @Override
            public boolean equals(Object that) {
                if (this == that) {
                    return true;
                } else if (!(that instanceof FileSizeCondition)) {
                    return false;
                } else {
                    FileSizeCondition thatCondition = (FileSizeCondition) that;
                    return this.size == thatCondition.getSize()
                            && this.unit == thatCondition.getUnit()
                            && this.op == thatCondition.getRelationalOperator();
                }
            }

            /**
             * @inheritDoc
             */
            @Override
            public int hashCode() {
                int hash = 7;
                hash = 9 * hash + this.size;
                hash = 11 * hash + this.unit.hashCode();
                hash = 13 * hash + this.op.hashCode();
                return hash;
            }

            @Override
            public int compareTo(FileSizeCondition that) {
                int retVal = this.unit.compareTo(that.getUnit());
                if (0 != retVal) {
                    return retVal;
                }
                // Integer.compare replaces the deprecated new Long(...) boxing
                // the previous implementation used for this int comparison.
                retVal = Integer.compare(this.size, that.getSize());
                if (0 != retVal) {
                    return retVal;
                }
                return this.op.compareTo(that.getRelationalOperator());
            }

            /**
             * Gets an SQL condition clause for the condition.
             *
             * @return The SQL condition clause.
             */
            private String getConditionClause() {
                // size (int) is promoted to long before the multiply, so the
                // byte count does not overflow for the larger units.
                return String.format("files.size %s %d", op.getSymbol(), size * unit.getMultiplier());
            }

            /**
             * Size units used to define file size conditions.
             */
            enum SizeUnit {

                Bytes(1L),
                Kilobytes(1024L),
                Megabytes(1024L * 1024),
                Gigabytes(1024L * 1024 * 1024),
                Terabytes(1024L * 1024 * 1024 * 1024),
                Petabytes(1024L * 1024 * 1024 * 1024 * 1024);

                private final long multiplier;

                /**
                 * Constructs a member of this enum.
                 *
                 * @param multiplier A multiplier for the size field of a file
                 *                   size condition.
                 */
                private SizeUnit(long multiplier) {
                    this.multiplier = multiplier;
                }

                /**
                 * Gets the multiplier for the size field of a file size
                 * condition.
                 *
                 * @return The multiplier.
                 */
                long getMultiplier() {
                    return this.multiplier;
                }
            }
        }

        /**
         * A condition that requires a file to have an artifact of a given type
         * with an attribute of a given type with a value comparable to a
         * specified value.
         */
        @Immutable
        static final class ArtifactCondition implements Serializable, Comparable<ArtifactCondition> {

            private static final long serialVersionUID = 1L;
            private final String artifactTypeName;
            private final String attributeTypeName;
            private final BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE attributeValueType;
            // Exactly one of the value fields below is non-null, selected by
            // attributeValueType.
            private Integer intValue;
            private Long longValue;
            private Double doubleValue;
            private String stringValue;
            private DateTime dateTimeValue;
            private byte[] byteValue;
            private final RelationalOp op;
            private String treeDisplayName;

            /**
             * Constructs a condition that requires a file to have an artifact
             * of a given type with an attribute whose value compares to a
             * specified value.
             *
             * @param artifactTypeName   The name of the artifact type.
             * @param attributeTypeName  The name of the attribute type.
             * @param value              The String representation of the value.
             * @param attributeValueType The type of the value being passed in.
             * @param op                 The relational operator for the
             *                           comparison.
             *
             * @throws IllegalArgumentException If value cannot be parsed as the
             *                                  given attribute value type.
             */
            ArtifactCondition(String artifactTypeName, String attributeTypeName,
                    String value, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE attributeValueType,
                    RelationalOp op) throws IllegalArgumentException {
                this.artifactTypeName = artifactTypeName;
                this.attributeTypeName = attributeTypeName;
                this.attributeValueType = attributeValueType;
                this.treeDisplayName = artifactTypeName;
                this.intValue = null;
                this.longValue = null;
                this.doubleValue = null;
                this.stringValue = null;
                this.byteValue = null;
                this.op = op;
                try {
                    switch (this.attributeValueType) {
                        case STRING:
                            this.stringValue = value;
                            break;
                        case INTEGER:
                            this.intValue = Integer.parseInt(value);
                            break;
                        case LONG:
                            this.longValue = Long.parseLong(value);
                            break;
                        case DOUBLE:
                            this.doubleValue = Double.parseDouble(value);
                            break;
                        case BYTE:
                            try {
                                this.byteValue = Hex.decodeHex(value.toCharArray());
                            } catch (DecoderException ex) {
                                this.byteValue = null;
                                throw new IllegalArgumentException("Bad hex decode"); //NON-NLS
                            }
                            break;
                        case DATETIME:
                            long result = Long.parseLong(value);
                            this.dateTimeValue = new DateTime(result);
                            break;
                        default:
                            throw new NumberFormatException("Bad type chosen"); //NON-NLS
                    }
                } catch (NumberFormatException ex) {
                    this.intValue = null;
                    this.longValue = null;
                    this.doubleValue = null;
                    this.stringValue = null;
                    this.byteValue = null;
                    this.dateTimeValue = null;
                    throw new IllegalArgumentException(ex);
                }
            }

            /**
             * Gets the artifact type name for this condition.
             *
             * @return The type name.
             */
            String getArtifactTypeName() {
                return this.artifactTypeName;
            }

            /**
             * Gets the tree display name for this condition.
             *
             * @return The tree display name for this condition.
             */
            String getTreeDisplayName() {
                return this.treeDisplayName;
            }

            /**
             * Sets the tree display name for this condition.
             *
             * @param name The tree display name for this condition.
             */
            void setTreeDisplayName(String name) {
                this.treeDisplayName = name;
            }

            /**
             * Gets the attribute type name for this condition.
             *
             * @return The type name.
             */
            String getAttributeTypeName() {
                return this.attributeTypeName;
            }

            /**
             * Gets the value type for this condition.
             *
             * @return The value type.
             */
            BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE getAttributeValueType() {
                return this.attributeValueType;
            }

            /**
             * Gets the integer value for this condition.
             *
             * @return The value, may be null.
             */
            Integer getIntegerValue() {
                return this.intValue;
            }

            /**
             * Gets the long value for this condition.
             *
             * @return The value, may be null.
             */
            Long getLongValue() {
                return this.longValue;
            }

            /**
             * Gets the double value for this condition.
             *
             * @return The value, may be null.
             */
            Double getDoubleValue() {
                return this.doubleValue;
            }

            /**
             * Gets the string value for this condition.
             *
             * @return The value, may be null.
             */
            String getStringValue() {
                return this.stringValue;
            }

            /**
             * Gets a defensive copy of the byte value for this condition.
             *
             * @return The value, may be null (the previous implementation
             *         threw NullPointerException for non-BYTE conditions).
             */
            byte[] getByteValue() {
                return this.byteValue == null ? null : this.byteValue.clone();
            }

            /**
             * Gets the DateTime value for this condition.
             *
             * @return The value, may be null.
             */
            DateTime getDateTimeValue() {
                return this.dateTimeValue;
            }

            /**
             * Gets the string representation of the value, regardless of the
             * data type.
             *
             * @return The value, may be null.
             */
            String getStringRepresentationOfValue() {
                String valueText;
                switch (this.attributeValueType) {
                    case BYTE:
                        valueText = new String(Hex.encodeHex(getByteValue()));
                        break;
                    case DATETIME:
                        valueText = "";
                        break;
                    case DOUBLE:
                        valueText = getDoubleValue().toString();
                        break;
                    case INTEGER:
                        valueText = getIntegerValue().toString();
                        break;
                    case LONG:
                        valueText = getLongValue().toString();
                        break;
                    case STRING:
                        valueText = getStringValue();
                        break;
                    default:
                        valueText = "Undefined";
                        break;
                }
                return valueText;
            }

            /**
             * Gets the relational operator for the condition.
             *
             * @return The operator.
             */
            RelationalOp getRelationalOperator() {
                return this.op;
            }

            /**
             * @inheritDoc
             */
            @Override
            public boolean equals(Object that) {
                if (this == that) {
                    return true;
                } else if (!(that instanceof ArtifactCondition)) {
                    return false;
                } else {
                    ArtifactCondition thatCondition = (ArtifactCondition) that;
                    // Compare the byteValue fields directly: the previous
                    // implementation called thatCondition.getByteValue(),
                    // which cloned the array and threw NullPointerException
                    // whenever the other condition was not of BYTE type.
                    return this.artifactTypeName.equals(thatCondition.getArtifactTypeName())
                            && this.attributeTypeName.equals(thatCondition.getAttributeTypeName())
                            && this.attributeValueType == thatCondition.getAttributeValueType()
                            && this.op == thatCondition.getRelationalOperator()
                            && Objects.equals(this.intValue, thatCondition.getIntegerValue())
                            && Objects.equals(this.longValue, thatCondition.getLongValue())
                            && Objects.equals(this.doubleValue, thatCondition.getDoubleValue())
                            && Objects.equals(this.stringValue, thatCondition.getStringValue())
                            && Arrays.equals(this.byteValue, thatCondition.byteValue)
                            && Objects.equals(this.dateTimeValue, thatCondition.getDateTimeValue());
                }
            }

            /**
             * @inheritDoc
             */
            @Override
            public int hashCode() {
                int hash = 7;
                hash = 9 * hash + this.artifactTypeName.hashCode();
                hash = 13 * hash + this.attributeTypeName.hashCode();
                hash = 11 * hash + this.attributeValueType.hashCode();
                hash = 13 * hash + this.op.hashCode();
                hash = 15 * hash + Objects.hashCode(this.intValue);
                hash = 7 * hash + Objects.hashCode(this.longValue);
                hash = 17 * hash + Objects.hashCode(this.doubleValue);
                hash = 8 * hash + Objects.hashCode(this.stringValue);
                // Arrays.hashCode hashes the contents; Objects.hashCode (used
                // previously) would use identity and break the equals contract.
                hash = 27 * hash + Arrays.hashCode(this.byteValue);
                hash = 3 * hash + Objects.hashCode(this.dateTimeValue);
                return hash;
            }

            /**
             * @inheritDoc
             */
            @Override
            public int compareTo(ArtifactCondition that) {
                int retVal = this.artifactTypeName.compareTo(that.getArtifactTypeName());
                if (0 != retVal) {
                    return retVal;
                }
                retVal = this.attributeTypeName.compareTo(that.getAttributeTypeName());
                if (0 != retVal) {
                    return retVal;
                }
                retVal = this.attributeValueType.compareTo(that.getAttributeValueType());
                if (0 != retVal) {
                    return retVal;
                }
                switch (this.attributeValueType) {
                    case STRING:
                        retVal = this.stringValue.compareTo(that.getStringValue());
                        break;
                    case INTEGER:
                        retVal = this.intValue.compareTo(that.getIntegerValue());
                        break;
                    case LONG:
                        retVal = this.longValue.compareTo(that.getLongValue());
                        break;
                    case DOUBLE:
                        retVal = this.doubleValue.compareTo(that.getDoubleValue());
                        break;
                    case BYTE:
                        // Lexicographic comparison keeps compareTo
                        // antisymmetric; the previous implementation returned
                        // 1 for every unequal pair, violating the Comparable
                        // contract and making sort order unstable.
                        retVal = compareByteArrays(this.byteValue, that.byteValue);
                        break;
                    case DATETIME:
                        retVal = this.dateTimeValue.compareTo(that.getDateTimeValue());
                        break;
                    default:
                        retVal = 0;
                        break;
                }
                if (0 != retVal) {
                    return retVal;
                }
                return this.op.compareTo(that.getRelationalOperator());
            }

            /**
             * Lexicographically compares two byte arrays, treating null as
             * less than any non-null array.
             *
             * @return A negative value, zero, or a positive value as first is
             *         less than, equal to, or greater than second.
             */
            private static int compareByteArrays(byte[] first, byte[] second) {
                if (first == second) {
                    return 0;
                }
                if (first == null) {
                    return -1;
                }
                if (second == null) {
                    return 1;
                }
                int limit = Math.min(first.length, second.length);
                for (int i = 0; i < limit; ++i) {
                    int cmp = Byte.compare(first[i], second[i]);
                    if (cmp != 0) {
                        return cmp;
                    }
                }
                return Integer.compare(first.length, second.length);
            }

            /**
             * Gets the SQL condition clause for the condition.
             *
             * @param index The index of the condition within the collection of
             *              conditions that make up a rule. It is used for table
             *              name aliasing.
             *
             * @return The SQL clause as a string, without leading or trailing
             *         spaces.
             *
             * @throws ExportRulesException If the artifact type or attribute
             *                              type for the condition does not
             *                              exist.
             */
            private String getConditionClause(int index) throws ExportRulesException {
                Case currentCase;
                try {
                    currentCase = Case.getCurrentCaseThrows();
                } catch (NoCurrentCaseException ex) {
                    throw new ExportRulesException("Exception while getting open case.", ex);
                }
                SleuthkitCase caseDb = currentCase.getSleuthkitCase();
                BlackboardArtifact.Type artifactType;
                try {
                    artifactType = caseDb.getArtifactType(artifactTypeName);
                } catch (TskCoreException ex) {
                    throw new ExportRulesException(String.format("The specified %s artifact type does not exist in case database for %s", artifactTypeName, currentCase.getCaseDirectory()), ex);
                }
                BlackboardAttribute.Type attributeType;
                try {
                    attributeType = caseDb.getAttributeType(attributeTypeName);
                } catch (TskCoreException ex) {
                    throw new ExportRulesException(String.format("The specified %s attribute type does not exist in case database for %s", attributeTypeName, currentCase.getCaseDirectory()), ex);
                }
                String clause = String.format("files.obj_id = arts%d.obj_id AND arts%d.artifact_type_id = %d AND attrs%d.artifact_id = arts%d.artifact_id AND attrs%d.attribute_type_id = %d AND ",
                        index, index, artifactType.getTypeID(), index, index, index, attributeType.getTypeID());
                switch (this.attributeValueType) {
                    case INTEGER:
                        clause += String.format("attrs%d.value_int32 %s %d", index, this.op.getSymbol(), this.intValue);
                        break;
                    case LONG:
                        clause += String.format("attrs%d.value_int64 %s %d", index, this.op.getSymbol(), this.longValue);
                        break;
                    case DOUBLE:
                        clause += String.format("attrs%d.value_double %s %f", index, this.op.getSymbol(), this.doubleValue);
                        break;
                    case STRING:
                        // NOTE(review): stringValue is interpolated into SQL
                        // unescaped; a value containing a quote will break (or
                        // inject into) the query. Callers must ensure rule
                        // values come from a trusted source.
                        clause += String.format("attrs%d.value_text %s '%s'", index, this.op.getSymbol(), this.stringValue);
                        break;
                    case BYTE:
                        clause += String.format("attrs%d.value_byte %s decode('%s', 'hex')", index, this.op.getSymbol(), new String(Hex.encodeHex(getByteValue())));
                        break;
                    case DATETIME:
                        // Joda millis are converted to epoch seconds to match
                        // the value_int64 storage convention.
                        clause += String.format("attrs%d.value_int64 %s '%s'", index, this.op.getSymbol(), this.dateTimeValue.getMillis() / 1000);
                        break;
                    default:
                        break;
                }
                return clause;
            }
        }
    }

    /**
     * Exception type thrown by the export rules class.
     */
    public final static class ExportRulesException extends Exception {

        private static final long serialVersionUID = 1L;

        /**
         * Constructs an exception.
         *
         * @param message The exception message.
         */
        private ExportRulesException(String message) {
            super(message);
        }

        /**
         * Constructs an exception.
         *
         * @param message The exception message.
         * @param cause   The exception cause.
         */
        private ExportRulesException(String message, Throwable cause) {
            super(message, cause);
        }
    }
}
/* * ConnectBot: simple, powerful, open-source SSH client for Android * Copyright 2007 Kenny Root, Jeffrey Sharkey * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.schoentoon.connectbot; import java.security.KeyPair; import java.security.KeyPairGenerator; import java.security.PrivateKey; import java.security.PublicKey; import java.security.SecureRandom; import android.app.Dialog; import android.app.ProgressDialog; import android.os.Bundle; import android.os.Handler; import android.os.Message; import android.text.Editable; import android.text.TextWatcher; import android.util.Log; import android.view.LayoutInflater; import android.view.View; import android.view.View.OnClickListener; import android.view.View.OnFocusChangeListener; import android.widget.Button; import android.widget.CheckBox; import android.widget.EditText; import android.widget.RadioGroup; import android.widget.RadioGroup.OnCheckedChangeListener; import android.widget.SeekBar; import android.widget.SeekBar.OnSeekBarChangeListener; import com.actionbarsherlock.app.SherlockActivity; import com.actionbarsherlock.view.MenuItem; import com.schoentoon.connectbot.bean.PubkeyBean; import com.schoentoon.connectbot.util.EntropyDialog; import com.schoentoon.connectbot.util.EntropyView; import com.schoentoon.connectbot.util.OnEntropyGatheredListener; import com.schoentoon.connectbot.util.PubkeyDatabase; import com.schoentoon.connectbot.util.PubkeyUtils; public class GeneratePubkeyActivity extends SherlockActivity 
implements OnEntropyGatheredListener { public final static String TAG = "ConnectBot.GeneratePubkeyActivity"; final static int DEFAULT_BITS = 1024; private LayoutInflater inflater = null; private EditText nickname; private RadioGroup keyTypeGroup; private SeekBar bitsSlider; private EditText bitsText; private CheckBox unlockAtStartup; private CheckBox confirmUse; private Button save; private Dialog entropyDialog; private ProgressDialog progress; private EditText password1, password2; private String keyType = PubkeyDatabase.KEY_TYPE_RSA; private int minBits = 768; private int bits = DEFAULT_BITS; private byte[] entropy; @Override public void onCreate(Bundle icicle) { super.onCreate(icicle); getSupportActionBar().setSubtitle("Generate key"); getSupportActionBar().setDisplayHomeAsUpEnabled(true); setContentView(R.layout.act_generatepubkey); nickname = (EditText) findViewById(R.id.nickname); keyTypeGroup = (RadioGroup) findViewById(R.id.key_type); bitsText = (EditText) findViewById(R.id.bits); bitsSlider = (SeekBar) findViewById(R.id.bits_slider); password1 = (EditText) findViewById(R.id.password1); password2 = (EditText) findViewById(R.id.password2); unlockAtStartup = (CheckBox) findViewById(R.id.unlock_at_startup); confirmUse = (CheckBox) findViewById(R.id.confirm_use); save = (Button) findViewById(R.id.save); inflater = LayoutInflater.from(this); nickname.addTextChangedListener(textChecker); password1.addTextChangedListener(textChecker); password2.addTextChangedListener(textChecker); keyTypeGroup.setOnCheckedChangeListener(new OnCheckedChangeListener() { public void onCheckedChanged(RadioGroup group, int checkedId) { if (checkedId == R.id.rsa) { minBits = 768; bitsSlider.setEnabled(true); bitsSlider.setProgress(DEFAULT_BITS - minBits); bitsText.setText(String.valueOf(DEFAULT_BITS)); bitsText.setEnabled(true); keyType = PubkeyDatabase.KEY_TYPE_RSA; } else if (checkedId == R.id.dsa) { // DSA keys can only be 1024 bits bitsSlider.setEnabled(false); 
bitsSlider.setProgress(DEFAULT_BITS - minBits); bitsText.setText(String.valueOf(DEFAULT_BITS)); bitsText.setEnabled(false); keyType = PubkeyDatabase.KEY_TYPE_DSA; } } }); bitsSlider.setOnSeekBarChangeListener(new OnSeekBarChangeListener() { public void onProgressChanged(SeekBar seekBar, int progress, boolean fromTouch) { // Stay evenly divisible by 8 because it looks nicer to have // 2048 than 2043 bits. int leftover = progress % 8; int ourProgress = progress; if (leftover > 0) ourProgress += 8 - leftover; bits = minBits + ourProgress; bitsText.setText(String.valueOf(bits)); } public void onStartTrackingTouch(SeekBar seekBar) { // We don't care about the start. } public void onStopTrackingTouch(SeekBar seekBar) { // We don't care about the stop. } }); bitsText.setOnFocusChangeListener(new OnFocusChangeListener() { public void onFocusChange(View v, boolean hasFocus) { if (!hasFocus) { try { bits = Integer.parseInt(bitsText.getText().toString()); if (bits < minBits) { bits = minBits; bitsText.setText(String.valueOf(bits)); } } catch (NumberFormatException nfe) { bits = DEFAULT_BITS; bitsText.setText(String.valueOf(bits)); } bitsSlider.setProgress(bits - minBits); } } }); save.setOnClickListener(new OnClickListener() { public void onClick(View view) { GeneratePubkeyActivity.this.save.setEnabled(false); GeneratePubkeyActivity.this.startEntropyGather(); } }); } @Override public boolean onMenuItemSelected(int featureId, MenuItem item) { switch (item.getItemId()) { case android.R.id.home: finish(); return true; } return super.onMenuItemSelected(featureId, item); } private void checkEntries() { boolean allowSave = true; if (!password1.getText().toString().equals(password2.getText().toString())) allowSave = false; if (nickname.getText().length() == 0) allowSave = false; save.setEnabled(allowSave); } private void startEntropyGather() { final View entropyView = inflater.inflate(R.layout.dia_gatherentropy, null, false); 
((EntropyView)entropyView.findViewById(R.id.entropy)).addOnEntropyGatheredListener(GeneratePubkeyActivity.this); entropyDialog = new EntropyDialog(GeneratePubkeyActivity.this, entropyView); entropyDialog.show(); } public void onEntropyGathered(byte[] entropy) { // For some reason the entropy dialog was aborted, exit activity if (entropy == null) { finish(); return; } this.entropy = entropy.clone(); int numSetBits = 0; for (int i = 0; i < 20; i++) numSetBits += measureNumberOfSetBits(this.entropy[i]); Log.d(TAG, "Entropy distribution=" + (int)(100.0 * numSetBits / 160.0) + "%"); Log.d(TAG, "entropy gathered; attemping to generate key..."); startKeyGen(); } private void startKeyGen() { progress = new ProgressDialog(GeneratePubkeyActivity.this); progress.setMessage(GeneratePubkeyActivity.this.getResources().getText(R.string.pubkey_generating)); progress.setIndeterminate(true); progress.setCancelable(false); progress.show(); Thread keyGenThread = new Thread(mKeyGen); keyGenThread.setName("KeyGen"); keyGenThread.start(); } private Handler handler = new Handler() { @Override public void handleMessage(Message msg) { progress.dismiss(); GeneratePubkeyActivity.this.finish(); } }; final private Runnable mKeyGen = new Runnable() { public void run() { try { boolean encrypted = false; SecureRandom random = SecureRandom.getInstance("SHA1PRNG"); random.setSeed(entropy); KeyPairGenerator keyPairGen = KeyPairGenerator.getInstance(keyType); keyPairGen.initialize(bits, random); KeyPair pair = keyPairGen.generateKeyPair(); PrivateKey priv = pair.getPrivate(); PublicKey pub = pair.getPublic(); String secret = password1.getText().toString(); if (secret.length() > 0) encrypted = true; Log.d(TAG, "private: " + PubkeyUtils.formatKey(priv)); Log.d(TAG, "public: " + PubkeyUtils.formatKey(pub)); PubkeyBean pubkey = new PubkeyBean(); pubkey.setNickname(nickname.getText().toString()); pubkey.setType(keyType); pubkey.setPrivateKey(PubkeyUtils.getEncodedPrivate(priv, secret)); 
pubkey.setPublicKey(PubkeyUtils.getEncodedPublic(pub)); pubkey.setEncrypted(encrypted); pubkey.setStartup(unlockAtStartup.isChecked()); pubkey.setConfirmUse(confirmUse.isChecked()); PubkeyDatabase pubkeydb = new PubkeyDatabase(GeneratePubkeyActivity.this); pubkeydb.savePubkey(pubkey); pubkeydb.close(); } catch (Exception e) { Log.e(TAG, "Could not generate key pair"); e.printStackTrace(); } handler.sendEmptyMessage(0); } }; final private TextWatcher textChecker = new TextWatcher() { public void afterTextChanged(Editable s) {} public void beforeTextChanged(CharSequence s, int start, int count, int after) {} public void onTextChanged(CharSequence s, int start, int before, int count) { checkEntries(); } }; private int measureNumberOfSetBits(byte b) { int numSetBits = 0; for (int i = 0; i < 8; i++) { if ((b & 1) == 1) numSetBits++; b >>= 1; } return numSetBits; } }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.util; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.nio.file.NoSuchFileException; import java.util.Map; import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.util.HostsFileReader.HostDetails; import org.junit.*; import static org.junit.Assert.*; /* * Test for HostsFileReader.java * */ public class TestHostsFileReader { // Using /test/build/data/tmp directory to store temprory files final String HOSTS_TEST_DIR = GenericTestUtils.getTestDir().getAbsolutePath(); File EXCLUDES_FILE = new File(HOSTS_TEST_DIR, "dfs.exclude"); File INCLUDES_FILE = new File(HOSTS_TEST_DIR, "dfs.include"); String excludesFile = HOSTS_TEST_DIR + "/dfs.exclude"; String includesFile = HOSTS_TEST_DIR + "/dfs.include"; private String excludesXmlFile = HOSTS_TEST_DIR + "/dfs.exclude.xml"; @Before public void setUp() throws Exception { } @After public void tearDown() throws Exception { // Delete test files after running tests EXCLUDES_FILE.delete(); INCLUDES_FILE.delete(); } /* * 1.Create dfs.exclude,dfs.include file * 2.Write host names per line * 3.Write comments starting with # * 4.Close file * 5.Compare if number of hosts reported by 
HostsFileReader * are equal to the number of hosts written */ @Test public void testHostsFileReader() throws Exception { FileWriter efw = new FileWriter(excludesFile); FileWriter ifw = new FileWriter(includesFile); efw.write("#DFS-Hosts-excluded\n"); efw.write("somehost1\n"); efw.write("#This-is-comment\n"); efw.write("somehost2\n"); efw.write("somehost3 # host3\n"); efw.write("somehost4\n"); efw.write("somehost4 somehost5\n"); efw.close(); ifw.write("#Hosts-in-DFS\n"); ifw.write("somehost1\n"); ifw.write("somehost2\n"); ifw.write("somehost3\n"); ifw.write("#This-is-comment\n"); ifw.write("somehost4 # host4\n"); ifw.write("somehost4 somehost5\n"); ifw.close(); HostsFileReader hfp = new HostsFileReader(includesFile, excludesFile); int includesLen = hfp.getHosts().size(); int excludesLen = hfp.getExcludedHosts().size(); assertEquals(5, includesLen); assertEquals(5, excludesLen); assertTrue(hfp.getHosts().contains("somehost5")); assertFalse(hfp.getHosts().contains("host3")); assertTrue(hfp.getExcludedHosts().contains("somehost5")); assertFalse(hfp.getExcludedHosts().contains("host4")); // test for refreshing hostreader wit new include/exclude host files String newExcludesFile = HOSTS_TEST_DIR + "/dfs1.exclude"; String newIncludesFile = HOSTS_TEST_DIR + "/dfs1.include"; efw = new FileWriter(newExcludesFile); ifw = new FileWriter(newIncludesFile); efw.write("#DFS-Hosts-excluded\n"); efw.write("node1\n"); efw.close(); ifw.write("#Hosts-in-DFS\n"); ifw.write("node2\n"); ifw.close(); hfp.refresh(newIncludesFile, newExcludesFile); assertTrue(hfp.getExcludedHosts().contains("node1")); assertTrue(hfp.getHosts().contains("node2")); HostDetails hostDetails = hfp.getHostDetails(); assertTrue(hostDetails.getExcludedHosts().contains("node1")); assertTrue(hostDetails.getIncludedHosts().contains("node2")); assertEquals(newIncludesFile, hostDetails.getIncludesFile()); assertEquals(newExcludesFile, hostDetails.getExcludesFile()); } /* * Test creating a new HostsFileReader with 
nonexistent files */ @Test public void testCreateHostFileReaderWithNonexistentFile() throws Exception { try { new HostsFileReader( HOSTS_TEST_DIR + "/doesnt-exist", HOSTS_TEST_DIR + "/doesnt-exist"); Assert.fail("Should throw NoSuchFileException"); } catch (NoSuchFileException ex) { // Exception as expected } } /* * Test refreshing an existing HostsFileReader with an includes file that no longer exists */ @Test public void testRefreshHostFileReaderWithNonexistentFile() throws Exception { FileWriter efw = new FileWriter(excludesFile); FileWriter ifw = new FileWriter(includesFile); efw.close(); ifw.close(); HostsFileReader hfp = new HostsFileReader(includesFile, excludesFile); assertTrue(INCLUDES_FILE.delete()); try { hfp.refresh(); Assert.fail("Should throw NoSuchFileException"); } catch (NoSuchFileException ex) { // Exception as expected } } /* * Test for null file */ @Test public void testHostFileReaderWithNull() throws Exception { FileWriter efw = new FileWriter(excludesFile); FileWriter ifw = new FileWriter(includesFile); efw.close(); ifw.close(); HostsFileReader hfp = new HostsFileReader(includesFile, excludesFile); int includesLen = hfp.getHosts().size(); int excludesLen = hfp.getExcludedHosts().size(); // TestCase1: Check if lines beginning with # are ignored assertEquals(0, includesLen); assertEquals(0, excludesLen); // TestCase2: Check if given host names are reported by getHosts and // getExcludedHosts assertFalse(hfp.getHosts().contains("somehost5")); assertFalse(hfp.getExcludedHosts().contains("somehost5")); } /* * Check if only comments can be written to hosts file */ @Test public void testHostFileReaderWithCommentsOnly() throws Exception { FileWriter efw = new FileWriter(excludesFile); FileWriter ifw = new FileWriter(includesFile); efw.write("#DFS-Hosts-excluded\n"); efw.close(); ifw.write("#Hosts-in-DFS\n"); ifw.close(); HostsFileReader hfp = new HostsFileReader(includesFile, excludesFile); int includesLen = hfp.getHosts().size(); int excludesLen = 
hfp.getExcludedHosts().size(); assertEquals(0, includesLen); assertEquals(0, excludesLen); assertFalse(hfp.getHosts().contains("somehost5")); assertFalse(hfp.getExcludedHosts().contains("somehost5")); } /* * Test if spaces are allowed in host names */ @Test public void testHostFileReaderWithSpaces() throws Exception { FileWriter efw = new FileWriter(excludesFile); FileWriter ifw = new FileWriter(includesFile); efw.write("#DFS-Hosts-excluded\n"); efw.write(" somehost somehost2"); efw.write(" somehost3 # somehost4"); efw.close(); ifw.write("#Hosts-in-DFS\n"); ifw.write(" somehost somehost2"); ifw.write(" somehost3 # somehost4"); ifw.close(); HostsFileReader hfp = new HostsFileReader(includesFile, excludesFile); int includesLen = hfp.getHosts().size(); int excludesLen = hfp.getExcludedHosts().size(); assertEquals(3, includesLen); assertEquals(3, excludesLen); assertTrue(hfp.getHosts().contains("somehost3")); assertFalse(hfp.getHosts().contains("somehost5")); assertFalse(hfp.getHosts().contains("somehost4")); assertTrue(hfp.getExcludedHosts().contains("somehost3")); assertFalse(hfp.getExcludedHosts().contains("somehost5")); assertFalse(hfp.getExcludedHosts().contains("somehost4")); } /* * Test if spaces , tabs and new lines are allowed */ @Test public void testHostFileReaderWithTabs() throws Exception { FileWriter efw = new FileWriter(excludesFile); FileWriter ifw = new FileWriter(includesFile); efw.write("#DFS-Hosts-excluded\n"); efw.write(" \n"); efw.write(" somehost \t somehost2 \n somehost4"); efw.write(" somehost3 \t # somehost5"); efw.close(); ifw.write("#Hosts-in-DFS\n"); ifw.write(" \n"); ifw.write(" somehost \t somehost2 \n somehost4"); ifw.write(" somehost3 \t # somehost5"); ifw.close(); HostsFileReader hfp = new HostsFileReader(includesFile, excludesFile); int includesLen = hfp.getHosts().size(); int excludesLen = hfp.getExcludedHosts().size(); assertEquals(4, includesLen); assertEquals(4, excludesLen); assertTrue(hfp.getHosts().contains("somehost2")); 
assertFalse(hfp.getHosts().contains("somehost5")); assertTrue(hfp.getExcludedHosts().contains("somehost2")); assertFalse(hfp.getExcludedHosts().contains("somehost5")); } /* * Test if timeout values are provided in HostFile */ @Test public void testHostFileReaderWithTimeout() throws Exception { FileWriter efw = new FileWriter(excludesXmlFile); FileWriter ifw = new FileWriter(includesFile); efw.write("<?xml version=\"1.0\"?>\n"); efw.write("<!-- yarn.nodes.exclude -->\n"); efw.write("<hosts>\n"); efw.write("<host><name>host1</name></host>\n"); efw.write("<host><name>host2</name><timeout>123</timeout></host>\n"); efw.write("<host><name>host3</name><timeout>-1</timeout></host>\n"); efw.write("<host><name>10000</name></host>\n"); efw.write("<host><name>10001</name><timeout>123</timeout></host>\n"); efw.write("<host><name>10002</name><timeout>-1</timeout></host>\n"); efw.write("<host><name>host4,host5, host6</name>" + "<timeout>1800</timeout></host>\n"); efw.write("</hosts>\n"); efw.close(); ifw.write("#Hosts-in-DFS\n"); ifw.write(" \n"); ifw.write(" somehost \t somehost2 \n somehost4"); ifw.write(" somehost3 \t # somehost5"); ifw.close(); HostsFileReader hfp = new HostsFileReader(includesFile, excludesXmlFile); int includesLen = hfp.getHosts().size(); int excludesLen = hfp.getExcludedHosts().size(); assertEquals(4, includesLen); assertEquals(9, excludesLen); HostDetails hostDetails = hfp.getHostDetails(); Map<String, Integer> excludes = hostDetails.getExcludedMap(); assertTrue(excludes.containsKey("host1")); assertTrue(excludes.containsKey("host2")); assertTrue(excludes.containsKey("host3")); assertTrue(excludes.containsKey("10000")); assertTrue(excludes.containsKey("10001")); assertTrue(excludes.containsKey("10002")); assertTrue(excludes.containsKey("host4")); assertTrue(excludes.containsKey("host5")); assertTrue(excludes.containsKey("host6")); assertTrue(excludes.get("host1") == null); assertTrue(excludes.get("host2") == 123); assertTrue(excludes.get("host3") == -1); 
assertTrue(excludes.get("10000") == null); assertTrue(excludes.get("10001") == 123); assertTrue(excludes.get("10002") == -1); assertTrue(excludes.get("host4") == 1800); assertTrue(excludes.get("host5") == 1800); assertTrue(excludes.get("host6") == 1800); } @Test public void testLazyRefresh() throws IOException { FileWriter efw = new FileWriter(excludesFile); FileWriter ifw = new FileWriter(includesFile); efw.write("host1\n"); efw.write("host2\n"); efw.close(); ifw.write("host3\n"); ifw.write("host4\n"); ifw.close(); HostsFileReader hfp = new HostsFileReader(includesFile, excludesFile); ifw = new FileWriter(includesFile); ifw.close(); efw = new FileWriter(excludesFile, true); efw.write("host3\n"); efw.write("host4\n"); efw.close(); hfp.lazyRefresh(includesFile, excludesFile); HostDetails details = hfp.getHostDetails(); HostDetails lazyDetails = hfp.getLazyLoadedHostDetails(); assertEquals("Details: no. of excluded hosts", 2, details.getExcludedHosts().size()); assertEquals("Details: no. of included hosts", 2, details.getIncludedHosts().size()); assertEquals("LazyDetails: no. of excluded hosts", 4, lazyDetails.getExcludedHosts().size()); assertEquals("LayDetails: no. of included hosts", 0, lazyDetails.getIncludedHosts().size()); hfp.finishRefresh(); details = hfp.getHostDetails(); assertEquals("Details: no. of excluded hosts", 4, details.getExcludedHosts().size()); assertEquals("Details: no. of included hosts", 0, details.getIncludedHosts().size()); assertNull("Lazy host details should be null", hfp.getLazyLoadedHostDetails()); } @Test(expected = IllegalStateException.class) public void testFinishRefreshWithoutLazyRefresh() throws IOException { FileWriter efw = new FileWriter(excludesFile); FileWriter ifw = new FileWriter(includesFile); efw.close(); ifw.close(); HostsFileReader hfp = new HostsFileReader(includesFile, excludesFile); hfp.finishRefresh(); } }
/*
 * Copyright 2015 NAVER Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.navercorp.pinpoint.web.vo;

import java.util.Comparator;

import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.navercorp.pinpoint.common.server.bo.AgentInfoBo;
import com.navercorp.pinpoint.common.server.bo.JvmInfoBo;
import com.navercorp.pinpoint.common.server.bo.ServerMetaDataBo;
import com.navercorp.pinpoint.web.view.AgentInfoSerializer;

/**
 * View object describing a single agent; mutable JavaBean populated either
 * field-by-field or from an {@link AgentInfoBo}. JSON output is produced by
 * {@link AgentInfoSerializer}.
 *
 * @author HyunGil Jeong
 */
@JsonSerialize(using = AgentInfoSerializer.class)
public class AgentInfo {

    /**
     * Orders agents by agentId (ascending), treating a null agentId as the
     * empty string so null ids sort first and never throw.
     */
    public static final Comparator<AgentInfo> AGENT_NAME_ASC_COMPARATOR = new Comparator<AgentInfo>() {
        @Override
        public int compare(AgentInfo lhs, AgentInfo rhs) {
            final String lhsAgentId = lhs.agentId == null ? "" : lhs.agentId;
            final String rhsAgentId = rhs.agentId == null ? "" : rhs.agentId;
            return lhsAgentId.compareTo(rhsAgentId);
        }
    };

    private String applicationName;
    private String agentId;
    private long startTimestamp;
    private String hostName;
    private String ip;
    private String ports;
    private short serviceTypeCode;
    private int pid;
    private String vmVersion;
    private String agentVersion;
    private ServerMetaDataBo serverMetaData;
    private JvmInfoBo jvmInfo;
    // Not copied from AgentInfoBo; set separately via the setters below.
    private long initialStartTimestamp;
    private AgentStatus status;

    public AgentInfo() {
    }

    /**
     * Copies all fields available on the BO. Note: {@code initialStartTimestamp}
     * and {@code status} are NOT populated here.
     *
     * @param agentInfoBo source business object
     */
    public AgentInfo(AgentInfoBo agentInfoBo) {
        this.applicationName = agentInfoBo.getApplicationName();
        this.agentId = agentInfoBo.getAgentId();
        this.startTimestamp = agentInfoBo.getStartTime();
        this.hostName = agentInfoBo.getHostName();
        this.ip = agentInfoBo.getIp();
        this.ports = agentInfoBo.getPorts();
        this.serviceTypeCode = agentInfoBo.getServiceTypeCode();
        this.pid = agentInfoBo.getPid();
        this.vmVersion = agentInfoBo.getVmVersion();
        this.agentVersion = agentInfoBo.getAgentVersion();
        this.serverMetaData = agentInfoBo.getServerMetaData();
        this.jvmInfo = agentInfoBo.getJvmInfo();
    }

    public String getApplicationName() {
        return applicationName;
    }

    public void setApplicationName(String applicationName) {
        this.applicationName = applicationName;
    }

    public String getAgentId() {
        return agentId;
    }

    public void setAgentId(String agentId) {
        this.agentId = agentId;
    }

    public long getStartTimestamp() {
        return startTimestamp;
    }

    public void setStartTimestamp(long startTimestamp) {
        this.startTimestamp = startTimestamp;
    }

    public String getHostName() {
        return hostName;
    }

    public void setHostName(String hostName) {
        this.hostName = hostName;
    }

    public String getIp() {
        return ip;
    }

    public void setIp(String ip) {
        this.ip = ip;
    }

    public String getPorts() {
        return ports;
    }

    public void setPorts(String ports) {
        this.ports = ports;
    }

    public short getServiceTypeCode() {
        return serviceTypeCode;
    }

    public void setServiceTypeCode(short serviceTypeCode) {
        this.serviceTypeCode = serviceTypeCode;
    }

    public int getPid() {
        return pid;
    }

    public void setPid(int pid) {
        this.pid = pid;
    }

    public String getVmVersion() {
        return vmVersion;
    }

    public void setVmVersion(String vmVersion) {
        this.vmVersion = vmVersion;
    }

    public String getAgentVersion() {
        return agentVersion;
    }

    public void setAgentVersion(String agentVersion) {
        this.agentVersion = agentVersion;
    }

    public ServerMetaDataBo getServerMetaData() {
        return serverMetaData;
    }

    public void setServerMetaData(ServerMetaDataBo serverMetaData) {
        this.serverMetaData = serverMetaData;
    }

    public JvmInfoBo getJvmInfo() {
        return jvmInfo;
    }

    public void setJvmInfo(JvmInfoBo jvmInfo) {
        this.jvmInfo = jvmInfo;
    }

    public long getInitialStartTimestamp() {
        return initialStartTimestamp;
    }

    public void setInitialStartTimestamp(long initialStartTimestamp) {
        this.initialStartTimestamp = initialStartTimestamp;
    }

    public AgentStatus getStatus() {
        return status;
    }

    public void setStatus(AgentStatus status) {
        this.status = status;
    }

    /**
     * Identity is the (agentId, startTimestamp) pair; all other fields are
     * ignored. {@link #hashCode()} is kept consistent with this definition.
     */
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;

        AgentInfo agentInfo = (AgentInfo) o;

        if (startTimestamp != agentInfo.startTimestamp) return false;
        return agentId != null ? agentId.equals(agentInfo.agentId) : agentInfo.agentId == null;
    }

    @Override
    public int hashCode() {
        // Combines agentId and startTimestamp — the same fields used by equals().
        int result = agentId != null ? agentId.hashCode() : 0;
        result = 31 * result + (int) (startTimestamp ^ (startTimestamp >>> 32));
        return result;
    }

    @Override
    public String toString() {
        final StringBuilder sb = new StringBuilder("AgentInfo{");
        sb.append("applicationName='").append(applicationName).append('\'');
        sb.append(", agentId='").append(agentId).append('\'');
        sb.append(", startTimestamp=").append(startTimestamp);
        sb.append(", hostName='").append(hostName).append('\'');
        sb.append(", ip='").append(ip).append('\'');
        sb.append(", ports='").append(ports).append('\'');
        sb.append(", serviceTypeCode=").append(serviceTypeCode);
        sb.append(", pid=").append(pid);
        sb.append(", vmVersion='").append(vmVersion).append('\'');
        sb.append(", agentVersion='").append(agentVersion).append('\'');
        sb.append(", serverMetaData=").append(serverMetaData);
        sb.append(", jvmInfo=").append(jvmInfo);
        sb.append(", initialStartTimestamp=").append(initialStartTimestamp);
        sb.append(", status=").append(status);
        sb.append('}');
        return sb.toString();
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.cassandra.dht; import java.math.BigInteger; import java.nio.ByteBuffer; import java.util.*; import org.apache.cassandra.config.*; import org.apache.cassandra.utils.ByteBufferUtil; import org.apache.commons.lang3.ArrayUtils; import org.apache.cassandra.db.DecoratedKey; import org.apache.cassandra.db.marshal.AbstractType; import org.apache.cassandra.db.marshal.BytesType; import org.apache.cassandra.exceptions.ConfigurationException; import org.apache.cassandra.service.StorageService; import org.apache.cassandra.utils.FBUtilities; import org.apache.cassandra.utils.Hex; import org.apache.cassandra.utils.Pair; public abstract class AbstractByteOrderedPartitioner extends AbstractPartitioner<BytesToken> { public static final BytesToken MINIMUM = new BytesToken(ArrayUtils.EMPTY_BYTE_ARRAY); public static final BigInteger BYTE_MASK = new BigInteger("255"); public DecoratedKey decorateKey(ByteBuffer key) { return new DecoratedKey(getToken(key), key); } public BytesToken midpoint(Token ltoken, Token rtoken) { int ll,rl; ByteBuffer lb,rb; if(ltoken.token instanceof byte[]) { ll = ((byte[])ltoken.token).length; lb = ByteBuffer.wrap(((byte[])ltoken.token)); } else { ll = 
((ByteBuffer)ltoken.token).remaining(); lb = (ByteBuffer)ltoken.token; } if(rtoken.token instanceof byte[]) { rl = ((byte[])rtoken.token).length; rb = ByteBuffer.wrap(((byte[])rtoken.token)); } else { rl = ((ByteBuffer)rtoken.token).remaining(); rb = (ByteBuffer)rtoken.token; } int sigbytes = Math.max(ll, rl); BigInteger left = bigForBytes(lb, sigbytes); BigInteger right = bigForBytes(rb, sigbytes); Pair<BigInteger,Boolean> midpair = FBUtilities.midpoint(left, right, 8*sigbytes); return new BytesToken(bytesForBig(midpair.left, sigbytes, midpair.right)); } /** * Convert a byte array containing the most significant of 'sigbytes' bytes * representing a big-endian magnitude into a BigInteger. */ private BigInteger bigForBytes(ByteBuffer bytes, int sigbytes) { byte[] b = new byte[sigbytes]; ByteBufferUtil.arrayCopy(bytes, bytes.position(), b, 0, bytes.remaining()); return new BigInteger(1, b); } /** * Convert a (positive) BigInteger into a byte array representing its magnitude. * If remainder is true, an additional byte with the high order bit enabled * will be added to the end of the array */ private byte[] bytesForBig(BigInteger big, int sigbytes, boolean remainder) { byte[] bytes = new byte[sigbytes + (remainder ? 
1 : 0)]; if (remainder) { // remaining bit is the most significant in the last byte bytes[sigbytes] |= 0x80; } // bitmask for a single byte for (int i = 0; i < sigbytes; i++) { int maskpos = 8 * (sigbytes - (i + 1)); // apply bitmask and get byte value bytes[i] = (byte)(big.and(BYTE_MASK.shiftLeft(maskpos)).shiftRight(maskpos).intValue() & 0xFF); } return bytes; } public BytesToken getMinimumToken() { return MINIMUM; } public BytesToken getRandomToken() { Random r = new Random(); byte[] buffer = new byte[16]; r.nextBytes(buffer); return new BytesToken(buffer); } private final Token.TokenFactory<byte[]> tokenFactory = new Token.TokenFactory<byte[]>() { public ByteBuffer toByteArray(Token<byte[]> bytesToken) { return ByteBuffer.wrap(bytesToken.token); } public Token<byte[]> fromByteArray(ByteBuffer bytes) { return new BytesToken(bytes); } public String toString(Token<byte[]> bytesToken) { return Hex.bytesToHex(bytesToken.token); } public void validate(String token) throws ConfigurationException { try { if (token.length() % 2 == 1) token = "0" + token; Hex.hexToBytes(token); } catch (NumberFormatException e) { throw new ConfigurationException("Token " + token + " contains non-hex digits"); } } public Token<byte[]> fromString(String string) { if (string.length() % 2 == 1) string = "0" + string; return new BytesToken(Hex.hexToBytes(string)); } }; public Token.TokenFactory<byte[]> getTokenFactory() { return tokenFactory; } public boolean preservesOrder() { return true; } public abstract BytesToken getToken(ByteBuffer key); public Map<Token, Float> describeOwnership(List<Token> sortedTokens) { // allTokens will contain the count and be returned, sorted_ranges is shorthand for token<->token math. Map<Token, Float> allTokens = new HashMap<Token, Float>(); List<Range<Token>> sortedRanges = new ArrayList<Range<Token>>(sortedTokens.size()); // this initializes the counts to 0 and calcs the ranges in order. 
Token lastToken = sortedTokens.get(sortedTokens.size() - 1); for (Token node : sortedTokens) { allTokens.put(node, new Float(0.0)); sortedRanges.add(new Range<Token>(lastToken, node)); lastToken = node; } for (String ks : Schema.instance.getKeyspaces()) { for (CFMetaData cfmd : Schema.instance.getKSMetaData(ks).cfMetaData().values()) { for (Range<Token> r : sortedRanges) { // Looping over every KS:CF:Range, get the splits size and add it to the count allTokens.put(r.right, allTokens.get(r.right) + StorageService.instance.getSplits(ks, cfmd.cfName, r, 1).size()); } } } // Sum every count up and divide count/total for the fractional ownership. Float total = new Float(0.0); for (Float f : allTokens.values()) total += f; for (Map.Entry<Token, Float> row : allTokens.entrySet()) allTokens.put(row.getKey(), row.getValue() / total); return allTokens; } public AbstractType<?> getTokenValidator() { return BytesType.instance; } }
/**
 * Copyright 2015-2019 Maven Source Dependencies
 * Plugin contributors as indicated by the @author tags.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.srcdeps.core.impl.scm;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Base64;
import java.util.Collection;
import java.util.List;
import java.util.Set;

import javax.inject.Named;
import javax.inject.Singleton;

import org.eclipse.jgit.api.FetchCommand;
import org.eclipse.jgit.api.Git;
import org.eclipse.jgit.api.ResetCommand.ResetType;
import org.eclipse.jgit.api.errors.GitAPIException;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.Ref;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.lib.StoredConfig;
import org.eclipse.jgit.revwalk.RevCommit;
import org.eclipse.jgit.revwalk.RevWalk;
import org.eclipse.jgit.transport.FetchResult;
import org.eclipse.jgit.transport.RefSpec;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.srcdeps.core.BuildRequest;
import org.srcdeps.core.Scm;
import org.srcdeps.core.ScmException;
import org.srcdeps.core.SrcVersion;
import org.srcdeps.core.SrcVersion.WellKnownType;
import org.srcdeps.core.util.SrcdepsCoreUtils;

/**
 * A JGit based implementation of a Git {@link Scm}.
 *
 * @author <a href="https://github.com/ppalaga">Peter Palaga</a>
 */
@Named
@Singleton
public class JGitScm implements Scm {
    private static final Logger log = LoggerFactory.getLogger(JGitScm.class);

    /** Git config section name under which remotes are stored. */
    private static final String REMOTE = "remote";
    /** Prefix that marks an SCM URL as a Git URL, e.g. {@code git:https://...}. */
    private static final String SCM_GIT_PREFIX = "git:";
    /** Name of the local branch srcdeps checks builds out on. */
    private static final String SRCDEPS_WORKING_BRANCH = "srcdeps-working-branch";

    /**
     * Makes sure the local repository's config has a remote named {@code remoteAlias} whose {@code url} and
     * {@code fetch} refspec have the expected values. The config is saved only if something actually changed.
     *
     * @param useUrl the remote URL the alias should point at (already stripped of the {@code git:} prefix)
     * @param remoteAlias the name of the remote to create or update, see {@link #toRemoteAlias(String)}
     * @param git the repository to operate on
     * @throws IOException if saving the config fails
     */
    static void ensureRemoteAvailable(String useUrl, String remoteAlias, Git git) throws IOException {
        final StoredConfig config = git.getRepository().getConfig();
        boolean save = false;
        final String foundUrl = config.getString(REMOTE, remoteAlias, "url");
        if (!useUrl.equals(foundUrl)) {
            config.setString(REMOTE, remoteAlias, "url", useUrl);
            save = true;
        }
        final String foundFetch = config.getString(REMOTE, remoteAlias, "fetch");
        final String expectedFetch = "+refs/heads/*:refs/remotes/" + remoteAlias + "/*";
        if (!expectedFetch.equals(foundFetch)) {
            config.setString(REMOTE, remoteAlias, "fetch", expectedFetch);
            save = true;
        }
        if (save) {
            config.save();
        }
    }

    public static String getScmGitPrefix() {
        return SCM_GIT_PREFIX;
    }

    /**
     * @return srcdeps will use this branch to perform its magic
     */
    public static String getSrcdepsWorkingBranch() {
        return SRCDEPS_WORKING_BRANCH;
    }

    /**
     * Opens the Git repository in {@code dir}, initializing a fresh one there if none exists yet.
     * Note that on the init path the directory is wiped via {@code ensureDirectoryExistsAndEmpty}.
     *
     * @param requestId used only for log correlation
     * @param dir the directory to open or initialize
     * @return an open {@link Git} handle; the caller is responsible for closing it
     * @throws ScmException if the directory can neither be opened nor initialized
     */
    private static Git openGit(String requestId, Path dir) throws ScmException {
        try {
            return Git.open(dir.toFile());
        } catch (IOException e) {
            /* Not a repository yet - fall through to init below */
            log.debug(String.format("srcdeps[%s]: No git repository in [%s]", requestId, dir), e);
        }
        try {
            SrcdepsCoreUtils.ensureDirectoryExistsAndEmpty(dir);
            return Git.init().setDirectory(dir.toFile()).call();
        } catch (IOException | GitAPIException e) {
            throw new ScmException(String.format("Could not create directory [%s]", dir), e);
        }
    }

    /** Removes the leading {@value #SCM_GIT_PREFIX} from the given SCM URL. */
    private static String stripUriPrefix(String url) {
        return url.substring(SCM_GIT_PREFIX.length());
    }

    /**
     * @param url the git URL to generate a remote alias for
     * @return a Base64 (URL-safe) encoded sha1 hash of the given {@code url} prefixed with {@code origin-}
     */
    static String toRemoteAlias(String url) {
        try {
            final MessageDigest sha1Digest = MessageDigest.getInstance("SHA-1");
            sha1Digest.update(url.getBytes(StandardCharsets.UTF_8));
            final byte[] bytes = sha1Digest.digest();
            return "origin-" + Base64.getUrlEncoder().encodeToString(bytes);
        } catch (NoSuchAlgorithmException e) {
            /* SHA-1 is mandated by the JCA spec, so this should never happen */
            throw new RuntimeException(e);
        }
    }

    /**
     * Makes sure that the given {@code refToFind} is available in the {@code advertisedRefs}.
     *
     * @param advertisedRefs the {@link Collection} of {@link Ref}s to search in
     * @param refToFind the ref name to find
     * @param url the URL used to fetch
     * @throws ScmException if the given {@code refToFind} could not be found in the {@code advertisedRefs}
     */
    private void assertRefFetched(Collection<Ref> advertisedRefs, String refToFind, String url) throws ScmException {
        for (Ref ref : advertisedRefs) {
            if (refToFind.equals(ref.getName())) {
                return;
            }
        }
        throw new ScmException(String.format("Could not fetch ref [%s] from [%s]", refToFind, url));
    }

    /**
     * Walks back through the history of the {@code advertisedRefs} and tries to find the given {@code commitSha1}.
     *
     * @param repository the current {@link Repository} to search in
     * @param advertisedRefs the list of refs that were fetched and whose histories should be searched through
     * @param commitSha1 the commit to find
     * @param url the URL that was used to fetch
     * @throws ScmException if the given {@code commitSha1} could not be found in the history of any of the
     *         {@code advertisedRefs}
     */
    private void assertRevisionFetched(Repository repository, Collection<Ref> advertisedRefs, String commitSha1,
            String url) throws ScmException {
        ObjectId needle = ObjectId.fromString(commitSha1);
        try {
            for (Ref ref : advertisedRefs) {
                try (RevWalk walk = new RevWalk(repository)) {
                    walk.markStart(walk.parseCommit(ref.getTarget().getObjectId()));
                    /* We only compare ids, so commit bodies need not be loaded */
                    walk.setRetainBody(false);
                    for (RevCommit commit : walk) {
                        if (commit.getId().equals(needle)) {
                            return;
                        }
                    }
                }
            }
        } catch (IOException e) {
            /* BUGFIX: the exception was previously constructed but never thrown, silently
             * dropping the I/O cause and falling through to the cause-less throw below */
            throw new ScmException(String.format("Could not fetch ref [%s] from [%s]", commitSha1, url), e);
        }
        throw new ScmException(String.format("Could not fetch ref [%s] from [%s]", commitSha1, url));
    }

    /**
     * Checkout the source tree of a project to build, esp. using {@link BuildRequest#getScmUrls()} and
     * {@link BuildRequest#getSrcVersion()} of the given {@code request}.
     * <p>
     * This implementation first checks if {@code request.getProjectRootDirectory()} returns a directory containing a
     * valid git repository. If it does not, git init operation is invoked. After that git fetch and git reset are used
     * to checkout the sources.
     *
     * @param request determines the project to checkout
     * @return the {@code commitId} the {@code HEAD} points at
     * @throws ScmException on any SCM related problem
     * @see org.srcdeps.core.Scm#checkout(org.srcdeps.core.BuildRequest)
     */
    @Override
    public String checkout(BuildRequest request) throws ScmException {
        final Path dir = request.getProjectRootDirectory();
        int i = 0;
        final List<String> urls = request.getScmUrls();
        try (Git git = openGit(request.getScmRepositoryId(), dir)) {
            /* Try the URLs one after another, returning on the first success */
            for (String url : urls) {
                final String useUrl = stripUriPrefix(url);
                final String result = fetchAndReset(request.getScmRepositoryId(), useUrl, i, urls.size(),
                        request.getSrcVersion(), dir, git);
                if (result != null) {
                    return result;
                }
                i++;
            }
        }
        throw new ScmException(
                String.format("Could not checkout [%s] from URLs %s", request.getSrcVersion(), request.getScmUrls()));
    }

    /**
     * Fetches the requested version from {@code useUrl} and hard-resets the working tree to it.
     *
     * @param requestId used only for log correlation
     * @param useUrl the URL to fetch from (already stripped of the {@code git:} prefix)
     * @param urlIndex zero based index of {@code useUrl} within the list of candidate URLs
     * @param urlCount total number of candidate URLs
     * @param srcVersion the branch, tag or revision to check out
     * @param dir the project root directory (used in log messages only)
     * @param git the repository to operate on
     * @return the commit id {@code HEAD} points at after a successful checkout, or {@code null} if this URL
     *         failed but further URLs remain to be tried
     * @throws ScmException if the checkout failed and {@code useUrl} was the last candidate URL
     */
    String fetchAndReset(String requestId, String useUrl, int urlIndex, int urlCount, SrcVersion srcVersion, Path dir,
            Git git) throws ScmException {
        /* Forget local changes */
        try {
            Set<String> removedFiles = git.clean().setCleanDirectories(true).call();
            for (String removedFile : removedFiles) {
                log.debug("srcdeps[{}]: Removed an unstaged file [{}]", requestId, removedFile);
            }
            git.reset().setMode(ResetType.HARD).call();
        } catch (Exception e) {
            /* Best effort only - a dirty tree may still check out fine */
            log.warn(String.format("srcdeps[%s]: Could not forget local changes in [%s]", requestId, dir), e);
        }

        log.info("srcdeps[{}]: Fetching version [{}] from SCM URL {}/{} [{}]", requestId, srcVersion, urlIndex + 1,
                urlCount, useUrl);
        final String remoteAlias = toRemoteAlias(useUrl);
        try {
            ensureRemoteAvailable(useUrl, remoteAlias, git);

            final String scmVersion = srcVersion.getScmVersion();
            final String startPoint;
            final String refToFetch;
            final FetchCommand fetch = git.fetch().setRemote(remoteAlias);
            switch (srcVersion.getWellKnownType()) {
            case branch:
                refToFetch = "refs/heads/" + scmVersion;
                fetch.setRefSpecs(new RefSpec(
                        "+refs/heads/" + scmVersion + ":refs/remotes/" + remoteAlias + "/" + scmVersion));
                startPoint = remoteAlias + "/" + scmVersion;
                break;
            case tag:
                refToFetch = "refs/tags/" + scmVersion;
                fetch.setRefSpecs(new RefSpec(refToFetch));
                startPoint = scmVersion;
                break;
            case revision:
                refToFetch = null;
                startPoint = scmVersion;
                break;
            default:
                throw new IllegalStateException("Unexpected " + WellKnownType.class.getName() + " value '"
                        + srcVersion.getWellKnownType() + "'.");
            }
            FetchResult fetchResult = fetch.call();

            /*
             * Let's check that the desired startPoint was really fetched from the current URL. Otherwise, the
             * startPoint may come from an older fetch of the same repo URL (but was removed in between) or it may come
             * from an older fetch of another URL. These cases may introduce situations when one developer can see a
             * successful srcdep build (because he still has the outdated ref in his local git repo) but another dev
             * with exactly the same setup cannot checkout because the ref is not there in any of the remote repos
             * anymore.
             */
            Collection<Ref> advertisedRefs = fetchResult.getAdvertisedRefs();
            switch (srcVersion.getWellKnownType()) {
            case branch:
            case tag:
                assertRefFetched(advertisedRefs, refToFetch, useUrl);
                break;
            case revision:
                assertRevisionFetched(git.getRepository(), advertisedRefs, scmVersion, useUrl);
                break;
            default:
                throw new IllegalStateException("Unexpected " + WellKnownType.class.getName() + " value '"
                        + srcVersion.getWellKnownType() + "'.");
            }

            /* Reset the srcdeps-working-branch */
            git.branchCreate().setName(SRCDEPS_WORKING_BRANCH).setForce(true).setStartPoint(startPoint).call();
            git.checkout().setName(SRCDEPS_WORKING_BRANCH).call();
            git.reset().setMode(ResetType.HARD).setRef(startPoint).call();

            final Ref ref = git.getRepository().exactRef("HEAD");
            return ref.getObjectId().getName();
        } catch (ScmException e) {
            final String msg = String.format("srcdeps[%s]: Could not checkout [%s] from SCM URL %d/%d [%s]", requestId,
                    srcVersion, urlIndex + 1, urlCount, useUrl);
            if (urlIndex + 1 == urlCount) {
                /* Last candidate URL - give up */
                throw new ScmException(msg, e);
            } else {
                log.warn(msg, e);
            }
        } catch (Exception e) {
            throw new ScmException(String.format("Could not checkout [%s] from SCM URL %d/%d [%s]", srcVersion,
                    urlIndex + 1, urlCount, useUrl), e);
        }
        return null;
    }

    @Override
    public boolean supports(String url) {
        return url.startsWith(SCM_GIT_PREFIX);
    }
}
/* * Encog(tm) Core v3.1 - Java Version * http://www.heatonresearch.com/encog/ * http://code.google.com/p/encog-java/ * Copyright 2008-2012 Heaton Research, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * For more information on Heaton Research copyrights, licenses * and trademarks visit: * http://www.heatonresearch.com/copyright */ package org.encog.neural.networks.training.pnn; import org.encog.Encog; import org.encog.util.EngineArray; import org.encog.util.logging.EncogLogging; /** * This class determines optimal values for multiple sigmas in a PNN kernel. * This is done using a CJ (conjugate gradient) method. * * * Some of the algorithms in this class are based on C++ code from: * * Advanced Algorithms for Neural Networks: A C++ Sourcebook by Timothy Masters * John Wiley & Sons Inc (Computers); April 3, 1995 ISBN: 0471105880 */ public class DeriveMinimum { /** * Derive the minimum, using a conjugate gradient method. * * @param maxIterations * The max iterations. * @param maxError * Stop at this error rate. * @param eps * The machine's precision. * @param tol * The convergence tolerance. * @param network * The network to get the error from. * @param n * The number of variables. * @param x * The independent variable. * @param ystart * The start for y. * @param base * Work vector, must have n elements. * @param direc * Work vector, must have n elements. * @param g * Work vector, must have n elements. * @param h * Work vector, must have n elements. 
* @param deriv2 * Work vector, must have n elements. * @return The best error. */ public double calculate(final int maxIterations, final double maxError, final double eps, final double tol, final CalculationCriteria network, final int n, final double[] x, final double ystart, final double[] base, final double[] direc, final double[] g, final double[] h, final double[] deriv2) { double prevBest, toler, gam, improvement; final GlobalMinimumSearch globalMinimum = new GlobalMinimumSearch(); double fbest = network.calcErrorWithMultipleSigma(x, direc, deriv2, true); prevBest = 1.e30; for (int i = 0; i < n; i++) { direc[i] = -direc[i]; } EngineArray.arrayCopy(direc, g); EngineArray.arrayCopy(direc, h); int convergenceCounter = 0; int poorCJ = 0; // Main loop for (int iteration = 0; iteration < maxIterations; iteration++) { if (fbest < maxError) { break; } EncogLogging.log(EncogLogging.LEVEL_INFO, "Beginning internal Iteration #" + iteration + ", currentError=" + fbest + ",target=" + maxError); // Check for convergence if (prevBest <= 1.0) { toler = tol; } else { toler = tol * prevBest; } // Stop if there is little improvement if ((prevBest - fbest) <= toler) { if (++convergenceCounter >= 3) { break; } } else { convergenceCounter = 0; } double dot1 = 0; double dot2 = 0; double dlen = 0; dot1 = dot2 = dlen = 0.0; double high = 1.e-4; for (int i = 0; i < n; i++) { base[i] = x[i]; if (deriv2[i] > high) { high = deriv2[i]; } dot1 += direc[i] * g[i]; // Directional first derivative dot2 += direc[i] * direc[i] * deriv2[i]; // and second dlen += direc[i] * direc[i]; // Length of search vector } dlen = Math.sqrt(dlen); double scale; if (Math.abs(dot2) < Encog.DEFAULT_DOUBLE_EQUAL) { scale = 0; } else { scale = dot1 / dot2; } high = 1.5 / high; if (high < 1.e-4) { high = 1.e-4; } if (scale < 0.0) { scale = high; } else if (scale < 0.1 * high) { scale = 0.1 * high; } else if (scale > 10.0 * high) { scale = 10.0 * high; } prevBest = fbest; globalMinimum.setY2(fbest); 
globalMinimum.findBestRange(0.0, 2.0 * scale, -3, false, maxError, network); if (globalMinimum.getY2() < maxError) { if (globalMinimum.getY2() < fbest) { for (int i = 0; i < n; i++) { x[i] = base[i] + globalMinimum.getY2() * direc[i]; if (x[i] < 1.e-10) { x[i] = 1.e-10; } } fbest = globalMinimum.getY2(); } else { for (int i = 0; i < n; i++) { x[i] = base[i]; } } break; } if (convergenceCounter > 0) { fbest = globalMinimum.brentmin(20, maxError, eps, 1.e-7, network, globalMinimum.getY2()); } else { fbest = globalMinimum.brentmin(10, maxError, 1.e-6, 1.e-5, network, globalMinimum.getY2()); } for (int i = 0; i < n; i++) { x[i] = base[i] + globalMinimum.getX2() * direc[i]; if (x[i] < 1.e-10) { x[i] = 1.e-10; } } improvement = (prevBest - fbest) / prevBest; if (fbest < maxError) { break; } for (int i = 0; i < n; i++) { direc[i] = -direc[i]; // negative gradient } gam = gamma(n, g, direc); if (gam < 0.0) { gam = 0.0; } if (gam > 10.0) { gam = 10.0; } if (improvement < 0.001) { ++poorCJ; } else { poorCJ = 0; } if (poorCJ >= 2) { if (gam > 1.0) { gam = 1.0; } } if (poorCJ >= 6) { poorCJ = 0; gam = 0.0; } findNewDir(n, gam, g, h, direc); } return fbest; } /** * Find gamma. * * @param n * The number of variables. * @param gam * The gamma value. * @param g * The "g" value, used for CJ algorithm. * @param h * The "h" value, used for CJ algorithm. * @param grad * The gradients. */ private void findNewDir(final int n, final double gam, final double[] g, final double[] h, final double[] grad) { int i; for (i = 0; i < n; i++) { g[i] = grad[i]; grad[i] = h[i] = g[i] + gam * h[i]; } } /** * Find correction for next iteration. * * @param n * The number of variables. * @param g * The "g" value, used for CJ algorithm. * @param grad * The gradients. * @return The correction for the next iteration. 
*/ private double gamma(final int n, final double[] g, final double[] grad) { int i; double denom, numer; numer = denom = 0.0; for (i = 0; i < n; i++) { denom += g[i] * g[i]; numer += (grad[i] - g[i]) * grad[i]; // Grad is neg gradient } if (denom == 0.0) { return 0.0; } else { return numer / denom; } } }
package jgame.platform; import jgame.*; import java.awt.*; import javax.swing.JOptionPane; import mygame.Constants; import java.awt.event.*; import java.io.*; /** A basic framework for a game. It supports an animation and game timer, * object creation at fixed intervals, score, lives, levels, configurable keys. * There are title, start-level, next-level, death, and game-over sequences. * Todo: highscores, key configuration file and GUI. * <p>To initialise this class, use the regular initEngine (from main), or * initEngineApplet (from parameterless constructor). You can supply the width * and height of the window as command line arguments by calling * parseSizeArgs(args) from your main(). Define initGame() as usual. StdGame * does all its logic in the doFrame method, so you should ensure that it is * called (i.e. call super.doFrame() if you choose to override doFrame). The * game will automatically start in the "Title" gamestate when it finds that * it isn't in this state in the first call to doFrame(). You can also set the * "Title" state in initGame if you even want the first frame to be in * "Title". * <p>The class uses the following state machine, using JGEngine's state * machine mechanism: * <p><i>Title</i>: displays title screen. Transition to * {StartLevel,StartGame} when the key_startgame is pressed. Before the * transition, initNewGame(), defineLevel(), and initNewLife() are called. * <p><i>InGame</i>: game is playing. Transition to LifeLost when lifeLost() * is called from within the game. Transition to LevelDone when levelDone() is * called from within the game. Transition to GameOver when gameOver() is * called (i.e. to quit game). The gametime timer indicates how many ticks the * game has been running since the beginning of the level. * <p>StdGame supports a set of standard game sequences, which are represented * as game states: StartLevel/StartGame, LevelDone, LifeLost, GameOver. 
These * can be configured so they add the InGame state to the sequence state (i.e. * the game is in both states simultaneously). This can be used to animate the * game while playing the corresponding sequence. This is off by default. The * seqtimer timer is set to 0 at the beginning of each sequence, and increments * during the sequence to indicate how many ticks the sequence has been * playing. The number of ticks that the sequence should take can be * configured, or the sequence can be skipped altogether by setting ticks to 0. * <p><i>StartGame</i>: start game sequence is played. Transition to InGame * after a certain time has elapsed or the continuegame key is pressed. * <p><i>StartLevel</i>: start level sequence is played. Transition to InGame * after a certain time has elapsed or the continuegame key is pressed. Is always * active in combination with StartGame; it's just an indication that StartGame * is also a start of a new level. * <p><i>LevelDone</i>: next level sequence is played. Transition to * StartLevel/StartGame after a certain time has elapsed or the continuegame key * is pressed. Before the transition, resp. incrementLevel() and defineLevel() * are called. * <p><i>LifeLost</i>: player has just died, a death sequence is played. * Transition to either GameOver or StartGame after a certain time has elapsed * or the continuegame key is pressed, dependent of whether there are lives left. * Before the transition to StartGame, decrementLives() and initNewLife are * called. * <p><i>GameOver</i>: game over sequence is played. Transition to Title after * a certain time or the continuegame key is pressed. * <p>Built in are also game exit (through the key_quitgame, which is Escape by * default), pause game (through the key_pausegame, which defaults to 'P'), and * program exit (key_quitprogram, default Escape). */ public abstract class StdGame extends JGEngine { // XXX can levelDone and lifeLost be triggered simultaneously? 
(ramjet) /* settings */ /** Flag indicating that audio is enabled */ public boolean audioenabled=true; /** Key for starting the game, JRE default is space, MIDP default is "*" */ public int key_startgame = ' '; /** Key for invoking the game settings window, default = enter. */ public int key_gamesettings = KeyEnter; /** Key for continuing the game when in a sequence, JRE default is space, * MIDP default is "*" */ public int key_continuegame = ' '; /** Key for quitting the current game, JRE default is escape, MIDP default * is "#". */ public int key_quitgame = 27; /** Key for quitting the program, JRE default is escape, MIDP default is * "#". */ public int key_quitprogram = 27; /** Key for pausing the game, JRE default is P, MIDP default is '0' */ public int key_pausegame = 'P'; /** Key for moving, default = cursors. */ public int key_left=KeyLeft, key_right=KeyRight, key_up =KeyUp, key_down=KeyDown; /** Key for moving diagonally, default = none. */ //public int key_upleft=0, key_downleft=0, // key_upright=0, key_downright=0; /** Key for firing (in case there are no separate directional fire keys), * JRE default is Z, MIDP default is Fire. */ public int key_fire = 'Z'; /** Key for directional firing, default is WSAD keys for JRE, 2456 for * MIDP. */ public int key_fireleft = 'A', key_fireright= 'D', key_fireup = 'W', key_firedown = 'S'; /** Key for special action, default is X for JRE, 8 for MIDP. */ public int key_action = 'X'; /** Key for diagonal firing, default is none */ //public int key_fireupleft =0, key_firedownleft=0, // key_fireupright=0, key_firedownright=0; /** Keys for special actions. Default = action[0]=ctrl, action[1]=alt */ //public int [] key_action = new int [] //{ KeyCtrl,KeyAlt, 0,0,0, 0,0,0,0,0 }; /** Game timer. Is reset to 0 at the beginning of each level, increments * with gamespeed during InGame. */ public double gametime=0; /** Sequence timer. 
Is reset to 0 at the start of the Title, Highscores, * EnterHighscore, StartLevel, StartGame, * LevelDone, LifeLost, GameOver sequences. Increments with gamespeed * always. Can be used to time animations for these sequences. */ public double seqtimer=0; /** Animation timer. Always increments with gamespeed. * Can be used to time animations etc. */ public double timer=0; /** Player score; starts at 0 at beginning of game. */ public int items=0; /** Difficulty level; starts at 0 at beginning of game. Can be * incremented each time a level is complete. Can be used to determine game * difficulty settings throughout the game. */ public int level=0; /** Game stage, which is usually the same as level, but typically goes on * counting, while level may stop increasing at a certain value. * Can be used to vary graphic sets, display stage number, etc. */ public int stage=0; /** Lives count, 0 means game over. */ public int lives=0; /** Initial value for lives; default=4 */ public int initial_lives=Constants.USER_LIVES; /** Number of ticks to stay in StartLevel/StartGame state, 0 = skip */ public int startgame_ticks=80; /** Number of ticks to stay in LevelDone state, 0 = skip */ public int leveldone_ticks=80; /** Number of ticks to stay in LifeLost state, 0 = skip */ public int lifelost_ticks=80; /** Number of ticks to stay in GameOver state, 0 = skip */ public int gameover_ticks=120; /** Indicates whether the InGame state should be retained when in the * corresponding sequence state. */ public boolean startgame_ingame=false, leveldone_ingame=false, lifelost_ingame=false, gameover_ingame=false; /** Horizontal margins to be used by status displays, default 12 pixels. 
*/ public int status_l_margin=12,status_r_margin=12; /** Font to use to display score */ public JGFont status_font = new JGFont("Courier",Font.BOLD,12); /** Color to use to display score */ public JGColor status_color = JGColor.white; /** Image to use to display lives */ public String lives_img = null; /** Font to use to display title and messages */ public JGFont title_font = new JGFont("Courier",0,18); /** Color to use to display title and messages */ public JGColor title_color = JGColor.white; /** Color to use to display background effects behind title and messages */ public JGColor title_bg_color = JGColor.blue; /** indicates that engine has just started and has not produced a single * frame. */ boolean just_inited=true; /** The application configuration handler. Default is null, use * initAppConfig to initialise it. */ public AppConfig appconfig=null; /** Set the status display variables in one go. */ public void setStatusDisplay(JGFont status_font,JGColor status_color, String lives_img) { this.status_font=status_font; this.status_color=status_color; this.lives_img=lives_img; } /** Set all sequence variables in one go. */ public void setSequences(boolean startgame_ingame,int startgame_ticks, boolean leveldone_ingame, int leveldone_ticks, boolean lifelost_ingame, int lifelost_ticks, boolean gameover_ingame, int gameover_ticks) { this.startgame_ingame=startgame_ingame; this.leveldone_ingame=leveldone_ingame; this.lifelost_ingame=lifelost_ingame; this.gameover_ingame=gameover_ingame; this.startgame_ticks=startgame_ticks; this.leveldone_ticks=leveldone_ticks; this.lifelost_ticks=lifelost_ticks; this.gameover_ticks=gameover_ticks; } /** Highscore table, null (default) means not defined. Use setHighscores * to define the table. If defined, the game will handle highscores by * means of the states Highscores and EnterHighscore. */ public Highscore [] highscores=null; /** Maximum length of name typed by user. 
*/ public int highscore_maxnamelen=15; /** Player's name being entered in EnterHighscore; is reset to the empty * string before the EnterHighscore state is entered. Is altered by * doFrameEnterHighscore. */ public String playername=""; /** Time to wait in title screen before showing highscores. */ public int highscore_waittime=500; /** Time to show highscores before going back to title screen. */ public int highscore_showtime=600; /** Font to use to display highscores */ public JGFont highscore_font = new JGFont("Courier",Font.BOLD,16); /** Color to use to display highscores */ public JGColor highscore_color = JGColor.white; /** Font to use to display highscore title information */ public JGFont highscore_title_font = new JGFont("Courier",Font.BOLD,16); /** Color to use to display highscore title information */ public JGColor highscore_title_color = JGColor.white; /** Title string to display above highscores */ public String highscore_title="Highest Scores"; /** String to display above highscore entry screen. */ public String highscore_entry="You have a high score!"; /** Define highscore table. */ public void setHighscores(int nr_highscores, Highscore default_hisc, int maxnamelen) { highscores = new Highscore [nr_highscores]; for (int i=0; i<nr_highscores; i++) // XXX maybe clone? highscores[i] = default_hisc; highscore_maxnamelen=maxnamelen; } /** Set highscore display settings. */ public void setHighscoreDisplay(int waittime,int showtime, JGFont font, JGColor color, String title, String entry, JGFont titlefont, JGColor titlecolor) { highscore_waittime=waittime; highscore_showtime=showtime; highscore_font=font; highscore_color=color; highscore_title=title; highscore_entry=entry; highscore_title_font=titlefont; highscore_title_color=titlecolor; } /** Look for two arguments starting at index arg_ofs, and parse them as * width and height. Returns (0.0) if there are no arguments. Prints a * usage message otherwise. 
*/
	public static JGPoint parseSizeArgs(String [] args,int arg_ofs) {
		JGPoint size = new JGPoint(0,0);
		if (args.length==arg_ofs+2) {
			try {
				size.x = Integer.parseInt(args[arg_ofs]);
				size.y = Integer.parseInt(args[arg_ofs+1]);
			} catch (Exception e) {
				// non-numeric argument: print usage and bail out
				System.out.println("\nError parsing width/height arguments."
				+"They should be integers.\n" );
				System.exit(0);
			}
		} else if (args.length!=arg_ofs) {
			// wrong number of arguments: print usage and bail out
			System.out.println(
			"\nYou can supply either no arguments, or [width] [height].\n" );
			System.exit(0);
		}
		return size;
	}

	/* special state functions */

	/** Initialise the game when a new game is started. Default sets level,
	 * stage, and items (the score counter) to 0, and lives to
	 * initial_lives. */
	public void initNewGame() {
		level=0;
		stage=0;
		items=0;
		lives=initial_lives;
	}

	/** Initialise play specifically after a new life is introduced (that is,
	 * at game start and after the player has died. This is typically used to
	 * reinitialise the player.  If you want a specific initialisation at
	 * both the beginning of the level or after the player death, use
	 * startInGame(). Default is do nothing. */
	public void initNewLife() {}

	/** Initialise a level. Default is do nothing. */
	public void defineLevel() {}

	/** Code for losing a life before transition from LifeLost to InGame is
	 * made.  Default is decrement lives. */
	public void decrementLives() { lives--; }

	/** Code for incrementing a level before transition from LevelDone to
	 * InGame is made. Default is increment level and stage. */
	public void incrementLevel() {
		level++;
		stage++;
	}

	/* state transition functions */

	/** Call to make state transition to LifeLost.  Is ignored when in
	 * another state than InGame or {InGame,StartLevel/StartGame}.
	 * After the LifeLost
	 * sequence, goes to InGame or GameOver, depending on lives left.
*/
	public final void lifeLost() {
		// consume any pending continue keypress so the sequence is not
		// skipped immediately
		clearKey(key_continuegame);
		removeGameState("StartLevel");
		removeGameState("StartGame");
		seqtimer=0;
		if (lifelost_ticks > 0) {
			// play the LifeLost sequence, optionally keeping InGame active
			if (lifelost_ingame) addGameState("LifeLost");
			else                 setGameState("LifeLost");
			// timer fires once after lifelost_ticks to end the sequence
			new JGTimer(lifelost_ticks,true,"LifeLost") {
				public void alarm() { endLifeLost(); }
			};
		} else {
			// sequence disabled: transition immediately
			endLifeLost();
		}
	}

	/** End of the LifeLost sequence: decrement lives and either go to
	 * GameOver (no lives left) or restart play via the StartGame sequence. */
	private void endLifeLost() {
		clearKey(key_continuegame);
		decrementLives();
		if (lives <= 0) {
			gameOver();
		} else {
			initNewLife();
			seqtimer=0;
			if (startgame_ticks > 0) {
				// force call to startInGame()
				setGameState("StartGame");
				if (startgame_ingame) addGameState("InGame");
				new JGTimer(startgame_ticks,true,"StartGame") {
					public void alarm() { setGameState("InGame"); }
				};
			} else {
				// force call to startInGame()
				clearGameState();
				setGameState("InGame");
			}
		}
	}

	/** Call to make state transition to LevelDone. Is ignored when state is
	 * not InGame or {Ingame,StartLevel/StartGame}. After the LevelDone
	 * sequence, it sets gametime to 0, calls
	 * incrementLevel and defineLevel, and goes to StartLevel/StartGame. */
	public final void levelDone() {
		// guard: only valid while actually playing
		if (!inGameState("InGame")
		||  inGameState("LevelDone")
		||  inGameState("LifeLost")
		||  inGameState("GameOver") ) return;
		// System.err.println(
		// "Warning: levelDone() called from other state than InGame." );
		//}
		clearKey(key_continuegame);
		removeGameState("StartLevel");
		removeGameState("StartGame");
		seqtimer=0;
		if (leveldone_ticks > 0) {
			if (leveldone_ingame) addGameState("LevelDone");
			else                  setGameState("LevelDone");
			// timer fires once after leveldone_ticks to end the sequence
			new JGTimer(leveldone_ticks,true,"LevelDone") {
				public void alarm() { levelDoneToStartLevel(); }
			};
		} else {
			levelDoneToStartLevel();
		}
	}

	/** End of the LevelDone sequence: advance to the next level and play
	 * the StartLevel/StartGame sequence. */
	private void levelDoneToStartLevel() {
		clearKey(key_continuegame);
		gametime=0;
		incrementLevel();
		defineLevel();
		seqtimer=0;
		if (startgame_ticks > 0) {
			// force call to startInGame
			setGameState("StartLevel");
			addGameState("StartGame");
			if (startgame_ingame) addGameState("InGame");
			new JGTimer(startgame_ticks,true,"StartLevel") {
				public void alarm() { setGameState("InGame"); }
			};
		} else {
			// force call to startInGame
			clearGameState();
			setGameState("InGame");
		}
	}

	/** Call to make straight transition to GameOver; is called automatically
	 * by lifeLost when appropriate. Is ignored when game state is not
	 * {InGame}, {Ingame,Start*}, or LifeLost. Will go to Title after GameOver
	 * sequence. */
	public final void gameOver() {
		// XXX hmm. we should check out these conditions
		if ( inGameState("GameOver")
		|| (!inGameState("InGame") && !inGameState("LifeLost")) ) return;
		// System.err.println( "Warning: gameOver() called from other state"
		// +" than InGame or LifeLost." );
		//}
		clearKey(key_continuegame);
		removeGameState("StartLevel");
		removeGameState("StartGame");
		removeGameState("LifeLost");
		seqtimer=0;
		if (gameover_ticks > 0) {
			// NOTE(review): unlike lifeLost/levelDone, no JGTimer is armed
			// here, so the GameOver->Title transition only happens via the
			// continuegame key handling in doFrame() - confirm whether a
			// gameover_ticks timer was intended.
			if (gameover_ingame) addGameState("GameOver");
			else                 setGameState("GameOver");
		} else {
			gotoTitle();
		}
	}

	/** Go to title or to highscore entry screen. */
	private void gotoTitle() {
		seqtimer=0;
		clearKey(key_startgame);
		// enter the highscore screen only when the score actually qualifies
		if (highscores!=null
		&&  Highscore.findPos(highscores,items)>=0 ) {
			setGameState("EnterHighscore");
		} else {
			setGameState("Title");
		}
	}

	/** Define appconfig for configuring keys. Override to define your own
	 * appconfig; define an empty method to remove the ability of game
	 * configuration.
Default behaviour is: create appconfig, set the "key_" * fields as configuration fields, then load them from a file named * $JGAMEHOME/[classname].cfg, and save these in the StdGame object. * Create your own AppConfig like this: * <pre> * appconfig = new AppConfig("[title]", this, getConfigPath("[filename]"); * </pre> * getConfigPath returns a path to a writable file in [user.home]/.jgame/ */ public void initAppConfig() { appconfig = new AppConfig(getClass().getName().substring( getClass().getName().indexOf('.')+1 )+" settings", this, getConfigPath(getClass().getName()+".cfg") ); appconfig.defineField("audioenabled","Enable Sound","boolean"); appconfig.defineFields("key_","","","","key"); appconfig.loadFromFile(); appconfig.saveToObject(); } /** The main doFrame takes care of all the standard game actions. If you * override it, you should typically call super.doFrame(). doFrame * increments timer, increments gametime when in InGame, quits game when * key_quitgame is pressed in InGame. In Title, it waits for the user to * press the key_startgame, then sets gametime to 0, calls initNewGame, * defineLevel, and goes to StartLevel. It also handles the continue_game * key inside the sequences, and the gamesettings and quitprogram keys in * Title. It also ensures the audioenabled flag is passed to engine. */ public void doFrame() { // pass audioenabled if (audioenabled) { enableAudio(); } else { disableAudio(); } // handle pause mode if (inGameState("Paused")) { clearKey(key_pausegame); // stop and remove game state on the next frame removeGameState("Paused"); stop(); } if (getKey(key_pausegame) && !inGameState("EnterHighscore")) { addGameState("Paused"); clearKey(key_pausegame); wakeUpOnKey(key_pausegame); } // handle general actions timer += getGameSpeed(); seqtimer += getGameSpeed(); if (just_inited) { setGameState("Title"); just_inited=false; //handle appconfig //creating appconfig while el.objects locked gives deadlock (?) 
//so we do it in separate thread Thread t = new Thread(new Runnable() { public void run() { initAppConfig(); if (appconfig!=null) { // continue when config window is closed appconfig.setListener(new ActionListener() { public void actionPerformed(ActionEvent e) { start();//applet.start that is requestGameFocus(); } } ); } } }); t.start(); // load highscores try { Highscore [] loadedhisc = Highscore.load(new FileInputStream( getConfigPath(getClass().getName()+".hsc") ) ); if (loadedhisc.length > 0) { // empty file, ignore highscores=loadedhisc; } } catch (Exception e) { //do nothing, keep old highscores (which should be the //default highscores) } } else if (inGameState("InGame")) { gametime += getGameSpeed(); if (getKey(key_quitgame)) gameOver(); } else if (inGameState("Title")||inGameState("Highscores")) { if (getKey(key_quitprogram) && !isApplet()) { clearKey(key_quitprogram); new Thread(new QuitgameThread()).start(); stop(); } if (getKey(key_gamesettings) && appconfig!=null) { appconfig.openGui(); clearKey(key_gamesettings); //pause application until config window is closed //appconfig.waitCloseGui(); stop(); } if (getKey(key_startgame)) { gametime=0; initNewGame(); defineLevel(); initNewLife(); // code duplicated in levelDone clearKey(key_continuegame); seqtimer=0; if (startgame_ticks > 0) { setGameState("StartLevel"); addGameState("StartGame"); if (startgame_ingame) addGameState("InGame"); new JGTimer(startgame_ticks,true,"StartLevel") { public void alarm() { setGameState("InGame"); } }; } else { setGameState("InGame"); } } if (highscores!=null) { if (getKey(key_continuegame)) { clearKey(key_continuegame); seqtimer=0; if (inGameState("Title")) setGameState("Highscores"); else setGameState("Title"); } if (inGameState("Title") && seqtimer>=highscore_waittime) { seqtimer=0; setGameState("Highscores"); } else if (inGameState("Highscores") && seqtimer>=highscore_showtime) { seqtimer=0; setGameState("Title"); } } } else if (inGameState("StartGame")) { if 
(getKey(key_continuegame)) setGameState("InGame");
        } else if (inGameState("LevelDone")) {
            if (getKey(key_continuegame)) levelDoneToStartLevel();
        } else if (inGameState("LifeLost")) {
            if (getKey(key_continuegame)) endLifeLost();
        } else if (inGameState("GameOver")) {
            if (getKey(key_continuegame)) gotoTitle();
        }
    }
    /* default doFrame... actions; note we still have to define the others.*/
    /** Default lets user type name into the variable playername. If enter is
    * pressed, highscore is put in table and saved to disk. */
    public void doFrameEnterHighscore() {
        char key = getLastKeyChar();
        clearLastKey();
        if (key==KeyBackspace && playername.length()>0)
            playername = playername.substring(0,playername.length()-1);
        if (key==KeyEnter) {
            highscores = Highscore.insert(highscores,
                new Highscore(items,playername));
            clearLastKey();
            clearKey(KeyEnter);
            saveHighscores();
            seqtimer=0;
            setGameState("Highscores");
        }
        // Only printable ASCII is accepted, up to highscore_maxnamelen chars.
        if (key>=32 && key<127 && playername.length()<highscore_maxnamelen)
            playername += key;
    }
    /** Try to save highscores to default location,
    * $HOMEDIR/.jgame/$CLASSNAME.hsc. Returns true on
    * success, false on failure. */
    public boolean saveHighscores() {
        try {
            Highscore.save(highscores,new FileOutputStream(
                getConfigPath(getClass().getName()+".hsc") ) );
            return true;
        } catch (Exception e) {
            // ioexception or accesscontrolexception
            return false;
        }
    }
    /** Shows a confirm dialog and exits the JVM on OK; restarts the game
    * thread otherwise. Run on its own thread to avoid blocking the game. */
    class QuitgameThread implements Runnable {
        public QuitgameThread() {}
        public void run() {
            // we must wait for the option pane in a separate thread,
            // otherwise the game thread will deadlock with the awt event
            // thread when the event thread tries to repaint something.
            //new JOptionPane("Really quit?",JOptionPane.QUESTION_MESSAGE,
            //	JOptionPane.OK_CANCEL_OPTION);
            int opt=JOptionPane.showConfirmDialog(null,
                "Really quit?","Quit Game",JOptionPane.OK_CANCEL_OPTION);
            if (opt==JOptionPane.YES_OPTION) System.exit(0);
            //restart the game thread
            start();
        }
    }
    /* default start... functions */
    /** Initialise the title screen.
This is a standard state transition
    * function. Default is do nothing. */
    public void startTitle() {}
    /** Initialise the highscore display. This is a standard state transition
    * function. Default is do nothing. */
    public void startHighscores() {}
    /** Initialisation at the start of the in-game action. This is a
    * standard state transition function. Note that it is always called after
    * StartLevel and LifeLost, even if startgame_ingame and
    * lifelost_ingame are set. Default is do nothing. */
    public void startInGame() {}
    /** Initialise start-level sequence. This is a
    * standard state transition function. Default is do nothing. */
    public void startStartLevel() {}
    /** Initialise start-game sequence. This is a
    * standard state transition function. Default is do nothing. */
    public void startStartGame() {}
    /** Initialise next-level sequence. This is a
    * standard state transition function. Default is do nothing. */
    public void startLevelDone() {}
    /** Initialise death sequence. This is a
    * standard state transition function. Default is do nothing. */
    public void startLifeLost() {}
    /** Initialise game over sequence. This is a
    * standard state transition function. Default is do nothing. */
    public void startGameOver() {}
    /** Initialise enter-highscore screen. This is a standard state
    * transition function. Default is clear lastkey and set playername to
    * the empty string.*/
    public void startEnterHighscore() {
        clearLastKey();
        playername="";
    }
    /* default paint functions */
    /** Default paintFrame displays score at top left, lives at top right.
    * When lives_img is set, it uses that image to display lives.
*/
    public void paintFrame() {
        setFont(status_font);
        setColor(status_color);
        drawString("Items "+items,status_l_margin,0,-1);
        if (lives_img==null) {
            drawString("Lives "+lives,viewWidth()-status_r_margin,0,1);
        } else {
            // Negative increment draws the lives icons right-to-left.
            drawCount(lives-1, lives_img, viewWidth()-status_r_margin,0,
                - getImageSize(lives_img).x-2 );
        }
    }
    /** Default displays class name as title, and "press [key_startgame] to
    * start" below it. */
    public void paintFrameTitle() {
        drawString(getClass().getName().substring(getClass().getName()
            .lastIndexOf('.' )+1 ),
            viewWidth()/2,viewHeight()/3,0,title_font,title_color);
        drawString("Press "+getKeyDesc(key_startgame)+" to start",
            viewWidth()/2,6*viewHeight()/10,0,title_font,title_color);
        drawString("Press "+getKeyDesc(key_gamesettings)+" for settings",
            viewWidth()/2,7*viewHeight()/10,0,title_font,title_color);
    }
    /** The game is halted in pause mode, but the paintFrame is still done to
    * refresh the screen. Default behaviour of paintFramePaused() is display
    * "Paused", "Press [key_pausegame] to continue" using title_font,
    * title_color */
    public void paintFramePaused() {
        setColor(title_bg_color);
        drawRect(viewWidth()/20,15*viewHeight()/36,18*viewWidth()/20,
            5*viewHeight()/36+(int)getFontHeight(title_font),
            true,false,false);
        drawString("Paused",viewWidth()/2,16*viewHeight()/36,0,
            title_font,title_color);
        drawString("Press "+getKeyDesc(key_pausegame)+" to continue",
            viewWidth()/2,19*viewHeight()/36,0,
            title_font,title_color);
    }
    /** Default displays "Level "+(stage+1). */
    public void paintFrameStartLevel() {
        drawString("Level "+(stage+1),
            viewWidth()/2,3*viewHeight()/5,0,title_font,title_color);
    }
    /** Default displays "Start !". */
    public void paintFrameStartGame() {
        drawString("Start !",
            viewWidth()/2,viewHeight()/3,0,title_font,title_color);
    }
    /** Default displays "Level Done !". */
    public void paintFrameLevelDone() {
        drawString("Level Done !",
            viewWidth()/2,viewHeight()/3,0,title_font,title_color);
    }
    /** Default displays "Life Lost !".
*/
    public void paintFrameLifeLost() {
        drawString("Life Lost !",
            viewWidth()/2,viewHeight()/3,0,title_font,title_color);
    }
    /** Default displays "Game Over!". */
    public void paintFrameGameOver() {
        drawString("Game Over !",
            viewWidth()/2,viewHeight()/3,0,title_font,title_color);
    }
    /** Default displays highscore_entry, and the player's score and
    * playername currently being entered. */
    public void paintFrameEnterHighscore() {
        drawString(highscore_entry,
            viewWidth()/2,viewHeight()/3,0,highscore_title_font,
            highscore_title_color);
        drawString(""+items,
            viewWidth()/2,viewHeight()/2,0,highscore_font,highscore_color);
        // The trailing "|" acts as a text cursor.
        drawString(playername+"|",
            viewWidth()/2,2*viewHeight()/3,0,highscore_font,highscore_color);
    }
    /** Default displays the highscore list. Fields are not yet supported. */
    public void paintFrameHighscores() {
        drawString(highscore_title,
            viewWidth()/2,viewHeight()/7,0,highscore_title_font,
            highscore_title_color);
        // Vertically centre the table around 0.6*viewHeight.
        double yinc = 0.7*viewHeight()/highscores.length;
        double ypos = 0.6*viewHeight() - yinc*(highscores.length/2.0);
        for (int i=0; i<highscores.length; i++) {
            drawString(""+highscores[i].score, 0.35*viewWidth(), ypos + i*yinc,
                1,highscore_font,highscore_color);
            drawString(highscores[i].name, 0.6*viewWidth(), ypos + i*yinc,
                0,highscore_font,highscore_color);
        }
    }
    /* handy game functions */
    /** Returns true every increment ticks, but only when gametime is between
    * min_time and max_time. */
    public boolean checkTime(int min_time,int max_time,int increment) {
        return gametime>min_time && gametime<max_time
            && ((gametime-1)%increment)<getGameSpeed();
    }
    /** Returns true every increment ticks. */
    public boolean checkTime(int increment) {
        // Comparing against getGameSpeed() makes this fire once per
        // increment even when the game advances multiple ticks per frame.
        return ((gametime-1)%increment)<getGameSpeed();
    }
    /* handy draw and effects functions */
    /** Draw a row of objects to indicate the value count. This is typically
    * used to indicate lives left.
*/
    public void drawCount(int count, String image,int x,int y,int increment_x) {
        // With a negative increment the row is anchored at its right edge.
        if (increment_x < 0) x += increment_x;
        for (int i=0; i<count; i++)
            drawImage(x + i*increment_x, y, image, false);
    }
    /** Draw a string with letters that move up and down individually. */
    public void drawWavyString(String s, int x,int y,int align,int increment_x,
    double tmr,double amplitude, double pos_phaseshift, double timer_phaseshift,
    JGFont font, JGColor col) {
        setFont(font);
        setColor(col);
        // align: 0 = centre, 1 = right; otherwise left-aligned.
        if (align==0) {
            x -= increment_x*s.length()/2;
        } else if (align==1) {
            x -= increment_x*s.length();
        }
        // Each letter is drawn individually, offset vertically by a cosine
        // wave phase-shifted per position and animated by tmr.
        for (int i=0; i<s.length(); i++)
            drawString(s.substring(i,i+1),
                x + i*increment_x,
                y + (int)(amplitude *
                -Math.cos(Math.PI*(pos_phaseshift*i + tmr*timer_phaseshift)) ),
                0);
    }
    /** Draw a String that zooms in and out. Alignment is always center. Note
    * that tmr = 0 will start the font zooming in. */
    //public void drawZoomString(String s,int x,int y,
    //int tmr, double min_size_fac, double speed, Font font, JGColor col) {
    //	drawString(s,x,y,0,zoomed,col);
    //}
    /** Get font for zooming text in and out. Note that tmr = 0 will start
    * the font zooming in from the farthest position. */
    public JGFont getZoomingFont(JGFont basejgfont, double tmr,
    double min_size_fac, double speed) {
        // Size oscillates between min_size_fac and min_size_fac+1 times the
        // base size, following a cosine of the (scaled) timer.
        return new JGFont(basejgfont.name,basejgfont.style,
            basejgfont.size*(min_size_fac+0.5-0.5*Math.cos(Math.PI*speed*tmr)));
        //Font basefont =
        //	new Font(basejgfont.name,basejgfont.style,(int)basejgfont.size);
        //double origsize = basefont.getSize2D();
        //Font font = basefont.deriveFont((float)( origsize*(min_size_fac +
        //	0.5 - 0.5*Math.cos(Math.PI*speed*tmr) ) ));
        //return new JGFont(basejgfont.name,basejgfont.style,font.getSize());
    }
    /** Get a colour from a colour cycle. */
    public JGColor cycleColor(JGColor [] cycle, double tmr, double speed) {
        return cycle[ ( (int)(tmr*speed) ) % cycle.length ];
    }
    /** Walk across the screen, standing still momentarily at a specific
    * position.
*/
    public int posWalkForwards(int begin_pos, int end_pos, double tmr,
    int end_time,int standstill_pos,int standstill_time,int standstill_count){
        // Phase 1: approaching the standstill point at constant speed.
        if (tmr < standstill_time) {
            double approachSlope =
                (standstill_pos - begin_pos)/(double)standstill_time;
            return begin_pos + (int)(tmr*approachSlope);
        }
        // Phase 2: standing still at standstill_pos for standstill_count.
        if (tmr>=standstill_time && tmr<standstill_time+standstill_count) {
            return standstill_pos;
        }
        // Phase 3: walking on from the standstill point towards end_pos.
        if (tmr >= standstill_time+standstill_count && tmr < end_time) {
            int resumeTime = standstill_time + standstill_count;
            double departSlope =
                (end_pos-standstill_pos)/(double)(end_time - resumeTime);
            return standstill_pos + (int)((tmr-resumeTime) * departSlope);
        }
        // Beyond end_time: clamp to the final position.
        return end_pos;
    }
}
/******************************************************************************* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. *******************************************************************************/ package org.apache.ofbiz.service.job; import java.io.IOException; import java.sql.Timestamp; import java.util.Date; import java.util.HashMap; import java.util.Map; import javax.xml.parsers.ParserConfigurationException; import org.apache.commons.lang.StringUtils; import org.apache.ofbiz.base.config.GenericConfigException; import org.apache.ofbiz.base.util.Debug; import org.apache.ofbiz.base.util.UtilDateTime; import org.apache.ofbiz.base.util.UtilGenerics; import org.apache.ofbiz.base.util.UtilValidate; import org.apache.ofbiz.entity.Delegator; import org.apache.ofbiz.entity.GenericEntityException; import org.apache.ofbiz.entity.GenericValue; import org.apache.ofbiz.entity.serialize.SerializeException; import org.apache.ofbiz.entity.serialize.XmlSerializer; import org.apache.ofbiz.entity.util.EntityQuery; import org.apache.ofbiz.service.DispatchContext; import org.apache.ofbiz.service.GenericRequester; import org.apache.ofbiz.service.ServiceUtil; import org.apache.ofbiz.service.calendar.RecurrenceInfo; import 
org.apache.ofbiz.service.calendar.RecurrenceInfoException;
import org.apache.ofbiz.service.calendar.TemporalExpression;
import org.apache.ofbiz.service.calendar.TemporalExpressionWorker;
import org.apache.ofbiz.service.config.ServiceConfigUtil;
import org.xml.sax.SAXException;

import com.ibm.icu.util.Calendar;

/**
 * A {@link Job} that is backed by the entity engine. Job data is stored
 * in the JobSandbox entity.
 * <p>When the job is queued, this object "owns" the entity value. Any external changes
 * are ignored except the cancelDateTime field - jobs can be canceled after they are queued.</p>
 */
@SuppressWarnings("serial")
public class PersistedServiceJob extends GenericServiceJob {

    public static final String module = PersistedServiceJob.class.getName();

    private final transient Delegator delegator;
    // Run time (millis) of the next scheduled recurrence; -1 until one is created.
    private long nextRecurrence = -1;
    // Max failure retries allowed for this job; -1 means unlimited.
    private final long maxRetry;
    private final long currentRetryCount;
    private final GenericValue jobValue;
    // Scheduled run time of this job, taken from the JobSandbox runTime field.
    private final long startTime;

    /**
     * Creates a new PersistedServiceJob backed by the given JobSandbox value.
     * @param dctx the dispatch context used to obtain the delegator
     * @param jobValue the JobSandbox entity value this job wraps
     * @param req optional requester notified of the service result
     */
    public PersistedServiceJob(DispatchContext dctx, GenericValue jobValue, GenericRequester req) {
        super(dctx, jobValue.getString("jobId"), jobValue.getString("jobName"), null, null, req);
        this.delegator = dctx.getDelegator();
        this.jobValue = jobValue;
        Timestamp storedDate = jobValue.getTimestamp("runTime");
        this.startTime = storedDate.getTime();
        this.maxRetry = jobValue.get("maxRetry") != null ? jobValue.getLong("maxRetry").longValue() : -1;
        Long retryCount = jobValue.getLong("currentRetryCount");
        if (retryCount != null) {
            this.currentRetryCount = retryCount.longValue();
        } else {
            // backward compatibility: older records have no currentRetryCount,
            // so derive it by counting failed siblings
            this.currentRetryCount = getRetries(this.delegator);
        }
    }

    /**
     * Marks the JobSandbox record as SERVICE_QUEUED after verifying that this
     * instance still owns the job and it was not cancelled or started.
     * @throws InvalidJobException if the job is owned elsewhere, already
     *         started/cancelled, or the entity cannot be refreshed/stored
     */
    @Override
    public void queue() throws InvalidJobException {
        super.queue();
        try {
            jobValue.refresh();
        } catch (GenericEntityException e) {
            throw new InvalidJobException("Unable to refresh JobSandbox value", e);
        }
        if (!JobManager.instanceId.equals(jobValue.getString("runByInstanceId"))) {
            throw new InvalidJobException("Job has been accepted by a different instance");
        }
        Timestamp cancelTime = jobValue.getTimestamp("cancelDateTime");
        Timestamp startTime = jobValue.getTimestamp("startDateTime");
        if (cancelTime != null || startTime != null) {
            // job not available
            throw new InvalidJobException("Job [" + getJobId() + "] is not available");
        } else {
            jobValue.set("statusId", "SERVICE_QUEUED");
            try {
                jobValue.store();
            } catch (GenericEntityException e) {
                throw new InvalidJobException("Unable to set the startDateTime and statusId on the current job [" + getJobId() + "]; not running!", e);
            }
            if (Debug.verboseOn()) {
                Debug.logVerbose("Placing job [" + getJobId() + "] in queue", module);
            }
        }
    }

    /**
     * Transitions the job to SERVICE_RUNNING and schedules the next
     * recurrence (from RecurrenceInfo or a TemporalExpression) if one applies.
     * @throws InvalidJobException if ownership was lost, the job was
     *         cancelled, or the entity cannot be updated
     */
    @Override
    protected void init() throws InvalidJobException {
        super.init();
        try {
            jobValue.refresh();
        } catch (GenericEntityException e) {
            throw new InvalidJobException("Unable to refresh JobSandbox value", e);
        }
        if (!JobManager.instanceId.equals(jobValue.getString("runByInstanceId"))) {
            throw new InvalidJobException("Job has been accepted by a different instance");
        }
        if (jobValue.getTimestamp("cancelDateTime") != null) {
            // Job cancelled
            throw new InvalidJobException("Job [" + getJobId() + "] was cancelled");
        }
        jobValue.set("startDateTime", UtilDateTime.nowTimestamp());
        jobValue.set("statusId", "SERVICE_RUNNING");
        try {
            jobValue.store();
        } catch (GenericEntityException e) {
            throw new InvalidJobException("Unable to set the startDateTime and statusId on the current job [" + getJobId() + "]; not running!", e);
        }
        if (Debug.verboseOn()) {
            Debug.logVerbose("Job [" + getJobId() + "] running", module);
        }
        // configure any additional recurrences
        long maxRecurrenceCount = -1;
        long currentRecurrenceCount = 0;
        TemporalExpression expr = null;
        RecurrenceInfo recurrence = getRecurrenceInfo();
        if (recurrence != null) {
            Debug.logWarning("Persisted Job [" + getJobId() + "] references a RecurrenceInfo, recommend using TemporalExpression instead", module);
            currentRecurrenceCount = recurrence.getCurrentCount();
            expr = RecurrenceInfo.toTemporalExpression(recurrence);
        }
        if (expr == null && UtilValidate.isNotEmpty(jobValue.getString("tempExprId"))) {
            try {
                expr = TemporalExpressionWorker.getTemporalExpression(this.delegator, jobValue.getString("tempExprId"));
            } catch (GenericEntityException e) {
                throw new RuntimeException(e.getMessage());
            }
        }
        if (jobValue.get("maxRecurrenceCount") != null) {
            maxRecurrenceCount = jobValue.getLong("maxRecurrenceCount").longValue();
        }
        if (jobValue.get("currentRecurrenceCount") != null) {
            currentRecurrenceCount = jobValue.getLong("currentRecurrenceCount").longValue();
        }
        if (maxRecurrenceCount != -1) {
            currentRecurrenceCount++;
            jobValue.set("currentRecurrenceCount", currentRecurrenceCount);
        }
        try {
            if (expr != null && (maxRecurrenceCount == -1 || currentRecurrenceCount <= maxRecurrenceCount)) {
                if (recurrence != null) {
                    recurrence.incrementCurrentCount();
                }
                Calendar next = expr.next(Calendar.getInstance());
                if (next != null) {
                    createRecurrence(next.getTimeInMillis(), false);
                }
            }
        } catch (GenericEntityException e) {
            throw new InvalidJobException(e);
        }
        // NOTE(review): when no recurrence was created, nextRecurrence is
        // still -1 and this logs the epoch-relative date new Date(-1)
        if (Debug.infoOn()) Debug.logInfo("Job [" + getJobName() + "] Id [" + getJobId() + "] -- Next runtime: " + new Date(nextRecurrence), module);
    }

    /**
     * Creates a new SERVICE_PENDING JobSandbox record that re-runs this job
     * at the given time. No-op if {@code next} is not after this job's
     * scheduled run time.
     * @param next run time in millis for the new job
     * @param isRetryOnFailure true when rescheduling after a failure; bumps
     *        currentRetryCount instead of resetting it to 0
     * @throws GenericEntityException if the new record cannot be created
     */
    private void createRecurrence(long next, boolean isRetryOnFailure) throws GenericEntityException {
        if (Debug.verboseOn()) Debug.logVerbose("Next runtime returned: " + next, module);
        if (next > startTime) {
            String pJobId = jobValue.getString("parentJobId");
            if (pJobId == null) {
                pJobId = jobValue.getString("jobId");
            }
            GenericValue newJob = GenericValue.create(jobValue);
            newJob.remove("jobId");
            newJob.set("previousJobId", jobValue.getString("jobId"));
            newJob.set("parentJobId", pJobId);
            newJob.set("statusId", "SERVICE_PENDING");
            newJob.set("startDateTime", null);
            newJob.set("runByInstanceId", null);
            newJob.set("runTime", new java.sql.Timestamp(next));
            // Long.valueOf instead of the deprecated new Long(...) constructor
            if (isRetryOnFailure) {
                newJob.set("currentRetryCount", Long.valueOf(currentRetryCount + 1));
            } else {
                newJob.set("currentRetryCount", Long.valueOf(0));
            }
            nextRecurrence = next;
            delegator.createSetNextSeqId(newJob);
            if (Debug.verboseOn()) Debug.logVerbose("Created next job entry: " + newJob, module);
        }
    }

    /**
     * Records a successful run: sets SERVICE_FINISHED, the finish time, and
     * a truncated (255 chars) result message on the JobSandbox record.
     * @param result the service result map
     */
    @Override
    protected void finish(Map<String, Object> result) throws InvalidJobException {
        super.finish(result);
        // set the finish date
        jobValue.set("statusId", "SERVICE_FINISHED");
        jobValue.set("finishDateTime", UtilDateTime.nowTimestamp());
        String jobResult = null;
        if (ServiceUtil.isError(result)) {
            jobResult = StringUtils.substring(ServiceUtil.getErrorMessage(result), 0, 255);
        } else {
            jobResult = StringUtils.substring(ServiceUtil.makeSuccessMessage(result, "", "", "", ""), 0, 255);
        }
        if (UtilValidate.isNotEmpty(jobResult)) {
            jobValue.set("jobResult", jobResult);
        }
        try {
            jobValue.store();
        } catch (GenericEntityException e) {
            Debug.logError(e, "Cannot update the job [" + getJobId() + "] sandbox", module);
        }
    }

    /**
     * Records a failed run: if no recurrence exists yet and retries remain,
     * schedules a retry a configured number of minutes from now, then marks
     * the record SERVICE_FAILED with the (truncated) error message.
     * @param t the failure cause
     */
    @Override
    protected void failed(Throwable t) throws InvalidJobException {
        super.failed(t);
        // if the job has not been re-scheduled; we need to re-schedule and run again
        if (nextRecurrence == -1) {
            if (this.canRetry()) {
                // create a recurrence
                Calendar cal = Calendar.getInstance();
                try {
                    cal.add(Calendar.MINUTE, ServiceConfigUtil.getServiceEngine().getThreadPool().getFailedRetryMin());
                } catch (GenericConfigException e) {
                    Debug.logWarning(e, "Unable to get retry minutes for job [" + getJobId() + "], defaulting to now: ", module);
                }
                long next = cal.getTimeInMillis();
                try {
                    createRecurrence(next, true);
                } catch (GenericEntityException e) {
                    Debug.logError(e, "Unable to re-schedule job [" + getJobId() + "]: ", module);
                }
                Debug.logInfo("Persisted Job [" + getJobId() + "] Failed. Re-Scheduling : " + next, module);
            } else {
                Debug.logWarning("Persisted Job [" + getJobId() + "] Failed. Max Retry Hit, not re-scheduling", module);
            }
        }
        // set the failed status
        jobValue.set("statusId", "SERVICE_FAILED");
        jobValue.set("finishDateTime", UtilDateTime.nowTimestamp());
        jobValue.set("jobResult", StringUtils.substring(t.getMessage(), 0, 255));
        try {
            jobValue.store();
        } catch (GenericEntityException e) {
            Debug.logError(e, "Cannot update the JobSandbox entity", module);
        }
    }

    /** Returns the serviceName field of the backing record, or null. */
    @Override
    protected String getServiceName() {
        if (jobValue == null || jobValue.get("serviceName") == null) {
            return null;
        }
        return jobValue.getString("serviceName");
    }

    /**
     * Builds the service invocation context by deserializing the related
     * RuntimeData record (if any) and resolving the runAsUser login.
     * Deserialization errors are logged, not rethrown.
     * @return the context map; empty when no runtime data exists
     */
    @Override
    protected Map<String, Object> getContext() throws InvalidJobException {
        Map<String, Object> context = null;
        try {
            if (UtilValidate.isNotEmpty(jobValue.getString("runtimeDataId"))) {
                GenericValue contextObj = jobValue.getRelatedOne("RuntimeData", false);
                if (contextObj != null) {
                    context = UtilGenerics.checkMap(XmlSerializer.deserialize(contextObj.getString("runtimeInfo"), delegator), String.class, Object.class);
                }
            }
            if (context == null) {
                context = new HashMap<String, Object>();
            }
            // check the runAsUser
            if (UtilValidate.isNotEmpty(jobValue.getString("runAsUser"))) {
                context.put("userLogin", ServiceUtil.getUserLogin(dctx, context, jobValue.getString("runAsUser")));
            }
        } catch (GenericEntityException e) {
            Debug.logError(e, "PersistedServiceJob.getContext(): Entity Exception", module);
        } catch (SerializeException e) {
            Debug.logError(e, "PersistedServiceJob.getContext(): Serialize Exception", module);
        } catch (ParserConfigurationException e) {
            Debug.logError(e, "PersistedServiceJob.getContext(): Parse Exception", module);
        } catch (SAXException e) {
            Debug.logError(e, "PersistedServiceJob.getContext(): SAXException", module);
        } catch (IOException e) {
            Debug.logError(e, "PersistedServiceJob.getContext(): IOException", module);
        }
        if (context == null) {
            // only reachable when deserialization failed before the map was created
            Debug.logError("Job context is null", module);
        }
        return context;
    }

    // returns the number of current retries: failed siblings of the same
    // parent job plus one for the parent itself
    private long getRetries(Delegator delegator) {
        String pJobId = jobValue.getString("parentJobId");
        if (pJobId == null) {
            return 0;
        }
        long count = 0;
        try {
            count = EntityQuery.use(delegator).from("JobSandbox").where("parentJobId", pJobId, "statusId", "SERVICE_FAILED").queryCount();
        } catch (GenericEntityException e) {
            Debug.logError(e, "Exception thrown while counting retries: ", module);
        }
        return count + 1; // add one for the parent
    }

    /** Returns true when a failure retry is still permitted (maxRetry of -1 means unlimited). */
    private boolean canRetry() {
        if (maxRetry == -1) {
            return true;
        }
        return currentRetryCount < maxRetry;
    }

    /** Returns the RecurrenceInfo related to this job, or null when absent or unreadable. */
    private RecurrenceInfo getRecurrenceInfo() {
        try {
            if (UtilValidate.isNotEmpty(jobValue.getString("recurrenceInfoId"))) {
                GenericValue ri = jobValue.getRelatedOne("RecurrenceInfo", false);
                if (ri != null) {
                    return new RecurrenceInfo(ri);
                }
            }
        } catch (GenericEntityException e) {
            Debug.logError(e, "Problem getting RecurrenceInfo entity from JobSandbox", module);
        } catch (RecurrenceInfoException re) {
            Debug.logError(re, "Problem creating RecurrenceInfo instance: " + re.getMessage(), module);
        }
        return null;
    }

    /**
     * Returns a queued job to the pending pool (SERVICE_PENDING) and clears
     * the ownership fields so another instance may pick it up.
     * @throws InvalidJobException if the job is not currently queued or
     *         cannot be stored
     */
    @Override
    public void deQueue() throws InvalidJobException {
        if (currentState != State.QUEUED) {
            throw new InvalidJobException("Illegal state change");
        }
        currentState = State.CREATED;
        try {
            jobValue.refresh();
            jobValue.set("startDateTime", null);
            jobValue.set("runByInstanceId", null);
            jobValue.set("statusId", "SERVICE_PENDING");
            jobValue.store();
        } catch (GenericEntityException e) {
            throw new InvalidJobException("Unable to dequeue job [" + getJobId() + "]", e);
        }
        if (Debug.verboseOn()) {
            Debug.logVerbose("Job [" + getJobId() + "] not queued, rescheduling", module);
        }
    }

    /** Returns the scheduled start time of this job as a Date. */
    @Override
    public Date getStartTime() {
        return new Date(startTime);
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.table.client.cli; import org.apache.flink.api.common.JobID; import org.apache.flink.api.java.tuple.Tuple2; import org.apache.flink.table.api.TableSchema; import org.apache.flink.table.client.cli.utils.TerminalUtils; import org.apache.flink.table.client.config.Environment; import org.apache.flink.table.client.config.entries.ViewEntry; import org.apache.flink.table.client.gateway.Executor; import org.apache.flink.table.client.gateway.ProgramTargetDescriptor; import org.apache.flink.table.client.gateway.ResultDescriptor; import org.apache.flink.table.client.gateway.SessionContext; import org.apache.flink.table.client.gateway.SqlExecutionException; import org.apache.flink.table.client.gateway.TypedResult; import org.apache.flink.types.Row; import org.apache.flink.util.TestLogger; import org.jline.reader.Candidate; import org.jline.reader.LineReader; import org.jline.reader.LineReaderBuilder; import org.jline.reader.ParsedLine; import org.jline.reader.Parser; import org.jline.terminal.Terminal; import org.jline.terminal.impl.DumbTerminal; import org.junit.Test; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; import 
java.io.OutputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;

/**
 * Tests for the {@link CliClient}.
 */
public class CliClientTest extends TestLogger {

	private static final String INSERT_INTO_STATEMENT = "INSERT INTO MyTable SELECT * FROM MyOtherTable";
	private static final String INSERT_OVERWRITE_STATEMENT = "INSERT OVERWRITE MyTable SELECT * FROM MyOtherTable";
	private static final String SELECT_STATEMENT = "SELECT * FROM MyOtherTable";

	@Test
	public void testUpdateSubmission() {
		verifyUpdateSubmission(INSERT_INTO_STATEMENT, false, false);
		verifyUpdateSubmission(INSERT_OVERWRITE_STATEMENT, false, false);
	}

	@Test
	public void testFailedUpdateSubmission() {
		// fail at executor
		verifyUpdateSubmission(INSERT_INTO_STATEMENT, true, true);
		verifyUpdateSubmission(INSERT_OVERWRITE_STATEMENT, true, true);

		// fail early in client
		verifyUpdateSubmission(SELECT_STATEMENT, false, true);
	}

	@Test
	public void testSqlCompletion() throws IOException {
		verifySqlCompletion("", 0, Arrays.asList("SELECT", "QUIT;", "RESET;"), Collections.emptyList());
		verifySqlCompletion("SELEC", 5, Collections.singletonList("SELECT"), Collections.singletonList("QUIT;"));
		verifySqlCompletion("SELE", 0, Collections.singletonList("SELECT"), Collections.singletonList("QUIT;"));
		verifySqlCompletion("QU", 2, Collections.singletonList("QUIT;"), Collections.singletonList("SELECT"));
		verifySqlCompletion("qu", 2, Collections.singletonList("QUIT;"), Collections.singletonList("SELECT"));
		verifySqlCompletion(" qu", 2, Collections.singletonList("QUIT;"), Collections.singletonList("SELECT"));
		verifySqlCompletion("set ", 3, Collections.emptyList(), Collections.singletonList("SET"));
		verifySqlCompletion("show t ", 6, Collections.emptyList(), Collections.singletonList("SET"));
	}

	// Verifies that a failing USE DATABASE still reaches the executor and
	// the client survives (exception is handled, not propagated).
	@Test
	public void testUseNonExistingDB() throws Exception {
		Executor executor = mock(Executor.class);
		doThrow(new SqlExecutionException("mocked exception")).when(executor).useDatabase(any(), any());
		InputStream inputStream = new ByteArrayInputStream("use db;\n".getBytes());
		// don't care about the output
		OutputStream outputStream = new OutputStream() {
			@Override
			public void write(int b) throws IOException {
			}
		};
		SessionContext session = new SessionContext("test-session", new Environment());
		String sessionId = executor.openSession(session);
		CliClient cliClient = null;
		try (Terminal terminal = new DumbTerminal(inputStream, outputStream)) {
			cliClient = new CliClient(terminal, sessionId, executor);
			cliClient.open();
			verify(executor).useDatabase(any(), any());
		} finally {
			if (cliClient != null) {
				cliClient.close();
			}
		}
	}

	// Same as above, for USE CATALOG.
	@Test
	public void testUseNonExistingCatalog() throws Exception {
		Executor executor = mock(Executor.class);
		doThrow(new SqlExecutionException("mocked exception")).when(executor).useCatalog(any(), any());
		InputStream inputStream = new ByteArrayInputStream("use catalog cat;\n".getBytes());
		// don't care about the output
		OutputStream outputStream = new OutputStream() {
			@Override
			public void write(int b) throws IOException {
			}
		};
		CliClient cliClient = null;
		SessionContext sessionContext = new SessionContext("test-session", new Environment());
		String sessionId = executor.openSession(sessionContext);
		try (Terminal terminal = new DumbTerminal(inputStream, outputStream)) {
			cliClient = new CliClient(terminal, sessionId, executor);
			cliClient.open();
			verify(executor).useCatalog(any(), any());
		} finally {
			if (cliClient != null) {
				cliClient.close();
			}
		}
	}

	// --------------------------------------------------------------------------------------------

	// Submits an update statement through a fresh CliClient/MockExecutor pair
	// and checks the success flag plus what the executor actually received.
	private void verifyUpdateSubmission(String statement, boolean failExecution, boolean testFailure) {
		final SessionContext context = new SessionContext("test-session", new Environment());
		final MockExecutor mockExecutor = new MockExecutor();
		String sessionId = mockExecutor.openSession(context);
		mockExecutor.failExecution = failExecution;

		CliClient cli = null;
		try {
			cli = new CliClient(TerminalUtils.createDummyTerminal(), sessionId, mockExecutor);
			if (testFailure) {
				assertFalse(cli.submitUpdate(statement));
			} else {
				assertTrue(cli.submitUpdate(statement));
				assertEquals(statement, mockExecutor.receivedStatement);
				assertEquals(context, mockExecutor.receivedContext);
			}
		} finally {
			if (cli != null) {
				cli.close();
			}
		}
	}

	// Runs the completer over the parsed statement and checks that expected
	// hints (and the MockExecutor's fixed "HintA"/"Hint B") are offered while
	// notExpectedHints are absent.
	private void verifySqlCompletion(String statement, int position, List<String> expectedHints, List<String> notExpectedHints) throws IOException {
		final SessionContext context = new SessionContext("test-session", new Environment());
		final MockExecutor mockExecutor = new MockExecutor();
		String sessionId = mockExecutor.openSession(context);

		final SqlCompleter completer = new SqlCompleter(sessionId, mockExecutor);
		final SqlMultiLineParser parser = new SqlMultiLineParser();

		try (Terminal terminal = TerminalUtils.createDummyTerminal()) {
			final LineReader reader = LineReaderBuilder.builder().terminal(terminal).build();

			final ParsedLine parsedLine = parser.parse(statement, position, Parser.ParseContext.COMPLETE);
			final List<Candidate> candidates = new ArrayList<>();
			final List<String> results = new ArrayList<>();
			completer.complete(reader, parsedLine, candidates);
			candidates.forEach(item -> results.add(item.value()));

			assertTrue(results.containsAll(expectedHints));

			assertEquals(statement, mockExecutor.receivedStatement);
			assertEquals(context, mockExecutor.receivedContext);
			assertEquals(position, mockExecutor.receivedPosition);
			assertTrue(results.contains("HintA"));
			assertTrue(results.contains("Hint B"));

			results.retainAll(notExpectedHints);
			assertEquals(0, results.size());
		}
	}

	// --------------------------------------------------------------------------------------------

	// Minimal Executor stub: records the last session/statement/position it
	// received; everything else is a no-op returning null.
	private static class MockExecutor implements Executor {

		public boolean failExecution;

		public SessionContext receivedContext;
		public String receivedStatement;
		public int receivedPosition;
		private final Map<String, SessionContext> sessionMap = new HashMap<>();

		@Override
		public void start() throws SqlExecutionException {
			// nothing to do
		}

		@Override
		public String openSession(SessionContext session) throws SqlExecutionException {
			String sessionId = UUID.randomUUID().toString();
			sessionMap.put(sessionId, session);
			return sessionId;
		}

		@Override
		public void closeSession(String sessionId) throws SqlExecutionException {
		}

		@Override
		public Map<String, String> getSessionProperties(String sessionId) throws SqlExecutionException {
			return null;
		}

		@Override
		public void resetSessionProperties(String sessionId) throws SqlExecutionException {
		}

		@Override
		public void setSessionProperty(String sessionId, String key, String value) throws SqlExecutionException {
		}

		@Override
		public void addView(String sessionId, String name, String query) throws SqlExecutionException {
		}

		@Override
		public void removeView(String sessionId, String name) throws SqlExecutionException {
		}

		@Override
		public Map<String, ViewEntry> listViews(String sessionId) throws SqlExecutionException {
			return null;
		}

		@Override
		public List<String> listCatalogs(String sessionId) throws SqlExecutionException {
			return null;
		}

		@Override
		public List<String> listDatabases(String sessionId) throws SqlExecutionException {
			return null;
		}

		@Override
		public void createTable(String sessionId, String ddl) throws SqlExecutionException {
		}

		@Override
		public void dropTable(String sessionId, String ddl) throws SqlExecutionException {
		}

		@Override
		public List<String> listTables(String sessionId) throws SqlExecutionException {
			return null;
		}

		@Override
		public List<String> listUserDefinedFunctions(String sessionId) throws SqlExecutionException {
			return null;
		}

		@Override
		public List<String> listFunctions(String sessionId) throws SqlExecutionException {
			return null;
		}

		@Override
		public List<String> listModules(String sessionId) throws SqlExecutionException {
			return null;
		}

		@Override
		public void useCatalog(String sessionId, String catalogName) throws SqlExecutionException {
		}

		@Override
		public void useDatabase(String sessionId, String databaseName) throws SqlExecutionException {
		}

		@Override
		public TableSchema getTableSchema(String sessionId, String name) throws SqlExecutionException {
			return null;
		}

		@Override
		public String explainStatement(String sessionId, String statement) throws SqlExecutionException {
			return null;
		}

		@Override
		public List<String> completeStatement(String sessionId, String statement, int position) {
			receivedContext = sessionMap.get(sessionId);
			receivedStatement = statement;
			receivedPosition = position;
			return Arrays.asList("HintA", "Hint B");
		}

		@Override
		public ResultDescriptor executeQuery(String sessionId, String query) throws SqlExecutionException {
			return null;
		}

		@Override
		public TypedResult<List<Tuple2<Boolean, Row>>> retrieveResultChanges(String sessionId, String resultId) throws SqlExecutionException {
			return null;
		}

		@Override
		public TypedResult<Integer> snapshotResult(String sessionId, String resultId, int pageSize) throws SqlExecutionException {
			return null;
		}

		@Override
		public List<Row> retrieveResultPage(String resultId, int page) throws SqlExecutionException {
			return null;
		}

		@Override
		public void cancelQuery(String sessionId, String resultId) throws SqlExecutionException {
			// nothing to do
		}

		@Override
		public ProgramTargetDescriptor executeUpdate(String sessionId, String statement) throws SqlExecutionException {
			receivedContext = sessionMap.get(sessionId);
			receivedStatement = statement;
			if (failExecution) {
				throw new SqlExecutionException("Fail execution.");
			}
			JobID jobID = JobID.generate();
			return new ProgramTargetDescriptor(jobID);
		}
	}
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.metrics.prometheus; import org.apache.flink.annotation.PublicEvolving; import org.apache.flink.annotation.VisibleForTesting; import org.apache.flink.metrics.CharacterFilter; import org.apache.flink.metrics.Counter; import org.apache.flink.metrics.Gauge; import org.apache.flink.metrics.Histogram; import org.apache.flink.metrics.HistogramStatistics; import org.apache.flink.metrics.Meter; import org.apache.flink.metrics.Metric; import org.apache.flink.metrics.MetricConfig; import org.apache.flink.metrics.MetricGroup; import org.apache.flink.metrics.reporter.MetricReporter; import org.apache.flink.runtime.metrics.groups.AbstractMetricGroup; import org.apache.flink.runtime.metrics.groups.FrontMetricGroup; import fi.iki.elonen.NanoHTTPD; import io.prometheus.client.Collector; import io.prometheus.client.CollectorRegistry; import io.prometheus.client.exporter.common.TextFormat; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import java.io.StringWriter; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import 
java.util.regex.Pattern; /** * {@link MetricReporter} that exports {@link Metric Metrics} via Prometheus. */ @PublicEvolving public class PrometheusReporter implements MetricReporter { private static final Logger LOG = LoggerFactory.getLogger(PrometheusReporter.class); static final String ARG_PORT = "port"; private static final int DEFAULT_PORT = 9249; private static final Pattern UNALLOWED_CHAR_PATTERN = Pattern.compile("[^a-zA-Z0-9:_]"); private static final CharacterFilter CHARACTER_FILTER = new CharacterFilter() { @Override public String filterCharacters(String input) { return replaceInvalidChars(input); } }; private static final char SCOPE_SEPARATOR = '_'; private static final String SCOPE_PREFIX = "flink" + SCOPE_SEPARATOR; private PrometheusEndpoint prometheusEndpoint; private final Map<String, Collector> collectorsByMetricName = new HashMap<>(); @VisibleForTesting static String replaceInvalidChars(final String input) { // https://prometheus.io/docs/instrumenting/writing_exporters/ // Only [a-zA-Z0-9:_] are valid in metric names, any other characters should be sanitized to an underscore. 
return UNALLOWED_CHAR_PATTERN.matcher(input).replaceAll("_"); } @Override public void open(MetricConfig config) { int port = config.getInteger(ARG_PORT, DEFAULT_PORT); LOG.info("Using port {}.", port); prometheusEndpoint = new PrometheusEndpoint(port); try { prometheusEndpoint.start(NanoHTTPD.SOCKET_READ_TIMEOUT, true); } catch (IOException e) { final String msg = "Could not start PrometheusEndpoint on port " + port; LOG.warn(msg, e); throw new RuntimeException(msg, e); } } @Override public void close() { prometheusEndpoint.stop(); CollectorRegistry.defaultRegistry.clear(); } @Override public void notifyOfAddedMetric(final Metric metric, final String metricName, final MetricGroup group) { final String scope = SCOPE_PREFIX + getLogicalScope(group); List<String> dimensionKeys = new LinkedList<>(); List<String> dimensionValues = new LinkedList<>(); for (final Map.Entry<String, String> dimension : group.getAllVariables().entrySet()) { final String key = dimension.getKey(); dimensionKeys.add(CHARACTER_FILTER.filterCharacters(key.substring(1, key.length() - 1))); dimensionValues.add(CHARACTER_FILTER.filterCharacters(dimension.getValue())); } final String validMetricName = scope + SCOPE_SEPARATOR + CHARACTER_FILTER.filterCharacters(metricName); final String metricIdentifier = group.getMetricIdentifier(metricName); final Collector collector; if (metric instanceof Gauge) { collector = createGauge((Gauge) metric, validMetricName, metricIdentifier, dimensionKeys, dimensionValues); } else if (metric instanceof Counter) { collector = createGauge((Counter) metric, validMetricName, metricIdentifier, dimensionKeys, dimensionValues); } else if (metric instanceof Meter) { collector = createGauge((Meter) metric, validMetricName, metricIdentifier, dimensionKeys, dimensionValues); } else if (metric instanceof Histogram) { collector = createSummary((Histogram) metric, validMetricName, metricIdentifier, dimensionKeys, dimensionValues); } else { LOG.warn("Cannot add unknown metric type: 
{}. This indicates that the metric type is not supported by this reporter.", metric.getClass().getName()); return; } collector.register(); collectorsByMetricName.put(metricName, collector); } @Override public void notifyOfRemovedMetric(final Metric metric, final String metricName, final MetricGroup group) { CollectorRegistry.defaultRegistry.unregister(collectorsByMetricName.get(metricName)); collectorsByMetricName.remove(metricName); } @SuppressWarnings("unchecked") private static String getLogicalScope(MetricGroup group) { return ((FrontMetricGroup<AbstractMetricGroup<?>>) group).getLogicalScope(CHARACTER_FILTER, SCOPE_SEPARATOR); } private Collector createGauge(final Gauge gauge, final String name, final String identifier, final List<String> labelNames, final List<String> labelValues) { return newGauge(name, identifier, labelNames, labelValues, new io.prometheus.client.Gauge.Child() { @Override public double get() { final Object value = gauge.getValue(); if (value instanceof Double) { return (double) value; } if (value instanceof Number) { return ((Number) value).doubleValue(); } else if (value instanceof Boolean) { return ((Boolean) value) ? 
1 : 0; } else { LOG.debug("Invalid type for Gauge {}: {}, only number types and booleans are supported by this reporter.", gauge, value.getClass().getName()); return 0; } } }); } private static Collector createGauge(final Counter counter, final String name, final String identifier, final List<String> labelNames, final List<String> labelValues) { return newGauge(name, identifier, labelNames, labelValues, new io.prometheus.client.Gauge.Child() { @Override public double get() { return (double) counter.getCount(); } }); } private Collector createGauge(final Meter meter, final String name, final String identifier, final List<String> labelNames, final List<String> labelValues) { return newGauge(name, identifier, labelNames, labelValues, new io.prometheus.client.Gauge.Child() { @Override public double get() { return meter.getRate(); } }); } private static Collector newGauge(String name, String identifier, List<String> labelNames, List<String> labelValues, io.prometheus.client.Gauge.Child child) { return io.prometheus.client.Gauge .build() .name(name) .help(identifier) .labelNames(toArray(labelNames)) .create() .setChild(child, toArray(labelValues)); } private static HistogramSummaryProxy createSummary(final Histogram histogram, final String name, final String identifier, final List<String> dimensionKeys, final List<String> dimensionValues) { return new HistogramSummaryProxy(histogram, name, identifier, dimensionKeys, dimensionValues); } static class PrometheusEndpoint extends NanoHTTPD { static final String MIME_TYPE = "plain/text"; PrometheusEndpoint(int port) { super(port); } @Override public Response serve(IHTTPSession session) { if (session.getUri().equals("/metrics")) { StringWriter writer = new StringWriter(); try { TextFormat.write004(writer, CollectorRegistry.defaultRegistry.metricFamilySamples()); } catch (IOException e) { return newFixedLengthResponse(Response.Status.INTERNAL_ERROR, MIME_TYPE, "Unable to output metrics"); } return 
newFixedLengthResponse(Response.Status.OK, TextFormat.CONTENT_TYPE_004, writer.toString()); } else { return newFixedLengthResponse(Response.Status.NOT_FOUND, MIME_TYPE, "Not found"); } } } private static class HistogramSummaryProxy extends Collector { private static final List<Double> QUANTILES = Arrays.asList(.5, .75, .95, .98, .99, .999); private final Histogram histogram; private final String metricName; private final String metricIdentifier; private final List<String> labelNamesWithQuantile; private final List<String> labelValues; HistogramSummaryProxy(final Histogram histogram, final String metricName, final String metricIdentifier, final List<String> labelNames, final List<String> labelValues) { this.histogram = histogram; this.metricName = metricName; this.metricIdentifier = metricIdentifier; this.labelNamesWithQuantile = addToList(labelNames, "quantile"); this.labelValues = labelValues; } @Override public List<MetricFamilySamples> collect() { // We cannot use SummaryMetricFamily because it is impossible to get a sum of all values (at least for Dropwizard histograms, // whose snapshot's values array only holds a sample of recent values). 
final HistogramStatistics statistics = histogram.getStatistics(); List<MetricFamilySamples.Sample> samples = new LinkedList<>(); samples.add(new MetricFamilySamples.Sample(metricName + "_count", labelNamesWithQuantile.subList(0, labelNamesWithQuantile.size() - 1), labelValues, histogram.getCount())); for (final Double quantile : QUANTILES) { samples.add(new MetricFamilySamples.Sample(metricName, labelNamesWithQuantile, addToList(labelValues, quantile.toString()), statistics.getQuantile(quantile))); } return Collections.singletonList(new MetricFamilySamples(metricName, Type.SUMMARY, metricIdentifier, samples)); } } private static List<String> addToList(List<String> list, String element) { final List<String> result = new ArrayList<>(list); result.add(element); return result; } private static String[] toArray(List<String> labelNames) { return labelNames.toArray(new String[labelNames.size()]); } }
package com.swtworkbench.community.xswt.examples.newsyntax;

/*
 * Copyright (c) 2003 Advanced Systems Concepts, Inc. All rights reserved.
 * This file is made available under the terms of the Common Public License v1.0
 * which accompanies this distribution, and is available at
 * http://www.eclipse.org/legal/cpl-v10.html
 */

import org.eclipse.swt.SWT;
import org.eclipse.swt.events.PaintEvent;
import org.eclipse.swt.events.PaintListener;
import org.eclipse.swt.graphics.Color;
import org.eclipse.swt.graphics.GC;
import org.eclipse.swt.graphics.Rectangle;
import org.eclipse.swt.widgets.Canvas;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Display;

/**
 * A SWT desktop blotter component based on Michael Isbell's desktop blotter
 * component for Delphi.
 *
 * Provided by www.swtworkbench.com
 * Michael Isbell maintains a weblog at: http://radio.weblogs.com/0117185/
 *
 * @author djo - David J. Orme
 */
public class Blotter extends Canvas implements PaintListener {

	/**
	 * Creates a blotter canvas that paints itself: registers this instance as
	 * its own {@link PaintListener} and defaults the background to dark green.
	 *
	 * @param parent the parent composite
	 * @param style  SWT style bits passed through to {@link Canvas}
	 */
	public Blotter(Composite parent, int style) {
		super(parent, style);

		// We'll handle our own painting
		addPaintListener(this);

		// Set a default background color
		display = Display.getCurrent();
		Color background = display.getSystemColor(SWT.COLOR_DARK_GREEN);
		setBackground(background);
	}

	// Keep track of the system Display object (captured in the constructor;
	// used to fetch system colors during painting)
	private Display display;

	/**
	 * Paints the blotter: border lines, brass-colored corner blocks, and the
	 * "staircase" pixels that round the corners. The drawing is pixel-tuned;
	 * the order of the calls below is significant (later lines overdraw earlier
	 * ones), so do not reorder them.
	 *
	 * @see org.eclipse.swt.events.PaintListener#paintControl(PaintEvent)
	 */
	public void paintControl(PaintEvent e) {
		GC gc = e.gc;
		// System colors are owned by the Display and must not be disposed here
		Color black = display.getSystemColor(SWT.COLOR_BLACK);
		Color gray = display.getSystemColor(SWT.COLOR_GRAY);
		Color yellow = display.getSystemColor(SWT.COLOR_YELLOW);
		Color darkYellow = display.getSystemColor(SWT.COLOR_DARK_YELLOW);

		Rectangle bounds = getBounds();
		int width = bounds.width;
		int height = bounds.height;

		// ***Draw border lines
		// Draw vertical lines on left side
		gc.setForeground(black);
		gc.drawLine(0, 0, 0, height);
		gc.setForeground(gray);
		gc.drawLine(1, 0, 1, height);
		gc.setForeground(black);
		gc.drawLine(4, 0, 4, height-4);

		// Draw vertical lines on right side
		gc.setForeground(gray);
		gc.drawLine(width-4, 0, width-4, height-4);
		gc.setForeground(black);
		gc.drawLine(width-1, 0, width-1, height);

		// Draw horizontal lines at top
		//gc.setForeground(black);
		gc.drawLine(0, 0, width, 0);
		gc.setForeground(gray);
		gc.drawLine(0, 1, width, 1);
		gc.setForeground(black);
		gc.drawLine(0, 4, width, 4);

		// Draw horizontal lines at bottom
		gc.setForeground(gray);
		gc.drawLine(5, height-4, width-4, height-4);
		gc.setForeground(black);
		gc.drawLine(0, height-1, width, height-1);

		// ***Draw outer corners
		gc.setForeground(yellow);
		// Upper left vertical and horizontal
		gc.drawLine(1, 1, 1, 15);
		gc.drawLine(1, 1, 15, 1);
		// Lower left vertical only
		gc.drawLine(1, height-1, 1, height-15);
		// Lower right
		gc.setForeground(black);
		gc.drawLine(width-2, height-1, width-15, height-1);
		gc.drawLine(width-1, height-1, width-1, height-15);
		gc.setForeground(yellow);
		gc.drawLine(width-15, height-1, width-16, height-1);
		gc.drawLine(width-1, height-15, width-1, height-16);
		// Upper right horizontal only
		gc.drawLine(width-14, 1, width-1, 1);

		// ***Draw blotter inner corners
		gc.setForeground(black);
		// Upper left
		gc.drawLine(5, 5, 5, 11);
		gc.drawLine(5, 5, 11, 5);
		// Lower left
		gc.drawLine(5, height-5, 5, height-5-6);
		gc.setForeground(yellow);
		gc.drawLine(6, height-5, 11, height-5);
		// Lower right
		gc.setForeground(yellow);
		gc.drawLine(width-5, height-5, width-5, height-11);
		gc.drawLine(width-5, height-5, width-11, height-5);
		// Upper right
		gc.setForeground(black);
		gc.drawLine(width-10, 5, width-5, 5);
		gc.setForeground(yellow);
		gc.drawLine(width-5, 5, width-5, 10);

		// ***Draw the staircase pixels
		gc.setForeground(black);
		// Upper left, lower pixels
		gc.drawLine(1, 15, 4, 12);
		gc.drawLine(2, height-13, 3, height-12);
		gc.drawLine(4, height-11, 4, height-11);
		// Upper left, upper pixels
		gc.drawLine(15, 1, 12, 4);
		// Lower left, upper pixels
		gc.setForeground(yellow);
		gc.drawLine(1, height-15, 5, height-11);
		// Lower left, lower pixels
		gc.setForeground(black);
		gc.drawLine(11, height-5, 15, height-1);
		// Lower right
		gc.setForeground(yellow);
		gc.drawLine(width-15, height-1, width-11, height-5);
		gc.drawLine(width-1, height-15, width-5, height-11);
		// Upper right
		gc.setForeground(black);
		gc.drawLine(width-1, 15, width-5, 11);
		gc.drawLine(width-15, 1, width-11, 5);

		// ***Fill in "brass" areas for corners
		gc.setForeground(darkYellow);
		gc.setBackground(darkYellow);
		// Upper left
		// Fill in large areas first
		gc.fillRectangle(2, 2, 3, 10);
		gc.fillRectangle(2, 2, 10, 3);
		// Fill in upper pixels
		gc.drawLine(12, 2, 13, 2);
		gc.drawLine(12, 3, 12, 3);
		// Fill in lower pixels
		gc.drawLine(2, 12, 2, 13);
		gc.drawLine(3, 12, 3, 12);
		// Lower left
		// Fill in large areas first
		// NOTE(review): fillRectangle takes (x, y, width, height); passing height-1 /
		// height-2 as the size arguments looks suspicious but matches the original
		// Delphi port's tuned output — confirm visually before changing
		gc.fillRectangle(2, height-4, 9, height-1);
		gc.fillRectangle(2, height-10, 3, height-2);
		// Fill in upper pixels
		gc.drawLine(2,height-12, 2, height-12);
		gc.drawLine(2,height-11, 3, height-11);
		// Fill in lower pixels
		gc.drawLine(13,height-2, 13, height-2);
		gc.drawLine(12,height-2, 12, height-3);
		gc.drawLine(11,height-2, 11, height-4);
		// Lower right
		// Fill in large areas first
		gc.fillRectangle(width-11,height-4,width-1, height-1);
		gc.fillRectangle(width-4,height-11,width-1,height-1);
		// Fill in upper pixels
		gc.drawLine(width-3,height-12, width-1, height-12);
		gc.drawLine(width-2,height-13, width-1, height-13);
		// Fill in lower pixels
		gc.drawLine(width-12,height-3, width-12, height-1);
		gc.drawLine(width-13,height-2, width-13, height-1);
		// Upper right
		// Fill in large areas
		gc.fillRectangle(width-11,2, width-1,3);
		gc.fillRectangle(width-4,2, 4, 10);
		// Fill in upper pixels
		gc.drawLine(width-12,2, width-12,3);
		gc.drawLine(width-13,2, width-13,2);
		// Fill in lower pixels
		gc.drawLine(width-3,12, width-2,12);
		gc.drawLine(width-2,13, width-2,13);

		// ***Clean up corner pixels
		// Upper left
		gc.setForeground(black);
		gc.drawLine(0,0, 0,10);
		// Lower Left
		gc.drawLine(0,height-1, 13,height-1);
		gc.drawLine(0,height-1, 0,height-14);
		// Upper Right
		gc.drawLine(width-1,0, width-14,0);
		gc.drawLine(width-1,0, width-1,13);
		// Lower Right
		gc.drawLine(width-1,height-1, width-14,height-1);
		gc.drawLine(width-1,height-1, width-1,height-14);
	}
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.beam.runners.direct; import static com.google.common.base.Preconditions.checkState; import com.google.auto.value.AutoValue; import com.google.common.base.MoreObjects; import com.google.common.base.Optional; import com.google.common.cache.CacheBuilder; import com.google.common.cache.CacheLoader; import com.google.common.cache.LoadingCache; import com.google.common.collect.Iterables; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Map; import java.util.Queue; import java.util.Set; import java.util.concurrent.BlockingQueue; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; import javax.annotation.Nullable; import org.apache.beam.runners.core.KeyedWorkItem; import org.apache.beam.runners.core.KeyedWorkItems; import 
org.apache.beam.runners.direct.DirectRunner.CommittedBundle; import org.apache.beam.runners.direct.WatermarkManager.FiredTimers; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.transforms.AppliedPTransform; import org.apache.beam.sdk.transforms.PTransform; import org.apache.beam.sdk.util.TimerInternals.TimerData; import org.apache.beam.sdk.util.UserCodeException; import org.apache.beam.sdk.util.WindowedValue; import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.PCollectionView; import org.apache.beam.sdk.values.PValue; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * An {@link PipelineExecutor} that uses an underlying {@link ExecutorService} and * {@link EvaluationContext} to execute a {@link Pipeline}. */ final class ExecutorServiceParallelExecutor implements PipelineExecutor { private static final Logger LOG = LoggerFactory.getLogger(ExecutorServiceParallelExecutor.class); private final int targetParallelism; private final ExecutorService executorService; private final DirectGraph graph; private final Set<PValue> keyedPValues; private final RootProviderRegistry rootProviderRegistry; private final TransformEvaluatorRegistry registry; @SuppressWarnings("rawtypes") private final Map<Class<? 
extends PTransform>, Collection<ModelEnforcementFactory>> transformEnforcements; private final EvaluationContext evaluationContext; private final LoadingCache<StepAndKey, TransformExecutorService> executorServices; private final Queue<ExecutorUpdate> allUpdates; private final BlockingQueue<VisibleExecutorUpdate> visibleUpdates; private final TransformExecutorService parallelExecutorService; private final CompletionCallback defaultCompletionCallback; private final ConcurrentMap<AppliedPTransform<?, ?, ?>, ConcurrentLinkedQueue<CommittedBundle<?>>> pendingRootBundles; private final AtomicReference<ExecutorState> state = new AtomicReference<>(ExecutorState.QUIESCENT); /** * Measures the number of {@link TransformExecutor TransformExecutors} that have been scheduled * but not yet completed. * * <p>Before a {@link TransformExecutor} is scheduled, this value is incremented. All methods in * {@link CompletionCallback} decrement this value. */ private final AtomicLong outstandingWork = new AtomicLong(); public static ExecutorServiceParallelExecutor create( int targetParallelism, DirectGraph graph, Set<PValue> keyedPValues, RootProviderRegistry rootProviderRegistry, TransformEvaluatorRegistry registry, @SuppressWarnings("rawtypes") Map<Class<? extends PTransform>, Collection<ModelEnforcementFactory>> transformEnforcements, EvaluationContext context) { return new ExecutorServiceParallelExecutor( targetParallelism, graph, keyedPValues, rootProviderRegistry, registry, transformEnforcements, context); } private ExecutorServiceParallelExecutor( int targetParallelism, DirectGraph graph, Set<PValue> keyedPValues, RootProviderRegistry rootProviderRegistry, TransformEvaluatorRegistry registry, @SuppressWarnings("rawtypes") Map<Class<? 
extends PTransform>, Collection<ModelEnforcementFactory>> transformEnforcements, EvaluationContext context) { this.targetParallelism = targetParallelism; this.executorService = Executors.newFixedThreadPool(targetParallelism); this.graph = graph; this.keyedPValues = keyedPValues; this.rootProviderRegistry = rootProviderRegistry; this.registry = registry; this.transformEnforcements = transformEnforcements; this.evaluationContext = context; // Weak Values allows TransformExecutorServices that are no longer in use to be reclaimed. // Executing TransformExecutorServices have a strong reference to their TransformExecutorService // which stops the TransformExecutorServices from being prematurely garbage collected executorServices = CacheBuilder.newBuilder().weakValues().build(serialTransformExecutorServiceCacheLoader()); this.allUpdates = new ConcurrentLinkedQueue<>(); this.visibleUpdates = new LinkedBlockingQueue<>(); parallelExecutorService = TransformExecutorServices.parallel(executorService); defaultCompletionCallback = new TimerIterableCompletionCallback(Collections.<TimerData>emptyList()); this.pendingRootBundles = new ConcurrentHashMap<>(); } private CacheLoader<StepAndKey, TransformExecutorService> serialTransformExecutorServiceCacheLoader() { return new CacheLoader<StepAndKey, TransformExecutorService>() { @Override public TransformExecutorService load(StepAndKey stepAndKey) throws Exception { return TransformExecutorServices.serial(executorService); } }; } @Override public void start(Collection<AppliedPTransform<?, ?, ?>> roots) { int numTargetSplits = Math.max(3, targetParallelism); for (AppliedPTransform<?, ?, ?> root : roots) { ConcurrentLinkedQueue<CommittedBundle<?>> pending = new ConcurrentLinkedQueue<>(); try { Collection<CommittedBundle<?>> initialInputs = rootProviderRegistry.getInitialInputs(root, numTargetSplits); pending.addAll(initialInputs); } catch (Exception e) { throw UserCodeException.wrap(e); } pendingRootBundles.put(root, pending); } 
evaluationContext.initialize(pendingRootBundles); Runnable monitorRunnable = new MonitorRunnable(); executorService.submit(monitorRunnable); } @SuppressWarnings("unchecked") public void scheduleConsumption( AppliedPTransform<?, ?, ?> consumer, CommittedBundle<?> bundle, CompletionCallback onComplete) { evaluateBundle(consumer, bundle, onComplete); } private <T> void evaluateBundle( final AppliedPTransform<?, ?, ?> transform, final CommittedBundle<T> bundle, final CompletionCallback onComplete) { TransformExecutorService transformExecutor; if (isKeyed(bundle.getPCollection())) { final StepAndKey stepAndKey = StepAndKey.of(transform, bundle.getKey()); // This executor will remain reachable until it has executed all scheduled transforms. // The TransformExecutors keep a strong reference to the Executor, the ExecutorService keeps // a reference to the scheduled TransformExecutor callable. Follow-up TransformExecutors // (scheduled due to the completion of another TransformExecutor) are provided to the // ExecutorService before the Earlier TransformExecutor callable completes. 
transformExecutor = executorServices.getUnchecked(stepAndKey); } else { transformExecutor = parallelExecutorService; } Collection<ModelEnforcementFactory> enforcements = MoreObjects.firstNonNull( transformEnforcements.get(transform.getTransform().getClass()), Collections.<ModelEnforcementFactory>emptyList()); TransformExecutor<T> callable = TransformExecutor.create( evaluationContext, registry, enforcements, bundle, transform, onComplete, transformExecutor); outstandingWork.incrementAndGet(); transformExecutor.schedule(callable); } private boolean isKeyed(PValue pvalue) { return keyedPValues.contains(pvalue); } private void scheduleConsumers(ExecutorUpdate update) { CommittedBundle<?> bundle = update.getBundle().get(); for (AppliedPTransform<?, ?, ?> consumer : update.getConsumers()) { scheduleConsumption(consumer, bundle, defaultCompletionCallback); } } @Override public void awaitCompletion() throws Exception { VisibleExecutorUpdate update; do { // Get an update; don't block forever if another thread has handled it update = visibleUpdates.poll(2L, TimeUnit.SECONDS); if (update == null && executorService.isShutdown()) { // there are no updates to process and no updates will ever be published because the // executor is shutdown return; } else if (update != null && update.exception.isPresent()) { throw update.exception.get(); } } while (update == null || !update.isDone()); executorService.shutdown(); } /** * The base implementation of {@link CompletionCallback} that provides implementations for * {@link #handleResult(CommittedBundle, TransformResult)} and * {@link #handleException(CommittedBundle, Exception)}. 
*/ private class TimerIterableCompletionCallback implements CompletionCallback { private final Iterable<TimerData> timers; protected TimerIterableCompletionCallback(Iterable<TimerData> timers) { this.timers = timers; } @Override public final CommittedResult handleResult( CommittedBundle<?> inputBundle, TransformResult<?> result) { CommittedResult committedResult = evaluationContext.handleResult(inputBundle, timers, result); for (CommittedBundle<?> outputBundle : committedResult.getOutputs()) { allUpdates.offer( ExecutorUpdate.fromBundle( outputBundle, graph.getPrimitiveConsumers(outputBundle.getPCollection()))); } CommittedBundle<?> unprocessedInputs = committedResult.getUnprocessedInputs(); if (unprocessedInputs != null && !Iterables.isEmpty(unprocessedInputs.getElements())) { if (inputBundle.getPCollection() == null) { // TODO: Split this logic out of an if statement pendingRootBundles.get(result.getTransform()).offer(unprocessedInputs); } else { allUpdates.offer( ExecutorUpdate.fromBundle( unprocessedInputs, Collections.<AppliedPTransform<?, ?, ?>>singleton( committedResult.getTransform()))); } } if (!committedResult.getProducedOutputTypes().isEmpty()) { state.set(ExecutorState.ACTIVE); } outstandingWork.decrementAndGet(); return committedResult; } @Override public void handleEmpty(AppliedPTransform<?, ?, ?> transform) { outstandingWork.decrementAndGet(); } @Override public final void handleException(CommittedBundle<?> inputBundle, Exception e) { allUpdates.offer(ExecutorUpdate.fromException(e)); outstandingWork.decrementAndGet(); } } /** * An internal status update on the state of the executor. * * <p>Used to signal when the executor should be shut down (due to an exception). 
*/
@AutoValue
abstract static class ExecutorUpdate {
  // Update reporting a produced bundle: carries the bundle and the transforms that consume it.
  public static ExecutorUpdate fromBundle(
      CommittedBundle<?> bundle, Collection<AppliedPTransform<?, ?, ?>> consumers) {
    return new AutoValue_ExecutorServiceParallelExecutor_ExecutorUpdate(
        Optional.of(bundle), consumers, Optional.<Exception>absent());
  }

  // Update reporting a failure: no bundle, no consumers, just the exception.
  public static ExecutorUpdate fromException(Exception e) {
    return new AutoValue_ExecutorServiceParallelExecutor_ExecutorUpdate(
        Optional.<CommittedBundle<?>>absent(),
        Collections.<AppliedPTransform<?, ?, ?>>emptyList(),
        Optional.of(e));
  }

  /**
   * Returns the bundle that produced this update.
   */
  public abstract Optional<? extends CommittedBundle<?>> getBundle();

  /**
   * Returns the transforms to process the bundle. If nonempty, {@link #getBundle()} will return
   * a present {@link Optional}.
   */
  public abstract Collection<AppliedPTransform<?, ?, ?>> getConsumers();

  public abstract Optional<? extends Exception> getException();
}

/**
 * An update of interest to the user. Used in {@link #awaitCompletion} to decide whether to
 * return normally or throw an exception.
 */
private static class VisibleExecutorUpdate {
  // Present exactly when this update reports a failure (see fromException / the constructor's
  // Optional.fromNullable wrapping).
  private final Optional<? extends Exception> exception;
  private final boolean done;

  public static VisibleExecutorUpdate fromException(Exception e) {
    return new VisibleExecutorUpdate(false, e);
  }

  public static VisibleExecutorUpdate finished() {
    return new VisibleExecutorUpdate(true, null);
  }

  private VisibleExecutorUpdate(boolean done, @Nullable Exception exception) {
    this.exception = Optional.fromNullable(exception);
    this.done = done;
  }

  public boolean isDone() {
    return done;
  }
}

// Periodic self-rescheduling task that advances the executor's state machine
// (ACTIVE -> PROCESSING -> QUIESCING -> QUIESCENT), fires timers, drains pending
// updates, and adds root work when everything else is blocked.
private class MonitorRunnable implements Runnable {
  private final String runnableName =
      String.format(
          "%s$%s-monitor",
          evaluationContext.getPipelineOptions().getAppName(),
          ExecutorServiceParallelExecutor.class.getSimpleName());

  // Set once applyUpdate surfaces an exception; checked by shouldShutdown().
  private boolean exceptionThrown = false;

  @Override
  public void run() {
    String oldName = Thread.currentThread().getName();
    // Rename the worker thread while monitoring so stack traces are attributable;
    // restored in the finally block.
    Thread.currentThread().setName(runnableName);
    try {
      boolean noWorkOutstanding = outstandingWork.get() == 0L;
      ExecutorState startingState = state.get();
      if (startingState == ExecutorState.ACTIVE) {
        // The remainder of this call will add all available work to the Executor, and there will
        // be no new work available
        state.compareAndSet(ExecutorState.ACTIVE, ExecutorState.PROCESSING);
      } else if (startingState == ExecutorState.PROCESSING && noWorkOutstanding) {
        // The executor has consumed all new work and no new work was added
        state.compareAndSet(ExecutorState.PROCESSING, ExecutorState.QUIESCING);
      } else if (startingState == ExecutorState.QUIESCING && noWorkOutstanding) {
        // The executor re-ran all blocked work and nothing could make progress.
        state.compareAndSet(ExecutorState.QUIESCING, ExecutorState.QUIESCENT);
      }
      fireTimers();
      Collection<ExecutorUpdate> updates = new ArrayList<>();
      // Pull all available updates off of the queue before adding additional work. This ensures
      // both loops terminate.
      ExecutorUpdate pendingUpdate = allUpdates.poll();
      while (pendingUpdate != null) {
        updates.add(pendingUpdate);
        pendingUpdate = allUpdates.poll();
      }
      for (ExecutorUpdate update : updates) {
        applyUpdate(noWorkOutstanding, startingState, update);
      }
      addWorkIfNecessary();
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt();
      LOG.error("Monitor died due to being interrupted");
      // offer() can fail if the queue is full; evict the oldest visible update and retry
      // so the failure is never silently dropped.
      while (!visibleUpdates.offer(VisibleExecutorUpdate.fromException(e))) {
        visibleUpdates.poll();
      }
    } catch (Exception t) {
      LOG.error("Monitor thread died due to exception", t);
      while (!visibleUpdates.offer(VisibleExecutorUpdate.fromException(t))) {
        visibleUpdates.poll();
      }
    } finally {
      if (!shouldShutdown()) {
        // The monitor thread should always be scheduled; but we only need to be scheduled once
        executorService.submit(this);
      }
      Thread.currentThread().setName(oldName);
    }
  }

  // Dispatches one drained update: schedule its consumers now if the executor can accept
  // work, re-queue it otherwise, or surface its exception to the user-visible queue.
  private void applyUpdate(
      boolean noWorkOutstanding, ExecutorState startingState, ExecutorUpdate update) {
    LOG.debug("Executor Update: {}", update);
    if (update.getBundle().isPresent()) {
      if (ExecutorState.ACTIVE == startingState
          || (ExecutorState.PROCESSING == startingState && noWorkOutstanding)) {
        scheduleConsumers(update);
      } else {
        // Not safe to schedule now; put it back for a later monitor pass.
        allUpdates.offer(update);
      }
    } else if (update.getException().isPresent()) {
      checkState(
          visibleUpdates.offer(VisibleExecutorUpdate.fromException(update.getException().get())),
          "VisibleUpdates should always be able to receive an offered update");
      exceptionThrown = true;
    }
  }

  /**
   * Fires any available timers. Fired timers are extracted from the evaluation context,
   * wrapped as keyed work items, committed into a bundle, and scheduled for consumption;
   * delivering any timer makes the executor ACTIVE again.
   */
  private void fireTimers() throws Exception {
    try {
      for (FiredTimers transformTimers : evaluationContext.extractFiredTimers()) {
        Collection<TimerData> delivery = transformTimers.getTimers();
        KeyedWorkItem<?, Object> work =
            KeyedWorkItems.timersWorkItem(transformTimers.getKey().getKey(), delivery);
        @SuppressWarnings({"unchecked", "rawtypes"})
        CommittedBundle<?> bundle =
            evaluationContext
                .createKeyedBundle(
                    transformTimers.getKey(),
                    (PCollection) transformTimers.getTransform().getInput())
                .add(WindowedValue.valueInGlobalWindow(work))
                .commit(evaluationContext.now());
        scheduleConsumption(
            transformTimers.getTransform(), bundle, new TimerIterableCompletionCallback(delivery));
        state.set(ExecutorState.ACTIVE);
      }
    } catch (Exception e) {
      LOG.error("Internal Error while delivering timers", e);
      throw e;
    }
  }

  // Shuts the executor service down when an exception was surfaced or the pipeline is done;
  // returns whether shutdown happened (callers use this to decide whether to reschedule).
  private boolean shouldShutdown() {
    boolean shouldShutdown = exceptionThrown || evaluationContext.isDone();
    if (shouldShutdown) {
      LOG.debug("Pipeline has terminated. Shutting down.");
      executorService.shutdown();
      try {
        registry.cleanup();
      } catch (Exception e) {
        visibleUpdates.add(VisibleExecutorUpdate.fromException(e));
      }
      if (evaluationContext.isDone()) {
        // Same evict-and-retry pattern as above: the "finished" marker must be delivered.
        while (!visibleUpdates.offer(VisibleExecutorUpdate.finished())) {
          visibleUpdates.poll();
        }
      }
    }
    return shouldShutdown;
  }

  /**
   * If all active {@link TransformExecutor TransformExecutors} are in a blocked state,
   * add more work from root nodes that may have additional work. This ensures that if a pipeline
   * has elements available from the root nodes it will add those elements when necessary.
   */
  private void addWorkIfNecessary() {
    // If any timers have fired, they will add more work; We don't need to add more
    if (state.get() == ExecutorState.QUIESCENT) {
      // All current TransformExecutors are blocked; add more work from the roots.
      for (Map.Entry<AppliedPTransform<?, ?, ?>, ConcurrentLinkedQueue<CommittedBundle<?>>>
          pendingRootEntry : pendingRootBundles.entrySet()) {
        Collection<CommittedBundle<?>> bundles = new ArrayList<>();
        // Pull all available work off of the queue, then schedule it all, so this loop
        // terminates
        while (!pendingRootEntry.getValue().isEmpty()) {
          CommittedBundle<?> bundle = pendingRootEntry.getValue().poll();
          bundles.add(bundle);
        }
        for (CommittedBundle<?> bundle : bundles) {
          scheduleConsumption(pendingRootEntry.getKey(), bundle, defaultCompletionCallback);
          state.set(ExecutorState.ACTIVE);
        }
      }
    }
  }
}

/**
 * The state of the executor. The state of the executor determines the behavior of the
 * {@link MonitorRunnable} when it runs.
 */
private enum ExecutorState {
  /**
   * Output has been produced since the last time the monitor ran. Work exists that has not yet
   * been evaluated, and all pending, including potentially blocked work, should be evaluated.
   *
   * <p>The executor becomes active whenever a timer fires, a {@link PCollectionView} is updated,
   * or output is produced by the evaluation of a {@link TransformExecutor}.
   */
  ACTIVE,
  /**
   * The Executor does not have any unevaluated work available to it, but work is in progress.
   * Work should not be added until the Executor becomes active or no work is outstanding.
   *
   * <p>If all outstanding work completes without the executor becoming {@code ACTIVE}, the
   * Executor enters state {@code QUIESCING}. Previously evaluated work must be reevaluated, in
   * case a side input has made progress.
   */
  PROCESSING,
  /**
   * All outstanding work is work that may be blocked on a side input. When there is no
   * outstanding work, the executor becomes {@code QUIESCENT}.
   */
  QUIESCING,
  /**
   * All elements are either buffered in state or are blocked on a side input. There are no
   * timers that are permitted to fire but have not. There is no outstanding work.
   *
   * <p>The pipeline will not make progress without the progression of watermarks, the progression
   * of processing time, or the addition of elements.
   */
  QUIESCENT
}
}
package org.duraspace.fcrepo.cloudsync.service.backend;

import org.duraspace.fcrepo.cloudsync.api.ObjectInfo;
import org.duraspace.fcrepo.cloudsync.api.ObjectStore;

import com.github.cwilper.fcrepo.dto.core.Datastream;
import com.github.cwilper.fcrepo.dto.core.DatastreamVersion;
import com.github.cwilper.fcrepo.dto.core.FedoraObject;
import com.github.cwilper.fcrepo.httpclient.HttpClientConfig;

import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpHead;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.entity.FileEntity;
import org.apache.http.protocol.BasicHttpContext;
import org.apache.http.util.EntityUtils;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.Iterator;

/**
 * Base class for object-store connectors. Provides the shared HTTP plumbing
 * (HEAD/GET/POST/PUT/DELETE helpers) used by the concrete Fedora, DuraCloud
 * and filesystem connectors, which are created via {@link #getInstance}.
 */
public abstract class StoreConnector {

    private static final Logger logger = LoggerFactory.getLogger(StoreConnector.class);

    // Optional request context (e.g. auth state) set by subclasses; when non-null
    // it is passed along with every request issued by execute().
    protected BasicHttpContext localContext = null;

    /**
     * Creates the connector implementation matching the store's declared type.
     *
     * @param store            the store descriptor; its type selects the implementation
     * @param httpClientConfig HTTP client settings for remote stores
     * @throws IllegalArgumentException if the store type is not recognized
     */
    public static StoreConnector getInstance(ObjectStore store,
                                             HttpClientConfig httpClientConfig) {
        if (store.getType().equals("fedora")) {
            return new FedoraConnector(store, httpClientConfig);
        } else if (store.getType().equals("duracloud")) {
            return new DuraCloudConnector(store, httpClientConfig);
        } else if (store.getType().equals("filesystem")) {
            return new FilesystemConnector(store);
        } else {
            throw new IllegalArgumentException("Unrecognized ObjectStore type: "
                    + store.getType());
        }
    }

    /**
     * Reports each pid that exists in this store to the handler, stopping early
     * if the handler returns false.
     */
    protected void listObjects(Iterator<String> pidIterator, ObjectListHandler handler) {
        boolean keepGoing = true;
        while (pidIterator.hasNext() && keepGoing) {
            String pid = pidIterator.next();
            if (hasObject(pid)) {
                ObjectInfo o = new ObjectInfo();
                o.setPid(pid);
                keepGoing = handler.handleObject(o);
            }
        }
    }

    /** Returns true if a HEAD request on the url answers 200. */
    protected boolean headCheck(HttpClient httpClient, String url) {
        logger.debug("Doing HEAD request on " + url);
        HttpHead head = new HttpHead(url);
        try {
            HttpResponse response = execute(httpClient, head);
            int responseCode = response.getStatusLine().getStatusCode();
            logger.debug("responseCode: " + responseCode);
            return responseCode == 200;
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    // returns null if 404
    protected String getString(HttpClient httpClient, String url) {
        try {
            HttpEntity entity = get(httpClient, url);
            if (entity == null) return null;
            // EntityUtils.toString fully consumes the entity, releasing the connection.
            return EntityUtils.toString(entity);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    // returns null if 404; caller is responsible for closing the returned stream
    protected InputStream getStream(HttpClient httpClient, String url) {
        try {
            HttpEntity entity = get(httpClient, url);
            if (entity == null) return null;
            return entity.getContent();
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    // returns null if 404
    protected HttpEntity get(HttpClient httpClient, String url) throws IOException {
        logger.debug("Doing GET request on " + url);
        HttpGet get = new HttpGet(url);
        try {
            HttpResponse response = execute(httpClient, get);
            int responseCode = response.getStatusLine().getStatusCode();
            if (responseCode == 404) {
                // BUGFIX: consume the unused response body so the connection is
                // released back to the manager instead of leaking.
                consumeQuietly(response.getEntity());
                return null;
            } else if (responseCode != 200) {
                // BUGFIX: likewise release the connection before reporting the error.
                consumeQuietly(response.getEntity());
                throw new RuntimeException("Unexpected response code ("
                        + responseCode + ") getting " + url);
            }
            return response.getEntity();
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    /** Deletes the resource at url; 200 and 204 are accepted as success. */
    protected void delete(HttpClient httpClient, String url) {
        logger.debug("Doing DELETE request on " + url);
        HttpDelete delete = new HttpDelete(url);
        HttpEntity entity = null;
        try {
            HttpResponse response = execute(httpClient, delete);
            entity = response.getEntity();
            int responseCode = response.getStatusLine().getStatusCode();
            if (responseCode != 200 && responseCode != 204) {
                throw new RuntimeException("Unexpected response code ("
                        + responseCode + ") deleting " + url);
            }
        } catch (IOException e) {
            throw new RuntimeException(e);
        } finally {
            consumeQuietly(entity);
        }
    }

    /** Posts the file's content to url; any 2xx up to 204 is accepted. */
    protected void post(HttpClient httpClient, String url, File file, String mimeType) {
        logger.debug("Doing POST request on " + url);
        HttpPost post = new HttpPost(url);
        HttpEntity entity = null;
        try {
            post.setHeader("Content-type", mimeType);
            post.setEntity(new FileEntity(file, mimeType));
            HttpResponse response = execute(httpClient, post);
            entity = response.getEntity();
            int responseCode = response.getStatusLine().getStatusCode();
            if (responseCode < 200 || responseCode > 204) {
                throw new RuntimeException("Unexpected response code ("
                        + responseCode + ") posting " + url);
            }
        } catch (IOException e) {
            throw new RuntimeException(e);
        } finally {
            consumeQuietly(entity);
        }
    }

    /**
     * Puts the file's content to url with the given mime type (defaulting to
     * application/octet-stream) and Content-MD5 header; 200/201/204 are accepted.
     */
    protected void put(HttpClient httpClient, String url, File file, String mimeType,
                       String md5) {
        logger.debug("Doing PUT request on {} (md5: {})", url, md5);
        HttpPut put = new HttpPut(url);
        HttpEntity entity = null;
        try {
            if (mimeType == null || mimeType.trim().length() == 0) {
                mimeType = "application/octet-stream";
            }
            put.setHeader("Content-Type", mimeType);
            put.setHeader("Content-MD5", md5);
            put.setEntity(new FileEntity(file, mimeType));
            HttpResponse response = execute(httpClient, put);
            entity = response.getEntity();
            int responseCode = response.getStatusLine().getStatusCode();
            if (responseCode != 200 && responseCode != 201 && responseCode != 204) {
                throw new RuntimeException("Unexpected response code ("
                        + responseCode + ") putting " + url);
            }
        } catch (IOException e) {
            throw new RuntimeException(e);
        } finally {
            consumeQuietly(entity);
        }
    }

    // Best-effort consumption of a response entity so its connection is released;
    // errors during cleanup are deliberately ignored.
    private static void consumeQuietly(HttpEntity entity) {
        if (entity != null) {
            try {
                entity.consumeContent();
            } catch (Exception e) {
                // ignore -- cleanup only
            }
        }
    }

    // executes a request with the localContext, if set
    private HttpResponse execute(HttpClient client, HttpUriRequest request)
            throws IOException {
        if (localContext != null) {
            return client.execute(request, localContext);
        } else {
            return client.execute(request);
        }
    }

    public abstract void listObjects(ObjectQuery query, ObjectListHandler handler);

    protected abstract boolean hasObject(String pid);

    // return null if object doesn't exist
    public abstract FedoraObject getObject(String pid);

    // true if the object previously existed
    public abstract boolean putObject(FedoraObject o,
                                      StoreConnector source,
                                      boolean overwrite,
                                      boolean copyExternal,
                                      boolean copyRedirect);

    public abstract InputStream getContent(FedoraObject o, Datastream ds,
                                           DatastreamVersion dsv);

    public abstract void close();
}
/*! ****************************************************************************** * * Pentaho Data Integration * * Copyright (C) 2002-2017 by Hitachi Vantara : http://www.pentaho.com * ******************************************************************************* * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************************/ package org.pentaho.di.trans.steps.rssoutput; import java.io.File; import java.io.FileWriter; import java.io.Writer; import java.util.Date; import org.apache.commons.vfs2.FileObject; import org.dom4j.DocumentHelper; import org.dom4j.Element; import org.dom4j.io.OutputFormat; import org.dom4j.io.XMLWriter; import org.pentaho.di.core.Const; import org.pentaho.di.core.util.Utils; import org.pentaho.di.core.ResultFile; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.exception.KettleStepException; import org.pentaho.di.core.vfs.KettleVFS; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.trans.Trans; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.step.BaseStep; import org.pentaho.di.trans.step.StepDataInterface; import org.pentaho.di.trans.step.StepInterface; import org.pentaho.di.trans.step.StepMeta; import org.pentaho.di.trans.step.StepMetaInterface; import com.sun.syndication.feed.module.georss.GeoRSSModule; import com.sun.syndication.feed.module.georss.SimpleModuleImpl; import 
com.sun.syndication.feed.module.georss.W3CGeoModuleImpl; import com.sun.syndication.feed.module.georss.geometries.Position; import com.sun.syndication.feed.synd.SyndContent; import com.sun.syndication.feed.synd.SyndContentImpl; import com.sun.syndication.feed.synd.SyndEntry; import com.sun.syndication.feed.synd.SyndEntryImpl; import com.sun.syndication.feed.synd.SyndFeedImpl; import com.sun.syndication.feed.synd.SyndImage; import com.sun.syndication.feed.synd.SyndImageImpl; import com.sun.syndication.io.SyndFeedOutput; /** * Output rows to RSS feed and create a file. * * @author Samatar * @since 6-nov-2007 */ public class RssOutput extends BaseStep implements StepInterface { private static Class<?> PKG = RssOutput.class; // for i18n purposes, needed by Translator2!! private RssOutputMeta meta; private RssOutputData data; public RssOutput( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ) { super( stepMeta, stepDataInterface, copyNr, transMeta, trans ); } public boolean processRow( StepMetaInterface smi, StepDataInterface sdi ) throws KettleException { meta = (RssOutputMeta) smi; data = (RssOutputData) sdi; Object[] r = getRow(); // this also waits for a previous step to be finished. if ( r == null ) { // no more input to be expected... if ( !first ) { if ( !meta.isCustomRss() ) { // No more input..so write and close the file. WriteToFile( data.channeltitlevalue, data.channellinkvalue, data.channeldescriptionvalue, data.channelpubdatevalue, data.channelcopyrightvalue, data.channelimagelinkvalue, data.channelimagedescriptionvalue, data.channelimagelinkvalue, data.channelimageurlvalue, data.channellanguagevalue, data.channelauthorvalue ); } else { // Write to document OutputFormat format = org.dom4j.io.OutputFormat.createPrettyPrint(); // Set encoding ... 
if ( Utils.isEmpty( meta.getEncoding() ) ) { format.setEncoding( "iso-8859-1" ); } else { format.setEncoding( meta.getEncoding() ); } try { XMLWriter writer = new XMLWriter( new FileWriter( new File( data.filename ) ), format ); writer.write( data.document ); writer.close(); } catch ( Exception e ) { // Ignore errors } finally { data.document = null; } } } setOutputDone(); return false; } if ( first ) { first = false; data.inputRowMeta = getInputRowMeta(); data.outputRowMeta = data.inputRowMeta.clone(); meta.getFields( data.outputRowMeta, getStepname(), null, null, this, repository, metaStore ); // Let's check for filename... if ( meta.isFilenameInField() ) { if ( Utils.isEmpty( meta.getFileNameField() ) ) { logError( BaseMessages.getString( PKG, "RssOutput.Log.FilenameFieldMissing" ) ); setErrors( 1 ); stopAll(); return false; } // get filename field index data.indexOfFieldfilename = data.inputRowMeta.indexOfValue( meta.getFileNameField() ); if ( data.indexOfFieldfilename < 0 ) { // The field is unreachable ! logError( BaseMessages.getString( PKG, "RssOutput.Log.ErrorFindingField", meta.getFileNameField() ) ); throw new KettleException( BaseMessages.getString( PKG, "RssOutput.Log.ErrorFindingField", meta .getFileNameField() ) ); } } else { data.filename = buildFilename(); } // Check if filename is empty.. if ( Utils.isEmpty( data.filename ) ) { logError( BaseMessages.getString( PKG, "RssOutput.Log.FilenameEmpty" ) ); throw new KettleException( BaseMessages.getString( PKG, "RssOutput.Log.FilenameEmpty" ) ); } // Do we need to create parent folder ? 
if ( meta.isCreateParentFolder() ) { // Check for parent folder FileObject parentfolder = null; try { // Get parent folder parentfolder = KettleVFS.getFileObject( data.filename, getTransMeta() ).getParent(); if ( !parentfolder.exists() ) { if ( log.isDetailed() ) { logDetailed( BaseMessages.getString( PKG, "RssOutput.Log.ParentFolderExists", parentfolder .getName().toString() ) ); } parentfolder.createFolder(); if ( log.isDetailed() ) { logDetailed( BaseMessages.getString( PKG, "RssOutput.Log.CanNotCreateParentFolder", parentfolder .getName().toString() ) ); } } } catch ( Exception e ) { // The field is unreachable ! logError( BaseMessages.getString( PKG, "RssOutput.Log.CanNotCreateParentFolder", parentfolder .getName().toString() ) ); throw new KettleException( BaseMessages.getString( PKG, "RssOutput.Log.CanNotCreateParentFolder", parentfolder.getName().toString() ) ); } finally { if ( parentfolder != null ) { try { parentfolder.close(); } catch ( Exception ex ) { /* Ignore */ } } } } if ( !meta.isCustomRss() ) { // Let's check for mandatory fields ... if ( Utils.isEmpty( meta.getChannelTitle() ) ) { logError( BaseMessages.getString( PKG, "RssOutput.Log.ChannelTitleMissing" ) ); setErrors( 1 ); stopAll(); return false; } if ( Utils.isEmpty( meta.getChannelDescription() ) ) { logError( BaseMessages.getString( PKG, "RssOutput.Log.ChannelDescription" ) ); setErrors( 1 ); stopAll(); return false; } if ( Utils.isEmpty( meta.getChannelLink() ) ) { logError( BaseMessages.getString( PKG, "RssOutput.Log.ChannelLink" ) ); setErrors( 1 ); stopAll(); return false; } // Let's take the index of channel title field ... data.indexOfFieldchanneltitle = data.inputRowMeta.indexOfValue( meta.getChannelTitle() ); if ( data.indexOfFieldchanneltitle < 0 ) { // The field is unreachable ! 
logError( BaseMessages.getString( PKG, "RssOutput.Log.ErrorFindingField", meta.getChannelTitle() ) ); throw new KettleException( BaseMessages.getString( PKG, "RssOutput.Log.ErrorFindingField", meta .getChannelTitle() ) ); } data.channeltitlevalue = data.inputRowMeta.getString( r, data.indexOfFieldchanneltitle ); // Let's take the index of channel description field ... data.indexOfFieldchanneldescription = data.inputRowMeta.indexOfValue( meta.getChannelDescription() ); if ( data.indexOfFieldchanneldescription < 0 ) { // The field is unreachable ! logError( BaseMessages.getString( PKG, "RssOutput.Log.ErrorFindingField", meta.getChannelDescription() ) ); throw new KettleException( BaseMessages.getString( PKG, "RssOutput.Log.ErrorFindingField", meta .getChannelDescription() ) ); } data.channeldescriptionvalue = data.inputRowMeta.getString( r, data.indexOfFieldchanneldescription ); // Let's take the index of channel link field ... data.indexOfFieldchannellink = data.inputRowMeta.indexOfValue( meta.getChannelLink() ); if ( data.indexOfFieldchannellink < 0 ) { // The field is unreachable ! logError( BaseMessages.getString( PKG, "RssOutput.Log.ErrorFindingField", meta.getChannelLink() ) ); throw new KettleException( BaseMessages.getString( PKG, "RssOutput.Log.ErrorFindingField", meta .getChannelLink() ) ); } data.channellinkvalue = data.inputRowMeta.getString( r, data.indexOfFieldchannellink ); if ( !Utils.isEmpty( meta.getItemTitle() ) ) { // Let's take the index of item title field ... data.indexOfFielditemtitle = data.inputRowMeta.indexOfValue( meta.getItemTitle() ); if ( data.indexOfFielditemtitle < 0 ) { // The field is unreachable ! logError( BaseMessages.getString( PKG, "RssOutput.Log.ErrorFindingField", meta.getItemTitle() ) ); throw new KettleException( BaseMessages.getString( PKG, "RssOutput.Log.ErrorFindingField", meta .getItemTitle() ) ); } } if ( !Utils.isEmpty( meta.getItemDescription() ) ) { // Let's take the index of item description field ... 
data.indexOfFielditemdescription = data.inputRowMeta.indexOfValue( meta.getItemDescription() ); if ( data.indexOfFielditemdescription < 0 ) { // The field is unreachable ! logError( BaseMessages.getString( PKG, "RssOutput.Log.ErrorFindingField", meta.getItemDescription() ) ); throw new KettleException( BaseMessages.getString( PKG, "RssOutput.Log.ErrorFindingField", meta .getItemDescription() ) ); } } if ( meta.AddGeoRSS() ) { if ( Utils.isEmpty( meta.getGeoPointLong() ) ) { throw new KettleException( BaseMessages.getString( PKG, "RssOutput.Log.GeoPointLatEmpty" ) ); } if ( Utils.isEmpty( meta.getGeoPointLong() ) ) { throw new KettleException( BaseMessages.getString( PKG, "RssOutput.Log.GeoPointLongEmpty" ) ); } // Let's take the index of item geopointX field ... data.indexOfFielditempointx = data.inputRowMeta.indexOfValue( meta.getGeoPointLat() ); if ( data.indexOfFielditempointx < 0 ) { // The field is unreachable ! logError( BaseMessages.getString( PKG, "RssOutput.Log.ErrorFindingField", meta.getGeoPointLat() ) ); throw new KettleException( BaseMessages.getString( PKG, "RssOutput.Log.ErrorFindingField", meta .getGeoPointLat() ) ); } // Let's take the index of item geopointY field ... data.indexOfFielditempointy = data.inputRowMeta.indexOfValue( meta.getGeoPointLong() ); if ( data.indexOfFielditempointy < 0 ) { // The field is unreachable ! logError( BaseMessages.getString( PKG, "RssOutput.Log.ErrorFindingField", meta.getGeoPointLong() ) ); throw new KettleException( BaseMessages.getString( PKG, "RssOutput.Log.ErrorFindingField", meta .getGeoPointLong() ) ); } } // It's time to check non empty fields ! // Channel PubDate field ... if ( !Utils.isEmpty( meta.getChannelPubDate() ) ) { data.indexOfFieldchannelpubdate = data.inputRowMeta.indexOfValue( meta.getChannelPubDate() ); if ( data.indexOfFieldchannelpubdate < 0 ) { // The field is unreachable ! 
logError( BaseMessages.getString( PKG, "RssOutput.Log.ErrorFindingField", meta.getChannelPubDate() ) ); throw new KettleException( BaseMessages.getString( PKG, "RssOutput.Log.ErrorFindingField", meta .getChannelPubDate() ) ); } data.channelpubdatevalue = data.inputRowMeta.getDate( r, data.indexOfFieldchannelpubdate ); } // Channel Language field ... if ( !Utils.isEmpty( meta.getChannelLanguage() ) ) { data.indexOfFieldchannellanguage = data.inputRowMeta.indexOfValue( meta.getChannelLanguage() ); if ( data.indexOfFieldchannellanguage < 0 ) { // The field is unreachable ! logError( BaseMessages.getString( PKG, "RssOutput.Log.ErrorFindingField", meta.getChannelLanguage() ) ); throw new KettleException( BaseMessages.getString( PKG, "RssOutput.Log.ErrorFindingField", meta .getChannelLanguage() ) ); } data.channellanguagevalue = data.inputRowMeta.getString( r, data.indexOfFieldchannellanguage ); } // Channel Copyright field ... if ( !Utils.isEmpty( meta.getChannelCopyright() ) ) { data.indexOfFieldchannelcopyright = data.inputRowMeta.indexOfValue( meta.getChannelCopyright() ); if ( data.indexOfFieldchannelcopyright < 0 ) { // The field is unreachable ! logError( BaseMessages.getString( PKG, "RssOutput.Log.ErrorFindingField", meta.getChannelCopyright() ) ); throw new KettleException( BaseMessages.getString( PKG, "RssOutput.Log.ErrorFindingField", meta .getChannelCopyright() ) ); } data.channelcopyrightvalue = data.inputRowMeta.getString( r, data.indexOfFieldchannelcopyright ); } // Channel Author field ... if ( !Utils.isEmpty( meta.getChannelAuthor() ) ) { data.indexOfFieldchannelauthor = data.inputRowMeta.indexOfValue( meta.getChannelAuthor() ); if ( data.indexOfFieldchannelauthor < 0 ) { // The field is unreachable ! 
logError( BaseMessages.getString( PKG, "RssOutput.Log.ErrorFindingField", meta.getChannelAuthor() ) ); throw new KettleException( BaseMessages.getString( PKG, "RssOutput.Log.ErrorFindingField", meta .getChannelAuthor() ) ); } data.channelauthorvalue = data.inputRowMeta.getString( r, data.indexOfFieldchannelauthor ); } // Channel Image field ... if ( meta.AddImage() ) { // Channel image title if ( !Utils.isEmpty( meta.getChannelImageTitle() ) ) { data.indexOfFieldchannelimagetitle = data.inputRowMeta.indexOfValue( meta.getChannelImageTitle() ); if ( data.indexOfFieldchannelimagetitle < 0 ) { // The field is unreachable ! logError( BaseMessages.getString( PKG, "RssOutput.Log.ErrorFindingField", meta .getChannelImageTitle() ) ); throw new KettleException( BaseMessages.getString( PKG, "RssOutput.Log.ErrorFindingField", meta .getChannelImageTitle() ) ); } data.channelimagetitlevalue = data.inputRowMeta.getString( r, data.indexOfFieldchannelimagetitle ); } // Channel link title if ( !Utils.isEmpty( meta.getChannelImageLink() ) ) { data.indexOfFieldchannelimagelink = data.inputRowMeta.indexOfValue( meta.getChannelImageLink() ); if ( data.indexOfFieldchannelimagelink < 0 ) { // The field is unreachable ! logError( BaseMessages .getString( PKG, "RssOutput.Log.ErrorFindingField", meta.getChannelImageLink() ) ); throw new KettleException( BaseMessages.getString( PKG, "RssOutput.Log.ErrorFindingField", meta .getChannelImageLink() ) ); } data.channelimagelinkvalue = data.inputRowMeta.getString( r, data.indexOfFieldchannelimagelink ); } // Channel url title if ( !Utils.isEmpty( meta.getChannelImageUrl() ) ) { data.indexOfFieldchannelimageurl = data.inputRowMeta.indexOfValue( meta.getChannelImageUrl() ); if ( data.indexOfFieldchannelimageurl < 0 ) { // The field is unreachable ! 
logError( BaseMessages.getString( PKG, "RssOutput.Log.ErrorFindingField", meta.getChannelImageUrl() ) ); throw new KettleException( BaseMessages.getString( PKG, "RssOutput.Log.ErrorFindingField", meta .getChannelImageUrl() ) ); } data.channelimageurlvalue = data.inputRowMeta.getString( r, data.indexOfFieldchannelimageurl ); } // Channel description title if ( !Utils.isEmpty( meta.getChannelImageDescription() ) ) { data.indexOfFieldchannelimagedescription = data.inputRowMeta.indexOfValue( meta.getChannelImageDescription() ); if ( data.indexOfFieldchannelimagedescription < 0 ) { // The field is unreachable ! logError( BaseMessages.getString( PKG, "RssOutput.Log.ErrorFindingField", meta .getChannelImageDescription() ) ); throw new KettleException( BaseMessages.getString( PKG, "RssOutput.Log.ErrorFindingField", meta .getChannelImageDescription() ) ); } data.channelimagedescriptionvalue = data.inputRowMeta.getString( r, data.indexOfFieldchannelimagedescription ); } } // Item link field ... if ( !Utils.isEmpty( meta.getItemLink() ) ) { data.indexOfFielditemlink = data.inputRowMeta.indexOfValue( meta.getItemLink() ); if ( data.indexOfFielditemlink < 0 ) { // The field is unreachable ! logError( BaseMessages.getString( PKG, "RssOutput.Log.ErrorFindingField", meta.getItemLink() ) ); throw new KettleException( BaseMessages.getString( PKG, "RssOutput.Log.ErrorFindingField", meta .getItemLink() ) ); } } // Item pubdate field ... if ( !Utils.isEmpty( meta.getItemPubDate() ) ) { data.indexOfFielditempubdate = data.inputRowMeta.indexOfValue( meta.getItemPubDate() ); if ( data.indexOfFielditempubdate < 0 ) { // The field is unreachable ! logError( BaseMessages.getString( PKG, "RssOutput.Log.ErrorFindingField", meta.getItemPubDate() ) ); throw new KettleException( BaseMessages.getString( PKG, "RssOutput.Log.ErrorFindingField", meta .getItemPubDate() ) ); } } // Item author field ... 
if ( !Utils.isEmpty( meta.getItemAuthor() ) ) { data.indexOfFielditemauthor = data.inputRowMeta.indexOfValue( meta.getItemAuthor() ); if ( data.indexOfFielditemauthor < 0 ) { // The field is unreachable ! logError( BaseMessages.getString( PKG, "RssOutput.Log.ErrorFindingField", meta.getItemAuthor() ) ); throw new KettleException( BaseMessages.getString( PKG, "RssOutput.Log.ErrorFindingField", meta .getItemAuthor() ) ); } } } else { // Custom RSS // Check Custom channel fields data.customchannels = new int[meta.getChannelCustomFields().length]; for ( int i = 0; i < meta.getChannelCustomFields().length; i++ ) { data.customchannels[i] = data.inputRowMeta.indexOfValue( meta.getChannelCustomFields()[i] ); if ( data.customchannels[i] < 0 ) { // couldn't find field! throw new KettleStepException( BaseMessages.getString( PKG, "RssOutput.Exception.FieldRequired", meta .getChannelCustomFields()[i] ) ); } } // Check Custom channel fields data.customitems = new int[meta.getItemCustomFields().length]; for ( int i = 0; i < meta.getItemCustomFields().length; i++ ) { data.customitems[i] = data.inputRowMeta.indexOfValue( meta.getItemCustomFields()[i] ); if ( data.customitems[i] < 0 ) { // couldn't find field! throw new KettleStepException( BaseMessages.getString( PKG, "RssOutput.Exception.FieldRequired", meta .getItemCustomFields()[i] ) ); } } // Prepare Output RSS Custom document data.document = DocumentHelper.createDocument(); data.rssElement = data.document.addElement( "rss" ); data.rssElement.addAttribute( "version", "2.0" ); // add namespaces here ... for ( int i = 0; i < meta.getNameSpaces().length; i++ ) { data.rssElement.addNamespace( environmentSubstitute( meta.getNameSpacesTitle()[i] ), environmentSubstitute( meta.getNameSpaces()[i] ) ); } // Add channel data.channel = data.rssElement.addElement( "channel" ); // Set channel Only the first time ... 
for ( int i = 0; i < data.customchannels.length; i++ ) { String channelname = environmentSubstitute( meta.getChannelCustomTags()[i] ); String channelvalue = data.inputRowMeta.getString( r, data.customchannels[i] ); if ( log.isDetailed() ) { logDetailed( "outputting channel value <" + channelname + ">" + channelvalue + "<" + channelname + "/>" ); } // add Channel Element channeltag = data.channel.addElement( channelname ); channeltag.setText( channelvalue ); } } } // end test first time // Let's get value for each item... if ( !meta.isCustomRss() ) { String itemtitlevalue = null; String itemauthorvalue = null; String itemlinkvalue = null; Date itemdatevalue = null; String itemdescvalue = null; String itemgeopointx = null; String itemgeopointy = null; if ( data.indexOfFielditemtitle > -1 ) { itemtitlevalue = data.inputRowMeta.getString( r, data.indexOfFielditemtitle ); } if ( data.indexOfFielditemauthor > -1 ) { itemauthorvalue = data.inputRowMeta.getString( r, data.indexOfFielditemauthor ); } if ( data.indexOfFielditemlink > -1 ) { itemlinkvalue = data.inputRowMeta.getString( r, data.indexOfFielditemlink ); } if ( data.indexOfFielditempubdate > -1 ) { itemdatevalue = data.inputRowMeta.getDate( r, data.indexOfFielditempubdate ); } if ( data.indexOfFielditemdescription > -1 ) { itemdescvalue = data.inputRowMeta.getString( r, data.indexOfFielditemdescription ); } if ( data.indexOfFielditempointx > -1 ) { itemgeopointx = data.inputRowMeta.getString( r, data.indexOfFielditempointx ); } if ( data.indexOfFielditempointy > -1 ) { itemgeopointy = data.inputRowMeta.getString( r, data.indexOfFielditempointy ); } // Now add entry .. 
if ( !createEntry( itemauthorvalue, itemtitlevalue, itemlinkvalue, itemdatevalue, itemdescvalue, itemgeopointx, itemgeopointy ) ) { throw new KettleException( "Error adding item to feed" ); } } else { // Set item tag at each row received if ( meta.isDisplayItem() ) { data.itemtag = data.channel.addElement( "item" ); } for ( int i = 0; i < data.customitems.length; i++ ) { // get item value and name String itemname = environmentSubstitute( meta.getItemCustomTags()[i] ); String itemvalue = data.inputRowMeta.getString( r, data.customitems[i] ); if ( log.isDetailed() ) { logDetailed( "outputting item value <" + itemname + ">" + itemvalue + "<" + itemname + "/>" ); } // add Item if ( meta.isDisplayItem() ) { Element itemtagsub = data.itemtag.addElement( itemname ); itemtagsub.setText( itemvalue ); } else { // display item at channel level Element temp = data.channel.addElement( itemname ); temp.setText( itemvalue ); } } } try { putRow( data.outputRowMeta, r ); // in case we want it to go further... 
incrementLinesOutput(); if ( checkFeedback( getLinesOutput() ) ) { if ( log.isDebug() ) { logDebug( BaseMessages.getString( PKG, "RssOutput.Log.Linenr", "" + getLinesOutput() ) ); } } } catch ( KettleStepException e ) { logError( BaseMessages.getString( PKG, "RssOutputMeta.Log.ErrorInStep" ) + e.getMessage() ); setErrors( 1 ); stopAll(); setOutputDone(); // signal end to receiver(s) return false; } return true; } public String buildFilename() throws KettleStepException { return meta.buildFilename( this, getCopy() ); } /** * @param author * : The author of the event * @param title * : The title of the event * @param link * : The link to the element in RES * @param date * : The event's date * @param desc * : The event's description */ @SuppressWarnings( "unchecked" ) public boolean createEntry( String author, String title, String link, Date date, String desc, String geopointLat, String geopointLong ) { boolean retval = false; try { // Add entry to the feed SyndEntry entry = new SyndEntryImpl(); SyndContent description; entry = new SyndEntryImpl(); if ( title != null ) { entry.setTitle( title ); } if ( link != null ) { entry.setLink( link ); } if ( date != null ) { entry.setPublishedDate( date ); } if ( author != null ) { entry.setAuthor( author ); } if ( desc != null ) { description = new SyndContentImpl(); description.setType( "text/plain" ); description.setValue( desc ); entry.setDescription( description ); } if ( meta.AddGeoRSS() && geopointLat != null && geopointLong != null ) { // Add GeoRSS? GeoRSSModule geoRSSModule = new SimpleModuleImpl(); if ( meta.useGeoRSSGML() ) { geoRSSModule = new W3CGeoModuleImpl(); } geoRSSModule.setPosition( new Position( Const.toDouble( geopointLat.replace( ',', '.' ), 0 ), Const .toDouble( geopointLong.replace( ',', '.' 
), 0 ) ) ); entry.getModules().add( geoRSSModule ); } data.entries.add( entry ); retval = true; } catch ( Exception e ) { logError( BaseMessages.getString( PKG, "RssOutput.Log.ErrorAddingEntry", e.getMessage() ) ); setErrors( 1 ); retval = false; } return retval; } private boolean WriteToFile( String title, String link, String description, Date Pubdate, String copyright, String imageTitle, String imageDescription, String imageLink, String imageUrl, String language, String author ) { boolean retval = false; try { // Specify Filename String fileName = data.filename; // Set channel ... data.feed = new SyndFeedImpl(); if ( Utils.isEmpty( meta.getVersion() ) ) { data.feed.setFeedType( "rss_2.0" ); } else { data.feed.setFeedType( meta.getVersion() ); } // Set encoding ... if ( Utils.isEmpty( meta.getEncoding() ) ) { data.feed.setEncoding( "iso-8859-1" ); } else { data.feed.setEncoding( meta.getEncoding() ); } if ( title != null ) { data.feed.setTitle( title ); } if ( link != null ) { data.feed.setLink( link ); } if ( description != null ) { data.feed.setDescription( description ); } if ( Pubdate != null ) { data.feed.setPublishedDate( Pubdate ); // data.dateParser.parse(Pubdate.toString())); } // Set image .. 
if ( meta.AddImage() ) { SyndImage image = new SyndImageImpl(); if ( imageTitle != null ) { image.setTitle( title ); } if ( imageLink != null ) { image.setLink( link ); } if ( imageUrl != null ) { image.setUrl( imageUrl ); } if ( imageDescription != null ) { image.setDescription( imageDescription ); } data.feed.setImage( image ); } if ( language != null ) { data.feed.setLanguage( language ); } if ( copyright != null ) { data.feed.setCopyright( copyright ); } if ( author != null ) { data.feed.setAuthor( author ); } // Add entries data.feed.setEntries( data.entries ); Writer writer = new FileWriter( fileName ); SyndFeedOutput output = new SyndFeedOutput(); output.output( data.feed, writer ); writer.close(); if ( meta.AddToResult() ) { // Add this to the result file names... ResultFile resultFile = new ResultFile( ResultFile.FILE_TYPE_GENERAL, KettleVFS.getFileObject( fileName, getTransMeta() ), getTransMeta() .getName(), getStepname() ); resultFile.setComment( "This file was created with a RSS Output step" ); addResultFile( resultFile ); } if ( log.isDetailed() ) { logDetailed( BaseMessages.getString( PKG, "RssOutput.Log.CreatingFileOK", fileName ) ); } retval = true; } catch ( Exception e ) { logError( BaseMessages.getString( PKG, "RssOutput.Log.ErrorCreatingFile", e.toString() ) ); setErrors( 1 ); retval = false; } return retval; } public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (RssOutputMeta) smi; data = (RssOutputData) sdi; if ( super.init( smi, sdi ) ) { return true; } return false; } public void dispose( StepMetaInterface smi, StepDataInterface sdi ) { meta = (RssOutputMeta) smi; data = (RssOutputData) sdi; if ( data.document != null ) { data.document = null; } if ( data.rssElement != null ) { data.rssElement = null; } if ( data.channel != null ) { data.channel = null; } setOutputDone(); super.dispose( smi, sdi ); } }
/**
 * Copyright 2012 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.hystrix;

import com.netflix.hystrix.strategy.concurrency.HystrixConcurrencyStrategy;
import com.netflix.hystrix.strategy.concurrency.HystrixRequestVariableDefault;
import com.netflix.hystrix.strategy.concurrency.HystrixRequestVariableHolder;
import com.netflix.hystrix.strategy.concurrency.HystrixRequestVariableLifecycle;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import rx.Observable;
// NOTE(review): this import is not referenced anywhere in this file — candidate for removal.
import rx.internal.operators.CachedObservable;

import java.util.concurrent.ConcurrentHashMap;

/**
 * Cache that is scoped to the current request as managed by {@link HystrixRequestVariableDefault}.
 * <p>
 * This is used for short-lived caching of {@link HystrixCommand} instances to allow de-duping of command executions within a request.
 */
public class HystrixRequestCache {
    @SuppressWarnings("unused")
    private static final Logger logger = LoggerFactory.getLogger(HystrixRequestCache.class);

    // Static (JVM-wide) registry of HystrixRequestCache instances, one per (command/collapser key, concurrency strategy) pair.
    // the String key must be: HystrixRequestCache.prefix + concurrencyStrategy + cacheKey
    private final static ConcurrentHashMap<RequestCacheKey, HystrixRequestCache> caches = new ConcurrentHashMap<RequestCacheKey, HystrixRequestCache>();

    // Identity of this cache instance; combined with a user cacheKey to form ValueCacheKey entries.
    private final RequestCacheKey rcKey;
    // Strategy used to look up the request-scoped cache map in requestVariableForCache.
    private final HystrixConcurrencyStrategy concurrencyStrategy;

    /**
     * A ConcurrentHashMap per 'prefix' and per request scope that is used to dedupe requests in the same request.
     * <p>
     * Key => ValueCacheKey (request-cache identity + command cacheKey) : HystrixCachedObservable from the execution
     * <p>
     * The map itself is request-scoped: it is created lazily per request via the lifecycle below and discarded
     * when the request context shuts down (nothing to clean up besides dropping the reference).
     */
    private static final HystrixRequestVariableHolder<ConcurrentHashMap<ValueCacheKey, HystrixCachedObservable<?>>> requestVariableForCache = new HystrixRequestVariableHolder<ConcurrentHashMap<ValueCacheKey, HystrixCachedObservable<?>>>(new HystrixRequestVariableLifecycle<ConcurrentHashMap<ValueCacheKey, HystrixCachedObservable<?>>>() {

        @Override
        public ConcurrentHashMap<ValueCacheKey, HystrixCachedObservable<?>> initialValue() {
            return new ConcurrentHashMap<ValueCacheKey, HystrixCachedObservable<?>>();
        }

        @Override
        public void shutdown(ConcurrentHashMap<ValueCacheKey, HystrixCachedObservable<?>> value) {
            // nothing to shutdown
        }

    });

    private HystrixRequestCache(RequestCacheKey rcKey, HystrixConcurrencyStrategy concurrencyStrategy) {
        this.rcKey = rcKey;
        this.concurrencyStrategy = concurrencyStrategy;
    }

    /** Get (or lazily create) the cache instance for a command key under the given concurrency strategy. */
    public static HystrixRequestCache getInstance(HystrixCommandKey key, HystrixConcurrencyStrategy concurrencyStrategy) {
        return getInstance(new RequestCacheKey(key, concurrencyStrategy), concurrencyStrategy);
    }

    /** Get (or lazily create) the cache instance for a collapser key under the given concurrency strategy. */
    public static HystrixRequestCache getInstance(HystrixCollapserKey key, HystrixConcurrencyStrategy concurrencyStrategy) {
        return getInstance(new RequestCacheKey(key, concurrencyStrategy), concurrencyStrategy);
    }

    private static HystrixRequestCache getInstance(RequestCacheKey rcKey, HystrixConcurrencyStrategy concurrencyStrategy) {
        HystrixRequestCache c = caches.get(rcKey);
        if (c == null) {
            // Standard putIfAbsent race pattern: create optimistically, keep whichever instance won the race.
            HystrixRequestCache newRequestCache = new HystrixRequestCache(rcKey, concurrencyStrategy);
            HystrixRequestCache existing = caches.putIfAbsent(rcKey, newRequestCache);
            if (existing == null) {
                // we won so use the new one
                c = newRequestCache;
            } else {
                // we lost so use the existing
                c = existing;
            }
        }
        return c;
    }

    /**
     * Retrieve a cached observable for this request scope if a matching command has already been executed/queued.
     *
     * @return the cached {@code HystrixCachedObservable<T>}, or null when nothing is cached for this key
     *         (or when {@code cacheKey} is null, i.e. the command does not participate in caching)
     * @throws IllegalStateException if no HystrixRequestContext has been initialized for this request
     */
    // suppressing warnings because we are using a raw type in a heterogeneous ConcurrentHashMap cache
    @SuppressWarnings({ "unchecked" })
    /* package */<T> HystrixCachedObservable<T> get(String cacheKey) {
        ValueCacheKey key = getRequestCacheKey(cacheKey);
        if (key != null) {
            ConcurrentHashMap<ValueCacheKey, HystrixCachedObservable<?>> cacheInstance = requestVariableForCache.get(concurrencyStrategy);
            if (cacheInstance == null) {
                throw new IllegalStateException("Request caching is not available. Maybe you need to initialize the HystrixRequestContext?");
            }
            /* look for the stored value */
            return (HystrixCachedObservable<T>) cacheInstance.get(key);
        }
        return null;
    }

    /**
     * Put the observable in the cache if it does not already exist.
     * <p>
     * If this method returns a non-null value then another thread won the race and it should be returned instead of proceeding with execution of the new observable.
     *
     * @param cacheKey
     *            key as defined by {@link HystrixCommand#getCacheKey()}
     * @param f
     *            observable to be cached
     *
     * @return null if nothing else was in the cache (or this {@link HystrixCommand} does not have a cacheKey) or previous value if another thread beat us to adding to the cache
     * @throws IllegalStateException if no HystrixRequestContext has been initialized for this request
     */
    // suppressing warnings because we are using a raw type in a heterogeneous ConcurrentHashMap cache
    @SuppressWarnings({ "unchecked" })
    /* package */<T> HystrixCachedObservable<T> putIfAbsent(String cacheKey, HystrixCachedObservable<T> f) {
        ValueCacheKey key = getRequestCacheKey(cacheKey);
        if (key != null) {
            /* look for the stored value */
            ConcurrentHashMap<ValueCacheKey, HystrixCachedObservable<?>> cacheInstance = requestVariableForCache.get(concurrencyStrategy);
            if (cacheInstance == null) {
                throw new IllegalStateException("Request caching is not available. Maybe you need to initialize the HystrixRequestContext?");
            }
            HystrixCachedObservable<T> alreadySet = (HystrixCachedObservable<T>) cacheInstance.putIfAbsent(key, f);
            if (alreadySet != null) {
                // someone beat us so we didn't cache this
                return alreadySet;
            }
        }
        // we either set it in the cache or do not have a cache key
        return null;
    }

    /**
     * Clear the cache for a given cacheKey.
     *
     * @param cacheKey
     *            key as defined by {@link HystrixCommand#getCacheKey()}
     * @throws IllegalStateException if no HystrixRequestContext has been initialized for this request
     */
    public void clear(String cacheKey) {
        ValueCacheKey key = getRequestCacheKey(cacheKey);
        if (key != null) {
            ConcurrentHashMap<ValueCacheKey, HystrixCachedObservable<?>> cacheInstance = requestVariableForCache.get(concurrencyStrategy);
            if (cacheInstance == null) {
                throw new IllegalStateException("Request caching is not available. Maybe you need to initialize the HystrixRequestContext?");
            }
            /* remove this cache key */
            cacheInstance.remove(key);
        }
    }

    /**
     * Request CacheKey: HystrixRequestCache.prefix + concurrencyStrategy + HystrixCommand.getCacheKey (as injected via get/put to this class)
     * <p>
     * We prefix with {@link HystrixCommandKey} or {@link HystrixCollapserKey} since the cache is heterogeneous and we don't want to accidentally return cached values from different
     * types.
     *
     * @return ValueCacheKey, or null when {@code cacheKey} is null (caching disabled for the command)
     */
    private ValueCacheKey getRequestCacheKey(String cacheKey) {
        if (cacheKey != null) {
            /* create the cache key we will use to retrieve/store that include the type key prefix */
            return new ValueCacheKey(rcKey, cacheKey);
        }
        return null;
    }

    /** Composite map key: which request cache (command/collapser + strategy) plus the user-supplied cacheKey. */
    private static class ValueCacheKey {
        private final RequestCacheKey rvKey;
        private final String valueCacheKey;

        private ValueCacheKey(RequestCacheKey rvKey, String valueCacheKey) {
            this.rvKey = rvKey;
            this.valueCacheKey = valueCacheKey;
        }

        // equals/hashCode are null-tolerant on both fields so this is safe as a ConcurrentHashMap key.
        @Override
        public int hashCode() {
            final int prime = 31;
            int result = 1;
            result = prime * result + ((rvKey == null) ? 0 : rvKey.hashCode());
            result = prime * result + ((valueCacheKey == null) ? 0 : valueCacheKey.hashCode());
            return result;
        }

        @Override
        public boolean equals(Object obj) {
            if (this == obj)
                return true;
            if (obj == null)
                return false;
            if (getClass() != obj.getClass())
                return false;
            ValueCacheKey other = (ValueCacheKey) obj;
            if (rvKey == null) {
                if (other.rvKey != null)
                    return false;
            } else if (!rvKey.equals(other.rvKey))
                return false;
            if (valueCacheKey == null) {
                if (other.valueCacheKey != null)
                    return false;
            } else if (!valueCacheKey.equals(other.valueCacheKey))
                return false;
            return true;
        }

    }

    /** Identity of a HystrixRequestCache instance in the static registry. */
    private static class RequestCacheKey {
        private final short type; // used to differentiate between Collapser/Command if key is same between them
        private final String key;
        private final HystrixConcurrencyStrategy concurrencyStrategy;

        private RequestCacheKey(HystrixCommandKey commandKey, HystrixConcurrencyStrategy concurrencyStrategy) {
            type = 1;
            if (commandKey == null) {
                this.key = null;
            } else {
                this.key = commandKey.name();
            }
            this.concurrencyStrategy = concurrencyStrategy;
        }

        private RequestCacheKey(HystrixCollapserKey collapserKey, HystrixConcurrencyStrategy concurrencyStrategy) {
            type = 2;
            if (collapserKey == null) {
                this.key = null;
            } else {
                this.key = collapserKey.name();
            }
            this.concurrencyStrategy = concurrencyStrategy;
        }

        @Override
        public int hashCode() {
            final int prime = 31;
            int result = 1;
            result = prime * result + ((concurrencyStrategy == null) ? 0 : concurrencyStrategy.hashCode());
            result = prime * result + ((key == null) ? 0 : key.hashCode());
            result = prime * result + type;
            return result;
        }

        @Override
        public boolean equals(Object obj) {
            if (this == obj)
                return true;
            if (obj == null)
                return false;
            if (getClass() != obj.getClass())
                return false;
            RequestCacheKey other = (RequestCacheKey) obj;
            if (type != other.type)
                return false;
            if (key == null) {
                if (other.key != null)
                    return false;
            } else if (!key.equals(other.key))
                return false;
            if (concurrencyStrategy == null) {
                if (other.concurrencyStrategy != null)
                    return false;
            } else if (!concurrencyStrategy.equals(other.concurrencyStrategy))
                return false;
            return true;
        }

    }

}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.cassandra.config;

import java.io.*;
import java.util.*;
import java.util.concurrent.atomic.AtomicInteger;

import com.google.common.collect.*;

import org.apache.avro.Schema;
import org.apache.avro.util.Utf8;
import org.apache.commons.lang.builder.EqualsBuilder;
import org.apache.commons.lang.builder.HashCodeBuilder;

import org.apache.cassandra.io.SerDeUtils;
import org.apache.cassandra.db.ColumnFamilyType;
import org.apache.cassandra.db.ClockType;
import org.apache.cassandra.db.clock.AbstractReconciler;
import org.apache.cassandra.db.clock.TimestampReconciler;
import org.apache.cassandra.db.HintedHandOffManager;
import org.apache.cassandra.db.SystemTable;
import org.apache.cassandra.db.StatisticsTable;
import org.apache.cassandra.db.Table;
import org.apache.cassandra.db.marshal.AbstractType;
import org.apache.cassandra.db.marshal.BytesType;
import org.apache.cassandra.db.marshal.TimeUUIDType;
import org.apache.cassandra.db.marshal.UTF8Type;
import org.apache.cassandra.db.migration.Migration;
import org.apache.cassandra.utils.FBUtilities;
import org.apache.cassandra.utils.Pair;

/**
 * Immutable metadata for a single column family: name, comparators, caching and
 * GC settings, plus the static registry mapping (keyspace, cfname) pairs to
 * numeric column-family ids.
 * <p>
 * NOTE(review): the static maps below are plain HashMap/HashBiMap; the code
 * here shows them mutated via map()/purge() — presumably only during startup
 * and schema migrations. Confirm the external synchronization contract before
 * touching them from other threads.
 */
public final class CFMetaData
{
    public final static double DEFAULT_READ_REPAIR_CHANCE = 1.0;
    public final static double DEFAULT_KEY_CACHE_SIZE = 200000;
    public final static double DEFAULT_ROW_CACHE_SIZE = 0.0;
    public final static boolean DEFAULT_PRELOAD_ROW_CACHE = false;
    public final static int DEFAULT_GC_GRACE_SECONDS = 864000;

    // Ids below MIN_CF_ID are reserved for system column families.
    private static final int MIN_CF_ID = 1000;
    private static final AtomicInteger idGen = new AtomicInteger(MIN_CF_ID);

    private static final Map<Integer, String> currentCfNames = new HashMap<Integer, String>();

    // Bidirectional (keyspace, cfname) <-> id registry; inverse() serves id lookups.
    private static final BiMap<Pair<String, String>, Integer> cfIdMap = HashBiMap.<Pair<String, String>, Integer>create();

    // System column families use fixed ids 0..4.
    // Constructor argument order after the comment is:
    //   rowCacheSize, preloadRowCache, keyCacheSize, readRepairChance, gcGraceSeconds, cfId
    public static final CFMetaData StatusCf = new CFMetaData(Table.SYSTEM_TABLE, SystemTable.STATUS_CF, ColumnFamilyType.Standard, ClockType.Timestamp, UTF8Type.instance, null, TimestampReconciler.instance, "persistent metadata for the local node", 0, false, 0.01, 0, DEFAULT_GC_GRACE_SECONDS, 0, Collections.<byte[],ColumnDefinition>emptyMap());
    public static final CFMetaData HintsCf = new CFMetaData(Table.SYSTEM_TABLE, HintedHandOffManager.HINTS_CF, ColumnFamilyType.Super, ClockType.Timestamp, BytesType.instance, BytesType.instance, TimestampReconciler.instance, "hinted handoff data", 0, false, 0.01, 0, DEFAULT_GC_GRACE_SECONDS, 1, Collections.<byte[], ColumnDefinition>emptyMap());
    public static final CFMetaData MigrationsCf = new CFMetaData(Table.SYSTEM_TABLE, Migration.MIGRATIONS_CF, ColumnFamilyType.Standard, ClockType.Timestamp, TimeUUIDType.instance, null, TimestampReconciler.instance, "individual schema mutations", 0, false, 0.01, 0, DEFAULT_GC_GRACE_SECONDS, 2, Collections.<byte[], ColumnDefinition>emptyMap());
    public static final CFMetaData SchemaCf = new CFMetaData(Table.SYSTEM_TABLE, Migration.SCHEMA_CF, ColumnFamilyType.Standard, ClockType.Timestamp, UTF8Type.instance, null, TimestampReconciler.instance, "current state of the schema", 0, false, 0.01, 0, DEFAULT_GC_GRACE_SECONDS, 3, Collections.<byte[], ColumnDefinition>emptyMap());
    // FIX: readRepairChance and gcGraceSeconds were previously swapped here
    // (DEFAULT_GC_GRACE_SECONDS was passed as readRepairChance, 0 as gcGraceSeconds),
    // inconsistent with every other system CF above and with readRepairChance's
    // documented 0..1 range.
    public static final CFMetaData StatisticsCf = new CFMetaData(Table.SYSTEM_TABLE, StatisticsTable.STATISTICS_CF, ColumnFamilyType.Super, ClockType.Timestamp, UTF8Type.instance, BytesType.instance, TimestampReconciler.instance, "persistent CF statistics for the local node", 0, false, 0.01, 0, DEFAULT_GC_GRACE_SECONDS, 4, Collections.<byte[], ColumnDefinition>emptyMap());

    /**
     * @return An immutable mapping of (ksname,cfname) to id.
     */
    public static final Map<Pair<String, String>, Integer> getCfToIdMap()
    {
        return Collections.unmodifiableMap(cfIdMap);
    }

    /**
     * @return An immutable mapping of id to (ksname,cfname).
     */
    public static final Map<Integer, Pair<String, String>> getIdToCfMap()
    {
        return Collections.unmodifiableMap(cfIdMap.inverse());
    }

    /**
     * @return The (ksname,cfname) pair for the given id, or null if it has been dropped.
     */
    public static final Pair<String,String> getCF(Integer cfId)
    {
        return cfIdMap.inverse().get(cfId);
    }

    /**
     * @return The id for the given (ksname,cfname) pair, or null if it has been dropped.
     */
    public static final Integer getId(String table, String cfName)
    {
        return cfIdMap.get(new Pair<String, String>(table, cfName));
    }

    /**
     * Called after initialization to make sure that id generation happens properly:
     * the generator must start above both MIN_CF_ID and every id already registered.
     */
    public static final void fixMaxId()
    {
        // never set it to less than 1000. this ensures that we have enough system CFids for future use.
        idGen.set(cfIdMap.size() == 0 ? MIN_CF_ID : Math.max(Collections.max(cfIdMap.values()) + 1, MIN_CF_ID));
    }

    public final String tableName;            // name of table which has this column family
    public final String cfName;               // name of the column family
    public final ColumnFamilyType cfType;     // type: super, standard, etc.
    public final ClockType clockType;         // clock type: timestamp, etc.
    public final AbstractType comparator;     // name sorted, time stamp sorted etc.
    public final AbstractType subcolumnComparator; // like comparator, for supercolumns
    public final AbstractReconciler reconciler;    // determine correct column from conflicting versions
    public final String comment;              // for humans only
    public final double rowCacheSize;         // default 0
    public final double keyCacheSize;         // default 0.01
    public final double readRepairChance;     // chance 0 to 1, of doing a read repair; defaults 1.0 (always)
    public final Integer cfId;
    public boolean preloadRowCache;           // mutable, unlike the rest — NOTE(review): confirm whether this is intentional
    public final int gcGraceSeconds;          // default 864000 (ten days)
    public final Map<byte[], ColumnDefinition> column_metadata;

    private CFMetaData(String tableName,
                       String cfName,
                       ColumnFamilyType cfType,
                       ClockType clockType,
                       AbstractType comparator,
                       AbstractType subcolumnComparator,
                       AbstractReconciler reconciler,
                       String comment,
                       double rowCacheSize,
                       boolean preloadRowCache,
                       double keyCacheSize,
                       double readRepairChance,
                       int gcGraceSeconds,
                       Integer cfId,
                       Map<byte[], ColumnDefinition> column_metadata)
    {
        assert column_metadata != null;
        this.tableName = tableName;
        this.cfName = cfName;
        this.cfType = cfType;
        this.clockType = clockType;
        this.comparator = comparator;
        // the default subcolumncomparator is null per thrift spec, but only should be null if cfType == Standard. If
        // cfType == Super, subcolumnComparator should default to BytesType if not set.
        this.subcolumnComparator = subcolumnComparator == null && cfType == ColumnFamilyType.Super
                                 ? BytesType.instance
                                 : subcolumnComparator;
        this.reconciler = reconciler;
        this.comment = comment == null ? "" : comment;
        this.rowCacheSize = rowCacheSize;
        this.preloadRowCache = preloadRowCache;
        this.keyCacheSize = keyCacheSize;
        this.readRepairChance = readRepairChance;
        this.gcGraceSeconds = gcGraceSeconds;
        this.cfId = cfId;
        this.column_metadata = Collections.unmodifiableMap(column_metadata);
    }

    /**
     * Adds this cfm to the static registry.
     *
     * @throws ConfigurationException if a column family with the same (keyspace, name) is already registered
     */
    public static void map(CFMetaData cfm) throws ConfigurationException
    {
        Pair<String, String> key = new Pair<String, String>(cfm.tableName, cfm.cfName);
        if (cfIdMap.containsKey(key))
            throw new ConfigurationException("Attempt to assign id to existing column family.");
        else
        {
            cfIdMap.put(key, cfm.cfId);
            currentCfNames.put(cfm.cfId, cfm.cfName);
        }
    }

    /** Public constructor: like the private one but allocates a fresh id. */
    public CFMetaData(String tableName,
                      String cfName,
                      ColumnFamilyType cfType,
                      ClockType clockType,
                      AbstractType comparator,
                      AbstractType subcolumnComparator,
                      AbstractReconciler reconciler,
                      String comment,
                      double rowCacheSize,
                      boolean preloadRowCache,
                      double keyCacheSize,
                      double readRepairChance,
                      int gcGraceSeconds,
                      Map<byte[], ColumnDefinition> column_metadata)
    {
        this(tableName, cfName, cfType, clockType, comparator, subcolumnComparator, reconciler, comment,
             rowCacheSize, preloadRowCache, keyCacheSize, readRepairChance, gcGraceSeconds, nextId(), column_metadata);
    }

    /** Clones an existing CFMetaData using the same id but a new column family name. */
    public static CFMetaData rename(CFMetaData cfm, String newName)
    {
        return new CFMetaData(cfm.tableName, newName, cfm.cfType, cfm.clockType, cfm.comparator, cfm.subcolumnComparator,
                              cfm.reconciler, cfm.comment, cfm.rowCacheSize, cfm.preloadRowCache, cfm.keyCacheSize,
                              cfm.readRepairChance, cfm.gcGraceSeconds, cfm.cfId, cfm.column_metadata);
    }

    /** Clones existing CFMetaData. Keeps the id but changes the table name. */
    public static CFMetaData renameTable(CFMetaData cfm, String tableName)
    {
        return new CFMetaData(tableName, cfm.cfName, cfm.cfType, cfm.clockType, cfm.comparator, cfm.subcolumnComparator,
                              cfm.reconciler, cfm.comment, cfm.rowCacheSize, cfm.preloadRowCache, cfm.keyCacheSize,
                              cfm.readRepairChance, cfm.gcGraceSeconds, cfm.cfId, cfm.column_metadata);
    }

    /** Used for evicting cf data out of static tracking collections. */
    public static void purge(CFMetaData cfm)
    {
        cfIdMap.remove(new Pair<String, String>(cfm.tableName, cfm.cfName));
        currentCfNames.remove(cfm.cfId);
    }

    // a quick and dirty pretty printer for describing the column family...
    public String pretty()
    {
        return tableName + "." + cfName + "\n"
               + "Column Family Type: " + cfType + "\n"
               + "Column Family Clock Type: " + clockType + "\n"
               + "Columns Sorted By: " + comparator + "\n";
    }

    /** Serializes this metadata into its Avro representation. Inverse of {@link #inflate}. */
    public org.apache.cassandra.config.avro.CfDef deflate()
    {
        org.apache.cassandra.config.avro.CfDef cf = new org.apache.cassandra.config.avro.CfDef();
        cf.id = cfId;
        cf.keyspace = new Utf8(tableName);
        cf.name = new Utf8(cfName);
        cf.column_type = new Utf8(cfType.name());
        cf.clock_type = new Utf8(clockType.name());
        cf.comparator_type = new Utf8(comparator.getClass().getName());
        if (subcolumnComparator != null)
            cf.subcomparator_type = new Utf8(subcolumnComparator.getClass().getName());
        cf.reconciler = new Utf8(reconciler.getClass().getName());
        cf.comment = new Utf8(comment);
        cf.row_cache_size = rowCacheSize;
        cf.key_cache_size = keyCacheSize;
        cf.preload_row_cache = preloadRowCache;
        cf.read_repair_chance = readRepairChance;
        cf.gc_grace_seconds = gcGraceSeconds;
        cf.column_metadata = SerDeUtils.createArray(column_metadata.size(), org.apache.cassandra.config.avro.ColumnDef.SCHEMA$);
        for (ColumnDefinition cd : column_metadata.values())
            cf.column_metadata.add(cd.deflate());
        return cf;
    }

    /**
     * Reconstructs a CFMetaData from its Avro representation. Inverse of {@link #deflate}.
     *
     * @throws RuntimeException wrapping any failure to resolve the comparator/reconciler classes
     */
    public static CFMetaData inflate(org.apache.cassandra.config.avro.CfDef cf)
    {
        AbstractType comparator;
        AbstractType subcolumnComparator = null;
        AbstractReconciler reconciler;
        try
        {
            comparator = DatabaseDescriptor.getComparator(cf.comparator_type.toString());
            if (cf.subcomparator_type != null)
                subcolumnComparator = DatabaseDescriptor.getComparator(cf.subcomparator_type.toString());
            reconciler = DatabaseDescriptor.getReconciler(cf.reconciler.toString());
        }
        catch (Exception ex)
        {
            throw new RuntimeException("Could not inflate CFMetaData for " + cf, ex);
        }
        // byte[] keys need a content comparator; plain HashMap would compare by identity.
        Map<byte[], ColumnDefinition> column_metadata = new TreeMap<byte[], ColumnDefinition>(FBUtilities.byteArrayComparator);
        Iterator<org.apache.cassandra.config.avro.ColumnDef> cditer = cf.column_metadata.iterator();
        while (cditer.hasNext())
        {
            ColumnDefinition cd = ColumnDefinition.inflate(cditer.next());
            column_metadata.put(cd.name, cd);
        }
        return new CFMetaData(cf.keyspace.toString(),
                              cf.name.toString(),
                              ColumnFamilyType.create(cf.column_type.toString()),
                              ClockType.create(cf.clock_type.toString()),
                              comparator,
                              subcolumnComparator,
                              reconciler,
                              cf.comment.toString(),
                              cf.row_cache_size,
                              cf.preload_row_cache,
                              cf.key_cache_size,
                              cf.read_repair_chance,
                              cf.gc_grace_seconds,
                              cf.id,
                              column_metadata);
    }

    public boolean equals(Object obj)
    {
        if (obj == this)
        {
            return true;
        }
        else if (obj == null || obj.getClass() != getClass())
        {
            return false;
        }

        CFMetaData rhs = (CFMetaData) obj;
        return new EqualsBuilder()
            .append(tableName, rhs.tableName)
            .append(cfName, rhs.cfName)
            .append(cfType, rhs.cfType)
            .append(clockType, rhs.clockType)
            .append(comparator, rhs.comparator)
            .append(subcolumnComparator, rhs.subcolumnComparator)
            .append(reconciler, rhs.reconciler)
            .append(comment, rhs.comment)
            .append(rowCacheSize, rhs.rowCacheSize)
            .append(keyCacheSize, rhs.keyCacheSize)
            .append(readRepairChance, rhs.readRepairChance)
            .append(gcGraceSeconds, rhs.gcGraceSeconds)
            .append(cfId.intValue(), rhs.cfId.intValue())
            .append(column_metadata, rhs.column_metadata)
            .isEquals();
    }

    public int hashCode()
    {
        return new HashCodeBuilder(29, 1597)
            .append(tableName)
            .append(cfName)
            .append(cfType)
            .append(clockType)
            .append(comparator)
            .append(subcolumnComparator)
            .append(reconciler)
            .append(comment)
            .append(rowCacheSize)
            .append(keyCacheSize)
            .append(readRepairChance)
            .append(gcGraceSeconds)
            .append(cfId)
            .append(column_metadata)
            .toHashCode();
    }

    private static int nextId()
    {
        return idGen.getAndIncrement();
    }

    /**
     * @return the validator declared for the given column name, or null if none is defined.
     */
    public AbstractType getValueValidator(byte[] column)
    {
        ColumnDefinition columnDefinition = column_metadata.get(column);
        if (columnDefinition == null)
            return null;
        return columnDefinition.validator;
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.fluo.recipes.core.export; import java.util.Iterator; import com.google.common.base.Preconditions; import com.google.common.base.Strings; import org.apache.fluo.api.client.TransactionBase; import org.apache.fluo.api.client.scanner.CellScanner; import org.apache.fluo.api.data.Bytes; import org.apache.fluo.api.data.Bytes.BytesBuilder; import org.apache.fluo.api.data.Column; import org.apache.fluo.api.data.RowColumn; import org.apache.fluo.api.data.RowColumnValue; import org.apache.fluo.api.data.Span; import org.apache.fluo.recipes.core.types.StringEncoder; import org.apache.fluo.recipes.core.types.TypeLayer; import org.apache.fluo.recipes.core.types.TypedTransactionBase; /** * This class encapsulates a buckets serialization code. */ // This class intentionally package private. 
class ExportBucket { private static final String NOTIFICATION_CF = "fluoRecipes"; private static final String NOTIFICATION_CQ_PREFIX = "eq:"; private static final Column EXPORT_COL = new Column("e", "v"); private static final Column NEXT_COL = new Column("e", "next"); static Column newNotificationColumn(String queueId) { return new Column(NOTIFICATION_CF, NOTIFICATION_CQ_PREFIX + queueId); } private final TypedTransactionBase ttx; private final String qid; private final Bytes bucketRow; static String genBucketId(int bucket, int maxBucket) { Preconditions.checkArgument(bucket >= 0); Preconditions.checkArgument(maxBucket > 0); int bits = 32 - Integer.numberOfLeadingZeros(maxBucket); int bucketLen = bits / 4 + (bits % 4 > 0 ? 1 : 0); return Strings.padStart(Integer.toHexString(bucket), bucketLen, '0'); } static Bytes generateBucketRow(String qid, int bucket, int numBuckets) { return Bytes.of(qid + ":" + genBucketId(bucket, numBuckets)); } ExportBucket(TransactionBase tx, String qid, int bucket, int numBuckets) { // TODO encode in a more robust way... 
but for now fail early Preconditions.checkArgument(!qid.contains(":"), "Export QID can not contain :"); this.ttx = new TypeLayer(new StringEncoder()).wrap(tx); this.qid = qid; this.bucketRow = generateBucketRow(qid, bucket, numBuckets); } ExportBucket(TransactionBase tx, Bytes bucketRow) { this.ttx = new TypeLayer(new StringEncoder()).wrap(tx); int colonLoc = -1; for (int i = 0; i < bucketRow.length(); i++) { if (bucketRow.byteAt(i) == ':') { colonLoc = i; break; } } Preconditions.checkArgument(colonLoc != -1 && colonLoc != bucketRow.length(), "Invalid bucket row " + bucketRow); Preconditions.checkArgument(bucketRow.byteAt(bucketRow.length() - 1) == ':', "Invalid bucket row " + bucketRow); this.bucketRow = bucketRow.subSequence(0, bucketRow.length() - 1); this.qid = bucketRow.subSequence(0, colonLoc).toString(); } private static void encSeq(BytesBuilder bb, long l) { bb.append((byte) (l >>> 56)); bb.append((byte) (l >>> 48)); bb.append((byte) (l >>> 40)); bb.append((byte) (l >>> 32)); bb.append((byte) (l >>> 24)); bb.append((byte) (l >>> 16)); bb.append((byte) (l >>> 8)); bb.append((byte) (l >>> 0)); } private static long decodeSeq(Bytes seq) { return (((long) seq.byteAt(0) << 56) + ((long) (seq.byteAt(1) & 255) << 48) + ((long) (seq.byteAt(2) & 255) << 40) + ((long) (seq.byteAt(3) & 255) << 32) + ((long) (seq.byteAt(4) & 255) << 24) + ((seq.byteAt(5) & 255) << 16) + ((seq.byteAt(6) & 255) << 8) + ((seq.byteAt(7) & 255) << 0)); } public void add(long seq, byte[] key, byte[] value) { BytesBuilder builder = Bytes.builder(bucketRow.length() + 1 + key.length + 8).append(bucketRow).append(':') .append(key); encSeq(builder, seq); ttx.set(builder.toBytes(), EXPORT_COL, Bytes.of(value)); } /** * Computes the minimial row for a bucket */ private Bytes getMinimalRow() { return Bytes.builder(bucketRow.length() + 1).append(bucketRow).append(':').toBytes(); } public void notifyExportObserver() { ttx.mutate().row(getMinimalRow()).col(newNotificationColumn(qid)).weaklyNotify(); } 
/**
 * Returns an iterator over this bucket's pending export entries. When
 * {@code continueRow} is non-null the scan resumes at that row (inclusive),
 * keeping the bucket prefix as the end bound, so an interrupted export pass
 * can pick up where it left off.
 */
public Iterator<ExportEntry> getExportIterator(Bytes continueRow) {
  Span span;
  if (continueRow != null) {
    Span tmpSpan = Span.prefix(bucketRow);
    Span nextSpan = new Span(new RowColumn(continueRow, EXPORT_COL), true, tmpSpan.getEnd(),
        tmpSpan.isEndInclusive());
    span = nextSpan;
  } else {
    span = Span.prefix(bucketRow);
  }

  CellScanner scanner = ttx.scanner().over(span).fetch(EXPORT_COL).build();
  return new ExportIterator(scanner);
}

/**
 * Lazily decodes scanned cells into {@link ExportEntry} objects.
 * {@link #remove()} deletes the most recently returned entry from the queue.
 */
private class ExportIterator implements Iterator<ExportEntry> {

  private Iterator<RowColumnValue> rowIter;
  // row of the entry most recently returned by next(); target of remove()
  private Bytes lastRow;

  public ExportIterator(CellScanner scanner) {
    this.rowIter = scanner.iterator();
  }

  @Override
  public boolean hasNext() {
    return rowIter.hasNext();
  }

  @Override
  public ExportEntry next() {
    RowColumnValue rowColVal = rowIter.next();
    Bytes row = rowColVal.getRow();

    // row layout: <bucketRow>:<key><8-byte seq> -- peel key and seq off the end
    Bytes keyBytes = row.subSequence(bucketRow.length() + 1, row.length() - 8);
    Bytes seqBytes = row.subSequence(row.length() - 8, row.length());

    ExportEntry ee = new ExportEntry();

    ee.key = keyBytes.toArray();
    ee.seq = decodeSeq(seqBytes);
    // TODO maybe leave as Bytes?
    ee.value = rowColVal.getValue().toArray();

    lastRow = row;
    return ee;
  }

  @Override
  public void remove() {
    // NOTE(review): NPE if called before next(); presumably callers never do -- verify.
    ttx.mutate().row(lastRow).col(EXPORT_COL).delete();
  }
}

// Returns the row at which a previously interrupted export should resume, or null.
public Bytes getContinueRow() {
  return ttx.get(getMinimalRow(), NEXT_COL);
}

// Records the row of the given entry so the next export pass resumes from it.
public void setContinueRow(ExportEntry ee) {
  BytesBuilder builder = Bytes.builder(bucketRow.length() + 1 + ee.key.length + 8)
      .append(bucketRow).append(':').append(ee.key);
  encSeq(builder, ee.seq);
  Bytes nextRow = builder.toBytes();

  ttx.set(getMinimalRow(), NEXT_COL, nextRow);
}

// Clears the resume marker once the bucket has been fully processed.
public void clearContinueRow() {
  ttx.delete(getMinimalRow(), NEXT_COL);
}
}
/* * Licensed to The Apereo Foundation under one or more contributor license * agreements. See the NOTICE file distributed with this work for * additional information regarding copyright ownership. * * The Apereo Foundation licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except in * compliance with the License. You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * limitations under the License. * */ package org.unitime.timetable.onlinesectioning.status; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Set; import java.util.TreeSet; import org.cpsolver.ifs.util.DistanceMetric; import org.cpsolver.studentsct.online.expectations.OverExpectedCriterion; import org.unitime.localization.impl.Localization; import org.unitime.timetable.gwt.client.sectioning.SectioningStatusFilterBox.SectioningStatusFilterRpcRequest; import org.unitime.timetable.gwt.resources.StudentSectioningMessages; import org.unitime.timetable.gwt.server.DayCode; import org.unitime.timetable.gwt.server.Query; import org.unitime.timetable.gwt.shared.ClassAssignmentInterface; import org.unitime.timetable.gwt.shared.ClassAssignmentInterface.CourseAssignment; import org.unitime.timetable.onlinesectioning.AcademicSessionInfo; import org.unitime.timetable.onlinesectioning.OnlineSectioningAction; import org.unitime.timetable.onlinesectioning.OnlineSectioningHelper; import org.unitime.timetable.onlinesectioning.OnlineSectioningServer; import org.unitime.timetable.onlinesectioning.model.XAcademicAreaCode; import org.unitime.timetable.onlinesectioning.model.XCourse; import 
org.unitime.timetable.onlinesectioning.model.XCourseRequest;
import org.unitime.timetable.onlinesectioning.model.XEnrollment;
import org.unitime.timetable.onlinesectioning.model.XEnrollments;
import org.unitime.timetable.onlinesectioning.model.XExpectations;
import org.unitime.timetable.onlinesectioning.model.XInstructor;
import org.unitime.timetable.onlinesectioning.model.XOffering;
import org.unitime.timetable.onlinesectioning.model.XRequest;
import org.unitime.timetable.onlinesectioning.model.XRoom;
import org.unitime.timetable.onlinesectioning.model.XSection;
import org.unitime.timetable.onlinesectioning.model.XStudent;
import org.unitime.timetable.onlinesectioning.model.XSubpart;

/**
 * Online sectioning action that collects enrollment details for one course,
 * optionally restricted to a single class and/or to students matching a text
 * query and a sectioning-status filter. For every matching course request it
 * builds a {@link ClassAssignmentInterface.Enrollment} including the student,
 * reservation/approval info, and per-class assignment details (times, rooms,
 * instructors, distance and overlap conflicts).
 *
 * @author Tomas Muller
 */
public class FindEnrollmentAction implements OnlineSectioningAction<List<ClassAssignmentInterface.Enrollment>> {
	private static final long serialVersionUID = 1L;
	protected static StudentSectioningMessages MSG = Localization.create(StudentSectioningMessages.class);
	// free-text student/course query; every candidate request must match it
	protected Query iQuery;
	// course to report on and (optional) class to further restrict to
	protected Long iCourseId, iClassId;
	protected boolean iConsentToDoCourse;
	// permission flags copied straight onto each returned student record
	protected boolean iCanShowExtIds = false, iCanRegister = false, iCanUseAssistant = false;

	/**
	 * Configures the action; returns {@code this} for chaining.
	 */
	public FindEnrollmentAction withParams(String query, Long courseId, Long classId, boolean isConsentToDoCourse, boolean canShowExtIds, boolean canRegister, boolean canUseAssistant) {
		iQuery = new Query(query);
		iCourseId = courseId;
		iClassId = classId;
		iConsentToDoCourse = isConsentToDoCourse;
		iCanShowExtIds = canShowExtIds;
		iCanRegister = canRegister;
		iCanUseAssistant = canUseAssistant;
		return this;
	}

	// optional student filter; when set, only students it matches are returned
	protected SectioningStatusFilterRpcRequest iFilter = null;

	/** Sets the optional student filter; returns {@code this} for chaining. */
	public FindEnrollmentAction withFilter(SectioningStatusFilterRpcRequest filter) {
		iFilter = filter;
		return this;
	}

	public Query query() { return iQuery; }

	public Long courseId() { return iCourseId; }

	public Long classId() { return iClassId; }

	public boolean isConsentToDoCourse() { return iConsentToDoCourse; }

	/**
	 * Walks all course requests of the course's offering, filters them down to the
	 * requested course/class/query/filter, and converts each surviving request into
	 * a populated enrollment record. Returns an empty list when the course or
	 * offering cannot be resolved.
	 */
	@Override
	public List<ClassAssignmentInterface.Enrollment> execute(OnlineSectioningServer server, OnlineSectioningHelper helper) {
		List<ClassAssignmentInterface.Enrollment> ret = new ArrayList<ClassAssignmentInterface.Enrollment>();
		XCourse course = server.getCourse(courseId());
		if (course == null) return ret;
		XOffering offering = server.getOffering(course.getOfferingId());
		if (offering == null) return ret;
		XEnrollments enrollments = server.getEnrollments(course.getOfferingId());
		DistanceMetric m = server.getDistanceMetric();
		XExpectations expectations = server.getExpectations(offering.getOfferingId());
		OverExpectedCriterion overExp = server.getOverExpectedCriterion();
		AcademicSessionInfo session = server.getAcademicSession();
		// resolve the optional status filter to a concrete set of student ids (null = no filter)
		Set<Long> studentIds = (iFilter == null ? null : server.createAction(SectioningStatusFilterAction.class).forRequest(iFilter).getStudentIds(server, helper));
		for (XCourseRequest request: enrollments.getRequests()) {
			// skip requests enrolled into a different course of the same offering
			if (request.getEnrollment() != null && !request.getEnrollment().getCourseId().equals(courseId())) continue;
			// when a class is given, the enrollment must include that class
			if (classId() != null && request.getEnrollment() != null && !request.getEnrollment().getSectionIds().contains(classId())) continue;
			// unenrolled requests must at least ask for this course
			if (request.getEnrollment() == null && !request.getCourseIds().contains(course)) continue;
			if (studentIds != null && !studentIds.contains(request.getStudentId())) continue;
			XStudent student = server.getStudent(request.getStudentId());
			if (student == null) continue;
			// unenrolled requests of students that can no longer be assigned are hidden
			if (request.getEnrollment() == null && !student.canAssign(request)) continue;
			if (!query().match(new StatusPageSuggestionsAction.CourseRequestMatcher(session, course, student, offering, request, isConsentToDoCourse()))) continue;

			// --- student record -------------------------------------------------
			ClassAssignmentInterface.Student st = new ClassAssignmentInterface.Student();
			st.setId(student.getStudentId());
			st.setSessionId(session.getUniqueId());
			st.setExternalId(student.getExternalId());
			st.setCanShowExternalId(iCanShowExtIds);
			st.setCanRegister(iCanRegister);
			st.setCanUseAssistant(iCanUseAssistant);
			st.setName(student.getName());
			for (XAcademicAreaCode ac: student.getAcademicAreaClasiffications()) {
				st.addArea(ac.getArea());
				st.addClassification(ac.getCode());
			}
			for (XAcademicAreaCode ac: student.getMajors()) {
				st.addMajor(ac.getCode());
			}
			for (String gr: student.getGroups()) {
				st.addGroup(gr);
			}
			for (String acc: student.getAccomodations()) {
				st.addAccommodation(acc);
			}

			// --- enrollment record ----------------------------------------------
			ClassAssignmentInterface.Enrollment e = new ClassAssignmentInterface.Enrollment();
			e.setStudent(st);
			// priorities are 0-based internally, 1-based in the UI
			e.setPriority(1 + request.getPriority());
			CourseAssignment c = new CourseAssignment();
			c.setCourseId(course.getCourseId());
			c.setSubject(course.getSubjectArea());
			c.setCourseNbr(course.getCourseNumber());
			c.setTitle(course.getTitle());
			e.setCourse(c);
			e.setWaitList(request.isWaitlist());
			if (request.getEnrollment() == null)
				e.setEnrollmentMessage(request.getEnrollmentMessage());
			// when this course is not the student's first choice, show the first choice as alternative
			if (!request.getCourseIds().get(0).equals(course))
				e.setAlternative(request.getCourseIds().get(0).getCourseName());
			if (request.isAlternative()) {
				// NOTE(review): no break -- when several primary requests are unassigned
				// the LAST one wins as the displayed alternative; confirm intended.
				for (XRequest r: student.getRequests()) {
					if (r instanceof XCourseRequest && !r.isAlternative() && ((XCourseRequest)r).getEnrollment() == null) {
						e.setAlternative(((XCourseRequest)r).getCourseIds().get(0).getCourseName());
					}
				}
			}
			if (request.getTimeStamp() != null) e.setRequestedDate(request.getTimeStamp());
			if (request.getEnrollment() != null) {
				if (request.getEnrollment().getReservation() != null) {
					// map the reservation type onto its localized label
					switch (request.getEnrollment().getReservation().getType()) {
					case Individual: e.setReservation(MSG.reservationIndividual()); break;
					case Group: e.setReservation(MSG.reservationGroup()); break;
					case Course: e.setReservation(MSG.reservationCourse()); break;
					case Curriculum: e.setReservation(MSG.reservationCurriculum()); break;
					}
				}
				if (request.getEnrollment().getTimeStamp() != null) e.setEnrolledDate(request.getEnrollment().getTimeStamp());
				if (request.getEnrollment().getApproval() != null) {
					e.setApprovedDate(request.getEnrollment().getApproval().getTimeStamp());
					e.setApprovedBy(request.getEnrollment().getApproval().getName());
				}

				// --- per-class assignment details -------------------------------
				for (XSection section: offering.getSections(request.getEnrollment())) {
					ClassAssignmentInterface.ClassAssignment a = e.getCourse().addClassAssignment();
					a.setAlternative(request.isAlternative());
					a.setClassId(section.getSectionId());
					a.setSubpart(section.getSubpartName());
					a.setSection(section.getName(course.getCourseId()));
					a.setClassNumber(section.getName(-1l));
					a.setCancelled(section.isCancelled());
					// {current enrollment, limit}
					a.setLimit(new int[] {enrollments.countEnrollmentsForSection(section.getSectionId()), section.getLimit()});
					if (section.getTime() != null) {
						for (DayCode d : DayCode.toDayCodes(section.getTime().getDays())) a.addDay(d.getIndex());
						a.setStart(section.getTime().getSlot());
						a.setLength(section.getTime().getLength());
						a.setBreakTime(section.getTime().getBreakTime());
						a.setDatePattern(section.getTime().getDatePatternName());
					}
					if (section.getNrRooms() > 0) {
						for (XRoom rm: section.getRooms()) {
							a.addRoom(rm.getName());
						}
					}
					if (section.getInstructors() != null) {
						for (XInstructor instructor: section.getInstructors()) {
							a.addInstructor(instructor.getName());
							a.addInstructoEmail(instructor.getEmail());
						}
					}
					if (section.getParentId() != null)
						a.setParentSection(offering.getSection(section.getParentId()).getName(course.getCourseId()));
					a.setSubpartId(section.getSubpartId());
					a.addNote(course.getNote());
					a.addNote(section.getNote());
					XSubpart subpart = offering.getSubpart(section.getSubpartId());
					a.setCredit(subpart.getCredit(course.getCourseId()));

					// scan the student's other enrollments for back-to-back distance
					// and (allowed) time-overlap conflicts with this section
					int dist = 0;
					String from = null;
					TreeSet<String> overlap = new TreeSet<String>();
					for (XRequest q: student.getRequests()) {
						if (q instanceof XCourseRequest) {
							XEnrollment otherEnrollment = ((XCourseRequest)q).getEnrollment();
							if (otherEnrollment == null) continue;
							XOffering otherOffering = server.getOffering(otherEnrollment.getOfferingId());
							for (XSection otherSection: otherOffering.getSections(otherEnrollment)) {
								if (otherSection.equals(section) || otherSection.getTime() == null) continue;
								int d = otherSection.getDistanceInMinutes(section, m);
								if (d > dist) {
									// remember the farthest section's rooms as the "from" label
									dist = d;
									from = "";
									for (Iterator<XRoom> k = otherSection.getRooms().iterator(); k.hasNext();)
										from += k.next().getName() + (k.hasNext() ? ", " : "");
								}
								// travel longer than the available break time is a distance conflict
								if (d > otherSection.getTime().getBreakTime()) {
									a.setDistanceConflict(true);
								}
								if (section.getTime() != null && section.getTime().hasIntersection(otherSection.getTime()) && !section.isToIgnoreStudentConflictsWith(offering.getDistributions(), otherSection.getSectionId())) {
									XCourse otherCourse = otherOffering.getCourse(otherEnrollment.getCourseId());
									XSubpart otherSubpart = otherOffering.getSubpart(otherSection.getSubpartId());
									overlap.add(MSG.clazz(otherCourse.getSubjectArea(), otherCourse.getCourseNumber(), otherSubpart.getName(), otherSection.getName(otherCourse.getCourseId())));
								}
							}
						}
					}
					if (!overlap.isEmpty()) {
						// join the overlapping class names using the localized first/middle/last templates
						String note = null;
						for (Iterator<String> j = overlap.iterator(); j.hasNext(); ) {
							String n = j.next();
							if (note == null)
								note = MSG.noteAllowedOverlapFirst(n);
							else if (j.hasNext())
								note += MSG.noteAllowedOverlapMiddle(n);
							else
								note += MSG.noteAllowedOverlapLast(n);
						}
						a.setOverlapNote(note);
					}
					a.setBackToBackDistance(dist);
					a.setBackToBackRooms(from);
					a.setSaved(true);
					if (a.getParentSection() == null)
						a.setParentSection(course.getConsentLabel());
					a.setExpected(overExp.getExpected(section.getLimit(), expectations.getExpectedSpace(section.getSectionId())));
				}
			}
			ret.add(e);
		}
		return ret;
	}

	@Override
	public String name() { return "find-enrollments"; }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.activemq.artemis.tests.integration.client;

import org.junit.Before;
import org.junit.Test;

import java.util.concurrent.CountDownLatch;

import javax.jms.BytesMessage;
import javax.jms.Connection;
import javax.jms.ConnectionFactory;
import javax.jms.DeliveryMode;
import javax.jms.JMSException;
import javax.jms.MessageProducer;
import javax.jms.Session;
import javax.jms.Topic;
import javax.jms.TopicSubscriber;
import javax.naming.Context;
import javax.naming.NamingException;

import org.apache.activemq.artemis.tests.integration.IntegrationTestLogger;
import org.apache.activemq.artemis.tests.util.JMSTestBase;

/**
 * Integration test: one transacted producer pushes large bytes messages to a topic
 * while several durable subscribers consume them; verifies that flow control lets
 * every consumer receive the full message count without stalling.
 */
public class FlowControlOnIgnoreLargeMessageBodyTest extends JMSTestBase {

   IntegrationTestLogger log = IntegrationTestLogger.LOGGER;

   private Topic topic;

   private static int TOTAL_MESSAGES_COUNT = 20000;

   // body size per message (150 KiB)
   private static int MSG_SIZE = 150 * 1024;

   private final int CONSUMERS_COUNT = 5;

   // int property carrying the 1-based message index, checked by consumers
   private static final String ATTR_MSG_COUNTER = "msgIdex";

   // per-receive timeout (ms); a null receive within it flags an error
   protected int receiveTimeout = 10000;

   // shared failure flag; any producer/consumer error aborts all threads
   private volatile boolean error = false;

   @Override
   @Before
   public void setUp() throws Exception {
      super.setUp();
      jmsServer.createTopic(true, "topicIn", "/topic/topicIn");
      topic = (Topic) namingContext.lookup("/topic/topicIn");
   }

   @Override
   protected boolean usePersistence() {
      return false;
   }

   /**
    * LoadProducer: sends messagesCount bytes messages in a transacted session,
    * committing every 10 messages, until done, stopped, or a global error.
    */
   class LoadProducer extends Thread {

      private final ConnectionFactory cf;

      private final Topic topic;

      private final int messagesCount;

      private volatile boolean requestForStop = false;

      private volatile boolean stopped = false;

      private int sentMessages = 0;

      LoadProducer(final String name, final Topic topic, final ConnectionFactory cf, final int messagesCount) throws Exception {
         super(name);
         this.cf = cf;
         this.topic = topic;
         this.messagesCount = messagesCount;
      }

      public void sendStopRequest() {
         stopped = false;
         requestForStop = true;
      }

      public boolean isStopped() {
         return stopped;
      }

      @Override
      public void run() {
         stopped = false;
         Connection connection = null;
         Session session = null;
         MessageProducer prod;
         log.info("Starting producer for " + topic + " - " + getName());
         try {
            connection = cf.createConnection();
            session = connection.createSession(true, Session.SESSION_TRANSACTED);
            prod = session.createProducer(topic);
            prod.setDeliveryMode(DeliveryMode.PERSISTENT);
            for (int i = 1; i <= messagesCount && !requestForStop; i++) {
               if (error) {
                  break;
               }
               sentMessages++;
               BytesMessage msg = session.createBytesMessage();
               msg.setIntProperty(FlowControlOnIgnoreLargeMessageBodyTest.ATTR_MSG_COUNTER, i);
               msg.writeBytes(new byte[FlowControlOnIgnoreLargeMessageBodyTest.MSG_SIZE]);
               prod.send(msg);
               // commit in batches of 10 to keep the transaction small
               if (i % 10 == 0) {
                  session.commit();
               }
               if (i % 100 == 0) {
                  log.info("Address " + topic + " sent " + i + " messages");
               }
            }
            System.out.println("Ending producer for " + topic + " - " + getName() + " messages " + sentMessages);
         } catch (Exception e) {
            error = true;
            e.printStackTrace();
         } finally {
            // NOTE(review): session/connection may still be null here if
            // createConnection() threw; the resulting NPE is swallowed by the
            // catch(Exception) below -- consider null checks.
            try {
               session.commit();
            } catch (Exception e) {
               e.printStackTrace();
            }
            try {
               connection.close();
            } catch (Exception e) {
               e.printStackTrace();
            }
         }
         stopped = true;
      }

      public int getSentMessages() {
         return sentMessages;
      }
   }

   /**
    * LoadConsumer: durable subscriber that receives numberOfMessages in a transacted
    * session, verifying the per-message counter property and committing every 10.
    */
   class LoadConsumer extends Thread {

      private final ConnectionFactory cf;

      private final Topic topic;

      private volatile boolean requestForStop = false;

      private volatile boolean stopped = false;

      private volatile int receivedMessages = 0;

      private final int numberOfMessages;

      private int receiveTimeout = 0;

      // counted down once the durable subscription exists, so the producer
      // only starts after every consumer is registered
      private final CountDownLatch consumerCreated;

      LoadConsumer(final CountDownLatch consumerCreated,
                   final String name,
                   final Topic topic,
                   final ConnectionFactory cf,
                   final int receiveTimeout,
                   final int numberOfMessages) {
         super(name);
         this.cf = cf;
         this.topic = topic;
         this.receiveTimeout = receiveTimeout;
         this.numberOfMessages = numberOfMessages;
         this.consumerCreated = consumerCreated;
      }

      public void sendStopRequest() {
         stopped = false;
         requestForStop = true;
      }

      public boolean isStopped() {
         return stopped;
      }

      @Override
      public void run() {
         Connection connection = null;
         Session session = null;
         stopped = false;
         requestForStop = false;
         System.out.println("Starting consumer for " + topic + " - " + getName());
         try {
            connection = cf.createConnection();
            connection.setClientID(getName());
            connection.start();
            session = connection.createSession(true, Session.SESSION_TRANSACTED);
            TopicSubscriber subscriber = session.createDurableSubscriber(topic, getName());
            consumerCreated.countDown();
            int counter = 0;
            while (counter < numberOfMessages && !requestForStop && !error) {
               if (counter == 0) {
                  System.out.println("Starting to consume for " + topic + " - " + getName());
               }
               BytesMessage msg = (BytesMessage) subscriber.receive(receiveTimeout);
               if (msg == null) {
                  System.out.println("Cannot get message in specified timeout: " + topic + " - " + getName());
                  error = true;
               } else {
                  counter++;
                  // messages must arrive in order with a contiguous counter
                  if (msg.getIntProperty(FlowControlOnIgnoreLargeMessageBodyTest.ATTR_MSG_COUNTER) != counter) {
                     error = true;
                  }
               }
               if (counter % 10 == 0) {
                  session.commit();
               }
               if (counter % 100 == 0) {
                  log.info("## " + getName() + " " + topic + " received " + counter);
               }
               receivedMessages = counter;
            }
            session.commit();
         } catch (Exception e) {
            System.out.println("Exception in consumer " + getName() + " : " + e.getMessage());
            e.printStackTrace();
         } finally {
            if (session != null) {
               try {
                  session.close();
               } catch (JMSException e) {
                  System.err.println("Cannot close session " + e.getMessage());
               }
            }
            if (connection != null) {
               try {
                  connection.close();
               } catch (JMSException e) {
                  System.err.println("Cannot close connection " + e.getMessage());
               }
            }
         }
         stopped = true;
         System.out.println("Stopping consumer for " + topic + " - " + getName() + ", received " + getReceivedMessages());
      }

      public int getReceivedMessages() {
         return receivedMessages;
      }
   }

   /**
    * Starts all consumers, waits until every subscription exists, then runs the
    * producer and joins everything; asserts no error flag was raised and that
    * producer and consumers all handled the full message count.
    */
   @Test
   public void testFlowControl() {
      Context context = null;
      try {
         LoadProducer producer = new LoadProducer("producer", topic, cf, FlowControlOnIgnoreLargeMessageBodyTest.TOTAL_MESSAGES_COUNT);

         LoadConsumer[] consumers = new LoadConsumer[CONSUMERS_COUNT];

         CountDownLatch latch = new CountDownLatch(CONSUMERS_COUNT);

         for (int i = 0; i < consumers.length; i++) {
            consumers[i] = new LoadConsumer(latch, "consumer " + i, topic, cf, receiveTimeout, FlowControlOnIgnoreLargeMessageBodyTest.TOTAL_MESSAGES_COUNT);
         }

         for (LoadConsumer consumer : consumers) {
            consumer.start();
         }

         // make sure all durable subscriptions exist before producing
         waitForLatch(latch);

         producer.start();

         producer.join();

         for (LoadConsumer consumer : consumers) {
            consumer.join();
         }

         String errorMessage = null;
         if (producer.getSentMessages() != FlowControlOnIgnoreLargeMessageBodyTest.TOTAL_MESSAGES_COUNT) {
            errorMessage = "Producer did not send defined count of messages";
         } else {
            for (LoadConsumer consumer : consumers) {
               if (consumer.getReceivedMessages() != FlowControlOnIgnoreLargeMessageBodyTest.TOTAL_MESSAGES_COUNT) {
                  errorMessage = "Consumer did not send defined count of messages";
                  break;
               }
            }
         }

         if (errorMessage != null) {
            System.err.println(" ERROR ERROR ERROR ERROR ERROR ERROR ERROR ERROR ERROR ");
            System.err.println(errorMessage);
         } else {
            System.out.println(" OK ");
         }

         assertFalse(error);
         assertNull(errorMessage);
      } catch (Exception e) {
         // NOTE(review): an exception thrown before the asserts is only logged,
         // so the test would pass silently in that case -- consider rethrowing.
         log.warn(e.getMessage(), e);
      } finally {
         if (context != null) {
            try {
               context.close();
            } catch (NamingException ex) {
               log.warn(ex.getMessage(), ex);
            }
         }
      }
   }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.raid;

import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import java.util.zip.CRC32;

import junit.framework.TestCase;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.raid.Statistics.Counters;
import org.apache.hadoop.raid.protocol.PolicyInfo;

/**
 * Verifies {@link Statistics} collects raid statistics: exclusion patterns,
 * date parsing from partition-style path names, and per-policy source/parity
 * counters (including long-prefix and RS-code policy overrides).
 */
public class TestStatisticsCollector extends TestCase {
  final static Log LOG = LogFactory.getLog(TestStatisticsCollector.class);
  final static Random rand = new Random();
  final Configuration conf = new Configuration();

  // Paths containing any configured exclude pattern must be skipped;
  // the pattern is a substring match, not a path-component match.
  public void testExcludes() throws IOException {
    conf.set("raid.exclude.patterns", "/exclude/,/df_mf/");
    RaidState.Checker checker = new RaidState.Checker(
        new ArrayList<PolicyInfo>(), conf);
    assertEquals(true, checker.shouldExclude("/a/b/c/df_mf/foo/bar"));
    assertEquals(false, checker.shouldExclude("/a/b/c/xdf_mf/foo/bar"));
  }

  // mtimeFromName parses a "ds=YYYY-MM-DD" path component into a timestamp
  // (an optional hour suffix is ignored); -1 when no such component exists.
  public void testTimeFromName() {
    assertEquals(
        new Date(2011 - 1900, 0, 12).getTime(),
        RaidState.Checker.mtimeFromName("/a/b/c/ds=2011-01-12/d/e/f"));
    assertEquals(
        new Date(2011 - 1900, 0, 12).getTime(),
        RaidState.Checker.mtimeFromName("/a/b/c/ds=2011-01-12-02/d/e/f"));
    assertEquals(
        -1,
        RaidState.Checker.mtimeFromName("/a/b/c/ds=2011/d/e/f"));
    assertEquals(
        -1,
        RaidState.Checker.mtimeFromName("/a/b/c/d/e/f"));
  }

  // Spins up a 3-node mini DFS cluster and runs all collection scenarios on it.
  public void testCollect() throws Exception {
    MiniDFSCluster dfs = null;
    try {
      dfs = new MiniDFSCluster(conf, 3, true, null);
      dfs.waitActive();
      FileSystem fs = dfs.getFileSystem();
      verifySourceCollect(ErasureCodeType.RS, fs);
      verifySourceCollect(ErasureCodeType.XOR, fs);
      verifyParityCollect(ErasureCodeType.RS, fs);
      verifyParityCollect(ErasureCodeType.XOR, fs);
      verifyLongPrefixOverride(fs);
      verifyRsCodeOverride(fs);
    } finally {
      if (dfs != null) {
        dfs.shutdown();
      }
    }
  }

  /**
   * Creates files under a raided policy and a too-new policy and checks the
   * RAIDED / TOO_SMALL / TOO_NEW / NOT_RAIDED_BUT_SHOULD source counters.
   */
  public void verifySourceCollect(ErasureCodeType code, FileSystem fs)
      throws Exception {
    PolicyInfo info = new PolicyInfo("Test-Raided-" + code, conf);
    info.setSrcPath("/a/b");
    info.setProperty("modTimePeriod", "0");
    info.setProperty("targetReplication", "1");
    info.setErasureCode(code.toString());

    // files under /a/new are never old enough to be raided
    PolicyInfo infoTooNew = new PolicyInfo("Test-Too-New-" + code, conf);
    infoTooNew.setSrcPath("/a/new");
    infoTooNew.setProperty("modTimePeriod", "" + Long.MAX_VALUE);
    infoTooNew.setProperty("targetReplication", "1");
    infoTooNew.setErasureCode(code.toString());

    // createFile(fs, path, repl, numBlocks, blockSize)
    createFile(fs, new Path("/a/b/TOO_SMALL"), 1, 1, 1024L);
    createFile(fs, new Path("/a/b/d/TOO_SMALL"), 2, 2, 1024L);
    createFile(fs, new Path("/a/b/f/g/RAIDED"), 1, 3, 1024L);
    createFile(fs, new Path("/a/b/f/g/h/RAIDED"), 1, 4, 1024L);
    createFile(fs, new Path("/a/b/f/g/NOT_RAIDED"), 3, 5, 1024L);
    createFile(fs, new Path("/a/new/i/TOO_NEW"), 3, 4, 1024L);
    createFile(fs, new Path("/a/new/j/TOO_NEW"), 3, 5, 1024L);

    StatisticsCollector collector = new StatisticsCollector(null, conf);
    List<PolicyInfo> allPolicies = Arrays.asList(info, infoTooNew);
    collector.collect(allPolicies);
    Statistics st = collector.getRaidStatistics(code);
    LOG.info("Statistics collected " + st);
    LOG.info("Statistics html:\n " + st.htmlTable());
    Counters raided = st.getSourceCounters(RaidState.RAIDED);
    Counters tooSmall = st.getSourceCounters(RaidState.NOT_RAIDED_TOO_SMALL);
    Counters tooNew = st.getSourceCounters(RaidState.NOT_RAIDED_TOO_NEW);
    Counters notRaided = st.getSourceCounters(RaidState.NOT_RAIDED_BUT_SHOULD);
    // assertCounters(counters, numFiles, numBlocks, numBytes, numLogicalBytes)
    assertCounters(raided, 2, 7, 7 * 1024L, 7 * 1024L);
    assertCounters(tooSmall, 2, 3, 5 * 1024L, 3 * 1024L);
    assertCounters(tooNew, 2, 9, 27 * 1024L, 9 * 1024L);
    assertCounters(notRaided, 1, 5, 15 * 1024L, 5 * 1024L);
    fs.delete(new Path("/a"), true);
  }

  /**
   * Creates files directly under the parity destination path and checks they
   * are counted as parity even with an empty policy list.
   */
  public void verifyParityCollect(ErasureCodeType code, FileSystem fs)
      throws Exception {
    LOG.info("Start testing parity collect for " + code);
    Path parityPath = RaidNode.getDestinationPath(code, conf);
    fs.mkdirs(parityPath);
    createFile(fs, new Path(parityPath + "/a"), 1, 1, 1024L);
    createFile(fs, new Path(parityPath + "/b/c"), 2, 2, 1024L);
    createFile(fs, new Path(parityPath + "/d/e/f"), 3, 3, 1024L);
    List<PolicyInfo> empty = Collections.emptyList();
    StatisticsCollector collector = new StatisticsCollector(null, conf);
    collector.collect(empty);
    Statistics st = collector.getRaidStatistics(code);
    assertCounters(st.getParityCounters(), 3, 6, 14 * 1024L, 6 * 1024L);
    LOG.info("Statistics collected " + st);
    LOG.info("Statistics html:\n " + st.htmlTable());
    fs.delete(parityPath, true);
  }

  /**
   * When two policies overlap, the one with the longer source-path prefix
   * (/a/b/c over /a/b) must own the files under it; the other policy counts
   * them as NOT_RAIDED_OTHER_POLICY.
   */
  public void verifyLongPrefixOverride(FileSystem fs) throws Exception {
    PolicyInfo info = new PolicyInfo("Test", conf);
    info.setSrcPath("/a/b");
    info.setProperty("modTimePeriod", "0");
    info.setProperty("targetReplication", "1");
    info.setErasureCode("RS");

    PolicyInfo infoLongPrefix = new PolicyInfo("Long-Prefix", conf);
    infoLongPrefix.setSrcPath("/a/b/c");
    infoLongPrefix.setProperty("modTimePeriod", "0");
    infoLongPrefix.setProperty("targetReplication", "2");
    infoLongPrefix.setErasureCode("XOR");

    createFile(fs, new Path("/a/b/k"), 3, 4, 1024L);
    createFile(fs, new Path("/a/b/c/d"), 3, 5, 1024L);
    StatisticsCollector collector = new StatisticsCollector(null, conf);
    List<PolicyInfo> allPolicies = Arrays.asList(info, infoLongPrefix);
    collector.collect(allPolicies);
    Statistics xorSt = collector.getRaidStatistics(ErasureCodeType.XOR);
    Statistics rsSt = collector.getRaidStatistics(ErasureCodeType.RS);
    Counters xorShouldRaid =
        xorSt.getSourceCounters(RaidState.NOT_RAIDED_BUT_SHOULD);
    Counters rsShouldRaid =
        rsSt.getSourceCounters(RaidState.NOT_RAIDED_BUT_SHOULD);
    Counters rsOther =
        rsSt.getSourceCounters(RaidState.NOT_RAIDED_OTHER_POLICY);
    assertCounters(xorShouldRaid, 1, 5, 15 * 1024L, 5 * 1024L);
    assertCounters(rsShouldRaid, 1, 4, 12 * 1024L, 4 * 1024L);
    assertCounters(rsOther, 1, 5, 15 * 1024L, 5 * 1024L);
    fs.delete(new Path("/a"), true);
  }

  /**
   * When prefixes tie (a glob /a/b/* vs /a/b/c), the RS-coded policy must win
   * over the XOR-coded one for the overlapping files.
   */
  public void verifyRsCodeOverride(FileSystem fs) throws Exception {
    PolicyInfo info = new PolicyInfo("Test", conf);
    info.setSrcPath("/a/b/*");
    info.setProperty("modTimePeriod", "0");
    info.setProperty("targetReplication", "1");
    info.setErasureCode("XOR");

    PolicyInfo infoLongPrefix = new PolicyInfo("Long-Prefix", conf);
    infoLongPrefix.setSrcPath("/a/b/c");
    infoLongPrefix.setProperty("modTimePeriod", "0");
    infoLongPrefix.setProperty("targetReplication", "2");
    infoLongPrefix.setErasureCode("RS");

    createFile(fs, new Path("/a/b/k"), 3, 4, 1024L);
    createFile(fs, new Path("/a/b/c/d"), 3, 5, 1024L);
    StatisticsCollector collector = new StatisticsCollector(null, conf);
    List<PolicyInfo> allPolicies = Arrays.asList(info, infoLongPrefix);
    collector.collect(allPolicies);
    Statistics xorSt = collector.getRaidStatistics(ErasureCodeType.XOR);
    Statistics rsSt = collector.getRaidStatistics(ErasureCodeType.RS);
    Counters xorShouldRaid =
        xorSt.getSourceCounters(RaidState.NOT_RAIDED_BUT_SHOULD);
    Counters xorOther =
        xorSt.getSourceCounters(RaidState.NOT_RAIDED_OTHER_POLICY);
    Counters rsShouldRaid =
        rsSt.getSourceCounters(RaidState.NOT_RAIDED_BUT_SHOULD);
    assertCounters(xorShouldRaid, 1, 4, 12 * 1024L, 4 * 1024L);
    assertCounters(xorOther, 1, 5, 15 * 1024L, 5 * 1024L);
    assertCounters(rsShouldRaid, 1, 5, 15 * 1024L, 5 * 1024L);
    fs.delete(new Path("/a"), true);
  }

  // Asserts all four fields of a Counters in one call.
  private void assertCounters(Counters counters, long numFiles, long numBlocks,
      long numBytes, long numLogicalBytes) {
    assertEquals(numFiles, counters.getNumFiles());
    assertEquals(numBlocks, counters.getNumBlocks());
    assertEquals(numBytes, counters.getNumBytes());
    assertEquals(numLogicalBytes, counters.getNumLogical());
  }

  /**
   * Creates a file of numBlocks full blocks of random data and returns its CRC32.
   */
  private static long createFile(
      FileSystem fileSys, Path name, int repl, int numBlocks, long blocksize)
      throws IOException {
    CRC32 crc = new CRC32();
    int bufSize = fileSys.getConf().getInt("io.file.buffer.size", 4096);
    FSDataOutputStream stm = fileSys.create(
        name, true, bufSize, (short)repl, blocksize);
    // fill random data into file
    byte[] b = new byte[(int)blocksize];
    for (int i = 0; i < numBlocks; i++) {
      rand.nextBytes(b);
      stm.write(b);
      crc.update(b);
    }
    stm.close();
    return crc.getValue();
  }
}
/*
 * Copyright 2014 Red Hat, Inc.
 *
 * Red Hat licenses this file to you under the Apache License, version 2.0
 * (the "License"); you may not use this file except in compliance with the
 * License. You may obtain a copy of the License at:
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package io.vertx.blueprint.kue.service;

// NOTE: the import list below is emitted by the service-proxy code generator
// and contains duplicates; duplicate single-type imports are legal in Java
// and are kept as generated.
import io.vertx.blueprint.kue.service.JobService;
import io.vertx.core.Vertx;
import io.vertx.core.Handler;
import io.vertx.core.AsyncResult;
import io.vertx.core.eventbus.EventBus;
import io.vertx.core.eventbus.Message;
import io.vertx.core.eventbus.MessageConsumer;
import io.vertx.core.eventbus.DeliveryOptions;
import io.vertx.core.eventbus.ReplyException;
import io.vertx.core.json.JsonObject;
import io.vertx.core.json.JsonArray;
import java.util.Collection;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.stream.Collectors;
import io.vertx.serviceproxy.ProxyHelper;
import io.vertx.serviceproxy.ProxyHandler;
import io.vertx.serviceproxy.ServiceException;
import io.vertx.serviceproxy.ServiceExceptionMessageCodec;
import io.vertx.blueprint.kue.service.JobService;
import io.vertx.core.Vertx;
import io.vertx.blueprint.kue.queue.JobState;
import io.vertx.core.json.JsonArray;
import java.util.List;
import io.vertx.blueprint.kue.queue.Job;
import io.vertx.core.json.JsonObject;
import io.vertx.core.AsyncResult;
import io.vertx.core.Handler;

/*
  Generated Proxy code - DO NOT EDIT
  @author Roger the Robot
*/

/**
 * Event-bus side of the {@link JobService} proxy: receives JSON messages,
 * dispatches on the {@code "action"} header to the corresponding service
 * method, and replies with the (JSON-encodable) result or a
 * {@link ServiceException}.
 *
 * <p>Non-top-level handlers self-close after {@code timeoutSeconds} without
 * traffic (checked by a periodic timer against a {@link System#nanoTime()}
 * timestamp); top-level handlers never start the timer and live until
 * {@link #close()} is called.
 */
@SuppressWarnings({"unchecked", "rawtypes"})
public class JobServiceVertxProxyHandler extends ProxyHandler {

  // Default idle timeout, in seconds (5 minutes).
  public static final long DEFAULT_CONNECTION_TIMEOUT = 5 * 60; // 5 minutes

  private final Vertx vertx;
  private final JobService service;
  // Periodic timeout-check timer id, or -1 when no timer was started.
  private final long timerID;
  // Last access time in nanoseconds (System.nanoTime), updated on every message.
  private long lastAccessed;
  private final long timeoutSeconds;

  public JobServiceVertxProxyHandler(Vertx vertx, JobService service) {
    this(vertx, service, DEFAULT_CONNECTION_TIMEOUT);
  }

  public JobServiceVertxProxyHandler(Vertx vertx, JobService service, long timeoutInSecond) {
    this(vertx, service, true, timeoutInSecond);
  }

  public JobServiceVertxProxyHandler(Vertx vertx, JobService service, boolean topLevel, long timeoutSeconds) {
    this.vertx = vertx;
    this.service = service;
    this.timeoutSeconds = timeoutSeconds;
    try {
      this.vertx.eventBus().registerDefaultCodec(ServiceException.class,
          new ServiceExceptionMessageCodec());
    } catch (IllegalStateException ex) {
      // Codec was already registered by another proxy handler; safe to ignore.
    }
    // Only non-top-level handlers expire: the check runs at half the timeout
    // period, capped at 10 seconds.
    if (timeoutSeconds != -1 && !topLevel) {
      long period = timeoutSeconds * 1000 / 2;
      if (period > 10000) {
        period = 10000;
      }
      this.timerID = vertx.setPeriodic(period, this::checkTimedOut);
    } else {
      this.timerID = -1;
    }
    accessed();
  }

  /** Registers this handler as a consumer on the given event-bus address. */
  public MessageConsumer<JsonObject> registerHandler(String address) {
    MessageConsumer<JsonObject> consumer =
        vertx.eventBus().<JsonObject>consumer(address).handler(this);
    this.setConsumer(consumer);
    return consumer;
  }

  // Periodic-timer callback: close this handler once idle past the timeout.
  private void checkTimedOut(long id) {
    long now = System.nanoTime();
    if (now - lastAccessed > timeoutSeconds * 1000000000) {
      close();
    }
  }

  @Override
  public void close() {
    if (timerID != -1) {
      vertx.cancelTimer(timerID);
    }
    super.close();
  }

  // Record message activity for the idle-timeout check.
  private void accessed() {
    this.lastAccessed = System.nanoTime();
  }

  /**
   * Decodes the {@code "action"} header and invokes the matching service
   * method with arguments pulled out of the JSON body. Failures are replied
   * as {@link ServiceException}s; unexpected throwables are replied with
   * code 500 and rethrown.
   */
  public void handle(Message<JsonObject> msg) {
    try {
      JsonObject json = msg.body();
      String action = msg.headers().get("action");
      if (action == null) {
        throw new IllegalStateException("action not specified");
      }
      accessed();
      switch (action) {
        case "getJob": {
          service.getJob(json.getValue("id") == null ? null : (json.getLong("id").longValue()), res -> {
            if (res.failed()) {
              if (res.cause() instanceof ServiceException) {
                msg.reply(res.cause());
              } else {
                msg.reply(new ServiceException(-1, res.cause().getMessage()));
              }
            } else {
              // Job is not directly JSON-encodable; reply its JSON form.
              msg.reply(res.result() == null ? null : res.result().toJson());
            }
          });
          break;
        }
        case "removeJob": {
          service.removeJob(json.getValue("id") == null ? null : (json.getLong("id").longValue()), createHandler(msg));
          break;
        }
        case "existsJob": {
          service.existsJob(json.getValue("id") == null ? null : (json.getLong("id").longValue()), createHandler(msg));
          break;
        }
        case "getJobLog": {
          service.getJobLog(json.getValue("id") == null ? null : (json.getLong("id").longValue()), createHandler(msg));
          break;
        }
        case "jobRangeByState": {
          service.jobRangeByState((java.lang.String) json.getValue("state"),
              json.getValue("from") == null ? null : (json.getLong("from").longValue()),
              json.getValue("to") == null ? null : (json.getLong("to").longValue()),
              (java.lang.String) json.getValue("order"), res -> {
            if (res.failed()) {
              if (res.cause() instanceof ServiceException) {
                msg.reply(res.cause());
              } else {
                msg.reply(new ServiceException(-1, res.cause().getMessage()));
              }
            } else {
              // Encode the List<Job> result as a JsonArray of job JSON objects.
              msg.reply(new JsonArray(res.result().stream().map(Job::toJson).collect(Collectors.toList())));
            }
          });
          break;
        }
        case "jobRangeByType": {
          service.jobRangeByType((java.lang.String) json.getValue("type"),
              (java.lang.String) json.getValue("state"),
              json.getValue("from") == null ? null : (json.getLong("from").longValue()),
              json.getValue("to") == null ? null : (json.getLong("to").longValue()),
              (java.lang.String) json.getValue("order"), res -> {
            if (res.failed()) {
              if (res.cause() instanceof ServiceException) {
                msg.reply(res.cause());
              } else {
                msg.reply(new ServiceException(-1, res.cause().getMessage()));
              }
            } else {
              msg.reply(new JsonArray(res.result().stream().map(Job::toJson).collect(Collectors.toList())));
            }
          });
          break;
        }
        case "jobRange": {
          service.jobRange(json.getValue("from") == null ? null : (json.getLong("from").longValue()),
              json.getValue("to") == null ? null : (json.getLong("to").longValue()),
              (java.lang.String) json.getValue("order"), res -> {
            if (res.failed()) {
              if (res.cause() instanceof ServiceException) {
                msg.reply(res.cause());
              } else {
                msg.reply(new ServiceException(-1, res.cause().getMessage()));
              }
            } else {
              msg.reply(new JsonArray(res.result().stream().map(Job::toJson).collect(Collectors.toList())));
            }
          });
          break;
        }
        case "cardByType": {
          // Enum parameters travel on the wire as their name() string.
          service.cardByType((java.lang.String) json.getValue("type"),
              json.getString("state") == null ? null : io.vertx.blueprint.kue.queue.JobState.valueOf(json.getString("state")),
              createHandler(msg));
          break;
        }
        case "card": {
          service.card(json.getString("state") == null ? null : io.vertx.blueprint.kue.queue.JobState.valueOf(json.getString("state")),
              createHandler(msg));
          break;
        }
        case "completeCount": {
          service.completeCount((java.lang.String) json.getValue("type"), createHandler(msg));
          break;
        }
        case "failedCount": {
          service.failedCount((java.lang.String) json.getValue("type"), createHandler(msg));
          break;
        }
        case "inactiveCount": {
          service.inactiveCount((java.lang.String) json.getValue("type"), createHandler(msg));
          break;
        }
        case "activeCount": {
          service.activeCount((java.lang.String) json.getValue("type"), createHandler(msg));
          break;
        }
        case "delayedCount": {
          service.delayedCount((java.lang.String) json.getValue("type"), createHandler(msg));
          break;
        }
        case "getAllTypes": {
          service.getAllTypes(createListHandler(msg));
          break;
        }
        case "getIdsByState": {
          service.getIdsByState(json.getString("state") == null ? null : io.vertx.blueprint.kue.queue.JobState.valueOf(json.getString("state")),
              createListHandler(msg));
          break;
        }
        case "getWorkTime": {
          service.getWorkTime(createHandler(msg));
          break;
        }
        default: {
          throw new IllegalStateException("Invalid action: " + action);
        }
      }
    } catch (Throwable t) {
      // Reply the failure to the caller, then rethrow so the failure is also
      // visible on this side.
      msg.reply(new ServiceException(500, t.getMessage()));
      throw t;
    }
  }

  // Generic reply handler: replies the raw result (enum results are replied
  // by name so they are JSON-encodable).
  private <T> Handler<AsyncResult<T>> createHandler(Message msg) {
    return res -> {
      if (res.failed()) {
        if (res.cause() instanceof ServiceException) {
          msg.reply(res.cause());
        } else {
          msg.reply(new ServiceException(-1, res.cause().getMessage()));
        }
      } else {
        if (res.result() != null && res.result().getClass().isEnum()) {
          msg.reply(((Enum) res.result()).name());
        } else {
          msg.reply(res.result());
        }
      }
    };
  }

  // Reply handler for List results, encoded as a JsonArray.
  private <T> Handler<AsyncResult<List<T>>> createListHandler(Message msg) {
    return res -> {
      if (res.failed()) {
        if (res.cause() instanceof ServiceException) {
          msg.reply(res.cause());
        } else {
          msg.reply(new ServiceException(-1, res.cause().getMessage()));
        }
      } else {
        msg.reply(new JsonArray(res.result()));
      }
    };
  }

  // Reply handler for Set results (generated but unused by the current
  // JobService interface).
  private <T> Handler<AsyncResult<Set<T>>> createSetHandler(Message msg) {
    return res -> {
      if (res.failed()) {
        if (res.cause() instanceof ServiceException) {
          msg.reply(res.cause());
        } else {
          msg.reply(new ServiceException(-1, res.cause().getMessage()));
        }
      } else {
        msg.reply(new JsonArray(new ArrayList<>(res.result())));
      }
    };
  }

  // Reply handler for List<Character> (chars replied as ints; generated but
  // unused by the current JobService interface).
  private Handler<AsyncResult<List<Character>>> createListCharHandler(Message msg) {
    return res -> {
      if (res.failed()) {
        if (res.cause() instanceof ServiceException) {
          msg.reply(res.cause());
        } else {
          msg.reply(new ServiceException(-1, res.cause().getMessage()));
        }
      } else {
        JsonArray arr = new JsonArray();
        for (Character chr : res.result()) {
          arr.add((int) chr);
        }
        msg.reply(arr);
      }
    };
  }

  // Reply handler for Set<Character> (chars replied as ints; generated but
  // unused by the current JobService interface).
  private Handler<AsyncResult<Set<Character>>> createSetCharHandler(Message msg) {
    return res -> {
      if (res.failed()) {
        if (res.cause() instanceof ServiceException) {
          msg.reply(res.cause());
        } else {
          msg.reply(new ServiceException(-1, res.cause().getMessage()));
        }
      } else {
        JsonArray arr = new JsonArray();
        for (Character chr : res.result()) {
          arr.add((int) chr);
        }
        msg.reply(arr);
      }
    };
  }

  // Unchecked-cast helpers used by generated decoding code.
  private <T> Map<String, T> convertMap(Map map) {
    return (Map<String, T>) map;
  }

  private <T> List<T> convertList(List list) {
    return (List<T>) list;
  }

  private <T> Set<T> convertSet(List list) {
    return new HashSet<T>((List<T>) list);
  }
}
package mireka.smtp;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.StringReader;

import mireka.transmission.immediate.Rfc821Status;
import mireka.util.Multiline;

/**
 * This class represents an SMTP status which includes an enhanced status
 * code. Instances are immutable.
 *
 * @see <a href="http://tools.ietf.org/html/rfc3463">RFC 3463 - Enhanced Mail
 *      System Status Codes</a>
 * @see <a href="http://tools.ietf.org/html/rfc2034">RFC 2034 - SMTP Service
 *      Extension for Returning Enhanced Error Codes</a>
 * @see <a
 *      href="http://www.iana.org/assignments/smtp-enhanced-status-codes/smtp-enhanced-status-codes.xml">IANA
 *      Enhanced Status Code Registry</a>
 */
public class EnhancedStatus implements MailSystemStatus {
    // Well-known, frequently reused status constants.
    public static final EnhancedStatus TRANSIENT_SYSTEM_NOT_ACCEPTING_NETWORK_MESSAGES =
            new EnhancedStatus(421, "4.3.2", "System not accepting network messages");
    public static final EnhancedStatus TRANSIENT_DIRECTORY_SERVER_FAILURE =
            new EnhancedStatus(450, "4.4.3", "Directory server failure");
    public static final EnhancedStatus BAD_DESTINATION_SYSTEM_ADDRESS =
            new EnhancedStatus(550, "5.1.2", "Bad destination system address");
    public static final EnhancedStatus PERMANENT_UNABLE_TO_ROUTE =
            new EnhancedStatus(550, "5.4.4", "Unable to route");
    public static final EnhancedStatus TRANSIENT_LOCAL_ERROR_IN_PROCESSING =
            new EnhancedStatus(451, "4.3.0", "Local error in processing");
    public static final EnhancedStatus MAIL_SYSTEM_FULL = new EnhancedStatus(
            452, "4.3.1", "Mail system full");
    public static final EnhancedStatus BAD_DESTINATION_MAILBOX_ADDRESS_SYNTAX =
            new EnhancedStatus(553, "5.1.3", "Bad destination mailbox address syntax");
    public static final EnhancedStatus PERMANENT_INTERNAL_ERROR =
            new EnhancedStatus(554, "5.3.0", "Internal error");
    public static final EnhancedStatus BAD_MESSAGE_BODY = new EnhancedStatus(
            554, "5.6.0", "Message body is invalid");
    public static final EnhancedStatus INCORRECT_CONFIGURATION =
            new EnhancedStatus(554, "5.3.5", "System incorrectly configured");
    public static final EnhancedStatus BAD_CONNECTION = new EnhancedStatus(421,
            "4.4.2", "Bad connection");
    public static final EnhancedStatus MESSAGE_TOO_BIG = new EnhancedStatus(
            552, "5.3.4", "Message too big for system");
    public static final EnhancedStatus AUTHENTICATION_REQUIRED =
            new EnhancedStatus(530, "5.7.0", "Authentication required");

    /** Basic three-digit SMTP reply code, e.g. 550. */
    private final int smtpReplyCode;
    /** RFC 3463 enhanced status code, e.g. "5.1.2". */
    private final String enhancedStatusCode;
    /** Human-readable, possibly multiline, explanation. */
    private final String message;

    /**
     * Constructs a status from its three components.
     *
     * @param smtpReplyCode the basic SMTP reply code; must be positive
     * @param enhancedStatusCode the RFC 3463 code; must not be null
     * @param message the explanatory text; must not be null
     * @throws IllegalArgumentException if any argument is invalid
     */
    public EnhancedStatus(int smtpReplyCode, String enhancedStatusCode,
            String message) {
        if (smtpReplyCode <= 0 || enhancedStatusCode == null || message == null)
            throw new IllegalArgumentException();
        this.smtpReplyCode = smtpReplyCode;
        this.enhancedStatusCode = enhancedStatusCode;
        this.message = message;
    }

    /**
     * Constructs a status from a basic RFC 821 reply by deriving an
     * approximate enhanced status code from the reply code class.
     */
    public EnhancedStatus(Rfc821Status response) {
        this.smtpReplyCode = response.getSmtpReplyCode();
        this.message = response.getMessage();
        this.enhancedStatusCode = approximateEnhancedCodeFromSmtpReplyCode();
    }

    /**
     * Returns an enhanced status code which roughly corresponds to
     * {@link #smtpReplyCode}: the class-only code 2.0.0 / 4.0.0 / 5.0.0.
     *
     * @throws RuntimeException if the reply code is outside the 2xx/4xx/5xx
     *         ranges
     */
    private String approximateEnhancedCodeFromSmtpReplyCode() {
        if (200 <= smtpReplyCode && smtpReplyCode <= 299)
            return "2.0.0";
        else if (400 <= smtpReplyCode && smtpReplyCode <= 499)
            return "4.0.0";
        else if (500 <= smtpReplyCode && smtpReplyCode <= 599)
            return "5.0.0";
        else
            throw new RuntimeException("Unexpected: "
                    + Integer.toString(smtpReplyCode));
    }

    @Override
    public int getSmtpReplyCode() {
        return smtpReplyCode;
    }

    public String getEnhancedStatusCode() {
        return enhancedStatusCode;
    }

    @Override
    public String getMessage() {
        return message;
    }

    /**
     * It returns true, if repeating the action may help, i.e. the status is a
     * transient (4xx) failure.
     *
     * @throws RuntimeException if the status is not a failure at all
     */
    public boolean shouldRetry() {
        switch (getStatusClass()) {
        case TransientFailure:
            return true;
        case PermanentFailure:
            return false;
        default:
            throw new RuntimeException(getStatusClass().toString());
        }
    }

    // Classifies the basic reply code into success / transient / permanent.
    private StatusClass getStatusClass() {
        if (smtpReplyCode >= 200 && smtpReplyCode <= 299)
            return StatusClass.Success;
        else if (smtpReplyCode >= 400 && smtpReplyCode <= 499)
            return StatusClass.TransientFailure;
        else if (smtpReplyCode >= 500 && smtpReplyCode <= 599)
            return StatusClass.PermanentFailure;
        else
            throw new RuntimeException("Unexpected: "
                    + Integer.toString(smtpReplyCode));
    }

    /**
     * Returns the message text with every line prefixed by the enhanced
     * status code, as required for multiline SMTP replies; for an empty
     * message the enhanced status code alone is returned.
     */
    public String getMessagePrefixedWithEnhancedStatusCode() {
        try {
            if (message.isEmpty())
                return enhancedStatusCode;
            BufferedReader reader =
                    new BufferedReader(new StringReader(message));
            String line;
            StringBuilder buffer = new StringBuilder();
            boolean firstLine = true;
            while (null != (line = reader.readLine())) {
                if (!firstLine)
                    buffer.append("\r\n");
                firstLine = false;
                buffer.append(enhancedStatusCode);
                buffer.append(' ');
                buffer.append(line);
            }
            return buffer.toString();
        } catch (IOException e) {
            // Reading from an in-memory StringReader cannot really fail, but
            // preserve the cause instead of throwing a bare RuntimeException
            // so an impossible failure is still diagnosable.
            throw new RuntimeException(e);
        }
    }

    @Override
    public String getDiagnosticCode() {
        return Multiline.prependStatusCodeToMessage(smtpReplyCode,
                getMessagePrefixedWithEnhancedStatusCode());
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result
                + ((enhancedStatusCode == null) ? 0
                        : enhancedStatusCode.hashCode());
        result = prime * result + ((message == null) ? 0 : message.hashCode());
        result = prime * result + smtpReplyCode;
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        EnhancedStatus other = (EnhancedStatus) obj;
        if (enhancedStatusCode == null) {
            if (other.enhancedStatusCode != null)
                return false;
        } else if (!enhancedStatusCode.equals(other.enhancedStatusCode))
            return false;
        if (message == null) {
            if (other.message != null)
                return false;
        } else if (!message.equals(other.message))
            return false;
        if (smtpReplyCode != other.smtpReplyCode)
            return false;
        return true;
    }

    @Override
    public String toString() {
        return smtpReplyCode + " " + enhancedStatusCode + " " + message;
    }

    /** Reply code class per RFC 3463 terminology. */
    public static enum StatusClass {
        Success(1), TransientFailure(4), PermanentFailure(5);

        private final int code;

        StatusClass(int code) {
            this.code = code;
        }

        public int code() {
            return code;
        }
    }
}
/*
 * Copyright (c) 2009, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.  Oracle designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Oracle in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

/*
 * AWT Button is a DragSource and also a transferable object
 */

import java.awt.*;
import java.awt.datatransfer.*;
import java.awt.dnd.*;
import java.io.*;

/**
 * A Button that acts as both the DnD drag source and the Transferable
 * payload: drags transfer the button itself (cloned for COPY, removed from
 * its parent for MOVE).
 */
class DnDSource extends Button implements Transferable,
        DragGestureListener, DragSourceListener {

    // Application-defined flavor wrapping this class itself.
    private DataFlavor df;
    // Last user/drop action seen during the drag; read by getTransferData.
    private transient int dropAction;
    // All three actions are offered to the gesture recognizer.
    private final int dragOperation = DnDConstants.ACTION_COPY |
                                      DnDConstants.ACTION_MOVE |
                                      DnDConstants.ACTION_LINK;

    DragSource dragSource = new DragSource();

    DnDSource(String label) {
        super(label);
        setBackground(Color.yellow);
        setForeground(Color.blue);
        df = new DataFlavor(DnDSource.class, "DnDSource");
        dragSource.createDefaultDragGestureRecognizer(
                this,
                dragOperation,
                this
        );
        dragSource.addDragSourceListener(this);
    }

    // Picks a drag cursor matching the resolved action; custom MyCursor
    // cursors are used for MOVE/COPY/no-drop (project-local class).
    public void changeCursor(
            DragSourceContext dsc,
            int ra
    ) {
        java.awt.Cursor c = null;
        if ((ra & DnDConstants.ACTION_LINK) == DnDConstants.ACTION_LINK)
            c = DragSource.DefaultLinkDrop;
        else if ((ra & DnDConstants.ACTION_MOVE) == DnDConstants.ACTION_MOVE)
            c = MyCursor.MOVE;//DragSource.DefaultMoveDrop;
        else if ((ra & DnDConstants.ACTION_COPY) == DnDConstants.ACTION_COPY)
            c = MyCursor.COPY;
        else
            c = MyCursor.NO_DROP;
        dsc.setCursor(c);
    }

    /**
     * a Drag gesture has been recognized
     */
    public void dragGestureRecognized(DragGestureEvent dge) {
        System.out.println("starting Drag");
        try {
            if (DragSource.isDragImageSupported()) {
                System.out.println("starting Imaged Drag");
                // Drag with a painted 50x100 image (blue square + red oval)
                // supplied by the project-local ImageGenerator helper.
                dge.startDrag(
                        null,
                        new ImageGenerator(50, 100,
                                new Color(0xff, 0xff, 0xff, 0x00) ) {
                            @Override public void paint(Graphics gr) {
                                gr.translate(width/2, height/2);
                                ((Graphics2D)gr).setStroke(new BasicStroke(3));
                                int R = width/4+5;
                                gr.setColor(Color.BLUE);
                                gr.fillRect(-R, -R, 2*R, 2*R);
                                gr.setColor(Color.CYAN);
                                gr.drawRect(-R, -R, 2*R, 2*R);
                                gr.translate(10, 10);
                                R -= 5;
                                gr.setColor(Color.RED);
                                gr.fillOval(-R, -R, 2*R, 2*R);
                                gr.setColor(Color.MAGENTA);
                                gr.drawOval(-R, -R, 2*R, 2*R);
                            }
                        }.getImage(),
                        new Point(15, 40),
                        this,
                        this);
            } else {
                dge.startDrag(
                        null,
                        this,
                        this);
            }
        } catch (InvalidDnDOperationException e) {
            e.printStackTrace();
        }
    }

    /**
     * as the hotspot enters a platform dependent drop site
     */
    public void dragEnter(DragSourceDragEvent dsde) {
        System.out.println("[Source] dragEnter");
        changeCursor(
                dsde.getDragSourceContext(),
                dsde.getUserAction() & dsde.getDropAction()
        );
    }

    /**
     * as the hotspot moves over a platform dependent drop site
     */
    public void dragOver(DragSourceDragEvent dsde) {
        System.out.println("[Source] dragOver");
        changeCursor(
                dsde.getDragSourceContext(),
                dsde.getUserAction() & dsde.getDropAction()
        );
        dropAction = dsde.getUserAction() & dsde.getDropAction();
        System.out.println("dropAction = " + dropAction);
    }

    /**
     * as the hotspot exits a platform dependent drop site
     */
    public void dragExit(DragSourceEvent dse) {
        System.out.println("[Source] dragExit");
        changeCursor(
                dse.getDragSourceContext(),
                DnDConstants.ACTION_NONE
        );
    }

    /**
     * as the operation changes
     */
    // NOTE(review): this is not a DragSourceListener interface method; it
    // appears to be an extra hook and may never be called by AWT — confirm.
    public void dragGestureChanged(DragSourceDragEvent dsde) {
        System.out.println("[Source] dragGestureChanged");
        changeCursor(
                dsde.getDragSourceContext(),
                dsde.getUserAction() & dsde.getDropAction()
        );
        dropAction = dsde.getUserAction() & dsde.getDropAction();
        System.out.println("dropAction = " + dropAction);
    }

    /**
     * as the operation completes
     */
    public void dragDropEnd(DragSourceDropEvent dsde) {
        System.out.println("[Source] dragDropEnd");
    }

    public void dropActionChanged(DragSourceDragEvent dsde) {
        System.out.println("[Source] dropActionChanged");
        dropAction = dsde.getUserAction() & dsde.getDropAction();
        System.out.println("dropAction = " + dropAction);
    }

    // Transferable: only the single application-defined flavor is offered.
    public DataFlavor[] getTransferDataFlavors() {
        return new DataFlavor[]{df};
    }

    public boolean isDataFlavorSupported(DataFlavor sdf) {
        return df.equals(sdf);
    }

    /**
     * Produces the transfer payload according to the last observed
     * dropAction: COPY returns a clone (serialization round-trip fallback
     * when clone() is unsupported), MOVE detaches this button and leaves a
     * cyan "[empty]" label in its place, LINK returns this.
     */
    public Object getTransferData(DataFlavor tdf)
            throws UnsupportedFlavorException, IOException {
        Object copy = null;
        if( !df.equals(tdf) ){
            throw new UnsupportedFlavorException(tdf);
        }
        Container parent = getParent();
        switch (dropAction) {
            case DnDConstants.ACTION_COPY:
                try {
                    copy = this.clone();
                } catch (CloneNotSupportedException e) {
                    // Fallback: deep-copy via Java serialization.
                    ByteArrayOutputStream baos = new ByteArrayOutputStream();
                    ObjectOutputStream oos = new ObjectOutputStream(baos);
                    oos.writeObject(this);
                    ByteArrayInputStream bais =
                            new ByteArrayInputStream(baos.toByteArray());
                    ObjectInputStream ois = new ObjectInputStream(bais);
                    try {
                        copy = ois.readObject();
                    } catch (ClassNotFoundException cnfe) {
                        // do nothing
                    }
                }
                parent.add(this);
                return copy;
            case DnDConstants.ACTION_MOVE:
                synchronized (this) {
                    if (parent != null) {
                        parent.remove(this);
                        Label label = new Label("[empty]");
                        label.setBackground(Color.cyan);
                        label.setBounds(this.getBounds());
                        parent.add(label);
                    }
                }
                return this;
            case DnDConstants.ACTION_LINK:
                return this;
            default:
                return null;
        }
    }
}
package frameWork;

import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.PrintWriter;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;

/**
 * Prepares raw delimited feature rows ("," between columns, "\t" between
 * sub-values inside a column) for modelling, and encodes/decodes them in a
 * compact tag-byte binary format.
 *
 * Binary record layout (see GetPreparedByte/GetFeatures): a 4-byte feature
 * count, then one tag byte per value — Delimiter*/SubDelimiter* selects
 * main-column vs sub-value and the payload width (byte/short/int) — followed
 * by the payload, terminated by NewLineByte. Predictions and IDs use their
 * own tag bytes (PredictionFloat, IDInt).
 */
public class DataPreparer implements java.io.Serializable{

    /**
     *
     */
    private static final long serialVersionUID = 1L;

    // Per-column metadata/state (project-local FeatureInfo type).
    public FeatureInfo[] FeatureInfo;
    // Column separator in raw text rows.
    String Delimiter=",";
    // Separator between multiple values within one column.
    String SubDelimiter="\t";
    // Per-column maximum feature id (+1 gives the column's one-hot width).
    int [] ColumnSizes;
    // Indices of the columns actually used when generating features.
    int [] usedFeatures;
    public int [] usedFeaturesMaster;
    // Per-position extremes observed by GetFeaturesFromInt.
    int [] maxFeatures;
    int [] minFeatures;

    // Tag bytes of the binary format.
    static final byte NewLineByte=0;          // end of record
    static final byte DelimiterByteInt=1;     // main value, 4-byte payload
    static final byte SubDelimiterByteInt=2;  // sub value, 4-byte payload
    static final byte DelimiterByteShort=3;   // main value, 2-byte payload
    static final byte SubDelimiterByteShort=4;// sub value, 2-byte payload
    static final byte DelimiterByteByte=5;    // main value, 1-byte payload
    static final byte SubDelimiterByteByte=6; // sub value, 1-byte payload
    static final byte PredictionFloat=7;      // float prediction record
    static final byte IDInt=8;                // int id record

    // Allocates per-column FeatureInfo slots and extreme-tracking arrays.
    public void init(int FeatureCount){
        FeatureInfo=new FeatureInfo[FeatureCount];
        maxFeatures =new int[FeatureCount];;
        minFeatures =new int[FeatureCount];;
        for(int i=0;i<FeatureInfo.length;i++){
            FeatureInfo[i]=new FeatureInfo();
            FeatureInfo[i].myID=i;
            FeatureInfo[i].FeatureSummary.myID=i;
        }
    }

    // Finalizes every column's FeatureInfo (project-defined meaning).
    // NOTE(review): capital-F Finalize is unrelated to Object.finalize.
    public void Finalize(){
        for(int i=0;i<FeatureInfo.length;i++){
            FeatureInfo[i].Finalize();
        }
    }

    // Shrinks each column's value map toward `goal`, capped by `maxReduce`.
    public void Reduce(int maxReduce, int goal){
        for(int i=0;i<FeatureInfo.length;i++){
            FeatureInfo[i].FeatureSummary.ReduceMap(maxReduce, goal);
        }
    }

    // Feeds one raw row into the per-column statistics, splitting
    // multi-valued columns on SubDelimiter.
    public void Update(String [] RawFeatures){
        for(int i=0;i<RawFeatures.length;i++){
            if(RawFeatures[i].contains(SubDelimiter)){
                String [] values=RawFeatures[i].split(SubDelimiter);
                for(int j=0;j<values.length;j++){
                    FeatureInfo[i].Update(values[j]);
                }
            }else{
                FeatureInfo[i].Update(RawFeatures[i]);
            }
        }
    }

    // Builds each column's feature mapping with the given smoothing limit.
    public void PrepareFeatures(int minSmoothingLimit){
        for(int i=0;i<FeatureInfo.length;i++){
            FeatureInfo[i].PrepareFeatures(minSmoothingLimit);
            FeatureInfo[i].SetMaxID();
        }
    }

    // Applies the smoothing limit and (re)computes per-column sizes and
    // extreme trackers.
    public void SetSmoothingLimit(int minSmoothingLimit){
        maxFeatures =new int[FeatureInfo.length];
        minFeatures =new int[FeatureInfo.length];
        ColumnSizes=new int[FeatureInfo.length];
        for(int i=0;i<FeatureInfo.length;i++){
            FeatureInfo[i].SmoothingLimit=minSmoothingLimit;
            ColumnSizes[i] = FeatureInfo[i].SetMaxID();
            minFeatures[i]=Integer.MAX_VALUE;
            maxFeatures[i]=Integer.MIN_VALUE;
        }
    }

    // Text encoding of a raw row: maps every value through its column's
    // prepared-feature id, keeping the ","/SubDelimiter structure. Sub-values
    // that map to 0 are dropped; an all-zero group is emitted as "0".
    public String GetPreparedString(String [] RawFeatures){
        String Result="";
        for(int i=0;i<RawFeatures.length;i++){
            if(RawFeatures[i].contains(SubDelimiter)){
                String [] values=RawFeatures[i].split(SubDelimiter);
                int added=0;
                for(int j=0;j<values.length;j++){
                    int ReturnedFeature=FeatureInfo[i].GetPreparedFeature(values[j]);
                    if(ReturnedFeature!=0){
                        Result+=ReturnedFeature+SubDelimiter;
                        added++;
                    }
                }
                if(added==0){
                    Result+="0";
                }else{
                    // Strip the trailing SubDelimiter.
                    Result=Result.substring(0,Result.length()-1);
                }
            }else{
                Result+=FeatureInfo[i].GetPreparedFeature(RawFeatures[i]);
            }
            Result+=",";
        }
        //remove last comma as it's not needed
        Result=Result.substring(0,Result.length()-1);
        return Result;
    }

    // Copies `append` into `main` starting at Bcount; returns new Bcount.
    public int appendBytesToBytes(byte[] main, byte[] append , int Bcount){
        for(int i=0;i<append.length;i++){
            main[Bcount]=append[i];
            Bcount++;
        }
        return Bcount;
    }

    // Encodes a prediction as [PredictionFloat tag][4-byte IEEE float].
    public byte [] PredictionToByte(String value){
        float val=Float.parseFloat(value);
        if(Float.isNaN(val)){
            System.out.println("Nan value found in predictionToByte");
        }
        // NOTE(review): result of this call is discarded — dead statement.
        ByteBuffer.allocate(4).putFloat(val).array();
        byte [] predictionByte=ByteBuffer.allocate(4).putFloat(Float.parseFloat(value)).array();
        byte [] Result=new byte[predictionByte.length+1];
        Result[0]=PredictionFloat;
        for(int i=0;i<predictionByte.length;i++){
            Result[i+1]=predictionByte[i];
        }
        return Result;
    }

    // Encodes an id as [IDInt tag][4-byte int].
    public byte [] IDToByte(String value){
        float val=Float.parseFloat(value);
        if(Float.isNaN(val)){
            System.out.println("Nan value found in predictionToByte");
        }
        // NOTE(review): result of this call is discarded — dead statement.
        ByteBuffer.allocate(4).putFloat(val).array();
        byte [] predictionByte=ByteBuffer.allocate(4).putInt(Integer.parseInt(value)).array();
        byte [] Result=new byte[predictionByte.length+1];
        Result[0]=IDInt;
        for(int i=0;i<predictionByte.length;i++){
            Result[i+1]=predictionByte[i];
        }
        return Result;
    }

    // Writes a single byte at Bcount; returns new Bcount.
    public int appendBytesToByte(byte[] main, byte append , int Bcount){
        main[Bcount]=append;
        Bcount++;
        return Bcount;
    }

    // Writes a main-column value with the narrowest tag/payload that fits.
    public int AddMainValue(byte [] ByteWorking,int Bcount,int ReturnedFeature){
        if(ReturnedFeature<Byte.MAX_VALUE){
            Bcount=appendBytesToByte(ByteWorking,DelimiterByteByte,Bcount);
            byte [] val=UtilByte.ByteToBytes((byte) ReturnedFeature);
            Bcount=appendBytesToBytes(ByteWorking,val,Bcount);
        }else if(ReturnedFeature<Short.MAX_VALUE){
            Bcount=appendBytesToByte(ByteWorking,DelimiterByteShort,Bcount);
            byte [] val=UtilByte.ShortToBytes((short) ReturnedFeature);
            Bcount=appendBytesToBytes(ByteWorking,val,Bcount);
        }else{
            Bcount=appendBytesToByte(ByteWorking,DelimiterByteInt,Bcount);
            byte [] val=UtilByte.IntToBytes(ReturnedFeature);
            Bcount=appendBytesToBytes(ByteWorking,val,Bcount);
        }
        return Bcount;
    }

    // Writes a sub-value with the narrowest tag/payload that fits.
    public int AddSubValue(byte [] ByteWorking,int Bcount,int ReturnedFeature){
        if(ReturnedFeature<Byte.MAX_VALUE){
            Bcount=appendBytesToByte(ByteWorking,SubDelimiterByteByte,Bcount);
            byte [] val=UtilByte.ByteToBytes((byte) ReturnedFeature);
            Bcount=appendBytesToBytes(ByteWorking,val,Bcount);
        }else if(ReturnedFeature<Short.MAX_VALUE){
            Bcount=appendBytesToByte(ByteWorking,SubDelimiterByteShort,Bcount);
            byte [] val=UtilByte.ShortToBytes((short) ReturnedFeature);
            Bcount=appendBytesToBytes(ByteWorking,val,Bcount);
        }else{
            Bcount=appendBytesToByte(ByteWorking,SubDelimiterByteInt,Bcount);
            byte [] val=UtilByte.IntToBytes(ReturnedFeature);
            Bcount=appendBytesToBytes(ByteWorking,val,Bcount);
        }
        return Bcount;
    }

    // Binary encoding of a raw row: 4-byte count, tagged values, NewLineByte.
    public byte [] GetPreparedByte(String [] RawFeatures) throws InterruptedException{
        int SubDelimiterCount=0;
        for(int i=0;i<RawFeatures.length;i++){
            if(RawFeatures[i].contains(SubDelimiter)){
                SubDelimiterCount+= RawFeatures[i].length() - RawFeatures[i].replace(SubDelimiter, "").length();
            }
        }
        int FeatureCount=RawFeatures.length+SubDelimiterCount;
        // +1 for end byte and +4 for Feature count int
        byte [] ByteWorking=new byte[FeatureCount*5+1+4];
        int Bcount=0;
        // add count of Features
        byte [] val=UtilByte.IntToBytes(FeatureCount);
        Bcount=appendBytesToBytes(ByteWorking,val,Bcount);
        for(int i=0;i<RawFeatures.length;i++){
            if(RawFeatures[i].contains(SubDelimiter)){
                String [] values=RawFeatures[i].split(SubDelimiter);
                int added=0;
                for(int j=0;j<values.length;j++){
                    int ReturnedFeature=FeatureInfo[i].GetPreparedFeature(values[j]);
                    if(ReturnedFeature!=0){
                        if(added>0){
                            Bcount=AddSubValue( ByteWorking, Bcount, ReturnedFeature);
                        }else{
                            Bcount=AddMainValue( ByteWorking, Bcount, ReturnedFeature);
                        }
                        added++;
                    }
                }
                if(added==0){
                    Bcount=AddMainValue( ByteWorking, Bcount, 0);
                }else{
                    // NOTE(review): this decrement drops the last written
                    // payload byte; unlike the text variant there is no
                    // trailing delimiter to strip here — looks suspicious,
                    // TODO confirm against the decoder.
                    Bcount--;
                }
            }else{
                int ReturnedFeature=FeatureInfo[i].GetPreparedFeature(RawFeatures[i]);
                Bcount=AddMainValue( ByteWorking, Bcount, ReturnedFeature);
            }
            //Bcount=appendBytesToBytes(ByteWorking,DelimiterByte,Bcount);
        }
        //remove last comma as it's not needed
        //Bcount--;
        Bcount=appendBytesToByte(ByteWorking,NewLineByte,Bcount);
        byte [] Result=new byte[Bcount];
        for(int i=0;i<Result.length;i++){
            Result[i]=ByteWorking[i];
        }
        return Result;
    }

    // Refills the buffer from the channel when exhausted; -1 at end of input.
    public int CheckUpdateBuffer(FileChannel inChannel,ByteBuffer buffer){
        if(!buffer.hasRemaining()){
            buffer.clear();
            try {
                if(!(inChannel.read(buffer) > 0)){
                    return -1;
                }
            } catch (IOException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }
            buffer.flip();
        }
        return 0;
    }

    // Reads a big-endian int, handling the case where the 4 bytes straddle a
    // buffer refill boundary.
    public int BufferGetInt(FileChannel inChannel,ByteBuffer buffer){
        int bytesRemaining=buffer.remaining();
        if(bytesRemaining>=4){
            return buffer.getInt();
        }else{
            int Pos=0;
            int Result=0;
            // First partial read (at most 3 bytes remain here).
            for(int i=0;i<bytesRemaining;i++){
                if(i==0){
                    Result+=(buffer.get()<<24)&0xff000000;
                    Pos++;
                }else if(i==1){
                    Result+=(buffer.get()<<16)&0x00ff0000;
                    Pos++;
                }else if(i==2){
                    Result+=(buffer.get()<<8)&0x0000ff00;
                    Pos++;
                }else{
                    System.out.println("error int Dataprepare.BufferGetInt");
                }
            }
            if(CheckUpdateBuffer(inChannel,buffer)==-1){
                return -1;
            }
            // Remaining bytes after the refill.
            for(int i=Pos;i<4;i++){
                if(i==0){
                    Result+=(buffer.get()<<24)&0xff000000;
                    Pos++;
                }else if(i==1){
                    Result+=(buffer.get()<<16)&0x00ff0000;
                    Pos++;
                }else if(i==2){
                    Result+=(buffer.get()<<8)&0x0000ff00;
                    Pos++;
                }else if(i==3){
                    Result+=(buffer.get()<<0)&0x000000ff;
                    Pos++;
                }else{
                    System.out.println("error int Dataprepare.BufferGetInt");
                }
            }
            return Result;
        }
    }

    // Reads a big-endian float across a possible refill boundary.
    public float BufferGetFloat(FileChannel inChannel,ByteBuffer buffer){
        int bytesRemaining=buffer.remaining();
        if(bytesRemaining>=4){
            return buffer.getFloat();
        }else{
            byte [] BytePrediction=new byte[4];
            int Pos=0;
            float Result=0;
            for(int i=0;i<bytesRemaining;i++){
                BytePrediction[i]=buffer.get();
                Pos++;
            }
            if(CheckUpdateBuffer(inChannel,buffer)==-1){
                return -1;
            }
            for(int i=Pos;i<4;i++){
                BytePrediction[i]=buffer.get();
                Pos++;
            }
            // Reassemble the IEEE-754 bits big-endian.
            int asInt = ((BytePrediction[0] & 0xFF) << 24)
                      | ((BytePrediction[1] & 0xFF) << 16)
                      | ((BytePrediction[2] & 0xFF) << 8)
                      | (BytePrediction[3] & 0xFF);
            Result = Float.intBitsToFloat(asInt);
            return Result;
        }
    }

    // Reads a big-endian short across a possible refill boundary.
    public int BufferGetShort(FileChannel inChannel,ByteBuffer buffer){
        int bytesRemaining=buffer.remaining();
        if(bytesRemaining>=2){
            return buffer.getShort();
        }else if(bytesRemaining==1){
            int Result=0;
            Result+=(buffer.get()<<8)&0x0000ff00;
            if(CheckUpdateBuffer(inChannel,buffer)==-1){
                return -1;
            }
            Result+=(buffer.get()<<0)&0x000000ff;
            return Result;
        }else{
            int Result=0;
            if(CheckUpdateBuffer(inChannel,buffer)==-1){
                return -1;
            }
            Result+=(buffer.get()<<8)&0x0000ff00;
            Result+=(buffer.get()<<0)&0x000000ff;
            return Result;
        }
    }

    // Reads a [PredictionFloat][float] record; -1 on error/end of input.
    public float GetPredictionBinary(FileChannel inChannel,ByteBuffer buffer){
        if(CheckUpdateBuffer(inChannel,buffer)==-1){
            System.out.println("issue getting prediction from binary empty buffer/inChannel");
            return -1;
        }
        byte b=buffer.get();
        if(b==PredictionFloat){
            return BufferGetFloat(inChannel,buffer);
        }else{
            System.out.println("issue getting prediction from binary wrong first byte");
            return -1;
        }
    }

    // Reads an [IDInt][int] record; -1 on error/end of input.
    public int GetIDBinary(FileChannel inChannel,ByteBuffer buffer){
        if(CheckUpdateBuffer(inChannel,buffer)==-1){
            System.out.println("issue getting prediction from binary empty buffer/inChannel");
            return -1;
        }
        byte b=buffer.get();
        if(b==IDInt){
            return BufferGetInt(inChannel,buffer);
        }else{
            System.out.println("issue getting prediction from binary wrong first byte");
            return -1;
        }
    }

    // Decodes one binary record. Returns {values, columnPositions} or null
    // at end of input; sub-value tags share the current column position.
    public int [][] GetFeatures(FileChannel inChannel,ByteBuffer buffer){
        int FeatureCount=BufferGetInt(inChannel,buffer);
        if(FeatureCount==-1){
            return null;
        }
        int [] FullFeatureSet=new int [FeatureCount];
        int [] Positions=new int [FeatureCount];
        int Feature=0;
        int counter2=0;
        if(CheckUpdateBuffer(inChannel,buffer)==-1){
            return null;
        }
        while (true){
            if(CheckUpdateBuffer(inChannel,buffer)==-1){
                return null;
            }
            byte b=buffer.get();
            int val=0;
            boolean sub=false;
            if(b==DelimiterByteByte){
                if(!buffer.hasRemaining()){
                    if(CheckUpdateBuffer(inChannel,buffer)==-1){
                        return null;
                    }
                }
                val=buffer.get();
            }else if(b==DelimiterByteShort){
                val=BufferGetShort(inChannel,buffer);
            }else if(b==DelimiterByteInt){
                val=BufferGetInt(inChannel,buffer);
            }else if(b==NewLineByte){
                // End of record: hand back values and their column positions.
                int [][] FinalResult=new int[2][];
                FinalResult[0]=FullFeatureSet;
                FinalResult[1]=Positions;
                return FinalResult;
            }else if(b==SubDelimiterByteShort){
                val=BufferGetShort(inChannel,buffer);
                sub=true;
            }else if(b==SubDelimiterByteInt){
                val=BufferGetInt(inChannel,buffer);
                sub=true;
            }else if(b==SubDelimiterByteByte){
                if(!buffer.hasRemaining()){
                    if(CheckUpdateBuffer(inChannel,buffer)==-1){
                        return null;
                    }
                }
                val=buffer.get();
                sub=true;
            }
            FullFeatureSet[counter2]=val;
            Positions[counter2]=Feature;
            counter2++;
            if(!sub){
                Feature++;
            }
        }
    }

    // Same as GetFeatures but prints each step for debugging.
    public int [][] GetFeaturesLoud(FileChannel inChannel,ByteBuffer buffer){
        int FeatureCount=BufferGetInt(inChannel,buffer);
        System.out.println("FeatureCount="+FeatureCount);
        if(FeatureCount==-1){
            return null;
        }
        int [] FullFeatureSet=new int [FeatureCount];
        int [] Positions=new int [FeatureCount];
        int Feature=0;
        int counter2=0;
        if(CheckUpdateBuffer(inChannel,buffer)==-1){
            return null;
        }
        while (true){
            if(CheckUpdateBuffer(inChannel,buffer)==-1){
                return null;
            }
            byte b=buffer.get();
            System.out.println("b="+b);
            int val=0;
            boolean sub=false;
            if(b==DelimiterByteByte){
                if(!buffer.hasRemaining()){
                    if(CheckUpdateBuffer(inChannel,buffer)==-1){
                        return null;
                    }
                }
                val=buffer.get();
            }else if(b==DelimiterByteShort){
                val=BufferGetShort(inChannel,buffer);
            }else if(b==DelimiterByteInt){
                val=BufferGetInt(inChannel,buffer);
            }else if(b==NewLineByte){
                int [][] FinalResult=new int[2][];
                FinalResult[0]=FullFeatureSet;
                FinalResult[1]=Positions;
                return FinalResult;
            }else if(b==SubDelimiterByteShort){
                val=BufferGetShort(inChannel,buffer);
                sub=true;
            }else if(b==SubDelimiterByteInt){
                val=BufferGetInt(inChannel,buffer);
                sub=true;
            }else if(b==SubDelimiterByteByte){
                if(!buffer.hasRemaining()){
                    if(CheckUpdateBuffer(inChannel,buffer)==-1){
                        return null;
                    }
                }
                val=buffer.get();
                sub=true;
            }
            FullFeatureSet[counter2]=val;
            Positions[counter2]=Feature;
            counter2++;
            if(!sub){
                Feature++;
            }
        }
    }

    // Maps decoded (value, column) pairs to global one-hot offsets, keeping
    // only columns listed in the instance field usedFeatures; also updates
    // the min/max trackers.
    public int [] GetFeaturesFromInt(int [] Features, int [] Positions){
        int [] WorkingSet=new int [Features.length];
        int usedFeature=0;
        int count=0;
        int Position=0;
        for(int i=0;i<Positions.length;i++){
            while(usedFeatures[usedFeature]<Positions[i] && usedFeature<usedFeatures.length-1){
                usedFeature++;
            }
            if(usedFeatures[usedFeature]==Positions[i]){
                WorkingSet[count]=Position+FeatureInfo[Positions[i]].GetProcessedFeatureInt(Features[i]);
                if(WorkingSet[count]>maxFeatures[count]){
                    maxFeatures[count]=WorkingSet[count];
                }
                if(WorkingSet[count]<minFeatures[count]){
                    minFeatures[count]=WorkingSet[count];
                }
                if(Position>0){
                    WorkingSet[count]++;
                }
                Position+=FeatureInfo[usedFeatures[usedFeature]].GetMaxID()+1;
                count++;
            }
        }
        int [] Result=new int [count];
        for(int i=0;i<count;i++){
            Result[i]=WorkingSet[i];
        }
        return Result;
    }

    // Overload: same mapping but against a caller-supplied usedFeatures
    // array (parameter shadows the instance field).
    public int [] GetFeaturesFromInt(int [] Features, int [] Positions,int [] usedFeatures){
        int [] WorkingSet=new int [Features.length];
        int usedFeature=0;
        int count=0;
        int Position=0;
        for(int i=0;i<Positions.length;i++){
            while(usedFeatures[usedFeature]<Positions[i] && usedFeature<usedFeatures.length-1){
                usedFeature++;
            }
            if(usedFeatures[usedFeature]==Positions[i]){
                WorkingSet[count]=Position+FeatureInfo[Positions[i]].GetProcessedFeatureInt(Features[i]);
                if(WorkingSet[count]>maxFeatures[count]){
                    maxFeatures[count]=WorkingSet[count];
                }
                if(WorkingSet[count]<minFeatures[count]){
                    minFeatures[count]=WorkingSet[count];
                }
                if(Position>0){
                    WorkingSet[count]++;
                }
                Position+=FeatureInfo[usedFeatures[usedFeature]].GetMaxID()+1;
                count++;
            }
        }
        int [] Result=new int [count];
        for(int i=0;i<count;i++){
            Result[i]=WorkingSet[i];
        }
        return Result;
    }

    // Debug dump of per-position extremes. (Name typo kept: part of the
    // public interface.)
    public void PrintMaxMinFeautre(){
        for(int i=0;i<maxFeatures.length;i++){
            System.out.println(i + " max="+maxFeatures[i] + " min="+minFeatures[i]);
        }
    }

    // Maps a raw text row directly to global one-hot feature offsets for the
    // columns in usedFeatures.
    public int [] GetFeatures(String FeaturesString){
        //System.out.println(FeaturesString);
        String [] values=FeaturesString.split(Delimiter);
        if(values.length!=FeatureInfo.length){
            System.out.println("FeatureString has wrong number of features :" + FeaturesString);
        }
        int SubDelimiterCount= FeaturesString.length() - FeaturesString.replace(SubDelimiter, "").length();
        int [] FeatureList=new int [FeatureInfo.length+SubDelimiterCount];
        int Position=0;
        int counter=0;
        for(int i=0;i<usedFeatures.length;i++){
            if(values[usedFeatures[i]].contains(SubDelimiter)){
                //if(values[i].contains(SubDelimiter)){
                String [] valuesSub=values[usedFeatures[i]].split(SubDelimiter);
                //String [] valuesSub=values[i].split(SubDelimiter);
                int Added=0;
                for(int j=0;j<valuesSub.length;j++){
                    int temp=Position+FeatureInfo[usedFeatures[i]].GetProcessedFeature(valuesSub[j]);
                    if(temp!=Position){
                        FeatureList[counter]=temp;
                        counter++;
                        Added++;
                    }
                }
                if(Added==0){
                    FeatureList[counter]=Position;
                    counter++;
                }
            }else{
                int temp=Position+FeatureInfo[usedFeatures[i]].GetProcessedFeature(values[usedFeatures[i]]);
                //int temp=Position+FeatureInfo[usedFeatures[i]].GetProcessedFeature(values[i]);
                //System.out.println(usedFeatures[i]+" val="+values[usedFeatures[i]]+" temp="+temp+ " pos=" + Position);
                FeatureList[counter]=temp;
                counter++;
            }
            Position+=FeatureInfo[usedFeatures[i]].GetMaxID()+1;
        }
        int [] FinalFeatureList=new int [counter];
        for(int i=0;i<counter;i++){
            FinalFeatureList[i]=FeatureList[i];
        }
        return FinalFeatureList;
    }

    // Selects which columns to use; null means all columns.
    public void setUsedFeatures(int []usedFeatures){
        if(usedFeatures==null){
            this.usedFeatures= new int [FeatureInfo.length];
            for(int i=0;i<FeatureInfo.length;i++){
                this.usedFeatures[i]=i;
            }
        }else{
            this.usedFeatures=usedFeatures;
        }
    }

    // Total one-hot width across all used columns.
    public int GetTotalFeatureCount() {
        int TotalFeatureCount=0;
        for(int i=0;i<usedFeatures.length;i++){
            TotalFeatureCount+=ColumnSizes[usedFeatures[i]]+1;
        }
        return TotalFeatureCount;
    }

    // Total one-hot width across a sub-selection (indices into usedFeatures).
    public int GetTotalFeatureCount(int [] SubUsedFeatures) {
        int TotalFeatureCount=0;
        for(int i=0;i<SubUsedFeatures.length;i++){
            TotalFeatureCount+=ColumnSizes[usedFeatures[SubUsedFeatures[i]]]+1;
        }
        return TotalFeatureCount;
    }

    // Writes per-column offsets to File+"-Main" and each column's mapping to
    // File+"-i"; returns the total width.
    public int PrintTotalFeatureCount(String File) {
        int TotalFeatureCount=0;
        try {
            PrintWriter writer = new PrintWriter(File+"-Main");
            for(int i=0;i<usedFeatures.length;i++){
                writer.write(i+","+TotalFeatureCount + "\n");
                FeatureInfo[usedFeatures[i]].printMain(TotalFeatureCount,File+"-"+i);
                TotalFeatureCount+=ColumnSizes[usedFeatures[i]]+1;
            }
            writer.close();
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        }
        return TotalFeatureCount;
    }

    // Per-used-column one-hot widths.
    public int[] GetColumns() {
        int [] Columns= new int [usedFeatures.length];
        for(int i=0;i<usedFeatures.length;i++){
            Columns[i]=ColumnSizes[usedFeatures[i]]+1;
        }
        return Columns;
    }

    // Per-column widths for a sub-selection (indices into usedFeatures).
    public int[] GetColumns(int [] SubUsedFeatures) {
        int [] Columns= new int [SubUsedFeatures.length];
        for(int i=0;i<SubUsedFeatures.length;i++){
            Columns[i]=ColumnSizes[usedFeatures[SubUsedFeatures[i]]]+1;
        }
        return Columns;
    }
}
// // This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, vJAXB 2.1.10 in JDK 6 // See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a> // Any modifications to this file will be lost upon recompilation of the source schema. // Generated on: 2010.10.30 at 09:30:35 AM EDT // package jaxb; import java.util.ArrayList; import java.util.List; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlAttribute; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlIDREF; import javax.xml.bind.annotation.XmlSchemaType; import javax.xml.bind.annotation.XmlType; /** * <p>Java class for fx_surface_init_cube_common complex type. * * <p>The following schema fragment specifies the expected content contained within this class. * * <pre> * &lt;complexType name="fx_surface_init_cube_common"> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;choice> * &lt;element name="all"> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;attribute name="ref" use="required" type="{http://www.w3.org/2001/XMLSchema}IDREF" /> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * &lt;/element> * &lt;element name="primary"> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence minOccurs="0"> * &lt;element name="order" type="{http://www.collada.org/2005/11/COLLADASchema}fx_surface_face_enum" maxOccurs="6" minOccurs="6"/> * &lt;/sequence> * &lt;attribute name="ref" use="required" type="{http://www.w3.org/2001/XMLSchema}IDREF" /> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * &lt;/element> * &lt;element name="face" maxOccurs="6" minOccurs="6"> * &lt;complexType> * &lt;complexContent> * &lt;restriction 
base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;attribute name="ref" use="required" type="{http://www.w3.org/2001/XMLSchema}IDREF" /> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * &lt;/element> * &lt;/choice> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "fx_surface_init_cube_common", propOrder = { "faces", "primary", "all" }) public class FxSurfaceInitCubeCommon { @XmlElement(name = "face") protected List<FxSurfaceInitCubeCommon.Face> faces; protected FxSurfaceInitCubeCommon.Primary primary; protected FxSurfaceInitCubeCommon.All all; /** * Gets the value of the faces property. * * <p> * This accessor method returns a reference to the live list, * not a snapshot. Therefore any modification you make to the * returned list will be present inside the JAXB object. * This is why there is not a <CODE>set</CODE> method for the faces property. * * <p> * For example, to add a new item, do as follows: * <pre> * getFaces().add(newItem); * </pre> * * * <p> * Objects of the following type(s) are allowed in the list * {@link FxSurfaceInitCubeCommon.Face } * * */ public List<FxSurfaceInitCubeCommon.Face> getFaces() { if (faces == null) { faces = new ArrayList<FxSurfaceInitCubeCommon.Face>(); } return this.faces; } /** * Gets the value of the primary property. * * @return * possible object is * {@link FxSurfaceInitCubeCommon.Primary } * */ public FxSurfaceInitCubeCommon.Primary getPrimary() { return primary; } /** * Sets the value of the primary property. * * @param value * allowed object is * {@link FxSurfaceInitCubeCommon.Primary } * */ public void setPrimary(FxSurfaceInitCubeCommon.Primary value) { this.primary = value; } /** * Gets the value of the all property. * * @return * possible object is * {@link FxSurfaceInitCubeCommon.All } * */ public FxSurfaceInitCubeCommon.All getAll() { return all; } /** * Sets the value of the all property. 
* * @param value * allowed object is * {@link FxSurfaceInitCubeCommon.All } * */ public void setAll(FxSurfaceInitCubeCommon.All value) { this.all = value; } /** * <p>Java class for anonymous complex type. * * <p>The following schema fragment specifies the expected content contained within this class. * * <pre> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;attribute name="ref" use="required" type="{http://www.w3.org/2001/XMLSchema}IDREF" /> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "") public static class All { @XmlAttribute(required = true) @XmlIDREF @XmlSchemaType(name = "IDREF") protected Object ref; /** * Gets the value of the ref property. * * @return * possible object is * {@link Object } * */ public Object getRef() { return ref; } /** * Sets the value of the ref property. * * @param value * allowed object is * {@link Object } * */ public void setRef(Object value) { this.ref = value; } } /** * <p>Java class for anonymous complex type. * * <p>The following schema fragment specifies the expected content contained within this class. * * <pre> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;attribute name="ref" use="required" type="{http://www.w3.org/2001/XMLSchema}IDREF" /> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "") public static class Face { @XmlAttribute(required = true) @XmlIDREF @XmlSchemaType(name = "IDREF") protected Object ref; /** * Gets the value of the ref property. * * @return * possible object is * {@link Object } * */ public Object getRef() { return ref; } /** * Sets the value of the ref property. 
* * @param value * allowed object is * {@link Object } * */ public void setRef(Object value) { this.ref = value; } } /** * <p>Java class for anonymous complex type. * * <p>The following schema fragment specifies the expected content contained within this class. * * <pre> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence minOccurs="0"> * &lt;element name="order" type="{http://www.collada.org/2005/11/COLLADASchema}fx_surface_face_enum" maxOccurs="6" minOccurs="6"/> * &lt;/sequence> * &lt;attribute name="ref" use="required" type="{http://www.w3.org/2001/XMLSchema}IDREF" /> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "", propOrder = { "orders" }) public static class Primary { @XmlElement(name = "order") protected List<FxSurfaceFaceEnum> orders; @XmlAttribute(required = true) @XmlIDREF @XmlSchemaType(name = "IDREF") protected Object ref; /** * Gets the value of the orders property. * * <p> * This accessor method returns a reference to the live list, * not a snapshot. Therefore any modification you make to the * returned list will be present inside the JAXB object. * This is why there is not a <CODE>set</CODE> method for the orders property. * * <p> * For example, to add a new item, do as follows: * <pre> * getOrders().add(newItem); * </pre> * * * <p> * Objects of the following type(s) are allowed in the list * {@link FxSurfaceFaceEnum } * * */ public List<FxSurfaceFaceEnum> getOrders() { if (orders == null) { orders = new ArrayList<FxSurfaceFaceEnum>(); } return this.orders; } /** * Gets the value of the ref property. * * @return * possible object is * {@link Object } * */ public Object getRef() { return ref; } /** * Sets the value of the ref property. * * @param value * allowed object is * {@link Object } * */ public void setRef(Object value) { this.ref = value; } } }
package de.test.antennapod.ui;

import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.preference.PreferenceManager;
import android.test.ActivityInstrumentationTestCase2;
import android.test.FlakyTest;
import android.view.View;
import android.widget.ListView;

import com.robotium.solo.Solo;
import com.robotium.solo.Timeout;

import java.util.List;

import de.danoeh.antennapod.R;
import de.danoeh.antennapod.activity.MainActivity;
import de.danoeh.antennapod.core.feed.FeedItem;
import de.danoeh.antennapod.core.preferences.UserPreferences;
import de.danoeh.antennapod.core.service.playback.PlaybackService;
import de.danoeh.antennapod.core.service.playback.PlayerStatus;
import de.danoeh.antennapod.core.storage.DBReader;
import de.danoeh.antennapod.core.storage.DBWriter;
import de.danoeh.antennapod.core.storage.PodDBAdapter;

/**
 * test cases for starting and ending playback from the MainActivity and AudioPlayerActivity
 */
public class PlaybackTest extends ActivityInstrumentationTestCase2<MainActivity> {

    private static final String TAG = PlaybackTest.class.getSimpleName();

    // Positions of the nav-drawer list entries used by these tests.
    // NOTE(review): these indices must match the drawer layout of the app under
    // test — confirm against MainActivity's drawer adapter if tests start failing.
    public static final int EPISODES_DRAWER_LIST_INDEX = 1;
    public static final int QUEUE_DRAWER_LIST_INDEX = 0;

    // Robotium driver for the instrumented UI.
    private Solo solo;
    // Project helper that seeds feed/episode fixtures and exposes playback state.
    private UITestUtils uiTestUtils;
    private Context context;

    public PlaybackTest() {
        super(MainActivity.class);
    }

    /**
     * Resets the podcast database and user preferences so every test starts
     * from a clean, headset-event-free state, then boots the activity and
     * the UI test fixtures.
     */
    @Override
    public void setUp() throws Exception {
        super.setUp();
        PodDBAdapter.deleteDatabase();
        context = getInstrumentation().getTargetContext();
        SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
        // Disable headset (dis)connect handling so external audio events
        // cannot pause/resume playback mid-test.
        prefs.edit()
                .clear()
                .putBoolean(UserPreferences.PREF_UNPAUSE_ON_HEADSET_RECONNECT, false)
                .putBoolean(UserPreferences.PREF_PAUSE_ON_HEADSET_DISCONNECT, false)
                .commit();
        solo = new Solo(getInstrumentation(), getActivity());
        uiTestUtils = new UITestUtils(context);
        uiTestUtils.setup();
        // create database
        PodDBAdapter adapter = PodDBAdapter.getInstance();
        adapter.open();
        adapter.close();
    }

    /**
     * Closes all opened activities and stops the playback service so state
     * cannot leak into the next test.
     */
    @Override
    public void tearDown() throws Exception {
        solo.finishOpenedActivities();
        uiTestUtils.tearDown();
        // shut down playback service
        skipEpisode();
        context.sendBroadcast(new Intent(PlaybackService.ACTION_SHUTDOWN_PLAYBACK_SERVICE));
        super.tearDown();
    }

    // Opens the navigation drawer by tapping a fixed screen coordinate.
    // NOTE(review): coordinate-based tap (50, 50) presumably hits the hamburger
    // icon; fragile against layout/density changes — verify on target devices.
    private void openNavDrawer() {
        solo.clickOnScreen(50, 50);
    }

    // Toggles the "continue playing from queue" preference used by the
    // continuous-playback tests below.
    private void setContinuousPlaybackPreference(boolean value) {
        SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
        prefs.edit().putBoolean(UserPreferences.PREF_FOLLOW_QUEUE, value).commit();
    }

    // Asks the playback service to skip whatever episode is currently playing.
    private void skipEpisode() {
        Intent skipIntent = new Intent(PlaybackService.ACTION_SKIP_CURRENT_EPISODE);
        context.sendBroadcast(skipIntent);
    }

    /**
     * Navigates to the Episodes list and starts playback of the most recently
     * published episode, then waits until it is reported as the current media.
     */
    private void startLocalPlayback() {
        openNavDrawer();
        // if we try to just click on plain old text then
        // we might wind up clicking on the fragment title and not
        // the drawer element like we want.
        ListView drawerView = (ListView)solo.getView(R.id.nav_list);
        // this should be 'Episodes'
        View targetView = drawerView.getChildAt(EPISODES_DRAWER_LIST_INDEX);
        solo.waitForView(targetView);
        solo.clickOnView(targetView);
        solo.waitForText(solo.getString(R.string.all_episodes_short_label));
        solo.clickOnText(solo.getString(R.string.all_episodes_short_label));
        final List<FeedItem> episodes = DBReader.getRecentlyPublishedEpisodes(10);
        assertTrue(solo.waitForView(solo.getView(R.id.butSecondaryAction)));

        solo.clickOnView(solo.getView(R.id.butSecondaryAction));
        long mediaId = episodes.get(0).getMedia().getId();
        // Poll until the playback controller reports the expected media id.
        boolean playing = solo.waitForCondition(() -> {
            if (uiTestUtils.getCurrentMedia(getActivity()) != null) {
                return uiTestUtils.getCurrentMedia(getActivity()).getId() == mediaId;
            } else {
                return false;
            }
        }, Timeout.getSmallTimeout());
        assertTrue(playing);
    }

    /**
     * Navigates to the Queue and starts playback of the first queued episode,
     * then waits until it is reported as the current media.
     */
    private void startLocalPlaybackFromQueue() {
        openNavDrawer();
        // if we try to just click on plain old text then
        // we might wind up clicking on the fragment title and not
        // the drawer element like we want.
        ListView drawerView = (ListView)solo.getView(R.id.nav_list);
        // this should be 'Queue'
        View targetView = drawerView.getChildAt(QUEUE_DRAWER_LIST_INDEX);
        solo.waitForView(targetView);
        solo.clickOnView(targetView);
        assertTrue(solo.waitForView(solo.getView(R.id.butSecondaryAction)));
        final List<FeedItem> queue = DBReader.getQueue();
        solo.clickOnImageButton(1);
        assertTrue(solo.waitForView(solo.getView(R.id.butPlay)));
        long mediaId = queue.get(0).getMedia().getId();
        boolean playing = solo.waitForCondition(() -> {
            if(uiTestUtils.getCurrentMedia(getActivity()) != null) {
                return uiTestUtils.getCurrentMedia(getActivity()).getId() == mediaId;
            } else {
                return false;
            }
        }, Timeout.getSmallTimeout());
        assertTrue(playing);
    }

    // Playback of a single local episode must start even with an empty queue.
    public void testStartLocal() throws Exception {
        uiTestUtils.addLocalFeedData(true);
        DBWriter.clearQueue().get();
        startLocalPlayback();
    }

    // With continuous playback off, a single episode should still play normally.
    public void testContinousPlaybackOffSingleEpisode() throws Exception {
        setContinuousPlaybackPreference(false);
        uiTestUtils.addLocalFeedData(true);
        DBWriter.clearQueue().get();
        startLocalPlayback();
    }

    /**
     * With continuous playback off, playback must stop (not advance to the
     * next queue item) after the first episode finishes.
     */
    @FlakyTest(tolerance = 3)
    public void testContinousPlaybackOffMultipleEpisodes() throws Exception {
        setContinuousPlaybackPreference(false);
        uiTestUtils.addLocalFeedData(true);
        List<FeedItem> queue = DBReader.getQueue();
        final FeedItem first = queue.get(0);

        startLocalPlaybackFromQueue();
        // "stopped" here means: no longer PLAYING, or the current media changed
        // away from the first item, or there is no current media at all.
        boolean stopped = solo.waitForCondition(() -> {
            if (uiTestUtils.getPlaybackController(getActivity()).getStatus() != PlayerStatus.PLAYING) {
                return true;
            } else if (uiTestUtils.getCurrentMedia(getActivity()) != null) {
                return uiTestUtils.getCurrentMedia(getActivity()).getId() != first.getMedia().getId();
            } else {
                return true;
            }
        }, Timeout.getSmallTimeout());
        assertTrue(stopped);
        // Give the service a moment to settle, then confirm nothing restarted.
        Thread.sleep(1000);
        PlayerStatus status = uiTestUtils.getPlaybackController(getActivity()).getStatus();
        assertFalse(status.equals(PlayerStatus.PLAYING));
    }

    /**
     * With continuous playback on, the second queue item must start playing
     * automatically once the first one is done.
     */
    @FlakyTest(tolerance = 3)
    public void testContinuousPlaybackOnMultipleEpisodes() throws Exception {
        setContinuousPlaybackPreference(true);
        uiTestUtils.addLocalFeedData(true);
        List<FeedItem> queue = DBReader.getQueue();
        final FeedItem first = queue.get(0);
        final FeedItem second = queue.get(1);

        startLocalPlaybackFromQueue();
        boolean firstPlaying = solo.waitForCondition(() -> {
            if (uiTestUtils.getCurrentMedia(getActivity()) != null) {
                return uiTestUtils.getCurrentMedia(getActivity()).getId() == first.getMedia().getId();
            } else {
                return false;
            }
        }, Timeout.getSmallTimeout());
        assertTrue(firstPlaying);
        // The large timeout covers the full duration of the first episode.
        boolean secondPlaying = solo.waitForCondition(() -> {
            if (uiTestUtils.getCurrentMedia(getActivity()) != null) {
                return uiTestUtils.getCurrentMedia(getActivity()).getId() == second.getMedia().getId();
            } else {
                return false;
            }
        }, Timeout.getLargeTimeout());
        assertTrue(secondPlaying);
    }

    /**
     * Check if an episode can be played twice without problems.
     */
    private void replayEpisodeCheck(boolean followQueue) throws Exception {
        setContinuousPlaybackPreference(followQueue);
        uiTestUtils.addLocalFeedData(true);
        DBWriter.clearQueue().get();
        final List<FeedItem> episodes = DBReader.getRecentlyPublishedEpisodes(10);

        startLocalPlayback();
        long mediaId = episodes.get(0).getMedia().getId();
        boolean startedPlaying = solo.waitForCondition(() -> {
            if (uiTestUtils.getCurrentMedia(getActivity()) != null) {
                return uiTestUtils.getCurrentMedia(getActivity()).getId() == mediaId;
            } else {
                return false;
            }
        }, Timeout.getSmallTimeout());
        assertTrue(startedPlaying);

        // Wait for the first run-through to end before replaying.
        boolean stoppedPlaying = solo.waitForCondition(() ->
                uiTestUtils.getCurrentMedia(getActivity()) == null
                        || uiTestUtils.getCurrentMedia(getActivity()).getId() != mediaId, Timeout.getLargeTimeout());
        assertTrue(stoppedPlaying);

        startLocalPlayback();
        boolean startedReplay = solo.waitForCondition(() -> {
            if(uiTestUtils.getCurrentMedia(getActivity()) != null) {
                return uiTestUtils.getCurrentMedia(getActivity()).getId() == mediaId;
            } else {
                return false;
            }
        }, Timeout.getLargeTimeout());
        assertTrue(startedReplay);
    }

    public void testReplayEpisodeContinuousPlaybackOn() throws Exception {
        replayEpisodeCheck(true);
    }

    public void testReplayEpisodeContinuousPlaybackOff() throws Exception {
        replayEpisodeCheck(false);
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.beam.sdk.io.range;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;

/** Tests for {@link ByteKeyRangeTracker}. */
@RunWith(JUnit4.class)
public class ByteKeyRangeTrackerTest {
  // Single-byte keys chosen so that fractions consumed are easy to compute
  // arithmetically from the byte values (e.g. 0x23 is halfway between 0x12
  // and 0x34).
  private static final ByteKey BEFORE_START_KEY = ByteKey.of(0x11);
  private static final ByteKey INITIAL_START_KEY = ByteKey.of(0x12);
  private static final ByteKey AFTER_START_KEY = ByteKey.of(0x13);
  private static final ByteKey INITIAL_MIDDLE_KEY = ByteKey.of(0x23);
  private static final ByteKey NEW_START_KEY = ByteKey.of(0x14);
  private static final ByteKey NEW_MIDDLE_KEY = ByteKey.of(0x24);
  private static final ByteKey BEFORE_END_KEY = ByteKey.of(0x33);
  private static final ByteKey END_KEY = ByteKey.of(0x34);
  private static final ByteKey KEY_LARGER_THAN_END = ByteKey.of(0x35);
  // Range sizes expressed as key-space width, used to derive expected fractions.
  private static final double INITIAL_RANGE_SIZE = 0x34 - 0x12;
  private static final ByteKeyRange INITIAL_RANGE = ByteKeyRange.of(INITIAL_START_KEY, END_KEY);
  private static final double NEW_RANGE_SIZE = 0x34 - 0x14;
  private static final ByteKeyRange NEW_RANGE = ByteKeyRange.of(NEW_START_KEY, END_KEY);

  @Rule public final ExpectedException expected = ExpectedException.none();

  /** Tests for {@link ByteKeyRangeTracker#toString}. */
  @Test
  public void testToString() {
    ByteKeyRangeTracker tracker = ByteKeyRangeTracker.of(INITIAL_RANGE);
    String expected = String.format("ByteKeyRangeTracker{range=%s, position=null}", INITIAL_RANGE);
    assertEquals(expected, tracker.toString());

    tracker.tryReturnRecordAt(true, INITIAL_START_KEY);
    tracker.tryReturnRecordAt(true, INITIAL_MIDDLE_KEY);
    expected =
        String.format(
            "ByteKeyRangeTracker{range=%s, position=%s}", INITIAL_RANGE, INITIAL_MIDDLE_KEY);
    assertEquals(expected, tracker.toString());
  }

  /** Tests for updating the start key to the first record returned. */
  @Test
  public void testUpdateStartKey() {
    ByteKeyRangeTracker tracker = ByteKeyRangeTracker.of(INITIAL_RANGE);

    // Returning a first record after the range's start should shrink the
    // tracked range so that it begins at that record.
    tracker.tryReturnRecordAt(true, NEW_START_KEY);
    String expected =
        String.format("ByteKeyRangeTracker{range=%s, position=%s}", NEW_RANGE, NEW_START_KEY);
    assertEquals(expected, tracker.toString());
  }

  /** Tests for {@link ByteKeyRangeTracker#of}. */
  @Test
  public void testBuilding() {
    ByteKeyRangeTracker tracker = ByteKeyRangeTracker.of(INITIAL_RANGE);

    assertEquals(INITIAL_START_KEY, tracker.getStartPosition());
    assertEquals(END_KEY, tracker.getStopPosition());
  }

  /** Tests for {@link ByteKeyRangeTracker#getFractionConsumed()}. */
  @Test
  public void testGetFractionConsumed() {
    ByteKeyRangeTracker tracker = ByteKeyRangeTracker.of(INITIAL_RANGE);
    double delta = 0.00001;

    assertEquals(0.0, tracker.getFractionConsumed(), delta);

    tracker.tryReturnRecordAt(true, INITIAL_START_KEY);
    assertEquals(0.0, tracker.getFractionConsumed(), delta);

    tracker.tryReturnRecordAt(true, INITIAL_MIDDLE_KEY);
    assertEquals(0.5, tracker.getFractionConsumed(), delta);

    tracker.tryReturnRecordAt(true, BEFORE_END_KEY);
    assertEquals(1 - 1 / INITIAL_RANGE_SIZE, tracker.getFractionConsumed(), delta);
  }

  /** Marking the tracker done should report the range as fully consumed. */
  @Test
  public void testGetFractionConsumedAfterDone() {
    ByteKeyRangeTracker tracker = ByteKeyRangeTracker.of(INITIAL_RANGE);
    double delta = 0.00001;

    assertTrue(tracker.tryReturnRecordAt(true, INITIAL_START_KEY));
    tracker.markDone();

    assertEquals(1.0, tracker.getFractionConsumed(), delta);
  }

  /** A non-split-point claim past the end should report full consumption. */
  @Test
  public void testGetFractionConsumedAfterOutOfRangeClaim() {
    ByteKeyRangeTracker tracker = ByteKeyRangeTracker.of(INITIAL_RANGE);
    double delta = 0.00001;

    assertTrue(tracker.tryReturnRecordAt(true, INITIAL_START_KEY));
    assertTrue(tracker.tryReturnRecordAt(false, KEY_LARGER_THAN_END));

    assertEquals(1.0, tracker.getFractionConsumed(), delta);
  }

  /** Tests for {@link ByteKeyRangeTracker#getFractionConsumed()} with updated start key. */
  @Test
  public void testGetFractionConsumedUpdateStartKey() {
    ByteKeyRangeTracker tracker = ByteKeyRangeTracker.of(INITIAL_RANGE);
    double delta = 0.00001;

    // Fractions are computed against the shrunken range [NEW_START_KEY, END_KEY).
    tracker.tryReturnRecordAt(true, NEW_START_KEY);
    assertEquals(0.0, tracker.getFractionConsumed(), delta);

    tracker.tryReturnRecordAt(true, NEW_MIDDLE_KEY);
    assertEquals(0.5, tracker.getFractionConsumed(), delta);

    tracker.tryReturnRecordAt(true, BEFORE_END_KEY);
    assertEquals(1 - 1 / NEW_RANGE_SIZE, tracker.getFractionConsumed(), delta);
  }

  /** Tests for {@link ByteKeyRangeTracker#tryReturnRecordAt}. */
  @Test
  public void testTryReturnRecordAt() {
    ByteKeyRangeTracker tracker = ByteKeyRangeTracker.of(INITIAL_RANGE);

    // Should be able to emit at the same key twice, should that happen.
    // Should be able to emit within range (in order, but system guarantees won't try out of
    // order).
    // Should not be able to emit past end of range.
    assertTrue(tracker.tryReturnRecordAt(true, INITIAL_START_KEY));
    assertTrue(tracker.tryReturnRecordAt(true, INITIAL_START_KEY));
    assertTrue(tracker.tryReturnRecordAt(true, INITIAL_MIDDLE_KEY));
    assertTrue(tracker.tryReturnRecordAt(true, INITIAL_MIDDLE_KEY));
    assertTrue(tracker.tryReturnRecordAt(true, BEFORE_END_KEY));
    assertFalse(tracker.tryReturnRecordAt(true, END_KEY)); // after end

    assertFalse(tracker.tryReturnRecordAt(true, BEFORE_END_KEY)); // false because done
  }

  /** The very first returned record must be a split point. */
  @Test
  public void testTryReturnFirstRecordNotSplitPoint() {
    ByteKeyRangeTracker tracker = ByteKeyRangeTracker.of(INITIAL_RANGE);

    expected.expect(IllegalStateException.class);
    tracker.tryReturnRecordAt(false, INITIAL_START_KEY);
  }

  /** Returning a record before the range's start key is a caller error. */
  @Test
  public void testTryReturnBeforeStartKey() {
    ByteKeyRangeTracker tracker = ByteKeyRangeTracker.of(INITIAL_RANGE);

    expected.expect(IllegalStateException.class);
    tracker.tryReturnRecordAt(true, BEFORE_START_KEY);
  }

  /** Records must be returned in non-decreasing key order. */
  @Test
  public void testTryReturnBeforeLastReturnedRecord() {
    ByteKeyRangeTracker tracker = ByteKeyRangeTracker.of(INITIAL_RANGE);

    assertTrue(tracker.tryReturnRecordAt(true, INITIAL_START_KEY));
    assertTrue(tracker.tryReturnRecordAt(true, INITIAL_MIDDLE_KEY));

    expected.expect(IllegalStateException.class);
    tracker.tryReturnRecordAt(true, AFTER_START_KEY);
  }

  /** Tests for {@link ByteKeyRangeTracker#trySplitAtPosition}. */
  @Test
  public void testSplitAtPosition() {
    ByteKeyRangeTracker tracker = ByteKeyRangeTracker.of(INITIAL_RANGE);

    // Unstarted, should not split.
    assertFalse(tracker.trySplitAtPosition(INITIAL_MIDDLE_KEY));

    // Start it, split it before the end.
    assertTrue(tracker.tryReturnRecordAt(true, INITIAL_START_KEY));
    assertTrue(tracker.trySplitAtPosition(BEFORE_END_KEY));
    assertEquals(BEFORE_END_KEY, tracker.getStopPosition());

    // Should not be able to split it after the end.
    assertFalse(tracker.trySplitAtPosition(END_KEY));

    // Should not be able to split after emitting.
    assertTrue(tracker.tryReturnRecordAt(true, INITIAL_MIDDLE_KEY));
    assertFalse(tracker.trySplitAtPosition(INITIAL_MIDDLE_KEY));
    assertTrue(tracker.tryReturnRecordAt(true, INITIAL_MIDDLE_KEY));
  }

  /** Tests for {@link ByteKeyRangeTracker#getSplitPointsConsumed()}. */
  @Test
  public void testGetSplitPointsConsumed() {
    ByteKeyRangeTracker tracker = ByteKeyRangeTracker.of(INITIAL_RANGE);

    assertEquals(0, tracker.getSplitPointsConsumed());

    // Started, 0 split points consumed
    assertTrue(tracker.tryReturnRecordAt(true, INITIAL_START_KEY));
    assertEquals(0, tracker.getSplitPointsConsumed());

    // Processing new split point, 1 split point consumed
    assertTrue(tracker.tryReturnRecordAt(true, AFTER_START_KEY));
    assertEquals(1, tracker.getSplitPointsConsumed());

    // Processing new non-split point, 1 split point consumed
    assertTrue(tracker.tryReturnRecordAt(false, INITIAL_MIDDLE_KEY));
    assertEquals(1, tracker.getSplitPointsConsumed());

    // Processing new split point, 2 split points consumed
    assertTrue(tracker.tryReturnRecordAt(true, BEFORE_END_KEY));
    assertEquals(2, tracker.getSplitPointsConsumed());

    // Mark tracker as done, 3 split points consumed
    tracker.markDone();
    assertEquals(3, tracker.getSplitPointsConsumed());
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.wicket.request;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;

import org.apache.wicket.util.lang.Args;
import org.apache.wicket.util.time.Time;

/**
 * a multivalue map of headers names and header values suitable for
 * processing http request and response headers.
 *
 * Header names are matched case-insensitively (per the HTTP spec);
 * values are stored as {@link String}s or as {@link Time} timestamps.
 * This class is not thread-safe.
 *
 * @author Peter Ertl
 *
 * @since 1.5
 */
public class HttpHeaderCollection
{
	// keyed by the case-normalized header name; values are Strings or Time objects
	private final Map<HeaderKey, List<Object>> headers;

	/** returned in case no header values were found */
	private static final String[] NO_VALUES = new String[0];

	public HttpHeaderCollection()
	{
		headers = new HashMap<HeaderKey, List<Object>>();
	}

	/**
	 * internally add new object to header values
	 *
	 * @param name
	 *            header name
	 * @param object
	 *            header value (can be a string or a {@link Time} object)
	 */
	private void internalAdd(String name, Object object)
	{
		final HeaderKey key = new HeaderKey(name);

		List<Object> values = headers.get(key);

		if (values == null)
		{
			values = new ArrayList<Object>();
			headers.put(key, values);
		}
		values.add(object);
	}

	/**
	 * set header value (and remove previous values)
	 *
	 * @param name
	 *            header name
	 * @param value
	 *            header value
	 */
	public void setHeader(String name, String value)
	{
		// remove previous values
		removeHeader(name);

		// add new values
		addHeader(name, value);
	}

	/**
	 * add header value
	 *
	 * @param name
	 *            header name
	 * @param value
	 *            header value
	 */
	public void addHeader(String name, String value)
	{
		// be lenient and strip leading / trailing blanks
		value = Args.notNull(value, "value").trim();

		internalAdd(name, value);
	}

	/**
	 * add date header value
	 *
	 * @param name
	 *            header name
	 * @param time
	 *            timestamp
	 */
	public void addDateHeader(String name, Time time)
	{
		internalAdd(name, time);
	}

	/**
	 * set date header value (and remove previous values)
	 *
	 * @param name
	 *            header name
	 * @param time
	 *            timestamp
	 */
	public void setDateHeader(String name, Time time)
	{
		// remove previous values
		removeHeader(name);

		// add time object to values
		addDateHeader(name, time);
	}

	/**
	 * remove header values for header name
	 *
	 * @param name
	 *            header name
	 */
	public void removeHeader(String name)
	{
		// HeaderKey normalizes case in equals()/hashCode(), so a plain map
		// remove matches any case variant of the name; there is at most one
		// matching entry, making the previous full-map iteration unnecessary.
		headers.remove(new HeaderKey(name));
	}

	// renders a stored value for output: Time objects become RFC-1123
	// timestamps, anything else falls back to toString()
	private String valueToString(Object value)
	{
		if (value instanceof Time)
		{
			return ((Time)value).toRfc1123TimestampString();
		}
		else
		{
			return value.toString();
		}
	}

	/**
	 * check if header is defined
	 *
	 * @param name
	 *            header name
	 * @return <code>true</code> if header has one or more values
	 */
	public boolean containsHeader(String name)
	{
		// HeaderKey hashes on the lower-cased name, so containsKey is already
		// case-insensitive; no need to scan the key set linearly.
		return headers.containsKey(new HeaderKey(name));
	}

	/**
	 * returns names of headers
	 *
	 * @return set of header names
	 */
	public Set<String> getHeaderNames()
	{
		if (headers.isEmpty())
		{
			return Collections.emptySet();
		}

		final Set<String> names = new HashSet<String>(headers.size());

		for (HeaderKey key : headers.keySet())
		{
			names.add(key.getName());
		}
		return names;
	}

	/**
	 * get header values (dates will be converted into strings)
	 *
	 * @param name
	 *            header name
	 *
	 * @return array of header values or empty array if not found
	 */
	public String[] getHeaderValues(String name)
	{
		final List<Object> objects = headers.get(new HeaderKey(name));

		if (objects == null)
		{
			return NO_VALUES;
		}

		final String[] values = new String[objects.size()];

		for (int i = 0; i < values.length; i++)
		{
			values[i] = valueToString(objects.get(i));
		}
		return values;
	}

	/**
	 * get first header value (dates will be converted into strings)
	 *
	 * @param name
	 *            header name
	 * @return first value for the header or <code>null</code> if not found
	 */
	public String getHeader(String name)
	{
		final List<Object> objects = headers.get(new HeaderKey(name));

		// fix: previously only isEmpty() was checked, so an absent header
		// (null list) caused a NullPointerException; be consistent with
		// getHeaderValues() and report "not found" as null
		if ((objects == null) || objects.isEmpty())
		{
			return null;
		}
		return valueToString(objects.get(0));
	}

	/**
	 * get first header value as a timestamp
	 *
	 * @param name
	 *            header name
	 * @return timestamp or <code>null</code> if not found
	 * @throws IllegalStateException
	 *             if the stored value is not a {@link Time}
	 */
	public Time getDateHeader(String name)
	{
		final List<Object> objects = headers.get(new HeaderKey(name));

		// fix: same null-safety issue as getHeader() — an absent header must
		// yield null rather than throwing a NullPointerException
		if ((objects == null) || objects.isEmpty())
		{
			return null;
		}

		Object object = objects.get(0);

		if ((object instanceof Time) == false)
		{
			throw new IllegalStateException("header value is not of type date");
		}

		return (Time)object;
	}

	/**
	 * check if collection is empty
	 *
	 * @return <code>true</code> if collection is empty, <code>false</code> otherwise
	 */
	public boolean isEmpty()
	{
		return headers.isEmpty();
	}

	/**
	 * get number of headers
	 *
	 * @return count of distinct header names
	 */
	public int getCount()
	{
		return headers.size();
	}

	/**
	 * clear all headers
	 */
	public void clear()
	{
		headers.clear();
	}

	/**
	 * key for header collection: compares and hashes on the lower-cased
	 * header name while preserving the original casing for display
	 */
	private static class HeaderKey
	{
		// normalized (lower-cased) form used for equality / hashing
		private final String key;
		// original name as first supplied, trimmed
		private final String name;

		private HeaderKey(String name)
		{
			this.name = Args.notEmpty(name, "name").trim();
			// Locale.US keeps lower-casing locale-independent (e.g. Turkish 'I')
			this.key = this.name.toLowerCase(Locale.US);
		}

		public String getName()
		{
			return name;
		}

		@Override
		public boolean equals(Object o)
		{
			if (this == o)
			{
				return true;
			}

			if (!(o instanceof HeaderKey))
			{
				return false;
			}

			HeaderKey that = (HeaderKey)o;

			if (!key.equals(that.key))
			{
				return false;
			}
			return true;
		}

		@Override
		public int hashCode()
		{
			return key.hashCode();
		}
	}
}
/* * Copyright 2020 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/spanner/admin/database/v1/backup.proto package com.google.spanner.admin.database.v1; /** * * * <pre> * The request for [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup]. * </pre> * * Protobuf type {@code google.spanner.admin.database.v1.CreateBackupRequest} */ public final class CreateBackupRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.spanner.admin.database.v1.CreateBackupRequest) CreateBackupRequestOrBuilder { private static final long serialVersionUID = 0L; // Use CreateBackupRequest.newBuilder() to construct. 
// NOTE(review): protoc-generated code ("DO NOT EDIT!" per file header). The comments below
// are review annotations only; functional changes belong in backup.proto + regeneration.
// Builder-based constructor used by Builder.buildPartial().
private CreateBackupRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); }
// No-arg constructor for the default instance: string fields default to "" (proto3 default).
private CreateBackupRequest() { parent_ = ""; backupId_ = ""; }
@java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new CreateBackupRequest(); }
@java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; }
// Wire-format parsing constructor: reads tag/value pairs until end of stream (tag 0).
// Tags encode (field_number << 3) | wire_type: 10 -> parent (field 1), 18 -> backup_id (field 2),
// 26 -> backup (field 3, message), 34 -> encryption_config (field 4, message); any other tag is
// preserved as an unknown field. Repeated occurrences of a message field are merged via toBuilder().
private CreateBackupRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { java.lang.String s = input.readStringRequireUtf8(); parent_ = s; break; } case 18: { java.lang.String s = input.readStringRequireUtf8(); backupId_ = s; break; } case 26: { com.google.spanner.admin.database.v1.Backup.Builder subBuilder = null; if (backup_ != null) { subBuilder = backup_.toBuilder(); } backup_ = input.readMessage( com.google.spanner.admin.database.v1.Backup.parser(), extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(backup_); backup_ = subBuilder.buildPartial(); } break; } case 34: { com.google.spanner.admin.database.v1.CreateBackupEncryptionConfig.Builder subBuilder = null; if (encryptionConfig_ != null) { subBuilder = encryptionConfig_.toBuilder(); } encryptionConfig_ = input.readMessage( com.google.spanner.admin.database.v1.CreateBackupEncryptionConfig.parser(), extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(encryptionConfig_); encryptionConfig_ = subBuilder.buildPartial(); } break; } default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } }
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.spanner.admin.database.v1.BackupProto .internal_static_google_spanner_admin_database_v1_CreateBackupRequest_descriptor; }
@java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.spanner.admin.database.v1.BackupProto .internal_static_google_spanner_admin_database_v1_CreateBackupRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.spanner.admin.database.v1.CreateBackupRequest.class, com.google.spanner.admin.database.v1.CreateBackupRequest.Builder.class); }
public static final int PARENT_FIELD_NUMBER = 1;
// Holds either a String or a ByteString; lazily converted to String on first getParent() call.
private volatile java.lang.Object parent_;
/** * * <pre> * Required. The name of the instance in which the backup will be * created. This must be the same instance that contains the database the * backup will be created from. The backup will be stored in the * location(s) specified in the instance configuration of this * instance. Values are of the form * `projects/&lt;project&gt;/instances/&lt;instance&gt;`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */
@java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } }
/** * * <pre> * Required.
The name of the instance in which the backup will be * created. This must be the same instance that contains the database the * backup will be created from. The backup will be stored in the * location(s) specified in the instance configuration of this * instance. Values are of the form * `projects/&lt;project&gt;/instances/&lt;instance&gt;`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */
// Caches the UTF-8 ByteString form of parent_ back into the field.
@java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } }
public static final int BACKUP_ID_FIELD_NUMBER = 2;
// Holds either a String or a ByteString; lazily converted to String on first getBackupId() call.
private volatile java.lang.Object backupId_;
/** * * <pre> * Required. The id of the backup to be created. The `backup_id` appended to * `parent` forms the full backup name of the form * `projects/&lt;project&gt;/instances/&lt;instance&gt;/backups/&lt;backup_id&gt;`. * </pre> * * <code>string backup_id = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The backupId. */
@java.lang.Override public java.lang.String getBackupId() { java.lang.Object ref = backupId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); backupId_ = s; return s; } }
/** * * <pre> * Required. The id of the backup to be created. The `backup_id` appended to * `parent` forms the full backup name of the form * `projects/&lt;project&gt;/instances/&lt;instance&gt;/backups/&lt;backup_id&gt;`. * </pre> * * <code>string backup_id = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for backupId. */
@java.lang.Override public com.google.protobuf.ByteString getBackupIdBytes() { java.lang.Object ref = backupId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); backupId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } }
public static final int BACKUP_FIELD_NUMBER = 3;
// Singular message field: null means "not set" (proto3 message presence).
private com.google.spanner.admin.database.v1.Backup backup_;
/** * * <pre> * Required. The backup to create. * </pre> * * <code> * .google.spanner.admin.database.v1.Backup backup = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the backup field is set. */
@java.lang.Override public boolean hasBackup() { return backup_ != null; }
/** * * <pre> * Required. The backup to create. * </pre> * * <code> * .google.spanner.admin.database.v1.Backup backup = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The backup. */
// Never returns null: falls back to the default instance when unset.
@java.lang.Override public com.google.spanner.admin.database.v1.Backup getBackup() { return backup_ == null ? com.google.spanner.admin.database.v1.Backup.getDefaultInstance() : backup_; }
/** * * <pre> * Required. The backup to create. * </pre> * * <code> * .google.spanner.admin.database.v1.Backup backup = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */
@java.lang.Override public com.google.spanner.admin.database.v1.BackupOrBuilder getBackupOrBuilder() { return getBackup(); }
public static final int ENCRYPTION_CONFIG_FIELD_NUMBER = 4;
// Singular message field: null means "not set" (proto3 message presence).
private com.google.spanner.admin.database.v1.CreateBackupEncryptionConfig encryptionConfig_;
/** * * <pre> * Optional. The encryption configuration used to encrypt the backup. If this field is * not specified, the backup will use the same * encryption configuration as the database by default, namely * [encryption_type][google.spanner.admin.database.v1.CreateBackupEncryptionConfig.encryption_type] = * `USE_DATABASE_ENCRYPTION`.
* </pre> * * <code> * .google.spanner.admin.database.v1.CreateBackupEncryptionConfig encryption_config = 4 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return Whether the encryptionConfig field is set. */
@java.lang.Override public boolean hasEncryptionConfig() { return encryptionConfig_ != null; }
/** * * <pre> * Optional. The encryption configuration used to encrypt the backup. If this field is * not specified, the backup will use the same * encryption configuration as the database by default, namely * [encryption_type][google.spanner.admin.database.v1.CreateBackupEncryptionConfig.encryption_type] = * `USE_DATABASE_ENCRYPTION`. * </pre> * * <code> * .google.spanner.admin.database.v1.CreateBackupEncryptionConfig encryption_config = 4 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The encryptionConfig. */
// Never returns null: falls back to the default instance when unset.
@java.lang.Override public com.google.spanner.admin.database.v1.CreateBackupEncryptionConfig getEncryptionConfig() { return encryptionConfig_ == null ? com.google.spanner.admin.database.v1.CreateBackupEncryptionConfig.getDefaultInstance() : encryptionConfig_; }
/** * * <pre> * Optional. The encryption configuration used to encrypt the backup. If this field is * not specified, the backup will use the same * encryption configuration as the database by default, namely * [encryption_type][google.spanner.admin.database.v1.CreateBackupEncryptionConfig.encryption_type] = * `USE_DATABASE_ENCRYPTION`.
* </pre> * * <code> * .google.spanner.admin.database.v1.CreateBackupEncryptionConfig encryption_config = 4 [(.google.api.field_behavior) = OPTIONAL]; * </code> */
@java.lang.Override public com.google.spanner.admin.database.v1.CreateBackupEncryptionConfigOrBuilder getEncryptionConfigOrBuilder() { return getEncryptionConfig(); }
// Memoized isInitialized tri-state: -1 not yet computed, 0 false, 1 true.
private byte memoizedIsInitialized = -1;
@java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; }
// Writes only non-default fields (proto3): empty strings and unset messages are skipped.
@java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!getParentBytes().isEmpty()) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (!getBackupIdBytes().isEmpty()) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, backupId_); } if (backup_ != null) { output.writeMessage(3, getBackup()); } if (encryptionConfig_ != null) { output.writeMessage(4, getEncryptionConfig()); } unknownFields.writeTo(output); }
// Size is memoized in the inherited memoizedSize field; -1 means "not yet computed".
@java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!getParentBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (!getBackupIdBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, backupId_); } if (backup_ != null) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getBackup()); } if (encryptionConfig_ != null) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(4, getEncryptionConfig()); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; }
// Field-by-field value equality, including unknown fields.
@java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.spanner.admin.database.v1.CreateBackupRequest)) { return super.equals(obj); } com.google.spanner.admin.database.v1.CreateBackupRequest other = (com.google.spanner.admin.database.v1.CreateBackupRequest) obj; if (!getParent().equals(other.getParent())) return false; if (!getBackupId().equals(other.getBackupId())) return false; if (hasBackup() != other.hasBackup()) return false; if (hasBackup()) { if (!getBackup().equals(other.getBackup())) return false; } if (hasEncryptionConfig() != other.hasEncryptionConfig()) return false; if (hasEncryptionConfig()) { if (!getEncryptionConfig().equals(other.getEncryptionConfig())) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; }
// Hash is memoized in the inherited memoizedHashCode field; mixes field numbers with value hashes.
@java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); hash = (37 * hash) + BACKUP_ID_FIELD_NUMBER; hash = (53 * hash) + getBackupId().hashCode(); if (hasBackup()) { hash = (37 * hash) + BACKUP_FIELD_NUMBER; hash = (53 * hash) + getBackup().hashCode(); } if (hasEncryptionConfig()) { hash = (37 * hash) + ENCRYPTION_CONFIG_FIELD_NUMBER; hash = (53 * hash) + getEncryptionConfig().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; }
// Standard generated parseFrom/parseDelimitedFrom overloads for every supported input source.
public static com.google.spanner.admin.database.v1.CreateBackupRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); }
public static com.google.spanner.admin.database.v1.CreateBackupRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); }
public static com.google.spanner.admin.database.v1.CreateBackupRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); }
public static com.google.spanner.admin.database.v1.CreateBackupRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); }
public static com.google.spanner.admin.database.v1.CreateBackupRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); }
public static com.google.spanner.admin.database.v1.CreateBackupRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); }
public static com.google.spanner.admin.database.v1.CreateBackupRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); }
public static com.google.spanner.admin.database.v1.CreateBackupRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); }
public static com.google.spanner.admin.database.v1.CreateBackupRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); }
public static com.google.spanner.admin.database.v1.CreateBackupRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); }
public static com.google.spanner.admin.database.v1.CreateBackupRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); }
public static com.google.spanner.admin.database.v1.CreateBackupRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); }
// Builder factory / conversion helpers.
@java.lang.Override public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); }
public static Builder newBuilder( com.google.spanner.admin.database.v1.CreateBackupRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); }
@java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); }
@java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; }
/** * * <pre> * The request for [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup].
* </pre> * * Protobuf type {@code google.spanner.admin.database.v1.CreateBackupRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.spanner.admin.database.v1.CreateBackupRequest) com.google.spanner.admin.database.v1.CreateBackupRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.spanner.admin.database.v1.BackupProto .internal_static_google_spanner_admin_database_v1_CreateBackupRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.spanner.admin.database.v1.BackupProto .internal_static_google_spanner_admin_database_v1_CreateBackupRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.spanner.admin.database.v1.CreateBackupRequest.class, com.google.spanner.admin.database.v1.CreateBackupRequest.Builder.class); } // Construct using com.google.spanner.admin.database.v1.CreateBackupRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} } @java.lang.Override public Builder clear() { super.clear(); parent_ = ""; backupId_ = ""; if (backupBuilder_ == null) { backup_ = null; } else { backup_ = null; backupBuilder_ = null; } if (encryptionConfigBuilder_ == null) { encryptionConfig_ = null; } else { encryptionConfig_ = null; encryptionConfigBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.spanner.admin.database.v1.BackupProto .internal_static_google_spanner_admin_database_v1_CreateBackupRequest_descriptor; } 
@java.lang.Override public com.google.spanner.admin.database.v1.CreateBackupRequest getDefaultInstanceForType() { return com.google.spanner.admin.database.v1.CreateBackupRequest.getDefaultInstance(); }
// build() enforces isInitialized(); buildPartial() does not.
@java.lang.Override public com.google.spanner.admin.database.v1.CreateBackupRequest build() { com.google.spanner.admin.database.v1.CreateBackupRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; }
// Copies fields into a new message; message fields come from the nested builders when present.
@java.lang.Override public com.google.spanner.admin.database.v1.CreateBackupRequest buildPartial() { com.google.spanner.admin.database.v1.CreateBackupRequest result = new com.google.spanner.admin.database.v1.CreateBackupRequest(this); result.parent_ = parent_; result.backupId_ = backupId_; if (backupBuilder_ == null) { result.backup_ = backup_; } else { result.backup_ = backupBuilder_.build(); } if (encryptionConfigBuilder_ == null) { result.encryptionConfig_ = encryptionConfig_; } else { result.encryptionConfig_ = encryptionConfigBuilder_.build(); } onBuilt(); return result; }
@java.lang.Override public Builder clone() { return super.clone(); }
@java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); }
@java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); }
@java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); }
@java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); }
@java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); }
// Dynamic-dispatch merge: delegates to the typed overload when possible.
@java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.spanner.admin.database.v1.CreateBackupRequest) { return mergeFrom((com.google.spanner.admin.database.v1.CreateBackupRequest) other); } else { super.mergeFrom(other); return this; } }
// Typed merge: only non-default fields of `other` overwrite/merge into this builder.
public Builder mergeFrom(com.google.spanner.admin.database.v1.CreateBackupRequest other) { if (other == com.google.spanner.admin.database.v1.CreateBackupRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; onChanged(); } if (!other.getBackupId().isEmpty()) { backupId_ = other.backupId_; onChanged(); } if (other.hasBackup()) { mergeBackup(other.getBackup()); } if (other.hasEncryptionConfig()) { mergeEncryptionConfig(other.getEncryptionConfig()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; }
@java.lang.Override public final boolean isInitialized() { return true; }
// Stream merge: on parse failure, keeps whatever was successfully parsed before rethrowing.
@java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.spanner.admin.database.v1.CreateBackupRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.google.spanner.admin.database.v1.CreateBackupRequest) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; }
// Holds either a String or a ByteString, as in the message class.
private java.lang.Object parent_ = "";
/** * * <pre> * Required. The name of the instance in which the backup will be * created. This must be the same instance that contains the database the * backup will be created from. The backup will be stored in the * location(s) specified in the instance configuration of this * instance. Values are of the form * `projects/&lt;project&gt;/instances/&lt;instance&gt;`.
* </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */
public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } }
/** * * <pre> * Required. The name of the instance in which the backup will be * created. This must be the same instance that contains the database the * backup will be created from. The backup will be stored in the * location(s) specified in the instance configuration of this * instance. Values are of the form * `projects/&lt;project&gt;/instances/&lt;instance&gt;`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */
public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } }
/** * * <pre> * Required. The name of the instance in which the backup will be * created. This must be the same instance that contains the database the * backup will be created from. The backup will be stored in the * location(s) specified in the instance configuration of this * instance. Values are of the form * `projects/&lt;project&gt;/instances/&lt;instance&gt;`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */
// Rejects null; onChanged() notifies the parent builder of the mutation.
public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; onChanged(); return this; }
/** * * <pre> * Required. The name of the instance in which the backup will be * created. This must be the same instance that contains the database the * backup will be created from. The backup will be stored in the * location(s) specified in the instance configuration of this * instance. Values are of the form * `projects/&lt;project&gt;/instances/&lt;instance&gt;`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */
public Builder clearParent() { parent_ = getDefaultInstance().getParent(); onChanged(); return this; }
/** * * <pre> * Required. The name of the instance in which the backup will be * created. This must be the same instance that contains the database the * backup will be created from. The backup will be stored in the * location(s) specified in the instance configuration of this * instance. Values are of the form * `projects/&lt;project&gt;/instances/&lt;instance&gt;`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. */
// Bytes setter validates UTF-8 before storing (proto3 strings must be valid UTF-8).
public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; onChanged(); return this; }
// Holds either a String or a ByteString, as in the message class.
private java.lang.Object backupId_ = "";
/** * * <pre> * Required. The id of the backup to be created. The `backup_id` appended to * `parent` forms the full backup name of the form * `projects/&lt;project&gt;/instances/&lt;instance&gt;/backups/&lt;backup_id&gt;`. * </pre> * * <code>string backup_id = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The backupId.
*/
public java.lang.String getBackupId() { java.lang.Object ref = backupId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); backupId_ = s; return s; } else { return (java.lang.String) ref; } }
/** * * <pre> * Required. The id of the backup to be created. The `backup_id` appended to * `parent` forms the full backup name of the form * `projects/&lt;project&gt;/instances/&lt;instance&gt;/backups/&lt;backup_id&gt;`. * </pre> * * <code>string backup_id = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for backupId. */
public com.google.protobuf.ByteString getBackupIdBytes() { java.lang.Object ref = backupId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); backupId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } }
/** * * <pre> * Required. The id of the backup to be created. The `backup_id` appended to * `parent` forms the full backup name of the form * `projects/&lt;project&gt;/instances/&lt;instance&gt;/backups/&lt;backup_id&gt;`. * </pre> * * <code>string backup_id = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The backupId to set. * @return This builder for chaining. */
// Rejects null; onChanged() notifies the parent builder of the mutation.
public Builder setBackupId(java.lang.String value) { if (value == null) { throw new NullPointerException(); } backupId_ = value; onChanged(); return this; }
/** * * <pre> * Required. The id of the backup to be created. The `backup_id` appended to * `parent` forms the full backup name of the form * `projects/&lt;project&gt;/instances/&lt;instance&gt;/backups/&lt;backup_id&gt;`. * </pre> * * <code>string backup_id = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return This builder for chaining. */
public Builder clearBackupId() { backupId_ = getDefaultInstance().getBackupId(); onChanged(); return this; }
/** * * <pre> * Required. The id of the backup to be created. The `backup_id` appended to * `parent` forms the full backup name of the form * `projects/&lt;project&gt;/instances/&lt;instance&gt;/backups/&lt;backup_id&gt;`. * </pre> * * <code>string backup_id = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The bytes for backupId to set. * @return This builder for chaining. */
// Bytes setter validates UTF-8 before storing (proto3 strings must be valid UTF-8).
public Builder setBackupIdBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); backupId_ = value; onChanged(); return this; }
// Exactly one of backup_ / backupBuilder_ is active at a time; the nested builder takes
// over once getBackupFieldBuilder() has been called.
private com.google.spanner.admin.database.v1.Backup backup_;
private com.google.protobuf.SingleFieldBuilderV3< com.google.spanner.admin.database.v1.Backup, com.google.spanner.admin.database.v1.Backup.Builder, com.google.spanner.admin.database.v1.BackupOrBuilder> backupBuilder_;
/** * * <pre> * Required. The backup to create. * </pre> * * <code> * .google.spanner.admin.database.v1.Backup backup = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the backup field is set. */
public boolean hasBackup() { return backupBuilder_ != null || backup_ != null; }
/** * * <pre> * Required. The backup to create. * </pre> * * <code> * .google.spanner.admin.database.v1.Backup backup = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The backup. */
public com.google.spanner.admin.database.v1.Backup getBackup() { if (backupBuilder_ == null) { return backup_ == null ? com.google.spanner.admin.database.v1.Backup.getDefaultInstance() : backup_; } else { return backupBuilder_.getMessage(); } }
/** * * <pre> * Required. The backup to create.
* </pre> * * <code> * .google.spanner.admin.database.v1.Backup backup = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */
// Setter routes through the nested builder when one exists; otherwise stores directly.
public Builder setBackup(com.google.spanner.admin.database.v1.Backup value) { if (backupBuilder_ == null) { if (value == null) { throw new NullPointerException(); } backup_ = value; onChanged(); } else { backupBuilder_.setMessage(value); } return this; }
/** * * <pre> * Required. The backup to create. * </pre> * * <code> * .google.spanner.admin.database.v1.Backup backup = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */
public Builder setBackup(com.google.spanner.admin.database.v1.Backup.Builder builderForValue) { if (backupBuilder_ == null) { backup_ = builderForValue.build(); onChanged(); } else { backupBuilder_.setMessage(builderForValue.build()); } return this; }
/** * * <pre> * Required. The backup to create. * </pre> * * <code> * .google.spanner.admin.database.v1.Backup backup = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */
// Merge: field-wise merge into any existing value, plain set when currently unset.
public Builder mergeBackup(com.google.spanner.admin.database.v1.Backup value) { if (backupBuilder_ == null) { if (backup_ != null) { backup_ = com.google.spanner.admin.database.v1.Backup.newBuilder(backup_) .mergeFrom(value) .buildPartial(); } else { backup_ = value; } onChanged(); } else { backupBuilder_.mergeFrom(value); } return this; }
/** * * <pre> * Required. The backup to create. * </pre> * * <code> * .google.spanner.admin.database.v1.Backup backup = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */
public Builder clearBackup() { if (backupBuilder_ == null) { backup_ = null; onChanged(); } else { backup_ = null; backupBuilder_ = null; } return this; }
/** * * <pre> * Required. The backup to create. * </pre> * * <code> * .google.spanner.admin.database.v1.Backup backup = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */
// Returns a mutable sub-builder; forces creation of the field builder.
public com.google.spanner.admin.database.v1.Backup.Builder getBackupBuilder() { onChanged(); return getBackupFieldBuilder().getBuilder(); }
/** * * <pre> * Required. The backup to create. * </pre> * * <code> * .google.spanner.admin.database.v1.Backup backup = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */
public com.google.spanner.admin.database.v1.BackupOrBuilder getBackupOrBuilder() { if (backupBuilder_ != null) { return backupBuilder_.getMessageOrBuilder(); } else { return backup_ == null ? com.google.spanner.admin.database.v1.Backup.getDefaultInstance() : backup_; } }
/** * * <pre> * Required. The backup to create. * </pre> * * <code> * .google.spanner.admin.database.v1.Backup backup = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */
// Lazily creates the SingleFieldBuilderV3 and hands ownership of backup_ to it (field is nulled).
private com.google.protobuf.SingleFieldBuilderV3< com.google.spanner.admin.database.v1.Backup, com.google.spanner.admin.database.v1.Backup.Builder, com.google.spanner.admin.database.v1.BackupOrBuilder> getBackupFieldBuilder() { if (backupBuilder_ == null) { backupBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.spanner.admin.database.v1.Backup, com.google.spanner.admin.database.v1.Backup.Builder, com.google.spanner.admin.database.v1.BackupOrBuilder>( getBackup(), getParentForChildren(), isClean()); backup_ = null; } return backupBuilder_; }
// Same field/field-builder pairing as backup_ above, for encryption_config (field 4).
private com.google.spanner.admin.database.v1.CreateBackupEncryptionConfig encryptionConfig_;
private com.google.protobuf.SingleFieldBuilderV3< com.google.spanner.admin.database.v1.CreateBackupEncryptionConfig, com.google.spanner.admin.database.v1.CreateBackupEncryptionConfig.Builder, com.google.spanner.admin.database.v1.CreateBackupEncryptionConfigOrBuilder> encryptionConfigBuilder_;
/** * * <pre> * Optional. The encryption configuration used to encrypt the backup.
If this field is * not specified, the backup will use the same * encryption configuration as the database by default, namely * [encryption_type][google.spanner.admin.database.v1.CreateBackupEncryptionConfig.encryption_type] = * `USE_DATABASE_ENCRYPTION`. * </pre> * * <code> * .google.spanner.admin.database.v1.CreateBackupEncryptionConfig encryption_config = 4 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return Whether the encryptionConfig field is set. */ public boolean hasEncryptionConfig() { return encryptionConfigBuilder_ != null || encryptionConfig_ != null; } /** * * * <pre> * Optional. The encryption configuration used to encrypt the backup. If this field is * not specified, the backup will use the same * encryption configuration as the database by default, namely * [encryption_type][google.spanner.admin.database.v1.CreateBackupEncryptionConfig.encryption_type] = * `USE_DATABASE_ENCRYPTION`. * </pre> * * <code> * .google.spanner.admin.database.v1.CreateBackupEncryptionConfig encryption_config = 4 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The encryptionConfig. */ public com.google.spanner.admin.database.v1.CreateBackupEncryptionConfig getEncryptionConfig() { if (encryptionConfigBuilder_ == null) { return encryptionConfig_ == null ? com.google.spanner.admin.database.v1.CreateBackupEncryptionConfig.getDefaultInstance() : encryptionConfig_; } else { return encryptionConfigBuilder_.getMessage(); } } /** * * * <pre> * Optional. The encryption configuration used to encrypt the backup. If this field is * not specified, the backup will use the same * encryption configuration as the database by default, namely * [encryption_type][google.spanner.admin.database.v1.CreateBackupEncryptionConfig.encryption_type] = * `USE_DATABASE_ENCRYPTION`. 
* </pre> * * <code> * .google.spanner.admin.database.v1.CreateBackupEncryptionConfig encryption_config = 4 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder setEncryptionConfig( com.google.spanner.admin.database.v1.CreateBackupEncryptionConfig value) { if (encryptionConfigBuilder_ == null) { if (value == null) { throw new NullPointerException(); } encryptionConfig_ = value; onChanged(); } else { encryptionConfigBuilder_.setMessage(value); } return this; } /** * * * <pre> * Optional. The encryption configuration used to encrypt the backup. If this field is * not specified, the backup will use the same * encryption configuration as the database by default, namely * [encryption_type][google.spanner.admin.database.v1.CreateBackupEncryptionConfig.encryption_type] = * `USE_DATABASE_ENCRYPTION`. * </pre> * * <code> * .google.spanner.admin.database.v1.CreateBackupEncryptionConfig encryption_config = 4 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder setEncryptionConfig( com.google.spanner.admin.database.v1.CreateBackupEncryptionConfig.Builder builderForValue) { if (encryptionConfigBuilder_ == null) { encryptionConfig_ = builderForValue.build(); onChanged(); } else { encryptionConfigBuilder_.setMessage(builderForValue.build()); } return this; } /** * * * <pre> * Optional. The encryption configuration used to encrypt the backup. If this field is * not specified, the backup will use the same * encryption configuration as the database by default, namely * [encryption_type][google.spanner.admin.database.v1.CreateBackupEncryptionConfig.encryption_type] = * `USE_DATABASE_ENCRYPTION`. 
* </pre> * * <code> * .google.spanner.admin.database.v1.CreateBackupEncryptionConfig encryption_config = 4 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder mergeEncryptionConfig( com.google.spanner.admin.database.v1.CreateBackupEncryptionConfig value) { if (encryptionConfigBuilder_ == null) { if (encryptionConfig_ != null) { encryptionConfig_ = com.google.spanner.admin.database.v1.CreateBackupEncryptionConfig.newBuilder( encryptionConfig_) .mergeFrom(value) .buildPartial(); } else { encryptionConfig_ = value; } onChanged(); } else { encryptionConfigBuilder_.mergeFrom(value); } return this; } /** * * * <pre> * Optional. The encryption configuration used to encrypt the backup. If this field is * not specified, the backup will use the same * encryption configuration as the database by default, namely * [encryption_type][google.spanner.admin.database.v1.CreateBackupEncryptionConfig.encryption_type] = * `USE_DATABASE_ENCRYPTION`. * </pre> * * <code> * .google.spanner.admin.database.v1.CreateBackupEncryptionConfig encryption_config = 4 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder clearEncryptionConfig() { if (encryptionConfigBuilder_ == null) { encryptionConfig_ = null; onChanged(); } else { encryptionConfig_ = null; encryptionConfigBuilder_ = null; } return this; } /** * * * <pre> * Optional. The encryption configuration used to encrypt the backup. If this field is * not specified, the backup will use the same * encryption configuration as the database by default, namely * [encryption_type][google.spanner.admin.database.v1.CreateBackupEncryptionConfig.encryption_type] = * `USE_DATABASE_ENCRYPTION`. 
* </pre> * * <code> * .google.spanner.admin.database.v1.CreateBackupEncryptionConfig encryption_config = 4 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public com.google.spanner.admin.database.v1.CreateBackupEncryptionConfig.Builder getEncryptionConfigBuilder() { onChanged(); return getEncryptionConfigFieldBuilder().getBuilder(); } /** * * * <pre> * Optional. The encryption configuration used to encrypt the backup. If this field is * not specified, the backup will use the same * encryption configuration as the database by default, namely * [encryption_type][google.spanner.admin.database.v1.CreateBackupEncryptionConfig.encryption_type] = * `USE_DATABASE_ENCRYPTION`. * </pre> * * <code> * .google.spanner.admin.database.v1.CreateBackupEncryptionConfig encryption_config = 4 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public com.google.spanner.admin.database.v1.CreateBackupEncryptionConfigOrBuilder getEncryptionConfigOrBuilder() { if (encryptionConfigBuilder_ != null) { return encryptionConfigBuilder_.getMessageOrBuilder(); } else { return encryptionConfig_ == null ? com.google.spanner.admin.database.v1.CreateBackupEncryptionConfig.getDefaultInstance() : encryptionConfig_; } } /** * * * <pre> * Optional. The encryption configuration used to encrypt the backup. If this field is * not specified, the backup will use the same * encryption configuration as the database by default, namely * [encryption_type][google.spanner.admin.database.v1.CreateBackupEncryptionConfig.encryption_type] = * `USE_DATABASE_ENCRYPTION`. 
* </pre> * * <code> * .google.spanner.admin.database.v1.CreateBackupEncryptionConfig encryption_config = 4 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.spanner.admin.database.v1.CreateBackupEncryptionConfig, com.google.spanner.admin.database.v1.CreateBackupEncryptionConfig.Builder, com.google.spanner.admin.database.v1.CreateBackupEncryptionConfigOrBuilder> getEncryptionConfigFieldBuilder() { if (encryptionConfigBuilder_ == null) { encryptionConfigBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.spanner.admin.database.v1.CreateBackupEncryptionConfig, com.google.spanner.admin.database.v1.CreateBackupEncryptionConfig.Builder, com.google.spanner.admin.database.v1.CreateBackupEncryptionConfigOrBuilder>( getEncryptionConfig(), getParentForChildren(), isClean()); encryptionConfig_ = null; } return encryptionConfigBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.spanner.admin.database.v1.CreateBackupRequest) } // @@protoc_insertion_point(class_scope:google.spanner.admin.database.v1.CreateBackupRequest) private static final com.google.spanner.admin.database.v1.CreateBackupRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.spanner.admin.database.v1.CreateBackupRequest(); } public static com.google.spanner.admin.database.v1.CreateBackupRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<CreateBackupRequest> PARSER = new com.google.protobuf.AbstractParser<CreateBackupRequest>() { @java.lang.Override public CreateBackupRequest parsePartialFrom( 
com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new CreateBackupRequest(input, extensionRegistry); } }; public static com.google.protobuf.Parser<CreateBackupRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<CreateBackupRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.spanner.admin.database.v1.CreateBackupRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.java.compiler.notNullVerification;

import com.intellij.JavaTestUtil;
import com.intellij.codeInsight.AnnotationUtil;
import com.intellij.compiler.instrumentation.FailSafeClassReader;
import com.intellij.compiler.instrumentation.InstrumenterClassWriter;
import com.intellij.compiler.notNullVerification.NotNullVerifyingInstrumenter;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.io.IoTestUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.testFramework.IdeaTestUtil;
import com.intellij.testFramework.PlatformTestUtil;
import com.intellij.testFramework.rules.TempDirectory;
import com.intellij.util.ArrayUtil;
import com.intellij.util.ExceptionUtil;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.org.objectweb.asm.ClassReader;
import org.jetbrains.org.objectweb.asm.ClassWriter;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExternalResource;
import org.junit.rules.TestName;
import org.junit.runner.Description;
import org.junit.runners.model.Statement;

import java.io.File;
import java.io.IOException;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Member;
import java.lang.reflect.Method;
import java.util.Arrays;
import java.util.List;

import static com.intellij.testFramework.UsefulTestCase.assertInstanceOf;
import static org.junit.Assert.*;

/**
 * Tests for {@link NotNullVerifyingInstrumenter}: each test compiles a fixture
 * source file from the test-data directory, runs the instrumenter over the
 * resulting class files, loads the instrumented bytes in an isolated class
 * loader, and verifies via reflection that null arguments / null returns on
 * annotated members throw with the exact expected message.
 *
 * <p>The class is abstract; the concrete subclasses below select which
 * annotation-declaration flavor is used via the {@link TestDirectory} value
 * (member-target annotations, type-use annotations, or a mix).
 */
public abstract class NotNullVerifyingInstrumenterTest {
  /** Names the test-data subdirectory whose annotation classes are compiled for a subclass. */
  @Retention(RetentionPolicy.RUNTIME)
  private @interface TestDirectory { String value(); }

  /** Runs the suite against classic member-target (method/parameter) annotations. */
  @TestDirectory("members")
  public static class MembersTargetTest extends NotNullVerifyingInstrumenterTest { }

  /** Runs the suite (plus array tests) against type-use annotations. */
  @TestDirectory("types")
  public static class TypesTargetTest extends WithTypeUse { }

  /** Runs the suite (plus array tests) against annotations with both member and type-use targets. */
  @TestDirectory("mixed")
  public static class MixedTargetTest extends WithTypeUse { }

  private static final String TEST_DATA_PATH = "/compiler/notNullVerification/";

  /**
   * Class rule that compiles the annotation sources named by the subclass's
   * {@link TestDirectory} into a temp directory before any test runs, and
   * deletes that directory afterwards.
   */
  private static class AnnotationCompiler extends ExternalResource {
    File classes;

    @Override
    public Statement apply(Statement base, Description description) {
      TestDirectory annotation = description.getAnnotation(TestDirectory.class);
      if (annotation == null) throw new IllegalArgumentException("Class " + description.getTestClass() + " misses @TestDirectory annotation");
      File source = new File(JavaTestUtil.getJavaTestDataPath() + TEST_DATA_PATH + annotation.value());
      File[] annotations = source.listFiles();
      if (annotations == null || annotations.length == 0) throw new IllegalArgumentException("Cannot find annotations at " + source);
      classes = IoTestUtil.createTestDir("test-notNullInstrumenter-" + annotation.value());
      for (File file : annotations) IdeaTestUtil.compileFile(file, classes);
      return super.apply(base, description);
    }

    @Override
    protected void after() {
      IoTestUtil.delete(classes);
    }
  }

  @ClassRule public static final AnnotationCompiler annotation = new AnnotationCompiler();
  @Rule public TempDirectory tempDir = new TempDirectory();
  // Used to derive the fixture file name from the running test's method name.
  @Rule public TestName testName = new TestName();

  @Test
  public void testSimpleReturn() throws Exception {
    Class<?> testClass = prepareTest();
    Object instance = testClass.getDeclaredConstructor().newInstance();
    Method method = testClass.getMethod("test");
    verifyCallThrowsException("@NotNull method SimpleReturn.test must not return null", instance, method);
  }

  @Test
  public void testSimpleReturnWithMessage() throws Exception {
    Class<?> testClass = prepareTest();
    Object instance = testClass.getDeclaredConstructor().newInstance();
    Method method = testClass.getMethod("test");
    verifyCallThrowsException("This method cannot return null", instance, method);
  }

  @Test
  public void testMultipleReturns() throws Exception {
    Class<?> testClass = prepareTest();
    Object instance = testClass.getDeclaredConstructor().newInstance();
    Method method = testClass.getMethod("test", int.class);
    verifyCallThrowsException("@NotNull method MultipleReturns.test must not return null", instance, method, 1);
  }

  @Test
  public void testSimpleParam() throws Exception {
    Class<?> testClass = prepareTest();
    Object instance = testClass.getDeclaredConstructor().newInstance();
    Method method = testClass.getMethod("test", Object.class);
    verifyCallThrowsException("Argument 0 for @NotNull parameter of SimpleParam.test must not be null", instance, method, (Object)null);
  }

  @Test
  public void testSimpleParamWithMessage() throws Exception {
    Class<?> testClass = prepareTest();
    Object instance = testClass.getDeclaredConstructor().newInstance();
    Method method = testClass.getMethod("test", Object.class);
    verifyCallThrowsException("SimpleParamWithMessage.test(o) cant be null", instance, method, (Object)null);
  }

  @Test
  public void testConstructorParam() throws Exception {
    Class<?> testClass = prepareTest();
    Constructor<?> method = testClass.getConstructor(Object.class);
    verifyCallThrowsException("Argument 0 for @NotNull parameter of ConstructorParam.<init> must not be null", null, method, (Object)null);
  }

  @Test
  public void testConstructorParamWithMessage() throws Exception {
    Class<?> testClass = prepareTest();
    Constructor<?> method = testClass.getConstructor(Object.class);
    verifyCallThrowsException("ConstructorParam.ConstructorParam.o cant be null", null, method, (Object)null);
  }

  @Test
  public void testUseParameterNames() throws Exception {
    // Compiled with -g so the instrumenter can report real parameter names.
    Class<?> testClass = prepareTest(true, AnnotationUtil.NOT_NULL);
    Constructor<?> constructor = testClass.getConstructor(Object.class, Object.class);
    verifyCallThrowsException("Argument for @NotNull parameter 'obj2' of UseParameterNames.<init> must not be null", null, constructor, null, null);
    Method staticMethod = testClass.getMethod("staticMethod", Object.class);
    verifyCallThrowsException("Argument for @NotNull parameter 'y' of UseParameterNames.staticMethod must not be null", null, staticMethod, (Object)null);
    Object instance = constructor.newInstance("", "");
    Method instanceMethod = testClass.getMethod("instanceMethod", Object.class);
    verifyCallThrowsException("Argument for @NotNull parameter 'x' of UseParameterNames.instanceMethod must not be null", instance, instanceMethod, (Object)null);
  }

  @Test
  public void testLongParameter() throws Exception {
    // long occupies two local-variable slots; checks slot accounting in the instrumenter.
    Class<?> testClass = prepareTest(true, AnnotationUtil.NOT_NULL);
    Method staticMethod = testClass.getMethod("foo", long.class, String.class, String.class);
    verifyCallThrowsException("Argument for @NotNull parameter 'c' of LongParameter.foo must not be null", null, staticMethod, Long.valueOf(2), "z", null);
  }

  @Test
  public void testDoubleParameter() throws Exception {
    // double also occupies two slots; same slot-accounting concern as long.
    Class<?> testClass = prepareTest(true, AnnotationUtil.NOT_NULL);
    Method staticMethod = testClass.getMethod("foo", double.class, String.class, String.class);
    verifyCallThrowsException("Argument for @NotNull parameter 'c' of DoubleParameter.foo must not be null", null, staticMethod, Long.valueOf(2), "z", null);
  }

  @Test
  public void testEnumConstructor() throws Exception {
    Class<?> testClass = prepareTest();
    assertNotNull(testClass.getField("Value").get(null));
  }

  @Test
  public void testCustomExceptionType() throws Exception {
    Class<?> testClass = prepareTest();
    try {
      testClass.getMethod("foo", Object.class, Object.class).invoke(testClass.getDeclaredConstructor().newInstance(), null, null);
      fail();
    }
    catch (InvocationTargetException e) {
      // The fixture declares a custom exception class for violations.
      assertInstanceOf(e.getCause(), NullPointerException.class);
      assertEquals("Argument 1 for @NotNull parameter of CustomExceptionType.foo must not be null", e.getCause().getMessage());
    }
  }

  @Test
  public void testEnumConstructorSecondParam() throws Exception {
    Class<?> testClass = prepareTest();
    assertNotNull(testClass.getField("Value").get(null));
  }

  @Test
  public void testGroovyEnum() throws Exception {
    Class<?> testClass = prepareTest();
    assertNotNull(testClass.getField("Value").get(null));
  }

  @Test
  public void testStaticInnerClass() throws Exception {
    Class<?> aClass = prepareTest();
    assertNotNull(aClass.getDeclaredConstructor().newInstance());
  }

  @Test
  public void testNonStaticInnerClass() throws Exception {
    Class<?> testClass = prepareTest();
    assertNotNull(testClass.getDeclaredConstructor().newInstance());
    verifyCallThrowsException(
      "Argument 1 for @NotNull parameter of NonStaticInnerClass$Inner.<init> must not be null",
      null, testClass.getMethod("fail"));
  }

  @Test
  public void testGroovyInnerClass() throws Exception {
    Class<?> testClass = prepareTest();
    assertNotNull(testClass.getDeclaredConstructor().newInstance());
    verifyCallThrowsException(
      "Argument for @NotNull parameter 's2' of GroovyInnerClass$Inner.<init> must not be null",
      null, testClass.getMethod("fail"));
  }

  @Test
  public void testSkipBridgeMethods() throws Exception {
    Class<?> testClass = prepareTest();
    try {
      testClass.getMethod("main").invoke(null);
      fail();
    }
    catch (InvocationTargetException e) {
      assertInstanceOf(e.getCause(), IllegalArgumentException.class);
      String trace = ExceptionUtil.getThrowableText(e.getCause());
      // Only the real method should be instrumented, not the synthetic bridge.
      assertEquals("Exception should happen in real, non-bridge method: " + trace,
                   2, StringUtil.getOccurrenceCount(trace, "B.getObject(SkipBridgeMethods"));
    }
  }

  @Test
  public void testMultipleMessages() throws Exception {
    Class<?> test = prepareTest();
    Object instance = test.getDeclaredConstructor().newInstance();
    verifyCallThrowsException("Argument 0 for @NotNull parameter of MultipleMessages.bar1 must not be null", instance, test.getMethod("bar1", Object.class), (Object)null);
    verifyCallThrowsException("Argument 0 for @NotNull parameter of MultipleMessages.bar2 must not be null", instance, test.getMethod("bar2", Object.class), (Object)null);
    verifyCallThrowsException("@NotNull method MultipleMessages.foo1 must not return null", instance, test.getMethod("foo1"));
    verifyCallThrowsException("@NotNull method MultipleMessages.foo2 must not return null", instance, test.getMethod("foo2"));
  }

  @Test
  public void testMultipleAnnotations() throws Exception {
    Class<?> test = prepareTest(false, "FooAnno", "BarAnno");
    Object instance = test.getDeclaredConstructor().newInstance();
    verifyCallThrowsException("@FooAnno method MultipleAnnotations.foo1 must not return null", instance, test.getMethod("foo1"));
    verifyCallThrowsException("@BarAnno method MultipleAnnotations.foo2 must not return null", instance, test.getMethod("foo2"));
  }

  @Test
  public void testTypeUseOnlyAnnotations() throws Exception {
    Class<?> test = prepareTest(false, "FooAnno");
    Object instance = test.getDeclaredConstructor().newInstance();
    verifyCallThrowsException("@FooAnno method TypeUseOnlyAnnotations.foo1 must not return null", instance, test.getMethod("foo1"));
    verifyCallThrowsException("Argument 0 for @FooAnno parameter of TypeUseOnlyAnnotations.foo2 must not be null", instance, test.getMethod("foo2", String.class), (String)null);
    // foo3 takes a List whose type argument is annotated; the parameter itself
    // is not annotated, so a null argument must be accepted.
    test.getMethod("foo3", List.class).invoke(instance, new Object[]{null});
  }

  @Test
  public void testTypeUseInEnumConstructor() throws Exception {
    Class<?> test = prepareTest(false, "TypeUseNotNull");
    assertEquals(1, test.getEnumConstants().length);
  }

  @Test
  public void testTypeUseAndMemberAnnotations() throws Exception {
    Class<?> test = prepareTest(false, "FooAnno");
    Object instance = test.getDeclaredConstructor().newInstance();
    verifyCallThrowsException("@FooAnno method TypeUseAndMemberAnnotations.foo1 must not return null", instance, test.getMethod("foo1"));
    verifyCallThrowsException("Argument 0 for @FooAnno parameter of TypeUseAndMemberAnnotations.foo2 must not be null", instance, test.getMethod("foo2", String.class), (String)null);
    Method returnType = test.getMethod("returnType");
    verifyCallThrowsException("@FooAnno method TypeUseAndMemberAnnotations.returnType must not return null", instance, returnType);
    // The annotation must survive instrumentation on both the method and its annotated return type.
    assertEquals(1, returnType.getAnnotations().length);
    assertEquals(1, returnType.getAnnotatedReturnType().getAnnotations().length);
  }

  /**
   * Extra tests that only make sense when the annotation has a TYPE_USE
   * target (arrays: annotation on the array vs. on the component type).
   */
  public static abstract class WithTypeUse extends NotNullVerifyingInstrumenterTest {
    @Test
    public void testTypeUseAndMemberAnnotationsOnArrays() throws Exception {
      Class<?> test = prepareTest();
      Object instance = test.getDeclaredConstructor().newInstance();
      Object[] singleNullArg = {null};
      verifyCallThrowsException("Argument 0 for @NotNull parameter of TypeUseAndMemberAnnotationsOnArrays.notNullArray must not be null", instance, test.getMethod("notNullArray", String[].class), singleNullArg);
      test.getMethod("nullableArray", String[].class).invoke(instance, singleNullArg);
      verifyCallThrowsException("@NotNull method TypeUseAndMemberAnnotationsOnArrays.notNullReturn must not return null", instance, test.getMethod("notNullReturn"));
      assertNull(test.getMethod("nullableReturn").invoke(instance));
    }
  }

  @Test
  public void testMalformedBytecode() throws Exception {
    // FailSafeClassReader must tolerate the broken fixture bytecode.
    Class<?> testClass = prepareTest();
    verifyCallThrowsException("Argument 0 for @NotNull parameter of MalformedBytecode$NullTest2.handle must not be null", null, testClass.getMethod("main"));
  }

  @Test
  public void testEnclosingClass() throws Exception {
    // Instrumentation must not corrupt EnclosingMethod/InnerClasses attributes.
    Class<?> testClass = prepareTest();
    Object obj1 = testClass.getMethod("fromStatic").invoke(null);
    assertEquals(testClass, obj1.getClass().getEnclosingClass());
    Object obj2 = testClass.getMethod("fromInstance").invoke(testClass.getDeclaredConstructor().newInstance());
    assertEquals(testClass, obj2.getClass().getEnclosingClass());
  }

  @Test
  public void testLocalClassImplicitParameters() throws Exception {
    // Local/anonymous/inner classes have compiler-synthesized leading
    // constructor parameters; parameter indices reported must account for them.
    Class<?> test = prepareTest(true, "NotNull");
    Object instance = test.getDeclaredConstructor().newInstance();
    assertEquals(42, test.getMethod("ok").invoke(instance));
    verifyCallThrowsException("Argument for @NotNull parameter 'test' of LocalClassImplicitParameters$1Test.<init> must not be null", instance, test.getMethod("failLocal"));
    verifyCallThrowsException("Argument for @NotNull parameter 'test' of LocalClassImplicitParameters$1Test2.<init> must not be null", instance, test.getMethod("failLocal2NotNull"));
    verifyCallThrowsException("Argument for @NotNull parameter 'another' of LocalClassImplicitParameters$1Test3.<init> must not be null", instance, test.getMethod("failLocalNullableNotNull"));
    verifyCallThrowsException("Argument for @NotNull parameter 'test' of LocalClassImplicitParameters$1.method must not be null", instance, test.getMethod("failAnonymous"));
    verifyCallThrowsException("Argument for @NotNull parameter 'param' of LocalClassImplicitParameters$Inner.<init> must not be null", instance, test.getMethod("failInner"));
  }

  // The following fixtures return provably non-null values (constants, new
  // objects/arrays, calls to private/final/static @NotNull methods); the
  // instrumenter must skip emitting a runtime check entirely.
  @Test public void testNoCheckForConstant() throws Exception { verifyNotInstrumented(); }
  @Test public void testNoCheckForNewObject() throws Exception { verifyNotInstrumented(); }
  @Test public void testNoCheckForNewConstructorCall() throws Exception { verifyNotInstrumented(); }
  @Test public void testNoCheckForNewArray() throws Exception { verifyNotInstrumented(); }
  @Test public void testNoCheckForNewMultiArray() throws Exception { verifyNotInstrumented(); }
  @Test public void testNoCheckForPrivateNotNullMethodCall() throws Exception { verifyNotInstrumented(); }
  @Test public void testNoCheckForFinalNotNullMethodCall() throws Exception { verifyNotInstrumented(); }
  @Test public void testNoCheckForStaticNotNullMethodCall() throws Exception { verifyNotInstrumented(); }

  @Test
  public void testInterfaceStaticMethodParameter() throws Exception {
    Class<?> testClass = prepareTest();
    Method method = testClass.getMethod("test");
    verifyCallThrowsException("Argument 0 for @NotNull parameter of I.test must not be null", null, method);
  }

  @Test
  public void testInterfaceDefaultMethodParameter() throws Exception {
    Class<?> testClass = prepareTest();
    Method method = testClass.getMethod("test");
    verifyCallThrowsException("Argument 0 for @NotNull parameter of I.test must not be null", null, method);
  }

  /**
   * Invokes the given constructor or method reflectively and asserts that the
   * instrumenter-generated check threw with exactly {@code expectedError}.
   *
   * @param expectedError the exact violation message expected, or the assert
   *                      fails if the call completed without throwing
   * @param instance      receiver for instance methods; ignored for constructors
   *                      and static methods (may be null)
   * @param member        the constructor or method to invoke
   * @param args          reflective invocation arguments
   * @throws Exception    rethrows the InvocationTargetException if the cause is
   *                      not one of the instrumenter's exception types
   */
  protected static void verifyCallThrowsException(String expectedError, @Nullable Object instance, Member member, Object... args) throws Exception {
    String exceptionText = null;
    try {
      if (member instanceof Constructor) {
        ((Constructor<?>)member).newInstance(args);
      }
      else {
        ((Method)member).invoke(instance, args);
      }
    }
    catch (InvocationTargetException ex) {
      Throwable cause = ex.getCause();
      if (cause instanceof IllegalStateException || cause instanceof IllegalArgumentException) {
        exceptionText = cause.getMessage();
      }
      else {
        throw ex;
      }
    }
    assertEquals(expectedError, exceptionText);
  }

  /** Compiles and instruments the fixture named after the current test, using the default @NotNull annotation. */
  protected Class<?> prepareTest() throws IOException {
    return prepareTest(false, AnnotationUtil.NOT_NULL);
  }

  protected Class<?> prepareTest(boolean withDebugInfo, String... notNullAnnotations) throws IOException {
    return prepareTest(withDebugInfo, true, notNullAnnotations);
  }

  /** Runs the pipeline expecting the instrumenter to leave the class file untouched. */
  protected void verifyNotInstrumented() throws IOException {
    prepareTest(false, false, AnnotationUtil.NOT_NULL);
  }

  /**
   * Full pipeline: locate the fixture source named after the current test,
   * compile it against the precompiled annotations, instrument every produced
   * class file, define the results in an isolated class loader, and return the
   * main fixture class.
   *
   * @param withDebugInfo      compile with -g so parameter names are available
   * @param expectInstrumented whether the instrumenter is expected to modify at least one class
   * @param notNullAnnotations annotation names passed to the instrumenter
   */
  protected Class<?> prepareTest(boolean withDebugInfo, boolean expectInstrumented, String... notNullAnnotations) throws IOException {
    String testName = PlatformTestUtil.getTestName(this.testName.getMethodName(), false);
    File testFile = IdeaTestUtil.findSourceFile((JavaTestUtil.getJavaTestDataPath() + TEST_DATA_PATH) + testName);
    File classesDir = tempDir.newDirectory("output");

    List<String> args = ContainerUtil.newArrayList("-cp", annotation.classes.getPath());
    if (withDebugInfo) args.add("-g");
    IdeaTestUtil.compileFile(testFile, classesDir, ArrayUtil.toStringArray(args));

    File[] files = classesDir.listFiles();
    assertNotNull(files);
    // Deterministic order so inner classes are defined predictably relative to their outer class.
    Arrays.sort(files, (o1, o2) -> o1.getName().compareToIgnoreCase(o2.getName()));

    boolean modified = false;
    MyClassLoader classLoader = new MyClassLoader(getClass().getClassLoader());
    Class<?> mainClass = null;
    for (File file : files) {
      ClassReader reader = new FailSafeClassReader(FileUtil.loadFileBytes(file));
      int flags = InstrumenterClassWriter.getAsmClassWriterFlags(InstrumenterClassWriter.getClassFileVersion(reader));
      ClassWriter writer = new ClassWriter(reader, flags);
      modified |= NotNullVerifyingInstrumenter.processClassFile(reader, writer, notNullAnnotations);
      String className = FileUtil.getNameWithoutExtension(file.getName());
      Class<?> aClass = classLoader.doDefineClass(className, writer.toByteArray());
      if (className.equals(testName)) {
        mainClass = aClass;
      }
    }

    if (expectInstrumented) {
      assertTrue("Class file not instrumented!", modified);
    }
    else {
      assertFalse("Class file instrumented, but should have not!", modified);
    }
    assertNotNull("Class " + testName + " not found!", mainClass);
    return mainClass;
  }

  /** Class loader that exposes defineClass so instrumented bytes can be loaded in isolation. */
  private static class MyClassLoader extends ClassLoader {
    MyClassLoader(ClassLoader parent) {
      super(parent);
    }

    public Class<?> doDefineClass(String name, byte[] data) {
      return defineClass(name, data, 0, data.length);
    }
  }
}
package org.docksidestage.hangar.dbflute.bsbhv;

import java.util.List;

import org.dbflute.*;
import org.dbflute.bhv.readable.*;
import org.dbflute.bhv.writable.*;
import org.dbflute.bhv.writable.coins.DateUpdateAdjuster;
import org.dbflute.bhv.referrer.*;
import org.dbflute.cbean.*;
import org.dbflute.cbean.chelper.HpSLSFunction;
import org.dbflute.cbean.result.*;
import org.dbflute.exception.*;
import org.dbflute.optional.OptionalEntity;
import org.dbflute.outsidesql.executor.*;
import org.docksidestage.hangar.dbflute.exbhv.*;
import org.docksidestage.hangar.dbflute.bsbhv.loader.*;
import org.docksidestage.hangar.dbflute.exentity.*;
import org.docksidestage.hangar.dbflute.bsentity.dbmeta.*;
import org.docksidestage.hangar.dbflute.cbean.*;

/**
 * The behavior of VENDOR_PRIMARY_KEY_ONLY as TABLE. <br>
 * Auto-generated by DBFlute; do not edit by hand — changes are lost on regeneration.
 * <pre>
 * [primary key]
 *     PRIMARY_KEY_ONLY_ID
 *
 * [column]
 *     PRIMARY_KEY_ONLY_ID
 *
 * [sequence]
 *
 * [identity]
 *
 * [version-no]
 *
 * [foreign table]
 *
 * [referrer table]
 *
 * [foreign property]
 *
 * [referrer property]
 * </pre>
 * @author DBFlute(AutoGenerator)
 */
public abstract class BsVendorPrimaryKeyOnlyBhv extends org.docksidestage.hangar.dbflute.nogen.ExtendedAbstractBehaviorWritable<VendorPrimaryKeyOnly, VendorPrimaryKeyOnlyCB> {

    // ===================================================================================
    //                                                                          Definition
    //                                                                          ==========
    // sql2entity marker comments: DBFlute rewrites the region between them, keep as-is
    /*df:beginQueryPath*/
    /*df:endQueryPath*/

    // ===================================================================================
    //                                                                             DB Meta
    //                                                                             =======
    /** {@inheritDoc} */
    public VendorPrimaryKeyOnlyDbm asDBMeta() { return VendorPrimaryKeyOnlyDbm.getInstance(); }

    /** {@inheritDoc} */
    public String asTableDbName() { return "VENDOR_PRIMARY_KEY_ONLY"; }

    // ===================================================================================
    //                                                                        New Instance
    //                                                                        ============
    /** {@inheritDoc} */
    public VendorPrimaryKeyOnlyCB newConditionBean() { return new VendorPrimaryKeyOnlyCB(); }

    // ===================================================================================
    //                                                                        Count Select
    //                                                                        ============
    /**
     * Select the count of uniquely-selected records by the condition-bean. {IgnorePagingCondition, IgnoreSpecifyColumn}<br>
     * SpecifyColumn is ignored but you can use it only to remove text type column for union's distinct.
     * @param cbLambda The callback for condition-bean of VendorPrimaryKeyOnly. (NotNull)
     * @return The count for the condition. (NotMinus)
     */
    public int selectCount(CBCall<VendorPrimaryKeyOnlyCB> cbLambda) { return facadeSelectCount(createCB(cbLambda)); }

    // ===================================================================================
    //                                                                       Entity Select
    //                                                                       =============
    /**
     * Select the entity by the condition-bean; returns a not-null optional entity. <br>
     * Use alwaysPresent() when the row must exist by business rule, ifPresent()/orElse() when it may not.
     * @param cbLambda The callback for condition-bean of VendorPrimaryKeyOnly. (NotNull)
     * @return The optional entity selected by the condition. (NotNull: if no data, empty entity)
     * @throws EntityAlreadyDeletedException When get(), required() of return value is called and the value is null, which means entity has already been deleted (not found).
     * @throws EntityDuplicatedException When the entity has been duplicated.
     * @throws SelectEntityConditionNotFoundException When the condition for selecting an entity is not found.
     */
    public OptionalEntity<VendorPrimaryKeyOnly> selectEntity(CBCall<VendorPrimaryKeyOnlyCB> cbLambda) { return facadeSelectEntity(createCB(cbLambda)); }

    protected OptionalEntity<VendorPrimaryKeyOnly> facadeSelectEntity(VendorPrimaryKeyOnlyCB cb) { return doSelectOptionalEntity(cb, typeOfSelectedEntity()); }

    protected <ENTITY extends VendorPrimaryKeyOnly> OptionalEntity<ENTITY> doSelectOptionalEntity(VendorPrimaryKeyOnlyCB cb, Class<? extends ENTITY> tp) { return createOptionalEntity(doSelectEntity(cb, tp), cb); }

    // Framework entry point: non-optional read used by the generic behavior machinery.
    protected Entity doReadEntity(ConditionBean cb) { return facadeSelectEntity(downcast(cb)).orElse(null); }

    /**
     * Select the entity by the condition-bean with deleted check; the row must exist. <br>
     * Good when the data is always present as your business rule.
     * @param cbLambda The callback for condition-bean of VendorPrimaryKeyOnly. (NotNull)
     * @return The entity selected by the condition. (NotNull: if no data, throws exception)
     * @throws EntityAlreadyDeletedException When the entity has already been deleted. (not found)
     * @throws EntityDuplicatedException When the entity has been duplicated.
     * @throws SelectEntityConditionNotFoundException When the condition for selecting an entity is not found.
     */
    public VendorPrimaryKeyOnly selectEntityWithDeletedCheck(CBCall<VendorPrimaryKeyOnlyCB> cbLambda) { return facadeSelectEntityWithDeletedCheck(createCB(cbLambda)); }

    /**
     * Select the entity by the primary-key value.
     * @param primaryKeyOnlyId : PK, NotNull, BIGINT(19). (NotNull)
     * @return The optional entity selected by the PK. (NotNull: if no data, empty entity)
     * @throws EntityAlreadyDeletedException When get(), required() of return value is called and the value is null, which means entity has already been deleted (not found).
     * @throws EntityDuplicatedException When the entity has been duplicated.
     * @throws SelectEntityConditionNotFoundException When the condition for selecting an entity is not found.
     */
    public OptionalEntity<VendorPrimaryKeyOnly> selectByPK(Long primaryKeyOnlyId) { return facadeSelectByPK(primaryKeyOnlyId); }

    protected OptionalEntity<VendorPrimaryKeyOnly> facadeSelectByPK(Long primaryKeyOnlyId) { return doSelectOptionalByPK(primaryKeyOnlyId, typeOfSelectedEntity()); }

    protected <ENTITY extends VendorPrimaryKeyOnly> ENTITY doSelectByPK(Long primaryKeyOnlyId, Class<? extends ENTITY> tp) { return doSelectEntity(xprepareCBAsPK(primaryKeyOnlyId), tp); }

    protected <ENTITY extends VendorPrimaryKeyOnly> OptionalEntity<ENTITY> doSelectOptionalByPK(Long primaryKeyOnlyId, Class<? extends ENTITY> tp) { return createOptionalEntity(doSelectByPK(primaryKeyOnlyId, tp), primaryKeyOnlyId); }

    // Builds a condition-bean keyed by the (non-null) primary key.
    protected VendorPrimaryKeyOnlyCB xprepareCBAsPK(Long primaryKeyOnlyId) { assertObjectNotNull("primaryKeyOnlyId", primaryKeyOnlyId); return newConditionBean().acceptPK(primaryKeyOnlyId); }

    // ===================================================================================
    //                                                                         List Select
    //                                                                         ===========
    /**
     * Select the list as result bean.
     * <pre>
     * ListResultBean&lt;VendorPrimaryKeyOnly&gt; vendorPrimaryKeyOnlyList = vendorPrimaryKeyOnlyBhv.selectList(cb -&gt; {
     *     cb.query().set...;
     *     cb.query().addOrderBy...;
     * });
     * </pre>
     * @param cbLambda The callback for condition-bean of VendorPrimaryKeyOnly. (NotNull)
     * @return The result bean of selected list. (NotNull: if no data, returns empty list)
     * @throws DangerousResultSizeException When the result size is over the specified safety size.
     */
    public ListResultBean<VendorPrimaryKeyOnly> selectList(CBCall<VendorPrimaryKeyOnlyCB> cbLambda) { return facadeSelectList(createCB(cbLambda)); }

    // Derived-mapping of entities is enabled for this behavior.
    @Override
    protected boolean isEntityDerivedMappable() { return true; }

    // ===================================================================================
    //                                                                         Page Select
    //                                                                         ===========
    /**
     * Select the page as result bean. <br>
     * (both count-select and paging-select are executed)
     * @param cbLambda The callback for condition-bean of VendorPrimaryKeyOnly. (NotNull)
     * @return The result bean of selected page. (NotNull: if no data, returns bean as empty list)
     * @throws DangerousResultSizeException When the result size is over the specified safety size.
     */
    public PagingResultBean<VendorPrimaryKeyOnly> selectPage(CBCall<VendorPrimaryKeyOnlyCB> cbLambda) { return facadeSelectPage(createCB(cbLambda)); }

    // ===================================================================================
    //                                                                       Cursor Select
    //                                                                       =============
    /**
     * Select the cursor by the condition-bean, handing each selected row to the entity handler.
     * @param cbLambda The callback for condition-bean of VendorPrimaryKeyOnly. (NotNull)
     * @param entityLambda The handler of entity row of VendorPrimaryKeyOnly. (NotNull)
     */
    public void selectCursor(CBCall<VendorPrimaryKeyOnlyCB> cbLambda, EntityRowHandler<VendorPrimaryKeyOnly> entityLambda) { facadeSelectCursor(createCB(cbLambda), entityLambda); }

    // ===================================================================================
    //                                                                       Scalar Select
    //                                                                       =============
    /**
     * Select the scalar value derived by a function from uniquely-selected records. <br>
     * Call a function method (e.g. max()) on the returned object, specifying the column inside.
     * @param <RESULT> The type of result.
     * @param resultType The type of result. (NotNull)
     * @return The scalar function object to specify function for scalar value. (NotNull)
     */
    public <RESULT> HpSLSFunction<VendorPrimaryKeyOnlyCB, RESULT> selectScalar(Class<RESULT> resultType) { return facadeScalarSelect(resultType); }

    // ===================================================================================
    //                                                                            Sequence
    //                                                                            ========
    /** Always fails: VENDOR_PRIMARY_KEY_ONLY is not related to a sequence. */
    @Override
    protected Number doReadNextVal() { String msg = "This table is NOT related to sequence: " + asTableDbName(); throw new UnsupportedOperationException(msg); }

    // ===================================================================================
    //                                                                       Load Referrer
    //                                                                       =============
    /**
     * Load referrer for the list by the referrer loader.
     * <pre>
     * List&lt;Member&gt; memberList = memberBhv.selectList(cb -&gt; { ... });
     * memberBhv.load(memberList, memberLoader -&gt; {
     *     memberLoader.loadPurchase(purchaseCB -&gt; { ... }); // nested referrers can be chained
     * });
     * </pre>
     * About internal policy, the value of primary key (and others too) is treated as case-insensitive. <br>
     * The condition-bean, which the set-upper provides, has order by FK before callback.
     * @param vendorPrimaryKeyOnlyList The entity list of vendorPrimaryKeyOnly. (NotNull)
     * @param loaderLambda The callback to handle the referrer loader for actually loading referrer. (NotNull)
     */
    public void load(List<VendorPrimaryKeyOnly> vendorPrimaryKeyOnlyList, ReferrerLoaderHandler<LoaderOfVendorPrimaryKeyOnly> loaderLambda) { xassLRArg(vendorPrimaryKeyOnlyList, loaderLambda); loaderLambda.handle(new LoaderOfVendorPrimaryKeyOnly().ready(vendorPrimaryKeyOnlyList, _behaviorSelector)); }

    /**
     * Load referrer for the entity by the referrer loader (single-entity variant of load(list, ...)). <br>
     * About internal policy, the value of primary key (and others too) is treated as case-insensitive. <br>
     * The condition-bean, which the set-upper provides, has order by FK before callback.
     * @param vendorPrimaryKeyOnly The entity of vendorPrimaryKeyOnly. (NotNull)
     * @param loaderLambda The callback to handle the referrer loader for actually loading referrer. (NotNull)
     */
    public void load(VendorPrimaryKeyOnly vendorPrimaryKeyOnly, ReferrerLoaderHandler<LoaderOfVendorPrimaryKeyOnly> loaderLambda) { xassLRArg(vendorPrimaryKeyOnly, loaderLambda); loaderLambda.handle(new LoaderOfVendorPrimaryKeyOnly().ready(xnewLRAryLs(vendorPrimaryKeyOnly), _behaviorSelector)); }

    // ===================================================================================
    //                                                                   Pull out Relation
    //                                                                   =================
    // (no foreign properties on this table)

    // ===================================================================================
    //                                                                      Extract Column
    //                                                                      ==============
    /**
     * Extract the value list of (single) primary key primaryKeyOnlyId.
     * @param vendorPrimaryKeyOnlyList The list of vendorPrimaryKeyOnly. (NotNull, EmptyAllowed)
     * @return The list of the column value.
     * (NotNull, EmptyAllowed, NotNullElement)
     */
    public List<Long> extractPrimaryKeyOnlyIdList(List<VendorPrimaryKeyOnly> vendorPrimaryKeyOnlyList) { return helpExtractListInternally(vendorPrimaryKeyOnlyList, "primaryKeyOnlyId"); }

    // ===================================================================================
    //                                                                       Entity Update
    //                                                                       =============
    /**
     * Insert the entity modified-only. (DefaultConstraintsEnabled) <br>
     * Only columns set on the entity are registered; when the entity is created by select, all columns are registered.
     * @param vendorPrimaryKeyOnly The entity of insert. (NotNull, PrimaryKeyNullAllowed: when auto-increment)
     * @throws EntityAlreadyExistsException When the entity already exists. (unique constraint violation)
     */
    public void insert(VendorPrimaryKeyOnly vendorPrimaryKeyOnly) { doInsert(vendorPrimaryKeyOnly, null); }

    /**
     * Update the entity modified-only. (ZeroUpdateException, NonExclusiveControl) <br>
     * By PK as default, and also you can update by unique keys using entity's uniqueOf().
     * @param vendorPrimaryKeyOnly The entity of update. (NotNull, PrimaryKeyNotNull)
     * @throws EntityAlreadyDeletedException When the entity has already been deleted. (not found)
     * @throws EntityDuplicatedException When the entity has been duplicated.
     * @throws EntityAlreadyExistsException When the entity already exists. (unique constraint violation)
     */
    public void update(VendorPrimaryKeyOnly vendorPrimaryKeyOnly) { doUpdate(vendorPrimaryKeyOnly, null); }

    /**
     * Insert or update the entity modified-only. (DefaultConstraintsEnabled, NonExclusiveControl) <br>
     * if (the entity has no PK) { insert() } else { update(), but no data, insert() } <br>
     * Also you can update by unique keys using entity's uniqueOf().
     * @param vendorPrimaryKeyOnly The entity of insert or update. (NotNull, ...depends on insert or update)
     * @throws EntityAlreadyDeletedException When the entity has already been deleted. (not found)
     * @throws EntityDuplicatedException When the entity has been duplicated.
     * @throws EntityAlreadyExistsException When the entity already exists. (unique constraint violation)
     */
    public void insertOrUpdate(VendorPrimaryKeyOnly vendorPrimaryKeyOnly) { doInsertOrUpdate(vendorPrimaryKeyOnly, null, null); }

    /**
     * Delete the entity. (ZeroUpdateException, NonExclusiveControl) <br>
     * By PK as default, and also you can delete by unique keys using entity's uniqueOf().
     * @param vendorPrimaryKeyOnly The entity of delete. (NotNull, PrimaryKeyNotNull)
     * @throws EntityAlreadyDeletedException When the entity has already been deleted. (not found)
     * @throws EntityDuplicatedException When the entity has been duplicated.
     */
    public void delete(VendorPrimaryKeyOnly vendorPrimaryKeyOnly) { doDelete(vendorPrimaryKeyOnly, null); }

    // ===================================================================================
    //                                                                        Batch Update
    //                                                                        ============
    /**
     * Batch-insert the entity list modified-only of same-set columns. (DefaultConstraintsEnabled) <br>
     * This method uses executeBatch() of java.sql.PreparedStatement. <br>
     * The columns of least common multiple are registered like this:
     * <pre>
     * <span style="color: #70226C">for</span> (... : ...)
     * {
     *     VendorPrimaryKeyOnly vendorPrimaryKeyOnly = new VendorPrimaryKeyOnly();
     *     vendorPrimaryKeyOnly.setFooName("foo");
     *     vendorPrimaryKeyOnlyList.add(vendorPrimaryKeyOnly);
     * }
     * vendorPrimaryKeyOnlyBhv.batchInsert(vendorPrimaryKeyOnlyList);
     * </pre>
     * Columns not set on any entity are registered as null or default value. <br>
     * If the table has an identity, entities after the process do NOT have the incremented values
     * (unlike the (normal) insert(), which reflects them back onto the entity).
     * @param vendorPrimaryKeyOnlyList The list of the entity. (NotNull, EmptyAllowed, PrimaryKeyNullAllowed: when auto-increment)
     * @return The array of inserted count. (NotNull, EmptyAllowed)
     */
    public int[] batchInsert(List<VendorPrimaryKeyOnly> vendorPrimaryKeyOnlyList) { return doBatchInsert(vendorPrimaryKeyOnlyList, null); }

    /**
     * Batch-update the entity list modified-only of same-set columns. (NonExclusiveControl) <br>
     * This method uses executeBatch() of java.sql.PreparedStatement. <br>
     * You should specify the same set of columns on all entities (fragmented column sets are not allowed).
     * @param vendorPrimaryKeyOnlyList The list of the entity. (NotNull, EmptyAllowed, PrimaryKeyNotNull)
     * @return The array of updated count. (NotNull, EmptyAllowed)
     * @throws EntityAlreadyDeletedException When the entity has already been deleted. (not found)
     */
    public int[] batchUpdate(List<VendorPrimaryKeyOnly> vendorPrimaryKeyOnlyList) { return doBatchUpdate(vendorPrimaryKeyOnlyList, null); }

    /**
     * Batch-delete the entity list. (NonExclusiveControl) <br>
     * This method uses executeBatch() of java.sql.PreparedStatement.
     * @param vendorPrimaryKeyOnlyList The list of the entity. (NotNull, EmptyAllowed, PrimaryKeyNotNull)
     * @return The array of deleted count. (NotNull, EmptyAllowed)
     * @throws EntityAlreadyDeletedException When the entity has already been deleted. (not found)
     */
    public int[] batchDelete(List<VendorPrimaryKeyOnly> vendorPrimaryKeyOnlyList) { return doBatchDelete(vendorPrimaryKeyOnlyList, null); }

    // ===================================================================================
    //                                                                        Query Update
    //                                                                        ============
    /**
     * Insert the several entities by query (modified-only for fixed value), mapping
     * columns from a select condition-bean via intoCB.specify()...mappedFrom(...).
     * Common columns and the concurrency column are set up automatically.
     * @param manyArgLambda The callback to set up query-insert. (NotNull)
     * @return The inserted count.
     */
    public int queryInsert(QueryInsertSetupper<VendorPrimaryKeyOnly, VendorPrimaryKeyOnlyCB> manyArgLambda) { return doQueryInsert(manyArgLambda, null); }

    /**
     * Update the several entities by query non-strictly modified-only.
     * (NonExclusiveControl) <br>
     * Set only the columns to update on the entity (no PK needed); rows matching the
     * condition-bean are updated. Common columns and version-no increment are automatic.
     * @param vendorPrimaryKeyOnly The entity that contains update values. (NotNull, PrimaryKeyNullAllowed)
     * @param cbLambda The callback for condition-bean of VendorPrimaryKeyOnly. (NotNull)
     * @return The updated count.
     * @throws NonQueryUpdateNotAllowedException When the query has no condition.
     */
    public int queryUpdate(VendorPrimaryKeyOnly vendorPrimaryKeyOnly, CBCall<VendorPrimaryKeyOnlyCB> cbLambda) { return doQueryUpdate(vendorPrimaryKeyOnly, createCB(cbLambda), null); }

    /**
     * Delete the several entities by query. (NonExclusiveControl)
     * @param cbLambda The callback for condition-bean of VendorPrimaryKeyOnly. (NotNull)
     * @return The deleted count.
     * @throws NonQueryDeleteNotAllowedException When the query has no condition.
     */
    public int queryDelete(CBCall<VendorPrimaryKeyOnlyCB> cbLambda) { return doQueryDelete(createCB(cbLambda), null); }

    // ===================================================================================
    //                                                                      Varying Update
    //                                                                      ==============
    // -----------------------------------------------------
    //                                         Entity Update
    //                                         -------------
    /**
     * Insert the entity with varying requests, e.g. disableCommonColumnAutoSetup(),
     * disablePrimaryKeyIdentity(). <br>
     * Other specifications are same as insert(entity).
     * @param vendorPrimaryKeyOnly The entity of insert. (NotNull, PrimaryKeyNullAllowed: when auto-increment)
     * @param opLambda The callback for option of insert for varying requests. (NotNull)
     * @throws EntityAlreadyExistsException When the entity already exists. (unique constraint violation)
     */
    public void varyingInsert(VendorPrimaryKeyOnly vendorPrimaryKeyOnly, WritableOptionCall<VendorPrimaryKeyOnlyCB, InsertOption<VendorPrimaryKeyOnlyCB>> opLambda) { doInsert(vendorPrimaryKeyOnly, createInsertOption(opLambda)); }

    /**
     * Update the entity with varying requests modified-only. (ZeroUpdateException, NonExclusiveControl) <br>
     * For example, self(selfCalculationSpecification), specify(updateColumnSpecification), disableCommonColumnAutoSetup(). <br>
     * Other specifications are same as update(entity).
     * <pre>
     * VendorPrimaryKeyOnly vendorPrimaryKeyOnly = <span style="color: #70226C">new</span> VendorPrimaryKeyOnly();
     * vendorPrimaryKeyOnly.setPK...(value); <span style="color: #3F7E5E">// required</span>
     * vendorPrimaryKeyOnly.setOther...(value); <span style="color: #3F7E5E">// you should set only modified columns</span>
     * <span style="color: #3F7E5E">// if exclusive control, the value of concurrency column is required</span>
     * vendorPrimaryKeyOnly.<span style="color: #CC4747">setVersionNo</span>(value);
     * <span style="color: #3F7E5E">// you can update by self calculation values</span>
     * <span style="color: #0000C0">vendorPrimaryKeyOnlyBhv</span>.<span style="color: #CC4747">varyingUpdate</span>(vendorPrimaryKeyOnly, <span style="color: #553000">op</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>&gt;</span> {
     * <span style="color: #553000">op</span>.self(<span style="color: #553000">cb</span> <span
style="color: #553000">cb</span>.specify().<span style="color: #CC4747">columnXxxCount()</span>; * }).plus(1); <span style="color: #3F7E5E">// XXX_COUNT = XXX_COUNT + 1</span> * }); * </pre> * @param vendorPrimaryKeyOnly The entity of update. (NotNull, PrimaryKeyNotNull) * @param opLambda The callback for option of update for varying requests. (NotNull) * @throws EntityAlreadyDeletedException When the entity has already been deleted. (not found) * @throws EntityDuplicatedException When the entity has been duplicated. * @throws EntityAlreadyExistsException When the entity already exists. (unique constraint violation) */ public void varyingUpdate(VendorPrimaryKeyOnly vendorPrimaryKeyOnly, WritableOptionCall<VendorPrimaryKeyOnlyCB, UpdateOption<VendorPrimaryKeyOnlyCB>> opLambda) { doUpdate(vendorPrimaryKeyOnly, createUpdateOption(opLambda)); } /** * Insert or update the entity with varying requests. (ExclusiveControl: when update) <br> * Other specifications are same as insertOrUpdate(entity). * @param vendorPrimaryKeyOnly The entity of insert or update. (NotNull) * @param insertOpLambda The callback for option of insert for varying requests. (NotNull) * @param updateOpLambda The callback for option of update for varying requests. (NotNull) * @throws EntityAlreadyDeletedException When the entity has already been deleted. (not found) * @throws EntityDuplicatedException When the entity has been duplicated. * @throws EntityAlreadyExistsException When the entity already exists. (unique constraint violation) */ public void varyingInsertOrUpdate(VendorPrimaryKeyOnly vendorPrimaryKeyOnly, WritableOptionCall<VendorPrimaryKeyOnlyCB, InsertOption<VendorPrimaryKeyOnlyCB>> insertOpLambda, WritableOptionCall<VendorPrimaryKeyOnlyCB, UpdateOption<VendorPrimaryKeyOnlyCB>> updateOpLambda) { doInsertOrUpdate(vendorPrimaryKeyOnly, createInsertOption(insertOpLambda), createUpdateOption(updateOpLambda)); } /** * Delete the entity with varying requests. 
(ZeroUpdateException, NonExclusiveControl) <br> * Now a valid option does not exist. <br> * Other specifications are same as delete(entity). * @param vendorPrimaryKeyOnly The entity of delete. (NotNull, PrimaryKeyNotNull, ConcurrencyColumnNotNull) * @param opLambda The callback for option of delete for varying requests. (NotNull) * @throws EntityAlreadyDeletedException When the entity has already been deleted. (not found) * @throws EntityDuplicatedException When the entity has been duplicated. */ public void varyingDelete(VendorPrimaryKeyOnly vendorPrimaryKeyOnly, WritableOptionCall<VendorPrimaryKeyOnlyCB, DeleteOption<VendorPrimaryKeyOnlyCB>> opLambda) { doDelete(vendorPrimaryKeyOnly, createDeleteOption(opLambda)); } // ----------------------------------------------------- // Batch Update // ------------ /** * Batch-insert the list with varying requests. <br> * For example, disableCommonColumnAutoSetup() * , disablePrimaryKeyIdentity(), limitBatchInsertLogging(). <br> * Other specifications are same as batchInsert(entityList). * @param vendorPrimaryKeyOnlyList The list of the entity. (NotNull, EmptyAllowed, PrimaryKeyNotNull) * @param opLambda The callback for option of insert for varying requests. (NotNull) * @return The array of updated count. (NotNull, EmptyAllowed) */ public int[] varyingBatchInsert(List<VendorPrimaryKeyOnly> vendorPrimaryKeyOnlyList, WritableOptionCall<VendorPrimaryKeyOnlyCB, InsertOption<VendorPrimaryKeyOnlyCB>> opLambda) { return doBatchInsert(vendorPrimaryKeyOnlyList, createInsertOption(opLambda)); } /** * Batch-update the list with varying requests. <br> * For example, self(selfCalculationSpecification), specify(updateColumnSpecification) * , disableCommonColumnAutoSetup(), limitBatchUpdateLogging(). <br> * Other specifications are same as batchUpdate(entityList). * @param vendorPrimaryKeyOnlyList The list of the entity. (NotNull, EmptyAllowed, PrimaryKeyNotNull) * @param opLambda The callback for option of update for varying requests. 
(NotNull) * @return The array of updated count. (NotNull, EmptyAllowed) */ public int[] varyingBatchUpdate(List<VendorPrimaryKeyOnly> vendorPrimaryKeyOnlyList, WritableOptionCall<VendorPrimaryKeyOnlyCB, UpdateOption<VendorPrimaryKeyOnlyCB>> opLambda) { return doBatchUpdate(vendorPrimaryKeyOnlyList, createUpdateOption(opLambda)); } /** * Batch-delete the list with varying requests. <br> * For example, limitBatchDeleteLogging(). <br> * Other specifications are same as batchDelete(entityList). * @param vendorPrimaryKeyOnlyList The list of the entity. (NotNull, EmptyAllowed, PrimaryKeyNotNull) * @param opLambda The callback for option of delete for varying requests. (NotNull) * @return The array of deleted count. (NotNull, EmptyAllowed) */ public int[] varyingBatchDelete(List<VendorPrimaryKeyOnly> vendorPrimaryKeyOnlyList, WritableOptionCall<VendorPrimaryKeyOnlyCB, DeleteOption<VendorPrimaryKeyOnlyCB>> opLambda) { return doBatchDelete(vendorPrimaryKeyOnlyList, createDeleteOption(opLambda)); } // ----------------------------------------------------- // Query Update // ------------ /** * Insert the several entities by query with varying requests (modified-only for fixed value). <br> * For example, disableCommonColumnAutoSetup(), disablePrimaryKeyIdentity(). <br> * Other specifications are same as queryInsert(entity, setupper). * @param manyArgLambda The set-upper of query-insert. (NotNull) * @param opLambda The callback for option of insert for varying requests. (NotNull) * @return The inserted count. */ public int varyingQueryInsert(QueryInsertSetupper<VendorPrimaryKeyOnly, VendorPrimaryKeyOnlyCB> manyArgLambda, WritableOptionCall<VendorPrimaryKeyOnlyCB, InsertOption<VendorPrimaryKeyOnlyCB>> opLambda) { return doQueryInsert(manyArgLambda, createInsertOption(opLambda)); } /** * Update the several entities by query with varying requests non-strictly modified-only. 
{NonExclusiveControl} <br> * For example, self(selfCalculationSpecification), specify(updateColumnSpecification) * , disableCommonColumnAutoSetup(), allowNonQueryUpdate(). <br> * Other specifications are same as queryUpdate(entity, cb). * <pre> * <span style="color: #3F7E5E">// ex) you can update by self calculation values</span> * VendorPrimaryKeyOnly vendorPrimaryKeyOnly = <span style="color: #70226C">new</span> VendorPrimaryKeyOnly(); * <span style="color: #3F7E5E">// you don't need to set PK value</span> * <span style="color: #3F7E5E">//vendorPrimaryKeyOnly.setPK...(value);</span> * vendorPrimaryKeyOnly.setOther...(value); <span style="color: #3F7E5E">// you should set only modified columns</span> * <span style="color: #3F7E5E">// you don't need to set a value of concurrency column</span> * <span style="color: #3F7E5E">// (auto-increment for version number is valid though non-exclusive control)</span> * <span style="color: #3F7E5E">//vendorPrimaryKeyOnly.setVersionNo(value);</span> * <span style="color: #0000C0">vendorPrimaryKeyOnlyBhv</span>.<span style="color: #CC4747">varyingQueryUpdate</span>(vendorPrimaryKeyOnly, <span style="color: #553000">cb</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>&gt;</span> { * <span style="color: #553000">cb</span>.query().setFoo... * }, <span style="color: #553000">op</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>&gt;</span> { * <span style="color: #553000">op</span>.self(<span style="color: #553000">colCB</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>&gt;</span> { * <span style="color: #553000">colCB</span>.specify().<span style="color: #CC4747">columnFooCount()</span>; * }).plus(1); <span style="color: #3F7E5E">// FOO_COUNT = FOO_COUNT + 1</span> * }); * </pre> * @param vendorPrimaryKeyOnly The entity that contains update values. 
(NotNull) {PrimaryKeyNotRequired} * @param cbLambda The callback for condition-bean of VendorPrimaryKeyOnly. (NotNull) * @param opLambda The callback for option of update for varying requests. (NotNull) * @return The updated count. * @throws NonQueryUpdateNotAllowedException When the query has no condition (if not allowed). */ public int varyingQueryUpdate(VendorPrimaryKeyOnly vendorPrimaryKeyOnly, CBCall<VendorPrimaryKeyOnlyCB> cbLambda, WritableOptionCall<VendorPrimaryKeyOnlyCB, UpdateOption<VendorPrimaryKeyOnlyCB>> opLambda) { return doQueryUpdate(vendorPrimaryKeyOnly, createCB(cbLambda), createUpdateOption(opLambda)); } /** * Delete the several entities by query with varying requests non-strictly. <br> * For example, allowNonQueryDelete(). <br> * Other specifications are same as queryDelete(cb). * <pre> * <span style="color: #0000C0">vendorPrimaryKeyOnlyBhv</span>.<span style="color: #CC4747">queryDelete</span>(vendorPrimaryKeyOnly, <span style="color: #553000">cb</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>&gt;</span> { * <span style="color: #553000">cb</span>.query().setFoo... * }, <span style="color: #553000">op</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>&gt;</span> { * <span style="color: #553000">op</span>... * }); * </pre> * @param cbLambda The callback for condition-bean of VendorPrimaryKeyOnly. (NotNull) * @param opLambda The callback for option of delete for varying requests. (NotNull) * @return The deleted count. * @throws NonQueryDeleteNotAllowedException When the query has no condition (if not allowed). 
*/ public int varyingQueryDelete(CBCall<VendorPrimaryKeyOnlyCB> cbLambda, WritableOptionCall<VendorPrimaryKeyOnlyCB, DeleteOption<VendorPrimaryKeyOnlyCB>> opLambda) { return doQueryDelete(createCB(cbLambda), createDeleteOption(opLambda)); } // =================================================================================== // OutsideSql // ========== /** * Prepare the all facade executor of outside-SQL to execute it. * <pre> * <span style="color: #3F7E5E">// main style</span> * vendorPrimaryKeyOnlyBhv.outideSql().selectEntity(pmb); <span style="color: #3F7E5E">// optional</span> * vendorPrimaryKeyOnlyBhv.outideSql().selectList(pmb); <span style="color: #3F7E5E">// ListResultBean</span> * vendorPrimaryKeyOnlyBhv.outideSql().selectPage(pmb); <span style="color: #3F7E5E">// PagingResultBean</span> * vendorPrimaryKeyOnlyBhv.outideSql().selectPagedListOnly(pmb); <span style="color: #3F7E5E">// ListResultBean</span> * vendorPrimaryKeyOnlyBhv.outideSql().selectCursor(pmb, handler); <span style="color: #3F7E5E">// (by handler)</span> * vendorPrimaryKeyOnlyBhv.outideSql().execute(pmb); <span style="color: #3F7E5E">// int (updated count)</span> * vendorPrimaryKeyOnlyBhv.outideSql().call(pmb); <span style="color: #3F7E5E">// void (pmb has OUT parameters)</span> * * <span style="color: #3F7E5E">// traditional style</span> * vendorPrimaryKeyOnlyBhv.outideSql().traditionalStyle().selectEntity(path, pmb, entityType); * vendorPrimaryKeyOnlyBhv.outideSql().traditionalStyle().selectList(path, pmb, entityType); * vendorPrimaryKeyOnlyBhv.outideSql().traditionalStyle().selectPage(path, pmb, entityType); * vendorPrimaryKeyOnlyBhv.outideSql().traditionalStyle().selectPagedListOnly(path, pmb, entityType); * vendorPrimaryKeyOnlyBhv.outideSql().traditionalStyle().selectCursor(path, pmb, handler); * vendorPrimaryKeyOnlyBhv.outideSql().traditionalStyle().execute(path, pmb); * * <span style="color: #3F7E5E">// options</span> * 
vendorPrimaryKeyOnlyBhv.outideSql().removeBlockComment().selectList() * vendorPrimaryKeyOnlyBhv.outideSql().removeLineComment().selectList() * vendorPrimaryKeyOnlyBhv.outideSql().formatSql().selectList() * </pre> * <p>The invoker of behavior command should be not null when you call this method.</p> * @return The new-created all facade executor of outside-SQL. (NotNull) */ public OutsideSqlAllFacadeExecutor<VendorPrimaryKeyOnlyBhv> outsideSql() { return doOutsideSql(); } // =================================================================================== // Framework Filter Override // ========================= @Override protected void frameworkFilterEntityOfInsert(Entity entity, org.dbflute.optional.OptionalThing<InsertOption<? extends ConditionBean>> option) { super.frameworkFilterEntityOfInsert(entity, option); new DateUpdateAdjuster().truncatePrecisionOfEntityProperty(entity); } @Override protected void frameworkFilterEntityOfUpdate(Entity entity, org.dbflute.optional.OptionalThing<UpdateOption<? extends ConditionBean>> option) { super.frameworkFilterEntityOfUpdate(entity, option); new DateUpdateAdjuster().truncatePrecisionOfEntityProperty(entity); } // =================================================================================== // Type Helper // =========== protected Class<? extends VendorPrimaryKeyOnly> typeOfSelectedEntity() { return VendorPrimaryKeyOnly.class; } protected Class<VendorPrimaryKeyOnly> typeOfHandlingEntity() { return VendorPrimaryKeyOnly.class; } protected Class<VendorPrimaryKeyOnlyCB> typeOfHandlingConditionBean() { return VendorPrimaryKeyOnlyCB.class; } }
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.index.query; import org.apache.lucene.document.IntPoint; import org.apache.lucene.document.LongPoint; import org.apache.lucene.search.LegacyNumericRangeQuery; import org.apache.lucene.search.PointRangeQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermRangeQuery; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MappedFieldType.Relation; import org.elasticsearch.test.AbstractQueryTestCase; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.joda.time.chrono.ISOChronology; import java.io.IOException; import java.util.HashMap; import java.util.Map; import static org.elasticsearch.index.query.QueryBuilders.rangeQuery; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.either; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static 
org.hamcrest.Matchers.lessThanOrEqualTo;
import static org.hamcrest.Matchers.sameInstance;

/**
 * Tests for {@link RangeQueryBuilder}: random builder creation, translation to Lucene
 * queries (term/legacy-numeric/point ranges), JSON parsing, and rewrite behavior.
 */
public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuilder> {

    /** Builds a random range query on a mapped int, date, or string field. */
    @Override
    protected RangeQueryBuilder doCreateTestQueryBuilder() {
        RangeQueryBuilder query;
        // switch between numeric and date ranges
        switch (randomIntBetween(0, 2)) {
            case 0:
                // use mapped integer field for numeric range queries
                query = new RangeQueryBuilder(INT_FIELD_NAME);
                query.from(randomIntBetween(1, 100));
                query.to(randomIntBetween(101, 200));
                break;
            case 1:
                // use mapped date field, using date string representation
                query = new RangeQueryBuilder(DATE_FIELD_NAME);
                query.from(new DateTime(System.currentTimeMillis() - randomIntBetween(0, 1000000), DateTimeZone.UTC).toString());
                query.to(new DateTime(System.currentTimeMillis() + randomIntBetween(0, 1000000), DateTimeZone.UTC).toString());
                // Create timestamp option only when we have a date mapper,
                // otherwise we could trigger exception.
                if (createShardContext().getMapperService().fullName(DATE_FIELD_NAME) != null) {
                    if (randomBoolean()) {
                        query.timeZone(randomDateTimeZone().getID());
                    }
                    if (randomBoolean()) {
                        query.format("yyyy-MM-dd'T'HH:mm:ss.SSSZZ");
                    }
                }
                break;
            case 2:
            default:
                query = new RangeQueryBuilder(STRING_FIELD_NAME);
                query.from("a" + randomAsciiOfLengthBetween(1, 10));
                query.to("z" + randomAsciiOfLengthBetween(1, 10));
                break;
        }
        query.includeLower(randomBoolean()).includeUpper(randomBoolean());
        // randomly null out either bound so open-ended ranges are also covered
        if (randomBoolean()) {
            query.from(null);
        }
        if (randomBoolean()) {
            query.to(null);
        }
        return query;
    }

    /** Provides an equivalent gt/gte + lt/lte JSON spelling of the same builder. */
    @Override
    protected Map<String, RangeQueryBuilder> getAlternateVersions() {
        Map<String, RangeQueryBuilder> alternateVersions = new HashMap<>();
        RangeQueryBuilder rangeQueryBuilder = new RangeQueryBuilder(INT_FIELD_NAME);
        rangeQueryBuilder.from(randomIntBetween(1, 100)).to(randomIntBetween(101, 200));
        rangeQueryBuilder.includeLower(randomBoolean());
        rangeQueryBuilder.includeUpper(randomBoolean());
        String query = "{\n" +
                " \"range\":{\n" +
                " \"" + INT_FIELD_NAME + "\": {\n" +
                " \"" + (rangeQueryBuilder.includeLower() ? "gte" : "gt") + "\": " + rangeQueryBuilder.from() + ",\n" +
                " \"" + (rangeQueryBuilder.includeUpper() ? "lte" : "lt") + "\": " + rangeQueryBuilder.to() + "\n" +
                " }\n" +
                " }\n" +
                "}";
        alternateVersions.put(query, rangeQueryBuilder);
        return alternateVersions;
    }

    /** Checks the Lucene query produced for each field kind mirrors the builder's bounds. */
    @Override
    protected void doAssertLuceneQuery(RangeQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
        if (getCurrentTypes().length == 0
                || (queryBuilder.fieldName().equals(DATE_FIELD_NAME) == false && queryBuilder.fieldName().equals(INT_FIELD_NAME) == false)) {
            // unmapped or plain string field: expect a term range query echoing the builder
            assertThat(query, instanceOf(TermRangeQuery.class));
            TermRangeQuery termRangeQuery = (TermRangeQuery) query;
            assertThat(termRangeQuery.getField(), equalTo(queryBuilder.fieldName()));
            assertThat(termRangeQuery.getLowerTerm(), equalTo(BytesRefs.toBytesRef(queryBuilder.from())));
            assertThat(termRangeQuery.getUpperTerm(), equalTo(BytesRefs.toBytesRef(queryBuilder.to())));
            assertThat(termRangeQuery.includesLower(), equalTo(queryBuilder.includeLower()));
            assertThat(termRangeQuery.includesUpper(), equalTo(queryBuilder.includeUpper()));
        } else if (queryBuilder.fieldName().equals(DATE_FIELD_NAME)) {
            //we can't properly test unmapped dates because LateParsingQuery is package private
        } else if (queryBuilder.fieldName().equals(INT_FIELD_NAME)) {
            assertThat(query, either(instanceOf(LegacyNumericRangeQuery.class)).or(instanceOf(PointRangeQuery.class)));
            if (query instanceof LegacyNumericRangeQuery) {
                LegacyNumericRangeQuery numericRangeQuery = (LegacyNumericRangeQuery) query;
                assertThat(numericRangeQuery.getField(), equalTo(queryBuilder.fieldName()));
                assertThat(numericRangeQuery.getMin(), equalTo(queryBuilder.from()));
                assertThat(numericRangeQuery.getMax(), equalTo(queryBuilder.to()));
                assertThat(numericRangeQuery.includesMin(), equalTo(queryBuilder.includeLower()));
                assertThat(numericRangeQuery.includesMax(), equalTo(queryBuilder.includeUpper()));
            } else {
                // point query: map the builder's (possibly exclusive/open) bounds onto the
                // closed [minInt, maxInt] interval that IntPoint.newRangeQuery expects
                Integer min = (Integer) queryBuilder.from();
                Integer max = (Integer) queryBuilder.to();
                int minInt, maxInt;
                if (min == null) {
                    minInt = Integer.MIN_VALUE;
                } else {
                    minInt = min.intValue();
                    if (queryBuilder.includeLower() == false && minInt != Integer.MAX_VALUE) {
                        minInt++;
                    }
                }
                if (max == null) {
                    maxInt = Integer.MAX_VALUE;
                } else {
                    maxInt = max.intValue();
                    if (queryBuilder.includeUpper() == false && maxInt != Integer.MIN_VALUE) {
                        maxInt--;
                    }
                }
                // NOTE(review): this try/catch merely rethrows the AssertionError — it is a no-op
                // wrapper (likely a leftover debugging hook) and could be removed.
                try {
                    assertEquals(IntPoint.newRangeQuery(INT_FIELD_NAME, minInt, maxInt), query);
                } catch (AssertionError e) {
                    throw e;
                }
            }
        } else {
            throw new UnsupportedOperationException();
        }
    }

    /** Null/empty field names and bad time zones or formats must be rejected eagerly. */
    public void testIllegalArguments() {
        expectThrows(IllegalArgumentException.class, () -> new RangeQueryBuilder((String) null));
        expectThrows(IllegalArgumentException.class, () -> new RangeQueryBuilder(""));
        RangeQueryBuilder rangeQueryBuilder = new RangeQueryBuilder("test");
        expectThrows(IllegalArgumentException.class, () -> rangeQueryBuilder.timeZone(null));
        expectThrows(IllegalArgumentException.class, () -> rangeQueryBuilder.timeZone("badID"));
        expectThrows(IllegalArgumentException.class, () -> rangeQueryBuilder.format(null));
        expectThrows(IllegalArgumentException.class, () -> rangeQueryBuilder.format("badFormat"));
    }

    /**
     * Specifying a timezone together with a numeric range query should throw an exception.
     */
    public void testToQueryNonDateWithTimezone() throws QueryShardException, IOException {
        RangeQueryBuilder query = new RangeQueryBuilder(INT_FIELD_NAME);
        query.from(1).to(10).timeZone("UTC");
        QueryShardException e = expectThrows(QueryShardException.class, () -> query.toQuery(createShardContext()));
        assertThat(e.getMessage(), containsString("[range] time_zone can not be applied"));
    }

    /**
     * Specifying a timezone together with an unmapped field should throw an exception.
     */
    public void testToQueryUnmappedWithTimezone() throws QueryShardException, IOException {
        RangeQueryBuilder query = new RangeQueryBuilder("bogus_field");
        query.from(1).to(10).timeZone("UTC");
        QueryShardException e = expectThrows(QueryShardException.class, () -> query.toQuery(createShardContext()));
        assertThat(e.getMessage(), containsString("[range] time_zone can not be applied"));
    }

    /** A mapped int field yields a numeric/point range query with the given bounds. */
    public void testToQueryNumericField() throws IOException {
        assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
        Query parsedQuery = rangeQuery(INT_FIELD_NAME).from(23).to(54).includeLower(true).includeUpper(false).toQuery(createShardContext());
        // since age is automatically registered in data, we encode it as numeric
        assertThat(parsedQuery, either(instanceOf(LegacyNumericRangeQuery.class)).or(instanceOf(PointRangeQuery.class)));
        if (parsedQuery instanceof LegacyNumericRangeQuery) {
            LegacyNumericRangeQuery rangeQuery = (LegacyNumericRangeQuery) parsedQuery;
            assertThat(rangeQuery.getField(), equalTo(INT_FIELD_NAME));
            assertThat(rangeQuery.getMin().intValue(), equalTo(23));
            assertThat(rangeQuery.getMax().intValue(), equalTo(54));
            assertThat(rangeQuery.includesMin(), equalTo(true));
            assertThat(rangeQuery.includesMax(), equalTo(false));
        } else {
            // exclusive upper bound 54 becomes inclusive 53 in the point representation
            assertEquals(IntPoint.newRangeQuery(INT_FIELD_NAME, 23, 53), parsedQuery);
        }
    }

    /** Custom date formats are honored when parsing bounds; a non-matching format fails. */
    public void testDateRangeQueryFormat() throws IOException {
        assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
        // We test 01/01/2012 from gte and 2030 for lt
        String query = "{\n" +
                " \"range\" : {\n" +
                " \"" + DATE_FIELD_NAME + "\" : {\n" +
                " \"gte\": \"01/01/2012\",\n" +
                " \"lt\": \"2030\",\n" +
                " \"format\": \"dd/MM/yyyy||yyyy\"\n" +
                " }\n" +
                " }\n" +
                "}";
        Query parsedQuery = parseQuery(query).toQuery(createShardContext()).rewrite(null);
        assertThat(parsedQuery, either(instanceOf(LegacyNumericRangeQuery.class)).or(instanceOf(PointRangeQuery.class)));
        if (parsedQuery instanceof LegacyNumericRangeQuery) {
            // Min value was 01/01/2012 (dd/MM/yyyy)
            DateTime min = DateTime.parse("2012-01-01T00:00:00.000+00");
            assertThat(((LegacyNumericRangeQuery) parsedQuery).getMin().longValue(), is(min.getMillis()));
            // Max value was 2030 (yyyy)
            DateTime max = DateTime.parse("2030-01-01T00:00:00.000+00");
            assertThat(((LegacyNumericRangeQuery) parsedQuery).getMax().longValue(), is(max.getMillis()));
        } else {
            assertEquals(LongPoint.newRangeQuery(DATE_FIELD_NAME,
                    DateTime.parse("2012-01-01T00:00:00.000+00").getMillis(),
                    DateTime.parse("2030-01-01T00:00:00.000+00").getMillis() - 1),
                    parsedQuery);
        }

        // Test Invalid format
        final String invalidQuery = "{\n" +
                " \"range\" : {\n" +
                " \"" + DATE_FIELD_NAME + "\" : {\n" +
                " \"gte\": \"01/01/2012\",\n" +
                " \"lt\": \"2030\",\n" +
                " \"format\": \"yyyy\"\n" +
                " }\n" +
                " }\n" +
                "}";
        Query rewrittenQuery = parseQuery(invalidQuery).toQuery(createShardContext());
        expectThrows(ElasticsearchParseException.class, () -> rewrittenQuery.rewrite(null));
    }

    /** Date-math rounding (||/M, ||/d) expands inclusive bounds and shrinks exclusive ones. */
    public void testDateRangeBoundaries() throws IOException {
        assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
        // inclusive bounds: gte rounds down to the month start, lte rounds up to end of day
        String query = "{\n" +
                " \"range\" : {\n" +
                " \"" + DATE_FIELD_NAME + "\" : {\n" +
                " \"gte\": \"2014-11-05||/M\",\n" +
                " \"lte\": \"2014-12-08||/d\"\n" +
                " }\n" +
                " }\n" +
                "}\n";
        Query parsedQuery = parseQuery(query).toQuery(createShardContext()).rewrite(null);
        assertThat(parsedQuery, either(instanceOf(LegacyNumericRangeQuery.class)).or(instanceOf(PointRangeQuery.class)));
        if (parsedQuery instanceof LegacyNumericRangeQuery) {
            LegacyNumericRangeQuery rangeQuery = (LegacyNumericRangeQuery) parsedQuery;
            DateTime min = DateTime.parse("2014-11-01T00:00:00.000+00");
            assertThat(rangeQuery.getMin().longValue(), is(min.getMillis()));
            assertTrue(rangeQuery.includesMin());
            DateTime max = DateTime.parse("2014-12-08T23:59:59.999+00");
            assertThat(rangeQuery.getMax().longValue(), is(max.getMillis()));
            assertTrue(rangeQuery.includesMax());
        } else {
            assertEquals(LongPoint.newRangeQuery(DATE_FIELD_NAME,
                    DateTime.parse("2014-11-01T00:00:00.000+00").getMillis(),
                    DateTime.parse("2014-12-08T23:59:59.999+00").getMillis()),
                    parsedQuery);
        }

        // exclusive bounds: gt moves past the rounded month end, lt stops before the day start
        query = "{\n" +
                " \"range\" : {\n" +
                " \"" + DATE_FIELD_NAME + "\" : {\n" +
                " \"gt\": \"2014-11-05||/M\",\n" +
                " \"lt\": \"2014-12-08||/d\"\n" +
                " }\n" +
                " }\n" +
                "}";
        parsedQuery = parseQuery(query).toQuery(createShardContext()).rewrite(null);
        assertThat(parsedQuery, either(instanceOf(LegacyNumericRangeQuery.class)).or(instanceOf(PointRangeQuery.class)));
        if (parsedQuery instanceof LegacyNumericRangeQuery) {
            LegacyNumericRangeQuery rangeQuery = (LegacyNumericRangeQuery) parsedQuery;
            DateTime min = DateTime.parse("2014-11-30T23:59:59.999+00");
            assertThat(rangeQuery.getMin().longValue(), is(min.getMillis()));
            assertFalse(rangeQuery.includesMin());
            DateTime max = DateTime.parse("2014-12-08T00:00:00.000+00");
            assertThat(rangeQuery.getMax().longValue(), is(max.getMillis()));
            assertFalse(rangeQuery.includesMax());
        } else {
            assertEquals(LongPoint.newRangeQuery(DATE_FIELD_NAME,
                    DateTime.parse("2014-11-30T23:59:59.999+00").getMillis() + 1,
                    DateTime.parse("2014-12-08T00:00:00.000+00").getMillis() - 1),
                    parsedQuery);
        }
    }

    /** time_zone shifts date bounds; on a numeric field it must be rejected. */
    public void testDateRangeQueryTimezone() throws IOException {
        assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
        long startDate = System.currentTimeMillis();
        String query = "{\n" +
                " \"range\" : {\n" +
                " \"" + DATE_FIELD_NAME + "\" : {\n" +
                " \"gte\": \"2012-01-01\",\n" +
                " \"lte\": \"now\",\n" +
                " \"time_zone\": \"+01:00\"\n" +
                " }\n" +
                " }\n" +
                "}";
        Query parsedQuery = parseQuery(query).toQuery(createShardContext()).rewrite(null);
        if (parsedQuery instanceof PointRangeQuery) {
            // TODO what can we assert
        } else {
            assertThat(parsedQuery, instanceOf(LegacyNumericRangeQuery.class));
            // Min value was 2012-01-01 (UTC) so we need to remove one hour
            DateTime min = DateTime.parse("2012-01-01T00:00:00.000+01:00");
            // Max value is when we started the test. So it should be some ms from now
            DateTime max = new DateTime(startDate, DateTimeZone.UTC);
            assertThat(((LegacyNumericRangeQuery) parsedQuery).getMin().longValue(), is(min.getMillis()));
            // We should not have a big difference here (should be some ms)
            assertThat(((LegacyNumericRangeQuery) parsedQuery).getMax().longValue() - max.getMillis(), lessThanOrEqualTo(60000L));
        }

        // the same option on a numeric field must fail at toQuery time
        query = "{\n" +
                " \"range\" : {\n" +
                " \"" + INT_FIELD_NAME + "\" : {\n" +
                " \"gte\": \"0\",\n" +
                " \"lte\": \"100\",\n" +
                " \"time_zone\": \"-01:00\"\n" +
                " }\n" +
                " }\n" +
                "}";
        QueryBuilder queryBuilder = parseQuery(query);
        expectThrows(QueryShardException.class, () -> queryBuilder.toQuery(createShardContext()));
    }

    /** Round-trip: parsed JSON regenerates identically and exposes the raw bound strings. */
    public void testFromJson() throws IOException {
        String json = "{\n" +
                " \"range\" : {\n" +
                " \"timestamp\" : {\n" +
                " \"from\" : \"2015-01-01 00:00:00\",\n" +
                " \"to\" : \"now\",\n" +
                " \"include_lower\" : true,\n" +
                " \"include_upper\" : true,\n" +
                " \"time_zone\" : \"+01:00\",\n" +
                " \"boost\" : 1.0\n" +
                " }\n" +
                " }\n" +
                "}";
        RangeQueryBuilder parsed = (RangeQueryBuilder) parseQuery(json);
        checkGeneratedJson(json, parsed);
        assertEquals(json, "2015-01-01 00:00:00", parsed.from());
        assertEquals(json, "now", parsed.to());
    }

    /** "_name" is accepted inside the field object; the deprecated top-level form only leniently. */
    public void testNamedQueryParsing() throws IOException {
        String json = "{\n" +
                " \"range\" : {\n" +
                " \"timestamp\" : {\n" +
                " \"from\" : \"2015-01-01 00:00:00\",\n" +
                " \"to\" : \"now\",\n" +
                " \"boost\" : 1.0,\n" +
                " \"_name\" : \"my_range\"\n" +
                " }\n" +
                " }\n" +
                "}";
        assertNotNull(parseQuery(json));

        final String deprecatedJson = "{\n" +
                " \"range\" : {\n" +
                " \"timestamp\" : {\n" +
                " \"from\" : \"2015-01-01 00:00:00\",\n" +
                " \"to\" : \"now\",\n" +
                " \"boost\" : 1.0\n" +
                " },\n" +
                " \"_name\" : \"my_range\"\n" +
                " }\n" +
                "}";

        // non strict parsing should accept "_name" on top level
        // NOTE(review): this parses 'json', not 'deprecatedJson' — likely a copy-paste slip;
        // the lenient branch probably meant to exercise the deprecated top-level form. Confirm.
        assertNotNull(parseQuery(json, ParseFieldMatcher.EMPTY));

        // with strict parsing, ParseField will throw exception
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
                () -> parseQuery(deprecatedJson, ParseFieldMatcher.STRICT));
        assertEquals("Deprecated field [_name] used, replaced by [query name is not supported in short version of range query]",
                e.getMessage());
    }

    /** A WITHIN shard relation rewrites the range to an unbounded (match-all-like) range. */
    public void testRewriteDateToMatchAll() throws IOException {
        String fieldName = randomAsciiOfLengthBetween(1, 20);
        // stub the relation so the rewrite believes the shard's values are fully contained
        RangeQueryBuilder query = new RangeQueryBuilder(fieldName) {
            @Override
            protected MappedFieldType.Relation getRelation(QueryRewriteContext queryRewriteContext) throws IOException {
                return Relation.WITHIN;
            }
        };
        DateTime queryFromValue = new DateTime(2015, 1, 1, 0, 0, 0, ISOChronology.getInstanceUTC());
        DateTime queryToValue = new DateTime(2016, 1, 1, 0, 0, 0, ISOChronology.getInstanceUTC());
        query.from(queryFromValue);
        query.to(queryToValue);
        QueryShardContext queryShardContext = createShardContext();
        QueryBuilder rewritten = query.rewrite(queryShardContext);
        assertThat(rewritten, instanceOf(RangeQueryBuilder.class));
        RangeQueryBuilder rewrittenRange = (RangeQueryBuilder) rewritten;
        assertThat(rewrittenRange.fieldName(), equalTo(fieldName));
        assertThat(rewrittenRange.from(), equalTo(null));
        assertThat(rewrittenRange.to(), equalTo(null));
    }

    /** A DISJOINT shard relation rewrites the range to match-none. */
    public void testRewriteDateToMatchNone() throws IOException {
        String fieldName = randomAsciiOfLengthBetween(1, 20);
        RangeQueryBuilder query = new RangeQueryBuilder(fieldName) {
            @Override
            protected MappedFieldType.Relation getRelation(QueryRewriteContext queryRewriteContext) throws IOException {
                return Relation.DISJOINT;
            }
        };
        DateTime queryFromValue = new DateTime(2015, 1, 1, 0, 0, 0, ISOChronology.getInstanceUTC());
        DateTime queryToValue = new DateTime(2016, 1, 1, 0, 0, 0, ISOChronology.getInstanceUTC());
        query.from(queryFromValue);
        query.to(queryToValue);
        QueryShardContext queryShardContext = createShardContext();
        QueryBuilder rewritten = query.rewrite(queryShardContext);
        assertThat(rewritten, instanceOf(MatchNoneQueryBuilder.class));
    }

    /** An INTERSECTS relation leaves the builder untouched (same instance). */
    public void testRewriteDateToSame() throws IOException {
        String fieldName = randomAsciiOfLengthBetween(1, 20);
        RangeQueryBuilder query = new RangeQueryBuilder(fieldName) {
            @Override
            protected MappedFieldType.Relation getRelation(QueryRewriteContext queryRewriteContext) throws IOException {
                return Relation.INTERSECTS;
            }
        };
        DateTime queryFromValue = new DateTime(2015, 1, 1, 0, 0, 0, ISOChronology.getInstanceUTC());
        DateTime queryToValue = new DateTime(2016, 1, 1, 0, 0, 0, ISOChronology.getInstanceUTC());
        query.from(queryFromValue);
        query.to(queryToValue);
        QueryShardContext queryShardContext = createShardContext();
        QueryBuilder rewritten = query.rewrite(queryShardContext);
        assertThat(rewritten, sameInstance(query));
    }

    /** An open-ended (no from/to) range is also left untouched by the rewrite. */
    public void testRewriteOpenBoundsToSame() throws IOException {
        String fieldName = randomAsciiOfLengthBetween(1, 20);
        RangeQueryBuilder query = new RangeQueryBuilder(fieldName) {
            @Override
            protected MappedFieldType.Relation getRelation(QueryRewriteContext queryRewriteContext) throws IOException {
                return Relation.INTERSECTS;
            }
        };
        QueryShardContext queryShardContext = createShardContext();
        QueryBuilder rewritten = query.rewrite(queryShardContext);
        assertThat(rewritten, sameInstance(query));
    }

    /** A range object may name exactly one field; two fields is a parse error. */
    public void testParseFailsWithMultipleFields() throws IOException {
        String json = "{\n" +
                " \"range\": {\n" +
                " \"age\": {\n" +
                " \"gte\": 30,\n" +
                " \"lte\": 40\n" +
                " },\n" +
                " \"price\": {\n" +
                " \"gte\": 10,\n" +
                " \"lte\": 30\n" +
                " }\n" +
                " }\n" +
                " }";
        ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(json));
        assertEquals("[range] query doesn't support multiple fields, found [age] and [price]", e.getMessage());
    }
}
package fr.free.nrw.commons.upload;

import android.annotation.SuppressLint;
import android.content.ContentUris;
import android.content.Context;
import android.content.SharedPreferences;
import android.database.Cursor;
import android.net.Uri;
import android.os.Build;
import android.os.Environment;
import android.os.ParcelFileDescriptor;
import android.preference.PreferenceManager;
import android.provider.DocumentsContract;
import android.provider.MediaStore;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;

import java.io.File;
import java.io.FileDescriptor;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.channels.FileChannel;
import java.util.Date;

import timber.log.Timber;

/**
 * Helpers for resolving a filesystem path from a content {@link Uri} and for
 * copying file content.
 */
public class FileUtils {

    /**
     * Get a file path from a Uri. This will get the path for Storage Access
     * Framework Documents, as well as the _data field for the MediaStore and
     * other file-based ContentProviders. When no path can be resolved, the
     * content is copied into app-controlled storage and the path of the copy
     * is returned instead.
     *
     * @param context The context.
     * @param uri     The Uri to query.
     * @return the resolved (or copied) file path, or {@code null} on failure
     * @author paulburke
     */
    // Can be safely suppressed, checks for isKitKat before running isDocumentUri
    @SuppressLint("NewApi")
    @Nullable
    public static String getPath(Context context, Uri uri) {
        String returnPath = null;
        final boolean isKitKat = Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT;
        // DocumentProvider
        if (isKitKat && DocumentsContract.isDocumentUri(context, uri)) {
            if (isExternalStorageDocument(uri)) {
                // ExternalStorageProvider
                final String docId = DocumentsContract.getDocumentId(uri);
                final String[] split = docId.split(":");
                final String type = split[0];
                if ("primary".equalsIgnoreCase(type)) {
                    returnPath = Environment.getExternalStorageDirectory() + "/" + split[1];
                }
            } else if (isDownloadsDocument(uri)) {
                // DownloadsProvider: document id is the row id in the downloads table
                final String id = DocumentsContract.getDocumentId(uri);
                final Uri contentUri = ContentUris.withAppendedId(
                        Uri.parse("content://downloads/document"), Long.valueOf(id));
                returnPath = getDataColumn(context, contentUri, null, null);
            } else if (isMediaDocument(uri)) {
                // MediaProvider: document id is "<type>:<media row id>"
                final String docId = DocumentsContract.getDocumentId(uri);
                final String[] split = docId.split(":");
                final String type = split[0];
                Uri contentUri = null;
                switch (type) {
                    case "image":
                        contentUri = MediaStore.Images.Media.EXTERNAL_CONTENT_URI;
                        break;
                    case "video":
                        contentUri = MediaStore.Video.Media.EXTERNAL_CONTENT_URI;
                        break;
                    case "audio":
                        contentUri = MediaStore.Audio.Media.EXTERNAL_CONTENT_URI;
                        break;
                    default:
                        break;
                }
                final String selection = "_id=?";
                final String[] selectionArgs = new String[]{ split[1] };
                returnPath = getDataColumn(context, contentUri, selection, selectionArgs);
            }
        } else if ("content".equalsIgnoreCase(uri.getScheme())) {
            // MediaStore (and general)
            returnPath = getDataColumn(context, uri, null, null);
        } else if ("file".equalsIgnoreCase(uri.getScheme())) {
            // Plain file:// Uri
            returnPath = uri.getPath();
        }
        if (returnPath != null) {
            return returnPath;
        }
        // Fetching the path may fail depending on the source URI and all hope is
        // lost, so we create and use a copy of the file, which seems to work.
        return copyToAppStorage(context, uri);
    }

    /**
     * Copies the content behind {@code uri} into app-controlled storage
     * (external storage when enabled in preferences, the app cache otherwise)
     * and returns the path of the copy.
     *
     * @return path of the copied file, or {@code null} on failure
     */
    @Nullable
    private static String copyToAppStorage(Context context, Uri uri) {
        String copyPath = null;
        // try-with-resources: the descriptor was previously never closed,
        // leaking a file descriptor on every fallback copy.
        try (ParcelFileDescriptor descriptor =
                     context.getContentResolver().openFileDescriptor(uri, "r")) {
            if (descriptor == null) {
                return null;
            }
            SharedPreferences sharedPref = PreferenceManager
                    .getDefaultSharedPreferences(context);
            boolean useExtStorage = sharedPref.getBoolean("useExternalStorage", true);
            if (useExtStorage) {
                File appDir = new File(
                        Environment.getExternalStorageDirectory().toString() + "/CommonsApp");
                // mkdirs (not mkdir) so missing parents are created; the result
                // was previously ignored silently.
                if (!appDir.exists() && !appDir.mkdirs()) {
                    Timber.w("Failed to create directory %s", appDir);
                }
                copyPath = appDir + "/" + new Date().getTime() + ".jpg";
            } else {
                copyPath = context.getCacheDir().getAbsolutePath()
                        + "/" + new Date().getTime() + ".jpg";
            }
            copy(descriptor.getFileDescriptor(), copyPath);
            Timber.d("Filepath (copied): %s", copyPath);
            return copyPath;
        } catch (IOException e) {
            Timber.w(e, "Error in file " + copyPath);
            return null;
        }
    }

    /**
     * Get the value of the data column for this Uri. This is useful for
     * MediaStore Uris, and other file-based ContentProviders.
     *
     * @param context       The context.
     * @param uri           The Uri to query.
     * @param selection     (Optional) Filter used in the query.
     * @param selectionArgs (Optional) Selection arguments used in the query.
     * @return The value of the _data column, which is typically a file path.
     */
    @Nullable
    public static String getDataColumn(Context context, Uri uri, String selection,
                                       String[] selectionArgs) {
        Cursor cursor = null;
        final String column = MediaStore.Images.ImageColumns.DATA;
        final String[] projection = { column };
        try {
            cursor = context.getContentResolver().query(uri, projection, selection,
                    selectionArgs, null);
            if (cursor != null && cursor.moveToFirst()) {
                final int columnIndex = cursor.getColumnIndexOrThrow(column);
                return cursor.getString(columnIndex);
            }
        } catch (IllegalArgumentException e) {
            // Thrown when the provider does not expose a _data column; treat as "no path".
            Timber.d(e);
        } finally {
            if (cursor != null) {
                cursor.close();
            }
        }
        return null;
    }

    /**
     * @param uri The Uri to check.
     * @return Whether the Uri authority is ExternalStorageProvider.
     */
    private static boolean isExternalStorageDocument(Uri uri) {
        return "com.android.externalstorage.documents".equals(uri.getAuthority());
    }

    /**
     * @param uri The Uri to check.
     * @return Whether the Uri authority is DownloadsProvider.
     */
    private static boolean isDownloadsDocument(Uri uri) {
        return "com.android.providers.downloads.documents".equals(uri.getAuthority());
    }

    /**
     * @param uri The Uri to check.
     * @return Whether the Uri authority is MediaProvider.
     */
    private static boolean isMediaDocument(Uri uri) {
        return "com.android.providers.media.documents".equals(uri.getAuthority());
    }

    /**
     * Check if the URI is owned by the current app.
     */
    public static boolean isSelfOwned(Context context, Uri uri) {
        return uri.getAuthority().equals(context.getPackageName() + ".provider");
    }

    /**
     * Copy content from source file to destination file.
     *
     * @param source      stream copied from
     * @param destination stream copied to
     * @throws IOException thrown when failing to read source or opening destination file
     */
    public static void copy(@NonNull FileInputStream source,
                            @NonNull FileOutputStream destination)
            throws IOException {
        // try-with-resources: channels (and the underlying streams) were
        // previously never closed.
        try (FileChannel sourceChannel = source.getChannel();
             FileChannel destinationChannel = destination.getChannel()) {
            final long size = sourceChannel.size();
            long transferred = 0;
            // transferTo may copy fewer bytes than requested; loop until done.
            while (transferred < size) {
                transferred += sourceChannel.transferTo(
                        transferred, size - transferred, destinationChannel);
            }
        }
    }

    /**
     * Copy content from source file to destination file.
     *
     * @param source      file descriptor copied from
     * @param destination file path copied to
     * @throws IOException thrown when failing to read source or opening destination file
     */
    public static void copy(@NonNull FileDescriptor source, @NonNull String destination)
            throws IOException {
        // The nested copy() closes both streams via its try-with-resources.
        copy(new FileInputStream(source), new FileOutputStream(destination));
    }
}
package org.drools.base.mvel;

import java.io.Externalizable;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.io.Serializable;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.drools.RuntimeDroolsException;
import org.drools.base.ModifyInterceptor;
import org.drools.rule.Declaration;
import org.drools.spi.KnowledgeHelper;
import org.mvel.Macro;
import org.mvel.ParserContext;
import org.mvel.compiler.AbstractParser;
import org.mvel.compiler.ExpressionCompiler;

/**
 * Serializable description of an MVEL expression plus everything needed to
 * (re)compile it: imports, declarations and input identifiers/types.
 */
public class MVELCompilationUnit
    implements
    Externalizable {

    private String        expression;

    private String[]      pkgImports;
    private String[]      importClasses;
    private String[]      importMethods;
    private String[]      importFields;
    private String[]      globalIdentifiers;

    private Declaration[] previousDeclarations;
    private Declaration[] localDeclarations;
    private String[]      otherIdentifiers;

    // inputIdentifiers[i] is typed by inputTypes[i]; the arrays are parallel.
    private String[]      inputIdentifiers;
    private String[]      inputTypes;

    private String[]      shadowIdentifiers;

    private int           languageLevel;
    private boolean       strictMode;

    // Populated by getCompiledExpression() and later read by getFactory().
    private Map<String, Class> resolvedInputs;

    // Raw Map because ParserContext#setInterceptors takes a raw Map in this MVEL version.
    private static Map interceptors = new HashMap( 1 );
    static {
        interceptors.put( "Modify",
                          new ModifyInterceptor() );
    }

    // Primitive type names cannot be resolved through a ClassLoader; map them directly.
    private static final Map<String, Class> primitivesMap = new HashMap<String, Class>();
    static {
        primitivesMap.put( "int",
                           int.class );
        primitivesMap.put( "boolean",
                           boolean.class );
        primitivesMap.put( "float",
                           float.class );
        primitivesMap.put( "long",
                           long.class );
        primitivesMap.put( "short",
                           short.class );
        primitivesMap.put( "byte",
                           byte.class );
        primitivesMap.put( "double",
                           double.class );
        primitivesMap.put( "char",
                           char.class );
    }

    // MVEL's AbstractParser holds static state; all compilation is serialized on this lock.
    public static final Object COMPILER_LOCK = new Object();

    public MVELCompilationUnit() {
    }

    public MVELCompilationUnit(String expression,
                               String[] pkgImports,
                               String[] importClasses,
                               String[] importMethods,
                               String[] importFields,
                               String[] globalIdentifiers,
                               Declaration[] previousDeclarations,
                               Declaration[] localDeclarations,
                               String[] otherIdentifiers,
                               String[] inputIdentifiers,
                               String[] inputTypes,
                               int languageLevel,
                               boolean strictMode) {
        this.expression = expression;
        this.pkgImports = pkgImports;
        this.importClasses = importClasses;
        this.importMethods = importMethods;
        this.importFields = importFields;
        this.globalIdentifiers = globalIdentifiers;
        this.previousDeclarations = previousDeclarations;
        this.localDeclarations = localDeclarations;
        this.otherIdentifiers = otherIdentifiers;
        this.inputIdentifiers = inputIdentifiers;
        this.inputTypes = inputTypes;
        this.languageLevel = languageLevel;
        this.strictMode = strictMode;
    }

    public void setShadowIdentifiers(String[] shadowIdentifiers) {
        this.shadowIdentifiers = shadowIdentifiers;
    }

    public void writeExternal(ObjectOutput out) throws IOException {
        out.writeUTF( expression );
        out.writeObject( pkgImports );
        out.writeObject( importClasses );
        out.writeObject( importMethods );
        out.writeObject( importFields );
        out.writeObject( globalIdentifiers );
        out.writeObject( previousDeclarations );
        out.writeObject( localDeclarations );
        out.writeObject( otherIdentifiers );
        out.writeObject( inputIdentifiers );
        out.writeObject( inputTypes );
        out.writeObject( shadowIdentifiers );
        out.writeInt( languageLevel );
        out.writeBoolean( strictMode );
    }

    public void readExternal(ObjectInput in) throws IOException,
                                            ClassNotFoundException {
        // Field order must mirror writeExternal exactly.
        expression = in.readUTF();
        pkgImports = (String[]) in.readObject();
        importClasses = (String[]) in.readObject();
        importMethods = (String[]) in.readObject();
        importFields = (String[]) in.readObject();
        globalIdentifiers = (String[]) in.readObject();
        previousDeclarations = (Declaration[]) in.readObject();
        localDeclarations = (Declaration[]) in.readObject();
        otherIdentifiers = (String[]) in.readObject();
        inputIdentifiers = (String[]) in.readObject();
        inputTypes = (String[]) in.readObject();
        shadowIdentifiers = (String[]) in.readObject();
        languageLevel = in.readInt();
        strictMode = in.readBoolean();
    }

    /**
     * Resolves all imports and inputs against the given class loader, then
     * compiles the expression. Also populates {@link #resolvedInputs}, which
     * {@link #getFactory()} depends on.
     *
     * @throws RuntimeDroolsException if an import or input type cannot be resolved
     */
    public Serializable getCompiledExpression(ClassLoader classLoader) {
        Map<String, Object> resolvedImports =
            new HashMap<String, Object>( importClasses.length + importMethods.length + importFields.length );
        String lastName = null;
        try {
            for ( String name : importClasses ) {
                lastName = name;
                Class cls = loadClass( classLoader,
                                       name );
                resolvedImports.put( cls.getSimpleName(),
                                     cls );
            }
            for ( String name : importMethods ) {
                lastName = name;
                int lastDot = name.lastIndexOf( '.' );
                String className = name.substring( 0,
                                                   lastDot );
                Class cls = loadClass( classLoader,
                                       className );
                String methodName = name.substring( lastDot + 1 );
                Method method = null;
                for ( Method item : cls.getMethods() ) {
                    if ( methodName.equals( item.getName() ) ) {
                        method = item;
                    }
                }
                // Previously a missing method produced an opaque NPE below.
                if ( method == null ) {
                    throw new RuntimeDroolsException( "Unable to resolve import '" + name + "'" );
                }
                resolvedImports.put( method.getName(),
                                     method );
            }
            for ( String name : importFields ) {
                lastName = name;
                int lastDot = name.lastIndexOf( '.' );
                String className = name.substring( 0,
                                                   lastDot );
                Class cls = loadClass( classLoader,
                                       className );
                String fieldName = name.substring( lastDot + 1 );
                Field field = cls.getField( fieldName );
                resolvedImports.put( field.getName(),
                                     field );
            }
        } catch ( ClassNotFoundException e ) {
            // Chain the cause instead of printStackTrace + message-only wrapping.
            throw new RuntimeDroolsException( "Unable to resolve import '" + lastName + "'",
                                              e );
        } catch ( SecurityException e ) {
            throw new RuntimeDroolsException( "Unable to resolve import '" + lastName + "'",
                                              e );
        } catch ( NoSuchFieldException e ) {
            throw new RuntimeDroolsException( "Unable to resolve import '" + lastName + "'",
                                              e );
        }

        final ParserContext parserContext = new ParserContext( resolvedImports,
                                                               null,
                                                               "xxx" ); // context.getPkg().getName()+"."+context.
        for ( String pkgImport : this.pkgImports ) {
            parserContext.addPackageImport( pkgImport );
        }
        parserContext.setInterceptors( interceptors );
        parserContext.setStrictTypeEnforcement( strictMode );

        resolvedInputs = new HashMap<String, Class>( inputIdentifiers.length );
        String lastIdentifier = null;
        String lastType = null;
        try {
            for ( int i = 0, length = inputIdentifiers.length; i < length; i++ ) {
                lastIdentifier = inputIdentifiers[i];
                lastType = inputTypes[i];
                Class cls = loadClass( classLoader,
                                       inputTypes[i] );
                resolvedInputs.put( inputIdentifiers[i],
                                    cls );
                parserContext.addInput( inputIdentifiers[i],
                                        cls );
            }
        } catch ( ClassNotFoundException e ) {
            throw new RuntimeDroolsException( "Unable to resolve class '" + lastType + "' for identifier '" + lastIdentifier,
                                              e );
        }
        // "drools" is an implicit input available to every expression.
        parserContext.addInput( "drools",
                                KnowledgeHelper.class );
        resolvedInputs.put( "drools",
                            KnowledgeHelper.class );

        return compile( expression,
                        classLoader,
                        parserContext,
                        languageLevel );
    }

    /**
     * Builds the variable-resolver factory for this unit.
     * <p>
     * NOTE: relies on {@link #resolvedInputs}, so {@link #getCompiledExpression(ClassLoader)}
     * must have been called first.
     */
    public DroolsMVELFactory getFactory() {
        Map<String, Class> resolvedGlobals = null;
        // Guard both arrays: the old code checked inputIdentifiers but iterated
        // globalIdentifiers, which could NPE. Size by the array actually iterated.
        if ( inputIdentifiers != null && globalIdentifiers != null ) {
            resolvedGlobals = new HashMap<String, Class>( globalIdentifiers.length );
            for ( int i = 0, length = globalIdentifiers.length; i < length; i++ ) {
                String identifier = globalIdentifiers[i];
                resolvedGlobals.put( identifier,
                                     resolvedInputs.get( identifier ) );
            }
        }

        Map<String, Declaration> previousDeclarationsMap = null;
        if ( previousDeclarations != null ) {
            previousDeclarationsMap = new HashMap<String, Declaration>( previousDeclarations.length );
            for ( Declaration declr : previousDeclarations ) {
                previousDeclarationsMap.put( declr.getIdentifier(),
                                             declr );
            }
        }

        Map<String, Declaration> localDeclarationsMap = null;
        if ( localDeclarations != null ) {
            localDeclarationsMap = new HashMap<String, Declaration>( localDeclarations.length );
            for ( Declaration declr : localDeclarations ) {
                localDeclarationsMap.put( declr.getIdentifier(),
                                          declr );
            }
        }

        DroolsMVELFactory factory = null;
        if ( shadowIdentifiers == null ) {
            factory = new DroolsMVELFactory( previousDeclarationsMap,
                                             localDeclarationsMap,
                                             resolvedGlobals,
                                             inputIdentifiers );
        } else {
            Set<String> set = new HashSet<String>( shadowIdentifiers.length );
            for ( String string : shadowIdentifiers ) {
                set.add( string );
            }
            factory = new DroolsMVELShadowFactory( previousDeclarationsMap,
                                                   localDeclarationsMap,
                                                   resolvedGlobals,
                                                   inputIdentifiers,
                                                   set );
        }
        return factory;
    }

    /**
     * Compiles the given MVEL text under the compiler lock, temporarily
     * swapping the thread context class loader.
     */
    public Serializable compile(final String text,
                                final ClassLoader classLoader,
                                final ParserContext parserContext,
                                final int languageLevel) {
        ExpressionCompiler compiler = new ExpressionCompiler( text.trim() );
        if ( MVELDebugHandler.isDebugMode() ) {
            compiler.setDebugSymbols( true );
        }

        synchronized ( COMPILER_LOCK ) {
            ClassLoader tempClassLoader = Thread.currentThread().getContextClassLoader();
            try {
                Thread.currentThread().setContextClassLoader( classLoader );
                AbstractParser.setLanguageLevel( languageLevel );
                return compiler.compile( parserContext );
            } finally {
                // Always restore the caller's context class loader, even when
                // compilation throws (previously it leaked on failure).
                Thread.currentThread().setContextClassLoader( tempClassLoader );
            }
        }
    }

    // Resolves a type name, mapping primitive names (e.g. "int") that a
    // ClassLoader cannot resolve on its own.
    private Class loadClass(ClassLoader classLoader,
                            String className) throws ClassNotFoundException {
        Class cls = primitivesMap.get( className );
        if ( cls == null ) {
            cls = classLoader.loadClass( className );
        }
        return cls;
    }
}
/* Derby - Class org.apache.derbyTesting.functionTests.tests.lang.RestrictedVTITest Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.apache.derbyTesting.functionTests.tests.lang; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.Statement; import junit.framework.Test; import org.apache.derbyTesting.junit.BaseTestSuite; import org.apache.derbyTesting.junit.CleanDatabaseTestSetup; import org.apache.derbyTesting.junit.JDBC; import org.apache.derbyTesting.junit.TestConfiguration; /** * <p> * Test RestrictedVTIs. See DERBY-4357. * </p> */ public class RestrictedVTITest extends GeneratedColumnsHelper { /////////////////////////////////////////////////////////////////////////////////// // // CONSTANTS // /////////////////////////////////////////////////////////////////////////////////// /////////////////////////////////////////////////////////////////////////////////// // // STATE // /////////////////////////////////////////////////////////////////////////////////// /////////////////////////////////////////////////////////////////////////////////// // // CONSTRUCTOR // /////////////////////////////////////////////////////////////////////////////////// /** * Create a new instance. 
*/ public RestrictedVTITest(String name) { super(name); } /////////////////////////////////////////////////////////////////////////////////// // // JUnit BEHAVIOR // /////////////////////////////////////////////////////////////////////////////////// /** * Construct top level suite in this JUnit test */ public static Test suite() { BaseTestSuite suite = (BaseTestSuite)TestConfiguration.embeddedSuite( RestrictedVTITest.class); Test result = new CleanDatabaseTestSetup( suite ); return result; } protected void setUp() throws Exception { super.setUp(); Connection conn = getConnection(); if ( !routineExists( conn, "GETLASTPROJECTION" ) ) { goodStatement ( conn, "create function getLastProjection\n" + "()\n" + "returns varchar( 32672 )\n" + "language java parameter style java no sql\n" + "external name 'org.apache.derbyTesting.functionTests.tests.lang.IntegerArrayVTI.getLastProjection'\n" ); } if ( !routineExists( conn, "GETLASTRESTRICTION" ) ) { goodStatement ( conn, "create function getLastRestriction\n" + "()\n" + "returns varchar( 32672 )\n" + "language java parameter style java no sql\n" + "external name 'org.apache.derbyTesting.functionTests.tests.lang.IntegerArrayVTI.getLastRestriction'\n" ); } if ( !routineExists( conn, "GETCOUNT" ) ) { goodStatement ( conn, "create function getCount\n" + "()\n" + "returns int\n" + "language java parameter style java no sql\n" + "external name 'org.apache.derbyTesting.functionTests.tests.lang.IntegerArrayVTI.getLastQualifiedRowCount'\n" ); } if ( !routineExists( conn, "INTEGERLIST" ) ) { goodStatement ( conn, "create function integerList()\n" + "returns table( s_r int, s_nr int, ns_r int, ns_nr int )\n" + "language java\n" + "parameter style derby_jdbc_result_set\n" + "no sql\n" + "external name 'org.apache.derbyTesting.functionTests.tests.lang.RestrictedVTITest.integerList'\n" ); } if ( !routineExists( conn, "NULLABLEINTEGERLIST" ) ) { goodStatement ( conn, "create function nullableIntegerList()\n" + "returns table( s_r int, s_nr 
int, ns_r int, ns_nr int )\n" + "language java\n" + "parameter style derby_jdbc_result_set\n" + "no sql\n" + "external name 'org.apache.derbyTesting.functionTests.tests.lang.RestrictedVTITest.nullableIntegerList'\n" ); } if ( !routineExists( conn, "INTEGERLISTSPECIALCOLNAMES" ) ) { goodStatement ( conn, "create function integerListSpecialColNames()\n" + "returns table( \"CoL \"\"1\"\"\" int,\n" + "\"cOL \"\"2\"\"\" int, col3 int, col4 int )\n" + "language java\n" + "parameter style derby_jdbc_result_set\n" + "no sql\n" + "external name '" + getClass().getName() + ".integerListSpecialColNames'\n" ); } if ( !routineExists( conn, "MAKEBLOB5370" ) ) { goodStatement ( conn, "create function makeBlob5370( ) returns blob\n" + "language java parameter style java no sql deterministic\n" + "external name 'org.apache.derbyTesting.functionTests.tests.lang.BooleanValuesTest.makeSimpleBlob'\n" ); } if ( !routineExists( conn, "LASTQUERY5370" ) ) { goodStatement ( conn, "create function lastQuery5370() returns varchar( 32672 )\n" + "language java parameter style java no sql\n" + "external name 'org.apache.derbyTesting.functionTests.tests.lang.RestrictedTableVTI.getLastQuery'\n" ); } if ( !routineExists( conn, "RESTRICTED5370" ) ) { goodStatement ( conn, "create function restricted5370( schemaName varchar( 32672 ), tableName varchar( 32672 ) )\n" + "returns table\n" + "(\n" + " key_col int,\n" + " boolean_col BOOLEAN,\n" + " bigint_col BIGINT,\n" + " blob_col BLOB(2147483647),\n" + " char_col CHAR(10),\n" + " char_for_bit_data_col CHAR (10) FOR BIT DATA,\n" + " clob_col CLOB,\n" + " date_col DATE,\n" + " decimal_col DECIMAL(5,2),\n" + " real_col REAL,\n" + " double_col DOUBLE,\n" + " int_col INTEGER,\n" + " long_varchar_col LONG VARCHAR,\n" + " long_varchar_for_bit_data_col LONG VARCHAR FOR BIT DATA,\n" + " numeric_col NUMERIC(5,2), \n" + " smallint_col SMALLINT,\n" + " time_col TIME,\n" + " timestamp_col TIMESTAMP,\n" + " varchar_col VARCHAR(10),\n" + " varchar_for_bit_data_col 
VARCHAR (10) FOR BIT DATA\n" + ")\n" + "language java parameter style derby_jdbc_result_set reads sql data\n" + "external name 'org.apache.derbyTesting.functionTests.tests.lang.RestrictedTableVTI.readTable'\n" ); } if ( !tableExists( conn, "T_4357_1" ) ) { goodStatement ( conn, "create table t_4357_1( a int )\n" ); goodStatement ( conn, "insert into t_4357_1( a ) values cast( null as int), ( 1 ), ( 100 ), ( 1000 ), ( 10000)\n" ); } if ( !tableExists( conn, "T_5370" ) ) { goodStatement ( conn, "create table t_5370\n" + "(\n" + " key_col int,\n" + " boolean_col BOOLEAN,\n" + " bigint_col BIGINT,\n" + " blob_col BLOB(2147483647),\n" + " char_col CHAR(10),\n" + " char_for_bit_data_col CHAR (10) FOR BIT DATA,\n" + " clob_col CLOB,\n" + " date_col DATE,\n" + " decimal_col DECIMAL(5,2),\n" + " real_col REAL,\n" + " double_col DOUBLE,\n" + " int_col INTEGER,\n" + " long_varchar_col LONG VARCHAR,\n" + " long_varchar_for_bit_data_col LONG VARCHAR FOR BIT DATA,\n" + " numeric_col NUMERIC(5,2), \n" + " smallint_col SMALLINT,\n" + " time_col TIME,\n" + " timestamp_col TIMESTAMP,\n" + " varchar_col VARCHAR(10),\n" + " varchar_for_bit_data_col VARCHAR (10) FOR BIT DATA\n" + ")\n" ); goodStatement ( conn, "insert into t_5370\n" + "(\n" + " key_col,\n" + " boolean_col,\n" + " bigint_col,\n" + " blob_col,\n" + " char_col,\n" + " char_for_bit_data_col,\n" + " clob_col,\n" + " date_col,\n" + " decimal_col,\n" + " real_col,\n" + " double_col,\n" + " int_col,\n" + " long_varchar_col,\n" + " long_varchar_for_bit_data_col,\n" + " numeric_col, \n" + " smallint_col,\n" + " time_col,\n" + " timestamp_col,\n" + " varchar_col,\n" + " varchar_for_bit_data_col\n" + ")\n" + "values\n" + "(\n" + " 0,\n" + " false,\n" + " 0,\n" + " makeBlob5370(),\n" + " '0',\n" + " X'DE',\n" + " '0',\n" + " date('1994-02-23'),\n" + " 0.00,\n" + " 0.0,\n" + " 0.0,\n" + " 0,\n" + " '0',\n" + " X'DE',\n" + " 0.00, \n" + " 0,\n" + " time('15:09:02'),\n" + " timestamp('1962-09-23 03:23:34.234'),\n" + " '0',\n" + " 
X'DE'\n" + "),\n" + "(\n" + " 1,\n" + " true,\n" + " 1,\n" + " makeBlob5370(),\n" + " '1',\n" + " X'DD',\n" + " '1',\n" + " date('1994-02-24'),\n" + " 1.00,\n" + " 1.0,\n" + " 1.0,\n" + " 1,\n" + " '1',\n" + " X'DE',\n" + " 1.00, \n" + " 1,\n" + " time('15:09:03'),\n" + " timestamp('1963-09-23 03:23:34.234'),\n" + " '1',\n" + " X'DD'\n" + "),\n" + "(\n" + " 2,\n" + " null,\n" + " null,\n" + " null,\n" + " null,\n" + " null,\n" + " null,\n" + " null,\n" + " null,\n" + " null,\n" + " null,\n" + " null,\n" + " null,\n" + " null,\n" + " null,\n" + " null,\n" + " null,\n" + " null,\n" + " null,\n" + " null\n" + ")\n" ); } } /////////////////////////////////////////////////////////////////////////////////// // // TESTS // /////////////////////////////////////////////////////////////////////////////////// /** * <p> * Heartbeat test to verify that projections and restrictions are being * pushed into RestrictedVTIs. * </p> */ public void test_01_heartbeat() throws Exception { Connection conn = getConnection(); // test basic pushing of projection and restriction assertPR ( conn, "select s_r, s_nr from table( integerList() ) s where s_r > 1 and ns_r < 3000\n", new String[][] { { "100" , "200" }, }, "[S_R, S_NR, NS_R, null]", "( \"NS_R\" < 3000 ) AND ( \"S_R\" > 1 )" ); assertPR ( conn, "select s_r, s_nr from table( integerList() ) s where s_r > 1 and ns_r < 3000 order by s_r\n", new String[][] { { "100" , "200" }, }, "[S_R, S_NR, NS_R, null]", "( \"NS_R\" < 3000 ) AND ( \"S_R\" > 1 )" ); // order by with no restriction assertPR ( conn, "select s_r, s_nr from table( integerList() ) s order by s_r\n", new String[][] { { "1" , "2" }, { "100" , "200" }, { "1000" , "2000" }, { "10000" , "20000" }, }, "[S_R, S_NR, null, null]", null ); // similar test except with a ? parameter PreparedStatement ps = chattyPrepare ( conn, "select s_r from table( integerList() ) s where s_r > 1 and ns_r < ?" 
); ps.setInt( 1, 30000 ); ResultSet rs = ps.executeQuery(); assertResults ( rs, new String[][] { { "100" }, { "1000" }, }, false ); assertResults ( conn, "values ( getLastProjection() )\n", new String[][] { { "[S_R, null, NS_R, null]" } }, false ); assertResults ( conn, "values ( getLastRestriction() )\n", new String[][] { { "( \"NS_R\" < 30000 ) AND ( \"S_R\" > 1 )" } }, false ); // similar to the first test except NOT the qualification assertPR ( conn, "select s_r, s_nr from table( integerList() ) s where not( s_r > 1 and ns_r < 3000 )\n", new String[][] { { "1" , "2" }, { "1000" , "2000" }, { "10000" , "20000" }, }, "[S_R, S_NR, NS_R, null]", "( \"S_R\" <= 1 ) OR ( \"NS_R\" >= 3000 )" ); } /** * <p> * Verify that aliases are correctly mapped to table column names. Also * verify that SELECT list expressions cause columns to be included in the * column list. Also verify that predicates which aren't qualifiers are not included in the restriction. * </p> */ public void test_02_aliasing() throws Exception { Connection conn = getConnection(); assertPR ( conn, "select 2*w, x from table( integerList() ) as s( w, x, y, z ) where w > 1 and mod( y, 3 ) = 0\n", new String[][] { { "200" , "200" }, { "2000" , "2000" }, { "20000" , "20000" }, }, "[S_R, S_NR, NS_R, null]", "\"S_R\" > 1" ); } /** * <p> * Verify that all relational operators are handled. 
* </p> */ public void test_03_allRelationalOperators() throws Exception { Connection conn = getConnection(); // IS NULL assertPR ( conn, "select s_r, s_nr from table( nullableIntegerList() ) s where s_r is null\n", new String[][] { { null , "2" }, }, "[S_R, S_NR, null, null]", "\"S_R\" IS NULL " ); // IS NOT NULL assertPR ( conn, "select s_r, s_nr from table( nullableIntegerList() ) s where s_r is not null\n", new String[][] { { "100", null }, { "1000", "2000" }, { "10000", "20000" }, }, "[S_R, S_NR, null, null]", "\"S_R\" IS NOT NULL " ); // < assertPR ( conn, "select s_r, s_nr from table( nullableIntegerList() ) s where s_r < 1000\n", new String[][] { { "100", null }, }, "[S_R, S_NR, null, null]", "\"S_R\" < 1000" ); // <= assertPR ( conn, "select s_r, s_nr from table( nullableIntegerList() ) s where s_r <= 100\n", new String[][] { { "100", null }, }, "[S_R, S_NR, null, null]", "\"S_R\" <= 100" ); // = assertPR ( conn, "select s_r, s_nr from table( nullableIntegerList() ) s where s_r = 100\n", new String[][] { { "100", null }, }, "[S_R, S_NR, null, null]", "\"S_R\" = 100" ); // > assertPR ( conn, "select s_r, s_nr from table( nullableIntegerList() ) s where s_r > 100\n", new String[][] { { "1000", "2000" }, { "10000", "20000" }, }, "[S_R, S_NR, null, null]", "\"S_R\" > 100" ); // >= assertPR ( conn, "select s_r, s_nr from table( nullableIntegerList() ) s where s_r >= 100\n", new String[][] { { "100", null }, { "1000", "2000" }, { "10000", "20000" }, }, "[S_R, S_NR, null, null]", "\"S_R\" >= 100" ); } /** * <p> * Miscellaneous conditions. * </p> */ public void test_04_misc() throws Exception { Connection conn = getConnection(); // Arithmetic expressions are not qualifiers. 
assertPR ( conn, "select s_r, s_nr from table( nullableIntegerList() ) s where s_r < s_nr + ns_r\n", new String[][] { { "10000" , "20000" }, }, "[S_R, S_NR, NS_R, null]", null ); // Casting a literal to an int is computed by the compiler and so is a qualifier assertPR ( conn, "select s_r from table( nullableIntegerList() ) s where ns_r = cast( '300' as int)\n", new String[][] { { "100" }, }, "[S_R, null, NS_R, null]", "\"NS_R\" = 300" ); } /** * <p> * Test joins to RestrictedVTIs. * </p> */ public void test_05_joins() throws Exception { Connection conn = getConnection(); // hashjoin with no restriction assertPR ( conn, "select a, w, y from t_4357_1, table( nullableIntegerList() ) as s( w, x, y, z ) where a = w\n", new String[][] { { "100" , "100", "300" }, { "1000" , "1000", null }, { "10000" , "10000", "30000" }, }, "[S_R, null, NS_R, null]", null ); assertPR ( conn, "select a, w, y from t_4357_1, table( nullableIntegerList() ) as s( w, x, y, z ) where a = w order by y\n", new String[][] { { "100" , "100", "300" }, { "10000" , "10000", "30000" }, { "1000" , "1000", null }, }, "[S_R, null, NS_R, null]", null ); // hashjoin with a restriction on the table function assertPR ( conn, "select a, w, x from t_4357_1, table( nullableIntegerList() ) as s( w, x, y, z ) where a = w and y is not null\n", new String[][] { { "100" , "100", null }, { "10000" , "10000", "20000" }, }, "[S_R, S_NR, NS_R, null]", "\"NS_R\" IS NOT NULL " ); assertPR ( conn, "select a, w, x from t_4357_1, table( nullableIntegerList() ) as s( w, x, y, z ) where a = w and y is not null order by w\n", new String[][] { { "100" , "100", null }, { "10000" , "10000", "20000" }, }, "[S_R, S_NR, NS_R, null]", "\"NS_R\" IS NOT NULL " ); // hashjoin with a restriction on the base table which transitive closure // turns into a restriction on the table function assertPR ( conn, "select a, w, x from t_4357_1, table( nullableIntegerList() ) as s( w, x, y, z ) where a = w and a > 100\n", new String[][] { { "1000" , 
"1000", "2000" }, { "10000" , "10000", "20000" }, }, "[S_R, S_NR, null, null]", "\"S_R\" > 100" ); assertPR ( conn, "select a, w, x from t_4357_1, table( nullableIntegerList() ) as s( w, x, y, z ) where a = w and a > 100 order by x\n", new String[][] { { "1000" , "1000", "2000" }, { "10000" , "10000", "20000" }, }, "[S_R, S_NR, null, null]", "\"S_R\" > 100" ); // hashjoin with a restriction that can't be pushed into the table function assertPR ( conn, "select a, w, x from t_4357_1, table( nullableIntegerList() ) as s( w, x, y, z ) where a = w and a + x > 100\n", new String[][] { { "1000" , "1000", "2000" }, { "10000" , "10000", "20000" }, }, "[S_R, S_NR, null, null]", null ); assertPR ( conn, "select a, w, x from t_4357_1, table( nullableIntegerList() ) as s( w, x, y, z ) where a = w and x + y > 100\n", new String[][] { { "10000" , "10000", "20000" }, }, "[S_R, S_NR, NS_R, null]", null ); } /** * <p> * Test DISTINCT. * </p> */ public void test_06_distinct() throws Exception { Connection conn = getConnection(); // distinct with restriction assertPR ( conn, "select distinct s_r, s_nr from table( integerList() ) s where s_r > 1 and ns_r < 3000\n", new String[][] { { "100" , "200" }, }, "[S_R, S_NR, NS_R, null]", "( \"NS_R\" < 3000 ) AND ( \"S_R\" > 1 )" ); // distinct without restriction assertPR ( conn, "select distinct s_r, s_nr from table( integerList() ) s\n", new String[][] { { "1" , "2" }, { "100" , "200" }, { "1000" , "2000" }, { "10000" , "20000" }, }, "[S_R, S_NR, null, null]", null ); } /** * <p> * Test subqueries. 
 * </p>
 */
public void test_07_subqueries() throws Exception
{
    Connection conn = getConnection();

    // table function in subquery: the projection/restriction of the inner
    // table function is still pushed down, but the join predicate is not.
    assertPR
        (
         conn,
         "select * from t_4357_1 where exists ( select x from table( nullableIntegerList() ) as s( w, x, y, z ) where a = w )\n",
         new String[][]
         {
             { "100" },
             { "1000" },
             { "10000" },
         },
         "[S_R, S_NR, null, null]",
         null
         );

    // table function in inner and outer query blocks; the recorded
    // projection/restriction is the one seen by the last-scanned VTI.
    assertPR
        (
         conn,
         "select * from table( nullableIntegerList() ) as t( a, b, c, d ) where exists ( select x from table( nullableIntegerList() ) as s( w, x, y, z ) where a = w )\n",
         new String[][]
         {
             { "100", null, "300", "400" },
             { "1000", "2000", null, "4000" },
             { "10000", "20000", "30000", null },
         },
         "[S_R, S_NR, null, null]",
         null
         );
}

/**
 * Predicates in HAVING clauses are not (yet) pushed down to the VTI.
 * Tracked as DERBY-4650.
 */
public void test_08_having() throws Exception
{
    assertPR( getConnection(),
              "select s_r, count(*) from table(integerList()) t " +
              "group by s_r having s_r > 1",
              new String[][] {{"100", "1"}, {"1000", "1"}, {"10000", "1"}},
              "[S_R, null, null, null]",
              null // DERBY-4650: should be "\"S_R\" > 1" if pushed down
              );
}

/**
 * Verify that attempts to create a trailing constant qualification do not
 * cause the VTI to return the wrong rows.
 * Tracked as DERBY-4651.
*/ public void test_09_4651() throws Exception { Connection conn = getConnection(); assertPR ( conn, "select s_r, s_nr from table( integerList() ) s order by s_r\n", new String[][] { { "1" , "2" }, { "100" , "200" }, { "1000" , "2000" }, { "10000" , "20000" }, }, "[S_R, S_NR, null, null]", null, 4 ); assertPR ( conn, "select s_r, s_nr from table( integerList() ) s where s_r > 500 order by s_r\n", new String[][] { { "1000" , "2000" }, { "10000" , "20000" }, }, "[S_R, S_NR, null, null]", "\"S_R\" > 500", 2 ); assertPR ( conn, "select s_r, s_nr from table( integerList() ) s where s_r > 500 or 1=1 order by s_r\n", new String[][] { { "1" , "2" }, { "100" , "200" }, { "1000" , "2000" }, { "10000" , "20000" }, }, "[S_R, S_NR, null, null]", null, 4 ); assertPR ( conn, "select s_r, s_nr from table( integerList() ) s where s_r > 500 and 1 != 1 order by s_r\n", new String[][] { }, "[S_R, S_NR, null, null]", null, 4 ); } /** * Test that {@code Restriction.toSQL()} returns properly quoted column * names. DERBY-4654. */ public void test_10_quotes_in_column_names() throws Exception { String[][] expectedRows = new String[][] {{"100", "200", "300", "400"}}; String expectedRestriction = "( \"cOL \"\"2\"\"\" < 1000 ) AND ( \"CoL \"\"1\"\"\" > 1 )"; // Check that we can execute a query against a restricted VTI with // double quotes in the column names. assertPR( getConnection(), "select * from table(integerListSpecialColNames()) t " + "where \"CoL \"\"1\"\"\" > 1 and \"cOL \"\"2\"\"\" < 1000", expectedRows, "[CoL \"1\", cOL \"2\", COL3, COL4]", expectedRestriction); // Get the restriction that was pushed down. 
Statement stmt = createStatement(); ResultSet rs = executeQuery(stmt, "values getLastRestriction()"); assertTrue("empty result", rs.next()); String restriction = rs.getString(1); assertEquals(expectedRestriction, restriction); rs.close(); // Verify that the returned restriction has correct syntax so that // we can put it directly into the WHERE clause of a select query and // get the same rows as we did above. rs = executeQuery( stmt, "select * from table(integerListSpecialColNames()) t where " + restriction); JDBC.assertUnorderedResultSet(rs, expectedRows); } /** * Verify that Restriction.toSQL() returns usable SQL for all of the * comparable types. See DERBY-5369 and DERBY-5370. */ public void test_11_5369_5370() throws Exception { Connection conn = getConnection(); // // The table function used by this test extends VTITemplate, an // implementation of the JDBC 3.0 ResultSet. This table function will // not run on JSR169 because the JDBC 3.0 ResultSet pulls in classes // which don't exist in the JSR169 java.sql package (e.g., java.sql.Ref). 
// if ( JDBC.vmSupportsJSR169() ) { return; } // if this fails, then we need to add a new data type to this test vetDatatypeCount( conn, 22 ); // comparable types vet5370positive( conn, "BOOLEAN_COL", "false", "false", "true" ); vet5370positive( conn, "BIGINT_COL", "0", "0", "1" ); vet5370positive( conn, "CHAR_COL", "'0'", "0 ", "1 " ); vet5370positive( conn, "CHAR_FOR_BIT_DATA_COL", "X'de'", "de202020202020202020", "dd202020202020202020" ); vet5370positive( conn, "DATE_COL", "DATE('1994-02-23')", "1994-02-23", "1994-02-24" ); vet5370positive( conn, "DECIMAL_COL", "0.00", "0.00", "1.00" ); vet5370positive( conn, "REAL_COL", "0.0", "0.0", "1.0" ); vet5370positive( conn, "DOUBLE_COL", "0.0", "0.0", "1.0" ); vet5370positive( conn, "INT_COL", "0", "0", "1" ); vet5370positive( conn, "NUMERIC_COL", "0.00", "0.00", "1.00" ); vet5370positive( conn, "SMALLINT_COL", "0", "0", "1" ); vet5370positive( conn, "TIME_COL", "TIME('15:09:02')", "15:09:02", "15:09:03" ); vet5370positive( conn, "TIMESTAMP_COL", "TIMESTAMP('1962-09-23 03:23:34.234')", "1962-09-23 03:23:34.234", "1963-09-23 03:23:34.234" ); vet5370positive( conn, "VARCHAR_COL", "'0'", "0", "1" ); vet5370positive( conn, "VARCHAR_FOR_BIT_DATA_COL", "X'de'", "de", "dd" ); // // The following all fail. If these comparisons start working, then this // test should be revisited to make sure that Restriction.toSQL() handles // the types which used to not be comparable. 
// vet5370negative( "BLOB_COL", "makeBlob5370()" ); vet5370negative( "CLOB_COL", "'0'" ); vet5370negative( "LONG_VARCHAR_COL", "'0'" ); vet5370negative( "LONG_VARCHAR_FOR_BIT_DATA_COL", "X'de'" ); } private void vet5370positive ( Connection conn, String columnName, String columnValue, String expectedValue, String negatedValue ) throws Exception { assertResults ( conn, "select " + columnName + " from table( restricted5370( 'APP', 'T_5370' ) ) s\n" + "where " + columnName + " = " + columnValue, new String[][] { new String[] { expectedValue } }, false ); assertResults ( conn, "values( lastQuery5370() )", new String[][] { new String[] { "select " + doubleQuote( columnName ) + "\n" + "from " + doubleQuote( "APP" ) + "." + doubleQuote( "T_5370" ) + "\n" + "where " + doubleQuote( columnName ) + " = " + columnValue } }, false ); assertResults ( conn, "select " + columnName + " from table( restricted5370( 'APP', 'T_5370' ) ) s\n" + "where " + columnName + " != " + columnValue, new String[][] { new String[] { negatedValue } }, false ); assertResults ( conn, "values( lastQuery5370() )", new String[][] { new String[] { "select " + doubleQuote( columnName ) + "\n" + "from " + doubleQuote( "APP" ) + "." 
                  // tail of vet5370positive(): verify that lastQuery5370()
                  // echoes the negated ( != ) query with quoted identifiers
                  + doubleQuote( "T_5370" ) + "\n"
                  + "where " + doubleQuote( columnName ) + " != " + columnValue
              }
         },
         false
         );
}

/** Wrap the text in SQL double quotes, e.g. colName becomes "colName". */
private static String doubleQuote( String text ) { return '"' + text + '"'; }

/**
 * Verify that comparing the given column to the given value raises
 * SQLSTATE 42818 (types not comparable) at compile time, i.e. that no
 * restriction on this datatype can be pushed into the VTI.
 */
private void vet5370negative( String columnName, String columnValue ) throws Exception
{
    expectCompilationError
        (
         "42818",
         "select " + columnName + " from table( restricted5370( 'APP', 'T_5370' ) ) s\n" +
         "where " + columnName + " = " + columnValue
         );
}

/**
 * Count the datatypes reported by DatabaseMetaData.getTypeInfo() and
 * assert that the count matches expectedTypeCount.
 */
private int vetDatatypeCount( Connection conn, int expectedTypeCount ) throws Exception
{
    //
    // If this fails, it means that we need to add another datatype to
    // this test.
    //
    ResultSet rs = conn.getMetaData().getTypeInfo();
    int actualTypeCount = 0;
    while ( rs.next() ) { actualTypeCount++; }
    rs.close();

    assertEquals( expectedTypeCount, actualTypeCount );

    return actualTypeCount;
}

/**
 * Verify that if you wrap a RestrictedVTI in a view, selects
 * from the view pass the restriction on to the RestrictedVTI.
 * However, the projection is not passed through to the view so it
 * is not passed on to the RestrictedVTI, as described on DERBY-6036.
 * When that issue is addressed, we should adjust this test case.
*/ public void test_12_6036() throws Exception { Connection conn = getConnection(); goodStatement( conn, "create view v6036 as select * from table( integerList() ) s" ); // directly selecting from the vti pushes down both the projection and the restriction assertResults ( conn, "select s_nr from table( integerList() ) s where ns_r = 3000", new String[][] { { "2000" } }, false ); assertResults ( conn, "values getLastProjection()", new String[][] { { "[null, S_NR, NS_R, null]" } }, false ); assertResults ( conn, "values getLastRestriction()", new String[][] { { "\"NS_R\" = 3000" } }, false ); // directly selecting from the view only pushes down the restriction assertResults ( conn, "select s_nr from v6036 where ns_r = 3000", new String[][] { { "2000" } }, false ); assertResults ( conn, "values getLastProjection()", new String[][] { { "[S_R, S_NR, NS_R, NS_NR]" } }, false ); assertResults ( conn, "values getLastRestriction()", new String[][] { { "\"NS_R\" = 3000" } }, false ); } /////////////////////////////////////////////////////////////////////////////////// // // SQL ROUTINES // /////////////////////////////////////////////////////////////////////////////////// public static IntegerArrayVTI integerList() { // S => in SELECT list // NS => NOT in SELECT LIST // R => in restriction // NR => NOT in restriction return new IntegerArrayVTI ( new String[] { "S_R", "S_NR", "NS_R", "NS_NR" }, new int[][] { new int[] { 1, 2, 3, 4 }, new int[] { 100, 200, 300, 400 }, new int[] { 1000, 2000, 3000, 4000 }, new int[] { 10000, 20000, 30000, 40000 }, } ); } public static IntegerArrayVTI nullableIntegerList() { // S => in SELECT list // NS => NOT in SELECT LIST // R => in restriction // NR => NOT in restriction return new IntegerArrayVTI ( new String[] { "S_R", "S_NR", "NS_R", "NS_NR" }, new Integer[][] { new Integer[] { null, i(2), i(3), i(4) }, new Integer[] { i(100), null, i(300), i(400) }, new Integer[] { i(1000), i(2000), null, i(4000) }, new Integer[] { i(10000), i(20000), 
i(30000), null }, } ); } private static Integer i( int intValue ) { return new Integer( intValue ); } public static IntegerArrayVTI integerListSpecialColNames() { return new IntegerArrayVTI ( new String[] { "CoL \"1\"", "cOL \"2\"", "COL3", "COL4" }, new int[][] { new int[] { 1, 2, 3, 4 }, new int[] { 100, 200, 300, 400 }, new int[] { 1000, 2000, 3000, 4000 }, new int[] { 10000, 20000, 30000, 40000 }, } ); } /////////////////////////////////////////////////////////////////////////////////// // // MINIONS // /////////////////////////////////////////////////////////////////////////////////// /** Return true if the SQL routine exists */ private boolean routineExists( Connection conn, String functionName ) throws Exception { PreparedStatement ps = chattyPrepare( conn, "select count (*) from sys.sysaliases where alias = ?" ); ps.setString( 1, functionName ); ResultSet rs = ps.executeQuery(); rs.next(); boolean retval = rs.getInt( 1 ) > 0 ? true : false; rs.close(); ps.close(); return retval; } /** Return true if the table exists */ private boolean tableExists( Connection conn, String tableName ) throws Exception { PreparedStatement ps = chattyPrepare( conn, "select count (*) from sys.systables where tablename = ?" ); ps.setString( 1, tableName ); ResultSet rs = ps.executeQuery(); rs.next(); boolean retval = rs.getInt( 1 ) > 0 ? true : false; rs.close(); ps.close(); return retval; } /** * <p> * Run a query against a RestrictedVTI, verify that the expected * projection and restriction are pushed into the VTI, and verify * that the VTI returns the expected number of rows. 
* </p> */ private void assertPR ( Connection conn, String query, String[][] expectedResults, String expectedProjection, String expectedRestriction, int expectedQualifiedRowCount ) throws Exception { assertPR( conn, query, expectedResults, expectedProjection, expectedRestriction ); assertResults ( conn, "values ( getCount() )\n", new String[][] { { Integer.toString( expectedQualifiedRowCount ) } }, false ); } /** * <p> * Run a query against a RestrictedVTI and verify that the expected * projection and restriction are pushed into the VTI. * </p> */ private void assertPR ( Connection conn, String query, String[][] expectedResults, String expectedProjection, String expectedRestriction ) throws Exception { assertResults ( conn, query, expectedResults, false ); assertResults ( conn, "values ( getLastProjection() )\n", new String[][] { { expectedProjection } }, false ); assertResults ( conn, "values ( getLastRestriction() )\n", new String[][] { { expectedRestriction } }, false ); } }
/* * Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.wso2.carbon.ml.rest.api; import java.util.List; import javax.ws.rs.*; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.http.HttpHeaders; import org.wso2.carbon.context.PrivilegedCarbonContext; import org.wso2.carbon.ml.commons.domain.*; import org.wso2.carbon.ml.core.exceptions.MLAnalysisHandlerException; import org.wso2.carbon.ml.core.impl.MLAnalysisHandler; import org.wso2.carbon.ml.core.utils.MLUtils; import org.wso2.carbon.ml.rest.api.model.MLAnalysisConfigsBean; import org.wso2.carbon.ml.rest.api.model.MLErrorBean; /** * WSO2 ML Analyses API. All the operations related to analyses are delegated from this class. */ @Path("/analyses") public class AnalysisApiV11 extends MLRestAPI { private static final Log logger = LogFactory.getLog(AnalysisApiV11.class); /* * Analysis handler which is doing the real work. */ private MLAnalysisHandler mlAnalysisHandler; public AnalysisApiV11() { mlAnalysisHandler = new MLAnalysisHandler(); } /** * HTTP Options method implementation for analysis API. * * @return */ @OPTIONS public Response options() { return Response.ok().header(HttpHeaders.ALLOW, "GET POST DELETE").build(); } /** * Create a new analysis of a project. 
* @param analysis {@link MLAnalysis} object */ @POST @Produces("application/json") public Response createAnalysis(MLAnalysis analysis) { String analysisName = analysis.getName(); if (analysisName == null || analysisName.isEmpty() || analysis.getProjectId() == 0) { String msg = "Analysis name or project Id is missing: " + analysis; logger.error(msg); return Response.status(Response.Status.BAD_REQUEST).entity(new MLErrorBean(msg)).build(); } if (!MLUtils.isValidName(analysisName)) { String msg = "analysis name: " + analysisName + " contains invalid characters."; logger.error(msg); return Response.status(Response.Status.BAD_REQUEST).entity(new MLErrorBean(msg)).build(); } PrivilegedCarbonContext carbonContext = PrivilegedCarbonContext.getThreadLocalCarbonContext(); int tenantId = carbonContext.getTenantId(); String userName = carbonContext.getUsername(); try { analysis.setTenantId(tenantId); analysis.setUserName(userName); mlAnalysisHandler.createAnalysis(analysis); return Response.ok().build(); } catch (MLAnalysisHandlerException e) { String msg = MLUtils.getErrorMsg(String.format( "Error occurred while creating an [analysis] %s of tenant [id] %s and [user] %s .", analysis, tenantId, userName), e); logger.error(msg, e); return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(new MLErrorBean(e.getMessage())) .build(); } } /** * Adding customized features of an analysis. 
* @param analysisId Unique id of the analysis * @param customizedFeatures {@link List} of {@link MLCustomizedFeature} objects */ @POST @Path("/{analysisId}/features") @Produces("application/json") @Consumes("application/json") public Response addCustomizedFeatures(@PathParam("analysisId") long analysisId, List<MLCustomizedFeature> customizedFeatures) { PrivilegedCarbonContext carbonContext = PrivilegedCarbonContext.getThreadLocalCarbonContext(); int tenantId = carbonContext.getTenantId(); String userName = carbonContext.getUsername(); try { mlAnalysisHandler.addCustomizedFeatures(analysisId, customizedFeatures, tenantId, userName); return Response.ok().build(); } catch (MLAnalysisHandlerException e) { String msg = MLUtils .getErrorMsg( String.format( "Error occurred while adding customized features for the analysis [id] %s of tenant [id] %s and [user] %s .", analysisId, tenantId, userName), e); logger.error(msg, e); return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(new MLErrorBean(e.getMessage())) .build(); } } /** * Set default features as customized features of an analysis. 
* @param analysisId Unique id of the analysis * @param customizedValues {@link MLCustomizedFeature} object */ @POST @Path("/{analysisId}/features/defaults") @Produces("application/json") @Consumes("application/json") public Response addDefaultsIntoCustomizedFeatures(@PathParam("analysisId") long analysisId, MLCustomizedFeature customizedValues) { PrivilegedCarbonContext carbonContext = PrivilegedCarbonContext.getThreadLocalCarbonContext(); int tenantId = carbonContext.getTenantId(); String userName = carbonContext.getUsername(); try { customizedValues.setTenantId(tenantId); customizedValues.setUserName(userName); customizedValues.setLastModifiedUser(userName); mlAnalysisHandler.addDefaultsIntoCustomizedFeatures(analysisId, customizedValues); return Response.ok().build(); } catch (MLAnalysisHandlerException e) { String msg = MLUtils .getErrorMsg( String.format( "Error occurred while adding default features into customized features for the analysis [id] %s of tenant [id] %s and [user] %s .", analysisId, tenantId, userName), e); logger.error(msg, e); return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(new MLErrorBean(e.getMessage())) .build(); } } /** * Get summarized features of an analysis. 
* @param analysisId Unique id of the analysis * @param limit Number of features need to retrieve, from the starting index * @param offset Starting index * @return JSON array of {@link FeatureSummary} objects */ @GET @Path("/{analysisId}/summarizedFeatures") @Produces("application/json") @Consumes("application/json") public Response getSummarizedFeatures(@PathParam("analysisId") long analysisId, @QueryParam("limit") int limit, @QueryParam("offset") int offset) { PrivilegedCarbonContext carbonContext = PrivilegedCarbonContext.getThreadLocalCarbonContext(); int tenantId = carbonContext.getTenantId(); String userName = carbonContext.getUsername(); try { List<FeatureSummary> features = mlAnalysisHandler.getSummarizedFeatures(tenantId, userName, analysisId, limit, offset); return Response.ok(features).build(); } catch (MLAnalysisHandlerException e) { String msg = MLUtils .getErrorMsg( String.format( "Error occurred while retrieving summarized features for the analysis [id] %s of tenant [id] %s and [user] %s .", analysisId, tenantId, userName), e); logger.error(msg, e); return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(new MLErrorBean(e.getMessage())) .build(); } } /** * Get customized features of an analysis. 
* @param analysisId Unique id of the analysis * @param limit Number of features need to retrieve, from the starting index * @param offset Starting index * @return JSON array of {@link MLCustomizedFeature} objects */ @GET @Path("/{analysisId}/customizedFeatures") @Produces("application/json") @Consumes("application/json") public Response getCustomizedFeatures(@PathParam("analysisId") long analysisId, @QueryParam("limit") int limit, @QueryParam("offset") int offset) { PrivilegedCarbonContext carbonContext = PrivilegedCarbonContext.getThreadLocalCarbonContext(); int tenantId = carbonContext.getTenantId(); String userName = carbonContext.getUsername(); try { List<MLCustomizedFeature> customizedFeatures = mlAnalysisHandler.getCustomizedFeatures(tenantId, userName, analysisId, limit, offset); return Response.ok(customizedFeatures).build(); } catch (MLAnalysisHandlerException e) { String msg = MLUtils .getErrorMsg( String.format( "Error occurred while retrieving customized features for the analysis [id] %s of tenant [id] %s and [user] %s .", analysisId, tenantId, userName), e); logger.error(msg, e); return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(new MLErrorBean(e.getMessage())) .build(); } } /** * Get configurations of an analysis. 
* @param analysisId Unique id of the analysis * @param limit Number of features included in the analysis configuration * @param offset Starting index of the features * @return JSON array of {@link MLAnalysisConfigsBean} objects */ @GET @Path("/{analysisId}/configs") @Produces("application/json") @Consumes("application/json") public Response getConfigs(@PathParam("analysisId") long analysisId, @QueryParam("limit") int limit, @QueryParam("offset") int offset) { PrivilegedCarbonContext carbonContext = PrivilegedCarbonContext.getThreadLocalCarbonContext(); int tenantId = carbonContext.getTenantId(); String userName = carbonContext.getUsername(); try { MLAnalysisConfigsBean mlAnalysisConfigsBean = new MLAnalysisConfigsBean(); mlAnalysisConfigsBean.setId(analysisId); mlAnalysisConfigsBean.setCustomizedFeatures(mlAnalysisHandler.getCustomizedFeatures(tenantId, userName, analysisId, limit, offset)); mlAnalysisConfigsBean.setAlgorithmName(mlAnalysisHandler.getAlgorithmName(analysisId)); mlAnalysisConfigsBean.setResponseVariable(mlAnalysisHandler.getResponseVariable(analysisId)); mlAnalysisConfigsBean.setTrainDataFraction(mlAnalysisHandler.getTrainDataFraction(analysisId)); mlAnalysisConfigsBean.setNormalLabels(mlAnalysisHandler.getNormalLabels(analysisId)); mlAnalysisConfigsBean.setNormalization(Boolean.parseBoolean(mlAnalysisHandler.getNormalization(analysisId))); mlAnalysisConfigsBean.setNewNormalLabel(mlAnalysisHandler.getNewNormalLabel(analysisId)); mlAnalysisConfigsBean.setNewAnomalyLabel(mlAnalysisHandler.getNewAnomalyLabel(analysisId)); mlAnalysisConfigsBean.setUserVariable(mlAnalysisHandler.getUserVariable(analysisId)); mlAnalysisConfigsBean.setProductVariable(mlAnalysisHandler.getProductVariable(analysisId)); mlAnalysisConfigsBean.setRatingVariable(mlAnalysisHandler.getRatingVariable(analysisId)); mlAnalysisConfigsBean.setObservations(mlAnalysisHandler.getObservations(analysisId)); 
mlAnalysisConfigsBean.setHyperParameters(mlAnalysisHandler.getHyperParameters(analysisId, mlAnalysisHandler.getAlgorithmName(analysisId))); return Response.ok(mlAnalysisConfigsBean).build(); } catch (MLAnalysisHandlerException e) { String msg = MLUtils .getErrorMsg( String.format( "Error occurred while retrieving configurations for the analysis [id] %s of tenant [id] %s and [user] %s .", analysisId, tenantId, userName), e); logger.error(msg, e); return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(new MLErrorBean(e.getMessage())) .build(); } } /** * Get filtered feature names of an analysis. * @param analysisId Unique id of the analysis * @param featureType Feature type need to retrieve (Categorical or Numerical) * @return JSON array of feature names */ @GET @Path("/{analysisId}/filteredFeatures") @Produces("application/json") public Response getfilteredFeatures(@PathParam("analysisId") String analysisId, @QueryParam("featureType") String featureType) { PrivilegedCarbonContext carbonContext = PrivilegedCarbonContext.getThreadLocalCarbonContext(); int tenantId = carbonContext.getTenantId(); String userName = carbonContext.getUsername(); try { List<String> features = mlAnalysisHandler.getFeatureNames(analysisId, featureType); return Response.ok(features).build(); } catch (MLAnalysisHandlerException e) { String msg = MLUtils .getErrorMsg( String.format( "Error occurred while retrieving filtered feature names for the analysis [id] %s of tenant [id] %s and [user] %s .", analysisId, tenantId, userName), e); logger.error(msg, e); return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(new MLErrorBean(e.getMessage())) .build(); } } /** * Get all feature names of an analysis. 
* @param analysisId Unique id of the analysis * @return JSON array of feature names */ @GET @Path("/{analysisId}/features") @Produces("application/json") public Response getAllFeatures(@PathParam("analysisId") String analysisId) { PrivilegedCarbonContext carbonContext = PrivilegedCarbonContext.getThreadLocalCarbonContext(); int tenantId = carbonContext.getTenantId(); String userName = carbonContext.getUsername(); try { List<String> features = mlAnalysisHandler.getFeatureNames(analysisId); return Response.ok(features).build(); } catch (MLAnalysisHandlerException e) { String msg = MLUtils .getErrorMsg( String.format( "Error occurred while retrieving all feature names for the analysis [id] %s of tenant [id] %s and [user] %s .", analysisId, tenantId, userName), e); logger.error(msg, e); return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(new MLErrorBean(e.getMessage())) .build(); } } /** * Get the response variable of an analysis. * @param analysisId Unique id of the analysis * @return Response variable name */ @GET @Path("/{analysisId}/responseVariables") @Produces("application/json") @Consumes("application/json") public Response getResponseVariable(@PathParam("analysisId") long analysisId) { PrivilegedCarbonContext carbonContext = PrivilegedCarbonContext.getThreadLocalCarbonContext(); int tenantId = carbonContext.getTenantId(); String userName = carbonContext.getUsername(); try { String responseVariable = mlAnalysisHandler.getResponseVariable(analysisId); return Response.ok(responseVariable).build(); } catch (MLAnalysisHandlerException e) { String msg = MLUtils .getErrorMsg( String.format( "Error occurred while retrieving response variable for the analysis [id] %s of tenant [id] %s and [user] %s .", analysisId, tenantId, userName), e); logger.error(msg, e); return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(new MLErrorBean(e.getMessage())) .build(); } } /** * Get the algorithm name of an analysis. 
* @param analysisId Unique id of the analysis * @return Algorithm name */ @GET @Path("/{analysisId}/algorithmName") @Produces("application/json") @Consumes("application/json") public Response getAlgorithmName(@PathParam("analysisId") long analysisId) { PrivilegedCarbonContext carbonContext = PrivilegedCarbonContext.getThreadLocalCarbonContext(); int tenantId = carbonContext.getTenantId(); String userName = carbonContext.getUsername(); try { String algorithmName = mlAnalysisHandler.getAlgorithmName(analysisId); return Response.ok(algorithmName).build(); } catch (MLAnalysisHandlerException e) { String msg = MLUtils .getErrorMsg( String.format( "Error occurred while retrieving algorithm name for the analysis [id] %s of tenant [id] %s and [user] %s .", analysisId, tenantId, userName), e); logger.error(msg, e); return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(new MLErrorBean(e.getMessage())) .build(); } } /** * Get the algorithm type of an analysis. * @param analysisId Unique id of the analysis * @return Algorithm type */ @GET @Path("/{analysisId}/algorithmType") @Produces("application/json") @Consumes("application/json") public Response getAlgorithmType(@PathParam("analysisId") long analysisId) { PrivilegedCarbonContext carbonContext = PrivilegedCarbonContext.getThreadLocalCarbonContext(); int tenantId = carbonContext.getTenantId(); String userName = carbonContext.getUsername(); try { String responseVariable = mlAnalysisHandler.getAlgorithmType(analysisId); return Response.ok(responseVariable).build(); } catch (MLAnalysisHandlerException e) { String msg = MLUtils .getErrorMsg( String.format( "Error occurred while retrieving algorithm type for the analysis [id] %s of tenant [id] %s and [user] %s .", analysisId, tenantId, userName), e); logger.error(msg, e); return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(new MLErrorBean(e.getMessage())) .build(); } } /** * Get the normal labels of an analysis. 
* @param analysisId Unique id of the analysis * @return Normal Labels */ @GET @Path("/{analysisId}/normalLabels") @Produces("application/json") @Consumes("application/json") public Response getNormalLabels(@PathParam("analysisId") long analysisId) { PrivilegedCarbonContext carbonContext = PrivilegedCarbonContext.getThreadLocalCarbonContext(); int tenantId = carbonContext.getTenantId(); String userName = carbonContext.getUsername(); try { String normalLabels = mlAnalysisHandler.getNormalLabels(analysisId); return Response.ok(normalLabels).build(); } catch (MLAnalysisHandlerException e) { String msg = MLUtils .getErrorMsg( String.format( "Error occurred while retrieving normal labels for the analysis [id] %s of tenant [id] %s and [user] %s .", analysisId, tenantId, userName), e); logger.error(msg, e); return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(new MLErrorBean(e.getMessage())) .build(); } } /** * Get the normalization option of an analysis. * @param analysisId Unique id of the analysis * @return Normalization option */ @GET @Path("/{analysisId}/normalization") @Produces("application/json") @Consumes("application/json") public Response getNormalization(@PathParam("analysisId") long analysisId) { PrivilegedCarbonContext carbonContext = PrivilegedCarbonContext.getThreadLocalCarbonContext(); int tenantId = carbonContext.getTenantId(); String userName = carbonContext.getUsername(); try { String normalLabels = mlAnalysisHandler.getNormalization(analysisId); return Response.ok(normalLabels).build(); } catch (MLAnalysisHandlerException e) { String msg = MLUtils .getErrorMsg( String.format( "Error occurred while retrieving data normalization selection for the analysis [id] %s of tenant [id] %s and [user] %s .", analysisId, tenantId, userName), e); logger.error(msg, e); return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(new MLErrorBean(e.getMessage())) .build(); } } /** * Get the new normal label of an analysis. 
* @param analysisId Unique id of the analysis * @return New Normal Label */ @GET @Path("/{analysisId}/newNormalLabel") @Produces("application/json") @Consumes("application/json") public Response getNewNormalLabel(@PathParam("analysisId") long analysisId) { PrivilegedCarbonContext carbonContext = PrivilegedCarbonContext.getThreadLocalCarbonContext(); int tenantId = carbonContext.getTenantId(); String userName = carbonContext.getUsername(); try { String normalLabels = mlAnalysisHandler.getNewNormalLabel(analysisId); return Response.ok(normalLabels).build(); } catch (MLAnalysisHandlerException e) { String msg = MLUtils .getErrorMsg( String.format( "Error occurred while retrieving data new normal label for the analysis [id] %s of tenant [id] %s and [user] %s .", analysisId, tenantId, userName), e); logger.error(msg, e); return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(new MLErrorBean(e.getMessage())) .build(); } } /** * Get the new anomaly labels of an analysis. * @param analysisId Unique id of the analysis * @return New Anomaly Label */ @GET @Path("/{analysisId}/newAnomalyLabel") @Produces("application/json") @Consumes("application/json") public Response getNewAnomalyLabel(@PathParam("analysisId") long analysisId) { PrivilegedCarbonContext carbonContext = PrivilegedCarbonContext.getThreadLocalCarbonContext(); int tenantId = carbonContext.getTenantId(); String userName = carbonContext.getUsername(); try { String normalLabels = mlAnalysisHandler.getNewAnomalyLabel(analysisId); return Response.ok(normalLabels).build(); } catch (MLAnalysisHandlerException e) { String msg = MLUtils .getErrorMsg( String.format( "Error occurred while retrieving new anomaly label for the analysis [id] %s of tenant [id] %s and [user] %s .", analysisId, tenantId, userName), e); logger.error(msg, e); return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(new MLErrorBean(e.getMessage())) .build(); } } /** * Get the train data fraction of an analysis. 
* @param analysisId Unique id of the analysis * @return Train data fraction */ @GET @Path("/{analysisId}/trainDataFraction") @Produces("application/json") @Consumes("application/json") public Response getTrainDataFraction(@PathParam("analysisId") long analysisId) { PrivilegedCarbonContext carbonContext = PrivilegedCarbonContext.getThreadLocalCarbonContext(); int tenantId = carbonContext.getTenantId(); String userName = carbonContext.getUsername(); try { double trainDataFraction = mlAnalysisHandler.getTrainDataFraction(analysisId); return Response.ok(trainDataFraction).build(); } catch (MLAnalysisHandlerException e) { String msg = MLUtils .getErrorMsg( String.format( "Error occurred while retrieving train data fraction for the analysis [id] %s of tenant [id] %s and [user] %s .", analysisId, tenantId, userName), e); logger.error(msg, e); return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(new MLErrorBean(e.getMessage())) .build(); } } /** * Get summary statistics of a feature of an analysis. 
* @param analysisId Unique id of the analysis * @param featureName Name of the feature * @return Summary statistics of the feature */ @GET @Path("/{analysisId}/stats") @Produces("application/json") @Consumes("application/json") public Response getSummaryStatistics(@PathParam("analysisId") long analysisId, @QueryParam("feature") String featureName) { PrivilegedCarbonContext carbonContext = PrivilegedCarbonContext.getThreadLocalCarbonContext(); int tenantId = carbonContext.getTenantId(); String userName = carbonContext.getUsername(); try { if (featureName == null) { return Response.status(Response.Status.NOT_FOUND) .entity(new MLErrorBean("feature query param was not set.")).build(); } String summary = mlAnalysisHandler.getSummaryStats(tenantId, userName, analysisId, featureName); return Response.ok(summary).build(); } catch (MLAnalysisHandlerException e) { String msg = MLUtils .getErrorMsg( String.format( "Error occurred while retrieving summarized stats of feature [name] %s for the analysis [id] %s of tenant [id] %s and [user] %s .", featureName, analysisId, tenantId, userName), e); logger.error(msg, e); return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(new MLErrorBean(e.getMessage())) .build(); } } /** * Add configurations of an analysis. 
* @param analysisId Unique id of the analysis * @param modelConfigs {@link List} of {@link MLModelConfiguration} objects */ @POST @Path("/{analysisId}/configurations") @Produces("application/json") @Consumes("application/json") public Response addModelConfiguration(@PathParam("analysisId") long analysisId, List<MLModelConfiguration> modelConfigs) { PrivilegedCarbonContext carbonContext = PrivilegedCarbonContext.getThreadLocalCarbonContext(); int tenantId = carbonContext.getTenantId(); String userName = carbonContext.getUsername(); try { mlAnalysisHandler.addModelConfigurations(analysisId, modelConfigs); return Response.ok().build(); } catch (MLAnalysisHandlerException e) { String msg = MLUtils .getErrorMsg( String.format( "Error occurred while adding model configurations for the analysis [id] %s of tenant [id] %s and [user] %s .", analysisId, tenantId, userName), e); logger.error(msg, e); return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(new MLErrorBean(e.getMessage())) .build(); } } /** * Add hyper-parameters for the selected algorithm of an analysis. 
* @param analysisId Unique id of the analysis * @param hyperParameters {@link List} of {@link MLHyperParameter} objects * @param algorithmName Algorithm name */ @POST @Path("/{analysisId}/hyperParams") @Produces("application/json") @Consumes("application/json") public Response addHyperParameters(@PathParam("analysisId") long analysisId, List<MLHyperParameter> hyperParameters, @QueryParam("algorithmName") String algorithmName) { PrivilegedCarbonContext carbonContext = PrivilegedCarbonContext.getThreadLocalCarbonContext(); int tenantId = carbonContext.getTenantId(); String userName = carbonContext.getUsername(); try { mlAnalysisHandler.addHyperParameters(analysisId, hyperParameters, algorithmName); return Response.ok().build(); } catch (MLAnalysisHandlerException e) { String msg = MLUtils .getErrorMsg( String.format( "Error occurred while adding hyper parameters for the analysis [id] %s of tenant [id] %s and [user] %s .", analysisId, tenantId, userName), e); logger.error(msg, e); return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(new MLErrorBean(e.getMessage())) .build(); } } /** * Get hyper-parameter of an analysis. 
* @param analysisId Unique id of the analysis * @param algorithmName Algorithm name * @return JSON array of {@link MLHyperParameter} objects */ @GET @Path("/{analysisId}/hyperParameters") @Produces("application/json") @Consumes("application/json") public Response getHyperParameters(@PathParam("analysisId") long analysisId, @QueryParam("algorithmName") String algorithmName) { PrivilegedCarbonContext carbonContext = PrivilegedCarbonContext.getThreadLocalCarbonContext(); int tenantId = carbonContext.getTenantId(); String userName = carbonContext.getUsername(); try { List<MLHyperParameter> hyperParameters = mlAnalysisHandler.getHyperParameters(analysisId, algorithmName); return Response.ok(hyperParameters).build(); } catch (MLAnalysisHandlerException e) { String msg = MLUtils .getErrorMsg( String.format( "Error occurred while retrieving hyper parameters of algorithm [name] %s for the analysis [id] %s of tenant [id] %s and [user] %s .", algorithmName, analysisId, tenantId, userName), e); logger.error(msg, e); return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(new MLErrorBean(e.getMessage())) .build(); } } /** * Load default configurations as configurations of an analysis. 
* @param analysisId Unique id of the analysis */ @POST @Path("/{analysisId}/hyperParams/defaults") @Produces("application/json") @Consumes("application/json") public Response addDefaultsIntoHyperParameters(@PathParam("analysisId") long analysisId) { PrivilegedCarbonContext carbonContext = PrivilegedCarbonContext.getThreadLocalCarbonContext(); int tenantId = carbonContext.getTenantId(); String userName = carbonContext.getUsername(); try { mlAnalysisHandler.addDefaultsIntoHyperParameters(analysisId); return Response.ok().build(); } catch (MLAnalysisHandlerException e) { String msg = MLUtils .getErrorMsg( String.format( "Error occurred while loading default hyper parameters for the analysis [id] %s of tenant [id] %s and [user] %s .", analysisId, tenantId, userName), e); logger.error(msg, e); return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(new MLErrorBean(e.getMessage())) .build(); } } /** * Retrieve all analyses. * @return JSON array of {@link MLAnalysis} objects */ @GET @Produces("application/json") public Response getAllAnalyses() { PrivilegedCarbonContext carbonContext = PrivilegedCarbonContext.getThreadLocalCarbonContext(); int tenantId = carbonContext.getTenantId(); String userName = carbonContext.getUsername(); try { List<MLAnalysis> analyses = mlAnalysisHandler.getAnalyses(tenantId, userName); return Response.ok(analyses).build(); } catch (MLAnalysisHandlerException e) { String msg = MLUtils.getErrorMsg(String.format( "Error occurred while retrieving all analyses of tenant [id] %s and [user] %s .", tenantId, userName), e); logger.error(msg, e); return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(new MLErrorBean(e.getMessage())) .build(); } } /** * Get all models of an analysis. 
* @param analysisId Unique id of the analysis * @return JSON array of {@link MLModelData} objects */ @GET @Path("/{analysisId}/models") @Produces("application/json") public Response getAllModelsOfAnalysis(@PathParam("analysisId") long analysisId) { PrivilegedCarbonContext carbonContext = PrivilegedCarbonContext.getThreadLocalCarbonContext(); int tenantId = carbonContext.getTenantId(); String userName = carbonContext.getUsername(); try { List<MLModelData> models = mlAnalysisHandler.getAllModelsOfAnalysis(tenantId, userName, analysisId); return Response.ok(models).build(); } catch (MLAnalysisHandlerException e) { String msg = MLUtils.getErrorMsg(String.format( "Error occurred while retrieving all models of analysis [id] %s of tenant [id] %s and [user] %s .", analysisId, tenantId, userName), e); logger.error(msg, e); return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(new MLErrorBean(e.getMessage())) .build(); } } /** * Delete an analysis of a given ID. * @param analysisId Unique id of the analysis */ @DELETE @Path("/{analysisId}") @Produces("application/json") public Response deleteAnalysis(@PathParam("analysisId") long analysisId) { PrivilegedCarbonContext carbonContext = PrivilegedCarbonContext.getThreadLocalCarbonContext(); int tenantId = carbonContext.getTenantId(); String userName = carbonContext.getUsername(); try { mlAnalysisHandler.deleteAnalysis(tenantId, userName, analysisId); auditLog.info(String.format("User [name] %s of tenant [id] %s deleted an analysis [id] %s ", userName, tenantId, analysisId)); return Response.ok().build(); } catch (MLAnalysisHandlerException e) { String msg = MLUtils.getErrorMsg(String.format( "Error occurred while deleting an analysis [id] %s of tenant [id] %s and [user] %s .", analysisId, tenantId, userName), e); logger.error(msg, e); auditLog.error(msg, e); return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(new MLErrorBean(e.getMessage())) .build(); } } /** * get the user variable of an 
analysis. */ @GET @Path("/{analysisId}/userVariable") @Produces("application/json") @Consumes("application/json") public Response getUserVariable(@PathParam("analysisId") long analysisId) { PrivilegedCarbonContext carbonContext = PrivilegedCarbonContext.getThreadLocalCarbonContext(); int tenantId = carbonContext.getTenantId(); String userName = carbonContext.getUsername(); try { String userVariable = mlAnalysisHandler.getUserVariable(analysisId); return Response.ok(userVariable).build(); } catch (MLAnalysisHandlerException e) { String msg = MLUtils .getErrorMsg( String.format( "Error occurred while retrieving user variable for the analysis [id] %s of tenant [id] %s and [user] %s .", analysisId, tenantId, userName), e); logger.error(msg, e); return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(new MLErrorBean(e.getMessage())) .build(); } } /** * get the product variable of an analysis. */ @GET @Path("/{analysisId}/productVariable") @Produces("application/json") @Consumes("application/json") public Response getProductVariable(@PathParam("analysisId") long analysisId) { PrivilegedCarbonContext carbonContext = PrivilegedCarbonContext.getThreadLocalCarbonContext(); int tenantId = carbonContext.getTenantId(); String userName = carbonContext.getUsername(); try { String productVariable = mlAnalysisHandler.getProductVariable(analysisId); return Response.ok(productVariable).build(); } catch (MLAnalysisHandlerException e) { String msg = MLUtils .getErrorMsg( String.format( "Error occurred while retrieving product variable for the analysis [id] %s of tenant [id] %s and [user] %s .", analysisId, tenantId, userName), e); logger.error(msg, e); return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(new MLErrorBean(e.getMessage())) .build(); } } /** * get the rating variable of an analysis. 
*/ @GET @Path("/{analysisId}/ratingVariable") @Produces("application/json") @Consumes("application/json") public Response getRatingVariable(@PathParam("analysisId") long analysisId) { PrivilegedCarbonContext carbonContext = PrivilegedCarbonContext.getThreadLocalCarbonContext(); int tenantId = carbonContext.getTenantId(); String userName = carbonContext.getUsername(); try { String ratingVariable = mlAnalysisHandler.getRatingVariable(analysisId); return Response.ok(ratingVariable).build(); } catch (MLAnalysisHandlerException e) { String msg = MLUtils .getErrorMsg( String.format( "Error occurred while retrieving rating variable for the analysis [id] %s of tenant [id] %s and [user] %s .", analysisId, tenantId, userName), e); logger.error(msg, e); return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(new MLErrorBean(e.getMessage())) .build(); } } /** * get the observation list fraction of an analysis. */ @GET @Path("/{analysisId}/observationList") @Produces("application/json") @Consumes("application/json") public Response getObservationList(@PathParam("analysisId") long analysisId) { PrivilegedCarbonContext carbonContext = PrivilegedCarbonContext.getThreadLocalCarbonContext(); int tenantId = carbonContext.getTenantId(); String userName = carbonContext.getUsername(); try { String observations = mlAnalysisHandler.getObservations(analysisId); return Response.ok(observations).build(); } catch (MLAnalysisHandlerException e) { String msg = MLUtils .getErrorMsg( String.format( "Error occurred while retrieving observations for the analysis [id] %s of tenant [id] %s and [user] %s .", analysisId, tenantId, userName), e); logger.error(msg, e); return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(new MLErrorBean(e.getMessage())) .build(); } } }
/*
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.coprocessor;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.NavigableSet;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.ByteArrayComparable;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hbase.regionserver.KeyValueScanner;
import org.apache.hadoop.hbase.regionserver.MiniBatchOperationInProgress;
import org.apache.hadoop.hbase.regionserver.RegionScanner;
import org.apache.hadoop.hbase.regionserver.ScanType;
import org.apache.hadoop.hbase.regionserver.Store;
import org.apache.hadoop.hbase.regionserver.StoreFile;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
import org.apache.hadoop.hbase.regionserver.wal.HLogKey;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
import org.apache.hadoop.hbase.util.Pair;
// NOTE(review): org.mortbay.log.Log appears unused in this class — candidate for removal.
import org.mortbay.log.Log;

import com.google.common.collect.ImmutableList;

/**
 * An abstract class that implements RegionObserver.
 * By extending it, you can create your own region observer without
 * overriding all abstract methods of RegionObserver.
 *
 * Every hook below is either a no-op or returns its input unchanged (pass-through),
 * so subclasses only override what they need. Overloads taking a CompactionRequest
 * delegate to their request-less counterparts so that overriding either form works.
 */
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.COPROC)
@InterfaceStability.Evolving
public abstract class BaseRegionObserver implements RegionObserver {

  // ---- coprocessor lifecycle: no-op defaults ----

  @Override
  public void start(CoprocessorEnvironment e) throws IOException { }

  @Override
  public void stop(CoprocessorEnvironment e) throws IOException { }

  // ---- region open/close hooks: no-op defaults ----

  @Override
  public void preOpen(ObserverContext<RegionCoprocessorEnvironment> e) throws IOException { }

  @Override
  public void postOpen(ObserverContext<RegionCoprocessorEnvironment> e) { }

  @Override
  public void preClose(ObserverContext<RegionCoprocessorEnvironment> c, boolean abortRequested)
      throws IOException { }

  @Override
  public void postClose(ObserverContext<RegionCoprocessorEnvironment> e, boolean abortRequested) { }

  // ---- flush hooks: scanner-returning variants pass their input through ----

  @Override
  public InternalScanner preFlushScannerOpen(final ObserverContext<RegionCoprocessorEnvironment> c,
      final Store store, final KeyValueScanner memstoreScanner, final InternalScanner s)
      throws IOException {
    return s;
  }

  @Override
  public void preFlush(ObserverContext<RegionCoprocessorEnvironment> e) throws IOException {
  }

  @Override
  public void postFlush(ObserverContext<RegionCoprocessorEnvironment> e) throws IOException {
  }

  @Override
  public InternalScanner preFlush(ObserverContext<RegionCoprocessorEnvironment> e, Store store,
      InternalScanner scanner) throws IOException {
    return scanner;
  }

  @Override
  public void postFlush(ObserverContext<RegionCoprocessorEnvironment> e, Store store,
      StoreFile resultFile) throws IOException {
  }

  // ---- split hooks: no-op defaults ----

  @Override
  public void preSplit(ObserverContext<RegionCoprocessorEnvironment> e) throws IOException {
  }

  @Override
  public void preSplit(ObserverContext<RegionCoprocessorEnvironment> c, byte[] splitRow)
      throws IOException {
  }

  @Override
  public void preRollBackSplit(ObserverContext<RegionCoprocessorEnvironment> ctx)
      throws IOException {
  }

  @Override
  public void postRollBackSplit(
      ObserverContext<RegionCoprocessorEnvironment> ctx) throws IOException {
  }

  @Override
  public void postCompleteSplit(
      ObserverContext<RegionCoprocessorEnvironment> ctx) throws IOException {
  }

  @Override
  public void postSplit(ObserverContext<RegionCoprocessorEnvironment> e, HRegion l, HRegion r)
      throws IOException {
  }

  // ---- compaction hooks: request-carrying overloads delegate to the simple form ----

  @Override
  public void preCompactSelection(final ObserverContext<RegionCoprocessorEnvironment> c,
      final Store store, final List<StoreFile> candidates) throws IOException { }

  @Override
  public void preCompactSelection(final ObserverContext<RegionCoprocessorEnvironment> c,
      final Store store, final List<StoreFile> candidates, final CompactionRequest request)
      throws IOException {
    preCompactSelection(c, store, candidates);
  }

  @Override
  public void postCompactSelection(final ObserverContext<RegionCoprocessorEnvironment> c,
      final Store store, final ImmutableList<StoreFile> selected) { }

  @Override
  public void postCompactSelection(final ObserverContext<RegionCoprocessorEnvironment> c,
      final Store store, final ImmutableList<StoreFile> selected, CompactionRequest request) {
    postCompactSelection(c, store, selected);
  }

  @Override
  public InternalScanner preCompact(ObserverContext<RegionCoprocessorEnvironment> e,
      final Store store, final InternalScanner scanner, final ScanType scanType)
      throws IOException {
    return scanner;
  }

  @Override
  public InternalScanner preCompact(ObserverContext<RegionCoprocessorEnvironment> e,
      final Store store, final InternalScanner scanner, final ScanType scanType,
      CompactionRequest request) throws IOException {
    return preCompact(e, store, scanner, scanType);
  }

  @Override
  public InternalScanner preCompactScannerOpen(
      final ObserverContext<RegionCoprocessorEnvironment> c, final Store store,
      List<? extends KeyValueScanner> scanners, final ScanType scanType, final long earliestPutTs,
      final InternalScanner s) throws IOException {
    return s;
  }

  @Override
  public InternalScanner preCompactScannerOpen(
      final ObserverContext<RegionCoprocessorEnvironment> c, final Store store,
      List<? extends KeyValueScanner> scanners, final ScanType scanType, final long earliestPutTs,
      final InternalScanner s, CompactionRequest request) throws IOException {
    return preCompactScannerOpen(c, store, scanners, scanType, earliestPutTs, s);
  }

  @Override
  public void postCompact(ObserverContext<RegionCoprocessorEnvironment> e, final Store store,
      final StoreFile resultFile) throws IOException {
  }

  @Override
  public void postCompact(ObserverContext<RegionCoprocessorEnvironment> e, final Store store,
      final StoreFile resultFile, CompactionRequest request) throws IOException {
    postCompact(e, store, resultFile);
  }

  // ---- read path hooks ----

  @Override
  public void preGetClosestRowBefore(final ObserverContext<RegionCoprocessorEnvironment> e,
      final byte [] row, final byte [] family, final Result result)
      throws IOException {
  }

  @Override
  public void postGetClosestRowBefore(final ObserverContext<RegionCoprocessorEnvironment> e,
      final byte [] row, final byte [] family, final Result result)
      throws IOException {
  }

  @Override
  public void preGetOp(final ObserverContext<RegionCoprocessorEnvironment> e,
      final Get get, final List<Cell> results) throws IOException {
    // By default we are executing the deprecated preGet to support legacy RegionObservers
    // We may use the results coming in and we may return the results going out.
    List<KeyValue> kvs = new ArrayList<KeyValue>(results.size());
    for (Cell c : results) {
      kvs.add(KeyValueUtil.ensureKeyValue(c));
    }
    preGet(e, get, kvs);
    results.clear();
    results.addAll(kvs);
  }

  /**
   * WARNING: please override preGetOp instead of this method. This is to maintain some
   * compatibility and to ease the transition from 0.94 -> 0.96. It is super inefficient!
   */
  @Deprecated
  @Override
  public void preGet(final ObserverContext<RegionCoprocessorEnvironment> c, final Get get,
      final List<KeyValue> result)
    throws IOException {
  }

  @Override
  public void postGetOp(final ObserverContext<RegionCoprocessorEnvironment> e, final Get get,
      final List<Cell> results) throws IOException {
    // By default we are executing the deprecated preGet to support legacy RegionObservers
    // We may use the results coming in and we may return the results going out.
    List<KeyValue> kvs = new ArrayList<KeyValue>(results.size());
    for (Cell c : results) {
      kvs.add(KeyValueUtil.ensureKeyValue(c));
    }
    postGet(e, get, kvs);
    results.clear();
    results.addAll(kvs);
  }

  /**
   * WARNING: please override postGetOp instead of this method. This is to maintain some
   * compatibility and to ease the transition from 0.94 -> 0.96. It is super inefficient!
   */
  @Deprecated
  @Override
  public void postGet(final ObserverContext<RegionCoprocessorEnvironment> c, final Get get,
      final List<KeyValue> result)
    throws IOException {
  }

  @Override
  public boolean preExists(final ObserverContext<RegionCoprocessorEnvironment> e,
      final Get get, final boolean exists) throws IOException {
    return exists;
  }

  @Override
  public boolean postExists(final ObserverContext<RegionCoprocessorEnvironment> e,
      final Get get, boolean exists) throws IOException {
    return exists;
  }

  // ---- write path hooks: no-op / pass-through defaults ----

  @Override
  public void prePut(final ObserverContext<RegionCoprocessorEnvironment> e,
      final Put put, final WALEdit edit, final Durability durability) throws IOException {
  }

  @Override
  public void postPut(final ObserverContext<RegionCoprocessorEnvironment> e,
      final Put put, final WALEdit edit, final Durability durability) throws IOException {
  }

  @Override
  public void preDelete(final ObserverContext<RegionCoprocessorEnvironment> e,
      final Delete delete, final WALEdit edit, final Durability durability) throws IOException {
  }

  @Override
  public void postDelete(final ObserverContext<RegionCoprocessorEnvironment> e,
      final Delete delete, final WALEdit edit, final Durability durability) throws IOException {
  }

  @Override
  public void preBatchMutate(final ObserverContext<RegionCoprocessorEnvironment> c,
      final MiniBatchOperationInProgress<Mutation> miniBatchOp) throws IOException {
  }

  @Override
  public void postBatchMutate(final ObserverContext<RegionCoprocessorEnvironment> c,
      final MiniBatchOperationInProgress<Mutation> miniBatchOp) throws IOException {
  }

  @Override
  public boolean preCheckAndPut(final ObserverContext<RegionCoprocessorEnvironment> e,
      final byte [] row, final byte [] family, final byte [] qualifier,
      final CompareOp compareOp, final ByteArrayComparable comparator, final Put put,
      final boolean result) throws IOException {
    return result;
  }

  @Override
  public boolean postCheckAndPut(final ObserverContext<RegionCoprocessorEnvironment> e,
      final byte [] row, final byte [] family, final byte [] qualifier,
      final CompareOp compareOp, final ByteArrayComparable comparator, final Put put,
      final boolean result) throws IOException {
    return result;
  }

  @Override
  public boolean preCheckAndDelete(final ObserverContext<RegionCoprocessorEnvironment> e,
      final byte [] row, final byte [] family, final byte [] qualifier,
      final CompareOp compareOp, final ByteArrayComparable comparator, final Delete delete,
      final boolean result) throws IOException {
    return result;
  }

  @Override
  public boolean postCheckAndDelete(final ObserverContext<RegionCoprocessorEnvironment> e,
      final byte [] row, final byte [] family, final byte [] qualifier,
      final CompareOp compareOp, final ByteArrayComparable comparator, final Delete delete,
      final boolean result) throws IOException {
    return result;
  }

  @Override
  public Result preAppend(final ObserverContext<RegionCoprocessorEnvironment> e,
      final Append append) throws IOException {
    return null;
  }

  @Override
  public Result postAppend(final ObserverContext<RegionCoprocessorEnvironment> e,
      final Append append, final Result result) throws IOException {
    return result;
  }

  @Override
  public long preIncrementColumnValue(final ObserverContext<RegionCoprocessorEnvironment> e,
      final byte [] row, final byte [] family, final byte [] qualifier,
      final long amount, final boolean writeToWAL) throws IOException {
    return amount;
  }

  @Override
  public long postIncrementColumnValue(final ObserverContext<RegionCoprocessorEnvironment> e,
      final byte [] row, final byte [] family, final byte [] qualifier,
      final long amount, final boolean writeToWAL, long result) throws IOException {
    return result;
  }

  @Override
  public Result preIncrement(final ObserverContext<RegionCoprocessorEnvironment> e,
      final Increment increment) throws IOException {
    return null;
  }

  @Override
  public Result postIncrement(final ObserverContext<RegionCoprocessorEnvironment> e,
      final Increment increment, final Result result) throws IOException {
    return result;
  }

  // ---- scanner hooks: pass-through defaults ----

  @Override
  public RegionScanner preScannerOpen(final ObserverContext<RegionCoprocessorEnvironment> e,
      final Scan scan, final RegionScanner s) throws IOException {
    return s;
  }

  @Override
  public KeyValueScanner preStoreScannerOpen(final ObserverContext<RegionCoprocessorEnvironment> c,
      final Store store, final Scan scan, final NavigableSet<byte[]> targetCols,
      final KeyValueScanner s) throws IOException {
    return s;
  }

  @Override
  public RegionScanner postScannerOpen(final ObserverContext<RegionCoprocessorEnvironment> e,
      final Scan scan, final RegionScanner s) throws IOException {
    return s;
  }

  @Override
  public boolean preScannerNext(final ObserverContext<RegionCoprocessorEnvironment> e,
      final InternalScanner s, final List<Result> results, final int limit, final boolean hasMore)
      throws IOException {
    return hasMore;
  }

  @Override
  public boolean postScannerNext(final ObserverContext<RegionCoprocessorEnvironment> e,
      final InternalScanner s, final List<Result> results, final int limit, final boolean hasMore)
      throws IOException {
    return hasMore;
  }

  @Override
  public boolean postScannerFilterRow(final ObserverContext<RegionCoprocessorEnvironment> e,
      final InternalScanner s, final byte[] currentRow, final boolean hasMore) throws IOException {
    return hasMore;
  }

  @Override
  public void preScannerClose(final ObserverContext<RegionCoprocessorEnvironment> e,
      final InternalScanner s) throws IOException {
  }

  @Override
  public void postScannerClose(final ObserverContext<RegionCoprocessorEnvironment> e,
      final InternalScanner s) throws IOException {
  }

  // ---- WAL replay and bulk load hooks ----

  @Override
  public void preWALRestore(ObserverContext<RegionCoprocessorEnvironment> env, HRegionInfo info,
      HLogKey logKey, WALEdit logEdit) throws IOException {
  }

  @Override
  public void postWALRestore(ObserverContext<RegionCoprocessorEnvironment> env,
      HRegionInfo info, HLogKey logKey, WALEdit logEdit) throws IOException {
  }

  @Override
  public void preBulkLoadHFile(final ObserverContext<RegionCoprocessorEnvironment> ctx,
      List<Pair<byte[], String>> familyPaths) throws IOException {
  }

  @Override
  public boolean postBulkLoadHFile(ObserverContext<RegionCoprocessorEnvironment> ctx,
      List<Pair<byte[], String>> familyPaths, boolean hasLoaded) throws IOException {
    return hasLoaded;
  }
}
// Android entry activity ("Bloeddonatie"): wires a navigation drawer, three buttons that
// launch SubscribeBloodtypeActivity / DonorTestActivity / MapsActivity, requests the
// ACCESS_FINE_LOCATION runtime permission, and — once granted — connects a GoogleApiClient,
// streams fused-location updates into the static 'currentLocation', and registers
// Firebase-backed geofences (one per entry under "locations") fired at GeofenceService.
// Below: package/imports/fields, then onCreate() begins (layout, title, drawer lookup;
// continues on the next source line).
package com.team_htbr.a1617proj1bloeddonatie_app; import android.Manifest; import android.app.PendingIntent; import android.content.Intent; import android.content.SharedPreferences; import android.content.pm.PackageManager; import android.location.Location; import android.os.Handler; import android.support.annotation.NonNull; import android.support.annotation.Nullable; import android.support.v4.app.ActivityCompat; import android.os.Bundle; import android.support.v4.widget.DrawerLayout; import android.support.v7.app.ActionBarDrawerToggle; import android.support.v7.app.AppCompatActivity; import android.util.Log; import android.view.MenuItem; import android.view.View; import android.widget.Button; import com.google.android.gms.common.ConnectionResult; import com.google.android.gms.common.api.GoogleApiClient; import com.google.android.gms.common.api.ResultCallback; import com.google.android.gms.common.api.Status; import com.google.android.gms.location.*; import com.google.android.gms.location.LocationListener; import com.google.android.gms.maps.model.LatLng; import com.google.firebase.database.ChildEventListener; import com.google.firebase.database.DataSnapshot; import com.google.firebase.database.DatabaseError; import com.google.firebase.database.DatabaseReference; import com.google.firebase.database.FirebaseDatabase; import java.util.ArrayList; import java.util.List; public class MainActivity extends AppCompatActivity { public static final String TAG = "MainActivity"; public static Location currentLocation; private ActionBarDrawerToggle mToggle; private GoogleApiClient googleApiClient = null; private List<com.team_htbr.a1617proj1bloeddonatie_app.Location> locationsList; private List<Geofence> geofences; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); setTitle("Bloeddonatie"); DrawerLayout mDrawerLayout = (DrawerLayout) findViewById(R.id.drawerLayout); mToggle = new 
// onCreate() continues: drawer toggle sync, permission request (request code 1234),
// GoogleApiClient build, and click listeners for the three launcher buttons. Then
// checkDataBase() begins: attaches a ChildEventListener to the "locations" node and, on
// every onChildAdded, appends the location and (re)starts location + geofence monitoring.
// NOTE(review): only onChildAdded does real work; the other four callbacks log placeholder
// text ("bla bla") — presumably leftovers, confirm before shipping.
ActionBarDrawerToggle(this, mDrawerLayout, R.string.navigation_drawer_open, R.string.navigation_drawer_close); mDrawerLayout.addDrawerListener(mToggle); mToggle.syncState(); getSupportActionBar().setDisplayHomeAsUpEnabled(true); geofences = new ArrayList<>(); locationsList = new ArrayList<>(); requestPermissions(new String[] { Manifest.permission.ACCESS_FINE_LOCATION}, 1234); connectToGoogleApi(); Button btnBloodtype = (Button) findViewById(R.id.Bloodtype); btnBloodtype.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { startActivity(new Intent(MainActivity.this, SubscribeBloodtypeActivity.class)); } }); Button btnDonorTest = (Button) findViewById(R.id.donorTest); btnDonorTest.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { startActivity(new Intent(MainActivity.this, DonorTestActivity.class)); } }); Button btnMaps = (Button) findViewById(R.id.GoogleMap); btnMaps.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { startActivity(new Intent(MainActivity.this, MapsActivity.class)); } }); } private void checkDataBase() { DatabaseReference fireBaseDataBase = FirebaseDatabase.getInstance().getReference(); DatabaseReference locationsDataBase = fireBaseDataBase.child("locations"); locationsDataBase.addChildEventListener(new ChildEventListener() { @Override public void onChildAdded(DataSnapshot dataSnapshot, String s) { locationsList.add(dataSnapshot.getValue(com.team_htbr.a1617proj1bloeddonatie_app.Location.class)); startLocationMoitoring(); startGeofenceMonitoring(); } @Override public void onChildChanged(DataSnapshot dataSnapshot, String s) { Log.d(TAG, "bla bla"); } @Override public void onChildRemoved(DataSnapshot dataSnapshot) { Log.d(TAG, "bla bla"); } @Override public void onChildMoved(DataSnapshot dataSnapshot, String s) { Log.d(TAG, "bla bla"); } @Override public void onCancelled(DatabaseError databaseError) { Log.d(TAG, "bla bla"); } }); Button 
// checkDataBase() continues: it re-registers the same three button listeners already set in
// onCreate() and calls connectToGoogleApi() again (NOTE(review): redundant duplicate wiring —
// each setOnClickListener replaces the previous listener so behavior is unchanged, but this
// looks like copy-paste and is worth removing). It then schedules startLocationMoitoring()
// after a 5 s delay. Next: the three drawer-menu redirect handlers, and connectToGoogleApi():
// lazy, one-time GoogleApiClient build with logging-only connection callbacks; onStart()
// reconnects the client. NOTE(review): "Moitoring"/"cennect" are typos kept as-is — renaming
// the method would touch call sites spread across this class.
btnMaps = (Button) findViewById(R.id.GoogleMap); btnMaps.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { startActivity(new Intent(MainActivity.this, MapsActivity.class)); } }); Button btnDonorTest = (Button) findViewById(R.id.donorTest); btnDonorTest.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { startActivity(new Intent(MainActivity.this, DonorTestActivity.class)); } }); Button btnBloodtype = (Button) findViewById(R.id.Bloodtype); btnBloodtype.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { startActivity(new Intent(MainActivity.this, SubscribeBloodtypeActivity.class)); } }); connectToGoogleApi(); final Handler handler = new Handler(); handler.postDelayed(new Runnable() { @Override public void run() { startLocationMoitoring(); } }, 5000); } public void redirectMaps(MenuItem item){ startActivity(new Intent(MainActivity.this, MapsActivity.class)); } public void redirectDonorTest(MenuItem item){ startActivity(new Intent(MainActivity.this, DonorTestActivity.class)); } public void redirectBloodType(MenuItem item){ startActivity(new Intent(MainActivity.this, SubscribeBloodtypeActivity.class)); } private void connectToGoogleApi() { if (googleApiClient == null) { googleApiClient = new GoogleApiClient.Builder(this) .addConnectionCallbacks(new GoogleApiClient.ConnectionCallbacks() { @Override public void onConnected(@Nullable Bundle bundle) { Log.d(TAG, "connected to googleapiclient"); } @Override public void onConnectionSuspended(int i) { Log.d(TAG, "suspended connection to googleapiclient"); } }) .addOnConnectionFailedListener(new GoogleApiClient.OnConnectionFailedListener() { @Override public void onConnectionFailed(@NonNull ConnectionResult result) { Log.d(TAG, "failed to cennect - " + result.getErrorMessage()); } }) .addApi(LocationServices.API) .build(); } } protected void onStart() { super.onStart(); googleApiClient.reconnect(); } protected 
// onStop() disconnects the client. startLocationMoitoring(): requests high-accuracy fused
// location updates (10 s interval / 5 s fastest), guarded by a runtime-permission check, and
// stores each fix in the static currentLocation. startGeofenceMonitoring(): builds a 1 km
// ENTER-transition, never-expiring geofence per Firebase location and registers the batch
// with a PendingIntent aimed at GeofenceService. NOTE(review): 'geofences' is appended on
// every call and never cleared, so repeated onChildAdded invocations re-submit earlier
// fences — confirm this re-registration is intended.
void onStop() { super.onStop(); googleApiClient.disconnect(); } private void startLocationMoitoring() { if (googleApiClient.isConnected()) { LocationRequest locationRequest = LocationRequest.create() .setInterval(10000) .setFastestInterval(5000) .setPriority(LocationRequest.PRIORITY_HIGH_ACCURACY); if (ActivityCompat.checkSelfPermission(this, Manifest.permission.ACCESS_FINE_LOCATION) != PackageManager.PERMISSION_GRANTED && ActivityCompat.checkSelfPermission(this, Manifest.permission.ACCESS_COARSE_LOCATION) != PackageManager.PERMISSION_GRANTED) { return; } LocationServices.FusedLocationApi.requestLocationUpdates(googleApiClient, locationRequest, new LocationListener() { @Override public void onLocationChanged(Location location) { Log.d(TAG, "location update"); currentLocation = location; } }); } } public void startGeofenceMonitoring() { if (googleApiClient.isConnected()) { for (com.team_htbr.a1617proj1bloeddonatie_app.Location location: locationsList) { geofences.add(new Geofence.Builder() .setRequestId(location.getName()) .setCircularRegion(location.getLat(), location.getLng(), 1000) .setExpirationDuration(Geofence.NEVER_EXPIRE) .setNotificationResponsiveness(5000) .setTransitionTypes(Geofence.GEOFENCE_TRANSITION_ENTER) .build()); } GeofencingRequest geofencingRequest = new GeofencingRequest.Builder() .setInitialTrigger(GeofencingRequest.INITIAL_TRIGGER_ENTER) .addGeofences(geofences).build(); Intent intent = new Intent(this, GeofenceService.class); PendingIntent pendingIntent = PendingIntent.getService(this, 0, intent, PendingIntent.FLAG_UPDATE_CURRENT); if (!googleApiClient.isConnected()) { Log.d(TAG, "no connection"); } else { if (ActivityCompat.checkSelfPermission(this, Manifest.permission.ACCESS_FINE_LOCATION) != PackageManager.PERMISSION_GRANTED) { return; } LocationServices.GeofencingApi.addGeofences(googleApiClient, geofencingRequest, pendingIntent) .setResultCallback(new ResultCallback<Status>() { @Override public void onResult(@NonNull Status status) { if 
// Geofence-registration result callback (log-only). getMyLocation(): null-safe LatLng view
// of the last fix for other activities. onOptionsItemSelected delegates to the drawer toggle
// (the commented-out nav handling is dead code kept by the author). onRequestPermissionsResult
// starts location monitoring and attaches the Firebase listener once fine-location is granted.
// NOTE(review): onRequestPermissionsResult lacks @Override and never calls super — verify the
// signature matches the framework callback, otherwise it is silently never invoked.
(status.isSuccess()) { Log.d(TAG, "succesful add"); } else { Log.d(TAG, "Failed to add"); } } }); } } } public static LatLng getMyLocation() { if (currentLocation == null){ return null; } else return new LatLng(currentLocation.getLatitude(), currentLocation.getLongitude()); } @Override public boolean onOptionsItemSelected(MenuItem item) { // Handle navigation view item clicks here. /*int id = item.getItemId(); if (id == R.id.nav_bloedtype) { Intent intent1 = new Intent(this,SubscribeBloodtypeActivity.class); this.startActivity(intent1); return true; }/* else if (id == R.id.nav_gallery) { } else if (id == R.id.nav_slideshow) { } else if (id == R.id.nav_manage) { } else if (id == R.id.nav_share) { } else if (id == R.id.nav_send) { }*/ if(mToggle.onOptionsItemSelected(item)){ return true; } return super.onOptionsItemSelected(item); } public void onRequestPermissionsResult(int requestCode, String permissions[], int[] grantResults) { switch (requestCode) { case 1234: { if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) { startLocationMoitoring(); checkDataBase(); } else { return; } } default: { return; } } } }
// Closure Compiler (new type inference): translates JSDoc type expressions — given as rhino
// AST nodes — into JSType values, accumulating warnings (JSError set) and unknown type names
// as it goes. The class continues past the end of this chunk (the last method is cut off
// mid-statement), so only the visible members are documented, one note per packed source line.
// License header, imports, class declaration, and the first diagnostics:
// INVALID_GENERICS_INSTANTIATION (arity mismatch on generic instantiation) and the start of
// BAD_JSDOC_ANNOTATION (its message string is split across the next source line).
/* * Copyright 2013 The Closure Compiler Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.javascript.jscomp.newtypes; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.javascript.jscomp.CodingConvention; import com.google.javascript.jscomp.DiagnosticType; import com.google.javascript.jscomp.JSError; import com.google.javascript.jscomp.newtypes.NominalType.RawNominalType; import com.google.javascript.rhino.JSDocInfo; import com.google.javascript.rhino.JSTypeExpression; import com.google.javascript.rhino.Node; import com.google.javascript.rhino.Token; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; /** * * @author blickly@google.com (Ben Lickly) * @author dimvar@google.com (Dimitris Vardoulakis) */ public final class JSTypeCreatorFromJSDoc { public static final DiagnosticType INVALID_GENERICS_INSTANTIATION = DiagnosticType.warning( "JSC_INVALID_GENERICS_INSTANTIATION", "Invalid generics instantiation for {0}.\n" + "Expected {1} type argument(s), but found {2}."); public static final DiagnosticType BAD_JSDOC_ANNOTATION = DiagnosticType.warning( "JSC_BAD_JSDOC_ANNOTATION", "Bad JSDoc annotation. 
// Remaining DiagnosticType constants (extends/implements misuse, inheritance cycles, dict vs.
// struct shape conflicts, uninhabitable unions); then state: the coding convention,
// howmanyTypeVars (a counter used while resolving enums to detect type variables inside an
// enumerated type), the cached Object|null type, and the UnknownTypeException used to bail out
// of type-expression parsing when a name cannot be resolved.
{0}"); public static final DiagnosticType EXTENDS_NON_OBJECT = DiagnosticType.warning( "JSC_EXTENDS_NON_OBJECT", "{0} extends non-object type {1}.\n"); public static final DiagnosticType EXTENDS_NOT_ON_CTOR_OR_INTERF = DiagnosticType.warning( "JSC_EXTENDS_NOT_ON_CTOR_OR_INTERF", "@extends used without @constructor or @interface for {0}.\n"); public static final DiagnosticType INHERITANCE_CYCLE = DiagnosticType.warning( "JSC_INHERITANCE_CYCLE", "Cycle detected in inheritance chain of type {0}"); public static final DiagnosticType DICT_IMPLEMENTS_INTERF = DiagnosticType.warning( "JSC_DICT_IMPLEMENTS_INTERF", "Class {0} is a dict. Dicts can't implement interfaces."); public static final DiagnosticType IMPLEMENTS_WITHOUT_CONSTRUCTOR = DiagnosticType.warning( "JSC_IMPLEMENTS_WITHOUT_CONSTRUCTOR", "@implements used without @constructor or @interface for {0}"); public static final DiagnosticType CONFLICTING_SHAPE_TYPE = DiagnosticType.disabled( "JSC_CONFLICTING_SHAPE_TYPE", "{1} cannot extend this type; {0}s can only extend {0}s"); public static final DiagnosticType CONFLICTING_EXTENDED_TYPE = DiagnosticType.warning( "JSC_CONFLICTING_EXTENDED_TYPE", "{1} cannot extend this type; {0}s can only extend {0}s"); public static final DiagnosticType CONFLICTING_IMPLEMENTED_TYPE = DiagnosticType.warning( "JSC_CONFLICTING_IMPLEMENTED_TYPE", "{0} cannot implement this type; " + "an interface can only extend, but not implement interfaces"); public static final DiagnosticType UNION_IS_UNINHABITABLE = DiagnosticType.warning( "JSC_UNION_IS_UNINHABITABLE", "Union of {0} with {1} would create an impossible type."); private final CodingConvention convention; // Used to communicate state between methods when resolving enum types private int howmanyTypeVars = 0; private final JSType objectOrNull; /** Exception for when unrecognized type names are encountered */ public static class UnknownTypeException extends Exception { UnknownTypeException(String cause) { super(cause); } } private 
// Mutable state (warnings set, unknown-type-name map keyed by the JSDoc AST node where the
// name appeared); constructor; the lazily-built "Function|null" type; the two
// getDeclaredTypeOfNode overloads (owner type supplies its type parameters); accessors for
// warnings/unknown names; and getTypeFromJSTypeExpression, which normalizes a null
// type-parameter list to an empty ImmutableList before delegating.
Set<JSError> warnings = new LinkedHashSet<>(); // Unknown type names indexed by JSDoc AST node at which they were found. private Map<Node, String> unknownTypeNames = new LinkedHashMap<>(); public JSTypeCreatorFromJSDoc(CodingConvention convention) { this.objectOrNull = JSType.join(JSType.TOP_OBJECT, JSType.NULL); this.qmarkFunctionDeclared = FunctionTypeBuilder.qmarkFunctionBuilder().buildDeclaration(); this.convention = convention; } private DeclaredFunctionType qmarkFunctionDeclared; private JSType qmarkFunctionOrNull = null; private JSType getQmarkFunctionOrNull(JSTypes commonTypes) { if (qmarkFunctionOrNull == null) { qmarkFunctionOrNull = JSType.join(commonTypes.qmarkFunction(), JSType.NULL); } return qmarkFunctionOrNull; } public JSType getDeclaredTypeOfNode(JSDocInfo jsdoc, RawNominalType ownerType, DeclaredTypeRegistry registry) { return getDeclaredTypeOfNode(jsdoc, registry, ownerType == null ? ImmutableList.<String>of() : ownerType.getTypeParameters()); } private JSType getDeclaredTypeOfNode(JSDocInfo jsdoc, DeclaredTypeRegistry registry, ImmutableList<String> typeParameters) { if (jsdoc == null) { return null; } return getTypeFromJSTypeExpression( jsdoc.getType(), registry, typeParameters); } public Set<JSError> getWarnings() { return warnings; } public Map<Node, String> getUnknownTypesMap() { return unknownTypeNames; } private JSType getTypeFromJSTypeExpression(JSTypeExpression expr, DeclaredTypeRegistry registry, ImmutableList<String> typeParameters) { if (expr == null) { return null; } return getTypeFromComment(expr.getRoot(), registry, typeParameters == null ? 
// getTypeFromComment vs. getMaybeTypeFromComment: both wrap getTypeFromCommentHelper, the
// former mapping UnknownTypeException to JSType.UNKNOWN, the latter to null. Then the big
// token switch in getTypeFromCommentHelper begins: LC record types, EMPTY (undeclared
// function return) -> UNKNOWN, VOID -> UNDEFINED, LB (deprecated [] array syntax, warned and
// treated as UNKNOWN), STRING named types, and the start of PIPE union handling (unions are
// silently normalized by joining members).
ImmutableList.<String>of() : typeParameters); } // Very similar to JSTypeRegistry#createFromTypeNodesInternal // n is a jsdoc node, not an AST node; the same class (Node) is used for both private JSType getTypeFromComment(Node n, DeclaredTypeRegistry registry, ImmutableList<String> typeParameters) { try { return getTypeFromCommentHelper(n, registry, typeParameters); } catch (UnknownTypeException e) { return JSType.UNKNOWN; } } private JSType getMaybeTypeFromComment(Node n, DeclaredTypeRegistry registry, ImmutableList<String> typeParameters) { try { return getTypeFromCommentHelper(n, registry, typeParameters); } catch (UnknownTypeException e) { return null; } } private JSType getTypeFromCommentHelper(Node n, DeclaredTypeRegistry registry, ImmutableList<String> typeParameters) throws UnknownTypeException { Preconditions.checkNotNull(n); Preconditions.checkNotNull(typeParameters); switch (n.getType()) { case Token.LC: return getRecordTypeHelper(n, registry, typeParameters); case Token.EMPTY: // for function types that don't declare a return type return JSType.UNKNOWN; case Token.VOID: return JSType.UNDEFINED; case Token.LB: warn("The [] type syntax is no longer supported." + " Please use Array.<T> instead.", n); return JSType.UNKNOWN; case Token.STRING: return getNamedTypeHelper(n, registry, typeParameters); case Token.PIPE: { // The way JSType.join works, Subtype|Supertype is equal to Supertype, // so when programmers write un-normalized unions, we normalize them // silently. We may also want to warn. JSType union = JSType.BOTTOM; for (Node child = n.getFirstChild(); child != null; child = child.getNext()) { // TODO(dimvar): When the union has many things, we join and throw // away types, except the result of the last join. Very inefficient. // Consider optimizing. 
// Union loop continues: a '?' member collapses the union to UNKNOWN with a warning, and a
// join that bottoms out reports UNION_IS_UNINHABITABLE. BANG strips NULL (warning when
// applied to a type variable), QMARK adds NULL (bare '?' is UNKNOWN), STAR is TOP, FUNCTION
// delegates to getFunTypeHelper; anything else is a hard IllegalArgumentException. Then
// getRecordTypeHelper begins: walks {field: type} entries, stripping quotes from quoted
// field names and treating undeclared fields as UNKNOWN.
JSType nextType = getTypeFromCommentHelper(child, registry, typeParameters); if (nextType.isUnknown()) { warn("This union type is equivalent to '?'.", n); return JSType.UNKNOWN; } JSType nextUnion = JSType.join(union, nextType); if (nextUnion.isBottom()) { warnings.add(JSError.make(n, UNION_IS_UNINHABITABLE, nextType.toString(), union.toString())); return JSType.UNKNOWN; } union = nextUnion; } return union; } case Token.BANG: { JSType nullableType = getTypeFromCommentHelper( n.getFirstChild(), registry, typeParameters); if (nullableType.isTypeVariable()) { warn("Cannot use ! to restrict type variable type.\n" + "Prefer to make type argument non-nullable and add " + "null explicitly where needed (e.g. through ?T or T|null)", n); } return nullableType.removeType(JSType.NULL); } case Token.QMARK: { Node child = n.getFirstChild(); if (child == null) { return JSType.UNKNOWN; } else { return JSType.join(JSType.NULL, getTypeFromCommentHelper(child, registry, typeParameters)); } } case Token.STAR: return JSType.TOP; case Token.FUNCTION: return getFunTypeHelper(n, registry, typeParameters); default: throw new IllegalArgumentException("Unsupported type exp: " + Token.name(n.getType()) + " " + n.toStringTree()); } } private JSType getRecordTypeHelper(Node n, DeclaredTypeRegistry registry, ImmutableList<String> typeParameters) throws UnknownTypeException { Map<String, JSType> fields = new LinkedHashMap<>(); // For each of the fields in the record type. for (Node fieldTypeNode = n.getFirstChild().getFirstChild(); fieldTypeNode != null; fieldTypeNode = fieldTypeNode.getNext()) { boolean isFieldTypeDeclared = fieldTypeNode.getType() == Token.COLON; Node fieldNameNode = isFieldTypeDeclared ? fieldTypeNode.getFirstChild() : fieldTypeNode; String fieldName = fieldNameNode.getString(); if (fieldName.startsWith("'") || fieldName.startsWith("\"")) { fieldName = fieldName.substring(1, fieldName.length() - 1); } JSType fieldType = !isFieldTypeDeclared ? 
// Record type assembled into an ObjectType. getNamedTypeHelper maps the primitive names
// (boolean/null/number/string/undefined/void/Function/Object) directly; everything else goes
// through lookupTypeByName, which resolves — in order — in-scope type variables, typedefs,
// enums, type variables from declarations (bumping howmanyTypeVars), and nominal types,
// recording unresolved names in unknownTypeNames before throwing UnknownTypeException.
// getTypedefType resolves lazily; resolveTypedef's precondition message continues on the
// next source line.
JSType.UNKNOWN : getTypeFromCommentHelper( fieldTypeNode.getLastChild(), registry, typeParameters); // TODO(blickly): Allow optional properties fields.put(fieldName, fieldType); } return JSType.fromObjectType(ObjectType.fromProperties(fields)); } private JSType getNamedTypeHelper(Node n, DeclaredTypeRegistry registry, ImmutableList<String> outerTypeParameters) throws UnknownTypeException { String typeName = n.getString(); switch (typeName) { case "boolean": return JSType.BOOLEAN; case "null": return JSType.NULL; case "number": return JSType.NUMBER; case "string": return JSType.STRING; case "undefined": case "void": return JSType.UNDEFINED; case "Function": return getQmarkFunctionOrNull(registry.getCommonTypes()); case "Object": return objectOrNull; default: return lookupTypeByName(typeName, n, registry, outerTypeParameters); } } private JSType lookupTypeByName(String name, Node n, DeclaredTypeRegistry registry, ImmutableList<String> outerTypeParameters) throws UnknownTypeException { if (outerTypeParameters.contains(name)) { return JSType.fromTypeVar(name); } Declaration decl = registry.getDeclaration(QualifiedName.fromQualifiedString(name), true); if (decl == null) { unknownTypeNames.put(n, name); throw new UnknownTypeException("Unhandled type: " + name); } // It's either a typedef, an enum, a type variable or a nominal type if (decl.getTypedef() != null) { return getTypedefType(decl.getTypedef(), registry); } if (decl.getEnum() != null) { return getEnumPropType(decl.getEnum(), registry); } if (decl.isTypeVar()) { howmanyTypeVars++; return decl.getTypeOfSimpleDecl(); } if (decl.getNominal() != null) { return getNominalTypeHelper(decl.getNominal(), n, registry, outerTypeParameters); } return JSType.UNKNOWN; } private JSType getTypedefType(Typedef td, DeclaredTypeRegistry registry) { resolveTypedef(td, registry); return td.getType(); } public void resolveTypedef(Typedef td, DeclaredTypeRegistry registry) { Preconditions.checkState(td != null, "getTypedef should only 
// resolveTypedef: a null type expression means a circular typedef — warned and resolved to
// UNKNOWN; otherwise the expression is parsed and stored. resolveEnum mirrors this for enums,
// additionally rejecting enumerated types that contain type variables (detected by comparing
// howmanyTypeVars before/after parsing) or that are '*' (message continues on the next line).
be " + "called when we know that the typedef is defined"); if (td.isResolved()) { return; } JSTypeExpression texp = td.getTypeExpr(); JSType tdType; if (texp == null) { warn("Circular type definitions are not allowed.", td.getTypeExprForErrorReporting().getRoot()); tdType = JSType.UNKNOWN; } else { tdType = getTypeFromJSTypeExpression(texp, registry, null); } td.resolveTypedef(tdType); } private JSType getEnumPropType(EnumType e, DeclaredTypeRegistry registry) { resolveEnum(e, registry); return e.getPropType(); } public void resolveEnum(EnumType e, DeclaredTypeRegistry registry) { Preconditions.checkState(e != null, "getEnum should only be " + "called when we know that the enum is defined"); if (e.isResolved()) { return; } JSTypeExpression texp = e.getTypeExpr(); JSType enumeratedType; if (texp == null) { warn("Circular type definitions are not allowed.", e.getTypeExprForErrorReporting().getRoot()); enumeratedType = JSType.UNKNOWN; } else { int numTypeVars = howmanyTypeVars; enumeratedType = getTypeFromJSTypeExpression(texp, registry, null); if (howmanyTypeVars > numTypeVars) { warn("An enum type cannot include type variables.", texp.getRoot()); enumeratedType = JSType.UNKNOWN; howmanyTypeVars = numTypeVars; } else if (enumeratedType.isTop()) { warn("An enum type cannot be *. " + "Use ? 
// resolveEnum tail: union enum types are also rejected. getNominalTypeHelper begins:
// non-generic types without <> arguments short-circuit to the nullable instance type;
// otherwise each child of the BLOCK node is parsed as a type argument for generic
// instantiation (arity checking continues on the next source line).
if you do not want the elements checked.", texp.getRoot()); enumeratedType = JSType.UNKNOWN; } else if (enumeratedType.isUnion()) { warn("An enum type cannot be a union type.", texp.getRoot()); enumeratedType = JSType.UNKNOWN; } } e.resolveEnum(enumeratedType); } private JSType getNominalTypeHelper(RawNominalType rawType, Node n, DeclaredTypeRegistry registry, ImmutableList<String> outerTypeParameters) throws UnknownTypeException { NominalType uninstantiated = rawType.getAsNominalType(); if (!rawType.isGeneric() && !n.hasChildren()) { return rawType.getInstanceAsNullableJSType(); } ImmutableList.Builder<JSType> typeList = ImmutableList.builder(); if (n.hasChildren()) { // Compute instantiation of polymorphic class/interface. Preconditions.checkState(n.getFirstChild().isBlock()); for (Node child : n.getFirstChild().children()) { typeList.add( getTypeFromCommentHelper(child, registry, outerTypeParameters)); } } ImmutableList<JSType> typeArguments = typeList.build(); ImmutableList<String> typeParameters = rawType.getTypeParameters(); int typeArgsSize = typeArguments.size(); int typeParamsSize = typeParameters.size(); if (typeArgsSize != typeParamsSize) { // We used to also warn when (typeArgsSize < typeParamsSize), but it // happens so often that we stopped. Array, Object and goog.Promise are // common culprits, but many other types as well. 
// Arity mismatch: warn only when there are MORE arguments than parameters, then pad or
// truncate the argument list via fixLengthOfTypeList (missing slots become UNKNOWN) before
// instantiating; the result is always joined with NULL (nominal types are nullable here).
if (typeArgsSize > typeParamsSize) { warnings.add(JSError.make( n, INVALID_GENERICS_INSTANTIATION, uninstantiated.getName(), String.valueOf(typeParamsSize), String.valueOf(typeArgsSize))); } return JSType.join(JSType.NULL, JSType.fromObjectType(ObjectType.fromNominalType( uninstantiated.instantiateGenerics( fixLengthOfTypeList(typeParameters.size(), typeArguments))))); } return JSType.join(JSType.NULL, JSType.fromObjectType(ObjectType.fromNominalType( uninstantiated.instantiateGenerics(typeArguments)))); } private static List<JSType> fixLengthOfTypeList( int desiredLength, List<JSType> typeList) { int length = typeList.size(); if (length == desiredLength) { return typeList; } ImmutableList.Builder<JSType> builder = ImmutableList.builder(); for (int i = 0; i < desiredLength; i++) { builder.add(i < length ? typeList.get(i) : JSType.UNKNOWN); } return builder.build(); } // Don't confuse with getFunTypeFromAtTypeJsdoc; the function below computes a // type that doesn't have an associated AST node. 
// getFunTypeHelper builds a function JSType from a jsdoc FUNCTION node via
// fillInFunTypeBuilder, which handles the optional leading this:/new: receiver clause, then
// the PARAM_LIST: EQUALS children are optional formals, ELLIPSIS is the rest-formal
// (UNKNOWN when untyped — handling continues on the next source line), others are required.
private JSType getFunTypeHelper(Node jsdocNode, DeclaredTypeRegistry registry, ImmutableList<String> typeParameters) throws UnknownTypeException { FunctionTypeBuilder builder = new FunctionTypeBuilder(); fillInFunTypeBuilder(jsdocNode, null, registry, typeParameters, builder); return registry.getCommonTypes().fromFunctionType(builder.buildFunction()); } private void fillInFunTypeBuilder( Node jsdocNode, RawNominalType ownerType, DeclaredTypeRegistry registry, ImmutableList<String> typeParameters, FunctionTypeBuilder builder) throws UnknownTypeException { Node child = jsdocNode.getFirstChild(); if (child.getType() == Token.THIS) { if (ownerType == null) { builder.addReceiverType(getNominalType(child.getFirstChild(), registry, typeParameters)); } child = child.getNext(); } else if (child.getType() == Token.NEW) { builder.addNominalType( getNominalType(child.getFirstChild(), registry, typeParameters)); child = child.getNext(); } if (child.getType() == Token.PARAM_LIST) { for (Node arg = child.getFirstChild(); arg != null; arg = arg.getNext()) { try { switch (arg.getType()) { case Token.EQUALS: builder.addOptFormal(getTypeFromCommentHelper( arg.getFirstChild(), registry, typeParameters)); break; case Token.ELLIPSIS: Node restNode = arg.getFirstChild(); builder.addRestFormals(restNode == null ? 
// fillInFunTypeBuilder tail: required formals, WrongParameterOrderException downgraded to a
// warning, then the return type. getNominalType strips NULL and extracts the singleton
// nominal type (may return null). getImplemented/getExtendedInterfaces both delegate to
// getInterfacesHelper, which collects interface NominalTypes from the jsdoc's
// implemented/extended lists and warns on non-interfaces (message on the next source line).
JSType.UNKNOWN : getTypeFromCommentHelper(restNode, registry, typeParameters)); break; default: builder.addReqFormal( getTypeFromCommentHelper(arg, registry, typeParameters)); break; } } catch (FunctionTypeBuilder.WrongParameterOrderException e) { warn("Wrong parameter order: required parameters are first, " + "then optional, then varargs", jsdocNode); } } child = child.getNext(); } builder.addRetType( getTypeFromCommentHelper(child, registry, typeParameters)); } // May return null; private NominalType getNominalType(Node n, DeclaredTypeRegistry registry, ImmutableList<String> typeParameters) { return getTypeFromComment(n, registry, typeParameters) .removeType(JSType.NULL).getNominalTypeIfSingletonObj(); } private ImmutableSet<NominalType> getImplementedInterfaces( JSDocInfo jsdoc, DeclaredTypeRegistry registry, ImmutableList<String> typeParameters) { return getInterfacesHelper(jsdoc, registry, typeParameters, true); } private ImmutableSet<NominalType> getExtendedInterfaces( JSDocInfo jsdoc, DeclaredTypeRegistry registry, ImmutableList<String> typeParameters) { return getInterfacesHelper(jsdoc, registry, typeParameters, false); } private ImmutableSet<NominalType> getInterfacesHelper( JSDocInfo jsdoc, DeclaredTypeRegistry registry, ImmutableList<String> typeParameters, boolean implementedIntfs) { ImmutableSet.Builder<NominalType> builder = ImmutableSet.builder(); for (JSTypeExpression texp : (implementedIntfs ? jsdoc.getImplementedInterfaces() : jsdoc.getExtendedInterfaces())) { Node expRoot = texp.getRoot(); JSType interfaceType = getMaybeTypeFromComment(expRoot, registry, typeParameters); if (interfaceType != null) { NominalType nt = interfaceType.getNominalTypeIfSingletonObj(); if (nt != null && nt.isInterface()) { builder.add(nt); } else { String errorMsg = implementedIntfs ? 
// isQmarkFunction recognizes a bare (possibly !-wrapped) "Function" annotation. The public
// getFunctionType entry point dispatches: an @type {function(...)} jsdoc on a non-function
// declaration is parsed via fillInFunTypeBuilder (UnknownTypeException falls back to the
// "? function" declaration); a function declaration goes through getFunTypeFromAtTypeJsdoc;
// a non-function @type triggers a warning (continued next line) and falls back to classic
// @param/@return jsdoc handling.
"Cannot implement non-interface" : "Cannot extend non-interface"; warn(errorMsg, expRoot); } } } return builder.build(); } private static boolean isQmarkFunction(Node jsdocNode) { if (jsdocNode.getType() == Token.BANG) { jsdocNode = jsdocNode.getFirstChild(); } return jsdocNode.isString() && jsdocNode.getString().equals("Function"); } /** * Consumes either a "classic" function jsdoc with @param, @return, etc, * or a jsdoc with @type{function ...} and finds the types of the formal * parameters and the return value. It returns a builder because the callers * of this function must separately handle @constructor, @interface, etc. * * constructorType is non-null iff this function is a constructor or * interface declaration. */ public DeclaredFunctionType getFunctionType( JSDocInfo jsdoc, String functionName, Node declNode, RawNominalType constructorType, RawNominalType ownerType, DeclaredTypeRegistry registry) { FunctionTypeBuilder builder = new FunctionTypeBuilder(); if (ownerType != null) { builder.addReceiverType(ownerType.getAsNominalType()); } try { if (jsdoc != null && jsdoc.getType() != null) { Node jsdocNode = jsdoc.getType().getRoot(); int tokenType = jsdocNode.getType(); if (tokenType == Token.FUNCTION) { if (declNode.isFunction()) { return getFunTypeFromAtTypeJsdoc( jsdoc, declNode, ownerType, registry, builder); } try { // TODO(blickly): Use typeParameters here fillInFunTypeBuilder(jsdocNode, ownerType, registry, ImmutableList.<String>of(), builder); return builder.buildDeclaration(); } catch (UnknownTypeException e) { return qmarkFunctionDeclared; } } if (isQmarkFunction(jsdocNode)) { return qmarkFunctionDeclared; } else { warn("The function is annotated with a non-function jsdoc. 
// getFunctionType tail: wrong-parameter-order jsdoc degrades to the "? function" type.
// getFunTypeFromAtTypeJsdoc begins: walks the declared @type formals in lockstep with the
// function's actual formal parameters, pulling type parameters from the owner type
// (continues on the next source line).
" + "Ignoring jsdoc.", declNode); return getFunTypeFromTypicalFunctionJsdoc(null, functionName, declNode, constructorType, ownerType, registry, builder, true); } } return getFunTypeFromTypicalFunctionJsdoc(jsdoc, functionName, declNode, constructorType, ownerType, registry, builder, false); } catch (FunctionTypeBuilder.WrongParameterOrderException e) { warn("Wrong parameter order: required parameters are first, " + "then optional, then varargs. Ignoring jsdoc.", declNode); return qmarkFunctionDeclared; } } private DeclaredFunctionType getFunTypeFromAtTypeJsdoc( JSDocInfo jsdoc, Node funNode, RawNominalType ownerType, DeclaredTypeRegistry registry, FunctionTypeBuilder builder) { Preconditions.checkArgument(funNode.isFunction()); Node childJsdoc = jsdoc.getType().getRoot().getFirstChild(); Node param = funNode.getFirstChild().getNext().getFirstChild(); Node paramType; boolean warnedForMissingTypes = false; boolean warnedForInlineJsdoc = false; ImmutableList<String> typeParameters = ownerType == null ? 
// Lockstep walk: this:/new: clauses handled first, then each actual formal is matched
// against the next declared type. Extra actual formals warn once and become optional
// UNKNOWN; an inline jsdoc alongside an @type jsdoc warns once (message continues on the
// next source line).
ImmutableList.<String>of() : ownerType.getTypeParameters(); if (childJsdoc.getType() == Token.THIS) { if (ownerType == null) { builder.addReceiverType(getNominalType( childJsdoc.getFirstChild(), registry, typeParameters)); } childJsdoc = childJsdoc.getNext(); } else if (childJsdoc.getType() == Token.NEW) { builder.addNominalType( getNominalType(childJsdoc.getFirstChild(), registry, typeParameters)); childJsdoc = childJsdoc.getNext(); } if (childJsdoc.getType() == Token.PARAM_LIST) { paramType = childJsdoc.getFirstChild(); childJsdoc = childJsdoc.getNext(); // go to the return type } else { // empty parameter list paramType = null; } while (param != null) { if (paramType == null) { if (!warnedForMissingTypes) { warn("The function has more formal parameters than the types " + "declared in the JSDoc", funNode); warnedForMissingTypes = true; } builder.addOptFormal(JSType.UNKNOWN); } else { if (!warnedForInlineJsdoc && param.getJSDocInfo() != null) { warn("The function cannot have both an @type jsdoc and inline " + "jsdocs. 
// getFunTypeFromAtTypeJsdoc tail: leftover declared types handle the rest-formal (ELLIPSIS)
// or warn about too few actual formals; conflicts between @type and inline/@return jsdoc
// warn; setters get an UNDEFINED return type (declaring one warns). Then the ParamIterator
// helper begins: it iterates either the function's PARAM_LIST nodes or, when absent, the
// jsdoc's parameter names (doc continues on the next source line).
Ignoring inline jsdocs.", param); warnedForInlineJsdoc = true; } switch (paramType.getType()) { case Token.EQUALS: builder.addOptFormal(getTypeFromComment( paramType.getFirstChild(), registry, typeParameters)); break; case Token.ELLIPSIS: if (!warnedForMissingTypes) { warn("The function has more formal parameters than the types " + "declared in the JSDoc", funNode); warnedForMissingTypes = true; builder.addOptFormal(JSType.UNKNOWN); } break; default: builder.addReqFormal( getTypeFromComment(paramType, registry, typeParameters)); break; } paramType = paramType.getNext(); } param = param.getNext(); } if (paramType != null) { if (paramType.getType() == Token.ELLIPSIS) { builder.addRestFormals(getTypeFromComment( paramType.getFirstChild(), registry, typeParameters)); } else { warn("The function has fewer formal parameters than the types " + "declared in the JSDoc", funNode); } } if (!warnedForInlineJsdoc && funNode.getFirstChild().getJSDocInfo() != null) { warn("The function cannot have both an @type jsdoc and inline " + "jsdocs. Ignoring the inline return jsdoc.", funNode); } if (jsdoc.getReturnType() != null) { warn("The function cannot have both an @type jsdoc and @return " + "jsdoc. Ignoring @return jsdoc.", funNode); } if (funNode.getParent().isSetterDef()) { if (childJsdoc != null) { warn("Cannot declare a return type on a setter", funNode); } builder.addRetType(JSType.UNDEFINED); } else { builder.addRetType( getTypeFromComment(childJsdoc, registry, typeParameters)); } return builder.buildDeclaration(); } private static class ParamIterator { /** The parameter names from the JSDocInfo. Only set if 'params' is null. */ Iterator<String> paramNames; /** * The PARAM_LIST node containing the function parameters. Only set if * 'paramNames' is null. 
// ParamIterator body (exactly one of params/paramNames is set; getNode() returns null in
// jsdoc-names mode). getFunTypeFromTypicalFunctionJsdoc begins: classic @param/@return
// handling — @template names become type parameters (rejected on getters/setters, which
// nulls out the jsdoc), owner-type parameters are appended, then formal and return types are
// filled in (continues on the next source line).
*/ Node params; int index = -1; ParamIterator(Node params, JSDocInfo jsdoc) { Preconditions.checkArgument(params != null || jsdoc != null); if (params != null) { this.params = params; this.paramNames = null; } else { this.params = null; this.paramNames = jsdoc.getParameterNames().iterator(); } } boolean hasNext() { if (paramNames != null) { return paramNames.hasNext(); } return index + 1 < params.getChildCount(); } String nextString() { if (paramNames != null) { return paramNames.next(); } index++; return params.getChildAtIndex(index).getString(); } Node getNode() { if (paramNames != null) { return null; } return params.getChildAtIndex(index); } } private DeclaredFunctionType getFunTypeFromTypicalFunctionJsdoc( JSDocInfo jsdoc, String functionName, Node funNode, RawNominalType constructorType, RawNominalType ownerType, DeclaredTypeRegistry registry, FunctionTypeBuilder builder, boolean ignoreJsdoc /* for when the jsdoc is malformed */) { Preconditions.checkArgument(!ignoreJsdoc || jsdoc == null); Preconditions.checkArgument(!ignoreJsdoc || funNode.isFunction()); ImmutableList<String> typeParameters = ImmutableList.of(); Node parent = funNode.getParent(); // TODO(dimvar): need more @template warnings // - warn for multiple @template annotations // - warn for @template annotation w/out usage if (jsdoc != null) { typeParameters = jsdoc.getTemplateTypeNames(); if (!typeParameters.isEmpty()) { if (parent.isSetterDef() || parent.isGetterDef()) { ignoreJsdoc = true; jsdoc = null; warn("@template can't be used with getters/setters", funNode); } else { builder.addTypeParameters(typeParameters); } } } if (ownerType != null) { ImmutableList.Builder<String> paramsBuilder = new ImmutableList.Builder<>(); paramsBuilder.addAll(typeParameters); paramsBuilder.addAll(ownerType.getTypeParameters()); typeParameters = paramsBuilder.build(); } fillInFormalParameterTypes( jsdoc, funNode, typeParameters, registry, builder, ignoreJsdoc); fillInReturnType( jsdoc, funNode, parent, 
// Classic-jsdoc tail (cut off at the end of this chunk): resolves the parent class and
// implemented interfaces, handles @constructor vs. @interface annotations (anonymous types
// are not registered; @implements without a constructor warns), and begins extracting the
// @this receiver type — JsDocInfoParser wraps @this in '!', which is bypassed here to avoid
// a spurious warning. NOTE(review): the final statement is truncated mid-expression by the
// chunk boundary; the method continues beyond this view.
typeParameters, registry, builder, ignoreJsdoc); if (jsdoc == null) { return builder.buildDeclaration(); } // Look at other annotations, eg, @constructor NominalType parentClass = getMaybeParentClass( jsdoc, functionName, funNode, typeParameters, registry); ImmutableSet<NominalType> implementedIntfs = getImplementedInterfaces( jsdoc, registry, typeParameters); if (constructorType == null && (jsdoc.isConstructor() || jsdoc.isInterface())) { // Anonymous type, don't register it. return builder.buildDeclaration(); } else if (jsdoc.isConstructor()) { handleConstructorAnnotation(functionName, funNode, constructorType, parentClass, implementedIntfs, registry, builder); } else if (jsdoc.isInterface()) { handleInterfaceAnnotation(jsdoc, functionName, funNode, constructorType, implementedIntfs, typeParameters, registry, builder); } else if (!implementedIntfs.isEmpty()) { warnings.add(JSError.make( funNode, IMPLEMENTS_WITHOUT_CONSTRUCTOR, functionName)); } if (jsdoc.hasThisType() && ownerType == null) { Node thisRoot = jsdoc.getThisType().getRoot(); Preconditions.checkState(thisRoot.getType() == Token.BANG); Node thisNode = thisRoot.getFirstChild(); // JsDocInfoParser wraps @this types with !. But we warn when we see !T, // and we don't want to warn for a ! that was automatically inserted. // So, we bypass the ! here. JSType thisType = getMaybeTypeFromComment(thisNode, registry, typeParameters); if (thisType != null) { thisType = thisType.removeType(JSType.NULL); } // TODO(dimvar): thisType may be non-null but have a null // thisTypeAsNominal. // We currently only support nominal types for the receiver type, but // people use other types as well: unions, records, etc. // Decide what to do about those. NominalType thisTypeAsNominal = thisType == null ? 
null : thisType.getNominalTypeIfSingletonObj();
      builder.addReceiverType(thisTypeAsNominal);
    }
    return builder.buildDeclaration();
  }

  /**
   * Adds the declared type of each formal parameter to {@code builder},
   * combining inline jsdoc on the formals, @param annotations, and the coding
   * convention's optional / var_args markers. Warns when a formal carries both
   * an inline jsdoc and an @param type.
   */
  private void fillInFormalParameterTypes(
      JSDocInfo jsdoc, Node funNode,
      ImmutableList<String> typeParameters,
      DeclaredTypeRegistry registry, FunctionTypeBuilder builder,
      boolean ignoreJsdoc /* for when the jsdoc is malformed */) {
    boolean ignoreFunNode = !funNode.isFunction();
    Node params = ignoreFunNode ? null : funNode.getFirstChild().getNext();
    ParamIterator iterator = new ParamIterator(params, jsdoc);
    while (iterator.hasNext()) {
      String pname = iterator.nextString();
      Node param = iterator.getNode();
      JSType inlineParamType = (ignoreJsdoc || ignoreFunNode) ? null :
          getDeclaredTypeOfNode(
              param.getJSDocInfo(), registry, typeParameters);
      boolean isRequired = true;
      boolean isRestFormals = false;
      JSTypeExpression texp = jsdoc == null ?
          null : jsdoc.getParameterType(pname);
      Node jsdocNode = texp == null ? null : texp.getRoot();
      // The coding convention (e.g. trailing underscore / var_args name) can
      // mark a formal optional or rest even without a jsdoc type.
      if (param != null) {
        if (convention.isOptionalParameter(param)) {
          isRequired = false;
        } else if (convention.isVarArgsParameter(param)) {
          isRequired = false;
          isRestFormals = true;
        }
      }
      JSType fnParamType = null;
      if (jsdocNode != null) {
        if (jsdocNode.getType() == Token.EQUALS) { // {T=} optional
          isRequired = false;
          jsdocNode = jsdocNode.getFirstChild();
        } else if (jsdocNode.getType() == Token.ELLIPSIS) { // {...T} rest
          isRequired = false;
          isRestFormals = true;
          jsdocNode = jsdocNode.getFirstChild();
        }
        fnParamType = getTypeFromComment(jsdocNode, registry, typeParameters);
      }
      if (inlineParamType != null) {
        // TODO(dimvar): The support for inline optional parameters is currently
        // broken, so this is always a required parameter. See b/11481388. Fix.
        builder.addReqFormal(inlineParamType);
        if (fnParamType != null) {
          warn("Found two JsDoc comments for formal parameter " + pname, param);
        }
      } else if (isRequired) {
        builder.addReqFormal(fnParamType);
      } else if (isRestFormals) {
        builder.addRestFormals(fnParamType == null ?
            JSType.UNKNOWN : fnParamType);
      } else {
        builder.addOptFormal(fnParamType);
      }
    }
  }

  /**
   * Adds the declared return type to {@code builder}, preferring the inline
   * return jsdoc over @return. Setters are forced to return undefined and may
   * not declare a return type.
   */
  private void fillInReturnType(
      JSDocInfo jsdoc, Node funNode, Node parent,
      ImmutableList<String> typeParameters,
      DeclaredTypeRegistry registry, FunctionTypeBuilder builder,
      boolean ignoreJsdoc /* for when the jsdoc is malformed */) {
    JSDocInfo inlineRetJsdoc =
        ignoreJsdoc ? null : funNode.getFirstChild().getJSDocInfo();
    JSTypeExpression retTypeExp = jsdoc == null ? null : jsdoc.getReturnType();
    if (parent.isSetterDef()) {
      // inline returns for setters are attached to the function body.
      // Consider fixing this.
      inlineRetJsdoc =
          ignoreJsdoc ? null : funNode.getLastChild().getJSDocInfo();
      if (retTypeExp != null || inlineRetJsdoc != null) {
        warn("Cannot declare a return type on a setter", funNode);
      }
      builder.addRetType(JSType.UNDEFINED);
    } else if (inlineRetJsdoc != null) {
      builder.addRetType(
          getDeclaredTypeOfNode(inlineRetJsdoc, registry, typeParameters));
      if (retTypeExp != null) {
        warn("Found two JsDoc comments for the return type", funNode);
      }
    } else {
      builder.addRetType(
          getTypeFromJSTypeExpression(retTypeExp, registry, typeParameters));
    }
  }

  /**
   * Returns the class named by @extends, or null (warning as appropriate) when
   * there is no usable parent class: no @extends, @extends without
   * @constructor, an unresolvable extended type, or an interface in the
   * @extends of a constructor.
   */
  private NominalType getMaybeParentClass(
      JSDocInfo jsdoc, String functionName, Node funNode,
      ImmutableList<String> typeParameters, DeclaredTypeRegistry registry) {
    if (!jsdoc.hasBaseType()) {
      return null;
    }
    if (!jsdoc.isConstructor()) {
      warnings.add(JSError.make(
          funNode, EXTENDS_NOT_ON_CTOR_OR_INTERF, functionName));
      return null;
    }
    Node docNode = jsdoc.getBaseType().getRoot();
    JSType extendedType =
        getMaybeTypeFromComment(docNode, registry, typeParameters);
    if (extendedType == null) {
      return null;
    }
    NominalType parentClass = extendedType.getNominalTypeIfSingletonObj();
    if (parentClass != null && parentClass.isClass()) {
      return parentClass;
    }
    if (parentClass == null) {
      warnings.add(JSError.make(funNode, EXTENDS_NON_OBJECT, functionName,
          extendedType.toString()));
    } else {
      // Not a class, so it must be an interface used where a class belongs.
      Preconditions.checkState(parentClass.isInterface());
      warnings.add(JSError.make(funNode, CONFLICTING_EXTENDED_TYPE,
          "constructor", functionName));
    }
    return null;
  }

  /**
   * Wires up a class declared with @constructor: sets its superclass
   * (defaulting to Object for everything except Object itself), checks
   * struct/dict compatibility with the parent, and records the implemented
   * interfaces on the raw type.
   */
  private void handleConstructorAnnotation(
      String functionName, Node funNode, RawNominalType constructorType,
      NominalType parentClass, ImmutableSet<NominalType> implementedIntfs,
      DeclaredTypeRegistry registry, FunctionTypeBuilder builder) {
    String className = constructorType.toString();
    NominalType builtinObject = registry.getCommonTypes().getObjectType();
    if (parentClass == null && !functionName.equals("Object")) {
      parentClass = builtinObject;
    }
    if (parentClass != null) {
      if (!constructorType.addSuperClass(parentClass)) {
        warnings.add(JSError.make(funNode, INHERITANCE_CYCLE, className));
      } else if (parentClass != builtinObject) {
        // A @struct may only extend a @struct; a @dict only a @dict.
        if (constructorType.isStruct() && !parentClass.isStruct()) {
          warnings.add(JSError.make(
              funNode, CONFLICTING_SHAPE_TYPE, "struct", className));
        } else if (constructorType.isDict() && !parentClass.isDict()) {
          warnings.add(JSError.make(
              funNode, CONFLICTING_SHAPE_TYPE, "dict", className));
        }
      }
    }
    if (constructorType.isDict() && !implementedIntfs.isEmpty()) {
      warnings.add(JSError.make(funNode, DICT_IMPLEMENTS_INTERF, className));
    }
    boolean noCycles = constructorType.addInterfaces(implementedIntfs);
    Preconditions.checkState(noCycles);
    builder.addNominalType(constructorType.getAsNominalType());
  }

  /**
   * Wires up an interface declared with @interface: interfaces may not use
   * @implements, and their @extends chain must be acyclic.
   */
  private void handleInterfaceAnnotation(
      JSDocInfo jsdoc, String functionName, Node funNode,
      RawNominalType constructorType,
      ImmutableSet<NominalType> implementedIntfs,
      ImmutableList<String> typeParameters,
      DeclaredTypeRegistry registry, FunctionTypeBuilder builder) {
    if (!implementedIntfs.isEmpty()) {
      warnings.add(JSError.make(
          funNode, CONFLICTING_IMPLEMENTED_TYPE, functionName));
    }
    boolean noCycles = constructorType.addInterfaces(
        getExtendedInterfaces(jsdoc, registry, typeParameters));
    if (!noCycles) {
      warnings.add(JSError.make(
          funNode, INHERITANCE_CYCLE, constructorType.toString()));
    }
    builder.addNominalType(constructorType.getAsNominalType());
  }

  // /** @param {...?} var_args */ function f(var_args) { ... }
  // var_args shouldn't be used in the body of f
  /** Returns true if the named formal is declared as a rest (...) parameter. */
  public static boolean isRestArg(JSDocInfo funJsdoc, String formalParamName) {
    if (funJsdoc == null) {
      return false;
    }
    JSTypeExpression texp = funJsdoc.getParameterType(formalParamName);
    Node jsdocNode = texp == null ? null : texp.getRoot();
    return jsdocNode != null && jsdocNode.getType() == Token.ELLIPSIS;
  }

  // TODO(blickly): Add more DiagnosticTypes and remove this method
  void warn(String msg, Node faultyNode) {
    warnings.add(JSError.make(faultyNode, BAD_JSDOC_ANNOTATION, msg));
  }
}
/* * Copyright (C) 2008 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.kong.zxreader.down.downloads; import java.util.Collection; import java.util.HashMap; import com.kong.zxreader.R; import android.app.Notification; import android.app.PendingIntent; import android.content.ContentUris; import android.content.Context; import android.content.Intent; import android.net.Uri; import android.view.View; import android.widget.RemoteViews; /** * This class handles the updating of the Notification Manager for the * cases where there is an ongoing download. Once the download is complete * (be it successful or unsuccessful) it is no longer the responsibility * of this component to show the download in the notification manager. 
*
 */
class DownloadNotification {

    Context mContext;
    /** Active-download collation items, keyed by owning app package name. */
    HashMap <String, NotificationItem> mNotifications;
    private DownSystemFacade mSystemFacade;

    static final String LOGTAG = "DownloadNotification";

    // SQL WHERE clause selecting downloads that are running (status in
    // 100..199) and visible (visibility NULL, VISIBLE, or
    // VISIBLE_NOTIFY_COMPLETED).
    static final String WHERE_RUNNING =
        "(" + Downloads.COLUMN_STATUS + " >= '100') AND (" +
        Downloads.COLUMN_STATUS + " <= '199') AND (" +
        Downloads.COLUMN_VISIBILITY + " IS NULL OR " +
        Downloads.COLUMN_VISIBILITY + " == '" + Downloads.VISIBILITY_VISIBLE + "' OR " +
        Downloads.COLUMN_VISIBILITY + " == '" +
        Downloads.VISIBILITY_VISIBLE_NOTIFY_COMPLETED + "')";
    // SQL WHERE clause selecting finished downloads (status >= 200) that asked
    // to keep notifying after completion.
    static final String WHERE_COMPLETED =
        Downloads.COLUMN_STATUS + " >= '200' AND " +
        Downloads.COLUMN_VISIBILITY + " == '" +
        Downloads.VISIBILITY_VISIBLE_NOTIFY_COMPLETED + "'";

    /**
     * This inner class is used to collate downloads that are owned by
     * the same application. This is so that only one notification line
     * item is used for all downloads of a given application.
     */
    static class NotificationItem {
        int mId; // The first db _id for the download for the app
        long mTotalCurrent = 0;
        long mTotalTotal = 0; // becomes -1 once any member size is unknown
        int mTitleCount = 0;
        String mPackageName; // App package name
        String mDescription;
        String[] mTitles = new String[2]; // download titles.
        String mPausedText = null;

        /*
         * Add a second download to this notification item.
         */
        void addItem(String title, long currentBytes, long totalBytes) {
            mTotalCurrent += currentBytes;
            // Any download with unknown size poisons the total: -1 means
            // "indeterminate" for the whole item.
            if (totalBytes <= 0 || mTotalTotal == -1) {
                mTotalTotal = -1;
            } else {
                mTotalTotal += totalBytes;
            }
            // Only the first two titles are kept; the rest are just counted.
            if (mTitleCount < 2) {
                mTitles[mTitleCount] = title;
            }
            mTitleCount++;
        }
    }

    /**
     * Constructor
     * @param ctx The context to use to obtain access to the
     *            Notification Service
     */
    DownloadNotification(Context ctx, DownSystemFacade systemFacade) {
        mContext = ctx;
        mSystemFacade = systemFacade;
        mNotifications = new HashMap<String, NotificationItem>();
    }

    /*
     * Update the notification ui.
     */
    public void updateNotification(Collection<DownloadInfo> downloads) {
        updateActiveNotification(downloads);
        updateCompletedNotification(downloads);
    }

    /** Posts one ongoing notification per app that has active downloads. */
    private void updateActiveNotification(Collection<DownloadInfo> downloads) {
        // Collate the notifications
        mNotifications.clear();
        for (DownloadInfo download : downloads) {
            if (!isActiveAndVisible(download)) {
                continue;
            }
            String packageName = download.mPackage;
            long max = download.mTotalBytes;
            long progress = download.mCurrentBytes;
            long id = download.mId;
            String title = download.mTitle;
            if (title == null || title.length() == 0) {
                title = mContext.getResources().getString(
                        R.string.download_unknown_title);
            }

            NotificationItem item;
            if (mNotifications.containsKey(packageName)) {
                item = mNotifications.get(packageName);
                item.addItem(title, progress, max);
            } else {
                item = new NotificationItem();
                item.mId = (int) id;
                item.mPackageName = packageName;
                item.mDescription = download.mDescription;
                item.addItem(title, progress, max);
                mNotifications.put(packageName, item);
            }
            // Downloads waiting for Wi-Fi show a "paused" line instead of a
            // progress bar.
            if (download.mStatus == Downloads.STATUS_QUEUED_FOR_WIFI
                    && item.mPausedText == null) {
                item.mPausedText = mContext.getResources().getString(
                        R.string.notification_need_wifi_for_size);
            }
        }

        // Add the notifications
        for (NotificationItem item : mNotifications.values()) {
            // Build the notification object
            Notification n = new Notification();
            boolean hasPausedText = (item.mPausedText != null);
            int iconResource = android.R.drawable.stat_sys_download;
            if (hasPausedText) {
                iconResource = android.R.drawable.stat_sys_warning;
            }
            n.icon = iconResource;
            n.flags |= Notification.FLAG_ONGOING_EVENT;

            // Build the RemoteView object
            RemoteViews expandedView = new RemoteViews(mContext.getPackageName(),
                    R.layout.status_bar_ongoing_event_progress_bar);
            StringBuilder title = new StringBuilder(item.mTitles[0]);
            if (item.mTitleCount > 1) {
                title.append(mContext.getString(R.string.notification_filename_separator));
                title.append(item.mTitles[1]);
                n.number = item.mTitleCount;
                if (item.mTitleCount > 2) {
                    // "and N more" suffix for the titles beyond the two kept.
                    title.append(mContext.getString(R.string.notification_filename_extras,
                            new Object[] { Integer.valueOf(item.mTitleCount - 2) }));
                }
            } else {
                expandedView.setTextViewText(R.id.description, item.mDescription);
            }
            expandedView.setTextViewText(R.id.title, title);
            if (hasPausedText) {
                expandedView.setViewVisibility(R.id.progress_bar, View.GONE);
                expandedView.setTextViewText(R.id.paused_text, item.mPausedText);
            } else {
                expandedView.setViewVisibility(R.id.paused_text, View.GONE);
                // mTotalTotal == -1 means total size unknown: indeterminate bar.
                expandedView.setProgressBar(R.id.progress_bar,
                        (int) item.mTotalTotal,
                        (int) item.mTotalCurrent,
                        item.mTotalTotal == -1);
            }
            expandedView.setTextViewText(R.id.progress_text,
                    getDownloadingText(item.mTotalTotal, item.mTotalCurrent));
            expandedView.setImageViewResource(R.id.appIcon, iconResource);
            n.contentView = expandedView;

            // Tapping the notification broadcasts ACTION_LIST to the receiver.
            Intent intent = new Intent(DownConstants.ACTION_LIST);
            intent.setClassName(mContext.getPackageName(),
                    DownloadReceiver.class.getName());
            intent.setData(
                    ContentUris.withAppendedId(Downloads.ALL_DOWNLOADS_CONTENT_URI, item.mId));
            intent.putExtra("multiple", item.mTitleCount > 1);
            n.contentIntent = PendingIntent.getBroadcast(mContext, 0, intent, 0);

            mSystemFacade.postNotification(item.mId, n);
        }
    }

    /** Posts one completion (success or failure) notification per download. */
    private void updateCompletedNotification(Collection<DownloadInfo> downloads) {
        for (DownloadInfo download : downloads) {
            if (!isCompleteAndVisible(download)) {
                continue;
            }
            // Add the notifications
            Notification n = new Notification();
            n.icon = android.R.drawable.stat_sys_download_done;

            long id = download.mId;
            String title = download.mTitle;
            if (title == null || title.length() == 0) {
                title = mContext.getResources().getString(
                        R.string.download_unknown_title);
            }
            Uri contentUri =
                ContentUris.withAppendedId(Downloads.ALL_DOWNLOADS_CONTENT_URI, id);
            String caption;
            Intent intent;
            if (Downloads.isStatusError(download.mStatus)) {
                caption = mContext.getResources()
                        .getString(R.string.notification_download_failed);
                intent = new Intent(DownConstants.ACTION_LIST);
            } else {
                caption = mContext.getResources()
                        .getString(R.string.notification_download_complete);
                // Externally-destined files open directly; others show the list.
                if (download.mDestination == Downloads.DESTINATION_EXTERNAL) {
                    intent = new Intent(DownConstants.ACTION_OPEN);
                } else {
                    intent = new Intent(DownConstants.ACTION_LIST);
                }
            }
            intent.setClassName(mContext.getPackageName(),
                    DownloadReceiver.class.getName());
            intent.setData(contentUri);

            n.when = download.mLastMod;
            n.setLatestEventInfo(mContext, title, caption,
                    PendingIntent.getBroadcast(mContext, 0, intent, 0));

            // Dismissing the notification broadcasts ACTION_HIDE.
            intent = new Intent(DownConstants.ACTION_HIDE);
            intent.setClassName(mContext.getPackageName(),
                    DownloadReceiver.class.getName());
            intent.setData(contentUri);
            n.deleteIntent = PendingIntent.getBroadcast(mContext, 0, intent, 0);

            mSystemFacade.postNotification(download.mId, n);
        }
    }

    /** Running (status 100..199) and not hidden; mirrors WHERE_RUNNING. */
    private boolean isActiveAndVisible(DownloadInfo download) {
        return 100 <= download.mStatus && download.mStatus < 200
                && download.mVisibility != Downloads.VISIBILITY_HIDDEN;
    }

    /** Finished (status >= 200) and marked to notify; mirrors WHERE_COMPLETED. */
    private boolean isCompleteAndVisible(DownloadInfo download) {
        return download.mStatus >= 200
                && download.mVisibility ==
                    Downloads.VISIBILITY_VISIBLE_NOTIFY_COMPLETED;
    }

    /*
     * Helper function to build the downloading text.
     */
    private String getDownloadingText(long totalBytes, long currentBytes) {
        // Unknown total size: show no percentage at all.
        if (totalBytes <= 0) {
            return "";
        }
        long progress = currentBytes * 100 / totalBytes;
        StringBuilder sb = new StringBuilder();
        sb.append(progress);
        sb.append('%');
        return sb.toString();
    }
}
package psidev.psi.mi.jami.xml.io.writer.elements.impl.compact.xml25;

import junit.framework.Assert;
import org.junit.Test;
import psidev.psi.mi.jami.exception.IllegalRangeException;
import psidev.psi.mi.jami.model.*;
import psidev.psi.mi.jami.model.impl.*;
import psidev.psi.mi.jami.utils.InteractorUtils;
import psidev.psi.mi.jami.utils.RangeUtils;
import psidev.psi.mi.jami.xml.cache.PsiXmlObjectCache;
import psidev.psi.mi.jami.xml.cache.InMemoryIdentityObjectCache;
import psidev.psi.mi.jami.xml.io.writer.elements.impl.AbstractXmlWriterTest;
import psidev.psi.mi.jami.xml.io.writer.elements.impl.compact.xml25.XmlNamedModelledParticipantWriter;

import javax.xml.stream.XMLStreamException;
import java.io.IOException;

/**
 * Unit tester for XmlNamedModelledParticipantWriter
 *
 * @author Marine Dumousseau (marine@ebi.ac.uk)
 * @version $Id$
 * @since <pre>25/11/13</pre>
 */
public class XmlNamedModelledParticipantWriterTest extends AbstractXmlWriterTest {

    // Expected XML for a basic participant, written by interactor reference.
    private String participant = "<participant id=\"1\">\n" +
            "  <interactorRef>2</interactorRef>\n" +
            "  <biologicalRole>\n" +
            "    <names>\n" +
            "      <shortLabel>unspecified role</shortLabel>\n" +
            "    </names>\n" +
            "    <xref>\n" +
            "      <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
            "    </xref>\n" +
            "  </biologicalRole>\n" +
            "</participant>";
    // Expected XML when the interactor is a complex written as an
    // interaction reference.
    private String participant_interaction = "<participant id=\"1\">\n" +
            "  <interactionRef>2</interactionRef>\n" +
            "  <biologicalRole>\n" +
            "    <names>\n" +
            "      <shortLabel>unspecified role</shortLabel>\n" +
            "    </names>\n" +
            "    <xref>\n" +
            "      <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
            "    </xref>\n" +
            "  </biologicalRole>\n" +
            "</participant>";
    // Expected XML for a participant carrying aliases only.
    private String participant_aliases ="<participant id=\"1\">\n" +
            "  <names>\n" +
            "    <alias type=\"synonym\">participant synonym</alias>\n"+
            "    <alias>test</alias>\n"+
            "  </names>\n" +
            "  <interactorRef>2</interactorRef>\n" +
            "  <biologicalRole>\n" +
            "    <names>\n" +
            "      <shortLabel>unspecified role</shortLabel>\n" +
            "    </names>\n" +
            "    <xref>\n" +
            "      <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
            "    </xref>\n" +
            "  </biologicalRole>\n" +
            "</participant>";
    // Expected XML when a short label is set in addition to the aliases.
    private String participant_shortName ="<participant id=\"1\">\n" +
            "  <names>\n" +
            "    <shortLabel>participant test</shortLabel>\n"+
            "    <alias type=\"synonym\">participant synonym</alias>\n"+
            "    <alias>test</alias>\n"+
            "  </names>\n" +
            "  <interactorRef>2</interactorRef>\n" +
            "  <biologicalRole>\n" +
            "    <names>\n" +
            "      <shortLabel>unspecified role</shortLabel>\n" +
            "    </names>\n" +
            "    <xref>\n" +
            "      <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
            "    </xref>\n" +
            "  </biologicalRole>\n" +
            "</participant>";
    // Expected XML when a full name is set in addition to the aliases.
    private String participant_fullName ="<participant id=\"1\">\n" +
            "  <names>\n" +
            "    <fullName>participant test</fullName>\n"+
            "    <alias type=\"synonym\">participant synonym</alias>\n"+
            "    <alias>test</alias>\n"+
            "  </names>\n" +
            "  <interactorRef>2</interactorRef>\n" +
            "  <biologicalRole>\n" +
            "    <names>\n" +
            "      <shortLabel>unspecified role</shortLabel>\n" +
            "    </names>\n" +
            "    <xref>\n" +
            "      <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
            "    </xref>\n" +
            "  </biologicalRole>\n" +
            "</participant>";
    // Expected XML for a participant with two xrefs (last added becomes the
    // primaryRef).
    private String participant_xref = "<participant id=\"1\">\n" +
            "  <xref>\n" +
            "    <primaryRef db=\"test2\" id=\"xxxx2\"/>\n" +
            "    <secondaryRef db=\"test\" id=\"xxxx1\"/>\n"+
            "  </xref>\n"+
            "  <interactorRef>2</interactorRef>\n" +
            "  <biologicalRole>\n" +
            "    <names>\n" +
            "      <shortLabel>unspecified role</shortLabel>\n" +
            "    </names>\n" +
            "    <xref>\n" +
            "      <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
            "    </xref>\n" +
            "  </biologicalRole>\n" +
            "</participant>";
    // Expected XML for a participant with one feature covering range 1-4.
    private String participant_feature = "<participant id=\"1\">\n" +
            "  <interactorRef>2</interactorRef>\n" +
            "  <biologicalRole>\n" +
            "    <names>\n" +
            "      <shortLabel>unspecified role</shortLabel>\n" +
            "    </names>\n" +
            "    <xref>\n" +
            "      <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
            "    </xref>\n" +
            "  </biologicalRole>\n" +
            "  <featureList>\n" +
            "    <feature id=\"3\">\n" +
            "      <featureType>\n" +
            "        <names>\n" +
            "          <shortLabel>biological feature</shortLabel>\n" +
            "        </names>\n" +
            "        <xref>\n" +
            "          <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0252\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
            "        </xref>\n" +
            "      </featureType>\n" +
            "      <featureRangeList>\n" +
            "        <featureRange>\n" +
            "          <startStatus>\n" +
            "            <names>\n" +
            "              <shortLabel>certain</shortLabel>\n"+
            "            </names>\n"+
            "            <xref>\n" +
            "              <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0335\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
            "            </xref>\n"+
            "          </startStatus>\n" +
            "          <begin position=\"1\"/>\n"+
            "          <endStatus>\n" +
            "            <names>\n" +
            "              <shortLabel>certain</shortLabel>\n"+
            "            </names>\n"+
            "            <xref>\n" +
            "              <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0335\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
            "            </xref>\n"+
            "          </endStatus>\n" +
            "          <end position=\"4\"/>\n"+
            "        </featureRange>\n"+
            "      </featureRangeList>\n" +
            "    </feature>\n"+
            "  </featureList>\n"+
            "</participant>";
    // Expected XML for a participant with two plain annotations.
    private String participant_attributes = "<participant id=\"1\">\n" +
            "  <interactorRef>2</interactorRef>\n" +
            "  <biologicalRole>\n" +
            "    <names>\n" +
            "      <shortLabel>unspecified role</shortLabel>\n" +
            "    </names>\n" +
            "    <xref>\n" +
            "      <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
            "    </xref>\n" +
            "  </biologicalRole>\n" +
            "  <attributeList>\n" +
            "    <attribute name=\"test2\"/>\n"+
            "    <attribute name=\"test3\"/>\n"+
            "  </attributeList>\n"+
            "</participant>";
    // Expected XML: fixed stoichiometry is serialised as a comment attribute.
    private String participant_stoichiometry = "<participant id=\"1\">\n" +
            "  <interactorRef>2</interactorRef>\n" +
            "  <biologicalRole>\n" +
            "    <names>\n" +
            "      <shortLabel>unspecified role</shortLabel>\n" +
            "    </names>\n" +
            "    <xref>\n" +
            "      <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
            "    </xref>\n" +
            "  </biologicalRole>\n" +
            "  <attributeList>\n" +
            "    <attribute name=\"comment\" nameAc=\"MI:0612\">stoichiometry: 1</attribute>\n"+
            "  </attributeList>\n"+
            "</participant>";
    // Expected XML: stoichiometry ranges are serialised as "min - max".
    private String participant_stoichiometry_range = "<participant id=\"1\">\n" +
            "  <interactorRef>2</interactorRef>\n" +
            "  <biologicalRole>\n" +
            "    <names>\n" +
            "      <shortLabel>unspecified role</shortLabel>\n" +
            "    </names>\n" +
            "    <xref>\n" +
            "      <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
            "    </xref>\n" +
            "  </biologicalRole>\n" +
            "  <attributeList>\n" +
            "    <attribute name=\"comment\" nameAc=\"MI:0612\">stoichiometry: 1 - 4</attribute>\n"+
            "  </attributeList>\n"+
            "</participant>";
    // Expected XML when another participant already occupies cache id 1.
    private String participant_registered = "<participant id=\"2\">\n" +
            "  <interactorRef>3</interactorRef>\n" +
            "  <biologicalRole>\n" +
            "    <names>\n" +
            "      <shortLabel>unspecified role</shortLabel>\n" +
            "    </names>\n" +
            "    <xref>\n" +
            "      <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
            "    </xref>\n" +
            "  </biologicalRole>\n" +
            "</participant>";

    // Shared id cache; cleared at the start of each test.
    private PsiXmlObjectCache elementCache = new InMemoryIdentityObjectCache();

    // Basic participant: interactor written as a reference.
    @Test
    public void test_write_participant() throws XMLStreamException, IOException, IllegalRangeException {
        ModelledParticipant participant = new DefaultNamedModelledParticipant(InteractorUtils.createUnknownBasicInteractor());
        elementCache.clear();

        XmlNamedModelledParticipantWriter writer = new XmlNamedModelledParticipantWriter(createStreamWriter(), this.elementCache);
        writer.write(participant);
        streamWriter.flush();

        Assert.assertEquals(this.participant, output.toString());
    }

    // A complex with participants is written as an interactionRef.
    @Test
    public void test_write_participant_complex() throws XMLStreamException, IOException, IllegalRangeException {
        Complex complex = new DefaultComplex("test complex");
        complex.getParticipants().add(new
DefaultModelledParticipant(new DefaultProtein("test protein")));
        ModelledParticipant participant = new DefaultNamedModelledParticipant(complex);
        elementCache.clear();

        XmlNamedModelledParticipantWriter writer = new XmlNamedModelledParticipantWriter(createStreamWriter(), this.elementCache);
        writer.write(participant);
        streamWriter.flush();

        Assert.assertEquals(this.participant_interaction, output.toString());
    }

    // With complexAsInteractor enabled the complex falls back to interactorRef.
    @Test
    public void test_write_participant_complex_as_interactor() throws XMLStreamException, IOException, IllegalRangeException {
        Complex complex = new DefaultComplex("test complex");
        complex.getParticipants().add(new DefaultModelledParticipant(new DefaultProtein("test protein")));
        ModelledParticipant participant = new DefaultNamedModelledParticipant(complex);
        elementCache.clear();

        XmlNamedModelledParticipantWriter writer = new XmlNamedModelledParticipantWriter(createStreamWriter(), this.elementCache);
        writer.setComplexAsInteractor(true);
        writer.write(participant);
        streamWriter.flush();

        Assert.assertEquals(this.participant, output.toString());
    }

    // A complex without participants is also written as an interactorRef.
    @Test
    public void test_write_participant_complex_no_participants() throws XMLStreamException, IOException, IllegalRangeException {
        Complex complex = new DefaultComplex("test complex");
        ModelledParticipant participant = new DefaultNamedModelledParticipant(complex);
        elementCache.clear();

        XmlNamedModelledParticipantWriter writer = new XmlNamedModelledParticipantWriter(createStreamWriter(), this.elementCache);
        writer.write(participant);
        streamWriter.flush();

        Assert.assertEquals(this.participant, output.toString());
    }

    // Aliases are written inside a names element.
    @Test
    public void test_write_participant_aliases() throws XMLStreamException, IOException, IllegalRangeException {
        ModelledParticipant participant = new DefaultNamedModelledParticipant(InteractorUtils.createUnknownBasicInteractor());
        participant.getAliases().add(new DefaultAlias(new DefaultCvTerm("synonym"), "participant synonym"));
        participant.getAliases().add(new DefaultAlias("test"));
        elementCache.clear();

        XmlNamedModelledParticipantWriter writer = new XmlNamedModelledParticipantWriter(createStreamWriter(), this.elementCache);
        writer.write(participant);
        streamWriter.flush();

        Assert.assertEquals(this.participant_aliases, output.toString());
    }

    // A short name is written as shortLabel before the aliases.
    @Test
    public void test_write_participant_shortName() throws XMLStreamException, IOException, IllegalRangeException {
        NamedParticipant participant = new DefaultNamedModelledParticipant(InteractorUtils.createUnknownBasicInteractor());
        participant.getAliases().add(new DefaultAlias(new DefaultCvTerm("synonym"), "participant synonym"));
        participant.getAliases().add(new DefaultAlias("test"));
        participant.setShortName("participant test");
        elementCache.clear();

        XmlNamedModelledParticipantWriter writer = new XmlNamedModelledParticipantWriter(createStreamWriter(), this.elementCache);
        writer.write((ModelledParticipant)participant);
        streamWriter.flush();

        Assert.assertEquals(this.participant_shortName, output.toString());
    }

    // A full name is written as fullName before the aliases.
    @Test
    public void test_write_participant_fullName() throws XMLStreamException, IOException, IllegalRangeException {
        NamedParticipant participant = new DefaultNamedModelledParticipant(InteractorUtils.createUnknownBasicInteractor());
        participant.getAliases().add(new DefaultAlias(new DefaultCvTerm("synonym"), "participant synonym"));
        participant.getAliases().add(new DefaultAlias("test"));
        participant.setFullName("participant test");
        elementCache.clear();

        XmlNamedModelledParticipantWriter writer = new XmlNamedModelledParticipantWriter(createStreamWriter(), this.elementCache);
        writer.write((ModelledParticipant)participant);
        streamWriter.flush();

        Assert.assertEquals(this.participant_fullName, output.toString());
    }

    // Xrefs are written in an xref element; see participant_xref for ordering.
    @Test
    public void test_write_participant_xref() throws XMLStreamException, IOException, IllegalRangeException {
        ModelledParticipant participant = new DefaultNamedModelledParticipant(InteractorUtils.createUnknownBasicInteractor());
        participant.getXrefs().add(new DefaultXref(new DefaultCvTerm("test2"), "xxxx2"));
        participant.getXrefs().add(new DefaultXref(new DefaultCvTerm("test"), "xxxx1"));
        elementCache.clear();

        XmlNamedModelledParticipantWriter writer = new XmlNamedModelledParticipantWriter(createStreamWriter(), this.elementCache);
        writer.write(participant);
        streamWriter.flush();

        Assert.assertEquals(this.participant_xref, output.toString());
    }

    // A feature with a 1-4 range gets a full featureList element.
    @Test
    public void test_write_participant_feature() throws XMLStreamException, IOException, IllegalRangeException {
        ModelledParticipant participant = new DefaultNamedModelledParticipant(InteractorUtils.createUnknownBasicInteractor());
        ModelledFeature feature = new DefaultModelledFeature();
        feature.getRanges().add(RangeUtils.createRangeFromString("1-4"));
        participant.addFeature(feature);
        elementCache.clear();

        XmlNamedModelledParticipantWriter writer = new XmlNamedModelledParticipantWriter(createStreamWriter(), this.elementCache);
        writer.write(participant);
        streamWriter.flush();

        Assert.assertEquals(this.participant_feature, output.toString());
    }

    // Annotations are written in an attributeList element.
    @Test
    public void test_write_participant_attributes() throws XMLStreamException, IOException, IllegalRangeException {
        ModelledParticipant participant = new DefaultNamedModelledParticipant(InteractorUtils.createUnknownBasicInteractor());
        participant.getAnnotations().add(new DefaultAnnotation(new DefaultCvTerm("test2")));
        participant.getAnnotations().add(new DefaultAnnotation(new DefaultCvTerm("test3")));
        elementCache.clear();

        XmlNamedModelledParticipantWriter writer = new XmlNamedModelledParticipantWriter(createStreamWriter(), this.elementCache);
        writer.write(participant);
        streamWriter.flush();

        Assert.assertEquals(this.participant_attributes, output.toString());
    }

    // Fixed stoichiometry is exported as a comment attribute.
    @Test
    public void test_write_participant_stoichiometry() throws XMLStreamException, IOException, IllegalRangeException {
        ModelledParticipant participant = new DefaultNamedModelledParticipant(InteractorUtils.createUnknownBasicInteractor());
        participant.setStoichiometry(1);
        elementCache.clear();

        XmlNamedModelledParticipantWriter writer = new XmlNamedModelledParticipantWriter(createStreamWriter(), this.elementCache);
        writer.write(participant);
        streamWriter.flush();

        Assert.assertEquals(this.participant_stoichiometry, output.toString());
    }

    // A stoichiometry range is exported as "min - max".
    @Test
    public void test_write_participant_stoichiometry_range() throws XMLStreamException, IOException, IllegalRangeException {
        ModelledParticipant participant = new DefaultNamedModelledParticipant(InteractorUtils.createUnknownBasicInteractor());
        participant.setStoichiometry(new DefaultStoichiometry(1,4));
        elementCache.clear();

        XmlNamedModelledParticipantWriter writer = new XmlNamedModelledParticipantWriter(createStreamWriter(), this.elementCache);
        writer.write(participant);
        streamWriter.flush();

        Assert.assertEquals(this.participant_stoichiometry_range, output.toString());
    }

    // When the cache already holds another participant, this one gets id 2
    // (and its interactor id 3).
    @Test
    public void test_write_participant_registered() throws XMLStreamException, IOException, IllegalRangeException {
        ModelledParticipant participant = new DefaultNamedModelledParticipant(InteractorUtils.createUnknownBasicInteractor());
        elementCache.clear();
        elementCache.extractIdForParticipant(new DefaultParticipant(new DefaultProtein("protein test")));
        elementCache.extractIdForParticipant(participant);

        XmlNamedModelledParticipantWriter writer = new XmlNamedModelledParticipantWriter(createStreamWriter(), this.elementCache);
        writer.write(participant);
        streamWriter.flush();

        Assert.assertEquals(this.participant_registered, output.toString());
    }
}
/*
 * Copyright (c) 2004, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

/*
 * @test
 * @bug     5024531
 * @summary Test type mapping of the platform MXBean proxy
 *          returned from Management.newPlatformMXBeanProxy().
 * @author  Mandy Chung
 *
 * @compile ProxyTypeMapping.java
 * @run main/othervm -verbose:gc ProxyTypeMapping
 */

import java.lang.management.*;
import javax.management.*;
import static java.lang.management.ManagementFactory.*;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import com.sun.management.GcInfo;

/**
 * Exercises {@code ManagementFactory.newPlatformMXBeanProxy()} and verifies
 * that the complex types exposed by the platform MXBeans (enums, lists, maps,
 * MemoryUsage, ThreadInfo, GcInfo) are mapped correctly through the proxy.
 * Each check method throws {@link RuntimeException} on failure; success is
 * reported by printing "Test passed.".
 */
public class ProxyTypeMapping {
    private static MBeanServer server =
        ManagementFactory.getPlatformMBeanServer();
    private static RuntimeMXBean runtime;
    private static ThreadMXBean thread;
    private static MemoryMXBean memory;
    // Pools used by checkEnum()/checkMemoryUsage(); selected in main().
    private static MemoryPoolMXBean heapPool = null;
    private static MemoryPoolMXBean nonHeapPool = null;

    public static void main(String[] argv) throws Exception {
        runtime = newPlatformMXBeanProxy(server,
                                         RUNTIME_MXBEAN_NAME,
                                         RuntimeMXBean.class);
        thread = newPlatformMXBeanProxy(server,
                                        THREAD_MXBEAN_NAME,
                                        ThreadMXBean.class);
        memory = newPlatformMXBeanProxy(server,
                                        MEMORY_MXBEAN_NAME,
                                        MemoryMXBean.class);

        // The memory proxy must also implement NotificationEmitter:
        // register and deregister a listener to prove the wiring works.
        MyListener listener = new MyListener();
        NotificationEmitter emitter = (NotificationEmitter) memory;
        emitter.addNotificationListener(listener, null, null);
        emitter.removeNotificationListener(listener);

        // Pick one heap pool supporting both usage and collection-usage
        // thresholds, and one non-heap pool supporting usage thresholds.
        List<MemoryPoolMXBean> pools = getMemoryPoolMXBeans();
        for (MemoryPoolMXBean p : pools) {
            if (heapPool == null &&
                p.getType() == MemoryType.HEAP &&
                p.isUsageThresholdSupported() &&
                p.isCollectionUsageThresholdSupported()) {
                heapPool = p;
            }
            if (nonHeapPool == null &&
                p.getType() == MemoryType.NON_HEAP &&
                p.isUsageThresholdSupported()) {
                nonHeapPool = p;
            }
        }
        // Fail fast with a clear diagnostic instead of an NPE later in
        // checkEnum()/checkMemoryUsage() if the running VM exposes no
        // suitable pools.
        if (heapPool == null || nonHeapPool == null) {
            throw new RuntimeException("TEST FAILED: " +
                " no suitable memory pool found" +
                " (heapPool = " + heapPool +
                ", nonHeapPool = " + nonHeapPool + ")");
        }

        checkEnum();
        checkList();
        checkMap();
        checkMemoryUsage();
        checkThreadInfo();
        checkOS();
        checkSunGC();
        System.out.println("Test passed.");
    }

    /** Verifies that the MemoryType enum is mapped through the proxy. */
    private static void checkEnum() throws Exception {
        MemoryType type = heapPool.getType();
        if (type != MemoryType.HEAP) {
            throw new RuntimeException("TEST FAILED: " +
                " incorrect memory type for " + heapPool.getName());
        }
        type = nonHeapPool.getType();
        if (type != MemoryType.NON_HEAP) {
            throw new RuntimeException("TEST FAILED: " +
                " incorrect memory type for " + nonHeapPool.getName());
        }
    }

    private static final String OPTION = "-verbose:gc";

    /**
     * Verifies List&lt;String&gt; mapping: the VM input arguments must
     * contain the -verbose:gc option passed on the @run line.
     */
    private static void checkList() throws Exception {
        List<String> args = runtime.getInputArguments();
        if (args.size() < 1) {
            throw new RuntimeException("TEST FAILED: " +
                " empty input arguments");
        }
        // check if -verbose:gc exists
        boolean found = false;
        for (String option : args) {
            if (option.equals(OPTION)) {
                found = true;
                break;
            }
        }
        if (!found) {
            throw new RuntimeException("TEST FAILED: " +
                "VM option " + OPTION + " not found");
        }
    }

    private static final String KEY1 = "test.property.key1";
    private static final String VALUE1 = "test.property.value1";
    private static final String KEY2 = "test.property.key2";
    private static final String VALUE2 = "test.property.value2";
    private static final String KEY3 = "test.property.key3";

    /**
     * Verifies Map&lt;String,String&gt; mapping via the system properties:
     * two freshly-set keys must be visible, a never-set key must be absent.
     */
    private static void checkMap() throws Exception {
        // Add new system properties
        System.setProperty(KEY1, VALUE1);
        System.setProperty(KEY2, VALUE2);

        Map<String,String> props1 = runtime.getSystemProperties();
        String value1 = props1.get(KEY1);
        if (value1 == null || !value1.equals(VALUE1)) {
            throw new RuntimeException("TEST FAILED: " +
                KEY1 + " property found" +
                " with value = " + value1 +
                " but expected to be " + VALUE1);
        }
        String value2 = props1.get(KEY2);
        if (value2 == null || !value2.equals(VALUE2)) {
            throw new RuntimeException("TEST FAILED: " +
                KEY2 + " property found" +
                " with value = " + value2 +
                " but expected to be " + VALUE2);
        }
        String value3 = props1.get(KEY3);
        if (value3 != null) {
            throw new RuntimeException("TEST FAILED: " +
                KEY3 + " property found" +
                " but should not exist" );
        }
    }

    /**
     * Verifies MemoryUsage mapping: sanity check that heap, non-heap and
     * pool usages report non-zero committed memory, and that the heap
     * pool's collection usage is non-zero after an explicit GC.
     */
    private static void checkMemoryUsage() throws Exception {
        // sanity check to have non-zero usage
        MemoryUsage u1 = memory.getHeapMemoryUsage();
        MemoryUsage u2 = memory.getNonHeapMemoryUsage();
        MemoryUsage u3 = heapPool.getUsage();
        MemoryUsage u4 = nonHeapPool.getUsage();
        if (u1.getCommitted() <= 0 ||
            u2.getCommitted() <= 0 ||
            u3.getCommitted() <= 0 ||
            u4.getCommitted() <= 0) {
            throw new RuntimeException("TEST FAILED: " +
                " expected non-zero committed usage");
        }
        memory.gc();
        MemoryUsage u5 = heapPool.getCollectionUsage();
        if (u5.getCommitted() <= 0) {
            throw new RuntimeException("TEST FAILED: " +
                " expected non-zero committed collected usage");
        }
    }

    /**
     * Verifies ThreadInfo (CompositeData) mapping for both the bulk and
     * single-thread lookups. Assumes all sampled threads stay alive; a
     * thread dying between getAllThreadIds() and getThreadInfo() yields a
     * null entry and fails the test.
     */
    private static void checkThreadInfo() throws Exception {
        // assume all threads stay alive
        long[] ids = thread.getAllThreadIds();
        ThreadInfo[] infos = thread.getThreadInfo(ids);
        for (ThreadInfo ti : infos) {
            printThreadInfo(ti);
        }
        infos = thread.getThreadInfo(ids, 2);
        for (ThreadInfo ti : infos) {
            printThreadInfo(ti);
        }
        long id = Thread.currentThread().getId();
        ThreadInfo info = thread.getThreadInfo(id);
        printThreadInfo(info);
        info = thread.getThreadInfo(id, 2);
        printThreadInfo(info);
    }

    /** Prints one ThreadInfo; a null argument is a test failure. */
    private static void printThreadInfo(ThreadInfo info) {
        if (info == null) {
            throw new RuntimeException("TEST FAILED: " +
                " Null ThreadInfo");
        }
        System.out.print(info.getThreadName());
        System.out.print(" id=" + info.getThreadId());
        System.out.println(" " + info.getThreadState());
        for (StackTraceElement s : info.getStackTrace()) {
            System.out.println(s);
        }
    }

    /** Verifies the com.sun.management.OperatingSystemMXBean proxy mapping. */
    private static void checkOS() throws Exception {
        com.sun.management.OperatingSystemMXBean os =
            newPlatformMXBeanProxy(server,
                                   OPERATING_SYSTEM_MXBEAN_NAME,
                                   com.sun.management.OperatingSystemMXBean.class);
        System.out.println("# CPUs = " + os.getAvailableProcessors());
        System.out.println("Committed virtual memory = " +
                           os.getCommittedVirtualMemorySize());
    }

    /**
     * Verifies GcInfo mapping through the com.sun.management GC proxy.
     * getLastGcInfo() may be null if no collection has happened yet.
     */
    private static void checkSunGC() throws Exception {
        // Test com.sun.management proxy
        List<GarbageCollectorMXBean> gcs = getGarbageCollectorMXBeans();
        for (GarbageCollectorMXBean gc : gcs) {
            com.sun.management.GarbageCollectorMXBean sunGc =
                newPlatformMXBeanProxy(server,
                    GARBAGE_COLLECTOR_MXBEAN_DOMAIN_TYPE + ",name=" + gc.getName(),
                    com.sun.management.GarbageCollectorMXBean.class);
            GcInfo info = sunGc.getLastGcInfo();
            if (info != null) {
                System.out.println("GC statistic for : " + gc.getName());
                printGcInfo(info);
            }
        }
    }

    /**
     * Prints one GcInfo record; every pool present before the GC must also
     * be present in the after-GC map.
     */
    private static void printGcInfo(GcInfo info) throws Exception {
        System.out.print("GC #" + info.getId());
        System.out.print(" start:" + info.getStartTime());
        System.out.print(" end:" + info.getEndTime());
        System.out.println(" (" + info.getDuration() + "ms)");
        Map<String,MemoryUsage> usage = info.getMemoryUsageBeforeGc();
        for (Map.Entry<String,MemoryUsage> entry : usage.entrySet()) {
            String poolname = entry.getKey();
            MemoryUsage busage = entry.getValue();
            MemoryUsage ausage = info.getMemoryUsageAfterGc().get(poolname);
            if (ausage == null) {
                throw new RuntimeException("After Gc Memory does not exist" +
                    " for " + poolname);
            }
            System.out.println("Usage for pool " + poolname);
            System.out.println(" Before GC: " + busage);
            System.out.println(" After GC: " + ausage);
        }
    }

    /** No-op listener used only to verify NotificationEmitter wiring. */
    static class MyListener implements NotificationListener {
        public void handleNotification(Notification notif, Object handback) {
            return;
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.druid.indexing.common.task; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; import org.apache.commons.io.FileUtils; import org.apache.druid.client.cache.CacheConfig; import org.apache.druid.client.cache.CachePopulatorStats; import org.apache.druid.client.cache.MapCache; import org.apache.druid.common.config.NullHandling; import org.apache.druid.data.input.Firehose; import org.apache.druid.data.input.FirehoseFactory; import org.apache.druid.data.input.InputRow; import org.apache.druid.data.input.impl.DimensionsSpec; import org.apache.druid.data.input.impl.FloatDimensionSchema; import org.apache.druid.data.input.impl.InputRowParser; import org.apache.druid.data.input.impl.LongDimensionSchema; import org.apache.druid.data.input.impl.MapInputRowParser; import 
org.apache.druid.data.input.impl.StringDimensionSchema; import org.apache.druid.data.input.impl.TimeAndDimsParseSpec; import org.apache.druid.data.input.impl.TimestampSpec; import org.apache.druid.discovery.DataNodeService; import org.apache.druid.discovery.DruidNodeAnnouncer; import org.apache.druid.discovery.LookupNodeService; import org.apache.druid.indexer.IngestionState; import org.apache.druid.indexer.TaskState; import org.apache.druid.indexer.TaskStatus; import org.apache.druid.indexing.common.IngestionStatsAndErrorsTaskReportData; import org.apache.druid.indexing.common.SegmentLoaderFactory; import org.apache.druid.indexing.common.TaskReport; import org.apache.druid.indexing.common.TaskReportFileWriter; import org.apache.druid.indexing.common.TaskToolbox; import org.apache.druid.indexing.common.TaskToolboxFactory; import org.apache.druid.indexing.common.TestUtils; import org.apache.druid.indexing.common.actions.LocalTaskActionClientFactory; import org.apache.druid.indexing.common.actions.TaskActionClientFactory; import org.apache.druid.indexing.common.actions.TaskActionToolbox; import org.apache.druid.indexing.common.actions.TaskAuditLogConfig; import org.apache.druid.indexing.common.config.TaskConfig; import org.apache.druid.indexing.common.config.TaskStorageConfig; import org.apache.druid.indexing.common.index.RealtimeAppenderatorIngestionSpec; import org.apache.druid.indexing.common.index.RealtimeAppenderatorTuningConfig; import org.apache.druid.indexing.common.stats.RowIngestionMeters; import org.apache.druid.indexing.common.stats.RowIngestionMetersFactory; import org.apache.druid.indexing.overlord.DataSourceMetadata; import org.apache.druid.indexing.overlord.HeapMemoryTaskStorage; import org.apache.druid.indexing.overlord.SegmentPublishResult; import org.apache.druid.indexing.overlord.TaskLockbox; import org.apache.druid.indexing.overlord.TaskStorage; import org.apache.druid.indexing.overlord.supervisor.SupervisorManager; import 
org.apache.druid.indexing.test.TestDataSegmentAnnouncer; import org.apache.druid.indexing.test.TestDataSegmentKiller; import org.apache.druid.indexing.test.TestDataSegmentPusher; import org.apache.druid.jackson.DefaultObjectMapper; import org.apache.druid.java.util.common.DateTimes; import org.apache.druid.java.util.common.ISE; import org.apache.druid.java.util.common.Pair; import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.java.util.common.concurrent.Execs; import org.apache.druid.java.util.common.granularity.Granularities; import org.apache.druid.java.util.common.jackson.JacksonUtils; import org.apache.druid.java.util.common.logger.Logger; import org.apache.druid.java.util.common.parsers.ParseException; import org.apache.druid.java.util.emitter.EmittingLogger; import org.apache.druid.java.util.emitter.core.NoopEmitter; import org.apache.druid.java.util.emitter.service.ServiceEmitter; import org.apache.druid.java.util.metrics.MonitorScheduler; import org.apache.druid.math.expr.ExprMacroTable; import org.apache.druid.metadata.EntryExistsException; import org.apache.druid.metadata.IndexerSQLMetadataStorageCoordinator; import org.apache.druid.metadata.TestDerbyConnector; import org.apache.druid.query.DefaultQueryRunnerFactoryConglomerate; import org.apache.druid.query.Druids; import org.apache.druid.query.IntervalChunkingQueryRunnerDecorator; import org.apache.druid.query.Query; import org.apache.druid.query.QueryPlus; import org.apache.druid.query.QueryRunner; import org.apache.druid.query.QueryRunnerFactoryConglomerate; import org.apache.druid.query.QueryToolChest; import org.apache.druid.query.Result; import org.apache.druid.query.SegmentDescriptor; import org.apache.druid.query.aggregation.AggregatorFactory; import org.apache.druid.query.aggregation.CountAggregatorFactory; import org.apache.druid.query.aggregation.LongSumAggregatorFactory; import org.apache.druid.query.filter.DimFilter; import 
org.apache.druid.query.filter.SelectorDimFilter; import org.apache.druid.query.timeseries.TimeseriesQuery; import org.apache.druid.query.timeseries.TimeseriesQueryEngine; import org.apache.druid.query.timeseries.TimeseriesQueryQueryToolChest; import org.apache.druid.query.timeseries.TimeseriesQueryRunnerFactory; import org.apache.druid.query.timeseries.TimeseriesResultValue; import org.apache.druid.segment.TestHelper; import org.apache.druid.segment.indexing.DataSchema; import org.apache.druid.segment.indexing.RealtimeIOConfig; import org.apache.druid.segment.indexing.granularity.UniformGranularitySpec; import org.apache.druid.segment.loading.SegmentLoaderConfig; import org.apache.druid.segment.loading.StorageLocationConfig; import org.apache.druid.segment.realtime.plumber.SegmentHandoffNotifier; import org.apache.druid.segment.realtime.plumber.SegmentHandoffNotifierFactory; import org.apache.druid.segment.transform.ExpressionTransform; import org.apache.druid.segment.transform.TransformSpec; import org.apache.druid.server.DruidNode; import org.apache.druid.server.coordination.DataSegmentServerAnnouncer; import org.apache.druid.server.coordination.ServerType; import org.apache.druid.server.security.AuthTestUtils; import org.apache.druid.timeline.DataSegment; import org.apache.druid.timeline.partition.LinearShardSpec; import org.apache.druid.timeline.partition.NumberedShardSpec; import org.apache.druid.utils.Runnables; import org.easymock.EasyMock; import org.joda.time.DateTime; import org.joda.time.Period; import org.junit.After; import org.junit.Assert; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.junit.rules.TemporaryFolder; import javax.annotation.Nullable; import java.io.File; import java.io.IOException; import java.nio.file.Files; import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import 
java.util.Deque; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.CountDownLatch; import java.util.concurrent.Executor; import java.util.concurrent.TimeUnit; import java.util.regex.Pattern; public class AppenderatorDriverRealtimeIndexTaskTest { private static final Logger log = new Logger(AppenderatorDriverRealtimeIndexTaskTest.class); private static final ServiceEmitter emitter = new ServiceEmitter( "service", "host", new NoopEmitter() ); private static final ObjectMapper objectMapper = TestHelper.makeJsonMapper(); private static final String FAIL_DIM = "__fail__"; private static class TestFirehose implements Firehose { private final InputRowParser<Map<String, Object>> parser; private final Deque<Optional<Map<String, Object>>> queue = new ArrayDeque<>(); private boolean closed = false; public TestFirehose(final InputRowParser<Map<String, Object>> parser) { this.parser = parser; } public void addRows(List<Map<String, Object>> rows) { synchronized (this) { rows.stream().map(Optional::ofNullable).forEach(queue::add); notifyAll(); } } @Override public boolean hasMore() { try { synchronized (this) { while (queue.isEmpty() && !closed) { wait(); } return !queue.isEmpty(); } } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new RuntimeException(e); } } @Override public InputRow nextRow() { synchronized (this) { final InputRow row = parser.parseBatch(queue.removeFirst().orElse(null)).get(0); if (row != null && row.getRaw(FAIL_DIM) != null) { throw new ParseException(FAIL_DIM); } return row; } } @Override public Runnable commit() { return Runnables.getNoopRunnable(); } @Override public void close() { synchronized (this) { closed = true; notifyAll(); } } } private static class TestFirehoseFactory implements FirehoseFactory<InputRowParser> { public TestFirehoseFactory() { } 
@Override @SuppressWarnings("unchecked") public Firehose connect(InputRowParser parser, File temporaryDirectory) throws ParseException { return new TestFirehose(parser); } } @Rule public final ExpectedException expectedException = ExpectedException.none(); @Rule public final TemporaryFolder tempFolder = new TemporaryFolder(); @Rule public final TestDerbyConnector.DerbyConnectorRule derbyConnectorRule = new TestDerbyConnector.DerbyConnectorRule(); private DateTime now; private ListeningExecutorService taskExec; private Map<SegmentDescriptor, Pair<Executor, Runnable>> handOffCallbacks; private Collection<DataSegment> publishedSegments; private CountDownLatch segmentLatch; private CountDownLatch handoffLatch; private TaskStorage taskStorage; private TaskLockbox taskLockbox; private TaskToolboxFactory taskToolboxFactory; private File baseDir; private File reportsFile; private RowIngestionMetersFactory rowIngestionMetersFactory; @Before public void setUp() throws IOException { EmittingLogger.registerEmitter(emitter); emitter.start(); taskExec = MoreExecutors.listeningDecorator(Execs.singleThreaded("realtime-index-task-test-%d")); now = DateTimes.nowUtc(); TestDerbyConnector derbyConnector = derbyConnectorRule.getConnector(); derbyConnector.createDataSourceTable(); derbyConnector.createTaskTables(); derbyConnector.createSegmentTable(); derbyConnector.createPendingSegmentsTable(); baseDir = tempFolder.newFolder(); reportsFile = File.createTempFile("KafkaIndexTaskTestReports-" + System.currentTimeMillis(), "json"); makeToolboxFactory(baseDir); } @After public void tearDown() { taskExec.shutdownNow(); reportsFile.delete(); } @Test(timeout = 60_000L) public void testDefaultResource() { final AppenderatorDriverRealtimeIndexTask task = makeRealtimeTask(null); Assert.assertEquals(task.getId(), task.getTaskResource().getAvailabilityGroup()); } @Test(timeout = 60_000L) public void testHandoffTimeout() throws Exception { expectPublishedSegments(1); final 
AppenderatorDriverRealtimeIndexTask task = makeRealtimeTask(null, TransformSpec.NONE, true, 100L, true, 0, 1); final ListenableFuture<TaskStatus> statusFuture = runTask(task); // Wait for firehose to show up, it starts off null. while (task.getFirehose() == null) { Thread.sleep(50); } final TestFirehose firehose = (TestFirehose) task.getFirehose(); firehose.addRows( ImmutableList.of( ImmutableMap.of("t", now.getMillis(), "dim1", "foo", "met1", "1") ) ); // Stop the firehose, this will drain out existing events. firehose.close(); // handoff would timeout, resulting in exception TaskStatus status = statusFuture.get(); Assert.assertTrue(status.getErrorMsg() .contains("java.util.concurrent.TimeoutException: Timeout waiting for task.")); } @Test(timeout = 60_000L) public void testBasics() throws Exception { expectPublishedSegments(1); final AppenderatorDriverRealtimeIndexTask task = makeRealtimeTask(null); final ListenableFuture<TaskStatus> statusFuture = runTask(task); // Wait for firehose to show up, it starts off null. while (task.getFirehose() == null) { Thread.sleep(50); } final TestFirehose firehose = (TestFirehose) task.getFirehose(); firehose.addRows( ImmutableList.of( ImmutableMap.of("t", now.getMillis(), "dim1", "foo", "met1", "1"), ImmutableMap.of("t", now.getMillis(), "dim2", "bar", "met1", 2.0) ) ); // Stop the firehose, this will drain out existing events. firehose.close(); // Wait for publish. Collection<DataSegment> publishedSegments = awaitSegments(); // Check metrics. Assert.assertEquals(2, task.getRowIngestionMeters().getProcessed()); Assert.assertEquals(0, task.getRowIngestionMeters().getThrownAway()); Assert.assertEquals(0, task.getRowIngestionMeters().getUnparseable()); // Do some queries. 
Assert.assertEquals(2, sumMetric(task, null, "rows").longValue()); Assert.assertEquals(3, sumMetric(task, null, "met1").longValue()); awaitHandoffs(); for (DataSegment publishedSegment : publishedSegments) { Pair<Executor, Runnable> executorRunnablePair = handOffCallbacks.get( new SegmentDescriptor( publishedSegment.getInterval(), publishedSegment.getVersion(), publishedSegment.getShardSpec().getPartitionNum() ) ); Assert.assertNotNull( publishedSegment + " missing from handoff callbacks: " + handOffCallbacks, executorRunnablePair ); // Simulate handoff. executorRunnablePair.lhs.execute(executorRunnablePair.rhs); } handOffCallbacks.clear(); // Wait for the task to finish. final TaskStatus taskStatus = statusFuture.get(); Assert.assertEquals(TaskState.SUCCESS, taskStatus.getStatusCode()); } @Test(timeout = 60_000L) public void testLateData() throws Exception { expectPublishedSegments(1); final AppenderatorDriverRealtimeIndexTask task = makeRealtimeTask(null); final ListenableFuture<TaskStatus> statusFuture = runTask(task); // Wait for firehose to show up, it starts off null. while (task.getFirehose() == null) { Thread.sleep(50); } final TestFirehose firehose = (TestFirehose) task.getFirehose(); firehose.addRows( ImmutableList.of( ImmutableMap.of("t", now.getMillis(), "dim1", "foo", "met1", "1"), // Data is from 2 days ago, should still be processed ImmutableMap.of("t", now.minus(new Period("P2D")).getMillis(), "dim2", "bar", "met1", 2.0) ) ); // Stop the firehose, this will drain out existing events. firehose.close(); // Wait for publish. Collection<DataSegment> publishedSegments = awaitSegments(); // Check metrics. Assert.assertEquals(2, task.getRowIngestionMeters().getProcessed()); Assert.assertEquals(0, task.getRowIngestionMeters().getThrownAway()); Assert.assertEquals(0, task.getRowIngestionMeters().getUnparseable()); // Do some queries. 
Assert.assertEquals(2, sumMetric(task, null, "rows").longValue()); Assert.assertEquals(3, sumMetric(task, null, "met1").longValue()); awaitHandoffs(); for (DataSegment publishedSegment : publishedSegments) { Pair<Executor, Runnable> executorRunnablePair = handOffCallbacks.get( new SegmentDescriptor( publishedSegment.getInterval(), publishedSegment.getVersion(), publishedSegment.getShardSpec().getPartitionNum() ) ); Assert.assertNotNull( publishedSegment + " missing from handoff callbacks: " + handOffCallbacks, executorRunnablePair ); // Simulate handoff. executorRunnablePair.lhs.execute(executorRunnablePair.rhs); } handOffCallbacks.clear(); // Wait for the task to finish. final TaskStatus taskStatus = statusFuture.get(); Assert.assertEquals(TaskState.SUCCESS, taskStatus.getStatusCode()); } @Test(timeout = 60_000L) public void testMaxRowsPerSegment() throws Exception { // Expect 2 segments as we will hit maxRowsPerSegment expectPublishedSegments(2); final AppenderatorDriverRealtimeIndexTask task = makeRealtimeTask(null); final ListenableFuture<TaskStatus> statusFuture = runTask(task); // Wait for firehose to show up, it starts off null. while (task.getFirehose() == null) { Thread.sleep(50); } final TestFirehose firehose = (TestFirehose) task.getFirehose(); // maxRowsPerSegment is 1000 as configured in #makeRealtimeTask for (int i = 0; i < 2000; i++) { firehose.addRows( ImmutableList.of( ImmutableMap.of("t", now.getMillis(), "dim1", "foo-" + i, "met1", "1") ) ); } // Stop the firehose, this will drain out existing events. firehose.close(); // Wait for publish. Collection<DataSegment> publishedSegments = awaitSegments(); // Check metrics. Assert.assertEquals(2000, task.getRowIngestionMeters().getProcessed()); Assert.assertEquals(0, task.getRowIngestionMeters().getThrownAway()); Assert.assertEquals(0, task.getRowIngestionMeters().getUnparseable()); // Do some queries. 
Assert.assertEquals(2000, sumMetric(task, null, "rows").longValue()); Assert.assertEquals(2000, sumMetric(task, null, "met1").longValue()); awaitHandoffs(); for (DataSegment publishedSegment : publishedSegments) { Pair<Executor, Runnable> executorRunnablePair = handOffCallbacks.get( new SegmentDescriptor( publishedSegment.getInterval(), publishedSegment.getVersion(), publishedSegment.getShardSpec().getPartitionNum() ) ); Assert.assertNotNull( publishedSegment + " missing from handoff callbacks: " + handOffCallbacks, executorRunnablePair ); // Simulate handoff. executorRunnablePair.lhs.execute(executorRunnablePair.rhs); } handOffCallbacks.clear(); // Wait for the task to finish. final TaskStatus taskStatus = statusFuture.get(); Assert.assertEquals(TaskState.SUCCESS, taskStatus.getStatusCode()); } @Test(timeout = 60_000L) public void testMaxTotalRows() throws Exception { // Expect 2 segments as we will hit maxTotalRows expectPublishedSegments(2); final AppenderatorDriverRealtimeIndexTask task = makeRealtimeTask(null, Integer.MAX_VALUE, 1500L); final ListenableFuture<TaskStatus> statusFuture = runTask(task); // Wait for firehose to show up, it starts off null. while (task.getFirehose() == null) { Thread.sleep(50); } final TestFirehose firehose = (TestFirehose) task.getFirehose(); // maxTotalRows is 1500 for (int i = 0; i < 2000; i++) { firehose.addRows( ImmutableList.of( ImmutableMap.of("t", now.getMillis(), "dim1", "foo-" + i, "met1", "1") ) ); } // Stop the firehose, this will drain out existing events. firehose.close(); // Wait for publish. Collection<DataSegment> publishedSegments = awaitSegments(); // Check metrics. Assert.assertEquals(2000, task.getRowIngestionMeters().getProcessed()); Assert.assertEquals(0, task.getRowIngestionMeters().getThrownAway()); Assert.assertEquals(0, task.getRowIngestionMeters().getUnparseable()); // Do some queries. 
Assert.assertEquals(2000, sumMetric(task, null, "rows").longValue()); Assert.assertEquals(2000, sumMetric(task, null, "met1").longValue()); awaitHandoffs(); Assert.assertEquals(2, publishedSegments.size()); for (DataSegment publishedSegment : publishedSegments) { Pair<Executor, Runnable> executorRunnablePair = handOffCallbacks.get( new SegmentDescriptor( publishedSegment.getInterval(), publishedSegment.getVersion(), publishedSegment.getShardSpec().getPartitionNum() ) ); Assert.assertNotNull( publishedSegment + " missing from handoff callbacks: " + handOffCallbacks, executorRunnablePair ); // Simulate handoff. executorRunnablePair.lhs.execute(executorRunnablePair.rhs); } handOffCallbacks.clear(); // Wait for the task to finish. final TaskStatus taskStatus = statusFuture.get(); Assert.assertEquals(TaskState.SUCCESS, taskStatus.getStatusCode()); } @Test(timeout = 60_000L) public void testTransformSpec() throws Exception { expectPublishedSegments(2); final TransformSpec transformSpec = new TransformSpec( new SelectorDimFilter("dim1", "foo", null), ImmutableList.of( new ExpressionTransform("dim1t", "concat(dim1,dim1)", ExprMacroTable.nil()) ) ); final AppenderatorDriverRealtimeIndexTask task = makeRealtimeTask(null, transformSpec, true, 0, true, 0, 1); final ListenableFuture<TaskStatus> statusFuture = runTask(task); // Wait for firehose to show up, it starts off null. while (task.getFirehose() == null) { Thread.sleep(50); } final TestFirehose firehose = (TestFirehose) task.getFirehose(); firehose.addRows( ImmutableList.of( ImmutableMap.of("t", now.getMillis(), "dim1", "foo", "met1", "1"), ImmutableMap.of("t", now.minus(new Period("P1D")).getMillis(), "dim1", "foo", "met1", 2.0), ImmutableMap.of("t", now.getMillis(), "dim2", "bar", "met1", 2.0) ) ); // Stop the firehose, this will drain out existing events. firehose.close(); Collection<DataSegment> publishedSegments = awaitSegments(); // Check metrics. 
Assert.assertEquals(2, task.getRowIngestionMeters().getProcessed()); Assert.assertEquals(1, task.getRowIngestionMeters().getThrownAway()); Assert.assertEquals(0, task.getRowIngestionMeters().getUnparseable()); // Do some queries. Assert.assertEquals(2, sumMetric(task, null, "rows").longValue()); Assert.assertEquals(2, sumMetric(task, new SelectorDimFilter("dim1t", "foofoo", null), "rows").longValue()); if (NullHandling.replaceWithDefault()) { Assert.assertEquals(0, sumMetric(task, new SelectorDimFilter("dim1t", "barbar", null), "metric1").longValue()); } else { Assert.assertNull(sumMetric(task, new SelectorDimFilter("dim1t", "barbar", null), "metric1")); } Assert.assertEquals(3, sumMetric(task, null, "met1").longValue()); awaitHandoffs(); for (DataSegment publishedSegment : publishedSegments) { Pair<Executor, Runnable> executorRunnablePair = handOffCallbacks.get( new SegmentDescriptor( publishedSegment.getInterval(), publishedSegment.getVersion(), publishedSegment.getShardSpec().getPartitionNum() ) ); Assert.assertNotNull( publishedSegment + " missing from handoff callbacks: " + handOffCallbacks, executorRunnablePair ); // Simulate handoff. executorRunnablePair.lhs.execute(executorRunnablePair.rhs); } handOffCallbacks.clear(); // Wait for the task to finish. final TaskStatus taskStatus = statusFuture.get(); Assert.assertEquals(TaskState.SUCCESS, taskStatus.getStatusCode()); } @Test(timeout = 60_000L) public void testReportParseExceptionsOnBadMetric() throws Exception { expectPublishedSegments(0); final AppenderatorDriverRealtimeIndexTask task = makeRealtimeTask(null, true); final ListenableFuture<TaskStatus> statusFuture = runTask(task); // Wait for firehose to show up, it starts off null. 
while (task.getFirehose() == null) {
  Thread.sleep(50);
}

final TestFirehose firehose = (TestFirehose) task.getFirehose();

// Rows 2 and 3 carry a non-numeric "met1" value ("foo") and cannot be parsed into the long-sum metric.
firehose.addRows(
    ImmutableList.of(
        ImmutableMap.of("t", 2000000L, "dim1", "foo", "met1", "1"),
        ImmutableMap.of("t", 3000000L, "dim1", "foo", "met1", "foo"),
        ImmutableMap.of("t", now.minus(new Period("P1D")).getMillis(), "dim1", "foo", "met1", "foo"),
        ImmutableMap.of("t", 4000000L, "dim2", "bar", "met1", 2.0)
    )
);

// Stop the firehose, this will drain out existing events.
firehose.close();

// Wait for the task to finish: it is expected to FAIL once the parse-exception limit is exceeded.
TaskStatus status = statusFuture.get();
Assert.assertTrue(status.getErrorMsg()
                        .contains("java.lang.RuntimeException: Max parse exceptions exceeded, terminating task..."));

IngestionStatsAndErrorsTaskReportData reportData = getTaskReportData();

// Only one saved unparseable event is expected (the two-arg makeRealtimeTask overload uses
// maxSavedParseExceptions = 1), so only the first offending row (t=3000000) shows up here.
Map<String, Object> expectedUnparseables = ImmutableMap.of(
    RowIngestionMeters.BUILD_SEGMENTS,
    Collections.singletonList(
        "Found unparseable columns in row: [MapBasedInputRow{timestamp=1970-01-01T00:50:00.000Z, event={t=3000000, dim1=foo, met1=foo}, dimensions=[dim1, dim2, dim1t, dimLong, dimFloat]}], exceptions: [Unable to parse value[foo] for field[met1],]"
    )
);
Assert.assertEquals(expectedUnparseables, reportData.getUnparseableEvents());
}

/**
 * With parse-exception reporting disabled, bad rows are tolerated: a null row is thrown away,
 * a bad metric value counts as processed-with-error, an unparseable row is counted as
 * unparseable, and the task still publishes its segment and succeeds.
 */
@Test(timeout = 60_000L)
public void testNoReportParseExceptions() throws Exception
{
  expectPublishedSegments(1);
  final AppenderatorDriverRealtimeIndexTask task = makeRealtimeTask(
      null,
      TransformSpec.NONE,
      false,
      0,
      true,
      null,
      1
  );
  final ListenableFuture<TaskStatus> statusFuture = runTask(task);

  // Wait for firehose to show up, it starts off null.
  while (task.getFirehose() == null) {
    Thread.sleep(50);
  }

  final TestFirehose firehose = (TestFirehose) task.getFirehose();

  firehose.addRows(
      Arrays.asList(
          // Good row- will be processed.
          ImmutableMap.of("t", now.getMillis(), "dim1", "foo", "met1", "1"),
          // Null row- will be thrown away.
          null,
          // Bad metric- will count as processed, but that particular metric won't update.
ImmutableMap.of("t", now.getMillis(), "dim1", "foo", "met1", "foo"), // Bad row- will be unparseable. ImmutableMap.of("dim1", "foo", "met1", 2.0, FAIL_DIM, "x"), // Good row- will be processed. ImmutableMap.of("t", now.getMillis(), "dim2", "bar", "met1", 2.0) ) ); // Stop the firehose, this will drain out existing events. firehose.close(); // Wait for publish. Collection<DataSegment> publishedSegments = awaitSegments(); DataSegment publishedSegment = Iterables.getOnlyElement(publishedSegments); // Check metrics. Assert.assertEquals(2, task.getRowIngestionMeters().getProcessed()); Assert.assertEquals(1, task.getRowIngestionMeters().getProcessedWithError()); Assert.assertEquals(0, task.getRowIngestionMeters().getThrownAway()); Assert.assertEquals(2, task.getRowIngestionMeters().getUnparseable()); // Do some queries. Assert.assertEquals(3, sumMetric(task, null, "rows").longValue()); Assert.assertEquals(3, sumMetric(task, null, "met1").longValue()); awaitHandoffs(); // Simulate handoff. for (Map.Entry<SegmentDescriptor, Pair<Executor, Runnable>> entry : handOffCallbacks.entrySet()) { final Pair<Executor, Runnable> executorRunnablePair = entry.getValue(); Assert.assertEquals( new SegmentDescriptor( publishedSegment.getInterval(), publishedSegment.getVersion(), publishedSegment.getShardSpec().getPartitionNum() ), entry.getKey() ); executorRunnablePair.lhs.execute(executorRunnablePair.rhs); } handOffCallbacks.clear(); Map<String, Object> expectedMetrics = ImmutableMap.of( RowIngestionMeters.BUILD_SEGMENTS, ImmutableMap.of( RowIngestionMeters.PROCESSED, 2, RowIngestionMeters.PROCESSED_WITH_ERROR, 1, RowIngestionMeters.UNPARSEABLE, 2, RowIngestionMeters.THROWN_AWAY, 0 ) ); // Wait for the task to finish. 
final TaskStatus taskStatus = statusFuture.get(); Assert.assertEquals(TaskState.SUCCESS, taskStatus.getStatusCode()); IngestionStatsAndErrorsTaskReportData reportData = getTaskReportData(); Assert.assertEquals(expectedMetrics, reportData.getRowStats()); } @Test(timeout = 60_000L) public void testMultipleParseExceptionsSuccess() throws Exception { expectPublishedSegments(1); final AppenderatorDriverRealtimeIndexTask task = makeRealtimeTask(null, TransformSpec.NONE, false, 0, true, 10, 10); final ListenableFuture<TaskStatus> statusFuture = runTask(task); // Wait for firehose to show up, it starts off null. while (task.getFirehose() == null) { Thread.sleep(50); } final TestFirehose firehose = (TestFirehose) task.getFirehose(); firehose.addRows( Arrays.asList( // Good row- will be processed. ImmutableMap.of("t", 1521251960729L, "dim1", "foo", "met1", "1"), // Null row- will be thrown away. null, // Bad metric- will count as processed, but that particular metric won't update. ImmutableMap.of("t", 1521251960729L, "dim1", "foo", "met1", "foo"), // Bad long dim- will count as processed, but bad dims will get default values ImmutableMap.of( "t", 1521251960729L, "dim1", "foo", "dimLong", "notnumber", "dimFloat", "notnumber", "met1", "foo" ), // Bad row- will be unparseable. ImmutableMap.of("dim1", "foo", "met1", 2.0, FAIL_DIM, "x"), // Good row- will be processed. ImmutableMap.of("t", 1521251960729L, "dim2", "bar", "met1", 2.0) ) ); // Stop the firehose, this will drain out existing events. firehose.close(); // Wait for publish. Collection<DataSegment> publishedSegments = awaitSegments(); DataSegment publishedSegment = Iterables.getOnlyElement(publishedSegments); // Check metrics. 
Assert.assertEquals(2, task.getRowIngestionMeters().getProcessed()); Assert.assertEquals(2, task.getRowIngestionMeters().getProcessedWithError()); Assert.assertEquals(0, task.getRowIngestionMeters().getThrownAway()); Assert.assertEquals(2, task.getRowIngestionMeters().getUnparseable()); // Do some queries. Assert.assertEquals(4, sumMetric(task, null, "rows").longValue()); Assert.assertEquals(3, sumMetric(task, null, "met1").longValue()); awaitHandoffs(); // Simulate handoff. for (Map.Entry<SegmentDescriptor, Pair<Executor, Runnable>> entry : handOffCallbacks.entrySet()) { final Pair<Executor, Runnable> executorRunnablePair = entry.getValue(); Assert.assertEquals( new SegmentDescriptor( publishedSegment.getInterval(), publishedSegment.getVersion(), publishedSegment.getShardSpec().getPartitionNum() ), entry.getKey() ); executorRunnablePair.lhs.execute(executorRunnablePair.rhs); } handOffCallbacks.clear(); Map<String, Object> expectedMetrics = ImmutableMap.of( RowIngestionMeters.BUILD_SEGMENTS, ImmutableMap.of( RowIngestionMeters.PROCESSED, 2, RowIngestionMeters.PROCESSED_WITH_ERROR, 2, RowIngestionMeters.UNPARSEABLE, 2, RowIngestionMeters.THROWN_AWAY, 0 ) ); // Wait for the task to finish. final TaskStatus taskStatus = statusFuture.get(); Assert.assertEquals(TaskState.SUCCESS, taskStatus.getStatusCode()); IngestionStatsAndErrorsTaskReportData reportData = getTaskReportData(); Assert.assertEquals(expectedMetrics, reportData.getRowStats()); Map<String, Object> expectedUnparseables = ImmutableMap.of( RowIngestionMeters.BUILD_SEGMENTS, Arrays.asList( "Unparseable timestamp found! 
Event: {dim1=foo, met1=2.0, __fail__=x}", "Found unparseable columns in row: [MapBasedInputRow{timestamp=2018-03-17T01:59:20.729Z, event={t=1521251960729, dim1=foo, dimLong=notnumber, dimFloat=notnumber, met1=foo}, dimensions=[dim1, dim2, dim1t, dimLong, dimFloat]}], exceptions: [could not convert value [notnumber] to long,could not convert value [notnumber] to float,Unable to parse value[foo] for field[met1],]", "Found unparseable columns in row: [MapBasedInputRow{timestamp=2018-03-17T01:59:20.729Z, event={t=1521251960729, dim1=foo, met1=foo}, dimensions=[dim1, dim2, dim1t, dimLong, dimFloat]}], exceptions: [Unable to parse value[foo] for field[met1],]", "Unparseable timestamp found! Event: null" ) ); Assert.assertEquals(expectedUnparseables, reportData.getUnparseableEvents()); Assert.assertEquals(IngestionState.COMPLETED, reportData.getIngestionState()); } @Test(timeout = 60_000L) public void testMultipleParseExceptionsFailure() throws Exception { expectPublishedSegments(1); final AppenderatorDriverRealtimeIndexTask task = makeRealtimeTask(null, TransformSpec.NONE, false, 0, true, 3, 10); final ListenableFuture<TaskStatus> statusFuture = runTask(task); // Wait for firehose to show up, it starts off null. while (task.getFirehose() == null) { Thread.sleep(50); } final TestFirehose firehose = (TestFirehose) task.getFirehose(); firehose.addRows( Arrays.asList( // Good row- will be processed. ImmutableMap.of("t", 1521251960729L, "dim1", "foo", "met1", "1"), // Null row- will be thrown away. null, // Bad metric- will count as processed, but that particular metric won't update. ImmutableMap.of("t", 1521251960729L, "dim1", "foo", "met1", "foo"), // Bad long dim- will count as processed, but bad dims will get default values ImmutableMap.of( "t", 1521251960729L, "dim1", "foo", "dimLong", "notnumber", "dimFloat", "notnumber", "met1", "foo" ), // Bad row- will be unparseable. ImmutableMap.of("dim1", "foo", "met1", 2.0, FAIL_DIM, "x"), // Good row- will be processed. 
ImmutableMap.of("t", 1521251960729L, "dim2", "bar", "met1", 2.0) ) ); // Stop the firehose, this will drain out existing events. firehose.close(); // Wait for the task to finish. final TaskStatus taskStatus = statusFuture.get(); Assert.assertEquals(TaskState.FAILED, taskStatus.getStatusCode()); Assert.assertTrue(taskStatus.getErrorMsg().contains("Max parse exceptions exceeded, terminating task...")); IngestionStatsAndErrorsTaskReportData reportData = getTaskReportData(); Map<String, Object> expectedMetrics = ImmutableMap.of( RowIngestionMeters.BUILD_SEGMENTS, ImmutableMap.of( RowIngestionMeters.PROCESSED, 1, RowIngestionMeters.PROCESSED_WITH_ERROR, 2, RowIngestionMeters.UNPARSEABLE, 2, RowIngestionMeters.THROWN_AWAY, 0 ) ); Assert.assertEquals(expectedMetrics, reportData.getRowStats()); Map<String, Object> expectedUnparseables = ImmutableMap.of( RowIngestionMeters.BUILD_SEGMENTS, Arrays.asList( "Unparseable timestamp found! Event: {dim1=foo, met1=2.0, __fail__=x}", "Found unparseable columns in row: [MapBasedInputRow{timestamp=2018-03-17T01:59:20.729Z, event={t=1521251960729, dim1=foo, dimLong=notnumber, dimFloat=notnumber, met1=foo}, dimensions=[dim1, dim2, dim1t, dimLong, dimFloat]}], exceptions: [could not convert value [notnumber] to long,could not convert value [notnumber] to float,Unable to parse value[foo] for field[met1],]", "Found unparseable columns in row: [MapBasedInputRow{timestamp=2018-03-17T01:59:20.729Z, event={t=1521251960729, dim1=foo, met1=foo}, dimensions=[dim1, dim2, dim1t, dimLong, dimFloat]}], exceptions: [Unable to parse value[foo] for field[met1],]", "Unparseable timestamp found! 
Event: null" ) ); Assert.assertEquals(expectedUnparseables, reportData.getUnparseableEvents()); Assert.assertEquals(IngestionState.BUILD_SEGMENTS, reportData.getIngestionState()); } @Test(timeout = 60_000L) public void testRestore() throws Exception { expectPublishedSegments(0); final AppenderatorDriverRealtimeIndexTask task1 = makeRealtimeTask(null); final DataSegment publishedSegment; // First run: { final ListenableFuture<TaskStatus> statusFuture = runTask(task1); // Wait for firehose to show up, it starts off null. while (task1.getFirehose() == null) { Thread.sleep(50); } final TestFirehose firehose = (TestFirehose) task1.getFirehose(); firehose.addRows( ImmutableList.of( ImmutableMap.of("t", now.getMillis(), "dim1", "foo") ) ); // Trigger graceful shutdown. task1.stopGracefully(taskToolboxFactory.build(task1).getConfig()); // Wait for the task to finish. The status doesn't really matter, but we'll check it anyway. final TaskStatus taskStatus = statusFuture.get(); Assert.assertEquals(TaskState.SUCCESS, taskStatus.getStatusCode()); // Nothing should be published. Assert.assertTrue(publishedSegments.isEmpty()); } // Second run: { expectPublishedSegments(1); final AppenderatorDriverRealtimeIndexTask task2 = makeRealtimeTask(task1.getId()); final ListenableFuture<TaskStatus> statusFuture = runTask(task2); // Wait for firehose to show up, it starts off null. while (task2.getFirehose() == null) { Thread.sleep(50); } // Do a query, at this point the previous data should be loaded. Assert.assertEquals(1, sumMetric(task2, null, "rows").longValue()); final TestFirehose firehose = (TestFirehose) task2.getFirehose(); firehose.addRows( ImmutableList.of( ImmutableMap.of("t", now.getMillis(), "dim2", "bar") ) ); // Stop the firehose, this will drain out existing events. firehose.close(); Collection<DataSegment> publishedSegments = awaitSegments(); publishedSegment = Iterables.getOnlyElement(publishedSegments); // Do a query. 
Assert.assertEquals(2, sumMetric(task2, null, "rows").longValue()); awaitHandoffs(); // Simulate handoff. for (Map.Entry<SegmentDescriptor, Pair<Executor, Runnable>> entry : handOffCallbacks.entrySet()) { final Pair<Executor, Runnable> executorRunnablePair = entry.getValue(); Assert.assertEquals( new SegmentDescriptor( publishedSegment.getInterval(), publishedSegment.getVersion(), publishedSegment.getShardSpec().getPartitionNum() ), entry.getKey() ); executorRunnablePair.lhs.execute(executorRunnablePair.rhs); } handOffCallbacks.clear(); // Wait for the task to finish. final TaskStatus taskStatus = statusFuture.get(); Assert.assertEquals(TaskState.SUCCESS, taskStatus.getStatusCode()); } } @Test(timeout = 60_000L) public void testRestoreAfterHandoffAttemptDuringShutdown() throws Exception { final AppenderatorDriverRealtimeIndexTask task1 = makeRealtimeTask(null); final DataSegment publishedSegment; // First run: { expectPublishedSegments(1); final ListenableFuture<TaskStatus> statusFuture = runTask(task1); // Wait for firehose to show up, it starts off null. while (task1.getFirehose() == null) { Thread.sleep(50); } final TestFirehose firehose = (TestFirehose) task1.getFirehose(); firehose.addRows( ImmutableList.of( ImmutableMap.of("t", now.getMillis(), "dim1", "foo") ) ); // Stop the firehose, this will trigger a finishJob. firehose.close(); Collection<DataSegment> publishedSegments = awaitSegments(); publishedSegment = Iterables.getOnlyElement(publishedSegments); // Do a query. Assert.assertEquals(1, sumMetric(task1, null, "rows").longValue()); // Trigger graceful shutdown. task1.stopGracefully(taskToolboxFactory.build(task1).getConfig()); // Wait for the task to finish. The status doesn't really matter. 
// (end of first run of testRestoreAfterHandoffAttemptDuringShutdown)
// Poll instead of get(): only completion matters here, the status itself does not.
while (!statusFuture.isDone()) {
  Thread.sleep(50);
}
}

// Second run:
{
  expectPublishedSegments(1);
  final AppenderatorDriverRealtimeIndexTask task2 = makeRealtimeTask(task1.getId());
  final ListenableFuture<TaskStatus> statusFuture = runTask(task2);

  // Wait for firehose to show up, it starts off null.
  while (task2.getFirehose() == null) {
    Thread.sleep(50);
  }

  // Stop the firehose again, this will start another handoff.
  final TestFirehose firehose = (TestFirehose) task2.getFirehose();

  // Stop the firehose, this will trigger a finishJob.
  firehose.close();

  awaitHandoffs();

  // Simulate handoff: the restored task must re-register a callback for the segment that was
  // already published by the first run.
  for (Map.Entry<SegmentDescriptor, Pair<Executor, Runnable>> entry : handOffCallbacks.entrySet()) {
    final Pair<Executor, Runnable> executorRunnablePair = entry.getValue();
    Assert.assertEquals(
        new SegmentDescriptor(
            publishedSegment.getInterval(),
            publishedSegment.getVersion(),
            publishedSegment.getShardSpec().getPartitionNum()
        ),
        entry.getKey()
    );
    executorRunnablePair.lhs.execute(executorRunnablePair.rhs);
  }
  handOffCallbacks.clear();

  // Wait for the task to finish.
  final TaskStatus taskStatus = statusFuture.get();
  Assert.assertEquals(TaskState.SUCCESS, taskStatus.getStatusCode());
}
}

/**
 * Restore with corrupted on-disk state: the first run persists a row via graceful shutdown,
 * the test then overwrites the persisted smoosh file, and the second (restored) run with the
 * same task id must fail while reporting zero ingested rows.
 */
@Test(timeout = 60_000L)
public void testRestoreCorruptData() throws Exception
{
  final AppenderatorDriverRealtimeIndexTask task1 = makeRealtimeTask(null);

  // First run:
  {
    expectPublishedSegments(0);
    final ListenableFuture<TaskStatus> statusFuture = runTask(task1);

    // Wait for firehose to show up, it starts off null.
    while (task1.getFirehose() == null) {
      Thread.sleep(50);
    }

    final TestFirehose firehose = (TestFirehose) task1.getFirehose();

    firehose.addRows(
        ImmutableList.of(
            ImmutableMap.of("t", now.getMillis(), "dim1", "foo")
        )
    );

    // Trigger graceful shutdown.
    task1.stopGracefully(taskToolboxFactory.build(task1).getConfig());

    // Wait for the task to finish. The status doesn't really matter, but we'll check it anyway.
final TaskStatus taskStatus = statusFuture.get();
Assert.assertEquals(TaskState.SUCCESS, taskStatus.getStatusCode());

// Nothing should be published.
Assert.assertTrue(publishedSegments.isEmpty());
}

// Locate the persisted segment data written by the graceful shutdown above.
Optional<File> optional = FileUtils.listFiles(baseDir, null, true).stream()
                                   .filter(f -> f.getName().equals("00000.smoosh"))
                                   .findFirst();
Assert.assertTrue("Could not find smoosh file", optional.isPresent());

// Corrupt the data:
final File smooshFile = optional.get();
Files.write(smooshFile.toPath(), StringUtils.toUtf8("oops!"));

// Second run:
{
  expectPublishedSegments(0);
  final AppenderatorDriverRealtimeIndexTask task2 = makeRealtimeTask(task1.getId());
  final ListenableFuture<TaskStatus> statusFuture = runTask(task2);

  // Wait for the task to finish.
  TaskStatus status = statusFuture.get();

  // Restore fails before any row is ingested, so every meter stays at zero.
  Map<String, Object> expectedMetrics = ImmutableMap.of(
      RowIngestionMeters.BUILD_SEGMENTS,
      ImmutableMap.of(
          RowIngestionMeters.PROCESSED_WITH_ERROR, 0,
          RowIngestionMeters.PROCESSED, 0,
          RowIngestionMeters.UNPARSEABLE, 0,
          RowIngestionMeters.THROWN_AWAY, 0
      )
  );

  IngestionStatsAndErrorsTaskReportData reportData = getTaskReportData();
  Assert.assertEquals(expectedMetrics, reportData.getRowStats());

  // The corrupted smoosh file surfaces as an IllegalArgumentException out of java.nio.Buffer;
  // the optional "java.base/" frame prefix keeps this working on both Java 8 and 9+ stack traces.
  Pattern errorPattern = Pattern.compile(
      "(?s)java\\.lang\\.IllegalArgumentException.*\n"
      + "\tat (java\\.base/)?java\\.nio\\.Buffer\\..*"
  );
  Assert.assertTrue(errorPattern.matcher(status.getErrorMsg()).matches());
}
}

/**
 * Stopping a task before it ever starts must still let the subsequent run() exit with SUCCESS
 * and publish nothing.
 */
@Test(timeout = 60_000L)
public void testStopBeforeStarting() throws Exception
{
  expectPublishedSegments(0);
  final AppenderatorDriverRealtimeIndexTask task1 = makeRealtimeTask(null);

  task1.stopGracefully(taskToolboxFactory.build(task1).getConfig());
  final ListenableFuture<TaskStatus> statusFuture = runTask(task1);

  // Wait for the task to finish.
final TaskStatus taskStatus = statusFuture.get();
Assert.assertEquals(TaskState.SUCCESS, taskStatus.getStatusCode());
}

/**
 * Registers the task in storage, syncs the lockbox, and runs it asynchronously on the test
 * executor. Returns a future that completes with the task's final status.
 */
private ListenableFuture<TaskStatus> runTask(final Task task)
{
  try {
    taskStorage.insert(task, TaskStatus.running(task.getId()));
  }
  catch (EntryExistsException e) {
    // suppress — restore-style tests run a second task with the same id, so the entry may
    // already exist from the first run.
  }
  taskLockbox.syncFromStorage();
  final TaskToolbox toolbox = taskToolboxFactory.build(task);
  return taskExec.submit(
      () -> {
        try {
          if (task.isReady(toolbox.getTaskActionClient())) {
            return task.run(toolbox);
          } else {
            throw new ISE("Task is not ready");
          }
        }
        catch (Exception e) {
          log.warn(e, "Task failed");
          throw e;
        }
      }
  );
}

/** Default task: no transform, reportParseExceptions=true, maxParseExceptions=0, maxSavedParseExceptions=1. */
private AppenderatorDriverRealtimeIndexTask makeRealtimeTask(final String taskId)
{
  return makeRealtimeTask(
      taskId,
      TransformSpec.NONE,
      true,
      0,
      true,
      0,
      1
  );
}

/** Variant with explicit segment-size limits, otherwise the same defaults as makeRealtimeTask(taskId). */
private AppenderatorDriverRealtimeIndexTask makeRealtimeTask(
    final String taskId,
    final Integer maxRowsPerSegment,
    final Long maxTotalRows
)
{
  return makeRealtimeTask(
      taskId,
      TransformSpec.NONE,
      true,
      0,
      true,
      0,
      1,
      maxRowsPerSegment,
      maxTotalRows
  );
}

/** Variant toggling parse-exception reporting; maxParseExceptions is left null (unlimited). */
private AppenderatorDriverRealtimeIndexTask makeRealtimeTask(final String taskId, boolean reportParseExceptions)
{
  return makeRealtimeTask(
      taskId,
      TransformSpec.NONE,
      reportParseExceptions,
      0,
      true,
      null,
      1
  );
}

/** Delegates to the full factory with maxRowsPerSegment=1000 and no maxTotalRows limit. */
private AppenderatorDriverRealtimeIndexTask makeRealtimeTask(
    final String taskId,
    final TransformSpec transformSpec,
    final boolean reportParseExceptions,
    final long handoffTimeout,
    final Boolean logParseExceptions,
    final Integer maxParseExceptions,
    final Integer maxSavedParseExceptions
)
{
  return makeRealtimeTask(
      taskId,
      transformSpec,
      reportParseExceptions,
      handoffTimeout,
      logParseExceptions,
      maxParseExceptions,
      maxSavedParseExceptions,
      1000,
      null
  );
}

/**
 * Full factory: builds the DataSchema (dims dim1/dim2/dim1t/dimLong/dimFloat, metrics
 * "rows"/"met1"), tuning config, and the task itself. (continues on the next source line)
 */
private AppenderatorDriverRealtimeIndexTask makeRealtimeTask(
    final String taskId,
    final TransformSpec transformSpec,
    final boolean reportParseExceptions,
    final long handoffTimeout,
    final Boolean logParseExceptions,
    final Integer maxParseExceptions,
    final Integer
maxSavedParseExceptions, final Integer maxRowsPerSegment, final Long maxTotalRows ) { ObjectMapper objectMapper = new DefaultObjectMapper(); DataSchema dataSchema = new DataSchema( "test_ds", TestHelper.makeJsonMapper().convertValue( new MapInputRowParser( new TimeAndDimsParseSpec( new TimestampSpec("t", "auto", null), new DimensionsSpec( ImmutableList.of( new StringDimensionSchema("dim1"), new StringDimensionSchema("dim2"), new StringDimensionSchema("dim1t"), new LongDimensionSchema("dimLong"), new FloatDimensionSchema("dimFloat") ), null, null ) ) ), JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT ), new AggregatorFactory[]{new CountAggregatorFactory("rows"), new LongSumAggregatorFactory("met1", "met1")}, new UniformGranularitySpec(Granularities.DAY, Granularities.NONE, null), transformSpec, objectMapper ); RealtimeIOConfig realtimeIOConfig = new RealtimeIOConfig( new TestFirehoseFactory(), null, null ); RealtimeAppenderatorTuningConfig tuningConfig = new RealtimeAppenderatorTuningConfig( 1000, null, maxRowsPerSegment, maxTotalRows, null, null, null, null, null, reportParseExceptions, handoffTimeout, null, null, logParseExceptions, maxParseExceptions, maxSavedParseExceptions ); return new AppenderatorDriverRealtimeIndexTask( taskId, null, new RealtimeAppenderatorIngestionSpec(dataSchema, realtimeIOConfig, tuningConfig), null, null, AuthTestUtils.TEST_AUTHORIZER_MAPPER, rowIngestionMetersFactory ) { @Override protected boolean isFirehoseDrainableByClosing(FirehoseFactory firehoseFactory) { return true; } }; } private void expectPublishedSegments(int count) { segmentLatch = new CountDownLatch(count); handoffLatch = new CountDownLatch(count); } private Collection<DataSegment> awaitSegments() throws InterruptedException { Assert.assertTrue( "Timed out waiting for segments to be published", segmentLatch.await(1, TimeUnit.MINUTES) ); return publishedSegments; } private void awaitHandoffs() throws InterruptedException { Assert.assertTrue( "Timed out waiting for segments 
to be handed off", handoffLatch.await(1, TimeUnit.MINUTES) ); } private void makeToolboxFactory(final File directory) { taskStorage = new HeapMemoryTaskStorage(new TaskStorageConfig(null)); taskLockbox = new TaskLockbox(taskStorage); publishedSegments = new CopyOnWriteArrayList<>(); ObjectMapper mapper = new DefaultObjectMapper(); mapper.registerSubtypes(LinearShardSpec.class); mapper.registerSubtypes(NumberedShardSpec.class); IndexerSQLMetadataStorageCoordinator mdc = new IndexerSQLMetadataStorageCoordinator( mapper, derbyConnectorRule.metadataTablesConfigSupplier().get(), derbyConnectorRule.getConnector() ) { @Override public Set<DataSegment> announceHistoricalSegments(Set<DataSegment> segments) throws IOException { Set<DataSegment> result = super.announceHistoricalSegments(segments); Assert.assertFalse( "Segment latch not initialized, did you forget to call expectPublishSegments?", segmentLatch == null ); publishedSegments.addAll(result); segments.forEach(s -> segmentLatch.countDown()); return result; } @Override public SegmentPublishResult announceHistoricalSegments( Set<DataSegment> segments, DataSourceMetadata startMetadata, DataSourceMetadata endMetadata ) throws IOException { SegmentPublishResult result = super.announceHistoricalSegments(segments, startMetadata, endMetadata); Assert.assertFalse( "Segment latch not initialized, did you forget to call expectPublishSegments?", segmentLatch == null ); publishedSegments.addAll(result.getSegments()); result.getSegments().forEach(s -> segmentLatch.countDown()); return result; } }; final TaskConfig taskConfig = new TaskConfig(directory.getPath(), null, null, 50000, null, true, null, null); final TaskActionToolbox taskActionToolbox = new TaskActionToolbox( taskLockbox, taskStorage, mdc, emitter, EasyMock.createMock(SupervisorManager.class) ); final TaskActionClientFactory taskActionClientFactory = new LocalTaskActionClientFactory( taskStorage, taskActionToolbox, new TaskAuditLogConfig(false) ); 
IntervalChunkingQueryRunnerDecorator queryRunnerDecorator = new IntervalChunkingQueryRunnerDecorator( null, null, null ) { @Override public <T> QueryRunner<T> decorate(QueryRunner<T> delegate, QueryToolChest<T, ? extends Query<T>> toolChest) { return delegate; } }; final QueryRunnerFactoryConglomerate conglomerate = new DefaultQueryRunnerFactoryConglomerate( ImmutableMap.of( TimeseriesQuery.class, new TimeseriesQueryRunnerFactory( new TimeseriesQueryQueryToolChest(queryRunnerDecorator), new TimeseriesQueryEngine(), (query, future) -> { // do nothing } ) ) ); handOffCallbacks = new ConcurrentHashMap<>(); final SegmentHandoffNotifierFactory handoffNotifierFactory = dataSource -> new SegmentHandoffNotifier() { @Override public boolean registerSegmentHandoffCallback( SegmentDescriptor descriptor, Executor exec, Runnable handOffRunnable ) { handOffCallbacks.put(descriptor, new Pair<>(exec, handOffRunnable)); handoffLatch.countDown(); return true; } @Override public void start() { //Noop } @Override public void close() { //Noop } }; final TestUtils testUtils = new TestUtils(); rowIngestionMetersFactory = testUtils.getRowIngestionMetersFactory(); SegmentLoaderConfig segmentLoaderConfig = new SegmentLoaderConfig() { @Override public List<StorageLocationConfig> getLocations() { return new ArrayList<>(); } }; taskToolboxFactory = new TaskToolboxFactory( taskConfig, taskActionClientFactory, emitter, new TestDataSegmentPusher(), new TestDataSegmentKiller(), null, // DataSegmentMover null, // DataSegmentArchiver new TestDataSegmentAnnouncer(), EasyMock.createNiceMock(DataSegmentServerAnnouncer.class), handoffNotifierFactory, () -> conglomerate, Execs.directExecutor(), // queryExecutorService EasyMock.createMock(MonitorScheduler.class), new SegmentLoaderFactory(null, testUtils.getTestObjectMapper()), testUtils.getTestObjectMapper(), testUtils.getTestIndexIO(), MapCache.create(1024), new CacheConfig(), new CachePopulatorStats(), testUtils.getTestIndexMergerV9(), 
// (tail of makeToolboxFactory's TaskToolboxFactory construction)
EasyMock.createNiceMock(DruidNodeAnnouncer.class),
EasyMock.createNiceMock(DruidNode.class),
new LookupNodeService("tier"),
new DataNodeService("tier", 1000, ServerType.INDEXER_EXECUTOR, 0),
new TaskReportFileWriter(reportsFile)
);
}

/**
 * Runs a longSum timeseries query for {@code metric} over the interval 2000/3000 against the
 * task's own query runner, optionally restricted by {@code filter}.
 *
 * Returns 0 when the query yields no result rows; otherwise the summed value, which may be
 * null (hence {@code @Nullable}) when the result carries no value for the metric.
 */
@Nullable
public Long sumMetric(final Task task, final DimFilter filter, final String metric)
{
  // Do a query.
  TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
                                .dataSource("test_ds")
                                .filters(filter)
                                .aggregators(
                                    ImmutableList.of(
                                        new LongSumAggregatorFactory(metric, metric)
                                    )
                                ).granularity(Granularities.ALL)
                                .intervals("2000/3000")
                                .build();

  List<Result<TimeseriesResultValue>> results =
      task.getQueryRunner(query).run(QueryPlus.wrap(query), ImmutableMap.of()).toList();

  if (results.isEmpty()) {
    return 0L;
  } else {
    return results.get(0).getValue().getLongMetric(metric);
  }
}

/**
 * Deserializes the task report file written by the TaskReportFileWriter configured in
 * makeToolboxFactory and extracts the ingestion stats/errors payload.
 */
private IngestionStatsAndErrorsTaskReportData getTaskReportData() throws IOException
{
  Map<String, TaskReport> taskReports = objectMapper.readValue(
      reportsFile,
      new TypeReference<Map<String, TaskReport>>()
      {
      }
  );
  return IngestionStatsAndErrorsTaskReportData.getPayloadFromTaskReports(
      taskReports
  );
}
}
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.rekognition.model; import java.io.Serializable; import javax.annotation.Generated; @Generated("com.amazonaws:aws-java-sdk-code-generator") public class RecognizeCelebritiesResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable { /** * <p> * Details about each celebrity found in the image. Amazon Rekognition can detect a maximum of 64 celebrities in an * image. Each celebrity object includes the following attributes: <code>Face</code>, <code>Confidence</code>, * <code>Emotions</code>, <code>Landmarks</code>, <code>Pose</code>, <code>Quality</code>, <code>Smile</code>, * <code>Id</code>, <code>KnownGender</code>, <code>MatchConfidence</code>, <code>Name</code>, <code>Urls</code>. * </p> */ private java.util.List<Celebrity> celebrityFaces; /** * <p> * Details about each unrecognized face in the image. * </p> */ private java.util.List<ComparedFace> unrecognizedFaces; /** * <note> * <p> * Support for estimating image orientation using the the OrientationCorrection field has ceased as of August 2021. * Any returned values for this field included in an API response will always be NULL. * </p> * </note> * <p> * The orientation of the input image (counterclockwise direction). If your application displays the image, you can * use this value to correct the orientation. 
The bounding box coordinates returned in <code>CelebrityFaces</code> * and <code>UnrecognizedFaces</code> represent face locations before the image orientation is corrected. * </p> * <note> * <p> * If the input image is in .jpeg format, it might contain exchangeable image (Exif) metadata that includes the * image's orientation. If so, and the Exif metadata for the input image populates the orientation field, the value * of <code>OrientationCorrection</code> is null. The <code>CelebrityFaces</code> and <code>UnrecognizedFaces</code> * bounding box coordinates represent face locations after Exif metadata is used to correct the image orientation. * Images in .png format don't contain Exif metadata. * </p> * </note> */ private String orientationCorrection; /** * <p> * Details about each celebrity found in the image. Amazon Rekognition can detect a maximum of 64 celebrities in an * image. Each celebrity object includes the following attributes: <code>Face</code>, <code>Confidence</code>, * <code>Emotions</code>, <code>Landmarks</code>, <code>Pose</code>, <code>Quality</code>, <code>Smile</code>, * <code>Id</code>, <code>KnownGender</code>, <code>MatchConfidence</code>, <code>Name</code>, <code>Urls</code>. * </p> * * @return Details about each celebrity found in the image. Amazon Rekognition can detect a maximum of 64 * celebrities in an image. Each celebrity object includes the following attributes: <code>Face</code>, * <code>Confidence</code>, <code>Emotions</code>, <code>Landmarks</code>, <code>Pose</code>, * <code>Quality</code>, <code>Smile</code>, <code>Id</code>, <code>KnownGender</code>, * <code>MatchConfidence</code>, <code>Name</code>, <code>Urls</code>. */ public java.util.List<Celebrity> getCelebrityFaces() { return celebrityFaces; } /** * <p> * Details about each celebrity found in the image. Amazon Rekognition can detect a maximum of 64 celebrities in an * image. 
Each celebrity object includes the following attributes: <code>Face</code>, <code>Confidence</code>, * <code>Emotions</code>, <code>Landmarks</code>, <code>Pose</code>, <code>Quality</code>, <code>Smile</code>, * <code>Id</code>, <code>KnownGender</code>, <code>MatchConfidence</code>, <code>Name</code>, <code>Urls</code>. * </p> * * @param celebrityFaces * Details about each celebrity found in the image. Amazon Rekognition can detect a maximum of 64 celebrities * in an image. Each celebrity object includes the following attributes: <code>Face</code>, * <code>Confidence</code>, <code>Emotions</code>, <code>Landmarks</code>, <code>Pose</code>, * <code>Quality</code>, <code>Smile</code>, <code>Id</code>, <code>KnownGender</code>, * <code>MatchConfidence</code>, <code>Name</code>, <code>Urls</code>. */ public void setCelebrityFaces(java.util.Collection<Celebrity> celebrityFaces) { if (celebrityFaces == null) { this.celebrityFaces = null; return; } this.celebrityFaces = new java.util.ArrayList<Celebrity>(celebrityFaces); } /** * <p> * Details about each celebrity found in the image. Amazon Rekognition can detect a maximum of 64 celebrities in an * image. Each celebrity object includes the following attributes: <code>Face</code>, <code>Confidence</code>, * <code>Emotions</code>, <code>Landmarks</code>, <code>Pose</code>, <code>Quality</code>, <code>Smile</code>, * <code>Id</code>, <code>KnownGender</code>, <code>MatchConfidence</code>, <code>Name</code>, <code>Urls</code>. * </p> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if any). Use * {@link #setCelebrityFaces(java.util.Collection)} or {@link #withCelebrityFaces(java.util.Collection)} if you want * to override the existing values. * </p> * * @param celebrityFaces * Details about each celebrity found in the image. Amazon Rekognition can detect a maximum of 64 celebrities * in an image. 
Each celebrity object includes the following attributes: <code>Face</code>, * <code>Confidence</code>, <code>Emotions</code>, <code>Landmarks</code>, <code>Pose</code>, * <code>Quality</code>, <code>Smile</code>, <code>Id</code>, <code>KnownGender</code>, * <code>MatchConfidence</code>, <code>Name</code>, <code>Urls</code>. * @return Returns a reference to this object so that method calls can be chained together. */ public RecognizeCelebritiesResult withCelebrityFaces(Celebrity... celebrityFaces) { if (this.celebrityFaces == null) { setCelebrityFaces(new java.util.ArrayList<Celebrity>(celebrityFaces.length)); } for (Celebrity ele : celebrityFaces) { this.celebrityFaces.add(ele); } return this; } /** * <p> * Details about each celebrity found in the image. Amazon Rekognition can detect a maximum of 64 celebrities in an * image. Each celebrity object includes the following attributes: <code>Face</code>, <code>Confidence</code>, * <code>Emotions</code>, <code>Landmarks</code>, <code>Pose</code>, <code>Quality</code>, <code>Smile</code>, * <code>Id</code>, <code>KnownGender</code>, <code>MatchConfidence</code>, <code>Name</code>, <code>Urls</code>. * </p> * * @param celebrityFaces * Details about each celebrity found in the image. Amazon Rekognition can detect a maximum of 64 celebrities * in an image. Each celebrity object includes the following attributes: <code>Face</code>, * <code>Confidence</code>, <code>Emotions</code>, <code>Landmarks</code>, <code>Pose</code>, * <code>Quality</code>, <code>Smile</code>, <code>Id</code>, <code>KnownGender</code>, * <code>MatchConfidence</code>, <code>Name</code>, <code>Urls</code>. * @return Returns a reference to this object so that method calls can be chained together. */ public RecognizeCelebritiesResult withCelebrityFaces(java.util.Collection<Celebrity> celebrityFaces) { setCelebrityFaces(celebrityFaces); return this; } /** * <p> * Details about each unrecognized face in the image. 
* </p> * * @return Details about each unrecognized face in the image. */ public java.util.List<ComparedFace> getUnrecognizedFaces() { return unrecognizedFaces; } /** * <p> * Details about each unrecognized face in the image. * </p> * * @param unrecognizedFaces * Details about each unrecognized face in the image. */ public void setUnrecognizedFaces(java.util.Collection<ComparedFace> unrecognizedFaces) { if (unrecognizedFaces == null) { this.unrecognizedFaces = null; return; } this.unrecognizedFaces = new java.util.ArrayList<ComparedFace>(unrecognizedFaces); } /** * <p> * Details about each unrecognized face in the image. * </p> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if any). Use * {@link #setUnrecognizedFaces(java.util.Collection)} or {@link #withUnrecognizedFaces(java.util.Collection)} if * you want to override the existing values. * </p> * * @param unrecognizedFaces * Details about each unrecognized face in the image. * @return Returns a reference to this object so that method calls can be chained together. */ public RecognizeCelebritiesResult withUnrecognizedFaces(ComparedFace... unrecognizedFaces) { if (this.unrecognizedFaces == null) { setUnrecognizedFaces(new java.util.ArrayList<ComparedFace>(unrecognizedFaces.length)); } for (ComparedFace ele : unrecognizedFaces) { this.unrecognizedFaces.add(ele); } return this; } /** * <p> * Details about each unrecognized face in the image. * </p> * * @param unrecognizedFaces * Details about each unrecognized face in the image. * @return Returns a reference to this object so that method calls can be chained together. */ public RecognizeCelebritiesResult withUnrecognizedFaces(java.util.Collection<ComparedFace> unrecognizedFaces) { setUnrecognizedFaces(unrecognizedFaces); return this; } /** * <note> * <p> * Support for estimating image orientation using the the OrientationCorrection field has ceased as of August 2021. 
* Any returned values for this field included in an API response will always be NULL. * </p> * </note> * <p> * The orientation of the input image (counterclockwise direction). If your application displays the image, you can * use this value to correct the orientation. The bounding box coordinates returned in <code>CelebrityFaces</code> * and <code>UnrecognizedFaces</code> represent face locations before the image orientation is corrected. * </p> * <note> * <p> * If the input image is in .jpeg format, it might contain exchangeable image (Exif) metadata that includes the * image's orientation. If so, and the Exif metadata for the input image populates the orientation field, the value * of <code>OrientationCorrection</code> is null. The <code>CelebrityFaces</code> and <code>UnrecognizedFaces</code> * bounding box coordinates represent face locations after Exif metadata is used to correct the image orientation. * Images in .png format don't contain Exif metadata. * </p> * </note> * * @param orientationCorrection * <p> * Support for estimating image orientation using the the OrientationCorrection field has ceased as of August * 2021. Any returned values for this field included in an API response will always be NULL. * </p> * </note> * <p> * The orientation of the input image (counterclockwise direction). If your application displays the image, * you can use this value to correct the orientation. The bounding box coordinates returned in * <code>CelebrityFaces</code> and <code>UnrecognizedFaces</code> represent face locations before the image * orientation is corrected. * </p> * <note> * <p> * If the input image is in .jpeg format, it might contain exchangeable image (Exif) metadata that includes * the image's orientation. If so, and the Exif metadata for the input image populates the orientation field, * the value of <code>OrientationCorrection</code> is null. 
The <code>CelebrityFaces</code> and * <code>UnrecognizedFaces</code> bounding box coordinates represent face locations after Exif metadata is * used to correct the image orientation. Images in .png format don't contain Exif metadata. * </p> * @see OrientationCorrection */ public void setOrientationCorrection(String orientationCorrection) { this.orientationCorrection = orientationCorrection; } /** * <note> * <p> * Support for estimating image orientation using the the OrientationCorrection field has ceased as of August 2021. * Any returned values for this field included in an API response will always be NULL. * </p> * </note> * <p> * The orientation of the input image (counterclockwise direction). If your application displays the image, you can * use this value to correct the orientation. The bounding box coordinates returned in <code>CelebrityFaces</code> * and <code>UnrecognizedFaces</code> represent face locations before the image orientation is corrected. * </p> * <note> * <p> * If the input image is in .jpeg format, it might contain exchangeable image (Exif) metadata that includes the * image's orientation. If so, and the Exif metadata for the input image populates the orientation field, the value * of <code>OrientationCorrection</code> is null. The <code>CelebrityFaces</code> and <code>UnrecognizedFaces</code> * bounding box coordinates represent face locations after Exif metadata is used to correct the image orientation. * Images in .png format don't contain Exif metadata. * </p> * </note> * * @return <p> * Support for estimating image orientation using the the OrientationCorrection field has ceased as of * August 2021. Any returned values for this field included in an API response will always be NULL. * </p> * </note> * <p> * The orientation of the input image (counterclockwise direction). If your application displays the image, * you can use this value to correct the orientation. 
The bounding box coordinates returned in * <code>CelebrityFaces</code> and <code>UnrecognizedFaces</code> represent face locations before the image * orientation is corrected. * </p> * <note> * <p> * If the input image is in .jpeg format, it might contain exchangeable image (Exif) metadata that includes * the image's orientation. If so, and the Exif metadata for the input image populates the orientation * field, the value of <code>OrientationCorrection</code> is null. The <code>CelebrityFaces</code> and * <code>UnrecognizedFaces</code> bounding box coordinates represent face locations after Exif metadata is * used to correct the image orientation. Images in .png format don't contain Exif metadata. * </p> * @see OrientationCorrection */ public String getOrientationCorrection() { return this.orientationCorrection; } /** * <note> * <p> * Support for estimating image orientation using the the OrientationCorrection field has ceased as of August 2021. * Any returned values for this field included in an API response will always be NULL. * </p> * </note> * <p> * The orientation of the input image (counterclockwise direction). If your application displays the image, you can * use this value to correct the orientation. The bounding box coordinates returned in <code>CelebrityFaces</code> * and <code>UnrecognizedFaces</code> represent face locations before the image orientation is corrected. * </p> * <note> * <p> * If the input image is in .jpeg format, it might contain exchangeable image (Exif) metadata that includes the * image's orientation. If so, and the Exif metadata for the input image populates the orientation field, the value * of <code>OrientationCorrection</code> is null. The <code>CelebrityFaces</code> and <code>UnrecognizedFaces</code> * bounding box coordinates represent face locations after Exif metadata is used to correct the image orientation. * Images in .png format don't contain Exif metadata. 
* </p> * </note> * * @param orientationCorrection * <p> * Support for estimating image orientation using the the OrientationCorrection field has ceased as of August * 2021. Any returned values for this field included in an API response will always be NULL. * </p> * </note> * <p> * The orientation of the input image (counterclockwise direction). If your application displays the image, * you can use this value to correct the orientation. The bounding box coordinates returned in * <code>CelebrityFaces</code> and <code>UnrecognizedFaces</code> represent face locations before the image * orientation is corrected. * </p> * <note> * <p> * If the input image is in .jpeg format, it might contain exchangeable image (Exif) metadata that includes * the image's orientation. If so, and the Exif metadata for the input image populates the orientation field, * the value of <code>OrientationCorrection</code> is null. The <code>CelebrityFaces</code> and * <code>UnrecognizedFaces</code> bounding box coordinates represent face locations after Exif metadata is * used to correct the image orientation. Images in .png format don't contain Exif metadata. * </p> * @return Returns a reference to this object so that method calls can be chained together. * @see OrientationCorrection */ public RecognizeCelebritiesResult withOrientationCorrection(String orientationCorrection) { setOrientationCorrection(orientationCorrection); return this; } /** * <note> * <p> * Support for estimating image orientation using the the OrientationCorrection field has ceased as of August 2021. * Any returned values for this field included in an API response will always be NULL. * </p> * </note> * <p> * The orientation of the input image (counterclockwise direction). If your application displays the image, you can * use this value to correct the orientation. 
The bounding box coordinates returned in <code>CelebrityFaces</code> * and <code>UnrecognizedFaces</code> represent face locations before the image orientation is corrected. * </p> * <note> * <p> * If the input image is in .jpeg format, it might contain exchangeable image (Exif) metadata that includes the * image's orientation. If so, and the Exif metadata for the input image populates the orientation field, the value * of <code>OrientationCorrection</code> is null. The <code>CelebrityFaces</code> and <code>UnrecognizedFaces</code> * bounding box coordinates represent face locations after Exif metadata is used to correct the image orientation. * Images in .png format don't contain Exif metadata. * </p> * </note> * * @param orientationCorrection * <p> * Support for estimating image orientation using the the OrientationCorrection field has ceased as of August * 2021. Any returned values for this field included in an API response will always be NULL. * </p> * </note> * <p> * The orientation of the input image (counterclockwise direction). If your application displays the image, * you can use this value to correct the orientation. The bounding box coordinates returned in * <code>CelebrityFaces</code> and <code>UnrecognizedFaces</code> represent face locations before the image * orientation is corrected. * </p> * <note> * <p> * If the input image is in .jpeg format, it might contain exchangeable image (Exif) metadata that includes * the image's orientation. If so, and the Exif metadata for the input image populates the orientation field, * the value of <code>OrientationCorrection</code> is null. The <code>CelebrityFaces</code> and * <code>UnrecognizedFaces</code> bounding box coordinates represent face locations after Exif metadata is * used to correct the image orientation. Images in .png format don't contain Exif metadata. 
* </p> * @see OrientationCorrection */ public void setOrientationCorrection(OrientationCorrection orientationCorrection) { withOrientationCorrection(orientationCorrection); } /** * <note> * <p> * Support for estimating image orientation using the the OrientationCorrection field has ceased as of August 2021. * Any returned values for this field included in an API response will always be NULL. * </p> * </note> * <p> * The orientation of the input image (counterclockwise direction). If your application displays the image, you can * use this value to correct the orientation. The bounding box coordinates returned in <code>CelebrityFaces</code> * and <code>UnrecognizedFaces</code> represent face locations before the image orientation is corrected. * </p> * <note> * <p> * If the input image is in .jpeg format, it might contain exchangeable image (Exif) metadata that includes the * image's orientation. If so, and the Exif metadata for the input image populates the orientation field, the value * of <code>OrientationCorrection</code> is null. The <code>CelebrityFaces</code> and <code>UnrecognizedFaces</code> * bounding box coordinates represent face locations after Exif metadata is used to correct the image orientation. * Images in .png format don't contain Exif metadata. * </p> * </note> * * @param orientationCorrection * <p> * Support for estimating image orientation using the the OrientationCorrection field has ceased as of August * 2021. Any returned values for this field included in an API response will always be NULL. * </p> * </note> * <p> * The orientation of the input image (counterclockwise direction). If your application displays the image, * you can use this value to correct the orientation. The bounding box coordinates returned in * <code>CelebrityFaces</code> and <code>UnrecognizedFaces</code> represent face locations before the image * orientation is corrected. 
* </p> * <note> * <p> * If the input image is in .jpeg format, it might contain exchangeable image (Exif) metadata that includes * the image's orientation. If so, and the Exif metadata for the input image populates the orientation field, * the value of <code>OrientationCorrection</code> is null. The <code>CelebrityFaces</code> and * <code>UnrecognizedFaces</code> bounding box coordinates represent face locations after Exif metadata is * used to correct the image orientation. Images in .png format don't contain Exif metadata. * </p> * @return Returns a reference to this object so that method calls can be chained together. * @see OrientationCorrection */ public RecognizeCelebritiesResult withOrientationCorrection(OrientationCorrection orientationCorrection) { this.orientationCorrection = orientationCorrection.toString(); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. 
* * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getCelebrityFaces() != null) sb.append("CelebrityFaces: ").append(getCelebrityFaces()).append(","); if (getUnrecognizedFaces() != null) sb.append("UnrecognizedFaces: ").append(getUnrecognizedFaces()).append(","); if (getOrientationCorrection() != null) sb.append("OrientationCorrection: ").append(getOrientationCorrection()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof RecognizeCelebritiesResult == false) return false; RecognizeCelebritiesResult other = (RecognizeCelebritiesResult) obj; if (other.getCelebrityFaces() == null ^ this.getCelebrityFaces() == null) return false; if (other.getCelebrityFaces() != null && other.getCelebrityFaces().equals(this.getCelebrityFaces()) == false) return false; if (other.getUnrecognizedFaces() == null ^ this.getUnrecognizedFaces() == null) return false; if (other.getUnrecognizedFaces() != null && other.getUnrecognizedFaces().equals(this.getUnrecognizedFaces()) == false) return false; if (other.getOrientationCorrection() == null ^ this.getOrientationCorrection() == null) return false; if (other.getOrientationCorrection() != null && other.getOrientationCorrection().equals(this.getOrientationCorrection()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getCelebrityFaces() == null) ? 0 : getCelebrityFaces().hashCode()); hashCode = prime * hashCode + ((getUnrecognizedFaces() == null) ? 0 : getUnrecognizedFaces().hashCode()); hashCode = prime * hashCode + ((getOrientationCorrection() == null) ? 
0 : getOrientationCorrection().hashCode()); return hashCode; } @Override public RecognizeCelebritiesResult clone() { try { return (RecognizeCelebritiesResult) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.cassandra.db; import java.io.IOException; import java.nio.channels.FileChannel; import java.nio.file.Files; import java.nio.file.Path; import java.util.Collections; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; import com.google.common.collect.Lists; import com.google.common.collect.Sets; import org.apache.commons.lang3.StringUtils; import org.junit.Assert; import org.junit.Test; import org.apache.cassandra.cache.RowCacheKey; import org.apache.cassandra.cql3.CQLTester; import org.apache.cassandra.cql3.UntypedResultSet; import org.apache.cassandra.db.lifecycle.LifecycleTransaction; import org.apache.cassandra.dht.BootStrapper; import org.apache.cassandra.io.sstable.Component; import org.apache.cassandra.io.sstable.format.SSTableReader; import org.apache.cassandra.io.util.File; import org.apache.cassandra.locator.InetAddressAndPort; import org.apache.cassandra.locator.TokenMetadata; import org.apache.cassandra.service.CacheService; import org.apache.cassandra.service.StorageService; import org.apache.cassandra.utils.ByteBufferUtil; import org.apache.cassandra.utils.FBUtilities; import static org.junit.Assert.assertEquals; import 
static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

// Integration tests for SSTableImporter: importing externally provided SSTables into a live table.
// Covers move-vs-copy semantics, multi-directory imports, level/repair metadata handling, corruption
// handling, token-range verification and row-cache invalidation. Tests mutate the shared
// CQLTester/ColumnFamilyStore state, so statement order within each test is significant.
public class ImportTest extends CQLTester
{
    @Test
    public void basicImportByMovingTest() throws Throwable
    {
        File backupDir = prepareBasicImporting();

        // copy is false - so importing will be done by moving
        importSSTables(SSTableImporter.Options.options(backupDir.toString()).copyData(false).build(), 10);

        // files were moved
        Assert.assertEquals(0, countFiles(backupDir));
    }

    @Test
    public void basicImportByCopyingTest() throws Throwable
    {
        File backupDir = prepareBasicImporting();

        // copy is true - so importing will be done by copying
        importSSTables(SSTableImporter.Options.options(backupDir.toString()).copyData(true).build(), 10);

        // files are left there as they were just copied
        Assert.assertNotEquals(0, countFiles(backupDir));
    }

    // Creates a table with 10 rows, flushes it, detaches the resulting sstables from the live set and
    // moves their files to a fresh backup directory. Returns that directory; on return the table is empty.
    private File prepareBasicImporting() throws Throwable
    {
        createTable("create table %s (id int primary key, d int)");
        for (int i = 0; i < 10; i++)
        {
            execute("insert into %s (id, d) values (?, ?)", i, i);
        }
        getCurrentColumnFamilyStore().forceBlockingFlush();

        Set<SSTableReader> sstables = getCurrentColumnFamilyStore().getLiveSSTables();
        getCurrentColumnFamilyStore().clearUnsafe();

        File backupdir = moveToBackupDir(sstables);
        // sanity check: after clearUnsafe + move the table must read as empty
        assertEquals(0, execute("select * from %s").size());
        return backupdir;
    }

    // Runs an import with the given options and asserts the expected row count is queryable afterwards.
    // Returns the directories the importer reported as failed (empty on full success).
    private List<String> importSSTables(SSTableImporter.Options options, int expectedRows) throws Throwable
    {
        SSTableImporter importer = new SSTableImporter(getCurrentColumnFamilyStore());
        List<String> failedDirectories = importer.importNewSSTables(options);
        assertEquals(expectedRows, execute("select * from %s").size());
        return failedDirectories;
    }

    // Imports from two separate backup directories in a single importNewSSTables call.
    @Test
    public void basicImportMultiDirTest() throws Throwable
    {
        createTable("create table %s (id int primary key, d int)");
        for (int i = 0; i < 10; i++)
            execute("insert into %s (id, d) values (?, ?)", i, i);
        getCurrentColumnFamilyStore().forceBlockingFlush();
        Set<SSTableReader> sstables = getCurrentColumnFamilyStore().getLiveSSTables();
        getCurrentColumnFamilyStore().clearUnsafe();
        // first batch of sstables moved out to backupdir
        File backupdir = moveToBackupDir(sstables);

        // second batch (rows 10..19) moved out to a different directory
        for (int i = 10; i < 20; i++)
            execute("insert into %s (id, d) values (?, ?)", i, i);
        getCurrentColumnFamilyStore().forceBlockingFlush();
        sstables = getCurrentColumnFamilyStore().getLiveSSTables();
        getCurrentColumnFamilyStore().clearUnsafe();
        File backupdir2 = moveToBackupDir(sstables);

        assertEquals(0, execute("select * from %s").size());

        // both directories passed to a single import; all 20 rows should come back
        SSTableImporter.Options options = SSTableImporter.Options.options(Sets.newHashSet(backupdir.toString(), backupdir2.toString())).build();
        SSTableImporter importer = new SSTableImporter(getCurrentColumnFamilyStore());
        importer.importNewSSTables(options);
        assertEquals(20, execute("select * from %s").size());
    }

    // Exercises the deprecated loadNewSSTables() ("nodetool refresh") path: sstable files stay in the
    // data directory; only the in-memory tracker is cleared before reloading.
    @Test
    @Deprecated
    public void refreshTest() throws Throwable
    {
        createTable("create table %s (id int primary key, d int)");
        for (int i = 0; i < 10; i++)
            execute("insert into %s (id, d) values (?, ?)", i, i);
        getCurrentColumnFamilyStore().forceBlockingFlush();
        Set<SSTableReader> sstables = getCurrentColumnFamilyStore().getLiveSSTables();
        getCurrentColumnFamilyStore().clearUnsafe();
        // release refs so the readers can be reopened by loadNewSSTables
        sstables.forEach(s -> s.selfRef().release());
        assertEquals(0, execute("select * from %s").size());
        getCurrentColumnFamilyStore().loadNewSSTables();
        assertEquals(10, execute("select * from %s").size());
    }

    // Verifies that compaction level metadata is kept on import by default and zeroed with resetLevel(true).
    @Test
    public void importResetLevelTest() throws Throwable
    {
        createTable("create table %s (id int primary key, d int)");
        for (int i = 0; i < 10; i++)
            execute("insert into %s (id, d) values (?, ?)", i, i);
        getCurrentColumnFamilyStore().forceBlockingFlush();
        Set<SSTableReader> sstables = getCurrentColumnFamilyStore().getLiveSSTables();
        getCurrentColumnFamilyStore().clearUnsafe();
        // stamp a non-zero compaction level onto the sstables before exporting them
        for (SSTableReader sstable : sstables)
            sstable.descriptor.getMetadataSerializer().mutateLevel(sstable.descriptor, 8);
        File backupdir = moveToBackupDir(sstables);
        assertEquals(0, execute("select * from %s").size());
        SSTableImporter.Options options =
SSTableImporter.Options.options(backupdir.toString()).build();
        SSTableImporter importer = new SSTableImporter(getCurrentColumnFamilyStore());
        importer.importNewSSTables(options);
        assertEquals(10, execute("select * from %s").size());
        sstables = getCurrentColumnFamilyStore().getLiveSSTables();
        assertEquals(1, sstables.size());
        // default options: the level stamped before export (8) survives the import
        for (SSTableReader sstable : sstables)
            assertEquals(8, sstable.getSSTableLevel());

        getCurrentColumnFamilyStore().clearUnsafe();
        backupdir = moveToBackupDir(sstables);

        // re-import with resetLevel(true): levels should come back as 0
        options = SSTableImporter.Options.options(backupdir.toString()).resetLevel(true).build();
        importer.importNewSSTables(options);
        sstables = getCurrentColumnFamilyStore().getLiveSSTables();
        assertEquals(1, sstables.size());
        for (SSTableReader sstable : getCurrentColumnFamilyStore().getLiveSSTables())
            assertEquals(0, sstable.getSSTableLevel());
    }

    // Verifies that repaired-at metadata is kept on import by default and wiped with clearRepaired(true).
    @Test
    public void importClearRepairedTest() throws Throwable
    {
        createTable("create table %s (id int primary key, d int)");
        for (int i = 0; i < 10; i++)
            execute("insert into %s (id, d) values (?, ?)", i, i);
        getCurrentColumnFamilyStore().forceBlockingFlush();
        Set<SSTableReader> sstables = getCurrentColumnFamilyStore().getLiveSSTables();
        getCurrentColumnFamilyStore().clearUnsafe();
        // mark the sstables as repaired (repairedAt = 111) before exporting them
        for (SSTableReader sstable : sstables)
            sstable.descriptor.getMetadataSerializer().mutateRepairMetadata(sstable.descriptor, 111, null, false);
        File backupdir = moveToBackupDir(sstables);
        assertEquals(0, execute("select * from %s").size());

        SSTableImporter.Options options = SSTableImporter.Options.options(backupdir.toString()).build();
        SSTableImporter importer = new SSTableImporter(getCurrentColumnFamilyStore());
        importer.importNewSSTables(options);
        assertEquals(10, execute("select * from %s").size());
        sstables = getCurrentColumnFamilyStore().getLiveSSTables();
        assertEquals(1, sstables.size());
        // default options keep the repaired flag
        for (SSTableReader sstable : sstables)
            assertTrue(sstable.isRepaired());
        getCurrentColumnFamilyStore().clearUnsafe();

        backupdir = moveToBackupDir(sstables);

        // re-import with clearRepaired(true): the repaired flag should be gone
        options = SSTableImporter.Options.options(backupdir.toString()).clearRepaired(true).build();
        importer.importNewSSTables(options);
        sstables = getCurrentColumnFamilyStore().getLiveSSTables();
        assertEquals(1, sstables.size());
        for (SSTableReader sstable : getCurrentColumnFamilyStore().getLiveSSTables())
            assertFalse(sstable.isRepaired());
    }

    // Moves all component files of the given sstables into a fresh temp directory laid out as
    // <temp>/<keyspace>/<table>, releasing each reader's self-ref first so the files can be moved.
    // Returns the created table-level backup directory.
    private File moveToBackupDir(Set<SSTableReader> sstables) throws IOException
    {
        Path temp = Files.createTempDirectory("importtest");
        SSTableReader sst = sstables.iterator().next();
        String tabledir = sst.descriptor.directory.name();
        String ksdir = sst.descriptor.directory.parent().name();
        // mirror the keyspace/table directory structure under the temp dir
        Path backupdir = createDirectories(temp.toString(), ksdir, tabledir);
        for (SSTableReader sstable : sstables)
        {
            sstable.selfRef().release();
            for (File f : sstable.descriptor.directory.tryList())
            {
                // move every component belonging to this sstable (Data, Index, Stats, ...)
                if (f.toString().contains(sstable.descriptor.baseFilename()))
                {
                    System.out.println("move " + f.toPath() + " to " + backupdir);
                    File moveFileTo = new File(backupdir, f.name());
                    moveFileTo.deleteOnExit();
                    Files.move(f.toPath(), moveFileTo.toPath());
                }
            }
        }
        return new File(backupdir);
    }

    // Creates base plus the nested subdirs (each marked deleteOnExit) and returns the innermost path.
    private Path createDirectories(String base, String ...
subdirs)
    {
        File b = new File(base);
        b.tryCreateDirectory();
        System.out.println("mkdir "+b);
        b.deleteOnExit();
        for (String subdir : subdirs)
        {
            b = new File(b, subdir);
            b.tryCreateDirectory();
            System.out.println("mkdir "+b);
            b.deleteOnExit();
        }
        return b.toPath();
    }

    // Verifies that imported sstables land in the data directory matching their token range
    // according to the mock CFS's disk boundaries (3 fake data directories under /tmp).
    @Test
    public void testGetCorrectDirectory() throws Throwable
    {
        // populate the token metadata so disk boundaries are meaningful
        TokenMetadata metadata = StorageService.instance.getTokenMetadata();
        metadata.updateNormalTokens(BootStrapper.getRandomTokens(metadata, 10), FBUtilities.getBroadcastAddressAndPort());
        createTable("create table %s (id int primary key, d int)");
        getCurrentColumnFamilyStore().disableAutoCompaction();

        // generate sstables with different first tokens
        for (int i = 0; i < 10; i++)
        {
            execute("insert into %s (id, d) values (?, ?)", i, i);
            getCurrentColumnFamilyStore().forceBlockingFlush();
        }
        Set<SSTableReader> toMove = getCurrentColumnFamilyStore().getLiveSSTables();
        getCurrentColumnFamilyStore().clearUnsafe();
        File dir = moveToBackupDir(toMove);

        Directories dirs = new Directories(getCurrentColumnFamilyStore().metadata(), Lists.newArrayList(new Directories.DataDirectory(new File("/tmp/1")),
                                                                                                        new Directories.DataDirectory(new File("/tmp/2")),
                                                                                                        new Directories.DataDirectory(new File("/tmp/3"))));
        MockCFS mock = new MockCFS(getCurrentColumnFamilyStore(), dirs);
        SSTableImporter importer = new SSTableImporter(mock);
        importer.importNewSSTables(SSTableImporter.Options.options(dir.toString()).build());
        // each imported sstable must live under the directory its disk boundary prescribes
        for (SSTableReader sstable : mock.getLiveSSTables())
        {
            File movedDir = sstable.descriptor.directory.toCanonical();
            File correctDir = mock.getDiskBoundaries().getCorrectDiskForSSTable(sstable).location.toCanonical();
            assertTrue(movedDir.toString().startsWith(correctDir.toString()));
        }
        for (SSTableReader sstable : mock.getLiveSSTables())
            sstable.selfRef().release();
    }

    // Shared body for the corrupt-import tests: prepares one backup dir holding a corrupt + a correct
    // sstable and a second backup dir holding only a correct one, then (in the 649-650 continuation)
    // asserts the first dir fails to import while the second succeeds.
    private void testCorruptHelper(boolean verify, boolean copy) throws Throwable
    {
        createTable("create table %s (id int primary key, d int)");
        for (int i = 0; i < 10; i++)
            execute("insert into %s (id, d) values (?, ?)", i, i);
        getCurrentColumnFamilyStore().forceBlockingFlush();
        SSTableReader sstableToCorrupt = getCurrentColumnFamilyStore().getLiveSSTables().iterator().next();
        for (int i = 0; i < 10; i++)
            execute("insert into %s (id, d) values (?, ?)", i + 10, i);
        getCurrentColumnFamilyStore().forceBlockingFlush();

        Set<SSTableReader> sstables = getCurrentColumnFamilyStore().getLiveSSTables();
        getCurrentColumnFamilyStore().clearUnsafe();

        // corrupt the first sstable by overwriting the start of its Stats component
        String filenameToCorrupt = sstableToCorrupt.descriptor.filenameFor(Component.STATS);
        try (FileChannel fileChannel = new File(filenameToCorrupt).newReadWriteChannel())
        {
            fileChannel.position(0);
            fileChannel.write(ByteBufferUtil.bytes(StringUtils.repeat('z', 2)));
        }

        File backupdir = moveToBackupDir(sstables);

        // now move a correct sstable to another directory to make sure that directory gets properly imported
        for (int i = 100; i < 130; i++)
            execute("insert into %s (id, d) values (?, ?)", i, i);
        getCurrentColumnFamilyStore().forceBlockingFlush();
        Set<SSTableReader> correctSSTables = getCurrentColumnFamilyStore().getLiveSSTables();
        getCurrentColumnFamilyStore().clearUnsafe();
        File backupdirCorrect = moveToBackupDir(correctSSTables);

        // snapshot the failed directory's contents so we can assert it is untouched after the failed import
        Set<File> beforeImport = Sets.newHashSet(backupdir.tryList());
        // first we moved out 2 sstables, one correct and one corrupt in to a single directory (backupdir)
        // then we moved out 1 sstable, a correct one (in backupdirCorrect).
        // now import should fail import on backupdir, but import the one in backupdirCorrect.
SSTableImporter.Options options = SSTableImporter.Options.options(Sets.newHashSet(backupdir.toString(), backupdirCorrect.toString())).copyData(copy).verifySSTables(verify).build();
        SSTableImporter importer = new SSTableImporter(getCurrentColumnFamilyStore());
        List<String> failedDirectories = importer.importNewSSTables(options);
        // only the directory containing the corrupt sstable may be reported as failed
        assertEquals(Collections.singletonList(backupdir.toString()), failedDirectories);

        UntypedResultSet res = execute("SELECT * FROM %s");
        // only rows from the correct directory (pk 100..129) should have been imported
        for (UntypedResultSet.Row r : res)
        {
            int pk = r.getInt("id");
            assertTrue("pk = "+pk, pk >= 100 && pk < 130);
        }
        assertEquals("Data dir should contain one file", 1, countFiles(getCurrentColumnFamilyStore().getDirectories().getDirectoryForNewSSTables()));
        assertEquals("backupdir contained 2 files before import, should still contain 2 after failing to import it", beforeImport, Sets.newHashSet(backupdir.tryList()));
        if (copy)
        {
            assertEquals("backupdirCorrect contained 1 file before import, should contain 1 after import too", 1, countFiles(backupdirCorrect));
        }
        else
        {
            assertEquals("backupdirCorrect contained 1 file before import, should be empty after import", 0, countFiles(backupdirCorrect));
        }
    }

    // Counts the -Data.db component files directly inside dir (non-recursive).
    private int countFiles(File dir)
    {
        int fileCount = 0;
        for (File f : dir.tryList())
        {
            if (f.isFile() && f.toString().contains("-Data.db"))
            {
                fileCount++;
            }
        }
        return fileCount;
    }

    @Test
    public void testImportCorrupt() throws Throwable
    {
        testCorruptHelper(true, false);
    }

    @Test
    public void testImportCorruptWithCopying() throws Throwable
    {
        testCorruptHelper(true, true);
    }

    // verification disabled: the corrupt Stats component should still make the directory fail
    @Test
    public void testImportCorruptWithoutValidation() throws Throwable
    {
        testCorruptHelper(false, false);
    }

    @Test
    public void testImportCorruptWithoutValidationWithCopying() throws Throwable
    {
        testCorruptHelper(false, true);
    }

    // Verifies token-range checking: once other nodes own most ranges, importing sstables whose keys
    // fall outside the local ranges must fail when verifyTokens is on, and succeed when it is off.
    @Test
    public void testImportOutOfRange() throws Throwable
    {
        createTable("create table %s (id int primary key, d int)");
        for (int i = 0; i < 1000; i++)
            execute("insert into %s (id, d) values (?, ?)", i, i);
        getCurrentColumnFamilyStore().forceBlockingFlush();
        Set<SSTableReader> sstables = getCurrentColumnFamilyStore().getLiveSSTables();
        getCurrentColumnFamilyStore().clearUnsafe();

        // register two other endpoints so the local node no longer owns all token ranges
        TokenMetadata tmd = StorageService.instance.getTokenMetadata();
        tmd.updateNormalTokens(BootStrapper.getRandomTokens(tmd, 5), InetAddressAndPort.getByName("127.0.0.1"));
        tmd.updateNormalTokens(BootStrapper.getRandomTokens(tmd, 5), InetAddressAndPort.getByName("127.0.0.2"));
        tmd.updateNormalTokens(BootStrapper.getRandomTokens(tmd, 5), InetAddressAndPort.getByName("127.0.0.3"));

        File backupdir = moveToBackupDir(sstables);
        try
        {
            SSTableImporter.Options options = SSTableImporter.Options.options(backupdir.toString()).verifySSTables(true).verifyTokens(true).build();
            SSTableImporter importer = new SSTableImporter(getCurrentColumnFamilyStore());
            List<String> failed = importer.importNewSSTables(options);
            assertEquals(Collections.singletonList(backupdir.toString()), failed);

            // verify that we check the tokens if verifySSTables == false but verifyTokens == true:
            options = SSTableImporter.Options.options(backupdir.toString()).verifySSTables(false).verifyTokens(true).build();
            importer = new SSTableImporter(getCurrentColumnFamilyStore());
            failed = importer.importNewSSTables(options);
            assertEquals(Collections.singletonList(backupdir.toString()), failed);

            // and that we can import with it disabled:
            options = SSTableImporter.Options.options(backupdir.toString()).verifySSTables(true).verifyTokens(false).build();
            importer = new SSTableImporter(getCurrentColumnFamilyStore());
            failed = importer.importNewSSTables(options);
            assertTrue(failed.isEmpty());
        }
        finally
        {
            // restore the shared token metadata for subsequent tests
            tmd.clearUnsafe();
        }
    }

    // Same out-of-range scenario but with extendedVerify(true): the import must still fail.
    @Test
    public void testImportOutOfRangeExtendedVerify() throws Throwable
    {
        createTable("create table %s (id int primary key, d int)");
        for (int i = 0; i < 1000; i++)
            execute("insert into %s (id, d) values (?, ?)", i, i);
        getCurrentColumnFamilyStore().forceBlockingFlush();
        Set<SSTableReader> sstables =
getCurrentColumnFamilyStore().getLiveSSTables(); getCurrentColumnFamilyStore().clearUnsafe(); TokenMetadata tmd = StorageService.instance.getTokenMetadata(); tmd.updateNormalTokens(BootStrapper.getRandomTokens(tmd, 5), InetAddressAndPort.getByName("127.0.0.1")); tmd.updateNormalTokens(BootStrapper.getRandomTokens(tmd, 5), InetAddressAndPort.getByName("127.0.0.2")); tmd.updateNormalTokens(BootStrapper.getRandomTokens(tmd, 5), InetAddressAndPort.getByName("127.0.0.3")); File backupdir = moveToBackupDir(sstables); try { SSTableImporter.Options options = SSTableImporter.Options.options(backupdir.toString()) .verifySSTables(true) .verifyTokens(true) .extendedVerify(true).build(); SSTableImporter importer = new SSTableImporter(getCurrentColumnFamilyStore()); List<String> failedDirectories = importer.importNewSSTables(options); assertEquals(Collections.singletonList(backupdir.toString()), failedDirectories); } finally { tmd.clearUnsafe(); } } @Test public void testImportInvalidateCache() throws Throwable { createTable("create table %s (id int primary key, d int) WITH caching = { 'keys': 'NONE', 'rows_per_partition': 'ALL' }"); for (int i = 0; i < 10; i++) execute("insert into %s (id, d) values (?, ?)", i, i); getCurrentColumnFamilyStore().forceBlockingFlush(); CacheService.instance.setRowCacheCapacityInMB(1); Set<RowCacheKey> keysToInvalidate = new HashSet<>(); // populate the row cache with keys from the sstable we are about to remove for (int i = 0; i < 10; i++) { execute("SELECT * FROM %s WHERE id = ?", i); } Iterator<RowCacheKey> it = CacheService.instance.rowCache.keyIterator(); while (it.hasNext()) { keysToInvalidate.add(it.next()); } SSTableReader sstableToImport = getCurrentColumnFamilyStore().getLiveSSTables().iterator().next(); getCurrentColumnFamilyStore().clearUnsafe(); for (int i = 10; i < 20; i++) execute("insert into %s (id, d) values (?, ?)", i, i); getCurrentColumnFamilyStore().forceBlockingFlush(); Set<RowCacheKey> allCachedKeys = new HashSet<>(); // 
populate row cache with sstable we are keeping for (int i = 10; i < 20; i++) { execute("SELECT * FROM %s WHERE id = ?", i); } it = CacheService.instance.rowCache.keyIterator(); while (it.hasNext()) { allCachedKeys.add(it.next()); } assertEquals(20, CacheService.instance.rowCache.size()); File backupdir = moveToBackupDir(Collections.singleton(sstableToImport)); // make sure we don't wipe caches with invalidateCaches = false: Set<SSTableReader> beforeFirstImport = getCurrentColumnFamilyStore().getLiveSSTables(); SSTableImporter.Options options = SSTableImporter.Options.options(backupdir.toString()).verifySSTables(true).verifyTokens(true).build(); SSTableImporter importer = new SSTableImporter(getCurrentColumnFamilyStore()); importer.importNewSSTables(options); assertEquals(20, CacheService.instance.rowCache.size()); Set<SSTableReader> toMove = Sets.difference(getCurrentColumnFamilyStore().getLiveSSTables(), beforeFirstImport); getCurrentColumnFamilyStore().clearUnsafe(); // move away the sstable we just imported again: backupdir = moveToBackupDir(toMove); beforeFirstImport.forEach(s -> s.selfRef().release()); options = SSTableImporter.Options.options(backupdir.toString()).verifySSTables(true).verifyTokens(true).invalidateCaches(true).build(); importer.importNewSSTables(options); assertEquals(10, CacheService.instance.rowCache.size()); it = CacheService.instance.rowCache.keyIterator(); while (it.hasNext()) { // make sure the keys from the sstable we are importing are invalidated and that the other one is still there RowCacheKey rck = it.next(); assertTrue(allCachedKeys.contains(rck)); assertFalse(keysToInvalidate.contains(rck)); } } @Test public void testImportCacheEnabledWithoutSrcDir() throws Throwable { createTable("create table %s (id int primary key, d int) WITH caching = { 'keys': 'NONE', 'rows_per_partition': 'ALL' }"); for (int i = 0; i < 10; i++) execute("insert into %s (id, d) values (?, ?)", i, i); getCurrentColumnFamilyStore().forceBlockingFlush(); 
Set<SSTableReader> sstables = getCurrentColumnFamilyStore().getLiveSSTables(); CacheService.instance.setRowCacheCapacityInMB(1); getCurrentColumnFamilyStore().clearUnsafe(); sstables.forEach(s -> s.selfRef().release()); SSTableImporter.Options options = SSTableImporter.Options.options().invalidateCaches(true).build(); SSTableImporter importer = new SSTableImporter(getCurrentColumnFamilyStore()); importer.importNewSSTables(options); assertEquals(1, getCurrentColumnFamilyStore().getLiveSSTables().size()); } @Test public void testRefreshCorrupt() throws Throwable { createTable("create table %s (id int primary key, d int) WITH caching = { 'keys': 'NONE', 'rows_per_partition': 'ALL' }"); for (int i = 0; i < 10; i++) execute("insert into %s (id, d) values (?, ?)", i, i); getCurrentColumnFamilyStore().forceBlockingFlush(); Set<SSTableReader> sstables = getCurrentColumnFamilyStore().getLiveSSTables(); getCurrentColumnFamilyStore().clearUnsafe(); sstables.forEach(s -> s.selfRef().release()); // corrupt the sstable which is still in the data directory SSTableReader sstableToCorrupt = sstables.iterator().next(); String filenameToCorrupt = sstableToCorrupt.descriptor.filenameFor(Component.STATS); try (FileChannel fileChannel = new File(filenameToCorrupt).newReadWriteChannel()) { fileChannel.position(0); fileChannel.write(ByteBufferUtil.bytes(StringUtils.repeat('z', 2))); } for (int i = 10; i < 20; i++) execute("insert into %s (id, d) values (?, ?)", i, i); getCurrentColumnFamilyStore().forceBlockingFlush(); for (int i = 20; i < 30; i++) execute("insert into %s (id, d) values (?, ?)", i, i); getCurrentColumnFamilyStore().forceBlockingFlush(); Set<SSTableReader> expectedFiles = new HashSet<>(getCurrentColumnFamilyStore().getLiveSSTables()); SSTableImporter.Options options = SSTableImporter.Options.options().build(); SSTableImporter importer = new SSTableImporter(getCurrentColumnFamilyStore()); boolean gotException = false; try { importer.importNewSSTables(options); } catch 
(Throwable t) { gotException = true; } assertTrue(gotException); assertEquals(2, getCurrentColumnFamilyStore().getLiveSSTables().size()); // for nodetool refresh we leave corrupt sstables in the data directory assertEquals(3, countFiles(sstableToCorrupt.descriptor.directory)); int rowCount = 0; for (UntypedResultSet.Row r : execute("SELECT * FROM %s")) { rowCount++; int pk = r.getInt("id"); assertTrue("pk = "+pk, pk >= 10 && pk < 30); } assertEquals(20, rowCount); assertEquals(expectedFiles, getCurrentColumnFamilyStore().getLiveSSTables()); for (SSTableReader sstable : expectedFiles) assertTrue(new File(sstable.descriptor.filenameFor(Component.DATA)).exists()); getCurrentColumnFamilyStore().truncateBlocking(); LifecycleTransaction.waitForDeletions(); for (File f : sstableToCorrupt.descriptor.directory.tryList()) // clean up the corrupt files which truncate does not handle f.tryDelete(); } /** * If a user gives a bad directory we don't import any directories - we should let the user correct the directories */ @Test public void importBadDirectoryTest() throws Throwable { createTable("create table %s (id int primary key, d int)"); for (int i = 0; i < 10; i++) execute("insert into %s (id, d) values (?, ?)", i, i); getCurrentColumnFamilyStore().forceBlockingFlush(); Set<SSTableReader> sstables = getCurrentColumnFamilyStore().getLiveSSTables(); getCurrentColumnFamilyStore().clearUnsafe(); File backupdir = moveToBackupDir(sstables); for (int i = 10; i < 20; i++) execute("insert into %s (id, d) values (?, ?)", i, i); getCurrentColumnFamilyStore().forceBlockingFlush(); sstables = getCurrentColumnFamilyStore().getLiveSSTables(); getCurrentColumnFamilyStore().clearUnsafe(); File backupdir2 = moveToBackupDir(sstables); assertEquals(0, execute("select * from %s").size()); SSTableImporter.Options options = SSTableImporter.Options.options(Sets.newHashSet(backupdir.toString(), backupdir2.toString(), "/tmp/DOESNTEXIST")).build(); SSTableImporter importer = new 
SSTableImporter(getCurrentColumnFamilyStore()); boolean gotException = false; try { importer.importNewSSTables(options); } catch (Throwable t) { gotException = true; } assertTrue(gotException); assertEquals(0, execute("select * from %s").size()); assertEquals(0, getCurrentColumnFamilyStore().getLiveSSTables().size()); } private static class MockCFS extends ColumnFamilyStore { public MockCFS(ColumnFamilyStore cfs, Directories dirs) { super(cfs.keyspace, cfs.getTableName(), 0, cfs.metadata, dirs, false, false, true); } } }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hdfs.server.datanode; import java.io.DataOutputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.util.Iterator; import java.util.LinkedHashSet; import java.util.LinkedList; import java.util.List; import java.util.concurrent.TimeUnit; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; import org.apache.hadoop.hdfs.protocol.Block; import org.apache.hadoop.hdfs.protocol.ExtendedBlock; import org.apache.hadoop.hdfs.server.datanode.BlockScanner.Conf; import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeReference; import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeSpi.BlockIterator; import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeSpi; import org.apache.hadoop.hdfs.server.datanode.metrics.DataNodeMetrics; import org.apache.hadoop.hdfs.util.DataTransferThrottler; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.util.Time; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * VolumeScanner scans a single volume. 
Each VolumeScanner has its own thread.<p/> * They are all managed by the DataNode's BlockScanner. */ public class VolumeScanner extends Thread { public static final Logger LOG = LoggerFactory.getLogger(VolumeScanner.class); /** * Number of seconds in a minute. */ private final static int SECONDS_PER_MINUTE = 60; /** * Number of minutes in an hour. */ private final static int MINUTES_PER_HOUR = 60; /** * Name of the block iterator used by this scanner. */ private final static String BLOCK_ITERATOR_NAME = "scanner"; /** * The configuration. */ private Conf conf; @VisibleForTesting void setConf(Conf conf) { this.conf = conf; } /** * The DataNode this VolumEscanner is associated with. */ private final DataNode datanode; private final DataNodeMetrics metrics; /** * A reference to the volume that we're scanning. */ private final FsVolumeReference ref; /** * The volume that we're scanning. */ final FsVolumeSpi volume; /** * The number of scanned bytes in each minute of the last hour.<p/> * * This array is managed as a circular buffer. We take the monotonic time and * divide it up into one-minute periods. Each entry in the array represents * how many bytes were scanned during that period. */ private final long scannedBytes[] = new long[MINUTES_PER_HOUR]; /** * The sum of all the values of scannedBytes. */ private long scannedBytesSum = 0; /** * The throttler to use with BlockSender objects. */ private final DataTransferThrottler throttler = new DataTransferThrottler(1); /** * The null output stream to use with BlockSender objects. */ private final DataOutputStream nullStream = new DataOutputStream(new IOUtils.NullOutputStream()); /** * The block iterators associated with this VolumeScanner.<p/> * * Each block pool has its own BlockIterator. */ private final List<BlockIterator> blockIters = new LinkedList<BlockIterator>(); /** * Blocks which are suspect. * The scanner prioritizes scanning these blocks. 
*/ private final LinkedHashSet<ExtendedBlock> suspectBlocks = new LinkedHashSet<ExtendedBlock>(); /** * Blocks which were suspect which we have scanned. * This is used to avoid scanning the same suspect block over and over. */ private final Cache<ExtendedBlock, Boolean> recentSuspectBlocks = CacheBuilder.newBuilder().maximumSize(1000) .expireAfterAccess(10, TimeUnit.MINUTES).build(); /** * The current block iterator, or null if there is none. */ private BlockIterator curBlockIter = null; /** * True if the thread is stopping.<p/> * Protected by this object's lock. */ private boolean stopping = false; /** * The monotonic minute that the volume scanner was started on. */ private long startMinute = 0; /** * The current minute, in monotonic terms. */ private long curMinute = 0; /** * Handles scan results. */ private final ScanResultHandler resultHandler; private final Statistics stats = new Statistics(); static class Statistics { long bytesScannedInPastHour = 0; long blocksScannedInCurrentPeriod = 0; long blocksScannedSinceRestart = 0; long scansSinceRestart = 0; long scanErrorsSinceRestart = 0; long nextBlockPoolScanStartMs = -1; long blockPoolPeriodEndsMs = -1; ExtendedBlock lastBlockScanned = null; boolean eof = false; Statistics() { } Statistics(Statistics other) { this.bytesScannedInPastHour = other.bytesScannedInPastHour; this.blocksScannedInCurrentPeriod = other.blocksScannedInCurrentPeriod; this.blocksScannedSinceRestart = other.blocksScannedSinceRestart; this.scansSinceRestart = other.scansSinceRestart; this.scanErrorsSinceRestart = other.scanErrorsSinceRestart; this.nextBlockPoolScanStartMs = other.nextBlockPoolScanStartMs; this.blockPoolPeriodEndsMs = other.blockPoolPeriodEndsMs; this.lastBlockScanned = other.lastBlockScanned; this.eof = other.eof; } @Override public String toString() { return new StringBuilder(). append("Statistics{"). append("bytesScannedInPastHour=").append(bytesScannedInPastHour). append(", blocksScannedInCurrentPeriod="). 
append(blocksScannedInCurrentPeriod). append(", blocksScannedSinceRestart="). append(blocksScannedSinceRestart). append(", scansSinceRestart=").append(scansSinceRestart). append(", scanErrorsSinceRestart=").append(scanErrorsSinceRestart). append(", nextBlockPoolScanStartMs=").append(nextBlockPoolScanStartMs). append(", blockPoolPeriodEndsMs=").append(blockPoolPeriodEndsMs). append(", lastBlockScanned=").append(lastBlockScanned). append(", eof=").append(eof). append("}").toString(); } } private static double positiveMsToHours(long ms) { if (ms <= 0) { return 0; } else { return TimeUnit.HOURS.convert(ms, TimeUnit.MILLISECONDS); } } public void printStats(StringBuilder p) { p.append(String.format("Block scanner information for volume %s with base" + " path %s%n", volume.getStorageID(), volume)); synchronized (stats) { p.append(String.format("Bytes verified in last hour : %57d%n", stats.bytesScannedInPastHour)); p.append(String.format("Blocks scanned in current period : %57d%n", stats.blocksScannedInCurrentPeriod)); p.append(String.format("Blocks scanned since restart : %57d%n", stats.blocksScannedSinceRestart)); p.append(String.format("Block pool scans since restart : %57d%n", stats.scansSinceRestart)); p.append(String.format("Block scan errors since restart : %57d%n", stats.scanErrorsSinceRestart)); if (stats.nextBlockPoolScanStartMs > 0) { p.append(String.format("Hours until next block pool scan : %57.3f%n", positiveMsToHours(stats.nextBlockPoolScanStartMs - Time.monotonicNow()))); } if (stats.blockPoolPeriodEndsMs > 0) { p.append(String.format("Hours until possible pool rescan : %57.3f%n", positiveMsToHours(stats.blockPoolPeriodEndsMs - Time.now()))); } p.append(String.format("Last block scanned : %57s%n", ((stats.lastBlockScanned == null) ? 
"none" : stats.lastBlockScanned.toString()))); p.append(String.format("More blocks to scan in period : %57s%n", !stats.eof)); p.append(System.lineSeparator()); } } static class ScanResultHandler { private VolumeScanner scanner; public void setup(VolumeScanner scanner) { LOG.trace("Starting VolumeScanner {}", scanner.volume); this.scanner = scanner; } public void handle(ExtendedBlock block, IOException e) { FsVolumeSpi volume = scanner.volume; if (e == null) { LOG.trace("Successfully scanned {} on {}", block, volume); return; } // If the block does not exist anymore, then it's not an error. if (!volume.getDataset().contains(block)) { LOG.debug("Volume {}: block {} is no longer in the dataset.", volume, block); return; } // If the block exists, the exception may due to a race with write: // The BlockSender got an old block path in rbw. BlockReceiver removed // the rbw block from rbw to finalized but BlockSender tried to open the // file before BlockReceiver updated the VolumeMap. The state of the // block can be changed again now, so ignore this error here. If there // is a block really deleted by mistake, DirectoryScan should catch it. if (e instanceof FileNotFoundException ) { LOG.info("Volume {}: verification failed for {} because of " + "FileNotFoundException. This may be due to a race with write.", volume, block); return; } LOG.warn("Reporting bad {} on {}", block, volume); try { scanner.datanode.reportBadBlocks(block, volume); } catch (IOException ie) { // This is bad, but not bad enough to shut down the scanner. 
LOG.warn("Cannot report bad block " + block, ie); } } } VolumeScanner(Conf conf, DataNode datanode, FsVolumeReference ref) { this.conf = conf; this.datanode = datanode; this.metrics = datanode.getMetrics(); this.ref = ref; this.volume = ref.getVolume(); ScanResultHandler handler; try { handler = conf.resultHandler.newInstance(); } catch (Throwable e) { LOG.error("unable to instantiate {}", conf.resultHandler, e); handler = new ScanResultHandler(); } this.resultHandler = handler; setName("VolumeScannerThread(" + volume + ")"); setDaemon(true); } private void saveBlockIterator(BlockIterator iter) { try { iter.save(); } catch (IOException e) { LOG.warn("{}: error saving {}.", this, iter, e); } } private void expireOldScannedBytesRecords(long monotonicMs) { long newMinute = TimeUnit.MINUTES.convert(monotonicMs, TimeUnit.MILLISECONDS); if (curMinute == newMinute) { return; } // If a minute or more has gone past since we last updated the scannedBytes // array, zero out the slots corresponding to those minutes. for (long m = curMinute + 1; m <= newMinute; m++) { int slotIdx = (int)(m % MINUTES_PER_HOUR); LOG.trace("{}: updateScannedBytes is zeroing out slotIdx {}. " + "curMinute = {}; newMinute = {}", this, slotIdx, curMinute, newMinute); scannedBytesSum -= scannedBytes[slotIdx]; scannedBytes[slotIdx] = 0; } curMinute = newMinute; } /** * Find a usable block iterator.<p/> * * We will consider available block iterators in order. This property is * important so that we don't keep rescanning the same block pool id over * and over, while other block pools stay unscanned.<p/> * * A block pool is always ready to scan if the iterator is not at EOF. If * the iterator is at EOF, the block pool will be ready to scan when * conf.scanPeriodMs milliseconds have elapsed since the iterator was last * rewound.<p/> * * @return 0 if we found a usable block iterator; the * length of time we should delay before * checking again otherwise. 
*/ private synchronized long findNextUsableBlockIter() { int numBlockIters = blockIters.size(); if (numBlockIters == 0) { LOG.debug("{}: no block pools are registered.", this); return Long.MAX_VALUE; } int curIdx; if (curBlockIter == null) { curIdx = 0; } else { curIdx = blockIters.indexOf(curBlockIter); Preconditions.checkState(curIdx >= 0); } // Note that this has to be wall-clock time, not monotonic time. This is // because the time saved in the cursor file is a wall-clock time. We do // not want to save a monotonic time in the cursor file, because it resets // every time the machine reboots (on most platforms). long nowMs = Time.now(); long minTimeoutMs = Long.MAX_VALUE; for (int i = 0; i < numBlockIters; i++) { int idx = (curIdx + i + 1) % numBlockIters; BlockIterator iter = blockIters.get(idx); if (!iter.atEnd()) { LOG.info("Now scanning bpid {} on volume {}", iter.getBlockPoolId(), volume); curBlockIter = iter; return 0L; } long iterStartMs = iter.getIterStartMs(); long waitMs = (iterStartMs + conf.scanPeriodMs) - nowMs; if (waitMs <= 0) { iter.rewind(); LOG.info("Now rescanning bpid {} on volume {}, after more than " + "{} hour(s)", iter.getBlockPoolId(), volume, TimeUnit.HOURS.convert(conf.scanPeriodMs, TimeUnit.MILLISECONDS)); curBlockIter = iter; return 0L; } minTimeoutMs = Math.min(minTimeoutMs, waitMs); } LOG.info("{}: no suitable block pools found to scan. Waiting {} ms.", this, minTimeoutMs); return minTimeoutMs; } /** * Scan a block. * * @param cblock The block to scan. * @param bytesPerSec The bytes per second to scan at. * * @return The length of the block that was scanned, or * -1 if the block could not be scanned. */ private long scanBlock(ExtendedBlock cblock, long bytesPerSec) { // 'cblock' has a valid blockId and block pool id, but we don't yet know the // genstamp the block is supposed to have. Ask the FsDatasetImpl for this // information. 
ExtendedBlock block = null; try { Block b = volume.getDataset().getStoredBlock( cblock.getBlockPoolId(), cblock.getBlockId()); if (b == null) { LOG.info("Replica {} was not found in the VolumeMap for volume {}", cblock, volume); } else { block = new ExtendedBlock(cblock.getBlockPoolId(), b); } } catch (FileNotFoundException e) { LOG.info("FileNotFoundException while finding block {} on volume {}", cblock, volume); } catch (IOException e) { LOG.warn("I/O error while finding block {} on volume {}", cblock, volume); } if (block == null) { return -1; // block not found. } LOG.debug("start scanning block {}", block); BlockSender blockSender = null; try { blockSender = new BlockSender(block, 0, -1, false, true, true, datanode, null, CachingStrategy.newDropBehind()); throttler.setBandwidth(bytesPerSec); long bytesRead = blockSender.sendBlock(nullStream, null, throttler); resultHandler.handle(block, null); metrics.incrBlocksVerified(); return bytesRead; } catch (IOException e) { resultHandler.handle(block, e); } finally { IOUtils.cleanup(null, blockSender); } metrics.incrBlockVerificationFailures(); return -1; } @VisibleForTesting static boolean calculateShouldScan(String storageId, long targetBytesPerSec, long scannedBytesSum, long startMinute, long curMinute) { long runMinutes = curMinute - startMinute; long effectiveBytesPerSec; if (runMinutes <= 0) { // avoid division by zero effectiveBytesPerSec = scannedBytesSum; } else { if (runMinutes > MINUTES_PER_HOUR) { // we only keep an hour's worth of rate information runMinutes = MINUTES_PER_HOUR; } effectiveBytesPerSec = scannedBytesSum / (SECONDS_PER_MINUTE * runMinutes); } boolean shouldScan = effectiveBytesPerSec <= targetBytesPerSec; LOG.trace("{}: calculateShouldScan: effectiveBytesPerSec = {}, and " + "targetBytesPerSec = {}. 
startMinute = {}, curMinute = {}, " + "shouldScan = {}", storageId, effectiveBytesPerSec, targetBytesPerSec, startMinute, curMinute, shouldScan); return shouldScan; } /** * Run an iteration of the VolumeScanner loop. * * @param suspectBlock A suspect block which we should scan, or null to * scan the next regularly scheduled block. * * @return The number of milliseconds to delay before running the loop * again, or 0 to re-run the loop immediately. */ private long runLoop(ExtendedBlock suspectBlock) { long bytesScanned = -1; boolean scanError = false; ExtendedBlock block = null; try { long monotonicMs = Time.monotonicNow(); expireOldScannedBytesRecords(monotonicMs); if (!calculateShouldScan(volume.getStorageID(), conf.targetBytesPerSec, scannedBytesSum, startMinute, curMinute)) { // If neededBytesPerSec is too low, then wait few seconds for some old // scannedBytes records to expire. return 30000L; } // Find a usable block pool to scan. if (suspectBlock != null) { block = suspectBlock; } else { if ((curBlockIter == null) || curBlockIter.atEnd()) { long timeout = findNextUsableBlockIter(); if (timeout > 0) { LOG.trace("{}: no block pools are ready to scan yet. Waiting " + "{} ms.", this, timeout); synchronized (stats) { stats.nextBlockPoolScanStartMs = Time.monotonicNow() + timeout; } return timeout; } synchronized (stats) { stats.scansSinceRestart++; stats.blocksScannedInCurrentPeriod = 0; stats.nextBlockPoolScanStartMs = -1; } return 0L; } try { block = curBlockIter.nextBlock(); } catch (IOException e) { // There was an error listing the next block in the volume. This is a // serious issue. LOG.warn("{}: nextBlock error on {}", this, curBlockIter); // On the next loop iteration, curBlockIter#eof will be set to true, and // we will pick a different block iterator. return 0L; } if (block == null) { // The BlockIterator is at EOF. 
LOG.info("{}: finished scanning block pool {}", this, curBlockIter.getBlockPoolId()); saveBlockIterator(curBlockIter); return 0; } } if (curBlockIter != null) { long saveDelta = monotonicMs - curBlockIter.getLastSavedMs(); if (saveDelta >= conf.cursorSaveMs) { LOG.debug("{}: saving block iterator {} after {} ms.", this, curBlockIter, saveDelta); saveBlockIterator(curBlockIter); } } bytesScanned = scanBlock(block, conf.targetBytesPerSec); if (bytesScanned >= 0) { scannedBytesSum += bytesScanned; scannedBytes[(int)(curMinute % MINUTES_PER_HOUR)] += bytesScanned; } else { scanError = true; } return 0L; } finally { synchronized (stats) { stats.bytesScannedInPastHour = scannedBytesSum; if (bytesScanned > 0) { stats.blocksScannedInCurrentPeriod++; stats.blocksScannedSinceRestart++; } if (scanError) { stats.scanErrorsSinceRestart++; } if (block != null) { stats.lastBlockScanned = block; } if (curBlockIter == null) { stats.eof = true; stats.blockPoolPeriodEndsMs = -1; } else { stats.eof = curBlockIter.atEnd(); stats.blockPoolPeriodEndsMs = curBlockIter.getIterStartMs() + conf.scanPeriodMs; } } } } /** * If there are elements in the suspectBlocks list, removes * and returns the first one. Otherwise, returns null. */ private synchronized ExtendedBlock popNextSuspectBlock() { Iterator<ExtendedBlock> iter = suspectBlocks.iterator(); if (!iter.hasNext()) { return null; } ExtendedBlock block = iter.next(); iter.remove(); return block; } @Override public void run() { // Record the minute on which the scanner started. this.startMinute = TimeUnit.MINUTES.convert(Time.monotonicNow(), TimeUnit.MILLISECONDS); this.curMinute = startMinute; try { LOG.trace("{}: thread starting.", this); resultHandler.setup(this); try { long timeout = 0; while (true) { ExtendedBlock suspectBlock = null; // Take the lock to check if we should stop, and access the // suspect block list. 
synchronized (this) { if (stopping) { break; } if (timeout > 0) { LOG.debug("{}: wait for {} milliseconds", this, timeout); wait(timeout); if (stopping) { break; } } suspectBlock = popNextSuspectBlock(); } timeout = runLoop(suspectBlock); } } catch (InterruptedException e) { // We are exiting because of an InterruptedException, // probably sent by VolumeScanner#shutdown. LOG.trace("{} exiting because of InterruptedException.", this); } catch (Throwable e) { LOG.error("{} exiting because of exception ", this, e); } LOG.info("{} exiting.", this); // Save the current position of all block iterators and close them. for (BlockIterator iter : blockIters) { saveBlockIterator(iter); IOUtils.cleanup(null, iter); } } finally { // When the VolumeScanner exits, release the reference we were holding // on the volume. This will allow the volume to be removed later. IOUtils.cleanup(null, ref); } } @Override public String toString() { return "VolumeScanner(" + volume + ", " + volume.getStorageID() + ")"; } /** * Shut down this scanner. */ public synchronized void shutdown() { stopping = true; notify(); this.interrupt(); } public synchronized void markSuspectBlock(ExtendedBlock block) { if (stopping) { LOG.debug("{}: Not scheduling suspect block {} for " + "rescanning, because this volume scanner is stopping.", this, block); return; } Boolean recent = recentSuspectBlocks.getIfPresent(block); if (recent != null) { LOG.debug("{}: Not scheduling suspect block {} for " + "rescanning, because we rescanned it recently.", this, block); return; } if (suspectBlocks.contains(block)) { LOG.debug("{}: suspect block {} is already queued for " + "rescanning.", this, block); return; } suspectBlocks.add(block); recentSuspectBlocks.put(block, true); LOG.debug("{}: Scheduling suspect block {} for rescanning.", this, block); notify(); // wake scanner thread. } /** * Allow the scanner to scan the given block pool. * * @param bpid The block pool id. 
*/ public synchronized void enableBlockPoolId(String bpid) { for (BlockIterator iter : blockIters) { if (iter.getBlockPoolId().equals(bpid)) { LOG.warn("{}: already enabled scanning on block pool {}", this, bpid); return; } } BlockIterator iter = null; try { // Load a block iterator for the next block pool on the volume. iter = volume.loadBlockIterator(bpid, BLOCK_ITERATOR_NAME); LOG.trace("{}: loaded block iterator for {}.", this, bpid); } catch (FileNotFoundException e) { LOG.debug("{}: failed to load block iterator: " + e.getMessage(), this); } catch (IOException e) { LOG.warn("{}: failed to load block iterator.", this, e); } if (iter == null) { iter = volume.newBlockIterator(bpid, BLOCK_ITERATOR_NAME); LOG.trace("{}: created new block iterator for {}.", this, bpid); } iter.setMaxStalenessMs(conf.maxStalenessMs); blockIters.add(iter); notify(); } /** * Disallow the scanner from scanning the given block pool. * * @param bpid The block pool id. */ public synchronized void disableBlockPoolId(String bpid) { Iterator<BlockIterator> i = blockIters.iterator(); while (i.hasNext()) { BlockIterator iter = i.next(); if (iter.getBlockPoolId().equals(bpid)) { LOG.trace("{}: disabling scanning on block pool {}", this, bpid); i.remove(); IOUtils.cleanup(null, iter); if (curBlockIter == iter) { curBlockIter = null; } notify(); return; } } LOG.warn("{}: can't remove block pool {}, because it was never " + "added.", this, bpid); } @VisibleForTesting Statistics getStatistics() { synchronized (stats) { return new Statistics(stats); } } }
/*
 * Copyright 2012-2017 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.boot.autoconfigure.web;

import java.io.File;

import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;

import org.springframework.boot.autoconfigure.context.PropertyPlaceholderAutoConfiguration;
import org.springframework.boot.context.embedded.AbstractEmbeddedServletContainerFactory;
import org.springframework.boot.context.embedded.AnnotationConfigEmbeddedWebApplicationContext;
import org.springframework.boot.context.embedded.ConfigurableEmbeddedServletContainer;
import org.springframework.boot.context.embedded.EmbeddedServletContainerCustomizer;
import org.springframework.boot.context.embedded.EmbeddedServletContainerCustomizerBeanPostProcessor;
import org.springframework.boot.context.embedded.EmbeddedServletContainerFactory;
import org.springframework.boot.context.embedded.jetty.JettyEmbeddedServletContainerFactory;
import org.springframework.boot.context.embedded.tomcat.TomcatEmbeddedServletContainerFactory;
import org.springframework.boot.context.embedded.undertow.UndertowEmbeddedServletContainerFactory;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.boot.test.util.EnvironmentTestUtils;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;

/**
 * Integration tests for {@link DefaultServletContainerCustomizer}.
 *
 * @author Dave Syer
 * @author Ivan Sopov
 */
public class DefaultServletContainerCustomizerIntegrationTests {

	// NOTE(review): static - presumably exposed as a bean by the @Configuration
	// classes registered below (not visible in this chunk); confirm before changing.
	private static AbstractEmbeddedServletContainerFactory containerFactory;

	@Rule
	public ExpectedException thrown = ExpectedException.none();

	private AnnotationConfigEmbeddedWebApplicationContext context;

	@Before
	public void init() {
		containerFactory = mock(AbstractEmbeddedServletContainerFactory.class);
	}

	@After
	public void close() {
		// Close the refreshed context so each test starts from a clean slate.
		if (this.context != null) {
			this.context.close();
		}
	}

	@Test
	public void createFromConfigClass() throws Exception {
		this.context = new AnnotationConfigEmbeddedWebApplicationContext();
		this.context.register(Config.class,
				PropertyPlaceholderAutoConfiguration.class);
		EnvironmentTestUtils.addEnvironment(this.context, "server.port:9000");
		this.context.refresh();
		ServerProperties server = this.context.getBean(ServerProperties.class);
		assertThat(server).isNotNull();
		// server.port from the environment must be bound and pushed to the factory.
		assertThat(server.getPort().intValue()).isEqualTo(9000);
		verify(containerFactory).setPort(9000);
	}

	@Test
	public void tomcatProperties() throws Exception {
		containerFactory = mock(TomcatEmbeddedServletContainerFactory.class);
		this.context = new AnnotationConfigEmbeddedWebApplicationContext();
		this.context.register(Config.class,
				PropertyPlaceholderAutoConfiguration.class);
		EnvironmentTestUtils.addEnvironment(this.context,
				"server.tomcat.basedir:target/foo", "server.port:9000");
		this.context.refresh();
		ServerProperties server = this.context.getBean(ServerProperties.class);
		assertThat(server).isNotNull();
		// Tomcat-specific properties are bound under server.tomcat.*
		assertThat(server.getTomcat().getBasedir()).isEqualTo(new File("target/foo"));
		verify(containerFactory).setPort(9000);
	}

	@Test
	public void customizeWithJettyContainerFactory() throws Exception {
		this.context = new AnnotationConfigEmbeddedWebApplicationContext();
		this.context.register(CustomJettyContainerConfig.class,
				PropertyPlaceholderAutoConfiguration.class);
		this.context.refresh();
		containerFactory = this.context
				.getBean(AbstractEmbeddedServletContainerFactory.class);
		ServerProperties server = this.context.getBean(ServerProperties.class);
		assertThat(server).isNotNull();
		// The server.port environment property was not explicitly set so the container
		// factory should take precedence...
		assertThat(containerFactory.getPort()).isEqualTo(3000);
	}

	@Test
	public void customizeWithUndertowContainerFactory() throws Exception {
		this.context = new AnnotationConfigEmbeddedWebApplicationContext();
		this.context.register(CustomUndertowContainerConfig.class,
				PropertyPlaceholderAutoConfiguration.class);
		this.context.refresh();
		containerFactory = this.context
				.getBean(AbstractEmbeddedServletContainerFactory.class);
		ServerProperties server = this.context.getBean(ServerProperties.class);
		assertThat(server).isNotNull();
		assertThat(containerFactory.getPort()).isEqualTo(3000);
	}

	@Test
	public void customizeTomcatWithCustomizer() throws Exception {
		containerFactory = mock(TomcatEmbeddedServletContainerFactory.class);
		this.context = new AnnotationConfigEmbeddedWebApplicationContext();
		this.context.register(Config.class, CustomizeConfig.class,
				PropertyPlaceholderAutoConfiguration.class);
		this.context.refresh();
		ServerProperties server = this.context.getBean(ServerProperties.class);
		assertThat(server).isNotNull();
		// The server.port environment property was not explicitly set so the container
		// customizer should take precedence...
verify(containerFactory).setPort(3000); } @Configuration @EnableConfigurationProperties(ServerProperties.class) protected static class Config { @Bean public DefaultServletContainerCustomizer defaultServletContainerCustomizer(ServerProperties properties) { return new DefaultServletContainerCustomizer(properties); } @Bean public EmbeddedServletContainerFactory containerFactory() { return DefaultServletContainerCustomizerIntegrationTests.containerFactory; } @Bean public EmbeddedServletContainerCustomizerBeanPostProcessor embeddedServletContainerCustomizerBeanPostProcessor() { return new EmbeddedServletContainerCustomizerBeanPostProcessor(); } } @Configuration @EnableConfigurationProperties(ServerProperties.class) protected static class CustomJettyContainerConfig { @Bean public EmbeddedServletContainerFactory containerFactory() { JettyEmbeddedServletContainerFactory factory = new JettyEmbeddedServletContainerFactory(); factory.setPort(3000); return factory; } @Bean public EmbeddedServletContainerCustomizerBeanPostProcessor embeddedServletContainerCustomizerBeanPostProcessor() { return new EmbeddedServletContainerCustomizerBeanPostProcessor(); } } @Configuration @EnableConfigurationProperties(ServerProperties.class) protected static class CustomUndertowContainerConfig { @Bean public EmbeddedServletContainerFactory containerFactory() { UndertowEmbeddedServletContainerFactory factory = new UndertowEmbeddedServletContainerFactory(); factory.setPort(3000); return factory; } @Bean public EmbeddedServletContainerCustomizerBeanPostProcessor embeddedServletContainerCustomizerBeanPostProcessor() { return new EmbeddedServletContainerCustomizerBeanPostProcessor(); } } @Configuration protected static class CustomizeConfig { @Bean public EmbeddedServletContainerCustomizer containerCustomizer() { return new EmbeddedServletContainerCustomizer() { @Override public void customize(ConfigurableEmbeddedServletContainer container) { container.setPort(3000); } }; } } }
/* ***** BEGIN LICENSE BLOCK *****
   Version: Apache 2.0/GPL 3.0/LGPL 3.0

   CCT - Computational Chemistry Tools
   Jamberoo - Java Molecules Editor

   Copyright 2008-2015 Dr. Vladislav Vasilyev

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.

   Contributor(s):
     Dr. Vladislav Vasilyev <vvv900@gmail.com> (original author)

   Alternatively, the contents of this file may be used under the terms of
   either the GNU General Public License Version 2 or later (the "GPL"), or
   the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
   in which case the provisions of the GPL or the LGPL are applicable instead
   of those above. If you wish to allow use of your version of this file only
   under the terms of either the GPL or the LGPL, and not to allow others to
   use your version of this file under the terms of the Apache 2.0, indicate
   your decision by deleting the provisions above and replace them with the
   notice and other provisions required by the GPL or the LGPL. If you do not
   delete the provisions above, a recipient may use your version of this file
   under the terms of any one of the Apache 2.0, the GPL or the LGPL.

   ***** END LICENSE BLOCK *****/
package cct.math;

import cct.Constants;
import cct.interfaces.AtomInterface;
import cct.interfaces.MoleculeInterface;

/**
 * <p>Title: Computational Chemistry Tookit</p>
 *
 * <p>Description: Static helpers for crystallographic lattice math: converting
 * between lattice vectors and lattice parameters (a, b, c, alpha, beta, gamma),
 * building a Cartesian-to-fractional transformation matrix, and converting
 * fractional coordinates to Cartesian ones.</p>
 *
 * <p>Copyright: Copyright (c) 2005-2010 Dr. Vladislav Vassiliev</p>
 *
 * <p>Company: ANU</p>
 *
 * @author not attributable
 * @version 1.0
 */
public class Crystal {

  public static final double RADIANS_TO_DEGREES = 180.0 / Math.PI;

  protected Crystal() {
  }

  /**
   * Clamps {@code cosine} into [-1, 1] before taking the arc cosine, then converts
   * to degrees. Floating-point rounding in the dot-product/norm arithmetic can push
   * a mathematically valid cosine slightly outside the domain of
   * {@link Math#acos(double)}, which would otherwise yield NaN.
   *
   * @param cosine nominal cosine of an angle (may be marginally outside [-1, 1])
   * @return the angle in degrees
   */
  private static double acosDegrees(double cosine) {
    if (cosine > 1.0) {
      cosine = 1.0;
    } else if (cosine < -1.0) {
      cosine = -1.0;
    }
    return Math.acos(cosine) * RADIANS_TO_DEGREES;
  }

  /**
   * Derives the six lattice parameters from three lattice vectors.
   *
   * @param latticeVectors 3x3 array; row i is lattice vector i (a, b, c)
   * @return {a, b, c, alpha, beta, gamma} with lengths in the input units and
   *         angles in degrees (alpha between b &amp; c, beta between a &amp; c,
   *         gamma between a &amp; b)
   */
  public static double[] latticeParamFromLatticeVectors(double[][] latticeVectors) {
    double[] latticeParameters = new double[6];
    double[] d = new double[3];

    for (int i = 0; i < 3; i++) {
      d[i] = Math.sqrt(latticeVectors[i][0] * latticeVectors[i][0]
          + latticeVectors[i][1] * latticeVectors[i][1]
          + latticeVectors[i][2] * latticeVectors[i][2]);
      // Guard against a (near-)zero vector: substitute unit length so the
      // angle formulas below do not divide by ~0. Original convention kept.
      if (d[i] < 0.01) {
        d[i] = 1.0;
      }
      latticeParameters[i] = d[i];
    }

    // --- alpha (between b & c)
    latticeParameters[3] = acosDegrees(
        (latticeVectors[2][0] * latticeVectors[1][0]
            + latticeVectors[2][1] * latticeVectors[1][1]
            + latticeVectors[2][2] * latticeVectors[1][2]) / (d[2] * d[1]));

    // --- beta (between a & c )
    latticeParameters[4] = acosDegrees(
        (latticeVectors[0][0] * latticeVectors[2][0]
            + latticeVectors[0][1] * latticeVectors[2][1]
            + latticeVectors[0][2] * latticeVectors[2][2]) / (d[0] * d[2]));

    // --- gamma (between a & b )
    latticeParameters[5] = acosDegrees(
        (latticeVectors[0][0] * latticeVectors[1][0]
            + latticeVectors[0][1] * latticeVectors[1][1]
            + latticeVectors[0][2] * latticeVectors[1][2]) / (d[0] * d[1]));

    return latticeParameters;
  }

  /**
   * Builds default orthorhombic-style lattice parameters (all angles 90 degrees)
   * that enclose the molecule's bounding box with {@code space} padding on every side.
   *
   * @param molec molecule to enclose; may be null or empty, in which case a unit
   *              box padded by {@code space} is returned
   * @param space padding added on each side of the bounding box; non-positive
   *              values are replaced by a small positive default (0.001)
   * @return {a, b, c, 90, 90, 90}
   */
  public static double[] getDefaultLatticeParameters(MoleculeInterface molec, double space) {
    double[] latticeParameters = new double[6];
    latticeParameters[3] = 90.0;
    latticeParameters[4] = 90.0;
    latticeParameters[5] = 90.0;

    // Sanitize the padding BEFORE the empty-molecule early return; the original
    // code validated it afterwards, so a negative padding could produce negative
    // box lengths for a null/empty molecule.
    if (space < 0) {
      space = 0.001;
    }

    if (molec == null || molec.getNumberOfAtoms() < 1) {
      latticeParameters[0] = 1.0 + 2.0 * space;
      latticeParameters[1] = 1.0 + 2.0 * space;
      latticeParameters[2] = 1.0 + 2.0 * space;
      return latticeParameters;
    }

    AtomInterface atom = molec.getAtomInterface(0);
    double xMin = atom.getX();
    double xMax = atom.getX();
    double yMin = atom.getY();
    double yMax = atom.getY();
    double zMin = atom.getZ();
    double zMax = atom.getZ();

    // Accumulate the axis-aligned bounding box over all atoms.
    for (int i = 1; i < molec.getNumberOfAtoms(); i++) {
      atom = molec.getAtomInterface(i);
      if (xMin > atom.getX()) {
        xMin = atom.getX();
      } else if (xMax < atom.getX()) {
        xMax = atom.getX();
      }
      if (yMin > atom.getY()) {
        yMin = atom.getY();
      } else if (yMax < atom.getY()) {
        yMax = atom.getY();
      }
      if (zMin > atom.getZ()) {
        zMin = atom.getZ();
      } else if (zMax < atom.getZ()) {
        zMax = atom.getZ();
      }
    }

    latticeParameters[0] = xMax - xMin + 2.0 * space;
    latticeParameters[1] = yMax - yMin + 2.0 * space;
    latticeParameters[2] = zMax - zMin + 2.0 * space;
    latticeParameters[3] = 90.0;
    latticeParameters[4] = 90.0;
    latticeParameters[5] = 90.0;
    return latticeParameters;
  }

  /**
   * Builds the (lower-triangular) transformation matrix from Cartesian to
   * fractional coordinates for the given lattice parameters.
   *
   * <p>NOTE(review): for degenerate angle combinations the {@code factor} term
   * goes to 0 and the last matrix row becomes infinite/NaN; callers are expected
   * to have validated the parameters (e.g. via
   * {@link #validateLatticeParameters(double, double, double, double, double, double)})
   * first — confirm against call sites.</p>
   *
   * @param latticePars {a, b, c, alpha, beta, gamma} with angles in degrees
   * @return 3x3 transformation matrix
   */
  public static double[][] getCartesianToFractionalTransMatrix(double[] latticePars) {
    double[][] matrix = new double[3][3];

    double a = latticePars[0];
    double b = latticePars[1];
    double c = latticePars[2];
    double alpha = latticePars[3] * Constants.DEGREES_TO_RADIANS;
    double beta = latticePars[4] * Constants.DEGREES_TO_RADIANS;
    double gamma = latticePars[5] * Constants.DEGREES_TO_RADIANS;

    // "factor" is V / (a*b*c): the unit-cell volume normalized by the edge lengths.
    double factor = Math.sqrt(1.0 - Math.cos(alpha) * Math.cos(alpha)
        - Math.cos(beta) * Math.cos(beta)
        - Math.cos(gamma) * Math.cos(gamma)
        + 2.0 * Math.cos(alpha) * Math.cos(beta) * Math.cos(gamma));

    matrix[0][0] = 1.0 / a;
    matrix[0][1] = 0;
    matrix[0][2] = 0;

    matrix[1][0] = -Math.cos(gamma) / (a * Math.sin(gamma));
    matrix[1][1] = 1.0 / (b * Math.sin(gamma));
    matrix[1][2] = 0;

    matrix[2][0] = (Math.cos(alpha) * Math.cos(gamma) - Math.cos(beta)) / (a * Math.sin(gamma) * factor);
    matrix[2][1] = (Math.cos(beta) * Math.cos(gamma) - Math.cos(alpha)) / (b * Math.sin(gamma) * factor);
    matrix[2][2] = Math.sin(gamma) / (c * factor);

    return matrix;
  }

  /**
   * Converts fractional coordinates to Cartesian ones: cart = frac * latticeVectors.
   *
   * @param fractional     nCenters x 3 fractional coordinates
   * @param nCenters       number of centers (rows) to convert
   * @param latticeVectors 3x3 array; row i is lattice vector i
   * @return nCenters x 3 Cartesian coordinates
   * @throws Exception declared for interface compatibility with callers
   */
  public static double[][] getCartesianFromFractional(double[][] fractional, int nCenters,
      double latticeVectors[][]) throws Exception {
    double[][] coords = new double[nCenters][3];
    for (int i = 0; i < nCenters; i++) {
      coords[i][0] = fractional[i][0] * latticeVectors[0][0]
          + fractional[i][1] * latticeVectors[1][0]
          + fractional[i][2] * latticeVectors[2][0];
      coords[i][1] = fractional[i][0] * latticeVectors[0][1]
          + fractional[i][1] * latticeVectors[1][1]
          + fractional[i][2] * latticeVectors[2][1];
      coords[i][2] = fractional[i][0] * latticeVectors[0][2]
          + fractional[i][1] * latticeVectors[1][2]
          + fractional[i][2] * latticeVectors[2][2];
    }
    return coords;
  }

  /**
   * Validates lattice parameters. Alpha, beta and gamma are expected in degrees.
   * Lengths must be strictly positive; angles must lie in (0, 180].
   *
   * @param a double
   * @param b double
   * @param c double
   * @param alpha double
   * @param beta double
   * @param gamma double
   * @throws Exception if any parameter is out of range
   */
  public static void validateLatticeParameters(double a, double b, double c,
      double alpha, double beta, double gamma) throws Exception {
    // Messages fixed: the checks reject 0 as well, so "must be greater than 0"
    // (the old text said "cannot be less than 0", which did not match the test).
    if (a <= 0.0) {
      throw new Exception("Lattice parameter \"a\" must be greater than 0. Got " + a);
    }
    if (b <= 0.0) {
      throw new Exception("Lattice parameter \"b\" must be greater than 0. Got " + b);
    }
    if (c <= 0.0) {
      throw new Exception("Lattice parameter \"c\" must be greater than 0. Got " + c);
    }
    if (alpha <= 0.0 || alpha > 180.0) {
      throw new Exception("Lattice parameter \"alpha\" should be 0 < alpha < 180. Got " + alpha);
    }
    if (beta <= 0.0 || beta > 180.0) {
      throw new Exception("Lattice parameter \"beta\" should be 0 < beta < 180. Got " + beta);
    }
    if (gamma <= 0.0 || gamma > 180.0) {
      throw new Exception("Lattice parameter \"gamma\" should be 0 < gamma < 180. Got " + gamma);
    }
  }
}
package logical;

import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Date;

import javax.swing.ImageIcon;
import javax.swing.JOptionPane;

import calculator_views.HistoryView;

/**
 * <P>
 *
 * @author Ivan Guerra
 * @date July 18, 2014
 * @version 1.0 This class contains the lists of processed data and their
 *          results as well as functions associated with accessing and
 *          manipulating all data used within the Calculator program up to and
 *          including aiding format input for the
 *          <code>CalculatorController</code> class.
 *          <P>
 */
public class DataModel {

	/**
	 * A list containing recent Strings of input or the expressions entered by
	 * the user.
	 */
	private ArrayList<String> historyOfInput = new ArrayList<String>();

	/** A list containing the values of recently calculated expressions. */
	private ArrayList<BigDecimal> historyOfResults = new ArrayList<BigDecimal>();

	/** A list containing all action listeners associated with the views. */
	private ArrayList<ActionListener> listenerList = new ArrayList<>();

	/**
	 * A default constructor for the <code>DataModel</code> class.
	 */
	public DataModel() {
	}

	/**
	 * An accessor method for <code>historyOfInput</code>.
	 *
	 * @return A list containing the most recent expressions entered.
	 */
	public ArrayList<String> getHistoryOfInput() {
		return historyOfInput;
	}

	/**
	 * An accessor method for <code>historyOfResults</code>.
	 *
	 * @return A list containing the most recent expression values calculated.
	 */
	public ArrayList<BigDecimal> getHistoryOfResults() {
		return historyOfResults;
	}

	/**
	 * A mutator method for <code>historyOfInput</code>.
	 *
	 * @param inputLine
	 *            The string containing the users most recently entered
	 *            expression.
	 */
	public void addToHistoryOfInput(String inputLine) {
		this.historyOfInput.add(inputLine);
	}

	/**
	 * A mutator method for <code>historyOfResults</code>.
	 *
	 * @param result
	 *            A result to be added to the historyOfResults list.
	 */
	public void addToHistoryOfResults(BigDecimal result) {
		this.historyOfResults.add(result);
	}

	/**
	 * Reads a file specified by the user containing single expressions on each
	 * line (no blank lines in between). The input lines are then added to the
	 * <code>historyOfInput</code> list.
	 *
	 * <p>Uses try-with-resources so the reader is closed even when an
	 * {@link IOException} is thrown mid-read (the original code leaked the
	 * reader on that path).
	 *
	 * @param userFile
	 *            A file containing unevaluated mathematical expressions
	 *            supplied by the user.
	 */
	public void loadExpressionsFromFile(File userFile) {
		try (BufferedReader r = new BufferedReader(new FileReader(userFile))) {
			String currentExpression;
			while ((currentExpression = r.readLine()) != null) {
				// Skip blank lines; format and keep everything else.
				if (!currentExpression.isEmpty()) {
					historyOfInput.add(formatInput(currentExpression));
				}
			}
			// Preserve the original behavior of purging a stray empty entry.
			historyOfInput.remove("");
		} catch (IOException errorInFormat) {
			JOptionPane.showMessageDialog(null,
					"Please be sure to select a file with the appropriate format.\n"
							+ "See help for more details.",
					"File Format Error", JOptionPane.ERROR_MESSAGE,
					new ImageIcon(getClass().getResource("/resources/error.png")));
		}
	}

	/**
	 * Saves the data stored within <code>historyOfInput</code> and
	 * <code>historyOfResults</code> to a file specified by the user. The data
	 * is more easily output in a reasonable format of expression result using
	 * the recent history views JTextField's contents.
	 *
	 * <p>Uses try-with-resources so the writer is flushed and closed even when
	 * an {@link IOException} is thrown mid-write.
	 *
	 * @param outputView
	 *            The recent history view containing all expressions and their
	 *            resulting values.
	 *
	 * @param userFile
	 *            A file specified by the user.
	 */
	public void saveHistoryToFile(HistoryView outputView, File userFile) {
		try (BufferedWriter w = new BufferedWriter(new FileWriter(userFile))) {
			Date timeStamp = new Date(System.currentTimeMillis());
			String newLine = System.getProperty("line.separator");
			// Simple header: title + timestamp + blank line.
			String header = "Calculator History" + newLine
					+ String.valueOf(timeStamp) + newLine + newLine;
			String textToBeWritten = outputView.getJtaOutputHistory().getText();

			w.write(header + textToBeWritten);

			JOptionPane.showMessageDialog(outputView, "Done.", "Save Successful",
					JOptionPane.PLAIN_MESSAGE,
					new ImageIcon(getClass().getResource("/resources/check.png")));
		} catch (IOException errorReading) {
			JOptionPane.showMessageDialog(null,
					"An unexpected error has occurred. Please try again.",
					"Unexpected Error", JOptionPane.ERROR_MESSAGE,
					new ImageIcon(getClass().getResource("/resources/error.png")));
		}
	}

	/**
	 * A mutator method for the <code>listenerList</code>.
	 *
	 * @param listener
	 *            An ActionListener to be added to the list.
	 */
	public void addToListenerList(ActionListener listener) {
		this.listenerList.add(listener);
	}

	/**
	 * A mutator method for the <code>listenerList</code>.
	 *
	 * @param listener
	 *            An ActionListener to be removed from the list.
	 */
	public void removeFromListenerList(ActionListener listener) {
		this.listenerList.remove(listener);
	}

	/**
	 * A method used to format the input expression before being evaluated.
	 * <P>
	 * Algorithm:<br>
	 * If the user entered the character 'x' or 'X', replace that value in the
	 * evaluated expression with the character '*'. Also replace all '\u00F7'
	 * (division sign) symbols with '/'.<br>
	 * </P>
	 *
	 * @param inputLine
	 *            The line of text to be edited.
	 *
	 * @return A String containing the edited text with all 'x'/'X'(s) replaced.
	 */
	public String formatInput(String inputLine) {
		// Literal replacements; no regex needed (replace() avoids regex parsing).
		return inputLine.replace("\u00F7", "/")
				.replace("x", "*")
				.replace("X", "*");
	}

	/**
	 * Alerts all listeners in <code>listenerList</code> of any updates or
	 * events that have occurred that could effect supporting views.
	 *
	 * @param e
	 *            An action event that has been fired.
	 */
	public void processEvent(ActionEvent e) {
		// Iterating directly is safe as long as listeners are not added/removed
		// from within actionPerformed (same constraint as the original loop).
		for (ActionListener l : listenerList) {
			l.actionPerformed(e);
		}
	}
}
/*
 * Copyright 2005 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.drools.core.reteoo;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.drools.core.common.InternalFactHandle;
import org.drools.core.common.InternalWorkingMemory;
import org.drools.core.common.ReteEvaluator;
import org.drools.core.common.RuleBasePartitionId;
import org.drools.core.common.UpdateContext;
import org.drools.core.impl.RuleBase;
import org.drools.core.reteoo.builder.BuildContext;
import org.drools.core.rule.EntryPointId;
import org.drools.core.spi.ObjectType;
import org.drools.core.spi.PropagationContext;
import org.drools.core.util.bitmask.BitMask;

/**
 * The Rete-OO network.
 *
 * The Rete class is the root <code>Object</code>. All objects are asserted into
 * the Rete node where it propagates to all matching ObjectTypeNodes.
 *
 * The first time an instance of a Class type is asserted it does a full
 * iteration of all ObjectTypeNodes looking for matches, any matches are
 * then cached in a HashMap which is used for future assertions.
 *
 * While Rete extends ObjectSource and implements ObjectSink it nulls the
 * methods attach(), remove() and updateNewNode() as this is the root node
 * so they are not applicable.
 *
 * @see ObjectTypeNode
 */
public class Rete extends ObjectSource
    implements
    ObjectSink {

    // ------------------------------------------------------------
    // Instance members
    // ------------------------------------------------------------
    private static final long serialVersionUID = 510l;

    // Child EntryPointNodes keyed by entry point id; synchronized wrapper,
    // populated via addObjectSink() during network construction.
    private Map<EntryPointId, EntryPointNode> entryPoints;

    // Owning rule base; transient because the network is re-wired on deserialization.
    private transient RuleBase kBase;

    public Rete() {
        this( null );
    }

    // ------------------------------------------------------------
    // Constructors
    // ------------------------------------------------------------
    public Rete(RuleBase kBase) {
        // id 0, main partition; partition-splitting enabled only for multithreaded evaluation.
        super( 0, RuleBasePartitionId.MAIN_PARTITION, kBase != null && kBase.getConfiguration().isMultithreadEvaluation() );
        this.entryPoints = Collections.synchronizedMap( new HashMap<EntryPointId, EntryPointNode>() );
        this.kBase = kBase;
        hashcode = calculateHashCode();
    }

    public short getType() {
        return NodeTypeEnums.ReteNode;
    }

    /**
     * This is the entry point into the network for all asserted Facts. Iterates a cache
     * of matching <code>ObjectTypeNode</code>s asserting the Fact. If the cache does not
     * exist it first iterates and builds the cache.
     *
     * @param factHandle
     *            The FactHandle of the fact to assert
     * @param context
     *            The <code>PropagationContext</code> of the <code>WorkingMemory</code> action
     * @param reteEvaluator
     *            The working memory session.
     */
    public void assertObject(final InternalFactHandle factHandle,
                             final PropagationContext context,
                             final ReteEvaluator reteEvaluator) {
        // Route the fact to the EntryPointNode named by the propagation context.
        EntryPointId entryPoint = context.getEntryPoint();
        EntryPointNode node = this.entryPoints.get( entryPoint );
        // getOrCreateObjectTypeConf: lazily builds the per-class ObjectTypeNode cache.
        ObjectTypeConf typeConf = reteEvaluator.getEntryPoint( entryPoint.getEntryPointId() )
                .getObjectTypeConfigurationRegistry().getOrCreateObjectTypeConf( entryPoint, factHandle.getObject() );
        node.assertObject( factHandle, context, typeConf, reteEvaluator );
    }

    /**
     * Retract a fact object from this <code>RuleBase</code> and the specified
     * <code>WorkingMemory</code>.
     *
     * @param handle
     *            The handle of the fact to retract.
     * @param reteEvaluator
     *            The working memory session.
     */
    public void retractObject(final InternalFactHandle handle,
                              final PropagationContext context,
                              final ReteEvaluator reteEvaluator) {
        EntryPointId entryPoint = context.getEntryPoint();
        EntryPointNode node = this.entryPoints.get( entryPoint );
        // Note: plain getObjectTypeConf here (no creation) — the conf must already
        // exist for a fact that was previously asserted.
        ObjectTypeConf typeConf = reteEvaluator.getEntryPoint( entryPoint.getEntryPointId() )
                .getObjectTypeConfigurationRegistry().getObjectTypeConf( handle.getObject() );
        node.retractObject( handle, context, typeConf, reteEvaluator );
    }

    // Modifications never enter through the root node; they start at EntryPointNode.
    public void modifyObject(final InternalFactHandle factHandle,
                             final ModifyPreviousTuples modifyPreviousTuples,
                             final PropagationContext context,
                             final ReteEvaluator reteEvaluator) {
        throw new UnsupportedOperationException();
    }

    /**
     * Adds the <code>ObjectSink</code> so that it may receive
     * <code>Objects</code> propagated from this <code>ObjectSource</code>.
     *
     * @param objectSink
     *            The <code>ObjectSink</code> to receive propagated
     *            <code>Objects</code>. Rete only accepts <code>ObjectTypeNode</code>s
     *            as parameters to this method, though.
     */
    public void addObjectSink(final ObjectSink objectSink) {
        // Despite the javadoc above, the cast shows only EntryPointNodes are accepted here.
        final EntryPointNode node = (EntryPointNode) objectSink;
        entryPoints.put(node.getEntryPoint(), node);
        kBase.registerAddedEntryNodeCache(node);
    }

    public void removeObjectSink(final ObjectSink objectSink) {
        final EntryPointNode node = (EntryPointNode) objectSink;
        entryPoints.remove(node.getEntryPoint());
        // NOTE: method name misspelling ("registeRremoved...") is the actual
        // RuleBase API name — do not "fix" it here without changing RuleBase.
        kBase.registeRremovedEntryNodeCache(node);
    }

    public void doAttach( BuildContext context ) {
        // The root node is created directly with the network; attaching makes no sense.
        throw new UnsupportedOperationException( "cannot call attach() from the root Rete node" );
    }

    public void networkUpdated(UpdateContext updateContext) {
        // nothing to do
    }

    protected boolean doRemove(final RuleRemovalContext context,
                               final ReteooBuilder builder) {
        // for now, we don't remove EntryPointNodes because they might be referenced by external sources
        return false;
    }

    public EntryPointNode getEntryPointNode(final EntryPointId entryPoint) {
        return this.entryPoints.get( entryPoint );
    }

    /** Flattens the ObjectTypeNodes of every entry point into one list. */
    public List<ObjectTypeNode> getObjectTypeNodes() {
        List<ObjectTypeNode> allNodes = new ArrayList<>();
        for ( EntryPointNode node : this.entryPoints.values() ) {
            allNodes.addAll( node.getObjectTypeNodes().values() );
        }
        return allNodes;
    }

    public Map<ObjectType, ObjectTypeNode> getObjectTypeNodes(EntryPointId entryPoint) {
        return this.entryPoints.get( entryPoint ).getObjectTypeNodes();
    }

    @Override
    public RuleBase getRuleBase() {
        return this.kBase;
    }

    // Hash is derived solely from the entry-point map (see equals below);
    // computed once in the constructor and stored in the inherited 'hashcode' field.
    private int calculateHashCode() {
        return this.entryPoints.hashCode();
    }

    @Override
    public boolean equals(final Object object) {
        if (this == object) {
            return true;
        }

        if ( object == null || !(object instanceof Rete) || this.hashCode() != object.hashCode() ) {
            return false;
        }
        return this.entryPoints.equals( ((Rete)object).entryPoints );
    }

    public void updateSink(final ObjectSink sink,
                           final PropagationContext context,
                           final InternalWorkingMemory wm) {
        // nothing to do, since Rete object itself holds no facts to propagate.
    }

    public Map<EntryPointId,EntryPointNode> getEntryPointNodes() {
        return this.entryPoints;
    }

    public void byPassModifyToBetaNode(InternalFactHandle factHandle,
                                       ModifyPreviousTuples modifyPreviousTuples,
                                       PropagationContext context,
                                       ReteEvaluator reteEvaluator) {
        throw new UnsupportedOperationException( "This should never get called, as the PropertyReactive first happens at the AlphaNode" );
    }

    @Override
    public BitMask calculateDeclaredMask(Class modifiedClass, List<String> settableProperties) {
        // Property-reactivity masks are computed at AlphaNode level, never at the root.
        throw new UnsupportedOperationException();
    }
}
/*
 * Copyright 2000-2009 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.openapi.ui;

import com.intellij.CommonBundle;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.help.HelpManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Disposer;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NonNls;

import javax.swing.*;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.util.ArrayList;

/**
 * Fluent builder around {@link DialogWrapper}: configure title, center panel,
 * actions, help id and OK/Cancel operations, then call {@link #show()} (modal)
 * or {@link #showNotModal()}. The wrapper instance is created eagerly in the
 * constructor; all setters simply stash state that is applied in showImpl().
 */
public class DialogBuilder {
  private static final Logger LOG = Logger.getInstance("#com.intellij.openapi.ui.DialogBuilder");
  // Client property consulted by createJButtonForAction() to control focusability.
  @NonNls public static final String REQUEST_FOCUS_ENABLED = "requestFocusEnabled";

  private JComponent myCenterPanel;
  private String myTitle;
  private JComponent myPreferedFocusComponent;
  private String myDimensionServiceKey;
  // Lazily initialized via getActionDescriptors(); null means "defaults not yet requested".
  private ArrayList<ActionDescriptor> myActions = null;
  private final MyDialogWrapper myDialogWrapper;
  // Disposed together with the dialog in MyDialogWrapper.dispose().
  private final ArrayList<Disposable> myDisposables = new ArrayList<Disposable>();
  // Optional overrides for the built-in Cancel/OK behavior.
  private Runnable myCancelOperation = null;
  private Runnable myOkOperation = null;

  /** Shows the dialog modally and returns its exit code. */
  public int show() {
    return showImpl(true).getExitCode();
  }

  /** Shows the dialog without blocking; exit code is not available synchronously. */
  public void showNotModal() {
    showImpl(false);
  }

  public DialogBuilder(Project project) {
    myDialogWrapper = new MyDialogWrapper(project, true);
  }

  public DialogBuilder(Component parent) {
    myDialogWrapper = new MyDialogWrapper(parent, true);
  }

  // Common show path: title must have been set to a non-blank string beforehand.
  // A modal dialog is disposed immediately after show() returns.
  private MyDialogWrapper showImpl(boolean isModal) {
    LOG.assertTrue(myTitle != null && myTitle.trim().length() != 0, String.valueOf(myTitle));
    myDialogWrapper.setTitle(myTitle);
    myDialogWrapper.init();
    myDialogWrapper.setModal(isModal);
    myDialogWrapper.show();
    if (isModal) {
      myDialogWrapper.dispose();
    }
    return myDialogWrapper;
  }

  public void setCenterPanel(JComponent centerPanel) {
    myCenterPanel = centerPanel;
  }

  public void setTitle(String title) {
    myTitle = title;
  }

  public void setPreferedFocusComponent(JComponent component) {
    myPreferedFocusComponent = component;
  }

  public void setDimensionServiceKey(@NonNls String dimensionServiceKey) {
    myDimensionServiceKey = dimensionServiceKey;
  }

  /** Adds a fully custom Swing {@link Action} as a dialog button. */
  public void addAction(Action action) {
    addActionDescriptor(new CustomActionDescriptor(action));
  }

  /** Registers a descriptor and returns it so callers can customize it further. */
  public <T extends ActionDescriptor> T addActionDescriptor(T actionDescriptor) {
    getActionDescriptors().add(actionDescriptor);
    return actionDescriptor;
  }

  private ArrayList<ActionDescriptor> getActionDescriptors() {
    // Lazy init: removeAllActions() doubles as the list allocator.
    if (myActions == null) removeAllActions();
    return myActions;
  }

  public void setActionDescriptors(ActionDescriptor[] descriptors) {
    removeAllActions();
    ContainerUtil.addAll(myActions, descriptors);
  }

  public void removeAllActions() {
    myActions = new ArrayList<ActionDescriptor>();
  }

  public Window getWindow() {
    return myDialogWrapper.getWindow();
  }

  public CustomizableAction addOkAction() {
    return addActionDescriptor(new OkActionDescriptor());
  }

  public CustomizableAction addCancelAction() {
    return addActionDescriptor(new CancelActionDescriptor());
  }

  /** Adds an OK-semantics button relabeled "Close" (single-button dialogs). */
  public CustomizableAction addCloseButton() {
    CustomizableAction closeAction = addOkAction();
    closeAction.setText(CommonBundle.getCloseButtonText());
    return closeAction;
  }

  public void addDisposable(Disposable disposable) {
    myDisposables.add(disposable);
  }

  public void setButtonsAlignment(int alignment) {
    myDialogWrapper.setButtonsAlignment1(alignment);
  }

  public DialogWrapper getDialogWrapper() {
    return myDialogWrapper;
  }

  public void showModal(boolean modal) {
    if (modal) {
      show();
    }
    else {
      showNotModal();
    }
  }

  public void setHelpId(@NonNls String helpId) {
    myDialogWrapper.setHelpId(helpId);
  }

  public void setCancelOperation(Runnable runnable) {
    myCancelOperation = runnable;
  }

  public void setOkOperation(Runnable runnable) {
    myOkOperation = runnable;
  }

  public void setOkActionEnabled(final boolean isEnabled) {
    myDialogWrapper.setOKActionEnabled(isEnabled);
  }

  public CustomizableAction getOkAction() {
    return get(getActionDescriptors(), OkActionDescriptor.class);
  }

  // Finds the first registered descriptor matching aClass.
  // NOTE(review): the isAssignableFrom direction looks inverted — as written it asks
  // whether aClass is a SUBtype of the descriptor's class, so a subclass descriptor
  // would not be found by its base class. Harmless for the exact-class lookups used
  // in this file, but confirm intent before relying on it with subclassed descriptors.
  private static CustomizableAction get(final ArrayList<ActionDescriptor> actionDescriptors, final Class aClass) {
    for (ActionDescriptor actionDescriptor : actionDescriptors) {
      if (actionDescriptor.getClass().isAssignableFrom(aClass)) return (CustomizableAction)actionDescriptor;
    }
    return null;
  }

  public CustomizableAction getCancelAction() {
    return get(getActionDescriptors(), CancelActionDescriptor.class);
  }

  public Component getCenterPanel() {
    return myCenterPanel;
  }

  /** A named supplier of a Swing {@link Action} bound to a specific dialog instance. */
  public interface ActionDescriptor {
    Action getAction(DialogWrapper dialogWrapper);
  }

  /**
   * Base descriptor carrying a button label, optional mnemonic (-1 = none) and
   * a "default button" flag applied via Action.DEFAULT.
   */
  public abstract static class DialogActionDescriptor implements ActionDescriptor {
    private final String myName;
    private final Object myMnemonicChar;
    private boolean myIsDeafult = false;

    protected DialogActionDescriptor(String name, int mnemonicChar) {
      myName = name;
      // -1 is the "no mnemonic" sentinel.
      myMnemonicChar = mnemonicChar == -1 ? null : Integer.valueOf(mnemonicChar);
    }

    public Action getAction(DialogWrapper dialogWrapper) {
      Action action = createAction(dialogWrapper);
      action.putValue(Action.NAME, myName);
      if (myMnemonicChar != null) action.putValue(Action.MNEMONIC_KEY, myMnemonicChar);
      if (myIsDeafult) action.putValue(Action.DEFAULT, Boolean.TRUE);
      return action;
    }

    public void setDefault(boolean isDefault) {
      myIsDeafult = isDefault;
    }

    protected abstract Action createAction(DialogWrapper dialogWrapper);
  }

  /** Descriptor whose action simply closes the dialog with a fixed exit code. */
  public static class CloseDialogAction extends DialogActionDescriptor {
    private final int myExitCode;

    public CloseDialogAction(String name, int mnemonicChar, int exitCode) {
      super(name, mnemonicChar);
      myExitCode = exitCode;
    }

    public static CloseDialogAction createDefault(String name, int mnemonicChar, int exitCode) {
      CloseDialogAction closeDialogAction = new CloseDialogAction(name, mnemonicChar, exitCode);
      closeDialogAction.setDefault(true);
      return closeDialogAction;
    }

    protected Action createAction(final DialogWrapper dialogWrapper) {
      return new AbstractAction(){
        public void actionPerformed(ActionEvent e) {
          dialogWrapper.close(myExitCode);
        }
      };
    }
  }

  /** Minimal customization surface exposed for built-in buttons (label only). */
  public interface CustomizableAction {
    void setText(String text);
  }

  /** Wraps a caller-supplied Action unchanged. */
  public static class CustomActionDescriptor implements ActionDescriptor {
    private final Action myAction;

    public CustomActionDescriptor(Action action) {
      myAction = action;
    }

    public Action getAction(DialogWrapper dialogWrapper) {
      return myAction;
    }
  }

  // Shared base for OK/Cancel descriptors: delegates to the wrapper's built-in
  // action, optionally relabeling it.
  private abstract static class BuiltinAction implements ActionDescriptor, CustomizableAction {
    protected String myText = null;

    public void setText(String text) {
      myText = text;
    }

    public Action getAction(DialogWrapper dialogWrapper) {
      Action builtinAction = getBuiltinAction((MyDialogWrapper)dialogWrapper);
      if (myText != null) builtinAction.putValue(Action.NAME, myText);
      return builtinAction;
    }

    protected abstract Action getBuiltinAction(MyDialogWrapper dialogWrapper);
  }

  public static class OkActionDescriptor extends BuiltinAction {
    protected Action getBuiltinAction(MyDialogWrapper dialogWrapper) {
      return dialogWrapper.getOKAction();
    }
  }

  public static class CancelActionDescriptor extends BuiltinAction {
    protected Action getBuiltinAction(MyDialogWrapper dialogWrapper) {
      return dialogWrapper.getCancelAction();
    }
  }

  // Concrete DialogWrapper wired back to the builder's state. Non-static inner
  // class on purpose: it reads myCenterPanel, myOkOperation, etc. directly.
  private class MyDialogWrapper extends DialogWrapper {
    private String myHelpId = null;

    private MyDialogWrapper(Project project, boolean canBeParent) {
      super(project, canBeParent);
    }

    private MyDialogWrapper(Component parent, boolean canBeParent) {
      super(parent, canBeParent);
    }

    public void setHelpId(String helpId) {
      myHelpId = helpId;
    }

    public void init() { super.init(); }

    public Action getOKAction() {
      return super.getOKAction();
    } // Make it public

    public Action getCancelAction() {
      return super.getCancelAction();
    } // Make it public

    public void setButtonsAlignment1(int alignment) { setButtonsAlignment(alignment);}

    protected JComponent createCenterPanel() { return myCenterPanel; }

    public void dispose() {
      // Drop the focus-component reference and tear down registered disposables
      // before the standard DialogWrapper disposal.
      myPreferedFocusComponent = null;
      for (Disposable disposable : myDisposables) {
        Disposer.dispose(disposable);
      }
      super.dispose();
    }

    public JComponent getPreferredFocusedComponent() {
      if (myPreferedFocusComponent != null) return myPreferedFocusComponent;
      // Fallback: walk up from the center panel to the first container that
      // defines a focus traversal policy, then take its default component
      // (skipping non-JComponent results).
      FocusTraversalPolicy focusTraversalPolicy = null;
      Container container = myCenterPanel;
      while (container != null && (focusTraversalPolicy = container.getFocusTraversalPolicy()) == null && !(container instanceof Window)) {
        container = container.getParent();
      }
      if (focusTraversalPolicy == null) return null;
      Component component = focusTraversalPolicy.getDefaultComponent(myCenterPanel);
      while (!(component instanceof JComponent) && component != null) {
        component = focusTraversalPolicy.getComponentAfter(myCenterPanel, component);
      }
      return (JComponent)component;
    }

    protected String getDimensionServiceKey() {
      return myDimensionServiceKey;
    }

    protected JButton createJButtonForAction(Action action) {
      JButton button = super.createJButtonForAction(action);
      // Honor the REQUEST_FOCUS_ENABLED client property if the action carries it.
      Object value = action.getValue(REQUEST_FOCUS_ENABLED);
      if (value instanceof Boolean) button.setRequestFocusEnabled(((Boolean)value).booleanValue());
      return button;
    }

    public void doCancelAction() {
      if (!getCancelAction().isEnabled()) return;
      // Custom cancel operation REPLACES the default close behavior entirely.
      if (myCancelOperation != null) {
        myCancelOperation.run();
      }
      else {
        super.doCancelAction();
      }
    }

    protected void doOKAction() {
      // Custom OK operation REPLACES the default close behavior entirely.
      if (myOkOperation != null) {
        myOkOperation.run();
      }
      else {
        super.doOKAction();
      }
    }

    protected void doHelpAction() {
      if (myHelpId == null) {
        super.doHelpAction();
        return;
      }
      HelpManager.getInstance().invokeHelp(myHelpId);
    }

    protected Action[] createActions() {
      // No descriptors registered -> fall back to DialogWrapper's default buttons.
      if (myActions == null) return super.createActions();
      ArrayList<Action> actions = new ArrayList<Action>(myActions.size());
      for (ActionDescriptor actionDescriptor : myActions) {
        actions.add(actionDescriptor.getAction(this));
      }
      if (myHelpId != null) actions.add(getHelpAction());
      return actions.toArray(new Action[actions.size()]);
    }
  }
}
package com.amazonaws.eclipse.codedeploy.deploy.util;

import java.io.File;
import java.io.IOException;
import java.util.Map;
import java.util.Map.Entry;
import java.util.UUID;

import org.apache.commons.io.FileUtils;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.Path;

import com.amazonaws.eclipse.codedeploy.CodeDeployPlugin;
import com.amazonaws.eclipse.codedeploy.deploy.wizard.model.DeployProjectToCodeDeployWizardDataModel;
import com.amazonaws.eclipse.core.AwsToolkitCore;
import com.amazonaws.eclipse.core.regions.ServiceAbbreviations;
import com.amazonaws.services.codedeploy.AmazonCodeDeploy;
import com.amazonaws.services.codedeploy.model.BundleType;
import com.amazonaws.services.codedeploy.model.CreateDeploymentRequest;
import com.amazonaws.services.codedeploy.model.CreateDeploymentResult;
import com.amazonaws.services.codedeploy.model.RevisionLocation;
import com.amazonaws.services.codedeploy.model.RevisionLocationType;
import com.amazonaws.services.codedeploy.model.S3Location;
import com.amazonaws.services.s3.AmazonS3;

/**
 * Helper routines that turn an Eclipse project into an AWS CodeDeploy deployment:
 * the project is exported as a WAR, bundled together with the resolved appspec and
 * lifecycle-hook templates into a ZIP revision, uploaded to S3, and finally
 * registered via the CodeDeploy {@code CreateDeployment} API.
 */
public class DeployUtils {

    /** Utility class — static methods only, never instantiated. */
    private DeployUtils() {
    }

    /**
     * Runs the full deployment pipeline described by the wizard data model.
     *
     * @param dataModel       deployment settings gathered by the wizard (project,
     *                        application/deployment-group names, region, S3 bucket,
     *                        template parameters)
     * @param progressMonitor receives subtask labels and work units (70 units total)
     * @return the deployment ID of the newly created CodeDeploy deployment
     */
    public static String createDeployment(
            DeployProjectToCodeDeployWizardDataModel dataModel,
            IProgressMonitor progressMonitor) {
        CodeDeployPlugin.getDefault().logInfo(
                "Preparing for deployment: " + dataModel.toString());

        /*
         * (1) Export application archive WAR file
         */
        final IProject project = dataModel.getProject();

        // A random UUID keeps concurrent/repeated deployments of the same
        // application from colliding in the temp directory and in S3.
        String uuid = UUID.randomUUID().toString();
        String tempDirName = dataModel.getApplicationName() + "-" + uuid + "-deployment-dir";
        String zipFileName = dataModel.getApplicationName() + "-" + uuid + ".zip";

        File tempDir = new File(
                new File(System.getProperty("java.io.tmpdir")), tempDirName);
        File archiveContentDir = new File(tempDir, "archive-content");

        progressMonitor.subTask("Export project to WAR file...");
        String warFileRelativePath = dataModel.getTemplateModel()
                .getWarFileExportLocationWithinDeploymentArchive();
        CodeDeployPlugin.getDefault().logInfo(
                "Preparing to export project [" + project.getName() +
                "] to a WAR file [" +
                new File(archiveContentDir, warFileRelativePath).getAbsolutePath() +
                "].");

        File warFile = WTPWarUtils.exportProjectToWar(
                project, new Path(archiveContentDir.getAbsolutePath()),
                warFileRelativePath).toFile();
        CodeDeployPlugin.getDefault().logInfo(
                "WAR file created at [" + warFile.getAbsolutePath() + "]");
        progressMonitor.worked(10);

        progressMonitor.subTask("Add app-spec file and all the deployment event hooks...");
        try {
            addAppSpecFileAndEventHooks(archiveContentDir, dataModel);
        } catch (IOException e) {
            // NOTE(review): the error is reported but the pipeline carries on with an
            // incomplete archive; presumably intentional best-effort behavior — verify.
            CodeDeployPlugin.getDefault().reportException(
                    "Error when adding app-spec file and deployment event hooks.", e);
        }
        progressMonitor.worked(5);

        progressMonitor.subTask("Create the ZIP file including all the deployment artifacts...");
        File zipArchive = new File(tempDir, zipFileName);
        CodeDeployPlugin.getDefault().logInfo(
                "Preparing to bundle project artifacts into a zip file [" +
                zipArchive.getAbsolutePath() + "].");
        try {
            ZipUtils.createZipFileOfDirectory(archiveContentDir, zipArchive);
        } catch (IOException e) {
            // NOTE(review): on failure the subsequent S3 upload of the missing/partial
            // zip will fail with a less helpful error — consider aborting here instead.
            CodeDeployPlugin.getDefault().reportException(
                    "Error when creating zip archive file for the deployment.", e);
        }
        CodeDeployPlugin.getDefault().logInfo(
                "Zip file created at [" + zipArchive.getAbsolutePath() + "].");
        progressMonitor.worked(15);

        /*
         * (2) Upload to S3
         */
        String bucketName = dataModel.getBucketName();
        String keyName = zipArchive.getName();

        AmazonS3 s3Client = AwsToolkitCore.getClientFactory().getS3ClientForBucket(bucketName);

        progressMonitor.subTask("Upload ZIP file to S3...");
        CodeDeployPlugin.getDefault().logInfo(
                "Uploading zip file to S3 bucket [" + bucketName + "].");
        s3Client.putObject(bucketName, keyName, zipArchive);
        CodeDeployPlugin.getDefault().logInfo(
                "Upload succeed. [s3://" + bucketName + "/" + keyName + "]");
        progressMonitor.worked(30);

        /*
         * (3) CreateDeployment
         */
        progressMonitor.subTask("Initiate deployment...");
        CodeDeployPlugin.getDefault().logInfo(
                "Making CreateDeployment API call...");

        String endpoint = dataModel.getRegion().getServiceEndpoints()
                .get(ServiceAbbreviations.CODE_DEPLOY);
        AmazonCodeDeploy client = AwsToolkitCore.getClientFactory()
                .getCodeDeployClientByEndpoint(endpoint);

        CreateDeploymentResult result = client.createDeployment(new CreateDeploymentRequest()
                .withApplicationName(dataModel.getApplicationName())
                .withDeploymentGroupName(dataModel.getDeploymentGroupName())
                .withDeploymentConfigName(dataModel.getDeploymentConfigName())
                .withIgnoreApplicationStopFailures(dataModel.isIgnoreApplicationStopFailures())
                .withRevision(new RevisionLocation()
                        .withRevisionType(RevisionLocationType.S3)
                        .withS3Location(new S3Location()
                                .withBucket(bucketName)
                                .withKey(keyName)
                                .withBundleType(BundleType.Zip)))
                .withDescription("Deployment created from AWS Eclipse plugin"));

        CodeDeployPlugin.getDefault().logInfo(
                "Deployment submitted. Deployment ID [" + result.getDeploymentId() + "]");
        progressMonitor.worked(10);

        return result.getDeploymentId();
    }

    /**
     * Copies the resolved appspec/hook template tree into the archive directory,
     * substituting the user-supplied template parameter values into every file.
     *
     * @throws IOException if any file cannot be read or written
     */
    private static void addAppSpecFileAndEventHooks(File targetBaseDir,
            final DeployProjectToCodeDeployWizardDataModel deployDataModel) throws IOException {
        File templateCopySourceRoot = deployDataModel.getTemplateModel()
                .getResolvedTemplateBasedir();

        copyDirectoryWithTransformationHandler(templateCopySourceRoot, targetBaseDir,
                new FileTransformationHandler() {

                    @Override
                    public void copyFromFileToFile(File src, File target) throws IOException {
                        String srcContent = FileUtils.readFileToString(src);
                        String transformedContent = substituteUserConfiguration(
                                srcContent, deployDataModel.getTemplateParameterValues());
                        FileUtils.writeStringToFile(target, transformedContent);
                    }
                });
    }

    /**
     * Replaces every anchor-text key found in {@code originalContent} with its
     * configured value. Plain-text replacement; later anchors see earlier results.
     */
    private static String substituteUserConfiguration(String originalContent,
            Map<String, String> paramAnchorTextAndValues) {
        for (Entry<String, String> entry : paramAnchorTextAndValues.entrySet()) {
            String anchorText = entry.getKey();
            String value = entry.getValue();
            originalContent = originalContent.replace(anchorText, value);
        }
        return originalContent;
    }

    /**
     * Recursively copies {@code srcDir} into {@code destDir}, delegating each
     * regular-file copy to the supplied handler (which may transform content).
     *
     * @throws IOException if the destination is not a writable directory, cannot be
     *                     created, or the source directory cannot be listed
     */
    private static void copyDirectoryWithTransformationHandler(File srcDir, File destDir,
            FileTransformationHandler handler) throws IOException {
        if (destDir.exists()) {
            if (!destDir.isDirectory()) {
                throw new IOException("Destination '" + destDir + "' exists but is not a directory");
            }
        } else {
            if (!destDir.mkdirs()) {
                throw new IOException("Destination '" + destDir + "' directory cannot be created");
            }
        }
        if (!destDir.canWrite()) {
            throw new IOException("Destination '" + destDir + "' cannot be written to");
        }

        // recurse
        File[] files = srcDir.listFiles();
        if (files == null) { // null if security restricted
            throw new IOException("Failed to list contents of " + srcDir);
        }
        for (File file : files) {
            File copiedFile = new File(destDir, file.getName());
            if (file.isDirectory()) {
                copyDirectoryWithTransformationHandler(file, copiedFile, handler);
            } else {
                handler.copyFromFileToFile(file, copiedFile);
            }
        }
    }

    /** Strategy for copying one file, allowing content transformation in transit. */
    private interface FileTransformationHandler {
        void copyFromFileToFile(File src, File target) throws IOException;
    }
}
/*
 * Copyright (C) 2008 The Guava Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.common.primitives;

import static com.google.common.truth.Truth.assertThat;
import static java.lang.Float.NaN;

import com.google.common.annotations.GwtCompatible;
import com.google.common.annotations.GwtIncompatible;
import com.google.common.base.Converter;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.testing.Helpers;
import com.google.common.testing.NullPointerTester;
import com.google.common.testing.SerializableTester;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import junit.framework.TestCase;

/**
 * Unit test for {@link Floats}.
 *
 * @author Kevin Bourrillion
 */
@GwtCompatible(emulated = true)
@SuppressWarnings("cast") // redundant casts are intentional and harmless
public class FloatsTest extends TestCase {
  private static final float[] EMPTY = {};
  private static final float[] ARRAY1 = {(float) 1};
  private static final float[] ARRAY234 = {(float) 2, (float) 3, (float) 4};

  private static final float LEAST = Float.NEGATIVE_INFINITY;
  private static final float GREATEST = Float.POSITIVE_INFINITY;

  // Boundary values of interest: infinities, signed zeros, subnormals, and
  // integer-type extremes (which lose precision when widened to float).
  private static final float[] NUMBERS =
      new float[] {
        LEAST,
        -Float.MAX_VALUE,
        -1f,
        -0f,
        0f,
        1f,
        Float.MAX_VALUE,
        GREATEST,
        Float.MIN_NORMAL,
        -Float.MIN_NORMAL,
        Float.MIN_VALUE,
        -Float.MIN_VALUE,
        Integer.MIN_VALUE,
        Integer.MAX_VALUE,
        Long.MIN_VALUE,
        Long.MAX_VALUE
      };

  // NUMBERS plus NaN; kept separate because NaN breaks search/equality semantics.
  private static final float[] VALUES = Floats.concat(NUMBERS, new float[] {NaN});

  public void testHashCode() {
    for (float value : VALUES) {
      assertEquals(((Float) value).hashCode(), Floats.hashCode(value));
    }
  }

  public void testIsFinite() {
    for (float value : NUMBERS) {
      assertEquals(!(Float.isInfinite(value) || Float.isNaN(value)), Floats.isFinite(value));
    }
  }

  public void testCompare() {
    for (float x : VALUES) {
      for (float y : VALUES) {
        // note: spec requires only that the sign is the same
        assertEquals(x + ", " + y, Float.valueOf(x).compareTo(y), Floats.compare(x, y));
      }
    }
  }

  public void testContains() {
    assertFalse(Floats.contains(EMPTY, (float) 1));
    assertFalse(Floats.contains(ARRAY1, (float) 2));
    assertFalse(Floats.contains(ARRAY234, (float) 1));
    assertTrue(Floats.contains(new float[] {(float) -1}, (float) -1));
    assertTrue(Floats.contains(ARRAY234, (float) 2));
    assertTrue(Floats.contains(ARRAY234, (float) 3));
    assertTrue(Floats.contains(ARRAY234, (float) 4));
    for (float value : NUMBERS) {
      assertTrue("" + value, Floats.contains(new float[] {5f, value}, value));
    }
    // NaN compares unequal to everything, including itself, so it is never "contained".
    assertFalse(Floats.contains(new float[] {5f, NaN}, NaN));
  }

  public void testIndexOf() {
    assertEquals(-1, Floats.indexOf(EMPTY, (float) 1));
    assertEquals(-1, Floats.indexOf(ARRAY1, (float) 2));
    assertEquals(-1, Floats.indexOf(ARRAY234, (float) 1));
    assertEquals(0, Floats.indexOf(new float[] {(float) -1}, (float) -1));
    assertEquals(0, Floats.indexOf(ARRAY234, (float) 2));
    assertEquals(1, Floats.indexOf(ARRAY234, (float) 3));
    assertEquals(2, Floats.indexOf(ARRAY234, (float) 4));
    assertEquals(
        1, Floats.indexOf(new float[] {(float) 2, (float) 3, (float) 2, (float) 3}, (float) 3));
    for (float value : NUMBERS) {
      assertEquals("" + value, 1, Floats.indexOf(new float[] {5f, value}, value));
    }
    // Same NaN caveat as testContains: NaN is never found.
    assertEquals(-1, Floats.indexOf(new float[] {5f, NaN}, NaN));
  }

  public void testIndexOf_arrayTarget() {
    assertEquals(0, Floats.indexOf(EMPTY, EMPTY));
    assertEquals(0, Floats.indexOf(ARRAY234, EMPTY));
    assertEquals(-1, Floats.indexOf(EMPTY, ARRAY234));
    assertEquals(-1, Floats.indexOf(ARRAY234, ARRAY1));
    assertEquals(-1, Floats.indexOf(ARRAY1, ARRAY234));
    assertEquals(0, Floats.indexOf(ARRAY1, ARRAY1));
    assertEquals(0, Floats.indexOf(ARRAY234, ARRAY234));
    assertEquals(0, Floats.indexOf(ARRAY234, new float[] {(float) 2, (float) 3}));
    assertEquals(1, Floats.indexOf(ARRAY234, new float[] {(float) 3, (float) 4}));
    assertEquals(1, Floats.indexOf(ARRAY234, new float[] {(float) 3}));
    assertEquals(2, Floats.indexOf(ARRAY234, new float[] {(float) 4}));
    assertEquals(
        1,
        Floats.indexOf(
            new float[] {(float) 2, (float) 3, (float) 3, (float) 3, (float) 3},
            new float[] {(float) 3}));
    assertEquals(
        2,
        Floats.indexOf(
            new float[] {
              (float) 2, (float) 3, (float) 2, (float) 3, (float) 4, (float) 2, (float) 3
            },
            new float[] {(float) 2, (float) 3, (float) 4}));
    assertEquals(
        1,
        Floats.indexOf(
            new float[] {
              (float) 2, (float) 2, (float) 3, (float) 4, (float) 2, (float) 3, (float) 4
            },
            new float[] {(float) 2, (float) 3, (float) 4}));
    assertEquals(
        -1,
        Floats.indexOf(
            new float[] {(float) 4, (float) 3, (float) 2},
            new float[] {(float) 2, (float) 3, (float) 4}));
    for (float value : NUMBERS) {
      assertEquals(
          "" + value,
          1,
          Floats.indexOf(new float[] {5f, value, value, 5f}, new float[] {value, value}));
    }
    assertEquals(-1, Floats.indexOf(new float[] {5f, NaN, NaN, 5f}, new float[] {NaN, NaN}));
  }

  public void testLastIndexOf() {
    assertEquals(-1, Floats.lastIndexOf(EMPTY, (float) 1));
    assertEquals(-1, Floats.lastIndexOf(ARRAY1, (float) 2));
    assertEquals(-1, Floats.lastIndexOf(ARRAY234, (float) 1));
    assertEquals(0, Floats.lastIndexOf(new float[] {(float) -1}, (float) -1));
    assertEquals(0, Floats.lastIndexOf(ARRAY234, (float) 2));
    assertEquals(1, Floats.lastIndexOf(ARRAY234, (float) 3));
    assertEquals(2, Floats.lastIndexOf(ARRAY234, (float) 4));
    assertEquals(
        3, Floats.lastIndexOf(new float[] {(float) 2, (float) 3, (float) 2, (float) 3}, (float) 3));
    for (float value : NUMBERS) {
      assertEquals("" + value, 0, Floats.lastIndexOf(new float[] {value, 5f}, value));
    }
    assertEquals(-1, Floats.lastIndexOf(new float[] {NaN, 5f}, NaN));
  }

  public void testMax_noArgs() {
    try {
      Floats.max();
      fail();
    } catch (IllegalArgumentException expected) {
    }
  }

  public void testMax() {
    assertEquals(GREATEST, Floats.max(GREATEST));
    assertEquals(LEAST, Floats.max(LEAST));
    assertEquals(
        (float) 9,
        Floats.max((float) 8, (float) 6, (float) 7, (float) 5, (float) 3, (float) 0, (float) 9));

    // Signed zeros: max must prefer +0f over -0f regardless of argument order.
    assertEquals(0f, Floats.max(-0f, 0f));
    assertEquals(0f, Floats.max(0f, -0f));
    assertEquals(GREATEST, Floats.max(NUMBERS));
    // Any NaN in the input poisons the result.
    assertTrue(Float.isNaN(Floats.max(VALUES)));
  }

  public void testMin_noArgs() {
    try {
      Floats.min();
      fail();
    } catch (IllegalArgumentException expected) {
    }
  }

  public void testMin() {
    assertEquals(LEAST, Floats.min(LEAST));
    assertEquals(GREATEST, Floats.min(GREATEST));
    assertEquals(
        (float) 0,
        Floats.min((float) 8, (float) 6, (float) 7, (float) 5, (float) 3, (float) 0, (float) 9));

    // Signed zeros: min must prefer -0f over +0f regardless of argument order.
    assertEquals(-0f, Floats.min(-0f, 0f));
    assertEquals(-0f, Floats.min(0f, -0f));
    assertEquals(LEAST, Floats.min(NUMBERS));
    assertTrue(Float.isNaN(Floats.min(VALUES)));
  }

  public void testConstrainToRange() {
    float tolerance = 1e-10f;
    assertEquals((float) 1, Floats.constrainToRange((float) 1, (float) 0, (float) 5), tolerance);
    assertEquals((float) 1, Floats.constrainToRange((float) 1, (float) 1, (float) 5), tolerance);
    assertEquals((float) 3, Floats.constrainToRange((float) 1, (float) 3, (float) 5), tolerance);
    assertEquals((float) -1, Floats.constrainToRange((float) 0, (float) -5, (float) -1), tolerance);
    assertEquals((float) 2, Floats.constrainToRange((float) 5, (float) 2, (float) 2), tolerance);
    try {
      // min > max must be rejected.
      Floats.constrainToRange((float) 1, (float) 3, (float) 2);
      fail();
    } catch (IllegalArgumentException expected) {
    }
  }

  public void testConcat() {
    assertTrue(Arrays.equals(EMPTY, Floats.concat()));
    assertTrue(Arrays.equals(EMPTY, Floats.concat(EMPTY)));
    assertTrue(Arrays.equals(EMPTY, Floats.concat(EMPTY, EMPTY, EMPTY)));
    assertTrue(Arrays.equals(ARRAY1, Floats.concat(ARRAY1)));
    // concat of a single array must still return a defensive copy.
    assertNotSame(ARRAY1, Floats.concat(ARRAY1));
    assertTrue(Arrays.equals(ARRAY1, Floats.concat(EMPTY, ARRAY1, EMPTY)));
    assertTrue(
        Arrays.equals(
            new float[] {(float) 1, (float) 1, (float) 1}, Floats.concat(ARRAY1, ARRAY1, ARRAY1)));
    assertTrue(
        Arrays.equals(
            new float[] {(float) 1, (float) 2, (float) 3, (float) 4},
            Floats.concat(ARRAY1, ARRAY234)));
  }

  public void testEnsureCapacity() {
    // When the array is already large enough the same instance is returned.
    assertSame(EMPTY, Floats.ensureCapacity(EMPTY, 0, 1));
    assertSame(ARRAY1, Floats.ensureCapacity(ARRAY1, 0, 1));
    assertSame(ARRAY1, Floats.ensureCapacity(ARRAY1, 1, 1));
    assertTrue(
        Arrays.equals(
            new float[] {(float) 1, (float) 0, (float) 0}, Floats.ensureCapacity(ARRAY1, 2, 1)));
  }

  public void testEnsureCapacity_fail() {
    try {
      Floats.ensureCapacity(ARRAY1, -1, 1);
      fail();
    } catch (IllegalArgumentException expected) {
    }
    try {
      // notice that this should even fail when no growth was needed
      Floats.ensureCapacity(ARRAY1, 1, -1);
      fail();
    } catch (IllegalArgumentException expected) {
    }
  }

  @GwtIncompatible // Float.toString returns different value in GWT.
  public void testJoin() {
    assertEquals("", Floats.join(",", EMPTY));
    assertEquals("1.0", Floats.join(",", ARRAY1));
    assertEquals("1.0,2.0", Floats.join(",", (float) 1, (float) 2));
    assertEquals("1.02.03.0", Floats.join("", (float) 1, (float) 2, (float) 3));
  }

  public void testLexicographicalComparator() {
    List<float[]> ordered =
        Arrays.asList(
            new float[] {},
            new float[] {LEAST},
            new float[] {LEAST, LEAST},
            new float[] {LEAST, (float) 1},
            new float[] {(float) 1},
            new float[] {(float) 1, LEAST},
            new float[] {GREATEST, Float.MAX_VALUE},
            new float[] {GREATEST, GREATEST},
            new float[] {GREATEST, GREATEST, GREATEST});

    Comparator<float[]> comparator = Floats.lexicographicalComparator();
    Helpers.testComparator(comparator, ordered);
  }

  @GwtIncompatible // SerializableTester
  public void testLexicographicalComparatorSerializable() {
    Comparator<float[]> comparator = Floats.lexicographicalComparator();
    assertSame(comparator, SerializableTester.reserialize(comparator));
  }

  public void testReverse() {
    testReverse(new float[] {}, new float[] {});
    testReverse(new float[] {1}, new float[] {1});
    testReverse(new float[] {1, 2}, new float[] {2, 1});
    testReverse(new float[] {3, 1, 1}, new float[] {1, 1, 3});
    testReverse(new float[] {-1, 1, -2, 2}, new float[] {2, -2, 1, -1});
  }

  // Helper: reverse a defensive copy of the whole array and compare.
  private static void testReverse(float[] input, float[] expectedOutput) {
    input = Arrays.copyOf(input, input.length);
    Floats.reverse(input);
    assertTrue(Arrays.equals(expectedOutput, input));
  }

  // Helper: reverse only [fromIndex, toIndex) of a defensive copy and compare.
  private static void testReverse(
      float[] input, int fromIndex, int toIndex, float[] expectedOutput) {
    input = Arrays.copyOf(input, input.length);
    Floats.reverse(input, fromIndex, toIndex);
    assertTrue(Arrays.equals(expectedOutput, input));
  }

  public void testReverseIndexed() {
    testReverse(new float[] {}, 0, 0, new float[] {});
    testReverse(new float[] {1}, 0, 1, new float[] {1});
    testReverse(new float[] {1, 2}, 0, 2, new float[] {2, 1});
    testReverse(new float[] {3, 1, 1}, 0, 2, new float[] {1, 3, 1});
    testReverse(new float[] {3, 1, 1}, 0, 1, new float[] {3, 1, 1});
    testReverse(new float[] {-1, 1, -2, 2}, 1, 3, new float[] {-1, -2, 1, 2});
  }

  public void testSortDescending() {
    testSortDescending(new float[] {}, new float[] {});
    testSortDescending(new float[] {1}, new float[] {1});
    testSortDescending(new float[] {1, 2}, new float[] {2, 1});
    testSortDescending(new float[] {1, 3, 1}, new float[] {3, 1, 1});
    testSortDescending(new float[] {-1, 1, -2, 2}, new float[] {2, 1, -1, -2});
    testSortDescending(
        new float[] {-1, 1, Float.NaN, -2, -0, 0, 2}, new float[] {Float.NaN, 2, 1, 0, -0, -1, -2});
  }

  private static void testSortDescending(float[] input, float[] expectedOutput) {
    input = Arrays.copyOf(input, input.length);
    Floats.sortDescending(input);
    // GWT's Arrays.equals doesn't appear to handle NaN correctly, so test each element individually
    for (int i = 0; i < input.length; i++) {
      assertEquals(0, Float.compare(expectedOutput[i], input[i]));
    }
  }

  private static void testSortDescending(
      float[] input, int fromIndex, int toIndex, float[] expectedOutput) {
    input = Arrays.copyOf(input, input.length);
    Floats.sortDescending(input, fromIndex, toIndex);
    // GWT's Arrays.equals doesn't appear to handle NaN correctly, so test each element individually
    for (int i = 0; i < input.length; i++) {
      assertEquals(0, Float.compare(expectedOutput[i], input[i]));
    }
  }

  public void testSortDescendingIndexed() {
    testSortDescending(new float[] {}, 0, 0, new float[] {});
    testSortDescending(new float[] {1}, 0, 1, new float[] {1});
    testSortDescending(new float[] {1, 2}, 0, 2, new float[] {2, 1});
    testSortDescending(new float[] {1, 3, 1}, 0, 2, new float[] {3, 1, 1});
    testSortDescending(new float[] {1, 3, 1}, 0, 1, new float[] {1, 3, 1});
    testSortDescending(new float[] {-1, -2, 1, 2}, 1, 3, new float[] {-1, 1, -2, 2});
    testSortDescending(
        new float[] {-1, 1, Float.NaN, -2, 2}, 1, 4, new float[] {-1, Float.NaN, 1, -2, 2});
  }

  @GwtIncompatible // SerializableTester
  public void testStringConverterSerialization() {
    SerializableTester.reserializeAndAssert(Floats.stringConverter());
  }

  public void testToArray() {
    // need explicit type parameter to avoid javac warning!?
    List<Float> none = Arrays.<Float>asList();
    assertTrue(Arrays.equals(EMPTY, Floats.toArray(none)));

    List<Float> one = Arrays.asList((float) 1);
    assertTrue(Arrays.equals(ARRAY1, Floats.toArray(one)));

    float[] array = {(float) 0, (float) 1, (float) 3};

    List<Float> three = Arrays.asList((float) 0, (float) 1, (float) 3);
    assertTrue(Arrays.equals(array, Floats.toArray(three)));

    assertTrue(Arrays.equals(array, Floats.toArray(Floats.asList(array))));
  }

  public void testToArray_threadSafe() {
    // Uses a collection that lies about its size to verify toArray copes with
    // concurrent-modification-like behavior.
    for (int delta : new int[] {+1, 0, -1}) {
      for (int i = 0; i < VALUES.length; i++) {
        List<Float> list = Floats.asList(VALUES).subList(0, i);
        Collection<Float> misleadingSize = Helpers.misleadingSizeCollection(delta);
        misleadingSize.addAll(list);
        float[] arr = Floats.toArray(misleadingSize);
        assertEquals(i, arr.length);
        for (int j = 0; j < i; j++) {
          assertEquals(VALUES[j], arr[j]);
        }
      }
    }
  }

  public void testToArray_withNull() {
    List<Float> list = Arrays.asList((float) 0, (float) 1, null);
    try {
      Floats.toArray(list);
      fail();
    } catch (NullPointerException expected) {
    }
  }

  public void testToArray_withConversion() {
    // toArray accepts any List<? extends Number> and widens to float.
    float[] array = {(float) 0, (float) 1, (float) 2};

    List<Byte> bytes = Arrays.asList((byte) 0, (byte) 1, (byte) 2);
    List<Short> shorts = Arrays.asList((short) 0, (short) 1, (short) 2);
    List<Integer> ints = Arrays.asList(0, 1, 2);
    List<Float> floats = Arrays.asList((float) 0, (float) 1, (float) 2);
    List<Long> longs = Arrays.asList((long) 0, (long) 1, (long) 2);
    List<Double> doubles = Arrays.asList((double) 0, (double) 1, (double) 2);

    assertTrue(Arrays.equals(array, Floats.toArray(bytes)));
    assertTrue(Arrays.equals(array, Floats.toArray(shorts)));
    assertTrue(Arrays.equals(array, Floats.toArray(ints)));
    assertTrue(Arrays.equals(array, Floats.toArray(floats)));
    assertTrue(Arrays.equals(array, Floats.toArray(longs)));
    assertTrue(Arrays.equals(array, Floats.toArray(doubles)));
  }

  public void testAsList_isAView() {
    // asList wraps the array: writes through the list are visible in the array
    // and vice versa.
    float[] array = {(float) 0, (float) 1};
    List<Float> list = Floats.asList(array);
    list.set(0, (float) 2);
    assertTrue(Arrays.equals(new float[] {(float) 2, (float) 1}, array));
    array[1] = (float) 3;
    assertThat(list).containsExactly((float) 2, (float) 3).inOrder();
  }

  public void testAsList_toArray_roundTrip() {
    float[] array = {(float) 0, (float) 1, (float) 2};
    List<Float> list = Floats.asList(array);
    float[] newArray = Floats.toArray(list);

    // Make sure it returned a copy
    list.set(0, (float) 4);
    assertTrue(Arrays.equals(new float[] {(float) 0, (float) 1, (float) 2}, newArray));
    newArray[1] = (float) 5;
    assertEquals((float) 1, (float) list.get(1));
  }

  // This test stems from a real bug found by andrewk
  public void testAsList_subList_toArray_roundTrip() {
    float[] array = {(float) 0, (float) 1, (float) 2, (float) 3};
    List<Float> list = Floats.asList(array);
    assertTrue(
        Arrays.equals(new float[] {(float) 1, (float) 2}, Floats.toArray(list.subList(1, 3))));
    assertTrue(Arrays.equals(new float[] {}, Floats.toArray(list.subList(2, 2))));
  }

  public void testAsListEmpty() {
    assertSame(Collections.emptyList(), Floats.asList(EMPTY));
  }

  /**
   * A reference implementation for {@code tryParse} that just catches the exception from {@link
   * Float#valueOf}.
   */
  private static Float referenceTryParse(String input) {
    // Float.valueOf tolerates surrounding whitespace; tryParse must not.
    if (input.trim().length() < input.length()) {
      return null;
    }
    try {
      return Float.valueOf(input);
    } catch (NumberFormatException e) {
      return null;
    }
  }

  @GwtIncompatible // Floats.tryParse
  private static void checkTryParse(String input) {
    assertEquals(referenceTryParse(input), Floats.tryParse(input));
  }

  @GwtIncompatible // Floats.tryParse
  private static void checkTryParse(float expected, String input) {
    assertEquals(Float.valueOf(expected), Floats.tryParse(input));
  }

  @GwtIncompatible // Floats.tryParse
  public void testTryParseHex() {
    // Cross-product of every syntactic piece of a hexadecimal float literal.
    for (String signChar : ImmutableList.of("", "+", "-")) {
      for (String hexPrefix : ImmutableList.of("0x", "0X")) {
        for (String iPart : ImmutableList.of("", "0", "1", "F", "f", "c4", "CE")) {
          for (String fPart : ImmutableList.of("", ".", ".F", ".52", ".a")) {
            for (String expMarker : ImmutableList.of("p", "P")) {
              for (String exponent : ImmutableList.of("0", "-5", "+20", "52")) {
                for (String typePart : ImmutableList.of("", "D", "F", "d", "f")) {
                  checkTryParse(
                      signChar + hexPrefix + iPart + fPart + expMarker + exponent + typePart);
                }
              }
            }
          }
        }
      }
    }
  }

  @AndroidIncompatible // slow
  @GwtIncompatible // Floats.tryParse
  public void testTryParseAllCodePoints() {
    // Exercise non-ASCII digit test cases and the like.
    char[] tmp = new char[2];
    for (int i = Character.MIN_CODE_POINT; i < Character.MAX_CODE_POINT; i++) {
      Character.toChars(i, tmp, 0);
      checkTryParse(String.copyValueOf(tmp, 0, Character.charCount(i)));
    }
  }

  @GwtIncompatible // Floats.tryParse
  public void testTryParseOfToStringIsOriginal() {
    for (float f : NUMBERS) {
      checkTryParse(f, Float.toString(f));
    }
  }

  @GwtIncompatible // Floats.tryParse
  public void testTryParseOfToHexStringIsOriginal() {
    for (float f : NUMBERS) {
      checkTryParse(f, Float.toHexString(f));
    }
  }

  @GwtIncompatible // Floats.tryParse
  public void testTryParseNaN() {
    checkTryParse("NaN");
    checkTryParse("+NaN");
    checkTryParse("-NaN");
  }

  @GwtIncompatible // Floats.tryParse
  public void testTryParseInfinity() {
    checkTryParse(Float.POSITIVE_INFINITY, "Infinity");
    checkTryParse(Float.POSITIVE_INFINITY, "+Infinity");
    checkTryParse(Float.NEGATIVE_INFINITY, "-Infinity");
  }

  private static final String[] BAD_TRY_PARSE_INPUTS = {
    "",
    "+-",
    "+-0",
    " 5",
    "32 ",
    " 55 ",
    "infinity",
    "POSITIVE_INFINITY",
    "0x9A",
    "0x9A.bE-5",
    ".",
    ".e5",
    "NaNd",
    "InfinityF"
  };

  @GwtIncompatible // Floats.tryParse
  public void testTryParseFailures() {
    for (String badInput : BAD_TRY_PARSE_INPUTS) {
      assertEquals(referenceTryParse(badInput), Floats.tryParse(badInput));
      assertNull(Floats.tryParse(badInput));
    }
  }

  @GwtIncompatible // NullPointerTester
  public void testNulls() {
    new NullPointerTester().testAllPublicStaticMethods(Floats.class);
  }

  @GwtIncompatible // Float.toString returns different value in GWT.
  public void testStringConverter_convert() {
    Converter<String, Float> converter = Floats.stringConverter();
    assertEquals((Float) 1.0f, converter.convert("1.0"));
    assertEquals((Float) 0.0f, converter.convert("0.0"));
    assertEquals((Float) (-1.0f), converter.convert("-1.0"));
    assertEquals((Float) 1.0f, converter.convert("1"));
    assertEquals((Float) 0.0f, converter.convert("0"));
    assertEquals((Float) (-1.0f), converter.convert("-1"));
    assertEquals((Float) 1e6f, converter.convert("1e6"));
    assertEquals((Float) 1e-6f, converter.convert("1e-6"));
  }

  public void testStringConverter_convertError() {
    try {
      Floats.stringConverter().convert("notanumber");
      fail();
    } catch (NumberFormatException expected) {
    }
  }

  public void testStringConverter_nullConversions() {
    assertNull(Floats.stringConverter().convert(null));
    assertNull(Floats.stringConverter().reverse().convert(null));
  }

  @GwtIncompatible // Float.toString returns different value in GWT.
  public void testStringConverter_reverse() {
    Converter<String, Float> converter = Floats.stringConverter();
    assertEquals("1.0", converter.reverse().convert(1.0f));
    assertEquals("0.0", converter.reverse().convert(0.0f));
    assertEquals("-1.0", converter.reverse().convert(-1.0f));
    assertEquals("1000000.0", converter.reverse().convert(1e6f));
    assertEquals("1.0E-6", converter.reverse().convert(1e-6f));
  }

  @GwtIncompatible // NullPointerTester
  public void testStringConverter_nullPointerTester() throws Exception {
    NullPointerTester tester = new NullPointerTester();
    tester.testAllPublicInstanceMethods(Floats.stringConverter());
  }

  @GwtIncompatible
  public void testTryParse_withNullNoGwt() {
    // The literal string "null" is not a number; a null reference is an NPE.
    assertNull(Floats.tryParse("null"));
    try {
      Floats.tryParse(null);
      fail("Expected NPE");
    } catch (NullPointerException expected) {
    }
  }
}
/**
 * Copyright (c) 2012, Cloudera, Inc. All Rights Reserved.
 *
 * Cloudera, Inc. licenses this file to you under the Apache License,
 * Version 2.0 (the "License"). You may not use this file except in
 * compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * This software is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for
 * the specific language governing permissions and limitations under the
 * License.
 */
package com.cloudera.kitten.appmaster.service;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;

import com.cloudera.kitten.ContainerLaunchContextFactory;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.ContainerExitStatus;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
import org.apache.hadoop.yarn.api.records.ContainerStatus;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.NodeReport;
import org.apache.hadoop.yarn.api.records.Priority;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.client.api.AMRMClient;
import org.apache.hadoop.yarn.client.api.AMRMClient.ContainerRequest;
import org.apache.hadoop.yarn.client.api.async.AMRMClientAsync;
import org.apache.hadoop.yarn.client.api.async.NMClientAsync;
import org.apache.hadoop.yarn.conf.YarnConfiguration;

import com.cloudera.kitten.ContainerLaunchParameters;
import com.cloudera.kitten.appmaster.AbstractClient;
import com.cloudera.kitten.appmaster.ApplicationMasterParameters;
import com.cloudera.kitten.appmaster.ApplicationMasterService;
import com.cloudera.kitten.appmaster.params.lua.WorkflowParameters;
import com.cloudera.kitten.lua.LuaFields;
import com.google.common.base.Preconditions;
import com.google.common.collect.Maps;
import com.google.common.util.concurrent.AbstractScheduledService;

/**
 * YARN ApplicationMaster service that drives a workflow of container trackers.
 *
 * <p>Lifecycle: {@link #startUp()} registers with the ResourceManager and initializes
 * one {@link ContainerTracker} per workflow operator; {@link #runOneIteration()} polls
 * once per second and stops when all trackers are done or too many containers failed;
 * {@link #shutDown()} kills remaining containers and unregisters from the RM.
 *
 * <p>It also implements {@link AMRMClientAsync.CallbackHandler}, so allocation and
 * completion events arrive asynchronously on the AMRMClient callback thread.
 */
public class WorkflowService extends
    AbstractScheduledService implements ApplicationMasterService,
    AMRMClientAsync.CallbackHandler {

  private static final Log LOG = LogFactory.getLog(WorkflowService.class);

  public final WorkflowParameters parameters;
  public final YarnConfiguration conf;
  // Running count of failed containers; compared against parameters.getAllowedFailures().
  private final AtomicInteger totalFailures = new AtomicInteger();
  // One tracker per workflow operator, keyed by operator name.
  private HashMap<String,ContainerTracker> trackers;
  // Reverse index: which tracker owns each allocated container.
  private HashMap<ContainerId, ContainerTracker> containerAllocation;
  public int prior;

  public AMRMClientAsync<ContainerRequest> resourceManager;
  private boolean hasRunningContainers = false;
  private Throwable throwable;

  protected ContainerLaunchContextFactory factory;

  public WorkflowService(WorkflowParameters parameters, Configuration conf) {
    this.trackers = new HashMap<String, ContainerTracker>();
    this.parameters = Preconditions.checkNotNull(parameters);
    this.conf = new YarnConfiguration(conf);
    this.prior=1;
  }

  @Override
  public ApplicationMasterParameters getParameters() {
    return parameters;
  }

  @Override
  public boolean hasRunningContainers() {
    return hasRunningContainers;
  }

  /**
   * Registers this application master with the ResourceManager and initializes all
   * trackers. On registration failure the service stops itself instead of throwing.
   */
  @Override
  protected void startUp() throws IOException {
    this.containerAllocation = new HashMap<ContainerId, ContainerTracker>();
    // 1000 ms heartbeat interval; `this` receives the async RM callbacks.
    this.resourceManager = AMRMClientAsync.createAMRMClientAsync(1000, this);
    this.resourceManager.init(conf);
    this.resourceManager.start();

    RegisterApplicationMasterResponse registration;
    try {
      registration = resourceManager.registerApplicationMaster(
          parameters.getHostname(),
          parameters.getClientPort(),
          parameters.getTrackingUrl());
    } catch (Exception e) {
      LOG.error("Exception thrown registering application master", e);
      stop();
      return;
    }

    // The RM's maximum capability bounds every container request we build.
    factory = new ContainerLaunchContextFactory(
        registration.getMaximumResourceCapability());

    trackers = parameters.createTrackers(this);
    for(ContainerTracker t : trackers.values()){
      t.init(factory);
    }

    /*for ( Entry<String, ContainerLaunchParameters> e : parameters.getContainerLaunchParameters().entrySet()) {
      ContainerTracker tracker = new ContainerTracker(e.getValue());
      LOG.info("Operator: " + e.getKey());
      trackers.put(e.getKey(),tracker);
    }
    LOG.info("Trackers: " + trackers);
    trackers.get("Move_MySQL_HBase").addNextTracker(trackers.get("HBase_HashJoin"));
    trackers.get("HBase_HashJoin").addNextTracker(trackers.get("Sort2"));
    trackers.get("Move_MySQL_HBase").init(factory);*/
    this.hasRunningContainers = true;
  }

  /**
   * Kills any still-running containers (e.g. when finishing due to a timeout) and
   * reports a final SUCCEEDED/FAILED status to the ResourceManager.
   */
  @Override
  protected void shutDown() {
    // Stop the containers in the case that we're finishing because of a timeout.
    LOG.info("Stopping trackers");
    this.hasRunningContainers = false;

    for (ContainerTracker tracker : trackers.values()) {
      if (tracker.hasRunningContainers()) {
        tracker.kill();
      }
    }
    FinalApplicationStatus status;
    String message = null;
    if (state() == State.FAILED || totalFailures.get() > parameters.getAllowedFailures()) {
      //TODO: diagnostics
      status = FinalApplicationStatus.FAILED;
      if (throwable != null) {
        message = throwable.getLocalizedMessage();
      }
    } else {
      status = FinalApplicationStatus.SUCCEEDED;
    }
    LOG.info("Sending finish request with status = " + status);
    try {
      resourceManager.unregisterApplicationMaster(status, message, null);
    } catch (Exception e) {
      LOG.error("Error finishing application master", e);
    }
  }

  @Override
  protected Scheduler scheduler() {
    // Poll once per second, starting immediately.
    return Scheduler.newFixedRateSchedule(0, 1, TimeUnit.SECONDS);
  }

  @Override
  protected void runOneIteration() throws Exception {
    // NOTE(review): issues a status/report request every tick — presumably a
    // heartbeat to the workflow client; confirm against AbstractClient.
    AbstractClient.issueRequest(parameters.jobName, parameters.workflow);
    if (totalFailures.get() > parameters.getAllowedFailures() ||
        allTrackersFinished()) {
      stop();
    }
  }

  /** @return true when no tracker has more containers to run. */
  private boolean allTrackersFinished() {
  	boolean ret = true;
  	for(ContainerTracker t : trackers.values()){
  		if(t.hasMoreContainers()){
  			ret =false;
  			break;
  		}
  	}
    //LOG.info("allTrackersFinished: "+ret);
  	return ret;
  }

  // AMRMClientHandler methods
  /**
   * RM callback: routes each completed container back to its owning tracker.
   * Framework-aborted containers are neither counted as failures nor re-tried here.
   */
  @Override
  public void onContainersCompleted(List<ContainerStatus> containerStatuses) {
    LOG.info(containerStatuses.size() + " containers have completed");
    for (ContainerStatus status : containerStatuses) {
      int exitStatus = status.getExitStatus();
      if (0 != exitStatus) {
        // container failed
        if (ContainerExitStatus.ABORTED != exitStatus) {
          totalFailures.incrementAndGet();
          // NOTE(review): remove(...) returns null if this container id was never
          // recorded in containerAllocation, which would NPE here — confirm every
          // completed container is guaranteed to have been allocated via this class.
          containerAllocation.remove(status.getContainerId()).containerCompleted(status.getContainerId());
        } else {
          // container was killed by framework, possibly preempted
          // we should re-try as the container was lost for some reason
        }
      } else {
        // nothing to do
        // container completed successfully
        LOG.info("Container id = " + status.getContainerId() + " completed successfully");
        containerAllocation.remove(status.getContainerId()).containerCompleted(status.getContainerId());
      }
    }
  }

  /**
   * RM callback: hands each newly allocated container to the first initialized
   * tracker that still needs containers and whose resource request matches.
   */
  @Override
  public void onContainersAllocated(List<Container> allocatedContainers) {
    LOG.info("Allocating " + allocatedContainers.size() + " container(s)");
    Set<Container> assigned = Sets.newHashSet();
    for (ContainerTracker tracker : trackers.values()) {
        for (Container allocated : allocatedContainers) {
        	if (tracker.isInitilized && tracker.needsContainers()) {
	          if (!assigned.contains(allocated) && tracker.matches(allocated)) {
	        	LOG.info("Allocated cores: "+allocated.getResource().getVirtualCores());
	            tracker.launchContainer(allocated);
	            assigned.add(allocated);
	            containerAllocation.put(allocated.getId(), tracker);
	          }
        	}
        }
    }
    for(Entry<ContainerId, ContainerTracker> e: containerAllocation.entrySet()){
        LOG.info("Allocated: "+e.getKey()+" to operator: "+e.getValue().params.getName());
    }
    /*if (assigned.size() < allocatedContainers.size()) {
      LOG.error(String.format("Not all containers were allocated (%d out of %d)", assigned.size(), allocatedContainers.size()));
      stop();
    }*/
  }

  @Override
  public void onShutdownRequest() {
    stop();
  }

  @Override
  public void onNodesUpdated(List<NodeReport> nodeReports) {
    //TODO
  }

  /** Progress = completed containers / total requested, across all trackers. */
  @Override
  public float getProgress() {
    int num = 0, den = 0;
    for (ContainerTracker tracker : trackers.values()) {
      num += tracker.completed.get();
      den += tracker.params.getNumInstances();
    }
    if (den == 0) {
      return 0.0f;
    }
    return ((float) num) / den;
  }

  @Override
  public void onError(Throwable throwable) {
    this.throwable = throwable;
    stop();
  }
}
/*
 * Copyright 2012-2019 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.cloud.function.context.catalog;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.UUID;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Supplier;
import java.util.stream.Collectors;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.gson.Gson;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import reactor.core.publisher.Flux;

import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.builder.SpringApplicationBuilder;
import org.springframework.cloud.function.context.FunctionCatalog;
import org.springframework.cloud.function.context.FunctionRegistration;
import org.springframework.cloud.function.context.FunctionRegistry;
import org.springframework.cloud.function.context.FunctionType;
import org.springframework.cloud.function.context.HybridFunctionalRegistrationTests.UppercaseFunction;
import org.springframework.cloud.function.context.catalog.SimpleFunctionRegistry.FunctionInvocationWrapper;
import org.springframework.cloud.function.context.config.JsonMessageConverter;
import org.springframework.cloud.function.json.GsonMapper;
import org.springframework.cloud.function.json.JacksonMapper;
import org.springframework.cloud.function.json.JsonMapper;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.core.ResolvableType;
import org.springframework.core.convert.ConversionService;
import org.springframework.core.convert.support.DefaultConversionService;
import org.springframework.lang.Nullable;
import org.springframework.messaging.Message;
import org.springframework.messaging.MessageHeaders;
import org.springframework.messaging.converter.AbstractMessageConverter;
import org.springframework.messaging.converter.ByteArrayMessageConverter;
import org.springframework.messaging.converter.CompositeMessageConverter;
import org.springframework.messaging.converter.MessageConverter;
import org.springframework.messaging.converter.StringMessageConverter;
import org.springframework.messaging.support.MessageBuilder;
import org.springframework.util.MimeType;

import static org.assertj.core.api.Assertions.assertThat;

/**
 * Unit tests for {@link SimpleFunctionRegistry}: lookup by name (including the
 * single-function "any name" and empty-name rules), function composition with
 * {@code "a|b"} syntax, message/payload conversion, and custom converters.
 *
 * @author Oleg Zhurakousky
 *
 */
public class SimpleFunctionRegistryTests {

	// Shared JSON/byte[]/String converter stack used by every registry under test.
	private CompositeMessageConverter messageConverter;

	private ConversionService conversionService;

	@BeforeEach
	public void before() {
		List<MessageConverter> messageConverters = new ArrayList<>();
		JsonMapper jsonMapper = new GsonMapper(new Gson());
		messageConverters.add(new JsonMessageConverter(jsonMapper));
		messageConverters.add(new ByteArrayMessageConverter());
		messageConverters.add(new StringMessageConverter());
		this.messageConverter = new CompositeMessageConverter(messageConverters);
		this.conversionService = new DefaultConversionService();
	}

	// SCF-640: a raw (non-Message) payload stays a raw payload through an echo function.
	@Test
	public void testSCF640() {
		Echo function = new Echo();
		FunctionRegistration<Echo> registration = new FunctionRegistration<>(function, "echo")
				.type(FunctionType.of(Echo.class));
		SimpleFunctionRegistry catalog = new SimpleFunctionRegistry(this.conversionService,
				this.messageConverter, new JacksonMapper(new ObjectMapper()));
		catalog.register(registration);
		FunctionInvocationWrapper lookedUpFunction = catalog.lookup("echo");
		Object result = lookedUpFunction.apply("{\"HELLO\":\"WORLD\"}");
		assertThat(result).isNotInstanceOf(Message.class);
		assertThat(result).isEqualTo("{\"HELLO\":\"WORLD\"}");
	}

	// SCF-588: a Message in yields a Message out, with payload converted.
	@SuppressWarnings("unchecked")
	@Test
	public void testSCF588() {
		UpperCase function = new UpperCase();
		FunctionRegistration<UpperCase> registration = new FunctionRegistration<>(function, "foo")
				.type(FunctionType.of(UppercaseFunction.class));
		SimpleFunctionRegistry catalog = new SimpleFunctionRegistry(this.conversionService,
				this.messageConverter, new JacksonMapper(new ObjectMapper()));
		catalog.register(registration);
		// Single registered function, so lookup by a different name still resolves it.
		FunctionInvocationWrapper lookedUpFunction = catalog.lookup("uppercase");
		Message<String> message = MessageBuilder.withPayload("hello")
				.setHeader("lambda-runtime-aws-request-id", UUID.randomUUID())
				.build();
		Object result = lookedUpFunction.apply(message);
		assertThat(result).isInstanceOf(Message.class);
		assertThat(((Message<String>) result).getPayload()).isEqualTo("HELLO");
	}

	// Any-name lookup works only while exactly one function is registered.
	@Test
	public void testFunctionLookup() {
		TestFunction function = new TestFunction();
		FunctionRegistration<TestFunction> registration = new FunctionRegistration<>(
				function, "foo").type(FunctionType.of(TestFunction.class));
		SimpleFunctionRegistry catalog = new SimpleFunctionRegistry(this.conversionService,
				this.messageConverter, new JacksonMapper(new ObjectMapper()));
		catalog.register(registration);

		//FunctionInvocationWrapper lookedUpFunction = catalog.lookup("hello");
		FunctionInvocationWrapper lookedUpFunction = catalog.lookup("hello");
		assertThat(lookedUpFunction).isNotNull(); // because we only have one and can look it up with any name

		FunctionRegistration<TestFunction> registration2 = new FunctionRegistration<>(
				function, "foo2").type(FunctionType.of(TestFunction.class));
		catalog.register(registration2);
		// With two candidates an unknown name is ambiguous and resolves to null.
		lookedUpFunction = catalog.lookup("hello");
		assertThat(lookedUpFunction).isNull();
	}

	// "uppercase|reverse" composes reactively over a Flux.
	@Test
	public void testFunctionComposition() {
		FunctionRegistration<UpperCase> upperCaseRegistration = new FunctionRegistration<>(
				new UpperCase(), "uppercase").type(FunctionType.of(UpperCase.class));
		FunctionRegistration<Reverse> reverseRegistration = new FunctionRegistration<>(
				new Reverse(), "reverse").type(FunctionType.of(Reverse.class));
		SimpleFunctionRegistry catalog = new SimpleFunctionRegistry(this.conversionService,
				this.messageConverter, new JacksonMapper(new ObjectMapper()));
		catalog.register(upperCaseRegistration);
		catalog.register(reverseRegistration);
		Function<Flux<String>, Flux<String>> lookedUpFunction = catalog
				.lookup("uppercase|reverse");
		assertThat(lookedUpFunction).isNotNull();
		Flux flux = lookedUpFunction.apply(Flux.just("star"));
		flux.subscribe(v -> {
			System.out.println(v);
		});
//		assertThat(lookedUpFunction.apply(Flux.just("star")).blockFirst())
//				.isEqualTo("RATS");
	}

	@Test
	@Disabled
	public void testFunctionCompositionImplicit() {
		FunctionRegistration<Words> wordsRegistration = new FunctionRegistration<>(
				new Words(), "words").type(FunctionType.of(Words.class));
		FunctionRegistration<Reverse> reverseRegistration = new FunctionRegistration<>(
				new Reverse(), "reverse").type(FunctionType.of(Reverse.class));
		FunctionRegistry catalog = new SimpleFunctionRegistry(this.conversionService,
				this.messageConverter, new JacksonMapper(new ObjectMapper()));
		catalog.register(wordsRegistration);
		catalog.register(reverseRegistration);

		// There's only one function, we should be able to leave that blank
		Supplier<String> lookedUpFunction = catalog.lookup("words|");

		assertThat(lookedUpFunction).isNotNull();
		assertThat(lookedUpFunction.get()).isEqualTo("olleh");
	}

	@Test
	@Disabled
	public void testFunctionCompletelyImplicitComposition() {
		FunctionRegistration<Words> wordsRegistration = new FunctionRegistration<>(
				new Words(), "words").type(FunctionType.of(Words.class));
		FunctionRegistration<Reverse> reverseRegistration = new FunctionRegistration<>(
				new Reverse(), "reverse").type(FunctionType.of(Reverse.class));
		SimpleFunctionRegistry catalog = new SimpleFunctionRegistry(this.conversionService,
				this.messageConverter, new JacksonMapper(new ObjectMapper()));
		catalog.register(wordsRegistration);
		catalog.register(reverseRegistration);

		// There's only one function, we should be able to leave that blank
		Supplier<Flux<String>> lookedUpFunction = catalog.lookup("|");

		assertThat(lookedUpFunction).isNotNull();
		assertThat(lookedUpFunction.get().blockFirst()).isEqualTo("olleh");
	}

	// Supplier composed with a Function is itself a Supplier.
	@Test
	public void testFunctionCompositionExplicit() {
		FunctionRegistration<Words> wordsRegistration = new FunctionRegistration<>(
				new Words(), "words").type(FunctionType.of(Words.class));
		FunctionRegistration<Reverse> reverseRegistration = new FunctionRegistration<>(
				new Reverse(), "reverse").type(FunctionType.of(Reverse.class));
		SimpleFunctionRegistry catalog = new SimpleFunctionRegistry(this.conversionService,
				this.messageConverter, new JacksonMapper(new ObjectMapper()));
		catalog.register(wordsRegistration);
		catalog.register(reverseRegistration);

		Supplier<String> lookedUpFunction = catalog.lookup("words|reverse");

		assertThat(lookedUpFunction).isNotNull();
		assertThat(lookedUpFunction.get()).isEqualTo("olleh");
	}

	// Composition of two Message-typed functions preserves Message envelopes.
	@Test
	public void testFunctionCompositionWithMessages() {
		FunctionRegistration<UpperCaseMessage> upperCaseRegistration = new FunctionRegistration<>(
				new UpperCaseMessage(), "uppercase")
						.type(FunctionType.of(UpperCaseMessage.class));
		FunctionRegistration<ReverseMessage> reverseRegistration = new FunctionRegistration<>(
				new ReverseMessage(), "reverse")
						.type(FunctionType.of(ReverseMessage.class));
		SimpleFunctionRegistry catalog = new SimpleFunctionRegistry(this.conversionService,
				this.messageConverter, new JacksonMapper(new ObjectMapper()));
		catalog.register(upperCaseRegistration);
		catalog.register(reverseRegistration);

		Function<Flux<Message<String>>, Flux<Message<String>>> lookedUpFunction = catalog
				.lookup("uppercase|reverse");

		assertThat(lookedUpFunction).isNotNull();
		assertThat(lookedUpFunction
				.apply(Flux.just(MessageBuilder.withPayload("star").build())).blockFirst()
				.getPayload()).isEqualTo("RATS");
	}

	// Mixing a Message-typed stage with a plain-payload stage still composes.
	@Test
	public void testFunctionCompositionMixedMessages() {
		FunctionRegistration<UpperCaseMessage> upperCaseRegistration = new FunctionRegistration<>(
				new UpperCaseMessage(), "uppercase")
						.type(FunctionType.of(UpperCaseMessage.class));
		FunctionRegistration<Reverse> reverseRegistration = new FunctionRegistration<>(
				new Reverse(), "reverse").type(FunctionType.of(Reverse.class));
		SimpleFunctionRegistry catalog = new SimpleFunctionRegistry(this.conversionService,
				this.messageConverter, new JacksonMapper(new ObjectMapper()));
		catalog.register(upperCaseRegistration);
		catalog.register(reverseRegistration);

		Function<Message<String>, String> lookedUpFunction = catalog
				.lookup("uppercase|reverse");

		assertThat(lookedUpFunction).isNotNull();
		String result = lookedUpFunction.apply(MessageBuilder.withPayload("star").setHeader("foo", "bar").build());
		assertThat(result).isEqualTo("RATS");
	}

	// JSON payloads are deserialized into List<Person> before the reactive function sees them.
	@SuppressWarnings({ "rawtypes", "unchecked" })
	@Test
	public void testReactiveFunctionMessages() {
		FunctionRegistration<ReactiveFunction> registration = new FunctionRegistration<>(new ReactiveFunction(), "reactive")
						.type(FunctionType.of(ReactiveFunction.class));
		SimpleFunctionRegistry catalog = new SimpleFunctionRegistry(this.conversionService,
				this.messageConverter, new JacksonMapper(new ObjectMapper()));
		catalog.register(registration);

		Function lookedUpFunction = catalog.lookup("reactive");

		assertThat(lookedUpFunction).isNotNull();

		Flux<List<String>> result = (Flux<List<String>>) lookedUpFunction
			.apply(Flux.just(MessageBuilder
				.withPayload("[{\"name\":\"item1\"},{\"name\":\"item2\"}]")
				.setHeader(MessageHeaders.CONTENT_TYPE, "application/json")
				.build()
			));

		Assertions.assertIterableEquals(result.blockFirst(), Arrays.asList("item1", "item2"));
	}

	// A user-supplied MessageConverter (text/person) participates in payload conversion.
	@SuppressWarnings({ "rawtypes", "unchecked" })
	@Test
	public void testWithCustomMessageConverter() {
		FunctionCatalog catalog = this.configureCatalog(CustomConverterConfiguration.class);
		Function function = catalog.lookup("func");
		Object result = function.apply(MessageBuilder.withPayload("Jim Lahey").setHeader(MessageHeaders.CONTENT_TYPE, "text/person").build());
		assertThat(result).isEqualTo("Jim Lahey");
	}

	// Lookup before registration is null; after registration it resolves.
	@Test
	public void lookup() {
		SimpleFunctionRegistry functionRegistry = new SimpleFunctionRegistry(this.conversionService,
				this.messageConverter, new JacksonMapper(new ObjectMapper()));
		FunctionInvocationWrapper function = functionRegistry.lookup("uppercase");
		assertThat(function).isNull();

		Function userFunction = uppercase();
		FunctionRegistration functionRegistration = new FunctionRegistration(userFunction, "uppercase")
				.type(FunctionType.from(String.class).to(String.class));
		functionRegistry.register(functionRegistration);
		function = functionRegistry.lookup("uppercase");
		assertThat(function).isNotNull();
	}

	// An empty lookup name resolves when exactly one function is registered.
	@Test
	public void lookupDefaultName() {
		SimpleFunctionRegistry functionRegistry = new SimpleFunctionRegistry(this.conversionService,
				this.messageConverter, new JacksonMapper(new ObjectMapper()));
		Function userFunction = uppercase();
		FunctionRegistration functionRegistration = new FunctionRegistration(userFunction, "uppercase")
				.type(FunctionType.from(String.class).to(String.class));
		functionRegistry.register(functionRegistration);
		FunctionInvocationWrapper function = functionRegistry.lookup("");
		assertThat(function).isNotNull();
	}

	// Function composed with a Consumer can still be applied (result is consumed).
	@SuppressWarnings("unchecked")
	@Test
	public void lookupWithCompositionFunctionAndConsumer() {
		SimpleFunctionRegistry functionRegistry = new SimpleFunctionRegistry(this.conversionService,
				this.messageConverter, new JacksonMapper(new ObjectMapper()));

		Object userFunction = uppercase();
		FunctionRegistration functionRegistration = new FunctionRegistration(userFunction, "uppercase")
				.type(FunctionType.from(String.class).to(String.class));
		functionRegistry.register(functionRegistration);

		userFunction = consumer();
		functionRegistration = new FunctionRegistration(userFunction, "consumer")
				.type(ResolvableType.forClassWithGenerics(Consumer.class, Integer.class).getType());
		functionRegistry.register(functionRegistration);

		FunctionInvocationWrapper functionWrapper = functionRegistry.lookup("uppercase|consumer");

		functionWrapper.apply("123");
	}

	// A Consumer<Flux<Integer>> is invokable with a raw payload via conversion.
	@Test
	public void lookupWithReactiveConsumer() {
		SimpleFunctionRegistry functionRegistry = new SimpleFunctionRegistry(this.conversionService,
				this.messageConverter, new JacksonMapper(new ObjectMapper()));
		Object userFunction = reactiveConsumer();
		FunctionRegistration functionRegistration = new FunctionRegistration(userFunction, "reactiveConsumer")
				.type(ResolvableType.forClassWithGenerics(Consumer.class,
						ResolvableType.forClassWithGenerics(Flux.class, Integer.class)).getType());
		functionRegistry.register(functionRegistration);

		FunctionInvocationWrapper functionWrapper = functionRegistry.lookup("reactiveConsumer");

		functionWrapper.apply("123");
	}

	// A Message->Message function may overwrite existing headers.
	@SuppressWarnings({ "rawtypes", "unchecked" })
	@Test
	public void testHeaderEnricherFunction() {
		FunctionRegistration<HeaderEnricherFunction> registration = new FunctionRegistration<>(new HeaderEnricherFunction(), "headerEnricher")
				.type(FunctionType.of(HeaderEnricherFunction.class));
		SimpleFunctionRegistry catalog = new SimpleFunctionRegistry(this.conversionService,
				this.messageConverter, new JacksonMapper(new ObjectMapper()));
		catalog.register(registration);
		Function<Message<?>, Message<?>> function = catalog.lookup("headerEnricher");
		Message<?> message = function.apply(MessageBuilder.withPayload("hello").setHeader("original", "originalValue")
				.build());
		assertThat(message.getHeaders().get("original")).isEqualTo("newValue");
	}

	// ---- Simple function fixtures used as registrations above ----

	public Function<String, String> uppercase() {
		return v -> v.toUpperCase();
	}

	public Function<Object, Integer> hash() {
		return v -> v.hashCode();
	}

	public Supplier<Integer> supplier() {
		return () -> 4;
	}

	public Consumer<Integer> consumer() {
		return System.out::println;
	}

	public Consumer<Flux<Integer>> reactiveConsumer() {
		return flux -> flux.subscribe(v -> {
			System.out.println(v);
		});
	}

	// Boots a lazy Spring context from the given config classes and returns its catalog.
	private FunctionCatalog configureCatalog(Class<?>... configClass) {
		ApplicationContext context = new SpringApplicationBuilder(configClass)
				.run("--logging.level.org.springframework.cloud.function=DEBUG",
						"--spring.main.lazy-initialization=true");
		FunctionCatalog catalog = context.getBean(FunctionCatalog.class);
		return catalog;
	}

	/** Registers a custom text/person converter and a Person->String function bean. */
	@EnableAutoConfiguration
	private static class CustomConverterConfiguration {
		@Bean
		public MessageConverter stringToPersonConverter() {
			return new AbstractMessageConverter(MimeType.valueOf("text/person")) {

				@Override
				protected Object convertFromInternal(Message<?> message, Class<?> targetClass, @Nullable Object conversionHint) {
					String payload = message.getPayload() instanceof byte[] ? new String((byte[]) message.getPayload()) : (String) message.getPayload();
					Person person = new Person();
					person.setName(payload);
					return person;
				}

				@Override
				protected boolean canConvertFrom(Message<?> message, @Nullable Class<?> targetClass) {
					return supportsMimeType(message.getHeaders()) && Person.class.isAssignableFrom(targetClass) && (
							message.getPayload() instanceof String || message.getPayload() instanceof byte[]);
				}

				@Override
				public Object convertToInternal(Object rawPayload, MessageHeaders headers, Object conversionHint) {
					return rawPayload.toString();
				}

				@Override
				protected boolean canConvertTo(Object payload, @Nullable MessageHeaders headers) {
					return true;
				}

				@Override
				protected boolean supports(Class<?> clazz) {
					throw new UnsupportedOperationException();
				}
			};
		}

		@Bean
		public Function<Person, String> func() {
			return person -> person.getName();
		}
	}

	/** Mutable bean used as the target of the custom text/person converter. */
	public static class Person {
		private String name;

		public String getName() {
			return name;
		}

		public void setName(String name) {
			this.name = name;
		}
	}

	private static class Words implements Supplier<String> {

		@Override
		public String get() {
			return "hello";
		}

	}

	private static class UpperCase implements Function<String, String> {

		@Override
		public String apply(String t) {
			return t.toUpperCase();
		}

	}

	private static class Echo implements Function<Object, Object> {

		@Override
		public Object apply(Object t) {
			return t;
		}

	}

	private static class UpperCaseMessage
			implements Function<Message<String>, Message<String>> {

		@Override
		public Message<String> apply(Message<String> t) {
			return MessageBuilder.withPayload(t.getPayload().toUpperCase())
					.copyHeaders(t.getHeaders()).build();
		}

	}

	private static class Reverse implements Function<String, String> {

		@Override
		public String apply(String t) {
			return new StringBuilder(t).reverse().toString();
		}

	}

	private static class ReverseMessage
			implements Function<Message<String>, Message<String>> {

		@Override
		public Message<String> apply(Message<String> t) {
			return MessageBuilder
					.withPayload(new StringBuilder(t.getPayload()).reverse().toString())
					.copyHeaders(t.getHeaders()).build();
		}

	}

	private static class TestFunction implements Function<Integer, String> {

		@Override
		public String apply(Integer t) {
			return "i=" + t;
		}

	}

	private static class ReactiveFunction implements Function<Flux<Message<List<Person>>>, Flux<List<String>>> {

		@Override
		public Flux<List<String>> apply(Flux<Message<List<Person>>> listFlux) {
			return listFlux
				.map(Message::getPayload)
				.map(lst -> lst.stream().map(Person::getName).collect(Collectors.toList()));
		}

	}

	private static class HeaderEnricherFunction implements Function<Message<?>, Message<?>> {

		@Override
		public Message<?> apply(Message<?> message) {
			return MessageBuilder.withPayload(message.getPayload()).setHeader("original", "newValue")
					.build();
		}

	}

}
/**
 * Copyright 2013-2019 the original author or authors from the Jeddict project (https://jeddict.github.io/).
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package io.github.jeddict.source;

import com.github.javaparser.ast.ImportDeclaration;
import com.github.javaparser.ast.Node;
import com.github.javaparser.ast.body.BodyDeclaration;
import com.github.javaparser.ast.body.FieldDeclaration;
import com.github.javaparser.ast.body.MethodDeclaration;
import com.github.javaparser.ast.expr.Expression;
import com.github.javaparser.ast.expr.LiteralStringValueExpr;
import com.github.javaparser.ast.nodeTypes.NodeWithAnnotations;
import com.github.javaparser.ast.nodeTypes.NodeWithSimpleName;
import com.github.javaparser.resolution.declarations.ResolvedReferenceTypeDeclaration;
import com.github.javaparser.resolution.declarations.ResolvedTypeDeclaration;
import com.github.javaparser.resolution.declarations.ResolvedTypeParameterDeclaration;
import com.github.javaparser.resolution.types.ResolvedReferenceType;
import com.github.javaparser.resolution.types.ResolvedType;
import com.github.javaparser.utils.Pair;
import io.github.jeddict.bv.constraints.Constraint;
import static io.github.jeddict.jcode.util.JavaIdentifiers.unqualify;
import io.github.jeddict.jcode.util.JavaUtil;
import io.github.jeddict.jpa.spec.EntityMappings;
import java.lang.annotation.Annotation;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import static java.util.stream.Collectors.joining;

/**
 * Wraps one class member (a field and/or its getter/setter) parsed by JavaParser
 * and exposes its resolved type, generic type arguments, bean-validation
 * constraints and default value for reverse-engineering into an entity model.
 *
 * @author jGauravGupta
 */
public class MemberExplorer extends AnnotatedMember {

    // The declaration carrying the member's annotations (field or accessor).
    private BodyDeclaration<? extends Annotation> annotatedMember;

    private MethodDeclaration getter;

    private MethodDeclaration setter;

    private FieldDeclaration field;

    private final ClassExplorer clazz;

    public MemberExplorer(ClassExplorer clazz) {
        this.clazz = clazz;
    }

    public EntityMappings getEntityMapping() {
        return clazz.getEntityMappings();
    }

    public boolean isIncludeReference() {
        return clazz.isIncludeReference();
    }

    public SourceExplorer getSource() {
        return clazz.getSource();
    }

    /** Returns true only for field-backed members declared {@code transient}. */
    public boolean isTransient() {
        return field != null ? field.isTransient() : false;
    }

    /** Field name, derived from the getter name when no field is present. */
    public String getFieldName() {
        if (field != null) {
            return field.getVariable(0).getNameAsString();
        } else {
            return JavaUtil.getFieldName(getter.getNameAsString());
        }
    }

    /**
     * Collects bean-validation constraints declared on the type argument at
     * {@code index} (e.g. {@code List<@NotNull String>}).
     *
     * @param index zero-based type-argument position; must be non-negative
     * @return the constraints found, or an empty set when none apply
     * @throws IllegalStateException if {@code index} is negative
     */
    public Set<Constraint> getTypeArgumentBeanValidationConstraints(int index) {
        NodeWithAnnotations<? extends Annotation> nodeWithAnnotations = null;
        if (index < 0) {
            throw new IllegalStateException("index value must be positive");
        }
        // Child 0 of the element type is the raw type itself; type arguments start at 1.
        index = index + 1;
        List<Node> childNodes = field.getElementType().getChildNodes();
        // BUGFIX: was `childNodes.size() >= index`, which allowed size == index and
        // made childNodes.get(index) throw IndexOutOfBoundsException.
        if (!childNodes.isEmpty()
                && childNodes.size() > index
                && childNodes.get(index) instanceof NodeWithAnnotations) {
            nodeWithAnnotations = (NodeWithAnnotations) childNodes.get(index);
        }
        Set<Constraint> constraints = Collections.emptySet();
        if (nodeWithAnnotations != null) {
            constraints = getBeanValidationConstraints(
                    nodeWithAnnotations.getAnnotations()
                            .stream()
                            .map(AnnotationExplorer::new)
            );
        }
        return constraints;
    }

    /** Qualified names (or type-variable descriptions) of the member's type arguments. */
    public List<String> getTypeArguments() {
        List<String> args = new ArrayList<>();
        for (Pair<ResolvedTypeParameterDeclaration, ResolvedType> pair : getReferenceType().getTypeParametersMap()) {
            if (pair.b.isReferenceType()) {
                args.add(pair.b.asReferenceType().getQualifiedName());
            } else if (pair.b.isTypeVariable()) { // generics
                args.add(pair.b.asTypeVariable().describe());
            }
        }
        return args;
    }

    /**
     * Resolved declaration of the type argument at {@code index}, if present.
     *
     * @param index zero-based type-argument position; must be non-negative
     * @throws IllegalStateException if {@code index} is negative
     */
    public Optional<ResolvedTypeDeclaration> getTypeArgumentDeclaration(int index) {
        if (index < 0) {
            throw new IllegalStateException("index value must be positive");
        }
        List<ResolvedTypeDeclaration> declarations = getTypeArgumentDeclarations();
        // BUGFIX: was `declarations.size() >= index`, which allowed size == index and
        // made declarations.get(index) throw IndexOutOfBoundsException instead of
        // returning Optional.empty().
        if (!declarations.isEmpty() && declarations.size() > index) {
            return Optional.of(declarations.get(index));
        }
        return Optional.empty();
    }

    /** Resolved declarations of all reference/type-variable type arguments. */
    public List<ResolvedTypeDeclaration> getTypeArgumentDeclarations() {
        List<ResolvedTypeDeclaration> declarations = new ArrayList<>();
        for (Pair<ResolvedTypeParameterDeclaration, ResolvedType> pair : getReferenceType().getTypeParametersMap()) {
            if (pair.b.isReferenceType() && pair.b.asReferenceType().getTypeDeclaration().isPresent()) {
                declarations.add(pair.b.asReferenceType().getTypeDeclaration().get());
            } else if (pair.b.isTypeVariable()) { // generics
                declarations.add(pair.b.asTypeVariable().asTypeParameter());
            }
        }
        return declarations;
    }

    public Optional<ResolvedReferenceTypeDeclaration> getTypeDeclaration() {
        return getReferenceType().getTypeDeclaration();
    }

    /**
     * Fully qualified type of the member, preferring symbol resolution and
     * falling back to the source text; arrays get a trailing {@code []}.
     */
    public String getType() {
        String type;
        ResolvedType resolvedType = null;
        if (field != null) {
            try {
                resolvedType = field.getElementType().resolve();
            } catch (UnsupportedOperationException ex) {
                // Symbol solver could not resolve this type; fall back to source text below.
                System.out.println("UnsupportedOperationException : " + field);
            }
            if (resolvedType != null && resolvedType.isReferenceType()) {
                type = resolvedType.asReferenceType().getQualifiedName();
            } else if (resolvedType != null && resolvedType.isPrimitive()) {
                type = resolvedType.asPrimitive().describe();
            } else {
                type = field.getElementType().toString();
            }
            if (field.getVariable(0).getType().isArrayType()) {
                type = type + "[]";
            }
        } else {
            try {
                resolvedType = getter.getType().resolve();
            } catch (UnsupportedOperationException ex) {
                System.out.println("UnsupportedOperationException : " + field);
            }
            if (resolvedType != null && resolvedType.isReferenceType()) {
                type = resolvedType.asReferenceType().getQualifiedName();
            } else if (resolvedType != null && resolvedType.isPrimitive()) {
                type = resolvedType.asPrimitive().describe();
            } else {
                type = getter.getTypeAsString();
            }
            if (getter.getType().isArrayType()) {
                type = type + "[]";
            }
        }
        return type;
    }

    public String getSimpleType() {
        return unqualify(getType());
    }

    /** True when the member's type is loadable here and implements java.util.Collection. */
    public boolean isCollectionType() {
        Class<?> classType = null;
        try {
            classType = Class.forName(getType());
        } catch (ClassNotFoundException ex) {
            // Best effort: types outside this JVM's classpath simply report false.
        }
        return classType != null && java.util.Collection.class.isAssignableFrom(classType);
    }

    /** True when the member's type is loadable here and implements java.util.Map. */
    public boolean isMapType() {
        Class<?> classType = null;
        try {
            classType = Class.forName(getType());
        } catch (ClassNotFoundException ex) {
            // Best effort: types outside this JVM's classpath simply report false.
        }
        return classType != null && java.util.Map.class.isAssignableFrom(classType);
    }

    private ResolvedReferenceType getReferenceType() {
        return field.getElementType().resolve().asReferenceType();
    }

    /**
     * Source form of the field initializer, if any. Expressions that reference
     * imported names are prefixed with the matching import list so the caller
     * can reproduce them; string/char literals are quoted with single quotes.
     *
     * @return the default value text, or null when the field has no initializer
     */
    public String getDefaultValue() {
        String defaultValue = null;
        // A variable declarator with an initializer has 3 children: type, name, initializer.
        if (field != null && field.getVariables().get(0).getChildNodes().size() == 3) {
            Node node = field.getVariables().get(0).getChildNodes().get(2);
            if (node instanceof Expression) { //FieldAccessExpr, MethodCallExpr, ObjectCreationExpr
                defaultValue = node.toString();
                Map<String, ImportDeclaration> imports = clazz.getImports();
                String importList = imports.keySet()
                        .stream()
                        .filter(defaultValue::contains)
                        .map(imports::get)
                        .map(ImportDeclaration::getNameAsString)
                        .collect(joining(" ,\n"));
                defaultValue = importList.isEmpty() ? defaultValue : "[\n" + importList + "\n]\n" + defaultValue;
            } else if (node instanceof NodeWithSimpleName) {
                defaultValue = ((NodeWithSimpleName) node).getNameAsString();
            } else if (node instanceof LiteralStringValueExpr) {
                defaultValue = "'" + ((LiteralStringValueExpr) node).getValue() + "'";
            } else {
                throw new UnsupportedOperationException();
            }
        }
        return defaultValue;
    }

    public boolean isAnnotationPresent(Class<? extends Annotation> annotationClass) {
        return annotatedMember.isAnnotationPresent(annotationClass);
    }

    @Override
    protected BodyDeclaration getAnnotatedMember() {
        return annotatedMember;
    }

    public void setAnnotatedMember(BodyDeclaration annotationMember) {
        this.annotatedMember = annotationMember;
    }

    public MethodDeclaration getGetter() {
        return getter;
    }

    public void setGetter(MethodDeclaration getter) {
        this.getter = getter;
    }

    public MethodDeclaration getSetter() {
        return setter;
    }

    public void setSetter(MethodDeclaration setter) {
        this.setter = setter;
    }

    public FieldDeclaration getField() {
        return field;
    }

    public void setField(FieldDeclaration field) {
        this.field = field;
    }

}
/*
   Copyright 2019, 2020 WeAreFrank!

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
*/
package nl.nn.adapterframework.senders;

import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;

import javax.mail.internet.AddressException;
import javax.mail.internet.InternetAddress;

import org.apache.commons.codec.binary.Base64InputStream;
import org.apache.commons.lang3.StringUtils;
import org.w3c.dom.Element;
import org.w3c.dom.Node;

import nl.nn.adapterframework.configuration.ConfigurationException;
import nl.nn.adapterframework.core.PipeLineSession;
import nl.nn.adapterframework.core.ParameterException;
import nl.nn.adapterframework.core.SenderException;
import nl.nn.adapterframework.core.TimeoutException;
import nl.nn.adapterframework.doc.IbisDoc;
import nl.nn.adapterframework.parameters.ParameterValue;
import nl.nn.adapterframework.parameters.ParameterValueList;
import nl.nn.adapterframework.stream.Message;
import nl.nn.adapterframework.util.CredentialFactory;
import nl.nn.adapterframework.util.DomBuilderException;
import nl.nn.adapterframework.util.StreamUtil;
import nl.nn.adapterframework.util.XmlUtils;

/**
 * Base class for senders that send e-mail. The mail to send is either described by sender
 * parameters (when parameters are configured) or by an XML document in the input message.
 *
 * @ff.parameter from email address of the sender
 * @ff.parameter subject subject field of the message
 * @ff.parameter threadTopic (optional) conversation field of the message, used to correlate mails in mail viewer (header field "Thread-Topic").
 *               Note: subject must end with value of threadTopic, but can't be exactly the same
 * @ff.parameter message message itself. If absent, the complete input message is assumed to be the message
 * @ff.parameter messageType message MIME type (at this moment only available are <code>text/plain</code> and <code>text/html</code> - default: <code>text/plain</code>)
 * @ff.parameter messageBase64 (boolean) indicates whether the message content is base64 encoded (default: <code>false</code>)
 * @ff.parameter charSet the character encoding (e.g. ISO-8859-1 or UTF-8) used to send the email (default: UTF-8)
 * @ff.parameter recipients (xml) recipients of the message. Must result in a structure like: <code><pre>
 *       &lt;recipient type="to"&gt;***@hotmail.com&lt;/recipient&gt;
 *       &lt;recipient type="cc"&gt;***@gmail.com&lt;/recipient&gt;
 * </pre></code>
 * @ff.parameter attachments (xml) attachments to the message. Must result in a structure like: <code><pre>
 *       &lt;attachment name="filename1.txt"&gt;This is the first attachment&lt;/attachment&gt;
 *       &lt;attachment name="filename2.pdf" base64="true"&gt;JVBERi0xLjQKCjIgMCBvYmoKPDwvVHlwZS9YT2JqZWN0L1N1YnR5cGUvSW1...vSW5mbyA5IDAgUgo+PgpzdGFydHhyZWYKMzQxNDY2CiUlRU9GCg==&lt;/attachment&gt;
 *       &lt;attachment name="filename3.pdf" url="file:/c:/filename3.pdf"/&gt;
 *       &lt;attachment name="filename4.pdf" sessionKey="fileContent"/&gt;
 * </pre></code>
 *
 */
public abstract class MailSenderBase extends SenderWithParametersBase {

	// Credentials used to authenticate against the mail host (resolved in configure()).
	private String authAlias;
	private String userId;
	private String password;
	private CredentialFactory cf;

	// Defaults applied when the corresponding value is absent from parameters/XML.
	private String defaultAttachmentName = "attachment";
	private String defaultMessageType = "text/plain";
	private boolean defaultMessageBase64 = false;
	private String defaultSubject;
	private String defaultFrom;
	private int timeout = 20000;
	private String bounceAddress;

	/**
	 * Actually transmits the mail described by {@code mailSession}.
	 * Implemented by concrete subclasses (SMTP, Exchange, ...).
	 */
	protected abstract String sendEmail(MailSession mailSession) throws SenderException;

	@Override
	public void configure() throws ConfigurationException {
		// Resolve credentials once at configuration time.
		cf = new CredentialFactory(getAuthAlias(), getUserId(), getPassword());
		super.configure();
	}

	@Override
	public Message sendMessage(Message message, PipeLineSession session) throws SenderException, TimeoutException {
		MailSession mailSession;
		try {
			mailSession = extract(message, session);
		} catch (DomBuilderException e) {
			throw new SenderException(e);
		}
		// NOTE(review): the String returned by sendEmail() is discarded; the sender's
		// result is just the session's message id.
		sendEmail(mailSession);

		String correlationID = session==null ? null : session.getMessageId();
		return new Message(correlationID);
	}

	/**
	 * Reads fields from either paramList or Xml file
	 */
	public MailSession extract(Message input, PipeLineSession session) throws SenderException, DomBuilderException {
		MailSession mailSession;
		if (paramList == null) {
			// No parameters configured: the input message itself is the XML mail description.
			mailSession = parseXML(input, session);
		} else {
			mailSession = readParameters(input, session);
		}
		return mailSession;
	}

	// Resolves the "attachments" parameter (if present) into attachment streams.
	private Collection<MailAttachmentStream> retrieveAttachmentsFromParamList(ParameterValue pv, PipeLineSession session) throws SenderException, ParameterException {
		Collection<MailAttachmentStream> attachments = null;
		if (pv != null) {
			attachments = retrieveAttachments(pv.asCollection(), session);
			log.debug("MailSender [" + getName() + "] retrieved attachments-parameter [" + attachments + "]");
		}
		return attachments;
	}

	// Resolves the "recipients" parameter (if present) into EMail objects.
	private Collection<EMail> retrieveRecipientsFromParameterList(ParameterValue pv) throws ParameterException, SenderException {
		Collection<EMail> recipients = null;
		if (pv != null) {
			recipients = retrieveRecipients(pv.asCollection());
			log.debug("MailSender [" + getName() + "] retrieved recipients-parameter [" + recipients + "]");
		}
		return recipients;
	}

	/**
	 * Builds a MailSession from the configured sender parameters.
	 * At least one recipient is required; all other parameters are optional.
	 * NOTE(review): the parameter is looked up as "charset" here, while the class
	 * javadoc documents it as "charSet" — verify which spelling callers use.
	 */
	private MailSession readParameters(Message input, PipeLineSession session) throws SenderException {
		EMail from = null;
		String subject = null;
		String threadTopic = null;
		String messageType = null;
		boolean messageBase64 = false;
		String charset = null;
		List<EMail> recipients;
		List<MailAttachmentStream> attachments = null;
		ParameterValueList pvl=null;
		ParameterValue pv;

		MailSession mail = new MailSession();
		try {
			pvl = paramList.getValues(input, session);
			pv = pvl.getParameterValue("from");
			if (pv != null) {
				from = new EMail(pv.asStringValue(null));
				log.debug("MailSender [" + getName() + "] retrieved from-parameter [" + from + "]");
				mail.setFrom(from);
			}
			pv = pvl.getParameterValue("subject");
			if (pv != null) {
				subject = pv.asStringValue(null);
				log.debug("MailSender [" + getName() + "] retrieved subject-parameter [" + subject + "]");
				mail.setSubject(subject);
			}
			pv = pvl.getParameterValue("threadTopic");
			if (pv != null) {
				threadTopic = pv.asStringValue(null);
				log.debug("MailSender [" + getName() + "] retrieved threadTopic-parameter [" + threadTopic + "]");
				mail.setThreadTopic(threadTopic);
			}
			pv = pvl.getParameterValue("message");
			if (pv != null) {
				String message = pv.asStringValue("message");
				log.debug("MailSender [" + getName() + "] retrieved message-parameter [" + message + "]");
				mail.setMessage(message);
			}
			pv = pvl.getParameterValue("messageType");
			if (pv != null) {
				messageType = pv.asStringValue(null);
				log.debug("MailSender [" + getName() + "] retrieved messageType-parameter [" + messageType + "]");
				mail.setMessageType(messageType);
			}
			pv = pvl.getParameterValue("messageBase64");
			if (pv != null) {
				messageBase64 = pv.asBooleanValue(false);
				log.debug("MailSender [" + getName() + "] retrieved messageBase64-parameter [" + messageBase64 + "]");
				mail.setMessageBase64(messageBase64);
			}
			pv = pvl.getParameterValue("charset");
			if (pv != null) {
				charset = pv.asStringValue(null);
				log.debug("MailSender [" + getName() + "] retrieved charset-parameter [" + charset + "]");
				mail.setCharSet(charset);
			}
			pv = pvl.getParameterValue("recipients");
			Collection<EMail> recipientsCollection = retrieveRecipientsFromParameterList(pv);
			if (recipientsCollection != null && !recipientsCollection.isEmpty()) {
				recipients = new ArrayList<EMail>(recipientsCollection);
				mail.setRecipientList(recipients);
			} else {
				throw new SenderException("Recipients cannot be empty. At least one recipient is required");
			}

			pv = pvl.getParameterValue("attachments");
			Collection<MailAttachmentStream> attachmentsCollection = retrieveAttachmentsFromParamList(pv, session);
			if (attachmentsCollection != null && !attachmentsCollection.isEmpty()) {
				attachments = new ArrayList<MailAttachmentStream>(attachmentsCollection);
				mail.setAttachmentList(attachments);
			}
		} catch (ParameterException e) {
			throw new SenderException("MailSender [" + getName() + "] got exception determining parametervalues", e);
		}
		return mail;
	}

	/**
	 * Converts {@code <recipient>} elements into EMail objects.
	 * Empty recipient elements are skipped; type defaults to "to" when absent.
	 *
	 * @throws SenderException when the collection itself is null or empty
	 */
	private List<EMail> retrieveRecipients(Collection<Node> recipientsNode) throws SenderException {
		List<EMail> recipients = null;
		if (recipientsNode != null && !recipientsNode.isEmpty()) {
			Iterator<Node> iter = recipientsNode.iterator();
			if (iter.hasNext()) {
				recipients = new LinkedList<EMail>();
				while (iter.hasNext()) {
					Element recipientElement = (Element) iter.next();
					String value = XmlUtils.getStringValue(recipientElement);
					if (StringUtils.isNotEmpty(value)) {
						String name = recipientElement.getAttribute("name");
						String type = recipientElement.getAttribute("type");
						EMail recipient = new EMail(value, name, StringUtils.isNotEmpty(type)?type:"to");
						recipients.add(recipient);
					} else {
						log.debug("empty recipient found, ignoring");
					}
				}
			}
		} else {
			throw new SenderException("no recipients for message");
		}
		return recipients;
	}

	/**
	 * Converts {@code <attachment>} elements into attachment streams. Content comes either
	 * from a session key (InputStream or String) or from the element's text value.
	 * Returns null when the collection has no elements.
	 */
	private Collection<MailAttachmentStream> retrieveAttachments(Collection<Node> attachmentsNode, PipeLineSession session) throws SenderException {
		Collection<MailAttachmentStream> attachments = null;
		Iterator<Node> iter = attachmentsNode.iterator();
		// NOTE(review): iterator() never returns null, so the 'iter != null' guard is dead code.
		if (iter != null && iter.hasNext()) {
			attachments = new LinkedList<MailAttachmentStream>();
			while (iter.hasNext()) {
				Element attachmentElement = (Element) iter.next();
				String name = attachmentElement.getAttribute("name");
				String mimeType = attachmentElement.getAttribute("type");
				if (StringUtils.isNotEmpty(mimeType) && mimeType.indexOf("/")<0) {
					throw new SenderException("mimeType ["+mimeType+"] of attachment ["+name+"] must contain a forward slash ('/')");
				}
				String sessionKey = attachmentElement.getAttribute("sessionKey");
				boolean base64 = Boolean.parseBoolean(attachmentElement.getAttribute("base64"));

				MailAttachmentStream attachment = null;
				if (StringUtils.isNotEmpty(sessionKey)) {
					Object object = session.get(sessionKey);
					if (object instanceof InputStream) {
						attachment = streamToMailAttachment((InputStream) object, base64, mimeType);
					} else if (object instanceof String) {
						attachment = stringToMailAttachment((String) object, base64, mimeType);
					} else {
						throw new SenderException("MailSender ["+getName()+"] received unknown attachment type ["+object.getClass().getName()+"] in sessionkey");
					}
				} else {
					String nodeValue = XmlUtils.getStringValue(attachmentElement);
					attachment = stringToMailAttachment(nodeValue, base64, mimeType);
				}
				attachment.setName(name);
				log.debug("created attachment ["+attachment+"]");
				attachments.add(attachment);
			}
		}
		return attachments;
	}

	// Wraps a String value as an attachment stream; plain text defaults to text/plain.
	// NOTE(review): value.getBytes() uses the platform default charset — verify this is intended.
	private MailAttachmentStream stringToMailAttachment(String value, boolean isBase64, String mimeType) {
		ByteArrayInputStream stream = new ByteArrayInputStream(value.getBytes());
		if (!isBase64 && StringUtils.isEmpty(mimeType)) {
			mimeType = "text/plain";
		}
		return streamToMailAttachment(stream, isBase64, mimeType);
	}

	// Wraps an InputStream as an attachment; base64 content is decoded on the fly.
	private MailAttachmentStream streamToMailAttachment(InputStream stream, boolean isBase64, String mimeType) {
		MailAttachmentStream attachment = new MailAttachmentStream();
		if(StringUtils.isEmpty(mimeType)) {
			mimeType = "application/octet-stream";
		}
		if (isBase64) {
			attachment.setContent(new Base64InputStream(stream));
		} else {
			attachment.setContent(stream);
		}
		attachment.setMimeType(mimeType);
		return attachment;
	}

	/**
	 * Builds a MailSession from an XML description in the input message
	 * (elements: from, subject, threadTopic, message, messageType, messageBase64,
	 * charset, recipients, attachments, replyTo, headers, bounceAddress).
	 */
	private MailSession parseXML(Message input, PipeLineSession session) throws SenderException, DomBuilderException {
		Element from;
		String subject;
		String threadTopic;
		String message;
		String messageType;
		boolean messageBase64;
		String charset;
		Collection<Node> recipientList;
		Collection<Node> attachments;
		Element replyTo = null;

		MailSession mailSession = new MailSession();

		Element emailElement = XmlUtils.buildElement(input);
		from = XmlUtils.getFirstChildTag(emailElement, "from");
		subject = XmlUtils.getChildTagAsString(emailElement, "subject");
		if (StringUtils.isEmpty(subject)) {
			subject=getDefaultSubject();
		}
		threadTopic = XmlUtils.getChildTagAsString(emailElement, "threadTopic");
		message = XmlUtils.getChildTagAsString(emailElement, "message");
		messageType = XmlUtils.getChildTagAsString(emailElement, "messageType");
		if (StringUtils.isEmpty(messageType)) {
			messageType=getDefaultMessageType();
		}
		if (messageType.indexOf("/")<0) {
			throw new SenderException("messageType ["+messageType+"] must contain a forward slash ('/')");
		}
		messageBase64 = XmlUtils.getChildTagAsBoolean(emailElement, "messageBase64");
		charset = XmlUtils.getChildTagAsString(emailElement, "charset");

		Element recipientsElement = XmlUtils.getFirstChildTag(emailElement, "recipients");
		if(recipientsElement == null) {
			throw new SenderException("at least 1 recipient must be specified");
		}
		recipientList = XmlUtils.getChildTags(recipientsElement, "recipient");

		Element attachmentsElement = XmlUtils.getFirstChildTag(emailElement, "attachments");
		attachments = attachmentsElement == null ? null : XmlUtils.getChildTags(attachmentsElement, "attachment");
		replyTo = XmlUtils.getFirstChildTag(emailElement, "replyTo");

		Element headersElement = XmlUtils.getFirstChildTag(emailElement, "headers");
		Collection<Node> headers = headersElement == null ? null : XmlUtils.getChildTags(headersElement, "header");

		String bounceAddress = XmlUtils.getChildTagAsString(emailElement, "bounceAddress");
		mailSession.setBounceAddress(bounceAddress);

		mailSession.setFrom(getEmailAddress(from, "from"));
		mailSession.setSubject(subject);
		mailSession.setThreadTopic(threadTopic);
		mailSession.setMessage(message);
		mailSession.setMessageType(messageType);
		mailSession.setMessageBase64(messageBase64);
		mailSession.setCharSet(charset);
		mailSession.setHeaders(headers);
		mailSession.setReplyto(getEmailAddress(replyTo,"replyTo"));

		List<EMail> recipients = retrieveRecipients(recipientList);
		mailSession.setRecipientList(recipients);

		if (attachments != null) {
			List<MailAttachmentStream> attachmentList = (List<MailAttachmentStream>) retrieveAttachments(attachments, session);
			mailSession.setAttachmentList(attachmentList);
		}

		return mailSession;
	}

	// Converts an XML element (text value + optional "name" attribute) into an EMail of the given type.
	// Returns null when the element is absent or has no text.
	private EMail getEmailAddress(Element element, String type) throws SenderException {
		if (element == null) {
			return null;
		}
		String value = XmlUtils.getStringValue(element);
		if (StringUtils.isNotEmpty(value)) {
			return new EMail(value, element.getAttribute("name"), type);
		}
		return null;
	}

	@Override
	public boolean isSynchronous() {
		return false;
	}

	public String getAuthAlias() {
		return authAlias;
	}
	@IbisDoc({ "authAlias used to obtain credentials for authentication", "" })
	public void setAuthAlias(String authAlias) {
		this.authAlias = authAlias;
	}

	public String getUserId() {
		return userId;
	}
	@IbisDoc({ "userId on the smtphost", "" })
	public void setUserId(String userId) {
		this.userId = userId;
	}

	public String getPassword() {
		return password;
	}
	@IbisDoc({ "password of userid", "" })
	public void setPassword(String password) {
		this.password = password;
	}

	@IbisDoc({ "alias used to obtain credentials for authentication to smtphost", "" })
	@Deprecated
	public void setSmtpAuthAlias(String smtpAuthAlias) {
		setAuthAlias(smtpAuthAlias);
	}
	@IbisDoc({ "userId on the smtphost", "" })
	@Deprecated
	public void setSmtpUserid(String smtpUserId) {
		setUserId(smtpUserId);
	}
	@IbisDoc({ "password of userid on the smtphost", "" })
	@Deprecated
	public void setSmtpPassword(String smtpPassword) {
		setPassword(smtpPassword);
	}

	public CredentialFactory getCredentialFactory() {
		return cf;
	}
	public void setCredentialFactory(CredentialFactory cf) {
		this.cf = cf;
	}

	/**
	 * Set the default for Subject
	 */
	@IbisDoc({ "value of the subject: header if not specified in message itself", "" })
	public void setDefaultSubject(String defaultSubject) {
		this.defaultSubject = defaultSubject;
	}
	public String getDefaultSubject() {
		return defaultSubject;
	}

	/**
	 * Set the default for From
	 */
	@IbisDoc({ "value of the from: header if not specified in message itself", "" })
	public void setDefaultFrom(String defaultFrom) {
		this.defaultFrom = defaultFrom;
	}
	public String getDefaultFrom() {
		return defaultFrom;
	}

	@IbisDoc({ "Timeout <i>in milliseconds</i> for socket connection timeout and socket i/o timeouts", "20000" })
	public void setTimeout(int timeout) {
		this.timeout = timeout;
	}
	public int getTimeout() {
		return timeout;
	}

	@IbisDoc({ "when this name is used, it will be followed by a number which is equal to the node's position", "attachment" })
	public void setDefaultAttachmentName(String defaultAttachmentName) {
		this.defaultAttachmentName = defaultAttachmentName;
	}
	public String getDefaultAttachmentName() {
		return defaultAttachmentName;
	}

	@IbisDoc({ "when messageType is not specified defaultMessageType will be used", "text/plain" })
	public void setDefaultMessageType(String defaultMessageType) {
		this.defaultMessageType = defaultMessageType;
	}
	public String getDefaultMessageType() {
		return defaultMessageType;
	}

	@IbisDoc({ "when messageBase64 is not specified defaultMessageBase64 will be used", "false" })
	public void setDefaultMessageBase64(boolean defaultMessageBase64) {
		this.defaultMessageBase64 = defaultMessageBase64;
	}
	public boolean isDefaultMessageBase64() {
		return defaultMessageBase64;
	}

	@IbisDoc({ "NDR return address when mail cannot be delivered. This adds a Return-Path header", "MAIL FROM attribute" })
	public void setBounceAddress(String string) {
		bounceAddress = string;
	}
	public String getBounceAddress() {
		return bounceAddress;
	}

	/**
	 * Generic email class
	 */
	public class MailSession {
		// Fields default to the sender-level defaults; setters overwrite them per message.
		private EMail from = null;
		private EMail replyto = null;
		private List<EMail> recipients = new ArrayList<EMail>();
		private List<MailAttachmentStream> attachmentList = new ArrayList<MailAttachmentStream>();
		private String subject = getDefaultSubject();
		private String message = null;
		private String messageType = getDefaultMessageType();
		private boolean messageIsBase64 = isDefaultMessageBase64();
		private String charSet = StreamUtil.DEFAULT_INPUT_STREAM_ENCODING;
		private String threadTopic = null;
		private Collection<Node> headers;
		private String bounceAddress = getBounceAddress();

		public MailSession() throws SenderException {
			from = new EMail(getDefaultFrom(),"from");
		}

		public EMail getFrom() {
			return from;
		}
		public void setFrom(EMail from) {
			this.from = from;
		}

		public EMail getReplyto() {
			return replyto;
		}
		public void setReplyto(EMail replyto) {
			this.replyto = replyto;
		}

		/** @throws SenderException when no recipients have been set — a mail without recipients cannot be sent. */
		public List<EMail> getRecipientList() throws SenderException {
			if (recipients == null || recipients.size() == 0) {
				throw new SenderException("MailSender [" + getName() + "] has no recipients for message");
			}
			return recipients;
		}
		public void setRecipientList(List<EMail> recipients) {
			this.recipients = recipients;
		}

		public List<MailAttachmentStream> getAttachmentList() {
			return attachmentList;
		}
		public void setAttachmentList(List<MailAttachmentStream> attachmentList) {
			this.attachmentList = attachmentList;
		}

		public String getSubject() {
			return subject;
		}
		public void setSubject(String subject) {
			this.subject = subject;
		}

		public String getMessage() {
			return message;
		}
		public void setMessage(String message) {
			this.message = message;
		}

		public String getMessageType() {
			return messageType;
		}
		public void setMessageType(String messageType) {
			this.messageType = messageType;
		}

		public boolean isMessageBase64() {
			return messageIsBase64;
		}
		public void setMessageBase64(boolean messageIsBase64) {
			this.messageIsBase64 = messageIsBase64;
		}

		public String getCharSet() {
			return charSet;
		}
		// Empty/null values are ignored so the default encoding is kept.
		public void setCharSet(String charSet) {
			if(StringUtils.isNotEmpty(charSet)) {
				this.charSet = charSet;
			}
		}

		public String getThreadTopic() {
			return threadTopic;
		}
		public void setThreadTopic(String threadTopic) {
			this.threadTopic = threadTopic;
		}

		public Collection<Node> getHeaders() {
			return headers;
		}
		public void setHeaders(Collection<Node> headers) {
			this.headers = headers;
		}

		public void setBounceAddress(String bounceAddress) {
			this.bounceAddress = bounceAddress;
		}
		public String getBounceAddress() {
			return this.bounceAddress;
		}
	}

	/**
	 * Generic mail attachment class
	 * @author Niels Meijer
	 *
	 */
	protected abstract class MailAttachmentBase<T> {
		private String name;
		private String mimeType;
		private T value;

		public String getName() {
			return name;
		}
		public void setName(String name) {
			this.name = name;
		}
		public String getMimeType() {
			return mimeType;
		}
		public void setMimeType(String mimeType) {
			this.mimeType = mimeType;
		}
		public T getContent() {
			return value;
		}
		public void setContent(T value) {
			this.value = value;
		}
		@Override
		public String toString() {
			return "Attachment name ["+name+"] type ["+value.getClass().getSimpleName()+"]";
		}
	}

	protected class MailAttachmentStream extends MailAttachmentBase<InputStream>{};

	/**
	 * Generic mail class
	 * @author alisihab
	 *
	 */
	public class EMail {
		private InternetAddress emailAddress;
		private String type; //"cc", "to", "from", "bcc"

		// Parses the address via InternetAddress.parseHeader (strict mode); an empty
		// address yields an empty InternetAddress rather than an error.
		public EMail(String address, String name, String type) throws SenderException {
			try {
				if (StringUtils.isNotEmpty(address)) {
					InternetAddress ia[] = InternetAddress.parseHeader(address, true);
					if (ia.length==0) {
						throw new AddressException("No address found in ["+address+"]");
					}
					emailAddress = ia[0];
				} else {
					emailAddress = new InternetAddress();
				}
				if (StringUtils.isNotEmpty(name)) {
					emailAddress.setPersonal(name);
				}
				this.type = type;
			} catch (AddressException | UnsupportedEncodingException e) {
				throw new SenderException("cannot parse email address from ["+address+"] ["+name+"]", e);
			}
		}

		public EMail(String address, String type) throws SenderException {
			this(address, null, type);
		}

		public EMail(String address) throws SenderException {
			this(address, null, null);
		}

		public InternetAddress getInternetAddress() {
			return emailAddress;
		}

		public String getAddress() {
			return emailAddress.getAddress();
		}

		public String getName() {
			return emailAddress.getPersonal();
		}

		public String getType() {
			return type;
		}

		@Override
		public String toString() {
			return "address ["+emailAddress.toUnicodeString()+"] type ["+type+"]";
		}
	}
}
/*
 * Copyright (c) 2008-2017, Hazelcast, Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hazelcast.config;

import com.hazelcast.spi.annotation.Beta;
import com.hazelcast.topic.TopicOverloadPolicy;

import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.Executor;

import static com.hazelcast.topic.TopicOverloadPolicy.BLOCK;
import static com.hazelcast.util.Preconditions.checkHasText;
import static com.hazelcast.util.Preconditions.checkNotNull;
import static com.hazelcast.util.Preconditions.checkPositive;

/**
 * Configuration for a reliable {@link com.hazelcast.core.ITopic}.
 *
 * The reliable topic makes use of the {@link com.hazelcast.ringbuffer.Ringbuffer} to store the actual messages.
 *
 * To configure the ringbuffer for a reliable topic, define a ringbuffer in the config with exactly the same name. It is very
 * unlikely that you want to run with the default settings.
 *
 * When a ReliableTopic starts, it will always start from the tail+1 item from the RingBuffer. It will not chew its way through
 * all available events but it will wait for the next item being published.
 *
 * In the reliable topic, global order is always maintained, so all listeners will observe exactly the same order of sequence of
 * messages.
 */
@Beta
public class ReliableTopicConfig {

    /**
     * The default read batch size.
     */
    public static final int DEFAULT_READ_BATCH_SIZE = 10;

    /**
     * The default slow consumer policy.
     */
    public static final TopicOverloadPolicy DEFAULT_TOPIC_OVERLOAD_POLICY = BLOCK;

    /**
     * Default value for statistics enabled.
     */
    public static final boolean DEFAULT_STATISTICS_ENABLED = true;

    private Executor executor;
    private int readBatchSize = DEFAULT_READ_BATCH_SIZE;
    private String name;
    private boolean statisticsEnabled = DEFAULT_STATISTICS_ENABLED;
    private List<ListenerConfig> listenerConfigs = new LinkedList<ListenerConfig>();
    private TopicOverloadPolicy topicOverloadPolicy = DEFAULT_TOPIC_OVERLOAD_POLICY;

    public ReliableTopicConfig() {
    }

    /**
     * Creates a new ReliableTopicConfig with default settings.
     *
     * @param name the name of the reliable topic
     * @throws NullPointerException if name is null
     */
    public ReliableTopicConfig(String name) {
        this.name = checkNotNull(name, "name");
    }

    /**
     * Creates a new ReliableTopicConfig by cloning an existing one.
     *
     * @param config the ReliableTopicConfig to clone.
     */
    ReliableTopicConfig(ReliableTopicConfig config) {
        this.name = config.name;
        this.statisticsEnabled = config.statisticsEnabled;
        this.readBatchSize = config.readBatchSize;
        this.executor = config.executor;
        this.topicOverloadPolicy = config.topicOverloadPolicy;
        // FIX: take a defensive copy. Previously the clone shared the original's mutable
        // listener list, so adding a listener to either instance mutated the other — and
        // let callers mutate the "immutable" getAsReadOnly() view through the original.
        this.listenerConfigs = new LinkedList<ListenerConfig>(config.listenerConfigs);
    }

    /**
     * Creates a clone of an existing config under a different name.
     *
     * @param config the ReliableTopicConfig to clone
     * @param name   the name for the new config
     */
    ReliableTopicConfig(ReliableTopicConfig config, String name) {
        this(config);
        this.name = name;
    }

    /**
     * Sets the name of the reliable topic.
     *
     * @param name the name of the reliable topic
     * @return the updated ReliableTopicConfig
     * @throws IllegalArgumentException if name is null or an empty string.
     */
    public ReliableTopicConfig setName(String name) {
        this.name = checkHasText(name, "name must contain text");
        return this;
    }

    /**
     * Gets the name of the reliable topic.
     *
     * @return the name of the reliable topic.
     */
    public String getName() {
        return name;
    }

    /**
     * Gets the TopicOverloadPolicy for this reliable topic.
     *
     * @return the TopicOverloadPolicy.
     */
    public TopicOverloadPolicy getTopicOverloadPolicy() {
        return topicOverloadPolicy;
    }

    /**
     * Sets the TopicOverloadPolicy for this reliable topic. Check the {@link TopicOverloadPolicy} for more details about
     * this setting.
     *
     * @param topicOverloadPolicy the new TopicOverloadPolicy.
     * @return the updated reliable topic config.
     * @throws IllegalArgumentException if topicOverloadPolicy is null.
     */
    public ReliableTopicConfig setTopicOverloadPolicy(TopicOverloadPolicy topicOverloadPolicy) {
        this.topicOverloadPolicy = checkNotNull(topicOverloadPolicy, "topicOverloadPolicy can't be null");
        return this;
    }

    /**
     * Gets the Executor that is going to process the events.
     *
     * If no Executor is selected, then the {@link com.hazelcast.spi.ExecutionService#ASYNC_EXECUTOR} is used.
     *
     * @return the Executor used to process events.
     * @see #setExecutor(java.util.concurrent.Executor)
     */
    public Executor getExecutor() {
        return executor;
    }

    /**
     * Sets the Executor that is going to process the event.
     *
     * In some cases it is desirable to set a specific Executor. For example, you may want to isolate a certain topic from other
     * topics because it contains long running messages or very high priority messages.
     *
     * A single Executor can be shared between multiple Reliable topics, although it could take more time to process a message.
     * If a single Executor is not shared with other reliable topics, then the Executor only needs to have a single thread.
     *
     * @param executor the Executor. if the executor is null, the {@link com.hazelcast.spi.ExecutionService#ASYNC_EXECUTOR} will
     *                 be used to process the event.
     * @return the updated config.
     */
    public ReliableTopicConfig setExecutor(Executor executor) {
        this.executor = executor;
        return this;
    }

    /**
     * Gets the maximum number of items to read in a batch. Returned value will always be equal or larger than 1.
     *
     * @return the read batch size.
     */
    public int getReadBatchSize() {
        return readBatchSize;
    }

    /**
     * Sets the read batch size.
     *
     * The ReliableTopic tries to read a batch of messages from the ringbuffer. It will get at least one, but
     * if there are more available, then it will try to get more to increase throughput. The minimal read
     * batch size can be influenced using the read batch size.
     *
     * Apart from influencing the number of messages to download, the readBatchSize also determines how many
     * messages will be processed by the thread running the MessageListener before it returns back to the pool
     * to look for other MessageListeners that need to be processed. The more work that can be done without
     * returning to the pool, the smaller the overhead of contention on the Executor's work-queue.
     *
     * If the readBatchSize is an issue because a thread will be busy too long with processing a single MessageListener
     * and it can't help out other MessageListeners, increase the size of the threadpool so the other MessageListeners don't
     * need to wait for a thread, but can be processed in parallel.
     *
     * @param readBatchSize the maximum number of items to read in a batch.
     * @return the updated reliable topic config.
     * @throws IllegalArgumentException if readBatchSize is smaller than 1.
     */
    public ReliableTopicConfig setReadBatchSize(int readBatchSize) {
        this.readBatchSize = checkPositive(readBatchSize, "readBatchSize should be positive");
        return this;
    }

    /**
     * Checks if statistics are enabled for this reliable topic.
     *
     * @return true if enabled, false otherwise.
     */
    public boolean isStatisticsEnabled() {
        return statisticsEnabled;
    }

    /**
     * Enables or disables statistics for this reliable topic.
     *
     * @param statisticsEnabled true to enable statistics, false to disable.
     * @return the updated reliable topic config.
     */
    public ReliableTopicConfig setStatisticsEnabled(boolean statisticsEnabled) {
        this.statisticsEnabled = statisticsEnabled;
        return this;
    }

    /**
     * Sets the list of message listeners (listens for when messages are added or removed) for this topic.
     *
     * @param listenerConfigs The list of message listeners for this topic. A null argument resets the list to empty.
     * @return This updated topic configuration.
     */
    public ReliableTopicConfig setMessageListenerConfigs(List<ListenerConfig> listenerConfigs) {
        this.listenerConfigs = listenerConfigs != null ? listenerConfigs : new LinkedList<ListenerConfig>();
        return this;
    }

    /**
     * Gets the list of message listeners (listens for when messages are added or removed) for this reliable topic.
     *
     * @return list of MessageListener configurations.
     */
    public List<ListenerConfig> getMessageListenerConfigs() {
        return listenerConfigs;
    }

    /**
     * Adds a message listener (listens for when messages are added or removed) to this reliable topic.
     *
     * @param listenerConfig the ListenerConfig to add.
     * @return the updated config.
     * @throws NullPointerException if listenerConfig is null.
     */
    public ReliableTopicConfig addMessageListenerConfig(ListenerConfig listenerConfig) {
        checkNotNull(listenerConfig, "listenerConfig can't be null");
        listenerConfigs.add(listenerConfig);
        return this;
    }

    @Override
    public String toString() {
        return "ReliableTopicConfig{"
                + "name='" + name + '\''
                + ", topicOverloadPolicy=" + topicOverloadPolicy
                + ", executor=" + executor
                + ", readBatchSize=" + readBatchSize
                + ", statisticsEnabled=" + statisticsEnabled
                + ", listenerConfigs=" + listenerConfigs
                + '}';
    }

    /**
     * Gets immutable version of this configuration.
     *
     * @return Immutable version of this configuration.
     * @deprecated this method will be removed in 4.0; it is meant for internal usage only.
     */
    public ReliableTopicConfig getAsReadOnly() {
        return new ReliableTopicConfigReadOnly(this);
    }

    /**
     * Read-only view: every mutator throws {@link UnsupportedOperationException}.
     */
    static class ReliableTopicConfigReadOnly extends ReliableTopicConfig {

        ReliableTopicConfigReadOnly(ReliableTopicConfig config) {
            super(config);
        }

        // FIX: previously missing, leaving the "read-only" config mutable via setName.
        @Override
        public ReliableTopicConfig setName(String name) {
            throw new UnsupportedOperationException("This config is read-only");
        }

        @Override
        public ReliableTopicConfig setExecutor(Executor executor) {
            throw new UnsupportedOperationException("This config is read-only");
        }

        @Override
        public ReliableTopicConfig setReadBatchSize(int readBatchSize) {
            throw new UnsupportedOperationException("This config is read-only");
        }

        @Override
        public ReliableTopicConfig setStatisticsEnabled(boolean statisticsEnabled) {
            throw new UnsupportedOperationException("This config is read-only");
        }

        @Override
        public ReliableTopicConfig addMessageListenerConfig(ListenerConfig listenerConfig) {
            throw new UnsupportedOperationException("This config is read-only");
        }

        // FIX: previously missing, leaving the "read-only" config mutable via setMessageListenerConfigs.
        @Override
        public ReliableTopicConfig setMessageListenerConfigs(List<ListenerConfig> listenerConfigs) {
            throw new UnsupportedOperationException("This config is read-only");
        }

        @Override
        public ReliableTopicConfig setTopicOverloadPolicy(TopicOverloadPolicy topicOverloadPolicy) {
            throw new UnsupportedOperationException("This config is read-only");
        }
    }
}
/*
 * Copyright 2000-2012 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.codeInsight.hint;

import com.intellij.icons.AllIcons;
import com.intellij.ide.IdeTooltipManager;
import com.intellij.openapi.util.Ref;
import com.intellij.ui.*;
import com.intellij.util.Consumer;
import com.intellij.util.ui.Html;
import com.intellij.util.ui.UIUtil;
import org.intellij.lang.annotations.JdkConstants;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import javax.swing.border.Border;
import javax.swing.border.CompoundBorder;
import javax.swing.event.HyperlinkListener;
import java.awt.*;
import java.awt.event.MouseListener;

/**
 * Factory methods for the Swing components used as editor hints: information,
 * question and error labels, plus helpers for preparing hint HTML text.
 */
public class HintUtil {
  // Hint background colors; each JBColor pair is (light theme, dark theme).
  public static final Color INFORMATION_COLOR = new JBColor(new Color(253, 254, 226), new Color(0x4d4f51));
  public static final Color QUESTION_COLOR = new JBColor(new Color(181, 208, 251), new Color(55, 108, 137));
  public static final Color ERROR_COLOR = new JBColor(new Color(255, 220, 220), new Color(0x781732));

  public static final Color QUESTION_UNDERSCORE_COLOR = JBColor.foreground();

  // Utility class: no instances.
  private HintUtil() {
  }

  /** Creates an information-style hint label with no listeners attached. */
  public static JComponent createInformationLabel(@NotNull String text) {
    return createInformationLabel(text, null, null, null);
  }

  /**
   * Creates an information-style hint label.
   *
   * @param text                the (HTML) text to display.
   * @param hyperlinkListener   optional listener attached to the hint's editor pane.
   * @param mouseListener       optional mouse listener attached to the hint's editor pane.
   * @param updatedTextConsumer if non-null, receives a consumer that replaces the hint text later.
   */
  public static JComponent createInformationLabel(@NotNull String text,
                                                  @Nullable HyperlinkListener hyperlinkListener,
                                                  @Nullable MouseListener mouseListener,
                                                  @Nullable Ref<Consumer<String>> updatedTextConsumer) {
    HintHint hintHint = getInformationHint();
    final HintLabel label = new HintLabel();
    label.setText(text, hintHint);
    label.setIcon(null);

    // When AWT tooltips are not used, the label itself must carry the hint styling.
    if (!hintHint.isAwtTooltip()) {
      label.setBorder(createHintBorder());
      label.setForeground(JBColor.foreground());
      label.setFont(getBoldFont());
      label.setBackground(INFORMATION_COLOR);
      label.setOpaque(true);
    }
    if (hyperlinkListener != null) {
      label.myPane.addHyperlinkListener(hyperlinkListener);
    }
    if (mouseListener != null) {
      label.myPane.addMouseListener(mouseListener);
    }
    if (updatedTextConsumer != null) {
      updatedTextConsumer.set(new Consumer<String>() {
        @Override
        public void consume(String s) {
          label.myPane.setText(s);

          // Force preferred size recalculation.
          label.setPreferredSize(null);
          label.myPane.setPreferredSize(null);
        }
      });
    }

    return label;
  }

  /** Default HintHint for information hints: information background, bold font, AWT tooltip mode. */
  @NotNull
  public static HintHint getInformationHint() {
    return new HintHint().setTextBg(INFORMATION_COLOR)
      .setTextFg(UIUtil.isUnderDarcula() ? UIUtil.getLabelForeground() : Color.black)
      .setFont(getBoldFont())
      .setAwtTooltip(true);
  }

  /** Border used for non-AWT-tooltip hints: 1px colored side border plus 2px padding. */
  public static CompoundBorder createHintBorder() {
    return BorderFactory.createCompoundBorder(
      new ColoredSideBorder(Color.white, Color.white, Color.gray, Color.gray, 1),
      BorderFactory.createEmptyBorder(2, 2, 2, 2)
    );
  }

  public static JComponent createInformationLabel(SimpleColoredText text) {
    return createInformationLabel(text, null);
  }

  /** Creates a question-style hint label with a help icon. */
  public static JComponent createQuestionLabel(String text) {
    HintHint hintHint = new HintHint().setTextBg(QUESTION_COLOR)
      .setTextFg(JBColor.foreground())
      .setFont(getBoldFont())
      .setAwtTooltip(true);

    HintLabel label = new HintLabel();
    label.setText(text, hintHint);
    label.setIcon(AllIcons.General.Help_small);

    if (!hintHint.isAwtTooltip()) {
      label.setBorder(createHintBorder());
      label.setForeground(JBColor.foreground());
      label.setFont(getBoldFont());
      label.setBackground(QUESTION_COLOR);
      label.setOpaque(true);
    }
    return label;
  }

  /** Creates an information-style hint from pre-colored text rendered by a SimpleColoredComponent. */
  public static JComponent createInformationLabel(final SimpleColoredText text, final Icon icon) {
    SimpleColoredComponent highlighted = new SimpleColoredComponent();
    highlighted.setIcon(icon);
    highlighted.setBackground(INFORMATION_COLOR);
    highlighted.setForeground(JBColor.foreground());
    highlighted.setFont(getBoldFont());
    text.appendToComponent(highlighted);

    HintLabel label = new HintLabel();
    label.setText(highlighted);
    return label;
  }

  /** Creates an error-style hint label. */
  public static JComponent createErrorLabel(String text) {
    HintHint hintHint = new HintHint().setTextBg(ERROR_COLOR)
      .setTextFg(JBColor.foreground())
      .setFont(getBoldFont())
      .setAwtTooltip(true);

    HintLabel label = new HintLabel();
    label.setText(text, hintHint);
    label.setIcon(null);

    if (!hintHint.isAwtTooltip()) {
      label.setBorder(createHintBorder());
      label.setForeground(JBColor.foreground());
      label.setFont(getBoldFont());
      label.setBackground(ERROR_COLOR);
      label.setOpaque(true);
    }
    return label;
  }

  private static Font getBoldFont() {
    return UIUtil.getLabelFont().deriveFont(Font.BOLD);
  }

  /**
   * Creates the small "advertisement" label shown at the bottom of popups.
   * The border is only applied when there is actual text to show.
   */
  public static JLabel createAdComponent(final String bottomText, final Border border, @JdkConstants.HorizontalAlignment int alignment) {
    JLabel label = new JLabel();
    label.setText(bottomText);
    label.setHorizontalAlignment(alignment);
    // Ad text is rendered two points smaller than the default label font.
    label.setFont(label.getFont().deriveFont((float)(label.getFont().getSize() - 2)));
    if (bottomText != null) {
      label.setBorder(border);
    }
    return label;
  }

  @NotNull
  public static String prepareHintText(@NotNull String text, @NotNull HintHint hintHint) {
    return prepareHintText(new Html(text), hintHint);
  }

  /**
   * Wraps the hint body in a full HTML document whose CSS reflects the
   * hint's font and colors.
   */
  public static String prepareHintText(@NotNull Html text, @NotNull HintHint hintHint) {
    String htmlBody = UIUtil.getHtmlBody(text);
    return String.format(
      "<html><head>%s</head><body>%s</body></html>",
      UIUtil.getCssFontDeclaration(hintHint.getTextFont(), hintHint.getTextForeground(), hintHint.getLinkForeground(),
                                   hintHint.getUlImg()),
      htmlBody
    );
  }

  /**
   * Panel hosting either an HTML editor pane or a SimpleColoredComponent in its
   * center, with an optional icon on the west side.
   */
  private static class HintLabel extends JPanel {
    private JEditorPane myPane;
    private SimpleColoredComponent myColored;
    private JLabel myIcon;

    private HintLabel() {
      setLayout(new BorderLayout());
    }

    // Replaces the current content with a pre-colored component.
    public void setText(SimpleColoredComponent colored) {
      clearText();

      myColored = colored;
      add(myColored, BorderLayout.CENTER);

      setOpaque(true);
      setBackground(colored.getBackground());

      revalidate();
      repaint();
    }

    // Replaces the current content with HTML text rendered by an editor pane.
    public void setText(String s, HintHint hintHint) {
      clearText();

      if (s != null) {
        myPane = IdeTooltipManager.initPane(s, hintHint, null);
        add(myPane, BorderLayout.CENTER);
      }

      setOpaque(true);
      setBackground(hintHint.getTextBackground());

      revalidate();
      repaint();
    }

    // Removes whichever content component is currently installed.
    private void clearText() {
      if (myPane != null) {
        remove(myPane);
        myPane = null;
      }

      if (myColored != null) {
        remove(myColored);
        myColored = null;
      }
    }

    public void setIcon(Icon icon) {
      if (myIcon != null) {
        remove(myIcon);
      }

      myIcon = new JLabel(icon, SwingConstants.CENTER);
      myIcon.setVerticalAlignment(SwingConstants.TOP);

      add(myIcon, BorderLayout.WEST);

      revalidate();
      repaint();
    }

    @Override
    public String toString() {
      return "Hint: text='" + (myPane != null ? myPane.getText() : "") + "'";
    }
  }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.fielddata;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.*;
import org.apache.lucene.search.join.BitDocIdSetFilter;
import org.apache.lucene.util.BitDocIdSet;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexComponent;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.settings.IndexSettings;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.search.MultiValueMode;

import java.io.IOException;

/**
 * Thread-safe utility class that allows to get per-segment values via the
 * {@link #load(LeafReaderContext)} method.
 */
public interface IndexFieldData<FD extends AtomicFieldData> extends IndexComponent {

    /** Helpers for reading common field-data settings. */
    public static class CommonSettings {
        public static final String SETTING_MEMORY_STORAGE_HINT = "memory_storage_hint";

        /** In-memory layouts a field-data implementation may be asked to prefer. */
        public enum MemoryStorageFormat {
            ORDINALS, PACKED, PAGED;

            // Case-insensitive lookup; returns null (no hint) for unknown or null input.
            public static MemoryStorageFormat fromString(String string) {
                for (MemoryStorageFormat e : MemoryStorageFormat.values()) {
                    if (e.name().equalsIgnoreCase(string)) {
                        return e;
                    }
                }
                return null;
            }
        }

        /**
         * Gets a memory storage hint that should be honored if possible but is not mandatory
         */
        public static MemoryStorageFormat getMemoryStorageHint(FieldDataType fieldDataType) {
            // backwards compatibility: the legacy "ordinals" setting takes precedence when present.
            String s = fieldDataType.getSettings().get("ordinals");
            if (s != null) {
                return "always".equals(s) ? MemoryStorageFormat.ORDINALS : null;
            }
            return MemoryStorageFormat.fromString(fieldDataType.getSettings().get(SETTING_MEMORY_STORAGE_HINT));
        }
    }

    /**
     * The field name.
     */
    MappedFieldType.Names getFieldNames();

    /**
     * The field data type.
     */
    FieldDataType getFieldDataType();

    /**
     * Loads the atomic field data for the reader, possibly cached.
     */
    FD load(LeafReaderContext context);

    /**
     * Loads directly the atomic field data for the reader, ignoring any caching involved.
     */
    FD loadDirect(LeafReaderContext context) throws Exception;

    /**
     * Comparator used for sorting.
     */
    XFieldComparatorSource comparatorSource(@Nullable Object missingValue, MultiValueMode sortMode, Nested nested);

    /**
     * Clears any resources associated with this field data.
     */
    void clear();

    /** Clears resources associated with this field data for the given reader only. */
    void clear(IndexReader reader);

    // we need this extended source when we have custom comparators to reuse our field data
    // in this case, we need to reduce type that will be used when search results are reduced
    // on another node (we don't have the custom source there...)
    public abstract class XFieldComparatorSource extends FieldComparatorSource {

        /**
         * Simple wrapper class around a filter that matches parent documents
         * and a filter that matches child documents. For every root document R,
         * R will be in the parent filter and its children documents will be the
         * documents that are contained in the inner set between the previous
         * parent + 1, or 0 if there is no previous parent, and R (excluded).
         */
        public static class Nested {

            private final BitDocIdSetFilter rootFilter;
            private final Filter innerFilter;

            public Nested(BitDocIdSetFilter rootFilter, Filter innerFilter) {
                this.rootFilter = rootFilter;
                this.innerFilter = innerFilter;
            }

            /**
             * Get a {@link BitDocIdSet} that matches the root documents.
             */
            public BitDocIdSet rootDocs(LeafReaderContext ctx) throws IOException {
                return rootFilter.getDocIdSet(ctx);
            }

            /**
             * Get a {@link DocIdSet} that matches the inner documents.
             */
            public DocIdSet innerDocs(LeafReaderContext ctx) throws IOException {
                return innerFilter.getDocIdSet(ctx, null);
            }
        }

        /** Whether missing values should be sorted first. */
        protected final boolean sortMissingFirst(Object missingValue) {
            return "_first".equals(missingValue);
        }

        /** Whether missing values should be sorted last, this is the default. */
        protected final boolean sortMissingLast(Object missingValue) {
            return missingValue == null || "_last".equals(missingValue);
        }

        /** Return the missing object value according to the reduced type of the comparator. */
        protected final Object missingObject(Object missingValue, boolean reversed) {
            if (sortMissingFirst(missingValue) || sortMissingLast(missingValue)) {
                // "_first"/"_last" sentinels: map to the extreme value of the reduced type.
                // XOR with `reversed` because "first" under a reversed sort is the max, not the min.
                final boolean min = sortMissingFirst(missingValue) ^ reversed;
                switch (reducedType()) {
                case INT:
                    return min ? Integer.MIN_VALUE : Integer.MAX_VALUE;
                case LONG:
                    return min ? Long.MIN_VALUE : Long.MAX_VALUE;
                case FLOAT:
                    return min ? Float.NEGATIVE_INFINITY : Float.POSITIVE_INFINITY;
                case DOUBLE:
                    return min ? Double.NEGATIVE_INFINITY : Double.POSITIVE_INFINITY;
                case STRING:
                case STRING_VAL:
                    // Strings have no representable extreme value; see missingValue(boolean).
                    return null;
                default:
                    throw new UnsupportedOperationException("Unsupported reduced type: " + reducedType());
                }
            } else {
                // Explicit user-provided missing value: coerce it to the reduced type.
                switch (reducedType()) {
                case INT:
                    if (missingValue instanceof Number) {
                        return ((Number) missingValue).intValue();
                    } else {
                        return Integer.parseInt(missingValue.toString());
                    }
                case LONG:
                    if (missingValue instanceof Number) {
                        return ((Number) missingValue).longValue();
                    } else {
                        return Long.parseLong(missingValue.toString());
                    }
                case FLOAT:
                    if (missingValue instanceof Number) {
                        return ((Number) missingValue).floatValue();
                    } else {
                        return Float.parseFloat(missingValue.toString());
                    }
                case DOUBLE:
                    if (missingValue instanceof Number) {
                        return ((Number) missingValue).doubleValue();
                    } else {
                        return Double.parseDouble(missingValue.toString());
                    }
                case STRING:
                case STRING_VAL:
                    if (missingValue instanceof BytesRef) {
                        return (BytesRef) missingValue;
                    } else if (missingValue instanceof byte[]) {
                        return new BytesRef((byte[]) missingValue);
                    } else {
                        return new BytesRef(missingValue.toString());
                    }
                default:
                    throw new UnsupportedOperationException("Unsupported reduced type: " + reducedType());
                }
            }
        }

        /** The type used when results of this comparator are reduced across nodes. */
        public abstract SortField.Type reducedType();

        /**
         * Return a missing value that is understandable by {@link SortField#setMissingValue(Object)}.
         * Most implementations return null because they already replace the value at the fielddata level.
         * However this can't work in case of strings since there is no such thing as a string which
         * compares greater than any other string, so in that case we need to return
         * {@link SortField#STRING_FIRST} or {@link SortField#STRING_LAST} so that the coordinating node
         * knows how to deal with null values.
         */
        public Object missingValue(boolean reversed) {
            return null;
        }
    }

    /** Factory for building {@link IndexFieldData} instances for a field. */
    interface Builder {

        IndexFieldData<?> build(Index index, @IndexSettings Settings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache,
                                CircuitBreakerService breakerService, MapperService mapperService);
    }

    /** Field data that also supports index-wide (global) views built over a whole IndexReader. */
    public static interface Global<FD extends AtomicFieldData> extends IndexFieldData<FD> {

        IndexFieldData<FD> loadGlobal(IndexReader indexReader);

        IndexFieldData<FD> localGlobalDirect(IndexReader indexReader) throws Exception;

    }
}
package com.google.ads.googleads.v9.services;

import static io.grpc.MethodDescriptor.generateFullMethodName;

// NOTE(review): this class is generated by the gRPC proto compiler from
// income_range_view_service.proto. Do not edit by hand; regenerate from the .proto instead.
/**
 * <pre>
 * Service to manage income range views.
 * </pre>
 */
@javax.annotation.Generated(
    value = "by gRPC proto compiler",
    comments = "Source: google/ads/googleads/v9/services/income_range_view_service.proto")
@io.grpc.stub.annotations.GrpcGenerated
public final class IncomeRangeViewServiceGrpc {

  private IncomeRangeViewServiceGrpc() {}

  public static final String SERVICE_NAME = "google.ads.googleads.v9.services.IncomeRangeViewService";

  // Static method descriptors that strictly reflect the proto.
  private static volatile io.grpc.MethodDescriptor<com.google.ads.googleads.v9.services.GetIncomeRangeViewRequest,
      com.google.ads.googleads.v9.resources.IncomeRangeView> getGetIncomeRangeViewMethod;

  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "GetIncomeRangeView",
      requestType = com.google.ads.googleads.v9.services.GetIncomeRangeViewRequest.class,
      responseType = com.google.ads.googleads.v9.resources.IncomeRangeView.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<com.google.ads.googleads.v9.services.GetIncomeRangeViewRequest,
      com.google.ads.googleads.v9.resources.IncomeRangeView> getGetIncomeRangeViewMethod() {
    io.grpc.MethodDescriptor<com.google.ads.googleads.v9.services.GetIncomeRangeViewRequest, com.google.ads.googleads.v9.resources.IncomeRangeView> getGetIncomeRangeViewMethod;
    // Lazily built, double-checked-locking style initialization of the shared descriptor.
    if ((getGetIncomeRangeViewMethod = IncomeRangeViewServiceGrpc.getGetIncomeRangeViewMethod) == null) {
      synchronized (IncomeRangeViewServiceGrpc.class) {
        if ((getGetIncomeRangeViewMethod = IncomeRangeViewServiceGrpc.getGetIncomeRangeViewMethod) == null) {
          IncomeRangeViewServiceGrpc.getGetIncomeRangeViewMethod = getGetIncomeRangeViewMethod =
              io.grpc.MethodDescriptor.<com.google.ads.googleads.v9.services.GetIncomeRangeViewRequest, com.google.ads.googleads.v9.resources.IncomeRangeView>newBuilder()
              .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(generateFullMethodName(SERVICE_NAME, "GetIncomeRangeView"))
              .setSampledToLocalTracing(true)
              .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
                  com.google.ads.googleads.v9.services.GetIncomeRangeViewRequest.getDefaultInstance()))
              .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
                  com.google.ads.googleads.v9.resources.IncomeRangeView.getDefaultInstance()))
              .setSchemaDescriptor(new IncomeRangeViewServiceMethodDescriptorSupplier("GetIncomeRangeView"))
              .build();
        }
      }
    }
    return getGetIncomeRangeViewMethod;
  }

  /**
   * Creates a new async stub that supports all call types for the service
   */
  public static IncomeRangeViewServiceStub newStub(io.grpc.Channel channel) {
    io.grpc.stub.AbstractStub.StubFactory<IncomeRangeViewServiceStub> factory =
      new io.grpc.stub.AbstractStub.StubFactory<IncomeRangeViewServiceStub>() {
        @java.lang.Override
        public IncomeRangeViewServiceStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
          return new IncomeRangeViewServiceStub(channel, callOptions);
        }
      };
    return IncomeRangeViewServiceStub.newStub(factory, channel);
  }

  /**
   * Creates a new blocking-style stub that supports unary and streaming output calls on the service
   */
  public static IncomeRangeViewServiceBlockingStub newBlockingStub(
      io.grpc.Channel channel) {
    io.grpc.stub.AbstractStub.StubFactory<IncomeRangeViewServiceBlockingStub> factory =
      new io.grpc.stub.AbstractStub.StubFactory<IncomeRangeViewServiceBlockingStub>() {
        @java.lang.Override
        public IncomeRangeViewServiceBlockingStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
          return new IncomeRangeViewServiceBlockingStub(channel, callOptions);
        }
      };
    return IncomeRangeViewServiceBlockingStub.newStub(factory, channel);
  }

  /**
   * Creates a new ListenableFuture-style stub that supports unary calls on the service
   */
  public static IncomeRangeViewServiceFutureStub newFutureStub(
      io.grpc.Channel channel) {
    io.grpc.stub.AbstractStub.StubFactory<IncomeRangeViewServiceFutureStub> factory =
      new io.grpc.stub.AbstractStub.StubFactory<IncomeRangeViewServiceFutureStub>() {
        @java.lang.Override
        public IncomeRangeViewServiceFutureStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
          return new IncomeRangeViewServiceFutureStub(channel, callOptions);
        }
      };
    return IncomeRangeViewServiceFutureStub.newStub(factory, channel);
  }

  /**
   * <pre>
   * Service to manage income range views.
   * </pre>
   */
  public static abstract class IncomeRangeViewServiceImplBase implements io.grpc.BindableService {

    /**
     * <pre>
     * Returns the requested income range view in full detail.
     * List of thrown errors:
     *   [AuthenticationError]()
     *   [AuthorizationError]()
     *   [HeaderError]()
     *   [InternalError]()
     *   [QuotaError]()
     *   [RequestError]()
     * </pre>
     */
    public void getIncomeRangeView(com.google.ads.googleads.v9.services.GetIncomeRangeViewRequest request,
        io.grpc.stub.StreamObserver<com.google.ads.googleads.v9.resources.IncomeRangeView> responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getGetIncomeRangeViewMethod(), responseObserver);
    }

    @java.lang.Override public final io.grpc.ServerServiceDefinition bindService() {
      return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor())
          .addMethod(
            getGetIncomeRangeViewMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
              new MethodHandlers<
                com.google.ads.googleads.v9.services.GetIncomeRangeViewRequest,
                com.google.ads.googleads.v9.resources.IncomeRangeView>(
                  this, METHODID_GET_INCOME_RANGE_VIEW)))
          .build();
    }
  }

  /**
   * <pre>
   * Service to manage income range views.
   * </pre>
   */
  public static final class IncomeRangeViewServiceStub extends io.grpc.stub.AbstractAsyncStub<IncomeRangeViewServiceStub> {
    private IncomeRangeViewServiceStub(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected IncomeRangeViewServiceStub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new IncomeRangeViewServiceStub(channel, callOptions);
    }

    /**
     * <pre>
     * Returns the requested income range view in full detail.
     * List of thrown errors:
     *   [AuthenticationError]()
     *   [AuthorizationError]()
     *   [HeaderError]()
     *   [InternalError]()
     *   [QuotaError]()
     *   [RequestError]()
     * </pre>
     */
    public void getIncomeRangeView(com.google.ads.googleads.v9.services.GetIncomeRangeViewRequest request,
        io.grpc.stub.StreamObserver<com.google.ads.googleads.v9.resources.IncomeRangeView> responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getGetIncomeRangeViewMethod(), getCallOptions()), request, responseObserver);
    }
  }

  /**
   * <pre>
   * Service to manage income range views.
   * </pre>
   */
  public static final class IncomeRangeViewServiceBlockingStub extends io.grpc.stub.AbstractBlockingStub<IncomeRangeViewServiceBlockingStub> {
    private IncomeRangeViewServiceBlockingStub(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected IncomeRangeViewServiceBlockingStub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new IncomeRangeViewServiceBlockingStub(channel, callOptions);
    }

    /**
     * <pre>
     * Returns the requested income range view in full detail.
     * List of thrown errors:
     *   [AuthenticationError]()
     *   [AuthorizationError]()
     *   [HeaderError]()
     *   [InternalError]()
     *   [QuotaError]()
     *   [RequestError]()
     * </pre>
     */
    public com.google.ads.googleads.v9.resources.IncomeRangeView getIncomeRangeView(com.google.ads.googleads.v9.services.GetIncomeRangeViewRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getGetIncomeRangeViewMethod(), getCallOptions(), request);
    }
  }

  /**
   * <pre>
   * Service to manage income range views.
   * </pre>
   */
  public static final class IncomeRangeViewServiceFutureStub extends io.grpc.stub.AbstractFutureStub<IncomeRangeViewServiceFutureStub> {
    private IncomeRangeViewServiceFutureStub(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected IncomeRangeViewServiceFutureStub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new IncomeRangeViewServiceFutureStub(channel, callOptions);
    }

    /**
     * <pre>
     * Returns the requested income range view in full detail.
     * List of thrown errors:
     *   [AuthenticationError]()
     *   [AuthorizationError]()
     *   [HeaderError]()
     *   [InternalError]()
     *   [QuotaError]()
     *   [RequestError]()
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<com.google.ads.googleads.v9.resources.IncomeRangeView> getIncomeRangeView(
        com.google.ads.googleads.v9.services.GetIncomeRangeViewRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getGetIncomeRangeViewMethod(), getCallOptions()), request);
    }
  }

  private static final int METHODID_GET_INCOME_RANGE_VIEW = 0;

  // Dispatches incoming server calls to the service implementation by method id.
  private static final class MethodHandlers<Req, Resp> implements
      io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>,
      io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>,
      io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>,
      io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> {
    private final IncomeRangeViewServiceImplBase serviceImpl;
    private final int methodId;

    MethodHandlers(IncomeRangeViewServiceImplBase serviceImpl, int methodId) {
      this.serviceImpl = serviceImpl;
      this.methodId = methodId;
    }

    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) {
      switch (methodId) {
        case METHODID_GET_INCOME_RANGE_VIEW:
          serviceImpl.getIncomeRangeView((com.google.ads.googleads.v9.services.GetIncomeRangeViewRequest) request,
              (io.grpc.stub.StreamObserver<com.google.ads.googleads.v9.resources.IncomeRangeView>) responseObserver);
          break;
        default:
          throw new AssertionError();
      }
    }

    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    public io.grpc.stub.StreamObserver<Req> invoke(
        io.grpc.stub.StreamObserver<Resp> responseObserver) {
      switch (methodId) {
        default:
          throw new AssertionError();
      }
    }
  }

  private static abstract class IncomeRangeViewServiceBaseDescriptorSupplier
      implements io.grpc.protobuf.ProtoFileDescriptorSupplier, io.grpc.protobuf.ProtoServiceDescriptorSupplier {
    IncomeRangeViewServiceBaseDescriptorSupplier() {}

    @java.lang.Override
    public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() {
      return com.google.ads.googleads.v9.services.IncomeRangeViewServiceProto.getDescriptor();
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() {
      return getFileDescriptor().findServiceByName("IncomeRangeViewService");
    }
  }

  private static final class IncomeRangeViewServiceFileDescriptorSupplier
      extends IncomeRangeViewServiceBaseDescriptorSupplier {
    IncomeRangeViewServiceFileDescriptorSupplier() {}
  }

  private static final class IncomeRangeViewServiceMethodDescriptorSupplier
      extends IncomeRangeViewServiceBaseDescriptorSupplier
      implements io.grpc.protobuf.ProtoMethodDescriptorSupplier {
    private final String methodName;

    IncomeRangeViewServiceMethodDescriptorSupplier(String methodName) {
      this.methodName = methodName;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() {
      return getServiceDescriptor().findMethodByName(methodName);
    }
  }

  private static volatile io.grpc.ServiceDescriptor serviceDescriptor;

  public static io.grpc.ServiceDescriptor getServiceDescriptor() {
    io.grpc.ServiceDescriptor result = serviceDescriptor;
    if (result == null) {
      synchronized (IncomeRangeViewServiceGrpc.class) {
        result = serviceDescriptor;
        if (result == null) {
          serviceDescriptor = result = io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME)
              .setSchemaDescriptor(new IncomeRangeViewServiceFileDescriptorSupplier())
              .addMethod(getGetIncomeRangeViewMethod())
              .build();
        }
      }
    }
    return result;
  }
}
/*
 * Copyright 2018 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package androidx.fragment.app;

import android.content.Context;
import android.content.res.Configuration;
import android.os.Parcelable;
import android.util.AttributeSet;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;

import androidx.annotation.Nullable;
import androidx.collection.SimpleArrayMap;
import androidx.loader.app.LoaderManager;

import java.io.FileDescriptor;
import java.io.PrintWriter;
import java.util.List;

/**
 * Provides integration points with a {@link FragmentManager} for a fragment host.
 * <p>
 * It is the responsibility of the host to take care of the Fragment's lifecycle.
 * The methods provided by {@link FragmentController} are for that purpose.
 * <p>
 * Every operation is a thin delegation to the host's {@link FragmentManager}
 * (via {@code mHost.mFragmentManager}); this class holds no state of its own
 * beyond the host callback reference.
 */
public class FragmentController {
    // The host callback; also the source of the FragmentManager every method delegates to.
    private final FragmentHostCallback<?> mHost;

    /**
     * Returns a {@link FragmentController}.
     */
    public static FragmentController createController(FragmentHostCallback<?> callbacks) {
        return new FragmentController(callbacks);
    }

    private FragmentController(FragmentHostCallback<?> callbacks) {
        mHost = callbacks;
    }

    /**
     * Returns a {@link FragmentManager} for this controller.
     */
    public FragmentManager getSupportFragmentManager() {
        return mHost.getFragmentManagerImpl();
    }

    /**
     * Returns a {@link LoaderManager}.
     *
     * @deprecated Loaders are managed separately from FragmentController and this now throws an
     * {@link UnsupportedOperationException}. Use {@link LoaderManager#getInstance} to obtain a
     * LoaderManager.
     * @see LoaderManager#getInstance
     */
    @Deprecated
    public LoaderManager getSupportLoaderManager() {
        throw new UnsupportedOperationException("Loaders are managed separately from "
                + "FragmentController, use LoaderManager.getInstance() to obtain a LoaderManager.");
    }

    /**
     * Returns a fragment with the given identifier.
     */
    @Nullable
    public Fragment findFragmentByWho(String who) {
        return mHost.mFragmentManager.findFragmentByWho(who);
    }

    /**
     * Returns the number of active fragments.
     */
    public int getActiveFragmentsCount() {
        return mHost.mFragmentManager.getActiveFragmentCount();
    }

    /**
     * Returns the list of active fragments.
     * <p>
     * NOTE(review): the {@code actives} parameter is ignored; the FragmentManager's own
     * list is returned instead. The parameter is kept only for source compatibility.
     */
    public List<Fragment> getActiveFragments(List<Fragment> actives) {
        return mHost.mFragmentManager.getActiveFragments();
    }

    /**
     * Attaches the host to the FragmentManager for this controller. The host must be
     * attached before the FragmentManager can be used to manage Fragments.
     */
    public void attachHost(Fragment parent) {
        // The host callback is passed twice: once as the host, once as the container provider.
        mHost.mFragmentManager.attachController(
                mHost, mHost /*container*/, parent);
    }

    /**
     * Instantiates a Fragment's view.
     *
     * @param parent The parent that the created view will be placed
     * in; <em>note that this may be null</em>.
     * @param name Tag name to be inflated.
     * @param context The context the view is being created in.
     * @param attrs Inflation attributes as specified in XML file.
     *
     * @return view the newly created view
     */
    public View onCreateView(View parent, String name, Context context, AttributeSet attrs) {
        return mHost.mFragmentManager.onCreateView(parent, name, context, attrs);
    }

    /**
     * Marks the fragment state as unsaved. This allows for "state loss" detection.
     */
    public void noteStateNotSaved() {
        mHost.mFragmentManager.noteStateNotSaved();
    }

    /**
     * Saves the state for all Fragments.
     */
    public Parcelable saveAllState() {
        return mHost.mFragmentManager.saveAllState();
    }

    /**
     * Restores the saved state for all Fragments. The given Fragment list are Fragment
     * instances retained across configuration changes.
     *
     * @see #retainNonConfig()
     *
     * @deprecated use {@link #restoreAllState(Parcelable, FragmentManagerNonConfig)}
     */
    @Deprecated
    public void restoreAllState(Parcelable state, List<Fragment> nonConfigList) {
        // Wraps the flat fragment list in a FragmentManagerNonConfig with no
        // child-fragment or view-model state (both null).
        mHost.mFragmentManager.restoreAllState(state,
                new FragmentManagerNonConfig(nonConfigList, null, null));
    }

    /**
     * Restores the saved state for all Fragments. The given FragmentManagerNonConfig are Fragment
     * instances retained across configuration changes, including nested fragments
     *
     * @see #retainNestedNonConfig()
     */
    public void restoreAllState(Parcelable state, FragmentManagerNonConfig nonConfig) {
        mHost.mFragmentManager.restoreAllState(state, nonConfig);
    }

    /**
     * Returns a list of Fragments that have opted to retain their instance across
     * configuration changes.
     *
     * @deprecated use {@link #retainNestedNonConfig()} to also track retained
     * nested child fragments
     */
    @Deprecated
    public List<Fragment> retainNonConfig() {
        // Only the top-level fragment list is exposed; nested state is dropped here.
        FragmentManagerNonConfig nonconf = mHost.mFragmentManager.retainNonConfig();
        return nonconf != null ? nonconf.getFragments() : null;
    }

    /**
     * Returns a nested tree of Fragments that have opted to retain their instance across
     * configuration changes.
     */
    public FragmentManagerNonConfig retainNestedNonConfig() {
        return mHost.mFragmentManager.retainNonConfig();
    }

    /**
     * Moves all Fragments managed by the controller's FragmentManager
     * into the create state.
     * <p>Call when Fragments should be created.
     *
     * @see Fragment#onCreate(Bundle)
     */
    public void dispatchCreate() {
        mHost.mFragmentManager.dispatchCreate();
    }

    /**
     * Moves all Fragments managed by the controller's FragmentManager
     * into the activity created state.
     * <p>Call when Fragments should be informed their host has been created.
     *
     * @see Fragment#onActivityCreated(Bundle)
     */
    public void dispatchActivityCreated() {
        mHost.mFragmentManager.dispatchActivityCreated();
    }

    /**
     * Moves all Fragments managed by the controller's FragmentManager
     * into the start state.
     * <p>Call when Fragments should be started.
     *
     * @see Fragment#onStart()
     */
    public void dispatchStart() {
        mHost.mFragmentManager.dispatchStart();
    }

    /**
     * Moves all Fragments managed by the controller's FragmentManager
     * into the resume state.
     * <p>Call when Fragments should be resumed.
     *
     * @see Fragment#onResume()
     */
    public void dispatchResume() {
        mHost.mFragmentManager.dispatchResume();
    }

    /**
     * Moves all Fragments managed by the controller's FragmentManager
     * into the pause state.
     * <p>Call when Fragments should be paused.
     *
     * @see Fragment#onPause()
     */
    public void dispatchPause() {
        mHost.mFragmentManager.dispatchPause();
    }

    /**
     * Moves all Fragments managed by the controller's FragmentManager
     * into the stop state.
     * <p>Call when Fragments should be stopped.
     *
     * @see Fragment#onStop()
     */
    public void dispatchStop() {
        mHost.mFragmentManager.dispatchStop();
    }

    /**
     * @deprecated This functionality has been rolled into {@link #dispatchStop()}.
     */
    @Deprecated
    public void dispatchReallyStop() {
        // Intentionally empty: kept only for binary/source compatibility.
    }

    /**
     * Moves all Fragments managed by the controller's FragmentManager
     * into the destroy view state.
     * <p>Call when the Fragment's views should be destroyed.
     *
     * @see Fragment#onDestroyView()
     */
    public void dispatchDestroyView() {
        mHost.mFragmentManager.dispatchDestroyView();
    }

    /**
     * Moves all Fragments managed by the controller's FragmentManager
     * into the destroy state.
     * <p>Call when Fragments should be destroyed.
     *
     * @see Fragment#onDestroy()
     */
    public void dispatchDestroy() {
        mHost.mFragmentManager.dispatchDestroy();
    }

    /**
     * Lets all Fragments managed by the controller's FragmentManager know the multi-window mode of
     * the activity changed.
     * <p>Call when the multi-window mode of the activity changed.
     *
     * @see Fragment#onMultiWindowModeChanged
     */
    public void dispatchMultiWindowModeChanged(boolean isInMultiWindowMode) {
        mHost.mFragmentManager.dispatchMultiWindowModeChanged(isInMultiWindowMode);
    }

    /**
     * Lets all Fragments managed by the controller's FragmentManager know the picture-in-picture
     * mode of the activity changed.
     * <p>Call when the picture-in-picture mode of the activity changed.
     *
     * @see Fragment#onPictureInPictureModeChanged
     */
    public void dispatchPictureInPictureModeChanged(boolean isInPictureInPictureMode) {
        mHost.mFragmentManager.dispatchPictureInPictureModeChanged(isInPictureInPictureMode);
    }

    /**
     * Lets all Fragments managed by the controller's FragmentManager
     * know a configuration change occurred.
     * <p>Call when there is a configuration change.
     *
     * @see Fragment#onConfigurationChanged(Configuration)
     */
    public void dispatchConfigurationChanged(Configuration newConfig) {
        mHost.mFragmentManager.dispatchConfigurationChanged(newConfig);
    }

    /**
     * Lets all Fragments managed by the controller's FragmentManager
     * know the device is in a low memory condition.
     * <p>Call when the device is low on memory and Fragment's should trim
     * their memory usage.
     *
     * @see Fragment#onLowMemory()
     */
    public void dispatchLowMemory() {
        mHost.mFragmentManager.dispatchLowMemory();
    }

    /**
     * Lets all Fragments managed by the controller's FragmentManager
     * know they should create an options menu.
     * <p>Call when the Fragment should create an options menu.
     *
     * @return {@code true} if the options menu contains items to display
     * @see Fragment#onCreateOptionsMenu(Menu, MenuInflater)
     */
    public boolean dispatchCreateOptionsMenu(Menu menu, MenuInflater inflater) {
        return mHost.mFragmentManager.dispatchCreateOptionsMenu(menu, inflater);
    }

    /**
     * Lets all Fragments managed by the controller's FragmentManager
     * know they should prepare their options menu for display.
     * <p>Call immediately before displaying the Fragment's options menu.
     *
     * @return {@code true} if the options menu contains items to display
     * @see Fragment#onPrepareOptionsMenu(Menu)
     */
    public boolean dispatchPrepareOptionsMenu(Menu menu) {
        return mHost.mFragmentManager.dispatchPrepareOptionsMenu(menu);
    }

    /**
     * Sends an option item selection event to the Fragments managed by the
     * controller's FragmentManager. Once the event has been consumed,
     * no additional handling will be performed.
     * <p>Call immediately after an options menu item has been selected
     *
     * @return {@code true} if the options menu selection event was consumed
     * @see Fragment#onOptionsItemSelected(MenuItem)
     */
    public boolean dispatchOptionsItemSelected(MenuItem item) {
        return mHost.mFragmentManager.dispatchOptionsItemSelected(item);
    }

    /**
     * Sends a context item selection event to the Fragments managed by the
     * controller's FragmentManager. Once the event has been consumed,
     * no additional handling will be performed.
     * <p>Call immediately after an options menu item has been selected
     *
     * @return {@code true} if the context menu selection event was consumed
     * @see Fragment#onContextItemSelected(MenuItem)
     */
    public boolean dispatchContextItemSelected(MenuItem item) {
        return mHost.mFragmentManager.dispatchContextItemSelected(item);
    }

    /**
     * Lets all Fragments managed by the controller's FragmentManager
     * know their options menu has closed.
     * <p>Call immediately after closing the Fragment's options menu.
     *
     * @see Fragment#onOptionsMenuClosed(Menu)
     */
    public void dispatchOptionsMenuClosed(Menu menu) {
        mHost.mFragmentManager.dispatchOptionsMenuClosed(menu);
    }

    /**
     * Execute any pending actions for the Fragments managed by the
     * controller's FragmentManager.
     * <p>Call when queued actions can be performed [eg when the
     * Fragment moves into a start or resume state].
     * @return {@code true} if queued actions were performed
     */
    public boolean execPendingActions() {
        return mHost.mFragmentManager.execPendingActions();
    }

    /**
     * Starts the loaders.
     *
     * @deprecated Loaders are managed separately from FragmentController
     */
    @Deprecated
    public void doLoaderStart() {
        // Intentionally empty: loaders are no longer managed here.
    }

    /**
     * Stops the loaders, optionally retaining their state. This is useful for keeping the
     * loader state across configuration changes.
     *
     * @param retain When {@code true}, the loaders aren't stopped, but, their instances
     * are retained in a started state
     *
     * @deprecated Loaders are managed separately from FragmentController
     */
    @Deprecated
    public void doLoaderStop(boolean retain) {
        // Intentionally empty: loaders are no longer managed here.
    }

    /**
     * Retains the state of each of the loaders.
     *
     * @deprecated Loaders are managed separately from FragmentController
     */
    @Deprecated
    public void doLoaderRetain() {
        // Intentionally empty: loaders are no longer managed here.
    }

    /**
     * Destroys the loaders and, if their state is not being retained, removes them.
     *
     * @deprecated Loaders are managed separately from FragmentController
     */
    @Deprecated
    public void doLoaderDestroy() {
        // Intentionally empty: loaders are no longer managed here.
    }

    /**
     * Lets the loaders know the host is ready to receive notifications.
     *
     * @deprecated Loaders are managed separately from FragmentController
     */
    @Deprecated
    public void reportLoaderStart() {
        // Intentionally empty: loaders are no longer managed here.
    }

    /**
     * Returns a list of LoaderManagers that have opted to retain their instance across
     * configuration changes.
     *
     * @deprecated Loaders are managed separately from FragmentController
     */
    @Deprecated
    public SimpleArrayMap<String, LoaderManager> retainLoaderNonConfig() {
        return null;
    }

    /**
     * Restores the saved state for all LoaderManagers. The given LoaderManager list are
     * LoaderManager instances retained across configuration changes.
     *
     * @see #retainLoaderNonConfig()
     *
     * @deprecated Loaders are managed separately from FragmentController
     */
    @Deprecated
    public void restoreLoaderNonConfig(SimpleArrayMap<String, LoaderManager> loaderManagers) {
        // Intentionally empty: loaders are no longer managed here.
    }

    /**
     * Dumps the current state of the loaders.
     *
     * @deprecated Loaders are managed separately from FragmentController
     */
    @Deprecated
    public void dumpLoaders(String prefix, FileDescriptor fd, PrintWriter writer, String[] args) {
        // Intentionally empty: loaders are no longer managed here.
    }
}
/*
 * RED5 Open Source Flash Server - https://github.com/Red5/
 *
 * Copyright 2006-2015 by respective authors (see below). All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.red5.client.net.rtmpt;

import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.atomic.AtomicLong;

import org.apache.mina.core.buffer.IoBuffer;
import org.red5.server.api.Red5;
import org.red5.server.net.rtmp.IRTMPHandler;
import org.red5.server.net.rtmp.RTMPConnection;
import org.red5.server.net.rtmp.codec.RTMP;
import org.red5.server.net.rtmp.codec.RTMPProtocolDecoder;
import org.red5.server.net.rtmp.codec.RTMPProtocolEncoder;
import org.red5.server.net.rtmp.message.Packet;
import org.red5.server.net.rtmpt.codec.RTMPTProtocolDecoder;
import org.red5.server.net.rtmpt.codec.RTMPTProtocolEncoder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Base class for client-side RTMPT (RTMP tunneled over HTTP) connections. Outgoing
 * messages are encoded and queued as {@link PendingData}; subclasses drain the queue
 * via {@link #getPendingMessages(int)} / {@link #foldPendingMessages(int)} when the
 * HTTP transport is ready to send.
 */
public abstract class BaseRTMPTConnection extends RTMPConnection {

    private static final Logger log = LoggerFactory.getLogger(BaseRTMPTConnection.class);

    /**
     * Protocol decoder
     */
    private RTMPTProtocolDecoder decoder;

    /**
     * Protocol encoder
     */
    private RTMPTProtocolEncoder encoder;

    /**
     * A queued outbound message: the encoded bytes plus (optionally) the originating
     * packet, kept so the handler can be notified once the bytes are actually sent.
     */
    private static class PendingData {

        private IoBuffer buffer;

        private Packet packet;

        private PendingData(IoBuffer buffer, Packet packet) {
            this.buffer = buffer;
            this.packet = packet;
        }

        // Used for raw writes where there is no packet to notify about.
        private PendingData(IoBuffer buffer) {
            this.buffer = buffer;
        }

        public IoBuffer getBuffer() {
            return buffer;
        }

        public Packet getPacket() {
            return packet;
        }

        @Override
        public String toString() {
            return getClass().getName() + "(buffer=" + buffer + "; packet=" + packet + ")";
        }
    }

    /**
     * List of pending messages
     */
    private ConcurrentLinkedQueue<PendingData> pendingMessages = new ConcurrentLinkedQueue<>();

    /**
     * Closing flag
     */
    private volatile boolean closing;

    /**
     * Number of read bytes
     */
    private AtomicLong readBytes = new AtomicLong(0);

    /**
     * Number of written bytes
     */
    private AtomicLong writtenBytes = new AtomicLong(0);

    /**
     * Byte buffer
     */
    private IoBuffer buffer;

    /**
     * Clients session id, used to override the BaseConnection.sessionId for client implementations.
     */
    protected String clientSessionId;

    /**
     * RTMP events handler
     */
    private volatile IRTMPHandler handler;

    public BaseRTMPTConnection(String type) {
        super(type);
        this.buffer = IoBuffer.allocate(2048);
        this.buffer.setAutoExpand(true);
    }

    /**
     * Return any pending messages up to a given size.
     *
     * @param targetSize
     *            the size the resulting buffer should have
     * @return a buffer containing the data to send or null if no messages are pending
     */
    abstract public IoBuffer getPendingMessages(int targetSize);

    /** {@inheritDoc} */
    @Override
    public void close() {
        log.debug("close - state: {}", state.getState());
        // Defer actual closing so we can send back pending messages to the client.
        closing = true;
    }

    /**
     * Getter for property 'closing'.
     *
     * @return Value for property 'closing'.
     */
    public boolean isClosing() {
        return closing;
    }

    /**
     * Real close
     */
    public void realClose() {
        if (isClosing()) {
            if (buffer != null) {
                buffer.free();
                buffer = null;
            }
            state.setState(RTMP.STATE_DISCONNECTED);
            pendingMessages.clear();
            super.close();
        }
    }

    /**
     * Send raw data down the connection.
     *
     * @param packet
     *            the buffer containing the raw data to queue for sending
     */
    @Override
    public void writeRaw(IoBuffer packet) {
        pendingMessages.add(new PendingData(packet));
    }

    /** {@inheritDoc} */
    @Override
    public long getReadBytes() {
        return readBytes.get();
    }

    /** {@inheritDoc} */
    @Override
    public long getWrittenBytes() {
        return writtenBytes.get();
    }

    /**
     * {@inheritDoc}
     * <p>
     * Returns the number of messages currently queued for sending.
     */
    @Override
    public long getPendingMessages() {
        return pendingMessages.size();
    }

    /**
     * Overrides the generated session id with the client-supplied one; subsequent calls
     * to {@link #getSessionId()} will return the client value.
     *
     * @param sessionId
     *            the session id assigned by the server
     */
    public void setSessionId(String sessionId) {
        log.debug("Overriding generated session id {} with {}", this.sessionId, sessionId);
        this.clientSessionId = sessionId;
    }

    @Override
    public String getSessionId() {
        if (clientSessionId == null) {
            return sessionId;
        }
        return clientSessionId;
    }

    /**
     * Decode data sent by the client.
     *
     * @param data
     *            the data to decode
     * @return a list of decoded objects; empty when the connection is closing or closed
     */
    public List<?> decode(IoBuffer data) {
        log.debug("decode - state: {}", state);
        if (closing || state.getState() == RTMP.STATE_DISCONNECTED) {
            // Connection is being closed, don't decode any new packets.
            // emptyList() avoids the raw-type/unchecked warning of EMPTY_LIST.
            return Collections.emptyList();
        }
        readBytes.addAndGet(data.limit());
        buffer.put(data);
        buffer.flip();
        return decoder.decodeBuffer(this, buffer);
    }

    /**
     * Send RTMP packet down the connection.
     *
     * @param packet
     *            the packet to send
     */
    @Override
    public void write(final Packet packet) {
        log.debug("write - state: {}", state);
        if (closing || state.getState() == RTMP.STATE_DISCONNECTED) {
            // Connection is being closed, don't send any new packets
            return;
        }
        IoBuffer data;
        try {
            // the encoder reads the current connection from the thread-local
            Red5.setConnectionLocal(this);
            data = encoder.encode(packet);
        } catch (Exception e) {
            log.error("Could not encode message {}", packet, e);
            return;
        } finally {
            Red5.setConnectionLocal(null);
        }
        if (data != null) {
            // Mark packet as being written
            writingMessage(packet);
            //add to pending
            pendingMessages.add(new PendingData(data, packet));
        } else {
            log.info("Response buffer was null after encoding");
        }
    }

    /**
     * Drains queued messages into a single buffer of roughly {@code targetSize} bytes
     * (the last message may push the buffer past the target), then notifies the handler
     * for each packet that was folded in.
     *
     * @param targetSize
     *            approximate number of bytes to fold into the result
     * @return the folded buffer, or null if nothing was pending
     */
    protected IoBuffer foldPendingMessages(int targetSize) {
        if (pendingMessages.isEmpty()) {
            return null;
        }
        IoBuffer result = IoBuffer.allocate(2048);
        result.setAutoExpand(true);
        // We'll have to create a copy here to avoid endless recursion
        List<Packet> toNotify = new ArrayList<>();
        while (!pendingMessages.isEmpty()) {
            // poll() instead of remove(): another thread may drain the queue between
            // the isEmpty() check and the removal, and remove() would then throw.
            PendingData pendingMessage = pendingMessages.poll();
            if (pendingMessage == null) {
                break;
            }
            result.put(pendingMessage.getBuffer());
            if (pendingMessage.getPacket() != null) {
                toNotify.add(pendingMessage.getPacket());
            }
            if ((result.position() > targetSize)) {
                break;
            }
        }
        for (Packet message : toNotify) {
            try {
                handler.messageSent(this, message);
            } catch (Exception e) {
                log.error("Could not notify stream subsystem about sent message", e);
            }
        }
        result.flip();
        writtenBytes.addAndGet(result.limit());
        return result;
    }

    @Override
    public void setHandler(IRTMPHandler handler) {
        this.handler = handler;
    }

    /**
     * Sets the protocol decoder; must be an RTMPT decoder.
     */
    public void setDecoder(RTMPProtocolDecoder decoder) {
        this.decoder = (RTMPTProtocolDecoder) decoder;
    }

    /**
     * Sets the protocol encoder; must be an RTMPT encoder.
     */
    public void setEncoder(RTMPProtocolEncoder encoder) {
        this.encoder = (RTMPTProtocolEncoder) encoder;
    }
}
package com.huang.rp.web.sys.rbac.domain; import java.util.ArrayList; import java.util.List; public class SysResourceExample { protected String orderByClause; protected boolean distinct; protected List<Criteria> oredCriteria; public SysResourceExample() { oredCriteria = new ArrayList<Criteria>(); } public void setOrderByClause(String orderByClause) { this.orderByClause = orderByClause; } public String getOrderByClause() { return orderByClause; } public void setDistinct(boolean distinct) { this.distinct = distinct; } public boolean isDistinct() { return distinct; } public List<Criteria> getOredCriteria() { return oredCriteria; } public void or(Criteria criteria) { oredCriteria.add(criteria); } public Criteria or() { Criteria criteria = createCriteriaInternal(); oredCriteria.add(criteria); return criteria; } public Criteria createCriteria() { Criteria criteria = createCriteriaInternal(); if (oredCriteria.size() == 0) { oredCriteria.add(criteria); } return criteria; } protected Criteria createCriteriaInternal() { Criteria criteria = new Criteria(); return criteria; } public void clear() { oredCriteria.clear(); orderByClause = null; distinct = false; } protected abstract static class GeneratedCriteria { protected List<Criterion> criteria; protected GeneratedCriteria() { super(); criteria = new ArrayList<Criterion>(); } public boolean isValid() { return criteria.size() > 0; } public List<Criterion> getAllCriteria() { return criteria; } public List<Criterion> getCriteria() { return criteria; } protected void addCriterion(String condition) { if (condition == null) { throw new RuntimeException("Value for condition cannot be null"); } criteria.add(new Criterion(condition)); } protected void addCriterion(String condition, Object value, String property) { if (value == null) { throw new RuntimeException("Value for " + property + " cannot be null"); } criteria.add(new Criterion(condition, value)); } protected void addCriterion(String condition, Object value1, Object value2, 
String property) { if (value1 == null || value2 == null) { throw new RuntimeException("Between values for " + property + " cannot be null"); } criteria.add(new Criterion(condition, value1, value2)); } public Criteria andIdIsNull() { addCriterion("id is null"); return (Criteria) this; } public Criteria andIdIsNotNull() { addCriterion("id is not null"); return (Criteria) this; } public Criteria andIdEqualTo(Long value) { addCriterion("id =", value, "id"); return (Criteria) this; } public Criteria andIdNotEqualTo(Long value) { addCriterion("id <>", value, "id"); return (Criteria) this; } public Criteria andIdGreaterThan(Long value) { addCriterion("id >", value, "id"); return (Criteria) this; } public Criteria andIdGreaterThanOrEqualTo(Long value) { addCriterion("id >=", value, "id"); return (Criteria) this; } public Criteria andIdLessThan(Long value) { addCriterion("id <", value, "id"); return (Criteria) this; } public Criteria andIdLessThanOrEqualTo(Long value) { addCriterion("id <=", value, "id"); return (Criteria) this; } public Criteria andIdIn(List<Long> values) { addCriterion("id in", values, "id"); return (Criteria) this; } public Criteria andIdNotIn(List<Long> values) { addCriterion("id not in", values, "id"); return (Criteria) this; } public Criteria andIdBetween(Long value1, Long value2) { addCriterion("id between", value1, value2, "id"); return (Criteria) this; } public Criteria andIdNotBetween(Long value1, Long value2) { addCriterion("id not between", value1, value2, "id"); return (Criteria) this; } public Criteria andNameIsNull() { addCriterion("name is null"); return (Criteria) this; } public Criteria andNameIsNotNull() { addCriterion("name is not null"); return (Criteria) this; } public Criteria andNameEqualTo(String value) { addCriterion("name =", value, "name"); return (Criteria) this; } public Criteria andNameNotEqualTo(String value) { addCriterion("name <>", value, "name"); return (Criteria) this; } public Criteria andNameGreaterThan(String value) { 
addCriterion("name >", value, "name"); return (Criteria) this; } public Criteria andNameGreaterThanOrEqualTo(String value) { addCriterion("name >=", value, "name"); return (Criteria) this; } public Criteria andNameLessThan(String value) { addCriterion("name <", value, "name"); return (Criteria) this; } public Criteria andNameLessThanOrEqualTo(String value) { addCriterion("name <=", value, "name"); return (Criteria) this; } public Criteria andNameLike(String value) { addCriterion("name like", value, "name"); return (Criteria) this; } public Criteria andNameNotLike(String value) { addCriterion("name not like", value, "name"); return (Criteria) this; } public Criteria andNameIn(List<String> values) { addCriterion("name in", values, "name"); return (Criteria) this; } public Criteria andNameNotIn(List<String> values) { addCriterion("name not in", values, "name"); return (Criteria) this; } public Criteria andNameBetween(String value1, String value2) { addCriterion("name between", value1, value2, "name"); return (Criteria) this; } public Criteria andNameNotBetween(String value1, String value2) { addCriterion("name not between", value1, value2, "name"); return (Criteria) this; } public Criteria andIdentityIsNull() { addCriterion("identity is null"); return (Criteria) this; } public Criteria andIdentityIsNotNull() { addCriterion("identity is not null"); return (Criteria) this; } public Criteria andIdentityEqualTo(String value) { addCriterion("identity =", value, "identity"); return (Criteria) this; } public Criteria andIdentityNotEqualTo(String value) { addCriterion("identity <>", value, "identity"); return (Criteria) this; } public Criteria andIdentityGreaterThan(String value) { addCriterion("identity >", value, "identity"); return (Criteria) this; } public Criteria andIdentityGreaterThanOrEqualTo(String value) { addCriterion("identity >=", value, "identity"); return (Criteria) this; } public Criteria andIdentityLessThan(String value) { addCriterion("identity <", value, 
"identity"); return (Criteria) this; } public Criteria andIdentityLessThanOrEqualTo(String value) { addCriterion("identity <=", value, "identity"); return (Criteria) this; } public Criteria andIdentityLike(String value) { addCriterion("identity like", value, "identity"); return (Criteria) this; } public Criteria andIdentityNotLike(String value) { addCriterion("identity not like", value, "identity"); return (Criteria) this; } public Criteria andIdentityIn(List<String> values) { addCriterion("identity in", values, "identity"); return (Criteria) this; } public Criteria andIdentityNotIn(List<String> values) { addCriterion("identity not in", values, "identity"); return (Criteria) this; } public Criteria andIdentityBetween(String value1, String value2) { addCriterion("identity between", value1, value2, "identity"); return (Criteria) this; } public Criteria andIdentityNotBetween(String value1, String value2) { addCriterion("identity not between", value1, value2, "identity"); return (Criteria) this; } public Criteria andUrlIsNull() { addCriterion("url is null"); return (Criteria) this; } public Criteria andUrlIsNotNull() { addCriterion("url is not null"); return (Criteria) this; } public Criteria andUrlEqualTo(String value) { addCriterion("url =", value, "url"); return (Criteria) this; } public Criteria andUrlNotEqualTo(String value) { addCriterion("url <>", value, "url"); return (Criteria) this; } public Criteria andUrlGreaterThan(String value) { addCriterion("url >", value, "url"); return (Criteria) this; } public Criteria andUrlGreaterThanOrEqualTo(String value) { addCriterion("url >=", value, "url"); return (Criteria) this; } public Criteria andUrlLessThan(String value) { addCriterion("url <", value, "url"); return (Criteria) this; } public Criteria andUrlLessThanOrEqualTo(String value) { addCriterion("url <=", value, "url"); return (Criteria) this; } public Criteria andUrlLike(String value) { addCriterion("url like", value, "url"); return (Criteria) this; } public 
Criteria andUrlNotLike(String value) { addCriterion("url not like", value, "url"); return (Criteria) this; } public Criteria andUrlIn(List<String> values) { addCriterion("url in", values, "url"); return (Criteria) this; } public Criteria andUrlNotIn(List<String> values) { addCriterion("url not in", values, "url"); return (Criteria) this; } public Criteria andUrlBetween(String value1, String value2) { addCriterion("url between", value1, value2, "url"); return (Criteria) this; } public Criteria andUrlNotBetween(String value1, String value2) { addCriterion("url not between", value1, value2, "url"); return (Criteria) this; } public Criteria andParentIdIsNull() { addCriterion("parent_id is null"); return (Criteria) this; } public Criteria andParentIdIsNotNull() { addCriterion("parent_id is not null"); return (Criteria) this; } public Criteria andParentIdEqualTo(Long value) { addCriterion("parent_id =", value, "parentId"); return (Criteria) this; } public Criteria andParentIdNotEqualTo(Long value) { addCriterion("parent_id <>", value, "parentId"); return (Criteria) this; } public Criteria andParentIdGreaterThan(Long value) { addCriterion("parent_id >", value, "parentId"); return (Criteria) this; } public Criteria andParentIdGreaterThanOrEqualTo(Long value) { addCriterion("parent_id >=", value, "parentId"); return (Criteria) this; } public Criteria andParentIdLessThan(Long value) { addCriterion("parent_id <", value, "parentId"); return (Criteria) this; } public Criteria andParentIdLessThanOrEqualTo(Long value) { addCriterion("parent_id <=", value, "parentId"); return (Criteria) this; } public Criteria andParentIdIn(List<Long> values) { addCriterion("parent_id in", values, "parentId"); return (Criteria) this; } public Criteria andParentIdNotIn(List<Long> values) { addCriterion("parent_id not in", values, "parentId"); return (Criteria) this; } public Criteria andParentIdBetween(Long value1, Long value2) { addCriterion("parent_id between", value1, value2, "parentId"); return 
(Criteria) this; } public Criteria andParentIdNotBetween(Long value1, Long value2) { addCriterion("parent_id not between", value1, value2, "parentId"); return (Criteria) this; } public Criteria andParentIdsIsNull() { addCriterion("parent_ids is null"); return (Criteria) this; } public Criteria andParentIdsIsNotNull() { addCriterion("parent_ids is not null"); return (Criteria) this; } public Criteria andParentIdsEqualTo(String value) { addCriterion("parent_ids =", value, "parentIds"); return (Criteria) this; } public Criteria andParentIdsNotEqualTo(String value) { addCriterion("parent_ids <>", value, "parentIds"); return (Criteria) this; } public Criteria andParentIdsGreaterThan(String value) { addCriterion("parent_ids >", value, "parentIds"); return (Criteria) this; } public Criteria andParentIdsGreaterThanOrEqualTo(String value) { addCriterion("parent_ids >=", value, "parentIds"); return (Criteria) this; } public Criteria andParentIdsLessThan(String value) { addCriterion("parent_ids <", value, "parentIds"); return (Criteria) this; } public Criteria andParentIdsLessThanOrEqualTo(String value) { addCriterion("parent_ids <=", value, "parentIds"); return (Criteria) this; } public Criteria andParentIdsLike(String value) { addCriterion("parent_ids like", value, "parentIds"); return (Criteria) this; } public Criteria andParentIdsNotLike(String value) { addCriterion("parent_ids not like", value, "parentIds"); return (Criteria) this; } public Criteria andParentIdsIn(List<String> values) { addCriterion("parent_ids in", values, "parentIds"); return (Criteria) this; } public Criteria andParentIdsNotIn(List<String> values) { addCriterion("parent_ids not in", values, "parentIds"); return (Criteria) this; } public Criteria andParentIdsBetween(String value1, String value2) { addCriterion("parent_ids between", value1, value2, "parentIds"); return (Criteria) this; } public Criteria andParentIdsNotBetween(String value1, String value2) { addCriterion("parent_ids not between", value1, 
value2, "parentIds"); return (Criteria) this; } public Criteria andIconIsNull() { addCriterion("icon is null"); return (Criteria) this; } public Criteria andIconIsNotNull() { addCriterion("icon is not null"); return (Criteria) this; } public Criteria andIconEqualTo(String value) { addCriterion("icon =", value, "icon"); return (Criteria) this; } public Criteria andIconNotEqualTo(String value) { addCriterion("icon <>", value, "icon"); return (Criteria) this; } public Criteria andIconGreaterThan(String value) { addCriterion("icon >", value, "icon"); return (Criteria) this; } public Criteria andIconGreaterThanOrEqualTo(String value) { addCriterion("icon >=", value, "icon"); return (Criteria) this; } public Criteria andIconLessThan(String value) { addCriterion("icon <", value, "icon"); return (Criteria) this; } public Criteria andIconLessThanOrEqualTo(String value) { addCriterion("icon <=", value, "icon"); return (Criteria) this; } public Criteria andIconLike(String value) { addCriterion("icon like", value, "icon"); return (Criteria) this; } public Criteria andIconNotLike(String value) { addCriterion("icon not like", value, "icon"); return (Criteria) this; } public Criteria andIconIn(List<String> values) { addCriterion("icon in", values, "icon"); return (Criteria) this; } public Criteria andIconNotIn(List<String> values) { addCriterion("icon not in", values, "icon"); return (Criteria) this; } public Criteria andIconBetween(String value1, String value2) { addCriterion("icon between", value1, value2, "icon"); return (Criteria) this; } public Criteria andIconNotBetween(String value1, String value2) { addCriterion("icon not between", value1, value2, "icon"); return (Criteria) this; } public Criteria andWeightIsNull() { addCriterion("weight is null"); return (Criteria) this; } public Criteria andWeightIsNotNull() { addCriterion("weight is not null"); return (Criteria) this; } public Criteria andWeightEqualTo(Integer value) { addCriterion("weight =", value, "weight"); return 
(Criteria) this; } public Criteria andWeightNotEqualTo(Integer value) { addCriterion("weight <>", value, "weight"); return (Criteria) this; } public Criteria andWeightGreaterThan(Integer value) { addCriterion("weight >", value, "weight"); return (Criteria) this; } public Criteria andWeightGreaterThanOrEqualTo(Integer value) { addCriterion("weight >=", value, "weight"); return (Criteria) this; } public Criteria andWeightLessThan(Integer value) { addCriterion("weight <", value, "weight"); return (Criteria) this; } public Criteria andWeightLessThanOrEqualTo(Integer value) { addCriterion("weight <=", value, "weight"); return (Criteria) this; } public Criteria andWeightIn(List<Integer> values) { addCriterion("weight in", values, "weight"); return (Criteria) this; } public Criteria andWeightNotIn(List<Integer> values) { addCriterion("weight not in", values, "weight"); return (Criteria) this; } public Criteria andWeightBetween(Integer value1, Integer value2) { addCriterion("weight between", value1, value2, "weight"); return (Criteria) this; } public Criteria andWeightNotBetween(Integer value1, Integer value2) { addCriterion("weight not between", value1, value2, "weight"); return (Criteria) this; } public Criteria andIsShowIsNull() { addCriterion("is_show is null"); return (Criteria) this; } public Criteria andIsShowIsNotNull() { addCriterion("is_show is not null"); return (Criteria) this; } public Criteria andIsShowEqualTo(Boolean value) { addCriterion("is_show =", value, "isShow"); return (Criteria) this; } public Criteria andIsShowNotEqualTo(Boolean value) { addCriterion("is_show <>", value, "isShow"); return (Criteria) this; } public Criteria andIsShowGreaterThan(Boolean value) { addCriterion("is_show >", value, "isShow"); return (Criteria) this; } public Criteria andIsShowGreaterThanOrEqualTo(Boolean value) { addCriterion("is_show >=", value, "isShow"); return (Criteria) this; } public Criteria andIsShowLessThan(Boolean value) { addCriterion("is_show <", value, 
"isShow"); return (Criteria) this; } public Criteria andIsShowLessThanOrEqualTo(Boolean value) { addCriterion("is_show <=", value, "isShow"); return (Criteria) this; } public Criteria andIsShowIn(List<Boolean> values) { addCriterion("is_show in", values, "isShow"); return (Criteria) this; } public Criteria andIsShowNotIn(List<Boolean> values) { addCriterion("is_show not in", values, "isShow"); return (Criteria) this; } public Criteria andIsShowBetween(Boolean value1, Boolean value2) { addCriterion("is_show between", value1, value2, "isShow"); return (Criteria) this; } public Criteria andIsShowNotBetween(Boolean value1, Boolean value2) { addCriterion("is_show not between", value1, value2, "isShow"); return (Criteria) this; } public Criteria andPermissionIdIsNull() { addCriterion("permission_id is null"); return (Criteria) this; } public Criteria andPermissionIdIsNotNull() { addCriterion("permission_id is not null"); return (Criteria) this; } public Criteria andPermissionIdEqualTo(Long value) { addCriterion("permission_id =", value, "permissionId"); return (Criteria) this; } public Criteria andPermissionIdNotEqualTo(Long value) { addCriterion("permission_id <>", value, "permissionId"); return (Criteria) this; } public Criteria andPermissionIdGreaterThan(Long value) { addCriterion("permission_id >", value, "permissionId"); return (Criteria) this; } public Criteria andPermissionIdGreaterThanOrEqualTo(Long value) { addCriterion("permission_id >=", value, "permissionId"); return (Criteria) this; } public Criteria andPermissionIdLessThan(Long value) { addCriterion("permission_id <", value, "permissionId"); return (Criteria) this; } public Criteria andPermissionIdLessThanOrEqualTo(Long value) { addCriterion("permission_id <=", value, "permissionId"); return (Criteria) this; } public Criteria andPermissionIdIn(List<Long> values) { addCriterion("permission_id in", values, "permissionId"); return (Criteria) this; } public Criteria andPermissionIdNotIn(List<Long> values) { 
addCriterion("permission_id not in", values, "permissionId"); return (Criteria) this; } public Criteria andPermissionIdBetween(Long value1, Long value2) { addCriterion("permission_id between", value1, value2, "permissionId"); return (Criteria) this; } public Criteria andPermissionIdNotBetween(Long value1, Long value2) { addCriterion("permission_id not between", value1, value2, "permissionId"); return (Criteria) this; } } public static class Criteria extends GeneratedCriteria { protected Criteria() { super(); } } public static class Criterion { private String condition; private Object value; private Object secondValue; private boolean noValue; private boolean singleValue; private boolean betweenValue; private boolean listValue; private String typeHandler; public String getCondition() { return condition; } public Object getValue() { return value; } public Object getSecondValue() { return secondValue; } public boolean isNoValue() { return noValue; } public boolean isSingleValue() { return singleValue; } public boolean isBetweenValue() { return betweenValue; } public boolean isListValue() { return listValue; } public String getTypeHandler() { return typeHandler; } protected Criterion(String condition) { super(); this.condition = condition; this.typeHandler = null; this.noValue = true; } protected Criterion(String condition, Object value, String typeHandler) { super(); this.condition = condition; this.value = value; this.typeHandler = typeHandler; if (value instanceof List<?>) { this.listValue = true; } else { this.singleValue = true; } } protected Criterion(String condition, Object value) { this(condition, value, null); } protected Criterion(String condition, Object value, Object secondValue, String typeHandler) { super(); this.condition = condition; this.value = value; this.secondValue = secondValue; this.typeHandler = typeHandler; this.betweenValue = true; } protected Criterion(String condition, Object value, Object secondValue) { this(condition, value, secondValue, 
null); } } }
/* * Copyright 2014-2016 CyberVision, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kaaproject.kaa.server.operations.service.akka.actors.core; import static org.kaaproject.kaa.server.operations.service.akka.DefaultAkkaService.ENDPOINT_DISPATCHER_NAME; import static org.kaaproject.kaa.server.operations.service.akka.DefaultAkkaService.LOG_DISPATCHER_NAME; import static org.kaaproject.kaa.server.operations.service.akka.DefaultAkkaService.TOPIC_DISPATCHER_NAME; import static org.kaaproject.kaa.server.operations.service.akka.DefaultAkkaService.VERIFIER_DISPATCHER_NAME; import akka.actor.ActorRef; import akka.actor.LocalActorRef; import akka.actor.Props; import akka.actor.SupervisorStrategy; import akka.actor.Terminated; import akka.actor.UntypedActor; import akka.japi.Creator; import org.kaaproject.kaa.common.hash.EndpointObjectHash; import org.kaaproject.kaa.server.common.thrift.gen.operations.Notification; import org.kaaproject.kaa.server.common.thrift.gen.operations.ThriftEndpointDeregistrationMessage; import org.kaaproject.kaa.server.operations.service.akka.AkkaContext; import org.kaaproject.kaa.server.operations.service.akka.actors.core.endpoint.global.GlobalEndpointActorCreator; import org.kaaproject.kaa.server.operations.service.akka.actors.core.endpoint.local.LocalEndpointActorCreator; import org.kaaproject.kaa.server.operations.service.akka.actors.supervision.SupervisionStrategyFactory; import 
org.kaaproject.kaa.server.operations.service.akka.messages.core.endpoint.EndpointAwareMessage; import org.kaaproject.kaa.server.operations.service.akka.messages.core.endpoint.EndpointStopMessage; import org.kaaproject.kaa.server.operations.service.akka.messages.core.lb.ClusterUpdateMessage; import org.kaaproject.kaa.server.operations.service.akka.messages.core.logs.LogEventPackMessage; import org.kaaproject.kaa.server.operations.service.akka.messages.core.notification.ThriftNotificationMessage; import org.kaaproject.kaa.server.operations.service.akka.messages.core.route.ActorClassifier; import org.kaaproject.kaa.server.operations.service.akka.messages.core.route.EndpointActorMsg; import org.kaaproject.kaa.server.operations.service.akka.messages.core.route.EndpointAddress; import org.kaaproject.kaa.server.operations.service.akka.messages.core.route.EndpointClusterAddress; import org.kaaproject.kaa.server.operations.service.akka.messages.core.route.EndpointRouteMessage; import org.kaaproject.kaa.server.operations.service.akka.messages.core.route.RouteMessage; import org.kaaproject.kaa.server.operations.service.akka.messages.core.route.RouteOperation; import org.kaaproject.kaa.server.operations.service.akka.messages.core.route.ThriftEndpointActorMsg; import org.kaaproject.kaa.server.operations.service.akka.messages.core.stats.ApplicationActorStatusResponse; import org.kaaproject.kaa.server.operations.service.akka.messages.core.stats.StatusRequestMessage; import org.kaaproject.kaa.server.operations.service.akka.messages.core.topic.TopicSubscriptionMessage; import org.kaaproject.kaa.server.operations.service.akka.messages.core.user.EndpointEventDeliveryMessage; import org.kaaproject.kaa.server.operations.service.akka.messages.core.user.EndpointEventDeliveryMessage.EventDeliveryStatus; import org.kaaproject.kaa.server.operations.service.akka.messages.core.user.EndpointEventReceiveMessage; import 
org.kaaproject.kaa.server.operations.service.akka.messages.core.user.EndpointEventSendMessage; import org.kaaproject.kaa.server.operations.service.akka.messages.core.user.EndpointUserActionMessage; import org.kaaproject.kaa.server.operations.service.akka.messages.core.user.EndpointUserActionRouteMessage; import org.kaaproject.kaa.server.operations.service.akka.messages.core.user.EndpointUserConnectMessage; import org.kaaproject.kaa.server.operations.service.akka.messages.core.user.EndpointUserDisconnectMessage; import org.kaaproject.kaa.server.operations.service.akka.messages.core.user.verification.UserVerificationRequestMessage; import org.kaaproject.kaa.server.transport.session.SessionAware; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.HashMap; import java.util.Map; import java.util.Map.Entry; public class ApplicationActor extends UntypedActor { private static final Logger LOG = LoggerFactory.getLogger(ApplicationActor.class); private final AkkaContext context; private final Map<EndpointObjectHash, GlobalEndpointActorMetaData> globalEndpointSessions; /** * The endpoint sessions. */ private final Map<EndpointObjectHash, LocalEndpointActorMetaData> localEndpointSessions; private final Map<String, EndpointObjectHash> endpointActorMap; /** * The topic sessions. */ private final Map<String, ActorRef> topicSessions; private final String nodeId; private final String tenantId; private final String appToken; private final Map<String, ActorRef> logsSessions; private final Map<String, ActorRef> userVerifierSessions; private ActorRef applicationLogActor; private ActorRef userVerifierActor; /** * Instantiates a new application actor. 
* * @param context the context * @param applicationToken the application token */ private ApplicationActor(AkkaContext context, String tenantId, String applicationToken) { this.context = context; this.nodeId = context.getClusterService().getNodeId(); this.tenantId = tenantId; this.appToken = applicationToken; this.globalEndpointSessions = new HashMap<>(); this.localEndpointSessions = new HashMap<>(); this.endpointActorMap = new HashMap<>(); this.topicSessions = new HashMap<>(); this.logsSessions = new HashMap<>(); this.userVerifierSessions = new HashMap<>(); this.applicationLogActor = getOrCreateLogActor(); this.userVerifierActor = getOrCreateUserVerifierActor(); } /** * Builds the topic key. * * @param topicId the topic id * @return the string */ public static String buildTopicKey(String topicId) { // TODO: Improve; return topicId; } @Override public SupervisorStrategy supervisorStrategy() { return SupervisionStrategyFactory.createApplicationActorStrategy(context); } /* * (non-Javadoc) * * @see akka.actor.UntypedActor#onReceive(java.lang.Object) */ @Override public void onReceive(Object message) throws Exception { if (LOG.isTraceEnabled()) { LOG.trace("[{}] Received: {}", appToken, message); } else { LOG.debug("[{}] Received: {}", appToken, message.getClass().getName()); } if (message instanceof EndpointAwareMessage) { processEndpointAwareMessage((EndpointAwareMessage) message); } if (message instanceof EndpointActorMsg) { processEndpointActorMsg((EndpointActorMsg) message); } else if (message instanceof SessionAware) { processSessionAwareMessage((SessionAware) message); } else if (message instanceof EndpointEventDeliveryMessage) { processEndpointEventDeliveryMessage((EndpointEventDeliveryMessage) message); } else if (message instanceof Terminated) { processTermination((Terminated) message); } else if (message instanceof ThriftNotificationMessage) { processThriftNotification((ThriftNotificationMessage) message); } else if (message instanceof EndpointStopMessage) { 
updateEndpointActor((EndpointStopMessage) message); } else if (message instanceof LogEventPackMessage) { processLogEventPackMessage((LogEventPackMessage) message); } else if (message instanceof UserVerificationRequestMessage) { processUserVerificationRequestMessage((UserVerificationRequestMessage) message); } else if (message instanceof EndpointUserActionMessage) { processEndpointUserActionMessage((EndpointUserActionMessage) message, true); } else if (message instanceof EndpointUserActionRouteMessage) { processEndpointUserActionMessage(((EndpointUserActionRouteMessage) message).getMessage(), false); } else if (message instanceof StatusRequestMessage) { processStatusRequest((StatusRequestMessage) message); } else if (message instanceof ClusterUpdateMessage) { processClusterUpdate((ClusterUpdateMessage) message); } else if (message instanceof RouteMessage<?>) { processRouteMessage((RouteMessage<?>) message); } } private void processEndpointActorMsg(EndpointActorMsg message) { EndpointAddress address = message.getAddress(); EndpointObjectHash endpointId = EndpointObjectHash.fromBytes(address.getEntityId()); ActorClassifier classifier = message.getClassifier(); if (classifier == ActorClassifier.APPLICATION) { boolean processed = false; if (message instanceof ThriftEndpointActorMsg<?>) { processed = processCommonThriftEndpointActorMsg( endpointId, (ThriftEndpointActorMsg<?>) message); } if (!processed) { LOG.warn("[{}] Failed to lookup processor for endpoint msg {}.", endpointId, message); } } else { EndpointActorMetaData actorMetaData = null; if (classifier == ActorClassifier.GLOBAL) { actorMetaData = globalEndpointSessions.get(endpointId); } else if (classifier == ActorClassifier.LOCAL) { actorMetaData = localEndpointSessions.get(endpointId); } if (actorMetaData != null) { actorMetaData.actorRef.tell(message, context().self()); } else { LOG.warn("[{}] Failed to lookup {} actor for endpoint.", endpointId, classifier.name()); } } } private boolean 
processCommonThriftEndpointActorMsg(EndpointObjectHash endpointId, ThriftEndpointActorMsg<?> msg) { if (msg.getMsg() instanceof ThriftEndpointDeregistrationMessage) { forwardMessageQuietly(globalEndpointSessions.get(endpointId), msg); forwardMessageQuietly(localEndpointSessions.get(endpointId), msg); return true; } else { return false; } } private void forwardMessageQuietly(EndpointActorMetaData actorMetaData, Object msg) { if (actorMetaData != null) { actorMetaData.actorRef.tell(msg, context().self()); } } private void processClusterUpdate(ClusterUpdateMessage message) { for (Entry<EndpointObjectHash, LocalEndpointActorMetaData> entry : localEndpointSessions.entrySet()) { String globalActorNodeId = getGlobalEndpointActorNodeId(entry.getKey()); if (!globalActorNodeId.equals(entry.getValue().globalActorNodeId)) { entry.getValue().globalActorNodeId = globalActorNodeId; notifyGlobalEndpointActor(entry.getKey(), globalActorNodeId); } } for (GlobalEndpointActorMetaData entry : globalEndpointSessions.values()) { entry.actorRef.tell(message, context().self()); } } /** * Process log event pack message. 
* * @param message the message */ private void processLogEventPackMessage(LogEventPackMessage message) { LOG.debug("[{}] Processing log event pack message", appToken); applicationLogActor.tell(message, self()); } private void processUserVerificationRequestMessage(UserVerificationRequestMessage message) { LOG.debug("[{}] Processing user verification request message", appToken); userVerifierActor.tell(message, self()); } private void processLogNotificationMessage(ThriftNotificationMessage message) { processThriftNotificationMessage(applicationLogActor, message); } private void processUserVerifierNotificationMessage(ThriftNotificationMessage message) { processThriftNotificationMessage(userVerifierActor, message); } private void processThriftNotificationMessage(ActorRef actor, ThriftNotificationMessage message) { LOG.debug("[{}] Processing thrift notification message {}", appToken, message); actor.tell(message, self()); } private void processStatusRequest(StatusRequestMessage message) { LOG.debug("[{}] Processing status request", message.getId()); int endpointCount = localEndpointSessions.size(); context().parent() .tell(new ApplicationActorStatusResponse(message.getId(), endpointCount), ActorRef.noSender()); } /** * Process thrift notification. 
* * @param message the message */ private void processThriftNotification(ThriftNotificationMessage message) { Notification notification = message.getNotification(); if (notification.isSetNotificationId()) { LOG.debug("[{}] Forwarding message to specific topic", appToken); sendToSpecificTopic(message); } else if (notification.isSetAppenderId()) { LOG.debug("[{}] Forwarding message to application log actor", appToken); processLogNotificationMessage(message); } else if (notification.isSetUserVerifierToken()) { LOG.debug("[{}] Forwarding message to application user verifier actor", appToken); processUserVerifierNotificationMessage(message); } else { LOG.debug("[{}] Broadcasting message to all endpoints", appToken); broadcastToAllEndpoints(message); } } /** * Send to specific topic. * * @param message the message */ private void sendToSpecificTopic(ThriftNotificationMessage message) { Notification notification = message.getNotification(); ActorRef topicActor = getOrCreateTopic(notification.getTopicId()); topicActor.tell(message, self()); } /** * Gets the or create topic. * * @param topicId the topic id * @return the or create topic */ private ActorRef getOrCreateTopic(String topicId) { ActorRef topicActor = topicSessions.get(topicId); if (topicActor == null) { topicActor = context().actorOf( Props.create(new TopicActor.ActorCreator(context.getNotificationDeltaService())) .withDispatcher(TOPIC_DISPATCHER_NAME), buildTopicKey(topicId) ); topicSessions.put(topicId, topicActor); context().watch(topicActor); } return topicActor; } /** * Broadcast to all endpoints. * * @param message the message */ private void broadcastToAllEndpoints(ThriftNotificationMessage message) { for (LocalEndpointActorMetaData endpoint : localEndpointSessions.values()) { endpoint.actorRef.tell(message, self()); } } /** * Process endpoint aware message. 
* * @param message the message */ private void processEndpointAwareMessage(EndpointAwareMessage message) { if (message instanceof TopicSubscriptionMessage) { processEndpointTopicRegistration((TopicSubscriptionMessage) message); } else if (message instanceof EndpointUserConnectMessage) { processEndpointUserRegistration((EndpointUserConnectMessage) message); } else if (message instanceof EndpointUserDisconnectMessage) { processEndpointUserDeregistration((EndpointUserDisconnectMessage) message); } else if (message instanceof EndpointEventSendMessage) { processEndpointEventSendMessage((EndpointEventSendMessage) message); } else if (message instanceof EndpointEventReceiveMessage) { processEndpointEventReceiveMessage((EndpointEventReceiveMessage) message); } else { processEndpointRequest(message); } } /** * Process endpoint aware message. * * @param message the message */ private void processSessionAwareMessage(SessionAware message) { LocalEndpointActorMetaData endpointMetaData = localEndpointSessions .get(message.getSessionInfo().getKey()); if (endpointMetaData != null) { endpointMetaData.actorRef.tell(message, self()); } else { LOG.debug("[{}] Can't find endpoint actor that corresponds to {}", appToken, message.getSessionInfo().getKey()); } } private void processEndpointEventReceiveMessage(EndpointEventReceiveMessage message) { LocalEndpointActorMetaData endpointActor = localEndpointSessions.get(message.getKey()); if (endpointActor != null) { endpointActor.actorRef.tell(message, self()); } else { LOG.debug("[{}] Can't find endpoint actor that corresponds to {}", appToken, message.getKey()); context().parent() .tell(new EndpointEventDeliveryMessage(message, EventDeliveryStatus.FAILURE), self()); } } private void processEndpointEventSendMessage(EndpointEventSendMessage message) { LOG.debug("[{}] Forwarding message to specific user", appToken, message.getUserId()); context().parent().tell(message, self()); } private void 
processEndpointEventDeliveryMessage(EndpointEventDeliveryMessage message) { LOG.debug("[{}] Forwarding message to specific user", appToken, message.getUserId()); context().parent().tell(message, self()); } /** * Process endpoint registration. * * @param message the message */ private void processEndpointTopicRegistration(TopicSubscriptionMessage message) { ActorRef topicActor = getOrCreateTopic(message.getTopicId()); topicActor.tell(message, self()); } /** * Process endpoint registration. * * @param message the message */ private void processEndpointUserRegistration(EndpointUserConnectMessage message) { context().parent().tell(message, self()); } /** * Process endpoint deregistration. * * @param message the message */ private void processEndpointUserDeregistration(EndpointUserDisconnectMessage message) { context().parent().tell(message, self()); } /** * Process session endpoint request. * * @param message the message */ private void processEndpointRequest(EndpointAwareMessage message) { LocalEndpointActorMetaData actorMetaData = localEndpointSessions.get(message.getKey()); if (actorMetaData == null) { EndpointObjectHash endpointKey = message.getKey(); String endpointActorId = LocalEndpointActorCreator.generateActorKey(); LOG.debug("[{}] Creating actor with endpointKey: {}", appToken, endpointActorId); String globalActorNodeId = getGlobalEndpointActorNodeId(endpointKey); actorMetaData = new LocalEndpointActorMetaData(context() .actorOf(Props .create(new LocalEndpointActorCreator( context, endpointActorId, message.getAppToken(), message.getKey() )).withDispatcher(ENDPOINT_DISPATCHER_NAME), endpointActorId), endpointActorId, globalActorNodeId); localEndpointSessions.put(message.getKey(), actorMetaData); endpointActorMap.put(endpointActorId, message.getKey()); context().watch(actorMetaData.actorRef); notifyGlobalEndpointActor(endpointKey, globalActorNodeId); } actorMetaData.actorRef.tell(message, self()); } private String getGlobalEndpointActorNodeId(EndpointObjectHash 
endpointKey) { return context.getClusterService().getEntityNode(endpointKey); } private void notifyGlobalEndpointActor(EndpointObjectHash endpointKey, String globalActorNodeId) { notifyGlobalEndpointActor(endpointKey, globalActorNodeId, RouteOperation.ADD); } private void notifyGlobalEndpointActor(EndpointObjectHash endpointKey, String globalActorNodeId, RouteOperation operation) { EndpointRouteMessage msg = new EndpointRouteMessage( new EndpointClusterAddress(nodeId, tenantId, appToken, endpointKey), operation); if (globalActorNodeId.equals(nodeId)) { processEndpointRouteMessage(msg); } else { context.getClusterService().sendRouteMessage(msg); } } private void processRouteMessage(RouteMessage<?> msg) { if (msg instanceof EndpointRouteMessage) { processEndpointRouteMessage((EndpointRouteMessage) msg); } } private void processEndpointRouteMessage(EndpointRouteMessage msg) { EndpointObjectHash endpointKey = msg.getAddress().getEndpointKey(); GlobalEndpointActorMetaData actorMetaData = globalEndpointSessions.get(endpointKey); if (actorMetaData == null) { String endpointActorId = GlobalEndpointActorCreator.generateActorKey(); LOG.debug("[{}] Creating global endpoint actor for endpointKey: {}", appToken, endpointKey); actorMetaData = new GlobalEndpointActorMetaData( context().actorOf(Props.create( new GlobalEndpointActorCreator(context, endpointActorId, appToken, endpointKey)) .withDispatcher(ENDPOINT_DISPATCHER_NAME), endpointActorId), endpointActorId); globalEndpointSessions.put(endpointKey, actorMetaData); context().watch(actorMetaData.actorRef); } actorMetaData.actorRef.tell(msg, self()); } private void processEndpointUserActionMessage(EndpointUserActionMessage message, boolean escalate) { LocalEndpointActorMetaData endpointMetaData = localEndpointSessions.get(message.getKey()); if (endpointMetaData != null) { LOG.debug("[{}] Found affected endpoint and forwarding message to it", appToken); endpointMetaData.actorRef.tell(message, self()); } else if (escalate) { 
LOG.debug("[{}] Failed to fing affected endpoint in scope of current application." + " Forwarding message to tenant actor", appToken); EndpointUserActionRouteMessage routeMessage = new EndpointUserActionRouteMessage(message, appToken); context().parent().tell(routeMessage, self()); } } private void updateEndpointActor(EndpointStopMessage message) { String actorKey = message.getActorKey(); EndpointObjectHash endpointKey = message.getEndpointKey(); LOG.debug("[{}] Stoping actor [{}] with [{}]", appToken, message.getActorKey(), endpointKey); LocalEndpointActorMetaData endpointMetaData = localEndpointSessions.get(endpointKey); if (endpointMetaData != null) { if (actorKey.equals(endpointMetaData.actorId)) { localEndpointSessions.remove(endpointKey); LOG.debug("[{}] Removed actor [{}] from endpoint sessions map", appToken, actorKey); } } else { LOG.warn("[{}] EndpointSession for actor {} is not found!", appToken, endpointKey); } endpointActorMap.remove(actorKey); message.getOriginator().tell(message, self()); } /** * Process termination. 
* * @param message the message */ private void processTermination(Terminated message) { ActorRef terminated = message.actor(); if (terminated instanceof LocalActorRef) { LocalActorRef localActor = (LocalActorRef) terminated; String name = localActor.path().name(); EndpointObjectHash endpointHash = endpointActorMap.remove(name); if (endpointHash != null) { LocalEndpointActorMetaData actorMetaData = localEndpointSessions.get(endpointHash); if (actorMetaData != null && actorMetaData.actorRef.equals(localActor)) { localEndpointSessions.remove(endpointHash); LOG.debug("[{}] removed endpoint: {}", appToken, localActor); notifyGlobalEndpointActor(endpointHash, actorMetaData.globalActorNodeId, RouteOperation.DELETE); } } else if (topicSessions.remove(name) != null) { LOG.debug("[{}] removed topic: {}", appToken, localActor); } else if (logsSessions.remove(name) != null) { LOG.debug("[{}] removed log: {}", appToken, localActor); applicationLogActor = getOrCreateLogActor(name); LOG.debug("[{}] created log: {}", appToken, applicationLogActor); } else if (userVerifierSessions.remove(name) != null) { LOG.debug("[{}] removed log: {}", appToken, localActor); userVerifierActor = getOrCreateUserVerifierActor(name); LOG.debug("[{}] created log: {}", appToken, applicationLogActor); } } else { LOG.warn("remove commands for remote actors are not supported yet!"); } } private ActorRef getOrCreateLogActor() { return getOrCreateLogActor(null); } private ActorRef getOrCreateLogActor(String name) { ActorRef logActor = logsSessions.get(name); if (logActor == null) { logActor = context().actorOf( Props.create(new ApplicationLogActor.ActorCreator(context, appToken)) .withDispatcher(LOG_DISPATCHER_NAME) ); context().watch(logActor); logsSessions.put(logActor.path().name(), logActor); } return logActor; } private ActorRef getOrCreateUserVerifierActor() { return getOrCreateUserVerifierActor(null); } private ActorRef getOrCreateUserVerifierActor(String name) { ActorRef userVerifierActor = 
userVerifierSessions.get(name); if (userVerifierActor == null) { userVerifierActor = context() .actorOf( Props.create(new ApplicationUserVerifierActor.ActorCreator(context, appToken)) .withDispatcher(VERIFIER_DISPATCHER_NAME) ); context().watch(userVerifierActor); userVerifierSessions.put(userVerifierActor.path().name(), userVerifierActor); } return userVerifierActor; } /* * (non-Javadoc) * * @see akka.actor.UntypedActor#preStart() */ @Override public void preStart() { LOG.info("[{}] Starting ", appToken); } /* * (non-Javadoc) * * @see akka.actor.UntypedActor#postStop() */ @Override public void postStop() { LOG.info("[{}] Stoped ", appToken); } public static class ActorCreator implements Creator<ApplicationActor> { private static final long serialVersionUID = 1L; private final AkkaContext context; private final String tenantId; private final String appToken; /** * Instantiates a new actor creator. * * @param context the context * @param tenantId the tenant id * @param appToken the application token */ public ActorCreator(AkkaContext context, String tenantId, String appToken) { super(); this.context = context; this.tenantId = tenantId; this.appToken = appToken; } /* * (non-Javadoc) * * @see akka.japi.Creator#create() */ @Override public ApplicationActor create() throws Exception { return new ApplicationActor(context, tenantId, appToken); } } }
package com.set;

import java.util.ArrayList;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.graphics.Color;
import android.os.Bundle;
import android.os.Handler;
import android.os.SystemClock;
import android.support.v7.app.ActionBarActivity;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Chronometer;
import android.widget.ImageView;
import android.widget.TableRow;
import android.widget.TextView;

/**
 * Main screen of a "Set"-style card game. Shows a 4x4 grid of card views,
 * lets the player mark three cards at a time, scores valid sets, refills the
 * board from the deck and ends the game when no set remains.
 */
public class MainActivity extends ActionBarActivity {

    // One ImageView per board slot (4 rows of 4).
    public ImageView[] cards = new ImageView[16];
    public TableRow[] rows = new TableRow[4];
    // marked[i]: slot i is currently selected by the player.
    public boolean[] marked = new boolean[16];
    // active[i]: slot i is mid-animation and temporarily not clickable.
    public boolean[] active = new boolean[16];
    // Card value in each slot. NOTE(review): sized 15 while the view arrays
    // are sized 16 — presumably only 15 slots are ever dealt; confirm.
    public int[] value = new int[15];
    // Number of currently marked cards (0..3).
    public int cont = 0;
    // Number of cards currently dealt on the board.
    public int N;
    ArrayList<Integer> deck;
    // Index of the next card to draw from the deck (81 cards total).
    public int posDeck;
    // Board indices of the three marked cards of the current attempt.
    public int id[] = new int[3];
    public Handler handler = new Handler();
    Chronometer chrono;
    // Becomes true on the first card tap; starts the chronometer.
    boolean started = false;
    TextView scoreText,endText;
    public int score = 0,score2 = 0;
    // Miniature views echoing the three cards of the last found set.
    ImageView scoreImage[] = new ImageView[3];
    int round;
    boolean end;

    /**
     * Compacts the board after three cards leave it: keeps the values of
     * unmarked slots in order, clears all marks, redraws the first n-3 slots
     * and blanks the last three.
     *
     * @param n number of slots occupied before compacting
     */
    void cleanCards(int n){
        for(int i = 0,j = 0;i < n;++i){
            if(!marked[i]){
                value[j] = value[i];
                ++j;
            }else{
                marked[i] = false;
            }
        }
        for(int i = 0;i < n - 3;++i)
            cards[i].setImageDrawable(new CardDrawable(value[i]));
        for(int i = n - 3;i < n;++i)
            cards[i].setImageResource(android.R.color.transparent);
    }

    /** Stops the clock and shows the end-of-game dialog. */
    void endGame(){
        end = true;
        chrono.stop();
        endText.setText("Fin du jeu");
        new AlertDialog.Builder(this)
            .setTitle("Notification")
            .setMessage("Game Finished")
            .setPositiveButton(android.R.string.yes, new DialogInterface.OnClickListener() {
                public void onClick(DialogInterface dialog, int which) {
                }
            })
            .setIcon(android.R.drawable.ic_dialog_alert)
            .show();
    }

    // Network client; only used in multiplayer mode (see startMulti()).
    Client client;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        // Bind all views from the layout, then start a solo game by default.
        chrono = (Chronometer)findViewById(R.id.chronometer1);
        scoreText = (TextView)findViewById(R.id.textView1);
        scoreImage[0] = (ImageView)findViewById(R.id.imageView16);
        scoreImage[1] = (ImageView)findViewById(R.id.imageView17);
        scoreImage[2] = (ImageView)findViewById(R.id.imageView18);
        endText = (TextView)findViewById(R.id.textView2);
        rows[0] = (TableRow)findViewById(R.id.tableRow1);
        rows[1] = (TableRow)findViewById(R.id.tableRow2);
        rows[2] = (TableRow)findViewById(R.id.tableRow3);
        rows[3] = (TableRow)findViewById(R.id.tableRow4);
        startSolo();
    }

    /**
     * Resets all game state (scores, deck position, marks, chronometer and
     * board images) so a new game can start.
     */
    public void clean(){
        started = false;
        score = 0;
        score2 = 0;
        posDeck = 0;
        scoreImage[0].setImageResource(android.R.color.transparent);
        scoreImage[1].setImageResource(android.R.color.transparent);
        scoreImage[2].setImageResource(android.R.color.transparent);
        // Reset the chronometer to zero without leaving it running.
        chrono.stop();
        chrono.setBase(SystemClock.elapsedRealtime());
        chrono.start();
        chrono.stop();
        // Repeatedly compact the board until it is emptied, 3 cards at a time.
        int it = N / 3;
        for(int i = 0;i < it;++i)
            cleanCards(N - 3 * i);
        for(int i = 0;i < 15;++i){
            marked[i] = false;
            active[i] = false;
        }
        round = 0;
        end = false;
    }

    /**
     * Deals the first N card values from the deck and binds/redraws the
     * ImageViews of the first three rows (the fourth row starts empty).
     *
     * @param deck the shuffled deck to deal from
     */
    public void paintCards(ArrayList<Integer> deck){
        for(int i = 0;i < N;++i)
            value[i] = deck.get(i);
        for(int i = 0;i < 4;++i){
            for(int j = 0;j < 4;++j){
                // Only 15 slots are bound (the 16th cell is skipped).
                if(i < 3 || (i == 3 && j < 3))
                    cards[4 * i + j] = (ImageView)rows[i].getChildAt(j);
                if(i < 3)
                    cards[4 * i + j].setImageDrawable(new CardDrawable(value[4 * i + j]));
            }
        }
    }

    /**
     * Starts a single-player game: deals 12 cards and installs the click
     * handler implementing the mark/validate/refill state machine.
     */
    void startSolo(){
        scoreText.setText("Score : " + 0);
        deck = Cards.generateDeck();
        N = 12;
        posDeck = 12;
        paintCards(deck);
        for(int i = 0;i < 15;++i){
            final int i2 = i;
            cards[i].setOnClickListener(new OnClickListener () {
                @Override
                public void onClick(View v) {
                    // Ignore taps on empty or animating slots.
                    if(i2 >= N || active[i2]) return;
                    // First tap of the game starts the clock.
                    if(!started){
                        chrono.setBase(SystemClock.elapsedRealtime());
                        chrono.start();
                        started = true;
                    }
                    if(!marked[i2]){
                        // Mark this card (darken it).
                        cards[i2].setColorFilter(Color.argb(50, 0, 0, 0));
                        marked[i2] = true;
                        ++cont;
                        if(cont == 3){
                            // Three cards marked: collect their indices.
                            int pos = 0;
                            for(int k = 0;k < N;++k){
                                if(marked[k]){
                                    id[pos++] = k;
                                }
                            }
                            if(Cards.isSet(value[ id[0] ], value[ id[1] ], value[ id[2] ])){
                                // Valid set: flash green, echo the cards, score up.
                                for(int k = 0;k < 3;++k){
                                    cards[ id[k] ].setColorFilter(Color.argb(100, 0, 200, 0));
                                    active[ id[k] ] = true;
                                    scoreImage[k].setImageDrawable(new CardDrawable(value[ id[k] ]));
                                }
                                N -= 3;
                                score++;
                                scoreText.setText("Score : " + score);
                                // After the flash, compact and/or refill the board.
                                handler.postDelayed(new Runnable(){
                                    public void run(){
                                        for(int k = 0;k < 3;++k){
                                            cards[ id[k] ].setColorFilter(Color.argb(0, 0, 0, 0));
                                            active[ id[k] ] = false;
                                            --cont;
                                        }
                                        if(N == 12) cleanCards(15);
                                        // Refill the three freed slots when cards remain,
                                        // or when 12 cards contain no set.
                                        if((N == 9 && 81 - posDeck >= 3) || (N == 12 && !Cards.test(value,N))){
                                            for(int k = 0;k < 3;++k){
                                                value[ id[k] ] = deck.get(posDeck);
                                                posDeck++;
                                                cards[ id[k] ].setImageDrawable(new CardDrawable(value[ id[k] ]));
                                                marked[ id[k] ] = false;
                                            }
                                            N += 3;
                                            // Still no set? Deal three more into slots 12-14.
                                            if(!Cards.test(value,N) && 81 - posDeck >= 3){
                                                for(int k = 0;k < 3;++k){
                                                    value[12 + k] = deck.get(posDeck);
                                                    posDeck++;
                                                    cards[12 + k].setImageDrawable(new CardDrawable(value[12 + k]));
                                                }
                                                N += 3;
                                                if(!Cards.test(value,N)){
                                                    endGame();
                                                }
                                            }
                                        }else if(posDeck == 81){
                                            // Deck exhausted: compact and check for remaining sets.
                                            cleanCards(N + 3);
                                            if(!Cards.test(value,N)){
                                                endGame();
                                            }
                                        }
                                    }
                                }, 500);
                            }else{
                                // Not a set: flash red and penalize.
                                for(int k = 0;k < 3;++k){
                                    cards[ id[k] ].setColorFilter(Color.argb(100, 200, 0, 0));
                                    active[ id[k] ] = true;
                                }
                                score--;
                                scoreText.setText("Score : " + score);
                                handler.postDelayed(new Runnable(){
                                    public void run(){
                                        for(int k = 0;k < 3;++k){
                                            cards[ id[k] ].setColorFilter(Color.argb(0, 0, 0, 0));
                                            marked[ id[k] ] = false;
                                            active[ id[k] ] = false;
                                            --cont;
                                        }
                                    }
                                }, 500);
                            }
                        }
                    }else{
                        // Tapping a marked card unmarks it.
                        cards[i2].setColorFilter(Color.argb(0, 0, 0, 0));
                        marked[i2] = false;
                        --cont;
                    }
                }
            });
        }
    }

    /** Starts a multiplayer game by launching the network client. */
    void startMulti(){
        scoreText.setText("Score :\n0 | 0");
        client = new Client(this);
        client.start();
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.main, menu);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Handle action bar item clicks here. The action bar will
        // automatically handle clicks on the Home/Up button, so long
        // as you specify a parent activity in AndroidManifest.xml.
        int id = item.getItemId();
        /*if (id == R.id.action_settings) { return true; }*/
        if(id == R.id.action_exit){
            finish();
            return true;
        }
        if(id == R.id.action_solo){
            clean();
            startSolo();
        }
        if(id == R.id.action_multi){
            clean();
            startMulti();
        }
        return super.onOptionsItemSelected(item);
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.druid.metadata;

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Optional;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import org.apache.druid.indexer.TaskInfo;
import org.apache.druid.jackson.DefaultObjectMapper;
import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.java.util.common.Pair;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.jackson.JacksonUtils;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;

import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

/**
 * Integration-style tests for {@link SQLMetadataStorageActionHandler} run
 * against an in-memory Derby database. Entries, statuses, logs and locks are
 * all modelled as plain Maps so they round-trip through Jackson easily.
 */
public class SQLMetadataStorageActionHandlerTest
{
  @Rule
  public final TestDerbyConnector.DerbyConnectorRule derbyConnectorRule = new TestDerbyConnector.DerbyConnectorRule();

  @Rule
  public final ExpectedException thrown = ExpectedException.none();

  private static final ObjectMapper JSON_MAPPER = new DefaultObjectMapper();

  // Handler under test; type parameters are <entry, status, log, lock>.
  private SQLMetadataStorageActionHandler<Map<String, Integer>, Map<String, Integer>, Map<String, String>, Map<String, Integer>> handler;

  /**
   * Creates fresh entry/lock/log tables for every test and wires up a
   * Derby-backed handler with Map-based (de)serialization.
   */
  @Before
  public void setUp()
  {
    TestDerbyConnector connector = derbyConnectorRule.getConnector();

    final String entryType = "entry";
    final String entryTable = "entries";
    final String logTable = "logs";
    final String lockTable = "locks";

    connector.createEntryTable(entryTable);
    connector.createLockTable(lockTable, entryType);
    connector.createLogTable(logTable, entryType);

    handler = new DerbyMetadataStorageActionHandler<>(
        connector,
        JSON_MAPPER,
        new MetadataStorageActionHandlerTypes<Map<String, Integer>, Map<String, Integer>, Map<String, String>, Map<String, Integer>>()
        {
          @Override
          public TypeReference<Map<String, Integer>> getEntryType()
          {
            return new TypeReference<Map<String, Integer>>()
            {
            };
          }

          @Override
          public TypeReference<Map<String, Integer>> getStatusType()
          {
            return new TypeReference<Map<String, Integer>>()
            {
            };
          }

          @Override
          public TypeReference<Map<String, String>> getLogType()
          {
            return JacksonUtils.TYPE_REFERENCE_MAP_STRING_STRING;
          }

          @Override
          public TypeReference<Map<String, Integer>> getLockType()
          {
            return new TypeReference<Map<String, Integer>>()
            {
            };
          }
        },
        entryType,
        entryTable,
        logTable,
        lockTable
    );
  }

  /**
   * Covers the entry/status lifecycle: insert, lookups for missing ids,
   * status updates while active, the transition to inactive (after which
   * status updates must fail) and visibility in active/completed task lists.
   */
  @Test
  public void testEntryAndStatus() throws Exception
  {
    Map<String, Integer> entry = ImmutableMap.of("numericId", 1234);
    Map<String, Integer> status1 = ImmutableMap.of("count", 42);
    Map<String, Integer> status2 = ImmutableMap.of("count", 42, "temp", 1);

    final String entryId = "1234";

    handler.insert(entryId, DateTimes.of("2014-01-02T00:00:00.123"), "testDataSource", entry, true, null);

    Assert.assertEquals(
        Optional.of(entry),
        handler.getEntry(entryId)
    );

    Assert.assertEquals(Optional.absent(), handler.getEntry("non_exist_entry"));

    Assert.assertEquals(Optional.absent(), handler.getStatus(entryId));

    Assert.assertEquals(Optional.absent(), handler.getStatus("non_exist_entry"));

    Assert.assertTrue(handler.setStatus(entryId, true, status1));

    Assert.assertEquals(
        ImmutableList.of(Pair.of(entry, status1)),
        handler.getActiveTaskInfo(null).stream()
               .map(taskInfo -> Pair.of(taskInfo.getTask(), taskInfo.getStatus()))
               .collect(Collectors.toList())
    );

    Assert.assertTrue(handler.setStatus(entryId, true, status2));

    Assert.assertEquals(
        ImmutableList.of(Pair.of(entry, status2)),
        handler.getActiveTaskInfo(null).stream()
               .map(taskInfo -> Pair.of(taskInfo.getTask(), taskInfo.getStatus()))
               .collect(Collectors.toList())
    );

    Assert.assertEquals(
        ImmutableList.of(),
        handler.getCompletedTaskInfo(DateTimes.of("2014-01-01"), null, null)
    );

    Assert.assertTrue(handler.setStatus(entryId, false, status1));

    Assert.assertEquals(
        Optional.of(status1),
        handler.getStatus(entryId)
    );

    // inactive statuses cannot be updated, this should fail
    Assert.assertFalse(handler.setStatus(entryId, false, status2));

    Assert.assertEquals(
        Optional.of(status1),
        handler.getStatus(entryId)
    );

    Assert.assertEquals(
        Optional.of(entry),
        handler.getEntry(entryId)
    );

    Assert.assertEquals(
        ImmutableList.of(),
        handler.getCompletedTaskInfo(DateTimes.of("2014-01-03"), null, null)
    );

    Assert.assertEquals(
        ImmutableList.of(status1),
        handler.getCompletedTaskInfo(DateTimes.of("2014-01-01"), null, null)
               .stream()
               .map(TaskInfo::getStatus)
               .collect(Collectors.toList())
    );
  }

  /**
   * Inserts 10 completed entries and checks that a limit of 7 returns the 7
   * most recent ones, newest first.
   */
  @Test
  public void testGetRecentStatuses() throws EntryExistsException
  {
    for (int i = 1; i < 11; i++) {
      final String entryId = "abcd_" + i;
      final Map<String, Integer> entry = ImmutableMap.of("a", i);
      final Map<String, Integer> status = ImmutableMap.of("count", i * 10);

      handler.insert(entryId, DateTimes.of(StringUtils.format("2014-01-%02d", i)), "test", entry, false, status);
    }

    final List<TaskInfo<Map<String, Integer>, Map<String, Integer>>> statuses = handler.getCompletedTaskInfo(
        DateTimes.of("2014-01-01"),
        7,
        null
    );
    Assert.assertEquals(7, statuses.size());
    int i = 10;
    for (TaskInfo<Map<String, Integer>, Map<String, Integer>> status : statuses) {
      Assert.assertEquals(ImmutableMap.of("count", i-- * 10), status.getStatus());
    }
  }

  /**
   * Same as above but with fewer entries (5) than the limit (10): all
   * entries come back, newest first.
   */
  @Test
  public void testGetRecentStatuses2() throws EntryExistsException
  {
    for (int i = 1; i < 6; i++) {
      final String entryId = "abcd_" + i;
      final Map<String, Integer> entry = ImmutableMap.of("a", i);
      final Map<String, Integer> status = ImmutableMap.of("count", i * 10);

      handler.insert(entryId, DateTimes.of(StringUtils.format("2014-01-%02d", i)), "test", entry, false, status);
    }

    final List<TaskInfo<Map<String, Integer>, Map<String, Integer>>> statuses = handler.getCompletedTaskInfo(
        DateTimes.of("2014-01-01"),
        10,
        null
    );
    Assert.assertEquals(5, statuses.size());
    int i = 5;
    for (TaskInfo<Map<String, Integer>, Map<String, Integer>> status : statuses) {
      Assert.assertEquals(ImmutableMap.of("count", i-- * 10), status.getStatus());
    }
  }

  /**
   * Inserting the same entry id twice must raise {@link EntryExistsException}.
   */
  @Test(timeout = 60_000L)
  public void testRepeatInsert() throws Exception
  {
    final String entryId = "abcd";
    Map<String, Integer> entry = ImmutableMap.of("a", 1);
    Map<String, Integer> status = ImmutableMap.of("count", 42);

    handler.insert(entryId, DateTimes.of("2014-01-01"), "test", entry, true, status);

    thrown.expect(EntryExistsException.class);
    handler.insert(entryId, DateTimes.of("2014-01-01"), "test", entry, true, status);
  }

  /**
   * Log records attach to an entry and are returned in insertion order;
   * unknown entries yield an empty list.
   */
  @Test
  public void testLogs() throws Exception
  {
    final String entryId = "abcd";
    Map<String, Integer> entry = ImmutableMap.of("a", 1);
    Map<String, Integer> status = ImmutableMap.of("count", 42);

    handler.insert(entryId, DateTimes.of("2014-01-01"), "test", entry, true, status);

    Assert.assertEquals(
        ImmutableList.of(),
        handler.getLogs("non_exist_entry")
    );

    Assert.assertEquals(
        ImmutableMap.of(),
        handler.getLocks(entryId)
    );

    final ImmutableMap<String, String> log1 = ImmutableMap.of("logentry", "created");
    final ImmutableMap<String, String> log2 = ImmutableMap.of("logentry", "updated");

    Assert.assertTrue(handler.addLog(entryId, log1));
    Assert.assertTrue(handler.addLog(entryId, log2));

    Assert.assertEquals(
        ImmutableList.of(log1, log2),
        handler.getLogs(entryId)
    );
  }

  /**
   * Locks can be added to an entry, listed (keyed by generated lock id) and
   * removed individually.
   */
  @Test
  public void testLocks() throws Exception
  {
    final String entryId = "ABC123";
    Map<String, Integer> entry = ImmutableMap.of("a", 1);
    Map<String, Integer> status = ImmutableMap.of("count", 42);

    handler.insert(entryId, DateTimes.of("2014-01-01"), "test", entry, true, status);

    Assert.assertEquals(
        ImmutableMap.<Long, Map<String, Integer>>of(),
        handler.getLocks("non_exist_entry")
    );

    Assert.assertEquals(
        ImmutableMap.<Long, Map<String, Integer>>of(),
        handler.getLocks(entryId)
    );

    final ImmutableMap<String, Integer> lock1 = ImmutableMap.of("lock", 1);
    final ImmutableMap<String, Integer> lock2 = ImmutableMap.of("lock", 2);

    Assert.assertTrue(handler.addLock(entryId, lock1));
    Assert.assertTrue(handler.addLock(entryId, lock2));

    final Map<Long, Map<String, Integer>> locks = handler.getLocks(entryId);
    Assert.assertEquals(2, locks.size());

    Assert.assertEquals(
        ImmutableSet.<Map<String, Integer>>of(lock1, lock2),
        new HashSet<>(locks.values())
    );

    // Remove one lock and verify only the other remains.
    long lockId = locks.keySet().iterator().next();
    handler.removeLock(lockId);
    locks.remove(lockId);

    final Map<Long, Map<String, Integer>> updated = handler.getLocks(entryId);
    Assert.assertEquals(
        new HashSet<>(locks.values()),
        new HashSet<>(updated.values())
    );
    Assert.assertEquals(updated.keySet(), locks.keySet());
  }

  /**
   * An existing lock can be replaced in place via its lock id.
   */
  @Test
  public void testReplaceLock() throws EntryExistsException
  {
    final String entryId = "ABC123";
    Map<String, Integer> entry = ImmutableMap.of("a", 1);
    Map<String, Integer> status = ImmutableMap.of("count", 42);

    handler.insert(entryId, DateTimes.of("2014-01-01"), "test", entry, true, status);

    Assert.assertEquals(
        ImmutableMap.<Long, Map<String, Integer>>of(),
        handler.getLocks("non_exist_entry")
    );

    Assert.assertEquals(
        ImmutableMap.<Long, Map<String, Integer>>of(),
        handler.getLocks(entryId)
    );

    final ImmutableMap<String, Integer> lock1 = ImmutableMap.of("lock", 1);
    final ImmutableMap<String, Integer> lock2 = ImmutableMap.of("lock", 2);

    Assert.assertTrue(handler.addLock(entryId, lock1));

    final Long lockId1 = handler.getLockId(entryId, lock1);
    Assert.assertNotNull(lockId1);

    Assert.assertTrue(handler.replaceLock(entryId, lockId1, lock2));
  }

  /**
   * getLockId returns the id of a stored lock and null for a lock payload
   * that was never added.
   */
  @Test
  public void testGetLockId() throws EntryExistsException
  {
    final String entryId = "ABC123";
    Map<String, Integer> entry = ImmutableMap.of("a", 1);
    Map<String, Integer> status = ImmutableMap.of("count", 42);

    handler.insert(entryId, DateTimes.of("2014-01-01"), "test", entry, true, status);

    Assert.assertEquals(
        ImmutableMap.<Long, Map<String, Integer>>of(),
        handler.getLocks("non_exist_entry")
    );

    Assert.assertEquals(
        ImmutableMap.<Long, Map<String, Integer>>of(),
        handler.getLocks(entryId)
    );

    final ImmutableMap<String, Integer> lock1 = ImmutableMap.of("lock", 1);
    final ImmutableMap<String, Integer> lock2 = ImmutableMap.of("lock", 2);

    Assert.assertTrue(handler.addLock(entryId, lock1));

    Assert.assertNotNull(handler.getLockId(entryId, lock1));
    Assert.assertNull(handler.getLockId(entryId, lock2));
  }

  /**
   * removeTasksOlderThan deletes inactive entries (and their logs) created
   * before the cutoff, but never touches active entries.
   */
  @Test
  public void testRemoveTasksOlderThan() throws Exception
  {
    final String entryId1 = "1234";
    Map<String, Integer> entry1 = ImmutableMap.of("numericId", 1234);
    Map<String, Integer> status1 = ImmutableMap.of("count", 42, "temp", 1);
    handler.insert(entryId1, DateTimes.of("2014-01-01T00:00:00.123"), "testDataSource", entry1, false, status1);
    Assert.assertTrue(handler.addLog(entryId1, ImmutableMap.of("logentry", "created")));

    final String entryId2 = "ABC123";
    Map<String, Integer> entry2 = ImmutableMap.of("a", 1);
    Map<String, Integer> status2 = ImmutableMap.of("count", 42);
    handler.insert(entryId2, DateTimes.of("2014-01-01T00:00:00.123"), "test", entry2, true, status2);
    Assert.assertTrue(handler.addLog(entryId2, ImmutableMap.of("logentry", "created")));

    final String entryId3 = "DEF5678";
    Map<String, Integer> entry3 = ImmutableMap.of("numericId", 5678);
    Map<String, Integer> status3 = ImmutableMap.of("count", 21, "temp", 2);
    handler.insert(entryId3, DateTimes.of("2014-01-02T12:00:00.123"), "testDataSource", entry3, false, status3);
    Assert.assertTrue(handler.addLog(entryId3, ImmutableMap.of("logentry", "created")));

    Assert.assertEquals(Optional.of(entry1), handler.getEntry(entryId1));
    Assert.assertEquals(Optional.of(entry2), handler.getEntry(entryId2));
    Assert.assertEquals(Optional.of(entry3), handler.getEntry(entryId3));

    Assert.assertEquals(
        ImmutableList.of(entryId2),
        handler.getActiveTaskInfo(null).stream()
               .map(taskInfo -> taskInfo.getId())
               .collect(Collectors.toList())
    );

    Assert.assertEquals(
        ImmutableList.of(entryId3, entryId1),
        handler.getCompletedTaskInfo(DateTimes.of("2014-01-01"), null, null).stream()
               .map(taskInfo -> taskInfo.getId())
               .collect(Collectors.toList())
    );

    handler.removeTasksOlderThan(DateTimes.of("2014-01-02").getMillis());
    // active task not removed.
    Assert.assertEquals(
        ImmutableList.of(entryId2),
        handler.getActiveTaskInfo(null).stream()
               .map(taskInfo -> taskInfo.getId())
               .collect(Collectors.toList())
    );
    Assert.assertEquals(
        ImmutableList.of(entryId3),
        handler.getCompletedTaskInfo(DateTimes.of("2014-01-01"), null, null).stream()
               .map(taskInfo -> taskInfo.getId())
               .collect(Collectors.toList())
    );
    // tasklogs
    Assert.assertEquals(0, handler.getLogs(entryId1).size());
    Assert.assertEquals(1, handler.getLogs(entryId2).size());
    Assert.assertEquals(1, handler.getLogs(entryId3).size());
  }
}
/*
 * Copyright 2000-2013 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jetbrains.idea.svn;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import org.jetbrains.idea.svn.api.Depth;
import org.jetbrains.idea.svn.auth.SvnAuthenticationManager;
import org.jetbrains.idea.svn.checkout.SvnCheckoutProvider;
import org.junit.Before;
import org.tmatesoft.svn.core.ISVNDirEntryHandler;
import org.tmatesoft.svn.core.SVNDirEntry;
import org.tmatesoft.svn.core.SVNException;
import org.tmatesoft.svn.core.SVNURL;
import org.tmatesoft.svn.core.auth.ISVNAuthenticationManager;
import org.tmatesoft.svn.core.auth.ISVNAuthenticationProvider;
import org.tmatesoft.svn.core.auth.SVNAuthentication;
import org.tmatesoft.svn.core.auth.SVNPasswordAuthentication;
import org.tmatesoft.svn.core.auth.SVNSSHAuthentication;
import org.tmatesoft.svn.core.auth.SVNUserNameAuthentication;
import org.tmatesoft.svn.core.io.SVNRepository;
import org.tmatesoft.svn.core.wc.SVNRevision;
import com.intellij.openapi.progress.EmptyProgressIndicator;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.vcs.CheckoutProvider;
import com.intellij.openapi.vcs.FilePath;
import com.intellij.openapi.vcs.VcsException;
import com.intellij.openapi.vcs.VcsKey;
import com.intellij.openapi.vcs.actions.VcsContextFactory;
import com.intellij.openapi.vcs.changes.Change;
import com.intellij.openapi.vcs.changes.CurrentContentRevision;
import com.intellij.openapi.vcs.history.VcsAppendableHistoryPartnerAdapter;
import com.intellij.openapi.vcs.history.VcsFileRevision;
import com.intellij.openapi.vcs.history.VcsHistoryProvider;
import com.intellij.openapi.vcs.update.FileGroup;
import com.intellij.openapi.vcs.update.UpdateSession;
import com.intellij.openapi.vcs.update.UpdatedFiles;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.Processor;
import com.intellij.util.containers.Convertor;
import com.intellij.vcsUtil.VcsUtil;
import junit.framework.Assert;

/**
 * Smoke tests exercising SVN operations (browse, checkout, history, update,
 * commit) over different repository protocols against external servers.
 *
 * NOTE: the {@code @Test} annotation used below is the private no-op
 * annotation declared at the bottom of this class, NOT org.junit.Test, so
 * these tests are effectively disabled (see "disable tests for now").
 *
 * Created with IntelliJ IDEA.
 * User: Irina.Chernushina
 * Date: 2/11/13
 * Time: 3:48 PM
 */
public abstract class SvnProtocolsTest extends Svn17TestCase {
  // todo correct URL
  private final static String ourSSH_URL = "svn+ssh://unit-069:222/home/irina/svnrepo";
  private final static String ourHTTP_URL = "http://unit-364.labs.intellij.net/svn/forMerge/tmp";
  private final static String ourHTTPS_URL = "https://";
  private final static String ourSVN_URL = "svn://";

  //private final static String[] ourTestURL = {ourSSH_URL, ourHTTP_URL};
  // at the moment
  private final static String[] ourTestURL = {ourHTTP_URL};

  public static final String SSH_USER_NAME = "user";
  public static final String SSH_PASSWORD = "qwerty4321";
  public static final int SSH_PORT_NUMBER = 222;
  private SvnVcs myVcs;

  @Override
  @Before
  public void setUp() throws Exception {
    super.setUp();
    myVcs = SvnVcs.getInstance(myProject);
    // replace authentication provider so that pass credentials without dialogs
    final SvnConfiguration configuration = SvnConfiguration.getInstance(myProject);
    final SvnAuthenticationManager interactiveManager = configuration.getInteractiveManager(myVcs);
    // Auto-accept any server certificate so no dialog blocks the test run.
    final SvnTestInteractiveAuthentication authentication = new SvnTestInteractiveAuthentication(interactiveManager) {
      @Override
      public int acceptServerAuthentication(SVNURL url, String realm, Object certificate, boolean resultMayBeStored) {
        return ISVNAuthenticationProvider.ACCEPTED;
      }
    };
    interactiveManager.setAuthenticationProvider(authentication);

    final SvnAuthenticationManager manager = configuration.getAuthenticationManager(myVcs);
    // will be the same as in interactive -> authentication notifier is not used
    manager.setAuthenticationProvider(authentication);

    // Canned credentials per authentication scheme.
    authentication.addAuthentication(ISVNAuthenticationManager.SSH, new Convertor<SVNURL, SVNAuthentication>() {
      @Override
      public SVNAuthentication convert(SVNURL o) {
        return new SVNSSHAuthentication(SSH_USER_NAME, SSH_PASSWORD, SSH_PORT_NUMBER, true, o, false);
      }
    });
    authentication.addAuthentication(ISVNAuthenticationManager.USERNAME, new Convertor<SVNURL, SVNAuthentication>() {
      @Override
      public SVNAuthentication convert(SVNURL o) {
        return new SVNUserNameAuthentication(SSH_USER_NAME, true, o, false);
      }
    });
    authentication.addAuthentication(ISVNAuthenticationManager.PASSWORD, new Convertor<SVNURL, SVNAuthentication>() {
      @Override
      public SVNAuthentication convert(SVNURL o) {
        return new SVNPasswordAuthentication("sally", "abcde", true, o, false);
      }
    });
  }

  /** Runs the repository-browse check against every configured test URL. */
  @Test
  public void testBrowseRepository() throws Exception {
    for (String s : ourTestURL) {
      System.out.println("Testing URL: " + s);
      testBrowseRepositoryImpl(s);
    }
  }

  /** Lists the repository root and asserts it contains at least one entry. */
  private void testBrowseRepositoryImpl(final String url) throws SVNException {
    final List<SVNDirEntry> list = new ArrayList<>();
    final SVNRepository repository = myVcs.getSvnKitManager().createRepository(url);
    repository.getDir(".", -1, null, new ISVNDirEntryHandler() {
      @Override
      public void handleDirEntry(SVNDirEntry dirEntry) throws SVNException {
        list.add(dirEntry);
      }
    });

    Assert.assertTrue(! list.isEmpty());
  }

  /** Runs the checkout check against every configured test URL. */
  @Test
  public void testCheckout() throws Exception {
    for (String s : ourTestURL) {
      System.out.println("Testing URL: " + s);
      testCheckoutImpl(s);
    }
  }

  /** Runs the history check against every configured test URL. */
  @Test
  public void testHistory() throws Exception {
    for (String s : ourTestURL) {
      System.out.println("Testing URL: " + s);
      testHistoryImpl(s);
    }
  }

  /**
   * Fetches file history for the URL, deliberately cancelling after more
   * than one revision has been received, then asserts some were collected.
   */
  private void testHistoryImpl(String s) throws VcsException {
    final VcsHistoryProvider provider = myVcs.getVcsHistoryProvider();
    final VcsAppendableHistoryPartnerAdapter partner = new VcsAppendableHistoryPartnerAdapter() {
      @Override
      public void acceptRevision(VcsFileRevision revision) {
        super.acceptRevision(revision);
        // Stop the (potentially long) history walk early via cancellation.
        if(getSession().getRevisionList().size() > 1) {
          throw new ProcessCanceledException();
        }
      }
    };
    try {
      provider.reportAppendableHistory(VcsContextFactory.SERVICE.getInstance().createFilePathOnNonLocal(s, true), partner);
    } catch (ProcessCanceledException e) {
      //ok
    }
    final List<VcsFileRevision> list = partner.getSession().getRevisionList();
    Assert.assertTrue(! list.isEmpty());
  }

  // todo this test writes to repository - so it's disabled for now - while admins are preparing a server
  /*@Test
  public void testUpdateAndCommit() throws Exception {
    for (String url : ourTestURL) {
      final File wc1 = testCheckoutImpl(url);
      final File wc2 = testCheckoutImpl(url);

      final File file = testCommitImpl(wc1);
      System.out.println("Committed file: " + file.getPath());
      testUpdateImpl(wc2, file);
    }
  }*/

  /**
   * Updates working copy {@code wc1} and asserts the freshly committed file
   * {@code created} arrives in the CREATED file group.
   */
  private void testUpdateImpl(File wc1, final File created) {
    Assert.assertTrue(wc1.isDirectory());
    final VirtualFile vf = LocalFileSystem.getInstance().refreshAndFindFileByIoFile(wc1);
    final UpdatedFiles files = UpdatedFiles.create();
    final UpdateSession session =
      myVcs.getUpdateEnvironment().updateDirectories(new FilePath[]{VcsUtil.getFilePath(vf)}, files, new EmptyProgressIndicator(),
                                                     new Ref<>());
    Assert.assertTrue(session.getExceptions() == null || session.getExceptions().isEmpty());
    Assert.assertTrue(! session.isCanceled());
    Assert.assertTrue(! files.getGroupById(FileGroup.CREATED_ID).getFiles().isEmpty());
    final String path = files.getGroupById(FileGroup.CREATED_ID).getFiles().iterator().next();
    final String name = path.substring(path.lastIndexOf(File.separator) + 1);
    Assert.assertEquals(created.getName(), name);
  }

  /**
   * Creates a temp file in working copy {@code wc1}, schedules it for
   * addition and commits it. Returns the committed file.
   */
  private File testCommitImpl(File wc1) throws IOException {
    Assert.assertTrue(wc1.isDirectory());
    final File file = FileUtil.createTempFile(wc1, "file", ".txt");
    final VirtualFile vf = LocalFileSystem.getInstance().refreshAndFindFileByIoFile(file);
    Assert.assertNotNull(vf);
    final ArrayList<VirtualFile> files = new ArrayList<>();
    files.add(vf);
    final List<VcsException> exceptions = myVcs.getCheckinEnvironment().scheduleUnversionedFilesForAddition(files);
    Assert.assertTrue(exceptions.isEmpty());

    final Change change = new Change(null, new CurrentContentRevision(VcsUtil.getFilePath(vf)));
    final List<VcsException> commit = myVcs.getCheckinEnvironment().commit(Collections.singletonList(change), "commit");
    Assert.assertTrue(commit.isEmpty());
    return file;
  }

  /**
   * Checks the URL out into a fresh temp directory and asserts more than one
   * file was materialized. Returns the checkout root.
   */
  private File testCheckoutImpl(final String url) throws IOException {
    final File root = FileUtil.createTempDirectory("checkoutRoot", "");
    root.deleteOnExit();
    Assert.assertTrue(root.exists());
    SvnCheckoutProvider
      .checkout(myProject, root, url, SVNRevision.HEAD, Depth.INFINITY, false, new CheckoutProvider.Listener() {
        @Override
        public void directoryCheckedOut(File directory, VcsKey vcs) {
        }

        @Override
        public void checkoutCompleted() {
        }
      }, WorkingCopyFormat.ONE_DOT_SEVEN);
    final int[] cnt = new int[1];
    cnt[0] = 0;
    FileUtil.processFilesRecursively(root, new Processor<File>() {
      @Override
      public boolean process(File file) {
        ++ cnt[0];
        return ! (cnt[0] > 1);
      }
    });
    Assert.assertTrue(cnt[0] > 1);
    return root;
  }

  // disable tests for now
  private @interface Test{}
}
package it.cavallium.warppi.desktop; import java.io.File; import java.io.IOException; import java.io.PrintWriter; import java.io.StringWriter; import java.net.URL; import java.util.HashMap; import java.util.Map; import org.apache.commons.io.FileUtils; import it.cavallium.warppi.Engine; import it.cavallium.warppi.Platform; import it.cavallium.warppi.gui.graphicengine.GraphicEngine; import it.cavallium.warppi.gui.graphicengine.impl.jogl.JOGLEngine; import it.cavallium.warppi.gui.graphicengine.impl.swing.SwingEngine; import it.cavallium.warppi.util.CacheUtils; import it.cavallium.warppi.util.Error; import net.lingala.zip4j.core.ZipFile; import net.lingala.zip4j.model.ZipParameters; import net.lingala.zip4j.util.Zip4jConstants; public class DesktopPlatform implements Platform { private final DesktopConsoleUtils cu; private final DesktopGpio gi; private final DesktopStorageUtils su; private final ImageUtils pu; private final String on; private final Map<String, GraphicEngine> el; private final DesktopSettings settings; private Boolean runningOnRaspberryOverride = null; public DesktopPlatform() { cu = new DesktopConsoleUtils(); gi = new DesktopGpio(); su = new DesktopStorageUtils(); pu = new DesktopImageUtils(); on = System.getProperty("os.name").toLowerCase(); el = new HashMap<>(); el.put("CPU engine", new SwingEngine()); el.put("GPU engine", new JOGLEngine()); settings = new DesktopSettings(); } @Override public ConsoleUtils getConsoleUtils() { return cu; } @Override public Gpio getGpio() { return gi; } @Override public StorageUtils getStorageUtils() { return su; } @Override public ImageUtils getImageUtils() { return pu; } @Override public DesktopSettings getSettings() { return settings; } @Override public void setThreadName(final Thread t, final String name) { t.setName(name); } @Override public void setThreadDaemon(final Thread t) { t.setDaemon(true); } @Override public void setThreadDaemon(final Thread t, final boolean value) { t.setDaemon(value); } @Override public 
void exit(final int value) { System.exit(value); } @Override public void gc() { System.gc(); } @Override public boolean isJavascript() { return false; } @Override public String getOsName() { return on; } @Override public void alphaChanged(final boolean val) { final GraphicEngine currentEngine = Engine.INSTANCE.getHardwareDevice().getDisplayManager().engine; if (currentEngine instanceof SwingEngine) ((SwingEngine) currentEngine).setAlphaChanged(val); } @Override public void shiftChanged(final boolean val) { final GraphicEngine currentEngine = Engine.INSTANCE.getHardwareDevice().getDisplayManager().engine; if (currentEngine instanceof SwingEngine) ((SwingEngine) currentEngine).setShiftChanged(val); } @Override public Semaphore newSemaphore() { return new DesktopSemaphore(0); } @Override public Semaphore newSemaphore(final int i) { return new DesktopSemaphore(i); } @Override public URLClassLoader newURLClassLoader(final URL[] urls) { return new DesktopURLClassLoader(urls); } @Override public Map<String, GraphicEngine> getEnginesList() { return el; } @Override public GraphicEngine getEngine(final String string) throws NullPointerException { return el.get(string); } @Override public void throwNewExceptionInInitializerError(final String text) { throw new ExceptionInInitializerError(); } @Override public String[] stacktraceToString(final Error e) { final StringWriter sw = new StringWriter(); final PrintWriter pw = new PrintWriter(sw); e.printStackTrace(pw); return sw.toString().toUpperCase().replace("\t", " ").replace("\r", "").split("\n"); } @Override public void loadPlatformRules() { } @Override public void zip(final String targetPath, final String destinationFilePath, final String password) { try { final ZipParameters parameters = new ZipParameters(); parameters.setCompressionMethod(Zip4jConstants.COMP_DEFLATE); parameters.setCompressionLevel(Zip4jConstants.DEFLATE_LEVEL_NORMAL); if (password.length() > 0) { parameters.setEncryptFiles(true); 
parameters.setEncryptionMethod(Zip4jConstants.ENC_METHOD_AES); parameters.setAesKeyStrength(Zip4jConstants.AES_STRENGTH_256); parameters.setPassword(password); } final ZipFile zipFile = new ZipFile(destinationFilePath); final File targetFile = new File(targetPath); if (targetFile.isFile()) zipFile.addFile(targetFile, parameters); else if (targetFile.isDirectory()) zipFile.addFolder(targetFile, parameters); } catch (final Exception e) { e.printStackTrace(); } } @Override public void unzip(final String targetZipFilePath, final String destinationFolderPath, final String password) { try { final ZipFile zipFile = new ZipFile(targetZipFilePath); if (zipFile.isEncrypted()) zipFile.setPassword(password); zipFile.extractAll(destinationFolderPath); } catch (final Exception e) { e.printStackTrace(); } } @Override public boolean compile(final String[] command, final PrintWriter printWriter, final PrintWriter errors) { return org.eclipse.jdt.internal.compiler.batch.Main.compile(command, printWriter, errors, null); } @Override public void setRunningOnRaspberry(boolean b) { if (isRunningOnRaspberry()) { runningOnRaspberryOverride = b; } else { runningOnRaspberryOverride = false; } } @Override public boolean isRunningOnRaspberry() { if (runningOnRaspberryOverride != null) return runningOnRaspberryOverride; return CacheUtils.get("isRunningOnRaspberry", 24 * 60 * 60 * 1000, () -> { if (Engine.getPlatform().isJavascript()) return false; if (Engine.getPlatform().getOsName().equals("Linux")) try { final File osRelease = new File("/etc", "os-release"); return FileUtils.readLines(osRelease, "UTF-8").stream().map(String::toLowerCase).anyMatch(line -> line.contains("raspbian") && line.contains("name")); } catch (final IOException readException) { return false; } else return false; }); } }
package io.dropwizard.metrics;

import java.io.PrintStream;
import java.text.DateFormat;
import java.util.*;
import java.util.concurrent.TimeUnit;

/**
 * A reporter which outputs measurements to a {@link PrintStream}, like {@code System.out}.
 */
public class ConsoleReporter extends ScheduledReporter {
    /**
     * Returns a new {@link Builder} for {@link ConsoleReporter}.
     *
     * @param registry the registry to report
     * @return a {@link Builder} instance for a {@link ConsoleReporter}
     */
    public static Builder forRegistry(MetricRegistry registry) {
        return new Builder(registry);
    }

    /**
     * A builder for {@link ConsoleReporter} instances. Defaults to using the default locale and
     * time zone, writing to {@code System.out}, converting rates to events/second, converting
     * durations to milliseconds, and not filtering metrics.
     */
    public static class Builder {
        private final MetricRegistry registry;
        private PrintStream output;
        private Locale locale;
        private Clock clock;
        private TimeZone timeZone;
        private TimeUnit rateUnit;
        private TimeUnit durationUnit;
        private MetricFilter filter;

        // Private: obtain instances via ConsoleReporter.forRegistry(registry).
        private Builder(MetricRegistry registry) {
            this.registry = registry;
            this.output = System.out;
            this.locale = Locale.getDefault();
            this.clock = Clock.defaultClock();
            this.timeZone = TimeZone.getDefault();
            this.rateUnit = TimeUnit.SECONDS;
            this.durationUnit = TimeUnit.MILLISECONDS;
            this.filter = MetricFilter.ALL;
        }

        /**
         * Write to the given {@link PrintStream}.
         *
         * @param output a {@link PrintStream} instance.
         * @return {@code this}
         */
        public Builder outputTo(PrintStream output) {
            this.output = output;
            return this;
        }

        /**
         * Format numbers for the given {@link Locale}.
         *
         * @param locale a {@link Locale}
         * @return {@code this}
         */
        public Builder formattedFor(Locale locale) {
            this.locale = locale;
            return this;
        }

        /**
         * Use the given {@link Clock} instance for the time.
         *
         * @param clock a {@link Clock} instance
         * @return {@code this}
         */
        public Builder withClock(Clock clock) {
            this.clock = clock;
            return this;
        }

        /**
         * Use the given {@link TimeZone} for the time.
         *
         * @param timeZone a {@link TimeZone}
         * @return {@code this}
         */
        public Builder formattedFor(TimeZone timeZone) {
            this.timeZone = timeZone;
            return this;
        }

        /**
         * Convert rates to the given time unit.
         *
         * @param rateUnit a unit of time
         * @return {@code this}
         */
        public Builder convertRatesTo(TimeUnit rateUnit) {
            this.rateUnit = rateUnit;
            return this;
        }

        /**
         * Convert durations to the given time unit.
         *
         * @param durationUnit a unit of time
         * @return {@code this}
         */
        public Builder convertDurationsTo(TimeUnit durationUnit) {
            this.durationUnit = durationUnit;
            return this;
        }

        /**
         * Only report metrics which match the given filter.
         *
         * @param filter a {@link MetricFilter}
         * @return {@code this}
         */
        public Builder filter(MetricFilter filter) {
            this.filter = filter;
            return this;
        }

        /**
         * Builds a {@link ConsoleReporter} with the given properties.
         *
         * @return a {@link ConsoleReporter}
         */
        public ConsoleReporter build() {
            return new ConsoleReporter(registry, output, locale, clock, timeZone, rateUnit, durationUnit, filter);
        }
    }

    // Total width (in characters) of each banner line printed by printWithBanner.
    private static final int CONSOLE_WIDTH = 80;

    private final PrintStream output;
    private final Locale locale;
    private final Clock clock;
    // NOTE(review): DateFormat is not thread-safe; this presumably relies on
    // ScheduledReporter invoking report() from a single thread — confirm.
    private final DateFormat dateFormat;

    private ConsoleReporter(MetricRegistry registry, PrintStream output, Locale locale, Clock clock, TimeZone timeZone, TimeUnit rateUnit, TimeUnit durationUnit, MetricFilter filter) {
        super(registry, "console-reporter", filter, rateUnit, durationUnit);
        this.output = output;
        this.locale = locale;
        this.clock = clock;
        this.dateFormat = DateFormat.getDateTimeInstance(DateFormat.SHORT, DateFormat.MEDIUM, locale);
        dateFormat.setTimeZone(timeZone);
    }

    /**
     * Prints one full report: a timestamp banner followed by one section per
     * non-empty metric category, in the fixed order gauges, counters,
     * histograms, meters, timers. The stream is flushed at the end.
     */
    @Override
    public void report(SortedMap<MetricName, Gauge> gauges, SortedMap<MetricName, Counter> counters, SortedMap<MetricName, Histogram> histograms, SortedMap<MetricName, Meter> meters, SortedMap<MetricName, Timer> timers) {
        final String dateTime = dateFormat.format(new Date(clock.getTime()));
        printWithBanner(dateTime, '=');
        output.println();
        if (!gauges.isEmpty()) {
            printWithBanner("-- Gauges", '-');
            for (Map.Entry<MetricName, Gauge> entry : gauges.entrySet()) {
                output.println(entry.getKey());
                printGauge(entry);
            }
            output.println();
        }
        if (!counters.isEmpty()) {
            printWithBanner("-- Counters", '-');
            for (Map.Entry<MetricName, Counter> entry : counters.entrySet()) {
                output.println(entry.getKey());
                printCounter(entry);
            }
            output.println();
        }
        if (!histograms.isEmpty()) {
            printWithBanner("-- Histograms", '-');
            for (Map.Entry<MetricName, Histogram> entry : histograms.entrySet()) {
                output.println(entry.getKey());
                printHistogram(entry.getValue());
            }
            output.println();
        }
        if (!meters.isEmpty()) {
            printWithBanner("-- Meters", '-');
            for (Map.Entry<MetricName, Meter> entry : meters.entrySet()) {
                output.println(entry.getKey());
                printMeter(entry.getValue());
            }
            output.println();
        }
        if (!timers.isEmpty()) {
            printWithBanner("-- Timers", '-');
            for (Map.Entry<MetricName, Timer> entry : timers.entrySet()) {
                output.println(entry.getKey());
                printTimer(entry.getValue());
            }
            output.println();
        }
        output.println();
        output.flush();
    }

    // Prints a meter's count plus its mean/1m/5m/15m rates, converted to the
    // reporter's configured rate unit.
    private void printMeter(Meter meter) {
        output.printf(locale, " count = %d%n", meter.getCount());
        output.printf(locale, " mean rate = %2.2f events/%s%n", convertRate(meter.getMeanRate()), getRateUnit());
        output.printf(locale, " 1-minute rate = %2.2f events/%s%n", convertRate(meter.getOneMinuteRate()), getRateUnit());
        output.printf(locale, " 5-minute rate = %2.2f events/%s%n", convertRate(meter.getFiveMinuteRate()), getRateUnit());
        output.printf(locale, " 15-minute rate = %2.2f events/%s%n", convertRate(meter.getFifteenMinuteRate()), getRateUnit());
    }

    // Prints a counter's current count.
    private void printCounter(Map.Entry<MetricName, Counter> entry) {
        output.printf(locale, " count = %d%n", entry.getValue().getCount());
    }

    // Prints a gauge's current value using its own toString via %s.
    private void printGauge(Map.Entry<MetricName, Gauge> entry) {
        output.printf(locale, " value = %s%n", entry.getValue().getValue());
    }

    // Prints a histogram's count and the statistics of its current snapshot.
    private void printHistogram(Histogram histogram) {
        output.printf(locale, " count = %d%n", histogram.getCount());
        Snapshot snapshot = histogram.getSnapshot();
        output.printf(locale, " min = %d%n", snapshot.getMin());
        output.printf(locale, " max = %d%n", snapshot.getMax());
        output.printf(locale, " mean = %2.2f%n", snapshot.getMean());
        output.printf(locale, " stddev = %2.2f%n", snapshot.getStdDev());
        output.printf(locale, " median = %2.2f%n", snapshot.getMedian());
        output.printf(locale, " 75%% <= %2.2f%n", snapshot.get75thPercentile());
        output.printf(locale, " 95%% <= %2.2f%n", snapshot.get95thPercentile());
        output.printf(locale, " 98%% <= %2.2f%n", snapshot.get98thPercentile());
        output.printf(locale, " 99%% <= %2.2f%n", snapshot.get99thPercentile());
        output.printf(locale, " 99.9%% <= %2.2f%n", snapshot.get999thPercentile());
    }

    // Prints a timer's count, rates (converted to the rate unit) and snapshot
    // statistics (converted to the duration unit).
    private void printTimer(Timer timer) {
        final Snapshot snapshot = timer.getSnapshot();
        output.printf(locale, " count = %d%n", timer.getCount());
        output.printf(locale, " mean rate = %2.2f calls/%s%n", convertRate(timer.getMeanRate()), getRateUnit());
        output.printf(locale, " 1-minute rate = %2.2f calls/%s%n", convertRate(timer.getOneMinuteRate()), getRateUnit());
        output.printf(locale, " 5-minute rate = %2.2f calls/%s%n", convertRate(timer.getFiveMinuteRate()), getRateUnit());
        output.printf(locale, " 15-minute rate = %2.2f calls/%s%n", convertRate(timer.getFifteenMinuteRate()), getRateUnit());
        output.printf(locale, " min = %2.2f %s%n", convertDuration(snapshot.getMin()), getDurationUnit());
        output.printf(locale, " max = %2.2f %s%n", convertDuration(snapshot.getMax()), getDurationUnit());
        output.printf(locale, " mean = %2.2f %s%n", convertDuration(snapshot.getMean()), getDurationUnit());
        output.printf(locale, " stddev = %2.2f %s%n", convertDuration(snapshot.getStdDev()), getDurationUnit());
        output.printf(locale, " median = %2.2f %s%n", convertDuration(snapshot.getMedian()), getDurationUnit());
        output.printf(locale, " 75%% <= %2.2f %s%n", convertDuration(snapshot.get75thPercentile()), getDurationUnit());
        output.printf(locale, " 95%% <= %2.2f %s%n", convertDuration(snapshot.get95thPercentile()), getDurationUnit());
        output.printf(locale, " 98%% <= %2.2f %s%n", convertDuration(snapshot.get98thPercentile()), getDurationUnit());
        output.printf(locale, " 99%% <= %2.2f %s%n", convertDuration(snapshot.get99thPercentile()), getDurationUnit());
        output.printf(locale, " 99.9%% <= %2.2f %s%n", convertDuration(snapshot.get999thPercentile()), getDurationUnit());
    }

    // Prints s followed by a space and enough repetitions of c to pad the line
    // out to CONSOLE_WIDTH characters.
    private void printWithBanner(String s, char c) {
        output.print(s);
        output.print(' ');
        for (int i = 0; i < (CONSOLE_WIDTH - s.length() - 1); i++) {
            output.print(c);
        }
        output.println();
    }
}
// -*- mode: java; c-basic-offset: 2; -*- // Copyright 2009-2011 Google, All Rights reserved // Copyright 2011-2012 MIT, All rights reserved // Released under the Apache License, Version 2.0 // http://www.apache.org/licenses/LICENSE-2.0 package com.google.appinventor.server.project.youngandroid; import com.google.appengine.api.utils.SystemProperty; import com.google.apphosting.api.ApiProxy; import com.google.appinventor.common.utils.StringUtils; import com.google.appinventor.common.version.GitBuildId; import com.google.appinventor.components.common.YaVersion; import com.google.appinventor.server.CrashReport; import com.google.appinventor.server.FileExporter; import com.google.appinventor.server.FileExporterImpl; import com.google.appinventor.server.Server; import com.google.appinventor.server.encryption.EncryptionException; import com.google.appinventor.server.flags.Flag; import com.google.appinventor.server.project.CommonProjectService; import com.google.appinventor.server.project.utils.Security; import com.google.appinventor.server.properties.json.ServerJsonParser; import com.google.appinventor.server.storage.StorageIo; import com.google.appinventor.shared.properties.json.JSONParser; import com.google.appinventor.shared.rpc.RpcResult; import com.google.appinventor.shared.rpc.ServerLayout; import com.google.appinventor.shared.rpc.project.NewProjectParameters; import com.google.appinventor.shared.rpc.project.Project; import com.google.appinventor.shared.rpc.project.ProjectNode; import com.google.appinventor.shared.rpc.project.ProjectRootNode; import com.google.appinventor.shared.rpc.project.ProjectSourceZip; import com.google.appinventor.shared.rpc.project.RawFile; import com.google.appinventor.shared.rpc.project.TextFile; import com.google.appinventor.shared.rpc.project.youngandroid.NewYoungAndroidProjectParameters; import com.google.appinventor.shared.rpc.project.youngandroid.YoungAndroidAssetNode; import 
com.google.appinventor.shared.rpc.project.youngandroid.YoungAndroidAssetsFolder; import com.google.appinventor.shared.rpc.project.youngandroid.YoungAndroidBlocksNode; import com.google.appinventor.shared.rpc.project.youngandroid.YoungAndroidComponentNode; import com.google.appinventor.shared.rpc.project.youngandroid.YoungAndroidComponentsFolder; import com.google.appinventor.shared.rpc.project.youngandroid.YoungAndroidFormNode; import com.google.appinventor.shared.rpc.project.youngandroid.YoungAndroidPackageNode; import com.google.appinventor.shared.rpc.project.youngandroid.YoungAndroidProjectNode; import com.google.appinventor.shared.rpc.project.youngandroid.YoungAndroidSourceFolderNode; import com.google.appinventor.shared.rpc.project.youngandroid.YoungAndroidSourceNode; import com.google.appinventor.shared.rpc.project.youngandroid.YoungAndroidYailNode; import com.google.appinventor.shared.rpc.user.User; import com.google.appinventor.shared.settings.Settings; import com.google.appinventor.shared.settings.SettingsConstants; import com.google.appinventor.shared.storage.StorageUtil; import com.google.appinventor.shared.youngandroid.YoungAndroidSourceAnalyzer; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Strings; import com.google.common.collect.Maps; import com.google.common.io.CharStreams; import org.json.JSONException; import org.json.JSONObject; import java.io.BufferedOutputStream; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.StringReader; import java.io.UnsupportedEncodingException; import java.net.HttpURLConnection; import java.net.MalformedURLException; import java.net.URL; import java.net.URLEncoder; import java.text.DecimalFormat; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.logging.Logger; /** * Provides support for Young Android projects. 
* * @author lizlooney@google.com (Liz Looney) * @author markf@google.com (Mark Friedman) */ public final class YoungAndroidProjectService extends CommonProjectService { private static int currentProgress = 0; private static final Logger LOG = Logger.getLogger(YoungAndroidProjectService.class.getName()); // The value of this flag can be changed in appengine-web.xml private static final Flag<Boolean> sendGitVersion = Flag.createFlag("build.send.git.version", true); // Project folder prefixes public static final String SRC_FOLDER = YoungAndroidSourceAnalyzer.SRC_FOLDER; protected static final String ASSETS_FOLDER = "assets"; private static final String EXTERNAL_COMPS_FOLDER = "assets/external_comps"; static final String PROJECT_DIRECTORY = "youngandroidproject"; // TODO(user) Source these from a common constants library. private static final String FORM_PROPERTIES_EXTENSION = YoungAndroidSourceAnalyzer.FORM_PROPERTIES_EXTENSION; private static final String CODEBLOCKS_SOURCE_EXTENSION = YoungAndroidSourceAnalyzer.CODEBLOCKS_SOURCE_EXTENSION; private static final String BLOCKLY_SOURCE_EXTENSION = YoungAndroidSourceAnalyzer.BLOCKLY_SOURCE_EXTENSION; private static final String YAIL_FILE_EXTENSION = YoungAndroidSourceAnalyzer.YAIL_FILE_EXTENSION; public static final String PROJECT_PROPERTIES_FILE_NAME = PROJECT_DIRECTORY + "/" + "project.properties"; private static final JSONParser JSON_PARSER = new ServerJsonParser(); // Build folder path private static final String BUILD_FOLDER = "build"; public static final String PROJECT_KEYSTORE_LOCATION = "android.keystore"; // host[:port] to use for connecting to the build server private static final Flag<String> buildServerHost = Flag.createFlag("build.server.host", "localhost:9990"); // host[:port] to tell build server app host url private static final Flag<String> appengineHost = Flag.createFlag("appengine.host", ""); public YoungAndroidProjectService(StorageIo storageIo) { 
super(YoungAndroidProjectNode.YOUNG_ANDROID_PROJECT_TYPE, storageIo); } /** * Returns project settings that can be used when creating a new project. */ public static String getProjectSettings(String icon, String vCode, String vName, String useslocation, String aName, String sizing) { icon = Strings.nullToEmpty(icon); vCode = Strings.nullToEmpty(vCode); vName = Strings.nullToEmpty(vName); useslocation = Strings.nullToEmpty(useslocation); sizing = Strings.nullToEmpty(sizing); aName = Strings.nullToEmpty(aName); return "{\"" + SettingsConstants.PROJECT_YOUNG_ANDROID_SETTINGS + "\":{" + "\"" + SettingsConstants.YOUNG_ANDROID_SETTINGS_ICON + "\":\"" + icon + "\",\"" + SettingsConstants.YOUNG_ANDROID_SETTINGS_VERSION_CODE + "\":\"" + vCode + "\",\"" + SettingsConstants.YOUNG_ANDROID_SETTINGS_VERSION_NAME + "\":\"" + vName + "\",\"" + SettingsConstants.YOUNG_ANDROID_SETTINGS_USES_LOCATION + "\":\"" + useslocation + "\",\"" + SettingsConstants.YOUNG_ANDROID_SETTINGS_APP_NAME + "\":\"" + aName + "\",\"" + SettingsConstants.YOUNG_ANDROID_SETTINGS_SIZING + "\":\"" + sizing + "\"}}"; } /** * Returns the contents of the project properties file for a new Young Android * project. 
* * @param projectName the name of the project * @param qualifiedName the qualified name of Screen1 in the project * @param icon the name of the asset to use as the application icon * @param vcode the version code * @param vname the version name */ public static String getProjectPropertiesFileContents(String projectName, String qualifiedName, String icon, String vcode, String vname, String useslocation, String aname, String sizing) { String contents = "main=" + qualifiedName + "\n" + "name=" + projectName + '\n' + "assets=../" + ASSETS_FOLDER + "\n" + "source=../" + SRC_FOLDER + "\n" + "build=../build\n"; if (icon != null && !icon.isEmpty()) { contents += "icon=" + icon + "\n"; } if (vcode != null && !vcode.isEmpty()) { contents += "versioncode=" + vcode + "\n"; } if (vname != null && !vname.isEmpty()) { contents += "versionname=" + vname + "\n"; } if (useslocation != null && !useslocation.isEmpty()) { contents += "useslocation=" + useslocation + "\n"; } if (aname != null) { contents += "aname=" + aname + "\n"; } if (sizing != null && !sizing.isEmpty()) { contents += "sizing=" + sizing + "\n"; } return contents; } /** * Returns the contents of a new Young Android form file. * @param qualifiedName the qualified name of the form. * @return the contents of a new Young Android form file. */ @VisibleForTesting public static String getInitialFormPropertiesFileContents(String qualifiedName) { final int lastDotPos = qualifiedName.lastIndexOf('.'); String packageName = qualifiedName.split("\\.")[2]; String formName = qualifiedName.substring(lastDotPos + 1); // The initial Uuid is set to zero here since (as far as we know) we can't get random numbers // in ode.shared. This shouldn't actually matter since all Uuid's are random int's anyway (and // 0 was randomly chosen, I promise). The TODO(user) in MockComponent.java indicates that // there will someday be assurance that these random Uuid's are unique. Once that happens // this will be perfectly acceptable. 
Until that happens, choosing 0 is just as safe as // allowing a random number to be chosen when the MockComponent is first created. return "#|\n$JSON\n" + "{\"authURL\":[]," + "\"YaVersion\":\"" + YaVersion.YOUNG_ANDROID_VERSION + "\",\"Source\":\"Form\"," + "\"Properties\":{\"$Name\":\"" + formName + "\",\"$Type\":\"Form\"," + "\"$Version\":\"" + YaVersion.FORM_COMPONENT_VERSION + "\",\"Uuid\":\"" + 0 + "\"," + "\"Title\":\"" + formName + "\",\"AppName\":\"" + packageName +"\"}}\n|#"; } /** * Returns the initial contents of a Young Android blockly blocks file. */ private static String getInitialBlocklySourceFileContents(String qualifiedName) { return ""; } private static String packageNameToPath(String packageName) { return SRC_FOLDER + '/' + packageName.replace('.', '/'); } public static String getSourceDirectory(String qualifiedName) { return StorageUtil.dirname(packageNameToPath(qualifiedName)); } // CommonProjectService implementation @Override public void storeProjectSettings(String userId, long projectId, String projectSettings) { super.storeProjectSettings(userId, projectId, projectSettings); // If the icon has been changed, update the project properties file. // Extract the new icon from the projectSettings parameter. 
Settings settings = new Settings(JSON_PARSER, projectSettings); String newIcon = Strings.nullToEmpty(settings.getSetting( SettingsConstants.PROJECT_YOUNG_ANDROID_SETTINGS, SettingsConstants.YOUNG_ANDROID_SETTINGS_ICON)); String newVCode = Strings.nullToEmpty(settings.getSetting( SettingsConstants.PROJECT_YOUNG_ANDROID_SETTINGS, SettingsConstants.YOUNG_ANDROID_SETTINGS_VERSION_CODE)); String newVName = Strings.nullToEmpty(settings.getSetting( SettingsConstants.PROJECT_YOUNG_ANDROID_SETTINGS, SettingsConstants.YOUNG_ANDROID_SETTINGS_VERSION_NAME)); String newUsesLocation = Strings.nullToEmpty(settings.getSetting( SettingsConstants.PROJECT_YOUNG_ANDROID_SETTINGS, SettingsConstants.YOUNG_ANDROID_SETTINGS_USES_LOCATION)); String newSizing = Strings.nullToEmpty(settings.getSetting( SettingsConstants.PROJECT_YOUNG_ANDROID_SETTINGS, SettingsConstants.YOUNG_ANDROID_SETTINGS_SIZING)); String newAName = Strings.nullToEmpty(settings.getSetting( SettingsConstants.PROJECT_YOUNG_ANDROID_SETTINGS, SettingsConstants.YOUNG_ANDROID_SETTINGS_APP_NAME)); // Extract the old icon from the project.properties file from storageIo. String projectProperties = storageIo.downloadFile(userId, projectId, PROJECT_PROPERTIES_FILE_NAME, StorageUtil.DEFAULT_CHARSET); Properties properties = new Properties(); try { properties.load(new StringReader(projectProperties)); } catch (IOException e) { // Since we are reading from a String, I don't think this exception can actually happen. 
e.printStackTrace(); return; } String oldIcon = Strings.nullToEmpty(properties.getProperty("icon")); String oldVCode = Strings.nullToEmpty(properties.getProperty("versioncode")); String oldVName = Strings.nullToEmpty(properties.getProperty("versionname")); String oldUsesLocation = Strings.nullToEmpty(properties.getProperty("useslocation")); String oldSizing = Strings.nullToEmpty(properties.getProperty("sizing")); String oldAName = Strings.nullToEmpty(properties.getProperty("aname")); if (!newIcon.equals(oldIcon) || !newVCode.equals(oldVCode) || !newVName.equals(oldVName) || !newUsesLocation.equals(oldUsesLocation) || !newAName.equals(oldAName) || !newSizing.equals(oldSizing)) { // Recreate the project.properties and upload it to storageIo. String projectName = properties.getProperty("name"); String qualifiedName = properties.getProperty("main"); String newContent = getProjectPropertiesFileContents(projectName, qualifiedName, newIcon, newVCode, newVName, newUsesLocation, newAName, newSizing); storageIo.uploadFileForce(projectId, PROJECT_PROPERTIES_FILE_NAME, userId, newContent, StorageUtil.DEFAULT_CHARSET); } } /** * {@inheritDoc} * * {@code params} needs to be an instance of * {@link NewYoungAndroidProjectParameters}. 
*/ @Override public long newProject(String userId, String projectName, NewProjectParameters params) { NewYoungAndroidProjectParameters youngAndroidParams = (NewYoungAndroidProjectParameters) params; String qualifiedFormName = youngAndroidParams.getQualifiedFormName(); String propertiesFileName = PROJECT_PROPERTIES_FILE_NAME; String propertiesFileContents = getProjectPropertiesFileContents(projectName, qualifiedFormName, null, null, null, null, null, null); String formFileName = YoungAndroidFormNode.getFormFileId(qualifiedFormName); String formFileContents = getInitialFormPropertiesFileContents(qualifiedFormName); String blocklyFileName = YoungAndroidBlocksNode.getBlocklyFileId(qualifiedFormName); String blocklyFileContents = getInitialBlocklySourceFileContents(qualifiedFormName); String yailFileName = YoungAndroidYailNode.getYailFileId(qualifiedFormName); String yailFileContents = ""; Project project = new Project(projectName); project.setProjectType(YoungAndroidProjectNode.YOUNG_ANDROID_PROJECT_TYPE); // Project history not supported in legacy ode new project wizard project.addTextFile(new TextFile(propertiesFileName, propertiesFileContents)); project.addTextFile(new TextFile(formFileName, formFileContents)); project.addTextFile(new TextFile(blocklyFileName, blocklyFileContents)); project.addTextFile(new TextFile(yailFileName, yailFileContents)); // Create new project return storageIo.createProject(userId, project, getProjectSettings("", "1", "1.0", "false", projectName, "Fixed")); } @Override public long copyProject(String userId, long oldProjectId, String newName) { String oldName = storageIo.getProjectName(userId, oldProjectId); String oldProjectSettings = storageIo.loadProjectSettings(userId, oldProjectId); String oldProjectHistory = storageIo.getProjectHistory(userId, oldProjectId); Settings oldSettings = new Settings(JSON_PARSER, oldProjectSettings); String icon = oldSettings.getSetting( SettingsConstants.PROJECT_YOUNG_ANDROID_SETTINGS, 
SettingsConstants.YOUNG_ANDROID_SETTINGS_ICON); String vcode = oldSettings.getSetting( SettingsConstants.PROJECT_YOUNG_ANDROID_SETTINGS, SettingsConstants.YOUNG_ANDROID_SETTINGS_VERSION_CODE); String vname = oldSettings.getSetting( SettingsConstants.PROJECT_YOUNG_ANDROID_SETTINGS, SettingsConstants.YOUNG_ANDROID_SETTINGS_VERSION_NAME); String useslocation = oldSettings.getSetting( SettingsConstants.PROJECT_YOUNG_ANDROID_SETTINGS, SettingsConstants.YOUNG_ANDROID_SETTINGS_USES_LOCATION); String aname = oldSettings.getSetting( SettingsConstants.PROJECT_YOUNG_ANDROID_SETTINGS, SettingsConstants.YOUNG_ANDROID_SETTINGS_APP_NAME); String sizing = oldSettings.getSetting( SettingsConstants.PROJECT_YOUNG_ANDROID_SETTINGS, SettingsConstants.YOUNG_ANDROID_SETTINGS_SIZING); Project newProject = new Project(newName); newProject.setProjectType(YoungAndroidProjectNode.YOUNG_ANDROID_PROJECT_TYPE); newProject.setProjectHistory(oldProjectHistory); // Get the old project's source files and add them to new project, modifying where necessary. for (String oldSourceFileName : storageIo.getProjectSourceFiles(userId, oldProjectId)) { String newSourceFileName; String newContents = null; if (oldSourceFileName.equals(PROJECT_PROPERTIES_FILE_NAME)) { // This is the project properties file. The name of the file doesn't contain the old // project name. newSourceFileName = oldSourceFileName; // For the contents of the project properties file, generate the file with the new project // name and qualified name. String qualifiedFormName = StringUtils.getQualifiedFormName( storageIo.getUser(userId).getUserEmail(), newName); newContents = getProjectPropertiesFileContents(newName, qualifiedFormName, icon, vcode, vname, useslocation, aname, sizing); } else { // This is some file other than the project properties file. // oldSourceFileName may contain the old project name as a path segment, surrounded by /. // Replace the old name with the new name. 
newSourceFileName = StringUtils.replaceLastOccurrence(oldSourceFileName, "/" + oldName + "/", "/" + newName + "/"); } if (newContents != null) { // We've determined (above) that the contents of the file must change for the new project. // Use newContents when adding the file to the new project. newProject.addTextFile(new TextFile(newSourceFileName, newContents)); } else { // If we get here, we know that the contents of the file can just be copied from the old // project. Since it might be a binary file, we copy it as a raw file (that works for both // text and binary files). byte[] contents = storageIo.downloadRawFile(userId, oldProjectId, oldSourceFileName); newProject.addRawFile(new RawFile(newSourceFileName, contents)); } } // Create the new project and return the new project's id. return storageIo.createProject(userId, newProject, getProjectSettings(icon, vcode, vname, useslocation, aname, sizing)); } @Override public ProjectRootNode getRootNode(String userId, long projectId) { // Create root, assets, and source nodes (they are mocked nodes as they don't really // have to exist like this on the file system) ProjectRootNode rootNode = new YoungAndroidProjectNode(storageIo.getProjectName(userId, projectId), projectId); ProjectNode assetsNode = new YoungAndroidAssetsFolder(ASSETS_FOLDER); ProjectNode sourcesNode = new YoungAndroidSourceFolderNode(SRC_FOLDER); ProjectNode compsNode = new YoungAndroidComponentsFolder(EXTERNAL_COMPS_FOLDER); rootNode.addChild(assetsNode); rootNode.addChild(sourcesNode); rootNode.addChild(compsNode); // Sources contains nested folders that are interpreted as packages Map<String, ProjectNode> packagesMap = Maps.newHashMap(); // Retrieve project information List<String> sourceFiles = storageIo.getProjectSourceFiles(userId, projectId); for (String fileId : sourceFiles) { if (fileId.startsWith(ASSETS_FOLDER + '/')) { if (fileId.startsWith(EXTERNAL_COMPS_FOLDER + '/')) { compsNode.addChild(new 
YoungAndroidComponentNode(StorageUtil.basename(fileId), fileId)); } else { assetsNode.addChild(new YoungAndroidAssetNode(StorageUtil.basename(fileId), fileId)); } } else if (fileId.startsWith(SRC_FOLDER + '/')) { // We send form (.scm), blocks (.blk), and yail (.yail) nodes to the ODE client. YoungAndroidSourceNode sourceNode = null; if (fileId.endsWith(FORM_PROPERTIES_EXTENSION)) { sourceNode = new YoungAndroidFormNode(fileId); } else if (fileId.endsWith(BLOCKLY_SOURCE_EXTENSION)) { sourceNode = new YoungAndroidBlocksNode(fileId); } else if (fileId.endsWith(CODEBLOCKS_SOURCE_EXTENSION)) { String blocklyFileName = fileId.substring(0, fileId.lastIndexOf(CODEBLOCKS_SOURCE_EXTENSION)) + BLOCKLY_SOURCE_EXTENSION; if (!sourceFiles.contains(blocklyFileName)) { // This is an old project that hasn't been converted yet. Convert // the blocks file to Blockly format and name. Leave the old // codeblocks file around for now (for debugging) but don't send it to the client. String blocklyFileContents = convertCodeblocksToBlockly(userId, projectId, fileId); storageIo.addSourceFilesToProject(userId, projectId, false, blocklyFileName); storageIo.uploadFileForce(projectId, blocklyFileName, userId, blocklyFileContents, StorageUtil.DEFAULT_CHARSET); sourceNode = new YoungAndroidBlocksNode(blocklyFileName); } } else if (fileId.endsWith(YAIL_FILE_EXTENSION)) { sourceNode = new YoungAndroidYailNode(fileId); } if (sourceNode != null) { String packageName = StorageUtil.getPackageName(sourceNode.getQualifiedName()); ProjectNode packageNode = packagesMap.get(packageName); if (packageNode == null) { packageNode = new YoungAndroidPackageNode(packageName, packageNameToPath(packageName)); packagesMap.put(packageName, packageNode); sourcesNode.addChild(packageNode); } packageNode.addChild(sourceNode); } } } return rootNode; } /* * Convert the contents of the codeblocks file named codeblocksFileId * to blockly format and return the blockly contents. 
   */
  private String convertCodeblocksToBlockly(String userId, long projectId,
      String codeblocksFileId) {
    // TODO(sharon): implement this!
    // Currently an unimplemented stub: every legacy codeblocks file converts to
    // an empty Blockly document.
    return "";
  }

  /**
   * Adds a source file to the project. For a form (.scm) or blocks (.bky) file
   * id, creates the whole triple — form file, blocks file, and an empty yail
   * placeholder — in one go; anything else is delegated to the superclass.
   *
   * @return the modification date reported by the last upload
   * @throws IllegalStateException if any of the three files already exists
   */
  @Override
  public long addFile(String userId, long projectId, String fileId) {
    if (fileId.endsWith(FORM_PROPERTIES_EXTENSION) || fileId.endsWith(BLOCKLY_SOURCE_EXTENSION)) {
      // If the file to be added is a form file or a blocks file, add a new form file, a new
      // blocks file, and a new yail file (as a placeholder for later code generation)
      String qualifiedFormName = YoungAndroidSourceNode.getQualifiedName(fileId);
      String formFileName = YoungAndroidFormNode.getFormFileId(qualifiedFormName);
      String blocklyFileName = YoungAndroidBlocksNode.getBlocklyFileId(qualifiedFormName);
      String yailFileName = YoungAndroidYailNode.getYailFileId(qualifiedFormName);

      List<String> sourceFiles = storageIo.getProjectSourceFiles(userId, projectId);
      if (!sourceFiles.contains(formFileName)
          && !sourceFiles.contains(blocklyFileName)
          && !sourceFiles.contains(yailFileName)) {

        String formFileContents = getInitialFormPropertiesFileContents(qualifiedFormName);
        storageIo.addSourceFilesToProject(userId, projectId, false, formFileName);
        storageIo.uploadFileForce(projectId, formFileName, userId, formFileContents,
            StorageUtil.DEFAULT_CHARSET);

        String blocklyFileContents = getInitialBlocklySourceFileContents(qualifiedFormName);
        storageIo.addSourceFilesToProject(userId, projectId, false, blocklyFileName);
        storageIo.uploadFileForce(projectId, blocklyFileName, userId, blocklyFileContents,
            StorageUtil.DEFAULT_CHARSET);

        String yailFileContents = "";  // start empty
        storageIo.addSourceFilesToProject(userId, projectId, false, yailFileName);
        // The yail upload is last so its returned date is the project's newest modification.
        return storageIo.uploadFileForce(projectId, yailFileName, userId, yailFileContents,
            StorageUtil.DEFAULT_CHARSET);
      } else {
        throw new IllegalStateException("One or more files to be added already exists.");
      }

    } else {
      return super.addFile(userId, projectId, fileId);
    }
  }

  /**
   * Deletes a source file. For a form or blocks file id, removes the whole
   * family — form, blocks, legacy codeblocks and yail files — in one go
   * (deleting a file that does not exist is a no-op); anything else is
   * delegated to the superclass.
   *
   * @return the project's modification date after the deletion
   */
  @Override
  public long deleteFile(String userId, long
projectId, String fileId) {
    if (fileId.endsWith(FORM_PROPERTIES_EXTENSION) ||
        fileId.endsWith(BLOCKLY_SOURCE_EXTENSION)) {
      // If the file to be deleted is a form file or a blocks file, delete both the form file
      // and the blocks file. Also, if there was a codeblocks file laying around
      // for that same form, delete it too (if it doesn't exist the delete
      // for it will be a no-op).
      String qualifiedFormName = YoungAndroidSourceNode.getQualifiedName(fileId);
      String formFileName = YoungAndroidFormNode.getFormFileId(qualifiedFormName);
      String blocklyFileName = YoungAndroidBlocksNode.getBlocklyFileId(qualifiedFormName);
      String codeblocksFileName = YoungAndroidBlocksNode.getCodeblocksFileId(qualifiedFormName);
      String yailFileName = YoungAndroidYailNode.getYailFileId(qualifiedFormName);
      storageIo.deleteFile(userId, projectId, formFileName);
      storageIo.deleteFile(userId, projectId, blocklyFileName);
      storageIo.deleteFile(userId, projectId, codeblocksFileName);
      storageIo.deleteFile(userId, projectId, yailFileName);
      storageIo.removeSourceFilesFromProject(userId, projectId, true,
          formFileName, blocklyFileName, codeblocksFileName, yailFileName);
      return storageIo.getProjectDateModified(userId, projectId);

    } else {
      return super.deleteFile(userId, projectId, fileId);
    }
  }

  /**
   * Make a request to the Build Server to build a project. The Build Server will asynchronously
   * post the results of the build via the {@link com.google.appinventor.server.ReceiveBuildServlet}
   * A later call will need to be made by the client in order to get those results.
   *
   * @param user the User that owns the {@code projectId}.
   * @param projectId  project id to be built
   * @param nonce random string used to find resulting APK from unauth context
   * @param target  build target (optional, implementation dependent)
   *
   * @return an RpcResult reflecting the call to the Build Server
   */
  @Override
  public RpcResult build(User user, long projectId, String nonce, String target) {
    String userId = user.getUserId();
    String projectName = storageIo.getProjectName(userId, projectId);
    String outputFileDir = BUILD_FOLDER + '/' + target;

    // Store the userId and projectId based on the nonce
    storageIo.storeNonce(nonce, userId, projectId);

    // Delete the existing build output files, if any, so that future attempts to get it won't get
    // old versions.
    List<String> buildOutputFiles = storageIo.getProjectOutputFiles(userId, projectId);
    for (String buildOutputFile : buildOutputFiles) {
      storageIo.deleteFile(userId, projectId, buildOutputFile);
    }
    URL buildServerUrl = null;
    ProjectSourceZip zipFile = null;
    try {
      buildServerUrl = new URL(getBuildServerUrlStr(
          user.getUserEmail(),
          userId,
          projectId,
          outputFileDir));
      // POST the zipped project sources to the build server.
      HttpURLConnection connection = (HttpURLConnection) buildServerUrl.openConnection();
      connection.setDoOutput(true);
      connection.setRequestMethod("POST");

      BufferedOutputStream bufferedOutputStream =
          new BufferedOutputStream(connection.getOutputStream());
      FileExporter fileExporter = new FileExporterImpl();
      zipFile = fileExporter.exportProjectSourceZip(userId, projectId, false,
          /* includeAndroidKeystore */ true,
          projectName + ".aia", true, true, false);
      bufferedOutputStream.write(zipFile.getContent());
      bufferedOutputStream.flush();
      bufferedOutputStream.close();

      int responseCode = 0;
      responseCode = connection.getResponseCode();
      if (responseCode != HttpURLConnection.HTTP_OK) {
        // Put the HTTP response code into the RpcResult so the client code in BuildCommand.java
        // can provide an appropriate error message to the user.
        // NOTE(lizlooney) - There is some weird bug/problem with HttpURLConnection. When the
        // responseCode is 503, connection.getResponseMessage() returns "OK", but it should return
        // "Service Unavailable". If I make the request with curl and look at the headers, they
        // have the expected error message.
        // For now, the moral of the story is: don't use connection.getResponseMessage().
        String error = "Build server responded with response code " + responseCode + ".";
        // Best-effort: append whatever the server put on the normal and error streams.
        try {
          String content = readContent(connection.getInputStream());
          if (content != null && !content.isEmpty()) {
            error += "\n" + content;
          }
        } catch (IOException e) {
          // No content. That's ok.
        }
        try {
          String errorContent = readContent(connection.getErrorStream());
          if (errorContent != null && !errorContent.isEmpty()) {
            error += "\n" + errorContent;
          }
        } catch (IOException e) {
          // No error content. That's ok.
        }
        if (responseCode == HttpURLConnection.HTTP_CONFLICT) {
          // The build server is not compatible with this App Inventor instance. Log this as severe
          // so the owner of the app engine instance will know about it.
          LOG.severe(error);
        }
        return new RpcResult(responseCode, "", StringUtils.escape(error));
      }
    } catch (MalformedURLException e) {
      CrashReport.createAndLogError(LOG, null,
          buildErrorMsg("MalformedURLException", buildServerUrl, userId, projectId), e);
      return new RpcResult(false, "", e.getMessage());
    } catch (IOException e) {
      // As of App Engine 1.9.0 we get these when UrlFetch is asked to send too much data
      Throwable wrappedException = e;
      int zipFileLength = zipFile == null ? -1 : zipFile.getContent().length;
      if (zipFileLength >= (5 * 1024 * 1024) /* 5 MB */) {
        String lengthMbs = format((zipFileLength * 1.0)/(1024*1024));
        wrappedException = new IllegalArgumentException(
            "Sorry, can't package projects larger than 5MB."
            + " Yours is " + lengthMbs + "MB.", e);
      }
      CrashReport.createAndLogError(LOG, null,
          buildErrorMsg("IOException", buildServerUrl, userId, projectId), wrappedException);
      return new RpcResult(false, "", wrappedException.getMessage());
    } catch (EncryptionException e) {
      CrashReport.createAndLogError(LOG, null,
          buildErrorMsg("EncryptionException", buildServerUrl, userId, projectId), e);
      return new RpcResult(false, "", e.getMessage());
    } catch (RuntimeException e) {
      // In particular, we often see RequestTooLargeException (if the zip is too
      // big) and ApiProxyException. There may be others.
      Throwable wrappedException = e;
      if (e instanceof ApiProxy.RequestTooLargeException && zipFile != null) {
        int zipFileLength = zipFile.getContent().length;
        if (zipFileLength >= (5 * 1024 * 1024) /* 5 MB */) {
          String lengthMbs = format((zipFileLength * 1.0)/(1024*1024));
          wrappedException = new IllegalArgumentException(
              "Sorry, can't package projects larger than 5MB."
              + " Yours is " + lengthMbs + "MB.", e);
        } else {
          wrappedException = new IllegalArgumentException(
              "Sorry, project was too large to package (" + zipFileLength + " bytes)");
        }
      }
      CrashReport.createAndLogError(LOG, null,
          buildErrorMsg("RuntimeException", buildServerUrl, userId, projectId), wrappedException);
      return new RpcResult(false, "", wrappedException.getMessage());
    }
    return new RpcResult(true, "Building " + projectName, "");
  }

  // Builds the context string attached to crash reports for failed build requests.
  private String buildErrorMsg(String exceptionName, URL buildURL, String userId, long projectId) {
    return "Request to build failed with " + exceptionName + ", user=" + userId
        + ", project=" + projectId + ", build URL is " + buildURL
        + " [" + buildURL.toString().length() + "]";
  }

  // Note that this is a function rather than just a constant because we assume it will get
  // a little more complicated when we want to get the URL from an App Engine config file or
  // command line argument.
private String getBuildServerUrlStr(String userName, String userId, long projectId, String fileName) throws UnsupportedEncodingException, EncryptionException { return "http://" + buildServerHost.get() + "/buildserver/build-all-from-zip-async" + "?uname=" + URLEncoder.encode(userName, "UTF-8") + (sendGitVersion.get() ? "&gitBuildVersion=" + URLEncoder.encode(GitBuildId.getVersion(), "UTF-8") : "") + "&callback=" + URLEncoder.encode("http://" + getCurrentHost() + ServerLayout.ODE_BASEURL_NOAUTH + ServerLayout.RECEIVE_BUILD_SERVLET + "/" + Security.encryptUserAndProjectId(userId, projectId) + "/" + fileName, "UTF-8"); } private String getCurrentHost() { if (Server.isProductionServer()) { if (appengineHost.get()=="") { String applicationVersionId = SystemProperty.applicationVersion.get(); String applicationId = SystemProperty.applicationId.get(); return applicationVersionId + "." + applicationId + ".appspot.com"; } else { return appengineHost.get(); } } else { // TODO(user): Figure out how to make this more generic return "localhost:8888"; } } /* * Reads the UTF-8 content from the given input stream. */ private static String readContent(InputStream stream) throws IOException { if (stream != null) { BufferedReader reader = new BufferedReader(new InputStreamReader(stream, "UTF-8")); try { return CharStreams.toString(reader); } finally { reader.close(); } } return null; } /** * Check if there are any build results available for the given user's project * * @param user the User that owns the {@code projectId}. * @param projectId project id to be built * @param target build target (optional, implementation dependent) * @return an RpcResult reflecting the call to the Build Server. The following values may be in * RpcResult.result: * 0: Build is done and was successful * 1: Build is done and was unsuccessful * 2: Yail generation failed * -1: Build is not yet done. 
*/ @Override public RpcResult getBuildResult(User user, long projectId, String target) { String userId = user.getUserId(); String buildOutputFileName = BUILD_FOLDER + '/' + target + '/' + "build.out"; List<String> outputFiles = storageIo.getProjectOutputFiles(userId, projectId); updateCurrentProgress(user, projectId, target); RpcResult buildResult = new RpcResult(-1, ""+currentProgress, ""); // Build not finished for (String outputFile : outputFiles) { if (buildOutputFileName.equals(outputFile)) { String outputStr = storageIo.downloadFile(userId, projectId, outputFile, "UTF-8"); try { JSONObject buildResultJsonObj = new JSONObject(outputStr); buildResult = new RpcResult(buildResultJsonObj.getInt("result"), buildResultJsonObj.getString("output"), buildResultJsonObj.getString("error"), outputStr); } catch (JSONException e) { buildResult = new RpcResult(1, "", ""); } break; } } return buildResult; } /** * Check if there are any build progress available for the given user's project * * @param user the User that owns the {@code projectId}. 
* @param projectId project id to be built * @param target build target (optional, implementation dependent) */ public void updateCurrentProgress(User user, long projectId, String target) { try { String userId = user.getUserId(); String projectName = storageIo.getProjectName(userId, projectId); String outputFileDir = BUILD_FOLDER + '/' + target; URL buildServerUrl = null; ProjectSourceZip zipFile = null; buildServerUrl = new URL(getBuildServerUrlStr(user.getUserEmail(), userId, projectId, outputFileDir)); HttpURLConnection connection = (HttpURLConnection) buildServerUrl.openConnection(); connection.setDoOutput(true); connection.setRequestMethod("POST"); int responseCode = connection.getResponseCode(); if (responseCode == HttpURLConnection.HTTP_OK) { try { String content = readContent(connection.getInputStream()); if (content != null && !content.isEmpty()) { LOG.info("The current progress is " + content + "%."); currentProgress = Integer.parseInt(content); } } catch (IOException e) { // No content. That's ok. } } } catch (MalformedURLException e) { // that's ok, nothing to do } catch (IOException e) { // that's ok, nothing to do } catch (EncryptionException e) { // that's ok, nothing to do } catch (RuntimeException e) { // that's ok, nothing to do } } // Nicely format floating number using only two decimal places private String format(double input) { DecimalFormat formatter = new DecimalFormat("###.##"); return formatter.format(input); } }
/*
 * Zed Attack Proxy (ZAP) and its related class files.
 *
 * ZAP is an HTTP/HTTPS proxy for assessing web application security.
 *
 * Copyright 2014 The ZAP Development Team
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.zaproxy.zap.extension.ascanrulesAlpha;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.parosproxy.paros.Constant;
import org.parosproxy.paros.core.scanner.AbstractAppParamPlugin;
import org.parosproxy.paros.core.scanner.Alert;
import org.parosproxy.paros.core.scanner.Category;
import org.parosproxy.paros.core.scanner.Plugin;
import org.parosproxy.paros.network.HttpBody;
import org.parosproxy.paros.network.HttpMessage;
import org.zaproxy.zap.model.Tech;
import org.zaproxy.zap.model.TechSet;

/**
 * An example active scan rule, for more details see
 * https://www.zaproxy.org/blog/2014-04-30-hacking-zap-4-active-scan-rules/
 *
 * @author psiinon
 */
public class ExampleFileActiveScanRule extends AbstractAppParamPlugin {

    /** Prefix for internationalized messages used by this rule */
    private static final String MESSAGE_PREFIX = "ascanalpha.examplefile.";

    /** Attack strings file, extracted by ZAP under its home directory. */
    private static final String exampleAscanFile = "txt/example-ascan-file.txt";

    /** Lazily loaded attack strings; null until first scan. */
    private List<String> strings = null;

    private static Logger log = LogManager.getLogger(ExampleFileActiveScanRule.class);

    @Override
    public int getId() {
        /*
         * This should be unique across all active and passive rules.
         * The master list is https://github.com/zaproxy/zaproxy/blob/main/docs/scanners.md
         */
        return 60101;
    }

    @Override
    public String getName() {
        return Constant.messages.getString(MESSAGE_PREFIX + "name");
    }

    @Override
    public boolean targets(TechSet technologies) {
        // This method allows the programmer or user to restrict when a
        // scanner is run based on the technologies selected. For example, to restrict the scanner
        // to run just when
        // C language is selected
        return technologies.includes(Tech.C);
    }

    @Override
    public String getDescription() {
        return Constant.messages.getString(MESSAGE_PREFIX + "desc");
    }

    private String getOtherInfo() {
        return Constant.messages.getString(MESSAGE_PREFIX + "other");
    }

    @Override
    public String getSolution() {
        return Constant.messages.getString(MESSAGE_PREFIX + "soln");
    }

    @Override
    public String getReference() {
        return Constant.messages.getString(MESSAGE_PREFIX + "refs");
    }

    @Override
    public int getCategory() {
        return Category.MISC;
    }

    /*
     * This method is called by the active scanner for each GET and POST parameter for every page
     * @see org.parosproxy.paros.core.scanner.AbstractAppParamPlugin#scan(org.parosproxy.paros.network.HttpMessage, java.lang.String, java.lang.String)
     */
    @Override
    public void scan(HttpMessage msg, String param, String value) {
        try {
            if (!Constant.isDevBuild()) {
                // Only run this example scan rule in dev mode
                // Uncomment locally if you want to see these alerts in non dev mode ;)
                return;
            }
            if (this.strings == null) {
                this.strings = loadFile(exampleAscanFile);
            }
            // This is where you change the 'good' request to attack the application
            // You can make multiple requests if needed
            int numAttacks = 0;

            switch (this.getAttackStrength()) {
                case LOW:
                    numAttacks = 6;
                    break;
                case MEDIUM:
                    numAttacks = 12;
                    break;
                case HIGH:
                    numAttacks = 24;
                    break;
                case INSANE:
                    numAttacks = 96;
                    break;
                default:
                    break;
            }

            for (int i = 0; i < numAttacks; i++) {
                if (this.isStop()) {
                    // User has stopped the scan
                    break;
                }
                if (i >= this.strings.size()) {
                    // run out of attack strings
                    break;
                }
                String attack = this.strings.get(i);
                // Always use getNewMsg() for each new request
                HttpMessage testMsg = getNewMsg();
                setParameter(testMsg, param, attack);
                sendAndReceive(testMsg);

                // This is where you detect potential vulnerabilities in the response.
                // BUGFIX: inspect the response to the attack request (testMsg), not the
                // original message (msg) — the original response predates the attack and
                // can never reflect it; the alert below is raised on testMsg as well.
                String evidence;
                if ((evidence = doesResponseContainString(testMsg.getResponseBody(), attack))
                        != null) {
                    // Raise an alert
                    newAlert()
                            .setConfidence(Alert.CONFIDENCE_MEDIUM)
                            .setParam(param)
                            .setAttack(attack)
                            .setOtherInfo(getOtherInfo())
                            .setEvidence(evidence)
                            .setMessage(testMsg)
                            .raise();
                    return;
                }
            }
        } catch (IOException e) {
            log.error(e.getMessage(), e);
        }
    }

    /**
     * Looks for {@code str} in the response body. At HIGH alert threshold the match is
     * case-sensitive; at any other threshold both sides are lower-cased first.
     *
     * @return the matched substring exactly as it appears in the body, or null if absent
     */
    private String doesResponseContainString(HttpBody body, String str) {
        String sBody;
        if (Plugin.AlertThreshold.HIGH.equals(this.getAlertThreshold())) {
            // For a high threshold perform a case exact check
            sBody = body.toString();
        } else {
            // For all other thresholds perform a case ignore check on both sides
            sBody = body.toString().toLowerCase();
            str = str.toLowerCase();
        }
        int start = sBody.indexOf(str);
        if (start >= 0) {
            // Return the original (case exact) string so we can match it in the response
            return body.toString().substring(start, start + str.length());
        }
        return null;
    }

    /**
     * Loads the non-comment, non-empty lines of the given file from the ZAP home directory.
     * Returns an empty list (never null) when the file is missing or unreadable.
     */
    private List<String> loadFile(String file) {
        /*
         * ZAP will have already extracted the file from the add-on and put it underneath the
         * 'ZAP home' directory
         */
        List<String> strings = new ArrayList<>();
        File f = new File(Constant.getZapHome() + File.separator + file);
        if (!f.exists()) {
            log.error("No such file: {}", f.getAbsolutePath());
            return strings;
        }
        // NOTE(review): FileReader uses the platform default charset — presumably the
        // extracted file is ASCII/UTF-8; confirm before switching to an explicit charset.
        try (BufferedReader reader = new BufferedReader(new FileReader(f))) {
            String line;
            while ((line = reader.readLine()) != null) {
                if (!line.startsWith("#") && line.length() > 0) {
                    strings.add(line);
                }
            }
        } catch (IOException e) {
            log.error("Error on opening/reading example error file. Error: {}", e.getMessage(), e);
        }
        return strings;
    }

    @Override
    public int getRisk() {
        return Alert.RISK_HIGH;
    }

    @Override
    public int getCweId() {
        // The CWE id
        return 0;
    }

    @Override
    public int getWascId() {
        // The WASC ID
        return 0;
    }
}
/*
 * Copyright (C) 2015 - present by OpenGamma Inc. and the OpenGamma group of companies
 *
 * Please see distribution for license.
 */
package com.opengamma.strata.basics.date;

import static com.opengamma.strata.collect.Guavate.toImmutableList;
import static com.opengamma.strata.collect.Guavate.toImmutableSet;
import static java.time.temporal.ChronoField.DAY_OF_MONTH;
import static java.time.temporal.ChronoField.DAY_OF_WEEK;
import static java.time.temporal.ChronoField.MONTH_OF_YEAR;

import java.time.DayOfWeek;
import java.time.LocalDate;
import java.time.MonthDay;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeFormatterBuilder;
import java.time.format.DateTimeParseException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Splitter;
import com.google.common.collect.ImmutableMap;
import com.opengamma.strata.basics.ReferenceData;
import com.opengamma.strata.basics.currency.Currency;
import com.opengamma.strata.collect.io.IniFile;
import com.opengamma.strata.collect.io.PropertySet;
import com.opengamma.strata.collect.io.ResourceConfig;
import com.opengamma.strata.collect.io.ResourceLocator;
import com.opengamma.strata.collect.named.NamedLookup;

/**
 * Loads holiday calendar implementations from INI files.
 * <p>
 * These will form the standard holiday calendars available in {@link ReferenceData#standard()}.
 */
final class HolidayCalendarIniLookup
    implements NamedLookup<HolidayCalendar> {

  /**
   * The logger.
   */
  private static final Logger log = Logger.getLogger(HolidayCalendarIniLookup.class.getName());
  /**
   * The singleton instance of the lookup.
   */
  public static final HolidayCalendarIniLookup INSTANCE = new HolidayCalendarIniLookup();

  /**
   * The Weekend key name.
   */
  private static final String WEEKEND_KEY = "Weekend";
  /**
   * The WorkingDays key name.
   */
  private static final String WORKING_DAYS_KEY = "WorkingDays";
  /**
   * The lenient day-of-week parser.
   */
  private static final DateTimeFormatter DOW_PARSER = new DateTimeFormatterBuilder()
      .parseCaseInsensitive()
      .parseLenient()
      .appendText(DAY_OF_WEEK)
      .toFormatter(Locale.ENGLISH);
  /**
   * The lenient month-day parser, accepting an optional dash between month and day.
   */
  private static final DateTimeFormatter DAY_MONTH_PARSER = new DateTimeFormatterBuilder()
      .parseCaseInsensitive()
      .parseLenient()
      .appendText(MONTH_OF_YEAR)
      .appendOptional(new DateTimeFormatterBuilder().appendLiteral('-').toFormatter(Locale.ENGLISH))
      .appendValue(DAY_OF_MONTH)
      .toFormatter(Locale.ENGLISH);

  /**
   * The holiday calendars by name.
   */
  private static final ImmutableMap<String, HolidayCalendar> BY_NAME = loadFromIni("HolidayCalendarData.ini");
  /**
   * The default holiday calendars by currency.
   */
  private static final ImmutableMap<Currency, HolidayCalendarId> BY_CURRENCY =
      loadDefaultsFromIni("HolidayCalendarDefaultData.ini");

  /**
   * Restricted constructor.
   */
  private HolidayCalendarIniLookup() {
  }

  //-------------------------------------------------------------------------
  @Override
  public Map<String, HolidayCalendar> lookupAll() {
    return BY_NAME;
  }

  // finds the default calendar for a currency, failing fast when none is configured
  HolidayCalendarId defaultByCurrency(Currency currency) {
    return findDefaultByCurrency(currency)
        .orElseThrow(() -> new IllegalArgumentException(
            "No default Holiday Calendar for currency " + currency));
  }

  // looks up a default, empty when the currency has no configured calendar
  Optional<HolidayCalendarId> findDefaultByCurrency(Currency currency) {
    return Optional.ofNullable(BY_CURRENCY.get(currency));
  }

  //-------------------------------------------------------------------------
  // loads every INI resource in order; any malformed resource aborts the load
  @VisibleForTesting
  static ImmutableMap<String, HolidayCalendar> loadFromIni(String filename) {
    Map<String, HolidayCalendar> calendarsByName = new HashMap<>();
    for (ResourceLocator resource : ResourceConfig.orderedResources(filename)) {
      try {
        IniFile iniFile = IniFile.of(resource.getCharSource());
        for (String sectionName : iniFile.sections()) {
          HolidayCalendar calendar = parseHolidayCalendar(sectionName, iniFile.section(sectionName));
          // register under the declared name, plus the upper-case alias when free
          calendarsByName.put(calendar.getName(), calendar);
          calendarsByName.putIfAbsent(calendar.getName().toUpperCase(Locale.ENGLISH), calendar);
        }
      } catch (RuntimeException ex) {
        log.log(Level.SEVERE, "Error processing resource as Holiday Calendar INI file: " + resource, ex);
        return ImmutableMap.of();
      }
    }
    return ImmutableMap.copyOf(calendarsByName);
  }

  // parses one INI section into a holiday calendar
  private static HolidayCalendar parseHolidayCalendar(String calendarName, PropertySet section) {
    Set<DayOfWeek> weekendDays = parseWeekends(section.value(WEEKEND_KEY));
    List<LocalDate> holidayDates = new ArrayList<>();
    Set<LocalDate> workingDayOverrides = new HashSet<>();
    for (String key : section.keys()) {
      if (WEEKEND_KEY.equals(key)) {
        continue;  // already handled above
      }
      String value = section.value(key);
      if (WORKING_DAYS_KEY.equals(key)) {
        workingDayOverrides.addAll(parseDates(value));
      } else if (key.length() == 4) {
        // a 4-character key is a year whose value lists that year's holidays
        holidayDates.addAll(parseYearDates(Integer.parseInt(key), value));
      } else {
        // otherwise the key itself is a full ISO date
        holidayDates.add(LocalDate.parse(key));
      }
    }
    // build result
    return ImmutableHolidayCalendar.of(
        HolidayCalendarId.of(calendarName), holidayDates, weekendDays, workingDayOverrides);
  }

  // parse weekend format, such as 'Sat,Sun'
  private static Set<DayOfWeek> parseWeekends(String str) {
    return Splitter.on(',').splitToList(str).stream()
        .map(token -> DOW_PARSER.parse(token, DayOfWeek::from))
        .collect(toImmutableSet());
  }

  // parse year format, such as 'Jan1,Mar12,Dec25' or '2015-01-01,2015-03-12,2015-12-25'
  private static List<LocalDate> parseYearDates(int year, String str) {
    return Splitter.on(',').splitToList(str).stream()
        .map(token -> parseDate(year, token))
        .collect(toImmutableList());
  }

  // parse comma separated date format such as "2015-01-01,2015-03-12"
  private static List<LocalDate> parseDates(String str) {
    List<String> tokens = Splitter.on(',').splitToList(str);
    return tokens.stream()
        .map(LocalDate::parse)
        .collect(toImmutableList());
  }

  // parse a month-day token within the given year, falling back to a full ISO date
  private static LocalDate parseDate(int year, String str) {
    try {
      MonthDay monthDay = MonthDay.parse(str, DAY_MONTH_PARSER);
      return monthDay.atYear(year);
    } catch (DateTimeParseException ex) {
      LocalDate fullDate = LocalDate.parse(str);
      if (fullDate.getYear() != year) {
        throw new IllegalArgumentException("Parsed date had incorrect year: " + str + ", but expected: " + year);
      }
      return fullDate;
    }
  }

  //-------------------------------------------------------------------------
  // loads the per-currency defaults; any malformed resource aborts the load
  @VisibleForTesting
  static ImmutableMap<Currency, HolidayCalendarId> loadDefaultsFromIni(String filename) {
    Map<Currency, HolidayCalendarId> defaults = new HashMap<>();
    for (ResourceLocator resource : ResourceConfig.orderedResources(filename)) {
      try {
        PropertySet section = IniFile.of(resource.getCharSource()).section("defaultByCurrency");
        for (String currencyCode : section.keys()) {
          defaults.put(Currency.of(currencyCode), HolidayCalendarId.of(section.value(currencyCode)));
        }
      } catch (RuntimeException ex) {
        log.log(Level.SEVERE, "Error processing resource as Holiday Calendar Defaults INI file: " + resource, ex);
        return ImmutableMap.of();
      }
    }
    return ImmutableMap.copyOf(defaults);
  }

}
package cz.muni.fi.civ.newohybat.bpmn;

import static org.mockito.Mockito.atLeastOnce;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;

import java.util.HashSet;
import java.util.List;
import java.util.Set;

import org.drools.core.event.DebugProcessEventListener;
import org.junit.Assert;
import org.junit.Test;
import org.kie.api.event.rule.AfterMatchFiredEvent;
import org.kie.api.event.rule.AgendaEventListener;
import org.kie.api.event.rule.DebugAgendaEventListener;
import org.kie.api.event.rule.DebugRuleRuntimeEventListener;
import org.kie.api.runtime.process.ProcessInstance;
import org.kie.api.runtime.rule.FactHandle;
import org.mockito.ArgumentCaptor;

import cz.muni.fi.civ.newohybat.drools.events.AdvanceEvent;
import cz.muni.fi.civ.newohybat.drools.events.TurnEvent;
import cz.muni.fi.civ.newohybat.persistence.facade.dto.AdvanceDTO;
import cz.muni.fi.civ.newohybat.persistence.facade.dto.CityDTO;
import cz.muni.fi.civ.newohybat.persistence.facade.dto.GovernmentDTO;
import cz.muni.fi.civ.newohybat.persistence.facade.dto.PlayerDTO;
import cz.muni.fi.civ.newohybat.persistence.facade.dto.TileDTO;

/**
 * Tests for the "research an advance" business process.
 *
 * <p>The process is driven by {@link TurnEvent} occurrences inserted into the
 * "GameControlStream" entry point of the inherited {@code ksession}
 * (provided by {@link BaseJUnitTest}, not visible here).
 */
public class AdvanceRulesJUnitTest extends BaseJUnitTest {
    /*
     * This test case shows research of an advance by a player.
     * Process is dependent on the TurnEvent occurrences.
     * Scenario: Player with one enabled advance (can research it), starts the research, has sufficient production,
     * process can be completed after single new TurnEvent.
     * Check:
     *   process active before turn, waits for it
     *   process completed after one turn
     *   player can research next advance
     *   player can build improvement/create unit/change government, which was invented by researched advance
     */
    @Test
    public void testWaitForNewTurnToComplete() {
        ksession.addEventListener(new DebugAgendaEventListener());
        ksession.addEventListener(new DebugRuleRuntimeEventListener());
        ksession.addEventListener(new DebugProcessEventListener());
        // Add mock event listener to check which rules fired
        AgendaEventListener ael = mock(AgendaEventListener.class);
        ksession.addEventListener(ael);
        // prepare test data
        GovernmentDTO mercantilism = getGovernment("mercantilism");
        // new player with research points 205, "basicOne" advance reached, "consecutiveOne" to research
        PlayerDTO player = getPlayer(1L, "honza");
        player.setGovernment("democracy");
        // Define advances
        AdvanceDTO basicOne = getAdvance("basicOne", 100);
        basicOne.getEnabledAdvances().add("consecutiveOne");
        // Advance to be researched next; its cost is 100 units, player has enough, should complete after one turn
        AdvanceDTO consecutiveOne = getAdvance("consecutiveOne", 100);
        consecutiveOne.getEnabledAdvances().add("consecutiveTwo");
        consecutiveOne.getEnabledCityImprovements().add("bank");
        consecutiveOne.getEnabledGovernments().add("mercantilism");
        consecutiveOne.getEnabledUnitTypes().add("warClerk");
        // init the advance tree by setting reached and enabled advances manually
        player.getAdvances().add("basicOne");
        player.getEnabledAdvances().add("consecutiveOne");
        player.setResearchRatio(100);
        // create a city of player; the tile's trade production feeds the research
        CityDTO city = getCity(1L, "marefy");
        city.setTradeProduction(205);
        Set<String> improvements = new HashSet<String>();
        city.setImprovements(improvements);
        city.setOwner(player.getId());
        TileDTO cheatTile = new TileDTO();
        cheatTile.setTradeProduction(205);
        cheatTile.setId(1L);
        city.getManagedTiles().add(cheatTile.getId());
        // insert test data as facts
        ksession.insert(cheatTile);
        ksession.insert(basicOne);
        ksession.insert(consecutiveOne);
        ksession.insert(getAdvance("consecutiveTwo", 10));
        ksession.insert(mercantilism);
        FactHandle pH = ksession.insert(player);
        ksession.insert(city);
        // currentAdvance not set, just to prepare data inserted in session
        ksession.fireAllRules();
        player.setResearch(205);
        ksession.getEntryPoint("GameControlStream").insert(new TurnEvent());
        ksession.fireAllRules();
        // begin research
        player.setCurrentAdvance("consecutiveOne");
        ksession.update(pH, player);
        // now it should start the process
        ksession.fireAllRules();
        // get all active processes
        List<ProcessInstance> processes = (List<ProcessInstance>) ksession.getProcessInstances();
        // Catch the afterMatchFired events, which contain the fired rules
        ArgumentCaptor<AfterMatchFiredEvent> aafe = ArgumentCaptor.forClass(AfterMatchFiredEvent.class);
        verify(ael, atLeastOnce()).afterMatchFired(aafe.capture());
        List<String> firedRules = getFiredRules(aafe.getAllValues());
        Assert.assertTrue("Discover Advance rule fired.", firedRules.contains("Discover Advance"));
        // assertEquals reports the actual count on failure, unlike assertTrue(size()==1)
        Assert.assertEquals("One Process Should Be Active", 1, processes.size());
        // get the process
        Long pId = processes.get(0).getId();
        assertProcessInstanceActive(pId, ksession);
        // new TurnEvent occurred
        ksession.getEntryPoint("GameControlStream").insert(new TurnEvent());
        ksession.fireAllRules();
        waitForAsyncCompletion();
        // process should be completed
        assertProcessInstanceCompleted(pId, ksession);
        Assert.assertTrue("Player has reached advance.", player.getAdvances().contains(consecutiveOne.getIdent()));
        Assert.assertTrue("Player can discover advance.", player.getEnabledAdvances().containsAll(consecutiveOne.getEnabledAdvances()));
        Assert.assertTrue("Player can build bank.", city.getEnabledImprovements().contains("bank"));
        Assert.assertTrue("Player can make warClerk.", city.getEnabledUnitTypes().contains("warClerk"));
        Assert.assertTrue("Player can convert to mercantilism.", player.getEnabledGovernments().contains("mercantilism"));
    }

    /*
     * Tests ability to cancel the research before it finishes.
     */
    @Test
    public void testCancel() {
        // Add mock event listener to check which rules fired
        AgendaEventListener ael = mock(AgendaEventListener.class);
        ksession.addEventListener(ael);
        // prepare test data
        GovernmentDTO mercantilism = getGovernment("mercantilism");
        // new player with research points 205, "basicOne" advance reached, "consecutiveOne" to research
        PlayerDTO player = getPlayer(1L, "honza");
        player.setResearch(205);
        // Define advances
        AdvanceDTO basicOne = getAdvance("basicOne", 100);
        basicOne.getEnabledAdvances().add("consecutiveOne");
        // Advance to be researched next; its cost is 100 units, player has enough, should complete after one turn
        AdvanceDTO consecutiveOne = getAdvance("consecutiveOne", 100);
        consecutiveOne.getEnabledAdvances().add("consecutiveTwo");
        consecutiveOne.getEnabledCityImprovements().add("bank");
        consecutiveOne.getEnabledGovernments().add("mercantilism");
        consecutiveOne.getEnabledUnitTypes().add("warClerk");
        // init the advance tree by setting reached and enabled advances manually
        player.getAdvances().add("basicOne");
        player.getEnabledAdvances().add("consecutiveOne");
        // create a city of player
        CityDTO city = getCity(1L, "marefy");
        Set<String> improvements = new HashSet<String>();
        city.setImprovements(improvements);
        city.setOwner(player.getId());
        // insert test data as facts
        ksession.insert(basicOne);
        ksession.insert(consecutiveOne);
        ksession.insert(getAdvance("consecutiveTwo", 10));
        ksession.insert(mercantilism);
        FactHandle pH = ksession.insert(player);
        ksession.insert(city);
        // currentAdvance not set, just to prepare data inserted in session
        ksession.fireAllRules();
        // begin research
        player.setCurrentAdvance("consecutiveOne");
        ksession.update(pH, player);
        // now it should start the process
        ksession.fireAllRules();
        // get all active processes
        List<ProcessInstance> processes = (List<ProcessInstance>) ksession.getProcessInstances();
        // Catch the afterMatchFired events, which contain the fired rules
        ArgumentCaptor<AfterMatchFiredEvent> aafe = ArgumentCaptor.forClass(AfterMatchFiredEvent.class);
        verify(ael, atLeastOnce()).afterMatchFired(aafe.capture());
        List<String> firedRules = getFiredRules(aafe.getAllValues());
        Assert.assertTrue("Discover Advance rule fired.", firedRules.contains("Discover Advance"));
        Assert.assertEquals("One Process Should Be Active", 1, processes.size());
        // get the process
        Long pId = processes.get(0).getId();
        assertProcessInstanceActive(pId, ksession);
        // cancel the research via the dedicated entry point
        ksession.getEntryPoint("ActionCanceledStream").insert(new AdvanceEvent(player.getId()));
        ksession.fireAllRules();
        // process should be completed (aborted)
        assertProcessInstanceCompleted(pId, ksession);
        // new TurnEvent occurred, should not have any effect
        ksession.getEntryPoint("GameControlStream").insert(new TurnEvent());
        ksession.fireAllRules();
        // process should still be completed
        assertProcessInstanceCompleted(pId, ksession);
        Assert.assertTrue("Player has basic advance.", player.getAdvances().contains(basicOne.getIdent()));
        Assert.assertFalse("Player can't discover advance.", player.getEnabledAdvances().containsAll(consecutiveOne.getEnabledAdvances()));
        Assert.assertFalse("Player cannot build bank.", city.getEnabledImprovements().contains("bank"));
        Assert.assertFalse("Player cannot make warClerk.", city.getEnabledUnitTypes().contains("warClerk"));
        Assert.assertFalse("Player cannot convert to mercantilism.", player.getEnabledGovernments().contains("mercantilism"));
    }

    // Pauses briefly so asynchronous process completion can settle before assertions.
    // Replaces the previous auto-generated catch that swallowed InterruptedException:
    // we now restore the interrupt flag and fail fast instead of continuing silently.
    private static void waitForAsyncCompletion() {
        try {
            Thread.sleep(1000);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            throw new IllegalStateException("Interrupted while waiting for process completion", e);
        }
    }

    // Builds a fully-initialised CityDTO so rules never see null numeric fields/collections.
    private static CityDTO getCity(Long id, String name) {
        CityDTO city = new CityDTO();
        city.setId(id);
        city.setName(name);
        city.setResourcesConsumption(0);
        city.setResourcesProduction(0);
        city.setUnitsSupport(0);
        city.setFoodConsumption(0);
        city.setFoodProduction(0);
        city.setFoodStock(0);
        city.setTradeProduction(0);
        city.setPeopleEntertainers(0);
        city.setPeopleScientists(0);
        city.setPeopleTaxmen(0);
        city.setWeLoveDay(false);
        city.setDisorder(false);
        // size is set exactly once (the original set it to 0, then immediately to 1)
        city.setSize(1);
        city.setPeopleHappy(0);
        city.setPeopleContent(0);
        city.setPeopleUnhappy(0);
        city.setImprovements(new HashSet<String>());
        city.setHomeUnits(new HashSet<Long>());
        city.setEnabledUnitTypes(new HashSet<String>());
        city.setEnabledImprovements(new HashSet<String>());
        return city;
    }

    // Builds a PlayerDTO with zeroed ratios and empty advance/government sets.
    private static PlayerDTO getPlayer(Long id, String name) {
        PlayerDTO player = new PlayerDTO();
        player.setId(id);
        player.setName(name);
        player.setLuxuriesRatio(0);
        player.setTaxesRatio(0);
        player.setResearchRatio(0);
        player.setResearch(0);
        player.setResearchSpent(0);
        player.setAdvances(new HashSet<String>());
        player.setEnabledAdvances(new HashSet<String>());
        player.setEnabledGovernments(new HashSet<String>());
        return player;
    }

    // Builds an AdvanceDTO with the given identifier and research cost.
    private static AdvanceDTO getAdvance(String ident, Integer cost) {
        AdvanceDTO advance = new AdvanceDTO();
        advance.setIdent(ident);
        advance.setEnabledAdvances(new HashSet<String>());
        advance.setEnabledCityImprovements(new HashSet<String>());
        advance.setEnabledGovernments(new HashSet<String>());
        advance.setEnabledUnitTypes(new HashSet<String>());
        advance.setCost(cost);
        return advance;
    }

    // Builds a GovernmentDTO with the given identifier.
    private static GovernmentDTO getGovernment(String ident) {
        GovernmentDTO gov = new GovernmentDTO();
        gov.setIdent(ident);
        return gov;
    }
}