gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/* * Copyright 2013 Michael Mackenzie High * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package autumn.lang.compiler.ast.nodes; import autumn.lang.compiler.ast.commons.ConstructList; import autumn.lang.compiler.ast.commons.IAnnotated; import autumn.lang.compiler.ast.commons.IBinaryOperation; import autumn.lang.compiler.ast.commons.IConstruct; import autumn.lang.compiler.ast.commons.IConversionOperation; import autumn.lang.compiler.ast.commons.IDatum; import autumn.lang.compiler.ast.commons.IDirective; import autumn.lang.compiler.ast.commons.IDocumented; import autumn.lang.compiler.ast.commons.IExpression; import autumn.lang.compiler.ast.commons.IRecord; import autumn.lang.compiler.ast.commons.IStatement; import autumn.lang.compiler.ast.commons.IUnaryOperation; import autumn.lang.compiler.ast.literals.BigDecimalLiteral; import autumn.lang.compiler.ast.literals.BigIntegerLiteral; import autumn.lang.compiler.ast.literals.ByteLiteral; import autumn.lang.compiler.ast.literals.CharLiteral; import autumn.lang.compiler.ast.literals.DoubleLiteral; import autumn.lang.compiler.ast.literals.FloatLiteral; import autumn.lang.compiler.ast.literals.IntLiteral; import autumn.lang.compiler.ast.literals.LongLiteral; import autumn.lang.compiler.ast.literals.ShortLiteral; import java.io.File; import java.net.URL; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; import 
java.util.TreeMap;
import java.util.TreeSet;

/**
 * An AST node that represents the definition of an anonymous function.
 *
 * <p>Properties:</p>
 * <ul>
 *   <li><code>variable</code> - the variable that the new lambda will be assigned to.</li>
 *   <li><code>type</code> - the type of the new lambda.</li>
 *   <li><code>body</code> - the expression evaluated whenever the anonymous function is invoked.</li>
 *   <li><code>location</code> - source-location information regarding this construct.</li>
 * </ul>
 *
 * <p>Instances are immutable: every setter returns a modified copy.
 * This file was auto-generated on (Sun May 31 11:54:12 EDT 2015).</p>
 */
@SuppressWarnings("unchecked")
public final class LambdaStatement extends Object implements IStatement
{
    private Variable variable;

    private TypeSpecifier type;

    private IExpression body;

    private SourceLocation location = new SourceLocation();

    /**
     * Setter.
     *
     * @param value is the new value of property <code>variable</code>.
     * @return a copy of this object with property <code>variable</code> set to value.
     */
    public LambdaStatement setVariable(final Variable value)
    {
        final LambdaStatement modified = copy();
        modified.variable = value;
        return modified;
    }

    /**
     * Getter.
     *
     * @return the value of property <code>variable</code>.
     */
    public Variable getVariable()
    {
        return variable;
    }

    /**
     * Setter.
     *
     * @param value is the new value of property <code>type</code>.
     * @return a copy of this object with property <code>type</code> set to value.
     */
    public LambdaStatement setType(final TypeSpecifier value)
    {
        final LambdaStatement modified = copy();
        modified.type = value;
        return modified;
    }

    /**
     * Getter.
     *
     * @return the value of property <code>type</code>.
     */
    public TypeSpecifier getType()
    {
        return type;
    }

    /**
     * Setter.
     *
     * @param value is the new value of property <code>body</code>.
     * @return a copy of this object with property <code>body</code> set to value.
     */
    public LambdaStatement setBody(final IExpression value)
    {
        final LambdaStatement modified = copy();
        modified.body = value;
        return modified;
    }

    /**
     * Getter.
     *
     * @return the value of property <code>body</code>.
     */
    public IExpression getBody()
    {
        return body;
    }

    /**
     * Setter.
     *
     * @param value is the new value of property <code>location</code>.
     * @return a copy of this object with property <code>location</code> set to value.
     */
    public LambdaStatement setLocation(final SourceLocation value)
    {
        final LambdaStatement modified = copy();
        modified.location = value;
        return modified;
    }

    /**
     * Getter.
     *
     * @return the value of property <code>location</code>.
     */
    public SourceLocation getLocation()
    {
        return location;
    }

    /**
     * This method creates a new instance of this class.
     *
     * @param variable is the value for property <code>variable</code>.
     * @param type is the value for property <code>type</code>.
     * @param body is the value for property <code>body</code>.
     * @param location is the value for property <code>location</code>.
     * @return a new instance of this class.
     */
    public static LambdaStatement create(Variable variable, TypeSpecifier type, IExpression body, SourceLocation location)
    {
        // Each setter returns a fresh copy, so chaining builds up the final instance.
        return new LambdaStatement()
                .setVariable(variable)
                .setType(type)
                .setBody(body)
                .setLocation(location);
    }

    /**
     * This method welcomes a visitor that wants to visit this object.
     *
     * @param visitor is the visitor that is visiting this object.
     */
    public void accept(final IAstVisitor visitor)
    {
        visitor.visit(this);
    }

    /**
     * This method creates a shallow copy of this object.
     *
     * @return a shallow copy of this object.
     */
    public LambdaStatement copy()
    {
        final LambdaStatement duplicate = new LambdaStatement();
        duplicate.variable = variable;
        duplicate.type = type;
        duplicate.body = body;
        duplicate.location = location;
        return duplicate;
    }

    /**
     * This method creates a map representation of this struct.
     *
     * <p>
     * Each key is the name of a field.
     * Each value is the result of calling the key field's getter.
     * </p>
     *
     * @return a map containing the entries in this struct.
     */
    public Map<String, Object> toMap()
    {
        final Map<String, Object> map = new TreeMap<String, Object>();
        map.put("variable", getVariable());
        map.put("type", getType());
        map.put("body", getBody());
        map.put("location", getLocation());
        return map;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String toString()
    {
        return toMap().toString();
    }
}
package com.thinkspace.opentalkon;

import java.io.File;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.io.Writer;
import java.util.Date;

import org.json.JSONObject;

import android.content.Context;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager;
import android.content.pm.PackageManager.NameNotFoundException;
import android.os.Environment;
import android.os.StatFs;

import com.thinkspace.common.util.PLEtcUtilMgr;
import com.thinkspace.opentalkon.data.TAMultiData;
import com.thinkspace.opentalkon.satelite.TADataHandler;
import com.thinkspace.opentalkon.satelite.TASatelite;

/**
 * Singleton uncaught-exception handler that snapshots device/application
 * information, builds a textual crash report (info + stack trace + causes),
 * ships it to the server via {@link TASatelite#doReportError}, and then
 * delegates to whatever default handler was installed before
 * {@link #Init(Context)} was called.
 */
public class ErrorReporter implements Thread.UncaughtExceptionHandler {

    // Snapshot of package / build information, filled in by RecoltInformations().
    String VersionName;
    String PackageName;
    String FilePath;
    String PhoneModel;
    String AndroidVersion;
    String Board;
    String Brand;
    // String CPU_ABI;
    String Device;
    String Display;
    String FingerPrint;
    String Host;
    String ID;
    String Manufacturer;
    String Model;
    String Product;
    String Tags;
    long Time;
    String Type;
    String User;

    // Handler that was registered before us; the crash is forwarded to it after reporting.
    private Thread.UncaughtExceptionHandler PreviousHandler;
    private static ErrorReporter S_mInstance;
    private Context CurContext;

    /**
     * Installs this reporter as the process-wide default uncaught-exception
     * handler and collects device/application information.
     *
     * @param context context used to query the package manager; retained for reporting.
     */
    public void Init(Context context) {
        PreviousHandler = Thread.getDefaultUncaughtExceptionHandler();
        Thread.setDefaultUncaughtExceptionHandler(this);
        RecoltInformations(context);
        CurContext = context;
    }

    /**
     * @return free bytes on the internal data partition.
     *         NOTE(review): getBlockSize()/getAvailableBlocks() are deprecated on
     *         API 18+; kept for compatibility with the project's minSdk — confirm.
     */
    public long getAvailableInternalMemorySize() {
        File path = Environment.getDataDirectory();
        StatFs stat = new StatFs(path.getPath());
        long blockSize = stat.getBlockSize();
        long availableBlocks = stat.getAvailableBlocks();
        return availableBlocks * blockSize;
    }

    /**
     * @return total bytes on the internal data partition (same deprecation note
     *         as {@link #getAvailableInternalMemorySize()}).
     */
    public long getTotalInternalMemorySize() {
        File path = Environment.getDataDirectory();
        StatFs stat = new StatFs(path.getPath());
        long blockSize = stat.getBlockSize();
        long totalBlocks = stat.getBlockCount();
        return totalBlocks * blockSize;
    }

    /**
     * Collects package version/name and android.os.Build identifiers into fields.
     *
     * @param context context used to resolve this app's PackageInfo.
     */
    void RecoltInformations(Context context) {
        PackageManager pm = context.getPackageManager();
        try {
            PackageInfo pi;
            // Version
            pi = pm.getPackageInfo(context.getPackageName(), 0);
            VersionName = pi.versionName;
            // Package name
            PackageName = pi.packageName;
            // Files dir for storing the stack traces
            FilePath = context.getFilesDir().getAbsolutePath();
            // Device model
            PhoneModel = android.os.Build.MODEL;
            // Android version
            AndroidVersion = android.os.Build.VERSION.RELEASE;
            Board = android.os.Build.BOARD;
            Brand = android.os.Build.BRAND;
            // CPU_ABI = android.os.Build.;
            Device = android.os.Build.DEVICE;
            Display = android.os.Build.DISPLAY;
            FingerPrint = android.os.Build.FINGERPRINT;
            Host = android.os.Build.HOST;
            ID = android.os.Build.ID;
            // Manufacturer = android.os.Build.;
            Model = android.os.Build.MODEL;
            Product = android.os.Build.PRODUCT;
            Tags = android.os.Build.TAGS;
            Time = android.os.Build.TIME;
            Type = android.os.Build.TYPE;
            User = android.os.Build.USER;
        } catch (NameNotFoundException e) {
            e.printStackTrace();
        }
    }

    /**
     * Builds the human-readable device/application section of the crash report.
     *
     * @return one "Label : value" entry per line.
     */
    public String CreateInformationString() {
        // StringBuilder instead of repeated String += (quadratic copying).
        final StringBuilder info = new StringBuilder();
        info.append("Version : ").append(VersionName).append("\n");
        info.append("Package : ").append(PackageName).append("\n");
        info.append("FilePath : ").append(FilePath).append("\n");
        // FIX: original wrote "Phone Model" with no " : " separator before the value.
        info.append("Phone Model : ").append(PhoneModel).append("\n");
        info.append("Android Version : ").append(AndroidVersion).append("\n");
        info.append("Board : ").append(Board).append("\n");
        info.append("Brand : ").append(Brand).append("\n");
        info.append("Device : ").append(Device).append("\n");
        info.append("Display : ").append(Display).append("\n");
        info.append("Finger Print : ").append(FingerPrint).append("\n");
        info.append("Host : ").append(Host).append("\n");
        info.append("ID : ").append(ID).append("\n");
        info.append("Model : ").append(Model).append("\n");
        info.append("Product : ").append(Product).append("\n");
        info.append("Tags : ").append(Tags).append("\n");
        info.append("Time : ").append(Time).append("\n");
        info.append("Type : ").append(Type).append("\n");
        info.append("User : ").append(User).append("\n");
        info.append("Total Internal memory : ").append(getTotalInternalMemorySize()).append("\n");
        info.append("Available Internal memory : ").append(getAvailableInternalMemorySize()).append("\n");
        return info.toString();
    }

    /**
     * Builds the full crash report, sends it to the server, then forwards the
     * crash to the previously installed handler (if any) so the system's
     * normal crash handling still happens.
     */
    @Override
    public void uncaughtException(Thread t, Throwable e) {
        final StringBuilder report = new StringBuilder();
        Date CurDate = new Date();
        report.append("Error Report collected on : ").append(CurDate.toString()).append("\n\n");
        report.append("Informations :").append("\n");
        report.append("==============").append("\n\n");
        report.append(CreateInformationString()).append("\n\n");
        report.append("Stack : \n");
        report.append("======= \n");

        final Writer result = new StringWriter();
        final PrintWriter printWriter = new PrintWriter(result);
        e.printStackTrace(printWriter);
        report.append(result.toString()).append("\n");

        report.append("Cause : \n");
        report.append("======= \n");
        // If the exception was thrown in a background thread inside
        // AsyncTask, then the actual exception can be found with getCause.
        // FIX: the original appended the *cumulative* writer content on every
        // loop iteration, duplicating earlier traces; use a dedicated writer
        // and append it once after the chain is printed.
        final StringWriter causeResult = new StringWriter();
        final PrintWriter causeWriter = new PrintWriter(causeResult);
        Throwable cause = e.getCause();
        while (cause != null) {
            cause.printStackTrace(causeWriter);
            cause = cause.getCause();
        }
        report.append(causeResult.toString());
        causeWriter.close();
        printWriter.close();

        report.append("**** End of current Report ***");
        SendErrorToServer(this.CurContext, report.toString());

        // FIX: guard against no previously installed handler (NPE otherwise).
        if (PreviousHandler != null) {
            PreviousHandler.uncaughtException(t, e);
        }
    }

    /** @return the process-wide singleton, creating it on first use. */
    static ErrorReporter getInstance() {
        if (S_mInstance == null)
            S_mInstance = new ErrorReporter();
        return S_mInstance;
    }

    /**
     * Uploads the report via a fire-and-forget {@link TASatelite} call; all
     * callbacks are intentionally no-ops because nothing can be done for a
     * failed crash upload.
     */
    private void SendErrorToServer(Context _context, String ErrorContent) {
        String subject = ("Crash Report - Android ErrorReporter");
        String body = subject + "\n\n" + ErrorContent + "\n\n";
        TASatelite satelite = new TASatelite(new TADataHandler() {
            @Override
            public void onHttpPacketReceived(JSONObject data) {}

            @Override
            public void onHttpException(Exception ex, TAMultiData data, String addr) {}

            @Override
            public void onHttpException(Exception ex, JSONObject data, String addr) {}

            @Override
            public void onTokenIsNotValid(JSONObject data) {}

            @Override
            public void onLimitMaxUser(JSONObject data) {}
        });
        String unique_key = PLEtcUtilMgr.getUniqueKeyWithSha();
        satelite.doReportError(unique_key, body);
    }
}
/* * Licensed to ElasticSearch and Shay Banon under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. ElasticSearch licenses this * file to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.index.mapper.core; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.apache.lucene.analysis.tokenattributes.OffsetAttribute; import org.apache.lucene.document.Field; import org.apache.lucene.document.FieldType; import org.apache.lucene.index.FieldInfo.IndexOptions; import org.apache.lucene.search.Filter; import org.elasticsearch.ElasticSearchIllegalArgumentException; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.analysis.NamedCustomAnalyzer; import org.elasticsearch.index.codec.postingsformat.PostingsFormatProvider; import org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.mapper.*; import org.elasticsearch.index.mapper.internal.AllFieldMapper; import 
org.elasticsearch.index.similarity.SimilarityProvider;

import java.io.IOException;
import java.util.Map;

import static org.elasticsearch.index.mapper.MapperBuilders.stringField;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseField;

/**
 * Field mapper for the "string" mapping type. Supports a configurable
 * {@code null_value} replacement, a {@code position_offset_gap} inserted
 * between values of multi-valued fields, an {@code ignore_above} length
 * cut-off, and a dedicated analyzer for quoted (phrase) search.
 */
public class StringFieldMapper extends AbstractFieldMapper<String> implements AllFieldMapper.IncludeInAll {

    public static final String CONTENT_TYPE = "string";

    /** Default values for the string-specific mapping settings. */
    public static class Defaults extends AbstractFieldMapper.Defaults {
        public static final FieldType FIELD_TYPE = new FieldType(AbstractFieldMapper.Defaults.FIELD_TYPE);

        static {
            FIELD_TYPE.freeze();
        }

        // NOTE, when adding defaults here, make sure you add them in the builder
        public static final String NULL_VALUE = null;          // no substitution for null values
        public static final int POSITION_OFFSET_GAP = 0;       // no extra gap between multi-values
        public static final int IGNORE_ABOVE = -1;             // -1 / non-positive disables the length cut-off
    }

    /** Fluent builder that assembles a {@link StringFieldMapper}. */
    public static class Builder extends AbstractFieldMapper.OpenBuilder<Builder, StringFieldMapper> {

        protected String nullValue = Defaults.NULL_VALUE;

        protected int positionOffsetGap = Defaults.POSITION_OFFSET_GAP;

        protected NamedAnalyzer searchQuotedAnalyzer;

        protected int ignoreAbove = Defaults.IGNORE_ABOVE;

        public Builder(String name) {
            super(name, new FieldType(Defaults.FIELD_TYPE));
            builder = this;
        }

        /** Sets the value indexed in place of an explicit JSON null. */
        public Builder nullValue(String nullValue) {
            this.nullValue = nullValue;
            return this;
        }

        @Override
        public Builder includeInAll(Boolean includeInAll) {
            this.includeInAll = includeInAll;
            return this;
        }

        @Override
        public Builder searchAnalyzer(NamedAnalyzer searchAnalyzer) {
            super.searchAnalyzer(searchAnalyzer);
            // The quoted-search analyzer falls back to the plain search
            // analyzer unless one was configured explicitly.
            if (searchQuotedAnalyzer == null) {
                searchQuotedAnalyzer = searchAnalyzer;
            }
            return this;
        }

        public Builder positionOffsetGap(int positionOffsetGap) {
            this.positionOffsetGap = positionOffsetGap;
            return this;
        }

        public Builder searchQuotedAnalyzer(NamedAnalyzer analyzer) {
            this.searchQuotedAnalyzer = analyzer;
            return builder;
        }

        public Builder ignoreAbove(int ignoreAbove) {
            this.ignoreAbove = ignoreAbove;
            return this;
        }

        @Override
        public StringFieldMapper build(BuilderContext context) {
            if (positionOffsetGap > 0) {
                // Wrap all three analyzers so they emit the configured gap
                // between consecutive values of a multi-valued field.
                indexAnalyzer = new NamedCustomAnalyzer(indexAnalyzer, positionOffsetGap);
                searchAnalyzer = new NamedCustomAnalyzer(searchAnalyzer, positionOffsetGap);
                searchQuotedAnalyzer = new NamedCustomAnalyzer(searchQuotedAnalyzer, positionOffsetGap);
            }
            // if the field is not analyzed, then by default, we should omit norms and have docs only
            // index options, as probably what the user really wants
            // if they are set explicitly, we will use those values
            if (fieldType.indexed() && !fieldType.tokenized()) {
                if (!omitNormsSet && boost == Defaults.BOOST) {
                    fieldType.setOmitNorms(true);
                }
                if (!indexOptionsSet) {
                    fieldType.setIndexOptions(IndexOptions.DOCS_ONLY);
                }
            }
            StringFieldMapper fieldMapper = new StringFieldMapper(buildNames(context),
                    boost, fieldType, nullValue, indexAnalyzer, searchAnalyzer, searchQuotedAnalyzer,
                    positionOffsetGap, ignoreAbove, provider, similarity, fieldDataSettings);
            fieldMapper.includeInAll(includeInAll);
            return fieldMapper;
        }
    }

    /** Parses a "string"-type mapping definition from the mapping JSON into a {@link Builder}. */
    public static class TypeParser implements Mapper.TypeParser {
        @Override
        public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
            StringFieldMapper.Builder builder = stringField(name);
            // Shared field settings (store, index, analyzers, ...) first.
            parseField(builder, name, node, parserContext);
            for (Map.Entry<String, Object> entry : node.entrySet()) {
                String propName = Strings.toUnderscoreCase(entry.getKey());
                Object propNode = entry.getValue();
                if (propName.equals("null_value")) {
                    builder.nullValue(propNode.toString());
                } else if (propName.equals("search_quote_analyzer")) {
                    NamedAnalyzer analyzer = parserContext.analysisService().analyzer(propNode.toString());
                    if (analyzer == null) {
                        throw new MapperParsingException("Analyzer [" + propNode.toString() + "] not found for field [" + name + "]");
                    }
                    builder.searchQuotedAnalyzer(analyzer);
                } else if (propName.equals("position_offset_gap")) {
                    builder.positionOffsetGap(XContentMapValues.nodeIntegerValue(propNode, -1));
                    // we need to update to actual analyzers if they are not set in this case...
                    // so we can inject the position offset gap...
                    if (builder.indexAnalyzer == null) {
                        builder.indexAnalyzer = parserContext.analysisService().defaultIndexAnalyzer();
                    }
                    if (builder.searchAnalyzer == null) {
                        builder.searchAnalyzer = parserContext.analysisService().defaultSearchAnalyzer();
                    }
                    if (builder.searchQuotedAnalyzer == null) {
                        builder.searchQuotedAnalyzer = parserContext.analysisService().defaultSearchQuoteAnalyzer();
                    }
                } else if (propName.equals("ignore_above")) {
                    builder.ignoreAbove(XContentMapValues.nodeIntegerValue(propNode, -1));
                }
            }
            return builder;
        }
    }

    private String nullValue;

    private Boolean includeInAll;

    private int positionOffsetGap;

    private NamedAnalyzer searchQuotedAnalyzer;

    private int ignoreAbove;

    protected StringFieldMapper(Names names, float boost, FieldType fieldType,
                                String nullValue, NamedAnalyzer indexAnalyzer, NamedAnalyzer searchAnalyzer,
                                NamedAnalyzer searchQuotedAnalyzer, int positionOffsetGap, int ignoreAbove,
                                PostingsFormatProvider postingsFormat, SimilarityProvider similarity,
                                @Nullable Settings fieldDataSettings) {
        super(names, boost, fieldType, indexAnalyzer, searchAnalyzer, postingsFormat, similarity, fieldDataSettings);
        this.nullValue = nullValue;
        this.positionOffsetGap = positionOffsetGap;
        // Fall back to the regular search analyzer when no quoted-search analyzer was given.
        this.searchQuotedAnalyzer = searchQuotedAnalyzer != null ? searchQuotedAnalyzer : this.searchAnalyzer;
        this.ignoreAbove = ignoreAbove;
    }

    @Override
    public FieldType defaultFieldType() {
        return Defaults.FIELD_TYPE;
    }

    @Override
    public FieldDataType defaultFieldDataType() {
        return new FieldDataType("string");
    }

    /** Only applies a non-null value; a null argument leaves the current setting unchanged. */
    @Override
    public void includeInAll(Boolean includeInAll) {
        if (includeInAll != null) {
            this.includeInAll = includeInAll;
        }
    }

    /** Like {@link #includeInAll(Boolean)} but never overwrites an already-set value. */
    @Override
    public void includeInAllIfNotSet(Boolean includeInAll) {
        if (includeInAll != null && this.includeInAll == null) {
            this.includeInAll = includeInAll;
        }
    }

    @Override
    public String value(Object value) {
        if (value == null) {
            return null;
        }
        return value.toString();
    }

    @Override
    protected boolean customBoost() {
        // Boost can come from the document ("boost"/"_boost" sub-field), see parseCreateField.
        return true;
    }

    public int getPositionOffsetGap() {
        return this.positionOffsetGap;
    }

    @Override
    public Analyzer searchQuoteAnalyzer() {
        return this.searchQuotedAnalyzer;
    }

    /** @return a filter matching documents whose value was the configured null substitute, or null when none is configured. */
    @Override
    public Filter nullValueFilter() {
        if (nullValue == null) {
            return null;
        }
        return termFilter(nullValue, null);
    }

    /**
     * Extracts the string value (and optional per-document boost) from the
     * parse context and builds the Lucene field to index, or returns null
     * when nothing should be indexed (null value, over ignore_above, or the
     * field is neither indexed nor stored).
     */
    @Override
    protected Field parseCreateField(ParseContext context) throws IOException {
        String value = nullValue;
        float boost = this.boost;
        if (context.externalValueSet()) {
            value = (String) context.externalValue();
        } else {
            XContentParser parser = context.parser();
            if (parser.currentToken() == XContentParser.Token.VALUE_NULL) {
                value = nullValue;
            } else if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
                // Object form: { "value": ..., "boost": ... } (also accepts "_value"/"_boost").
                XContentParser.Token token;
                String currentFieldName = null;
                while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                    if (token == XContentParser.Token.FIELD_NAME) {
                        currentFieldName = parser.currentName();
                    } else {
                        if ("value".equals(currentFieldName) || "_value".equals(currentFieldName)) {
                            value = parser.textOrNull();
                        } else if ("boost".equals(currentFieldName) || "_boost".equals(currentFieldName)) {
                            boost = parser.floatValue();
                        } else {
                            throw new ElasticSearchIllegalArgumentException("unknown property [" + currentFieldName + "]");
                        }
                    }
                }
            } else {
                value = parser.textOrNull();
            }
        }
        if (value == null) {
            return null;
        }
        if (ignoreAbove > 0 && value.length() > ignoreAbove) {
            // Value exceeds the configured length cut-off: skip it entirely.
            return null;
        }
        if (context.includeInAll(includeInAll, this)) {
            context.allEntries().addText(names.fullName(), value, boost);
        }
        if (!fieldType().indexed() && !fieldType().stored()) {
            // Nothing to put in the Lucene index, but remember the value so
            // other mappers (e.g. _source-based features) can still see it.
            context.ignoredValue(names.indexName(), value);
            return null;
        }
        Field field = new StringField(names.indexName(), value, fieldType);
        field.setBoost(boost);
        return field;
    }

    @Override
    protected String contentType() {
        return CONTENT_TYPE;
    }

    /** Merges the string-specific settings from another mapper of the same class (no-op in simulate mode). */
    @Override
    public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
        super.merge(mergeWith, mergeContext);
        if (!this.getClass().equals(mergeWith.getClass())) {
            return;
        }
        if (!mergeContext.mergeFlags().simulate()) {
            this.includeInAll = ((StringFieldMapper) mergeWith).includeInAll;
            this.nullValue = ((StringFieldMapper) mergeWith).nullValue;
            this.ignoreAbove = ((StringFieldMapper) mergeWith).ignoreAbove;
        }
    }

    /** Serializes only the settings that differ from their defaults. */
    @Override
    protected void doXContentBody(XContentBuilder builder) throws IOException {
        super.doXContentBody(builder);
        if (nullValue != null) {
            builder.field("null_value", nullValue);
        }
        if (includeInAll != null) {
            builder.field("include_in_all", includeInAll);
        }
        if (positionOffsetGap != Defaults.POSITION_OFFSET_GAP) {
            builder.field("position_offset_gap", positionOffsetGap);
        }
        if (searchQuotedAnalyzer != null && searchAnalyzer != searchQuotedAnalyzer) {
            builder.field("search_quote_analyzer", searchQuotedAnalyzer.name());
        }
        if (ignoreAbove != Defaults.IGNORE_ABOVE) {
            builder.field("ignore_above", ignoreAbove);
        }
    }

    /**
     * Extension of {@link Field} supporting reuse of a cached TokenStream for not-tokenized values.
     */
    static class StringField extends Field {

        public StringField(String name, String value, FieldType fieldType) {
            super(name, value, fieldType);
        }

        @Override
        public TokenStream tokenStream(Analyzer analyzer) throws IOException {
            if (!fieldType().indexed()) {
                return null;
            }
            // Only use the cached TokenStream if the value is indexed and not-tokenized
            if (fieldType().tokenized()) {
                return super.tokenStream(analyzer);
            }
            return NOT_ANALYZED_TOKENSTREAM.get().setValue((String) fieldsData);
        }
    }

    // Per-thread reusable single-token stream for not-analyzed values.
    private static final ThreadLocal<StringTokenStream> NOT_ANALYZED_TOKENSTREAM = new ThreadLocal<StringTokenStream>() {
        @Override
        protected StringTokenStream initialValue() {
            return new StringTokenStream();
        }
    };

    // Copied from Field.java
    static final class StringTokenStream extends TokenStream {
        private final CharTermAttribute termAttribute = addAttribute(CharTermAttribute.class);
        private final OffsetAttribute offsetAttribute = addAttribute(OffsetAttribute.class);
        private boolean used = false;
        private String value = null;

        /**
         * Creates a new TokenStream that returns a String as single token.
         * <p>Warning: Does not initialize the value, you must call
         * {@link #setValue(String)} afterwards!
         */
        StringTokenStream() {
        }

        /**
         * Sets the string value.
         */
        StringTokenStream setValue(String value) {
            this.value = value;
            return this;
        }

        @Override
        public boolean incrementToken() {
            if (used) {
                return false;
            }
            clearAttributes();
            termAttribute.append(value);
            offsetAttribute.setOffset(0, value.length());
            used = true;
            return true;
        }

        @Override
        public void end() {
            final int finalOffset = value.length();
            offsetAttribute.setOffset(finalOffset, finalOffset);
            value = null;
        }

        @Override
        public void reset() {
            used = false;
        }

        @Override
        public void close() {
            value = null;
        }
    }
}
package container;

import java.util.Random;

/**
 * This class contains several methods for manipulating arrays: four
 * comparison sorts, a merge sort, a randomized quick sort, a string
 * renderer, and two dynamic-programming subsequence queries.
 *
 * <p>NOTE: {@code mergeSort} and {@code quickSort} use static scratch
 * state ({@code aux}, {@code r}) and are therefore not thread-safe.</p>
 */
@SuppressWarnings("unchecked")
public class Arrays {

    /** Swaps the elements at indices {@code i} and {@code j} of {@code a}. */
    private static void swap(Comparable[] a, int i, int j) {
        Comparable tmp = a[i];
        a[i] = a[j];
        a[j] = tmp;
    }

    /**
     * Sort the specific comparable array into ascending order using selection sort.
     * O(n^2) comparisons, O(n) swaps.
     * @param a The array to be sorted.
     */
    public static void selectionSort(Comparable[] a) {
        for (int i = 0, n = a.length; i < n; i++) {
            int minIndex = i;
            for (int j = i + 1; j < n; j++) {
                if (a[j].compareTo(a[minIndex]) < 0) {
                    minIndex = j;
                }
            }
            swap(a, i, minIndex);
        }
    }

    /**
     * Sort the specific comparable array into ascending order using bubble sort.
     * @param a The array to be sorted.
     */
    public static void bubbleSort(Comparable[] a) {
        for (int j = a.length - 1; j > 0; j--) {
            for (int i = 0; i < j; i++) {
                if (a[i].compareTo(a[i + 1]) > 0) {
                    swap(a, i, i + 1);
                }
            }
        }
    }

    /**
     * Sort the specific comparable array into ascending order using insertion sort.
     * @param a The array to be sorted.
     */
    public static void insertionSort(Comparable[] a) {
        for (int i = 1, n = a.length; i < n; i++) {
            int j = i - 1;
            Comparable insert = a[i];
            // FIX: strict comparison (< 0) instead of <= 0 — equal elements are
            // no longer shifted past each other, which makes the sort stable.
            while (j >= 0 && insert.compareTo(a[j]) < 0) {
                a[j + 1] = a[j];
                j--;
            }
            a[j + 1] = insert;
        }
    }

    // Scratch buffer shared by the recursive merge sort (not thread-safe).
    private static Comparable[] aux;

    /**
     * Sort the specific comparable array into ascending order using merge sort.
     * @param a The array to be sorted.
     */
    public static void mergeSort(Comparable[] a) {
        aux = new Comparable[a.length];
        mergeSort(a, 0, a.length - 1);
    }

    /** Recursively sorts a[fromIndex..toIndex] (inclusive bounds). */
    private static void mergeSort(Comparable[] a, int fromIndex, int toIndex) {
        if (fromIndex >= toIndex) {
            return;
        }
        // Overflow-safe midpoint.
        int midIndex = fromIndex + (toIndex - fromIndex) / 2;
        mergeSort(a, fromIndex, midIndex);
        mergeSort(a, midIndex + 1, toIndex);
        merge(a, fromIndex, midIndex, toIndex);
    }

    /** Merges the two sorted halves a[fromIndex..midIndex] and a[midIndex+1..toIndex]. */
    private static void merge(Comparable[] a, int fromIndex, int midIndex, int toIndex) {
        int i = fromIndex;
        int j = midIndex + 1;
        // shallow copy of array
        for (int k = fromIndex; k <= toIndex; k++) {
            aux[k] = a[k];
        }
        // merge to the original array; <= on equal keys keeps the merge stable.
        for (int k = fromIndex; k <= toIndex; k++) {
            if (i > midIndex) {
                a[k] = aux[j++];
            } else if (j > toIndex) {
                a[k] = aux[i++];
            } else if (aux[i].compareTo(aux[j]) <= 0) {
                a[k] = aux[i++];
            } else {
                a[k] = aux[j++];
            }
        }
    }

    // Random source for pivot selection (not thread-safe).
    private static Random r;

    /**
     * Sort the specific array into ascending order using quick sort
     * with a uniformly random pivot.
     * @param a The array to be sorted.
     */
    public static void quickSort(Comparable[] a) {
        r = new Random();
        quickSort(a, 0, a.length - 1);
    }

    /** Recursively sorts a[fromIndex..toIndex] (inclusive bounds). */
    private static void quickSort(Comparable[] a, int fromIndex, int toIndex) {
        if (toIndex > fromIndex) {
            int pivotIndex = partition(a, fromIndex, toIndex);
            quickSort(a, fromIndex, pivotIndex - 1);
            quickSort(a, pivotIndex + 1, toIndex);
        }
    }

    /**
     * Lomuto partition around a randomly chosen pivot.
     * @return the final index of the pivot.
     */
    private static int partition(Comparable[] a, int fromIndex, int toIndex) {
        // choose the pivot randomly and park it at toIndex.
        int rIndex = fromIndex + r.nextInt(toIndex - fromIndex + 1);
        swap(a, rIndex, toIndex);
        // partition in place.
        Comparable pivot = a[toIndex];
        int p = fromIndex; // next slot for an element <= pivot
        for (int i = fromIndex; i <= toIndex; i++) {
            // item left of the pivot is always no larger than pivot
            if (a[i].compareTo(pivot) <= 0) {
                swap(a, p, i);
                p++;
            }
        }
        // The pivot itself was counted on the last iteration, so it sits at p-1.
        return p - 1;
    }

    /**
     * Returns a string representation of the elements in the array,
     * e.g. {@code "[ 1 2 3 ]"}.
     * @param a The array whose string representation returns
     * @return String representation of a.
     */
    public static String toString(Object[] a) {
        if (a == null) {
            return "null";
        }
        StringBuilder builder = new StringBuilder(a.length * 2 + 3);
        builder.append("[ ");
        for (int i = 0, n = a.length; i < n; i++) {
            // FIX: append(Object) renders null elements as "null" instead of throwing NPE.
            builder.append(a[i]);
            builder.append(" ");
        }
        builder.append("]");
        return builder.toString();
    }

    /**
     * Find the longest strictly increasing subsequence (elements need not be
     * contiguous, despite the method's historical name). O(n^2) DP.
     * @param a The given array
     * @return The elements of the longest increasing subsequence, in order.
     */
    public static Comparable[] longestIncreasingSubarray(Comparable[] a) {
        int N = a.length;
        // FIX: guard the empty array (the DP seeding below would otherwise
        // index lis[0] and throw ArrayIndexOutOfBoundsException).
        if (N == 0) {
            return new Comparable[0];
        }
        int[] lis = new int[N];        // lis[i] = length of the best subsequence ending at a[i]
        int[] lastIndex = new int[N];  // predecessor index in that subsequence, -1 if none
        for (int i = 0; i < N; i++) {
            lastIndex[i] = -1;
        }
        lis[0] = 1;
        for (int i = 1; i < N; i++) {
            lis[i] = 1;
            for (int j = 0; j < i; j++) {
                if (a[i].compareTo(a[j]) > 0 && lis[j] + 1 > lis[i]) {
                    lis[i] = lis[j] + 1;
                    lastIndex[i] = j;
                }
            }
        }
        int max = 0;
        int maxIndex = 0;
        for (int i = 0; i < N; i++) {
            if (lis[i] > max) {
                max = lis[i];
                maxIndex = i;
            }
        }
        Comparable[] res = new Comparable[max];
        // FIX: the original loop condition (i > 0) dropped the element at
        // index 0, leaving res[0] == null whenever the subsequence started
        // there (e.g. a strictly increasing input). Walk until the -1 sentinel.
        int i = maxIndex;
        while (i >= 0) {
            res[--max] = a[i];
            i = lastIndex[i];
        }
        return res;
    }

    /**
     * Find the length of the longest bitonic subsequence (increasing then
     * decreasing; elements need not be contiguous). O(n^2) DP.
     * @param a The given array
     * @return The length of the longest bitonic subsequence (0 for an empty array).
     */
    public static int longestBitonicSubarray(Comparable[] a) {
        int N = a.length;
        // FIX: guard the empty array (lis[0]/lds[N-1] would otherwise throw).
        if (N == 0) {
            return 0;
        }
        int[] lis = new int[N]; // longest increasing subsequence ending with a[i]
        int[] lds = new int[N]; // longest decreasing subsequence beginning with a[i]
        lis[0] = 1;
        for (int i = 1; i < N; i++) {
            lis[i] = 1;
            for (int j = 0; j < i; j++) {
                if (a[i].compareTo(a[j]) > 0) {
                    lis[i] = Math.max(lis[i], lis[j] + 1);
                }
            }
        }
        lds[N - 1] = 1;
        for (int i = N - 2; i >= 0; i--) {
            lds[i] = 1;
            for (int j = N - 1; j > i; j--) {
                if (a[i].compareTo(a[j]) > 0) {
                    lds[i] = Math.max(lds[i], lds[j] + 1);
                }
            }
        }
        int len = 0;
        for (int i = 0; i < N; i++) {
            // a[i] is counted in both halves, hence the -1.
            if (lis[i] + lds[i] - 1 > len) {
                len = lis[i] + lds[i] - 1;
            }
        }
        return len;
    }
}
package org.ovirt.mobile.movirt.sync;

import android.accounts.Account;
import android.app.PendingIntent;
import android.content.AbstractThreadedSyncAdapter;
import android.content.ContentProviderClient;
import android.content.Context;
import android.content.Intent;
import android.content.SyncResult;
import android.database.Cursor;
import android.os.Bundle;
import android.os.RemoteException;
import android.util.Log;

import org.androidannotations.annotations.Bean;
import org.androidannotations.annotations.EBean;
import org.androidannotations.annotations.RootContext;
import org.ovirt.mobile.movirt.Broadcasts;
import org.ovirt.mobile.movirt.auth.MovirtAuthenticator;
import org.ovirt.mobile.movirt.facade.EntityFacade;
import org.ovirt.mobile.movirt.facade.EntityFacadeLocator;
import org.ovirt.mobile.movirt.model.Cluster;
import org.ovirt.mobile.movirt.model.DataCenter;
import org.ovirt.mobile.movirt.model.EntityMapper;
import org.ovirt.mobile.movirt.model.Host;
import org.ovirt.mobile.movirt.model.OVirtEntity;
import org.ovirt.mobile.movirt.model.StorageDomain;
import org.ovirt.mobile.movirt.model.Vm;
import org.ovirt.mobile.movirt.model.trigger.Trigger;
import org.ovirt.mobile.movirt.provider.ProviderFacade;
import org.ovirt.mobile.movirt.rest.OVirtClient;
import org.ovirt.mobile.movirt.util.NotificationHelper;

import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Sync adapter that pulls oVirt entities (VMs, hosts, clusters, data centers,
 * storage domains) via {@link OVirtClient} and reconciles them with the local
 * content provider in a single batch, firing trigger notifications when an
 * entity's state change satisfies a trigger condition.
 */
@EBean
public class SyncAdapter extends AbstractThreadedSyncAdapter {

    private static final String TAG = SyncAdapter.class.getSimpleName();

    // Read by other components to show sync progress; written in sendSyncIntent().
    public static volatile boolean inSync = false;

    @RootContext
    Context context;

    @Bean
    OVirtClient oVirtClient;

    @Bean
    ProviderFacade provider;

    @Bean
    EntityFacadeLocator entityFacadeLocator;

    @Bean
    EventsHandler eventsHandler;

    @Bean
    MovirtAuthenticator authenticator;

    @Bean
    NotificationHelper notificationHelper;

    /**
     * access to the {@code batch} field should be always under synchronized(this)
     */
    ProviderFacade.BatchBuilder batch;

    public SyncAdapter(Context context) {
        super(context, true);
    }

    // Builds an id -> entity lookup map for reconciling remote data against local rows.
    private static <E extends OVirtEntity> Map<String, E> groupEntitiesById(List<E> entities) {
        Map<String, E> entityMap = new HashMap<>();
        for (E entity : entities) {
            entityMap.put(entity.getId(), entity);
        }
        return entityMap;
    }

    @Override
    public void onPerformSync(Account account, Bundle extras, String authority, ContentProviderClient providerClient, SyncResult syncResult) {
        doPerformSync(true);
    }

    /**
     * Runs a full synchronization unless one is already in progress or no
     * account is configured. Broadcasts {@code CONNECTION_FAILURE} on error.
     *
     * @param tryEvents whether to also refresh events after entities are updated
     */
    public synchronized void doPerformSync(boolean tryEvents) {
        if (inSync) {
            return;
        }
        if (!authenticator.accountConfigured()) {
            Log.d(TAG, "Account not configured, not performing sync");
            return;
        }
        sendSyncIntent(true);
        try {
            // split to two methods so at least the quick entities can be already shown / used until the slow ones get processed (better ux)
            updateAll(tryEvents);
        } catch (Exception e) {
            // NOTE(review): if updateAll() throws synchronously, inSync stays true,
            // because sendSyncIntent(false) only runs inside updateAll()'s after()
            // callback — confirm OVirtClient invokes after() even on failure.
            Log.e(TAG, "Error updating data", e);
            Intent intent = new Intent(Broadcasts.CONNECTION_FAILURE);
            intent.putExtra(Broadcasts.Extras.CONNECTION_FAILURE_REASON, e.getMessage());
            context.sendBroadcast(intent);
        }
    }

    /** Fetches a single VM by id and reconciles the local copy, evaluating its triggers. */
    public synchronized void syncVm(final String id, final OVirtClient.Response<Vm> response) {
        initBatch();
        oVirtClient.getVm(id, new OVirtClient.CompositeResponse<>(new OVirtClient.SimpleResponse<Vm>() {
            @Override
            public void onResponse(Vm vm) throws RemoteException {
                final EntityFacade<Vm> entityFacade = entityFacadeLocator.getFacade(Vm.class);
                Collection<Trigger<Vm>> allTriggers = entityFacade.getAllTriggers();
                updateLocalEntity(vm, Vm.class, allTriggers);
                applyBatch();
            }
        }, response));
    }

    /** Fetches a single host by id and reconciles the local copy, evaluating its triggers. */
    public synchronized void syncHost(String id, OVirtClient.Response<Host> response) {
        initBatch();
        oVirtClient.getHost(id, new OVirtClient.CompositeResponse<>(new OVirtClient.SimpleResponse<Host>() {
            @Override
            public void onResponse(Host host) throws RemoteException {
                final EntityFacade<Host> entityFacade = entityFacadeLocator.getFacade(Host.class);
                Collection<Trigger<Host>> allTriggers = entityFacade.getAllTriggers();
                updateLocalEntity(host, Host.class, allTriggers);
                applyBatch();
            }
        }, response));
    }

    // Fetches every entity collection (each request nested in the previous
    // one's callback) and applies all updates to the provider in one batch.
    private void updateAll(final boolean tryEvents) throws RemoteException {
        initBatch();
        // TODO: we really need promises here
        // TODO: ideally split each request and save vms, hosts, ... in separate batches
        oVirtClient.getVms(new OVirtClient.SimpleResponse<List<Vm>>() {
            @Override
            public void onResponse(final List<Vm> remoteVms) throws RemoteException {
                oVirtClient.getClusters(new OVirtClient.SimpleResponse<List<Cluster>>() {
                    @Override
                    public void onResponse(final List<Cluster> remoteClusters) throws RemoteException {
                        oVirtClient.getHosts(new OVirtClient.SimpleResponse<List<Host>>() {
                            @Override
                            public void onResponse(final List<Host> remoteHosts) throws RemoteException {
                                oVirtClient.getDataCenters(new OVirtClient.SimpleResponse<List<DataCenter>>() {
                                    @Override
                                    public void onResponse(final List<DataCenter> remoteDataCenters) throws RemoteException {
                                        oVirtClient.getStorageDomains(new OVirtClient.SimpleResponse<List<StorageDomain>>(){
                                            @Override
                                            public void onResponse(final List<StorageDomain> remoteStorageDomains) throws RemoteException {
                                                // All collections fetched — reconcile each type, then commit once.
                                                updateLocalEntities(remoteClusters, Cluster.class);
                                                updateLocalEntities(remoteHosts, Host.class);
                                                updateLocalEntities(remoteVms, Vm.class);
                                                updateLocalEntities(remoteDataCenters, DataCenter.class);
                                                updateLocalEntities(remoteStorageDomains, StorageDomain.class);
                                                applyBatch();
                                                if (tryEvents) {
                                                    eventsHandler.updateEvents(false);
                                                }
                                            }
                                        });
                                    }
                                });
                            }
                        });
                    }
                });
            }

            @Override
            public void after() {
                // Clears the syncing flag once the outermost request completes.
                sendSyncIntent(false);
            }
        });
    }

    // Starts a fresh batch of provider operations.
    private void initBatch() {
        batch = provider.batch();
    }

    // Applies the accumulated batch, skipping the provider round-trip when empty.
    private void applyBatch() {
        if (batch.isEmpty()) {
            Log.i(TAG, "No updates necessary");
        } else {
            Log.i(TAG, "Applying batch update");
            batch.apply();
        }
    }

    /**
     * Reconciles one entity type against the remote snapshot: deletes local
     * rows missing remotely, updates changed rows (evaluating triggers), and
     * inserts rows that exist only remotely. All operations are queued on
     * {@code batch}; nothing is committed here.
     */
    private <E extends OVirtEntity> void updateLocalEntities(List<E> remoteEntities, Class<E> clazz) throws RemoteException {
        final Map<String, E> entityMap = groupEntitiesById(remoteEntities);
        final EntityMapper<E> mapper = EntityMapper.forEntity(clazz);
        final EntityFacade<E> entityFacade = entityFacadeLocator.getFacade(clazz);
        Collection<Trigger<E>> allTriggers = new ArrayList<>();
        if (entityFacade != null) {
            allTriggers = entityFacade.getAllTriggers();
        }
        final Cursor cursor = provider.query(clazz).asCursor();
        if (cursor == null) {
            return;
        }
        // NOTE(review): cursor is not closed if mapper.fromCursor throws —
        // consider try/finally here.
        while (cursor.moveToNext()) {
            E localEntity = mapper.fromCursor(cursor);
            E remoteEntity = entityMap.get(localEntity.getId());
            if (remoteEntity == null) {
                // local entity obsolete, schedule delete from db
                Log.i(TAG, "Scheduling delete for URI" + localEntity.getUri());
                batch.delete(localEntity);
            } else {
                // existing entity, update stats if changed
                entityMap.remove(localEntity.getId());
                checkEntityChanged(localEntity, remoteEntity, entityFacade, allTriggers);
            }
        }
        cursor.close();
        // Whatever remains in the map has no local counterpart — insert it.
        for (E entity : entityMap.values()) {
            Log.i(TAG, "Scheduling insert for entity: id = " + entity.getId());
            batch.insert(entity);
        }
    }

    // Queues an insert (no local row) or a change-check/update for one entity
    // fetched by syncVm/syncHost.
    private <E extends OVirtEntity> void updateLocalEntity(E remoteEntity, Class<E> clazz, Collection<Trigger<E>> allTriggers) {
        final EntityFacade<E> triggerResolver = entityFacadeLocator.getFacade(clazz);
        Collection<E> localEntities = provider.query(clazz).id(remoteEntity.getId()).all();
        if (localEntities.isEmpty()) {
            Log.i(TAG, "Scheduling insert for entity: id = " + remoteEntity.getId());
            batch.insert(remoteEntity);
        } else {
            E localEntity = localEntities.iterator().next();
            checkEntityChanged(localEntity, remoteEntity, triggerResolver, allTriggers);
        }
    }

    // If the remote copy differs from the local one, processes triggers first
    // (against the old state) and then queues the update.
    private <E extends OVirtEntity> void checkEntityChanged(E localEntity, E remoteEntity, EntityFacade<E> entityFacade, Collection<Trigger<E>> allTriggers) {
        if (!localEntity.equals(remoteEntity)) {
            if (entityFacade != null) {
                final List<Trigger<E>> triggers = entityFacade.getTriggers(localEntity, allTriggers);
                processEntityTriggers(triggers, localEntity, remoteEntity, entityFacade);
            }
            Log.i(TAG, "Scheduling update for URI: " + localEntity.getUri());
            batch.update(remoteEntity);
        }
    }

    // Fires a notification for each trigger whose condition newly became true:
    // false on the local snapshot, true on the remote one.
    private <E extends OVirtEntity> void processEntityTriggers(List<Trigger<E>> triggers, E localEntity, E remoteEntity, EntityFacade<E> entityFacade) {
        Log.i(TAG, "Processing triggers for entity: " + remoteEntity.getId());
        for (Trigger<E> trigger : triggers) {
            if (!trigger.getCondition().evaluate(localEntity) && trigger.getCondition().evaluate(remoteEntity)) {
                displayNotification(trigger, remoteEntity, entityFacade);
            }
        }
    }

    // Shows a trigger notification whose tap opens the entity's detail intent.
    private <E extends OVirtEntity> void displayNotification(Trigger<E> trigger, E entity, EntityFacade<E> entityFacade) {
        final Context appContext = getContext().getApplicationContext();
        final Intent intent = entityFacade.getDetailIntent(entity, appContext);
        intent.setData(entity.getUri());
        notificationHelper.showTriggerNotification(
                trigger, entity, appContext, PendingIntent.getActivity(appContext, 0, intent, 0)
        );
    }

    // Updates the shared inSync flag and broadcasts the change to listeners.
    private void sendSyncIntent(boolean syncing) {
        inSync = syncing;
        Intent intent = new Intent(Broadcasts.IN_SYNC);
        intent.putExtra(Broadcasts.Extras.SYNCING, syncing);
        context.sendBroadcast(intent);
    }
}
/*
 * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.costexplorer.model;

import java.io.Serializable;

import javax.annotation.Generated;

import com.amazonaws.AmazonWebServiceRequest;

/**
 * Request object for retrieving Reserved Instance (RI) utilization data.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/ce-2017-10-25/GetReservationUtilization" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class GetReservationUtilizationRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /**
     * Start (inclusive) and end (exclusive) dates for retrieving RI utilization. For example, a start of
     * <code>2017-01-01</code> and an end of <code>2017-05-01</code> covers <code>2017-01-01</code> through
     * <code>2017-04-30</code> but not <code>2017-05-01</code>.
     */
    private DateInterval timePeriod;

    /** Groups only by <code>SUBSCRIPTION_ID</code>. Metadata is included. */
    private java.util.List<GroupDefinition> groupBy;

    /**
     * Data granularity, either <code>DAILY</code> or <code>MONTHLY</code> (the only values this operation supports).
     * Cannot be set together with <code>GroupBy</code>; when neither is set, the operation defaults to
     * <code>DAILY</code>.
     */
    private String granularity;

    /**
     * Filters utilization data by dimension: AZ, CACHE_ENGINE, DATABASE_ENGINE, DEPLOYMENT_OPTION, INSTANCE_TYPE,
     * LINKED_ACCOUNT, OPERATING_SYSTEM, PLATFORM, REGION, SERVICE, SCOPE, or TENANCY. Uses the same
     * <a href="http://docs.aws.amazon.com/aws-cost-management/latest/APIReference/API_Expression.html">Expression</a>
     * object as the other operations, but only <code>AND</code> is supported among dimensions, nesting may be at most
     * one level deep, and multiple values for one dimension are OR'd together.
     */
    private Expression filter;

    /**
     * Pagination token supplied by AWS when a previous response had more results than the maximum page size.
     */
    private String nextPageToken;

    /**
     * Sets the time period (start inclusive, end exclusive) for retrieving RI utilization.
     *
     * @param timePeriod the date interval to query
     */
    public void setTimePeriod(DateInterval timePeriod) {
        this.timePeriod = timePeriod;
    }

    /**
     * Returns the time period (start inclusive, end exclusive) for retrieving RI utilization.
     *
     * @return the configured date interval, or {@code null} if unset
     */
    public DateInterval getTimePeriod() {
        return this.timePeriod;
    }

    /**
     * Fluent variant of {@link #setTimePeriod(DateInterval)}.
     *
     * @param timePeriod the date interval to query
     * @return this request, for method chaining
     */
    public GetReservationUtilizationRequest withTimePeriod(DateInterval timePeriod) {
        this.timePeriod = timePeriod;
        return this;
    }

    /**
     * Returns the grouping definitions (grouping is by <code>SUBSCRIPTION_ID</code> only).
     *
     * @return the configured groupings, or {@code null} if none were set
     */
    public java.util.List<GroupDefinition> getGroupBy() {
        return groupBy;
    }

    /**
     * Sets the grouping definitions, copying the supplied collection.
     *
     * @param groupBy the groupings to apply, or {@code null} to clear them
     */
    public void setGroupBy(java.util.Collection<GroupDefinition> groupBy) {
        this.groupBy = (groupBy == null) ? null : new java.util.ArrayList<GroupDefinition>(groupBy);
    }

    /**
     * Appends grouping definitions to any already present. Use {@link #setGroupBy(java.util.Collection)} or
     * {@link #withGroupBy(java.util.Collection)} to replace the existing values instead.
     *
     * @param groupBy the groupings to append
     * @return this request, for method chaining
     */
    public GetReservationUtilizationRequest withGroupBy(GroupDefinition... groupBy) {
        if (this.groupBy == null) {
            this.groupBy = new java.util.ArrayList<GroupDefinition>(groupBy.length);
        }
        java.util.Collections.addAll(this.groupBy, groupBy);
        return this;
    }

    /**
     * Fluent variant of {@link #setGroupBy(java.util.Collection)}; replaces any existing groupings.
     *
     * @param groupBy the groupings to apply, or {@code null} to clear them
     * @return this request, for method chaining
     */
    public GetReservationUtilizationRequest withGroupBy(java.util.Collection<GroupDefinition> groupBy) {
        setGroupBy(groupBy);
        return this;
    }

    /**
     * Sets the granularity, <code>DAILY</code> or <code>MONTHLY</code>. Cannot be combined with <code>GroupBy</code>;
     * when neither is set, <code>GetReservationUtilization</code> defaults to <code>DAILY</code>.
     *
     * @param granularity the granularity to use
     * @see Granularity
     */
    public void setGranularity(String granularity) {
        this.granularity = granularity;
    }

    /**
     * Returns the configured granularity.
     *
     * @return the granularity, or {@code null} if unset
     * @see Granularity
     */
    public String getGranularity() {
        return this.granularity;
    }

    /**
     * Fluent variant of {@link #setGranularity(String)}.
     *
     * @param granularity the granularity to use
     * @return this request, for method chaining
     * @see Granularity
     */
    public GetReservationUtilizationRequest withGranularity(String granularity) {
        this.granularity = granularity;
        return this;
    }

    /**
     * Fluent, enum-typed variant of {@link #setGranularity(String)}.
     *
     * @param granularity the granularity to use
     * @return this request, for method chaining
     * @see Granularity
     */
    public GetReservationUtilizationRequest withGranularity(Granularity granularity) {
        this.granularity = granularity.toString();
        return this;
    }

    /**
     * Sets the dimension filter. See the notes on {@link #filter} for the supported dimensions and the Expression
     * restrictions.
     *
     * @param filter the filter expression to apply
     */
    public void setFilter(Expression filter) {
        this.filter = filter;
    }

    /**
     * Returns the dimension filter.
     *
     * @return the filter expression, or {@code null} if unset
     */
    public Expression getFilter() {
        return this.filter;
    }

    /**
     * Fluent variant of {@link #setFilter(Expression)}.
     *
     * @param filter the filter expression to apply
     * @return this request, for method chaining
     */
    public GetReservationUtilizationRequest withFilter(Expression filter) {
        this.filter = filter;
        return this;
    }

    /**
     * Sets the pagination token from a previous response.
     *
     * @param nextPageToken the token for the next page of results
     */
    public void setNextPageToken(String nextPageToken) {
        this.nextPageToken = nextPageToken;
    }

    /**
     * Returns the pagination token.
     *
     * @return the token for the next page of results, or {@code null} if unset
     */
    public String getNextPageToken() {
        return this.nextPageToken;
    }

    /**
     * Fluent variant of {@link #setNextPageToken(String)}.
     *
     * @param nextPageToken the token for the next page of results
     * @return this request, for method chaining
     */
    public GetReservationUtilizationRequest withNextPageToken(String nextPageToken) {
        this.nextPageToken = nextPageToken;
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        final StringBuilder sb = new StringBuilder("{");
        if (getTimePeriod() != null) {
            sb.append("TimePeriod: ").append(getTimePeriod()).append(",");
        }
        if (getGroupBy() != null) {
            sb.append("GroupBy: ").append(getGroupBy()).append(",");
        }
        if (getGranularity() != null) {
            sb.append("Granularity: ").append(getGranularity()).append(",");
        }
        if (getFilter() != null) {
            sb.append("Filter: ").append(getFilter()).append(",");
        }
        if (getNextPageToken() != null) {
            sb.append("NextPageToken: ").append(getNextPageToken());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof GetReservationUtilizationRequest)) {
            return false;
        }
        final GetReservationUtilizationRequest other = (GetReservationUtilizationRequest) obj;
        return java.util.Objects.equals(other.getTimePeriod(), this.getTimePeriod())
                && java.util.Objects.equals(other.getGroupBy(), this.getGroupBy())
                && java.util.Objects.equals(other.getGranularity(), this.getGranularity())
                && java.util.Objects.equals(other.getFilter(), this.getFilter())
                && java.util.Objects.equals(other.getNextPageToken(), this.getNextPageToken());
    }

    @Override
    public int hashCode() {
        // Objects.hash performs the same 31-based accumulation (null -> 0) as the generated original.
        return java.util.Objects.hash(getTimePeriod(), getGroupBy(), getGranularity(), getFilter(), getNextPageToken());
    }

    @Override
    public GetReservationUtilizationRequest clone() {
        return (GetReservationUtilizationRequest) super.clone();
    }
}
/*
 * Copyright 2016 Red Hat, Inc. and/or its affiliates
 * and other contributors as indicated by the @author tags.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.keycloak.services.resources.admin;

import org.jboss.resteasy.annotations.cache.NoCache;
import org.jboss.resteasy.spi.NotFoundException;
import org.jboss.resteasy.spi.ResteasyProviderFactory;
import org.keycloak.common.constants.KerberosConstants;
import org.keycloak.events.admin.OperationType;
import org.keycloak.events.admin.ResourceType;
import org.keycloak.mappers.FederationConfigValidationException;
import org.keycloak.models.AuthenticationExecutionModel;
import org.keycloak.models.KeycloakSession;
import org.keycloak.models.RealmModel;
import org.keycloak.models.UserFederationProvider;
import org.keycloak.models.UserFederationProviderFactory;
import org.keycloak.models.UserFederationProviderModel;
import org.keycloak.models.UserFederationValidatingProviderFactory;
import org.keycloak.models.utils.KeycloakModelUtils;
import org.keycloak.models.utils.ModelToRepresentation;
import org.keycloak.provider.ConfiguredProvider;
import org.keycloak.provider.ProviderConfigProperty;
import org.keycloak.provider.ProviderFactory;
import org.keycloak.representations.idm.ConfigPropertyRepresentation;
import org.keycloak.representations.idm.CredentialRepresentation;
import org.keycloak.representations.idm.UserFederationProviderFactoryRepresentation;
import org.keycloak.representations.idm.UserFederationProviderRepresentation;
import org.keycloak.services.ErrorResponseException;
import org.keycloak.services.ServicesLogger;
import org.keycloak.services.managers.UsersSyncManager;
import org.keycloak.utils.CredentialHelper;

import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.Properties;

/**
 * Base resource for managing users
 *
 * REST admin endpoints for user-federation provider factories and their
 * configured instances in a realm. Every handler performs a RealmAuth
 * permission check (view or manage) before touching the model.
 *
 * @author <a href="mailto:bill@burkecentral.com">Bill Burke</a>
 * @version $Revision: 1 $
 */
public class UserFederationProvidersResource {

    protected static final ServicesLogger logger = ServicesLogger.ROOT_LOGGER;

    // Realm whose federation providers this resource manages.
    protected RealmModel realm;

    // Permission checker; initialized against the USER resource in the constructor.
    protected RealmAuth auth;

    // Admin-event builder, pre-scoped to USER_FEDERATION_PROVIDER resources.
    protected AdminEventBuilder adminEvent;

    @Context
    protected UriInfo uriInfo;

    @Context
    protected KeycloakSession session;

    /**
     * @param realm      realm being administered
     * @param auth       permission checker for the calling admin
     * @param adminEvent event builder; scoped here to USER_FEDERATION_PROVIDER
     */
    public UserFederationProvidersResource(RealmModel realm, RealmAuth auth, AdminEventBuilder adminEvent) {
        this.auth = auth;
        this.realm = realm;
        this.adminEvent = adminEvent.resource(ResourceType.USER_FEDERATION_PROVIDER);

        auth.init(RealmAuth.Resource.USER);
    }

    /**
     * Automatically add "kerberos" to required realm credentials if it's supported by saved provider
     *
     * @param realm
     * @param model
     * @return true if kerberos credentials were added
     */
    public static boolean checkKerberosCredential(KeycloakSession session, RealmModel realm, UserFederationProviderModel model) {
        // Only enable the kerberos authenticator when the provider's config opts in.
        String allowKerberosCfg = model.getConfig().get(KerberosConstants.ALLOW_KERBEROS_AUTHENTICATION);
        if (Boolean.valueOf(allowKerberosCfg)) {
            CredentialHelper.setOrReplaceAuthenticationRequirement(session, realm, CredentialRepresentation.KERBEROS,
                    AuthenticationExecutionModel.Requirement.ALTERNATIVE, AuthenticationExecutionModel.Requirement.DISABLED);
            return true;
        }

        return false;
    }

    /**
     * Runs provider-specific validation against the given model, if the provider
     * factory supports it.
     *
     * @param session active Keycloak session
     * @param auth    used only to resolve the admin's locale for error messages
     * @param realm   realm the provider belongs to
     * @param model   provider configuration to validate
     * @throws ErrorResponseException as HTTP 400 with a localized message when
     *         the factory rejects the configuration
     */
    public static void validateFederationProviderConfig(KeycloakSession session, RealmAuth auth, RealmModel realm, UserFederationProviderModel model) {
        UserFederationProviderFactory providerFactory = KeycloakModelUtils.getFederationProviderFactory(session, model);
        if (providerFactory instanceof UserFederationValidatingProviderFactory) {
            try {
                ((UserFederationValidatingProviderFactory) providerFactory).validateConfig(realm, model);
            } catch (FederationConfigValidationException fcve) {
                logger.error(fcve.getMessage());
                // Localize the validation message using the admin's locale;
                // falls back to the raw message key when no translation exists.
                Properties messages = AdminRoot.getMessages(session, realm, auth.getAuth().getToken().getLocale());
                throw new ErrorResponseException(fcve.getMessage(), MessageFormat.format(messages.getProperty(fcve.getMessage(), fcve.getMessage()), fcve.getParameters()),
                        Response.Status.BAD_REQUEST);
            }
        }
    }

    /**
     * Get available provider factories
     *
     * Returns a list of available provider factories.
     *
     * @return
     */
    @GET
    @NoCache
    @Path("providers")
    @Produces(MediaType.APPLICATION_JSON)
    public List<UserFederationProviderFactoryRepresentation> getProviders() {
        auth.requireView();

        List<UserFederationProviderFactoryRepresentation> providers = new LinkedList<UserFederationProviderFactoryRepresentation>();
        for (ProviderFactory factory : session.getKeycloakSessionFactory().getProviderFactories(UserFederationProvider.class)) {
            UserFederationProviderFactoryRepresentation rep = new UserFederationProviderFactoryRepresentation();
            rep.setId(factory.getId());
            rep.setOptions(((UserFederationProviderFactory)factory).getConfigurationOptions());
            providers.add(rep);
        }
        return providers;
    }

    /**
     * Get factory with given id
     *
     * @return
     */
    @GET
    @NoCache
    @Path("providers/{id}")
    @Produces(MediaType.APPLICATION_JSON)
    public UserFederationProviderFactoryRepresentation getProvider(@PathParam("id") String id) {
        auth.requireView();

        for (ProviderFactory factory : session.getKeycloakSessionFactory().getProviderFactories(UserFederationProvider.class)) {
            if (!factory.getId().equals(id)) {
                continue;
            }

            // Factories that expose configuration metadata get the richer
            // description subclass (help text + typed properties).
            if (factory instanceof ConfiguredProvider) {

                UserFederationProviderFactoryDescription rep = new UserFederationProviderFactoryDescription();
                rep.setId(factory.getId());

                ConfiguredProvider cp = (ConfiguredProvider) factory;
                rep.setHelpText(cp.getHelpText());
                rep.setProperties(toConfigPropertyRepresentationList(cp.getConfigProperties()));

                return rep;
            }

            UserFederationProviderFactoryRepresentation rep = new UserFederationProviderFactoryRepresentation();
            rep.setId(factory.getId());
            rep.setOptions(((UserFederationProviderFactory) factory).getConfigurationOptions());

            return rep;
        }

        throw new NotFoundException("Could not find provider");
    }

    /**
     * Create a provider
     *
     * @param rep
     * @return
     */
    @POST
    @Path("instances")
    @Consumes(MediaType.APPLICATION_JSON)
    public Response createProviderInstance(UserFederationProviderRepresentation rep) {
        auth.requireManage();

        // Blank display names are normalized to null.
        String displayName = rep.getDisplayName();
        if (displayName != null && displayName.trim().equals("")) {
            displayName = null;
        }

        // Validate with a throwaway model (id == null) before persisting anything.
        UserFederationProviderModel tempModel = new UserFederationProviderModel(null, rep.getProviderName(), rep.getConfig(), rep.getPriority(), displayName,
                rep.getFullSyncPeriod(), rep.getChangedSyncPeriod(), rep.getLastSync());
        validateFederationProviderConfig(session, auth, realm, tempModel);

        UserFederationProviderModel model = realm.addUserFederationProvider(rep.getProviderName(), rep.getConfig(), rep.getPriority(), displayName,
                rep.getFullSyncPeriod(), rep.getChangedSyncPeriod(), rep.getLastSync());
        new UsersSyncManager().notifyToRefreshPeriodicSync(session, realm, model, false);
        boolean kerberosCredsAdded = checkKerberosCredential(session, realm, model);
        if (kerberosCredsAdded) {
            logger.addedKerberosToRealmCredentials();
        }

        rep.setId(model.getId());
        adminEvent.operation(OperationType.CREATE).resourcePath(uriInfo, model.getId()).representation(rep).success();

        return Response.created(uriInfo.getAbsolutePathBuilder().path(model.getId()).build()).build();
    }

    /**
     * Get configured providers
     *
     * @return
     */
    @GET
    @Path("instances")
    @Produces(MediaType.APPLICATION_JSON)
    @NoCache
    public List<UserFederationProviderRepresentation> getUserFederationInstances() {
        auth.requireView();

        List<UserFederationProviderRepresentation> reps = new LinkedList<UserFederationProviderRepresentation>();
        for (UserFederationProviderModel model : realm.getUserFederationProviders()) {
            UserFederationProviderRepresentation rep = ModelToRepresentation.toRepresentation(model);
            reps.add(rep);
        }
        return reps;
    }

    /**
     * Sub-resource locator for a single configured provider instance.
     *
     * @param id provider instance id
     * @return sub-resource handling requests for that instance
     */
    @Path("instances/{id}")
    public UserFederationProviderResource getUserFederationInstance(@PathParam("id") String id) {
        this.auth.requireView();

        // NOTE(review): model may be null for an unknown id; presumably
        // UserFederationProviderResource reports not-found itself — confirm.
        UserFederationProviderModel model = KeycloakModelUtils.findUserFederationProviderById(id, realm);
        UserFederationProviderResource instanceResource = new UserFederationProviderResource(session, realm, this.auth, model, adminEvent);
        ResteasyProviderFactory.getInstance().injectProperties(instanceResource);
        return instanceResource;
    }

    // Converts one provider config property descriptor to its REST representation.
    private ConfigPropertyRepresentation toConfigPropertyRepresentation(ProviderConfigProperty prop) {
        ConfigPropertyRepresentation propRep = new ConfigPropertyRepresentation();
        propRep.setName(prop.getName());
        propRep.setLabel(prop.getLabel());
        propRep.setType(prop.getType());
        propRep.setDefaultValue(prop.getDefaultValue());
        propRep.setHelpText(prop.getHelpText());
        return propRep;
    }

    // Converts a list of config property descriptors, preserving order.
    private List<ConfigPropertyRepresentation> toConfigPropertyRepresentationList(List<ProviderConfigProperty> props) {
        List<ConfigPropertyRepresentation> reps = new ArrayList<>(props.size());
        for(ProviderConfigProperty prop : props){
            reps.add(toConfigPropertyRepresentation(prop));
        }
        return reps;
    }

    /**
     * Extended factory representation returned for factories implementing
     * ConfiguredProvider: adds help text and typed configuration properties.
     */
    public static class UserFederationProviderFactoryDescription extends UserFederationProviderFactoryRepresentation {

        protected String name;

        protected String helpText;

        protected List<ConfigPropertyRepresentation> properties;

        public String getName() {
            return name;
        }

        public void setName(String name) {
            this.name = name;
        }

        public String getHelpText() {
            return helpText;
        }

        public void setHelpText(String helpText) {
            this.helpText = helpText;
        }

        public List<ConfigPropertyRepresentation> getProperties() {
            return properties;
        }

        public void setProperties(List<ConfigPropertyRepresentation> properties) {
            this.properties = properties;
        }
    }
}
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.kendra.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.AmazonWebServiceRequest;

/**
 * Request object for creating a new Amazon Kendra index.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/kendra-2019-02-03/CreateIndex" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class CreateIndexRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /** The name for the new index. */
    private String name;

    /**
     * The Amazon Kendra edition for the index: <code>DEVELOPER_EDITION</code> for development, testing, or proof of
     * concept; <code>ENTERPRISE_EDITION</code> for production. Optional; defaults to <code>ENTERPRISE_EDITION</code>.
     * Once set for an index, the edition can't be changed. See
     * <a href="https://docs.aws.amazon.com/kendra/latest/dg/quotas.html">Quotas</a> for edition limits.
     */
    private String edition;

    /**
     * An IAM role that gives Amazon Kendra permission to access your Amazon CloudWatch logs and metrics, and the role
     * used when indexing documents from an Amazon S3 bucket via the <code>BatchPutDocument</code> API.
     */
    private String roleArn;

    /**
     * The identifier of the KMS customer managed key (CMK) used to encrypt indexed data. Amazon Kendra doesn't support
     * asymmetric CMKs.
     */
    private ServerSideEncryptionConfiguration serverSideEncryptionConfiguration;

    /** A description for the index. */
    private String description;

    /**
     * A token identifying this request. Multiple <code>CreateIndex</code> calls with the same client token create only
     * one index.
     */
    private String clientToken;

    /**
     * Key-value pairs that identify the index; usable for organizing resources and controlling access to them.
     */
    private java.util.List<Tag> tags;

    /** The user token configuration. */
    private java.util.List<UserTokenConfiguration> userTokenConfigurations;

    /**
     * The user context policy: <code>ATTRIBUTE_FILTER</code> (all content searchable/displayable for all users;
     * filterable via <code>_user_id</code>/<code>_group_ids</code> or <code>UserContext</code>) or
     * <code>USER_TOKEN</code> (token-based access control; documents without access control plus documents accessible
     * to the user are searchable/displayable).
     */
    private String userContextPolicy;

    /**
     * Enables fetching access levels of groups and users from an Amazon Web Services Single Sign On identity source.
     * See <a href="https://docs.aws.amazon.com/kendra/latest/dg/API_UserGroupResolutionConfiguration.html"
     * >UserGroupResolutionConfiguration</a>.
     */
    private UserGroupResolutionConfiguration userGroupResolutionConfiguration;

    /**
     * Sets the name for the new index.
     *
     * @param name
     *        The name for the new index.
     */
    public void setName(String name) {
        this.name = name;
    }

    /**
     * @return The name for the new index.
     */
    public String getName() {
        return this.name;
    }

    /**
     * Fluent variant of {@link #setName(String)}.
     *
     * @param name
     *        The name for the new index.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateIndexRequest withName(String name) {
        this.name = name;
        return this;
    }

    /**
     * Sets the Amazon Kendra edition for the index. Optional; defaults to <code>ENTERPRISE_EDITION</code>. Once set,
     * the edition can't be changed.
     *
     * @param edition
     *        <code>DEVELOPER_EDITION</code> or <code>ENTERPRISE_EDITION</code>.
     * @see IndexEdition
     */
    public void setEdition(String edition) {
        this.edition = edition;
    }

    /**
     * @return The Amazon Kendra edition for the index.
     * @see IndexEdition
     */
    public String getEdition() {
        return this.edition;
    }

    /**
     * Fluent variant of {@link #setEdition(String)}.
     *
     * @param edition
     *        <code>DEVELOPER_EDITION</code> or <code>ENTERPRISE_EDITION</code>.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see IndexEdition
     */
    public CreateIndexRequest withEdition(String edition) {
        this.edition = edition;
        return this;
    }

    /**
     * Fluent, enum-typed variant of {@link #setEdition(String)}.
     *
     * @param edition
     *        The edition as an {@link IndexEdition} value; stored as its string form.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see IndexEdition
     */
    public CreateIndexRequest withEdition(IndexEdition edition) {
        setEdition(edition.toString());
        return this;
    }

    /**
     * Sets the IAM role that gives Amazon Kendra permission to access your CloudWatch logs and metrics; also used by
     * the <code>BatchPutDocument</code> API for S3 indexing.
     *
     * @param roleArn
     *        The IAM role ARN.
     */
    public void setRoleArn(String roleArn) {
        this.roleArn = roleArn;
    }

    /**
     * @return The IAM role ARN used for CloudWatch access and <code>BatchPutDocument</code> S3 indexing.
     */
    public String getRoleArn() {
        return this.roleArn;
    }

    /**
     * Fluent variant of {@link #setRoleArn(String)}.
     *
     * @param roleArn
     *        The IAM role ARN.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateIndexRequest withRoleArn(String roleArn) {
        this.roleArn = roleArn;
        return this;
    }

    /**
     * Sets the KMS customer managed key (CMK) used to encrypt indexed data. Asymmetric CMKs are not supported.
     *
     * @param serverSideEncryptionConfiguration
     *        The server-side encryption configuration.
     */
    public void setServerSideEncryptionConfiguration(ServerSideEncryptionConfiguration serverSideEncryptionConfiguration) {
        this.serverSideEncryptionConfiguration = serverSideEncryptionConfiguration;
    }

    /**
     * @return The server-side encryption configuration (KMS CMK) for indexed data.
     */
    public ServerSideEncryptionConfiguration getServerSideEncryptionConfiguration() {
        return this.serverSideEncryptionConfiguration;
    }

    /**
     * Fluent variant of {@link #setServerSideEncryptionConfiguration(ServerSideEncryptionConfiguration)}.
     *
     * @param serverSideEncryptionConfiguration
     *        The server-side encryption configuration.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateIndexRequest withServerSideEncryptionConfiguration(ServerSideEncryptionConfiguration serverSideEncryptionConfiguration) {
        this.serverSideEncryptionConfiguration = serverSideEncryptionConfiguration;
        return this;
    }

    /**
     * Sets a description for the index.
     *
     * @param description
     *        A description for the index.
     */
    public void setDescription(String description) {
        this.description = description;
    }

    /**
     * @return A description for the index.
     */
    public String getDescription() {
        return this.description;
    }

    /**
     * Fluent variant of {@link #setDescription(String)}.
     *
     * @param description
     *        A description for the index.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateIndexRequest withDescription(String description) {
        this.description = description;
        return this;
    }

    /**
     * Sets the idempotency token for this request. Multiple <code>CreateIndex</code> calls with the same client token
     * create only one index.
     *
     * @param clientToken
     *        A token that identifies the request.
     */
    public void setClientToken(String clientToken) {
        this.clientToken = clientToken;
    }

    /**
     * @return The idempotency token for this request.
     */
    public String getClientToken() {
        return this.clientToken;
    }

    /**
     * Fluent variant of {@link #setClientToken(String)}.
     *
     * @param clientToken
     *        A token that identifies the request.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateIndexRequest withClientToken(String clientToken) {
        this.clientToken = clientToken;
        return this;
    }

    /**
     * @return The key-value tags that identify the index.
     */
    public java.util.List<Tag> getTags() {
        return tags;
    }

    /**
     * Replaces the tags for the index with a defensive copy of the given collection.
     *
     * @param tags
     *        Key-value pairs identifying the index; {@code null} clears the list.
     */
    public void setTags(java.util.Collection<Tag> tags) {
        this.tags = (tags == null) ? null : new java.util.ArrayList<Tag>(tags);
    }

    /**
     * Appends tags to the existing list (if any). Use {@link #setTags(java.util.Collection)} or
     * {@link #withTags(java.util.Collection)} to replace the list instead.
     *
     * @param tags
     *        Key-value pairs identifying the index.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateIndexRequest withTags(Tag... tags) {
        if (this.tags == null) {
            this.tags = new java.util.ArrayList<Tag>(tags.length);
        }
        java.util.Collections.addAll(this.tags, tags);
        return this;
    }

    /**
     * Fluent variant of {@link #setTags(java.util.Collection)}.
     *
     * @param tags
     *        Key-value pairs identifying the index; {@code null} clears the list.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateIndexRequest withTags(java.util.Collection<Tag> tags) {
        setTags(tags);
        return this;
    }

    /**
     * @return The user token configuration.
     */
    public java.util.List<UserTokenConfiguration> getUserTokenConfigurations() {
        return userTokenConfigurations;
    }

    /**
     * Replaces the user token configurations with a defensive copy of the given collection.
     *
     * @param userTokenConfigurations
     *        The user token configuration; {@code null} clears the list.
     */
    public void setUserTokenConfigurations(java.util.Collection<UserTokenConfiguration> userTokenConfigurations) {
        this.userTokenConfigurations = (userTokenConfigurations == null)
                ? null
                : new java.util.ArrayList<UserTokenConfiguration>(userTokenConfigurations);
    }

    /**
     * Appends user token configurations to the existing list (if any). Use
     * {@link #setUserTokenConfigurations(java.util.Collection)} or
     * {@link #withUserTokenConfigurations(java.util.Collection)} to replace the list instead.
     *
     * @param userTokenConfigurations
     *        The user token configuration.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateIndexRequest withUserTokenConfigurations(UserTokenConfiguration... userTokenConfigurations) {
        if (this.userTokenConfigurations == null) {
            this.userTokenConfigurations = new java.util.ArrayList<UserTokenConfiguration>(userTokenConfigurations.length);
        }
        java.util.Collections.addAll(this.userTokenConfigurations, userTokenConfigurations);
        return this;
    }

    /**
     * Fluent variant of {@link #setUserTokenConfigurations(java.util.Collection)}.
     *
     * @param userTokenConfigurations
     *        The user token configuration; {@code null} clears the list.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateIndexRequest withUserTokenConfigurations(java.util.Collection<UserTokenConfiguration> userTokenConfigurations) {
        setUserTokenConfigurations(userTokenConfigurations);
        return this;
    }

    /**
     * Sets the user context policy: <code>ATTRIBUTE_FILTER</code> or <code>USER_TOKEN</code>.
     *
     * @param userContextPolicy
     *        The user context policy.
     * @see UserContextPolicy
     */
    public void setUserContextPolicy(String userContextPolicy) {
        this.userContextPolicy = userContextPolicy;
    }

    /**
     * @return The user context policy (<code>ATTRIBUTE_FILTER</code> or <code>USER_TOKEN</code>).
     * @see UserContextPolicy
     */
    public String getUserContextPolicy() {
        return this.userContextPolicy;
    }

    /**
     * Fluent variant of {@link #setUserContextPolicy(String)}.
     *
     * @param userContextPolicy
     *        The user context policy.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see UserContextPolicy
     */
    public CreateIndexRequest withUserContextPolicy(String userContextPolicy) {
        this.userContextPolicy = userContextPolicy;
        return this;
    }

    /**
     * Fluent, enum-typed variant of {@link #setUserContextPolicy(String)}.
     *
     * @param userContextPolicy
     *        The policy as a {@link UserContextPolicy} value; stored as its string form.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see UserContextPolicy
     */
    public CreateIndexRequest withUserContextPolicy(UserContextPolicy userContextPolicy) {
        setUserContextPolicy(userContextPolicy.toString());
        return this;
    }

    /**
     * Sets the configuration enabling group/user access-level fetching from an Amazon Web Services Single Sign On
     * identity source. See
     * <a href="https://docs.aws.amazon.com/kendra/latest/dg/API_UserGroupResolutionConfiguration.html"
     * >UserGroupResolutionConfiguration</a>.
     *
     * @param userGroupResolutionConfiguration
     *        The user-group resolution configuration.
     */
    public void setUserGroupResolutionConfiguration(UserGroupResolutionConfiguration userGroupResolutionConfiguration) {
        this.userGroupResolutionConfiguration = userGroupResolutionConfiguration;
    }

    /**
     * <p>
     * Enables fetching access levels of groups and users from an Amazon Web Services Single Sign On identity source.
To * configure this, see <a * href="https://docs.aws.amazon.com/kendra/latest/dg/API_UserGroupResolutionConfiguration.html" * >UserGroupResolutionConfiguration</a>. * </p> * * @return Enables fetching access levels of groups and users from an Amazon Web Services Single Sign On identity * source. To configure this, see <a * href="https://docs.aws.amazon.com/kendra/latest/dg/API_UserGroupResolutionConfiguration.html" * >UserGroupResolutionConfiguration</a>. */ public UserGroupResolutionConfiguration getUserGroupResolutionConfiguration() { return this.userGroupResolutionConfiguration; } /** * <p> * Enables fetching access levels of groups and users from an Amazon Web Services Single Sign On identity source. To * configure this, see <a * href="https://docs.aws.amazon.com/kendra/latest/dg/API_UserGroupResolutionConfiguration.html" * >UserGroupResolutionConfiguration</a>. * </p> * * @param userGroupResolutionConfiguration * Enables fetching access levels of groups and users from an Amazon Web Services Single Sign On identity * source. To configure this, see <a * href="https://docs.aws.amazon.com/kendra/latest/dg/API_UserGroupResolutionConfiguration.html" * >UserGroupResolutionConfiguration</a>. * @return Returns a reference to this object so that method calls can be chained together. */ public CreateIndexRequest withUserGroupResolutionConfiguration(UserGroupResolutionConfiguration userGroupResolutionConfiguration) { setUserGroupResolutionConfiguration(userGroupResolutionConfiguration); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. 
* * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getName() != null) sb.append("Name: ").append(getName()).append(","); if (getEdition() != null) sb.append("Edition: ").append(getEdition()).append(","); if (getRoleArn() != null) sb.append("RoleArn: ").append(getRoleArn()).append(","); if (getServerSideEncryptionConfiguration() != null) sb.append("ServerSideEncryptionConfiguration: ").append(getServerSideEncryptionConfiguration()).append(","); if (getDescription() != null) sb.append("Description: ").append(getDescription()).append(","); if (getClientToken() != null) sb.append("ClientToken: ").append(getClientToken()).append(","); if (getTags() != null) sb.append("Tags: ").append(getTags()).append(","); if (getUserTokenConfigurations() != null) sb.append("UserTokenConfigurations: ").append(getUserTokenConfigurations()).append(","); if (getUserContextPolicy() != null) sb.append("UserContextPolicy: ").append(getUserContextPolicy()).append(","); if (getUserGroupResolutionConfiguration() != null) sb.append("UserGroupResolutionConfiguration: ").append(getUserGroupResolutionConfiguration()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof CreateIndexRequest == false) return false; CreateIndexRequest other = (CreateIndexRequest) obj; if (other.getName() == null ^ this.getName() == null) return false; if (other.getName() != null && other.getName().equals(this.getName()) == false) return false; if (other.getEdition() == null ^ this.getEdition() == null) return false; if (other.getEdition() != null && other.getEdition().equals(this.getEdition()) == false) return false; if (other.getRoleArn() == null ^ this.getRoleArn() == null) return false; if (other.getRoleArn() != null && other.getRoleArn().equals(this.getRoleArn()) == false) return false; if 
(other.getServerSideEncryptionConfiguration() == null ^ this.getServerSideEncryptionConfiguration() == null) return false; if (other.getServerSideEncryptionConfiguration() != null && other.getServerSideEncryptionConfiguration().equals(this.getServerSideEncryptionConfiguration()) == false) return false; if (other.getDescription() == null ^ this.getDescription() == null) return false; if (other.getDescription() != null && other.getDescription().equals(this.getDescription()) == false) return false; if (other.getClientToken() == null ^ this.getClientToken() == null) return false; if (other.getClientToken() != null && other.getClientToken().equals(this.getClientToken()) == false) return false; if (other.getTags() == null ^ this.getTags() == null) return false; if (other.getTags() != null && other.getTags().equals(this.getTags()) == false) return false; if (other.getUserTokenConfigurations() == null ^ this.getUserTokenConfigurations() == null) return false; if (other.getUserTokenConfigurations() != null && other.getUserTokenConfigurations().equals(this.getUserTokenConfigurations()) == false) return false; if (other.getUserContextPolicy() == null ^ this.getUserContextPolicy() == null) return false; if (other.getUserContextPolicy() != null && other.getUserContextPolicy().equals(this.getUserContextPolicy()) == false) return false; if (other.getUserGroupResolutionConfiguration() == null ^ this.getUserGroupResolutionConfiguration() == null) return false; if (other.getUserGroupResolutionConfiguration() != null && other.getUserGroupResolutionConfiguration().equals(this.getUserGroupResolutionConfiguration()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getName() == null) ? 0 : getName().hashCode()); hashCode = prime * hashCode + ((getEdition() == null) ? 0 : getEdition().hashCode()); hashCode = prime * hashCode + ((getRoleArn() == null) ? 
0 : getRoleArn().hashCode()); hashCode = prime * hashCode + ((getServerSideEncryptionConfiguration() == null) ? 0 : getServerSideEncryptionConfiguration().hashCode()); hashCode = prime * hashCode + ((getDescription() == null) ? 0 : getDescription().hashCode()); hashCode = prime * hashCode + ((getClientToken() == null) ? 0 : getClientToken().hashCode()); hashCode = prime * hashCode + ((getTags() == null) ? 0 : getTags().hashCode()); hashCode = prime * hashCode + ((getUserTokenConfigurations() == null) ? 0 : getUserTokenConfigurations().hashCode()); hashCode = prime * hashCode + ((getUserContextPolicy() == null) ? 0 : getUserContextPolicy().hashCode()); hashCode = prime * hashCode + ((getUserGroupResolutionConfiguration() == null) ? 0 : getUserGroupResolutionConfiguration().hashCode()); return hashCode; } @Override public CreateIndexRequest clone() { return (CreateIndexRequest) super.clone(); } }
/* * $Header: /home/cvs/jakarta-tomcat-catalina/webapps/admin/WEB-INF/classes/org/apache/webapp/admin/realm/SaveMemoryRealmAction.java,v 1.5 2003/04/24 07:56:34 amyroh Exp $ * $Revision: 1.5 $ * $Date: 2003/04/24 07:56:34 $ * * ==================================================================== * * The Apache Software License, Version 1.1 * * Copyright (c) 2001-2002 The Apache Software Foundation. All rights * reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in * the documentation and/or other materials provided with the * distribution. * * 3. The end-user documentation included with the redistribution, if * any, must include the following acknowlegement: * "This product includes software developed by the * Apache Software Foundation (http://www.apache.org/)." * Alternately, this acknowlegement may appear in the software itself, * if and wherever such third-party acknowlegements normally appear. * * 4. The names "The Jakarta Project", "Struts", and "Apache Software * Foundation" must not be used to endorse or promote products derived * from this software without prior written permission. For written * permission, please contact apache@apache.org. * * 5. Products derived from this software may not be called "Apache" * nor may "Apache" appear in their names without prior written * permission of the Apache Group. * * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. 
IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF * USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF * SUCH DAMAGE. * ==================================================================== * * This software consists of voluntary contributions made by many * individuals on behalf of the Apache Software Foundation. For more * information on the Apache Software Foundation, please see * <http://www.apache.org/>. * */ package org.apache.webapp.admin.realm; import java.net.URLEncoder; import java.util.Iterator; import java.util.Locale; import java.io.IOException; import javax.management.Attribute; import javax.management.MBeanServer; import javax.management.MBeanServerFactory; import javax.management.QueryExp; import javax.management.Query; import javax.management.ObjectInstance; import javax.management.ObjectName; import javax.management.JMException; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpSession; import org.apache.struts.action.Action; import org.apache.struts.action.ActionError; import org.apache.struts.action.ActionErrors; import org.apache.struts.action.ActionForm; import org.apache.struts.action.ActionForward; import org.apache.struts.action.ActionMapping; import org.apache.struts.util.MessageResources; import org.apache.webapp.admin.ApplicationServlet; import org.apache.webapp.admin.TomcatTreeBuilder; import org.apache.webapp.admin.TreeControl; import org.apache.webapp.admin.TreeControlNode; import 
org.apache.webapp.admin.logger.DeleteLoggerAction; /** * The <code>Action</code> that completes <em>Add Realm</em> and * <em>Edit Realm</em> transactions for Memory realm. * * @author Manveen Kaur * @version $Revision: 1.5 $ $Date: 2003/04/24 07:56:34 $ */ public final class SaveMemoryRealmAction extends Action { // ----------------------------------------------------- Instance Variables /** * Signature for the <code>createStandardRealm</code> operation. */ private String createStandardRealmTypes[] = { "java.lang.String", // parent }; /** * The MBeanServer we will be interacting with. */ private MBeanServer mBServer = null; /** * The MessageResources we will be retrieving messages from. */ private MessageResources resources = null; // --------------------------------------------------------- Public Methods /** * Process the specified HTTP request, and create the corresponding HTTP * response (or forward to another web component that will create it). * Return an <code>ActionForward</code> instance describing where and how * control should be forwarded, or <code>null</code> if the response has * already been completed. 
* * @param mapping The ActionMapping used to select this instance * @param actionForm The optional ActionForm bean for this request (if any) * @param request The HTTP request we are processing * @param response The HTTP response we are creating * * @exception IOException if an input/output error occurs * @exception ServletException if a servlet exception occurs */ public ActionForward perform(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws IOException, ServletException { // Acquire the resources that we need HttpSession session = request.getSession(); Locale locale = (Locale) session.getAttribute(Action.LOCALE_KEY); if (resources == null) { resources = getServlet().getResources(); } // Acquire a reference to the MBeanServer containing our MBeans try { mBServer = ((ApplicationServlet) getServlet()).getServer(); } catch (Throwable t) { throw new ServletException ("Cannot acquire MBeanServer reference", t); } // Identify the requested action MemoryRealmForm rform = (MemoryRealmForm) form; String adminAction = rform.getAdminAction(); String rObjectName = rform.getObjectName(); // Perform a "Create Memory Realm" transaction (if requested) if ("Create".equals(adminAction)) { String operation = null; String values[] = null; try { String parent = rform.getParentObjectName(); String objectName = DeleteLoggerAction.getObjectName(parent, TomcatTreeBuilder.REALM_TYPE); ObjectName pname = new ObjectName(parent); StringBuffer sb = new StringBuffer(pname.getDomain()); // For service, create the corresponding Engine mBean // Parent in this case needs to be the container mBean for the service try { if ("Service".equalsIgnoreCase(pname.getKeyProperty("type"))) { sb.append(":type=Engine"); parent = sb.toString(); } } catch (Exception e) { String message = resources.getMessage("error.engineName.bad", sb.toString()); getServlet().log(message); response.sendError(HttpServletResponse.SC_BAD_REQUEST, message); return (null); } // 
Ensure that the requested user database name is unique ObjectName oname = new ObjectName(objectName); if (mBServer.isRegistered(oname)) { ActionErrors errors = new ActionErrors(); errors.add("realmName", new ActionError("error.realmName.exists")); saveErrors(request, errors); return (new ActionForward(mapping.getInput())); } String domain = oname.getDomain(); // Look up our MBeanFactory MBean ObjectName fname = TomcatTreeBuilder.getMBeanFactory(); // Create a new StandardRealm object values = new String[1]; values[0] = parent; operation = "createMemoryRealm"; rObjectName = (String) mBServer.invoke(fname, operation, values, createStandardRealmTypes); // Add the new Realm to our tree control node TreeControl control = (TreeControl) session.getAttribute("treeControlTest"); if (control != null) { TreeControlNode parentNode = control.findNode(rform.getParentObjectName()); if (parentNode != null) { String nodeLabel = rform.getNodeLabel(); String encodedName = URLEncoder.encode(rObjectName); TreeControlNode childNode = new TreeControlNode(rObjectName, "Realm.gif", nodeLabel, "EditRealm.do?select=" + encodedName, "content", true, domain); parentNode.addChild(childNode); // FIXME - force a redisplay } else { getServlet().log ("Cannot find parent node '" + parent + "'"); } } else { getServlet().log ("Cannot find TreeControlNode!"); } } catch (Exception e) { getServlet().log (resources.getMessage(locale, "users.error.invoke", operation), e); response.sendError (HttpServletResponse.SC_INTERNAL_SERVER_ERROR, resources.getMessage(locale, "users.error.invoke", operation)); return (null); } } // Perform attribute updates as requested String attribute = null; try { ObjectName roname = new ObjectName(rObjectName); attribute = "debug"; int debug = 0; try { debug = Integer.parseInt(rform.getDebugLvl()); } catch (Throwable t) { debug = 0; } mBServer.setAttribute(roname, new Attribute("debug", new Integer(debug))); attribute = "pathname"; mBServer.setAttribute(roname, new 
Attribute("pathname", rform.getPathName())); } catch (Exception e) { getServlet().log (resources.getMessage(locale, "users.error.attribute.set", attribute), e); response.sendError (HttpServletResponse.SC_INTERNAL_SERVER_ERROR, resources.getMessage(locale, "users.error.attribute.set", attribute)); return (null); } // Forward to the success reporting page session.removeAttribute(mapping.getAttribute()); return (mapping.findForward("Save Successful")); } }
/* * DBeaver - Universal Database Manager * Copyright (C) 2010-2019 Serge Rider (serge@jkiss.org) * Copyright (C) 2011-2012 Eugene Fradkin (eugene.fradkin@gmail.com) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jkiss.dbeaver.core; import org.eclipse.osgi.util.NLS; public class CoreMessages extends NLS { public static final String BUNDLE_NAME = "org.jkiss.dbeaver.core.CoreResources"; //$NON-NLS-1$ public static String actions_menu_about; public static String actions_menu_check_update; public static String actions_menu_database; public static String actions_menu_edit; public static String actions_menu_edit_ContentFormat; public static String actions_menu_exit_emergency; public static String actions_menu_file; public static String actions_menu_help; public static String actions_menu_navigate; public static String actions_menu_window; public static String actions_menu_window_showView; public static String action_menu_showKeyAssist; public static String action_menu_installNewSoftware; public static String action_menu_installInfo; public static String action_menu_enterpriseEdition; public static String action_menu_marketplace_extensions; public static String action_menu_transaction_manualcommit_name; public static String action_menu_transaction_autocommit_name; public static String action_menu_transaction_manualcommit_description; public static String action_menu_transaction_autocommit_description; public static String action_menu_tools_find_in_navigator; public static 
String action_menu_transactionMonitor_totalStatement; public static String action_menu_transactionMonitor_modifyStatement; public static String action_menu_transactionMonitor_autocommitMode; public static String action_menu_transactionMonitor_uptime; public static String action_menu_transactionMonitor_notConnected; public static String actions_navigator_bookmark_error_message; public static String actions_navigator_bookmark_error_title; public static String actions_navigator_bookmark_title; public static String common_error_sql; public static String confirm_exit_title; public static String confirm_exit_message; public static String confirm_exit_toggleMessage; public static String confirm_disconnect_txn_title; public static String confirm_disconnect_txn_message; public static String confirm_disconnect_txn_toggleMessage; public static String confirm_driver_download_title; public static String confirm_driver_download_message; public static String confirm_driver_download_toggleMessage; public static String confirm_driver_download_manual_title; public static String confirm_driver_download_manual_message; public static String confirm_driver_download_manual_toggleMessage; public static String confirm_version_check_title; public static String confirm_version_check_message; public static String confirm_version_check_toggleMessage; public static String controls_connection_properties_action_add_property; public static String controls_connection_properties_action_remove_property; public static String controls_connection_properties_category_user_properties; public static String controls_connection_properties_dialog_new_property_title; public static String controls_client_home_selector_browse; public static String controls_client_homes_panel_button_add_home; public static String controls_client_homes_panel_button_remove_home; public static String controls_client_homes_panel_confirm_remove_home_text; public static String controls_client_homes_panel_confirm_remove_home_title; 
public static String controls_client_homes_panel_dialog_title; public static String controls_client_homes_panel_group_information; public static String controls_client_homes_panel_label_id; public static String controls_client_homes_panel_label_name; public static String controls_client_homes_panel_label_path; public static String controls_client_homes_panel_label_product_name; public static String controls_client_homes_panel_label_product_version; public static String controls_driver_tree_column_connections; public static String controls_driver_tree_column_name; public static String controls_itemlist_action_copy; public static String model_navigator__connections; public static String model_navigator_Connection; public static String model_navigator_Connections; public static String editor_file_delete_error_title; public static String editor_file_delete_error_text; public static String editor_file_delete_this_script; public static String editor_file_delete_confirm_delete_text; public static String editor_file_delete_confirm_delete_title; public static String dialog_about_font; public static String dialog_about_label_version; public static String dialog_about_title; public static String dialog_connection_auth_checkbox_save_password; public static String dialog_connection_auth_group_user_cridentials; public static String dialog_connection_auth_label_password; public static String dialog_connection_auth_label_username; public static String dialog_connection_auth_title; public static String dialog_connection_auth_title_for_handler; public static String dialog_connection_button_test; public static String dialog_connection_events_checkbox_show_process; public static String dialog_connection_events_checkbox_terminate_at_disconnect; public static String dialog_connection_events_checkbox_wait_finish; public static String dialog_connection_events_checkbox_wait_finish_timeout; public static String dialog_connection_events_label_command; public static String 
dialog_connection_events_label_event; public static String dialog_connection_events_title; public static String dialog_connection_message; public static String dialog_connection_description; public static String dialog_connection_wizard_final_button_test; public static String dialog_connection_wizard_final_button_events; public static String dialog_connection_wizard_final_checkbox_filter_catalogs; public static String dialog_connection_wizard_final_checkbox_filter_schemas; public static String dialog_connection_wizard_final_checkbox_save_password_locally; public static String dialog_connection_wizard_final_checkbox_auto_commit; public static String dialog_connection_wizard_final_checkbox_show_system_objects; public static String dialog_connection_wizard_final_checkbox_show_util_objects; public static String dialog_connection_wizard_final_checkbox_connection_readonly; public static String dialog_connection_wizard_final_default_new_connection_name; public static String dialog_connection_wizard_final_description; public static String dialog_connection_wizard_final_group_filters; public static String dialog_connection_wizard_final_group_security; public static String dialog_connection_wizard_final_group_misc; public static String dialog_connection_wizard_final_header; public static String dialog_connection_wizard_final_label_connection_name; public static String dialog_connection_wizard_final_filter_catalogs; public static String dialog_connection_wizard_final_filter_schemas_users; public static String dialog_connection_wizard_final_filter_tables; public static String dialog_connection_wizard_final_filter_attributes; public static String dialog_connection_wizard_final_filter_link_tooltip; public static String dialog_connection_wizard_final_filter_link_not_supported_text; public static String dialog_connection_wizard_final_filter_link_not_supported_tooltip; public static String dialog_connection_wizard_final_button_tunneling; public static String 
dialog_connection_wizard_connection_init; public static String dialog_connection_wizard_connection_init_description; public static String dialog_connection_wizard_final_group_other; public static String dialog_connection_wizard_start_connection_monitor_close; public static String dialog_connection_wizard_start_connection_monitor_start; public static String dialog_connection_wizard_start_connection_monitor_subtask_test; public static String dialog_connection_wizard_start_connection_monitor_success; public static String dialog_connection_wizard_start_connection_monitor_connected; public static String dialog_connection_wizard_start_connection_monitor_thread; public static String dialog_connection_wizard_start_dialog_error_message; public static String dialog_connection_wizard_start_dialog_error_title; public static String dialog_connection_wizard_start_dialog_interrupted_message; public static String dialog_connection_wizard_start_dialog_interrupted_title; public static String dialog_connection_wizard_title; public static String dialog_driver_manager_button_delete; public static String dialog_driver_manager_button_edit; public static String dialog_driver_manager_button_new; public static String dialog_driver_manager_button_copy; public static String dialog_driver_manager_label_unavailable; public static String dialog_driver_manager_label_user_defined; public static String dialog_driver_manager_message_cant_delete_text; public static String dialog_driver_manager_message_cant_delete_title; public static String dialog_driver_manager_message_delete_driver_text; public static String dialog_driver_manager_message_delete_driver_title; public static String dialog_driver_manager_title; public static String dialog_edit_driver_button_add_file; public static String dialog_edit_driver_button_add_folder; public static String dialog_edit_driver_button_add_artifact; public static String dialog_edit_driver_button_bind_class; public static String dialog_edit_driver_button_classpath; 
public static String dialog_edit_driver_button_update_version; public static String dialog_edit_driver_button_details; public static String dialog_edit_driver_button_delete; public static String dialog_edit_driver_button_down; public static String dialog_edit_driver_button_up; public static String dialog_edit_driver_dialog_driver_error_message; public static String dialog_edit_driver_dialog_driver_error_title; public static String dialog_edit_driver_dialog_open_driver_directory; public static String dialog_edit_driver_dialog_open_driver_library; public static String dialog_edit_driver_label_category; public static String dialog_edit_driver_label_class_name; public static String dialog_edit_driver_label_class_name_tip; public static String dialog_edit_driver_label_default_port; public static String dialog_edit_driver_label_description; public static String dialog_edit_driver_label_driver_class; public static String dialog_edit_driver_label_driver_name; public static String dialog_edit_driver_label_sample_url; public static String dialog_edit_driver_label_sample_url_tip; public static String dialog_edit_driver_label_website; public static String dialog_edit_driver_tab_name_advanced_parameters; public static String dialog_edit_driver_tab_name_connection_properties; public static String dialog_edit_driver_tab_name_client_homes; public static String dialog_edit_driver_tab_name_driver_libraries; public static String dialog_edit_driver_tab_name_license; public static String dialog_edit_driver_tab_tooltip_advanced_parameters; public static String dialog_edit_driver_tab_tooltip_connection_properties; public static String dialog_edit_driver_tab_tooltip_driver_libraries; public static String dialog_edit_driver_tab_tooltip_license; public static String dialog_edit_driver_text_driver_license; public static String dialog_edit_driver_title_create_driver; public static String dialog_edit_driver_title_edit_driver; public static String 
dialog_migrate_wizard_choose_driver_description; public static String dialog_migrate_wizard_choose_driver_title; public static String dialog_migrate_wizard_name; public static String dialog_migrate_wizard_start_description; public static String dialog_migrate_wizard_start_title; public static String dialog_migrate_wizard_window_title; public static String dialog_connection_network_socket_label_host; public static String dialog_connection_network_socket_label_port; public static String dialog_connection_network_socket_label_username; public static String dialog_connection_network_socket_label_password; public static String dialog_new_connection_wizard_monitor_load_data_sources; public static String dialog_new_connection_wizard_start_description; public static String dialog_new_connection_wizard_start_title; public static String dialog_new_connection_wizard_title; public static String dialog_project_export_wizard_main_page; public static String dialog_project_export_wizard_monitor_collect_info; public static String dialog_project_export_wizard_monitor_export_driver_info; public static String dialog_project_export_wizard_monitor_export_libraries; public static String dialog_project_export_wizard_monitor_export_project; public static String dialog_project_export_wizard_page_checkbox_overwrite_files; public static String dialog_project_export_wizard_page_dialog_choose_export_dir_message; public static String dialog_project_export_wizard_page_dialog_choose_export_dir_text; public static String dialog_project_export_wizard_page_label_directory; public static String dialog_project_export_wizard_page_message_check_script; public static String dialog_project_export_wizard_page_message_configure_settings; public static String dialog_project_export_wizard_page_message_no_output_dir; public static String dialog_project_export_wizard_page_title; public static String dialog_project_export_wizard_start_archive_name_prefix; public static String 
dialog_project_export_wizard_start_checkbox_libraries; public static String dialog_project_export_wizard_start_dialog_directory_message; public static String dialog_project_export_wizard_start_dialog_directory_text; public static String dialog_project_export_wizard_start_label_directory; public static String dialog_project_export_wizard_start_label_output_file; public static String dialog_project_export_wizard_start_message_choose_project; public static String dialog_project_export_wizard_start_message_configure_settings; public static String dialog_project_export_wizard_start_message_empty_output_directory; public static String dialog_project_export_wizard_start_title; public static String dialog_project_export_wizard_window_title; public static String dialog_project_import_wizard_file_checkbox_import_libraries; public static String dialog_project_import_wizard_file_column_source_name; public static String dialog_project_import_wizard_file_column_target_name; public static String dialog_project_import_wizard_file_description; public static String dialog_project_import_wizard_file_dialog_export_archive_text; public static String dialog_project_import_wizard_file_group_input; public static String dialog_project_import_wizard_file_group_projects; public static String dialog_project_import_wizard_file_label_file; public static String dialog_project_import_wizard_file_message_cannt_find_projects; public static String dialog_project_import_wizard_file_message_choose_project; public static String dialog_project_import_wizard_file_message_project_exists; public static String dialog_project_import_wizard_file_message_ready; public static String dialog_project_import_wizard_file_name; public static String dialog_project_import_wizard_file_title; public static String dialog_project_import_wizard_message_success_import_message; public static String dialog_project_import_wizard_message_success_import_title; public static String 
dialog_project_import_wizard_monitor_import_drivers; public static String dialog_project_import_wizard_monitor_import_project; public static String dialog_project_import_wizard_monitor_import_projects; public static String dialog_project_import_wizard_monitor_load_driver; public static String dialog_project_import_wizard_monitor_load_libraries; public static String dialog_project_import_wizard_title; public static String dialog_scripts_export_wizard_page_name; public static String dialog_scripts_export_wizard_window_title; public static String dialog_scripts_import_wizard_description; public static String dialog_scripts_import_wizard_dialog_choose_dir_message; public static String dialog_scripts_import_wizard_dialog_choose_dir_text; public static String dialog_scripts_import_wizard_dialog_error_text; public static String dialog_scripts_import_wizard_dialog_error_title; public static String dialog_scripts_import_wizard_dialog_message_no_scripts; public static String dialog_scripts_import_wizard_dialog_message_success_imported; public static String dialog_scripts_import_wizard_dialog_message_title; public static String dialog_scripts_import_wizard_label_default_connection; public static String dialog_scripts_import_wizard_label_file_mask; public static String dialog_scripts_import_wizard_label_input_directory; public static String dialog_scripts_import_wizard_label_root_folder; public static String dialog_scripts_import_wizard_monitor_import_scripts; public static String dialog_scripts_import_wizard_name; public static String dialog_scripts_import_wizard_title; public static String dialog_scripts_import_wizard_window_title; public static String dialog_search_objects_button_close; public static String dialog_search_objects_button_search; public static String dialog_search_objects_column_description; public static String dialog_search_objects_column_type; public static String dialog_search_objects_combo_contains; public static String dialog_search_objects_combo_like; 
// Database-object search dialog
public static String dialog_search_objects_combo_starts_with;
public static String dialog_search_objects_group_object_types;
public static String dialog_search_objects_group_objects_source;
public static String dialog_search_objects_item_list_info;
public static String dialog_search_objects_label_name_match;
public static String dialog_search_objects_label_object_name;
public static String dialog_search_objects_message_no_objects_like_;
public static String dialog_search_objects_message_objects_found;
public static String dialog_search_objects_spinner_max_results;
public static String dialog_search_objects_case_sensitive;
public static String dialog_search_objects_title;
// SSH/tunnel configuration dialog
public static String dialog_tunnel_checkbox_use_handler;
public static String dialog_tunnel_title;
// Application version-update dialog
public static String dialog_version_update_available_new_version;
public static String dialog_version_update_button_more_info;
public static String dialog_version_update_current_version;
public static String dialog_version_update_n_a;
public static String dialog_version_update_new_version;
public static String dialog_version_update_no_new_version;
public static String dialog_version_update_notes;
public static String dialog_version_update_press_more_info_;
public static String dialog_version_update_title;
public static String dialog_view_classpath_title;
// Navigator model tree labels
public static String model_html_read_database_meta_data;
public static String model_navigator_Description;
public static String model_navigator_Name;
public static String model_navigator_load_;
public static String model_navigator_load_items_;
public static String model_navigator_Model_root;
public static String model_navigator_Project;
public static String model_navigator_resource_exception_already_exists;
public static String model_navigator_Root;
// Project resources (bookmarks, scripts)
public static String model_project_bookmarks_folder;
public static String model_project_cant_open_bookmark;
public static String model_project_open_bookmark;
public static String model_project_Script;
public static String model_project_Scripts; public static String pref_page_confirmations_combo_always; public static String pref_page_confirmations_combo_never; public static String pref_page_confirmations_combo_prompt; public static String pref_page_confirmations_group_general_actions; public static String pref_page_confirmations_group_object_editor; public static String pref_page_database_general_separate_meta_connection; public static String pref_page_database_general_checkbox_case_sensitive_names; public static String pref_page_database_general_checkbox_rollback_on_error; public static String pref_page_database_general_checkbox_show_row_count; public static String pref_page_database_general_group_metadata; public static String pref_page_database_general_group_ordering; public static String pref_page_error_handle_name; public static String pref_page_error_handle_description; public static String pref_page_error_handle_group_timeouts_title; public static String pref_page_error_handle_connection_open_timeout_label; public static String pref_page_error_handle_connection_open_timeout_label_tip; public static String pref_page_error_handle_connection_close_timeout_label; public static String pref_page_error_handle_connection_close_timeout_label_tip; public static String pref_page_error_handle_connection_validate_timeout_label; public static String pref_page_error_handle_connection_validate_timeout_label_tip; public static String pref_page_error_handle_group_execute_title; public static String pref_page_error_handle_recover_enabled_label; public static String pref_page_error_handle_recover_enabled_tip; public static String pref_page_error_handle_recover_retry_count_label; public static String pref_page_error_handle_recover_retry_count_tip; public static String pref_page_error_handle_group_cancel_title; public static String pref_page_error_handle_cancel_check_timeout; public static String pref_page_error_handle_cancel_check_timeout_tip; public static String 
pref_page_query_manager_checkbox_ddl_executions; public static String pref_page_query_manager_checkbox_metadata_read; public static String pref_page_query_manager_checkbox_metadata_write; public static String pref_page_query_manager_checkbox_other; public static String pref_page_query_manager_checkbox_queries; public static String pref_page_query_manager_checkbox_scripts; public static String pref_page_query_manager_checkbox_sessions; public static String pref_page_query_manager_checkbox_transactions; public static String pref_page_query_manager_checkbox_user_queries; public static String pref_page_query_manager_checkbox_user_filtered; public static String pref_page_query_manager_checkbox_user_scripts; public static String pref_page_query_manager_checkbox_utility_functions; public static String pref_page_query_manager_group_object_types; public static String pref_page_query_manager_group_query_types; public static String pref_page_query_manager_group_settings; public static String pref_page_query_manager_group_storage; public static String pref_page_query_manager_checkbox_store_log_file; public static String pref_page_query_manager_logs_folder; public static String pref_page_query_manager_label_days_to_store_log; public static String pref_page_query_manager_label_entries_per_page; public static String pref_page_query_manager_log_file_hint; public static String pref_page_ui_general_checkbox_automatic_updates; public static String pref_page_ui_general_combo_language; public static String pref_page_ui_general_combo_language_tip; public static String pref_page_ui_general_keep_database_editors; public static String pref_page_ui_general_refresh_editor_on_open; public static String pref_page_ui_general_group_general; public static String pref_page_ui_general_group_language; public static String pref_page_ui_general_group_editors; public static String pref_page_ui_general_group_http_proxy; public static String pref_page_ui_general_label_proxy_host; public static String 
pref_page_ui_general_spinner_proxy_port; public static String pref_page_ui_general_label_proxy_user; public static String pref_page_ui_general_label_proxy_password; public static String pref_page_drivers_group_location; public static String runtime_jobs_connect_name; public static String runtime_jobs_connect_status_connected; public static String runtime_jobs_connect_status_error; public static String runtime_jobs_connect_thread_name; public static String runtime_jobs_disconnect_error; public static String runtime_jobs_disconnect_name; public static String toolbar_datasource_selector_action_read_databases; public static String toolbar_datasource_selector_combo_database_tooltip; public static String toolbar_datasource_selector_combo_datasource_tooltip; public static String toolbar_datasource_selector_empty; public static String toolbar_datasource_selector_error_change_database_message; public static String toolbar_datasource_selector_error_change_database_title; public static String toolbar_datasource_selector_error_database_not_found; public static String toolbar_datasource_selector_error_database_change_not_supported; public static String toolbar_datasource_selector_resultset_segment_size; public static String toolbar_datasource_selector_connected; public static String toolbar_datasource_selector_all; public static String toolbar_editors_sql_run_statement_name; public static String toolbar_editors_sql_run_statementNew_name; public static String toolbar_editors_sql_run_script_name; public static String toolbar_editors_sql_run_scriptNew_name; public static String toolbar_editors_sql_run_explain_name; public static String tools_script_execute_wizard_task_completed; public static String tools_wizard_dialog_button_start; public static String tools_wizard_error_task_error_message; public static String tools_wizard_error_task_error_title; public static String tools_wizard_error_task_canceled; public static String tools_wizard_log_process_exit_code; public static String 
tools_wizard_log_io_error; public static String tools_wizard_message_client_home_not_found; public static String tools_wizard_message_no_client_home; public static String tools_wizard_page_log_task_finished; public static String tools_wizard_page_log_task_log_reader; public static String tools_wizard_page_log_task_progress; public static String tools_wizard_page_log_task_progress_log; public static String tools_wizard_page_log_task_started_at; public static String ui_actions_exit_emergency_question; public static String dialog_connection_edit_driver_button; public static String dialog_connection_driver; public static String dialog_connection_advanced_settings; public static String dialog_connection_env_variables_hint; public static String editor_file_open_in_explorer; public static String editor_file_copy_path; public static String editor_file_rename; // New Connection Wizard public static String dialog_setting_connection_wizard_title; public static String dialog_setting_connection_general; public static String dialog_setting_connection_driver_properties_title; public static String dialog_setting_connection_driver_properties_description; public static String dialog_setting_connection_driver_properties_advanced; public static String dialog_setting_connection_driver_properties_advanced_tip; public static String dialog_setting_connection_driver_properties_docs_web_reference; public static String dialog_connection_network_title; public static String dialog_connection_wizard_final_label_connection_type; public static String dialog_connection_wizard_final_label_edit; public static String dialog_connection_wizard_final_label_connection_folder; public static String dialog_connection_wizard_final_label_connection; public static String dialog_connection_wizard_final_label_isolation_level; public static String dialog_connection_wizard_final_label_default_schema; public static String dialog_connection_wizard_final_label_keepalive; public static String 
dialog_connection_wizard_final_label_isolation_level_tooltip; public static String dialog_connection_wizard_final_label_default_schema_tooltip; public static String dialog_connection_wizard_final_label_keepalive_tooltip; public static String dialog_connection_wizard_final_label_bootstrap_query; public static String dialog_connection_wizard_configure; public static String dialog_connection_wizard_final_label_shell_command; public static String dialog_connection_wizard_connection_init_hint; public static String dialog_connection_wizard_connection_init_hint_tip; public static String dialog_connection_wizard_description; public static String dialog_connection_wizard_final_label_bootstrap_tooltip; public static String dialog_connection_wizard_socksproxy_host; public static String dialog_connection_wizard_socksproxy_port; public static String dialog_connection_wizard_socksproxy_username; public static String dialog_connection_wizard_socksproxy_password; public static String dialog_connection_driver_treecontrol_initialText; public static String dialog_connection_driver_project; // Connections public static String pref_page_database_client_name_group; public static String pref_page_database_client_name_group_description; public static String pref_page_database_label_disable_client_application_name; public static String pref_page_database_label_override_client_application_name; public static String pref_page_database_label_client_application_name; // MetaData public static String pref_page_database_general_separate_meta_connection_tip; public static String pref_page_database_general_checkbox_case_sensitive_names_tip; public static String pref_page_database_general_checkbox_show_row_count_tip; public static String pref_page_database_general_server_side_object_filters; public static String pref_page_database_general_server_side_object_filters_tip; public static String pref_page_database_general_group_query_metadata; public static String 
pref_page_database_general_use_column_names; public static String pref_page_database_general_use_column_names_tip; // EntityEditor public static String pref_page_ui_general_keep_database_editors_tip; public static String pref_page_ui_general_refresh_editor_on_open_tip; public static String pref_page_ui_general_show_full_name_in_editor; public static String pref_page_ui_general_show_table_grid; public static String pref_page_ui_general_show_preview_on_save; // Drivers public static String pref_page_ui_general_group_settings; public static String pref_page_ui_general_check_new_driver_versions; public static String pref_page_drivers_local_folder; public static String pref_page_drivers_group_file_repositories; public static String pref_page_drivers_button_add; public static String pref_page_drivers_label_enter_drivers_location_url; public static String pref_page_drivers_button_remove; // DriversMaven public static String pref_page_drivers_maven_group_repositories; public static String pref_page_drivers_maven_button_add; public static String pref_page_drivers_maven_label_enter_maven_repository_url; public static String pref_page_drivers_maven_label_bad_url; public static String pref_page_drivers_maven_label_bad_url_tip; public static String pref_page_drivers_maven_button_remove; // public static String pref_page_drivers_maven_button_disable; public static String pref_page_drivers_maven_button_up; public static String pref_page_drivers_maven_button_down; public static String pref_page_drivers_maven_group_properties; public static String pref_page_drivers_maven_label_name; public static String pref_page_drivers_maven_label_scope; public static String pref_page_drivers_maven_group_authentication; public static String pref_page_drivers_maven_label_user; public static String pref_page_drivers_maven_label_password; public static String pref_page_drivers_maven_label_enable; public static String pref_page_drivers_maven_label_disable; // DatabaseGeneral public static String 
pref_page_ui_general_group_task_bar; public static String pref_page_ui_general_label_enable_long_operations; public static String pref_page_ui_general_label_enable_long_operations_tip; public static String pref_page_ui_general_label_long_operation_timeout; public static String pref_page_ui_general_group_notifications; public static String pref_page_ui_general_label_enable_notifications; public static String pref_page_ui_general_label_enable_notifications_tip; public static String pref_page_ui_general_label_notifications_close_delay; public static String pref_page_ui_general_group_resources; public static String pref_page_ui_general_label_default_resource_encoding; public static String pref_page_ui_general_label_set_default_resource_encoding_tip; public static String pref_page_ui_general_group_debug_logs; public static String pref_page_ui_general_label_enable_debug_logs; public static String pref_page_ui_general_label_enable_debug_logs_tip; public static String pref_page_ui_general_label_log_file_location; public static String pref_page_ui_general_label_open_file_text; public static String pref_page_ui_general_label_options_take_effect_after_restart; public static String pref_page_ui_general_label_settings; public static String pref_page_database_general_label_sync_editor_connection_with_navigator; public static String pref_page_database_general_label_sync_editor_connection_with_navigator_tip; public static String pref_page_database_general_group_toolbars; public static String pref_page_database_general_label_show_general_toolbar_everywhere; public static String pref_page_database_general_label_show_general_toolbar_everywhere_tip; public static String pref_page_database_general_label_show_edit_toolbar; public static String pref_page_database_general_label_show_edit_toolbar_tip; public static String pref_page_database_general_label_database_selector_width; public static String pref_page_database_general_label_database_selector_width_tip; public static String 
pref_page_database_general_label_schema_selector_width; public static String pref_page_database_general_label_schema_selector_width_tip; // ConnectionTypes public static String pref_page_connection_types_label_table_column_name; public static String pref_page_connection_types_label_table_column_description; public static String pref_page_connection_types_label_delete_connection_type; public static String pref_page_connection_types_label_delete_connection_type_description; public static String pref_page_connection_types_group_settings; public static String pref_page_connection_types_label_name; public static String pref_page_connection_types_label_description; public static String pref_page_connection_types_label_color; public static String pref_page_connection_types_label_auto_commit_by_default; public static String pref_page_connection_types_label_confirm_sql_execution; public static String pref_page_connection_label_general; public static String pref_page_connection_label_use_environment; public static String transaction_info_dialog_checkbox_show_all_queries; public static String transaction_info_dialog_checkbox_show_previous_transactions; public static String transaction_info_dialog_label_otherwise; public static String transaction_info_dialog_label_show_all_transaction_queries; public static String transaction_log_dialog_header_transaction_log; // Preference/Properties public static String transaction_log_dialog_error_connect_to_a_database; public static String transaction_log_dialog_error_no_editor; public static String transaction_log_dialog_error_open_database; // Connection edit public static String dialog_connection_edit_title; public static String dialog_connection_edit_connection_settings_variables_hint_label; public static String dialog_connection_edit_wizard_conn_conf_general_link; public static String dialog_connection_edit_wizard_conn_conf_network_link; public static String dialog_connection_edit_wizard_general; public static String 
// --- tail of the CoreMessages NLS message-key declarations ---
// Every "public static String" field below is a message key: its value is
// injected reflectively from the CoreMessages resource bundle by
// NLS.initializeMessages() in the static initializer at the end of this class.
// Field names must match the bundle keys exactly, so do NOT rename them --
// several carry historical typos ("_lable", "_classfier", "_depencencies",
// "_dirver") that are mirrored in the .properties files.

// Connection edit wizard: bootstrap queries
dialog_connection_edit_wizard_general_bootstrap_query_title;
public static String dialog_connection_edit_wizard_general_bootstrap_query_sql_label;
public static String dialog_connection_edit_wizard_general_bootstrap_query_sql_title;
public static String dialog_connection_edit_wizard_general_bootstrap_query_ignore_error_lable; // sic: "lable"

// Connection edit wizard: shell commands
public static String dialog_connection_edit_wizard_shell_cmd;
public static String dialog_connection_edit_wizard_shell_cmd_pause_label;
public static String dialog_connection_edit_wizard_shell_cmd_pause_tooltip;
public static String dialog_connection_edit_wizard_shell_cmd_directory_label;
public static String dialog_connection_edit_wizard_shell_cmd_directory_title;
public static String dialog_connection_edit_wizard_shell_cmd_variables_hint_label;
public static String dialog_connection_edit_wizard_shell_cmd_variables_hint_title;

// Connection edit wizard: settings pages (page title + description pairs)
public static String dialog_connection_edit_wizard_connections;
public static String dialog_connection_edit_wizard_connections_description;
public static String dialog_connection_edit_wizard_metadata;
public static String dialog_connection_edit_wizard_metadata_description;
public static String dialog_connection_edit_wizard_resultset;
public static String dialog_connection_edit_wizard_resultset_description;
public static String dialog_connection_edit_wizard_editors;
public static String dialog_connection_edit_wizard_editors_description;
public static String dialog_connection_edit_wizard_data_format;
public static String dialog_connection_edit_wizard_data_format_description;
public static String dialog_connection_edit_wizard_presentation;
public static String dialog_connection_edit_wizard_presentation_description;
public static String dialog_connection_edit_wizard_sql_editor;
public static String dialog_connection_edit_wizard_sql_editor_description;
public static String dialog_connection_edit_wizard_sql_processing;
public static String dialog_connection_edit_wizard_sql_processing_description;

// Connection edit wizard: confirmation / password dialogs
public static String dialog_connection_edit_wizard_conn_change_title;
public static String dialog_connection_edit_wizard_conn_change_question;
public static String dialog_connection_edit_wizard_lock_pwd_title;
public static String dialog_connection_edit_wizard_bad_pwd_title;
public static String dialog_connection_edit_wizard_bad_pwd_msg;
public static String dialog_connection_edit_wizard_error_md5_title;
public static String dialog_connection_edit_wizard_error_md5_msg;
//Connection edit

// Driver edit
public static String dialog_edit_driver_setting;
public static String dialog_edit_driver_type_label;
public static String dialog_edit_driver_embedded_label;
public static String dialog_edit_driver_anonymous_label;
public static String dialog_edit_driver_anonymous_tip;
public static String dialog_edit_driver_description;
public static String dialog_edit_driver_edit_maven_title;
public static String dialog_edit_driver_edit_maven_group_id_label;
public static String dialog_edit_driver_edit_maven_artifact_id_label;
public static String dialog_edit_driver_edit_maven_classfier_label; // sic: "classfier"
public static String dialog_edit_driver_edit_maven_version_label;
public static String dialog_edit_driver_text_driver_library;
public static String dialog_edit_driver_info;
public static String dialog_edit_driver_driver;
public static String dialog_edit_driver_library;
public static String dialog_edit_driver_path;
public static String dialog_edit_driver_version;
public static String dialog_edit_driver_file;
public static String dialog_edit_driver_tab_depencencies; // sic: "depencencies"
public static String dialog_edit_driver_tab_depencencies_tooltip;
public static String dialog_edit_driver_tab_detail;
public static String dialog_edit_driver_tab_detail_tooltip;
public static String dialog_edit_driver_text_license;
// Driver edit

// Driver download
public static String dialog_driver_download_button_edit_dirver; // sic: "dirver"
public static String dialog_driver_download_button_add_jars;
public static String dialog_driver_download_wizard_title_setting;
public static String dialog_driver_download_wizard_title_upload_files;
public static String dialog_driver_download_wizard_title_setup_files;
public static String dialog_driver_download_wizard_download;
public static String dialog_driver_download_wizard_open_download;
public static String dialog_driver_download_page_vendor_link;
public static String dialog_driver_download_page_download_conf_link;
public static String dialog_driver_download_manual_page_config_driver_file;
public static String dialog_driver_download_manual_page_download_driver_file;
public static String dialog_driver_download_manual_page_download_config_driver_file;
public static String dialog_driver_download_manual_page_driver_file_missing_text;
public static String dialog_driver_download_manual_page_driver_file;
public static String dialog_driver_download_manual_page_column_file;
public static String dialog_driver_download_manual_page_column_required;
public static String dialog_driver_download_manual_page_column_description;
public static String dialog_driver_download_manual_page_yes;
public static String dialog_driver_download_manual_page_no;
public static String dialog_driver_download_auto_page_auto_download;
public static String dialog_driver_download_auto_page_download_driver_files;
public static String dialog_driver_download_auto_page_download_specific_driver_files;
public static String dialog_driver_download_auto_page_driver_file_missing_text;
public static String dialog_driver_download_auto_page_force_download;
public static String dialog_driver_download_auto_page_force_download_tooltip;
public static String dialog_driver_download_auto_page_required_files;
public static String dialog_driver_download_auto_page_change_driver_version_text;
public static String dialog_driver_download_auto_page_obtain_driver_files_text;
public static String dialog_driver_download_auto_page_cannot_resolve_libraries_text;
public static String dialog_driver_download_auto_page_driver_download_error;
public static String dialog_driver_download_auto_page_driver_download_error_msg;
public static String dialog_driver_download_auto_page_driver_security_warning;
public static String dialog_driver_download_auto_page_driver_security_warning_msg;
public static String dialog_driver_download_auto_page_download_rate;
public static String dialog_driver_download_auto_page_download_failed_msg;
// Driver download

public static String transaction_log_dialog_error_not_connected;

static {
    // initialize resource bundle
    NLS.initializeMessages(BUNDLE_NAME, CoreMessages.class);
}

// Non-instantiable: this class only exposes the static message fields above.
private CoreMessages() {
}
}
/*
 * Copyright (c) 2000, 2013, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.  Oracle designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Oracle in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

package com.sun.security.auth.module;

import java.util.*;
import java.io.IOException;
import javax.security.auth.*;
import javax.security.auth.callback.*;
import javax.security.auth.login.*;
import javax.security.auth.spi.*;
import java.security.Principal;
import com.sun.security.auth.NTUserPrincipal;
import com.sun.security.auth.NTSidUserPrincipal;
import com.sun.security.auth.NTDomainPrincipal;
import com.sun.security.auth.NTSidDomainPrincipal;
import com.sun.security.auth.NTSidPrimaryGroupPrincipal;
import com.sun.security.auth.NTSidGroupPrincipal;
import com.sun.security.auth.NTNumericCredential;

/**
 * <p> This <code>LoginModule</code>
 * renders a user's NT security information as some number of
 * <code>Principal</code>s
 * and associates them with a <code>Subject</code>.
 *
 * <p> This LoginModule recognizes the debug option.
 * If set to true in the login Configuration,
 * debug messages will be output to the output stream, System.out.
 *
 * <p> This LoginModule also recognizes the debugNative option.
 * If set to true in the login Configuration,
 * debug messages from the native component of the module
 * will be output to the output stream, System.out.
 *
 * @see javax.security.auth.spi.LoginModule
 */
@jdk.Exported
public class NTLoginModule implements LoginModule {

    private NTSystem ntSystem;

    // initial state
    private Subject subject;
    private CallbackHandler callbackHandler;
    private Map<String, ?> sharedState;
    private Map<String, ?> options;

    // configurable options
    private boolean debug = false;
    private boolean debugNative = false;

    // the authentication status
    private boolean succeeded = false;
    private boolean commitSucceeded = false;

    private NTUserPrincipal userPrincipal;           // user name
    private NTSidUserPrincipal userSID;              // user SID
    private NTDomainPrincipal userDomain;            // user domain
    private NTSidDomainPrincipal domainSID;          // domain SID
    private NTSidPrimaryGroupPrincipal primaryGroup; // primary group
    private NTSidGroupPrincipal[] groups;            // supplementary groups
    private NTNumericCredential iToken;              // impersonation token

    /**
     * Initialize this <code>LoginModule</code>.
     *
     * @param subject the <code>Subject</code> to be authenticated.
     *
     * @param callbackHandler a <code>CallbackHandler</code> for communicating
     *          with the end user (prompting for usernames and
     *          passwords, for example). This particular LoginModule only
     *          extracts the underlying NT system information, so this
     *          parameter is ignored.
     *
     * @param sharedState shared <code>LoginModule</code> state.
     *
     * @param options options specified in the login
     *          <code>Configuration</code> for this particular
     *          <code>LoginModule</code>.
     */
    public void initialize(Subject subject, CallbackHandler callbackHandler,
                           Map<String, ?> sharedState, Map<String, ?> options) {
        this.subject = subject;
        this.callbackHandler = callbackHandler;
        this.sharedState = sharedState;
        this.options = options;

        // initialize any configured options
        debug = "true".equalsIgnoreCase((String) options.get("debug"));
        debugNative = "true".equalsIgnoreCase((String) options.get("debugNative"));
        // native-level debugging implies module-level debugging
        if (debugNative) {
            debug = true;
        }
    }

    /**
     * Import underlying NT system identity information.
     *
     * @return true in all cases since this <code>LoginModule</code>
     *          should not be ignored.
     *
     * @exception FailedLoginException if the authentication fails.
     *
     * @exception LoginException if this <code>LoginModule</code>
     *          is unable to perform the authentication.
     */
    public boolean login() throws LoginException {
        succeeded = false; // indicate not yet successful

        // NOTE(review): the original code checked "ntSystem == null" right after
        // "new NTSystem(...)"; a constructor invocation can never yield null
        // (JLS 15.9), so that branch was unreachable and has been removed.
        // The debug message it contained is restored on the reachable failure
        // path below (getName() == null signals the native import failed).
        ntSystem = new NTSystem(debugNative);
        if (ntSystem.getName() == null) {
            if (debug) {
                System.out.println("\t\t[NTLoginModule] " +
                                   "Failed in NT login");
            }
            throw new FailedLoginException
                ("Failed in attempt to import the " +
                 "underlying NT system identity information");
        }

        userPrincipal = new NTUserPrincipal(ntSystem.getName());
        if (debug) {
            System.out.println("\t\t[NTLoginModule] " +
                               "succeeded importing info: ");
            System.out.println("\t\t\tuser name = " +
                               userPrincipal.getName());
        }

        // everything below is optional: only wrap the pieces of NT identity
        // information that the native layer actually supplied
        if (ntSystem.getUserSID() != null) {
            userSID = new NTSidUserPrincipal(ntSystem.getUserSID());
            if (debug) {
                System.out.println("\t\t\tuser SID = " + userSID.getName());
            }
        }
        if (ntSystem.getDomain() != null) {
            userDomain = new NTDomainPrincipal(ntSystem.getDomain());
            if (debug) {
                System.out.println("\t\t\tuser domain = " + userDomain.getName());
            }
        }
        if (ntSystem.getDomainSID() != null) {
            domainSID = new NTSidDomainPrincipal(ntSystem.getDomainSID());
            if (debug) {
                System.out.println("\t\t\tuser domain SID = " + domainSID.getName());
            }
        }
        if (ntSystem.getPrimaryGroupID() != null) {
            primaryGroup =
                new NTSidPrimaryGroupPrincipal(ntSystem.getPrimaryGroupID());
            if (debug) {
                System.out.println("\t\t\tuser primary group = " +
                                   primaryGroup.getName());
            }
        }
        if (ntSystem.getGroupIDs() != null &&
            ntSystem.getGroupIDs().length > 0) {

            String[] groupSIDs = ntSystem.getGroupIDs();
            groups = new NTSidGroupPrincipal[groupSIDs.length];
            for (int i = 0; i < groupSIDs.length; i++) {
                groups[i] = new NTSidGroupPrincipal(groupSIDs[i]);
                if (debug) {
                    System.out.println("\t\t\tuser group = " +
                                       groups[i].getName());
                }
            }
        }
        // token value 0 means "no impersonation token available"
        if (ntSystem.getImpersonationToken() != 0) {
            iToken = new NTNumericCredential(ntSystem.getImpersonationToken());
            if (debug) {
                System.out.println("\t\t\timpersonation token = " +
                                   ntSystem.getImpersonationToken());
            }
        }

        succeeded = true;
        return succeeded;
    }

    /**
     * <p> This method is called if the LoginContext's
     * overall authentication succeeded
     * (the relevant REQUIRED, REQUISITE, SUFFICIENT and OPTIONAL LoginModules
     * succeeded).
     *
     * <p> If this LoginModule's own authentication attempt
     * succeeded (checked by retrieving the private state saved by the
     * <code>login</code> method), then this method associates some
     * number of various <code>Principal</code>s
     * with the <code>Subject</code> located in the
     * <code>LoginModuleContext</code>.  If this LoginModule's own
     * authentication attempted failed, then this method removes
     * any state that was originally saved.
     *
     * @exception LoginException if the commit fails.
     *
     * @return true if this LoginModule's own login and commit
     *          attempts succeeded, or false otherwise.
     */
    public boolean commit() throws LoginException {
        if (!succeeded) {
            if (debug) {
                System.out.println("\t\t[NTLoginModule]: " +
                    "did not add any Principals to Subject " +
                    "because own authentication failed.");
            }
            return false;
        }
        if (subject.isReadOnly()) {
            throw new LoginException("Subject is ReadOnly");
        }

        Set<Principal> principals = subject.getPrincipals();
        // we must have a userPrincipal (login() guarantees it is non-null) -
        // everything else is optional
        addIfAbsent(principals, userPrincipal);
        addIfAbsent(principals, userSID);
        addIfAbsent(principals, userDomain);
        addIfAbsent(principals, domainSID);
        addIfAbsent(principals, primaryGroup);
        if (groups != null) {
            for (NTSidGroupPrincipal group : groups) {
                addIfAbsent(principals, group);
            }
        }

        Set<Object> pubCreds = subject.getPublicCredentials();
        if (iToken != null && !pubCreds.contains(iToken)) {
            pubCreds.add(iToken);
        }

        commitSucceeded = true;
        return true;
    }

    /**
     * <p> This method is called if the LoginContext's
     * overall authentication failed.
     * (the relevant REQUIRED, REQUISITE, SUFFICIENT and OPTIONAL LoginModules
     * did not succeed).
     *
     * <p> If this LoginModule's own authentication attempt
     * succeeded (checked by retrieving the private state saved by the
     * <code>login</code> and <code>commit</code> methods),
     * then this method cleans up any state that was originally saved.
     *
     * @exception LoginException if the abort fails.
     *
     * @return false if this LoginModule's own login and/or commit attempts
     *          failed, and true otherwise.
     */
    public boolean abort() throws LoginException {
        if (debug) {
            System.out.println("\t\t[NTLoginModule]: " +
                "aborted authentication attempt");
        }

        if (!succeeded) {
            return false;
        } else if (!commitSucceeded) {
            // own login succeeded but the overall authentication (or another
            // module) failed before our commit took effect: discard state
            clearState();
            succeeded = false;
        } else {
            // overall authentication succeeded and commit succeeded,
            // but someone else's commit failed
            logout();
        }
        return succeeded;
    }

    /**
     * Logout the user.
     *
     * <p> This method removes the <code>NTUserPrincipal</code>,
     * <code>NTDomainPrincipal</code>, <code>NTSidUserPrincipal</code>,
     * <code>NTSidDomainPrincipal</code>, <code>NTSidGroupPrincipal</code>s,
     * and <code>NTSidPrimaryGroupPrincipal</code>
     * that may have been added by the <code>commit</code> method.
     *
     * @exception LoginException if the logout fails.
     *
     * @return true in all cases since this <code>LoginModule</code>
     *          should not be ignored.
     */
    public boolean logout() throws LoginException {
        if (subject.isReadOnly()) {
            throw new LoginException("Subject is ReadOnly");
        }

        Set<Principal> principals = subject.getPrincipals();
        removeIfPresent(principals, userPrincipal);
        removeIfPresent(principals, userSID);
        removeIfPresent(principals, userDomain);
        removeIfPresent(principals, domainSID);
        removeIfPresent(principals, primaryGroup);
        if (groups != null) {
            for (NTSidGroupPrincipal group : groups) {
                removeIfPresent(principals, group);
            }
        }

        Set<Object> pubCreds = subject.getPublicCredentials();
        // guard against a null token: contains(null) on the Subject's
        // credential set is never needed when nothing was committed
        if (iToken != null && pubCreds.contains(iToken)) {
            pubCreds.remove(iToken);
        }

        succeeded = false;
        commitSucceeded = false;
        clearState();

        if (debug) {
            System.out.println("\t\t[NTLoginModule] " +
                "completed logout processing");
        }
        return true;
    }

    // Add value to set unless it is null or already present.
    private static <T> void addIfAbsent(Set<T> set, T value) {
        if (value != null && !set.contains(value)) {
            set.add(value);
        }
    }

    // Remove value from set if it is non-null and present.
    private static <T> void removeIfPresent(Set<T> set, T value) {
        if (value != null && set.contains(value)) {
            set.remove(value);
        }
    }

    // Drop all cached NT identity state so it can be garbage-collected.
    private void clearState() {
        ntSystem = null;
        userPrincipal = null;
        userSID = null;
        userDomain = null;
        domainSID = null;
        primaryGroup = null;
        groups = null;
        iToken = null;
    }
}
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.hive; import com.facebook.presto.cache.CacheConfig; import com.facebook.presto.orc.OrcWriteValidation.OrcWriteValidationMode; import com.facebook.presto.spi.ConnectorSession; import com.facebook.presto.spi.PrestoException; import com.facebook.presto.spi.schedule.NodeSelectionStrategy; import com.facebook.presto.spi.session.PropertyMetadata; import com.google.common.collect.ImmutableList; import io.airlift.units.DataSize; import io.airlift.units.Duration; import javax.inject.Inject; import java.util.List; import java.util.Optional; import java.util.concurrent.ThreadLocalRandom; import static com.facebook.presto.common.type.DoubleType.DOUBLE; import static com.facebook.presto.common.type.VarcharType.VARCHAR; import static com.facebook.presto.common.type.VarcharType.createUnboundedVarcharType; import static com.facebook.presto.hive.HiveSessionProperties.InsertExistingPartitionsBehavior.APPEND; import static com.facebook.presto.hive.HiveSessionProperties.InsertExistingPartitionsBehavior.ERROR; import static com.facebook.presto.hive.HiveSessionProperties.InsertExistingPartitionsBehavior.OVERWRITE; import static com.facebook.presto.hive.metastore.MetastoreUtil.METASTORE_HEADERS; import static com.facebook.presto.spi.StandardErrorCode.INVALID_SESSION_PROPERTY; import static com.facebook.presto.spi.session.PropertyMetadata.booleanProperty; import static 
com.facebook.presto.spi.session.PropertyMetadata.integerProperty; import static com.facebook.presto.spi.session.PropertyMetadata.stringProperty; import static com.google.common.base.Preconditions.checkArgument; import static java.lang.String.format; import static java.util.Locale.ENGLISH; public final class HiveSessionProperties { private static final String IGNORE_TABLE_BUCKETING = "ignore_table_bucketing"; private static final String MIN_BUCKET_COUNT_TO_NOT_IGNORE_TABLE_BUCKETING = "min_bucket_count_to_not_ignore_table_bucketing"; private static final String BUCKET_EXECUTION_ENABLED = "bucket_execution_enabled"; private static final String NODE_SELECTION_STRATEGY = "node_selection_strategy"; private static final String INSERT_EXISTING_PARTITIONS_BEHAVIOR = "insert_existing_partitions_behavior"; private static final String ORC_BLOOM_FILTERS_ENABLED = "orc_bloom_filters_enabled"; private static final String ORC_MAX_MERGE_DISTANCE = "orc_max_merge_distance"; private static final String ORC_MAX_BUFFER_SIZE = "orc_max_buffer_size"; private static final String ORC_STREAM_BUFFER_SIZE = "orc_stream_buffer_size"; private static final String ORC_TINY_STRIPE_THRESHOLD = "orc_tiny_stripe_threshold"; private static final String ORC_MAX_READ_BLOCK_SIZE = "orc_max_read_block_size"; private static final String ORC_LAZY_READ_SMALL_RANGES = "orc_lazy_read_small_ranges"; private static final String ORC_ZSTD_JNI_DECOMPRESSION_ENABLED = "orc_zstd_jni_decompression_enabled"; private static final String ORC_STRING_STATISTICS_LIMIT = "orc_string_statistics_limit"; private static final String ORC_OPTIMIZED_WRITER_ENABLED = "orc_optimized_writer_enabled"; private static final String ORC_OPTIMIZED_WRITER_VALIDATE = "orc_optimized_writer_validate"; private static final String ORC_OPTIMIZED_WRITER_VALIDATE_PERCENTAGE = "orc_optimized_writer_validate_percentage"; private static final String ORC_OPTIMIZED_WRITER_VALIDATE_MODE = "orc_optimized_writer_validate_mode"; private static final String 
ORC_OPTIMIZED_WRITER_MIN_STRIPE_SIZE = "orc_optimized_writer_min_stripe_size"; private static final String ORC_OPTIMIZED_WRITER_MAX_STRIPE_SIZE = "orc_optimized_writer_max_stripe_size"; private static final String ORC_OPTIMIZED_WRITER_MAX_STRIPE_ROWS = "orc_optimized_writer_max_stripe_rows"; private static final String ORC_OPTIMIZED_WRITER_MAX_DICTIONARY_MEMORY = "orc_optimized_writer_max_dictionary_memory"; private static final String PAGEFILE_WRITER_MAX_STRIPE_SIZE = "pagefile_writer_max_stripe_size"; public static final String HIVE_STORAGE_FORMAT = "hive_storage_format"; private static final String COMPRESSION_CODEC = "compression_codec"; private static final String ORC_COMPRESSION_CODEC = "orc_compression_codec"; public static final String RESPECT_TABLE_FORMAT = "respect_table_format"; private static final String CREATE_EMPTY_BUCKET_FILES = "create_empty_bucket_files"; private static final String PARQUET_USE_COLUMN_NAME = "parquet_use_column_names"; private static final String PARQUET_FAIL_WITH_CORRUPTED_STATISTICS = "parquet_fail_with_corrupted_statistics"; private static final String PARQUET_MAX_READ_BLOCK_SIZE = "parquet_max_read_block_size"; private static final String PARQUET_WRITER_BLOCK_SIZE = "parquet_writer_block_size"; private static final String PARQUET_WRITER_PAGE_SIZE = "parquet_writer_page_size"; private static final String PARQUET_OPTIMIZED_WRITER_ENABLED = "parquet_optimized_writer_enabled"; private static final String MAX_SPLIT_SIZE = "max_split_size"; private static final String MAX_INITIAL_SPLIT_SIZE = "max_initial_split_size"; public static final String RCFILE_OPTIMIZED_WRITER_ENABLED = "rcfile_optimized_writer_enabled"; private static final String RCFILE_OPTIMIZED_WRITER_VALIDATE = "rcfile_optimized_writer_validate"; private static final String SORTED_WRITING_ENABLED = "sorted_writing_enabled"; public static final String SORTED_WRITE_TO_TEMP_PATH_ENABLED = "sorted_write_to_temp_path_enabled"; public static final String 
SORTED_WRITE_TEMP_PATH_SUBDIRECTORY_COUNT = "sorted_write_temp_path_subdirectory_count"; private static final String STATISTICS_ENABLED = "statistics_enabled"; private static final String PARTITION_STATISTICS_SAMPLE_SIZE = "partition_statistics_sample_size"; private static final String IGNORE_CORRUPTED_STATISTICS = "ignore_corrupted_statistics"; public static final String COLLECT_COLUMN_STATISTICS_ON_WRITE = "collect_column_statistics_on_write"; public static final String PARTITION_STATISTICS_BASED_OPTIMIZATION_ENABLED = "partition_stats_based_optimization_enabled"; private static final String OPTIMIZE_MISMATCHED_BUCKET_COUNT = "optimize_mismatched_bucket_count"; private static final String S3_SELECT_PUSHDOWN_ENABLED = "s3_select_pushdown_enabled"; public static final String SHUFFLE_PARTITIONED_COLUMNS_FOR_TABLE_WRITE = "shuffle_partitioned_columns_for_table_write"; public static final String TEMPORARY_STAGING_DIRECTORY_ENABLED = "temporary_staging_directory_enabled"; private static final String TEMPORARY_STAGING_DIRECTORY_PATH = "temporary_staging_directory_path"; private static final String TEMPORARY_TABLE_SCHEMA = "temporary_table_schema"; private static final String TEMPORARY_TABLE_STORAGE_FORMAT = "temporary_table_storage_format"; private static final String TEMPORARY_TABLE_COMPRESSION_CODEC = "temporary_table_compression_codec"; private static final String TEMPORARY_TABLE_CREATE_EMPTY_BUCKET_FILES = "temporary_table_create_empty_bucket_files"; private static final String USE_PAGEFILE_FOR_HIVE_UNSUPPORTED_TYPE = "use_pagefile_for_hive_unsupported_type"; public static final String PUSHDOWN_FILTER_ENABLED = "pushdown_filter_enabled"; public static final String RANGE_FILTERS_ON_SUBSCRIPTS_ENABLED = "range_filters_on_subscripts_enabled"; public static final String ADAPTIVE_FILTER_REORDERING_ENABLED = "adaptive_filter_reordering_enabled"; public static final String VIRTUAL_BUCKET_COUNT = "virtual_bucket_count"; public static final String 
MAX_BUCKETS_FOR_GROUPED_EXECUTION = "max_buckets_for_grouped_execution"; public static final String OFFLINE_DATA_DEBUG_MODE_ENABLED = "offline_data_debug_mode_enabled"; public static final String FAIL_FAST_ON_INSERT_INTO_IMMUTABLE_PARTITIONS_ENABLED = "fail_fast_on_insert_into_immutable_partitions_enabled"; public static final String USE_LIST_DIRECTORY_CACHE = "use_list_directory_cache"; private static final String PARQUET_BATCH_READ_OPTIMIZATION_ENABLED = "parquet_batch_read_optimization_enabled"; private static final String PARQUET_BATCH_READER_VERIFICATION_ENABLED = "parquet_batch_reader_verification_enabled"; private static final String BUCKET_FUNCTION_TYPE_FOR_EXCHANGE = "bucket_function_type_for_exchange"; public static final String PARQUET_DEREFERENCE_PUSHDOWN_ENABLED = "parquet_dereference_pushdown_enabled"; public static final String IGNORE_UNREADABLE_PARTITION = "ignore_unreadable_partition"; public static final String PARTIAL_AGGREGATION_PUSHDOWN_ENABLED = "partial_aggregation_pushdown_enabled"; public static final String PARTIAL_AGGREGATION_PUSHDOWN_FOR_VARIABLE_LENGTH_DATATYPES_ENABLED = "partial_aggregation_pushdown_for_variable_length_datatypes_enabled"; public static final String FILE_RENAMING_ENABLED = "file_renaming_enabled"; public static final String PREFER_MANIFESTS_TO_LIST_FILES = "prefer_manifests_to_list_files"; public static final String MANIFEST_VERIFICATION_ENABLED = "manifest_verification_enabled"; public static final String NEW_PARTITION_USER_SUPPLIED_PARAMETER = "new_partition_user_supplied_parameter"; public static final String OPTIMIZED_PARTITION_UPDATE_SERIALIZATION_ENABLED = "optimized_partition_update_serialization_enabled"; public static final String PARTITION_LEASE_DURATION = "partition_lease_duration"; public static final String CACHE_ENABLED = "cache_enabled"; public static final String ENABLE_LOOSE_MEMORY_BASED_ACCOUNTING = "enable_loose_memory_based_accounting"; public static final String 
MATERIALIZED_VIEW_MISSING_PARTITIONS_THRESHOLD = "materialized_view_missing_partitions_threshold"; public static final String VERBOSE_RUNTIME_STATS_ENABLED = "verbose_runtime_stats_enabled"; private static final String DWRF_WRITER_STRIPE_CACHE_ENABLED = "dwrf_writer_stripe_cache_enabled"; private static final String DWRF_WRITER_STRIPE_CACHE_SIZE = "dwrf_writer_stripe_cache_size"; private final List<PropertyMetadata<?>> sessionProperties; public enum InsertExistingPartitionsBehavior { ERROR, APPEND, OVERWRITE, /**/; public static InsertExistingPartitionsBehavior valueOf(String value, boolean immutablePartition) { InsertExistingPartitionsBehavior enumValue = valueOf(value.toUpperCase(ENGLISH)); if (immutablePartition) { checkArgument(enumValue != APPEND, format("Presto is configured to treat Hive partitions as immutable. %s is not allowed to be set to %s", INSERT_EXISTING_PARTITIONS_BEHAVIOR, APPEND)); } return enumValue; } } @Inject public HiveSessionProperties(HiveClientConfig hiveClientConfig, OrcFileWriterConfig orcFileWriterConfig, ParquetFileWriterConfig parquetFileWriterConfig, CacheConfig cacheConfig) { sessionProperties = ImmutableList.of( booleanProperty( IGNORE_TABLE_BUCKETING, "Ignore table bucketing to enable reading from unbucketed partitions", hiveClientConfig.isIgnoreTableBucketing(), false), integerProperty( MIN_BUCKET_COUNT_TO_NOT_IGNORE_TABLE_BUCKETING, "Ignore table bucketing when table bucket count is less than the value specified", hiveClientConfig.getMinBucketCountToNotIgnoreTableBucketing(), true), booleanProperty( BUCKET_EXECUTION_ENABLED, "Enable bucket-aware execution: only use a single worker per bucket", hiveClientConfig.isBucketExecutionEnabled(), false), new PropertyMetadata<>( NODE_SELECTION_STRATEGY, "Node affinity selection strategy", VARCHAR, NodeSelectionStrategy.class, hiveClientConfig.getNodeSelectionStrategy(), false, value -> NodeSelectionStrategy.valueOf((String) value), NodeSelectionStrategy::toString), new 
PropertyMetadata<>( INSERT_EXISTING_PARTITIONS_BEHAVIOR, "Behavior on insert existing partitions; this session property doesn't control behavior on insert existing unpartitioned table", VARCHAR, InsertExistingPartitionsBehavior.class, getDefaultInsertExistingPartitionsBehavior(hiveClientConfig), false, value -> InsertExistingPartitionsBehavior.valueOf((String) value, hiveClientConfig.isImmutablePartitions()), InsertExistingPartitionsBehavior::toString), booleanProperty( ORC_BLOOM_FILTERS_ENABLED, "ORC: Enable bloom filters for predicate pushdown", hiveClientConfig.isOrcBloomFiltersEnabled(), false), dataSizeSessionProperty( ORC_MAX_MERGE_DISTANCE, "ORC: Maximum size of gap between two reads to merge into a single read", hiveClientConfig.getOrcMaxMergeDistance(), false), dataSizeSessionProperty( ORC_MAX_BUFFER_SIZE, "ORC: Maximum size of a single read", hiveClientConfig.getOrcMaxBufferSize(), false), dataSizeSessionProperty( ORC_STREAM_BUFFER_SIZE, "ORC: Size of buffer for streaming reads", hiveClientConfig.getOrcStreamBufferSize(), false), dataSizeSessionProperty( ORC_TINY_STRIPE_THRESHOLD, "ORC: Threshold below which an ORC stripe or file will read in its entirety", hiveClientConfig.getOrcTinyStripeThreshold(), false), dataSizeSessionProperty( ORC_MAX_READ_BLOCK_SIZE, "ORC: Soft max size of Presto blocks produced by ORC reader", hiveClientConfig.getOrcMaxReadBlockSize(), false), booleanProperty( ORC_LAZY_READ_SMALL_RANGES, "Experimental: ORC: Read small file segments lazily", hiveClientConfig.isOrcLazyReadSmallRanges(), false), dataSizeSessionProperty( ORC_STRING_STATISTICS_LIMIT, "ORC: Maximum size of string statistics; drop if exceeding", orcFileWriterConfig.getStringStatisticsLimit(), false), booleanProperty( ORC_OPTIMIZED_WRITER_ENABLED, "Experimental: ORC: Enable optimized writer", hiveClientConfig.isOrcOptimizedWriterEnabled(), false), booleanProperty( ORC_OPTIMIZED_WRITER_VALIDATE, "Experimental: ORC: Force all validation for files", 
hiveClientConfig.getOrcWriterValidationPercentage() > 0.0, false), new PropertyMetadata<>( ORC_OPTIMIZED_WRITER_VALIDATE_PERCENTAGE, "Experimental: ORC: sample percentage for validation for files", DOUBLE, Double.class, hiveClientConfig.getOrcWriterValidationPercentage(), false, value -> { double doubleValue = ((Number) value).doubleValue(); if (doubleValue < 0.0 || doubleValue > 100.0) { throw new PrestoException( INVALID_SESSION_PROPERTY, format("%s must be between 0.0 and 100.0 inclusive: %s", ORC_OPTIMIZED_WRITER_VALIDATE_PERCENTAGE, doubleValue)); } return doubleValue; }, value -> value), stringProperty( ORC_OPTIMIZED_WRITER_VALIDATE_MODE, "Experimental: ORC: Level of detail in ORC validation", hiveClientConfig.getOrcWriterValidationMode().toString(), false), dataSizeSessionProperty( ORC_OPTIMIZED_WRITER_MIN_STRIPE_SIZE, "Experimental: ORC: Min stripe size", orcFileWriterConfig.getStripeMinSize(), false), dataSizeSessionProperty( ORC_OPTIMIZED_WRITER_MAX_STRIPE_SIZE, "Experimental: ORC: Max stripe size", orcFileWriterConfig.getStripeMaxSize(), false), integerProperty( ORC_OPTIMIZED_WRITER_MAX_STRIPE_ROWS, "Experimental: ORC: Max stripe row count", orcFileWriterConfig.getStripeMaxRowCount(), false), dataSizeSessionProperty( ORC_OPTIMIZED_WRITER_MAX_DICTIONARY_MEMORY, "Experimental: ORC: Max dictionary memory", orcFileWriterConfig.getDictionaryMaxMemory(), false), dataSizeSessionProperty( PAGEFILE_WRITER_MAX_STRIPE_SIZE, "PAGEFILE: Max stripe size", hiveClientConfig.getPageFileStripeMaxSize(), false), stringProperty( HIVE_STORAGE_FORMAT, "Default storage format for new tables or partitions", hiveClientConfig.getHiveStorageFormat().toString(), false), new PropertyMetadata<>( COMPRESSION_CODEC, "The compression codec to use when writing files", VARCHAR, HiveCompressionCodec.class, hiveClientConfig.getCompressionCodec(), false, value -> HiveCompressionCodec.valueOf(((String) value).toUpperCase()), HiveCompressionCodec::name), new PropertyMetadata<>( 
ORC_COMPRESSION_CODEC, "The preferred compression codec to use when writing ORC and DWRF files", VARCHAR, HiveCompressionCodec.class, hiveClientConfig.getOrcCompressionCodec(), false, value -> HiveCompressionCodec.valueOf(((String) value).toUpperCase()), HiveCompressionCodec::name), booleanProperty( RESPECT_TABLE_FORMAT, "Write new partitions using table format rather than default storage format", hiveClientConfig.isRespectTableFormat(), false), booleanProperty( CREATE_EMPTY_BUCKET_FILES, "Create empty files for buckets that have no data", hiveClientConfig.isCreateEmptyBucketFiles(), false), booleanProperty( PARQUET_USE_COLUMN_NAME, "Experimental: Parquet: Access Parquet columns using names from the file", hiveClientConfig.isUseParquetColumnNames(), false), booleanProperty( PARQUET_FAIL_WITH_CORRUPTED_STATISTICS, "Parquet: Fail when scanning Parquet files with corrupted statistics", hiveClientConfig.isFailOnCorruptedParquetStatistics(), false), dataSizeSessionProperty( PARQUET_MAX_READ_BLOCK_SIZE, "Parquet: Maximum size of a block to read", hiveClientConfig.getParquetMaxReadBlockSize(), false), dataSizeSessionProperty( PARQUET_WRITER_BLOCK_SIZE, "Parquet: Writer block size", parquetFileWriterConfig.getBlockSize(), false), dataSizeSessionProperty( PARQUET_WRITER_PAGE_SIZE, "Parquet: Writer page size", parquetFileWriterConfig.getPageSize(), false), dataSizeSessionProperty( MAX_SPLIT_SIZE, "Max split size", hiveClientConfig.getMaxSplitSize(), true), dataSizeSessionProperty( MAX_INITIAL_SPLIT_SIZE, "Max initial split size", hiveClientConfig.getMaxInitialSplitSize(), true), booleanProperty( RCFILE_OPTIMIZED_WRITER_ENABLED, "Experimental: RCFile: Enable optimized writer", hiveClientConfig.isRcfileOptimizedWriterEnabled(), false), booleanProperty( RCFILE_OPTIMIZED_WRITER_VALIDATE, "Experimental: RCFile: Validate writer files", hiveClientConfig.isRcfileWriterValidate(), false), booleanProperty( SORTED_WRITING_ENABLED, "Enable writing to bucketed sorted tables", 
hiveClientConfig.isSortedWritingEnabled(), false), booleanProperty( SORTED_WRITE_TO_TEMP_PATH_ENABLED, "Enable writing temp files to temp path when writing to bucketed sorted tables", hiveClientConfig.isSortedWriteToTempPathEnabled(), false), integerProperty( SORTED_WRITE_TEMP_PATH_SUBDIRECTORY_COUNT, "Number of directories per partition for temp files generated by writing sorted table", hiveClientConfig.getSortedWriteTempPathSubdirectoryCount(), false), booleanProperty( STATISTICS_ENABLED, "Experimental: Expose table statistics", hiveClientConfig.isTableStatisticsEnabled(), false), integerProperty( PARTITION_STATISTICS_SAMPLE_SIZE, "Maximum sample size of the partitions column statistics", hiveClientConfig.getPartitionStatisticsSampleSize(), false), booleanProperty( IGNORE_CORRUPTED_STATISTICS, "Experimental: Ignore corrupted statistics rather than failing", hiveClientConfig.isIgnoreCorruptedStatistics(), false), booleanProperty( COLLECT_COLUMN_STATISTICS_ON_WRITE, "Experimental: Enables automatic column level statistics collection on write", hiveClientConfig.isCollectColumnStatisticsOnWrite(), false), booleanProperty( PARTITION_STATISTICS_BASED_OPTIMIZATION_ENABLED, "Enables partition stats based optimization, including partition pruning and predicate stripping", hiveClientConfig.isPartitionStatisticsBasedOptimizationEnabled(), false), booleanProperty( OPTIMIZE_MISMATCHED_BUCKET_COUNT, "Experimental: Enable optimization to avoid shuffle when bucket count is compatible but not the same", hiveClientConfig.isOptimizeMismatchedBucketCount(), false), booleanProperty( S3_SELECT_PUSHDOWN_ENABLED, "S3 Select pushdown enabled", hiveClientConfig.isS3SelectPushdownEnabled(), false), booleanProperty( TEMPORARY_STAGING_DIRECTORY_ENABLED, "Should use temporary staging directory for write operations", hiveClientConfig.isTemporaryStagingDirectoryEnabled(), false), stringProperty( TEMPORARY_STAGING_DIRECTORY_PATH, "Temporary staging directory location", 
hiveClientConfig.getTemporaryStagingDirectoryPath(), false), stringProperty( TEMPORARY_TABLE_SCHEMA, "Schema where to create temporary tables", hiveClientConfig.getTemporaryTableSchema(), false), new PropertyMetadata<>( TEMPORARY_TABLE_STORAGE_FORMAT, "Storage format used to store data in temporary tables", VARCHAR, HiveStorageFormat.class, hiveClientConfig.getTemporaryTableStorageFormat(), false, value -> HiveStorageFormat.valueOf(((String) value).toUpperCase()), HiveStorageFormat::name), new PropertyMetadata<>( TEMPORARY_TABLE_COMPRESSION_CODEC, "Compression codec used to store data in temporary tables", VARCHAR, HiveCompressionCodec.class, hiveClientConfig.getTemporaryTableCompressionCodec(), false, value -> HiveCompressionCodec.valueOf(((String) value).toUpperCase()), HiveCompressionCodec::name), booleanProperty( TEMPORARY_TABLE_CREATE_EMPTY_BUCKET_FILES, "Create empty files when there is no data for temporary table buckets", hiveClientConfig.isCreateEmptyBucketFilesForTemporaryTable(), false), booleanProperty( USE_PAGEFILE_FOR_HIVE_UNSUPPORTED_TYPE, "Automatically switch to PAGEFILE format for materialized exchange when encountering unsupported types", hiveClientConfig.getUsePageFileForHiveUnsupportedType(), true), booleanProperty( PUSHDOWN_FILTER_ENABLED, "Experimental: enable complex filter pushdown", hiveClientConfig.isPushdownFilterEnabled(), false), booleanProperty( RANGE_FILTERS_ON_SUBSCRIPTS_ENABLED, "Experimental: enable pushdown of range filters on subscripts (a[2] = 5) into ORC column readers", hiveClientConfig.isRangeFiltersOnSubscriptsEnabled(), false), booleanProperty( ADAPTIVE_FILTER_REORDERING_ENABLED, "Experimental: enable adaptive filter reordering", hiveClientConfig.isAdaptiveFilterReorderingEnabled(), false), integerProperty( VIRTUAL_BUCKET_COUNT, "Number of virtual bucket assigned for unbucketed tables", 0, false), integerProperty( MAX_BUCKETS_FOR_GROUPED_EXECUTION, "maximum total buckets to allow using grouped execution", 
hiveClientConfig.getMaxBucketsForGroupedExecution(), false), booleanProperty( OFFLINE_DATA_DEBUG_MODE_ENABLED, "allow reading from tables or partitions that are marked as offline or not readable", false, true), booleanProperty( ORC_ZSTD_JNI_DECOMPRESSION_ENABLED, "use JNI based zstd decompression for reading ORC files", hiveClientConfig.isZstdJniDecompressionEnabled(), true), booleanProperty( SHUFFLE_PARTITIONED_COLUMNS_FOR_TABLE_WRITE, "Shuffle the data on partitioned columns", false, false), booleanProperty( FAIL_FAST_ON_INSERT_INTO_IMMUTABLE_PARTITIONS_ENABLED, "Fail fast when trying to insert into an immutable partition. Increases load on the metastore", hiveClientConfig.isFailFastOnInsertIntoImmutablePartitionsEnabled(), false), booleanProperty( USE_LIST_DIRECTORY_CACHE, "Use list directory cache if available when set to true", !hiveClientConfig.getFileStatusCacheTables().isEmpty(), false), booleanProperty( PARQUET_OPTIMIZED_WRITER_ENABLED, "Experimental: Enable optimized writer", parquetFileWriterConfig.isParquetOptimizedWriterEnabled(), false), booleanProperty( PARQUET_BATCH_READ_OPTIMIZATION_ENABLED, "Is Parquet batch read optimization enabled", hiveClientConfig.isParquetBatchReadOptimizationEnabled(), false), booleanProperty( PARQUET_BATCH_READER_VERIFICATION_ENABLED, "Is Parquet batch reader verification enabled? 
This is for testing purposes only, not to be used in production", hiveClientConfig.isParquetBatchReaderVerificationEnabled(), false), booleanProperty( IGNORE_UNREADABLE_PARTITION, "Ignore unreadable partitions and report as warnings instead of failing the query", hiveClientConfig.isIgnoreUnreadablePartition(), false), new PropertyMetadata<>( BUCKET_FUNCTION_TYPE_FOR_EXCHANGE, "hash function type for bucketed table exchange", VARCHAR, BucketFunctionType.class, hiveClientConfig.getBucketFunctionTypeForExchange(), false, value -> BucketFunctionType.valueOf((String) value), BucketFunctionType::toString), booleanProperty( PARQUET_DEREFERENCE_PUSHDOWN_ENABLED, "Is dereference pushdown expression pushdown into Parquet reader enabled?", hiveClientConfig.isParquetDereferencePushdownEnabled(), false), booleanProperty( PARTIAL_AGGREGATION_PUSHDOWN_ENABLED, "Is partial aggregation pushdown enabled for Hive file formats", hiveClientConfig.isPartialAggregationPushdownEnabled(), false), booleanProperty( PARTIAL_AGGREGATION_PUSHDOWN_FOR_VARIABLE_LENGTH_DATATYPES_ENABLED, "Is partial aggregation pushdown enabled for variable length datatypes", hiveClientConfig.isPartialAggregationPushdownForVariableLengthDatatypesEnabled(), false), booleanProperty( FILE_RENAMING_ENABLED, "Enable renaming the files written by writers", hiveClientConfig.isFileRenamingEnabled(), false), booleanProperty( PREFER_MANIFESTS_TO_LIST_FILES, "Prefer to fetch the list of file names and sizes from manifest", hiveClientConfig.isPreferManifestsToListFiles(), false), booleanProperty( MANIFEST_VERIFICATION_ENABLED, "Enable manifest verification", hiveClientConfig.isManifestVerificationEnabled(), false), stringProperty( NEW_PARTITION_USER_SUPPLIED_PARAMETER, "\"user_supplied\" parameter added to all newly created partitions", null, true), booleanProperty( OPTIMIZED_PARTITION_UPDATE_SERIALIZATION_ENABLED, "Serialize PartitionUpdate objects using binary SMILE encoding and compress with the ZSTD compression", 
hiveClientConfig.isOptimizedPartitionUpdateSerializationEnabled(), true), new PropertyMetadata<>( PARTITION_LEASE_DURATION, "Partition lease duration in seconds, 0 means disabled", VARCHAR, Duration.class, hiveClientConfig.getPartitionLeaseDuration(), false, value -> Duration.valueOf((String) value), Duration::toString), booleanProperty( CACHE_ENABLED, "Enable cache for hive", cacheConfig.isCachingEnabled(), false), booleanProperty( VERBOSE_RUNTIME_STATS_ENABLED, "Enable tracking all runtime stats. Note that this may affect query performance.", hiveClientConfig.isVerboseRuntimeStatsEnabled(), false), booleanProperty( ENABLE_LOOSE_MEMORY_BASED_ACCOUNTING, "Enable loose memory accounting to avoid OOMing existing queries", hiveClientConfig.isLooseMemoryAccountingEnabled(), false), integerProperty( MATERIALIZED_VIEW_MISSING_PARTITIONS_THRESHOLD, "Materialized views with missing partitions more than this threshold falls back to the base tables at read time", hiveClientConfig.getMaterializedViewMissingPartitionsThreshold(), true), stringProperty( METASTORE_HEADERS, "The headers that will be sent in the calls to Metastore", null, false), booleanProperty( DWRF_WRITER_STRIPE_CACHE_ENABLED, "Write stripe cache for the DWRF files.", orcFileWriterConfig.isDwrfStripeCacheEnabled(), false), dataSizeSessionProperty( DWRF_WRITER_STRIPE_CACHE_SIZE, "Maximum size of DWRF stripe cache to be held in memory", orcFileWriterConfig.getDwrfStripeCacheMaxSize(), false)); } public List<PropertyMetadata<?>> getSessionProperties() { return sessionProperties; } public static boolean isBucketExecutionEnabled(ConnectorSession session) { return session.getProperty(BUCKET_EXECUTION_ENABLED, Boolean.class); } public static boolean shouldIgnoreTableBucketing(ConnectorSession session) { return session.getProperty(IGNORE_TABLE_BUCKETING, Boolean.class); } public static Integer getMinBucketCountToNotIgnoreTableBucketing(ConnectorSession session) { return 
session.getProperty(MIN_BUCKET_COUNT_TO_NOT_IGNORE_TABLE_BUCKETING, Integer.class); } public static int getMaxBucketsForGroupedExecution(ConnectorSession session) { return session.getProperty(MAX_BUCKETS_FOR_GROUPED_EXECUTION, Integer.class); } public static NodeSelectionStrategy getNodeSelectionStrategy(ConnectorSession session) { return session.getProperty(NODE_SELECTION_STRATEGY, NodeSelectionStrategy.class); } public static InsertExistingPartitionsBehavior getInsertExistingPartitionsBehavior(ConnectorSession session) { return session.getProperty(INSERT_EXISTING_PARTITIONS_BEHAVIOR, InsertExistingPartitionsBehavior.class); } public static boolean isOrcBloomFiltersEnabled(ConnectorSession session) { return session.getProperty(ORC_BLOOM_FILTERS_ENABLED, Boolean.class); } public static DataSize getOrcMaxMergeDistance(ConnectorSession session) { return session.getProperty(ORC_MAX_MERGE_DISTANCE, DataSize.class); } public static DataSize getOrcMaxBufferSize(ConnectorSession session) { return session.getProperty(ORC_MAX_BUFFER_SIZE, DataSize.class); } public static DataSize getOrcStreamBufferSize(ConnectorSession session) { return session.getProperty(ORC_STREAM_BUFFER_SIZE, DataSize.class); } public static DataSize getOrcTinyStripeThreshold(ConnectorSession session) { return session.getProperty(ORC_TINY_STRIPE_THRESHOLD, DataSize.class); } public static DataSize getOrcMaxReadBlockSize(ConnectorSession session) { return session.getProperty(ORC_MAX_READ_BLOCK_SIZE, DataSize.class); } public static boolean getOrcLazyReadSmallRanges(ConnectorSession session) { return session.getProperty(ORC_LAZY_READ_SMALL_RANGES, Boolean.class); } public static boolean isOrcZstdJniDecompressionEnabled(ConnectorSession session) { return session.getProperty(ORC_ZSTD_JNI_DECOMPRESSION_ENABLED, Boolean.class); } public static DataSize getOrcStringStatisticsLimit(ConnectorSession session) { return session.getProperty(ORC_STRING_STATISTICS_LIMIT, DataSize.class); } public static boolean 
isOrcOptimizedWriterEnabled(ConnectorSession session)
    {
        return session.getProperty(ORC_OPTIMIZED_WRITER_ENABLED, Boolean.class);
    }

    /**
     * Decides whether this particular ORC write should be validated.
     * Validation is sampled: the validate flag gates it entirely, and the
     * percentage property is used as the probability (0-100) that a given
     * write is validated.
     */
    public static boolean isOrcOptimizedWriterValidate(ConnectorSession session)
    {
        boolean validate = session.getProperty(ORC_OPTIMIZED_WRITER_VALIDATE, Boolean.class);
        double percentage = session.getProperty(ORC_OPTIMIZED_WRITER_VALIDATE_PERCENTAGE, Double.class);

        // percentage is a probability in [0, 100]
        checkArgument(percentage >= 0.0 && percentage <= 100.0);

        // session property can disable validation
        if (!validate) {
            return false;
        }

        // session property can not force validation when sampling is enabled
        // todo change this if session properties support null
        return ThreadLocalRandom.current().nextDouble(100) < percentage;
    }

    public static OrcWriteValidationMode getOrcOptimizedWriterValidateMode(ConnectorSession session)
    {
        // stored as a string session property; normalized with a fixed locale so
        // parsing is independent of the server's default locale
        return OrcWriteValidationMode.valueOf(session.getProperty(ORC_OPTIMIZED_WRITER_VALIDATE_MODE, String.class).toUpperCase(ENGLISH));
    }

    public static DataSize getOrcOptimizedWriterMinStripeSize(ConnectorSession session)
    {
        return session.getProperty(ORC_OPTIMIZED_WRITER_MIN_STRIPE_SIZE, DataSize.class);
    }

    public static DataSize getOrcOptimizedWriterMaxStripeSize(ConnectorSession session)
    {
        return session.getProperty(ORC_OPTIMIZED_WRITER_MAX_STRIPE_SIZE, DataSize.class);
    }

    public static int getOrcOptimizedWriterMaxStripeRows(ConnectorSession session)
    {
        return session.getProperty(ORC_OPTIMIZED_WRITER_MAX_STRIPE_ROWS, Integer.class);
    }

    public static DataSize getOrcOptimizedWriterMaxDictionaryMemory(ConnectorSession session)
    {
        return session.getProperty(ORC_OPTIMIZED_WRITER_MAX_DICTIONARY_MEMORY, DataSize.class);
    }

    public static DataSize getPageFileStripeMaxSize(ConnectorSession session)
    {
        return session.getProperty(PAGEFILE_WRITER_MAX_STRIPE_SIZE, DataSize.class);
    }

    public static HiveStorageFormat getHiveStorageFormat(ConnectorSession session)
    {
        return HiveStorageFormat.valueOf(session.getProperty(HIVE_STORAGE_FORMAT, String.class).toUpperCase(ENGLISH));
    }

    public static
HiveCompressionCodec getCompressionCodec(ConnectorSession session) { return session.getProperty(COMPRESSION_CODEC, HiveCompressionCodec.class); } public static HiveCompressionCodec getOrcCompressionCodec(ConnectorSession session) { return session.getProperty(ORC_COMPRESSION_CODEC, HiveCompressionCodec.class); } public static boolean isRespectTableFormat(ConnectorSession session) { return session.getProperty(RESPECT_TABLE_FORMAT, Boolean.class); } public static boolean isCreateEmptyBucketFiles(ConnectorSession session) { return session.getProperty(CREATE_EMPTY_BUCKET_FILES, Boolean.class); } public static boolean isUseParquetColumnNames(ConnectorSession session) { return session.getProperty(PARQUET_USE_COLUMN_NAME, Boolean.class); } public static boolean isFailOnCorruptedParquetStatistics(ConnectorSession session) { return session.getProperty(PARQUET_FAIL_WITH_CORRUPTED_STATISTICS, Boolean.class); } public static DataSize getParquetMaxReadBlockSize(ConnectorSession session) { return session.getProperty(PARQUET_MAX_READ_BLOCK_SIZE, DataSize.class); } public static DataSize getParquetWriterBlockSize(ConnectorSession session) { return session.getProperty(PARQUET_WRITER_BLOCK_SIZE, DataSize.class); } public static DataSize getParquetWriterPageSize(ConnectorSession session) { return session.getProperty(PARQUET_WRITER_PAGE_SIZE, DataSize.class); } public static DataSize getMaxSplitSize(ConnectorSession session) { return session.getProperty(MAX_SPLIT_SIZE, DataSize.class); } public static DataSize getMaxInitialSplitSize(ConnectorSession session) { return session.getProperty(MAX_INITIAL_SPLIT_SIZE, DataSize.class); } public static boolean isRcfileOptimizedWriterEnabled(ConnectorSession session) { return session.getProperty(RCFILE_OPTIMIZED_WRITER_ENABLED, Boolean.class); } public static boolean isRcfileOptimizedWriterValidate(ConnectorSession session) { return session.getProperty(RCFILE_OPTIMIZED_WRITER_VALIDATE, Boolean.class); } public static boolean 
isSortedWritingEnabled(ConnectorSession session) { return session.getProperty(SORTED_WRITING_ENABLED, Boolean.class); } public static boolean isSortedWriteToTempPathEnabled(ConnectorSession session) { return session.getProperty(SORTED_WRITE_TO_TEMP_PATH_ENABLED, Boolean.class); } public static int getSortedWriteTempPathSubdirectoryCount(ConnectorSession session) { return session.getProperty(SORTED_WRITE_TEMP_PATH_SUBDIRECTORY_COUNT, Integer.class); } public static boolean isS3SelectPushdownEnabled(ConnectorSession session) { return session.getProperty(S3_SELECT_PUSHDOWN_ENABLED, Boolean.class); } public static boolean isStatisticsEnabled(ConnectorSession session) { return session.getProperty(STATISTICS_ENABLED, Boolean.class); } public static int getPartitionStatisticsSampleSize(ConnectorSession session) { int size = session.getProperty(PARTITION_STATISTICS_SAMPLE_SIZE, Integer.class); if (size < 1) { throw new PrestoException(INVALID_SESSION_PROPERTY, format("%s must be greater than 0: %s", PARTITION_STATISTICS_SAMPLE_SIZE, size)); } return size; } public static boolean isIgnoreCorruptedStatistics(ConnectorSession session) { return session.getProperty(IGNORE_CORRUPTED_STATISTICS, Boolean.class); } public static boolean isCollectColumnStatisticsOnWrite(ConnectorSession session) { return session.getProperty(COLLECT_COLUMN_STATISTICS_ON_WRITE, Boolean.class); } public static boolean isPartitionStatisticsBasedOptimizationEnabled(ConnectorSession session) { return session.getProperty(PARTITION_STATISTICS_BASED_OPTIMIZATION_ENABLED, Boolean.class); } @Deprecated public static boolean isOptimizedMismatchedBucketCount(ConnectorSession session) { return session.getProperty(OPTIMIZE_MISMATCHED_BUCKET_COUNT, Boolean.class); } public static boolean isTemporaryStagingDirectoryEnabled(ConnectorSession session) { return session.getProperty(TEMPORARY_STAGING_DIRECTORY_ENABLED, Boolean.class); } public static String getTemporaryStagingDirectoryPath(ConnectorSession session) { 
return session.getProperty(TEMPORARY_STAGING_DIRECTORY_PATH, String.class); } public static String getTemporaryTableSchema(ConnectorSession session) { return session.getProperty(TEMPORARY_TABLE_SCHEMA, String.class); } public static HiveStorageFormat getTemporaryTableStorageFormat(ConnectorSession session) { return session.getProperty(TEMPORARY_TABLE_STORAGE_FORMAT, HiveStorageFormat.class); } public static HiveCompressionCodec getTemporaryTableCompressionCodec(ConnectorSession session) { return session.getProperty(TEMPORARY_TABLE_COMPRESSION_CODEC, HiveCompressionCodec.class); } public static boolean shouldCreateEmptyBucketFilesForTemporaryTable(ConnectorSession session) { return session.getProperty(TEMPORARY_TABLE_CREATE_EMPTY_BUCKET_FILES, Boolean.class); } public static boolean isUsePageFileForHiveUnsupportedType(ConnectorSession session) { return session.getProperty(USE_PAGEFILE_FOR_HIVE_UNSUPPORTED_TYPE, Boolean.class); } public static boolean isPushdownFilterEnabled(ConnectorSession session) { return session.getProperty(PUSHDOWN_FILTER_ENABLED, Boolean.class); } public static boolean isRangeFiltersOnSubscriptsEnabled(ConnectorSession session) { return session.getProperty(RANGE_FILTERS_ON_SUBSCRIPTS_ENABLED, Boolean.class); } public static boolean isAdaptiveFilterReorderingEnabled(ConnectorSession session) { return session.getProperty(ADAPTIVE_FILTER_REORDERING_ENABLED, Boolean.class); } public static int getVirtualBucketCount(ConnectorSession session) { int virtualBucketCount = session.getProperty(VIRTUAL_BUCKET_COUNT, Integer.class); if (virtualBucketCount < 0) { throw new PrestoException(INVALID_SESSION_PROPERTY, format("%s must not be negative: %s", VIRTUAL_BUCKET_COUNT, virtualBucketCount)); } return virtualBucketCount; } public static boolean isOfflineDataDebugModeEnabled(ConnectorSession session) { return session.getProperty(OFFLINE_DATA_DEBUG_MODE_ENABLED, Boolean.class); } public static boolean shouldIgnoreUnreadablePartition(ConnectorSession 
session)
    {
        return session.getProperty(IGNORE_UNREADABLE_PARTITION, Boolean.class);
    }

    public static boolean isShufflePartitionedColumnsForTableWriteEnabled(ConnectorSession session)
    {
        return session.getProperty(SHUFFLE_PARTITIONED_COLUMNS_FOR_TABLE_WRITE, Boolean.class);
    }

    public static boolean isParquetBatchReadsEnabled(ConnectorSession session)
    {
        return session.getProperty(PARQUET_BATCH_READ_OPTIMIZATION_ENABLED, Boolean.class);
    }

    public static boolean isParquetBatchReaderVerificationEnabled(ConnectorSession session)
    {
        return session.getProperty(PARQUET_BATCH_READER_VERIFICATION_ENABLED, Boolean.class);
    }

    /**
     * Builds a session property that holds a {@link DataSize}, represented
     * externally as an unbounded VARCHAR and parsed with {@code DataSize.valueOf}.
     *
     * @param name session property name
     * @param description human readable description shown to users
     * @param defaultValue value used when the property is not set
     * @param hidden whether the property is hidden from SHOW SESSION
     */
    public static PropertyMetadata<DataSize> dataSizeSessionProperty(String name, String description, DataSize defaultValue, boolean hidden)
    {
        return new PropertyMetadata<>(
                name,
                description,
                createUnboundedVarcharType(),
                DataSize.class,
                defaultValue,
                hidden,
                value -> DataSize.valueOf((String) value),
                DataSize::toString);
    }

    // Default behavior when inserting into an existing partition:
    // mutable partitions always APPEND; immutable partitions either
    // OVERWRITE (when insert-overwrite is enabled) or fail with ERROR.
    private static InsertExistingPartitionsBehavior getDefaultInsertExistingPartitionsBehavior(HiveClientConfig hiveClientConfig)
    {
        if (!hiveClientConfig.isImmutablePartitions()) {
            return APPEND;
        }

        return hiveClientConfig.isInsertOverwriteImmutablePartitionEnabled() ? OVERWRITE : ERROR;
    }

    public static boolean isFailFastOnInsertIntoImmutablePartitionsEnabled(ConnectorSession session)
    {
        return session.getProperty(FAIL_FAST_ON_INSERT_INTO_IMMUTABLE_PARTITIONS_ENABLED, Boolean.class);
    }

    public static boolean isUseListDirectoryCache(ConnectorSession session)
    {
        return session.getProperty(USE_LIST_DIRECTORY_CACHE, Boolean.class);
    }

    public static boolean isParquetOptimizedWriterEnabled(ConnectorSession session)
    {
        return session.getProperty(PARQUET_OPTIMIZED_WRITER_ENABLED, Boolean.class);
    }

    public static BucketFunctionType getBucketFunctionTypeForExchange(ConnectorSession session)
    {
        return session.getProperty(BUCKET_FUNCTION_TYPE_FOR_EXCHANGE, BucketFunctionType.class);
    }

    public static boolean isParquetDereferencePushdownEnabled(ConnectorSession session)
    {
        return session.getProperty(PARQUET_DEREFERENCE_PUSHDOWN_ENABLED, Boolean.class);
    }

    public static boolean isPartialAggregationPushdownEnabled(ConnectorSession session)
    {
        return session.getProperty(PARTIAL_AGGREGATION_PUSHDOWN_ENABLED, Boolean.class);
    }

    public static boolean isPartialAggregationPushdownForVariableLengthDatatypesEnabled(ConnectorSession session)
    {
        return session.getProperty(PARTIAL_AGGREGATION_PUSHDOWN_FOR_VARIABLE_LENGTH_DATATYPES_ENABLED, Boolean.class);
    }

    public static boolean isFileRenamingEnabled(ConnectorSession session)
    {
        return session.getProperty(FILE_RENAMING_ENABLED, Boolean.class);
    }

    public static boolean isPreferManifestsToListFiles(ConnectorSession session)
    {
        return session.getProperty(PREFER_MANIFESTS_TO_LIST_FILES, Boolean.class);
    }

    public static boolean isManifestVerificationEnabled(ConnectorSession session)
    {
        return session.getProperty(MANIFEST_VERIFICATION_ENABLED, Boolean.class);
    }

    // empty (absent) when the session property was not set; the property's
    // default is null, hence Optional.ofNullable
    public static Optional<String> getNewPartitionUserSuppliedParameter(ConnectorSession session)
    {
        return Optional.ofNullable(session.getProperty(NEW_PARTITION_USER_SUPPLIED_PARAMETER, String.class));
    }

    public static boolean
isOptimizedPartitionUpdateSerializationEnabled(ConnectorSession session)
    {
        return session.getProperty(OPTIMIZED_PARTITION_UPDATE_SERIALIZATION_ENABLED, Boolean.class);
    }

    public static Duration getLeaseDuration(ConnectorSession session)
    {
        return session.getProperty(PARTITION_LEASE_DURATION, Duration.class);
    }

    public static boolean isCacheEnabled(ConnectorSession session)
    {
        return session.getProperty(CACHE_ENABLED, Boolean.class);
    }

    public static boolean isExecutionBasedMemoryAccountingEnabled(ConnectorSession session)
    {
        return session.getProperty(ENABLE_LOOSE_MEMORY_BASED_ACCOUNTING, Boolean.class);
    }

    public static int getMaterializedViewMissingPartitionsThreshold(ConnectorSession session)
    {
        return session.getProperty(MATERIALIZED_VIEW_MISSING_PARTITIONS_THRESHOLD, Integer.class);
    }

    public static boolean isVerboseRuntimeStatsEnabled(ConnectorSession session)
    {
        return session.getProperty(VERBOSE_RUNTIME_STATS_ENABLED, Boolean.class);
    }

    public static boolean isDwrfWriterStripeCacheEnabled(ConnectorSession session)
    {
        return session.getProperty(DWRF_WRITER_STRIPE_CACHE_ENABLED, Boolean.class);
    }

    /**
     * Maximum size of the DWRF stripe cache to be held in memory while writing.
     */
    public static DataSize getDwrfWriterStripeCacheMaxSize(ConnectorSession session)
    {
        return session.getProperty(DWRF_WRITER_STRIPE_CACHE_SIZE, DataSize.class);
    }

    /**
     * @deprecated misspelled ("Cachee"); kept for binary/source compatibility.
     * Use {@link #getDwrfWriterStripeCacheMaxSize(ConnectorSession)} instead.
     */
    @Deprecated
    public static DataSize getDwrfWriterStripeCacheeMaxSize(ConnectorSession session)
    {
        return getDwrfWriterStripeCacheMaxSize(session);
    }
}
/* * Copyright 2017-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.facebook.buck.features.rust; import com.facebook.buck.core.build.buildable.context.BuildableContext; import com.facebook.buck.core.build.context.BuildContext; import com.facebook.buck.core.description.BuildRuleParams; import com.facebook.buck.core.model.BuildTarget; import com.facebook.buck.core.rulekey.AddToRuleKey; import com.facebook.buck.core.rules.BuildRule; import com.facebook.buck.core.rules.SourcePathRuleFinder; import com.facebook.buck.core.rules.attr.SupportsInputBasedRuleKey; import com.facebook.buck.core.sourcepath.ExplicitBuildTargetSourcePath; import com.facebook.buck.core.sourcepath.SourcePath; import com.facebook.buck.core.sourcepath.resolver.SourcePathResolver; import com.facebook.buck.core.toolchain.tool.Tool; import com.facebook.buck.cxx.CxxPrepareForLinkStep; import com.facebook.buck.cxx.toolchain.linker.Linker; import com.facebook.buck.io.BuildCellRelativePath; import com.facebook.buck.io.filesystem.ProjectFilesystem; import com.facebook.buck.model.BuildTargets; import com.facebook.buck.rules.AbstractBuildRuleWithDeclaredAndExtraDeps; import com.facebook.buck.rules.BuildableSupport; import com.facebook.buck.rules.args.Arg; import com.facebook.buck.shell.ShellStep; import com.facebook.buck.shell.SymlinkFilesIntoDirectoryStep; import com.facebook.buck.step.ExecutionContext; import com.facebook.buck.step.Step; import 
com.facebook.buck.step.fs.MakeCleanDirectoryStep; import com.facebook.buck.util.MoreSuppliers; import com.facebook.buck.util.Verbosity; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.ImmutableSortedSet; import java.nio.file.Path; import java.util.Optional; import java.util.stream.Stream; /** Generate a rustc command line with all appropriate dependencies in place. */ public class RustCompileRule extends AbstractBuildRuleWithDeclaredAndExtraDeps implements SupportsInputBasedRuleKey { @AddToRuleKey private final Tool compiler; @AddToRuleKey private final Linker linker; @AddToRuleKey private final ImmutableList<Arg> args; @AddToRuleKey private final ImmutableList<Arg> depArgs; @AddToRuleKey private final ImmutableList<Arg> linkerArgs; @AddToRuleKey private final SourcePath rootModule; @AddToRuleKey private final ImmutableSortedSet<SourcePath> srcs; @AddToRuleKey private final RustBuckConfig.RemapSrcPaths remapSrcPaths; private final Path scratchDir; private final String filename; @AddToRuleKey private final boolean hasOutput; /** * Work out how to invoke the Rust compiler, rustc. * * <p>In Rust, a crate is the equivalent of a package in other languages. It's also the basic unit * of compilation. * * <p>A crate can either be a "binary crate" - which generates an executable - or a "library * crate", which makes an .rlib file. .rlib files contain both interface details (function * signatures, inline functions, macros, etc) and compiled object code, and so are equivalent to * both header files and library archives. There are also dynamic crates which compile to .so * files. * * <p>All crates are compiled from at least one source file, which is its main (or top, or root) * module. It may have references to other modules, which may be in other source files. 
Rustc only * needs the main module filename and will find the rest of the source files from there (akin to * #include in C/C++). If the crate also has dependencies on other crates, then those .rlib files * must also be passed to rustc for the interface details, and to be linked if its a binary crate. */ protected RustCompileRule( BuildTarget buildTarget, ProjectFilesystem projectFilesystem, BuildRuleParams buildRuleParams, String filename, Tool compiler, Linker linker, ImmutableList<Arg> args, ImmutableList<Arg> depArgs, ImmutableList<Arg> linkerArgs, ImmutableSortedSet<SourcePath> srcs, SourcePath rootModule, boolean hasOutput, RustBuckConfig.RemapSrcPaths remapSrcPaths) { super(buildTarget, projectFilesystem, buildRuleParams); this.filename = filename; this.compiler = compiler; this.linker = linker; this.args = args; this.depArgs = depArgs; this.linkerArgs = linkerArgs; this.rootModule = rootModule; this.srcs = srcs; this.scratchDir = BuildTargets.getScratchPath(getProjectFilesystem(), getBuildTarget(), "%s-container"); this.hasOutput = hasOutput; this.remapSrcPaths = remapSrcPaths; } public static RustCompileRule from( SourcePathRuleFinder ruleFinder, BuildTarget buildTarget, ProjectFilesystem projectFilesystem, BuildRuleParams params, String filename, Tool compiler, Linker linker, ImmutableList<Arg> args, ImmutableList<Arg> depArgs, ImmutableList<Arg> linkerArgs, ImmutableSortedSet<SourcePath> sources, SourcePath rootModule, boolean hasOutput, RustBuckConfig.RemapSrcPaths remapSrcPaths) { return new RustCompileRule( buildTarget, projectFilesystem, params.withExtraDeps( MoreSuppliers.memoize( () -> ImmutableSortedSet.<BuildRule>naturalOrder() .addAll(BuildableSupport.getDepsCollection(compiler, ruleFinder)) .addAll(BuildableSupport.getDepsCollection(linker, ruleFinder)) .addAll( Stream.of(args, depArgs, linkerArgs) .flatMap( a -> a.stream() .flatMap( arg -> BuildableSupport.getDeps(arg, ruleFinder))) .iterator()) 
.addAll(ruleFinder.filterBuildRuleInputs(ImmutableList.of(rootModule))) .addAll(ruleFinder.filterBuildRuleInputs(sources)) .build())), filename, compiler, linker, args, depArgs, linkerArgs, sources, rootModule, hasOutput, remapSrcPaths); } protected static Path getOutputDir(BuildTarget target, ProjectFilesystem filesystem) { return BuildTargets.getGenPath(filesystem, target, "%s"); } private Path getOutput() { return getOutputDir(getBuildTarget(), getProjectFilesystem()).resolve(filename); } @Override public ImmutableList<Step> getBuildSteps( BuildContext buildContext, BuildableContext buildableContext) { Path output = getOutput(); if (hasOutput) { buildableContext.recordArtifact(output); } SourcePathResolver resolver = buildContext.getSourcePathResolver(); Path argFilePath = getProjectFilesystem() .getRootPath() .resolve( BuildTargets.getScratchPath( getProjectFilesystem(), getBuildTarget(), "%s.argsfile")); Path fileListPath = getProjectFilesystem() .getRootPath() .resolve( BuildTargets.getScratchPath( getProjectFilesystem(), getBuildTarget(), "%s__filelist.txt")); return new ImmutableList.Builder<Step>() .addAll( MakeCleanDirectoryStep.of( BuildCellRelativePath.fromCellRelativePath( buildContext.getBuildCellRootPath(), getProjectFilesystem(), scratchDir))) .add( new SymlinkFilesIntoDirectoryStep( getProjectFilesystem(), getProjectFilesystem().getRootPath(), srcs.stream() .map(resolver::getRelativePath) .collect(ImmutableList.toImmutableList()), scratchDir)) .addAll( MakeCleanDirectoryStep.of( BuildCellRelativePath.fromCellRelativePath( buildContext.getBuildCellRootPath(), getProjectFilesystem(), getOutputDir(getBuildTarget(), getProjectFilesystem())))) .addAll( CxxPrepareForLinkStep.create( argFilePath, fileListPath, linker.fileList(fileListPath), output, linkerArgs, linker, getBuildTarget().getCellPath(), resolver)) .add( new ShellStep(Optional.of(getBuildTarget()), getProjectFilesystem().getRootPath()) { @Override protected ImmutableList<String> 
getShellCommandInternal( ExecutionContext executionContext) { ImmutableList<String> linkerCmd = linker.getCommandPrefix(resolver); ImmutableList.Builder<String> cmd = ImmutableList.builder(); // Accumulate Args into set to dedup them while retaining their order, // since there are often many duplicates for things like library paths. // // NOTE: this means that all logical args should be a single string on the command // line (ie "-Lfoo", not ["-L", "foo"]) ImmutableSet.Builder<String> dedupArgs = ImmutableSet.builder(); dedupArgs.addAll(Arg.stringify(depArgs, buildContext.getSourcePathResolver())); Path src = scratchDir.resolve(resolver.getRelativePath(rootModule)); cmd.addAll(compiler.getCommandPrefix(resolver)); if (executionContext.getAnsi().isAnsiTerminal()) { cmd.add("--color=always"); } remapSrcPaths.addRemapOption(cmd, workingDirectory.toString(), scratchDir + "/"); // Generate a target-unique string to distinguish distinct crates with the same // name. String metadata = RustCompileUtils.hashForTarget(RustCompileRule.this.getBuildTarget()); cmd.add(String.format("-Clinker=%s", linkerCmd.get(0))) .add(String.format("-Clink-arg=@%s", argFilePath)) .add(String.format("-Cmetadata=%s", metadata)) .add(String.format("-Cextra-filename=-%s", metadata)) .addAll(Arg.stringify(args, buildContext.getSourcePathResolver())) .addAll(dedupArgs.build()) .add("--out-dir", output.getParent().toString()) .add(src.toString()); return cmd.build(); } /* * Make sure all stderr output from rustc is emitted, since its either a warning or an * error. In general Rust code should have zero warnings, or all warnings as errors. * Regardless, respect requests for silence. 
*/
              @Override
              protected boolean shouldPrintStderr(Verbosity verbosity) {
                // Only suppress rustc's stderr when the user explicitly asked for silence.
                return !verbosity.isSilent();
              }

              @Override
              public ImmutableMap<String, String> getEnvironmentVariables(
                  ExecutionContext context) {
                ImmutableMap.Builder<String, String> env = ImmutableMap.builder();
                // Start from the compiler tool's own environment.
                env.putAll(compiler.getEnvironment(buildContext.getSourcePathResolver()));

                Path root = getProjectFilesystem().getRootPath();
                // NOTE(review): `.get()` on the result of getBuildTarget() is unusual for
                // this Buck API version — confirm it resolves to the unflavored target
                // whose base path is wanted here.
                Path basePath = getBuildTarget().get().getBasePath();

                // These need to be set as absolute paths - the intended use
                // is within an `include!(concat!(env!("..."), "...")`
                // invocation in Rust source, and if the path isn't absolute
                // it will be treated as relative to the current file including
                // it. The trailing '/' is also to assist this use-case.
                env.put("RUSTC_BUILD_CONTAINER", root.resolve(scratchDir) + "/");
                env.put(
                    "RUSTC_BUILD_CONTAINER_BASE_PATH",
                    root.resolve(scratchDir.resolve(basePath)) + "/");

                return env.build();
              }

              @Override
              public String getShortName() {
                return "rust-build";
              }
            })
        .build();
  }

  @Override
  public SourcePath getSourcePathToOutput() {
    // The declared output lives under the gen dir; callers resolve it lazily.
    return ExplicitBuildTargetSourcePath.of(getBuildTarget(), getOutput());
  }

  /** Root module (crate entry point) source for this compilation. */
  SourcePath getCrateRoot() {
    return rootModule;
  }
}
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

package org.elasticsearch.xpack.ml.action;

import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.core.ml.job.config.AnalysisConfig;
import org.elasticsearch.xpack.core.ml.job.config.Detector;
import org.elasticsearch.xpack.core.ml.job.config.PerPartitionCategorizationConfig;

import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.closeTo;
import static org.hamcrest.Matchers.lessThan;

/**
 * Unit tests for the static model-memory estimation helpers of
 * {@link TransportEstimateModelMemoryAction}: per-detector and per-influencer
 * byte requirements, categorization overhead, cardinality reduction, rounding
 * to whole megabytes, and overflow-capped long arithmetic.
 */
public class TransportEstimateModelMemoryActionTests extends ESTestCase {

    public void testCalculateDetectorRequirementBytes() {

        // Overall (whole-job) field cardinalities keyed by field name.
        Map<String, Long> overallCardinality = new HashMap<>();
        overallCardinality.put("part", 100L);
        overallCardinality.put("buy", 200L);
        overallCardinality.put("ovr", 300L);

        // Per the expected values below, these metric functions share a base
        // cost of 49152 bytes per detector model.
        String function = randomFrom("mean", "min", "max", "sum");

        // No split fields: just the base per-detector cost.
        Detector noSplit = createDetector(function, "field", null, null, null);
        assertThat(TransportEstimateModelMemoryAction.calculateDetectorRequirementBytes(noSplit, 900, overallCardinality),
            is(49152L));

        // By field "buy" (cardinality 200): multiplier is 134 — presumably the
        // estimator's reduced cardinality for bucket span 900 (TODO confirm
        // against the reduction formula in the action class).
        Detector withByField = createDetector(function, "field", "buy", null, null);
        assertThat(TransportEstimateModelMemoryAction.calculateDetectorRequirementBytes(withByField, 900, overallCardinality),
            is(134 * 49152L));

        // Partition field "part" (cardinality 100): full cardinality multiplier.
        Detector withPartitionField = createDetector(function, "field", null, null, "part");
        assertThat(TransportEstimateModelMemoryAction.calculateDetectorRequirementBytes(withPartitionField, 900, overallCardinality),
            is(100 * 49152L));

        // Both by and partition fields: the by-field cardinality is reduced by
        // sqrt(partition cardinality) and scaled by 2/3 before multiplying.
        Detector withByAndPartitionFields = createDetector(function, "field", "buy", null, "part");
        assertThat(TransportEstimateModelMemoryAction.calculateDetectorRequirementBytes(withByAndPartitionFields, 900, overallCardinality),
            is((long) Math.ceil(200 / Math.sqrt(100) * 2 / 3) * 100 * 49152L));
    }

    public void testCalculateInfluencerRequirementBytes() {

        // Maximum per-bucket cardinalities keyed by field name.
        Map<String, Long> maxBucketCardinality = new HashMap<>();
        maxBucketCardinality.put("part", 100L);
        maxBucketCardinality.put("inf1", 200L);
        maxBucketCardinality.put("inf2", 300L);

        // No influencers configured: no extra memory.
        AnalysisConfig noInfluencers = createCountAnalysisConfig(null, null);
        assertThat(TransportEstimateModelMemoryAction.calculateInfluencerRequirementBytes(noInfluencers, maxBucketCardinality),
            is(0L));

        // An influencer that is also the partition field is already accounted
        // for by the detector estimate, so it adds nothing here.
        AnalysisConfig influencerAlsoPartitionField = createCountAnalysisConfig(null, "part", "part");
        assertThat(TransportEstimateModelMemoryAction.calculateInfluencerRequirementBytes(influencerAlsoPartitionField, maxBucketCardinality),
            is(0L));

        // A standalone influencer costs BYTES_PER_INFLUENCER_VALUE per value.
        AnalysisConfig influencerNotPartitionField = createCountAnalysisConfig(null, "part", "inf1");
        assertThat(TransportEstimateModelMemoryAction.calculateInfluencerRequirementBytes(influencerNotPartitionField, maxBucketCardinality),
            is(200 * TransportEstimateModelMemoryAction.BYTES_PER_INFLUENCER_VALUE));

        // Mixing a partition-field influencer with another one: only the
        // non-partition influencer is charged.
        AnalysisConfig otherInfluencerAsWellAsPartitionField = createCountAnalysisConfig(null, "part", "part", "inf1");
        assertThat(TransportEstimateModelMemoryAction.calculateInfluencerRequirementBytes(otherInfluencerAsWellAsPartitionField, maxBucketCardinality),
            is(200 * TransportEstimateModelMemoryAction.BYTES_PER_INFLUENCER_VALUE));

        // Two chargeable influencers: costs add.
        AnalysisConfig twoInfluencersNotPartitionField = createCountAnalysisConfig(null, "part", "part", "inf1", "inf2");
        assertThat(TransportEstimateModelMemoryAction.calculateInfluencerRequirementBytes(twoInfluencersNotPartitionField, maxBucketCardinality),
            is((200 + 300) * TransportEstimateModelMemoryAction.BYTES_PER_INFLUENCER_VALUE));
    }

    public void testCalculateCategorizationRequirementBytesNoCategorization() {

        Map<String, Long> overallCardinality = new HashMap<>();
        overallCardinality.put("part", randomLongBetween(10, 1000));

        // No categorization field configured: no categorization memory at all.
        AnalysisConfig analysisConfig = createCountAnalysisConfig(null, randomBoolean() ? "part" : null);
        assertThat(TransportEstimateModelMemoryAction.calculateCategorizationRequirementBytes(analysisConfig, overallCardinality),
            is(0L));
    }

    public void testCalculateCategorizationRequirementBytesSimpleCategorization() {

        Map<String, Long> overallCardinality = new HashMap<>();
        overallCardinality.put("part", randomLongBetween(10, 1000));

        // Plain (non-per-partition) categorization: flat 10MB overhead,
        // regardless of partition cardinality.
        AnalysisConfig analysisConfig = createCountAnalysisConfig(randomAlphaOfLength(10), randomBoolean() ? "part" : null);
        assertThat(TransportEstimateModelMemoryAction.calculateCategorizationRequirementBytes(analysisConfig, overallCardinality),
            is(10L * 1024 * 1024));
    }

    public void testCalculateCategorizationRequirementBytesPerPartitionCategorization() {

        long partitionCardinality = randomLongBetween(10, 1000);
        Map<String, Long> overallCardinality = new HashMap<>();
        overallCardinality.put("part", partitionCardinality);

        boolean isStopOnWarn = randomBoolean();

        // Per-partition categorization: 10MB per partition value, doubled when
        // categorization does NOT stop on warn (the categorizer keeps working).
        AnalysisConfig analysisConfig = createCountAnalysisConfigBuilder(randomAlphaOfLength(10), "part")
            .setPerPartitionCategorizationConfig(new PerPartitionCategorizationConfig(true, isStopOnWarn)).build();
        assertThat(TransportEstimateModelMemoryAction.calculateCategorizationRequirementBytes(analysisConfig, overallCardinality),
            is(partitionCardinality * 10L * (isStopOnWarn ? 1 : 2) * 1024 * 1024));
    }

    public void testRoundUpToNextMb() {

        // Anything in (0, 1MB] rounds up to exactly 1MB; exact multiples stay.
        assertThat(TransportEstimateModelMemoryAction.roundUpToNextMb(0), equalTo(ByteSizeValue.ofBytes(0)));
        assertThat(TransportEstimateModelMemoryAction.roundUpToNextMb(1), equalTo(ByteSizeValue.ofMb(1)));
        assertThat(TransportEstimateModelMemoryAction.roundUpToNextMb(randomIntBetween(1, 1024 * 1024)), equalTo(ByteSizeValue.ofMb(1)));
        assertThat(TransportEstimateModelMemoryAction.roundUpToNextMb(1024 * 1024), equalTo(ByteSizeValue.ofMb(1)));
        assertThat(TransportEstimateModelMemoryAction.roundUpToNextMb(1024 * 1024 + 1), equalTo(ByteSizeValue.ofMb(2)));
        assertThat(TransportEstimateModelMemoryAction.roundUpToNextMb(2 * 1024 * 1024), equalTo(ByteSizeValue.ofMb(2)));

        // We don't round up at the extremes, to ensure that the resulting value can be represented as bytes in a long
        // (At such extreme scale it won't be possible to actually run the analysis, so ease of use trumps precision)
        assertThat(TransportEstimateModelMemoryAction.roundUpToNextMb(Long.MAX_VALUE - randomIntBetween(0, 1000000)),
            equalTo(ByteSizeValue.ofMb(Long.MAX_VALUE / ByteSizeValue.ofMb(1).getBytes())));
    }

    public void testReducedCardinality() {

        long cardinalityToReduce = randomIntBetween(1001, Integer.MAX_VALUE);
        long saneBucketSpan = randomFrom(1, 30, 60, 300, 600, 900, 1800, 3600, 10800, 21600, 43200, 86400);

        // Zero in, zero out.
        assertThat(TransportEstimateModelMemoryAction.reducedCardinality(0, randomNonNegativeLong(), saneBucketSpan),
            closeTo(0.0, 1e-15));

        // A single bucket performs no reduction.
        assertThat(TransportEstimateModelMemoryAction.reducedCardinality(cardinalityToReduce, 1, saneBucketSpan),
            closeTo(cardinalityToReduce, 1e-6));

        // With 1000 buckets at 15-minute span the reduction is roughly 1/sqrt(buckets).
        assertThat(TransportEstimateModelMemoryAction.reducedCardinality(cardinalityToReduce, 1000, 900),
            closeTo(cardinalityToReduce / Math.sqrt(1000), cardinalityToReduce / 20.0));

        // Reduction always shrinks the cardinality for more than one bucket.
        assertThat(TransportEstimateModelMemoryAction.reducedCardinality(
            cardinalityToReduce, randomIntBetween(2, Integer.MAX_VALUE), saneBucketSpan),
            lessThan((double) cardinalityToReduce));

        // For a very long bucket span the reduction approaches 1/buckets.
        assertThat(TransportEstimateModelMemoryAction.reducedCardinality(cardinalityToReduce, 1000, 10000000),
            closeTo(cardinalityToReduce / 1000.0, 1e-4));
    }

    public void testAddNonNegativeLongsWithMaxValueCap() {

        // Normal addition below the cap.
        assertThat(TransportEstimateModelMemoryAction.addNonNegativeLongsWithMaxValueCap(0, 0), is(0L));
        assertThat(TransportEstimateModelMemoryAction.addNonNegativeLongsWithMaxValueCap(0, 1), is(1L));
        assertThat(TransportEstimateModelMemoryAction.addNonNegativeLongsWithMaxValueCap(1, 0), is(1L));
        assertThat(TransportEstimateModelMemoryAction.addNonNegativeLongsWithMaxValueCap(1, 1), is(2L));

        // Sums that would overflow saturate at Long.MAX_VALUE instead.
        assertThat(TransportEstimateModelMemoryAction.addNonNegativeLongsWithMaxValueCap(Long.MAX_VALUE, Long.MAX_VALUE),
            is(Long.MAX_VALUE));
        assertThat(TransportEstimateModelMemoryAction.addNonNegativeLongsWithMaxValueCap(
            Long.MAX_VALUE - randomIntBetween(1, Integer.MAX_VALUE),
            Long.MAX_VALUE - randomIntBetween(1, Integer.MAX_VALUE)),
            is(Long.MAX_VALUE));
    }

    public void testMultiplyNonNegativeLongsWithMaxValueCap() {

        // Zero annihilates regardless of the other operand.
        assertThat(TransportEstimateModelMemoryAction.multiplyNonNegativeLongsWithMaxValueCap(0, 0), is(0L));
        assertThat(TransportEstimateModelMemoryAction.multiplyNonNegativeLongsWithMaxValueCap(randomNonNegativeLong(), 0), is(0L));
        assertThat(TransportEstimateModelMemoryAction.multiplyNonNegativeLongsWithMaxValueCap(0, randomNonNegativeLong()), is(0L));
        assertThat(TransportEstimateModelMemoryAction.multiplyNonNegativeLongsWithMaxValueCap(1, 1), is(1L));

        // Products that would overflow saturate at Long.MAX_VALUE instead.
        assertThat(TransportEstimateModelMemoryAction.multiplyNonNegativeLongsWithMaxValueCap(Long.MAX_VALUE, Long.MAX_VALUE),
            is(Long.MAX_VALUE));
        assertThat(TransportEstimateModelMemoryAction.multiplyNonNegativeLongsWithMaxValueCap(
            Long.MAX_VALUE, Math.max(1L, randomNonNegativeLong())), is(Long.MAX_VALUE));
        assertThat(TransportEstimateModelMemoryAction.multiplyNonNegativeLongsWithMaxValueCap(
            Math.max(1L, randomNonNegativeLong()), Long.MAX_VALUE), is(Long.MAX_VALUE));

        // ... but zero times Long.MAX_VALUE is still zero, in either order.
        assertThat(TransportEstimateModelMemoryAction.multiplyNonNegativeLongsWithMaxValueCap(0, Long.MAX_VALUE), is(0L));
        assertThat(TransportEstimateModelMemoryAction.multiplyNonNegativeLongsWithMaxValueCap(Long.MAX_VALUE, 0), is(0L));
    }

    /**
     * Builds a {@link Detector} with the given function/field and optional
     * by/over/partition split fields (each may be {@code null}).
     */
    public static Detector createDetector(String function, String fieldName, String byFieldName,
                                          String overFieldName, String partitionFieldName) {

        Detector.Builder detectorBuilder = new Detector.Builder(function, fieldName);
        detectorBuilder.setByFieldName(byFieldName);
        detectorBuilder.setOverFieldName(overFieldName);
        detectorBuilder.setPartitionFieldName(partitionFieldName);
        return detectorBuilder.build();
    }

    /**
     * Convenience wrapper around {@link #createCountAnalysisConfigBuilder}
     * that immediately builds the config.
     */
    public static AnalysisConfig createCountAnalysisConfig(String categorizationFieldName, String partitionFieldName,
                                                           String... influencerFieldNames) {
        return createCountAnalysisConfigBuilder(categorizationFieldName, partitionFieldName, influencerFieldNames).build();
    }

    /**
     * Builds an {@link AnalysisConfig.Builder} containing a single "count"
     * detector. When a categorization field is supplied the detector splits on
     * the special {@code mlcategory} field and the config carries the
     * categorization field name; influencers are added only when present.
     */
    public static AnalysisConfig.Builder createCountAnalysisConfigBuilder(String categorizationFieldName,
                                                                          String partitionFieldName,
                                                                          String... influencerFieldNames) {

        Detector.Builder detectorBuilder = new Detector.Builder("count", null);
        detectorBuilder.setByFieldName((categorizationFieldName != null) ? AnalysisConfig.ML_CATEGORY_FIELD : null);
        detectorBuilder.setPartitionFieldName(partitionFieldName);

        AnalysisConfig.Builder builder = new AnalysisConfig.Builder(Collections.singletonList(detectorBuilder.build()));
        if (categorizationFieldName != null) {
            builder.setCategorizationFieldName(categorizationFieldName);
        }
        if (influencerFieldNames.length > 0) {
            builder.setInfluencers(Arrays.asList(influencerFieldNames));
        }
        return builder;
    }
}
/*
 * Copyright (c) 2018, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.wso2.carbon.identity.application.authentication.framework.services;

import org.powermock.api.mockito.PowerMockito;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import org.wso2.carbon.identity.application.authentication.framework.config.model.SequenceConfig;
import org.wso2.carbon.identity.application.authentication.framework.context.AuthenticationContext;
import org.wso2.carbon.identity.application.authentication.framework.exception.PostAuthenticationFailedException;
import org.wso2.carbon.identity.application.authentication.framework.handler.request.AbstractPostAuthnHandler;
import org.wso2.carbon.identity.application.authentication.framework.handler.request.PostAuthnHandlerFlowStatus;
import org.wso2.carbon.identity.application.authentication.framework.internal.FrameworkServiceDataHolder;
import org.wso2.carbon.identity.application.authentication.framework.model.AuthenticatedUser;
import org.wso2.carbon.identity.application.authentication.framework.util.FrameworkConstants;
import org.wso2.carbon.identity.testutil.IdentityBaseTest;

import java.io.IOException;
import java.util.UUID;

import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import static org.mockito.Matchers.any;
import static org.powermock.api.mockito.PowerMockito.doAnswer;
import static org.powermock.api.mockito.PowerMockito.when;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;

/**
 * Tests for PostAuthenticationManagement Service.
 * <p>
 * A sample post-authentication handler ({@link TestPostHandlerWithRedirect})
 * is registered before each test; it redirects once for an admin user and
 * twice for the "special" user, failing for anyone else.
 */
public class PostAuthenticationMgtServiceTest extends IdentityBaseTest {

    // Never reassigned, so declared final. The handler itself stays mutable
    // (tests toggle its enabled flag via setEnabled).
    private final PostAuthenticationMgtService postAuthenticationMgtService = new PostAuthenticationMgtService();
    private final TestPostHandlerWithRedirect testPostHandlerWithRedirect = new TestPostHandlerWithRedirect();

    // True constants: previously declared as mutable static fields despite the
    // UPPER_SNAKE_CASE constant names; now static final per Java conventions.
    private static final String FIRST_REDIRECT_TRIGGERED = "firstRedirectTriggered";
    private static final String SECOND_REDIRECT_TRIGGERED = "secondRedirectTriggered";
    private static final String ADMIN_USERNAME = "admin";
    private static final String SPECIAL_USER = "specialUser";
    private static final String DUMMY_EXTERNAL_ENDPOINT = "https://localhost/somecontext";

    // Re-enable and (re-)register the sample handler before every test method.
    @BeforeMethod
    void setup() {

        testPostHandlerWithRedirect.setEnabled(true);
        FrameworkServiceDataHolder.getInstance().addPostAuthenticationHandler(testPostHandlerWithRedirect);
    }

    @DataProvider(name = "singlePostAuthenticatorData")
    public Object[][] singlePostAuthenticatorData() {

        return new Object[][]{
                // Sample authenticator is enabled, an admin user authenticated. Hence just a single redirection.
                {true, ADMIN_USERNAME},
                // No post authenticator is enabled.
                {false, ADMIN_USERNAME},
                // Sample authenticator is enabled. A special user is authenticated. Hence two redirections take place.
                {true, SPECIAL_USER}
        };
    }

    /**
     * Drives the post-authentication flow end to end for each data-provider
     * row, replaying the state cookie between invocations and asserting the
     * expected redirect markers / completion flag on the context.
     */
    @Test(dataProvider = "singlePostAuthenticatorData")
    public void testHandlePostAuthentication(boolean isSampleAuthenticatorEnabled, String userName) throws Exception {

        HttpServletRequest request = PowerMockito.mock(HttpServletRequest.class);
        HttpServletResponse response = PowerMockito.mock(HttpServletResponse.class);
        AuthenticationContext context = new AuthenticationContext();
        context.setContextIdentifier(String.valueOf(UUID.randomUUID()));

        // Capture the cookie the service sets on the response so it can be
        // replayed on the subsequent request.
        Cookie[] cookies = new Cookie[1];
        doAnswer((mock) -> cookies[0] = (Cookie) mock.getArguments()[0]).when(response).addCookie(any(Cookie.class));

        addSequence(context, true);
        setUser(context, userName);

        if (!isSampleAuthenticatorEnabled) {
            this.testPostHandlerWithRedirect.setEnabled(false);
        }

        postAuthenticationMgtService.handlePostAuthentication(request, response, context);

        if (isSampleAuthenticatorEnabled && !SPECIAL_USER.equalsIgnoreCase(userName)) {
            // Admin user: one redirect, then the flow completes.
            assertNotNull(context.getParameter(FIRST_REDIRECT_TRIGGERED));
            when(request.getCookies()).thenReturn(cookies);
            postAuthenticationMgtService.handlePostAuthentication(request, response, context);
            assertTrue(Boolean.parseBoolean(context.getParameter(FrameworkConstants
                    .POST_AUTHENTICATION_EXTENSION_COMPLETED).toString()));
        } else if (SPECIAL_USER.equalsIgnoreCase(userName)) {
            // Special user: a second redirect is triggered and the extension
            // is still not marked completed.
            assertNotNull(context.getParameter(FIRST_REDIRECT_TRIGGERED));
            when(request.getCookies()).thenReturn(cookies);
            postAuthenticationMgtService.handlePostAuthentication(request, response, context);
            assertNull(context.getParameter(FrameworkConstants
                    .POST_AUTHENTICATION_EXTENSION_COMPLETED));
            assertNotNull(context.getParameter(SECOND_REDIRECT_TRIGGERED));
        } else {
            // No enabled handler: the extension completes immediately.
            assertTrue(Boolean.parseBoolean(context.getParameter(FrameworkConstants
                    .POST_AUTHENTICATION_EXTENSION_COMPLETED).toString()));
        }

        postAuthenticationMgtService.handlePostAuthentication(request, response, context);
        if (SPECIAL_USER.equalsIgnoreCase(userName)) {
            assertNotNull(context.getParameter(SECOND_REDIRECT_TRIGGERED));
        }
        postAuthenticationMgtService.handlePostAuthentication(request, response, context);
    }

    @DataProvider(name = "singlePostAuthenticatorUnsuccessData")
    public Object[][] singlePostAuthenticatorUnsuccessData() {

        return new Object[][]{
                // Alter cookie before sending response to first redirection. Hence should fail.
                {true, ADMIN_USERNAME},
                // If the user is neither an admin nor a special user, then post authentication should fail.
                {false, ADMIN_USERNAME + "suffix"}
        };
    }

    /**
     * Negative paths: a tampered state cookie, or an unauthorized user, must
     * cause the service to throw {@link PostAuthenticationFailedException}.
     */
    @Test(dataProvider = "singlePostAuthenticatorUnsuccessData", expectedExceptions =
            PostAuthenticationFailedException.class)
    public void testHandlePostAuthenticationExceptions(boolean alterCookie, String userName) throws Exception {

        HttpServletRequest request = PowerMockito.mock(HttpServletRequest.class);
        HttpServletResponse response = PowerMockito.mock(HttpServletResponse.class);
        AuthenticationContext context = new AuthenticationContext();
        context.setContextIdentifier(String.valueOf(UUID.randomUUID()));

        Cookie[] cookies = new Cookie[1];
        doAnswer((mock) -> cookies[0] = (Cookie) mock.getArguments()[0]).when(response).addCookie(any(Cookie.class));

        addSequence(context, true);
        setUser(context, userName);
        postAuthenticationMgtService.handlePostAuthentication(request, response, context);

        if (alterCookie && ADMIN_USERNAME.equalsIgnoreCase(userName)) {
            // Corrupt the state cookie so the replay is rejected.
            cookies[0].setValue(cookies[0].getValue() + "gibberish");
        }
        when(request.getCookies()).thenReturn(cookies);
        postAuthenticationMgtService.handlePostAuthentication(request, response, context);
    }

    // Attaches a SequenceConfig with the given completion state to the context.
    private void addSequence(AuthenticationContext context, boolean isCompleted) {

        SequenceConfig sequenceConfig = new SequenceConfig();
        sequenceConfig.setCompleted(isCompleted);
        context.setSequenceConfig(sequenceConfig);
    }

    // Sets an authenticated user (by subject identifier) on the context's sequence.
    private void setUser(AuthenticationContext context, String userName) {

        AuthenticatedUser authenticatedUser = new AuthenticatedUser();
        authenticatedUser.setAuthenticatedSubjectIdentifier(userName);
        context.getSequenceConfig().setAuthenticatedUser(authenticatedUser);
    }

    /**
     * Sample post authentication handler for tests. Redirects once for an
     * admin user, twice for the special user, and fails any other user on the
     * second pass.
     */
    public static class TestPostHandlerWithRedirect extends AbstractPostAuthnHandler {

        private boolean isEnabled = true;

        @Override
        public PostAuthnHandlerFlowStatus handle(HttpServletRequest request, HttpServletResponse response,
                                                 AuthenticationContext context)
                throws PostAuthenticationFailedException {

            // If not authenticated just return.
            if (context.getSequenceConfig().getAuthenticatedUser() == null) {
                return PostAuthnHandlerFlowStatus.SUCCESS_COMPLETED;
            } else if (context.getParameter(FIRST_REDIRECT_TRIGGERED) != null && context.getParameter
                    (SECOND_REDIRECT_TRIGGERED) == null) {
                // First redirection has taken place. Decide whether second redirection needs or can finish the flow.
                String authenticatedUsername = context.getSequenceConfig().getAuthenticatedUser()
                        .getAuthenticatedSubjectIdentifier();
                if (ADMIN_USERNAME.equalsIgnoreCase(authenticatedUsername)) {
                    // If the user is admin, the flow is success.
                    return PostAuthnHandlerFlowStatus.SUCCESS_COMPLETED;
                } else if (SPECIAL_USER.equalsIgnoreCase(authenticatedUsername)) {
                    // If the user is a special user, then do a second redirection before completing.
                    try {
                        response.sendRedirect(DUMMY_EXTERNAL_ENDPOINT);
                        context.addParameter(SECOND_REDIRECT_TRIGGERED, true);
                        return PostAuthnHandlerFlowStatus.INCOMPLETE;
                    } catch (IOException e) {
                        throw new PostAuthenticationFailedException("Error while checking admin user", "Error while " +
                                "redirecting");
                    }
                } else {
                    throw new PostAuthenticationFailedException("Not an admin user", "User is not an admin");
                }
            }

            // Second redirect already happened: wait for the flow to resume.
            if (context.getParameter(SECOND_REDIRECT_TRIGGERED) != null) {
                return PostAuthnHandlerFlowStatus.INCOMPLETE;
            }

            // First pass: trigger the initial redirect.
            try {
                response.sendRedirect(DUMMY_EXTERNAL_ENDPOINT);
                context.addParameter(FIRST_REDIRECT_TRIGGERED, true);
                return PostAuthnHandlerFlowStatus.INCOMPLETE;
            } catch (IOException e) {
                throw new PostAuthenticationFailedException("Error while checking admin user", "Error while " +
                        "redirecting");
            }
        }

        @Override
        public boolean isEnabled() {

            return this.isEnabled;
        }

        public void setEnabled(boolean isEnabled) {

            this.isEnabled = isEnabled;
        }
    }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.facebook.presto.cli;

import com.facebook.presto.client.ClientSession;
import com.google.common.base.Splitter;
import com.google.common.collect.ImmutableMap;
import com.google.common.net.HostAndPort;
import io.airlift.airline.Option;
import io.airlift.units.Duration;

import java.net.URI;
import java.net.URISyntaxException;
import java.nio.charset.CharsetEncoder;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.TimeZone;

import static com.facebook.presto.client.KerberosUtil.defaultCredentialCachePath;
import static com.google.common.base.Preconditions.checkArgument;
import static java.nio.charset.StandardCharsets.US_ASCII;
import static java.util.Collections.emptyMap;
import static java.util.Locale.ENGLISH;
import static java.util.Objects.requireNonNull;
import static java.util.concurrent.TimeUnit.MINUTES;

/**
 * Command-line options for the Presto CLI, populated by the airline parser
 * via the {@link Option} annotations. Fields are public/mutable by design:
 * airline injects parsed values directly into them.
 */
public class ClientOptions
{
    @Option(name = "--server", title = "server", description = "Presto server location (default: localhost:8080)")
    public String server = "localhost:8080";

    // --- Kerberos authentication options ---

    @Option(name = "--enable-authentication", title = "enable authentication", description = "Enable client authentication")
    public boolean authenticationEnabled;

    @Option(name = "--krb5-remote-service-name", title = "krb5 remote service name", description = "Remote peer's kerberos service name")
    public String krb5RemoteServiceName;

    @Option(name = "--krb5-config-path", title = "krb5 config path", description = "Kerberos config file path (default: /etc/krb5.conf)")
    public String krb5ConfigPath = "/etc/krb5.conf";

    @Option(name = "--krb5-keytab-path", title = "krb5 keytab path", description = "Kerberos key table path (default: /etc/krb5.keytab)")
    public String krb5KeytabPath = "/etc/krb5.keytab";

    @Option(name = "--krb5-credential-cache-path", title = "krb5 credential cache path", description = "Kerberos credential cache path")
    public String krb5CredentialCachePath = defaultCredentialCachePath().orElse(null);

    @Option(name = "--krb5-principal", title = "krb5 principal", description = "Kerberos principal to be used")
    public String krb5Principal;

    @Option(name = "--krb5-disable-remote-service-hostname-canonicalization", title = "krb5 disable remote service hostname canonicalization", description = "Disable service hostname canonicalization using the DNS reverse lookup")
    public boolean krb5DisableRemoteServiceHostnameCanonicalization;

    // --- TLS keystore/truststore options ---

    @Option(name = "--keystore-path", title = "keystore path", description = "Keystore path")
    public String keystorePath;

    @Option(name = "--keystore-password", title = "keystore password", description = "Keystore password")
    public String keystorePassword;

    @Option(name = "--truststore-path", title = "truststore path", description = "Truststore path")
    public String truststorePath;

    @Option(name = "--truststore-password", title = "truststore password", description = "Truststore password")
    public String truststorePassword;

    // --- Session identity and behavior options ---

    @Option(name = "--user", title = "user", description = "Username")
    public String user = System.getProperty("user.name");

    @Option(name = "--password", title = "password", description = "Prompt for password")
    public boolean password;

    @Option(name = "--source", title = "source", description = "Name of source making query")
    public String source = "presto-cli";

    @Option(name = "--catalog", title = "catalog", description = "Default catalog")
    public String catalog;

    @Option(name = "--schema", title = "schema", description = "Default schema")
    public String schema;

    @Option(name = {"-f", "--file"}, title = "file", description = "Execute statements from file and exit")
    public String file;

    @Option(name = "--debug", title = "debug", description = "Enable debug information")
    public boolean debug;

    @Option(name = "--log-levels-file", title = "log levels file", description = "Configure log levels for debugging using this file")
    public String logLevelsFile;

    @Option(name = "--execute", title = "execute", description = "Execute specified statements and exit")
    public String execute;

    @Option(name = "--output-format", title = "output-format", description = "Output format for batch mode [ALIGNED, VERTICAL, CSV, TSV, CSV_HEADER, TSV_HEADER, NULL] (default: CSV)")
    public OutputFormat outputFormat = OutputFormat.CSV;

    @Option(name = "--session", title = "session", description = "Session property (property can be used multiple times; format is key=value; use 'SHOW SESSION' to see available properties)")
    public final List<ClientSessionProperty> sessionProperties = new ArrayList<>();

    @Option(name = "--socks-proxy", title = "socks-proxy", description = "SOCKS proxy to use for server connections")
    public HostAndPort socksProxy;

    @Option(name = "--http-proxy", title = "http-proxy", description = "HTTP proxy to use for server connections")
    public HostAndPort httpProxy;

    @Option(name = "--client-request-timeout", title = "client request timeout", description = "Client request timeout (default: 2m)")
    public Duration clientRequestTimeout = new Duration(2, MINUTES);

    // Output formats selectable for batch mode via --output-format.
    public enum OutputFormat
    {
        ALIGNED,
        VERTICAL,
        CSV,
        TSV,
        CSV_HEADER,
        TSV_HEADER,
        NULL
    }

    /**
     * Builds a {@link ClientSession} from the parsed options, using the JVM's
     * default time zone and locale.
     */
    public ClientSession toClientSession()
    {
        return new ClientSession(
                parseServer(server),
                user,
                source,
                null, // client-supplied payload field not yet supported in CLI
                catalog,
                schema,
                TimeZone.getDefault().getID(),
                Locale.getDefault(),
                toProperties(sessionProperties),
                emptyMap(),
                null,
                debug,
                clientRequestTimeout);
    }

    /**
     * Parses a server spec into a URI. An explicit http/https URI is used
     * as-is (after lower-casing the whole string); otherwise the spec is
     * treated as host[:port] with scheme http and default port 80.
     */
    public static URI parseServer(String server)
    {
        server = server.toLowerCase(ENGLISH);
        if (server.startsWith("http://") || server.startsWith("https://")) {
            return URI.create(server);
        }

        HostAndPort host = HostAndPort.fromString(server);
        try {
            return new URI("http", null, host.getHost(), host.getPortOrDefault(80), null, null, null);
        }
        catch (URISyntaxException e) {
            throw new IllegalArgumentException(e);
        }
    }

    /**
     * Flattens session properties into a name→value map, qualifying names
     * with their catalog ("catalog.name") when one was given.
     */
    public static Map<String, String> toProperties(List<ClientSessionProperty> sessionProperties)
    {
        ImmutableMap.Builder<String, String> builder = ImmutableMap.builder();
        for (ClientSessionProperty sessionProperty : sessionProperties) {
            String name = sessionProperty.getName();
            if (sessionProperty.getCatalog().isPresent()) {
                name = sessionProperty.getCatalog().get() + "." + name;
            }
            builder.put(name, sessionProperty.getValue());
        }
        return builder.build();
    }

    /**
     * A single session property parsed from "--session", in the form
     * {@code [catalog.]name=value}. Name and value must be US-ASCII and must
     * not contain '='; the value itself may contain further '=' characters
     * (the splitter is limited to two parts).
     */
    public static final class ClientSessionProperty
    {
        private static final Splitter NAME_VALUE_SPLITTER = Splitter.on('=').limit(2);
        private static final Splitter NAME_SPLITTER = Splitter.on('.');
        private final Optional<String> catalog;
        private final String name;
        private final String value;

        public ClientSessionProperty(String property)
        {
            List<String> nameValue = NAME_VALUE_SPLITTER.splitToList(property);
            checkArgument(nameValue.size() == 2, "Session property: %s", property);

            List<String> nameParts = NAME_SPLITTER.splitToList(nameValue.get(0));
            checkArgument(nameParts.size() == 1 || nameParts.size() == 2, "Invalid session property: %s", property);
            if (nameParts.size() == 1) {
                catalog = Optional.empty();
                name = nameParts.get(0);
            }
            else {
                catalog = Optional.of(nameParts.get(0));
                name = nameParts.get(1);
            }
            value = nameValue.get(1);

            verifyProperty(catalog, name, value);
        }

        public ClientSessionProperty(Optional<String> catalog, String name, String value)
        {
            this.catalog = requireNonNull(catalog, "catalog is null");
            this.name = requireNonNull(name, "name is null");
            this.value = requireNonNull(value, "value is null");
            verifyProperty(catalog, name, value);
        }

        // Validates catalog/name/value: non-empty name, no '=' in catalog or
        // name, and all three components encodable as US-ASCII.
        private static void verifyProperty(Optional<String> catalog, String name, String value)
        {
            checkArgument(!catalog.isPresent() || !catalog.get().isEmpty(), "Invalid session property: %s.%s:%s", catalog, name, value);
            checkArgument(!name.isEmpty(), "Session property name is empty");

            CharsetEncoder charsetEncoder = US_ASCII.newEncoder();
            checkArgument(catalog.orElse("").indexOf('=') < 0, "Session property catalog must not contain '=': %s", name);
            checkArgument(charsetEncoder.canEncode(catalog.orElse("")), "Session property catalog is not US_ASCII: %s", name);
            checkArgument(name.indexOf('=') < 0, "Session property name must not contain '=': %s", name);
            checkArgument(charsetEncoder.canEncode(name), "Session property name is not US_ASCII: %s", name);
            checkArgument(charsetEncoder.canEncode(value), "Session property value is not US_ASCII: %s", value);
        }

        public Optional<String> getCatalog()
        {
            return catalog;
        }

        public String getName()
        {
            return name;
        }

        public String getValue()
        {
            return value;
        }

        @Override
        public String toString()
        {
            return (catalog.isPresent() ? catalog.get() + '.' : "") + name + '=' + value;
        }

        @Override
        public int hashCode()
        {
            return Objects.hash(catalog, name, value);
        }

        @Override
        public boolean equals(Object obj)
        {
            if (this == obj) {
                return true;
            }
            if (obj == null || getClass() != obj.getClass()) {
                return false;
            }
            ClientSessionProperty other = (ClientSessionProperty) obj;
            return Objects.equals(this.catalog, other.catalog) &&
                    Objects.equals(this.name, other.name) &&
                    Objects.equals(this.value, other.value);
        }
    }
}
/*
 *  Licensed to GraphHopper and Peter Karich under one or more contributor
 *  license agreements. See the NOTICE file distributed with this work for
 *  additional information regarding copyright ownership.
 *
 *  GraphHopper licenses this file to you under the Apache License,
 *  Version 2.0 (the "License"); you may not use this file except in
 *  compliance with the License. You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 */
package com.graphhopper.reader;

import static com.graphhopper.util.Helper.nf;
import com.graphhopper.util.SimpleKalmanFilter;
import gnu.trove.list.TLongList;
import gnu.trove.list.array.TLongArrayList;
import gnu.trove.map.TIntLongMap;
import gnu.trove.map.TLongLongMap;
import gnu.trove.map.hash.TIntLongHashMap;
import gnu.trove.map.hash.TLongLongHashMap;
import gnu.trove.set.TLongSet;
import gnu.trove.set.hash.TLongHashSet;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import javax.xml.stream.XMLStreamException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.graphhopper.coll.GHLongIntBTree;
import com.graphhopper.coll.LongIntMap;
import com.graphhopper.reader.OSMTurnRelation.TurnCostTableEntry;
import com.graphhopper.reader.dem.ElevationProvider;
import com.graphhopper.routing.util.*;
import com.graphhopper.storage.*;
import com.graphhopper.util.*;
import com.graphhopper.util.shapes.GHPoint;
import gnu.trove.map.TLongObjectMap;
import gnu.trove.map.hash.TLongObjectHashMap;
import java.util.*;

/**
 * This class parses an OSM xml or pbf file and creates a graph from it. It does so in a two phase
 * parsing processes in order to reduce memory usage compared to a single parsing processing.
 * <p>
 * 1. a) Reads ways from OSM file and stores all associated node ids in osmNodeIdToIndexMap. If a
 * node occurs once it is a pillar node and if more it is a tower node, otherwise
 * osmNodeIdToIndexMap returns EMPTY.
 * <p>
 * 1. b) Reads relations from OSM file. In case that the relation is a route relation, it stores
 * specific relation attributes required for routing into osmWayIdToRouteWeigthMap for all the ways
 * of the relation.
 * <p>
 * 2.a) Reads nodes from OSM file and stores lat+lon information either into the intermediate
 * datastructure for the pillar nodes (pillarLats/pillarLons) or, if a tower node, directly into the
 * graphStorage via setLatitude/setLongitude. It can also happen that a pillar node needs to be
 * transformed into a tower node e.g. via barriers or different speed values for one way.
 * <p>
 * 2.b) Reads ways OSM file and creates edges while calculating the speed etc from the OSM tags.
 * When creating an edge the pillar node information from the intermediate datastructure will be
 * stored in the way geometry of that edge.
 * <p>
 * @author Peter Karich
 */
public class OSMReader implements DataReader
{
    // Sentinel returned by the node map for OSM ids never seen in any accepted way.
    protected static final int EMPTY = -1;
    // pillar node is >= 3
    protected static final int PILLAR_NODE = 1;
    // tower node is <= -3
    protected static final int TOWER_NODE = -2;
    private static final Logger logger = LoggerFactory.getLogger(OSMReader.class);
    // counters for progress logging: accepted vs. skipped (out-of-bounds) node locations
    private long locations;
    private long skippedLocations;
    private final GraphStorage ghStorage;
    private final Graph graph;
    private final NodeAccess nodeAccess;
    private EncodingManager encodingManager = null;
    private int workerThreads = -1;
    // counts edges whose tower-node distance collapsed to ~0 (see addEdge)
    protected long zeroCounter = 0;
    // Using the correct Map<Long, Integer> is hard. We need a memory efficient and fast solution for big data sets!
    //
    // very slow: new SparseLongLongArray
    // only append and update possible (no unordered storage like with this doubleParse): new OSMIDMap
    // same here: not applicable as ways introduces the nodes in 'wrong' order: new OSMIDSegmentedMap
    // memory overhead due to open addressing and full rehash:
    //  nodeOsmIdToIndexMap = new BigLongIntMap(expectedNodes, EMPTY);
    // smaller memory overhead for bigger data sets because of avoiding a "rehash"
    // remember how many times a node was used to identify tower nodes
    private LongIntMap osmNodeIdToInternalNodeMap;
    private TLongLongHashMap osmNodeIdToNodeFlagsMap;
    private TLongLongHashMap osmWayIdToRouteWeightMap;
    // stores osm way ids used by relations to identify which edge ids needs to be mapped later
    private TLongHashSet osmWayIdSet = new TLongHashSet();
    private TIntLongMap edgeIdToOsmWayIdMap;
    // scratch list reused by addBarrierEdge to avoid allocations
    private final TLongList barrierNodeIds = new TLongArrayList();
    protected PillarInfo pillarInfo;
    private final DistanceCalc distCalc = Helper.DIST_EARTH;
    private final DistanceCalc3D distCalc3D = Helper.DIST_3D;
    private final DouglasPeucker simplifyAlgo = new DouglasPeucker();
    private boolean doSimplify = true;
    private int nextTowerId = 0;
    private int nextPillarId = 0;
    // negative but increasing to avoid clash with custom created OSM files
    private long newUniqueOsmId = -Long.MAX_VALUE;
    private ElevationProvider eleProvider = ElevationProvider.NOOP;
    // elevation smoothing mode: "kalman", "mean" or "" (disabled); see processWay
    private String smoothingFilter = "";
    private final boolean exitOnlyPillarNodeException = true;
    private File osmFile;
    // per-encoder edge explorers, lazily created and cached for turn relation analysis
    private final Map<FlagEncoder, EdgeExplorer> outExplorerMap = new HashMap<FlagEncoder, EdgeExplorer>();
    private final Map<FlagEncoder, EdgeExplorer> inExplorerMap = new HashMap<FlagEncoder, EdgeExplorer>();

    public OSMReader( GraphHopperStorage ghStorage )
    {
        this.ghStorage = ghStorage;
        this.graph = ghStorage;
        this.nodeAccess = graph.getNodeAccess();

        osmNodeIdToInternalNodeMap = new GHLongIntBTree(200);
        // Trove map args: initial capacity, load factor, no-entry key, no-entry value
        osmNodeIdToNodeFlagsMap = new TLongLongHashMap(200, .5f, 0, 0);
        osmWayIdToRouteWeightMap = new TLongLongHashMap(200, .5f, 0, 0);
        pillarInfo = new PillarInfo(nodeAccess.is3D(), ghStorage.getDirectory());
    }

    @Override
    public void readGraph() throws IOException
    {
        // fail fast on missing configuration before touching the (possibly large) input file
        if (encodingManager == null)
            throw new IllegalStateException("Encoding manager was not set.");

        if (osmFile == null)
            throw new IllegalStateException("No OSM file specified");

        if (!osmFile.exists())
            throw new IllegalStateException("Your specified OSM file does not exist:" + osmFile.getAbsolutePath());

        StopWatch sw1 = new StopWatch().start();
        preProcess(osmFile);
        sw1.stop();

        StopWatch sw2 = new StopWatch().start();
        writeOsm2Graph(osmFile);
        sw2.stop();

        logger.info("time(pass1): " + (int) sw1.getSeconds() + " pass2: " + (int) sw2.getSeconds()
                + " total:" + ((int) (sw1.getSeconds() + sw2.getSeconds())));
    }

    /**
     * Preprocessing of OSM file to select nodes which are used for highways. This allows a more
     * compact graph data structure.
     */
    void preProcess( File osmFile )
    {
        OSMInputFile in = null;
        try
        {
            in = new OSMInputFile(osmFile).setWorkerThreads(workerThreads).open();

            long tmpWayCounter = 1;
            long tmpRelationCounter = 1;
            OSMElement item;
            while ((item = in.getNext()) != null)
            {
                if (item.isType(OSMElement.WAY))
                {
                    final OSMWay way = (OSMWay) item;
                    boolean valid = filterWay(way);
                    if (valid)
                    {
                        // count every node occurrence; a second occurrence promotes it to tower node
                        TLongList wayNodes = way.getNodes();
                        int s = wayNodes.size();
                        for (int index = 0; index < s; index++)
                        {
                            prepareHighwayNode(wayNodes.get(index));
                        }

                        if (++tmpWayCounter % 5000000 == 0)
                        {
                            logger.info(nf(tmpWayCounter) + " (preprocess), osmIdMap:"
                                    + nf(getNodeMap().getSize()) + " (" + getNodeMap().getMemoryUsage() + "MB) "
                                    + Helper.getMemInfo());
                        }
                    }
                }
                if (item.isType(OSMElement.RELATION))
                {
                    final OSMRelation relation = (OSMRelation) item;
                    if (!relation.isMetaRelation() && relation.hasTag("type", "route"))
                        prepareWaysWithRelationInfo(relation);

                    if (relation.hasTag("type", "restriction"))
                        prepareRestrictionRelation(relation);

                    if (++tmpRelationCounter % 50000 == 0)
                    {
                        logger.info(nf(tmpRelationCounter) + " (preprocess), osmWayMap:" + nf(getRelFlagsMap().size())
                                + " " + Helper.getMemInfo());
                    }
                }
            }
        } catch (Exception ex)
        {
            throw new RuntimeException("Problem while parsing file", ex);
        } finally
        {
            Helper.close(in);
        }
    }

    // Remembers the from/to way ids of a turn restriction so their edge ids can be mapped in pass 2.
    private void prepareRestrictionRelation( OSMRelation relation )
    {
        OSMTurnRelation turnRelation = createTurnRelation(relation);
        if (turnRelation != null)
        {
            getOsmWayIdSet().add(turnRelation.getOsmIdFrom());
            getOsmWayIdSet().add(turnRelation.getOsmIdTo());
        }
    }

    /**
     * @return all required osmWayIds to process e.g. relations.
     */
    private TLongSet getOsmWayIdSet()
    {
        return osmWayIdSet;
    }

    // lazily created since its ideal capacity is only known after preprocessing
    private TIntLongMap getEdgeIdToOsmWayIdMap()
    {
        if (edgeIdToOsmWayIdMap == null)
            edgeIdToOsmWayIdMap = new TIntLongHashMap(getOsmWayIdSet().size(), 0.5f, -1, -1);

        return edgeIdToOsmWayIdMap;
    }

    /**
     * Filter ways but do not analyze properties wayNodes will be filled with participating node
     * ids.
     * <p>
     * @return true the current xml entry is a way entry and has nodes
     */
    boolean filterWay( OSMWay item )
    {
        // ignore broken geometry
        if (item.getNodes().size() < 2)
            return false;

        // ignore multipolygon geometry
        if (!item.hasTags())
            return false;

        return encodingManager.acceptWay(item) > 0;
    }

    /**
     * Creates the edges and nodes files from the specified osm file.
     */
    private void writeOsm2Graph( File osmFile )
    {
        // rough initial node-count estimate for storage pre-allocation
        int tmp = (int) Math.max(getNodeMap().getSize() / 50, 100);
        logger.info("creating graph. Found nodes (pillar+tower):" + nf(getNodeMap().getSize()) + ", " + Helper.getMemInfo());
        ghStorage.create(tmp);
        long wayStart = -1;
        long relationStart = -1;
        long counter = 1;
        OSMInputFile in = null;
        try
        {
            in = new OSMInputFile(osmFile).setWorkerThreads(workerThreads).open();
            LongIntMap nodeFilter = getNodeMap();
            OSMElement item;
            while ((item = in.getNext()) != null)
            {
                switch (item.getType())
                {
                    case OSMElement.NODE:
                        // only nodes referenced by an accepted way are interesting
                        if (nodeFilter.get(item.getId()) != -1)
                        {
                            processNode((OSMNode) item);
                        }
                        break;

                    case OSMElement.WAY:
                        if (wayStart < 0)
                        {
                            logger.info(nf(counter) + ", now parsing WAYS");
                            wayStart = counter;
                        }
                        processWay((OSMWay) item);
                        break;
                    case OSMElement.RELATION:
                        if (relationStart < 0)
                        {
                            logger.info(nf(counter) + ", now parsing relations");
                            relationStart = counter;
                        }
                        processRelation((OSMRelation) item);
                        break;
                }
                if (++counter % 100000000 == 0)
                {
                    logger.info(nf(counter) + ", locs:" + nf(locations)
                            + " (" + skippedLocations + ") " + Helper.getMemInfo());
                }
            }

            // logger.info("storage nodes:" + storage.nodes() + " vs. graph nodes:" + storage.getGraph().nodes());
        } catch (Exception ex)
        {
            throw new RuntimeException("Couldn't process file " + osmFile + ", error: " + ex.getMessage(), ex);
        } finally
        {
            Helper.close(in);
        }

        finishedReading();
        if (graph.getNodes() == 0)
            throw new IllegalStateException("osm must not be empty. read " + counter + " lines and " + locations + " locations");
    }

    /**
     * Process properties, encode flags and create edges for the way.
     */
    void processWay( OSMWay way )
    {
        if (way.getNodes().size() < 2)
            return;

        // ignore multipolygon geometry
        if (!way.hasTags())
            return;

        long wayOsmId = way.getId();

        long includeWay = encodingManager.acceptWay(way);
        if (includeWay == 0)
            return;

        long relationFlags = getRelFlagsMap().get(way.getId());

        // TODO move this after we have created the edge and know the coordinates => encodingManager.applyWayTags
        TLongList osmNodeIds = way.getNodes();
        // Estimate length of ways containing a route tag e.g. for ferry speed calculation
        if (osmNodeIds.size() > 1)
        {
            int first = getNodeMap().get(osmNodeIds.get(0));
            int last = getNodeMap().get(osmNodeIds.get(osmNodeIds.size() - 1));
            double firstLat = getTmpLatitude(first), firstLon = getTmpLongitude(first);
            double lastLat = getTmpLatitude(last), lastLon = getTmpLongitude(last);
            double estimatedDist = 0;
            if (!Double.isNaN(firstLat) && !Double.isNaN(firstLon) && !Double.isNaN(lastLat) && !Double.isNaN(lastLon))
            {
                estimatedDist = distCalc.calcDist(firstLat, firstLon, lastLat, lastLon);
                // Add artificial tag for the estimated distance and center
                way.setTag("estimated_distance", estimatedDist);
                way.setTag("estimated_center", new GHPoint((firstLat + lastLat) / 2, (firstLon + lastLon) / 2));
            }

            // Smoothing filter for elevation on ways: collects raw per-node elevations plus
            // inter-node distances, smooths them, then writes the result back. Endpoint values
            // (i == 0 / last) are averaged with the old elevation instead of replaced, since a
            // tower node may be shared by several ways (see updateTmpElevation 'average' flag).
            if(smoothingFilter.equalsIgnoreCase("kalman") || smoothingFilter.equalsIgnoreCase("mean"))
            {
                double[] tmpElevations = new double[osmNodeIds.size()];
                double[] tmpDistances = new double[osmNodeIds.size()-1];
                int osmNodeId = getNodeMap().get(osmNodeIds.get(0));
                tmpElevations[0] = getElevation(osmNodeId);
                for (int i = 1; i < tmpElevations.length; i++)
                {
                    osmNodeId = getNodeMap().get(osmNodeIds.get(i));
                    tmpElevations[i] = getElevation(osmNodeId);
                    int firstNode = getNodeMap().get(osmNodeIds.get(i-1));
                    int lastNode = getNodeMap().get(osmNodeIds.get(i));
                    double firstNodeLat = getTmpLatitude(firstNode), firstNodeLon = getTmpLongitude(firstNode);
                    double lastNodeLat = getTmpLatitude(lastNode), lastNodeLon = getTmpLongitude(lastNode);
                    if (!Double.isNaN(firstNodeLat) && !Double.isNaN(firstNodeLon) && !Double.isNaN(lastNodeLat) && !Double.isNaN(lastNodeLon))
                    {
                        double tmpDist = distCalc.calcDist(firstNodeLat, firstNodeLon, lastNodeLat, lastNodeLon);
                        // NOTE(review): entries stay 0 when either coordinate is NaN — assumed
                        // acceptable for the filters below; confirm against their implementations.
                        tmpDistances[i-1] = tmpDist;
                    }
                }
                SmoothingFilter filter;
                // constants (window 100 / COMBINED, 6, 60) are filter tuning parameters —
                // their exact semantics are defined in MeanFilter/SimpleKalmanFilter.
                if(smoothingFilter.equalsIgnoreCase("mean"))
                    filter = new MeanFilter(tmpDistances, 100);
                else
                    filter = new SimpleKalmanFilter(SimpleKalmanFilter.COMBINED, 6, tmpDistances, 60);
                double[] estimatedElevations = filter.smooth(tmpElevations);
                for (int i = 0; i < estimatedElevations.length; i++)
                {
                    osmNodeId = getNodeMap().get(osmNodeIds.get(i));
                    updateTmpElevation(osmNodeId, estimatedElevations[i], (i == 0 || i == estimatedElevations.length - 1));
                }
            }
        }

        if (way.getTag("duration") != null)
        {
            try
            {
                long dur = OSMTagParser.parseDuration(way.getTag("duration"));
                // Provide the duration value in seconds in an artificial graphhopper specific tag:
                way.setTag("duration:seconds", Long.toString(dur));
            } catch(Exception ex)
            {
                // best-effort: a malformed duration tag must not abort the import
                logger.warn("Parsing error in way with OSMID=" + way.getId() + " : " + ex.getMessage());
            }
        }

        long wayFlags = encodingManager.handleWayTags(way, includeWay, relationFlags);
        if (wayFlags == 0)
            return;

        List<EdgeIteratorState> createdEdges = new ArrayList<EdgeIteratorState>();
        // look for barriers along the way
        final int size = osmNodeIds.size();
        int lastBarrier = -1;
        for (int i = 0; i < size; i++)
        {
            long nodeId = osmNodeIds.get(i);
            long nodeFlags = getNodeFlagsMap().get(nodeId);
            // barrier was spotted and way is otherwise passable for that mode of travel
            if (nodeFlags > 0)
            {
                if ((nodeFlags & wayFlags) > 0)
                {
                    // remove barrier to avoid duplicates
                    getNodeFlagsMap().put(nodeId, 0);

                    // create shadow node copy for zero length edge
                    long newNodeId = addBarrierNode(nodeId);
                    if (i > 0)
                    {
                        // start at beginning of array if there was no previous barrier
                        if (lastBarrier < 0)
                            lastBarrier = 0;

                        // add way up to barrier shadow node
                        long transfer[] = osmNodeIds.toArray(lastBarrier, i - lastBarrier + 1);
                        transfer[transfer.length - 1] = newNodeId;
                        TLongList partIds = new TLongArrayList(transfer);
                        createdEdges.addAll(addOSMWay(partIds, wayFlags, wayOsmId));

                        // create zero length edge for barrier
                        createdEdges.addAll(addBarrierEdge(newNodeId, nodeId, wayFlags, nodeFlags, wayOsmId));
                    } else
                    {
                        // run edge from real first node to shadow node
                        createdEdges.addAll(addBarrierEdge(nodeId, newNodeId, wayFlags, nodeFlags, wayOsmId));

                        // exchange first node for created barrier node
                        osmNodeIds.set(0, newNodeId);
                    }

                    // remember barrier for processing the way behind it
                    lastBarrier = i;
                }
            }
        }

        // just add remainder of way to graph if barrier was not the last node
        if (lastBarrier >= 0)
        {
            if (lastBarrier < size - 1)
            {
                long transfer[] = osmNodeIds.toArray(lastBarrier, size - lastBarrier);
                TLongList partNodeIds = new TLongArrayList(transfer);
                createdEdges.addAll(addOSMWay(partNodeIds, wayFlags, wayOsmId));
            }
        } else
        {
            // no barriers - simply add the whole way
            createdEdges.addAll(addOSMWay(way.getNodes(), wayFlags, wayOsmId));
        }

        for (EdgeIteratorState edge : createdEdges)
        {
            encodingManager.applyWayTags(way, edge);
            // NOTE(review): return value of fetchWayGeometry(3) is discarded — presumably called
            // for a side effect; confirm against EdgeIteratorState, otherwise it is dead code.
            edge.fetchWayGeometry(3);
        }
    }

    // Translates an OSM turn restriction relation into per-encoder turn cost table entries
    // and stores them in the graph's TurnCostExtension (no-op for other extensions).
    public void processRelation( OSMRelation relation ) throws XMLStreamException
    {
        if (relation.hasTag("type", "restriction"))
        {
            OSMTurnRelation turnRelation = createTurnRelation(relation);
            if (turnRelation != null)
            {
                GraphExtension extendedStorage = graph.getExtension();
                if (extendedStorage instanceof TurnCostExtension)
                {
                    TurnCostExtension tcs = (TurnCostExtension) extendedStorage;
                    Collection<TurnCostTableEntry> entries = analyzeTurnRelation(turnRelation);
                    for (TurnCostTableEntry entry : entries)
                    {
                        tcs.addTurnInfo(entry.edgeFrom, entry.nodeVia, entry.edgeTo, entry.flags);
                    }
                }
            }
        }
    }

    // Collects turn cost entries from every registered encoder, OR-merging the flags of
    // entries that refer to the same item id.
    public Collection<TurnCostTableEntry> analyzeTurnRelation( OSMTurnRelation turnRelation )
    {
        TLongObjectMap<TurnCostTableEntry> entries = new TLongObjectHashMap<OSMTurnRelation.TurnCostTableEntry>();

        for (FlagEncoder encoder : encodingManager.fetchEdgeEncoders())
        {
            for (TurnCostTableEntry entry : analyzeTurnRelation(encoder, turnRelation))
            {
                TurnCostTableEntry oldEntry = entries.get(entry.getItemId());
                if (oldEntry != null)
                {
                    // merging different encoders
                    oldEntry.flags |= entry.flags;
                } else
                {
                    entries.put(entry.getItemId(), entry);
                }
            }
        }

        return entries.valueCollection();
    }

    // Per-encoder turn relation analysis; returns an empty list for encoders without
    // turn cost support. Edge explorers are cached per encoder across invocations.
    public Collection<TurnCostTableEntry> analyzeTurnRelation( FlagEncoder encoder, OSMTurnRelation turnRelation )
    {
        if (!encoder.supports(TurnWeighting.class))
            return Collections.emptyList();

        EdgeExplorer edgeOutExplorer = outExplorerMap.get(encoder);
        EdgeExplorer edgeInExplorer = inExplorerMap.get(encoder);

        if (edgeOutExplorer == null || edgeInExplorer == null)
        {
            edgeOutExplorer = graph.createEdgeExplorer(new DefaultEdgeFilter(encoder, false, true));
            outExplorerMap.put(encoder, edgeOutExplorer);

            edgeInExplorer = graph.createEdgeExplorer(new DefaultEdgeFilter(encoder, true, false));
            inExplorerMap.put(encoder, edgeInExplorer);
        }
        return turnRelation.getRestrictionAsEntries(encoder, edgeOutExplorer, edgeInExplorer, this);
    }

    /**
     * @return OSM way ID from specified edgeId. Only previously stored OSM-way-IDs are returned in
     * order to reduce memory overhead.
     */
    public long getOsmIdOfInternalEdge( int edgeId )
    {
        return getEdgeIdToOsmWayIdMap().get(edgeId);
    }

    // Decodes the internal tower node id for an OSM node id, or EMPTY if it is not a tower node.
    // Tower ids are stored as -(towerId + 3), hence the -id - 3 decoding.
    public int getInternalNodeIdOfOsmNode( long nodeOsmId )
    {
        int id = getNodeMap().get(nodeOsmId);
        if (id < TOWER_NODE)
            return -id - 3;

        return EMPTY;
    }

    // TODO remove this ugly stuff via better preparsing phase! E.g. putting every tags etc into a helper file!
    // 'id' is the encoded node map value: < TOWER_NODE means tower node, > -TOWER_NODE means pillar node.
    double getTmpLatitude( int id )
    {
        if (id == EMPTY)
            return Double.NaN;
        if (id < TOWER_NODE)
        {
            // tower node
            id = -id - 3;
            return nodeAccess.getLatitude(id);
        } else if (id > -TOWER_NODE)
        {
            // pillar node
            id = id - 3;
            return pillarInfo.getLatitude(id);
        } else
            // e.g. if id is not handled from preparse (e.g. was ignored via isInBounds)
            return Double.NaN;
    }

    // Same decoding scheme as getTmpLatitude, but for longitude.
    double getTmpLongitude( int id )
    {
        if (id == EMPTY)
            return Double.NaN;
        if (id < TOWER_NODE)
        {
            // tower node
            id = -id - 3;
            return nodeAccess.getLongitude(id);
        } else if (id > -TOWER_NODE)
        {
            // pillar node
            id = id - 3;
            return pillarInfo.getLon(id);
        } else
            // e.g. if id is not handled from preparse (e.g. was ignored via isInBounds)
            return Double.NaN;
    }

    //TODO update elevation
    // Writes a (smoothed) elevation back to a tower or pillar node. When 'average' is set
    // the new value is averaged with the currently stored one (used for way endpoints,
    // which may be shared with other ways).
    // @return false if the id is EMPTY or not resolvable to a tower/pillar node
    boolean updateTmpElevation( int id, double ele, boolean average ){
        if (id == EMPTY)
            return false;
        if(id < TOWER_NODE){
            // tower node: average must be computed before decoding since getTmpElevation
            // expects the encoded id
            if(average)
                ele = (getTmpElevation(id) + ele) / 2;
            id = -id -3;
            nodeAccess.setElevation(id, ele);
            return true;
        } else if (id > -TOWER_NODE)
        {
            // pillar node
            id = id - 3;
            pillarInfo.setElevation(id, ele);
            return true;
        } else
            // e.g. if id is not handled from preparse (e.g. was ignored via isInBounds)
            return false;
    }

    // Reads the currently stored elevation for an encoded tower/pillar node id.
    double getTmpElevation( int nodeId )
    {
        if (nodeId == EMPTY)
            return Double.NaN;
        if (nodeId < TOWER_NODE)
        {
            // tower node
            nodeId = -nodeId - 3;
            return nodeAccess.getElevation(nodeId);
        } else if (nodeId > -TOWER_NODE)
        {
            // pillar node
            nodeId = nodeId - 3;
            return pillarInfo.getElevation(nodeId);
        } else
            // e.g. if id is not handled from preparse (e.g. was ignored via isInBounds)
            return Double.NaN;
    }

    // Stores the node's coordinates (pass 2) and records any barrier flags from its tags.
    private void processNode( OSMNode node )
    {
        if (isInBounds(node))
        {
            addNode(node);

            // analyze node tags for barriers
            if (node.hasTags())
            {
                long nodeFlags = encodingManager.handleNodeTags(node);
                if (nodeFlags != 0)
                    getNodeFlagsMap().put(node.getId(), nodeFlags);
            }

            locations++;
        } else
        {
            skippedLocations++;
        }
    }

    //TODO update node elevation with Kalman
    // Stores lat/lon/ele for a node previously classified in pass 1: tower nodes go straight
    // into the graph storage, pillar nodes into the intermediate PillarInfo structure
    // (their map value is re-encoded as pillarId + 3).
    boolean addNode( OSMNode node )
    {
        int nodeType = getNodeMap().get(node.getId());
        if (nodeType == EMPTY)
            return false;

        double lat = node.getLat();
        double lon = node.getLon();
        double ele = getElevation(node);
        if (nodeType == TOWER_NODE)
        {
            addTowerNode(node.getId(), lat, lon, ele);
        } else if (nodeType == PILLAR_NODE)
        {
            pillarInfo.setNode(nextPillarId, lat, lon, ele);
            getNodeMap().put(node.getId(), nextPillarId + 3);
            nextPillarId++;
        }
        return true;
    }

    protected double getElevation( OSMNode node )
    {
        return eleProvider.getEle(node.getLat(), node.getLon());
    }

    // Overload taking an encoded tower/pillar node id instead of an OSMNode;
    // used by the elevation smoothing in processWay.
    protected double getElevation( int osmNodeId ){
        double lat = getTmpLatitude(osmNodeId);
        double lon = getTmpLongitude(osmNodeId);
        return eleProvider.getEle(lat, lon);
    }

    // Stores encoder-derived relation flags for every way member of a route relation (pass 1b).
    void prepareWaysWithRelationInfo( OSMRelation osmRelation )
    {
        // is there at least one tag interesting for the registered encoders?
        if (encodingManager.handleRelationTags(osmRelation, 0) == 0)
            return;

        int size = osmRelation.getMembers().size();
        for (int index = 0; index < size; index++)
        {
            OSMRelation.Member member = osmRelation.getMembers().get(index);
            if (member.type() != OSMRelation.Member.WAY)
                continue;

            long osmId = member.ref();
            long oldRelationFlags = getRelFlagsMap().get(osmId);

            // Check if our new relation data is better compared to the last one
            long newRelationFlags = encodingManager.handleRelationTags(osmRelation, oldRelationFlags);
            if (oldRelationFlags != newRelationFlags)
                getRelFlagsMap().put(osmId, newRelationFlags);
        }
    }

    // Pass-1 classification: first occurrence => pillar node, second occurrence => tower node.
    void prepareHighwayNode( long osmId )
    {
        int tmpIndex = getNodeMap().get(osmId);
        if (tmpIndex == EMPTY)
        {
            // osmId is used exactly once
            getNodeMap().put(osmId, PILLAR_NODE);
        } else if (tmpIndex > EMPTY)
        {
            // mark node as tower node as it occurred at least twice
            getNodeMap().put(osmId, TOWER_NODE);
        } else
        {
            // tmpIndex is already negative (already tower node)
        }
    }

    // Writes the tower node into graph storage and maps the OSM id to the encoded
    // internal id -(towerId + 3).
    int addTowerNode( long osmId, double lat, double lon, double ele )
    {
        if (nodeAccess.is3D())
            nodeAccess.setNode(nextTowerId, lat, lon, ele);
        else
            nodeAccess.setNode(nextTowerId, lat, lon);

        int id = -(nextTowerId + 3);
        getNodeMap().put(osmId, id);
        nextTowerId++;
        return id;
    }

    /**
     * This method creates from an OSM way (via the osm ids) one or more edges in the graph.
     */
    Collection<EdgeIteratorState> addOSMWay( final TLongList osmNodeIds, final long flags, final long wayOsmId )
    {
        PointList pointList = new PointList(osmNodeIds.size(), nodeAccess.is3D());
        List<EdgeIteratorState> newEdges = new ArrayList<EdgeIteratorState>(5);
        int firstNode = -1;
        int lastIndex = osmNodeIds.size() - 1;
        int lastInBoundsPillarNode = -1;
        try
        {
            for (int i = 0; i < osmNodeIds.size(); i++)
            {
                long osmId = osmNodeIds.get(i);
                int tmpNode = getNodeMap().get(osmId);
                if (tmpNode == EMPTY)
                    continue;

                // skip osmIds with no associated pillar or tower id (e.g. !OSMReader.isBounds)
                if (tmpNode == TOWER_NODE)
                    continue;

                if (tmpNode == PILLAR_NODE)
                {
                    // In some cases no node information is saved for the specified osmId.
                    // ie. a way references a <node> which does not exist in the current file.
                    // => if the node before was a pillar node then convert it to a tower node (as it is also end-standing).
                    if (!pointList.isEmpty() && lastInBoundsPillarNode > -TOWER_NODE)
                    {
                        // transform the pillar node to a tower node
                        tmpNode = lastInBoundsPillarNode;
                        tmpNode = handlePillarNode(tmpNode, osmId, null, true);
                        tmpNode = -tmpNode - 3;
                        if (pointList.getSize() > 1 && firstNode >= 0)
                        {
                            // TOWER node
                            newEdges.add(addEdge(firstNode, tmpNode, pointList, flags, wayOsmId));
                            pointList.clear();
                            pointList.add(nodeAccess, tmpNode);
                        }
                        firstNode = tmpNode;
                        lastInBoundsPillarNode = -1;
                    }
                    continue;
                }

                if (tmpNode <= -TOWER_NODE && tmpNode >= TOWER_NODE)
                    throw new AssertionError("Mapped index not in correct bounds " + tmpNode + ", " + osmId);

                if (tmpNode > -TOWER_NODE)
                {
                    boolean convertToTowerNode = i == 0 || i == lastIndex;
                    if (!convertToTowerNode)
                    {
                        lastInBoundsPillarNode = tmpNode;
                    }

                    // PILLAR node, but convert to towerNode if end-standing
                    tmpNode = handlePillarNode(tmpNode, osmId, pointList, convertToTowerNode);
                }

                if (tmpNode < TOWER_NODE)
                {
                    // TOWER node
                    tmpNode = -tmpNode - 3;
                    pointList.add(nodeAccess, tmpNode);
                    if (firstNode >= 0)
                    {
                        newEdges.add(addEdge(firstNode, tmpNode, pointList, flags, wayOsmId));
                        pointList.clear();
                        pointList.add(nodeAccess, tmpNode);
                    }
                    firstNode = tmpNode;
                }
            }
        } catch (RuntimeException ex)
        {
            logger.error("Couldn't properly add edge with osm ids:" + osmNodeIds, ex);
            if (exitOnlyPillarNodeException)
                throw ex;
        }
        return newEdges;
    }

    // Creates one graph edge between two tower nodes; interior points become the edge's
    // way geometry (optionally Douglas-Peucker simplified).
    EdgeIteratorState addEdge( int fromIndex, int toIndex, PointList pointList, long flags, long wayOsmId )
    {
        // sanity checks
        if (fromIndex < 0 || toIndex < 0)
            throw new AssertionError("to or from index is invalid for this edge "
                    + fromIndex + "->" + toIndex + ", points:" + pointList);
        if (pointList.getDimension() != nodeAccess.getDimension())
            throw new AssertionError("Dimension does not match for pointList vs. nodeAccess "
                    + pointList.getDimension() + " <-> " + nodeAccess.getDimension());

        double towerNodeDistance = 0;
        double prevLat = pointList.getLatitude(0);
        double prevLon = pointList.getLongitude(0);
        double prevEle = pointList.is3D() ? pointList.getElevation(0) : Double.NaN;
        double lat, lon, ele = Double.NaN;
        PointList pillarNodes = new PointList(pointList.getSize() - 2, nodeAccess.is3D());
        int nodes = pointList.getSize();
        for (int i = 1; i < nodes; i++)
        {
            // we could save some lines if we would use pointList.calcDistance(distCalc);
            lat = pointList.getLatitude(i);
            lon = pointList.getLongitude(i);
            if (pointList.is3D())
            {
                ele = pointList.getElevation(i);
                towerNodeDistance += distCalc3D.calcDist(prevLat, prevLon, prevEle, lat, lon, ele);
                prevEle = ele;
            } else
                towerNodeDistance += distCalc.calcDist(prevLat, prevLon, lat, lon);
            prevLat = lat;
            prevLon = lon;
            // interior points (not first, not last) become pillar geometry
            if (nodes > 2 && i < nodes - 1)
            {
                if (pillarNodes.is3D())
                    pillarNodes.add(lat, lon, ele);
                else
                    pillarNodes.add(lat, lon);
            }
        }
        if (towerNodeDistance < 0.0001)
        {
            // As investigation shows often two paths should have crossed via one identical point
            // but end up in two very close points.
            zeroCounter++;
            towerNodeDistance = 0.0001;
        }

        if (Double.isInfinite(towerNodeDistance) || Double.isNaN(towerNodeDistance))
        {
            logger.warn("Bug in OSM or GraphHopper. Illegal tower node distance " + towerNodeDistance + " reset to 1m, osm way " + wayOsmId);
            towerNodeDistance = 1;
        }

        EdgeIteratorState iter = graph.edge(fromIndex, toIndex).setDistance(towerNodeDistance).setFlags(flags);
        if (nodes > 2)
        {
            if (doSimplify)
                simplifyAlgo.simplify(pillarNodes);

            iter.setWayGeometry(pillarNodes);
        }
        storeOsmWayID(iter.getEdge(), wayOsmId);
        return iter;
    }

    /**
     * Stores only osmWayIds which are required for relations
     */
    protected void storeOsmWayID( int edgeId, long osmWayId )
    {
        if (getOsmWayIdSet().contains(osmWayId))
        {
            getEdgeIdToOsmWayIdMap().put(edgeId, osmWayId);
        }
    }

    /**
     * Reads a pillar node's coordinates and either appends them to the given pointList or,
     * if convertToTowerNode is set, promotes the pillar node to a tower node (its pillar
     * slot is invalidated with Double.MAX_VALUE markers).
     * <p>
     * @return converted tower node
     */
    private int handlePillarNode( int tmpNode, long osmId, PointList pointList, boolean convertToTowerNode )
    {
        tmpNode = tmpNode - 3;
        double lat = pillarInfo.getLatitude(tmpNode);
        double lon = pillarInfo.getLongitude(tmpNode);
        double ele = pillarInfo.getElevation(tmpNode);
        if (lat == Double.MAX_VALUE || lon == Double.MAX_VALUE)
            throw new RuntimeException("Conversion pillarNode to towerNode already happended!? "
                    + "osmId:" + osmId + " pillarIndex:" + tmpNode);

        if (convertToTowerNode)
        {
            // convert pillarNode type to towerNode, make pillar values invalid
            pillarInfo.setNode(tmpNode, Double.MAX_VALUE, Double.MAX_VALUE, Double.MAX_VALUE);
            tmpNode = addTowerNode(osmId, lat, lon, ele);
        } else
        {
            if (pointList.is3D())
                pointList.add(lat, lon, ele);
            else
                pointList.add(lat, lon);
        }

        return (int) tmpNode;
    }

    // Releases intermediate structures after import; the reader cannot be reused afterwards.
    protected void finishedReading()
    {
        printInfo("way");
        pillarInfo.clear();
        eleProvider.release();
        osmNodeIdToInternalNodeMap = null;
        osmNodeIdToNodeFlagsMap = null;
        osmWayIdToRouteWeightMap = null;
        osmWayIdSet = null;
        edgeIdToOsmWayIdMap = null;
    }

    /**
     * Create a copy of the barrier node
     */
    long addBarrierNode( long nodeId )
    {
        OSMNode newNode;

        // copy the coordinates from wherever the original node currently lives
        int graphIndex = getNodeMap().get(nodeId);
        if (graphIndex < TOWER_NODE)
        {
            graphIndex = -graphIndex - 3;
            newNode = new OSMNode(createNewNodeId(), nodeAccess, graphIndex);
        } else
        {
            graphIndex = graphIndex - 3;
            newNode = new OSMNode(createNewNodeId(), pillarInfo, graphIndex);
        }

        final long id = newNode.getId();
        prepareHighwayNode(id);
        addNode(newNode);
        return id;
    }

    private long createNewNodeId()
    {
        return newUniqueOsmId++;
    }

    /**
     * Add a zero length edge with reduced routing options to the graph.
     */
    Collection<EdgeIteratorState> addBarrierEdge( long fromId, long toId, long flags, long nodeFlags, long wayOsmId )
    {
        // clear barred directions from routing flags
        flags &= ~nodeFlags;
        // add edge
        barrierNodeIds.clear();
        barrierNodeIds.add(fromId);
        barrierNodeIds.add(toId);
        return addOSMWay(barrierNodeIds, flags, wayOsmId);
    }

    /**
     * Creates an OSM turn relation out of an unspecified OSM relation
     * <p>
     * @return the OSM turn relation, <code>null</code>, if unsupported turn relation
     */
    OSMTurnRelation createTurnRelation( OSMRelation relation )
    {
        OSMTurnRelation.Type type = OSMTurnRelation.Type.getRestrictionType(relation.getTag("restriction"));
        if (type != OSMTurnRelation.Type.UNSUPPORTED)
        {
            long fromWayID = -1;
            long viaNodeID = -1;
            long toWayID = -1;

            for (OSMRelation.Member member : relation.getMembers())
            {
                if (OSMElement.WAY == member.type())
                {
                    if ("from".equals(member.role()))
                    {
                        fromWayID = member.ref();
                    } else if ("to".equals(member.role()))
                    {
                        toWayID = member.ref();
                    }
                } else if (OSMElement.NODE == member.type() && "via".equals(member.role()))
                {
                    viaNodeID = member.ref();
                }
            }
            // all three members are required for a usable restriction
            if (fromWayID >= 0 && toWayID >= 0 && viaNodeID >= 0)
            {
                return new OSMTurnRelation(fromWayID, viaNodeID, toWayID, type);
            }
        }
        return null;
    }

    /**
     * Filter method, override in subclass
     */
    boolean isInBounds( OSMNode node )
    {
        return true;
    }

    /**
     * Maps OSM IDs (long) to internal node IDs (int)
     */
    protected LongIntMap getNodeMap()
    {
        return osmNodeIdToInternalNodeMap;
    }

    protected TLongLongMap getNodeFlagsMap()
    {
        return osmNodeIdToNodeFlagsMap;
    }

    TLongLongHashMap getRelFlagsMap()
    {
        return osmWayIdToRouteWeightMap;
    }

    /**
     * Specify the type of the path calculation (car, bike, ...).
     */
    public OSMReader setEncodingManager( EncodingManager em )
    {
        this.encodingManager = em;
        return this;
    }

    public OSMReader setWayPointMaxDistance( double maxDist )
    {
        // a non-positive distance disables way geometry simplification entirely
        doSimplify = maxDist > 0;
        simplifyAlgo.setMaxDistance(maxDist);
        return this;
    }

    public OSMReader setWorkerThreads( int numOfWorkers )
    {
        this.workerThreads = numOfWorkers;
        return this;
    }

    public OSMReader setElevationProvider( ElevationProvider eleProvider )
    {
        if (eleProvider == null)
            throw new IllegalStateException("Use the NOOP elevation provider instead of null or don't call setElevationProvider");

        if (!nodeAccess.is3D() && ElevationProvider.NOOP != eleProvider)
            throw new IllegalStateException("Make sure you graph accepts 3D data");

        this.eleProvider = eleProvider;
        return this;
    }

    // Selects the elevation smoothing mode ("kalman" or "mean"); any other value disables it.
    public OSMReader setElevationFilter( String filter )
    {
        this.smoothingFilter = filter;
        return this;
    }

    public OSMReader setOSMFile( File osmFile )
    {
        this.osmFile = osmFile;
        return this;
    }

    private void printInfo( String str )
    {
        logger.info("finished " + str + " processing." + " nodes: " + graph.getNodes()
                + ", osmIdMap.size:" + getNodeMap().getSize() + ", osmIdMap:" + getNodeMap().getMemoryUsage() + "MB"
                + ", nodeFlagsMap.size:" + getNodeFlagsMap().size() + ", relFlagsMap.size:" + getRelFlagsMap().size()
                + ", zeroCounter:" + zeroCounter
                + " " + Helper.getMemInfo());
    }

    @Override
    public String toString()
    {
        return getClass().getSimpleName();
    }
}
/* Derby - Class com.pivotal.gemfirexd.internal.jdbc.EmbedPooledConnection Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ /* * Changes for GemFireXD distributed data platform (some marked by "GemStone changes") * * Portions Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); you * may not use this file except in compliance with the License. You * may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. See accompanying * LICENSE file. 
*/ package com.pivotal.gemfirexd.internal.jdbc; /* import impl class */ import com.pivotal.gemfirexd.internal.iapi.error.ExceptionSeverity; import com.pivotal.gemfirexd.internal.iapi.jdbc.BrokeredConnection; import com.pivotal.gemfirexd.internal.iapi.jdbc.BrokeredConnectionControl; import com.pivotal.gemfirexd.internal.iapi.jdbc.EngineConnection; import com.pivotal.gemfirexd.internal.iapi.reference.Property; import com.pivotal.gemfirexd.internal.iapi.services.sanity.SanityManager; import com.pivotal.gemfirexd.internal.iapi.sql.conn.LanguageConnectionContext; import com.pivotal.gemfirexd.internal.impl.jdbc.EmbedCallableStatement; import com.pivotal.gemfirexd.internal.impl.jdbc.EmbedConnection; import com.pivotal.gemfirexd.internal.impl.jdbc.EmbedPreparedStatement; import com.pivotal.gemfirexd.internal.impl.jdbc.Util; import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.sql.PreparedStatement; import java.sql.CallableStatement; import java.util.ArrayList; import java.util.Iterator; import javax.sql.StatementEventListener; /* -- New jdbc 20 extension types --- */ import javax.sql.ConnectionEventListener; import javax.sql.ConnectionEvent; /** A PooledConnection object is a connection object that provides hooks for connection pool management. <P>This is Derby's implementation of a PooledConnection for use in the following environments: <UL> <LI> JDBC 3.0 - Java 2 - JDK 1.4, J2SE 5.0 <LI> JDBC 2.0 - Java 2 - JDK 1.2,1.3 </UL> */ class EmbedPooledConnection implements javax.sql.PooledConnection, BrokeredConnectionControl { /** Static counter for connection ids */ private static int idCounter = 0; /** The id for this connection. */ private int connectionId; /** the connection string */ private String connString; /** * The list of {@code ConnectionEventListener}s. It is initially {@code * null} and will be initialized lazily when the first listener is added. 
*/ private ArrayList eventListener; /** * The number of iterators going through the list of connection event * listeners at the current time. Only one thread may be iterating over the * list at any time (because of synchronization), but a single thread may * have multiple iterators if for instance an event listener performs * database calls that trigger a new event. */ private int eventIterators; EmbedConnection realConnection; int defaultIsolationLevel; private boolean defaultReadOnly; BrokeredConnection currentConnectionHandle; // set up once by the data source final ReferenceableDataSource dataSource; private final String username; private final String password; /** True if the password was passed in on the connection request, false if it came from the data source property. */ private final boolean requestPassword; protected boolean isActive; private synchronized int nextId() { return idCounter++; } EmbedPooledConnection(ReferenceableDataSource ds, String u, String p, boolean requestPassword) throws SQLException { connectionId = nextId(); dataSource = ds; username = u; password = p; this.requestPassword = requestPassword; isActive = true; // open the connection up front in order to do authentication openRealConnection(); } String getUsername() { if (username == null || username.equals("")) return Property.DEFAULT_USER_NAME; else return username; } String getPassword() { if (password == null) return ""; else return password; } /** Create an object handle for a database connection. @return a Connection object @exception SQLException - if a database-access error occurs. */ public synchronized Connection getConnection() throws SQLException { checkActive(); // RealConnection is not null if the app server yanks a local // connection from one client and give it to another. In this case, // the real connection is ready to be used. 
Otherwise, set it up if (realConnection == null) { // first time we establish a connection openRealConnection(); } else { resetRealConnection(); } // Need to do this in case the connection is forcibly removed without // first being closed. Must be performed after resetRealConnection(), // otherwise closing the logical connection may fail if the transaction // is not idle. closeCurrentConnectionHandle(); // now make a brokered connection wrapper and give this to the user // we reuse the EmbedConnection(ie realConnection). Connection c = getNewCurrentConnectionHandle(); return c; } final void openRealConnection() throws SQLException { // first time we establish a connection Connection rc = dataSource.getConnection(username, password, requestPassword); this.realConnection = (EmbedConnection) rc; defaultIsolationLevel = rc.getTransactionIsolation(); defaultReadOnly = rc.isReadOnly(); if (currentConnectionHandle != null) realConnection.setApplicationConnection(currentConnectionHandle); } final Connection getNewCurrentConnectionHandle() { Connection applicationConnection = currentConnectionHandle = ((com.pivotal.gemfirexd.internal.jdbc.Driver20) (realConnection.getLocalDriver())).newBrokeredConnection(this); realConnection.setApplicationConnection(applicationConnection); return applicationConnection; } /** In this case the Listeners are *not* notified. JDBC 3.0 spec section 11.4 */ private void closeCurrentConnectionHandle() throws SQLException { if (currentConnectionHandle != null) { ArrayList tmpEventListener = eventListener; eventListener = null; try { currentConnectionHandle.close(); } finally { eventListener = tmpEventListener; } currentConnectionHandle = null; } } void resetRealConnection() throws SQLException { // ensure any outstanding changes from the previous // user are rolledback. 
realConnection.rollback(); // clear any warnings that are left over realConnection.clearWarnings(); // need to reset transaction isolation, autocommit, readonly, holdability states if (realConnection.getTransactionIsolation() != defaultIsolationLevel) { realConnection.setTransactionIsolation(defaultIsolationLevel); } if (!realConnection.getAutoCommit()) realConnection.setAutoCommit(true); if (realConnection.isReadOnly() != defaultReadOnly) realConnection.setReadOnly(defaultReadOnly); if (realConnection.getHoldability() != ResultSet.HOLD_CURSORS_OVER_COMMIT) realConnection.setHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT); // reset any remaining state of the connection realConnection.resetFromPool(); if (SanityManager.DEBUG) { SanityManager.ASSERT(realConnection.transactionIsIdle(), "real connection should have been idle at this point"); } } /** Close the Pooled connection. @exception SQLException - if a database-access error occurs. */ public synchronized void close() throws SQLException { if (!isActive) return; closeCurrentConnectionHandle(); try { if (realConnection != null) { if (!realConnection.isClosed()) realConnection.close(); } } finally { realConnection = null; // make sure I am not accessed again. isActive = false; eventListener = null; } } /** Add an event listener. */ public final synchronized void addConnectionEventListener(ConnectionEventListener listener) { if (!isActive) return; if (listener == null) return; if (eventListener == null) { eventListener = new ArrayList(); } else if (eventIterators > 0) { // DERBY-3401: Someone is iterating over the ArrayList, and since // we were able to synchronize on this, that someone is us. Clone // the list of listeners in order to prevent invalidation of the // iterator. eventListener = (ArrayList) eventListener.clone(); } eventListener.add(listener); } /** Remove an event listener. 
*/ public final synchronized void removeConnectionEventListener(ConnectionEventListener listener) { if (listener == null || eventListener == null) { return; } if (eventIterators > 0) { // DERBY-3401: Someone is iterating over the ArrayList, and since // we were able to synchronize on this, that someone is us. Clone // the list of listeners in order to prevent invalidation of the // iterator. eventListener = (ArrayList) eventListener.clone(); } eventListener.remove(listener); } /* * class specific method */ // called by ConnectionHandle when it needs to forward things to the // underlying connection public synchronized EngineConnection getRealConnection() throws SQLException { checkActive(); return realConnection; } /** * @return The underlying language connection. */ public synchronized LanguageConnectionContext getLanguageConnection() throws SQLException { checkActive(); return realConnection.getLanguageConnection(); } // my conneciton handle has caught an error (actually, the real connection // has already handled the error, we just need to nofity the listener an // error is about to be thrown to the app). public synchronized void notifyError(SQLException exception) { // only report fatal error to the connection pool manager if (exception.getErrorCode() < ExceptionSeverity.SESSION_SEVERITY) return; // tell my listeners an exception is about to be thrown fireConnectionEventListeners(exception); } /** * Fire all the {@code ConnectionEventListener}s registered. Callers must * synchronize on {@code this} to prevent others from modifying the list of * listeners. 
* * @param exception the exception that caused the event, or {@code null} if * it is a close event */ private void fireConnectionEventListeners(SQLException exception) { if (eventListener != null && !eventListener.isEmpty()) { ConnectionEvent event = new ConnectionEvent(this, exception); eventIterators++; try { for (Iterator it = eventListener.iterator(); it.hasNext();) { ConnectionEventListener l = (ConnectionEventListener) it.next(); if (exception == null) { l.connectionClosed(event); } else { l.connectionErrorOccurred(event); } } } finally { eventIterators--; } } } final void checkActive() throws SQLException { if (!isActive) throw Util.noCurrentConnection(); } /* ** BrokeredConnectionControl api */ /** Returns true if isolation level has been set using either JDBC api or SQL */ public boolean isIsolationLevelSetUsingSQLorJDBC() throws SQLException { if (realConnection != null) return realConnection.getLanguageConnection().isIsolationLevelSetUsingSQLorJDBC(); else return false; } /** Reset the isolation level flag used to keep state in BrokeredConnection. It will get set to true when isolation level is set using JDBC/SQL. It will get reset to false at the start and the end of a global transaction. */ public void resetIsolationLevelFlag() throws SQLException { realConnection.getLanguageConnection().resetIsolationLevelFlagUsedForSQLandJDBC(); } /** Notify the control class that a SQLException was thrown during a call on one of the brokered connection's methods. */ public void notifyException(SQLException sqle) { this.notifyError(sqle); } /** Allow control over setting auto commit mode. */ public void checkAutoCommit(boolean autoCommit) throws SQLException { } /** Are held cursors allowed. */ public int checkHoldCursors(int holdability, boolean downgrade) throws SQLException { return holdability; } /** Allow control over creating a Savepoint (JDBC 3.0) */ public void checkSavepoint() throws SQLException { } /** Allow control over calling rollback. 
*/ public void checkRollback() throws SQLException { } /** Allow control over calling commit. */ public void checkCommit() throws SQLException { } /** @see BrokeredConnectionControl#checkClose() */ public void checkClose() throws SQLException { if (realConnection != null) { realConnection.checkForTransactionInProgress(); } } /** Close called on BrokeredConnection. If this call returns true then getRealConnection().close() will be called. Notify listners that connection is closed. Don't close the underlying real connection as it is pooled. */ public synchronized boolean closingConnection() throws SQLException { //DERBY-2142-Null out the connection handle BEFORE notifying listeners. //At time of the callback the PooledConnection must be //disassociated from its previous logical connection. //If not there is a risk that the Pooled //Connection could be returned to the pool, ready for pickup by a //new thread. This new thread then might obtain a java.sql.Connection //whose reference might get assigned to the currentConnectionHandle //field, meanwhile the previous thread completes the close making //the newly assigned currentConnectionHandle null, resulting in an NPE. currentConnectionHandle = null; // tell my listeners I am closed fireConnectionEventListeners(null); return false; } /** No need to wrap statements for PooledConnections. 
*/ public Statement wrapStatement(Statement s) throws SQLException { return s; } /** * Call the setBrokeredConnectionControl method inside the * EmbedPreparedStatement class to set the BrokeredConnectionControl * variable to this instance of EmbedPooledConnection * This will then be used to call the onStatementErrorOccurred * and onStatementClose events when the corresponding events * occur on the PreparedStatement * * @param ps PreparedStatment to be wrapped * @param sql String * @param generatedKeys Object * @return returns the wrapped PreparedStatement * @throws java.sql.SQLException */ public PreparedStatement wrapStatement(PreparedStatement ps, String sql, Object generatedKeys) throws SQLException { /* */ EmbedPreparedStatement ps_ = (EmbedPreparedStatement)ps; ps_.setBrokeredConnectionControl(this); return (PreparedStatement)ps_; } /** * Call the setBrokeredConnectionControl method inside the * EmbedCallableStatement class to set the BrokeredConnectionControl * variable to this instance of EmbedPooledConnection * This will then be used to call the onStatementErrorOccurred * and onStatementClose events when the corresponding events * occur on the CallableStatement * * @param cs CallableStatment to be wrapped * @param sql String * @return returns the wrapped CallableStatement * @throws java.sql.SQLException */ public CallableStatement wrapStatement(CallableStatement cs, String sql) throws SQLException { EmbedCallableStatement cs_ = (EmbedCallableStatement)cs; cs_.setBrokeredConnectionControl(this); return (CallableStatement)cs_; } /** * Get the string representation of this pooled connection. * * A pooled connection is assigned a separate id from a physical * connection. When a container calls PooledConnection.toString(), * it gets the string representation of this id. This is useful for * developers implementing connection pools when they are trying to * debug pooled connections. 
* * @return a string representation of the uniquie id for this pooled * connection. * */ public String toString() { if ( connString == null ) { String physicalConnString = isActive ? realConnection.toString() : "<none>"; connString = this.getClass().getName() + "@" + this.hashCode() + " " + "(ID = " + connectionId + "), " + "Physical Connection = " + physicalConnString; } return connString; } /*-----------------------------------------------------------------*/ /* * These methods are from the BrokeredConnectionControl interface. * These methods are needed to provide StatementEvent support for * derby. * They are actually implemented in EmbedPooledConnection40 but have * a dummy implementation here so that the compilation wont fail when they * are compiled with jdk1.4 */ /** * Dummy implementation for the actual methods found in * com.pivotal.gemfirexd.internal.jdbc.EmbedPooledConnection40 * @param statement PreparedStatement */ public void onStatementClose(PreparedStatement statement) { } /** * Dummy implementation for the actual methods found in * com.pivotal.gemfirexd.internal.jdbc.EmbedPooledConnection40 * @param statement PreparedStatement * @param sqle SQLException */ public void onStatementErrorOccurred(PreparedStatement statement, SQLException sqle) { } // GemStone changes BEGIN // Dummy JDBC 4.0 methods so can compile with JDK 1.6 public void removeStatementEventListener(StatementEventListener listener) { throw new AssertionError("should have been overridden in JDBC 4.0"); } public void addStatementEventListener(StatementEventListener listener) { throw new AssertionError("should have been overridden in JDBC 4.0"); } // GemStone changes END }
/* * Copyright to the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.rioproject.impl.admin; import net.jini.core.discovery.LookupLocator; import net.jini.core.entry.Entry; import net.jini.core.lookup.ServiceRegistrar; import net.jini.discovery.DiscoveryGroupManagement; import net.jini.discovery.DiscoveryLocatorManagement; import net.jini.discovery.DiscoveryManagement; import net.jini.export.Exporter; import net.jini.id.Uuid; import net.jini.id.UuidFactory; import net.jini.lookup.JoinManager; import org.rioproject.admin.ServiceAdmin; import org.rioproject.admin.ServiceBeanControlException; import org.rioproject.impl.servicebean.DefaultServiceBeanContext; import org.rioproject.servicebean.ServiceBeanContext; import org.rioproject.impl.container.ServiceAdvertiser; import org.rioproject.impl.servicebean.ServiceBeanAdapter; import org.rioproject.opstring.ServiceBeanConfig; import org.rioproject.opstring.ServiceElement; import org.rioproject.proxy.admin.ServiceAdminProxy; import org.rioproject.impl.persistence.SnapshotHandler; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import java.rmi.RemoteException; /** * The ServiceAdminImpl class implements the ServiceAdmin interface providing * administrative support for ServiceProducer implementations. 
* * @author Dennis Reedy */ public class ServiceAdminImpl implements ServiceAdmin { /** The ServiceBean instance */ private ServiceBeanAdapter service; /** ServiceBeanContext */ private ServiceBeanContext context; /** The Exporter for the ServiceAdmin */ protected final Exporter exporter; /** The ServiceAdminProxy */ protected ServiceAdminProxy adminProxy; /** A snapshot handler */ private final SnapshotHandler snapshotHandler; /** The time the service was started */ private final long started; /** A Logger */ private static final Logger logger = LoggerFactory.getLogger("org.rioproject.admin"); /** * Create a ServiceAdmin Impl * * @param service Concrete implementation of a ServiceBeanAdapter * @param exporter The Exporter to export this object */ public ServiceAdminImpl(ServiceBeanAdapter service, Exporter exporter) { this(service, exporter, null); } /** * Create a ServiceAdmin Impl * * @param service Concrete implementation of a ServiceBeanAdapter * @param exporter The Exporter to export this object * @param snapshotHandler The service's snapshot handler used for persistence */ public ServiceAdminImpl(ServiceBeanAdapter service, Exporter exporter, SnapshotHandler snapshotHandler) { if (service==null) throw new IllegalArgumentException("service is null"); if (service.getServiceBeanContext()==null) throw new IllegalArgumentException("context is null"); this.service = service; this.exporter = exporter; this.snapshotHandler = snapshotHandler; this.started = service.getStartTime(); this.context = service.getServiceBeanContext(); } /** * @see org.rioproject.admin.ServiceAdmin#getJoinSet() */ public ServiceRegistrar[] getJoinSet() { JoinManager mgr = service.getJoinManager(); if (mgr!=null) return(mgr.getJoinSet()); else logger.warn("JoinManager is null"); return new ServiceRegistrar[0]; } /** * Get the object to communicate with the ServiceAdminImpl * * @return The ServiceAdmin * * @throws RemoteException if an communication errors occur */ public ServiceAdmin 
getServiceAdmin() throws RemoteException { if (adminProxy==null) { ServiceAdmin serviceAdminRemoteRef = (ServiceAdmin)exporter.export(this); adminProxy = ServiceAdminProxy.getInstance(serviceAdminRemoteRef, UuidFactory.generate()); } return adminProxy; } /** * Unexport the ServiceAdmin * * @param force - If true forcibly unexport */ @SuppressWarnings("unused") public void unexport(boolean force) { if (exporter!=null) { try { exporter.unexport(true); } catch (IllegalStateException e) { logger.warn("ServiceAdminImpl not exported"); } } } /*------------------- * DestroyAdmin *------------------*/ /** * This method terminates the service */ public void destroy() { if (service != null) { service.destroy(); service = null; } } /*------------------- * ServiceBeanAdmin *------------------*/ /** @see org.rioproject.admin.ServiceBeanAdmin#getServiceElement */ public ServiceElement getServiceElement() { return service.getServiceBeanContext().getServiceElement(); } /** @see org.rioproject.admin.ServiceBeanAdmin#setServiceElement */ public void setServiceElement(ServiceElement sElem) { try { ServiceBeanContext sbc = service.getServiceBeanContext(); if (sbc instanceof DefaultServiceBeanContext) ((DefaultServiceBeanContext)sbc).setServiceElement(sElem); else logger.warn("ServiceBeanContext {} not an instance of DefaultServiceBeanContext. 
Unable to set ServiceElement", sbc.toString()); } catch (Throwable t) { logger.warn("Setting ServiceElement", t); } } /** @see org.rioproject.admin.ServiceBeanAdmin#getUpTime */ public long getUpTime() { return System.currentTimeMillis()-started; } /** @see org.rioproject.admin.ServiceBeanAdmin#getServiceBeanInstantiatorUuid */ public Uuid getServiceBeanInstantiatorUuid() { return service.getServiceBeanInstantiatorUuid(); } /*------------------- * ServiceBeanControl *------------------*/ /** * @see org.rioproject.admin.ServiceBeanControl#start */ public Object start() throws ServiceBeanControlException { Object proxy; try { proxy = service.start(context); } catch (Exception e) { throw new ServiceBeanControlException("start failed", e); } return proxy ; } /** * @see org.rioproject.admin.ServiceBeanControl#stop */ public void stop(boolean force) throws ServiceBeanControlException { try { service.stop(force); } catch (Exception e) { throw new ServiceBeanControlException("stop failed", e); } } /** * @see org.rioproject.admin.ServiceBeanControl#advertise */ public void advertise() throws ServiceBeanControlException { try { if (context != null) { Entry[] configuredAttrs = ServiceAdvertiser.getConfiguredAttributes(context); if (configuredAttrs.length > 0) service.addAttributes(configuredAttrs); } else { logger.warn("ServiceBeanContext is null"); } service.advertise(); } catch (Exception e) { throw new ServiceBeanControlException("advertise failed", e); } } /** * @see org.rioproject.admin.ServiceBeanControl#unadvertise */ public void unadvertise() throws ServiceBeanControlException { try { service.unadvertise(); } catch (Exception e) { throw new ServiceBeanControlException("unadvertise failed", e); } } /** * Set the ServiceBeanContext * * @param context The ServiceBeanContext */ public void setServiceBeanContext(ServiceBeanContext context) { if (context==null) throw new IllegalArgumentException("context is null"); this.context = context; } /*-------------- * JoinAdmin 
*-------------*/ public Entry[] getLookupAttributes() { JoinManager mgr = service.getJoinManager(); if (mgr != null) return mgr.getAttributes(); else { logger.debug("JoinManager is null"); } return new Entry[0]; } public void addLookupAttributes(Entry[] attrs) { //service.addAttributes(attrs); JoinManager mgr = service.getJoinManager(); if (mgr != null) { if (attrs != null && attrs.length != 0) mgr.addAttributes(attrs, true); if (snapshotHandler!=null) { try { snapshotHandler.takeSnapshot(); } catch (IOException ioe) { logger.warn("Persisting Added Lookup Attributes", ioe); } } } else { logger.warn("JoinManager is null"); } } public void modifyLookupAttributes(Entry[] attrSetTemplates, Entry[] attrSets) { JoinManager mgr = service.getJoinManager(); if (mgr != null) { mgr.modifyAttributes(attrSetTemplates, attrSets, true); if (snapshotHandler != null) { try { snapshotHandler.takeSnapshot(); } catch (IOException ioe) { logger.warn("Persisting Modified Lookup Attributes", ioe); } } } else { logger.warn("JoinManager is null"); } } public String[] getLookupGroups() { DiscoveryManagement dm; JoinManager mgr = service.getJoinManager(); if (mgr != null) { dm = mgr.getDiscoveryManager(); } else { try { dm = service.getServiceBeanContext().getDiscoveryManagement(); } catch (IOException e) { logger.warn("Getting DiscoveryManagement ", e); return new String[0]; } } DiscoveryGroupManagement dgm = (DiscoveryGroupManagement)dm; return dgm.getGroups(); } public void addLookupGroups(String[] groups) { JoinManager mgr = service.getJoinManager(); if (mgr != null) { try { DiscoveryManagement dm = mgr.getDiscoveryManager(); DiscoveryGroupManagement dgm = (DiscoveryGroupManagement)dm; dgm.addGroups(groups); if (snapshotHandler != null) { try { snapshotHandler.takeSnapshot(); } catch (IOException ioe) { logger.warn("Persisting Added Lookup groups", ioe); } } /* Update ServiceBeanConfig */ setGroups(dgm.getGroups()); } catch (IOException ioe) { logger.warn("Adding Lookup Groups", ioe); } 
} else logger.warn("JoinManager is null"); } public void removeLookupGroups(String[] groups) { JoinManager mgr = service.getJoinManager(); if (mgr!=null) { try { DiscoveryManagement dm = mgr.getDiscoveryManager(); DiscoveryGroupManagement dgm = (DiscoveryGroupManagement)dm; dgm.removeGroups(groups); if (snapshotHandler!=null) { try { snapshotHandler.takeSnapshot(); } catch (IOException ioe) { logger.warn("Persisting removed Lookup groups", ioe); } } /* Update ServiceBeanConfig */ setGroups(dgm.getGroups()); } catch (Exception e) { logger.warn("Removing Lookup groups", e); } } } public void setLookupGroups(String[] groups) { JoinManager mgr = service.getJoinManager(); if (mgr!=null) { try { DiscoveryManagement dm = mgr.getDiscoveryManager(); DiscoveryGroupManagement dgm = (DiscoveryGroupManagement)dm; dgm.setGroups(groups); if (snapshotHandler!=null) { try { snapshotHandler.takeSnapshot(); } catch (IOException ioe) { logger.warn("Persisting Lookup groups", ioe); } } /* Update ServiceBeanConfig */ setGroups(dgm.getGroups()); } catch (IOException ioe) { logger.warn("Setting Lookup groups", ioe); } } } public LookupLocator[] getLookupLocators() { DiscoveryManagement dm; JoinManager mgr = service.getJoinManager(); if (mgr!=null) { dm = mgr.getDiscoveryManager(); } else { try { dm = service.getServiceBeanContext().getDiscoveryManagement(); } catch (IOException e) { logger.warn( "Getting DiscoveryManagement", e); return(new LookupLocator[0]); } } DiscoveryLocatorManagement dlm = (DiscoveryLocatorManagement)dm; return(dlm.getLocators()); } public void addLookupLocators(LookupLocator[] locators) { JoinManager mgr = service.getJoinManager(); if (mgr!=null) { DiscoveryManagement dm = mgr.getDiscoveryManager(); DiscoveryLocatorManagement dlm = (DiscoveryLocatorManagement)dm; dlm.addLocators(locators); if (snapshotHandler!=null) { try { snapshotHandler.takeSnapshot(); } catch (IOException ioe) { logger.warn( "Persisting Added LookupLocators", ioe); } } /* Update 
ServiceBeanConfig */ setLocators(dlm.getLocators()); } else logger.warn("JoinManager is null"); } public void removeLookupLocators(LookupLocator[] locators) { JoinManager mgr = service.getJoinManager(); if (mgr!=null) { DiscoveryManagement dm = mgr.getDiscoveryManager(); DiscoveryLocatorManagement dlm = (DiscoveryLocatorManagement)dm; dlm.removeLocators(locators); if (snapshotHandler!=null) { try { snapshotHandler.takeSnapshot(); } catch (IOException ioe) { logger.warn("Persisting removed LookupLocators", ioe); } } /* Update ServiceBeanConfig */ setLocators(dlm.getLocators()); } else logger.warn("JoinManager is null"); } public void setLookupLocators(LookupLocator[] locators) { JoinManager mgr = service.getJoinManager(); if (mgr!=null) { DiscoveryManagement dm = mgr.getDiscoveryManager(); DiscoveryLocatorManagement dlm = (DiscoveryLocatorManagement)dm; dlm.setLocators(locators); if (snapshotHandler!=null) { try { snapshotHandler.takeSnapshot(); } catch (IOException ioe) { logger.warn("Persisting LookupLocators", ioe); } } /* Update ServiceBeanConfig */ setLocators(dlm.getLocators()); } else logger.warn("JoinManager is null"); } /** * Set new groups into the ServiceBeanConfig and update the ServiceBeanConfig * using the ServiceBeanManager * * @param groups Array of groups names to set */ private void setGroups(String[] groups) { ServiceBeanConfig sbConfig = service.getServiceBeanContext().getServiceBeanConfig(); sbConfig.setGroups(groups); try { service.getServiceBeanContext().getServiceBeanManager().update(sbConfig); } catch (Exception e) { logger.warn("Setting groups", e); } } /** * Set new LookupLocators into the ServiceBeanConfig and update the * ServiceBeanConfig using the ServiceBeanManager * * @param locators Array of LookupLocator names to set */ private void setLocators(LookupLocator[] locators) { ServiceBeanConfig sbConfig = service.getServiceBeanContext().getServiceBeanConfig(); sbConfig.setLocators(locators); try { 
service.getServiceBeanContext().getServiceBeanManager().update(sbConfig); } catch (Exception e) { logger.warn("Setting LookupLocators", e); } } }
package org.testobject.kernel.imaging.segmentation;

import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;

import java.awt.Graphics;
import java.awt.image.BufferedImage;
import java.io.IOException;
import java.util.Collection;
import java.util.Comparator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.PriorityQueue;
import java.util.Queue;

import org.junit.Test;
import org.testobject.commons.math.algebra.Rectangle;
import org.testobject.commons.util.collections.Lists;
import org.testobject.commons.util.collections.Maps;
import org.testobject.commons.util.config.Debug;
import org.testobject.commons.util.image.Image;
import org.testobject.commons.util.image.ImageUtil;
import org.testobject.commons.util.io.FileUtil;
import org.testobject.kernel.imaging.procedural.Color;

/**
 * Tests for {@code LinearColorBlobBuilder}: two small synthetic-image cases
 * plus an experimental greedy segment-merging test driven by a priority
 * queue of color-distance components.
 *
 * @author enijkamp
 */
public class LinearColorBlobBuilderTest {

    // when true, intermediate results are rendered for manual inspection
    public static final boolean debug = Debug.toDebugMode(false);

    /** A uniform white image must yield a single root blob covering the canvas. */
    @Test
    public void segmentSingleRectangle() {

        final int width = 50, height = 50;
        final BufferedImage test = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB);
        {
            final Graphics g = test.getGraphics();
            g.setColor(java.awt.Color.WHITE);
            g.fillRect(0, 0, width, height);
        }

        LinearColorBlobBuilder builder = new LinearColorBlobBuilder();

        Blob root = builder.build(ImageUtil.Convert.toImage(test));

        BlobUtils.Print.printBlobs(root);

        assertThat(root.id, is(1));
        assertThat(root.bbox, is(new Rectangle.Int(0, 0, 50, 50)));
        assertThat(root.children.size(), is(0));
    }

    /** A red square inside a white canvas must yield one child blob nested in the root. */
    @Test
    public void segmentNestedRectangles() {

        final int width = 50, height = 50;
        final BufferedImage test = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB);
        {
            final Graphics g = test.getGraphics();
            g.setColor(java.awt.Color.WHITE);
            g.fillRect(0, 0, width, height);
            g.setColor(java.awt.Color.RED);
            g.fillRect(10, 10, 30, 30);
        }

        LinearColorBlobBuilder builder = new LinearColorBlobBuilder();

        Blob root = builder.build(ImageUtil.Convert.toImage(test));

        BlobUtils.Print.printBlobs(root);

        assertThat(root.id, is(1));
        assertThat(root.children.size(), is(1));
        assertThat(root.bbox, is(new Rectangle.Int(0, 0, 50, 50)));

        Blob inner = root.children.get(0);
        assertThat(inner.children.size(), is(0));
        assertThat(inner.bbox, is(new Rectangle.Int(10, 10, 30, 30)));
    }

    /**
     * A group of blobs merged into one region, tracking the area-weighted
     * average color of its members.
     */
    private static class Segment {

        public List<Blob> blobs = new LinkedList<>();
        // running area-weighted mean color of all member blobs
        public Color color;
        // total pixel area of all member blobs
        public int size;

        public Segment(Blob blob, Color color) {
            this.blobs.add(blob);
            this.size = blob.area;
            this.color = color;
        }

        /**
         * Add a blob and fold its color into the running average, weighted
         * by blob area (integer arithmetic, so rounding drift accumulates).
         */
        public void add(Blob blob, Color color) {
            this.blobs.add(blob);
            this.size += blob.area;

            int r = ((this.color.r) * (this.size - blob.area) + (color.r * blob.area)) / this.size;
            int g = ((this.color.g) * (this.size - blob.area) + (color.g * blob.area)) / this.size;
            int b = ((this.color.b) * (this.size - blob.area) + (color.b * blob.area)) / this.size;

            this.color = new Color(r, g, b, 255);
        }
    }

    /**
     * A candidate merge between two neighbouring segments, ordered by a
     * size-scaled color distance (smaller = more similar).
     */
    private static class Component {

        public final Segment a, b;

        public Component(Segment a, Segment b) {
            this.a = a;
            this.b = b;
        }

        /** Merge cost: combined area times euclidean RGB distance. */
        public double distance() {
            return (a.size + b.size) * l2(a.color, b.color);
        }

        private double l2(Color a, Color b) {
            double l2 = 0d;
            l2 += (a.r - b.r) * (a.r - b.r);
            l2 += (a.g - b.g) * (a.g - b.g);
            l2 += (a.b - b.b) * (a.b - b.b);
            return Math.sqrt(l2);
        }
    }

    /**
     * Average RGBA color over the pixels the blob's mask marks as set.
     * NOTE(review): divides by {@code sum} — a blob whose mask has no set
     * pixel inside its bbox would cause division by zero; verify the builder
     * guarantees non-empty masks.
     */
    public static Color extractColor(Image.Int image, Blob blob) {
        int sum_r = 0, sum_g = 0, sum_b = 0, sum_a = 0, sum = 0;
        for (int x = 0; x < blob.bbox.w; x++) {
            for (int y = 0; y < blob.bbox.h; y++) {
                if (blob.get(x, y)) {
                    int rgba = image.pixels[(blob.bbox.y + y) * image.w + blob.bbox.x + x];
                    {
                        int a = (rgba >> 24) & 0xff;
                        int r = (rgba >> 16) & 0xff;
                        int g = (rgba >> 8) & 0xff;
                        int b = (rgba >> 0) & 0xff;
                        sum_a += a;
                        sum_r += r;
                        sum_g += g;
                        sum_b += b;
                        sum++;
                    }
                }
            }
        }
        return new Color((sum_r / sum), (sum_g / sum), (sum_b / sum), (sum_a / sum));
    }

    /**
     * Exploratory test: segment a real image, then greedily merge neighbouring
     * segments in ascending distance() order until the queue is drained.
     */
    @Test
    public void segment() throws IOException {

        Image.Int image = ImageUtil.Read.read(FileUtil.readFileFromClassPath("small.png"));

        LinearColorBlobBuilder builder = new LinearColorBlobBuilder();

        Blob blob = builder.build(image);

        BlobUtils.Print.printBlobs(blob);

//		if(debug) VisualizerUtil.show("0", image);
//
//		if(debug) VisualizerUtil.show("1", BlobUtils.Draw.drawHierarchyByLevel(blob));
//
//		if(debug)
//		{
//			GraphBlobBuilder b = new GraphBlobBuilder(image.w, image.h, 0, 0);
//			VisualizerUtil.show("1", BlobUtils.Draw.drawHierarchyByLevel(b.build(image)[0]));
//		}

        List<Segment> segments = Lists.newArrayList(BlobUtils.Auxiliary.countBlobs(blob));
        Map<Segment, Segment> neighbours = Maps.newIdentityMap();
        toSegment(blob, image, segments, neighbours);

        // NOTE(review): toSegment already populated neighbours, but the map is
        // cleared and rebuilt by the O(n^2) findNeighbours pass below — confirm
        // whether discarding the hierarchy-derived links is intentional.
        neighbours.clear();
        findNeighbours(segments, neighbours);

        // min-heap of candidate merges, cheapest (most similar) first
        Queue<Component> components = new PriorityQueue<>(BlobUtils.Auxiliary.countBlobs(blob) << 2, new Comparator<Component>() {
            @Override
            public int compare(Component c1, Component c2) {
                return Double.compare(c1.distance(), c2.distance());
            }
        });
        components.addAll(toComponents(neighbours));

        System.out.println(segments.size());

        while (components.isEmpty() == false) {
            Component component = components.poll();
            System.out.println(component.distance());
            // merge b into a, then drop b from the working sets
            addTo(component.a, component.b, image);
            segments.remove(component.b);
            neighbours.remove(component.b);
        }

        System.out.println(segments.size());

        if (debug) draw("2", segments, image.w, image.h);

        if (debug) System.in.read();
    }

    /**
     * Brute-force all-pairs adjacency scan.
     * NOTE(review): Map can hold only one neighbour per key, so later puts
     * overwrite earlier ones — a multimap may have been intended.
     */
    private void findNeighbours(List<Segment> segments, Map<Segment, Segment> neighbours) {
        for (Segment a : segments) {
            for (Segment b : segments) {
                if (a != b && touches(a, b)) {
                    neighbours.put(a, b);
                }
            }
        }
    }

    /** Render each segment in a pseudo-random color for visual debugging. */
    private void draw(String title, List<Segment> segments, int width, int height) {
        int[] pixels = new int[width * height];
        Image.Int image = new Image.Int(pixels, 0, 0, width, height, width, Image.Int.Type.RGB);
        java.awt.Color[] colors = BlobUtils.Draw.generateColors();
        for (Segment segment : segments) {
            Blob first = segment.blobs.get(0);
            // derive a stable color index from the first blob's geometry
            int position = first.bbox.y * image.w + first.bbox.x + first.bbox.w + first.bbox.h;
            java.awt.Color c = colors[position % colors.length];
            for (Blob blob : segment.blobs) {
                for (int y = 0; y < blob.bbox.h; y++) {
                    for (int x = 0; x < blob.bbox.w; x++) {
                        if (blob.get(x, y)) {
                            pixels[((blob.bbox.y + y) * width) + blob.bbox.x + x] = c.getRGB();
                        }
                    }
                }
            }
        }
//		VisualizerUtil.show(title, image);
    }

    /** Fold every blob of segment b into segment a (b itself is untouched). */
    private void addTo(Segment a, Segment b, Image.Int image) {
        for (Blob blob : b.blobs) {
            a.add(blob, extractColor(image, blob));
        }
    }

    /** Turn each neighbour pair into a mergeable Component. */
    private Collection<? extends Component> toComponents(Map<Segment, Segment> neighbours) {
        List<Component> components = Lists.newArrayList(neighbours.size());
        for (Map.Entry<Segment, Segment> entry : neighbours.entrySet()) {
            components.add(new Component(entry.getKey(), entry.getValue()));
        }
        return components;
    }

    /**
     * Recursively wrap the blob hierarchy in Segments, recording parent-child
     * and sibling adjacencies in {@code neighbours}.
     */
    private Segment toSegment(Blob blob, Image.Int image, List<Segment> segments, Map<Segment, Segment> neighbours) {

        Segment parentSegment = new Segment(blob, extractColor(image, blob));
        segments.add(parentSegment);

        List<Segment> childSegments = Lists.newArrayList(blob.children.size());
        for (Blob childBlob : blob.children) {
            Segment childSegment = toSegment(childBlob, image, segments, neighbours);
            childSegments.add(childSegment);
            neighbours.put(parentSegment, childSegment);
        }

        for (int i = 0; i < childSegments.size(); i++) {
            for (int j = i + 1; j < childSegments.size(); j++) {
                Segment a = childSegments.get(i);
                Segment b = childSegments.get(j);
                if (touches(a, b)) {
                    neighbours.put(a, b);
                }
            }
        }

        return parentSegment;
    }

    /**
     * True when the first blob of segment a has any pixel 8-adjacent to a
     * pixel labelled with the id of segment b's first blob. Only the first
     * blob of each segment is examined — assumes segments are still
     * single-blob when this runs; TODO confirm.
     */
    public static boolean touches(Segment a, Segment b) {

        Blob child1 = a.blobs.get(0);
        Blob child2 = b.blobs.get(0);

        // a b c
        // d   e
        // f g h
        Rectangle.Int box1 = child1.getBoundingBox();
        for (int y = box1.y; y < box1.y + box1.h; y++) {
            for (int x = box1.x; x < box1.x + box1.w; x++) {
                // a
                if (y > 0 && x > 0) {
                    if (child1.ids[y - 1][x - 1] == child2.id) {
                        return true;
                    }
                }
                // b
                if (y > 0) {
                    if (child1.ids[y - 1][x] == child2.id) {
                        return true;
                    }
                }
                // c
                if (y > 0 && x + 1 < child1.ids[0].length) {
                    if (child1.ids[y - 1][x + 1] == child2.id) {
                        return true;
                    }
                }
                // d
                if (x > 0) {
                    if (child1.ids[y][x - 1] == child2.id) {
                        return true;
                    }
                }
                // e
                if (x + 1 < child1.ids[0].length) {
                    if (child1.ids[y][x + 1] == child2.id) {
                        return true;
                    }
                }
                // f
                if (y + 1 < child1.ids.length && x > 0) {
                    if (child1.ids[y + 1][x - 1] == child2.id) {
                        return true;
                    }
                }
                // g
                if (y + 1 < child1.ids.length) {
                    if (child1.ids[y + 1][x] == child2.id) {
                        return true;
                    }
                }
                // h
                if (y + 1 < child1.ids.length && x + 1 < child1.ids[0].length) {
                    if (child1.ids[y + 1][x + 1] == child2.id) {
                        return true;
                    }
                }
            }
        }
        return false;
    }
}
/**
 * Copyright (C) 2011 LShift Ltd.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.lshift.hibernate.migrations;

import org.junit.Test;

import java.sql.Connection;
import java.sql.Types;

import static net.lshift.hibernate.migrations.HibernateHelper.mockExecutablePreparedStatement;
import static org.easymock.EasyMock.*;

/**
 * Test cases for the alter table builder.
 *
 * <p>Each test configures a {@link MigrationBuilder} with a single ALTER TABLE
 * operation and then verifies the exact SQL statement(s) it prepares, either via
 * {@link VerificationUtil#verifyMigrationBuilder} or via a strict EasyMock
 * {@link Connection} that expects the statements in order (see
 * {@link #applyAndVerify(MigrationBuilder, String...)}).
 */
public class AlterTableBuilderTest {
  @Test
  public void shouldDropColumnUsingCapitalizedName() throws Exception {
    MigrationBuilder mb = new MigrationBuilder(HibernateHelper.configuration());
    mb.alterTable("some_table").dropColumn("some_col");
    applyAndVerify(mb, "alter table some_table drop column \"SOME_COL\"");
  }

  @Test(expected = IllegalArgumentException.class)
  public void shouldGenerateExceptionWhenAddingColumnThatIsNonNullableAndHasNoDefaultValue() {
    MigrationBuilder mb = new MigrationBuilder(HibernateHelper.configuration());
    // not-null columns must carry a default value so existing rows stay valid
    mb.alterTable("some_table").addColumn("some_col", Types.VARCHAR, 255, false, null);
  }

  @Test
  public void shouldGenerateAddColumn() throws Exception {
    MigrationBuilder mb = new MigrationBuilder(HibernateHelper.configuration());
    mb.alterTable("foo").addColumn("bar", Types.VARCHAR, 255, false, "baz");
    VerificationUtil.verifyMigrationBuilder(mb,
        "alter table foo add column bar varchar(255) default 'baz' not null");
  }

  @Test
  public void shouldGenerateAlterColumn() throws Exception {
    MigrationBuilder mb = new MigrationBuilder(HibernateHelper.configuration());
    mb.alterTable("foo").alterColumn("bar", Types.VARCHAR, 1024, false, "baz");
    VerificationUtil.verifyMigrationBuilder(mb,
        "alter table foo alter column bar varchar(1024) default 'baz' not null");
  }

  @Test
  public void shouldSetColumnNullable() throws Exception {
    MigrationBuilder mb = new MigrationBuilder(HibernateHelper.configuration());
    mb.alterTable("foo").setColumnNullable("bar", Types.VARCHAR, 255, true);
    VerificationUtil.verifyMigrationBuilder(mb, "alter table foo alter column bar set null");
  }

  @Test
  public void shouldSetColumnNotNull() throws Exception {
    MigrationBuilder mb = new MigrationBuilder(HibernateHelper.configuration());
    mb.alterTable("foo").setColumnNullable("bar", Types.VARCHAR, 255, false);
    VerificationUtil.verifyMigrationBuilder(mb, "alter table foo alter column bar set not null");
  }

  @Test
  public void shouldGenerateForeignKeyConstraint() throws Exception {
    MigrationBuilder mb = new MigrationBuilder(HibernateHelper.configuration());
    mb.alterTable("foo").addForeignKey("FK80C74EA1C3C204DC", "bar", "baz", "name");
    applyAndVerify(mb, "alter table foo add constraint FK80C74EA1C3C204DC foreign key (bar) references baz");
  }

  @Test
  public void shouldGenerateSingleUniqueKeyConstraint() throws Exception {
    MigrationBuilder mb = new MigrationBuilder(HibernateHelper.configuration());
    mb.alterTable("foo").addUniqueConstraint("blaz");
    applyAndVerify(mb, "alter table foo add unique (blaz)");
  }

  @Test
  public void shouldGenerateMultipleUniqueKeyConstraint() throws Exception {
    MigrationBuilder mb = new MigrationBuilder(HibernateHelper.configuration());
    mb.alterTable("foo").addUniqueConstraint("U123123", "blaz", "boz");
    applyAndVerify(mb, "alter table foo add constraint U123123 unique (blaz, boz)");
  }

  @Test
  public void shouldGenerateDropConstraint() throws Exception {
    MigrationBuilder mb = new MigrationBuilder(HibernateHelper.configuration());
    mb.alterTable("foo").dropConstraint("FK80C74EA1C3C204DC");
    applyAndVerify(mb, "alter table foo drop constraint FK80C74EA1C3C204DC");
  }

  // NOTE(review): this is an exact duplicate of shouldGenerateDropConstraint — it also
  // calls dropConstraint(), not a dedicated drop-foreign-key operation. Presumably it
  // was meant to exercise a separate dropForeignKey() builder method; confirm and
  // either point it at that method or delete it.
  @Test
  public void shouldGenerateDropForeignKey() throws Exception {
    MigrationBuilder mb = new MigrationBuilder(HibernateHelper.configuration());
    mb.alterTable("foo").dropConstraint("FK80C74EA1C3C204DC");
    applyAndVerify(mb, "alter table foo drop constraint FK80C74EA1C3C204DC");
  }

  @Test
  public void shouldGenerateDropPrimaryKey() throws Exception {
    MigrationBuilder mb = new MigrationBuilder(HibernateHelper.configuration());
    mb.alterTable("foo").dropPrimaryKey();
    applyAndVerify(mb, "alter table foo drop primary key");
  }

  @Test
  public void shouldGenerateAddPrimaryKey() throws Exception {
    MigrationBuilder mb = new MigrationBuilder(HibernateHelper.configuration());
    mb.alterTable("foo").addPrimaryKey("bar", "baz");
    applyAndVerify(mb, "alter table foo add primary key (bar, baz)");
  }

  @Test
  public void shouldGenerateReplacePrimaryKey() throws Exception {
    MigrationBuilder mb = new MigrationBuilder(HibernateHelper.configuration());
    mb.alterTable("foo").replacePrimaryKey("bar", "baz");
    // replace = drop followed by add, in that order (strict mock enforces ordering)
    applyAndVerify(mb,
        "alter table foo drop primary key",
        "alter table foo add primary key (bar, baz)");
  }

  @Test
  public void shouldGenerateAddPartitionForHashing() throws Exception {
    MigrationBuilder mb = new MigrationBuilder(HibernateHelper.configuration());
    mb.alterTable("foo").addPartition();
    applyAndVerify(mb, "alter table foo add partition");
  }

  @Test
  public void shouldRenameTable() throws Exception {
    MigrationBuilder mb = new MigrationBuilder(HibernateHelper.configuration());
    mb.alterTable("foo").renameTo("bar");
    applyAndVerify(mb, "alter table foo rename to bar");
  }

  /**
   * Applies the builder against a strict mock {@link Connection} that expects exactly
   * the given SQL statements to be prepared, in order, and verifies the expectations.
   *
   * @param mb the migration builder to apply
   * @param expectedStatements the exact SQL text expected, one entry per statement
   * @throws Exception if applying the migration fails
   */
  private static void applyAndVerify(MigrationBuilder mb, String... expectedStatements) throws Exception {
    Connection conn = createStrictMock(Connection.class);
    for (String sql : expectedStatements) {
      expect(conn.prepareStatement(sql)).andReturn(mockExecutablePreparedStatement());
    }
    replay(conn);
    mb.apply(conn);
    verify(conn);
  }
}
package org.mutabilitydetector.unittesting.matchers.reasons;

/*
 * #%L
 * MutabilityDetector
 * %%
 * Copyright (C) 2008 - 2014 Graham Allan
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import static com.google.common.collect.Iterables.transform;
import static java.util.Arrays.asList;
import static java.util.Collections.singleton;
import static org.hamcrest.core.AnyOf.anyOf;
import static org.mutabilitydetector.MutabilityReason.ABSTRACT_TYPE_TO_FIELD;
import static org.mutabilitydetector.MutabilityReason.COLLECTION_FIELD_WITH_MUTABLE_ELEMENT_TYPE;
import static org.mutabilitydetector.MutabilityReason.MUTABLE_TYPE_TO_FIELD;

import java.util.stream.StreamSupport;

import org.hamcrest.Matcher;
import org.mutabilitydetector.MutableReasonDetail;
import org.mutabilitydetector.checkers.MutableTypeToFieldChecker;
import org.mutabilitydetector.locations.CodeLocation;
import org.mutabilitydetector.locations.CodeLocation.FieldLocation;
import org.mutabilitydetector.locations.Dotted;

import com.google.common.collect.Iterables;

/**
 * An "allowed reason" for mutability warnings: treats one or more named classes as
 * immutable, suppressing transitive-mutability warnings caused by fields of those
 * types (or by collection fields whose generic element types are all among them).
 */
public final class ProvidedOtherClass {

    /** The classes the user has vouched for as immutable. */
    private final Iterable<Dotted> dottedClassNames;

    private ProvidedOtherClass(Iterable<Dotted> dottedClassName) {
        this.dottedClassNames = dottedClassName;
    }

    public static ProvidedOtherClass provided(Dotted className) {
        return provided(singleton(className));
    }

    public static ProvidedOtherClass provided(Dotted... classNames) {
        return provided(asList(classNames));
    }

    public static ProvidedOtherClass provided(Iterable<Dotted> classNames) {
        return new ProvidedOtherClass(classNames);
    }

    /**
     * Assumes that the selected type is immutable, preventing warnings related
     * to transitive mutability.
     * <p>
     * One common way for classes to be rendered mutable is to contain a mutable
     * field. Another way is that the class is extensible (non-final). The
     * interaction of these rules can occasionally conflict with the notion of
     * abstraction. For example, consider the following classes:
     *
     * <pre><code>
     * // implementations MUST be immutable
     * public interface Named {
     *   String getName();
     * }
     *
     * public final class HasSomethingNamed {
     *   private final Named named;
     *   public HasSomethingNamed(Named named) {
     *     this.named = named;
     *   }
     *
     *   public String getNameOfYourThing() {
     *     return this.named.getName();
     *   }
     * }
     * </code></pre>
     *
     * In this contrived example, the interface Named is abstracting something.
     * It would be preferable to be able to depend on that abstraction, rather
     * than a concrete implementation. Unfortunately, any implementation of
     * Named <strong>could</strong> violate the condition that it must be
     * immutable. If the Named implementation given to the constructor of
     * HasSomethingNamed is actually mutable, it causes HasSomethingNamed to be
     * mutable as well. Consider this code:
     *
     * <pre><code>
     * SneakyMutableNamed n = new SneakyMutableNamed("Jimmy");
     * HasSomethingNamed h = new HasSomethingNamed(n);
     *
     * String nameOnFirstCall = h.getNameOfYourThing();
     * n.myReassignableName = "Bobby";
     * String nameOnSecondCall = h.getNameOfYourThing();
     * </code></pre>
     *
     * Here, because a sneaky subclass of Named is mutated, the instance of
     * HasSomethingNamed has been observed to change (getNameOfYourThing() first
     * returns "Jimmy" then "Bobby").
     * <p>
     * Despite this limitation, it can still be preferable that the abstract
     * class is given as a parameter. Perhaps you are able to trust that all
     * implementations <strong>are</strong> immutable. In that case, Mutability
     * Detector raising a warning on HasSomethingNamed would be considered a
     * false positive. This reason allows the test to pass.
     * <p>
     * Example usage:
     *
     * <pre><code>
     * assertInstancesOf(HasSomethingNamed.class,
     *                   areImmutable(),
     *                   AllowedReason.provided(Named.class).isAlsoImmutable());
     * </code></pre>
     *
     * Note that this also allows a field which is a collection type, with Named
     * as a generic element type.
     */
    public Matcher<MutableReasonDetail> isAlsoImmutable() {
        final Matcher<MutableReasonDetail> allowGenericTypes =
                new AllowedIfOtherClassIsGenericTypeOfCollectionField(dottedClassNames);
        return anyOf(allowGenericTypes,
                     anyOf(transform(dottedClassNames, AllowedIfOtherClassIsImmutable::new)),
                     new AllowedIfOtherClassIsGenericTypeParameter(dottedClassNames));
    }

    /**
     * Alias for {@link #isAlsoImmutable()}, reading better when several classes
     * were provided. See that method for the full rationale and example usage.
     */
    public Matcher<MutableReasonDetail> areAlsoImmutable() {
        return isAlsoImmutable();
    }

    /** Allows ABSTRACT_TYPE_TO_FIELD/MUTABLE_TYPE_TO_FIELD warnings naming the provided class. */
    private static final class AllowedIfOtherClassIsImmutable extends BaseMutableReasonDetailMatcher {

        private final Dotted className;

        public AllowedIfOtherClassIsImmutable(Dotted dottedClassName) {
            this.className = dottedClassName;
        }

        @Override
        protected boolean matchesSafely(MutableReasonDetail reasonDetail) {
            return isAssignedField(reasonDetail);
        }

        private boolean isAssignedField(MutableReasonDetail reasonDetail) {
            return reasonDetail.reason().isOneOf(ABSTRACT_TYPE_TO_FIELD, MUTABLE_TYPE_TO_FIELD)
                    && reasonDetail.message().contains(classNameAsItAppearsInDescription());
        }

        /**
         * This matcher has to check against string created by the checker, which may change.
         * @see MutableTypeToFieldChecker
         */
        private String classNameAsItAppearsInDescription() {
            return "(" + className.asString() + ")";
        }
    }

    /**
     * Allows COLLECTION_FIELD_WITH_MUTABLE_ELEMENT_TYPE warnings when every generic
     * element type mentioned in the checker's message is among the provided classes.
     */
    private static final class AllowedIfOtherClassIsGenericTypeOfCollectionField
            extends BaseMutableReasonDetailMatcher {

        private final Iterable<Dotted> classNames;

        public AllowedIfOtherClassIsGenericTypeOfCollectionField(Iterable<Dotted> classNames) {
            this.classNames = classNames;
        }

        @Override
        protected boolean matchesSafely(MutableReasonDetail reasonDetail) {
            return allowedIfCollectionTypeWhereAllGenericElementsAreConsideredImmutable(reasonDetail);
        }

        private boolean allowedIfCollectionTypeWhereAllGenericElementsAreConsideredImmutable(
                MutableReasonDetail reasonDetail) {
            return reasonDetail.reason().isOneOf(COLLECTION_FIELD_WITH_MUTABLE_ELEMENT_TYPE)
                    && allElementTypesAreConsideredImmutable(reasonDetail.message());
        }

        /**
         * This matcher has to check against string created by the checker, which may change.
         * Parses the parenthesised field type description out of the message, e.g.
         * {@code "... (java.util.List<com.example.Foo>) ..."}, then checks each
         * comma-separated generic element type against the provided class names.
         * If the message does not have the expected shape, the matcher simply does
         * not match (previously this threw StringIndexOutOfBoundsException).
         * @see MutableTypeToFieldChecker
         */
        private boolean allElementTypesAreConsideredImmutable(String message) {
            int openParen = message.indexOf("(");
            int closeParen = message.indexOf(")");
            if (openParen < 0 || closeParen < openParen) {
                return false;
            }
            String fieldTypeDescription = message.substring(openParen, closeParen + 1);
            int openAngle = fieldTypeDescription.indexOf("<");
            int closeAngle = fieldTypeDescription.lastIndexOf(">");
            if (openAngle < 0 || closeAngle < openAngle) {
                return false;
            }
            String generics = fieldTypeDescription.substring(openAngle + 1, closeAngle);
            // String.split returns { generics } unchanged when the separator is absent,
            // so no special-casing of the single-element form is needed.
            for (String genericType : generics.split(", ")) {
                if (!Iterables.contains(classNames, Dotted.dotted(genericType))) {
                    return false;
                }
            }
            return true;
        }
    }

    /** Allows MUTABLE_TYPE_TO_FIELD warnings whose field type is one of the provided classes. */
    private static final class AllowedIfOtherClassIsGenericTypeParameter
            extends BaseMutableReasonDetailMatcher {

        private final Iterable<Dotted> classNames;

        public AllowedIfOtherClassIsGenericTypeParameter(final Iterable<Dotted> classNames) {
            if (classNames == null) {
                throw new IllegalArgumentException("classNames cannot be null");
            }
            this.classNames = classNames;
        }

        @Override
        protected boolean matchesSafely(final MutableReasonDetail reasonDetail) {
            final CodeLocation<?> codeLocation = reasonDetail.codeLocation();
            if (reasonDetail.reason().isOneOf(MUTABLE_TYPE_TO_FIELD)
                    && codeLocation instanceof FieldLocation) {
                final FieldLocation location = (FieldLocation) reasonDetail.codeLocation();
                final Dotted fieldType = location.fieldType();
                return StreamSupport.stream(classNames.spliterator(), false).anyMatch(fieldType::equals);
            }
            return false;
        }
    }
}
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.

package com.azure.analytics.synapse.artifacts.implementation;

import com.azure.analytics.synapse.artifacts.models.ArtifactRenameRequest;
import com.azure.analytics.synapse.artifacts.models.CloudErrorAutoGeneratedException;
import com.azure.analytics.synapse.artifacts.models.LinkedServiceListResponse;
import com.azure.analytics.synapse.artifacts.models.LinkedServiceResource;
import com.azure.core.annotation.BodyParam;
import com.azure.core.annotation.Delete;
import com.azure.core.annotation.ExpectedResponses;
import com.azure.core.annotation.Get;
import com.azure.core.annotation.HeaderParam;
import com.azure.core.annotation.Host;
import com.azure.core.annotation.HostParam;
import com.azure.core.annotation.PathParam;
import com.azure.core.annotation.Post;
import com.azure.core.annotation.Put;
import com.azure.core.annotation.QueryParam;
import com.azure.core.annotation.ReturnType;
import com.azure.core.annotation.ServiceInterface;
import com.azure.core.annotation.ServiceMethod;
import com.azure.core.annotation.UnexpectedResponseExceptionType;
import com.azure.core.http.rest.PagedFlux;
import com.azure.core.http.rest.PagedIterable;
import com.azure.core.http.rest.PagedResponse;
import com.azure.core.http.rest.PagedResponseBase;
import com.azure.core.http.rest.Response;
import com.azure.core.http.rest.RestProxy;
import com.azure.core.util.Context;
import com.azure.core.util.FluxUtil;
import reactor.core.publisher.Mono;

// NOTE(review): AutoRest-generated file — prefer regenerating over hand-editing.
/** An instance of this class provides access to all the operations defined in LinkedServices. */
public final class LinkedServicesImpl {
    /** The proxy service used to perform REST calls. */
    private final LinkedServicesService service;

    /** The service client containing this operation class. */
    private final ArtifactsClientImpl client;

    /**
     * Initializes an instance of LinkedServicesImpl.
     *
     * @param client the instance of the service client containing this operation class.
     */
    LinkedServicesImpl(ArtifactsClientImpl client) {
        // RestProxy builds a runtime implementation of LinkedServicesService from its annotations
        this.service =
                RestProxy.create(
                        LinkedServicesService.class, client.getHttpPipeline(), client.getSerializerAdapter());
        this.client = client;
    }

    /**
     * The interface defining all the services for ArtifactsClientLinkedServices to be used by the proxy service to
     * perform REST calls.
     */
    @Host("{endpoint}")
    // NOTE(review): name appears truncated ("ArtifactsClientLinke") — presumably a generator length limit.
    @ServiceInterface(name = "ArtifactsClientLinke")
    private interface LinkedServicesService {
        @Get("/linkedservices")
        @ExpectedResponses({200})
        @UnexpectedResponseExceptionType(CloudErrorAutoGeneratedException.class)
        Mono<Response<LinkedServiceListResponse>> getLinkedServicesByWorkspace(
                @HostParam("endpoint") String endpoint,
                @QueryParam("api-version") String apiVersion,
                @HeaderParam("Accept") String accept,
                Context context);

        @Put("/linkedservices/{linkedServiceName}")
        @ExpectedResponses({200, 202})
        @UnexpectedResponseExceptionType(CloudErrorAutoGeneratedException.class)
        Mono<Response<LinkedServiceResource>> createOrUpdateLinkedService(
                @HostParam("endpoint") String endpoint,
                @PathParam("linkedServiceName") String linkedServiceName,
                @QueryParam("api-version") String apiVersion,
                @HeaderParam("If-Match") String ifMatch,
                @BodyParam("application/json") LinkedServiceResource linkedService,
                @HeaderParam("Accept") String accept,
                Context context);

        // 304 is an expected (non-exceptional) response because of the If-None-Match header below
        @Get("/linkedservices/{linkedServiceName}")
        @ExpectedResponses({200, 304})
        @UnexpectedResponseExceptionType(CloudErrorAutoGeneratedException.class)
        Mono<Response<LinkedServiceResource>> getLinkedService(
                @HostParam("endpoint") String endpoint,
                @PathParam("linkedServiceName") String linkedServiceName,
                @QueryParam("api-version") String apiVersion,
                @HeaderParam("If-None-Match") String ifNoneMatch,
                @HeaderParam("Accept") String accept,
                Context context);

        @Delete("/linkedservices/{linkedServiceName}")
        @ExpectedResponses({200, 202, 204})
        @UnexpectedResponseExceptionType(CloudErrorAutoGeneratedException.class)
        Mono<Response<Void>> deleteLinkedService(
                @HostParam("endpoint") String endpoint,
                @PathParam("linkedServiceName") String linkedServiceName,
                @QueryParam("api-version") String apiVersion,
                @HeaderParam("Accept") String accept,
                Context context);

        @Post("/linkedservices/{linkedServiceName}/rename")
        @ExpectedResponses({200, 202})
        @UnexpectedResponseExceptionType(CloudErrorAutoGeneratedException.class)
        Mono<Response<Void>> renameLinkedService(
                @HostParam("endpoint") String endpoint,
                @PathParam("linkedServiceName") String linkedServiceName,
                @QueryParam("api-version") String apiVersion,
                @BodyParam("application/json") ArtifactRenameRequest request,
                @HeaderParam("Accept") String accept,
                Context context);

        // nextLink is an absolute URL returned by the service, hence encoded = true
        @Get("{nextLink}")
        @ExpectedResponses({200})
        @UnexpectedResponseExceptionType(CloudErrorAutoGeneratedException.class)
        Mono<Response<LinkedServiceListResponse>> getLinkedServicesByWorkspaceNext(
                @PathParam(value = "nextLink", encoded = true) String nextLink,
                @HostParam("endpoint") String endpoint,
                @HeaderParam("Accept") String accept,
                Context context);
    }

    /**
     * Lists linked services.
     *
     * @throws CloudErrorAutoGeneratedException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return a list of linked service resources along with {@link PagedResponse} on successful completion of {@link
     *     Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<PagedResponse<LinkedServiceResource>> getLinkedServicesByWorkspaceSinglePageAsync() {
        // api-version is pinned per method by the generator rather than shared as a constant
        final String apiVersion = "2020-12-01";
        final String accept = "application/json";
        return FluxUtil.withContext(
                        context ->
                                service.getLinkedServicesByWorkspace(
                                        this.client.getEndpoint(), apiVersion, accept, context))
                .map(
                        res ->
                                new PagedResponseBase<>(
                                        res.getRequest(),
                                        res.getStatusCode(),
                                        res.getHeaders(),
                                        res.getValue().getValue(),
                                        res.getValue().getNextLink(),
                                        null));
    }

    /**
     * Lists linked services.
     *
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws CloudErrorAutoGeneratedException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return a list of linked service resources along with {@link PagedResponse} on successful completion of {@link
     *     Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<PagedResponse<LinkedServiceResource>> getLinkedServicesByWorkspaceSinglePageAsync(Context context) {
        final String apiVersion = "2020-12-01";
        final String accept = "application/json";
        return service.getLinkedServicesByWorkspace(this.client.getEndpoint(), apiVersion, accept, context)
                .map(
                        res ->
                                new PagedResponseBase<>(
                                        res.getRequest(),
                                        res.getStatusCode(),
                                        res.getHeaders(),
                                        res.getValue().getValue(),
                                        res.getValue().getNextLink(),
                                        null));
    }

    /**
     * Lists linked services.
     *
     * @throws CloudErrorAutoGeneratedException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return a list of linked service resources as paginated response with {@link PagedFlux}.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedFlux<LinkedServiceResource> getLinkedServicesByWorkspaceAsync() {
        // first page from the list endpoint; subsequent pages fetched via the nextLink endpoint
        return new PagedFlux<>(
                () -> getLinkedServicesByWorkspaceSinglePageAsync(),
                nextLink -> getLinkedServicesByWorkspaceNextSinglePageAsync(nextLink));
    }

    /**
     * Lists linked services.
     *
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws CloudErrorAutoGeneratedException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return a list of linked service resources as paginated response with {@link PagedFlux}.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedFlux<LinkedServiceResource> getLinkedServicesByWorkspaceAsync(Context context) {
        return new PagedFlux<>(
                () -> getLinkedServicesByWorkspaceSinglePageAsync(context),
                nextLink -> getLinkedServicesByWorkspaceNextSinglePageAsync(nextLink, context));
    }

    /**
     * Lists linked services.
     *
     * @throws CloudErrorAutoGeneratedException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return a list of linked service resources as paginated response with {@link PagedIterable}.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedIterable<LinkedServiceResource> getLinkedServicesByWorkspace() {
        return new PagedIterable<>(getLinkedServicesByWorkspaceAsync());
    }

    /**
     * Lists linked services.
     *
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws CloudErrorAutoGeneratedException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return a list of linked service resources as paginated response with {@link PagedIterable}.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedIterable<LinkedServiceResource> getLinkedServicesByWorkspace(Context context) {
        return new PagedIterable<>(getLinkedServicesByWorkspaceAsync(context));
    }

    /**
     * Creates or updates a linked service.
     *
     * @param linkedServiceName The linked service name.
     * @param linkedService Linked service resource definition.
     * @param ifMatch ETag of the linkedService entity. Should only be specified for update, for which it should match
     *     existing entity or can be * for unconditional update.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws CloudErrorAutoGeneratedException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return linked service resource type along with {@link Response} on successful completion of {@link Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<Response<LinkedServiceResource>> createOrUpdateLinkedServiceWithResponseAsync(
            String linkedServiceName, LinkedServiceResource linkedService, String ifMatch) {
        final String apiVersion = "2020-12-01";
        final String accept = "application/json";
        return FluxUtil.withContext(
                context ->
                        service.createOrUpdateLinkedService(
                                this.client.getEndpoint(),
                                linkedServiceName,
                                apiVersion,
                                ifMatch,
                                linkedService,
                                accept,
                                context));
    }

    /**
     * Creates or updates a linked service.
     *
     * @param linkedServiceName The linked service name.
     * @param linkedService Linked service resource definition.
     * @param ifMatch ETag of the linkedService entity. Should only be specified for update, for which it should match
     *     existing entity or can be * for unconditional update.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws CloudErrorAutoGeneratedException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return linked service resource type along with {@link Response} on successful completion of {@link Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<Response<LinkedServiceResource>> createOrUpdateLinkedServiceWithResponseAsync(
            String linkedServiceName, LinkedServiceResource linkedService, String ifMatch, Context context) {
        final String apiVersion = "2020-12-01";
        final String accept = "application/json";
        return service.createOrUpdateLinkedService(
                this.client.getEndpoint(), linkedServiceName, apiVersion, ifMatch, linkedService, accept, context);
    }

    /**
     * Creates or updates a linked service.
     *
     * @param linkedServiceName The linked service name.
     * @param linkedService Linked service resource definition.
     * @param ifMatch ETag of the linkedService entity. Should only be specified for update, for which it should match
     *     existing entity or can be * for unconditional update.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws CloudErrorAutoGeneratedException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return linked service resource type on successful completion of {@link Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<LinkedServiceResource> createOrUpdateLinkedServiceAsync(
            String linkedServiceName, LinkedServiceResource linkedService, String ifMatch) {
        // a response with no body completes empty rather than emitting null
        return createOrUpdateLinkedServiceWithResponseAsync(linkedServiceName, linkedService, ifMatch)
                .flatMap(
                        (Response<LinkedServiceResource> res) -> {
                            if (res.getValue() != null) {
                                return Mono.just(res.getValue());
                            } else {
                                return Mono.empty();
                            }
                        });
    }

    /**
     * Creates or updates a linked service.
     *
     * @param linkedServiceName The linked service name.
     * @param linkedService Linked service resource definition.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws CloudErrorAutoGeneratedException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return linked service resource type on successful completion of {@link Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<LinkedServiceResource> createOrUpdateLinkedServiceAsync(
            String linkedServiceName, LinkedServiceResource linkedService) {
        // no If-Match header: create, or unconditional update
        final String ifMatch = null;
        return createOrUpdateLinkedServiceWithResponseAsync(linkedServiceName, linkedService, ifMatch)
                .flatMap(
                        (Response<LinkedServiceResource> res) -> {
                            if (res.getValue() != null) {
                                return Mono.just(res.getValue());
                            } else {
                                return Mono.empty();
                            }
                        });
    }

    /**
     * Creates or updates a linked service.
     *
     * @param linkedServiceName The linked service name.
     * @param linkedService Linked service resource definition.
     * @param ifMatch ETag of the linkedService entity. Should only be specified for update, for which it should match
     *     existing entity or can be * for unconditional update.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws CloudErrorAutoGeneratedException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return linked service resource type on successful completion of {@link Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<LinkedServiceResource> createOrUpdateLinkedServiceAsync(
            String linkedServiceName, LinkedServiceResource linkedService, String ifMatch, Context context) {
        return createOrUpdateLinkedServiceWithResponseAsync(linkedServiceName, linkedService, ifMatch, context)
                .flatMap(
                        (Response<LinkedServiceResource> res) -> {
                            if (res.getValue() != null) {
                                return Mono.just(res.getValue());
                            } else {
                                return Mono.empty();
                            }
                        });
    }

    /**
     * Creates or updates a linked service.
     *
     * @param linkedServiceName The linked service name.
     * @param linkedService Linked service resource definition.
     * @param ifMatch ETag of the linkedService entity. Should only be specified for update, for which it should match
     *     existing entity or can be * for unconditional update.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws CloudErrorAutoGeneratedException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return linked service resource type.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public LinkedServiceResource createOrUpdateLinkedService(
            String linkedServiceName, LinkedServiceResource linkedService, String ifMatch) {
        // synchronous variant: blocks on the async pipeline
        return createOrUpdateLinkedServiceAsync(linkedServiceName, linkedService, ifMatch).block();
    }

    /**
     * Creates or updates a linked service.
     *
     * @param linkedServiceName The linked service name.
     * @param linkedService Linked service resource definition.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws CloudErrorAutoGeneratedException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return linked service resource type.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public LinkedServiceResource createOrUpdateLinkedService(
            String linkedServiceName, LinkedServiceResource linkedService) {
        final String ifMatch = null;
        return createOrUpdateLinkedServiceAsync(linkedServiceName, linkedService, ifMatch).block();
    }

    /**
     * Creates or updates a linked service.
     *
     * @param linkedServiceName The linked service name.
     * @param linkedService Linked service resource definition.
     * @param ifMatch ETag of the linkedService entity. Should only be specified for update, for which it should match
     *     existing entity or can be * for unconditional update.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws CloudErrorAutoGeneratedException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return linked service resource type along with {@link Response}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Response<LinkedServiceResource> createOrUpdateLinkedServiceWithResponse(
            String linkedServiceName, LinkedServiceResource linkedService, String ifMatch, Context context) {
        return createOrUpdateLinkedServiceWithResponseAsync(linkedServiceName, linkedService, ifMatch, context).block();
    }

    /**
     * Gets a linked service.
     *
     * @param linkedServiceName The linked service name.
     * @param ifNoneMatch ETag of the linked service entity. Should only be specified for get. If the ETag matches the
     *     existing entity tag, or if * was provided, then no content will be returned.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws CloudErrorAutoGeneratedException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return a linked service along with {@link Response} on successful completion of {@link Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<Response<LinkedServiceResource>> getLinkedServiceWithResponseAsync(
            String linkedServiceName, String ifNoneMatch) {
        final String apiVersion = "2020-12-01";
        final String accept = "application/json";
        return FluxUtil.withContext(
                context ->
                        service.getLinkedService(
                                this.client.getEndpoint(), linkedServiceName, apiVersion, ifNoneMatch, accept, context));
    }

    /**
     * Gets a linked service.
     *
     * @param linkedServiceName The linked service name.
     * @param ifNoneMatch ETag of the linked service entity. Should only be specified for get. If the ETag matches the
     *     existing entity tag, or if * was provided, then no content will be returned.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws CloudErrorAutoGeneratedException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return a linked service along with {@link Response} on successful completion of {@link Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<Response<LinkedServiceResource>> getLinkedServiceWithResponseAsync(
            String linkedServiceName, String ifNoneMatch, Context context) {
        final String apiVersion = "2020-12-01";
        final String accept = "application/json";
        return service.getLinkedService(
                this.client.getEndpoint(), linkedServiceName, apiVersion, ifNoneMatch, accept, context);
    }

    /**
     * Gets a linked service.
     *
     * @param linkedServiceName The linked service name.
     * @param ifNoneMatch ETag of the linked service entity. Should only be specified for get. If the ETag matches the
     *     existing entity tag, or if * was provided, then no content will be returned.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws CloudErrorAutoGeneratedException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return a linked service on successful completion of {@link Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<LinkedServiceResource> getLinkedServiceAsync(String linkedServiceName, String ifNoneMatch) {
        // a 304 (If-None-Match hit) carries no body, so this completes empty in that case
        return getLinkedServiceWithResponseAsync(linkedServiceName, ifNoneMatch)
                .flatMap(
                        (Response<LinkedServiceResource> res) -> {
                            if (res.getValue() != null) {
                                return Mono.just(res.getValue());
                            } else {
                                return Mono.empty();
                            }
                        });
    }

    /**
     * Gets a linked service.
     *
     * @param linkedServiceName The linked service name.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws CloudErrorAutoGeneratedException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return a linked service on successful completion of {@link Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<LinkedServiceResource> getLinkedServiceAsync(String linkedServiceName) {
        final String ifNoneMatch = null;
        return getLinkedServiceWithResponseAsync(linkedServiceName, ifNoneMatch)
                .flatMap(
                        (Response<LinkedServiceResource> res) -> {
                            if (res.getValue() != null) {
                                return Mono.just(res.getValue());
                            } else {
                                return Mono.empty();
                            }
                        });
    }

    /**
     * Gets a linked service.
     *
     * @param linkedServiceName The linked service name.
     * @param ifNoneMatch ETag of the linked service entity. Should only be specified for get. If the ETag matches the
     *     existing entity tag, or if * was provided, then no content will be returned.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws CloudErrorAutoGeneratedException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return a linked service on successful completion of {@link Mono}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<LinkedServiceResource> getLinkedServiceAsync( String linkedServiceName, String ifNoneMatch, Context context) { return getLinkedServiceWithResponseAsync(linkedServiceName, ifNoneMatch, context) .flatMap( (Response<LinkedServiceResource> res) -> { if (res.getValue() != null) { return Mono.just(res.getValue()); } else { return Mono.empty(); } }); } /** * Gets a linked service. * * @param linkedServiceName The linked service name. * @param ifNoneMatch ETag of the linked service entity. Should only be specified for get. If the ETag matches the * existing entity tag, or if * was provided, then no content will be returned. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws CloudErrorAutoGeneratedException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return a linked service. */ @ServiceMethod(returns = ReturnType.SINGLE) public LinkedServiceResource getLinkedService(String linkedServiceName, String ifNoneMatch) { return getLinkedServiceAsync(linkedServiceName, ifNoneMatch).block(); } /** * Gets a linked service. * * @param linkedServiceName The linked service name. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws CloudErrorAutoGeneratedException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return a linked service. */ @ServiceMethod(returns = ReturnType.SINGLE) public LinkedServiceResource getLinkedService(String linkedServiceName) { final String ifNoneMatch = null; return getLinkedServiceAsync(linkedServiceName, ifNoneMatch).block(); } /** * Gets a linked service. * * @param linkedServiceName The linked service name. * @param ifNoneMatch ETag of the linked service entity. 
Should only be specified for get. If the ETag matches the * existing entity tag, or if * was provided, then no content will be returned. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws CloudErrorAutoGeneratedException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return a linked service along with {@link Response}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Response<LinkedServiceResource> getLinkedServiceWithResponse( String linkedServiceName, String ifNoneMatch, Context context) { return getLinkedServiceWithResponseAsync(linkedServiceName, ifNoneMatch, context).block(); } /** * Deletes a linked service. * * @param linkedServiceName The linked service name. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws CloudErrorAutoGeneratedException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the {@link Response} on successful completion of {@link Mono}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Response<Void>> deleteLinkedServiceWithResponseAsync(String linkedServiceName) { final String apiVersion = "2020-12-01"; final String accept = "application/json"; return FluxUtil.withContext( context -> service.deleteLinkedService( this.client.getEndpoint(), linkedServiceName, apiVersion, accept, context)); } /** * Deletes a linked service. * * @param linkedServiceName The linked service name. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws CloudErrorAutoGeneratedException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. 
* @return the {@link Response} on successful completion of {@link Mono}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Response<Void>> deleteLinkedServiceWithResponseAsync(String linkedServiceName, Context context) { final String apiVersion = "2020-12-01"; final String accept = "application/json"; return service.deleteLinkedService(this.client.getEndpoint(), linkedServiceName, apiVersion, accept, context); } /** * Deletes a linked service. * * @param linkedServiceName The linked service name. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws CloudErrorAutoGeneratedException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return A {@link Mono} that completes when a successful response is received. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Void> deleteLinkedServiceAsync(String linkedServiceName) { return deleteLinkedServiceWithResponseAsync(linkedServiceName).flatMap((Response<Void> res) -> Mono.empty()); } /** * Deletes a linked service. * * @param linkedServiceName The linked service name. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws CloudErrorAutoGeneratedException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return A {@link Mono} that completes when a successful response is received. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Void> deleteLinkedServiceAsync(String linkedServiceName, Context context) { return deleteLinkedServiceWithResponseAsync(linkedServiceName, context) .flatMap((Response<Void> res) -> Mono.empty()); } /** * Deletes a linked service. * * @param linkedServiceName The linked service name. * @throws IllegalArgumentException thrown if parameters fail the validation. 
* @throws CloudErrorAutoGeneratedException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. */ @ServiceMethod(returns = ReturnType.SINGLE) public void deleteLinkedService(String linkedServiceName) { deleteLinkedServiceAsync(linkedServiceName).block(); } /** * Deletes a linked service. * * @param linkedServiceName The linked service name. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws CloudErrorAutoGeneratedException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the {@link Response}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Response<Void> deleteLinkedServiceWithResponse(String linkedServiceName, Context context) { return deleteLinkedServiceWithResponseAsync(linkedServiceName, context).block(); } /** * Renames a linked service. * * @param linkedServiceName The linked service name. * @param request proposed new name. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws CloudErrorAutoGeneratedException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the {@link Response} on successful completion of {@link Mono}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Response<Void>> renameLinkedServiceWithResponseAsync( String linkedServiceName, ArtifactRenameRequest request) { final String apiVersion = "2020-12-01"; final String accept = "application/json"; return FluxUtil.withContext( context -> service.renameLinkedService( this.client.getEndpoint(), linkedServiceName, apiVersion, request, accept, context)); } /** * Renames a linked service. * * @param linkedServiceName The linked service name. 
* @param request proposed new name. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws CloudErrorAutoGeneratedException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the {@link Response} on successful completion of {@link Mono}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Response<Void>> renameLinkedServiceWithResponseAsync( String linkedServiceName, ArtifactRenameRequest request, Context context) { final String apiVersion = "2020-12-01"; final String accept = "application/json"; return service.renameLinkedService( this.client.getEndpoint(), linkedServiceName, apiVersion, request, accept, context); } /** * Renames a linked service. * * @param linkedServiceName The linked service name. * @param request proposed new name. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws CloudErrorAutoGeneratedException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return A {@link Mono} that completes when a successful response is received. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Void> renameLinkedServiceAsync(String linkedServiceName, ArtifactRenameRequest request) { return renameLinkedServiceWithResponseAsync(linkedServiceName, request) .flatMap((Response<Void> res) -> Mono.empty()); } /** * Renames a linked service. * * @param linkedServiceName The linked service name. * @param request proposed new name. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws CloudErrorAutoGeneratedException thrown if the request is rejected by server. 
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return A {@link Mono} that completes when a successful response is received. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Void> renameLinkedServiceAsync( String linkedServiceName, ArtifactRenameRequest request, Context context) { return renameLinkedServiceWithResponseAsync(linkedServiceName, request, context) .flatMap((Response<Void> res) -> Mono.empty()); } /** * Renames a linked service. * * @param linkedServiceName The linked service name. * @param request proposed new name. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws CloudErrorAutoGeneratedException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. */ @ServiceMethod(returns = ReturnType.SINGLE) public void renameLinkedService(String linkedServiceName, ArtifactRenameRequest request) { renameLinkedServiceAsync(linkedServiceName, request).block(); } /** * Renames a linked service. * * @param linkedServiceName The linked service name. * @param request proposed new name. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws CloudErrorAutoGeneratedException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the {@link Response}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Response<Void> renameLinkedServiceWithResponse( String linkedServiceName, ArtifactRenameRequest request, Context context) { return renameLinkedServiceWithResponseAsync(linkedServiceName, request, context).block(); } /** * Get the next page of items. * * @param nextLink The nextLink parameter. * @throws IllegalArgumentException thrown if parameters fail the validation. 
* @throws CloudErrorAutoGeneratedException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return a list of linked service resources along with {@link PagedResponse} on successful completion of {@link * Mono}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<PagedResponse<LinkedServiceResource>> getLinkedServicesByWorkspaceNextSinglePageAsync(String nextLink) { final String accept = "application/json"; return FluxUtil.withContext( context -> service.getLinkedServicesByWorkspaceNext( nextLink, this.client.getEndpoint(), accept, context)) .map( res -> new PagedResponseBase<>( res.getRequest(), res.getStatusCode(), res.getHeaders(), res.getValue().getValue(), res.getValue().getNextLink(), null)); } /** * Get the next page of items. * * @param nextLink The nextLink parameter. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws CloudErrorAutoGeneratedException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return a list of linked service resources along with {@link PagedResponse} on successful completion of {@link * Mono}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<PagedResponse<LinkedServiceResource>> getLinkedServicesByWorkspaceNextSinglePageAsync( String nextLink, Context context) { final String accept = "application/json"; return service.getLinkedServicesByWorkspaceNext(nextLink, this.client.getEndpoint(), accept, context) .map( res -> new PagedResponseBase<>( res.getRequest(), res.getStatusCode(), res.getHeaders(), res.getValue().getValue(), res.getValue().getNextLink(), null)); } }
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.action.admin.indices.mapping.get; import org.elasticsearch.Version; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperService; import java.io.IOException; import java.io.InputStream; import java.util.Collections; import java.util.Map; import java.util.Objects; import static java.util.Collections.unmodifiableMap; /** * Response object for {@link GetFieldMappingsRequest} API * * Note: there is a new class with the same name for the Java HLRC that uses a typeless format. * Any changes done to this class should go to that client class as well. 
 */
public class GetFieldMappingsResponse extends ActionResponse implements ToXContentObject {

    private static final ParseField MAPPINGS = new ParseField("mappings");

    // Keyed by index name, then by field name (as specified in the request).
    private final Map<String, Map<String, FieldMappingMetadata>> mappings;

    GetFieldMappingsResponse(Map<String, Map<String, FieldMappingMetadata>> mappings) {
        this.mappings = mappings;
    }

    GetFieldMappingsResponse(StreamInput in) throws IOException {
        super(in);
        mappings = unmodifiableMap(in.readMap(StreamInput::readString, mapIn -> {
            // Wire BWC: streams from before 8.0 carry an extra "type" layer between
            // the index and the field map; consume and discard it here.
            if (mapIn.getVersion().before(Version.V_8_0_0)) {
                int typesSize = mapIn.readVInt();
                assert typesSize == 1 || typesSize == 0 : "Expected 0 or 1 types but got " + typesSize;
                if (typesSize == 0) {
                    return Collections.emptyMap();
                }
                mapIn.readString(); // type name, ignored
            }
            return unmodifiableMap(mapIn.readMap(StreamInput::readString,
                inpt -> new FieldMappingMetadata(inpt.readString(), inpt.readBytesReference())));
        }));
    }

    /** returns the retrieved field mapping. The return map keys are index, field (as specified in the request). */
    public Map<String, Map<String, FieldMappingMetadata>> mappings() {
        return mappings;
    }

    /**
     * Returns the mappings of a specific field.
     *
     * @param index index name whose mappings are looked up
     * @param field field name as specified in the {@link GetFieldMappingsRequest}
     * @return FieldMappingMetadata for the requested field or null if not found.
     */
    public FieldMappingMetadata fieldMappings(String index, String field) {
        Map<String, FieldMappingMetadata> indexMapping = mappings.get(index);
        if (indexMapping == null) {
            return null;
        }
        return indexMapping.get(field);
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        // Produces: { "<index>": { "mappings": { "<field>": {...}, ... } }, ... }
        builder.startObject();
        for (Map.Entry<String, Map<String, FieldMappingMetadata>> indexEntry : mappings.entrySet()) {
            builder.startObject(indexEntry.getKey());
            builder.startObject(MAPPINGS.getPreferredName());

            if (indexEntry.getValue() != null) {
                addFieldMappingsToBuilder(builder, params, indexEntry.getValue());
            }

            builder.endObject();
            builder.endObject();
        }
        builder.endObject();
        return builder;
    }

    // Renders each field's metadata as "<field>": { full_name, mapping }.
    private void addFieldMappingsToBuilder(XContentBuilder builder,
                                           Params params,
                                           Map<String, FieldMappingMetadata> mappings) throws IOException {
        for (Map.Entry<String, FieldMappingMetadata> fieldEntry : mappings.entrySet()) {
            builder.startObject(fieldEntry.getKey());
            fieldEntry.getValue().toXContent(builder, params);
            builder.endObject();
        }
    }

    public static class FieldMappingMetadata implements ToXContentFragment {

        private static final ParseField FULL_NAME = new ParseField("full_name");
        private static final ParseField MAPPING = new ParseField("mapping");

        private static final ConstructingObjectParser<FieldMappingMetadata, String> PARSER =
            new ConstructingObjectParser<>("field_mapping_meta_data", true,
                a -> new FieldMappingMetadata((String)a[0], (BytesReference)a[1])
            );

        private final String fullName;
        // Raw JSON source of the field's mapping definition.
        private final BytesReference source;

        public FieldMappingMetadata(String fullName, BytesReference source) {
            this.fullName = fullName;
            this.source = source;
        }

        public String fullName() {
            return fullName;
        }

        /**
         * Returns the mappings as a map. Note that the returned map has a single key which is always the field's {@link Mapper#name}.
         */
        public Map<String, Object> sourceAsMap() {
            return XContentHelper.convertToMap(source, true, XContentType.JSON).v2();
        }

        //pkg-private for testing
        BytesReference getSource() {
            return source;
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.field(FULL_NAME.getPreferredName(), fullName);
            if (params.paramAsBoolean("pretty", false)) {
                // Pretty output: re-parse the source so it is pretty-printed as structured fields.
                builder.field("mapping", sourceAsMap());
            } else {
                // Fast path: splice the raw JSON bytes in without re-parsing.
                try (InputStream stream = source.streamInput()) {
                    builder.rawField(MAPPING.getPreferredName(), stream, XContentType.JSON);
                }
            }
            return builder;
        }

        @Override
        public String toString() {
            return "FieldMappingMetadata{fullName='" + fullName + '\'' + ", source=" + source + '}';
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (!(o instanceof FieldMappingMetadata)) return false;
            FieldMappingMetadata that = (FieldMappingMetadata) o;
            return Objects.equals(fullName, that.fullName) &&
                Objects.equals(source, that.source);
        }

        @Override
        public int hashCode() {
            return Objects.hash(fullName, source);
        }
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeMap(mappings, StreamOutput::writeString, (outpt, map) -> {
            // Wire BWC: mirror the constructor above — pre-8.0 peers expect a type
            // layer, so write a single dummy type entry.
            if (outpt.getVersion().before(Version.V_8_0_0)) {
                outpt.writeVInt(1);
                outpt.writeString(MapperService.SINGLE_MAPPING_NAME);
            }
            outpt.writeMap(map, StreamOutput::writeString, (o, v) -> {
                o.writeString(v.fullName());
                o.writeBytesReference(v.source);
            });
        });
    }

    @Override
    public String toString() {
        return "GetFieldMappingsResponse{" + "mappings=" + mappings + '}';
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (!(o instanceof GetFieldMappingsResponse)) return false;
        GetFieldMappingsResponse that = (GetFieldMappingsResponse) o;
        return Objects.equals(mappings, that.mappings);
    }

    @Override
    public int hashCode() {
        return Objects.hash(mappings);
    }
}
package org.simpleflatmapper.util;

import java.lang.annotation.Annotation;
import java.lang.reflect.Array;
import java.lang.reflect.GenericArrayType;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.lang.reflect.TypeVariable;
import java.lang.reflect.WildcardType;
import java.util.*;

/**
 * Static utility methods for working with {@link java.lang.reflect.Type}:
 * raw-class extraction, primitive boxing, assignability checks, and resolution
 * of generic type parameters against a concrete type.
 */
public final class TypeHelper {

    // Utility class: not instantiable.
    private TypeHelper() {}

    /**
     * Extracts the raw {@link Class} underlying any {@link Type}.
     * Type variables and wildcards resolve to their first (upper) bound;
     * generic arrays resolve to the corresponding array class.
     *
     * @throws UnsupportedOperationException for unrecognized Type implementations.
     */
    @SuppressWarnings("unchecked")
    public static <T> Class<T> toClass(Type target) {
        if (target instanceof Class) {
            return (Class<T>) target;
        } else if (target instanceof ParameterizedType) {
            return toClass(((ParameterizedType) target).getRawType());
        } else if (target instanceof TypeVariable) {
            // Use the first declared bound (Object if unbounded).
        	return toClass(((TypeVariable) target).getBounds()[0]);
        } else if (target instanceof WildcardType) {
            return toClass(((WildcardType)target).getUpperBounds()[0]);
        } else if (target instanceof GenericArrayType) {
            // No direct Class for generic arrays: instantiate a 0-length array and take its class.
            return (Class<T>) Array.newInstance(toClass(((GenericArrayType) target).getGenericComponentType()), 0).getClass();
        }
        throw new UnsupportedOperationException("Cannot extract class from type " + target + " " + target.getClass());
    }

    /** Returns the class loader of {@code target}'s raw class, or {@code defaultClassLoader} when unavailable. */
    public static ClassLoader getClassLoader(Type target, ClassLoader defaultClassLoader) {
        if (target == null) return defaultClassLoader;
        Class<?> clazz = toClass(target);
        if (clazz == null) return defaultClassLoader;
        return clazz.getClassLoader();
    }

    /**
     * Maps each type variable declared by {@code targetType}'s raw class to its
     * actual type argument. Empty when the type is not parameterized.
     */
    public static <T> Map<TypeVariable<?>, Type> getTypesMap(Type targetType) {
        Class<T> targetClass = TypeHelper.toClass(targetType);
        Map<TypeVariable<?>, Type> genericTypes = Collections.emptyMap();
        if (targetType instanceof ParameterizedType) {
            TypeVariable<Class<T>>[] typeParameters = targetClass.getTypeParameters();
            Type[] actualTypeArguments = ((ParameterizedType) targetType).getActualTypeArguments();

            genericTypes = new HashMap<TypeVariable<?>, Type>();
            for (int i = 0; i < typeParameters.length; i++) {
                TypeVariable<?> typeParameter = typeParameters[i];
                Type typeArgument = actualTypeArguments[i];
                genericTypes.put(typeParameter, typeArgument);
            }
        }

        return genericTypes;
    }

    /** True when the raw class of {@code type} is a primitive type. */
    public static boolean isPrimitive(Type type) {
        return toClass(type).isPrimitive();
    }

    /** Returns the boxed wrapper for a primitive class, or the class itself otherwise. */
    public static Class<?> wrap(Class<?> target) {
        if (target.isPrimitive()) {
            return wrappers.get(target);
        } else {
            return target;
        }
    }

    /** Assignability check after boxing both sides (so int ~ Integer). */
    public static boolean areCompatible(Class<?> target, Class<?> source) {
        Class<?> wrapTarget = wrap(target);
        Class<?> wrapSource = wrap(source);
        return wrapTarget.isAssignableFrom(wrapSource);
    }

    /** True when {@code target} (boxed) is a {@link Number} subtype. */
    public static boolean isNumber(Type target) {
        return Number.class.isAssignableFrom(wrap(TypeHelper.toClass(target)));
    }

    // Primitive -> wrapper lookup used by wrap()/toBoxedClass().
    private final static Map<Class<?>, Class<?>> wrappers = new HashMap<Class<?>, Class<?>>();

    static {
        wrappers.put(boolean.class, Boolean.class);
        wrappers.put(byte.class, Byte.class);
        wrappers.put(short.class, Short.class);
        wrappers.put(char.class, Character.class);
        wrappers.put(int.class, Integer.class);
        wrappers.put(long.class, Long.class);
        wrappers.put(float.class, Float.class);
        wrappers.put(double.class, Double.class);
        wrappers.put(void.class, Void.class);
    }

    /** True when the raw class of {@code outType} is an array type. */
    public static boolean isArray(Type outType) {
        return TypeHelper.toClass(outType).isArray();
    }

    /**
     * Component type of an array, or the element type of an {@link Iterable}
     * implementation; falls back to {@code Object.class} when unresolvable.
     */
    public static Type getComponentTypeOfListOrArray(Type outType) {
        Class<?> target = toClass(outType);
        if (target.isArray()) {
            return toClass(outType).getComponentType();
        } else {
            Type[] parameterTypes = getGenericParameterForClass(outType, Iterable.class);
            if (parameterTypes != null) {
                Type parameterType = parameterTypes[0];
                if (parameterType != null) {
                    return parameterType;
                }
            }
        }
        return Object.class;
    }

    /** Key/value types of a {@link Map} implementation; OBJECT_OBJECT when unresolvable. */
    public static MapEntryTypes getKeyValueTypeOfMap(Type outType) {
        Type[] parameterTypes = getGenericParameterForClass(outType, Map.class);
        if (parameterTypes != null) {
            return new MapEntryTypes(parameterTypes[0], parameterTypes[1]);
        }
        return MapEntryTypes.OBJECT_OBJECT;
    }

    // Walks the generic-interface graph of t looking for interface i;
    // returns the (possibly parameterized) Type node for i, or null if absent.
    private static Type getGenericInterface(Type t, Class<?> i) {
        if (TypeHelper.areEquals(t, i)) {
            return t;
        }
        Type[] genericInterfaces = TypeHelper.toClass(t).getGenericInterfaces();
        for(Type it : genericInterfaces) {
            if (isAssignable(i, it)) {
                if (areEquals(it, i)) {
                    return it;
                } else {
                    // i is further up the hierarchy: recurse through this interface.
                    return getGenericInterface(it, i);
                }
            }
        }
        return null;
    }

    // Generic superclass of t's raw class (used when i is not on the interface path).
    private static Type getGenericSuperType(Type t) {
        return TypeHelper.toClass(t).getGenericSuperclass();
    }

    /** Assignability between two Types, after boxing both raw classes. */
    public static boolean isAssignable(Type type, Type from) {
        return isAssignable(TypeHelper.toBoxedClass(type), from);
    }

    /** Assignability of {@code from}'s boxed raw class to {@code class1}. */
    public static boolean isAssignable(Class<?> class1, Type from) {
        return class1.isAssignableFrom(toBoxedClass(from));
    }

    /** True for primitives and classes in the java.lang package. */
    public static boolean isJavaLang(Type target) {
        Class<?> clazz = TypeHelper.toClass(target);
        return clazz.isPrimitive() || (clazz.getPackage() != null && clazz.getPackage().getName().equals("java.lang"));
    }

    /**
     * Applies {@code packagePredicate} to the package name of {@code target}'s raw class.
     * Returns false for classes with no package (e.g. arrays, primitives).
     */
    public static boolean isInPackage(Type target, Predicate<String> packagePredicate) {
        Class<?> clazz = TypeHelper.toClass(target);
        Package clazzPackage = clazz.getPackage();
        if (clazzPackage != null) {
            return packagePredicate.test(clazzPackage.getName());
        }
        return false;
    }

    /** True when the raw class of {@code target} is an enum. */
    public static boolean isEnum(Type target) {
        Class<?> clazz = TypeHelper.toClass(target);
        return clazz.isEnum();
    }

    /** Boxed raw class of a {@link Type}. */
    public static Class<?> toBoxedClass(Type type) {
        return TypeHelper.toBoxedClass(toClass(type));
    }

    /**
     * Boxed equivalent of a class; non-primitives are returned unchanged.
     *
     * @throws RuntimeException if a primitive has no registered wrapper (should not happen).
     */
    public static Class<?> toBoxedClass(Class<?> target) {
        if (target.isPrimitive()) {
            Class<?> clazz = wrappers.get(target);
            if (clazz == null) {
                throw new RuntimeException("Unexpected primitive type " + target);
            }
            return clazz;
        } else {
            return target;
        }
    }

    /** Raw-class equality of two Types (generic arguments are ignored). */
    public static boolean areEquals(Type target, Type clazz) {
        return TypeHelper.toClass(clazz).equals(TypeHelper.toClass(target));
    }

    /**
     * Resolves the actual type arguments with which {@code type} implements or
     * extends {@code interfaceClass}. Searches the interface graph first, then
     * the superclass chain; remaining type variables are resolved against
     * {@code type} where possible.
     *
     * @return the resolved type arguments, or null when the relevant declaration
     *     is not parameterized.
     * @throws IllegalArgumentException when {@code type} is not assignable to
     *     {@code interfaceClass}.
     */
    public static Type[] getGenericParameterForClass(Type type, Class<?> interfaceClass) {
        if (isAssignable(interfaceClass, type)) {
            // first look for the interface
            Type genericInterface = getGenericInterface(type, interfaceClass);

            final Type[] types;
            if (genericInterface != null) {
                if (genericInterface instanceof ParameterizedType) {
                    types = ((ParameterizedType) genericInterface).getActualTypeArguments();
                } else {
                    // Raw (unparameterized) use of the interface: nothing to resolve.
                    return null;
                }
            } else {
                // Not on the interface path: recurse up the superclass chain.
                types = getGenericParameterForClass(TypeHelper.getGenericSuperType(type), interfaceClass);
            }

            resolveTypeVariables(type, types);

            return types;
        } else {
            throw new IllegalArgumentException("type " + type + " does not implement/extends " + interfaceClass);
        }
    }

    /** Replaces, in place, any TypeVariable entries of {@code types} resolvable from {@code source}. */
    public static void resolveTypeVariables(Type source, Type[] types) {
        for(int i = 0; i < types.length; i++) {
            Type t = types[i];
            if (t instanceof TypeVariable) {
                types[i] = resolveTypeVariable(source, (TypeVariable) t);
            }
        }
    }

    /**
     * Resolves a type variable by matching its name against the type parameters
     * declared by {@code type}'s raw class. Falls back to the single actual type
     * argument when names don't line up but the arity is 1, and to
     * {@code Object.class} otherwise.
     */
    public static Type resolveTypeVariable(Type type, TypeVariable t) {
        TypeVariable<Class<Object>>[] typeParameters = TypeHelper.toClass(type).getTypeParameters();

        for(int i = 0; i < typeParameters.length; i++) {
            TypeVariable<Class<Object>> typeVariable = typeParameters[i];
            if (typeVariable.getName().equals(t.getName())) {
                if (type instanceof ParameterizedType) {
                    return ((ParameterizedType) type).getActualTypeArguments()[i];
                } else {
                    // Raw type: the variable cannot be resolved.
                    return Object.class;
                }
            }
        }

        // Heuristic: a single type parameter with a single actual argument is assumed to match.
        if (typeParameters.length == 1 && type instanceof ParameterizedType && ((ParameterizedType) type).getActualTypeArguments().length == 1) {
            return ((ParameterizedType) type).getActualTypeArguments()[0];
        }

        return Object.class;
    }

    /** True when the class carries the kotlin.Metadata annotation (i.e. was compiled from Kotlin). */
    public static boolean isKotlinClass(Type target) {
        Annotation[] annotations = TypeHelper.toClass(target).getDeclaredAnnotations();
        if (annotations != null) {
            for(int i = 0; i < annotations.length;i++) {
                Annotation a = annotations[i];
                if (a.annotationType().getName().equals("kotlin.Metadata")) {
                    return true;
                }
            }
        }
        return false;
    }

    /**
     * Immutable pair of a Map's key and value types, as resolved by
     * {@link #getKeyValueTypeOfMap(Type)}.
     */
    public static class MapEntryTypes {
        // Shared fallback for raw/unresolvable maps.
        public static final MapEntryTypes OBJECT_OBJECT = new MapEntryTypes(Object.class, Object.class);
        private final Type keyType;
        private final Type valueType;

        public MapEntryTypes(Type keyType, Type valueType) {
            this.keyType = keyType;
            this.valueType = valueType;
        }

        public Type getKeyType() { return keyType; }

        public Type getValueType() { return valueType; }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;

            MapEntryTypes that = (MapEntryTypes) o;

            if (keyType != null ? !keyType.equals(that.keyType) : that.keyType != null) return false;
            return valueType != null ? valueType.equals(that.valueType) : that.valueType == null;
        }

        @Override
        public int hashCode() {
            int result = keyType != null ? keyType.hashCode() : 0;
            result = 31 * result + (valueType != null ? valueType.hashCode() : 0);
            return result;
        }

        @Override
        public String toString() {
            return "MapEntryTypes{" +
                    "keyType=" + keyType +
                    ", valueType=" + valueType +
                    '}';
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.jasper.xmlparser;

import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.io.UTFDataFormatException;

import org.apache.jasper.compiler.Localizer;
import org.apache.juli.logging.Log;
import org.apache.juli.logging.LogFactory;

/**
 * A {@link Reader} that decodes a UTF-8 byte stream, reporting malformed
 * sequences via {@link UTFDataFormatException}. Supplementary characters
 * (4-byte sequences) are returned as surrogate pairs; a pending low
 * surrogate is buffered in {@link #fSurrogate} between calls.
 *
 * @author Andy Clark, IBM
 *
 * @deprecated Will be removed in Tomcat 9.0.x onwards
 */
@Deprecated
public class UTF8Reader
    extends Reader {

    private final Log log = LogFactory.getLog(UTF8Reader.class);

    //
    // debugging
    //

    /** Debug read. */
    private static final boolean DEBUG_READ = false;

    //
    // Data
    //

    /** Input stream. */
    private final InputStream fInputStream;

    /** Byte buffer. */
    private final byte[] fBuffer;

    /**
     * Offset into buffer. A non-zero value means the previous block read
     * ended in a malformed sequence: fBuffer[0..fOffset) holds the offending
     * bytes, which are re-decoded (and the error re-raised) on the next read.
     */
    private int fOffset;

    /** Pending low surrogate from a 4-byte sequence, or -1 if none. */
    private int fSurrogate = -1;

    //
    // Constructors
    //

    /**
     * Constructs a UTF-8 reader from the specified input stream and
     * buffer size.
     *
     * @param inputStream The input stream.
     * @param size        The initial buffer size.
     */
    public UTF8Reader(InputStream inputStream, int size) {
        fInputStream = inputStream;
        fBuffer = new byte[size];
    }

    //
    // Reader methods
    //

    /**
     * Read a single character.  This method will block until a character is
     * available, an I/O error occurs, or the end of the stream is reached.
     *
     * <p> Subclasses that intend to support efficient single-character input
     * should override this method.
     *
     * @return The character read, as an integer in the range 0 to 65535
     *         (<tt>0x00-0xffff</tt>), or -1 if the end of the stream has
     *         been reached
     *
     * @exception IOException If an I/O error occurs
     */
    @Override
    public int read() throws IOException {

        // decode character
        int c = fSurrogate;
        if (fSurrogate == -1) {
            // NOTE: We use the index into the buffer if there are remaining
            //       bytes from the last block read. -Ac
            int index = 0;

            // get first byte
            int b0 = index == fOffset
                   ? fInputStream.read() : fBuffer[index++] & 0x00FF;
            if (b0 == -1) {
                return -1;
            }

            // UTF-8:   [0xxx xxxx]
            // Unicode: [0000 0000] [0xxx xxxx]
            if (b0 < 0x80) {
                c = (char)b0;
            }

            // UTF-8:   [110y yyyy] [10xx xxxx]
            // Unicode: [0000 0yyy] [yyxx xxxx]
            else if ((b0 & 0xE0) == 0xC0) {
                int b1 = index == fOffset
                       ? fInputStream.read() : fBuffer[index++] & 0x00FF;
                if (b1 == -1) {
                    expectedByte(2, 2);
                }
                if ((b1 & 0xC0) != 0x80) {
                    invalidByte(2, 2);
                }
                c = ((b0 << 6) & 0x07C0) | (b1 & 0x003F);
            }

            // UTF-8:   [1110 zzzz] [10yy yyyy] [10xx xxxx]
            // Unicode: [zzzz yyyy] [yyxx xxxx]
            else if ((b0 & 0xF0) == 0xE0) {
                int b1 = index == fOffset
                       ? fInputStream.read() : fBuffer[index++] & 0x00FF;
                if (b1 == -1) {
                    expectedByte(2, 3);
                }
                if ((b1 & 0xC0) != 0x80) {
                    invalidByte(2, 3);
                }
                int b2 = index == fOffset
                       ? fInputStream.read() : fBuffer[index++] & 0x00FF;
                if (b2 == -1) {
                    expectedByte(3, 3);
                }
                if ((b2 & 0xC0) != 0x80) {
                    invalidByte(3, 3);
                }
                c = ((b0 << 12) & 0xF000) | ((b1 << 6) & 0x0FC0) |
                    (b2 & 0x003F);
            }

            // UTF-8:   [1111 0uuu] [10uu zzzz] [10yy yyyy] [10xx xxxx]*
            // Unicode: [1101 10ww] [wwzz zzyy] (high surrogate)
            //          [1101 11yy] [yyxx xxxx] (low surrogate)
            //          * uuuuu = wwww + 1
            else if ((b0 & 0xF8) == 0xF0) {
                int b1 = index == fOffset
                       ? fInputStream.read() : fBuffer[index++] & 0x00FF;
                if (b1 == -1) {
                    expectedByte(2, 4);
                }
                if ((b1 & 0xC0) != 0x80) {
                    // fixed: this is byte 2 of a 4-byte sequence (was reported as "of 3")
                    invalidByte(2, 4);
                }
                int b2 = index == fOffset
                       ? fInputStream.read() : fBuffer[index++] & 0x00FF;
                if (b2 == -1) {
                    expectedByte(3, 4);
                }
                if ((b2 & 0xC0) != 0x80) {
                    // fixed: byte 3 of a 4-byte sequence (was reported as "of 3")
                    invalidByte(3, 4);
                }
                int b3 = index == fOffset
                       ? fInputStream.read() : fBuffer[index++] & 0x00FF;
                if (b3 == -1) {
                    expectedByte(4, 4);
                }
                if ((b3 & 0xC0) != 0x80) {
                    invalidByte(4, 4);
                }
                int uuuuu = ((b0 << 2) & 0x001C) | ((b1 >> 4) & 0x0003);
                if (uuuuu > 0x10) {
                    invalidSurrogate(uuuuu);
                }
                int wwww = uuuuu - 1;
                int hs = 0xD800 |
                         ((wwww << 6) & 0x03C0) | ((b1 << 2) & 0x003C) |
                         ((b2 >> 4) & 0x0003);
                int ls = 0xDC00 | ((b2 << 6) & 0x03C0) | (b3 & 0x003F);
                // return the high surrogate now; buffer the low surrogate
                // for the next call
                c = hs;
                fSurrogate = ls;
            }

            // error
            else {
                invalidByte(1, 1);
            }
        }

        // use surrogate
        else {
            fSurrogate = -1;
        }

        // return character
        if (DEBUG_READ) {
            if (log.isDebugEnabled())
                log.debug("read(): 0x"+Integer.toHexString(c));
        }
        return c;

    } // read():int

    /**
     * Read characters into a portion of an array.  This method will block
     * until some input is available, an I/O error occurs, or the end of the
     * stream is reached.
     *
     * @param      ch     Destination buffer
     * @param      offset Offset at which to start storing characters
     * @param      length Maximum number of characters to read
     *
     * @return     The number of characters read, or -1 if the end of the
     *             stream has been reached
     *
     * @exception  IOException  If an I/O error occurs
     */
    @Override
    public int read(char ch[], int offset, int length) throws IOException {

        // handle surrogate
        int out = offset;
        if (fSurrogate != -1) {
            // FIX: the pending low surrogate must go at ch[offset], not
            // ch[offset + 1]; the old index left ch[offset] stale and let the
            // next decoded character overwrite the surrogate.
            ch[offset] = (char)fSurrogate;
            fSurrogate = -1;
            length--;
            out++;
        }

        // read bytes
        int count = 0;
        if (fOffset == 0) {
            // adjust length to read
            if (length > fBuffer.length) {
                length = fBuffer.length;
            }

            // perform read operation
            count = fInputStream.read(fBuffer, 0, length);
            if (count == -1) {
                // FIX: if a pending surrogate was already emitted above,
                // report it instead of losing it behind an early EOF.
                return out > offset ? out - offset : -1;
            }
        }

        // skip read; last character was in error
        // NOTE: Having an offset value other than zero means that there was
        //       an error in the last character read. In this case, we have
        //       skipped the read so we don't consume any bytes past the
        //       error. By signaling the error on the next block read we
        //       allow the method to return the most valid characters that
        //       it can on the previous block read. -Ac
        else {
            count = fOffset;
            fOffset = 0;
        }

        // convert bytes to characters
        // FIX: 'total' is the number of bytes actually present in fBuffer;
        // the surrogate adjustment is added to 'count' (the running character
        // tally) only, so the loop no longer reads one stale byte past the
        // data when a surrogate was pending.
        final int total = count;
        count += out - offset;
        for (int in = 0; in < total; in++) {
            int b0 = fBuffer[in] & 0x00FF;

            // UTF-8:   [0xxx xxxx]
            // Unicode: [0000 0000] [0xxx xxxx]
            if (b0 < 0x80) {
                ch[out++] = (char)b0;
                continue;
            }

            // UTF-8:   [110y yyyy] [10xx xxxx]
            // Unicode: [0000 0yyy] [yyxx xxxx]
            if ((b0 & 0xE0) == 0xC0) {
                int b1 = -1;
                if (++in < total) {
                    b1 = fBuffer[in] & 0x00FF;
                }
                else {
                    b1 = fInputStream.read();
                    if (b1 == -1) {
                        if (out > offset) {
                            // stash the partial sequence and report what we have
                            fBuffer[0] = (byte)b0;
                            fOffset = 1;
                            return out - offset;
                        }
                        expectedByte(2, 2);
                    }
                    count++;
                }
                if ((b1 & 0xC0) != 0x80) {
                    if (out > offset) {
                        fBuffer[0] = (byte)b0;
                        fBuffer[1] = (byte)b1;
                        fOffset = 2;
                        return out - offset;
                    }
                    invalidByte(2, 2);
                }
                int c = ((b0 << 6) & 0x07C0) | (b1 & 0x003F);
                ch[out++] = (char)c;
                count -= 1;
                continue;
            }

            // UTF-8:   [1110 zzzz] [10yy yyyy] [10xx xxxx]
            // Unicode: [zzzz yyyy] [yyxx xxxx]
            if ((b0 & 0xF0) == 0xE0) {
                int b1 = -1;
                if (++in < total) {
                    b1 = fBuffer[in] & 0x00FF;
                }
                else {
                    b1 = fInputStream.read();
                    if (b1 == -1) {
                        if (out > offset) {
                            fBuffer[0] = (byte)b0;
                            fOffset = 1;
                            return out - offset;
                        }
                        expectedByte(2, 3);
                    }
                    count++;
                }
                if ((b1 & 0xC0) != 0x80) {
                    if (out > offset) {
                        fBuffer[0] = (byte)b0;
                        fBuffer[1] = (byte)b1;
                        fOffset = 2;
                        return out - offset;
                    }
                    invalidByte(2, 3);
                }
                int b2 = -1;
                if (++in < total) {
                    b2 = fBuffer[in] & 0x00FF;
                }
                else {
                    b2 = fInputStream.read();
                    if (b2 == -1) {
                        if (out > offset) {
                            fBuffer[0] = (byte)b0;
                            fBuffer[1] = (byte)b1;
                            fOffset = 2;
                            return out - offset;
                        }
                        expectedByte(3, 3);
                    }
                    count++;
                }
                if ((b2 & 0xC0) != 0x80) {
                    if (out > offset) {
                        fBuffer[0] = (byte)b0;
                        fBuffer[1] = (byte)b1;
                        fBuffer[2] = (byte)b2;
                        fOffset = 3;
                        return out - offset;
                    }
                    invalidByte(3, 3);
                }
                int c = ((b0 << 12) & 0xF000) | ((b1 << 6) & 0x0FC0) |
                        (b2 & 0x003F);
                ch[out++] = (char)c;
                count -= 2;
                continue;
            }

            // UTF-8:   [1111 0uuu] [10uu zzzz] [10yy yyyy] [10xx xxxx]*
            // Unicode: [1101 10ww] [wwzz zzyy] (high surrogate)
            //          [1101 11yy] [yyxx xxxx] (low surrogate)
            //          * uuuuu = wwww + 1
            if ((b0 & 0xF8) == 0xF0) {
                int b1 = -1;
                if (++in < total) {
                    b1 = fBuffer[in] & 0x00FF;
                }
                else {
                    b1 = fInputStream.read();
                    if (b1 == -1) {
                        if (out > offset) {
                            fBuffer[0] = (byte)b0;
                            fOffset = 1;
                            return out - offset;
                        }
                        expectedByte(2, 4);
                    }
                    count++;
                }
                if ((b1 & 0xC0) != 0x80) {
                    if (out > offset) {
                        fBuffer[0] = (byte)b0;
                        fBuffer[1] = (byte)b1;
                        fOffset = 2;
                        return out - offset;
                    }
                    invalidByte(2, 4);
                }
                int b2 = -1;
                if (++in < total) {
                    b2 = fBuffer[in] & 0x00FF;
                }
                else {
                    b2 = fInputStream.read();
                    if (b2 == -1) {
                        if (out > offset) {
                            fBuffer[0] = (byte)b0;
                            fBuffer[1] = (byte)b1;
                            fOffset = 2;
                            return out - offset;
                        }
                        expectedByte(3, 4);
                    }
                    count++;
                }
                if ((b2 & 0xC0) != 0x80) {
                    if (out > offset) {
                        fBuffer[0] = (byte)b0;
                        fBuffer[1] = (byte)b1;
                        fBuffer[2] = (byte)b2;
                        fOffset = 3;
                        return out - offset;
                    }
                    invalidByte(3, 4);
                }
                int b3 = -1;
                if (++in < total) {
                    b3 = fBuffer[in] & 0x00FF;
                }
                else {
                    b3 = fInputStream.read();
                    if (b3 == -1) {
                        if (out > offset) {
                            fBuffer[0] = (byte)b0;
                            fBuffer[1] = (byte)b1;
                            fBuffer[2] = (byte)b2;
                            fOffset = 3;
                            return out - offset;
                        }
                        expectedByte(4, 4);
                    }
                    count++;
                }
                if ((b3 & 0xC0) != 0x80) {
                    if (out > offset) {
                        fBuffer[0] = (byte)b0;
                        fBuffer[1] = (byte)b1;
                        fBuffer[2] = (byte)b2;
                        fBuffer[3] = (byte)b3;
                        fOffset = 4;
                        return out - offset;
                    }
                    invalidByte(4, 4);
                }

                // decode bytes into surrogate characters
                int uuuuu = ((b0 << 2) & 0x001C) | ((b1 >> 4) & 0x0003);
                if (uuuuu > 0x10) {
                    invalidSurrogate(uuuuu);
                }
                int wwww = uuuuu - 1;
                int zzzz = b1 & 0x000F;
                int yyyyyy = b2 & 0x003F;
                int xxxxxx = b3 & 0x003F;
                int hs = 0xD800 | ((wwww << 6) & 0x03C0) | (zzzz << 2) | (yyyyyy >> 4);
                int ls = 0xDC00 | ((yyyyyy << 6) & 0x03C0) | xxxxxx;

                // set characters
                ch[out++] = (char)hs;
                ch[out++] = (char)ls;
                count -= 2;
                continue;
            }

            // error
            if (out > offset) {
                fBuffer[0] = (byte)b0;
                fOffset = 1;
                return out - offset;
            }
            invalidByte(1, 1);
        }

        // return number of characters converted
        if (DEBUG_READ) {
            if (log.isDebugEnabled())
                log.debug("read(char[],"+offset+','+length+"): count="+count);
        }
        return count;

    } // read(char[],int,int)

    /**
     * Skip characters.  This method will block until some characters are
     * available, an I/O error occurs, or the end of the stream is reached.
     *
     * @param  n  The number of characters to skip
     *
     * @return    The number of characters actually skipped
     *
     * @exception  IOException  If an I/O error occurs
     */
    @Override
    public long skip(long n) throws IOException {

        long remaining = n;
        final char[] ch = new char[fBuffer.length];
        do {
            int length = ch.length < remaining ? ch.length : (int)remaining;
            int count = read(ch, 0, length);
            if (count > 0) {
                remaining -= count;
            }
            else {
                break;
            }
        } while (remaining > 0);

        long skipped = n - remaining;
        return skipped;

    } // skip(long):long

    /**
     * Tell whether this stream is ready to be read.
     *
     * @return True if the next read() is guaranteed not to block for input,
     * false otherwise.  Note that returning false does not guarantee that the
     * next read will block.
     *
     * @exception  IOException  If an I/O error occurs
     */
    @Override
    public boolean ready() throws IOException {
        return false;
    } // ready()

    /**
     * Tell whether this stream supports the mark() operation.
     */
    @Override
    public boolean markSupported() {
        return false;
    } // markSupported()

    /**
     * Mark the present position in the stream.  Subsequent calls to reset()
     * will attempt to reposition the stream to this point.  Not all
     * character-input streams support the mark() operation.
     *
     * @param  readAheadLimit  Limit on the number of characters that may be
     *                         read while still preserving the mark.  After
     *                         reading this many characters, attempting to
     *                         reset the stream may fail.
     *
     * @exception  IOException  If the stream does not support mark(),
     *                          or if some other I/O error occurs
     */
    @Override
    public void mark(int readAheadLimit) throws IOException {
        throw new IOException(
                Localizer.getMessage("jsp.error.xml.operationNotSupported",
                                     "mark()", "UTF-8"));
    }

    /**
     * Reset the stream.  If the stream has been marked, then attempt to
     * reposition it at the mark.  If the stream has not been marked, then
     * attempt to reset it in some way appropriate to the particular stream,
     * for example by repositioning it to its starting point.  Not all
     * character-input streams support the reset() operation, and some support
     * reset() without supporting mark().
     *
     * @exception  IOException  If the stream has not been marked,
     *                          or if the mark has been invalidated,
     *                          or if the stream does not support reset(),
     *                          or if some other I/O error occurs
     */
    @Override
    public void reset() throws IOException {
        fOffset = 0;
        fSurrogate = -1;
    } // reset()

    /**
     * Close the stream.  Once a stream has been closed, further read(),
     * ready(), mark(), or reset() invocations will throw an IOException.
     * Closing a previously-closed stream, however, has no effect.
     *
     * @exception  IOException  If an I/O error occurs
     */
    @Override
    public void close() throws IOException {
        fInputStream.close();
    } // close()

    //
    // Private methods
    //

    /** Throws an exception for expected byte. */
    private void expectedByte(int position, int count)
        throws UTFDataFormatException {

        throw new UTFDataFormatException(
                Localizer.getMessage("jsp.error.xml.expectedByte",
                                     Integer.toString(position),
                                     Integer.toString(count)));

    } // expectedByte(int,int)

    /** Throws an exception for invalid byte. */
    private void invalidByte(int position, int count)
        throws UTFDataFormatException {

        throw new UTFDataFormatException(
                Localizer.getMessage("jsp.error.xml.invalidByte",
                                     Integer.toString(position),
                                     Integer.toString(count)));

    } // invalidByte(int,int,int)

    /** Throws an exception for invalid surrogate bits. */
    private void invalidSurrogate(int uuuuu) throws UTFDataFormatException {

        throw new UTFDataFormatException(
                Localizer.getMessage("jsp.error.xml.invalidHighSurrogate",
                                     Integer.toHexString(uuuuu)));

    } // invalidSurrogate(int)

} // class UTF8Reader
import java.util.LinkedHashMap; import java.util.Map; public class s { private final String a; private Map<String, String> b = new LinkedHashMap(); private Throwable c; public static int d; private static final String[] e; public s(String var1) { this.a = var1; } public s a(Throwable var1) { this.c = var1; return this; } public s a(String var1, Object var2) { try { if (var2 != null) { this.b.put(var1, var2.toString()); } return this; } catch (a_ var3) { throw var3; } } public a9 a() { return r.b(this.b()); } protected p b() { // $FF: Couldn't be decompiled } protected static String b(Throwable var0) { String var1 = "-"; StackTraceElement[] var2; if (var0 != null) { var2 = var0.getStackTrace(); if (var2.length > 0) { var1 = var2[0].getFileName() + ":" + var2[0].getLineNumber() + "[" + var2[0].getClassName() + "." + var2[0].getMethodName() + "]"; } } else { var2 = Thread.currentThread().getStackTrace(); if (var2.length > 5) { var1 = var2[5].getFileName() + ":" + var2[5].getLineNumber() + "[" + var2[5].getClassName() + "." 
+ var2[5].getMethodName() + "]"; } } return var1; } static { String[] var10000 = new String[5]; char[] var10003 = "][BX\u0014\u0014\u000bsI\u0018\u001fA'".toCharArray(); int var10005 = var10003.length; int var1 = 0; char[] var10004 = var10003; int var2 = var10005; char[] var4; int var10006; char var10007; byte var10008; if (var10005 <= 1) { var4 = var10003; var10006 = var1; var10007 = var10003[var1]; switch(var1 % 5) { case 0: var10008 = 113; break; case 1: var10008 = 123; break; case 2: var10008 = 7; break; case 3: var10008 = 32; break; default: var10008 = 119; } } else { var10004 = var10003; var2 = var10005; if (var10005 <= var1) { label316: { var10000[0] = (new String(var10003)).intern(); var10003 = "\u001e\tnG\u001e\u001f".toCharArray(); var10005 = var10003.length; var1 = 0; var10004 = var10003; var2 = var10005; if (var10005 <= 1) { var4 = var10003; var10006 = var1; } else { var10004 = var10003; var2 = var10005; if (var10005 <= var1) { break label316; } var4 = var10003; var10006 = var1; } while(true) { var10007 = var4[var10006]; switch(var1 % 5) { case 0: var10008 = 113; break; case 1: var10008 = 123; break; case 2: var10008 = 7; break; case 3: var10008 = 32; break; default: var10008 = 119; } var4[var10006] = (char)(var10007 ^ var10008); ++var1; if (var2 == 0) { var10006 = var2; var4 = var10004; } else { if (var2 <= var1) { break; } var4 = var10004; var10006 = var1; } } } var10000[1] = (new String(var10004)).intern(); var10003 = "K[".toCharArray(); var10005 = var10003.length; var1 = 0; var10004 = var10003; var2 = var10005; if (var10005 <= 1) { var4 = var10003; var10006 = var1; var10007 = var10003[var1]; switch(var1 % 5) { case 0: var10008 = 113; break; case 1: var10008 = 123; break; case 2: var10008 = 7; break; case 3: var10008 = 32; break; default: var10008 = 119; } } else { var10004 = var10003; var2 = var10005; if (var10005 <= var1) { label384: { var10000[2] = (new String(var10003)).intern(); var10003 = "QF'".toCharArray(); var10005 = var10003.length; var1 = 
0; var10004 = var10003; var2 = var10005; if (var10005 <= 1) { var4 = var10003; var10006 = var1; } else { var10004 = var10003; var2 = var10005; if (var10005 <= var1) { break label384; } var4 = var10003; var10006 = var1; } while(true) { var10007 = var4[var10006]; switch(var1 % 5) { case 0: var10008 = 113; break; case 1: var10008 = 123; break; case 2: var10008 = 7; break; case 3: var10008 = 32; break; default: var10008 = 119; } var4[var10006] = (char)(var10007 ^ var10008); ++var1; if (var2 == 0) { var10006 = var2; var4 = var10004; } else { if (var2 <= var1) { break; } var4 = var10004; var10006 = var1; } } } var10000[3] = (new String(var10004)).intern(); var10003 = "][".toCharArray(); var10005 = var10003.length; var1 = 0; var10004 = var10003; var2 = var10005; if (var10005 <= 1) { var4 = var10003; var10006 = var1; } else { var10004 = var10003; var2 = var10005; if (var10005 <= var1) { var10000[4] = (new String(var10003)).intern(); e = var10000; return; } var4 = var10003; var10006 = var1; } while(true) { var10007 = var4[var10006]; switch(var1 % 5) { case 0: var10008 = 113; break; case 1: var10008 = 123; break; case 2: var10008 = 7; break; case 3: var10008 = 32; break; default: var10008 = 119; } var4[var10006] = (char)(var10007 ^ var10008); ++var1; if (var2 == 0) { var10006 = var2; var4 = var10004; } else { if (var2 <= var1) { var10000[4] = (new String(var10004)).intern(); e = var10000; return; } var4 = var10004; var10006 = var1; } } } var4 = var10003; var10006 = var1; var10007 = var10003[var1]; switch(var1 % 5) { case 0: var10008 = 113; break; case 1: var10008 = 123; break; case 2: var10008 = 7; break; case 3: var10008 = 32; break; default: var10008 = 119; } } while(true) { while(true) { var4[var10006] = (char)(var10007 ^ var10008); ++var1; if (var2 == 0) { var10006 = var2; var4 = var10004; var10007 = var10004[var2]; switch(var1 % 5) { case 0: var10008 = 113; break; case 1: var10008 = 123; break; case 2: var10008 = 7; break; case 3: var10008 = 32; break; default: var10008 
= 119; } } else { if (var2 <= var1) { label492: { var10000[2] = (new String(var10004)).intern(); var10003 = "QF'".toCharArray(); var10005 = var10003.length; var1 = 0; var10004 = var10003; var2 = var10005; if (var10005 <= 1) { var4 = var10003; var10006 = var1; } else { var10004 = var10003; var2 = var10005; if (var10005 <= var1) { break label492; } var4 = var10003; var10006 = var1; } while(true) { var10007 = var4[var10006]; switch(var1 % 5) { case 0: var10008 = 113; break; case 1: var10008 = 123; break; case 2: var10008 = 7; break; case 3: var10008 = 32; break; default: var10008 = 119; } var4[var10006] = (char)(var10007 ^ var10008); ++var1; if (var2 == 0) { var10006 = var2; var4 = var10004; } else { if (var2 <= var1) { break; } var4 = var10004; var10006 = var1; } } } var10000[3] = (new String(var10004)).intern(); var10003 = "][".toCharArray(); var10005 = var10003.length; var1 = 0; var10004 = var10003; var2 = var10005; if (var10005 <= 1) { var4 = var10003; var10006 = var1; } else { var10004 = var10003; var2 = var10005; if (var10005 <= var1) { var10000[4] = (new String(var10003)).intern(); e = var10000; return; } var4 = var10003; var10006 = var1; } while(true) { var10007 = var4[var10006]; switch(var1 % 5) { case 0: var10008 = 113; break; case 1: var10008 = 123; break; case 2: var10008 = 7; break; case 3: var10008 = 32; break; default: var10008 = 119; } var4[var10006] = (char)(var10007 ^ var10008); ++var1; if (var2 == 0) { var10006 = var2; var4 = var10004; } else { if (var2 <= var1) { var10000[4] = (new String(var10004)).intern(); e = var10000; return; } var4 = var10004; var10006 = var1; } } } var4 = var10004; var10006 = var1; var10007 = var10004[var1]; switch(var1 % 5) { case 0: var10008 = 113; break; case 1: var10008 = 123; break; case 2: var10008 = 7; break; case 3: var10008 = 32; break; default: var10008 = 119; } } } } } var4 = var10003; var10006 = var1; var10007 = var10003[var1]; switch(var1 % 5) { case 0: var10008 = 113; break; case 1: var10008 = 123; break; case 
2: var10008 = 7; break; case 3: var10008 = 32; break; default: var10008 = 119; } } while(true) { while(true) { var4[var10006] = (char)(var10007 ^ var10008); ++var1; if (var2 == 0) { var10006 = var2; var4 = var10004; var10007 = var10004[var2]; switch(var1 % 5) { case 0: var10008 = 113; break; case 1: var10008 = 123; break; case 2: var10008 = 7; break; case 3: var10008 = 32; break; default: var10008 = 119; } } else { if (var2 <= var1) { label129: { var10000[0] = (new String(var10004)).intern(); var10003 = "\u001e\tnG\u001e\u001f".toCharArray(); var10005 = var10003.length; var1 = 0; var10004 = var10003; var2 = var10005; if (var10005 <= 1) { var4 = var10003; var10006 = var1; } else { var10004 = var10003; var2 = var10005; if (var10005 <= var1) { break label129; } var4 = var10003; var10006 = var1; } while(true) { var10007 = var4[var10006]; switch(var1 % 5) { case 0: var10008 = 113; break; case 1: var10008 = 123; break; case 2: var10008 = 7; break; case 3: var10008 = 32; break; default: var10008 = 119; } var4[var10006] = (char)(var10007 ^ var10008); ++var1; if (var2 == 0) { var10006 = var2; var4 = var10004; } else { if (var2 <= var1) { break; } var4 = var10004; var10006 = var1; } } } var10000[1] = (new String(var10004)).intern(); var10003 = "K[".toCharArray(); var10005 = var10003.length; var1 = 0; var10004 = var10003; var2 = var10005; if (var10005 <= 1) { var4 = var10003; var10006 = var1; var10007 = var10003[var1]; switch(var1 % 5) { case 0: var10008 = 113; break; case 1: var10008 = 123; break; case 2: var10008 = 7; break; case 3: var10008 = 32; break; default: var10008 = 119; } } else { var10004 = var10003; var2 = var10005; if (var10005 <= var1) { label173: { var10000[2] = (new String(var10003)).intern(); var10003 = "QF'".toCharArray(); var10005 = var10003.length; var1 = 0; var10004 = var10003; var2 = var10005; if (var10005 <= 1) { var4 = var10003; var10006 = var1; } else { var10004 = var10003; var2 = var10005; if (var10005 <= var1) { break label173; } var4 = var10003; 
var10006 = var1; } while(true) { var10007 = var4[var10006]; switch(var1 % 5) { case 0: var10008 = 113; break; case 1: var10008 = 123; break; case 2: var10008 = 7; break; case 3: var10008 = 32; break; default: var10008 = 119; } var4[var10006] = (char)(var10007 ^ var10008); ++var1; if (var2 == 0) { var10006 = var2; var4 = var10004; } else { if (var2 <= var1) { break; } var4 = var10004; var10006 = var1; } } } var10000[3] = (new String(var10004)).intern(); var10003 = "][".toCharArray(); var10005 = var10003.length; var1 = 0; var10004 = var10003; var2 = var10005; if (var10005 <= 1) { var4 = var10003; var10006 = var1; } else { var10004 = var10003; var2 = var10005; if (var10005 <= var1) { var10000[4] = (new String(var10003)).intern(); e = var10000; return; } var4 = var10003; var10006 = var1; } while(true) { var10007 = var4[var10006]; switch(var1 % 5) { case 0: var10008 = 113; break; case 1: var10008 = 123; break; case 2: var10008 = 7; break; case 3: var10008 = 32; break; default: var10008 = 119; } var4[var10006] = (char)(var10007 ^ var10008); ++var1; if (var2 == 0) { var10006 = var2; var4 = var10004; } else { if (var2 <= var1) { var10000[4] = (new String(var10004)).intern(); e = var10000; return; } var4 = var10004; var10006 = var1; } } } var4 = var10003; var10006 = var1; var10007 = var10003[var1]; switch(var1 % 5) { case 0: var10008 = 113; break; case 1: var10008 = 123; break; case 2: var10008 = 7; break; case 3: var10008 = 32; break; default: var10008 = 119; } } while(true) { while(true) { var4[var10006] = (char)(var10007 ^ var10008); ++var1; if (var2 == 0) { var10006 = var2; var4 = var10004; var10007 = var10004[var2]; switch(var1 % 5) { case 0: var10008 = 113; break; case 1: var10008 = 123; break; case 2: var10008 = 7; break; case 3: var10008 = 32; break; default: var10008 = 119; } } else { if (var2 <= var1) { label93: { var10000[2] = (new String(var10004)).intern(); var10003 = "QF'".toCharArray(); var10005 = var10003.length; var1 = 0; var10004 = var10003; var2 = 
var10005; if (var10005 <= 1) { var4 = var10003; var10006 = var1; } else { var10004 = var10003; var2 = var10005; if (var10005 <= var1) { break label93; } var4 = var10003; var10006 = var1; } while(true) { var10007 = var4[var10006]; switch(var1 % 5) { case 0: var10008 = 113; break; case 1: var10008 = 123; break; case 2: var10008 = 7; break; case 3: var10008 = 32; break; default: var10008 = 119; } var4[var10006] = (char)(var10007 ^ var10008); ++var1; if (var2 == 0) { var10006 = var2; var4 = var10004; } else { if (var2 <= var1) { break; } var4 = var10004; var10006 = var1; } } } var10000[3] = (new String(var10004)).intern(); var10003 = "][".toCharArray(); var10005 = var10003.length; var1 = 0; var10004 = var10003; var2 = var10005; if (var10005 <= 1) { var4 = var10003; var10006 = var1; } else { var10004 = var10003; var2 = var10005; if (var10005 <= var1) { var10000[4] = (new String(var10003)).intern(); e = var10000; return; } var4 = var10003; var10006 = var1; } while(true) { var10007 = var4[var10006]; switch(var1 % 5) { case 0: var10008 = 113; break; case 1: var10008 = 123; break; case 2: var10008 = 7; break; case 3: var10008 = 32; break; default: var10008 = 119; } var4[var10006] = (char)(var10007 ^ var10008); ++var1; if (var2 == 0) { var10006 = var2; var4 = var10004; } else { if (var2 <= var1) { var10000[4] = (new String(var10004)).intern(); e = var10000; return; } var4 = var10004; var10006 = var1; } } } var4 = var10004; var10006 = var1; var10007 = var10004[var1]; switch(var1 % 5) { case 0: var10008 = 113; break; case 1: var10008 = 123; break; case 2: var10008 = 7; break; case 3: var10008 = 32; break; default: var10008 = 119; } } } } } var4 = var10004; var10006 = var1; var10007 = var10004[var1]; switch(var1 % 5) { case 0: var10008 = 113; break; case 1: var10008 = 123; break; case 2: var10008 = 7; break; case 3: var10008 = 32; break; default: var10008 = 119; } } } } } }
package com.nsysmon.measure.jdbc; import com.nsysmon.measure.ACollectingMeasurement; import java.io.InputStream; import java.io.Reader; import java.math.BigDecimal; import java.net.URL; import java.sql.*; import java.util.Calendar; import java.util.Map; /** * @author arno */ public class NSysMonResultSet implements ResultSet { private final ResultSet inner; private final Statement stmt; private final ACollectingMeasurement m; public NSysMonResultSet(ResultSet inner, Statement stmt, ACollectingMeasurement m) { this.inner = inner; this.stmt = stmt; this.m = m; } //---------------------- Wrapper interface @Override public <T> T unwrap(Class<T> iface) throws SQLException { return inner.unwrap(iface); //TODO dynamic proxy? } @Override public boolean isWrapperFor(Class<?> iface) throws SQLException { return inner.isWrapperFor(iface); } //-------------------------- misc @Override public Statement getStatement() throws SQLException { return stmt; } //---------------------- navigation @Override public boolean next() throws SQLException { m.startDetail("result set access"); try { return inner.next(); } finally { m.finishDetail(); } } @Override public void beforeFirst() throws SQLException { m.startDetail("result set access"); try { inner.beforeFirst(); } finally { m.finishDetail(); } } @Override public void afterLast() throws SQLException { m.startDetail("result set access"); try { inner.afterLast(); } finally { m.finishDetail(); } } @Override public boolean first() throws SQLException { m.startDetail("result set access"); try { return inner.first(); } finally { m.finishDetail(); } } @Override public boolean last() throws SQLException { m.startDetail("result set access"); try { return inner.last(); } finally { m.finishDetail(); } } @Override public boolean absolute(int row) throws SQLException { m.startDetail("result set access"); try { return inner.absolute(row); } finally { m.finishDetail(); } } @Override public boolean relative(int rows) throws SQLException { 
// NOTE(review): this chunk begins inside the enclosing monitored-ResultSet
// wrapper class. The class declaration, the delegate field 'inner'
// (java.sql.ResultSet) and the measurement handle 'm' are declared above this
// view, as is the signature of the first method below — presumably
// relative(int rows); TODO confirm against the preceding lines.
    // Tail of the cursor-movement method: the delegated call is bracketed by
    // startDetail/finishDetail so the monitor attributes the elapsed time to
    // "result set access".
    m.startDetail("result set access");
        try {
            return inner.relative(rows);
        } finally {
            m.finishDetail();
        }
    }

    /** Moves the cursor back one row; the access is timed as a monitor detail. */
    @Override
    public boolean previous() throws SQLException {
        m.startDetail("result set access");
        try {
            return inner.previous();
        } finally {
            m.finishDetail();
        }
    }

    //----------------- ignored by NSysMon

    /** Ends the measurement for this result set, then closes the delegate. */
    @Override
    public void close() throws SQLException {
        m.finish();
        inner.close();
    }

    // ------------------------------------------------------------------
    // Everything below is plain, unmonitored delegation to the wrapped
    // ResultSet. One method per line to keep the boilerplate scannable;
    // no behavior of its own.
    // ------------------------------------------------------------------

    @Override public boolean wasNull() throws SQLException { return inner.wasNull(); }
    @Override public String getString(int columnIndex) throws SQLException { return inner.getString(columnIndex); }
    @Override public boolean getBoolean(int columnIndex) throws SQLException { return inner.getBoolean(columnIndex); }
    @Override public byte getByte(int columnIndex) throws SQLException { return inner.getByte(columnIndex); }
    @Override public short getShort(int columnIndex) throws SQLException { return inner.getShort(columnIndex); }
    @Override public int getInt(int columnIndex) throws SQLException { return inner.getInt(columnIndex); }
    @Override public long getLong(int columnIndex) throws SQLException { return inner.getLong(columnIndex); }
    @Override public float getFloat(int columnIndex) throws SQLException { return inner.getFloat(columnIndex); }
    @Override public double getDouble(int columnIndex) throws SQLException { return inner.getDouble(columnIndex); }
    // deprecated in JDBC itself; kept only to satisfy the interface
    @Override @Deprecated public BigDecimal getBigDecimal(int columnIndex, int scale) throws SQLException { return inner.getBigDecimal(columnIndex, scale); }
    @Override public byte[] getBytes(int columnIndex) throws SQLException { return inner.getBytes(columnIndex); }
    @Override public Date getDate(int columnIndex) throws SQLException { return inner.getDate(columnIndex); }
    @Override public Time getTime(int columnIndex) throws SQLException { return inner.getTime(columnIndex); }
    @Override public Timestamp getTimestamp(int columnIndex) throws SQLException { return inner.getTimestamp(columnIndex); }
    @Override public InputStream getAsciiStream(int columnIndex) throws SQLException { return inner.getAsciiStream(columnIndex); }
    @Override @Deprecated public InputStream getUnicodeStream(int columnIndex) throws SQLException { return inner.getUnicodeStream(columnIndex); }
    @Override public InputStream getBinaryStream(int columnIndex) throws SQLException { return inner.getBinaryStream(columnIndex); }
    @Override public String getString(String columnLabel) throws SQLException { return inner.getString(columnLabel); }
    @Override public boolean getBoolean(String columnLabel) throws SQLException { return inner.getBoolean(columnLabel); }
    @Override public byte getByte(String columnLabel) throws SQLException { return inner.getByte(columnLabel); }
    @Override public short getShort(String columnLabel) throws SQLException { return inner.getShort(columnLabel); }
    @Override public int getInt(String columnLabel) throws SQLException { return inner.getInt(columnLabel); }
    @Override public long getLong(String columnLabel) throws SQLException { return inner.getLong(columnLabel); }
    @Override public float getFloat(String columnLabel) throws SQLException { return inner.getFloat(columnLabel); }
    @Override public double getDouble(String columnLabel) throws SQLException { return inner.getDouble(columnLabel); }
    @Override @Deprecated public BigDecimal getBigDecimal(String columnLabel, int scale) throws SQLException { return inner.getBigDecimal(columnLabel, scale); }
    @Override public byte[] getBytes(String columnLabel) throws SQLException { return inner.getBytes(columnLabel); }
    @Override public Date getDate(String columnLabel) throws SQLException { return inner.getDate(columnLabel); }
    @Override public Time getTime(String columnLabel) throws SQLException { return inner.getTime(columnLabel); }
    @Override public Timestamp getTimestamp(String columnLabel) throws SQLException { return inner.getTimestamp(columnLabel); }
    @Override public InputStream getAsciiStream(String columnLabel) throws SQLException { return inner.getAsciiStream(columnLabel); }
    @Override @Deprecated public InputStream getUnicodeStream(String columnLabel) throws SQLException { return inner.getUnicodeStream(columnLabel); }
    @Override public InputStream getBinaryStream(String columnLabel) throws SQLException { return inner.getBinaryStream(columnLabel); }
    @Override public SQLWarning getWarnings() throws SQLException { return inner.getWarnings(); }
    @Override public void clearWarnings() throws SQLException { inner.clearWarnings(); }
    @Override public String getCursorName() throws SQLException { return inner.getCursorName(); }
    @Override public ResultSetMetaData getMetaData() throws SQLException { return inner.getMetaData(); }
    @Override public Object getObject(int columnIndex) throws SQLException { return inner.getObject(columnIndex); }
    @Override public Object getObject(String columnLabel) throws SQLException { return inner.getObject(columnLabel); }
    @Override public int findColumn(String columnLabel) throws SQLException { return inner.findColumn(columnLabel); }
    @Override public Reader getCharacterStream(int columnIndex) throws SQLException { return inner.getCharacterStream(columnIndex); }
    @Override public Reader getCharacterStream(String columnLabel) throws SQLException { return inner.getCharacterStream(columnLabel); }
    @Override public BigDecimal getBigDecimal(int columnIndex) throws SQLException { return inner.getBigDecimal(columnIndex); }
    @Override public BigDecimal getBigDecimal(String columnLabel) throws SQLException { return inner.getBigDecimal(columnLabel); }
    @Override public boolean isBeforeFirst() throws SQLException { return inner.isBeforeFirst(); }
    @Override public boolean isAfterLast() throws SQLException { return inner.isAfterLast(); }
    @Override public boolean isFirst() throws SQLException { return inner.isFirst(); }
    @Override public boolean isLast() throws SQLException { return inner.isLast(); }
    @Override public int getRow() throws SQLException { return inner.getRow(); }
    @Override public void setFetchDirection(int direction) throws SQLException { inner.setFetchDirection(direction); }
    @Override public int getFetchDirection() throws SQLException { return inner.getFetchDirection(); }
    @Override public void setFetchSize(int rows) throws SQLException { inner.setFetchSize(rows); }
    @Override public int getFetchSize() throws SQLException { return inner.getFetchSize(); }
    @Override public int getType() throws SQLException { return inner.getType(); }
    @Override public int getConcurrency() throws SQLException { return inner.getConcurrency(); }
    @Override public boolean rowUpdated() throws SQLException { return inner.rowUpdated(); }
    @Override public boolean rowInserted() throws SQLException { return inner.rowInserted(); }
    @Override public boolean rowDeleted() throws SQLException { return inner.rowDeleted(); }
    @Override public void updateNull(int columnIndex) throws SQLException { inner.updateNull(columnIndex); }
    @Override public void updateBoolean(int columnIndex, boolean x) throws SQLException { inner.updateBoolean(columnIndex, x); }
    @Override public void updateByte(int columnIndex, byte x) throws SQLException { inner.updateByte(columnIndex, x); }
    @Override public void updateShort(int columnIndex, short x) throws SQLException { inner.updateShort(columnIndex, x); }
    @Override public void updateInt(int columnIndex, int x) throws SQLException { inner.updateInt(columnIndex, x); }
    @Override public void updateLong(int columnIndex, long x) throws SQLException { inner.updateLong(columnIndex, x); }
    @Override public void updateFloat(int columnIndex, float x) throws SQLException { inner.updateFloat(columnIndex, x); }
    @Override public void updateDouble(int columnIndex, double x) throws SQLException { inner.updateDouble(columnIndex, x); }
    @Override public void updateBigDecimal(int columnIndex, BigDecimal x) throws SQLException { inner.updateBigDecimal(columnIndex, x); }
    @Override public void updateString(int columnIndex, String x) throws SQLException { inner.updateString(columnIndex, x); }
    @Override public void updateBytes(int columnIndex, byte[] x) throws SQLException { inner.updateBytes(columnIndex, x); }
    @Override public void updateDate(int columnIndex, Date x) throws SQLException { inner.updateDate(columnIndex, x); }
    @Override public void updateTime(int columnIndex, Time x) throws SQLException { inner.updateTime(columnIndex, x); }
    @Override public void updateTimestamp(int columnIndex, Timestamp x) throws SQLException { inner.updateTimestamp(columnIndex, x); }
    @Override public void updateAsciiStream(int columnIndex, InputStream x, int length) throws SQLException { inner.updateAsciiStream(columnIndex, x, length); }
    @Override public void updateBinaryStream(int columnIndex, InputStream x, int length) throws SQLException { inner.updateBinaryStream(columnIndex, x, length); }
    @Override public void updateCharacterStream(int columnIndex, Reader x, int length) throws SQLException { inner.updateCharacterStream(columnIndex, x, length); }
    @Override public void updateObject(int columnIndex, Object x, int scaleOrLength) throws SQLException { inner.updateObject(columnIndex, x, scaleOrLength); }
    @Override public void updateObject(int columnIndex, Object x) throws SQLException { inner.updateObject(columnIndex, x); }
    @Override public void updateNull(String columnLabel) throws SQLException { inner.updateNull(columnLabel); }
    @Override public void updateBoolean(String columnLabel, boolean x) throws SQLException { inner.updateBoolean(columnLabel, x); }
    @Override public void updateByte(String columnLabel, byte x) throws SQLException { inner.updateByte(columnLabel, x); }
    @Override public void updateShort(String columnLabel, short x) throws SQLException { inner.updateShort(columnLabel, x); }
    @Override public void updateInt(String columnLabel, int x) throws SQLException { inner.updateInt(columnLabel, x); }
    @Override public void updateLong(String columnLabel, long x) throws SQLException { inner.updateLong(columnLabel, x); }
    @Override public void updateFloat(String columnLabel, float x) throws SQLException { inner.updateFloat(columnLabel, x); }
    @Override public void updateDouble(String columnLabel, double x) throws SQLException { inner.updateDouble(columnLabel, x); }
    @Override public void updateBigDecimal(String columnLabel, BigDecimal x) throws SQLException { inner.updateBigDecimal(columnLabel, x); }
    @Override public void updateString(String columnLabel, String x) throws SQLException { inner.updateString(columnLabel, x); }
    @Override public void updateBytes(String columnLabel, byte[] x) throws SQLException { inner.updateBytes(columnLabel, x); }
    @Override public void updateDate(String columnLabel, Date x) throws SQLException { inner.updateDate(columnLabel, x); }
    @Override public void updateTime(String columnLabel, Time x) throws SQLException { inner.updateTime(columnLabel, x); }
    @Override public void updateTimestamp(String columnLabel, Timestamp x) throws SQLException { inner.updateTimestamp(columnLabel, x); }
    @Override public void updateAsciiStream(String columnLabel, InputStream x, int length) throws SQLException { inner.updateAsciiStream(columnLabel, x, length); }
    @Override public void updateBinaryStream(String columnLabel, InputStream x, int length) throws SQLException { inner.updateBinaryStream(columnLabel, x, length); }
    @Override public void updateCharacterStream(String columnLabel, Reader reader, int length) throws SQLException { inner.updateCharacterStream(columnLabel, reader, length); }
    @Override public void updateObject(String columnLabel, Object x, int scaleOrLength) throws SQLException { inner.updateObject(columnLabel, x, scaleOrLength); }
    @Override public void updateObject(String columnLabel, Object x) throws SQLException { inner.updateObject(columnLabel, x); }
    @Override public void insertRow() throws SQLException { inner.insertRow(); }
    @Override public void updateRow() throws SQLException { inner.updateRow(); }
    @Override public void deleteRow() throws SQLException { inner.deleteRow(); }
    @Override public void refreshRow() throws SQLException { inner.refreshRow(); }
    @Override public void cancelRowUpdates() throws SQLException { inner.cancelRowUpdates(); }
    @Override public void moveToInsertRow() throws SQLException { inner.moveToInsertRow(); }
    @Override public void moveToCurrentRow() throws SQLException { inner.moveToCurrentRow(); }
    @Override public Object getObject(int columnIndex, Map<String, Class<?>> map) throws SQLException { return inner.getObject(columnIndex, map); }
    @Override public Ref getRef(int columnIndex) throws SQLException { return inner.getRef(columnIndex); }
    @Override public Blob getBlob(int columnIndex) throws SQLException { return inner.getBlob(columnIndex); }
    @Override public Clob getClob(int columnIndex) throws SQLException { return inner.getClob(columnIndex); }
    @Override public Array getArray(int columnIndex) throws SQLException { return inner.getArray(columnIndex); }
    @Override public Object getObject(String columnLabel, Map<String, Class<?>> map) throws SQLException { return inner.getObject(columnLabel, map); }
    @Override public Ref getRef(String columnLabel) throws SQLException { return inner.getRef(columnLabel); }
    @Override public Blob getBlob(String columnLabel) throws SQLException { return inner.getBlob(columnLabel); }
    @Override public Clob getClob(String columnLabel) throws SQLException { return inner.getClob(columnLabel); }
    @Override public Array getArray(String columnLabel) throws SQLException { return inner.getArray(columnLabel); }
    @Override public Date getDate(int columnIndex, Calendar cal) throws SQLException { return inner.getDate(columnIndex, cal); }
    @Override public Date getDate(String columnLabel, Calendar cal) throws SQLException { return inner.getDate(columnLabel, cal); }
    @Override public Time getTime(int columnIndex, Calendar cal) throws SQLException { return inner.getTime(columnIndex, cal); }
    @Override public Time getTime(String columnLabel, Calendar cal) throws SQLException { return inner.getTime(columnLabel, cal); }
    @Override public Timestamp getTimestamp(int columnIndex, Calendar cal) throws SQLException { return inner.getTimestamp(columnIndex, cal); }
    @Override public Timestamp getTimestamp(String columnLabel, Calendar cal) throws SQLException { return inner.getTimestamp(columnLabel, cal); }
    @Override public URL getURL(int columnIndex) throws SQLException { return inner.getURL(columnIndex); }
    @Override public URL getURL(String columnLabel) throws SQLException { return inner.getURL(columnLabel); }
    @Override public void updateRef(int columnIndex, Ref x) throws SQLException { inner.updateRef(columnIndex, x); }
    @Override public void updateRef(String columnLabel, Ref x) throws SQLException { inner.updateRef(columnLabel, x); }
    @Override public void updateBlob(int columnIndex, Blob x) throws SQLException { inner.updateBlob(columnIndex, x); }
    @Override public void updateBlob(String columnLabel, Blob x) throws SQLException { inner.updateBlob(columnLabel, x); }
    @Override public void updateClob(int columnIndex, Clob x) throws SQLException { inner.updateClob(columnIndex, x); }
    @Override public void updateClob(String columnLabel, Clob x) throws SQLException { inner.updateClob(columnLabel, x); }
    @Override public void updateArray(int columnIndex, Array x) throws SQLException { inner.updateArray(columnIndex, x); }
    @Override public void updateArray(String columnLabel, Array x) throws SQLException { inner.updateArray(columnLabel, x); }
    @Override public RowId getRowId(int columnIndex) throws SQLException { return inner.getRowId(columnIndex); }
    @Override public RowId getRowId(String columnLabel) throws SQLException { return inner.getRowId(columnLabel); }
    @Override public void updateRowId(int columnIndex, RowId x) throws SQLException { inner.updateRowId(columnIndex, x); }
    @Override public void updateRowId(String columnLabel, RowId x) throws SQLException { inner.updateRowId(columnLabel, x); }
    @Override public int getHoldability() throws SQLException { return inner.getHoldability(); }
    @Override public boolean isClosed() throws SQLException { return inner.isClosed(); }
    @Override public void updateNString(int columnIndex, String nString) throws SQLException { inner.updateNString(columnIndex, nString); }
    @Override public void updateNString(String columnLabel, String nString) throws SQLException { inner.updateNString(columnLabel, nString); }
    @Override public void updateNClob(int columnIndex, NClob nClob) throws SQLException { inner.updateNClob(columnIndex, nClob); }
    @Override public void updateNClob(String columnLabel, NClob nClob) throws SQLException { inner.updateNClob(columnLabel, nClob); }
    @Override public NClob getNClob(int columnIndex) throws SQLException { return inner.getNClob(columnIndex); }
    @Override public NClob getNClob(String columnLabel) throws SQLException { return inner.getNClob(columnLabel); }
    @Override public SQLXML getSQLXML(int columnIndex) throws SQLException { return inner.getSQLXML(columnIndex); }
    @Override public SQLXML getSQLXML(String columnLabel) throws SQLException { return inner.getSQLXML(columnLabel); }
    @Override public void updateSQLXML(int columnIndex, SQLXML xmlObject) throws SQLException { inner.updateSQLXML(columnIndex, xmlObject); }
    @Override public void updateSQLXML(String columnLabel, SQLXML xmlObject) throws SQLException { inner.updateSQLXML(columnLabel, xmlObject); }
    @Override public String getNString(int columnIndex) throws SQLException { return inner.getNString(columnIndex); }
    @Override public String getNString(String columnLabel) throws SQLException { return inner.getNString(columnLabel); }
    @Override public Reader getNCharacterStream(int columnIndex) throws SQLException { return inner.getNCharacterStream(columnIndex); }
    @Override public Reader getNCharacterStream(String columnLabel) throws SQLException { return inner.getNCharacterStream(columnLabel); }
    @Override public void updateNCharacterStream(int columnIndex, Reader x, long length) throws SQLException { inner.updateNCharacterStream(columnIndex, x, length); }
    @Override public void updateNCharacterStream(String columnLabel, Reader reader, long length) throws SQLException { inner.updateNCharacterStream(columnLabel, reader, length); }
    @Override public void updateAsciiStream(int columnIndex, InputStream x, long length) throws SQLException { inner.updateAsciiStream(columnIndex, x, length); }
    @Override public void updateBinaryStream(int columnIndex, InputStream x, long length) throws SQLException { inner.updateBinaryStream(columnIndex, x, length); }
    @Override public void updateCharacterStream(int columnIndex, Reader x, long length) throws SQLException { inner.updateCharacterStream(columnIndex, x, length); }
    @Override public void updateAsciiStream(String columnLabel, InputStream x, long length) throws SQLException { inner.updateAsciiStream(columnLabel, x, length); }
    @Override public void updateBinaryStream(String columnLabel, InputStream x, long length) throws SQLException { inner.updateBinaryStream(columnLabel, x, length); }
    @Override public void updateCharacterStream(String columnLabel, Reader reader, long length) throws SQLException { inner.updateCharacterStream(columnLabel, reader, length); }
    @Override public void updateBlob(int columnIndex, InputStream inputStream, long length) throws SQLException { inner.updateBlob(columnIndex, inputStream, length); }
    @Override public void updateBlob(String columnLabel, InputStream inputStream, long length) throws SQLException { inner.updateBlob(columnLabel, inputStream, length); }
    @Override public void updateClob(int columnIndex, Reader reader, long length) throws SQLException { inner.updateClob(columnIndex, reader, length); }
    @Override public void updateClob(String columnLabel, Reader reader, long length) throws SQLException { inner.updateClob(columnLabel, reader, length); }
    @Override public void updateNClob(int columnIndex, Reader reader, long length) throws SQLException { inner.updateNClob(columnIndex, reader, length); }
    @Override public void updateNClob(String columnLabel, Reader reader, long length) throws SQLException { inner.updateNClob(columnLabel, reader, length); }
    @Override public void updateNCharacterStream(int columnIndex, Reader x) throws SQLException { inner.updateNCharacterStream(columnIndex, x); }
    @Override public void updateNCharacterStream(String columnLabel, Reader reader) throws SQLException { inner.updateNCharacterStream(columnLabel, reader); }
    @Override public void updateAsciiStream(int columnIndex, InputStream x) throws SQLException { inner.updateAsciiStream(columnIndex, x); }
    @Override public void updateBinaryStream(int columnIndex, InputStream x) throws SQLException { inner.updateBinaryStream(columnIndex, x); }
    @Override public void updateCharacterStream(int columnIndex, Reader x) throws SQLException { inner.updateCharacterStream(columnIndex, x); }
    @Override public void updateAsciiStream(String columnLabel, InputStream x) throws SQLException { inner.updateAsciiStream(columnLabel, x); }
    @Override public void updateBinaryStream(String columnLabel, InputStream x) throws SQLException { inner.updateBinaryStream(columnLabel, x); }
    @Override public void updateCharacterStream(String columnLabel, Reader reader) throws SQLException { inner.updateCharacterStream(columnLabel, reader); }
    @Override public void updateBlob(int columnIndex, InputStream inputStream) throws SQLException { inner.updateBlob(columnIndex, inputStream); }
    @Override public void updateBlob(String columnLabel, InputStream inputStream) throws SQLException { inner.updateBlob(columnLabel, inputStream); }
    @Override public void updateClob(int columnIndex, Reader reader) throws SQLException { inner.updateClob(columnIndex, reader); }
    @Override public void updateClob(String columnLabel, Reader reader) throws SQLException { inner.updateClob(columnLabel, reader); }
    @Override public void updateNClob(int columnIndex, Reader reader) throws SQLException { inner.updateNClob(columnIndex, reader); }
    @Override public void updateNClob(String columnLabel, Reader reader) throws SQLException { inner.updateNClob(columnLabel, reader); }

    // introduced with JDK 1.7 --> no @Override to maintain compatibility with JDK 1.6
    public <T> T getObject(int columnIndex, Class<T> type) throws SQLException { return inner.getObject(columnIndex, type); }
    public <T> T getObject(String columnLabel, Class<T> type) throws SQLException { return inner.getObject(columnLabel, type); }
}
//
//  ========================================================================
//  Copyright (c) 1995-2014 Mort Bay Consulting Pty. Ltd.
//  ------------------------------------------------------------------------
//  All rights reserved. This program and the accompanying materials
//  are made available under the terms of the Eclipse Public License v1.0
//  and Apache License v2.0 which accompanies this distribution.
//
//      The Eclipse Public License is available at
//      http://www.eclipse.org/legal/epl-v10.html
//
//      The Apache License v2.0 is available at
//      http://www.opensource.org/licenses/apache2.0.php
//
//  You may elect to redistribute this code under either of these licenses.
//  ========================================================================
//

package org.eclipse.jetty.websocket.server.pathmap;

import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.TimeUnit;

import org.eclipse.jetty.http.PathMap;
import org.eclipse.jetty.toolchain.test.AdvancedRunner;
import org.eclipse.jetty.toolchain.test.annotation.Stress;
import org.eclipse.jetty.util.log.Log;
import org.eclipse.jetty.util.log.Logger;
import org.junit.Test;
import org.junit.runner.RunWith;

/**
 * Multi-threaded benchmark comparing the legacy jetty-http {@link PathMap}
 * with the newer websocket {@code PathMappings} under concurrent path lookups.
 * Both tests use the same lookup table and the same barrier choreography so
 * the logged timings are directly comparable.
 */
@RunWith(AdvancedRunner.class)
public class PathMappingsBenchmarkTest
{
    /**
     * Worker thread that resolves every entry of {@code LOOKUPS} for a fixed
     * number of iterations, tallying expected vs. unexpected results. The
     * shared {@link CyclicBarrier} is awaited once before and once after the
     * loop so the measuring thread times only the lookup work.
     */
    public static abstract class AbstractPathMapThread extends Thread
    {
        private int iterations;
        private CyclicBarrier barrier;
        // counters are written but only the elapsed time is reported; the
        // fields exist to keep the JIT from eliding the lookup results
        @SuppressWarnings("unused")
        private long success;
        @SuppressWarnings("unused")
        private long error;

        public AbstractPathMapThread(int iterations, CyclicBarrier barrier)
        {
            this.iterations = iterations;
            this.barrier = barrier;
        }

        /** Resolves {@code path} via the implementation under test. */
        public abstract String getMatchedResource(String path);

        @Override
        public void run()
        {
            int llen = LOOKUPS.length;
            String path;
            String expectedResource;
            String matchedResource;
            await(barrier); // rendezvous: all workers start together
            for (int iter = 0; iter < iterations; iter++)
            {
                for (int li = 0; li < llen; li++)
                {
                    path = LOOKUPS[li][0];
                    expectedResource = LOOKUPS[li][1];
                    matchedResource = getMatchedResource(path);
                    if (matchedResource.equals(expectedResource))
                    {
                        success++;
                    }
                    else
                    {
                        error++;
                    }
                }
            }
            await(barrier); // rendezvous: signal completion to the timer
        }
    }

    /** Worker using the legacy jetty-http {@link PathMap}. */
    public static class PathMapMatchThread extends AbstractPathMapThread
    {
        private PathMap<String> pathmap;

        public PathMapMatchThread(PathMap<String> pathmap, int iters, CyclicBarrier barrier)
        {
            super(iters,barrier);
            this.pathmap = pathmap;
        }

        @Override
        public String getMatchedResource(String path)
        {
            return pathmap.getMatch(path).getValue();
        }
    }

    /** Worker using the newer websocket {@code PathMappings}. */
    public static class PathMatchThread extends AbstractPathMapThread
    {
        private PathMappings<String> pathmap;

        public PathMatchThread(PathMappings<String> pathmap, int iters, CyclicBarrier barrier)
        {
            super(iters,barrier);
            this.pathmap = pathmap;
        }

        @Override
        public String getMatchedResource(String path)
        {
            return pathmap.getMatch(path).getResource();
        }
    }

    private static final Logger LOG = Log.getLogger(PathMappingsBenchmarkTest.class);
    /** Pairs of { lookup path, expected resource name }. */
    private static final String[][] LOOKUPS;
    private int runs = 20;
    private int threads = 200;
    private int iters = 10000;

    static
    {
        LOOKUPS = new String[][]
        {
        // @formatter:off
         { "/abs/path", "path" },
         { "/abs/path/longer","longpath" },
         { "/abs/path/foo","default" },
         { "/main.css","default" },
         { "/downloads/script.gz","gzipped" },
         { "/downloads/distribution.tar.gz","tarball" },
         { "/downloads/readme.txt","default" },
         { "/downloads/logs.tgz","default" },
         { "/animal/horse/mustang","animals" },
         { "/animal/bird/eagle/bald","birds" },
         { "/animal/fish/shark/hammerhead","fishes" },
         { "/animal/insect/ladybug","animals" },
        // @formatter:on
        };
    }

    /** Awaits the barrier, rethrowing any failure as an unchecked exception. */
    private static void await(CyclicBarrier barrier)
    {
        try
        {
            barrier.await();
        }
        catch (Exception x)
        {
            throw new RuntimeException(x);
        }
    }

    /** Benchmarks the legacy jetty-http PathMap with servlet-style specs. */
    @Stress("High CPU")
    @Test
    public void testServletPathMap()
    {
        // Setup (old) PathMap
        PathMap<String> p = new PathMap<>();
        p.put("/abs/path","path");
        p.put("/abs/path/longer","longpath");
        p.put("/animal/bird/*","birds");
        p.put("/animal/fish/*","fishes");
        p.put("/animal/*","animals");
        p.put("*.tar.gz","tarball");
        p.put("*.gz","gzipped");
        p.put("/","default");

        // barrier parties = workers + this measuring thread
        final CyclicBarrier barrier = new CyclicBarrier(threads + 1);
        for (int r = 0; r < runs; r++)
        {
            for (int t = 0; t < threads; t++)
            {
                PathMapMatchThread thread = new PathMapMatchThread(p,iters,barrier);
                thread.start();
            }
            await(barrier); // workers are ready; start the clock
            long begin = System.nanoTime();
            await(barrier); // workers finished; stop the clock
            long end = System.nanoTime();
            long elapsed = TimeUnit.NANOSECONDS.toMillis(end - begin);
            int totalMatches = threads * iters * LOOKUPS.length;
            LOG.info("jetty-http/PathMap (Servlet only) threads:{}/iters:{}/total-matches:{} => {} ms",threads,iters,totalMatches,elapsed);
        }
    }

    /** Benchmarks the new websocket PathMappings with the same specs. */
    @Stress("High CPU")
    @Test
    public void testServletPathMappings()
    {
        // Setup (new) PathMappings
        PathMappings<String> p = new PathMappings<>();
        p.put(new ServletPathSpec("/abs/path"),"path");
        p.put(new ServletPathSpec("/abs/path/longer"),"longpath");
        p.put(new ServletPathSpec("/animal/bird/*"),"birds");
        p.put(new ServletPathSpec("/animal/fish/*"),"fishes");
        p.put(new ServletPathSpec("/animal/*"),"animals");
        p.put(new ServletPathSpec("*.tar.gz"),"tarball");
        p.put(new ServletPathSpec("*.gz"),"gzipped");
        p.put(new ServletPathSpec("/"),"default");

        final CyclicBarrier barrier = new CyclicBarrier(threads + 1);
        for (int r = 0; r < runs; r++)
        {
            for (int t = 0; t < threads; t++)
            {
                PathMatchThread thread = new PathMatchThread(p,iters,barrier);
                thread.start();
            }
            await(barrier); // workers are ready; start the clock
            long begin = System.nanoTime();
            await(barrier); // workers finished; stop the clock
            long end = System.nanoTime();
            long elapsed = TimeUnit.NANOSECONDS.toMillis(end - begin);
            int totalMatches = threads * iters * LOOKUPS.length;
            LOG.info("jetty-websocket/PathMappings (Servlet only) threads:{}/iters:{}/total-matches:{} => {} ms",threads,iters,totalMatches,elapsed);
        }
    }
}
package exm.stc.frontend;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.SortedMap;
import java.util.TreeMap;

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ListMultimap;

import exm.stc.ast.SwiftAST;
import exm.stc.ast.antlr.ExMParser;
import exm.stc.common.exceptions.InvalidSyntaxException;
import exm.stc.common.exceptions.STCRuntimeError;
import exm.stc.common.exceptions.TypeMismatchException;
import exm.stc.common.exceptions.UserException;
import exm.stc.common.lang.ExecTarget;
import exm.stc.common.lang.FnID;
import exm.stc.common.lang.Types;
import exm.stc.common.lang.Types.FunctionType;
import exm.stc.common.lang.Types.Type;
import exm.stc.common.lang.Types.UnionType;
import exm.stc.common.lang.Var;
import exm.stc.frontend.tree.FunctionDecl;
import exm.stc.frontend.tree.InlineCode;
import exm.stc.frontend.tree.Literals;
import exm.stc.ic.STCMiddleEnd;
import exm.stc.tclbackend.TclOpTemplate;

/**
 * Manage wrapper functions: validates inline Tcl templates, saves wrapper
 * definitions, and lazily generates one concrete wrapper function per
 * (function, concrete type) combination, caching and reusing results.
 */
public class WrapperGen {

  private final STCMiddleEnd backend;

  /**
   * Saved wrappers
   */
  private final Map<FnID, SavedWrapper> saved =
                      new HashMap<FnID, SavedWrapper>();

  /**
   * Wrappers that have already been generated
   */
  private final ListMultimap<FnID, GeneratedWrapper> generated =
                                        ArrayListMultimap.create();

  /**
   * Used function names to avoid duplicates
   */
  private final Set<String> usedFunNames = new HashSet<String>();

  public WrapperGen(STCMiddleEnd backend) {
    this.backend = backend;
  }

  /**
   * Parses and validates an INLINE_TCL AST node into a Tcl template,
   * binding the declared in/out argument names (and the varargs name, if
   * any), and registers the function as its own local implementation.
   */
  public TclOpTemplate loadTclTemplate(Context context, FnID id,
      FunctionDecl fdecl, FunctionType ft, SwiftAST inlineTclTree)
      throws InvalidSyntaxException, UserException {
    assert(inlineTclTree.getType() == ExMParser.INLINE_TCL);
    // Reject argument types the template mechanism can't handle (still
    // allowing unresolved type vars at this stage: concreteType == false)
    checkInlineTclTypes(context, id, ft, false);
    TclOpTemplate inlineTcl;
    assert(inlineTclTree.getChildCount() == 1);
    String tclTemplateString =
        Literals.extractLiteralString(context, inlineTclTree.child(0));
    inlineTcl = InlineCode.templateFromString(context, tclTemplateString);
    List<String> inNames = fdecl.getInNames();
    inlineTcl.addInNames(inNames);
    if (ft.hasVarargs()) {
      // by convention the last declared input name receives the varargs
      inlineTcl.setVarArgIn(inNames.get(inNames.size() - 1));
    }
    inlineTcl.addOutNames(fdecl.getOutNames());
    inlineTcl.verifyNames(context);
    context.getForeignFunctions().addLocalImpl(id, id);
    return inlineTcl;
  }

  /**
   * Check that the compiler can handle TCL templates with these
   * argument types
   * @param context current frontend context (for error reporting)
   * @param id the function being checked
   * @param ftype the function type to validate
   * @param concreteType true if this is type with all polymorphism
   *                    removed
   * @throws TypeMismatchException if an in/out type is unsupported
   */
  private void checkInlineTclTypes(Context context, FnID id,
      FunctionType ftype, boolean concreteType) throws TypeMismatchException {
    for (Type in: ftype.getInputs()) {
      List<Type> alts = UnionType.getAlternatives(in);
      assert(!concreteType || alts.size() == 1) :
            "polymorphic type but concrete expected " + in;
      for (Type alt: alts) {
        if (Types.isPrimFuture(alt)) {
          // OK
        } else if (Types.isPrimUpdateable(alt)) {
          // OK
        } else if (Types.isContainer(alt) || Types.isStruct(alt)) {
          // OK: can store
          // TODO: check for recursive?
        } else if (!concreteType &&
            (Types.isWildcard(alt) || Types.isTypeVar(alt))) {
          // Defer checking until type parameters filled in
        } else {
          throw new TypeMismatchException(context, "Type " + alt.typeName() + " is"
              + " not currently supported as an input to inline TCL code"
              + " for function " + id.originalName());
        }
      }
    }
    for (Type out: ftype.getOutputs()) {
      List<Type> alts = UnionType.getAlternatives(out);
      assert(!concreteType || alts.size() == 1) :
            "polymorphic type but concrete expected " + out;
      for (Type alt: alts) {
        if (Types.isContainer(alt) || Types.isStruct(alt)) {
          // OK: can store
          // TODO: check for recursive?
        } else if (Types.isPrimUpdateable(alt)) {
          // OK: will pass in standard repr
        } else if (Types.isPrimFuture(alt)) {
          // OK
        } else if (!concreteType &&
            (Types.isWildcard(alt) || Types.isTypeVar(alt))) {
          // Defer checking until type parameters filled in
        } else {
          throw new TypeMismatchException(context, "Type " + alt.typeName() + " is"
              + " not currently supported as an out to inline TCL code"
              + " for function " + id.originalName());
        }
      }
    }
  }

  /**
   * Save a wrapper for future use.
   * @param context current frontend context
   * @param id the function id (must already have a local inline impl)
   * @param ft the declared (possibly polymorphic) function type
   * @param decl the parsed function declaration
   * @param taskMode execution target for the wrapped task
   * @param isParallel whether the wrapped function is parallel
   * @param isTargetable whether the task may be location-targeted
   */
  public void saveWrapper(Context context, FnID id, FunctionType ft,
      FunctionDecl decl, ExecTarget taskMode, boolean isParallel,
      boolean isTargetable) {
    assert(context.getForeignFunctions().hasLocalImpl(id)) :
          "Expected inline version for " + id;
    SavedWrapper wrapper = new SavedWrapper(id, ft, decl, taskMode,
                                    isParallel, isTargetable);
    saved.put(id, wrapper);
  }

  /**
   * Actually generate the wrapper for an invocation of a function with
   * specific argument types, returning a previously created one if this
   * has already been done.
   * @param context current frontend context
   * @param id the saved function to wrap
   * @param concrete the actual type of input and output vars
   * @throws UserException
   * @return the name of the function that should be called
   */
  public FnID generateWrapper(Context context, FnID id,
                              FunctionType concrete) throws UserException {
    SavedWrapper wrapper = saved.get(id);
    assert(wrapper != null) : "Unsaved wrapper " + id;
    // Reuse a previously generated wrapper with an identical concrete type
    for (GeneratedWrapper gen: generated.get(id)) {
      if (concrete.equals(gen.concrete)) {
        // We already generated one with the right type
        return gen.generatedID;
      }
    }
    return generateWrapper(context, wrapper, concrete);
  }

  /**
   * Generate a function that wraps some inline tcl
   * @return generated function name
   */
  private FnID generateWrapper(Context context, SavedWrapper wrapper,
      FunctionType concrete) throws UserException {
    if (wrapper.isParallel) {
      //TODO: figure out what output types are valid
      throw new STCRuntimeError("Don't support wrapping parallel functions yet");
    }

    // Use sorted map to order type vars by name
    SortedMap<String, Type> typeVarBindings = new TreeMap<String, Type>();
    // Track which variables are chosen for unions
    List<Type> unionBindings = new ArrayList<Type>();

    // fill in type vars if needed
    int nIn = concrete.getInputs().size();
    int nOut = concrete.getOutputs().size();
    List<Var> inVars = wrapper.decl.getInVars(context);
    assert(nIn == inVars.size());
    List<Var> outVars = wrapper.decl.getOutVars(context);
    assert(nOut == outVars.size());

    // Substitute the concrete types into the declared vars, collecting
    // type-var and union bindings as we go
    List<Var> concreteIn = new ArrayList<Var>(nIn);
    List<Var> concreteOut = new ArrayList<Var>(nOut);
    for (int i = 0; i < nIn; i++) {
      Var in = inVars.get(i);
      Type concreteT = concrete.getInputs().get(i);
      concreteIn.add(in.substituteType(concreteT));
      updateTypeInfo(typeVarBindings, unionBindings, in.type(), concreteT);
    }
    for (int i = 0; i < nOut; i++) {
      Var out = outVars.get(i);
      Type concreteT = concrete.getOutputs().get(i);
      concreteOut.add(out.substituteType(concreteT));
      updateTypeInfo(typeVarBindings, unionBindings, out.type(), concreteT);
    }

    // generate function name based on type
    FnID wrapperID = chooseWrapperID(context, wrapper.original,
                                     typeVarBindings, unionBindings);

    // Check concrete types that were substituted are ok
    checkInlineTclTypes(context, wrapper.original, concrete, true);

    backend.generateWrappedBuiltin(wrapperID, wrapper.original,
        VarRepr.backendFnType(concrete), VarRepr.backendVars(concreteOut),
        VarRepr.backendVars(concreteIn), wrapper.taskMode,
        wrapper.isParallel, wrapper.isTargetable);

    // Save for later use
    GeneratedWrapper genWrapper = new GeneratedWrapper(wrapperID, concrete);
    generated.put(wrapper.original, genWrapper);
    // Copy over template
    context.getForeignFunctions().addLocalImpl(wrapperID, wrapper.original);
    return wrapperID;
  }

  /**
   * Records how an abstract declared type was resolved to a concrete one:
   * for a union type, appends the chosen alternative to unionChoices; for
   * any other type, matches type variables and merges the bindings into
   * typeVarBindings (asserting consistency with earlier bindings).
   */
  private void updateTypeInfo(SortedMap<String, Type> typeVarBindings,
      List<Type> unionChoices, Type abstractT, Type concreteT) {
    // Can't match type vars for union
    if (Types.isUnion(abstractT)) {
      boolean found = false;
      for (Type opt: UnionType.getAlternatives(abstractT)) {
        if (opt.equals(concreteT)) {
          found = true;
        }
      }
      assert (found) : "No match for concrete type " + concreteT + " in " +
                       "union " + abstractT;
      unionChoices.add(concreteT);
    } else {
      Map<String, Type> b = abstractT.matchTypeVars(concreteT);
      assert(b != null) : abstractT + " " + concreteT;
      for (String tv: b.keySet()) {
        assert(!typeVarBindings.containsKey(tv) ||
                typeVarBindings.get(tv).equals(b.get(tv)));
      }
      typeVarBindings.putAll(b);
    }
  }

  /**
   * Builds a unique, collision-free id for the generated wrapper by
   * mangling the original name with the type-var and union bindings,
   * then copies the original function's properties onto the new id.
   * Returns the original id unchanged if the mangled name is identical.
   */
  private FnID chooseWrapperID(Context context, FnID originalID,
      SortedMap<String, Type> typeVarBindings, List<Type> unionBindings) {
    String prefix = originalID.uniqueName();
    // avoid clash with user functions by using invalid characters : and =
    for (Entry<String, Type> tv: typeVarBindings.entrySet()) {
      prefix += ":" + tv.getKey() + "=" + tv.getValue().typeName();
    }
    for (Type t: unionBindings) {
      prefix += ":" + t.typeName();
    }
    // Replace invalid characters, e.g. not valid in Tcl function name
    prefix = prefix.replaceAll("[\\[\\]]", "_");

    // append a numeric suffix until the name is unused
    int attempt = 0;
    String trial = prefix;
    while (usedFunNames.contains(trial)) {
      trial = prefix + ":" + attempt;
      attempt++;
    }
    usedFunNames.add(trial);
    if (trial.equals(originalID.uniqueName())) {
      return originalID;
    }

    FnID wrapperID = new FnID(trial, originalID.originalName());
    // Copy over properties of original
    context.getForeignFunctions().copyProperties(wrapperID, originalID);
    return wrapperID;
  }

  /**
   * Information required for generation of a wrapper.
   */
  static class SavedWrapper {
    final FnID original;
    final FunctionType type;
    final FunctionDecl decl;
    final ExecTarget taskMode;
    final boolean isParallel;
    final boolean isTargetable;

    public SavedWrapper(FnID original, FunctionType type, FunctionDecl decl,
        ExecTarget taskMode, boolean isParallel, boolean isTargetable) {
      this.original = original;
      this.type = type;
      this.decl = decl;
      this.taskMode = taskMode;
      this.isParallel = isParallel;
      this.isTargetable = isTargetable;
    }
  }

  /**
   * Information about a wrapper that has already been generated
   */
  static class GeneratedWrapper {
    final FnID generatedID;
    final FunctionType concrete;

    public GeneratedWrapper(FnID generatedID, FunctionType concrete) {
      this.generatedID = generatedID;
      this.concrete = concrete;
    }
  }
}
package com.pacoapp.paco.shared.model2; import java.io.Serializable; import java.util.ArrayList; import java.util.Date; import java.util.List; public class ExperimentDAOCore extends ModelBase implements Validatable, Serializable { public static final int APP_USAGE_BROWSER_HISTORY_DATA_COLLECTION = 1; public static final int LOCATION_DATA_COLLECTION = 2; public static final int PHONE_DETAILS = 3; public static final List<Integer> EXTRA_DATA_COLLECTION_DECLS = new ArrayList<Integer>(); static { EXTRA_DATA_COLLECTION_DECLS.add(APP_USAGE_BROWSER_HISTORY_DATA_COLLECTION); EXTRA_DATA_COLLECTION_DECLS.add(LOCATION_DATA_COLLECTION); EXTRA_DATA_COLLECTION_DECLS.add(PHONE_DETAILS); } protected String title; protected String description; protected String creator; protected String organization; protected String contactEmail; protected String contactPhone; protected String joinDate; protected Long id; protected String informedConsentForm; private Boolean recordPhoneDetails = false; private List<Integer> extraDataCollectionDeclarations; protected Boolean deleted = false; private Date earliestStartDate; private Date latestEndDate; public ExperimentDAOCore(Long id, String title, String description, String informedConsentForm, String creatorEmail, String joinDate, Boolean recordPhoneDetails, Boolean deleted2, List<Integer> extraDataCollectionDeclarationsList, String organization, String contactPhone, String contactEmail, Date earliestStartDate, Date latestEndDate) { super(); this.id = id; this.title = title; this.description = description; this.informedConsentForm = informedConsentForm; this.creator = creatorEmail; this.organization = organization; this.contactEmail = contactEmail; this.contactPhone = contactPhone; this.joinDate = joinDate; this.setRecordPhoneDetails(recordPhoneDetails); this.deleted = deleted != null ? 
deleted : false; this.extraDataCollectionDeclarations = ListMaker.paramOrNewList(extraDataCollectionDeclarationsList, Integer.class); this.earliestStartDate = earliestStartDate; this.latestEndDate = latestEndDate; } /** * */ public ExperimentDAOCore() { super(); this.extraDataCollectionDeclarations = new java.util.ArrayList(); } public String getTitle() { return title; } public void setTitle(String title) { this.title = title; } public String getDescription() { return description; } public void setDescription(String description) { this.description = description; } public String getInformedConsentForm() { return informedConsentForm; } public void setInformedConsentForm(String informedConsentForm) { this.informedConsentForm = informedConsentForm; } public String getCreator() { return creator; } public void setCreator(String creator) { this.creator = creator; } public String getJoinDate() { return joinDate; } public void setJoinDate(String joinDate) { this.joinDate = joinDate; } public Long getId() { return id; } public void setId(Long id) { this.id = id; } public void setRecordPhoneDetails(Boolean recordDetails) { if (recordDetails != null) { this.recordPhoneDetails = recordDetails; } } public Boolean getDeleted() { return deleted; } public void setDeleted(Boolean deleted) { this.deleted = deleted; } public List<Integer> getExtraDataCollectionDeclarations() { return extraDataCollectionDeclarations; } public void setExtraDataCollectionDeclarations(List<Integer> extraDataCollectionDeclarations) { this.extraDataCollectionDeclarations = extraDataCollectionDeclarations; } public String getOrganization() { return organization; } public void setOrganization(String organization) { this.organization = organization; } public String getContactEmail() { return contactEmail; } public void setContactEmail(String contactEmail) { this.contactEmail = contactEmail; } public String getContactPhone() { return contactPhone; } public void setContactPhone(String contactPhone) { 
this.contactPhone = contactPhone; } public Boolean getRecordPhoneDetails() { return recordPhoneDetails; } @Override public void validateWith(Validator validator) { // System.out.println("VALIDATING CORE"); validator.isNotNullAndNonEmptyString(title, "Experiment title cannot be null"); validator.isValidEmail(creator, "Experiment creator must be a valid email address"); if (contactEmail != null && contactEmail.length() > 0) { validator.isValidEmail(contactEmail, "Experiment contact must be a valid email address"); } validator.isNotNull(deleted, "deleted is not properly initialized"); validator.isNotNull(recordPhoneDetails, "recordPhoneDetails is not properly initialized"); validator.isNotNullCollection(extraDataCollectionDeclarations, "extra data declaration if you use extra data"); if (joinDate != null) { validator.isValidDateString(joinDate, "join date should be a valid date string"); } if (organization != null && organization.length() > 0) { validator.isNotNullAndNonEmptyString(organization, "organization must be non null if it is specified"); } } public Date getEarliestStartDate() { return earliestStartDate; } public void setEarliestStartDate(Date earliestStartDate) { this.earliestStartDate = earliestStartDate; } public Date getLatestEndDate() { return latestEndDate; } public void setLatestEndDate(Date latestEndDate) { this.latestEndDate = latestEndDate; } }
package org.kairosdb.plugin.rabbitmq.client;

import com.google.common.base.Strings;
import com.rabbitmq.client.Channel;
import com.rabbitmq.client.Connection;
import com.rabbitmq.client.ConnectionFactory;
import com.rabbitmq.client.Consumer;
import org.kairosdb.plugin.rabbitmq.datastore.Datastore;

import javax.inject.Inject;
import javax.inject.Named;
import java.io.IOException;
import java.util.concurrent.TimeoutException;

/**
 * RabbitMQ client that consumes metric messages from a queue and hands them
 * to the KairosDB {@link Datastore}. All connection parameters are injected
 * from the "kairosdb.plugin.rabbitmq.*" configuration properties; each has a
 * sensible default so the plugin works against a stock broker.
 */
public class RabbitMQClientImpl implements RabbitMQClient {

    /**
     * The RabbitMQ host.
     */
    @Inject
    @Named("kairosdb.plugin.rabbitmq.host")
    private String host = com.rabbitmq.client.ConnectionFactory.DEFAULT_HOST;

    /**
     * Default content type for messages when not defined
     */
    @Inject
    @Named("kairosdb.plugin.rabbitmq.default.content.type")
    private String defaultContentType = "application/json";

    /**
     * The RabbitMQ queue.
     */
    @Inject
    @Named("kairosdb.plugin.rabbitmq.queue")
    private String queue = "kairosdb";

    /**
     * Automatically declare RabbitMQ queue.
     */
    @Inject
    @Named("kairosdb.plugin.rabbitmq.queue.declare")
    private Boolean queueDeclare = true;

    /**
     * The RabbitMQ exchange for rejected messages.
     */
    @Inject
    @Named("kairosdb.plugin.rabbitmq.rejected.exchange")
    private String rejectedExchange = "kairosdb.rejected";

    /**
     * Automatically declare RabbitMQ exchange for rejected messages.
     */
    @Inject
    @Named("kairosdb.plugin.rabbitmq.rejected.exchange.declare")
    private Boolean rejectedExchangeDeclare = true;

    /**
     * The RabbitMQ queue for rejected messages.
     */
    @Inject
    @Named("kairosdb.plugin.rabbitmq.rejected.queue")
    private String rejectedQueue = "kairosdb.rejected";

    /**
     * Automatically declare RabbitMQ queue for rejected messages.
     */
    @Inject
    @Named("kairosdb.plugin.rabbitmq.rejected.queue.declare")
    private Boolean rejectedQueueDeclare = true;

    /**
     * The RabbitMQ virtual host.
     */
    @Inject
    @Named("kairosdb.plugin.rabbitmq.virtual.host")
    private String virtualHost = com.rabbitmq.client.ConnectionFactory.DEFAULT_VHOST;

    /**
     * The RabbitMQ username.
     */
    @Inject
    @Named("kairosdb.plugin.rabbitmq.username")
    private String username = com.rabbitmq.client.ConnectionFactory.DEFAULT_USER;

    /**
     * The RabbitMQ password.
     */
    @Inject
    @Named("kairosdb.plugin.rabbitmq.password")
    private String password = com.rabbitmq.client.ConnectionFactory.DEFAULT_PASS;

    /**
     * The RabbitMQ port.
     */
    @Inject
    @Named("kairosdb.plugin.rabbitmq.port")
    private int port = com.rabbitmq.client.ConnectionFactory.USE_DEFAULT_PORT;

    /**
     * The RabbitMQ connection timeout.
     */
    @Inject
    @Named("kairosdb.plugin.rabbitmq.connection.timeout")
    private int connectionTimeout = com.rabbitmq.client.ConnectionFactory.DEFAULT_CONNECTION_TIMEOUT;

    /**
     * The RabbitMQ channel max.
     */
    @Inject
    @Named("kairosdb.plugin.rabbitmq.requested.channel.max")
    private int requestedChannelMax = com.rabbitmq.client.ConnectionFactory.DEFAULT_CHANNEL_MAX;

    /**
     * The RabbitMQ frame max.
     */
    @Inject
    @Named("kairosdb.plugin.rabbitmq.requested.frame.max")
    private int requestedFrameMax = com.rabbitmq.client.ConnectionFactory.DEFAULT_FRAME_MAX;

    /**
     * The RabbitMQ heartbeat.
     */
    @Inject
    @Named("kairosdb.plugin.rabbitmq.requested.heartbeat")
    private int requestedHeartbeat = com.rabbitmq.client.ConnectionFactory.DEFAULT_HEARTBEAT;

    @Inject
    private Datastore datastore;

    // Single broker connection; held so stop() can close it.
    private Connection connection = null;

    /**
     * Connects to the broker, declares the configured queue/exchange topology
     * and begins consuming. Called once at plugin startup.
     */
    @Override
    public void start() throws IOException, TimeoutException {
        this.connection = this.createConnection();
        Channel channel = this.createChannel(connection);
        this.createQueue(channel);
        this.createRejectedExchange(channel);
        this.createRejectedQueue(channel);
        this.startConsuming(channel);
    }

    /**
     * Builds a new broker connection from the injected settings.
     */
    @Override
    public Connection createConnection() throws IOException, TimeoutException {
        ConnectionFactory factory = new ConnectionFactory();
        factory.setHost(host);
        factory.setVirtualHost(virtualHost);
        factory.setUsername(username);
        factory.setPassword(password);
        factory.setPort(port);
        factory.setConnectionTimeout(connectionTimeout);
        factory.setRequestedChannelMax(requestedChannelMax);
        factory.setRequestedFrameMax(requestedFrameMax);
        factory.setRequestedHeartbeat(requestedHeartbeat);
        return factory.newConnection();
    }

    /** Opens a channel on the given connection. */
    @Override
    public Channel createChannel(Connection connection) throws IOException {
        return connection.createChannel();
    }

    /**
     * Declares the main queue (durable, non-exclusive, non-auto-delete)
     * unless declaration is disabled by configuration.
     */
    @Override
    public void createQueue(Channel channel) throws IOException {
        if(this.queueDeclare) {
            channel.queueDeclare(queue, true, false, false, null);
        }
    }

    /**
     * Declares the durable fanout exchange for rejected messages, if enabled
     * and an exchange name is configured.
     */
    @Override
    public void createRejectedExchange(Channel channel) throws IOException {
        if(this.rejectedExchangeDeclare && !Strings.isNullOrEmpty(this.rejectedExchange)) {
            channel.exchangeDeclare(rejectedExchange, "fanout", true);
        }
    }

    /**
     * Declares the rejected-message queue and binds it to the rejected
     * exchange, if both are enabled and named.
     */
    @Override
    public void createRejectedQueue(Channel channel) throws IOException {
        if(this.rejectedQueueDeclare
                && !Strings.isNullOrEmpty(this.rejectedExchange)
                && !Strings.isNullOrEmpty(this.rejectedQueue)) {
            channel.queueDeclare(rejectedQueue, true, false, false, null);
            channel.queueBind(rejectedQueue, rejectedExchange, "");
        }
    }

    /**
     * Closes the broker connection (which also closes its channels).
     */
    @Override
    public void stop() throws IOException {
        if(connection != null && connection.isOpen()) {
            connection.close();
        }
    }

    /**
     * Starts consuming from the main queue with autoAck=true, so messages
     * are acknowledged on delivery. NOTE(review): unparseable messages are
     * presumably republished to rejectedExchange by RabbitMQConsumerImpl —
    * confirm, since with autoAck a consumer crash loses the message.
     */
    @Override
    public void startConsuming(Channel channel) throws IOException {
        Consumer consumer = new RabbitMQConsumerImpl(channel, datastore, defaultContentType, rejectedExchange);
        channel.basicConsume(queue, true, consumer);
    }
}
/* * Copyright 2016 IBM Corp. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.jsonstore.security; import android.content.Context; import android.util.Base64; import com.jsonstore.util.JSONStoreLogger; import com.jsonstore.util.JSONStoreUtil; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStream; import java.nio.ByteBuffer; import java.nio.CharBuffer; import java.nio.charset.Charset; import java.nio.charset.CharsetDecoder; import java.security.InvalidKeyException; import java.security.KeyStore; import java.security.KeyStoreException; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.security.cert.CertificateEncodingException; import java.security.cert.X509Certificate; import java.util.Arrays; import javax.crypto.BadPaddingException; import javax.crypto.Cipher; import javax.crypto.IllegalBlockSizeException; import javax.crypto.NoSuchPaddingException; import javax.crypto.spec.IvParameterSpec; import javax.crypto.spec.SecretKeySpec; import javax.net.ssl.TrustManagerFactory; import javax.net.ssl.X509TrustManager; public class FipsWrapper { private static final String LIBSSL_FILE_NAME = "libssl.so.1.0.0"; private static final String ERROR_LOG_PREFIX = "Error processing X509Certificate: "; private static JSONStoreLogger logger = JSONStoreUtil.getCoreLogger(); private static byte[] _encryptAES (byte[] key, byte[] iv, String to_encrypt ) { byte[] encryptedText = new byte[256]; 
byte[] plaintext; Cipher cipher = null; try { //Get instance of cipher for aes cipher = Cipher.getInstance("AES/CBC/NoPadding"); //Create hash for key using sha1 MessageDigest sha = MessageDigest.getInstance("SHA-1"); key = sha.digest(key); String keyString = SecurityUtils.encodeBytesAsHexString(key); key = Arrays.copyOf(keyString.getBytes("UTF-8"), 16); // use only first 128 bit SecretKeySpec secretKey = new SecretKeySpec(key, "AES"); IvParameterSpec ivSpec = new IvParameterSpec(iv); cipher.init(Cipher.ENCRYPT_MODE, secretKey, ivSpec); plaintext = to_encrypt.getBytes("UTF-8"); encryptedText = cipher.doFinal(plaintext); } catch(NoSuchAlgorithmException e){ e.printStackTrace(); } catch(NoSuchPaddingException e){ e.printStackTrace(); } catch(InvalidKeyException e){ e.printStackTrace(); } catch(IllegalBlockSizeException e){ e.printStackTrace(); System.out.println(e.toString()); } catch(BadPaddingException e){ e.printStackTrace(); System.out.println(e.toString()); } finally { return encryptedText; } } private static byte[] _decryptAES (byte[] key, byte[] iv, byte[] encryptedData){ byte[] plaintext = new byte[256]; Cipher cipher = null; try { //Get instance of cipher for aes cipher = Cipher.getInstance("AES/CBC/NoPadding"); //Create hash for key using sha1 MessageDigest sha = MessageDigest.getInstance("SHA-1"); key = sha.digest(key); String keyString = SecurityUtils.encodeBytesAsHexString(key); key = Arrays.copyOf(keyString.getBytes("UTF-8"), 16); // use only first 128 bit SecretKeySpec secretKey = new SecretKeySpec(key, "AES"); IvParameterSpec ivSpec = new IvParameterSpec(iv); cipher.init(Cipher.DECRYPT_MODE, secretKey, ivSpec); plaintext = cipher.doFinal(encryptedData); } catch(NoSuchAlgorithmException e){ e.printStackTrace(); } catch(NoSuchPaddingException e){ e.printStackTrace(); } catch(InvalidKeyException e){ e.printStackTrace(); } catch(IllegalBlockSizeException e){ e.printStackTrace(); } catch(BadPaddingException e){ e.printStackTrace(); } finally { return 
plaintext; } } public static final byte[] hexStringToByteArray(String s) { int len = s.length(); byte[] data = new byte[len / 2]; for(int i = 0; i < len; i += 2) { data[i / 2] = (byte)((Character.digit(s.charAt(i), 16) << 4) + Character.digit(s.charAt(i + 1), 16)); } return data; } public static byte[] encryptAES (String key, String iv, String clearText) { byte[] keyByteArray = hexStringToByteArray(key); byte[] ivByteArray = hexStringToByteArray(iv); byte[] encBytes = _encryptAES(keyByteArray ,ivByteArray, clearText); return encBytes; } // Throws exception if can't decode public static String decryptAES (String key, String iv, byte[] encryptedBytes) { byte[] keyByteArray = hexStringToByteArray(key); byte[] ivByteArray = hexStringToByteArray(iv); byte[] decryptedBytes = _decryptAES(keyByteArray, ivByteArray, encryptedBytes); String decryptedText; try { CharsetDecoder charsetDecoder = Charset.forName("UTF-8") //$NON-NLS-1$ .newDecoder (); CharBuffer charBuffer = charsetDecoder.decode (ByteBuffer.wrap(decryptedBytes)); decryptedText = new String(decryptedBytes, "UTF-8"); // in case the default charset is not UTF-8 //$NON-NLS-1$ } catch (Throwable t) { t.printStackTrace(); decryptedText = null; } Arrays.fill(decryptedBytes, (byte) 0); return decryptedText; } private static void saveCrtBundle(Context ctx) { TrustManagerFactory tmf = null; File localStorage = new File(JSONStoreUtil.getNoBackupFilesDir(ctx), "ca-bundle.crt"); Exception ex = null; try { OutputStream ostr = new FileOutputStream(localStorage); tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm()); tmf.init((KeyStore) null); X509TrustManager xtm = (X509TrustManager) tmf.getTrustManagers()[0]; for (X509Certificate cert : xtm.getAcceptedIssuers()) { try { String certEnc = "-----BEGIN CERTIFICATE-----\n"; byte[] array = certEnc.getBytes("UTF-8"); ostr.write(array, 0, array.length); array = cert.getEncoded(); array = Base64.encode(array, Base64.DEFAULT); ostr.write(array, 0, 
array.length); certEnc = "-----END CERTIFICATE-----\n"; array = certEnc.getBytes("UTF-8"); ostr.write(array, 0, array.length); } catch (IOException e) { logger.logError(ERROR_LOG_PREFIX, e); // do not rethrow } catch (CertificateEncodingException e) { logger.logError(ERROR_LOG_PREFIX, e); // do not rethrow } } ostr.flush(); ostr.close(); } catch (IOException e) { ex = e; } catch (NoSuchAlgorithmException e) { ex = e; } catch (KeyStoreException e) { ex = e; } if (ex != null) { logger.logError(ERROR_LOG_PREFIX, ex); } } public static void enableFips(Context context){ JSONStoreUtil.loadLib(context, LIBSSL_FILE_NAME); if(System.getProperty("javax.net.ssl.trustStore") != null) { File localStorage = new File(JSONStoreUtil.getNoBackupFilesDir(context), "ca-bundle.crt"); if(!localStorage.exists()) { saveCrtBundle(context); } } else { saveCrtBundle(context); } } }
package sitent.classifiers;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.io.FilenameUtils;
import org.apache.log4j.Logger;

/**
 * @author afried
 *
 *         Takes all the ARFF files in one directory and makes them compatible
 *         (by merging the headers). It assumes that all features with the same
 *         name have the same type.
 */
public class MakeArffCompatible {

	static Logger log = Logger.getLogger(MakeArffCompatible.class.getName());

	// all ARFF documents read so far
	private List<ArffDoc> docs;
	// nominal feature name -> union of its values across documents
	private Map<String, Set<String>> jointFeatures;
	// feature name -> merged type ("numeric" / "string" / "nominal")
	private Map<String, String> jointFeatureTypes;
	// nominal feature name -> sorted value array (dummy value first)
	private Map<String, String[]> sortedJointFeatures;
	// joint header string
	private StringBuffer header;
	// feature name <-> position in the merged header (used for sparse output)
	private Map<String, Integer> featNameToIndex;
	private Map<Integer, String> featIndextoName;
	// merged feature names, sorted, with the class attribute moved to the end
	private List<String> featNames;

	public MakeArffCompatible() {
		docs = new LinkedList<ArffDoc>();
	}

	/**
	 * Reads one document.
	 *
	 * NOTE(review): FileReader uses the platform default charset — confirm the
	 * ARFFs are in that encoding. Also, the reader leaks if an exception is
	 * thrown before r.close() (no try/finally).
	 *
	 * @param path   the ARFF file to read
	 * @param sparse whether the data section uses ARFF sparse format
	 * @param prefix input-root prefix, stripped from the stored document path
	 * @return the parsed document
	 * @throws IOException
	 */
	private ArffDoc readDocument(String path, boolean sparse, String prefix) throws IOException {
		ArffDoc doc = new ArffDoc(path, prefix);
		BufferedReader r = new BufferedReader(new FileReader(path));
		String line;
		boolean data = false;
		while ((line = r.readLine()) != null) {
			if (data) {
				// map: feature name to feature value
				Map<String, String> instance = new HashMap<String, String>();
				if (!sparse) {
					// dense: one value per attribute, in header order
					String[] values = line.trim().split(",");
					for (int i = 0; i < values.length; i++) {
						String value = values[i];
						// remove quotes if necessary
						String featName = doc.orderedFeatures.get(i);
						instance.put(featName, value);
					}
				} else {
					// sparse: "{index value, index value, ...}"
					// remove brackets
					line = line.trim().substring(1, line.trim().length() - 1);
					if (line.trim().equals("")) {
						continue;
					}
					String[] values = line.split(",");
					for (String value : values) {
						// remove quotes if necessary
						String[] parts = value.trim().split(" ");
						int featIndex = Integer.parseInt(parts[0]);
						String featName = doc.orderedFeatures.get(featIndex);
						if (parts[1].equals("QUOTE")) {
							// this was wrong in Intercorp features?!
							parts[1] = "\"QUOTE\"";
						}
						instance.put(featName, parts[1]);
					}
				}
				doc.instances.add(instance);
			} else if (line.startsWith("@attribute")) {
				String[] parts = line.split(" ");
				String featureName = parts[1];
				if (parts.length < 3) {
					continue;
				}
				// quote feature names (and escape chars that would break ARFF)
				if (!featureName.startsWith("\"")) {
					featureName = "\"" + featureName.replaceAll("\"|``", "QUOTE").replaceAll(",", "COMMA")
							.replaceAll(" ", "SPACE") + "\"";
				}
				if (parts[2].equals("numeric")) {
					doc.featureType.put(featureName, "numeric");
				} else if (parts[2].equals("string")) {
					doc.featureType.put(featureName, "string");
				} else {
					// nominal: parts[2] is "{v1,v2,...}" — strip braces, split
					doc.featureType.put(featureName, "nominal");
					String[] values = parts[2].substring(1, parts[2].length() - 1).split(",");
					doc.features.put(featureName, values);
				}
				doc.orderedFeatures.add(featureName);
			} else if (line.startsWith("@data")) {
				// everything after this marker is instance data
				data = true;
			}
		}
		r.close();
		return doc;
	}

	/**
	 * Reads in ARFFs in input directory.
	 *
	 * @param prefix   path prefix (input root)
	 * @param inputDir directory under the prefix holding the ARFF files
	 * @param sparse   whether the files use sparse format
	 * @throws IOException
	 */
	public void readArffs(String prefix, String inputDir, boolean sparse) throws IOException {
		log.info("Reading ARFFs from directory: " + prefix + "/" + inputDir);
		int num = 0;
		int total = new File(prefix + "/" + inputDir).list().length;
		for (String path : new File(prefix + "/" + inputDir).list()) {
			log.info("... " + path + " " + num++ + "/" + total);
			docs.add(readDocument(prefix + "/" + inputDir + "/" + path, sparse, prefix));
		}
	}

	/**
	 * One parsed ARFF document: its header metadata and its instances.
	 */
	public class ArffDoc {
		// path relative to the input root
		String path;
		Map<String, String> featureType;
		// features as ordered in this document
		List<String> orderedFeatures;
		// features and values in the ARFF
		Map<String, String[]> features;
		// instances and their values
		List<Map<String, String>> instances;

		public ArffDoc(String path, String prefix) {
			// keep only 'local' path
			this.path = path.replace(prefix, "");
			// System.out.println("Path for this document: " + this.path);
			features = new HashMap<String, String[]>();
			instances = new LinkedList<Map<String, String>>();
			featureType = new HashMap<String, String>();
			orderedFeatures = new LinkedList<String>();
		}
	}

	/**
	 * collect info for joint header
	 *
	 * Merges feature types and nominal value sets across all documents, sorts
	 * them, and builds the shared ARFF header string.
	 *
	 * @param className name of the class attribute; it is moved to the end of
	 *                  the merged feature list
	 */
	private void collectJointHeader(String className) {
		log.info("Collecting joint header... (this may take a while)");

		jointFeatures = new HashMap<String, Set<String>>();
		jointFeatureTypes = new HashMap<String, String>();
		sortedJointFeatures = new HashMap<String, String[]>();

		for (ArffDoc doc : docs) {
			System.out.println(doc.path);
			for (String featName : doc.featureType.keySet()) {
				String featureType = doc.featureType.get(featName);
				// "string" wins over any other type once seen
				if (jointFeatures.containsKey(featName)) {
					if (jointFeatureTypes.get(featName).equals("string")) {
						featureType = "string";
					}
				}
				if (!jointFeatureTypes.containsKey(featName)) {
					jointFeatureTypes.put(featName, featureType);
				}
				// nominal wins over numeric when documents disagree
				if (featureType.equals("numeric") && jointFeatureTypes.get(featName).equals("nominal")) {
					// keep it that way
				} else if (featureType.equals("nominal") && jointFeatureTypes.get(featName).equals("numeric")) {
					jointFeatureTypes.put(featName, featureType);
				}
				if (featureType.equals("nominal")) {
					// accumulate the union of declared nominal values
					if (!jointFeatures.containsKey(featName)) {
						jointFeatures.put(featName, new HashSet<String>());
					}
					for (String featVal : doc.features.get(featName)) {
						jointFeatures.get(featName).add(featVal);
					}
				}
			}
		}

		// sort feature values for joint header
		log.info("sorting features ... ");
		for (String featName : jointFeatures.keySet()) {
			List<String> featVals = new LinkedList<String>(jointFeatures.get(featName));
			// add all values from document
			Set<String> featSet = new HashSet<String>(featVals);
			for (ArffDoc doc : docs) {
				for (Map<String, String> inst : doc.instances) {
					if (inst.get(featName) != null) {
						featSet.add(inst.get(featName));
					}
				}
			}
			featVals = new LinkedList<String>(featSet);
			// ... done
			Collections.sort(featVals);
			// put dummy value first, making sure there is no second dummy value
			// due to quotation
			featVals.remove("\"THE-DUMMY-VALUE\"");
			featVals.remove("THE-DUMMY-VALUE");
			featVals.add(0, "\"THE-DUMMY-VALUE\"");
			String[] featValArray = new String[featVals.size()];
			featVals.toArray(featValArray);
			sortedJointFeatures.put(featName, featValArray);
		}

		// sorted list of feature names
		featNames = new LinkedList<String>(jointFeatureTypes.keySet());
		Collections.sort(featNames);
		// move class to end
		className = "\"" + className + "\"";
		featNames.remove(className);
		featNames.add(className);

		// feature index for each feature name
		featNameToIndex = new HashMap<String, Integer>();
		featIndextoName = new HashMap<Integer, String>();
		for (int i = 0; i < featNames.size(); i++) {
			featNameToIndex.put(featNames.get(i), i);
			featIndextoName.put(i, featNames.get(i));
		}
		log.info("Done collecting joint header string.");

		// create header string
		header = new StringBuffer("@relation sitent\n");
		for (String featName : featNames) {
			if (featName.equals("\"null\"")) {
				continue;
			}
			if (jointFeatureTypes.get(featName).equals("numeric")) {
				header.append("@attribute " + featName + " numeric\n");
			} else if (jointFeatureTypes.get(featName).equals("string")) {
				header.append("@attribute " + featName + " string\n");
			} else {
				// nominal: list all merged values, skipping blanks
				StringBuffer values = new StringBuffer("");
				for (String value : sortedJointFeatures.get(featName)) {
					if (!value.trim().equals("")) {
						values.append(value + ",");
					}
				}
				// drop trailing comma
				values = new StringBuffer(values.substring(0, values.length() - 1));
				header.append("@attribute " + featName + " {" + values + "}\n");
			}
		}
		header.append("\n");
		log.info("Done creating new header string.");
	}

	/**
	 * Writes every document back out under the merged (compatible) header.
	 *
	 * @param outputDir              target directory
	 * @param sparse                 write sparse instance lines if true
	 * @param keepDirectoryStructure mirror each document's relative path
	 * @throws IOException
	 */
	private void writeCompatibleArffs(String outputDir, boolean sparse, boolean keepDirectoryStructure)
			throws IOException {
		// add one file with all instances
		// PrintWriter wAll = new PrintWriter(new FileWriter(outputDir +
		// "/allData.arff"));
		// wAll.println(header);
		// wAll.println("@data");
		// System.out.println("opening all writer...");
		int num = 0;
		int total = docs.size();
		for (ArffDoc doc : docs) {
			String[] parts = doc.path.split("/");
			String filename = parts[parts.length - 1];
			System.out.println("Writing: " + filename + " " + num++ + "/" + total);
			String outPath = outputDir + "/" + filename;
			if (keepDirectoryStructure) {
				// use doc.path here instead of filename if using dev/test
				outPath = outputDir + "/" + doc.path;
			}
			// write header
			PrintWriter w = new PrintWriter(new FileWriter(outPath));
			w.println(header);
			w.println("@data");
			for (Map<String, String> instances : doc.instances) {
				if (!sparse) {
					// dense: emit a value (or '?') for every merged feature
					String line = "";
					for (String featName : featNames) {
						if (instances.containsKey(featName)) {
							line += instances.get(featName) + ",";
						} else {
							line += "?,";
						}
					}
					line = line.substring(0, line.length() - 1);
					w.println(line);
					// wAll.println(line);
				} else {
					// sparse format: "index value" pairs, sorted by merged index
					String values = "";
					List<Integer> indices = new LinkedList<Integer>();
					for (String featName : instances.keySet()) {
						indices.add(featNameToIndex.get(featName));
					}
					Collections.sort(indices);
					for (int index : indices) {
						values += index + " " + instances.get(featIndextoName.get(index)) + ", ";
					}
					String line = "{" + values.substring(0, values.length() - 2) + "}";
					w.println(line);
					// wAll.println(line);
					// wAll.flush();
				}
			}
			w.close();
		}
		// wAll.close();
	}

	/**
	 * CLI entry point: reads all ARFFs, merges their headers and rewrites them.
	 *
	 * NOTE(review): File.delete() is a no-op on non-empty directories (and its
	 * return value is ignored here), so stale output files may survive —
	 * confirm intended.
	 */
	public static void main(String[] args) {

		Options options = new Options();
		options.addOption("input", true, "Input path with ARFFs: one or more directories.");
		options.addOption("output", true, "Output path for compatible ARFFs.");
		options.addOption("sparse", false, "Arff in sparse format?");
		options.addOption("classAttribute", true, "last attribute in ARFF");
		options.addOption("keepDirs", false,
				"if given, keep directory structure for output (otherwise all ARFFs are written into one directory)");

		// Parse command line and configure
		CommandLineParser parser = new DefaultParser();
		CommandLine cmd;
		try {
			cmd = parser.parse(options, args);
			String inputDir = cmd.getOptionValue("input");
			String outputDir = cmd.getOptionValue("output");
			boolean sparse = cmd.hasOption("sparse");
			String classAttribute = cmd.getOptionValue("classAttribute");
			boolean keepDirectoryStructure = cmd.hasOption("keepDirs");

			File outputDirFile = new File(outputDir);
			if (outputDirFile.exists()) {
				outputDirFile.delete();
			}
			outputDirFile.mkdirs();

			MakeArffCompatible mac = new MakeArffCompatible();

			// if input directory has subdirectories, process them all
			// assumes only one level of subdirs!
			File inputFile = new File(inputDir);
			log.info("input directory: " + inputFile);
			log.info("is directory? " + inputFile.isDirectory());
			String[] inputDirs = null;
			// are there subdirectories?
			boolean subDirs = false;
			for (String subFile : inputFile.list()) {
				if (new File(inputFile + "/" + subFile).isDirectory()) {
					subDirs = true;
					break;
				}
			}
			log.info("has subdirectories? " + subDirs);
			if (subDirs) {
				inputDirs = inputFile.list();
			} else {
				String directory = FilenameUtils.getName(inputDir);
				log.info("the directory: " + directory);
				inputDirs = new String[] { directory };
			}

			// add the files to the list of documents.
			for (String id : inputDirs) {
				if (inputDirs.length == 1) {
					// only one input directory
					String prefix = FilenameUtils.getPath(inputDir);
					if (inputDir.startsWith("/")) {
						// absolute path
						prefix = "/" + prefix;
					}
					log.info("prefix: " + prefix);
					mac.readArffs(prefix, id, sparse);
				} else {
					mac.readArffs(inputDir, id, sparse);
				}
				// create the matching output directories
				File outDir = new File(outputDir);
				if (outDir.exists()) {
					outDir.delete();
				}
				outDir.mkdirs();
				if (keepDirectoryStructure) {
					outDir = new File (outputDir + "/" + id);
					outDir.mkdirs();
				}
			}

			log.info("now starting to collect the joint header.");
			mac.collectJointHeader(classAttribute);
			log.info("done collecting joint header.");
			mac.writeCompatibleArffs(outputDir, sparse, keepDirectoryStructure);
			log.info("done writing ARFFs");

		} catch (ParseException e) {
			e.printStackTrace();
		} catch (IOException e) {
			e.printStackTrace();
		}
	}
}
package apavlov.list;

import java.util.Arrays;
import java.util.Iterator;
import java.util.NoSuchElementException;

/**
 * A simple doubly linked list implementation of {@code MyList}.
 *
 * <p>Invariant note: {@code last} remains {@code null} while the list holds at
 * most one element; it only starts tracking the tail once a second element
 * exists. All code that reads {@code last} must tolerate that (see
 * {@link #add(Object)}).</p>
 *
 * @param <E> the element type stored in this list
 * @author Pavlov Artem
 * @since 18.09.2017
 */
public class MyLinkedList<E> implements MyList<E> {
    /** Message used for exceptions raised on failed lookups. */
    private String msgException = "The element is not found...";

    /** Number of elements currently stored. */
    private int size;

    /** Head node of the list ({@code null} when the list is empty). */
    private Node first;

    /** Tail node of the list ({@code null} while size <= 1 — see class note). */
    private Node last;

    /**
     * Creates an empty list.
     */
    public MyLinkedList() {
    }

    /**
     * Creates a list pre-filled with the given array.
     *
     * @param values elements to append in order; ignored when null or empty
     */
    public MyLinkedList(E[] values) {
        this.addAll(values);
    }

    @Override
    public int size() {
        return size;
    }

    @Override
    public boolean isEmpty() {
        return this.size == 0;
    }

    @Override
    public void clear() {
        size = 0;
        first = null;
        last = null;
    }

    @Override
    public Object[] toArray() {
        Object[] resultArray = new Object[this.size];
        int index = 0;
        for (Node link = first; link != null; link = link.next) {
            resultArray[index++] = link.value;
        }
        return resultArray;
    }

    @Override
    public String toString() {
        return Arrays.toString(toArray());
    }

    @Override
    public void add(E value) {
        if (this.first == null) {
            first = new Node(null, null, value);
        } else {
            // For a one-element list "last" is still null, so append after "first".
            Node prevElement = this.last == null ? this.first : this.last;
            this.last = new Node(prevElement, null, value);
            prevElement.next = this.last;
        }
        this.size++;
    }

    /**
     * Checks that the index addresses an existing element.
     *
     * @param index index to check
     * @return true when {@code 0 <= index < size}
     */
    private boolean checkIndexToRange(int index) {
        return index >= 0 && index < this.size;
    }

    /**
     * Returns the node at the given index, walking from whichever end of the
     * list is closer.
     *
     * @param index index of the wanted node; must already be range-checked
     * @return the node at that index
     */
    private Node getLinkByIndex(int index) {
        Node result;
        if (this.size >> 1 >= index) {
            // Index is in the front half: walk forward from the head.
            result = this.first;
            for (int i = 0; i < index; i++) {
                result = result.next;
            }
        } else {
            // Index is in the back half: walk backward from the tail.
            result = this.last;
            for (int i = this.size - 1; i > index; i--) {
                result = result.prev;
            }
        }
        return result;
    }

    @Override
    public boolean add(int index, E value) {
        boolean result = true;
        if (index == this.size) {
            add(value);
        } else if (checkIndexToRange(index)) {
            Node oldElement = getLinkByIndex(index);
            Node newElement = new Node(oldElement.prev, oldElement, value);
            if (oldElement.prev == null) {
                // Inserting at the head.
                // BUGFIX: link the old head back to the new node and do NOT
                // clobber "last" — the previous code set last to the old head,
                // which corrupted the tail pointer for lists of size >= 2 and
                // left the old head's prev reference dangling at null.
                this.first = newElement;
                oldElement.prev = newElement;
                if (this.last == null) {
                    // The list previously held a single node; it is now the tail.
                    this.last = oldElement;
                }
            } else {
                oldElement.prev.next = newElement;
                oldElement.prev = newElement;
            }
            this.size++;
        } else {
            result = false;
        }
        return result;
    }

    @Override
    public boolean addAll(E[] values) {
        boolean result = values != null && values.length > 0;
        if (result) {
            for (E value : values) {
                add(value);
            }
        }
        return result;
    }

    /**
     * Returns the first node holding the given value, or {@code null}.
     *
     * @param value value to search for (compared via {@code equals})
     * @return the matching node, or {@code null} when absent
     */
    private Node getLinkByValue(E value) {
        Node result = null;
        for (Node element = first; element != null; element = element.next) {
            if (element.value.equals(value)) {
                result = element;
                break;
            }
        }
        return result;
    }

    @Override
    public int get(E value) {
        int result = -1;
        int index = 0;
        for (Node node = first; node != null; node = node.next) {
            if (node.value.equals(value)) {
                result = index;
                break;
            }
            index++;
        }
        return result;
    }

    @Override
    public E get(int index) {
        E result;
        if (checkIndexToRange(index)) {
            result = getLinkByIndex(index).value;
        } else {
            throw new NoSuchElementException(this.msgException);
        }
        return result;
    }

    /**
     * Unlinks the given node and decrements the size.
     *
     * @param node node to unlink; {@code null} is a no-op
     * @return true when a node was removed; false when {@code node} was null
     */
    private boolean deleteByLink(Node node) {
        boolean result = node != null;
        if (result) {
            if (node.next == null && node.prev == null) {
                // The only element in the list.
                first = null;
                last = null;
            } else if (node.prev == null) {
                // Removing the head.
                first = node.next;
                first.prev = null;
            } else if (node.next == null) {
                // Removing the tail.
                last = node.prev;
                last.next = null;
            } else {
                // Removing from the middle: bridge the neighbours.
                node.prev.next = node.next;
                node.next.prev = node.prev;
            }
            size--;
        }
        return result;
    }

    @Override
    public boolean delete(int index) {
        boolean result = checkIndexToRange(index);
        if (result) {
            result = deleteByLink(getLinkByIndex(index));
        }
        return result;
    }

    @Override
    public boolean delete(E value) {
        return deleteByLink(getLinkByValue(value));
    }

    @Override
    public E update(int index, E value) {
        E result;
        if (checkIndexToRange(index)) {
            Node temp = getLinkByIndex(index);
            result = temp.value;
            temp.value = value;
        } else {
            throw new NoSuchElementException(this.msgException);
        }
        return result;
    }

    @Override
    public Iterator<E> iterator() {
        return new IteratorLinked();
    }

    /**
     * A single list node holding the user value plus neighbour links.
     *
     * @author Pavlov Artem
     * @since 18.09.2017
     */
    private class Node {
        /** Link to the previous node ({@code null} at the head). */
        private Node prev;

        /** Link to the next node ({@code null} at the tail). */
        private Node next;

        /** The stored value. */
        private E value;

        /**
         * Creates a node.
         *
         * @param prev  link to the previous node
         * @param next  link to the next node
         * @param value the stored value
         */
        Node(Node prev, Node next, E value) {
            this.prev = prev;
            this.next = next;
            this.value = value;
        }
    }

    /**
     * Forward iterator over the list. Not fail-fast: concurrent structural
     * modification is not detected.
     *
     * @author Pavlov Artem
     * @since 18.09.2017
     */
    private class IteratorLinked implements Iterator<E> {
        /** Cursor positioned on the next node to return. */
        private Node cursor = first;

        @Override
        public boolean hasNext() {
            return this.cursor != null;
        }

        @Override
        public E next() {
            if (!hasNext()) {
                throw new NoSuchElementException(msgException);
            }
            E result = cursor.value;
            this.cursor = this.cursor.next;
            return result;
        }
    }
}
/* * Copyright (C) 2010 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.yunos.alicontacts.widget; import android.content.Context; import android.graphics.Canvas; import android.graphics.Rect; import android.graphics.RectF; import android.util.AttributeSet; import android.util.Log; import android.view.MotionEvent; import android.view.View; import android.view.ViewGroup; import android.widget.AbsListView; import android.widget.AbsListView.OnScrollListener; import android.widget.AdapterView; import android.widget.AdapterView.OnItemSelectedListener; import android.widget.ListAdapter; /** * A ListView that maintains a header pinned at the top of the list. The * pinned header can be pushed up and dissolved as needed. */ public class PinnedHeaderListView extends AutoScrollListView implements OnScrollListener, OnItemSelectedListener { private static final String TAG = "PinnedHeaderListView"; /** * Adapter interface. The list adapter must implement this interface. */ public interface PinnedHeaderAdapter { /** * Returns the overall number of pinned headers, visible or not. */ int getPinnedHeaderCount(); /** * Creates or updates the pinned header view. */ View getPinnedHeaderView(int viewIndex, View convertView, ViewGroup parent); /** * Configures the pinned headers to match the visible list items. 
The * adapter should call {@link PinnedHeaderListView#setHeaderPinnedAtTop}, * {@link PinnedHeaderListView#setHeaderPinnedAtBottom}, * {@link PinnedHeaderListView#setFadingHeader} or * {@link PinnedHeaderListView#setHeaderInvisible}, for each header that * needs to change its position or visibility. */ void configurePinnedHeaders(PinnedHeaderListView listView); /** * Returns the list position to scroll to if the pinned header is touched. * Return -1 if the list does not need to be scrolled. */ int getScrollPositionForHeader(int viewIndex); } private static final int MAX_ALPHA = 255; private static final int TOP = 0; private static final int BOTTOM = 1; private static final int FADING = 2; private boolean mListLock = false; private static final int DEFAULT_ANIMATION_DURATION = 20; private static final int POSITION_MAX_HIT_TIMES = 3; private static final class PinnedHeader { View view; boolean visible; int y; int height; int alpha; int state; boolean animating; boolean targetVisible; int sourceY; int targetY; long targetTime; } private PinnedHeaderAdapter mAdapter; private int mSize; private PinnedHeader[] mHeaders; private RectF mBounds = new RectF(); private Rect mClipRect = new Rect(); private OnScrollListener mOnScrollListener; private OnItemSelectedListener mOnItemSelectedListener; private int mScrollState; private int mAnimationDuration = DEFAULT_ANIMATION_DURATION; private boolean mAnimating; private long mAnimationTargetTime; private int mHeaderPaddingLeft; private int mHeaderWidth; public PinnedHeaderListView(Context context) { this(context, null); } public PinnedHeaderListView(Context context, AttributeSet attrs) { this(context, attrs, 0); } public PinnedHeaderListView(Context context, AttributeSet attrs, int defStyle) { super(context, attrs, defStyle); super.setOnScrollListener(this); super.setOnItemSelectedListener(this); } @Override protected void onLayout(boolean changed, int l, int t, int r, int b) { Log.i(TAG, "onLayout: nanoTime="+System.nanoTime()); 
super.onLayout(changed, l, t, r, b); mHeaderPaddingLeft = getPaddingLeft(); mHeaderWidth = r - l - mHeaderPaddingLeft - getPaddingRight(); ensurePinnedHeaderLayout(0); } public void setPinnedHeaderAnimationDuration(int duration) { mAnimationDuration = duration; } @Override public void setAdapter(ListAdapter adapter) { Log.i(TAG, "setAdapter: nanoTime="+System.nanoTime()); mAdapter = (PinnedHeaderAdapter)adapter; super.setAdapter(adapter); } @Override public void setOnScrollListener(OnScrollListener onScrollListener) { mOnScrollListener = onScrollListener; super.setOnScrollListener(this); } @Override public void setOnItemSelectedListener(OnItemSelectedListener listener) { mOnItemSelectedListener = listener; super.setOnItemSelectedListener(this); } @Override public void onScroll(AbsListView view, int firstVisibleItem, int visibleItemCount, int totalItemCount) { if (mAdapter != null) { int count = mAdapter.getPinnedHeaderCount(); if (count != mSize) { mSize = count; if (mHeaders == null) { mHeaders = new PinnedHeader[mSize]; } else if (mHeaders.length < mSize) { PinnedHeader[] headers = mHeaders; mHeaders = new PinnedHeader[mSize]; System.arraycopy(headers, 0, mHeaders, 0, headers.length); } } for (int i = 0; i < mSize; i++) { if (mHeaders[i] == null) { mHeaders[i] = new PinnedHeader(); } mHeaders[i].view = mAdapter.getPinnedHeaderView(i, mHeaders[i].view, this); } mAnimationTargetTime = System.currentTimeMillis() + mAnimationDuration; mAdapter.configurePinnedHeaders(this); invalidateIfAnimating(); } if (mOnScrollListener != null) { mOnScrollListener.onScroll(this, firstVisibleItem, visibleItemCount, totalItemCount); } } @Override protected float getTopFadingEdgeStrength() { // Disable vertical fading at the top when the pinned header is present return mSize > 0 ? 
0 : super.getTopFadingEdgeStrength(); } @Override public void onScrollStateChanged(AbsListView view, int scrollState) { mScrollState = scrollState; if (mOnScrollListener != null) { mOnScrollListener.onScrollStateChanged(this, scrollState); } } /** * Ensures that the selected item is positioned below the top-pinned headers * and above the bottom-pinned ones. */ @Override public void onItemSelected(AdapterView<?> parent, View view, int position, long id) { int height = getHeight(); int windowTop = 0; int windowBottom = height; for (int i = 0; i < mSize; i++) { PinnedHeader header = mHeaders[i]; if (header.visible) { if (header.state == TOP) { windowTop = header.y + header.height; } else if (header.state == BOTTOM) { windowBottom = header.y; break; } } } View selectedView = getSelectedView(); if (selectedView != null) { if (selectedView.getTop() < windowTop) { setSelectionFromTop(position, windowTop); } else if (selectedView.getBottom() > windowBottom) { setSelectionFromTop(position, windowBottom - selectedView.getHeight()); } } if (mOnItemSelectedListener != null) { mOnItemSelectedListener.onItemSelected(parent, view, position, id); } } @Override public void onNothingSelected(AdapterView<?> parent) { if (mOnItemSelectedListener != null) { mOnItemSelectedListener.onNothingSelected(parent); } } public int getPinnedHeaderHeight(int viewIndex) { if (!ensurePinnedHeaderLayout(viewIndex)) { return 0; } return mHeaders[viewIndex].view.getHeight(); } /** * Set header to be pinned at the top. * * @param viewIndex index of the header view * @param y is position of the header in pixels. 
* @param animate true if the transition to the new coordinate should be animated */ public void setHeaderPinnedAtTop(int viewIndex, int y, boolean animate) { if (!ensurePinnedHeaderLayout(viewIndex)) { return; } PinnedHeader header = mHeaders[viewIndex]; header.visible = true; header.y = y; header.state = TOP; // TODO perhaps we should animate at the top as well header.animating = false; } /** * Set header to be pinned at the bottom. * * @param viewIndex index of the header view * @param y is position of the header in pixels. * @param animate true if the transition to the new coordinate should be animated */ public void setHeaderPinnedAtBottom(int viewIndex, int y, boolean animate) { if (!ensurePinnedHeaderLayout(viewIndex)) { return; } PinnedHeader header = mHeaders[viewIndex]; header.state = BOTTOM; if (header.animating) { header.targetTime = mAnimationTargetTime; header.sourceY = header.y; header.targetY = y; } else if (animate && (header.y != y || !header.visible)) { if (header.visible) { header.sourceY = header.y; } else { header.visible = true; header.sourceY = y + header.height; } header.animating = true; header.targetVisible = true; header.targetTime = mAnimationTargetTime; header.targetY = y; } else { header.visible = true; header.y = y; } } /** * Set header to be pinned at the top of the first visible item. * * @param viewIndex index of the header view * @param position is position of the header in pixels. 
*/ public void setFadingHeader(int viewIndex, int position, boolean fade) { //ensurePinnedHeaderLayout(viewIndex); View child = getChildAt(position - getFirstVisiblePosition()); if (child == null) return; PinnedHeader header = mHeaders[viewIndex]; header.visible = true; header.state = FADING; header.alpha = MAX_ALPHA; header.animating = false; int top = getTotalTopPinnedHeaderHeight(); header.y = top; if (fade) { int bottom = child.getBottom() - top; int headerHeight = header.height; if (bottom < headerHeight) { int portion = bottom - headerHeight; header.alpha = MAX_ALPHA * (headerHeight + portion) / headerHeight; header.y = top + portion; } } } /** * Makes header invisible. * * @param viewIndex index of the header view * @param animate true if the transition to the new coordinate should be animated */ public void setHeaderInvisible(int viewIndex, boolean animate) { PinnedHeader header = mHeaders[viewIndex]; if (header.visible && (animate || header.animating) && header.state == BOTTOM) { header.sourceY = header.y; if (!header.animating) { header.visible = true; header.targetY = getBottom() + header.height; } header.animating = true; header.targetTime = mAnimationTargetTime; header.targetVisible = false; } else { header.visible = false; } } private boolean ensurePinnedHeaderLayout(int viewIndex) { if ((mHeaders != null) && (mHeaders[viewIndex] != null)) { View view = mHeaders[viewIndex].view; if (view.isLayoutRequested()) { int widthSpec = MeasureSpec.makeMeasureSpec(mHeaderWidth, MeasureSpec.EXACTLY); int heightSpec; ViewGroup.LayoutParams layoutParams = view.getLayoutParams(); if (layoutParams != null && layoutParams.height > 0) { heightSpec = MeasureSpec.makeMeasureSpec(layoutParams.height, MeasureSpec.EXACTLY); } else { heightSpec = MeasureSpec.makeMeasureSpec(0, MeasureSpec.UNSPECIFIED); } view.measure(widthSpec, heightSpec); int height = view.getMeasuredHeight(); mHeaders[viewIndex].height = height; view.layout(0, 0, mHeaderWidth, height); } return true; } 
else { Log.e(VIEW_LOG_TAG, "ensurePinnedHeaderLayout("+viewIndex+") mHeaders is NULL!"); return false; } } /** * Returns the sum of heights of headers pinned to the top. */ public int getTotalTopPinnedHeaderHeight() { for (int i = mSize; --i >= 0;) { PinnedHeader header = mHeaders[i]; if (header.visible && header.state == TOP) { return header.y + header.height; } } return 0; } /** * Returns the list item position at the specified y coordinate. */ public int getPositionAt(int y) { int hitTime = 0; do { int position = pointToPosition(getPaddingLeft() + 1, y); if (position != -1) { return position; } // If position == -1, we must have hit a separator. Let's examine // a nearby pixel y++; } while (++ hitTime < POSITION_MAX_HIT_TIMES); return 0; } @Override public boolean onInterceptTouchEvent(MotionEvent ev) { if (mListLock) { return false; } if (mScrollState == SCROLL_STATE_IDLE) { final int y = (int)ev.getY(); for (int i = mSize; --i >= 0;) { PinnedHeader header = mHeaders[i]; if (header.visible && header.y <= y && header.y + header.height > y) { if (ev.getAction() == MotionEvent.ACTION_DOWN) { return smoothScrollToPartition(i); } else { return true; } } } } return super.onInterceptTouchEvent(ev); } private boolean smoothScrollToPartition(int partition) { final int position = mAdapter.getScrollPositionForHeader(partition); if (position == -1) { return false; } int offset = 0; for (int i = 0; i < partition; i++) { PinnedHeader header = mHeaders[i]; if (header.visible) { offset += header.height; } } smoothScrollToPositionFromTop(position + getHeaderViewsCount(), offset); return true; } private void invalidateIfAnimating() { mAnimating = false; for (int i = 0; i < mSize; i++) { if (mHeaders[i].animating) { mAnimating = true; invalidate(); return; } } } @Override protected void dispatchDraw(Canvas canvas) { long currentTime = mAnimating ? 
System.currentTimeMillis() : 0; int top = 0; int bottom = getBottom(); boolean hasVisibleHeaders = false; for (int i = 0; i < mSize; i++) { PinnedHeader header = mHeaders[i]; if (header.visible) { hasVisibleHeaders = true; if (header.state == BOTTOM && header.y < bottom) { bottom = header.y; } else if (header.state == TOP || header.state == FADING) { int newTop = header.y + header.height; if (newTop > top) { top = newTop; } } } } if (hasVisibleHeaders) { canvas.save(); mClipRect.set(0, top, getWidth(), bottom); canvas.clipRect(mClipRect); } super.dispatchDraw(canvas); if (hasVisibleHeaders) { canvas.restore(); // First draw top headers, then the bottom ones to handle the Z axis correctly for (int i = mSize; --i >= 0;) { PinnedHeader header = mHeaders[i]; if (header.visible && (header.state == TOP || header.state == FADING)) { drawHeader(canvas, header, currentTime); } } for (int i = 0; i < mSize; i++) { PinnedHeader header = mHeaders[i]; if (header.visible && header.state == BOTTOM) { drawHeader(canvas, header, currentTime); } } } invalidateIfAnimating(); } private void drawHeader(Canvas canvas, PinnedHeader header, long currentTime) { if (header.animating) { int timeLeft = (int)(header.targetTime - currentTime); if (timeLeft <= 0) { header.y = header.targetY; header.visible = header.targetVisible; header.animating = false; } else { header.y = header.targetY + (header.sourceY - header.targetY) * timeLeft / mAnimationDuration; } } if (header.visible) { View view = header.view; int saveCount = canvas.save(); canvas.translate(mHeaderPaddingLeft, header.y); if (header.state == FADING) { mBounds.set(0, 0, mHeaderWidth, view.getHeight()); canvas.saveLayerAlpha(mBounds, header.alpha, Canvas.ALL_SAVE_FLAG); } view.draw(canvas); canvas.restoreToCount(saveCount); } } public void lockListMove(boolean lock) { mListLock = lock; } }
/** * Copyright (c) 2016-present, RxJava Contributors. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in * compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See * the License for the specific language governing permissions and limitations under the License. */ package io.reactivex.parallel; import java.util.List; import org.junit.Test; import io.reactivex.*; import io.reactivex.exceptions.*; import io.reactivex.functions.*; import io.reactivex.internal.functions.Functions; import io.reactivex.plugins.RxJavaPlugins; import io.reactivex.subscribers.TestSubscriber; public class ParallelMapTryTest implements Consumer<Object> { volatile int calls; @Override public void accept(Object t) throws Exception { calls++; } @Test public void mapNoError() { for (ParallelFailureHandling e : ParallelFailureHandling.values()) { Flowable.just(1) .parallel(1) .map(Functions.identity(), e) .sequential() .test() .assertResult(1); } } @Test public void mapErrorNoError() { for (ParallelFailureHandling e : ParallelFailureHandling.values()) { Flowable.<Integer>error(new TestException()) .parallel(1) .map(Functions.identity(), e) .sequential() .test() .assertFailure(TestException.class); } } @Test public void mapConditionalNoError() { for (ParallelFailureHandling e : ParallelFailureHandling.values()) { Flowable.just(1) .parallel(1) .map(Functions.identity(), e) .filter(Functions.alwaysTrue()) .sequential() .test() .assertResult(1); } } @Test public void mapErrorConditionalNoError() { for (ParallelFailureHandling e : ParallelFailureHandling.values()) { Flowable.<Integer>error(new TestException()) .parallel(1) .map(Functions.identity(), e) 
.filter(Functions.alwaysTrue()) .sequential() .test() .assertFailure(TestException.class); } } @Test public void mapFailWithError() { Flowable.range(0, 2) .parallel(1) .map(new Function<Integer, Integer>() { @Override public Integer apply(Integer v) throws Exception { return 1 / v; } }, ParallelFailureHandling.ERROR) .sequential() .test() .assertFailure(ArithmeticException.class); } @Test public void mapFailWithStop() { Flowable.range(0, 2) .parallel(1) .map(new Function<Integer, Integer>() { @Override public Integer apply(Integer v) throws Exception { return 1 / v; } }, ParallelFailureHandling.STOP) .sequential() .test() .assertResult(); } @Test public void mapFailWithRetry() { Flowable.range(0, 2) .parallel(1) .map(new Function<Integer, Integer>() { int count; @Override public Integer apply(Integer v) throws Exception { if (count++ == 1) { return -1; } return 1 / v; } }, ParallelFailureHandling.RETRY) .sequential() .test() .assertResult(-1, 1); } @Test public void mapFailWithRetryLimited() { Flowable.range(0, 2) .parallel(1) .map(new Function<Integer, Integer>() { @Override public Integer apply(Integer v) throws Exception { return 1 / v; } }, new BiFunction<Long, Throwable, ParallelFailureHandling>() { @Override public ParallelFailureHandling apply(Long n, Throwable e) throws Exception { return n < 5 ? 
ParallelFailureHandling.RETRY : ParallelFailureHandling.SKIP; } }) .sequential() .test() .assertResult(1); } @Test public void mapFailWithSkip() { Flowable.range(0, 2) .parallel(1) .map(new Function<Integer, Integer>() { @Override public Integer apply(Integer v) throws Exception { return 1 / v; } }, ParallelFailureHandling.SKIP) .sequential() .test() .assertResult(1); } @SuppressWarnings("unchecked") @Test public void mapFailHandlerThrows() { TestSubscriber<Integer> ts = Flowable.range(0, 2) .parallel(1) .map(new Function<Integer, Integer>() { @Override public Integer apply(Integer v) throws Exception { return 1 / v; } }, new BiFunction<Long, Throwable, ParallelFailureHandling>() { @Override public ParallelFailureHandling apply(Long n, Throwable e) throws Exception { throw new TestException(); } }) .sequential() .test() .assertFailure(CompositeException.class); TestHelper.assertCompositeExceptions(ts, ArithmeticException.class, TestException.class); } @Test public void mapWrongParallelism() { TestHelper.checkInvalidParallelSubscribers( Flowable.just(1).parallel(1) .map(Functions.identity(), ParallelFailureHandling.ERROR) ); } @Test public void mapInvalidSource() { List<Throwable> errors = TestHelper.trackPluginErrors(); try { new ParallelInvalid() .map(Functions.identity(), ParallelFailureHandling.ERROR) .sequential() .test(); TestHelper.assertUndeliverable(errors, 0, TestException.class); } finally { RxJavaPlugins.reset(); } } @Test public void mapFailWithErrorConditional() { Flowable.range(0, 2) .parallel(1) .map(new Function<Integer, Integer>() { @Override public Integer apply(Integer v) throws Exception { return 1 / v; } }, ParallelFailureHandling.ERROR) .filter(Functions.alwaysTrue()) .sequential() .test() .assertFailure(ArithmeticException.class); } @Test public void mapFailWithStopConditional() { Flowable.range(0, 2) .parallel(1) .map(new Function<Integer, Integer>() { @Override public Integer apply(Integer v) throws Exception { return 1 / v; } }, 
ParallelFailureHandling.STOP) .filter(Functions.alwaysTrue()) .sequential() .test() .assertResult(); } @Test public void mapFailWithRetryConditional() { Flowable.range(0, 2) .parallel(1) .map(new Function<Integer, Integer>() { int count; @Override public Integer apply(Integer v) throws Exception { if (count++ == 1) { return -1; } return 1 / v; } }, ParallelFailureHandling.RETRY) .filter(Functions.alwaysTrue()) .sequential() .test() .assertResult(-1, 1); } @Test public void mapFailWithRetryLimitedConditional() { Flowable.range(0, 2) .parallel(1) .map(new Function<Integer, Integer>() { @Override public Integer apply(Integer v) throws Exception { return 1 / v; } }, new BiFunction<Long, Throwable, ParallelFailureHandling>() { @Override public ParallelFailureHandling apply(Long n, Throwable e) throws Exception { return n < 5 ? ParallelFailureHandling.RETRY : ParallelFailureHandling.SKIP; } }) .filter(Functions.alwaysTrue()) .sequential() .test() .assertResult(1); } @Test public void mapFailWithSkipConditional() { Flowable.range(0, 2) .parallel(1) .map(new Function<Integer, Integer>() { @Override public Integer apply(Integer v) throws Exception { return 1 / v; } }, ParallelFailureHandling.SKIP) .filter(Functions.alwaysTrue()) .sequential() .test() .assertResult(1); } @SuppressWarnings("unchecked") @Test public void mapFailHandlerThrowsConditional() { TestSubscriber<Integer> ts = Flowable.range(0, 2) .parallel(1) .map(new Function<Integer, Integer>() { @Override public Integer apply(Integer v) throws Exception { return 1 / v; } }, new BiFunction<Long, Throwable, ParallelFailureHandling>() { @Override public ParallelFailureHandling apply(Long n, Throwable e) throws Exception { throw new TestException(); } }) .filter(Functions.alwaysTrue()) .sequential() .test() .assertFailure(CompositeException.class); TestHelper.assertCompositeExceptions(ts, ArithmeticException.class, TestException.class); } @Test public void mapWrongParallelismConditional() { 
TestHelper.checkInvalidParallelSubscribers( Flowable.just(1).parallel(1) .map(Functions.identity(), ParallelFailureHandling.ERROR) .filter(Functions.alwaysTrue()) ); } @Test public void mapInvalidSourceConditional() { List<Throwable> errors = TestHelper.trackPluginErrors(); try { new ParallelInvalid() .map(Functions.identity(), ParallelFailureHandling.ERROR) .filter(Functions.alwaysTrue()) .sequential() .test(); TestHelper.assertUndeliverable(errors, 0, TestException.class); } finally { RxJavaPlugins.reset(); } } }
/********************************************************************************** * $URL: https://source.sakaiproject.org/svn/sam/tags/sakai-10.1/samigo-app/src/java/org/sakaiproject/tool/assessment/ui/bean/evaluation/QuestionScoresBean.java $ * $Id: QuestionScoresBean.java 305964 2014-02-14 01:05:35Z ktsao@stanford.edu $ *********************************************************************************** * * Copyright (c) 2004, 2005, 2006, 2007, 2008, 2009 The Sakai Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ECL-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* **********************************************************************************/ package org.sakaiproject.tool.assessment.ui.bean.evaluation; import java.io.Serializable; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import javax.faces.event.ActionEvent; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.sakaiproject.jsf.model.PhaseAware; import org.sakaiproject.tool.assessment.business.entity.RecordingData; import org.sakaiproject.tool.assessment.data.dao.assessment.AssessmentAccessControl; import org.sakaiproject.tool.assessment.data.ifc.assessment.PublishedAssessmentIfc; import org.sakaiproject.tool.assessment.ui.bean.util.Validator; import org.sakaiproject.tool.assessment.ui.listener.evaluation.QuestionScoreListener; import org.sakaiproject.tool.assessment.ui.listener.util.ContextUtil; import org.sakaiproject.tool.assessment.util.AttachmentUtil; import org.sakaiproject.util.ResourceLoader; /** * <p>Description: class form for evaluating question scores</p> * */ public class QuestionScoresBean implements Serializable, PhaseAware { private String assessmentId; private String publishedId; /** Use serialVersionUID for interoperability. 
*/ private final static long serialVersionUID = 5517587781720762296L; public static final String SHOW_SA_RATIONALE_RESPONSES_INLINE = "2"; public static final String SHOW_SA_RATIONALE_RESPONSES_POPUP = "1"; private String assessmentName; private String itemName; private String partName; private String itemId; private String anonymous; private String groupName; private String maxScore; private Collection agents; //private Collection sortedAgents; private Collection sections; private Collection deliveryItem; private String score; private String discount; private String answer; private String questionScoreComments; //private String sortProperty; private String lateHandling; // read-only property set for UI late handling private String dueDate; private String sortType; private boolean sortAscending = true; private String roleSelection; private String allSubmissions; private RecordingData recordingData; private String totalPeople; private String typeId; private HashMap scoresByItem; private static Log log = LogFactory.getLog(QuestionScoresBean.class); //private String selectedSectionFilterValue = TotalScoresBean.ALL_SECTIONS_SELECT_VALUE; private String selectedSectionFilterValue = null; private String selectedSARationaleView =SHOW_SA_RATIONALE_RESPONSES_POPUP; private ArrayList allAgents; private boolean haveModelShortAnswer; //Paging. private int firstScoreRow; private int maxDisplayedScoreRows; private int scoreDataRows; private int audioMaxDisplayedScoreRows; private int othersMaxDisplayedScoreRows; private boolean hasAudioMaxDisplayedScoreRowsChanged; //Searching private String searchString; private String defaultSearchString; private Map userIdMap; private HashMap agentResultsByItemGradingId; private boolean isAnyItemGradingAttachmentListModified; private Boolean releasedToGroups = null; /** * Creates a new QuestionScoresBean object. 
*/ public QuestionScoresBean() { log.debug("Creating a new QuestionScoresBean"); resetFields(); } protected void init() { defaultSearchString = ContextUtil.getLocalizedString("org.sakaiproject.tool.assessment.bundle.EvaluationMessages", "search_default_student_search_string"); if (searchString == null) { searchString = defaultSearchString; } // Get allAgents only at the first time if (allAgents == null) { allAgents = getAllAgents(); } ArrayList matchingAgents; if (isFilteredSearch()) { matchingAgents = findMatchingAgents(searchString); } else { matchingAgents = allAgents; } scoreDataRows = matchingAgents.size(); ArrayList newAgents = null; if (maxDisplayedScoreRows == 0) { newAgents = matchingAgents; } else { int nextPageRow = Math.min(firstScoreRow + maxDisplayedScoreRows, scoreDataRows); newAgents = new ArrayList(matchingAgents.subList(firstScoreRow, nextPageRow)); log.debug("init(): subList " + firstScoreRow + ", " + nextPageRow); } agents = newAgents; } // Following three methods are for interface PhaseAware public void endProcessValidators() { log.debug("endProcessValidators"); } public void endProcessUpdates() { log.debug("endProcessUpdates"); } public void startRenderResponse() { log.debug("startRenderResponse"); init(); } /** * get assessment name * * @return the name */ public String getAssessmentName() { return Validator.check(assessmentName, "N/A"); } /** * set assessment name * * @param passessmentName the name */ public void setAssessmentName(String passessmentName) { assessmentName = passessmentName; } /** * get part name * * @return the name */ public String getPartName() { return Validator.check(partName, "N/A"); } /** * set part name * * @param ppartName the name */ public void setPartName(String ppartName) { partName = ppartName; } /** * get item name * * @return the name */ public String getItemName() { return Validator.check(itemName, "N/A"); } /** * set item name * * @param pitemName the name */ public void setItemName(String pitemName) { 
itemName = pitemName; } /** * get item id * * @return the id */ public String getItemId() { return Validator.check(itemId, "1"); } /** * set item id * * @param pitemId the id */ public void setItemId(String pitemId) { itemId = pitemId; } /** * get assessment id * * @return the assessment id */ public String getAssessmentId() { return Validator.check(assessmentId, "0"); } /** * set assessment id * * @param passessmentId the id */ public void setAssessmentId(String passessmentId) { assessmentId = passessmentId; } /** * get published id * * @return the published id */ public String getPublishedId() { return Validator.check(publishedId, "0"); } /** * set published id * * @param passessmentId the id */ public void setPublishedId(String ppublishedId) { publishedId = ppublishedId; } /** * Is this anonymous grading? * * @return anonymous grading? true or false */ public String getAnonymous() { return Validator.check(anonymous, "false"); } /** * Set switch if this is anonymous grading. * * @param panonymous anonymous grading? 
true or false */ public void setAnonymous(String panonymous) { anonymous = panonymous; } /** * Get the group name * @return group name */ public String getGroupName() { return Validator.check(groupName, "N/A"); } /** * set the group name * * @param pgroupName the name */ public void setGroupName(String pgroupName) { groupName = pgroupName; } /** * get the max score * * @return the max score */ public String getMaxScore() { return Validator.check(maxScore, "N/A"); } /** * set max score * * @param pmaxScore set the max score */ public void setMaxScore(String pmaxScore) { maxScore = pmaxScore; } /** * get the max Point * * @return the max point */ public String getMaxPoint() { ResourceLoader rb=new ResourceLoader("org.sakaiproject.tool.assessment.bundle.EvaluationMessages"); try{ if (Double.parseDouble(this.getMaxScore())==1.0) return this.getMaxScore()+ " " + rb.getString("point"); else return this.getMaxScore()+ " " + rb.getString("points"); } catch(NumberFormatException e){ return this.getMaxScore()+ " " + rb.getString("point"); } } /** * get an agent result collection * * @return the collection */ public Collection getAgents() { if (agents == null) return new ArrayList(); return agents; } /** * set the agent collection * * @param pagents the collection */ public void setAgents(Collection pagents) { agents = pagents; } /** * get a list of sections * * @return the collection */ public Collection getSections() { if (sections == null) return new ArrayList(); return sections; } /** * set the section list * * @param psections the collection */ public void setSections(Collection psections) { sections = psections; } /** * get the item to display * * @return the collection */ public Collection getDeliveryItem() { if (deliveryItem == null) return new ArrayList(); return deliveryItem; } /** * set the delivery item * * @param pitem the collection */ public void setDeliveryItem(Collection pitem) { deliveryItem = pitem; } /** This is a read-only calculated property. 
* @return list of uppercase student initials */ public String getAgentInitials() { Collection c = getAgents(); StringBuilder initialsbuf = new StringBuilder(); if (c.isEmpty()) { return ""; } Iterator it = c.iterator(); while (it.hasNext()) { try { AgentResults ar = (AgentResults) it.next(); String initial = ar.getLastInitial(); initialsbuf.append(initial); } catch (Exception ex) { log.warn(ex.getMessage()); // if there is any problem, we skip, and go on } } String initials = initialsbuf.toString(); return initials.toUpperCase(); } /** * get agent resutls as an array * * @return the array */ public Object[] getAgentArray() { if (agents == null) return new Object[0]; return agents.toArray(); } /** * get the total number of students for this assessment * * @return the number */ public String getTotalPeople() { return Validator.check(totalPeople, "N/A"); } /** * set the total number of people * * @param ptotalPeople the total */ public void setTotalPeople(String ptotalPeople) { totalPeople = ptotalPeople; } /** * * @return the score */ public String getScore() { return Validator.check(score, "N/A"); } /** * set the score * * @param pScore the score */ public void setScore(String pScore) { score = pScore; } /** * * @return the discount */ public String getDiscount() { return Validator.check(discount, "N/A"); } /** * set the discount * * @param pDiscount the discount */ public void setDiscount(String pDiscount) { discount = pDiscount; } /** * get the answer text * * @return the answer text */ public String getAnswer() { return Validator.check(answer, "N/A"); } /** * set the answer text * * @param pAnswertext the answer text */ public void setAnswer(String pAnswertext) { answer = pAnswertext; } /** * get comments * * @return the comments */ public String getQuestionScoreComments() { return Validator.check(questionScoreComments, ""); } /** * set comments for question score * * @param pQuestionScoreComments the comments */ public void setQuestionScoreComments(String 
pQuestionScoreComments) { log.debug("setting question score comments to "+pQuestionScoreComments); questionScoreComments = pQuestionScoreComments; } /** * get late handling * * @return late handlign */ public String getLateHandling() { return Validator.check(lateHandling, "1"); } /** * set late handling * * @param plateHandling the late handling */ public void setLateHandling(String plateHandling) { lateHandling = plateHandling; } /** * get the due date * * @return the due date as a String */ public String getDueDate() { return Validator.check(dueDate, "N/A"); } /** * set due date string * * @param dateString the date string */ public void setDueDate(String dateString) { dueDate = dateString; } /** * get sort type * @return sort type */ public String getSortType() { if (!Boolean.parseBoolean(getAnonymous())) { return Validator.check(sortType, "lastName"); } else { return Validator.check(sortType, "assessmentGradingId"); } } /** * set sort type, trigger property sorts * @param psortType the type */ public void setSortType(String psortType) { sortType = psortType; } /** * is scores table sorted in ascending order * @return true if it is */ public boolean isSortAscending() { return sortAscending; } /** * * @param sortAscending is scores table sorted in ascending order */ public void setSortAscending(boolean sortAscending) { this.sortAscending = sortAscending; } /** * Is this an all submissions or, just the largest * @return true if is is, else false */ public String getAllSubmissions() { return allSubmissions; } /** * set whether all submissions are to be exposed * @param pallSubmissions true if it is */ public void setAllSubmissions(String pallSubmissions) { if (!pallSubmissions.equals(this.allSubmissions)) { this.allSubmissions = pallSubmissions; setFirstRow(0); // clear the paging when we update the search } } /** * DOCUMENTATION PENDING * * @return DOCUMENTATION PENDING */ public String getRoleSelection() { return Validator.check(roleSelection, "N/A"); } /** * 
DOCUMENTATION PENDING * * @param proleSelection DOCUMENTATION PENDING */ public void setRoleSelection(String proleSelection) { roleSelection = proleSelection; } /** * DOCUMENTATION PENDING * * @return DOCUMENTATION PENDING */ public String getTypeId() { return Validator.check(typeId, "1"); } /** * DOCUMENTATION PENDING * * @param ptypeId DOCUMENTATION PENDING */ public void setTypeId(String ptypeId) { typeId = ptypeId; } /** * reset the fields */ public void resetFields() { //agents = new ArrayList(); //setAgents(agents); } /** * encapsulates audio recording info * @return recording data */ public RecordingData getRecordingData() { return this.recordingData; } /** * encapsulates audio recording info * @param rd */ public void setRecordingData(RecordingData rd) { this.recordingData = rd; } public HashMap getScoresByItem() { return scoresByItem; } public void setScoresByItem(HashMap newScores) { scoresByItem = newScores; } public String getSelectedSectionFilterValue() { // lazy initialization if (selectedSectionFilterValue == null) { if (isReleasedToGroups()) { setSelectedSectionFilterValue(TotalScoresBean.RELEASED_SECTIONS_GROUPS_SELECT_VALUE); } else { setSelectedSectionFilterValue(TotalScoresBean.ALL_SECTIONS_SELECT_VALUE); } } return selectedSectionFilterValue; } public void setSelectedSectionFilterValue(String param ) { if (!param.equals(this.selectedSectionFilterValue)) { this.selectedSectionFilterValue = param; setFirstRow(0); // clear the paging when we update the search } } // itemScoresMap = (publishedItemId, HashMap) // = (Long publishedItemId, (Long publishedItemId, Array itemGradings)) private HashMap itemScoresMap; public void setItemScoresMap(HashMap itemScoresMap){ this.itemScoresMap = itemScoresMap; } public HashMap getItemScoresMap(){ return itemScoresMap; } private PublishedAssessmentIfc publishedAssessment; public void setPublishedAssessment(PublishedAssessmentIfc publishedAssessment){ this.publishedAssessment = publishedAssessment; } public 
PublishedAssessmentIfc getPublishedAssessment(){ return publishedAssessment; } public String getSelectedSARationaleView() { return selectedSARationaleView; } public void setSelectedSARationaleView(String selectedSARationaleView) { this.selectedSARationaleView = selectedSARationaleView; } public int getFirstRow() { return firstScoreRow; } public void setFirstRow(int firstRow) { firstScoreRow = firstRow; } public int getMaxDisplayedRows() { return maxDisplayedScoreRows; } public void setMaxDisplayedRows(int maxDisplayedRows) { maxDisplayedScoreRows = maxDisplayedRows; } public int getAudioMaxDisplayedScoreRows() { return audioMaxDisplayedScoreRows; } public void setAudioMaxDisplayedScoreRows(int audioMaxDisplayedRows) { audioMaxDisplayedScoreRows = audioMaxDisplayedRows; } public int getOtherMaxDisplayedScoreRows() { return othersMaxDisplayedScoreRows; } public void setOtherMaxDisplayedScoreRows(int otherMaxDisplayedRows) { othersMaxDisplayedScoreRows = otherMaxDisplayedRows; } public boolean getHasAudioMaxDisplayedScoreRowsChanged() { return hasAudioMaxDisplayedScoreRowsChanged; } public void setHasAudioMaxDisplayedScoreRowsChanged(boolean hasAudioMaxDisplayedRowsChanged) { hasAudioMaxDisplayedScoreRowsChanged = hasAudioMaxDisplayedRowsChanged; } public int getDataRows() { return scoreDataRows; } public void setAllAgents(ArrayList allAgents) { this.allAgents = allAgents; } public ArrayList getAllAgents() { String publishedId = ContextUtil.lookupParam("publishedId"); QuestionScoreListener questionScoreListener = new QuestionScoreListener(); if (!questionScoreListener.questionScores(publishedId, this, false)) { throw new RuntimeException("failed to call questionScores."); } return allAgents; } public String getSearchString() { return searchString; } public void setSearchString(String searchString) { if (StringUtils.trimToNull(searchString) == null) { searchString = defaultSearchString; } if (!StringUtils.equals(searchString, this.searchString)) { 
log.debug("setSearchString " + searchString); this.searchString = searchString; setFirstRow(0); // clear the paging when we update the search } } public void search(ActionEvent event) { // We don't need to do anything special here, since init will handle the search log.debug("search"); } public void clear(ActionEvent event) { log.debug("clear"); setSearchString(null); } private boolean isFilteredSearch() { return !StringUtils.equals(searchString, defaultSearchString); } public ArrayList findMatchingAgents(final String pattern) { ArrayList filteredList = new ArrayList(); // name1 example: John Doe StringBuilder name1; // name2 example: Doe, John StringBuilder name2; for(Iterator iter = allAgents.iterator(); iter.hasNext();) { AgentResults result = (AgentResults)iter.next(); // name1 example: John Doe name1 = new StringBuilder(result.getFirstName()); name1.append(" "); name1.append(result.getLastName()); // name2 example: Doe, John name2 = new StringBuilder(result.getLastName()); name2.append(", "); name2.append(result.getFirstName()); if (result.getFirstName().toLowerCase().startsWith(pattern.toLowerCase()) || result.getLastName().toLowerCase().startsWith(pattern.toLowerCase()) || result.getAgentEid().toLowerCase().startsWith(pattern.toLowerCase()) || name1.toString().toLowerCase().startsWith(pattern.toLowerCase()) || name2.toString().toLowerCase().startsWith(pattern.toLowerCase())) { filteredList.add(result); } } return filteredList; } public boolean getHaveModelShortAnswer() { return haveModelShortAnswer; } public void setHaveModelShortAnswer(boolean haveModelShortAnswer) { this.haveModelShortAnswer = haveModelShortAnswer; } public boolean isReleasedToGroups() { return this.getPublishedAssessment().getAssessmentAccessControl().getReleaseTo().equals(AssessmentAccessControl.RELEASE_TO_SELECTED_GROUPS); } public Map getUserIdMap() { return userIdMap; } public void setUserIdMap(Map userIdMap) { this.userIdMap = userIdMap; } public void setAttachment(Long 
itemGradingId){ List itemGradingAttachmentList = new ArrayList(); AgentResults agentResults = (AgentResults) agentResultsByItemGradingId.get(itemGradingId); if (agentResults != null) { AttachmentUtil attachmentUtil = new AttachmentUtil(); Set attachmentSet = new HashSet(); if (agentResults.getItemGradingAttachmentList() != null) { attachmentSet = new HashSet(agentResults.getItemGradingAttachmentList()); } itemGradingAttachmentList = attachmentUtil.prepareAssessmentAttachment(agentResults.getItemGrading(), attachmentSet); agentResults.setItemGradingAttachmentList(itemGradingAttachmentList); } } public HashMap getAgentResultsByItemGradingId() { return agentResultsByItemGradingId; } public void setAgentResultsByItemGradingId(HashMap agentResultsByItemGradingId) { this.agentResultsByItemGradingId = agentResultsByItemGradingId; } public boolean getIsAnyItemGradingAttachmentListModified() { return isAnyItemGradingAttachmentListModified; } public void setIsAnyItemGradingAttachmentListModified(boolean isAnyItemGradingAttachmentListModified) { this.isAnyItemGradingAttachmentListModified = isAnyItemGradingAttachmentListModified; } }
/* * Copyright (c) 2012, 2013, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ /* * Copyright (c) 2008-2012, Stephen Colebourne & Michael Nascimento Santos * * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * * Neither the name of JSR-310 nor the names of its contributors * may be used to endorse or promote products derived from this software * without specific prior written permission. 
* * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package tck.java.time.chrono; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertTrue; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.time.DateTimeException; import java.time.Duration; import java.time.LocalDate; import java.time.LocalDateTime; import java.time.LocalTime; import java.time.ZoneOffset; import java.time.chrono.ChronoLocalDateTime; import java.time.chrono.Chronology; import java.time.chrono.HijrahChronology; import java.time.chrono.IsoChronology; import java.time.chrono.JapaneseChronology; import java.time.chrono.MinguoChronology; import java.time.chrono.ThaiBuddhistChronology; import java.time.temporal.ChronoUnit; import java.time.temporal.Temporal; import java.time.temporal.TemporalAccessor; import java.time.temporal.TemporalAdjuster; import java.time.temporal.TemporalAmount; import java.time.temporal.TemporalField; import java.time.temporal.TemporalUnit; import java.time.temporal.ValueRange; import java.util.ArrayList; import java.util.List; import org.testng.Assert; import org.testng.annotations.DataProvider; import 
org.testng.annotations.Test; /** * Test assertions that must be true for all built-in chronologies. */ @Test public class TCKChronoLocalDateTime { //----------------------------------------------------------------------- // regular data factory for names and descriptions of available calendars //----------------------------------------------------------------------- @DataProvider(name = "calendars") Chronology[][] data_of_calendars() { return new Chronology[][]{ {HijrahChronology.INSTANCE}, {IsoChronology.INSTANCE}, {JapaneseChronology.INSTANCE}, {MinguoChronology.INSTANCE}, {ThaiBuddhistChronology.INSTANCE}}; } @Test(dataProvider="calendars") public void test_badWithAdjusterChrono(Chronology chrono) { LocalDate refDate = LocalDate.of(2013, 1, 1); ChronoLocalDateTime<?> cdt = chrono.date(refDate).atTime(LocalTime.NOON); for (Chronology[] clist : data_of_calendars()) { Chronology chrono2 = clist[0]; ChronoLocalDateTime<?> cdt2 = chrono2.date(refDate).atTime(LocalTime.NOON); TemporalAdjuster adjuster = new FixedAdjuster(cdt2); if (chrono != chrono2) { try { cdt.with(adjuster); Assert.fail("WithAdjuster should have thrown a ClassCastException, " + "required: " + cdt + ", supplied: " + cdt2); } catch (ClassCastException cce) { // Expected exception; not an error } } else { // Same chronology, ChronoLocalDateTime<?> result = cdt.with(adjuster); assertEquals(result, cdt2, "WithAdjuster failed to replace date"); } } } @Test(dataProvider="calendars") public void test_badPlusAdjusterChrono(Chronology chrono) { LocalDate refDate = LocalDate.of(2013, 1, 1); ChronoLocalDateTime<?> cdt = chrono.date(refDate).atTime(LocalTime.NOON); for (Chronology[] clist : data_of_calendars()) { Chronology chrono2 = clist[0]; ChronoLocalDateTime<?> cdt2 = chrono2.date(refDate).atTime(LocalTime.NOON); TemporalAmount adjuster = new FixedAdjuster(cdt2); if (chrono != chrono2) { try { cdt.plus(adjuster); Assert.fail("WithAdjuster should have thrown a ClassCastException, " + "required: " + cdt + ", 
supplied: " + cdt2); } catch (ClassCastException cce) { // Expected exception; not an error } } else { // Same chronology, ChronoLocalDateTime<?> result = cdt.plus(adjuster); assertEquals(result, cdt2, "WithAdjuster failed to replace date time"); } } } @Test(dataProvider="calendars") public void test_badMinusAdjusterChrono(Chronology chrono) { LocalDate refDate = LocalDate.of(2013, 1, 1); ChronoLocalDateTime<?> cdt = chrono.date(refDate).atTime(LocalTime.NOON); for (Chronology[] clist : data_of_calendars()) { Chronology chrono2 = clist[0]; ChronoLocalDateTime<?> cdt2 = chrono2.date(refDate).atTime(LocalTime.NOON); TemporalAmount adjuster = new FixedAdjuster(cdt2); if (chrono != chrono2) { try { cdt.minus(adjuster); Assert.fail("WithAdjuster should have thrown a ClassCastException, " + "required: " + cdt + ", supplied: " + cdt2); } catch (ClassCastException cce) { // Expected exception; not an error } } else { // Same chronology, ChronoLocalDateTime<?> result = cdt.minus(adjuster); assertEquals(result, cdt2, "WithAdjuster failed to replace date"); } } } @Test(dataProvider="calendars") public void test_badPlusTemporalUnitChrono(Chronology chrono) { LocalDate refDate = LocalDate.of(2013, 1, 1); ChronoLocalDateTime<?> cdt = chrono.date(refDate).atTime(LocalTime.NOON); for (Chronology[] clist : data_of_calendars()) { Chronology chrono2 = clist[0]; ChronoLocalDateTime<?> cdt2 = chrono2.date(refDate).atTime(LocalTime.NOON); TemporalUnit adjuster = new FixedTemporalUnit(cdt2); if (chrono != chrono2) { try { cdt.plus(1, adjuster); Assert.fail("TemporalUnit.doPlus plus should have thrown a ClassCastException" + cdt + ", can not be cast to " + cdt2); } catch (ClassCastException cce) { // Expected exception; not an error } } else { // Same chronology, ChronoLocalDateTime<?> result = cdt.plus(1, adjuster); assertEquals(result, cdt2, "WithAdjuster failed to replace date"); } } } @Test(dataProvider="calendars") public void test_badMinusTemporalUnitChrono(Chronology chrono) { 
LocalDate refDate = LocalDate.of(2013, 1, 1); ChronoLocalDateTime<?> cdt = chrono.date(refDate).atTime(LocalTime.NOON); for (Chronology[] clist : data_of_calendars()) { Chronology chrono2 = clist[0]; ChronoLocalDateTime<?> cdt2 = chrono2.date(refDate).atTime(LocalTime.NOON); TemporalUnit adjuster = new FixedTemporalUnit(cdt2); if (chrono != chrono2) { try { cdt.minus(1, adjuster); Assert.fail("TemporalUnit.doPlus minus should have thrown a ClassCastException" + cdt.getClass() + ", can not be cast to " + cdt2.getClass()); } catch (ClassCastException cce) { // Expected exception; not an error } } else { // Same chronology, ChronoLocalDateTime<?> result = cdt.minus(1, adjuster); assertEquals(result, cdt2, "WithAdjuster failed to replace date"); } } } @Test(dataProvider="calendars") public void test_badTemporalFieldChrono(Chronology chrono) { LocalDate refDate = LocalDate.of(2013, 1, 1); ChronoLocalDateTime<?> cdt = chrono.date(refDate).atTime(LocalTime.NOON); for (Chronology[] clist : data_of_calendars()) { Chronology chrono2 = clist[0]; ChronoLocalDateTime<?> cdt2 = chrono2.date(refDate).atTime(LocalTime.NOON); TemporalField adjuster = new FixedTemporalField(cdt2); if (chrono != chrono2) { try { cdt.with(adjuster, 1); Assert.fail("TemporalField doWith() should have thrown a ClassCastException" + cdt.getClass() + ", can not be cast to " + cdt2.getClass()); } catch (ClassCastException cce) { // Expected exception; not an error } } else { // Same chronology, ChronoLocalDateTime<?> result = cdt.with(adjuster, 1); assertEquals(result, cdt2, "TemporalField doWith() failed to replace date"); } } } //----------------------------------------------------------------------- // isBefore, isAfter, isEqual //----------------------------------------------------------------------- @Test(dataProvider="calendars") public void test_datetime_comparisons(Chronology chrono) { List<ChronoLocalDateTime<?>> dates = new ArrayList<>(); ChronoLocalDateTime<?> date = 
chrono.date(LocalDate.of(2013, 1, 1)).atTime(LocalTime.MIN); // Insert dates in order, no duplicates dates.add(date.minus(1, ChronoUnit.YEARS)); dates.add(date.minus(1, ChronoUnit.MONTHS)); dates.add(date.minus(1, ChronoUnit.WEEKS)); dates.add(date.minus(1, ChronoUnit.DAYS)); dates.add(date.minus(1, ChronoUnit.HOURS)); dates.add(date.minus(1, ChronoUnit.MINUTES)); dates.add(date.minus(1, ChronoUnit.SECONDS)); dates.add(date.minus(1, ChronoUnit.NANOS)); dates.add(date); dates.add(date.plus(1, ChronoUnit.NANOS)); dates.add(date.plus(1, ChronoUnit.SECONDS)); dates.add(date.plus(1, ChronoUnit.MINUTES)); dates.add(date.plus(1, ChronoUnit.HOURS)); dates.add(date.plus(1, ChronoUnit.DAYS)); dates.add(date.plus(1, ChronoUnit.WEEKS)); dates.add(date.plus(1, ChronoUnit.MONTHS)); dates.add(date.plus(1, ChronoUnit.YEARS)); // Check these dates against the corresponding dates for every calendar for (Chronology[] clist : data_of_calendars()) { List<ChronoLocalDateTime<?>> otherDates = new ArrayList<>(); Chronology chrono2 = clist[0]; for (ChronoLocalDateTime<?> d : dates) { otherDates.add(chrono2.date(d).atTime(d.toLocalTime())); } // Now compare the sequence of original dates with the sequence of converted dates for (int i = 0; i < dates.size(); i++) { ChronoLocalDateTime<?> a = dates.get(i); for (int j = 0; j < otherDates.size(); j++) { ChronoLocalDateTime<?> b = otherDates.get(j); int cmp = ChronoLocalDateTime.timeLineOrder().compare(a, b); if (i < j) { assertTrue(cmp < 0, a + " compare " + b); assertEquals(a.isBefore(b), true, a + " isBefore " + b); assertEquals(a.isAfter(b), false, a + " isAfter " + b); assertEquals(a.isEqual(b), false, a + " isEqual " + b); } else if (i > j) { assertTrue(cmp > 0, a + " compare " + b); assertEquals(a.isBefore(b), false, a + " isBefore " + b); assertEquals(a.isAfter(b), true, a + " isAfter " + b); assertEquals(a.isEqual(b), false, a + " isEqual " + b); } else { assertTrue(cmp == 0, a + " compare " + b); assertEquals(a.isBefore(b), false, a + 
" isBefore " + b); assertEquals(a.isAfter(b), false, a + " isAfter " + b); assertEquals(a.isEqual(b), true, a + " isEqual " + b); } } } } } //----------------------------------------------------------------------- @Test(dataProvider="calendars") public void test_from_TemporalAccessor(Chronology chrono) { LocalDateTime refDateTime = LocalDateTime.of(2013, 1, 1, 12, 30); ChronoLocalDateTime<?> dateTime = chrono.localDateTime(refDateTime); ChronoLocalDateTime<?> test1 = ChronoLocalDateTime.from(dateTime); assertEquals(test1, dateTime); ChronoLocalDateTime<?> test2 = ChronoLocalDateTime.from(dateTime.atZone(ZoneOffset.UTC)); assertEquals(test2, dateTime); } @Test(expectedExceptions = DateTimeException.class) public void test_from_TemporalAccessor_dateOnly() { ChronoLocalDateTime.from(LocalDate.of(2013, 1, 1)); } @Test(expectedExceptions = DateTimeException.class) public void test_from_TemporalAccessor_timeOnly() { ChronoLocalDateTime.from(LocalTime.of(12, 30)); } @Test(expectedExceptions = NullPointerException.class) public void test_from_TemporalAccessor_null() { ChronoLocalDateTime.from(null); } //----------------------------------------------------------------------- @Test(dataProvider="calendars") public void test_getChronology(Chronology chrono) { ChronoLocalDateTime<?> test = chrono.localDateTime(LocalDateTime.of(2010, 6, 30, 11, 30)); assertEquals(test.getChronology(), chrono); } //----------------------------------------------------------------------- /** * FixedAdjusted returns a fixed Temporal in all adjustments. * Construct an adjuster with the Temporal that should be returned from adjust. 
*/ static class FixedAdjuster implements TemporalAdjuster, TemporalAmount { private Temporal datetime; FixedAdjuster(Temporal datetime) { this.datetime = datetime; } @Override public Temporal adjustInto(Temporal ignore) { return datetime; } @Override public Temporal addTo(Temporal ignore) { return datetime; } @Override public Temporal subtractFrom(Temporal ignore) { return datetime; } @Override public long get(TemporalUnit unit) { throw new UnsupportedOperationException("Not supported yet."); } @Override public List<TemporalUnit> getUnits() { throw new UnsupportedOperationException("Not supported yet."); } } /** * FixedTemporalUnit returns a fixed Temporal in all adjustments. * Construct an FixedTemporalUnit with the Temporal that should be returned from addTo. */ static class FixedTemporalUnit implements TemporalUnit { private Temporal temporal; FixedTemporalUnit(Temporal temporal) { this.temporal = temporal; } @Override public Duration getDuration() { throw new UnsupportedOperationException("Not supported yet."); } @Override public boolean isDurationEstimated() { throw new UnsupportedOperationException("Not supported yet."); } @Override public boolean isDateBased() { return false; } @Override public boolean isTimeBased() { return false; } @Override public boolean isSupportedBy(Temporal temporal) { throw new UnsupportedOperationException("Not supported yet."); } @SuppressWarnings("unchecked") @Override public <R extends Temporal> R addTo(R temporal, long amount) { return (R) this.temporal; } @Override public long between(Temporal temporal1, Temporal temporal2) { throw new UnsupportedOperationException("Not supported yet."); } @Override public String toString() { return "FixedTemporalUnit"; } } /** * FixedTemporalField returns a fixed Temporal in all adjustments. * Construct an FixedTemporalField with the Temporal that should be returned from adjustInto. 
*/ static class FixedTemporalField implements TemporalField { private Temporal temporal; FixedTemporalField(Temporal temporal) { this.temporal = temporal; } @Override public TemporalUnit getBaseUnit() { throw new UnsupportedOperationException("Not supported yet."); } @Override public TemporalUnit getRangeUnit() { throw new UnsupportedOperationException("Not supported yet."); } @Override public ValueRange range() { throw new UnsupportedOperationException("Not supported yet."); } @Override public boolean isDateBased() { return false; } @Override public boolean isTimeBased() { return false; } @Override public boolean isSupportedBy(TemporalAccessor temporal) { throw new UnsupportedOperationException("Not supported yet."); } @Override public ValueRange rangeRefinedBy(TemporalAccessor temporal) { throw new UnsupportedOperationException("Not supported yet."); } @Override public long getFrom(TemporalAccessor temporal) { throw new UnsupportedOperationException("Not supported yet."); } @SuppressWarnings("unchecked") @Override public <R extends Temporal> R adjustInto(R temporal, long newValue) { return (R) this.temporal; } @Override public String toString() { return "FixedTemporalField"; } } }
/* * Copyright (c) 2015. Rick Hightower, Geoff Chandler * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * QBit - The Microservice lib for Java : JSON, WebSocket, REST. Be The Web! */ package io.advantageous.qbit.queue; import io.advantageous.boon.core.Lists; import org.junit.Test; import java.util.List; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import static io.advantageous.boon.core.Exceptions.die; import static io.advantageous.boon.core.IO.puts; import static io.advantageous.boon.core.Sys.sleep; /** * created by Richard on 8/11/14. 
*/ public class BasicQueueTest { boolean ok; @Test public void testUsingListener() { final QueueBuilder builder = new QueueBuilder().setName("test").setPollWait(1000).setBatchSize(10); Queue<String> queue = builder.build(); //new BasicQueue<>("test", 1000, TimeUnit.MILLISECONDS, 10); final int[] counter = new int[1]; queue.startListener(new ReceiveQueueListener<String>() { @Override public void receive(String item) { puts(item); synchronized (counter) { counter[0]++; } } @Override public void empty() { puts("Queue is empty"); } @Override public void limit() { puts("Batch size limit is reached"); } @Override public void shutdown() { puts("Queue is shut down"); } @Override public void idle() { puts("Queue is idle"); } }); final SendQueue<String> sendQueue = queue.sendQueue(); for (int index = 0; index < 10; index++) { sendQueue.send("item" + index); } sendQueue.flushSends(); sleep(100); synchronized (counter) { puts("1", counter[0]); } for (int index = 0; index < 100; index++) { sendQueue.send("item2nd" + index); } sendQueue.flushSends(); sleep(100); synchronized (counter) { puts("2", counter[0]); } for (int index = 0; index < 5; index++) { sleep(100); sendQueue.send("item3rd" + index); } sendQueue.flushSends(); sleep(100); synchronized (counter) { puts("3", counter[0]); } sendQueue.sendMany("hello", "how", "are", "you"); sleep(100); synchronized (counter) { puts("4", counter[0]); } List<String> list = Lists.linkedList("Good", "Thanks"); sendQueue.sendBatch(list); sleep(100); synchronized (counter) { puts("1", counter[0]); } sleep(100); synchronized (counter) { ok = counter[0] == 121 || die("Crap not 121", counter[0]); } queue.stop(); } @Test public void testUsingInput() throws Exception { final QueueBuilder builder = new QueueBuilder().setName("test").setPollWait(1000).setBatchSize(10); Queue<String> queue = builder.build(); final int count[] = new int[1]; Thread writer = new Thread(new Runnable() { @Override public void run() { final SendQueue<String> sendQueue = 
queue.sendQueue(); for (int index = 0; index < 1000; index++) { sendQueue.send("item" + index); } sendQueue.flushSends(); } }); Thread reader = new Thread(new Runnable() { @Override public void run() { ReceiveQueue<String> receiveQueue = queue.receiveQueue(); while (receiveQueue.poll() != null) { count[0]++; } } }); writer.start(); sleep(100); reader.start(); writer.join(); reader.join(); puts(count[0]); ok = count[0] == 1000 || die("count should be 1000", count[0]); } @Test public void testUsingInputTake() throws Exception { final QueueBuilder builder = new QueueBuilder().setName("test").setPollWait(1000).setBatchSize(10); Queue<String> queue = builder.build(); final AtomicLong count = new AtomicLong(); Thread reader = new Thread(new Runnable() { @Override public void run() { long cnt = 0; final ReceiveQueue<String> receiveQueue = queue.receiveQueue(); String item = receiveQueue.take(); while (item != null) { cnt++; puts(item); item = receiveQueue.take(); if (cnt >= 900) { count.set(cnt); break; } } } }); Thread writer = new Thread(new Runnable() { @Override public void run() { final SendQueue<String> sendQueue = queue.sendQueue(); for (int index = 0; index < 1000; index++) { sendQueue.send("this item " + index); } sendQueue.flushSends(); } }); writer.start(); reader.start(); writer.join(); reader.join(); puts(count.get()); ok = count.get() == 900 || die("count should be 1000", count.get()); } @Test public void testUsingInputPollWait() throws Exception { /** Build our queue. */ final QueueBuilder builder = new QueueBuilder().setName("test").setPollWait(1000).setBatchSize(10); Queue<String> queue = builder.build(); final AtomicInteger count = new AtomicInteger(); /* Create a sender queue. */ final SendQueue<String> sendQueue = queue.sendQueue(); /* Create a receiver queue. */ final ReceiveQueue<String> receiveQueue = queue.receiveQueue(); /* Create a writer thread that uses the send queue. 
*/ Thread writerThread = new Thread(() -> { for (int index = 0; index < 1000; index++) { sendQueue.send("item" + index); //It will flush every 10 or so } sendQueue.flushSends(); //We can also call flushSends so it sends what remains. }); /* Create a reader thread that consumes queue items. */ Thread readerThread = new Thread(() -> { String item = receiveQueue.pollWait(); while (item != null) { count.incrementAndGet(); item = receiveQueue.pollWait(); } }); /* Starts the threads and wait for them to end. */ writerThread.start(); readerThread.start(); /* Wait for them to end. */ writerThread.join(); readerThread.join(); puts(count); ok = count.get() == 1000 || die("count should be 1000", count.get()); } @Test public void testUsingAutoFlush() throws Exception { final QueueBuilder builder = new QueueBuilder().setName("test").setPollWait(1000).setBatchSize(20_000); final Queue<String> queue = builder.build(); final AtomicInteger count = new AtomicInteger(); final SendQueue<String> sendQueue = queue.sendQueueWithAutoFlush(50, TimeUnit.MILLISECONDS); final ReceiveQueue<String> receiveQueue = queue.receiveQueue(); sendQueue.start(); Thread writerThread = new Thread(() -> { for (int index = 0; index < 1000; index++) { sendQueue.send("item" + index); } }); Thread readerThread = new Thread(() -> { while (receiveQueue.pollWait() != null) { count.incrementAndGet(); } }); writerThread.start(); readerThread.start(); writerThread.join(); readerThread.join(); sleep(1000); //simulate a long sleep sendQueue.stop(); puts(count); ok = count.get() == 1000 || die("count should be 1000", count); } }
package com.adafruit.bluefruit.le.connect.ui.keyboard; import android.app.Activity; import android.inputmethodservice.Keyboard; import android.inputmethodservice.KeyboardView; import android.os.Build; import android.text.Editable; import android.text.InputType; import android.text.Layout; import android.view.MotionEvent; import android.view.View; import android.view.ViewGroup; import android.view.WindowManager; import android.view.inputmethod.InputMethodManager; import android.widget.EditText; import android.widget.RelativeLayout; import com.adafruit.bluefruit.le.connect.R; public class CustomKeyboard { // Keys constants private static final int kKeyDelete = -1; private static final int kKeyReturn = -2; // Data private KeyboardView mKeyboardView; private Activity mActivity; private int mCurrentKeyboardId; public CustomKeyboard(Activity activity) { mActivity = activity; // Create the keyboard view mKeyboardView = new KeyboardView(activity, null); RelativeLayout.LayoutParams params = new RelativeLayout.LayoutParams(RelativeLayout.LayoutParams.MATCH_PARENT, RelativeLayout.LayoutParams.WRAP_CONTENT); // Select a parent for the keyboard. First search for a R.id.keyboardContainer viewgroup. If not found, use the rootWindow as parent ViewGroup keyboardContainer = (ViewGroup) activity.findViewById(R.id.keyboardContainer); ViewGroup parentViewGroup; if (keyboardContainer != null) { parentViewGroup = keyboardContainer; } else { int currentapiVersion = android.os.Build.VERSION.SDK_INT; // Get the root view to add the keyboard subview ViewGroup rootView; if (currentapiVersion > Build.VERSION_CODES.KITKAT) { // Workaround for devices with softkeys. We cant not use getRootView() because the keyboard would be below the softkeys. 
rootView = (ViewGroup) activity.findViewById(android.R.id.content); } else { rootView = (ViewGroup) activity.getWindow().getDecorView().getRootView(); } // Create a dummy relative layout to align the keyboardView to the bottom ViewGroup relativeLayout = new RelativeLayout(activity); relativeLayout.setLayoutParams(new RelativeLayout.LayoutParams(RelativeLayout.LayoutParams.MATCH_PARENT, RelativeLayout.LayoutParams.MATCH_PARENT)); rootView.addView(relativeLayout); params.addRule(RelativeLayout.ALIGN_PARENT_BOTTOM); // Align to the bottom of the relativelayout parentViewGroup = relativeLayout; } mKeyboardView.setLayoutParams(params); mKeyboardView.setFocusable(true); mKeyboardView.setFocusableInTouchMode(true); mKeyboardView.setVisibility(View.GONE); parentViewGroup.addView(mKeyboardView); // Configure keyboard view mKeyboardView.setPreviewEnabled(false); mKeyboardView.setOnKeyboardActionListener(mOnKeyboardActionListener); // Hide the standard keyboard initially activity.getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_STATE_ALWAYS_HIDDEN); } public void attachToEditText(final EditText editText, int keyboardLayoutId) { // Keyboard layout is saved into the editText tag (to reuse the same keyboardView with different keyboard layouts) editText.setTag(keyboardLayoutId); // Attach custom keyboard to onFocusChange editText.setOnFocusChangeListener(new View.OnFocusChangeListener() { @Override public void onFocusChange(View view, boolean hasFocus) { if (hasFocus) { showCustomKeyboard(view); } else { hideCustomKeyboard(); } } }); // Attach custom keyboard to onClick editText.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { showCustomKeyboard(view); } }); // Fix for cursor movement (based on http://forum.xda-developers.com/showthread.php?t=2497237) editText.setOnTouchListener(new View.OnTouchListener() { @Override public boolean onTouch(View view, MotionEvent event) { if (!isCustomKeyboardVisible() || 
mCurrentKeyboardId != view.getTag()) { view.requestFocus(); showCustomKeyboard(view); } switch (event.getAction()) { case MotionEvent.ACTION_DOWN: case MotionEvent.ACTION_MOVE: EditText editText = (EditText) view; Layout layout = ((EditText) view).getLayout(); if (layout != null) { float x = event.getX() + editText.getScrollX(); int offset = layout.getOffsetForHorizontal(0, x); if (offset > 0) { if (x > layout.getLineMax(0)) editText.setSelection(offset); else editText.setSelection(offset - 1); } } break; } /* int inType = editText.getInputType(); // Backup the input type editText.setInputType(InputType.TYPE_NULL); // Disable standard keyboard editText.onTouchEvent(event); // Call native handler editText.setInputType(inType); // Restore input type */ return true; } }); // Disable suggestions editText.setInputType(editText.getInputType() | InputType.TYPE_TEXT_FLAG_NO_SUGGESTIONS); } private KeyboardView.OnKeyboardActionListener mOnKeyboardActionListener = new KeyboardView.OnKeyboardActionListener() { @Override public void onKey(int primaryCode, int[] keyCodes) { View focusCurrent = mActivity.getWindow().getCurrentFocus(); if (focusCurrent != null && (focusCurrent instanceof EditText)) { EditText edittext = (EditText) focusCurrent; Editable editable = edittext.getText(); int start = edittext.getSelectionStart(); if (primaryCode == kKeyDelete) { if (editable != null && start > 0) editable.delete(start - 1, start); } else if (primaryCode == kKeyReturn) { View nextFocusView = edittext.focusSearch(View.FOCUS_DOWN); if (nextFocusView != null && (nextFocusView instanceof EditText)) { nextFocusView.requestFocus(); } else { hideCustomKeyboard(); } } else { editable.insert(start, Character.toString((char) primaryCode)); } } } @Override public void onPress(int arg0) { } @Override public void onRelease(int primaryCode) { } @Override public void onText(CharSequence text) { } @Override public void swipeDown() { } @Override public void swipeLeft() { } @Override public void 
swipeRight() { } @Override public void swipeUp() { } }; public void hideCustomKeyboard() { mKeyboardView.setVisibility(View.GONE); mKeyboardView.setEnabled(false); } public void showCustomKeyboard(View view) { EditText editText = (EditText) view; final int keyboardId = (Integer) editText.getTag(); if (mCurrentKeyboardId != keyboardId) { Keyboard keyboard = new Keyboard(mActivity, keyboardId); mKeyboardView.setKeyboard(keyboard); mCurrentKeyboardId = keyboardId; } mKeyboardView.setVisibility(View.VISIBLE); mKeyboardView.setEnabled(true); if (view != null) { ((InputMethodManager) mActivity.getSystemService(Activity.INPUT_METHOD_SERVICE)).hideSoftInputFromWindow(view.getWindowToken(), 0); } } public boolean isCustomKeyboardVisible() { return mKeyboardView.getVisibility() == View.VISIBLE; } }
// Copyright (c) 2003 Microsoft Corporation.  All rights reserved.
// Last modified on Wed 17 September 2008 at  4:35:32 PST by lamport
//      modified on Thu Jan 10 18:41:04 PST 2002 by yuanyu

package tlc2.tool.liveness;

import java.io.IOException;

import tlc2.TLCGlobals;
import tlc2.output.EC;
import tlc2.output.MP;
import tlc2.output.StatePrinter;
import tlc2.tool.EvalException;
import tlc2.tool.TLCState;
import tlc2.tool.TLCStateInfo;
import tlc2.util.IdThread;
import tlc2.util.LongVec;
import tlc2.util.MemIntQueue;
import tlc2.util.MemIntStack;

/**
 * Worker thread for liveness checking. Each worker repeatedly claims an
 * OrderOfSolution (tableau/behavior graph pair) from LiveCheck, computes the
 * strongly connected components of the corresponding disk-backed behavior
 * graph, and checks every SCC against the possible-error models. When a
 * counterexample is found, the first worker to claim the error prints it.
 */
public class LiveWorker extends IdThread {

  // Index of the next OrderOfSolution to be handed out to a worker.
  private static int nextOOS = 0;
  // Id of the worker that found an error, or -1 while no error is found.
  private static int errFoundByThread = -1;
  // Guards errFoundByThread across all worker threads.
  private static Object workerLock = new Object();

  // The solution / disk graph / possible-error model this worker is
  // currently checking; set in run().
  private OrderOfSolution oos = null;
  private DiskGraph dg = null;
  private PossibleErrorModel pem = null;

  public LiveWorker(int id) {
    super(id);
  }

  /** Hands out the next unclaimed OrderOfSolution index, or -1 when all are taken. */
  public synchronized static int getNextOOS() {
    if (nextOOS < LiveCheck.solutions.length) {
      return nextOOS++;
    }
    return -1;
  }

  // Returns true iff an error has already been found (by any worker).
  public static boolean hasErrFound() {
    synchronized(workerLock) {
      return (errFoundByThread != -1);
    }
  }

  /**
   * Returns true iff either an error has not been found yet or the error was
   * found by this thread. Only the first thread to claim the error (and that
   * same thread on re-entry) gets true, so the counterexample is printed once.
   */
  public /* static synchronized */ boolean setErrFound() {
    synchronized(workerLock) {
      if (errFoundByThread == -1) {
        errFoundByThread = this.myGetId(); // GetId();
        return true;
      } else if (errFoundByThread == this.myGetId()) { // (* GetId()) {
        return true;
      }
      return false;
    }
  }

  /**
   * The main routine that computes strongly connected components (Tarjan-style,
   * with an explicit disk-backed stack), and checks each of them to see if it
   * contains a counterexample.
   *
   * Stack layout note: dfsStack holds records of (state fp: long, tableau idx:
   * int, location: long, lowLink: long); a negative location marks an
   * already-explored node. The push/pop order below is load-bearing.
   */
  public final void checkSccs() throws IOException {
    // Initialize this.dg:
    this.dg.makeNodePtrTbl();

    // Initialize nodeQueue with initial states:
    MemIntQueue nodeQueue = new MemIntQueue(LiveCheck.metadir, "root");
    LongVec initNodes = this.dg.getInitNodes();
    int numOfInits = initNodes.size();
    for (int j = 0; j < numOfInits; j += 2) {
      long state = initNodes.elementAt(j);
      int tidx = (int)initNodes.elementAt(j+1);
      long ptr = this.dg.getLink(state, tidx);
      if (ptr >= 0) {
        nodeQueue.enqueueLong(state);
        nodeQueue.enqueueInt(tidx);
        nodeQueue.enqueueLong(ptr);
      }
    }

    int[] eaaction = this.pem.EAAction;
    int slen = this.oos.checkState.length;
    int alen = this.oos.checkAction.length;
    MemIntStack dfsStack = new MemIntStack(LiveCheck.metadir, "dfs");
    MemIntStack comStack = new MemIntStack(LiveCheck.metadir, "com");

    // Generate the SCCs and check if they contain any "bad" cycle.
    while (nodeQueue.length() > 0) {
      long state = nodeQueue.dequeueLong();
      int tidx = nodeQueue.dequeueInt();
      long loc = nodeQueue.dequeueLong();

      // Start computing SCCs with <state, tidx> as the root node:
      dfsStack.reset();
      dfsStack.pushLong(state);
      dfsStack.pushInt(tidx);
      dfsStack.pushLong(loc);
      dfsStack.pushLong(DiskGraph.MAX_PTR);
      long newLink = DiskGraph.MAX_PTR;

      while (dfsStack.size() > 2) {
        long lowLink = dfsStack.popLong();
        long curLoc = dfsStack.popLong();
        int curTidx = dfsStack.popInt();
        long curState = dfsStack.popLong();

        if (curLoc < 0) {
          // The current node is explored iff curLoc < 0.
          long curLink = this.dg.getLink(curState, curTidx);
          if (curLink == lowLink) {
            // The states on the comStack from top to curState form an SCC.
            // Check for "bad" cycle.
            boolean isOK = this.checkComponent(curState, curTidx, comStack);
            if (!isOK) return; // counterexample found; stop this worker
          }
          // Propagate this node's lowLink to its parent's stack record:
          long plowLink = dfsStack.popLong();
          if (lowLink < plowLink) plowLink = lowLink;
          dfsStack.pushLong(plowLink);
        } else {
          // Assign newLink to curState:
          long link = this.dg.putLink(curState, curTidx, newLink);
          if (link == -1) {
            // First visit. Push curState back onto dfsStack, but make
            // curState explored (location -1):
            dfsStack.pushLong(lowLink);
            dfsStack.pushLong(curState);
            dfsStack.pushInt(curTidx);
            dfsStack.pushLong(-1);

            // Add curState to comStack:
            comStack.pushLong(curLoc);
            comStack.pushInt(curTidx);
            comStack.pushLong(curState);

            // Look at all the successors of curState:
            GraphNode gnode = this.dg.getNode(curState, curTidx, curLoc);
            int succCnt = gnode.succSize();
            long nextLowLink = newLink++;
            for (int i = 0; i < succCnt; i++) {
              long nextState = gnode.getStateFP(i);
              int nextTidx = gnode.getTidx(i);
              long nextLink = this.dg.getLink(nextState, nextTidx);
              if (nextLink >= 0) {
                if (gnode.getCheckAction(slen, alen, i, eaaction)) {
                  // Edge satisfies the EA action constraint: same SCC search.
                  if (DiskGraph.isFilePointer(nextLink)) {
                    // Unvisited: schedule for DFS.
                    dfsStack.pushLong(nextState);
                    dfsStack.pushInt(nextTidx);
                    dfsStack.pushLong(nextLink);
                  } else if (nextLink < nextLowLink) {
                    nextLowLink = nextLink;
                  }
                } else if (DiskGraph.isFilePointer(nextLink)) {
                  // Edge excluded from this SCC search; make the successor a
                  // root of a later search instead.
                  nodeQueue.enqueueLong(nextState);
                  nodeQueue.enqueueInt(nextTidx);
                  nodeQueue.enqueueLong(nextLink);
                }
              }
            }
            dfsStack.pushLong(nextLowLink);
          } else {
            // Already visited: just update the parent's lowLink.
            if (link < lowLink) lowLink = link;
            dfsStack.pushLong(lowLink);
          }
        }
      }
    }

    // After completing the checks, clean up:
    // dfsStack.cleanup();
    // comStack.cleanup();
  }

  /**
   * For currentPEM, this method checks if the current scc satisfies its AEs
   * and is fulfilling. (We know the current scc satisfies the pem's EA.) If
   * satisfiable, this pem contains a counterexample, and this method then
   * calls printTrace to print an error trace and returns false.
   *
   * @param state    fingerprint of the SCC's root state
   * @param tidx     tableau index of the SCC's root node
   * @param comStack component stack; nodes down to (state, tidx) form the SCC
   * @return true when no counterexample is contained in this component
   */
  public boolean checkComponent(long state, int tidx, MemIntStack comStack) throws IOException {
    long state1 = comStack.popLong();
    int tidx1 = comStack.popInt();
    long loc1 = comStack.popLong();

    // Simply return if the component is trivial (single node, no self-loop
    // satisfying EA):
    if (state1 == state && tidx1 == tidx && !isStuttering(state1, tidx1, loc1)) {
      this.dg.setMaxLink(state, tidx);
      return true;
    }

    // Now, we know we are working on a non-trivial component.
    // We first put all the nodes in this component in a hashtable:
    NodePtrTable com = new NodePtrTable(128, true);
    while (true) {
      // Add <state1, tidx1> into com:
      com.put(state1, tidx1, loc1);
      this.dg.setMaxLink(state1, tidx1);

      // Get the next node of the component:
      if (state == state1 && tidx == tidx1) break;

      state1 = comStack.popLong();
      tidx1 = comStack.popInt();
      loc1 = comStack.popLong();
    }

    // Check this component:
    int slen = this.oos.checkState.length;
    int alen = this.oos.checkAction.length;
    int aeslen = this.pem.AEState.length;
    int aealen = this.pem.AEAction.length;
    int plen = this.oos.promises.length;
    boolean[] AEStateRes = new boolean[aeslen];
    boolean[] AEActionRes = new boolean[aealen];
    boolean[] promiseRes = new boolean[plen];

    // Walk every node (and every intra-component edge) of the component,
    // accumulating which AE-state, AE-action, and promise obligations are met:
    int tsz = com.getSize();
    for (int ci = 0; ci < tsz; ci++) {
      int[] nodes = com.getNodesByLoc(ci);
      if (nodes == null) continue;

      state1 = NodePtrTable.getKey(nodes);
      for (int nidx = 2; nidx < nodes.length; nidx += 3) {
        tidx1 = NodePtrTable.getTidx(nodes, nidx);
        loc1 = NodePtrTable.getElem(nodes, nidx);

        GraphNode curNode = this.dg.getNode(state1, tidx1, loc1);

        // Check AEState:
        for (int i = 0; i < aeslen; i++) {
          if (!AEStateRes[i]) {
            int idx = this.pem.AEState[i];
            AEStateRes[i] = curNode.getCheckState(idx);
          }
        }

        // Check AEAction (only edges staying inside the component count):
        int succCnt = curNode.succSize();
        for (int i = 0; i < succCnt; i++) {
          long nextState = curNode.getStateFP(i);
          int nextTidx = curNode.getTidx(i);
          if (com.getLoc(nextState, nextTidx) != -1) {
            for (int j = 0; j < aealen; j++) {
              if (!AEActionRes[j]) {
                int idx = this.pem.AEAction[j];
                AEActionRes[j] = curNode.getCheckAction(slen, alen, i, idx);
              }
            }
          }
        }

        // Check that the component is fulfilling. (See MP page 453.)
        // Note that the promises are precomputed and stored in oos.
        for (int i = 0; i < plen; i++) {
          LNEven promise = this.oos.promises[i];
          TBPar par = curNode.getTNode(this.oos.tableau).getPar();
          if (par.isFulfilling(promise)) {
            promiseRes[i] = true;
          }
        }
      }
    }

    // We find a counterexample iff all three conditions are satisfied;
    // any unmet obligation means this component is harmless:
    for (int i = 0; i < aeslen; i++) {
      if (!AEStateRes[i]) return true;
    }
    for (int i = 0; i < aealen; i++) {
      if (!AEActionRes[i]) return true;
    }
    for (int i = 0; i < plen; i++) {
      if (!promiseRes[i]) return true;
    }

    // This component must contain a counter-example because all three
    // conditions are satisfied. So, print a counter-example (but only if no
    // other worker got there first)!
    if (setErrFound()) {
      this.printTrace(state, tidx, com);
    }
    return false;
  }

  /* Check if the node <state, tidx> stutters, i.e. has a self-loop that
   * satisfies the possible-error model's EA action constraint. */
  private boolean isStuttering(long state, int tidx, long loc) throws IOException {
    int slen = this.oos.checkState.length;
    int alen = this.oos.checkAction.length;

    GraphNode gnode = this.dg.getNode(state, tidx, loc);
    int succCnt = gnode.succSize();
    for (int i = 0; i < succCnt; i++) {
      long nextState = gnode.getStateFP(i);
      int nextTidx = gnode.getTidx(i);
      if (state == nextState && tidx == nextTidx) {
        return gnode.getCheckAction(slen, alen, i, this.pem.EAAction);
      }
    }
    return false;
  }

  /**
   * Print out the error state trace. The method first generates a "bad" cycle
   * from the current scc, and then generates a prefix path from some initial
   * state to the "bad" cycle in the state graph. The prefix path and the
   * "bad" cycle together form a counter-example.
   */
  private void printTrace(long state, int tidx, NodePtrTable nodeTbl) throws IOException {
    MP.printError(EC.TLC_TEMPORAL_PROPERTY_VIOLATED);
    MP.printError(EC.TLC_COUNTER_EXAMPLE);

    // First, find a "bad" cycle from the "bad" scc: walk edges inside the
    // component until every AE-state, AE-action and promise obligation has
    // been witnessed at least once (cnt counts the still-unmet obligations).
    int slen = this.oos.checkState.length;
    int alen = this.oos.checkAction.length;
    boolean[] AEStateRes = new boolean[this.pem.AEState.length];
    boolean[] AEActionRes = new boolean[this.pem.AEAction.length];
    boolean[] promiseRes = new boolean[this.oos.promises.length];
    int cnt = AEStateRes.length + AEActionRes.length + promiseRes.length;

    MemIntStack cycleStack = new MemIntStack(LiveCheck.metadir, "cycle");

    // Mark state as visited:
    int[] nodes = nodeTbl.getNodes(state);
    int tloc = NodePtrTable.getIdx(nodes, tidx);
    long ptr = NodePtrTable.getElem(nodes, tloc);
    NodePtrTable.setSeen(nodes, tloc);

    GraphNode curNode = this.dg.getNode(state, tidx, ptr);
    while (cnt > 0) {
      int cnt0 = cnt;

      _next:
      while (true) {
        // Check AEState:
        for (int i = 0; i < this.pem.AEState.length; i++) {
          int idx = this.pem.AEState[i];
          if (!AEStateRes[i] && curNode.getCheckState(idx)) {
            AEStateRes[i] = true;
            cnt--;
          }
        }

        // Check if the component is fulfilling. (See MP page 453.)
        // Note that the promises are precomputed and stored in oos.
        for (int i = 0; i < this.oos.promises.length; i++) {
          LNEven promise = this.oos.promises[i];
          TBPar par = curNode.getTNode(this.oos.tableau).getPar();
          if (!promiseRes[i] && par.isFulfilling(promise)) {
            promiseRes[i] = true;
            cnt--;
          }
        }
        if (cnt <= 0) break;

        // Check AEAction:
        long nextState1 = 0, nextState2 = 0;
        int nextTidx1 = 0, nextTidx2 = 0;
        int tloc1 = -1, tloc2 = -1;
        int[] nodes1 = null, nodes2 = null;
        boolean hasUnvisitedSucc = false;
        int cnt1 = cnt;
        int succCnt = curNode.succSize();
        for (int i = 0; i < succCnt; i++) {
          long nextState = curNode.getStateFP(i);
          int nextTidx = curNode.getTidx(i);
          nodes = nodeTbl.getNodes(nextState);
          if (nodes != null) {
            tloc = NodePtrTable.getIdx(nodes, nextTidx);
            if (tloc != -1) {
              // <nextState, nextTidx> is in nodeTbl.
              nextState1 = nextState;
              nextTidx1 = nextTidx;
              tloc1 = tloc;
              nodes1 = nodes;
              for (int j = 0; j < this.pem.AEAction.length; j++) {
                int idx = this.pem.AEAction[j];
                if (!AEActionRes[j] && curNode.getCheckAction(slen, alen, i, idx)) {
                  AEActionRes[j] = true;
                  cnt--;
                }
              }
            }
          }

          if (cnt < cnt1) {
            // This edge witnessed a new AE action.
            // Take curNode -> <nextState, nextTidx>:
            cycleStack.pushInt(curNode.tindex);
            cycleStack.pushLong(curNode.stateFP);
            long nextPtr = NodePtrTable.getPtr(NodePtrTable.getElem(nodes, tloc));
            curNode = this.dg.getNode(nextState, nextTidx, nextPtr);
            nodeTbl.resetElems();
            break _next;
          }

          if (nodes != null && tloc != -1 && !NodePtrTable.isSeen(nodes, tloc)) {
            // <nextState, nextTidx> is an unvisited successor of curNode:
            hasUnvisitedSucc = true;
            nextState2 = nextState;
            nextTidx2 = nextTidx;
            tloc2 = tloc;
            nodes2 = nodes;
          }
        }

        if (cnt < cnt0) {
          // Some new AE state or promise was witnessed at curNode.
          // Take curNode -> <nextState1, nextTidx1>:
          cycleStack.pushInt(curNode.tindex);
          cycleStack.pushLong(curNode.stateFP);
          long nextPtr = NodePtrTable.getPtr(NodePtrTable.getElem(nodes1, tloc1));
          curNode = this.dg.getNode(nextState1, nextTidx1, nextPtr);
          nodeTbl.resetElems();
          break;
        }

        // Backtrack if all successors of curNode have been visited
        // and no successor can reduce cnt.
        while (!hasUnvisitedSucc) {
          long curState = cycleStack.popLong();
          int curTidx = cycleStack.popInt();
          long curPtr = NodePtrTable.getPtr(nodeTbl.get(curState, curTidx));
          curNode = this.dg.getNode(curState, curTidx, curPtr);
          succCnt = curNode.succSize();
          for (int i = 0; i < succCnt; i++) {
            nextState2 = curNode.getStateFP(i);
            nextTidx2 = curNode.getTidx(i);
            nodes2 = nodeTbl.getNodes(nextState2);
            if (nodes2 != null) {
              tloc2 = NodePtrTable.getIdx(nodes2, nextTidx2);
              if (tloc2 != -1 && !NodePtrTable.isSeen(nodes2, tloc2)) {
                hasUnvisitedSucc = true;
                break;
              }
            }
          }
        }

        // Take curNode -> <nextState2, nextTidx2>. Set nextState2 visited.
        cycleStack.pushInt(curNode.tindex);
        cycleStack.pushLong(curNode.stateFP);
        long nextPtr = NodePtrTable.getPtr(NodePtrTable.getElem(nodes2, tloc2));
        curNode = this.dg.getNode(nextState2, nextTidx2, nextPtr);
        NodePtrTable.setSeen(nodes2, tloc2);
      }
    }

    // All the conditions are satisfied. Find a path from curNode
    // to state to form a cycle. Note that:
    //  1. curNode has not been pushed on cycleStack.
    //  2. nodeTbl is trashed after this operation.
    nodeTbl.resetElems();
    LongVec postfix = new LongVec(16);
    long startState = curNode.stateFP;

    if (startState != state) {
      // BFS (by fingerprint only) inside the component back to `state`:
      MemIntQueue queue = new MemIntQueue(LiveCheck.metadir, null);
      long curState = startState;
      int ploc = -1;
      int curLoc = nodeTbl.getNodesLoc(curState);
      nodes = nodeTbl.getNodesByLoc(curLoc);
      NodePtrTable.setSeen(nodes);

      _done:
      while (true) {
        tloc = NodePtrTable.startLoc(nodes);
        while (tloc != -1) {
          int curTidx = NodePtrTable.getTidx(nodes, tloc);
          long curPtr = NodePtrTable.getPtr(NodePtrTable.getElem(nodes, tloc));
          curNode = this.dg.getNode(curState, curTidx, curPtr);
          int succCnt = curNode.succSize();

          for (int j = 0; j < succCnt; j++) {
            long nextState = curNode.getStateFP(j);
            if (nextState == state) {
              // we have found a path from startState to state; reconstruct it
              // by following the parent links recorded in nodeTbl:
              while (curState != startState) {
                postfix.addElement(curState);
                nodes = nodeTbl.getNodesByLoc(ploc);
                curState = NodePtrTable.getKey(nodes);
                ploc = NodePtrTable.getParent(nodes);
              }
              postfix.addElement(startState);
              break _done;
            }

            int[] nodes1 = nodeTbl.getNodes(nextState);
            if (nodes1 != null && !NodePtrTable.isSeen(nodes1)) {
              NodePtrTable.setSeen(nodes1);
              queue.enqueueLong(nextState);
              queue.enqueueInt(curLoc);
            }
          }
          tloc = NodePtrTable.nextLoc(nodes, tloc);
        }
        NodePtrTable.setParent(nodes, ploc);
        curState = queue.dequeueLong();
        ploc = queue.dequeueInt();
        curLoc = nodeTbl.getNodesLoc(curState);
        nodes = nodeTbl.getNodesByLoc(curLoc);
      }
    }

    // Now, print the error trace. We first construct the prefix that
    // led to the bad cycle. The nodes on prefix and cycleStack then
    // form the complete counter example.
    int stateNum = 0;
    LongVec prefix = this.dg.getPath(state);
    int plen = prefix.size();
    TLCStateInfo[] states = new TLCStateInfo[plen];

    // Recover the initial state:
    long fp = prefix.elementAt(plen-1);
    TLCStateInfo sinfo = LiveCheck.myTool.getState(fp);
    if (sinfo == null) {
      throw new EvalException(EC.TLC_FAILED_TO_RECOVER_INIT);
    }
    states[stateNum++] = sinfo;

    // Recover the successor states (skipping consecutive duplicates):
    for (int i = plen-2; i >= 0; i--) {
      long curFP = prefix.elementAt(i);
      if (curFP != fp) {
        sinfo = LiveCheck.myTool.getState(curFP, sinfo.state);
        if (sinfo == null) {
          throw new EvalException(EC.TLC_FAILED_TO_RECOVER_NEXT);
        }
        states[stateNum++] = sinfo;
        fp = curFP;
      }
    }

    // Print the prefix:
    TLCState lastState = null;
    for (int i = 0; i < stateNum; i++) {
      StatePrinter.printState(states[i], lastState, i+1);
      lastState = states[i].state;
    }

    // Print the cycle:
    int cyclePos = stateNum;
    long cycleFP = fp;
    while (cycleStack.size() > 0) {
      postfix.addElement(cycleStack.popLong());
      cycleStack.popInt();
    }

    // Assert.assert(fps.length > 0);
    for (int i = postfix.size()-1; i >= 0; i--) {
      long curFP = postfix.elementAt(i);
      if (curFP != fp) {
        sinfo = LiveCheck.myTool.getState(curFP, sinfo.state);
        if (sinfo == null) {
          throw new EvalException(EC.TLC_FAILED_TO_RECOVER_NEXT);
        }
        StatePrinter.printState(sinfo, lastState, ++stateNum);
        lastState = sinfo.state;
        fp = curFP;
      }
    }

    // Close the cycle: either the loop closes by stuttering, or we print the
    // back-to-state marker.
    if (fp == cycleFP) {
      StatePrinter.printStutteringState(++stateNum);
    } else {
      sinfo = LiveCheck.myTool.getState(cycleFP, sinfo.state);
      if (sinfo == null) {
        throw new EvalException(EC.TLC_FAILED_TO_RECOVER_NEXT);
      }
      if (TLCGlobals.tool) {
        MP.printState(EC.TLC_BACK_TO_STATE, new String[] { "" + cyclePos } );
      } else {
        StatePrinter.printState(sinfo, null, (++stateNum));
        // SZ Jul 10, 2009: replaced with state printer
        // ToolIO.err.println("STATE " + (++stateNum) + ": " + sinfo.info);
        MP.printMessage(EC.TLC_BACK_TO_STATE, "" + cyclePos);
      }
    }
  }

  /**
   * Worker main loop: claim OrderOfSolutions until none are left or another
   * worker found an error; check every possible-error model of each one.
   */
  public final void run() {
    try {
      while (true) {
        // Get next OOS, and work on it:
        int idx = getNextOOS();
        if (idx == -1 || hasErrFound()) break;

        this.oos = LiveCheck.solutions[idx];
        this.dg = LiveCheck.dgraphs[idx];
        this.dg.createCache();
        PossibleErrorModel[] pems = this.oos.pems;
        for (int i = 0; i < pems.length; i++) {
          if (!hasErrFound()) {
            this.pem = pems[i];
            this.checkSccs();
          }
        }
        this.dg.destroyCache();
      }
    } catch (Exception e) {
      MP.printError(EC.GENERAL, "checking liveness", e);
      // LL changed call 7 April 2012
      // Assert.printStack(e);
      return;
    }
  }
}
/*
* Copyright (c) 2009, INRIA
* All rights reserved.
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above copyright
*       notice, this list of conditions and the following disclaimer in the
*       documentation and/or other materials provided with the distribution.
*     * Neither the name of INRIA nor the names of its contributors may
*       be used to endorse or promote products derived from this software
*       without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE REGENTS AND CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/

package test.obvious.data;

import obvious.data.Schema;

import org.junit.Before;
import org.junit.After;
import org.junit.Test;

import static org.junit.Assert.*;

/**
 * Abstract contract test for the Schema interface: concrete implementations
 * subclass this and supply an instance via {@link #newInstance()}. Every test
 * runs against a fresh schema with two columns:
 * "col1" (String, default "") and "col2" (Integer, default 0).
 * @author Pierre-Luc Hemery
 *
 */
public abstract class SchemaTest {

  /**
   * Schema instance for tests.
   */
  private Schema schema;

  /**
   * Gets the schema test instance.
   * @return test schema instance
   */
  public Schema getSchema() {
    return this.schema;
  }

  /**
   * Sets the schema test instance.
   * @param inSchema input schema to set
   */
  public void setSchema(Schema inSchema) {
    this.schema = inSchema;
  }

  /**
   * Creates the fixture: a fresh implementation-provided schema with the two
   * standard test columns.
   * @see junit.framework.TestCase#setUp()
   */
  @Before
  public void setUp() {
    this.schema = this.newInstance();
    schema.addColumn("col1", String.class, "");
    schema.addColumn("col2", Integer.class, 0);
  }

  /**
   * Creates a suitable instance of schema.
   * @return suitable Schema implementation instance
   */
  public abstract Schema newInstance();

  /**
   * @see junit.framework.TestCase#tearDown()
   */
  @After
  public void tearDown() {
    schema = null;
  }

  /**
   * Test method for obvious.data.Schema.getColumnCount() method.
   */
  @Test
  public void testGetColumnCount() {
    assertEquals(2, schema.getColumnCount());
  }

  /**
   * Test method for obvious.data.Schema.getColumnDefault(int, col) method.
   */
  @Test
  public void testGetColumnDefault() {
    assertEquals("", schema.getColumnDefault(0));
    assertEquals(0, schema.getColumnDefault(1));
  }

  /**
   * Test method for obvious.data.Schema.canGet(int, Class) method.
   * Expects compatibility with supertypes and rejection of unrelated types
   * and out-of-range indices.
   */
  @Test
  public void testCanGetByIndex() {
    assertTrue(this.schema.canGet(0, Object.class));
    assertTrue(this.schema.canGet(0, String.class));
    assertTrue(this.schema.canGet(1, Integer.class));
    assertTrue(this.schema.canGet(1, Number.class));
    assertFalse(this.schema.canGet(1, Boolean.class));
    assertFalse(this.schema.canGet(2, Object.class));
  }

  /**
   * Test method for obvious.data.Schema.canGet(String, Class) method.
   */
  @Test
  public void testCanGetByField() {
    assertTrue(this.schema.canGet("col1", Object.class));
    assertTrue(this.schema.canGet("col1", String.class));
    assertTrue(this.schema.canGet("col2", Integer.class));
    assertTrue(this.schema.canGet("col2", Number.class));
    assertFalse(this.schema.canGet("col1", Boolean.class));
    assertFalse(this.schema.canGet("col3", Object.class));
  }

  /**
   * Test method for obvious.data.Schema.canSet(int, Class) method.
   */
  @Test
  public void testCanSetByIndex() {
    assertTrue(this.schema.canSet(0, Object.class));
    assertTrue(this.schema.canSet(0, String.class));
    assertTrue(this.schema.canSet(1, Integer.class));
    assertTrue(this.schema.canSet(1, Number.class));
    assertFalse(this.schema.canSet(1, Boolean.class));
    assertFalse(this.schema.canSet(2, Object.class));
  }

  /**
   * Test method for obvious.data.Schema.canSet(String, Class) method.
   */
  @Test
  public void testCanSetByField() {
    assertTrue(this.schema.canSet("col1", Object.class));
    assertTrue(this.schema.canSet("col1", String.class));
    assertTrue(this.schema.canSet("col2", Integer.class));
    assertTrue(this.schema.canSet("col2", Number.class));
    assertFalse(this.schema.canSet("col1", Boolean.class));
    assertFalse(this.schema.canSet("col3", Object.class));
  }

  /**
   * Test method for obvious.data.Schema.getColumnType(int col) method.
   * Accepts either the boxed or the primitive int type for col2.
   */
  @Test
  public void testGetColumnTypeByIndex() {
    assertEquals(String.class, schema.getColumnType(0));
    assertTrue(Integer.class.equals(schema.getColumnType(1))
        || int.class.equals(schema.getColumnType(1)));
  }

  /**
   * Test method for obvious.data.Schema.getColumnType(String field) method.
   */
  @Test
  public void testGetColumnTypeByField() {
    assertEquals(String.class, schema.getColumnType("col1"));
    assertTrue(Integer.class.equals(schema.getColumnType("col2"))
        || int.class.equals(schema.getColumnType("col2")));
  }

  /**
   * Test method for obvious.data.Schema.getColumnName(int col) method.
   */
  @Test
  public void testGetColumnName() {
    assertEquals("col1", schema.getColumnName(0));
    assertEquals("col2", schema.getColumnName(1));
  }

  /**
   * Test method for obvious.data.Schema.getColumnIndex(String field) method.
   */
  @Test
  public void testGetColumnIndex() {
    assertEquals(0, schema.getColumnIndex("col1"));
    assertEquals(1, schema.getColumnIndex("col2"));
  }

  /**
   * Test method for obvious.data.Schema.hasColumn(string field) method.
*/ public void testHasColumn() { assertTrue(schema.hasColumn("col1")); assertTrue(schema.hasColumn("col2")); assertFalse(schema.hasColumn("foooo")); } /** * Test method for obvious.data.Schema.addColumn(String, Class, Object). */ @Test public void testAddColumn() { int size = this.schema.getColumnCount(); this.schema.addColumn("addable", String.class, "default_value"); assertEquals(size + 1, this.schema.getColumnCount()); } /** * Test method for obvious.data.Schema.removeColumn(int col) method. */ @Test public void testRemoveColumnByIndex() { final int falseIndex = 3; assertTrue(schema.removeColumn(0)); assertTrue(schema.removeColumn(1)); assertFalse(schema.removeColumn(falseIndex)); } /** * Test method for obvious.data.Schema.removeColumn(String field) method. */ @Test public void testRemoveColumnByField() { assertTrue(schema.removeColumn("col1")); assertTrue(schema.removeColumn("col2")); assertFalse(schema.removeColumn("foo")); } }
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.activemq.openwire.tool;

import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;

import org.codehaus.jam.JAnnotation;
import org.codehaus.jam.JAnnotationValue;
import org.codehaus.jam.JClass;
import org.codehaus.jam.JProperty;

/**
 * Generates the C++ marshaller implementation (.cpp) files for the OpenWire
 * protocol. Header generation and shared state (className, jclass, indent,
 * property helpers) come from {@link CppMarshallingHeadersGenerator}.
 */
public class CppMarshallingClassesGenerator extends CppMarshallingHeadersGenerator {

    /** Generated files get a ".cpp" extension (headers come from the superclass). */
    protected String getFilePostFix() {
        return ".cpp";
    }

    /**
     * Emits the unmarshal statement for a single scalar property: reads the
     * value from the stream and passes it to the generated info.setXxx(...) call.
     *
     * @param out destination for generated C++ source
     * @param property bean property being unmarshalled
     * @param size optional fixed-size annotation value (for byte arrays)
     */
    protected void generateUnmarshalBodyForProperty(PrintWriter out, JProperty property, JAnnotationValue size) {
        out.print("    ");
        String setter = property.getSetter().getSimpleName();
        String type = property.getType().getSimpleName();

        if (type.equals("boolean")) {
            out.println("info." + setter + "( bs.readBoolean() );");
        } else if (type.equals("byte")) {
            out.println("info." + setter + "( DataStreamMarshaller.readByte(dataIn) );");
        } else if (type.equals("char")) {
            out.println("info." + setter + "( DataStreamMarshaller.readChar(dataIn) );");
        } else if (type.equals("short")) {
            out.println("info." + setter + "( DataStreamMarshaller.readShort(dataIn) );");
        } else if (type.equals("int")) {
            out.println("info." + setter + "( DataStreamMarshaller.readInt(dataIn) );");
        } else if (type.equals("long")) {
            out.println("info." + setter + "( UnmarshalLong(wireFormat, dataIn, bs) );");
        } else if (type.equals("String")) {
            out.println("info." + setter + "( readString(dataIn, bs) );");
        } else if (type.equals("byte[]") || type.equals("ByteSequence")) {
            if (size != null) {
                // Fixed-size byte array: length is known at generation time.
                out.println("info." + setter + "( readBytes(dataIn, " + size.asInt() + ") );");
            } else {
                // Variable-size byte array: presence flag comes from the boolean stream.
                out.println("info." + setter + "( readBytes(dataIn, bs.readBoolean()) );");
            }
        } else if (isThrowable(property.getType())) {
            out.println("info." + setter + "( unmarshalBrokerError(wireFormat, dataIn, bs) );");
        } else if (isCachedProperty(property)) {
            out.println("info." + setter + "( (" + type + ") unmarshalCachedObject(wireFormat, dataIn, bs) );");
        } else {
            out.println("info." + setter + "( (" + type + ") unmarshalNestedObject(wireFormat, dataIn, bs) );");
        }
    }

    /**
     * Emits the unmarshal statements for an object-array property: reads either
     * a fixed-size array (size annotation present) or a length-prefixed array
     * guarded by a presence flag, then assigns it via the property's setter.
     *
     * @param out destination for generated C++ source
     * @param property array-typed bean property being unmarshalled
     * @param size optional fixed-size annotation value
     */
    protected void generateUnmarshalBodyForArrayProperty(PrintWriter out, JProperty property, JAnnotationValue size) {
        JClass propertyType = property.getType();
        String arrayType = propertyType.getArrayComponentType().getSimpleName();
        // FIX: was property.getGetter(), which emitted "info.getXxx( value )"
        // instead of "info.setXxx( value )". The scalar-property generator above
        // correctly uses getSetter(); this now matches it.
        String setter = property.getSetter().getSimpleName();
        out.println();
        if (size != null) {
            out.println("    {");
            out.println("        " + arrayType + "[] value = new " + arrayType + "[" + size.asInt() + "];");
            out.println("        " + "for( int i=0; i < " + size.asInt() + "; i++ ) {");
            out.println("            value[i] = (" + arrayType + ") unmarshalNestedObject(wireFormat,dataIn, bs);");
            out.println("        }");
            out.println("        info." + setter + "( value );");
            out.println("    }");
        } else {
            out.println("    if (bs.readBoolean()) {");
            out.println("        short size = DataStreamMarshaller.readShort(dataIn);");
            out.println("        " + arrayType + "[] value = new " + arrayType + "[size];");
            out.println("        for( int i=0; i < size; i++ ) {");
            out.println("            value[i] = (" + arrayType + ") unmarshalNestedObject(wireFormat,dataIn, bs);");
            out.println("        }");
            out.println("        info." + setter + "( value );");
            out.println("    }");
            out.println("    else {");
            out.println("        info." + setter + "( null );");
            out.println("    }");
        }
    }

    /**
     * Emits the body of the generated marshal1 method, which writes booleans to
     * the BooleanStream and accumulates the size of variable-length fields into
     * the generated "rc" variable.
     *
     * @param out destination for generated C++ source
     * @return the fixed byte size contributed by primitive fields, added to rc
     *         by the caller
     */
    protected int generateMarshal1Body(PrintWriter out) {
        List properties = getProperties();
        int baseSize = 0;
        for (Iterator iter = properties.iterator(); iter.hasNext();) {
            JProperty property = (JProperty)iter.next();
            JAnnotation annotation = property.getAnnotation("openwire:property");
            JAnnotationValue size = annotation.getValue("size");
            JClass propertyType = property.getType();
            String type = propertyType.getSimpleName();
            String getter = "info." + property.getGetter().getSimpleName() + "()";

            out.print(indent);
            if (type.equals("boolean")) {
                out.println("bs.writeBoolean(" + getter + ");");
            } else if (type.equals("byte")) {
                baseSize += 1;
            } else if (type.equals("char")) {
                // NOTE(review): char/short/int all contribute 1 byte here, yet
                // marshal2 writes them with writeChar/writeShort/writeInt.
                // Looks like the fixed sizes should be 2/2/4 -- kept as-is
                // (upstream behavior); confirm against the OpenWire size rules.
                baseSize += 1;
            } else if (type.equals("short")) {
                baseSize += 1;
            } else if (type.equals("int")) {
                baseSize += 1;
            } else if (type.equals("long")) {
                out.println("rc += marshal1Long(wireFormat, " + getter + ", bs);");
            } else if (type.equals("String")) {
                out.println("rc += writeString(" + getter + ", bs);");
            } else if (type.equals("byte[]") || type.equals("ByteSequence")) {
                if (size == null) {
                    // NOTE(review): ".Length" is C#-style inside generated C++;
                    // kept byte-for-byte since changing it alters generated output.
                    out.println("bs.writeBoolean(" + getter + "!=null);");
                    out.println("    rc += " + getter + "==null ? 0 : " + getter + ".Length+4;");
                } else {
                    baseSize += size.asInt();
                }
            } else if (propertyType.isArrayType()) {
                if (size != null) {
                    out.println("rc += marshalObjectArrayConstSize(wireFormat, " + getter + ", bs, " + size.asInt() + ");");
                } else {
                    out.println("rc += marshalObjectArray(wireFormat, " + getter + ", bs);");
                }
            } else if (isThrowable(propertyType)) {
                out.println("rc += marshalBrokerError(wireFormat, " + getter + ", bs);");
            } else {
                if (isCachedProperty(property)) {
                    out.println("rc += marshal1CachedObject(wireFormat, " + getter + ", bs);");
                } else {
                    out.println("rc += marshal1NestedObject(wireFormat, " + getter + ", bs);");
                }
            }
        }
        return baseSize;
    }

    /**
     * Emits the body of the generated marshal2 method, which writes the actual
     * field data to the output stream, consuming the booleans recorded by
     * marshal1 from the BooleanStream.
     *
     * @param out destination for generated C++ source
     */
    protected void generateMarshal2Body(PrintWriter out) {
        List properties = getProperties();
        for (Iterator iter = properties.iterator(); iter.hasNext();) {
            JProperty property = (JProperty)iter.next();
            JAnnotation annotation = property.getAnnotation("openwire:property");
            JAnnotationValue size = annotation.getValue("size");
            JClass propertyType = property.getType();
            String type = propertyType.getSimpleName();
            String getter = "info." + property.getGetter().getSimpleName() + "()";

            out.print(indent);
            if (type.equals("boolean")) {
                // Boolean value was recorded in marshal1; marshal2 just advances
                // the boolean stream.
                out.println("bs.readBoolean();");
            } else if (type.equals("byte")) {
                out.println("DataStreamMarshaller.writeByte(" + getter + ", dataOut);");
            } else if (type.equals("char")) {
                out.println("DataStreamMarshaller.writeChar(" + getter + ", dataOut);");
            } else if (type.equals("short")) {
                out.println("DataStreamMarshaller.writeShort(" + getter + ", dataOut);");
            } else if (type.equals("int")) {
                out.println("DataStreamMarshaller.writeInt(" + getter + ", dataOut);");
            } else if (type.equals("long")) {
                out.println("marshal2Long(wireFormat, " + getter + ", dataOut, bs);");
            } else if (type.equals("String")) {
                out.println("writeString(" + getter + ", dataOut, bs);");
            } else if (type.equals("byte[]") || type.equals("ByteSequence")) {
                if (size != null) {
                    out.println("dataOut.write(" + getter + ", 0, " + size.asInt() + ");");
                } else {
                    out.println("if(bs.readBoolean()) {");
                    out.println("    DataStreamMarshaller.writeInt(" + getter + ".Length, dataOut);");
                    out.println("    dataOut.write(" + getter + ");");
                    out.println("    }");
                }
            } else if (propertyType.isArrayType()) {
                if (size != null) {
                    out.println("marshalObjectArrayConstSize(wireFormat, " + getter + ", dataOut, bs, " + size.asInt() + ");");
                } else {
                    out.println("marshalObjectArray(wireFormat, " + getter + ", dataOut, bs);");
                }
            } else if (isThrowable(propertyType)) {
                out.println("marshalBrokerError(wireFormat, " + getter + ", dataOut, bs);");
            } else {
                if (isCachedProperty(property)) {
                    out.println("marshal2CachedObject(wireFormat, " + getter + ", dataOut, bs);");
                } else {
                    out.println("marshal2NestedObject(wireFormat, " + getter + ", dataOut, bs);");
                }
            }
        }
    }

    /**
     * Writes one complete marshaller .cpp file for the current class:
     * constructor/destructor, optional factory methods for concrete types, and
     * the unmarshal/marshal1/marshal2 implementations.
     *
     * @param out destination for the generated file
     * @throws Exception propagated from the generation helpers
     */
    protected void generateFile(PrintWriter out) throws Exception {
        generateLicence(out);

        out.println("#include \"marshal/" + className + ".hpp\"");
        out.println("");
        out.println("using namespace apache::activemq::client::marshal;");
        out.println("");
        out.println("/*");
        out.println(" * Marshalling code for Open Wire Format for " + jclass.getSimpleName() + "");
        out.println(" *");
        out.println(" * NOTE!: This file is autogenerated - do not modify!");
        out.println(" *        if you need to make a change, please see the Groovy scripts in the");
        out.println(" *        activemq-core module");
        out.println(" */");
        out.println("");
        out.println("" + className + "::" + className + "()");
        out.println("{");
        out.println("    // no-op");
        out.println("}");
        out.println("");
        out.println("" + className + "::~" + className + "()");
        out.println("{");
        out.println("    // no-op");
        out.println("}");
        out.println("");

        // Factory methods only make sense for instantiable data structures.
        if (!isAbstractClass()) {
            out.println("");
            out.println("");
            out.println("IDataStructure* " + className + "::createObject() ");
            out.println("{");
            out.println("    return new " + jclass.getSimpleName() + "();");
            out.println("}");
            out.println("");
            out.println("char " + className + "::getDataStructureType() ");
            out.println("{");
            out.println("    return " + jclass.getSimpleName() + ".ID_" + jclass.getSimpleName() + ";");
            out.println("}");
        }

        out.println("");
        out.println("    /* ");
        out.println("     * Un-marshal an object instance from the data input stream");
        out.println("     */ ");
        out.println("void " + className + "::unmarshal(ProtocolFormat& wireFormat, Object o, BinaryReader& dataIn, BooleanStream& bs) ");
        out.println("{");
        out.println("    base.unmarshal(wireFormat, o, dataIn, bs);");

        List properties = getProperties();
        boolean marshallerAware = isMarshallerAware();
        if (!properties.isEmpty() || marshallerAware) {
            out.println("");
            out.println("    " + jclass.getSimpleName() + "& info = (" + jclass.getSimpleName() + "&) o;");
        }

        if (marshallerAware) {
            out.println("");
            out.println("    info.beforeUnmarshall(wireFormat);");
            out.println("        ");
        }

        generateTightUnmarshalBody(out);

        if (marshallerAware) {
            out.println("");
            out.println("    info.afterUnmarshall(wireFormat);");
        }

        out.println("");
        out.println("}");
        out.println("");
        out.println("");
        out.println("/*");
        out.println(" * Write the booleans that this object uses to a BooleanStream");
        out.println(" */");
        out.println("int " + className + "::marshal1(ProtocolFormat& wireFormat, Object& o, BooleanStream& bs) {");
        out.println("    " + jclass.getSimpleName() + "& info = (" + jclass.getSimpleName() + "&) o;");

        if (marshallerAware) {
            out.println("");
            out.println("    info.beforeMarshall(wireFormat);");
        }

        out.println("");
        out.println("    int rc = base.marshal1(wireFormat, info, bs);");

        int baseSize = generateMarshal1Body(out);

        out.println("");
        out.println("    return rc + " + baseSize + ";");
        out.println("}");
        out.println("");
        out.println("/* ");
        out.println(" * Write a object instance to data output stream");
        out.println(" */");
        out.println("void " + className + "::marshal2(ProtocolFormat& wireFormat, Object& o, BinaryWriter& dataOut, BooleanStream& bs) {");
        out.println("    base.marshal2(wireFormat, o, dataOut, bs);");

        if (!properties.isEmpty() || marshallerAware) {
            out.println("");
            out.println("    " + jclass.getSimpleName() + "& info = (" + jclass.getSimpleName() + "&) o;");
        }

        generateMarshal2Body(out);

        if (marshallerAware) {
            out.println("");
            out.println("    info.afterMarshall(wireFormat);");
        }

        out.println("");
        out.println("}");
    }

    /**
     * Writes the MarshallerFactory source: includes one marshaller header per
     * concrete class (sorted by simple name for a stable output) and registers
     * each marshaller with the protocol format.
     *
     * @param out destination for the generated factory file
     */
    @SuppressWarnings("unchecked")
    public void generateFactory(PrintWriter out) {
        generateLicence(out);
        out.println("");
        out.println("// Marshalling code for Open Wire Format");
        out.println("//");
        out.println("//");
        out.println("// NOTE!: This file is autogenerated - do not modify!");
        out.println("//        if you need to make a change, please see the Groovy scripts in the");
        out.println("//        activemq-openwire module");
        out.println("//");
        out.println("");
        out.println("#include \"marshal/" + className + ".hpp\"");
        out.println("");

        // Sort so the generated file is deterministic across runs.
        List list = new ArrayList(getConcreteClasses());
        Collections.sort(list, new Comparator() {
            public int compare(Object o1, Object o2) {
                JClass c1 = (JClass)o1;
                JClass c2 = (JClass)o2;
                return c1.getSimpleName().compareTo(c2.getSimpleName());
            }
        });

        for (Iterator iter = list.iterator(); iter.hasNext();) {
            JClass jclass = (JClass)iter.next();
            out.println("#include \"marshal/" + jclass.getSimpleName() + "Marshaller.hpp\"");
        }

        out.println("");
        out.println("");
        out.println("using namespace apache::activemq::client::marshal;");
        out.println("");
        out.println("");
        out.println("void MarshallerFactory::configure(ProtocolFormat& format) ");
        out.println("{");

        for (Iterator iter = list.iterator(); iter.hasNext();) {
            JClass jclass = (JClass)iter.next();
            out.println("    format.addMarshaller(new " + jclass.getSimpleName() + "Marshaller());");
        }

        out.println("");
        out.println("}");
    }
}
package io.indexr.segment.rt; import com.google.common.base.Preconditions; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; import org.apache.commons.io.FileUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.File; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.TimeUnit; import java.util.function.Supplier; import java.util.stream.Collectors; import java.util.stream.Stream; import io.indexr.segment.InfoSegment; import io.indexr.segment.Row; import io.indexr.segment.RowTraversal; import io.indexr.segment.Segment; import io.indexr.segment.SegmentFd; import io.indexr.segment.SegmentMode; import io.indexr.segment.SegmentSchema; import io.indexr.segment.SegmentUploader; import io.indexr.segment.cache.ExtIndexMemCache; import io.indexr.segment.cache.IndexMemCache; import io.indexr.segment.cache.PackMemCache; import io.indexr.segment.storage.ColumnNode; import io.indexr.segment.storage.StorageSegment; import io.indexr.segment.storage.Version; import io.indexr.segment.storage.itg.IntegratedSegment; import io.indexr.util.DelayTask; import io.indexr.util.IOUtil; import io.indexr.util.JsonUtil; import io.indexr.util.Strings; import io.indexr.util.Try; public class RTSGroup implements InfoSegment, SegmentFd { private static final Logger logger = LoggerFactory.getLogger(RTSGroup.class); private final Path path; private final String tableName; private final Metadata metadata; private long handlePeriod = TimeUnit.SECONDS.toMillis(10); private long fastPeriod = TimeUnit.MILLISECONDS.toMillis(300); private long deleteDelayPeriod = TimeUnit.SECONDS.toMillis(15); // In recoving state or not. 
The handle thread must finish recovring before doing anything else. private volatile boolean isRecovering; // Whether allow new rts add into this rtsg or not. The handle thread won't start merging if it is true. private volatile boolean allowAddSegment; private final Map<String, RealtimeSegment> segments = new ConcurrentHashMap<>(); private volatile SegmentFd mergeSegment; private volatile State state; public static enum State { /** * Before creation. */ Begin("_ST_BEGIN"), /** * Metadata and other files have been written, from now on it is a valid realtime segment. */ Created("_ST_CREATED"), /** * Rows in memory has dump to local disk. */ Merged("_ST_MERGED"), /** * Segment has uploaded to indexr storage system. */ Uploaded("_ST_UPLOADED"), /** * The end state. */ Done("_ST_DONE"); public final String fileName; State(String fileName) { this.fileName = fileName; } public static void commit(Path path, State state) throws IOException { IOUtil.createFileIfNotExist(path.resolve(state.fileName)); } public static State getState(Path path) throws IOException { if (!Files.exists(path)) { return Begin; } try (Stream<Path> paths = Files.list(path)) { Set<String> stateFiles = paths .map(p -> p.getFileName().toString()) .filter(n -> n.startsWith("_ST_") || n.startsWith("_st_")) .collect(Collectors.toSet()); State stat = Begin; for (State st : State.values()) { if (stateFiles.contains(st.fileName) || stateFiles.contains(st.fileName.toLowerCase())) { stat = st; } } return stat; } } } private RTSGroup(Path path, String tableName, Metadata metadata, State state, boolean isRecovering, boolean allowAddSegment) { this.path = path; this.tableName = tableName; this.metadata = metadata; this.state = state; this.isRecovering = isRecovering; this.allowAddSegment = allowAddSegment; } private static class Metadata { @JsonProperty("version") public final int version; @JsonProperty("name") public final String name; @JsonProperty("schema") public final SegmentSchema schema; 
@JsonProperty("createTime") public final long createTime; @JsonProperty("dims") public final List<String> dims; @JsonProperty("metrics") public final List<Metric> metrics; @JsonProperty("nameToAlias") public final Map<String, String> nameToAlias; @JsonProperty("grouping") public final boolean grouping; @JsonProperty("mode") public final String modeName; @JsonIgnore public final SegmentMode mode; @JsonCreator public Metadata(@JsonProperty("version") int version, @JsonProperty("name") String name, @JsonProperty("schema") SegmentSchema schema, @JsonProperty("createTime") long createTime, @JsonProperty("dims") List<String> dims, @JsonProperty("metrics") List<Metric> metrics, @JsonProperty("nameToAlias") Map<String, String> nameToAlias, @JsonProperty("grouping") boolean grouping, @JsonProperty("compress") Boolean compress, @JsonProperty("mode") String modeName) { this.version = version; this.name = name; this.schema = schema; this.createTime = createTime; this.dims = dims; this.metrics = metrics; this.nameToAlias = nameToAlias; this.grouping = grouping; this.mode = SegmentMode.fromNameWithCompress(modeName, compress); this.modeName = this.mode.name(); } } public static RTSGroup open(Path path, String tableName) throws IOException { return open(path, tableName, null, null, null, null, null, false, null); } public static RTSGroup open(Path path, String tableName, String name, SegmentSchema schema, List<String> dims, List<Metric> metrics, Map<String, String> nameToAlias, boolean grouping, SegmentMode mode) throws IOException { Preconditions.checkState(path != null && !Strings.isEmpty(tableName)); State state = State.getState(path); switch (state) { case Begin: { if (name == null || schema == null) { logger.info("Remove invalid rts group({}) folder. [table: {}, path: {}]", state.name(), tableName, path); Try.on(() -> FileUtils.deleteDirectory(path.toFile()), 1, logger, String.format("Delete rts group folder failed. 
[table: %s, rtsg: %s]", tableName, name)); return null; } if (!Files.exists(path)) { Files.createDirectories(path); } long createTime = System.currentTimeMillis(); Metadata metadata = new Metadata( Version.LATEST_ID, name, schema, createTime, dims, metrics, nameToAlias, grouping, null, mode.name() ); JsonUtil.save(path.resolve("metadata.json"), metadata); RTSGroup rtsGroup = new RTSGroup(path, tableName, metadata, state, false, false); rtsGroup.commitState(State.Created); rtsGroup.allowAddSegment = true; return rtsGroup; } case Created: case Merged: { Metadata metadata = JsonUtil.load(path.resolve("metadata.json"), Metadata.class); RTSGroup rtsGroup = new RTSGroup( path, tableName, metadata, state, true, state == State.Created); rtsGroup.loadLocalSegments(); return rtsGroup; } case Uploaded: case Done: logger.info("Remove invalid rts group({}) folder. [table: {}, path{}]", state.name(), tableName, path); Try.on(() -> FileUtils.deleteDirectory(path.toFile()), 1, logger, String.format("Delete rts group folder failed. 
[table: %s, rtsg: %s]", tableName, name)); return null; default: return null; } } public List<String> dims() { return metadata.dims; } public List<Metric> metrics() { return metadata.metrics; } public boolean grouping() { return metadata.grouping; } public void setHandlePeriod(long period) { handlePeriod = period; } public void setFastPeriod(long period) { fastPeriod = period; } public Path path() { return path; } public String tableName() { return tableName; } public long createTime() { return metadata.createTime; } public State state() { return state; } public boolean isRecovering() { return isRecovering; } public Map<String, RealtimeSegment> realtimeSegments() { return segments; } public boolean hasUploaded() { return state == State.Uploaded || state == State.Done; } public boolean timeToUpload(long uploadPeriodMS, long maxRow) { return System.currentTimeMillis() - metadata.createTime >= uploadPeriodMS || rowCount() >= maxRow; } public synchronized RealtimeSegment addSegment(Supplier<RealtimeSegment> supplier) { if (!allowAddSegment) { return null; } RealtimeSegment rts = supplier.get(); if (rts == null) { return null; } else { segments.put(rts.name(), rts); return rts; } } public void disallowAddSegment() { allowAddSegment = false; } public boolean allowAddSegment() { return allowAddSegment; } public synchronized boolean tryStartMerge() { if (!allowAddSegment) { return true; } if (isIngesting()) { return false; } allowAddSegment = false; return true; } private void commitState(State state) throws IOException { this.state = state; State.commit(path, state); } private boolean isIngesting() { boolean ok = false; for (RealtimeSegment rts : segments.values()) { if (rts.state() == RealtimeSegment.State.Created) { if (ok) { // It could happen when ingesting segment is failed. logger.warn("More than one segment is ingesting. 
[table: {}, rtsg: {}]", tableName, metadata.name); } ok = true; } } return ok; } private void loadLocalSegments() throws IOException { List<Path> segmentPaths; try (Stream<Path> paths = Files.list(path)) { segmentPaths = paths .filter(p -> Files.isDirectory(p) && p.getFileName().toString().startsWith("rts.")) .collect(Collectors.toList()); } List<RealtimeSegment> localSegments = new ArrayList<>(); for (Path path : segmentPaths) { RealtimeSegment rts = RealtimeSegment.open(path, tableName, path.getFileName().toString(), metadata.schema); localSegments.add(rts); } localSegments.forEach(rts -> segments.put(rts.name(), rts)); } private boolean recoverFromMergeSegment() { Preconditions.checkState(state == State.Merged); logger.debug("Start recover rts group data from merge segment. [table: {}, rtsg: {}]", tableName, metadata.name); // Merge segemnt already exists, we just need to load it. mergeSegment = Try.on( () -> IntegratedSegment.Fd.create(metadata.name, path.resolve("integrated")), 1, logger, String.format("Open merge segment failed. [table: %s, rtsg: %s]", tableName, metadata.name)); if (mergeSegment != null) { logger.debug("Recover rts group from merge segment completed. [table: {}, rtsg: {}]", tableName, metadata.name); return true; } else { logger.debug("Recover rts group from merge segment failed. [table: {}, rtsg: {}]", tableName, metadata.name); return false; } } private boolean recoverFromSubSegments(RTResources rtResources) { Preconditions.checkState(state == State.Created); logger.debug("Recover data from sub segments . [table: {}, rtsg: {}]", tableName, metadata.name); boolean ok = true; List<RealtimeSegment> toRemove = new ArrayList<>(); for (RealtimeSegment rts : segments.values()) { if (!rts.isRecovering()) { continue; } switch (rts.state()) { case Begin: // A rtseg in begin status means it is empty, safe to be removed. 
// NOTE(review): this chunk begins inside recoverFromSubSegments(RTResources), in the middle of
// the per-segment state switch; the method/loop header is in the previous chunk. `toRemove`
// collects segments that never held data and are dropped from the group afterwards.
                    toRemove.add(rts);
                    break;
                case Created:
                case RTIFinished:
                case Saved:
                    // Replay the segment's on-disk data; Try.on retries once and logs on failure.
                    ok = Try.on(
                            () -> rts.recover(metadata.dims, metadata.metrics, metadata.nameToAlias, metadata.grouping, rtResources),
                            1,
                            logger,
                            String.format("Recover segment failed. [table: %s, rtsg: %s, segment: %s]", tableName, metadata.name, rts.name()));
                    break;
                default:
                    throw new IllegalStateException("illegal state: " + rts.state().name());
            }
            if (!ok) {
                // Abort at the first segment that fails to recover.
                break;
            }
        }
        // Forget segments that were still in the Begin state (they carried no data).
        toRemove.forEach(rts -> segments.remove(rts.name()));
        if (ok) {
            logger.debug("Recover rts group from sub segments completed. [table: {}, rtsg: {}]", tableName, metadata.name);
        } else {
            logger.debug("Recover rts group from sub segment failed. [table: {}, rtsg: {}]", tableName, metadata.name);
        }
        return ok;
    }

    /**
     * Recover this group after a restart, dispatching on the persisted group state.
     * Clears {@code isRecovering} only when recovery fully succeeds, so a failed
     * attempt will be retried on the next call.
     *
     * @param rtResources realtime resources handed down to sub-segment recovery
     * @return true when recovery completed
     */
    private boolean recover(RTResources rtResources) {
        Preconditions.checkState(isRecovering);
        boolean ok;
        switch (state) {
            case Created:
                ok = recoverFromSubSegments(rtResources);
                break;
            case Merged:
                ok = recoverFromMergeSegment();
                break;
            default:
                throw new IllegalStateException("illegal state: " + state.name());
        }
        if (ok) {
            isRecovering = false;
        }
        return ok;
    }

    /**
     * Flush every finished sub-segment's in-memory rows to disk and commit it to the
     * {@code Saved} state. Segments still ingesting ({@code Created}) are skipped and
     * keep the overall result false, so the caller polls again later.
     *
     * @return true only when every segment is either removed, already saved, or saved now
     */
    private boolean saveToDisk(RTResources rtResources) {
        boolean allOk = true;
        List<RealtimeSegment> toRemove = new ArrayList<>();
        for (RealtimeSegment rts : segments.values()) {
            boolean ok = false;
            switch (rts.state()) {
                case Begin:
                    // Never received data; drop it below.
                    toRemove.add(rts);
                    ok = true;
                    break;
                case Created:
                    // Still ingesting.
                    //logger.debug("Segment is ingesting, ignore. [table: {}, rtsg: {}, segment: {}]", tableName, name, rts.name());
                    break;
                case RTIFinished:
                    // Persist rows first, then commit the state transition; only both
                    // steps succeeding counts as ok.
                    SegmentFd savedSegment = Try.on(
                            () -> rts.saveToDisk(metadata.version, metadata.mode, rtResources),
                            1,
                            logger,
                            String.format("Save in memory rows to disk failed. [table: %s, rtsg: %s, segment: %s]", tableName, metadata.name, rts.name()));
                    if (savedSegment != null) {
                        ok = Try.on(
                                () -> rts.commitState(RealtimeSegment.State.Saved),
                                1,
                                logger,
                                String.format("Commit saved state failed. [table: %s, rtsg: %s, segment: %s]", tableName, metadata.name, rts.name()));
                    }
                    break;
                case Saved:
                    ok = true;
                    break;
                default:
                    throw new IllegalStateException("illegal state: " + rts.state().name());
            }
            allOk &= ok;
        }
        toRemove.forEach(rts -> segments.remove(rts.name()));
        return allOk;
    }

    /**
     * Merge all saved sub-segments (in name order, for determinism) into a single
     * integrated segment under {@code path}.
     *
     * @return the integrated segment fd, or null if any step failed
     */
    private SegmentFd doMerge() {
        boolean ok = true;
        ArrayList<RealtimeSegment> sortedRTSs = new ArrayList<>(segments.values());
        sortedRTSs.sort((s1, s2) -> s1.name().compareTo(s2.name()));
        List<SegmentFd> savedSegments = new ArrayList<>();
        for (RealtimeSegment rts : sortedRTSs) {
            // merge() is only reachable after saveToDisk() reported all segments Saved.
            Preconditions.checkState(rts.state() == RealtimeSegment.State.Saved,
                    "Segment [%s] should be in [%s] state, but [%s]",
                    rts.name(), RealtimeSegment.State.Saved, rts.state());
            SegmentFd savedSegment = Try.on(
                    rts::getSavedSegment,
                    1,
                    logger,
                    String.format("Get saved segmet failed. [table: %s, rtsg: %s, segment: %s]", tableName, metadata.name, rts.name()));
            if (savedSegment == null) {
                ok = false;
                break;
            } else {
                savedSegments.add(savedSegment);
            }
        }
        if (!ok) {
            //IOUtils.closeQuietly(mergeSegment);
            return null;
        }
        Path mergePath = path.resolve("merge");
        StorageSegment mergeSegment = Try.on(
                () -> RTSMerge.merge(
                        metadata.grouping,
                        metadata.schema,
                        metadata.mode,
                        metadata.dims,
                        metadata.metrics,
                        savedSegments,
                        mergePath,
                        metadata.name
                ),
                1,
                logger,
                String.format("Merge segment failed. [table: %s, rtsg: %s]", tableName, metadata.name));
        if (mergeSegment == null) {
            return null;
        }
        // Convert the merged storage segment into its final integrated on-disk form,
        // then release the intermediate merge segment.
        Path integratedPath = path.resolve("integrated");
        SegmentFd integratedSegment = Try.on(
                () -> IntegratedSegment.Fd.create(mergeSegment, integratedPath, true),
                1,
                logger,
                String.format("Integrate merge segmet failed. [table: %s, rtsg: %s]", tableName, metadata.name));
        IOUtil.closeQuietly(mergeSegment);
        return integratedSegment;
    }

    /**
     * Run the merge step and, on success, commit the group into the {@code Merged} state.
     * Idempotent with respect to {@code mergeSegment}: a previously produced merge result
     * is reused instead of merging again.
     */
    private boolean merge() {
        Preconditions.checkState(state == State.Created);
        logger.debug("Start merge rts group. [table: {}, rtsg: {}]", tableName, metadata.name);
        long lastTime = System.currentTimeMillis();
        long rowCount = rowCount();
        if (mergeSegment == null) {
            this.mergeSegment = doMerge();
        }
        boolean ok = mergeSegment != null;
        if (ok) {
            ok = Try.on(
                    () -> commitState(State.Merged),
                    1,
                    logger,
                    String.format("Commit merge state failed. [table: %s, rtsg: %s]", tableName, metadata.name));
            // Merged row count can be lower than the input sum because of grouping.
            long mergeRowCount = mergeSegment.info().rowCount();
            logger.info("Merge rts group completed. [took {}, merge {}, rows {} ({})]. [table: {}, rtsg: {}]",
                    String.format("%.2fs", (double) (System.currentTimeMillis() - lastTime) / 1000),
                    rowCount,
                    mergeSegment.info().rowCount(),
                    String.format("%.2f%%", (double) mergeRowCount / rowCount * 100),
                    tableName,
                    metadata.name);
        }
        return ok;
    }

    /**
     * Upload the merged segment to the storage system and commit the {@code Uploaded} state.
     * Empty groups skip the upload but still advance the state.
     */
    private boolean upload(SegmentUploader uploader) {
        Preconditions.checkState(state == State.Merged && mergeSegment != null);
        logger.debug("Start upload rts group. [table: {}, rtsg: {}]", tableName, metadata.name);
        boolean ok = true;
        if (mergeSegment.info().rowCount() > 0) {
            ok = Try.on(
                    () -> {
                        // try-with-resources guarantees the opened segment is closed after upload.
                        try (Segment segment = mergeSegment.open()) {
                            uploader.upload((StorageSegment) segment, false);
                        }
                    },
                    1,
                    logger,
                    String.format("Upload merge segment failed. [table: %s, rtsg: %s]", tableName, metadata.name));
        } else {
            logger.info("Empty rts group, ignore. [table: {}, rtsg: {}]", tableName, metadata.name);
        }
        if (!ok) {
            return false;
        }
        ok = Try.on(
                () -> commitState(State.Uploaded),
                1,
                logger,
                String.format("Commit upload state failed. [table: %s, rtsg: %s]", tableName, metadata.name));
        if (ok) {
            logger.info("Upload rts group completed. [table: {}, rtsg: {}]", tableName, metadata.name);
        }
        return ok;
    }

    /**
     * Schedule deferred removal of this group's files (15 minutes), giving other
     * nodes time to load the uploaded segment first. The table/group names and path
     * are captured into locals so the task does not retain {@code this}.
     */
    private boolean delete() {
        String tn = tableName;
        String rtsgName = metadata.name;
        File deletePath = path.toFile();
        new DelayTask(() -> {
            logger.debug("Delete rts group files. [table: {}, rtsg: {}]", tn, rtsgName);
            FileUtils.deleteDirectory(deletePath);
        }, 15 * 60 * 1000).submit();
        return true;
    }

    /**
     * A thread may constantly call this method to finally push the segment into storage system.
     * Drives the life cycle: recover (if needed) -> save to disk -> merge -> upload -> delete.
     *
     * @return -1: This rts group has done its job, no more calls to handle; 0: should immediately call again; else: Next call period.
     */
    public long handle(long uploadPeriodMS, long maxRow, SegmentUploader uploader, RTResources rtResources) {
        Preconditions.checkState(state != State.Begin);
        if (isRecovering) {
            return recover(rtResources) ? fastPeriod : handlePeriod;
        }
        switch (state) {
            case Created: {
                if (!saveToDisk(rtResources)) {
                    return handlePeriod;
                }
                if (!allowAddSegment || (timeToUpload(uploadPeriodMS, maxRow) && tryStartMerge())) {
                    // Check again, some new rts could be added into.
                    if (!saveToDisk(rtResources)) {
                        return handlePeriod;
                    }
                    return merge() ? fastPeriod : handlePeriod;
                } else {
                    return handlePeriod;
                }
            }
            case Merged: {
                if (!upload(uploader)) {
                    return handlePeriod;
                } else {
                    // Remove this rtsg later, wait for other nodes to load its segment.
                    return deleteDelayPeriod;
                }
            }
            case Uploaded: {
                return delete() ? -1 : handlePeriod;
            }
            default:
                throw new IllegalStateException("illegal state: " + state.name());
        }
    }

    // ============================================
    // Segment interface implementation
    // ============================================

    @Override
    public int version() {
        return metadata.version;
    }

    @Override
    public SegmentMode mode() {
        return metadata.mode;
    }

    @Override
    public boolean isRealtime() {
        return true;
    }

    @Override
    public String name() {
        return metadata.name;
    }

    @Override
    public SegmentSchema schema() {
        return metadata.schema;
    }

    @Override
    public boolean isColumned() {
        // Always false.
        return false;
    }

    @Override
    public long rowCount() {
        // We always get row valueCount from sub rts, as the merge segment could be less than sum of
        // sub rts because of grouping.
        long rowCount = 0;
        for (RealtimeSegment rts : segments.values()) {
            rowCount += rts.rowCount();
        }
        return rowCount;
    }

    @Override
    public ColumnNode columnNode(int colId) throws IOException {
        // Merge the per-segment column nodes into one logical node for this column.
        List<ColumnNode> subNodes = new ArrayList<>();
        for (RealtimeSegment rts : segments.values()) {
            subNodes.add(rts.columnNode(colId));
        }
        return ColumnNode.merge(subNodes, metadata.schema.columns.get(colId).getDataType());
    }

    @Override
    public InfoSegment info() {
        return this;
    }

    /**
     * This method is only used for testing. Never use it in real production.
     * Delegates to the merge segment when one exists; otherwise exposes the live
     * sub-segments through a row-iterating adapter.
     */
    @Override
    public Segment open(IndexMemCache indexMemCache, ExtIndexMemCache extIndexMemCache, PackMemCache packMemCache) throws IOException {
        if (mergeSegment != null) {
            return mergeSegment.open(indexMemCache, extIndexMemCache, packMemCache);
        }
        return new Segment() {
            // Single-shot iterator over all sub-segments; rowTraversal() hands it out
            // and close() releases it.
            SegmentFdRowIterator iterator = new SegmentFdRowIterator(
                    new ArrayList<>(segments.values()), indexMemCache, packMemCache);
            SegmentFdRowIterator rt;

            @Override
            public int version() {
                return metadata.version;
            }

            @Override
            public SegmentMode mode() {
                return metadata.mode;
            }

            @Override
            public String name() {
                return metadata.name;
            }

            @Override
            public SegmentSchema schema() {
                return metadata.schema;
            }

            @Override
            public long rowCount() {
                return RTSGroup.this.rowCount();
            }

            @Override
            public RowTraversal rowTraversal() {
                rt = iterator;
                return new RowTraversal() {
                    @Override
                    public Iterator<Row> iterator() {
                        return iterator;
                    }
                };
            }

            @Override
            public void close() throws IOException {
                if (rt != null) {
                    rt.close();
                    rt = null;
                }
            }
        };
    }
}
/* * Copyright 2011 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.modelmapper.internal; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.TreeMap; import org.modelmapper.Condition; import org.modelmapper.Converter; import org.modelmapper.PropertyMap; import org.modelmapper.Provider; import org.modelmapper.TypeMap; import org.modelmapper.internal.util.Assert; import org.modelmapper.internal.util.Types; import org.modelmapper.spi.Mapping; import org.modelmapper.spi.PropertyInfo; /** * TypeMap implementation. 
* * @author Jonathan Halterman */ class TypeMapImpl<S, D> implements TypeMap<S, D> { private final Class<S> sourceType; private final Class<D> destinationType; private final String name; final InheritingConfiguration configuration; private final MappingEngineImpl engine; /** Guarded by "mappings" */ private final Map<String, PropertyInfo> mappedProperties = new HashMap<String, PropertyInfo>(); /** Guarded by "mappings" */ private final Map<String, MappingImpl> mappings = new TreeMap<String, MappingImpl>(); private Converter<S, D> converter; private Converter<S, D> preConverter; private Converter<S, D> postConverter; private Condition<?, ?> condition; private Provider<D> provider; private Converter<?, ?> propertyConverter; private Condition<?, ?> propertyCondition; private Provider<?> propertyProvider; TypeMapImpl(Class<S> sourceType, Class<D> destinationType, String name, InheritingConfiguration configuration, MappingEngineImpl engine) { this.sourceType = sourceType; this.destinationType = destinationType; this.name = name; this.configuration = configuration; this.engine = engine; } public void addMappings(PropertyMap<S, D> propertyMap) { if (sourceType.isEnum() || destinationType.isEnum()) throw new Errors().mappingForEnum().toConfigurationException(); synchronized (mappings) { for (MappingImpl mapping : new ExplicitMappingBuilder<S, D>(sourceType, destinationType, configuration).build(propertyMap)) { MappingImpl existingMapping = addMapping(mapping); if (existingMapping != null && existingMapping.isExplicit()) throw new Errors().duplicateMapping(mapping.getLastDestinationProperty()) .toConfigurationException(); } } } public Condition<?, ?> getCondition() { return condition; } public Converter<S, D> getConverter() { return converter; } public Class<D> getDestinationType() { return destinationType; } public List<Mapping> getMappings() { synchronized (mappings) { return new ArrayList<Mapping>(mappings.values()); } } public String getName() { return name; } public 
Converter<S, D> getPostConverter() { return postConverter; } public Converter<S, D> getPreConverter() { return preConverter; } public Condition<?, ?> getPropertyCondition() { return propertyCondition; } public Converter<?, ?> getPropertyConverter() { return propertyConverter; } public Provider<?> getPropertyProvider() { return propertyProvider; } public Provider<D> getProvider() { return provider; } public Class<S> getSourceType() { return sourceType; } public List<PropertyInfo> getUnmappedProperties() { TypeInfo<D> destinationInfo = TypeInfoRegistry.typeInfoFor(destinationType, configuration); List<PropertyInfo> unmapped = new ArrayList<PropertyInfo>(); synchronized (mappings) { for (Map.Entry<String, Mutator> entry : destinationInfo.getMutators().entrySet()) if (!mappedProperties.containsKey(entry.getKey())) unmapped.add(entry.getValue()); } return unmapped; } public D map(S source) { Class<S> sourceType = Types.<S>deProxy(source.getClass()); MappingContextImpl<S, D> context = new MappingContextImpl<S, D>(source, sourceType, null, destinationType, null, name, engine); D result = null; try { result = engine.typeMap(context, this); } catch (Throwable t) { context.errors.errorMapping(sourceType, destinationType, t); } context.errors.throwMappingExceptionIfErrorsExist(); return result; } public void map(S source, D destination) { Class<S> sourceType = Types.<S>deProxy(source.getClass()); MappingContextImpl<S, D> context = new MappingContextImpl<S, D>(source, sourceType, destination, destinationType, null, name, engine); try { engine.typeMap(context, this); } catch (Throwable t) { context.errors.errorMapping(sourceType, destinationType, t); } context.errors.throwMappingExceptionIfErrorsExist(); } public TypeMap<S, D> setCondition(Condition<?, ?> condition) { this.condition = Assert.notNull(condition, "condition"); return this; } public TypeMap<S, D> setConverter(Converter<S, D> converter) { this.converter = Assert.notNull(converter, "converter"); return this; } public 
TypeMap<S, D> setPostConverter(Converter<S, D> converter) { this.postConverter = Assert.notNull(converter, "converter"); return this; } public TypeMap<S, D> setPreConverter(Converter<S, D> converter) { this.preConverter = Assert.notNull(converter, "converter"); return this; } public TypeMap<S, D> setPropertyCondition(Condition<?, ?> condition) { propertyCondition = Assert.notNull(condition, "condition"); return this; } public TypeMap<S, D> setPropertyConverter(Converter<?, ?> converter) { propertyConverter = Assert.notNull(converter, "converter"); return this; } public TypeMap<S, D> setPropertyProvider(Provider<?> provider) { propertyProvider = Assert.notNull(provider, "provider"); return this; } public TypeMap<S, D> setProvider(Provider<D> provider) { this.provider = Assert.notNull(provider, "provider"); return this; } @Override public String toString() { StringBuilder b = new StringBuilder(); b.append("TypeMap[") .append(sourceType.getSimpleName()) .append(" -> ") .append(destinationType.getSimpleName()); if (name != null) b.append(' ').append(name); return b.append(']').toString(); } public void validate() { if (converter != null || preConverter != null || postConverter != null) return; Errors errors = new Errors(); List<PropertyInfo> unmappedProperties = getUnmappedProperties(); if (!unmappedProperties.isEmpty()) errors.errorUnmappedProperties(this, unmappedProperties); errors.throwValidationExceptionIfErrorsExist(); } MappingImpl addMapping(MappingImpl mapping) { synchronized (mappings) { mappedProperties.put(mapping.getDestinationProperties().get(0).getName(), mapping.getDestinationProperties().get(0)); return mappings.put(mapping.getPath(), mapping); } } /** * Used by PropertyMapBuilder to determine if a skipped mapping exists for the {@code path}. No * need to synchronize here since the TypeMap is not exposed publicly yet. 
*/ boolean isSkipped(String path) { Mapping mapping = mappings.get(path); return mapping != null && mapping.isSkipped(); } /** * Used by ImplicitMappingBuilder to determine if a mapping for the {@code path} already exists. * No need to synchronize here since the TypeMap is not exposed publicly yet. */ MappingImpl mappingFor(String path) { return mappings.get(path); } }
/*
 * android-spinnerwheel
 * https://github.com/ai212983/android-spinnerwheel
 *
 * based on
 *
 * Android Wheel Control.
 * https://code.google.com/p/android-wheel/
 *
 * Copyright 2011 Yuri Kanivets
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.vondear.rxtools.view.wheelhorizontal;

import android.content.Context;
import android.os.Handler;
import android.os.Message;
import android.view.GestureDetector;
import android.view.GestureDetector.SimpleOnGestureListener;
import android.view.MotionEvent;
import android.view.animation.Interpolator;
import android.widget.Scroller;

/**
 * Scroller class handles scrolling events and updates the spinnerwheel.
 * Subclasses supply the axis-specific pieces (current/final scroller position,
 * touch coordinate extraction, and the concrete fling/scroll starts), which lets
 * the same message-driven animation loop serve both orientations.
 */
public abstract class WheelScroller {
    /**
     * Scrolling listener interface
     */
    public interface ScrollingListener {
        /**
         * Scrolling callback called when scrolling is performed.
         * @param distance the distance to scroll
         */
        void onScroll(int distance);

        /**
         * This callback is invoked when scroller has been touched
         */
        void onTouch();

        /**
         * This callback is invoked when touch is up
         */
        void onTouchUp();

        /**
         * Starting callback called when scrolling is started
         */
        void onStarted();

        /**
         * Finishing callback called after justifying
         */
        void onFinished();

        /**
         * Justifying callback called to justify a view when scrolling is ended
         */
        void onJustify();
    }

    /** Scrolling duration */
    private static final int SCROLLING_DURATION = 400;

    /** Minimum delta for scrolling */
    public static final int MIN_DELTA_FOR_SCROLLING = 1;

    // Listener
    private ScrollingListener listener;

    // Context
    private Context context;

    // Scrolling
    private GestureDetector gestureDetector;
    protected Scroller scroller;
    // Scroller position at the last animation tick; deltas are reported relative to it.
    private int lastScrollPosition;
    // Touch coordinate of the previous MotionEvent, used to compute drag distance.
    private float lastTouchedPosition;
    private boolean isScrollingPerformed;

    /**
     * Constructor
     * @param context the current context
     * @param listener the scrolling listener
     */
    public WheelScroller(Context context, ScrollingListener listener) {
        gestureDetector = new GestureDetector(context, new SimpleOnGestureListener() {
            public boolean onScroll(MotionEvent e1, MotionEvent e2, float distanceX, float distanceY) {
                // Do scrolling in onTouchEvent() since onScroll() are not call immediately
                //  when user touch and move the spinnerwheel
                return true;
            }

            public boolean onFling(MotionEvent e1, MotionEvent e2, float velocityX, float velocityY) {
                // Start a fling from position 0 and drive the animation via the handler loop.
                lastScrollPosition = 0;
                scrollerFling(lastScrollPosition, (int) velocityX, (int) velocityY);
                setNextMessage(MESSAGE_SCROLL);
                return true;
            }

            // public boolean onDown(MotionEvent motionEvent);
        });
        gestureDetector.setIsLongpressEnabled(false);

        scroller = new Scroller(context);

        this.listener = listener;
        this.context = context;
    }

    /**
     * Set the the specified scrolling interpolator
     * @param interpolator the interpolator
     */
    public void setInterpolator(Interpolator interpolator) {
        // Scroller's interpolator cannot be swapped in place; abort the current
        // animation and replace the scroller instance.
        scroller.forceFinished(true);
        scroller = new Scroller(context, interpolator);
    }

    /**
     * Scroll the spinnerwheel
     * @param distance the scrolling distance
     * @param time the scrolling duration (0 means the default duration)
     */
    public void scroll(int distance, int time) {
        scroller.forceFinished(true);
        lastScrollPosition = 0;
        scrollerStartScroll(distance, time != 0 ? time : SCROLLING_DURATION);
        setNextMessage(MESSAGE_SCROLL);
        startScrolling();
    }

    /**
     * Stops scrolling
     */
    public void stopScrolling() {
        scroller.forceFinished(true);
    }

    /**
     * Handles Touch event
     * @param event the motion event
     * @return always true (the event is consumed by this scroller)
     */
    public boolean onTouchEvent(MotionEvent event) {
        switch (event.getAction()) {
            case MotionEvent.ACTION_DOWN:
                // Remember the touch origin and freeze any running animation.
                lastTouchedPosition = getMotionEventPosition(event);
                scroller.forceFinished(true);
                clearMessages();
                listener.onTouch();
                break;

            case MotionEvent.ACTION_UP:
                if (scroller.isFinished())
                    listener.onTouchUp();
                break;

            case MotionEvent.ACTION_MOVE:
                // perform scrolling
                int distance = (int)(getMotionEventPosition(event) - lastTouchedPosition);
                if (distance != 0) {
                    startScrolling();
                    listener.onScroll(distance);
                    lastTouchedPosition = getMotionEventPosition(event);
                }
                break;
        }

        // If the gesture detector did not turn the release into a fling, snap the
        // wheel to the nearest item.
        if (!gestureDetector.onTouchEvent(event) && event.getAction() == MotionEvent.ACTION_UP) {
            justify();
        }

        return true;
    }

    // Messages
    private final int MESSAGE_SCROLL = 0;
    private final int MESSAGE_JUSTIFY = 1;

    /**
     * Set next message to queue. Clears queue before.
     *
     * @param message the message to set
     */
    private void setNextMessage(int message) {
        clearMessages();
        animationHandler.sendEmptyMessage(message);
    }

    /**
     * Clears messages from queue
     */
    private void clearMessages() {
        animationHandler.removeMessages(MESSAGE_SCROLL);
        animationHandler.removeMessages(MESSAGE_JUSTIFY);
    }

    // animation handler
    // NOTE(review): a non-static Handler on the main looper; each message advances the
    // Scroller one tick, reports the delta, and re-posts itself until the animation ends.
    private Handler animationHandler = new Handler() {
        public void handleMessage(Message msg) {
            scroller.computeScrollOffset();
            int currPosition = getCurrentScrollerPosition();
            int delta = lastScrollPosition - currPosition;
            lastScrollPosition = currPosition;
            if (delta != 0) {
                listener.onScroll(delta);
            }

            // scrolling is not finished when it comes to final Y
            // so, finish it manually
            if (Math.abs(currPosition - getFinalScrollerPosition()) < MIN_DELTA_FOR_SCROLLING) {
                // currPosition = getFinalScrollerPosition();
                scroller.forceFinished(true);
            }
            if (!scroller.isFinished()) {
                animationHandler.sendEmptyMessage(msg.what);
            } else if (msg.what == MESSAGE_SCROLL) {
                justify();
            } else {
                finishScrolling();
            }
        }
    };

    /**
     * Justifies spinnerwheel
     */
    private void justify() {
        listener.onJustify();
        setNextMessage(MESSAGE_JUSTIFY);
    }

    /**
     * Starts scrolling
     */
    private void startScrolling() {
        if (!isScrollingPerformed) {
            isScrollingPerformed = true;
            listener.onStarted();
        }
    }

    /**
     * Finishes scrolling
     */
    protected void finishScrolling() {
        if (isScrollingPerformed) {
            listener.onFinished();
            isScrollingPerformed = false;
        }
    }

    // Axis-specific hooks implemented by horizontal/vertical subclasses.
    protected abstract int getCurrentScrollerPosition();

    protected abstract int getFinalScrollerPosition();

    protected abstract float getMotionEventPosition(MotionEvent event);

    protected abstract void scrollerStartScroll(int distance, int time);

    protected abstract void scrollerFling(int position, int velocityX, int velocityY);
}
/* Copyright 2016 Goldman Sachs. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.gs.fw.common.mithra.test; import com.gs.fw.common.mithra.*; import com.gs.fw.common.mithra.test.domain.*; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.sql.*; import java.util.HashSet; import java.util.TimeZone; import java.util.concurrent.atomic.AtomicBoolean; public class TestClientPortalTimeoutDuringDatabaseOperation extends RemoteMithraServerTestCase { private static Logger logger = LoggerFactory.getLogger(TestClientPortalTimeoutDuringDatabaseOperation.class.getName()); private static final int TRANSACTION_TIMEOUT_SECONDS = 2; public static final TimeZone TIME_ZONE = TimeZone.getTimeZone("Asia/Tokyo"); private static final int EXISTING_ORDER_ID = 1; private static final int EXISTING_ORDER_ITEM_ID = 1; private static final int NEW_ORDER_ID = 1017; private static final int NEW_ORDER_ITEM_ID = 1017; private Connection connectionForLock; private Thread lockReleaserThread; protected Class[] getRestrictedClassList() { HashSet result = new HashSet(); // This test uses these classes result.add(Order.class); result.add(OrderItem.class); // This test does not use these classes but they are full cached by the server and generate errors if not added result.add(FullyCachedTinyBalance.class); result.add(SpecialAccount.class); Class[] array = new Class[result.size()]; result.toArray(array); return array; } @Override public void workerVmSetUp() { 
MithraManagerProvider.getMithraManager().setTransactionTimeout(TRANSACTION_TIMEOUT_SECONDS); super.workerVmSetUp(); } @Override protected void setUp() throws Exception { MithraManagerProvider.getMithraManager().setTransactionTimeout(TRANSACTION_TIMEOUT_SECONDS); super.setUp(); setDatabaseLockTimeout(TRANSACTION_TIMEOUT_SECONDS * 100 * 1000); } protected void setDefaultServerTimezone() { TimeZone.setDefault(TIME_ZONE); } public void setDatabaseLockTimeout(int timeout) { this.getRemoteWorkerVm().executeMethod("serverSetTimeout", new Class[]{int.class}, new Object[]{new Integer(timeout)}); } public void serverSetTimeout(int timeout) throws SQLException { Connection con = null; Connection con2 = null; try { con = setTimeoutOnConnection(timeout); con2 = setTimeoutOnConnection(timeout); } finally { if (con != null) con.close(); if (con2 != null) con2.close(); } } private Connection setTimeoutOnConnection(int timeoutInMillis) throws SQLException { Connection con; con = getServerSideConnection(); Statement stm = con.createStatement(); stm.execute("SET LOCK_TIMEOUT "+timeoutInMillis); stm.close(); return con; } public void serverTakeLockOnOrder() throws SQLException { this.connectionForLock = this.getServerSideConnection(); this.connectionForLock.setAutoCommit(false); String sql = "update APP.ORDERS set DESCRIPTION = 'my new description'"; PreparedStatement ps = this.connectionForLock.prepareStatement(sql); assertEquals(7, ps.executeUpdate()); ps.close(); } public void serverReleaseLockOnOrder() throws SQLException { this.connectionForLock.rollback(); this.connectionForLock.close(); this.connectionForLock = null; } // This test reproduces the same issue we observed in production. // i.e. an insert is blocked in the database for a long time and by the time it finally goes through // the Mithra transaction timeout has already been reached. 
public void testLocalTimeoutDuringInitialInsert() throws SQLException, InterruptedException { assertPreConditionOnExistingDatabaseRecords(); // At the pre-commit stage, the SQL insert statement will run and get blocked on the database side. // When lockReleaser thread releases the database lock, the SQL insert statement will complete. // At this point the Mithra transaction will have already exceeded the Mithra tx timeout and will need to // be rolled back on both the local (client) side and the remote (server) side as well as in the database. try { MithraManagerProvider.getMithraManager().executeTransactionalCommand(new TransactionalCommand() { public Object executeTransaction(MithraTransaction mithraTransaction) throws Throwable { holdTableLockLongEnoughToExceedTxTimeoutAndReleaseConcurrently(); Order order = new Order(); order.setOrderId(NEW_ORDER_ID); order.insert(); // the SQL insert is actually deferred/buffered until the pre-commit stage return null; // Mithra timeout should happen in pre-commit stage } }); fail("should not get here."); } catch(MithraDatabaseException e) { assertExceptionIsCausedByTimeout(e); } waitToEnsureLockReleaseHasCompleted(); // should already be done by now but make sure this.setDatabaseLockTimeout(0); // use zero lock timeout to detect any remaining database locks as any statement which gets blocked will fail this.getRemoteWorkerVm().executeMethod("serverTestOrderInsertRollbackInCache"); this.getRemoteWorkerVm().executeMethod("serverTestOrderInsertRollbackInDatabase"); this.getRemoteWorkerVm().executeMethod("serverTestForLocksOfAnyKindOnOrderTable"); } private void assertExceptionIsCausedByTimeout(MithraDatabaseException e) { assertTrue("Unexpected type of exception: " + e, e.isTimedOut() || e.getMessage().contains("timeout") || (e.getCause() != null && e.getCause().getMessage().contains("timeout"))); } public void testLocalTimeoutDuringInitialSelect() throws SQLException, InterruptedException { 
assertPreConditionOnExistingDatabaseRecords(); // The SQL select statement triggered by findOne() will block in the database. // When competing database locks are released, the select will return but the findOne() // will throw an exception as the Mithra timeout has been reached. // We just need to ensure that the database locks held by the select statement // get released in a timely fashion. try { MithraManagerProvider.getMithraManager().executeTransactionalCommand(new TransactionalCommand() { public Object executeTransaction(MithraTransaction mithraTransaction) throws Throwable { holdTableLockLongEnoughToExceedTxTimeoutAndReleaseConcurrently(); OrderFinder.findOne(OrderFinder.orderId().eq(EXISTING_ORDER_ID)); // note select within a transaction takes a lock return null; } }); fail("should not get here as we expect Mithra transaction timeout."); } catch(MithraDatabaseException e) { assertExceptionIsCausedByTimeout(e); } waitToEnsureLockReleaseHasCompleted(); // should already be done by now but make sure this.setDatabaseLockTimeout(0); // use zero lock timeout to detect any remaining database locks as any statement which gets blocked will fail this.getRemoteWorkerVm().executeMethod("serverTestForLocksOfAnyKindOnOrderTable"); } // In this test the first insert has a chance to define a ClientTransactionContext under normal conditions. // The second insert is blocked for a long time and finishes only after the Mithra tx timeout is exceeded. public void testLocalTimeoutDuringSecondInsert() throws SQLException, InterruptedException { assertPreConditionOnExistingDatabaseRecords(); // At the pre-commit stage, the SQL insert statement will run and get blocked on the database side. // When lockReleaser thread releases the database lock, the SQL insert statement will complete. 
// At this point the Mithra transaction will have already exceeded the Mithra tx timeout and will need to // be rolled back on both the local (client) side and the remote (server) side as well as in the database. final AtomicBoolean firstOperationCompleted = new AtomicBoolean(false); try { MithraManagerProvider.getMithraManager().executeTransactionalCommand(new TransactionalCommand() { public Object executeTransaction(MithraTransaction mithraTransaction) throws Throwable { OrderItem orderItem = new OrderItem(); orderItem.setId(NEW_ORDER_ITEM_ID); orderItem.insert(); // Force SQL insert statement to be executed right now mithraTransaction.setImmediateOperations(true); firstOperationCompleted.set(true); holdTableLockLongEnoughToExceedTxTimeoutAndReleaseConcurrently(); Order order = new Order(); order.setOrderId(NEW_ORDER_ID); order.insert(); // will block in the database. By the time it completes, it will time out. return null; } }); fail("should not get here."); } catch(MithraDatabaseException e) { assertExceptionIsCausedByTimeout(e); } assertTrue(firstOperationCompleted.get()); waitToEnsureLockReleaseHasCompleted(); // should already be done by now but make sure this.setDatabaseLockTimeout(0); // use zero lock timeout to detect any remaining database locks as any statement which gets blocked will fail this.getRemoteWorkerVm().executeMethod("serverTestOrderItemInsertRollbackInCache"); this.getRemoteWorkerVm().executeMethod("serverTestOrderItemInsertRollbackInDatabase"); this.getRemoteWorkerVm().executeMethod("serverTestOrderInsertRollbackInCache"); this.getRemoteWorkerVm().executeMethod("serverTestOrderInsertRollbackInDatabase"); this.getRemoteWorkerVm().executeMethod("serverTestForLocksOfAnyKindOnOrderTable"); this.getRemoteWorkerVm().executeMethod("serverTestForLocksOfAnyKindOnOrderItemTable"); } // In this test the first select has a chance to define a ClientTransactionContext under normal conditions. 
// The second insert is blocked for a long time and finishes only after the Mithra tx timeout is exceeded. public void testLocalTimeoutDuringSecondSelect() throws SQLException, InterruptedException { assertPreConditionOnExistingDatabaseRecords(); holdTableLockLongEnoughToExceedTxTimeoutAndReleaseConcurrently(); // The SQL select statement triggered by findOne() will block in the database. // When competing database locks are released, the select will return but the findOne() // will throw an exception as the Mithra timeout has been reached. // We just need to ensure that the database locks held by the select statement // get released in a timely fashion. final AtomicBoolean firstOperationCompleted = new AtomicBoolean(false); try { MithraManagerProvider.getMithraManager().executeTransactionalCommand(new TransactionalCommand() { public Object executeTransaction(MithraTransaction mithraTransaction) throws Throwable { // First select operation in this test must be on a different table so as not to be blocked OrderItem orderItem = OrderItemFinder.findOne(OrderItemFinder.id().eq(EXISTING_ORDER_ITEM_ID)); // note select within a transaction takes a lock assertNotNull(orderItem); firstOperationCompleted.set(true); OrderFinder.findOne(OrderFinder.orderId().eq(EXISTING_ORDER_ID)); // note select within a transaction takes a lock // Mithra transaction has timed out by this point but an exception is not triggered until we try to commit return null; } }); fail("should not get here as we expect transaction commit to fail due to timeout."); } catch(MithraDatabaseException e) { assertExceptionIsCausedByTimeout(e); } assertTrue(firstOperationCompleted.get()); waitToEnsureLockReleaseHasCompleted(); // should already be done by now but make sure this.setDatabaseLockTimeout(0); // use zero lock timeout to detect any remaining database locks as any statement which gets blocked will fail this.getRemoteWorkerVm().executeMethod("serverTestForLocksOfAnyKindOnOrderTable"); 
this.getRemoteWorkerVm().executeMethod("serverTestForLocksOfAnyKindOnOrderItemTable"); } private void holdTableLockLongEnoughToExceedTxTimeoutAndReleaseConcurrently() { TestClientPortalTimeoutDuringDatabaseOperation.this.getRemoteWorkerVm().executeMethod("serverTakeLockOnOrder"); releaseLockAtFuturePointInTime(TRANSACTION_TIMEOUT_SECONDS + 1); // wait just long enough to exceed Mithra tx timeout } private void releaseLockAtFuturePointInTime(final int secondsToWait) { this.lockReleaserThread = new Thread(new Runnable() { @Override public void run() { long startTime = System.currentTimeMillis(); while (System.currentTimeMillis() - startTime < secondsToWait) { try { long millisLeftToWait = (secondsToWait - (System.currentTimeMillis() - startTime)) * 1000L; Thread.sleep(millisLeftToWait); } catch (InterruptedException e) { // Ignore } } logger.warn("Releasing lock on database table"); TestClientPortalTimeoutDuringDatabaseOperation.this.getRemoteWorkerVm().executeMethod("serverReleaseLockOnOrder"); } }); this.lockReleaserThread.start(); } private void waitToEnsureLockReleaseHasCompleted() throws InterruptedException { this.lockReleaserThread.join(); } private void assertPreConditionOnExistingDatabaseRecords() { Order unwantedOrder = OrderFinder.findOne(OrderFinder.orderId().eq(NEW_ORDER_ID)); assertNull("There is a conflicting existing Order in the database with orderId=" + NEW_ORDER_ID + ". Please remove it!", unwantedOrder); OrderItem unwantedOrderItem = OrderItemFinder.findOne(OrderItemFinder.id().eq(NEW_ORDER_ITEM_ID)); assertNull("There is a conflicting existing OrderItem in the database with id=" + NEW_ORDER_ITEM_ID + ". Please remove it!", unwantedOrderItem); Order requiredOrder = OrderFinder.findOne(OrderFinder.orderId().eq(EXISTING_ORDER_ID)); assertNotNull("This test requires the existence of an Order with orderId=" + EXISTING_ORDER_ID + " from the test data file. 
Please put it back!", requiredOrder); OrderItem requiredOrderItem = OrderItemFinder.findOne(OrderItemFinder.id().eq(EXISTING_ORDER_ITEM_ID)); assertNotNull("This test requires the existence of an OrderItem with id=" + EXISTING_ORDER_ITEM_ID + " from the test data file. Please put it back!", requiredOrderItem); } public void serverTestOrderInsertRollbackInCache() { assertNull(OrderFinder.findOne(OrderFinder.orderId().eq(NEW_ORDER_ID))); } public void serverTestOrderInsertRollbackInDatabase() throws SQLException { Connection con = this.getServerSideConnection(); String sql = "select * from APP.ORDERS where ORDER_ID = ?"; PreparedStatement ps = con.prepareStatement(sql); ps.setInt(1, NEW_ORDER_ID); ResultSet rs = ps.executeQuery(); assertFalse(rs.next()); rs.close(); ps.close(); con.close(); } public void serverTestOrderItemInsertRollbackInCache() { assertNull(OrderItemFinder.findOne(OrderItemFinder.orderId().eq(NEW_ORDER_ITEM_ID))); } public void serverTestOrderItemInsertRollbackInDatabase() throws SQLException { Connection con = this.getServerSideConnection(); String sql = "select * from APP.ORDER_ITEM where ORDER_ID = ?"; PreparedStatement ps = con.prepareStatement(sql); ps.setInt(1, NEW_ORDER_ITEM_ID); ResultSet rs = ps.executeQuery(); assertFalse(rs.next()); rs.close(); ps.close(); con.close(); } public void serverTestForLocksOfAnyKindOnOrderTable() throws SQLException { Connection con = this.getServerSideConnection(); Statement stmt = con.createStatement(); stmt.executeUpdate("SET LOCK_TIMEOUT 0"); // this is to ensure any existing table locks will cause the truncate table to fail immediately stmt.executeUpdate("truncate table APP.ORDERS"); stmt.close(); con.close(); } public void serverTestForLocksOfAnyKindOnOrderItemTable() throws SQLException { Connection con = this.getServerSideConnection(); Statement stmt = con.createStatement(); stmt.executeUpdate("SET LOCK_TIMEOUT 0"); // this is to ensure any existing table locks will cause the truncate table to fail 
immediately stmt.executeUpdate("truncate table APP.ORDER_ITEM"); stmt.close(); con.close(); } }
/*
 * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.personalizeevents;

import org.w3c.dom.*;

import java.net.*;
import java.util.*;

import javax.annotation.Generated;

import org.apache.commons.logging.*;

import com.amazonaws.*;
import com.amazonaws.annotation.SdkInternalApi;
import com.amazonaws.auth.*;

import com.amazonaws.handlers.*;
import com.amazonaws.http.*;
import com.amazonaws.internal.*;
import com.amazonaws.internal.auth.*;
import com.amazonaws.metrics.*;
import com.amazonaws.regions.*;
import com.amazonaws.transform.*;
import com.amazonaws.util.*;
import com.amazonaws.protocol.json.*;
import com.amazonaws.util.AWSRequestMetrics.Field;
import com.amazonaws.annotation.ThreadSafe;
import com.amazonaws.client.AwsSyncClientParams;
import com.amazonaws.client.builder.AdvancedConfig;

import com.amazonaws.services.personalizeevents.AmazonPersonalizeEventsClientBuilder;

import com.amazonaws.AmazonServiceException;

import com.amazonaws.services.personalizeevents.model.*;
import com.amazonaws.services.personalizeevents.model.transform.*;

/**
 * Client for accessing Amazon Personalize Events. All service calls made using this client are blocking, and will not
 * return until the service call completes.
 * <p>
 * <p/>
 */
// NOTE(review): this class is produced by the AWS SDK code generator; keep edits to generated
// code minimal so they are not lost on regeneration.
@ThreadSafe
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class AmazonPersonalizeEventsClient extends AmazonWebServiceClient implements AmazonPersonalizeEvents {
    /** Provider for AWS credentials. */
    private final AWSCredentialsProvider awsCredentialsProvider;

    private static final Log log = LogFactory.getLog(AmazonPersonalizeEvents.class);

    /** Default signing name for the service. */
    private static final String DEFAULT_SIGNING_NAME = "personalize";

    /** Client configuration factory providing ClientConfigurations tailored to this client */
    protected static final ClientConfigurationFactory configFactory = new ClientConfigurationFactory();

    /** Advanced, SDK-internal configuration; populated from AwsSyncClientParams in the constructor. */
    private final AdvancedConfig advancedConfig;

    // Shared JSON protocol factory; declares the single modeled error shape (InvalidInputException)
    // and the base service exception used for unmodeled errors.
    private static final com.amazonaws.protocol.json.SdkJsonProtocolFactory protocolFactory = new com.amazonaws.protocol.json.SdkJsonProtocolFactory(
            new JsonClientMetadata()
                    .withProtocolVersion("1.1")
                    .withSupportsCbor(false)
                    .withSupportsIon(false)
                    .withContentTypeOverride("")
                    .addErrorMetadata(
                            new JsonErrorShapeMetadata().withErrorCode("InvalidInputException").withModeledClass(
                                    com.amazonaws.services.personalizeevents.model.InvalidInputException.class))
                    .withBaseServiceExceptionClass(com.amazonaws.services.personalizeevents.model.AmazonPersonalizeEventsException.class));

    public static AmazonPersonalizeEventsClientBuilder builder() {
        return AmazonPersonalizeEventsClientBuilder.standard();
    }

    /**
     * Constructs a new client to invoke service methods on Amazon Personalize Events using the specified parameters.
     *
     * <p>
     * All service calls made using this new client object are blocking, and will not return until the service call
     * completes.
     *
     * @param clientParams
     *        Object providing client parameters.
     */
    AmazonPersonalizeEventsClient(AwsSyncClientParams clientParams) {
        this(clientParams, false);
    }

    /**
     * Constructs a new client to invoke service methods on Amazon Personalize Events using the specified parameters.
     *
     * <p>
     * All service calls made using this new client object are blocking, and will not return until the service call
     * completes.
     *
     * @param clientParams
     *        Object providing client parameters.
     */
    AmazonPersonalizeEventsClient(AwsSyncClientParams clientParams, boolean endpointDiscoveryEnabled) {
        super(clientParams);
        this.awsCredentialsProvider = clientParams.getCredentialsProvider();
        this.advancedConfig = clientParams.getAdvancedConfig();
        init();
    }

    private void init() {
        setServiceNameIntern(DEFAULT_SIGNING_NAME);
        setEndpointPrefix(ENDPOINT_PREFIX);
        // calling this.setEndPoint(...) will also modify the signer accordingly
        // NOTE(review): the us-east-1 endpoint below is only the generated default; the builder
        // replaces it with the configured region's endpoint.
        setEndpoint("personalize-events.us-east-1.amazonaws.com");
        HandlerChainFactory chainFactory = new HandlerChainFactory();
        requestHandler2s.addAll(chainFactory.newRequestHandlerChain("/com/amazonaws/services/personalizeevents/request.handlers"));
        requestHandler2s.addAll(chainFactory.newRequestHandler2Chain("/com/amazonaws/services/personalizeevents/request.handler2s"));
        requestHandler2s.addAll(chainFactory.getGlobalHandlers());
    }

    /**
     * <p>
     * Records user interaction event data.
     * </p>
     *
     * @param putEventsRequest
     * @return Result of the PutEvents operation returned by the service.
     * @throws InvalidInputException
     *         Provide a valid value for the field or parameter.
     * @sample AmazonPersonalizeEvents.PutEvents
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/personalize-events-2018-03-22/PutEvents" target="_top">AWS
     *      API Documentation</a>
     */
    @Override
    public PutEventsResult putEvents(PutEventsRequest request) {
        request = beforeClientExecution(request);
        return executePutEvents(request);
    }

    // Marshals the request, invokes the service, and unmarshals the response, recording
    // request metrics around each phase.
    @SdkInternalApi
    final PutEventsResult executePutEvents(PutEventsRequest putEventsRequest) {

        ExecutionContext executionContext = createExecutionContext(putEventsRequest);
        AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics();
        awsRequestMetrics.startEvent(Field.ClientExecuteTime);
        Request<PutEventsRequest> request = null;
        Response<PutEventsResult> response = null;

        try {
            awsRequestMetrics.startEvent(Field.RequestMarshallTime);
            try {
                request = new PutEventsRequestProtocolMarshaller(protocolFactory).marshall(super.beforeMarshalling(putEventsRequest));
                // Binds the request metrics to the current request.
                request.setAWSRequestMetrics(awsRequestMetrics);
                request.addHandlerContext(HandlerContextKey.SIGNING_REGION, getSigningRegion());
                request.addHandlerContext(HandlerContextKey.SERVICE_ID, "Personalize Events");
                request.addHandlerContext(HandlerContextKey.OPERATION_NAME, "PutEvents");
                request.addHandlerContext(HandlerContextKey.ADVANCED_CONFIG, advancedConfig);

            } finally {
                awsRequestMetrics.endEvent(Field.RequestMarshallTime);
            }

            HttpResponseHandler<AmazonWebServiceResponse<PutEventsResult>> responseHandler = protocolFactory.createResponseHandler(new JsonOperationMetadata()
                    .withPayloadJson(true).withHasStreamingSuccessResponse(false), new PutEventsResultJsonUnmarshaller());
            response = invoke(request, responseHandler, executionContext);

            return response.getAwsResponse();

        } finally {

            endClientExecution(awsRequestMetrics, request, response);
        }
    }

    /**
     * Returns additional metadata for a previously executed successful request, typically used for debugging issues
     * where a service isn't acting as expected. This data isn't considered part of the result data returned by an
     * operation, so it's available through this separate, diagnostic interface.
     * <p>
     * Response metadata is only cached for a limited period of time, so if you need to access this extra diagnostic
     * information for an executed request, you should use this method to retrieve it as soon as possible after
     * executing the request.
     *
     * @param request
     *        The originally executed request
     *
     * @return The response metadata for the specified request, or null if none is available.
     */
    public ResponseMetadata getCachedResponseMetadata(AmazonWebServiceRequest request) {
        return client.getResponseMetadataForRequest(request);
    }

    /**
     * Normal invoke with authentication. Credentials are required and may be overriden at the request level.
     **/
    private <X, Y extends AmazonWebServiceRequest> Response<X> invoke(Request<Y> request, HttpResponseHandler<AmazonWebServiceResponse<X>> responseHandler,
            ExecutionContext executionContext) {

        return invoke(request, responseHandler, executionContext, null, null);
    }

    /**
     * Normal invoke with authentication. Credentials are required and may be overriden at the request level.
     **/
    private <X, Y extends AmazonWebServiceRequest> Response<X> invoke(Request<Y> request, HttpResponseHandler<AmazonWebServiceResponse<X>> responseHandler,
            ExecutionContext executionContext, URI cachedEndpoint, URI uriFromEndpointTrait) {

        executionContext.setCredentialsProvider(CredentialUtils.getCredentialsProvider(request.getOriginalRequest(), awsCredentialsProvider));

        return doInvoke(request, responseHandler, executionContext, cachedEndpoint, uriFromEndpointTrait);
    }

    /**
     * Invoke with no authentication. Credentials are not required and any credentials set on the client or request will
     * be ignored for this operation.
     **/
    private <X, Y extends AmazonWebServiceRequest> Response<X> anonymousInvoke(Request<Y> request,
            HttpResponseHandler<AmazonWebServiceResponse<X>> responseHandler, ExecutionContext executionContext) {

        return doInvoke(request, responseHandler, executionContext, null, null);
    }

    /**
     * Invoke the request using the http client. Assumes credentials (or lack thereof) have been configured in the
     * ExecutionContext beforehand.
     **/
    private <X, Y extends AmazonWebServiceRequest> Response<X> doInvoke(Request<Y> request, HttpResponseHandler<AmazonWebServiceResponse<X>> responseHandler,
            ExecutionContext executionContext, URI discoveredEndpoint, URI uriFromEndpointTrait) {

        // Endpoint precedence: discovered endpoint > endpoint trait on the operation > client endpoint.
        if (discoveredEndpoint != null) {
            request.setEndpoint(discoveredEndpoint);
            request.getOriginalRequest().getRequestClientOptions().appendUserAgent("endpoint-discovery");
        } else if (uriFromEndpointTrait != null) {
            request.setEndpoint(uriFromEndpointTrait);
        } else {
            request.setEndpoint(endpoint);
        }

        request.setTimeOffset(timeOffset);

        HttpResponseHandler<AmazonServiceException> errorResponseHandler = protocolFactory.createErrorResponseHandler(new JsonErrorResponseMetadata());

        return client.execute(request, responseHandler, errorResponseHandler, executionContext);
    }

    @com.amazonaws.annotation.SdkInternalApi
    static com.amazonaws.protocol.json.SdkJsonProtocolFactory getProtocolFactory() {
        return protocolFactory;
    }

}
package psidev.psi.mi.jami.xml.io.writer.expanded; import junit.framework.Assert; import org.junit.Test; import psidev.psi.mi.jami.binary.BinaryInteraction; import psidev.psi.mi.jami.binary.impl.DefaultNamedBinaryInteraction; import psidev.psi.mi.jami.model.Complex; import psidev.psi.mi.jami.model.Participant; import psidev.psi.mi.jami.model.impl.*; import javax.xml.stream.XMLStreamException; import java.io.StringWriter; import java.util.Arrays; /** * Unit tester for LightExpandedXmlNamedBinaryWriter * * @author Marine Dumousseau (marine@ebi.ac.uk) * @version $Id$ * @since <pre>26/11/13</pre> */ public class LightExpandedXml25NamedBinaryWriterTest { private String interaction = "<?xml version='1.0' encoding='UTF-8'?>\n" + "<entrySet xmlns=\"http://psi.hupo.org/mi/mif\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" " + "xsi:schemaLocation=\"http://psi.hupo.org/mi/mif https://raw.githubusercontent.com/HUPO-PSI/miXML/master/2.5/src/MIF254.xsd\" " + "level=\"2\" version=\"5\" minorVersion=\"4\">\n" + " <entry>\n" + " <interactionList>\n" + " <interaction id=\"1\">\n" + " <experimentList>\n" + " <experimentDescription id=\"2\">\n" + " <names>\n" + " <fullName>Mock publication for interactions that do not have experimental details.</fullName>\n" + " </names>\n" + " <bibref>\n" + " <attributeList>\n" + " <attribute name=\"publication title\" nameAc=\"MI:1091\">Mock publication for interactions that do not have experimental details.</attribute>\n" + " </attributeList>\n" + " </bibref>\n" + " <interactionDetectionMethod>\n" + " <names>\n" + " <shortLabel>unspecified method</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </interactionDetectionMethod>\n" + " </experimentDescription>\n"+ " </experimentList>\n" + " <participantList>\n" + " <participant id=\"3\">\n" + " <interactor id=\"4\">\n" + " <names>\n" + " <shortLabel>protein 
test</shortLabel>\n" + " </names>\n" + " <interactorType>\n" + " <names>\n" + " <shortLabel>protein</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </interactorType>\n" + " </interactor>\n"+ " <biologicalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </biologicalRole>\n" + " </participant>\n"+ " </participantList>\n" + " </interaction>\n"+ " </interactionList>\n"+ " </entry>\n" + "</entrySet>"; private String interaction_multiple = "<?xml version='1.0' encoding='UTF-8'?>\n" + "<entrySet xmlns=\"http://psi.hupo.org/mi/mif\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" " + "xsi:schemaLocation=\"http://psi.hupo.org/mi/mif https://raw.githubusercontent.com/HUPO-PSI/miXML/master/2.5/src/MIF254.xsd\" " + "level=\"2\" version=\"5\" minorVersion=\"4\">\n" + " <entry>\n" + " <interactionList>\n" + " <interaction id=\"1\">\n" + " <experimentList>\n" + " <experimentDescription id=\"2\">\n" + " <names>\n" + " <fullName>Mock publication for interactions that do not have experimental details.</fullName>\n" + " </names>\n" + " <bibref>\n" + " <attributeList>\n" + " <attribute name=\"publication title\" nameAc=\"MI:1091\">Mock publication for interactions that do not have experimental details.</attribute>\n" + " </attributeList>\n" + " </bibref>\n" + " <interactionDetectionMethod>\n" + " <names>\n" + " <shortLabel>unspecified method</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </interactionDetectionMethod>\n" + " </experimentDescription>\n"+ " </experimentList>\n" + " <participantList>\n" + " <participant id=\"3\">\n" + " <interactor 
id=\"4\">\n" + " <names>\n" + " <shortLabel>protein test</shortLabel>\n" + " </names>\n" + " <interactorType>\n" + " <names>\n" + " <shortLabel>protein</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </interactorType>\n" + " </interactor>\n"+ " <biologicalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </biologicalRole>\n" + " </participant>\n"+ " </participantList>\n" + " </interaction>\n"+ " <interaction id=\"5\">\n" + " <experimentList>\n" + " <experimentDescription id=\"6\">\n" + " <names>\n" + " <fullName>Mock publication for interactions that do not have experimental details.</fullName>\n" + " </names>\n" + " <bibref>\n" + " <attributeList>\n" + " <attribute name=\"publication title\" nameAc=\"MI:1091\">Mock publication for interactions that do not have experimental details.</attribute>\n" + " </attributeList>\n" + " </bibref>\n" + " <interactionDetectionMethod>\n" + " <names>\n" + " <shortLabel>unspecified method</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </interactionDetectionMethod>\n" + " </experimentDescription>\n"+ " </experimentList>\n" + " <participantList>\n" + " <participant id=\"7\">\n" + " <interactor id=\"8\">\n" + " <names>\n" + " <shortLabel>protein test2</shortLabel>\n" + " </names>\n" + " <interactorType>\n" + " <names>\n" + " <shortLabel>protein</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </interactorType>\n" + " </interactor>\n"+ " <biologicalRole>\n" + " <names>\n" + " 
<shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </biologicalRole>\n" + " </participant>\n"+ " </participantList>\n" + " </interaction>\n"+ " </interactionList>\n"+ " </entry>\n" + "</entrySet>"; private String interaction_same_experiment_interactors = "<?xml version='1.0' encoding='UTF-8'?>\n" + "<entrySet xmlns=\"http://psi.hupo.org/mi/mif\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" " + "xsi:schemaLocation=\"http://psi.hupo.org/mi/mif https://raw.githubusercontent.com/HUPO-PSI/miXML/master/2.5/src/MIF254.xsd\" " + "level=\"2\" version=\"5\" minorVersion=\"4\">\n" + " <entry>\n" + " <interactionList>\n" + " <interaction id=\"1\">\n" + " <experimentList>\n" + " <experimentDescription id=\"2\">\n" + " <names>\n" + " <fullName>Mock publication for interactions that do not have experimental details.</fullName>\n" + " </names>\n" + " <bibref>\n" + " <attributeList>\n" + " <attribute name=\"publication title\" nameAc=\"MI:1091\">Mock publication for interactions that do not have experimental details.</attribute>\n" + " </attributeList>\n" + " </bibref>\n" + " <interactionDetectionMethod>\n" + " <names>\n" + " <shortLabel>unspecified method</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </interactionDetectionMethod>\n" + " </experimentDescription>\n"+ " </experimentList>\n"+ " <participantList>\n" + " <participant id=\"3\">\n" + " <interactor id=\"4\">\n" + " <names>\n" + " <shortLabel>protein test</shortLabel>\n" + " </names>\n" + " <interactorType>\n" + " <names>\n" + " <shortLabel>protein</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " 
</interactorType>\n" + " </interactor>\n"+ " <biologicalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </biologicalRole>\n" + " </participant>\n"+ " </participantList>\n" + " </interaction>\n"+ " <interaction id=\"5\">\n" + " <experimentList>\n" + " <experimentDescription id=\"6\">\n" + " <names>\n" + " <fullName>Mock publication for interactions that do not have experimental details.</fullName>\n" + " </names>\n" + " <bibref>\n" + " <attributeList>\n" + " <attribute name=\"publication title\" nameAc=\"MI:1091\">Mock publication for interactions that do not have experimental details.</attribute>\n" + " </attributeList>\n" + " </bibref>\n" + " <interactionDetectionMethod>\n" + " <names>\n" + " <shortLabel>unspecified method</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </interactionDetectionMethod>\n" + " </experimentDescription>\n"+ " </experimentList>\n" + " <participantList>\n" + " <participant id=\"7\">\n" + " <interactor id=\"8\">\n" + " <names>\n" + " <shortLabel>protein test</shortLabel>\n" + " </names>\n" + " <interactorType>\n" + " <names>\n" + " <shortLabel>protein</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </interactorType>\n" + " </interactor>\n"+ " <biologicalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </biologicalRole>\n" + " </participant>\n"+ " </participantList>\n" + " </interaction>\n"+ " </interactionList>\n"+ " </entry>\n" + 
"</entrySet>"; private String interaction_complexes = "<?xml version='1.0' encoding='UTF-8'?>\n" + "<entrySet xmlns=\"http://psi.hupo.org/mi/mif\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" " + "xsi:schemaLocation=\"http://psi.hupo.org/mi/mif https://raw.githubusercontent.com/HUPO-PSI/miXML/master/2.5/src/MIF254.xsd\" " + "level=\"2\" version=\"5\" minorVersion=\"4\">\n" + " <entry>\n" + " <interactionList>\n" + " <interaction id=\"1\">\n" + " <experimentList>\n" + " <experimentDescription id=\"2\">\n" + " <names>\n" + " <fullName>Mock publication for interactions that do not have experimental details.</fullName>\n" + " </names>\n" + " <bibref>\n" + " <attributeList>\n" + " <attribute name=\"publication title\" nameAc=\"MI:1091\">Mock publication for interactions that do not have experimental details.</attribute>\n" + " </attributeList>\n" + " </bibref>\n" + " <interactionDetectionMethod>\n" + " <names>\n" + " <shortLabel>unspecified method</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </interactionDetectionMethod>\n" + " </experimentDescription>\n"+ " </experimentList>\n" + " <participantList>\n" + " <participant id=\"3\">\n" + " <interactionRef>4</interactionRef>\n" + " <biologicalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </biologicalRole>\n" + " </participant>\n"+ " </participantList>\n" + " </interaction>\n"+ " <interaction id=\"4\">\n" + " <names>\n" + " <shortLabel>test complex</shortLabel>\n"+ " </names>\n" + " <experimentList>\n" + " <experimentDescription id=\"5\">\n" + " <names>\n" + " <fullName>Mock publication and experiment for abstract interactions that are not interaction evidences.</fullName>\n" + " </names>\n" + " 
<bibref>\n" + " <attributeList>\n" + " <attribute name=\"publication title\" nameAc=\"MI:1091\">Mock publication and experiment for abstract interactions that are not interaction evidences.</attribute>\n" + " </attributeList>\n" + " </bibref>\n" + " <interactionDetectionMethod>\n" + " <names>\n" + " <shortLabel>unspecified method</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </interactionDetectionMethod>\n" + " </experimentDescription>\n"+ " </experimentList>\n" + " <participantList>\n" + " <participant id=\"6\">\n" + " <interactor id=\"7\">\n" + " <names>\n" + " <shortLabel>test protein</shortLabel>\n" + " </names>\n" + " <interactorType>\n" + " <names>\n" + " <shortLabel>protein</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </interactorType>\n" + " </interactor>\n"+ " <biologicalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </biologicalRole>\n" + " </participant>\n"+ " </participantList>\n" + " </interaction>\n"+ " </interactionList>\n"+ " </entry>\n" + "</entrySet>"; private String interaction_complexes_as_interactor = "<?xml version='1.0' encoding='UTF-8'?>\n" + "<entrySet xmlns=\"http://psi.hupo.org/mi/mif\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" " + "xsi:schemaLocation=\"http://psi.hupo.org/mi/mif https://raw.githubusercontent.com/HUPO-PSI/miXML/master/2.5/src/MIF254.xsd\" " + "level=\"2\" version=\"5\" minorVersion=\"4\">\n" + " <entry>\n" + " <interactionList>\n" + " <interaction id=\"1\">\n" + " <experimentList>\n" + " <experimentDescription id=\"2\">\n" + " <names>\n" + " <fullName>Mock publication for 
interactions that do not have experimental details.</fullName>\n" + " </names>\n" + " <bibref>\n" + " <attributeList>\n" + " <attribute name=\"publication title\" nameAc=\"MI:1091\">Mock publication for interactions that do not have experimental details.</attribute>\n" + " </attributeList>\n" + " </bibref>\n" + " <interactionDetectionMethod>\n" + " <names>\n" + " <shortLabel>unspecified method</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </interactionDetectionMethod>\n" + " </experimentDescription>\n"+ " </experimentList>\n" + " <participantList>\n" + " <participant id=\"3\">\n" + " <interactor id=\"4\">\n" + " <names>\n" + " <shortLabel>test complex</shortLabel>\n" + " </names>\n" + " <interactorType>\n" + " <names>\n" + " <shortLabel>complex</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0314\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </interactorType>\n" + " </interactor>\n"+ " <biologicalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </biologicalRole>\n" + " </participant>\n"+ " </participantList>\n" + " </interaction>\n"+ " </interactionList>\n"+ " </entry>\n" + "</entrySet>"; private String interaction_different_entries1 = "<?xml version='1.0' encoding='UTF-8'?>\n" + "<entrySet xmlns=\"http://psi.hupo.org/mi/mif\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" " + "xsi:schemaLocation=\"http://psi.hupo.org/mi/mif https://raw.githubusercontent.com/HUPO-PSI/miXML/master/2.5/src/MIF254.xsd\" " + "level=\"2\" version=\"5\" minorVersion=\"4\">\n" + " <entry>\n" + " <interactionList>\n" + " <interaction id=\"1\">\n" + " <experimentList>\n" + " <experimentDescription 
id=\"2\">\n" + " <names>\n" + " <fullName>Mock publication for interactions that do not have experimental details.</fullName>\n" + " </names>\n" + " <bibref>\n" + " <attributeList>\n" + " <attribute name=\"publication title\" nameAc=\"MI:1091\">Mock publication for interactions that do not have experimental details.</attribute>\n" + " </attributeList>\n" + " </bibref>\n" + " <interactionDetectionMethod>\n" + " <names>\n" + " <shortLabel>unspecified method</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </interactionDetectionMethod>\n" + " </experimentDescription>\n"+ " </experimentList>\n" + " <participantList>\n" + " <participant id=\"3\">\n" + " <interactor id=\"4\">\n" + " <names>\n" + " <shortLabel>protein test</shortLabel>\n" + " </names>\n" + " <interactorType>\n" + " <names>\n" + " <shortLabel>protein</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </interactorType>\n" + " </interactor>\n"+ " <biologicalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </biologicalRole>\n" + " </participant>\n"+ " </participantList>\n" + " </interaction>\n"+ " </interactionList>\n"+ " </entry>\n" + " <entry>\n" + " <interactionList>\n" + " <interaction id=\"1\">\n" + " <experimentList>\n" + " <experimentDescription id=\"2\">\n" + " <names>\n" + " <fullName>Mock publication for interactions that do not have experimental details.</fullName>\n" + " </names>\n" + " <bibref>\n" + " <attributeList>\n" + " <attribute name=\"publication title\" nameAc=\"MI:1091\">Mock publication for interactions that do not have experimental details.</attribute>\n" + " 
</attributeList>\n" + " </bibref>\n" + " <interactionDetectionMethod>\n" + " <names>\n" + " <shortLabel>unspecified method</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </interactionDetectionMethod>\n" + " </experimentDescription>\n"+ " </experimentList>\n" + " <participantList>\n" + " <participant id=\"3\">\n" + " <interactor id=\"4\">\n" + " <names>\n" + " <shortLabel>protein test</shortLabel>\n" + " </names>\n" + " <interactorType>\n" + " <names>\n" + " <shortLabel>protein</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </interactorType>\n" + " </interactor>\n"+ " <biologicalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </biologicalRole>\n" + " </participant>\n"+ " </participantList>\n" + " </interaction>\n"+ " </interactionList>\n"+ " </entry>\n" + "</entrySet>"; private String interaction_different_entries2 = "<?xml version='1.0' encoding='UTF-8'?>\n" + "<entrySet xmlns=\"http://psi.hupo.org/mi/mif\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" " + "xsi:schemaLocation=\"http://psi.hupo.org/mi/mif https://raw.githubusercontent.com/HUPO-PSI/miXML/master/2.5/src/MIF254.xsd\" " + "level=\"2\" version=\"5\" minorVersion=\"4\">\n" + " <entry>\n" + " <interactionList>\n" + " <interaction id=\"1\">\n" + " <experimentList>\n" + " <experimentDescription id=\"2\">\n" + " <names>\n" + " <fullName>Mock publication for interactions that do not have experimental details.</fullName>\n" + " </names>\n" + " <bibref>\n" + " <attributeList>\n" + " <attribute name=\"publication title\" nameAc=\"MI:1091\">Mock publication for 
interactions that do not have experimental details.</attribute>\n" + " </attributeList>\n" + " </bibref>\n" + " <interactionDetectionMethod>\n" + " <names>\n" + " <shortLabel>unspecified method</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </interactionDetectionMethod>\n" + " </experimentDescription>\n"+ " </experimentList>\n" + " <participantList>\n" + " <participant id=\"3\">\n" + " <interactor id=\"4\">\n" + " <names>\n" + " <shortLabel>protein test</shortLabel>\n" + " </names>\n" + " <interactorType>\n" + " <names>\n" + " <shortLabel>protein</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </interactorType>\n" + " </interactor>\n"+ " <biologicalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </biologicalRole>\n" + " </participant>\n"+ " </participantList>\n" + " </interaction>\n"+ " <interaction id=\"5\">\n" + " <experimentList>\n" + " <experimentDescription id=\"6\">\n" + " <names>\n" + " <fullName>Mock publication for interactions that do not have experimental details.</fullName>\n" + " </names>\n" + " <bibref>\n" + " <attributeList>\n" + " <attribute name=\"publication title\" nameAc=\"MI:1091\">Mock publication for interactions that do not have experimental details.</attribute>\n" + " </attributeList>\n" + " </bibref>\n" + " <interactionDetectionMethod>\n" + " <names>\n" + " <shortLabel>unspecified method</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </interactionDetectionMethod>\n" + " 
</experimentDescription>\n"+ " </experimentList>\n" + " <participantList>\n" + " <participant id=\"7\">\n" + " <interactor id=\"8\">\n" + " <names>\n" + " <shortLabel>protein test2</shortLabel>\n" + " </names>\n" + " <interactorType>\n" + " <names>\n" + " <shortLabel>protein</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </interactorType>\n" + " </interactor>\n"+ " <biologicalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </biologicalRole>\n" + " </participant>\n"+ " </participantList>\n" + " </interaction>\n"+ " </interactionList>\n"+ " </entry>\n" + " <entry>\n" + " <interactionList>\n" + " <interaction id=\"1\">\n" + " <experimentList>\n" + " <experimentDescription id=\"2\">\n" + " <names>\n" + " <fullName>Mock publication for interactions that do not have experimental details.</fullName>\n" + " </names>\n" + " <bibref>\n" + " <attributeList>\n" + " <attribute name=\"publication title\" nameAc=\"MI:1091\">Mock publication for interactions that do not have experimental details.</attribute>\n" + " </attributeList>\n" + " </bibref>\n" + " <interactionDetectionMethod>\n" + " <names>\n" + " <shortLabel>unspecified method</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </interactionDetectionMethod>\n" + " </experimentDescription>\n"+ " </experimentList>\n" + " <participantList>\n" + " <participant id=\"3\">\n" + " <interactor id=\"4\">\n" + " <names>\n" + " <shortLabel>protein test</shortLabel>\n" + " </names>\n" + " <interactorType>\n" + " <names>\n" + " <shortLabel>protein</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef 
db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </interactorType>\n" + " </interactor>\n"+ " <biologicalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </biologicalRole>\n" + " </participant>\n"+ " </participantList>\n" + " </interaction>\n"+ " <interaction id=\"5\">\n" + " <experimentList>\n" + " <experimentDescription id=\"6\">\n" + " <names>\n" + " <fullName>Mock publication for interactions that do not have experimental details.</fullName>\n" + " </names>\n" + " <bibref>\n" + " <attributeList>\n" + " <attribute name=\"publication title\" nameAc=\"MI:1091\">Mock publication for interactions that do not have experimental details.</attribute>\n" + " </attributeList>\n" + " </bibref>\n" + " <interactionDetectionMethod>\n" + " <names>\n" + " <shortLabel>unspecified method</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </interactionDetectionMethod>\n" + " </experimentDescription>\n"+ " </experimentList>\n" + " <participantList>\n" + " <participant id=\"7\">\n" + " <interactor id=\"8\">\n" + " <names>\n" + " <shortLabel>protein test2</shortLabel>\n" + " </names>\n" + " <interactorType>\n" + " <names>\n" + " <shortLabel>protein</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " </interactorType>\n" + " </interactor>\n"+ " <biologicalRole>\n" + " <names>\n" + " <shortLabel>unspecified role</shortLabel>\n" + " </names>\n" + " <xref>\n" + " <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" + " </xref>\n" + " 
</biologicalRole>\n" + " </participant>\n"+ " </participantList>\n" + " </interaction>\n"+ " </interactionList>\n"+ " </entry>\n" + "</entrySet>"; @Test(expected = IllegalStateException.class) public void test_not_initialised_writer() { LightExpandedXmlNamedBinaryWriter writer = new LightExpandedXmlNamedBinaryWriter(); writer.write(new DefaultNamedBinaryInteraction()); } @Test(expected = IllegalArgumentException.class) public void test_not_initialised_no_options() { LightExpandedXmlNamedBinaryWriter writer = new LightExpandedXmlNamedBinaryWriter(); writer.initialiseContext(null); } @Test public void test_single_interaction() throws XMLStreamException { StringWriter stringWriter = new StringWriter(); LightExpandedXmlNamedBinaryWriter writer = new LightExpandedXmlNamedBinaryWriter(stringWriter); BinaryInteraction interaction = new DefaultNamedBinaryInteraction(); Participant participant = new DefaultNamedParticipant(new DefaultProtein("protein test")); interaction.addParticipant(participant); writer.start(); writer.write(interaction); writer.end(); writer.close(); Assert.assertEquals(this.interaction, stringWriter.toString()); } @Test public void test_several_interactions1() throws XMLStreamException { StringWriter stringWriter = new StringWriter(); LightExpandedXmlNamedBinaryWriter writer = new LightExpandedXmlNamedBinaryWriter(stringWriter); BinaryInteraction interaction = new DefaultNamedBinaryInteraction(); Participant participant = new DefaultNamedParticipant(new DefaultProtein("protein test")); interaction.addParticipant(participant); BinaryInteraction interaction2 = new DefaultNamedBinaryInteraction(); Participant participant2 = new DefaultNamedParticipant(new DefaultProtein("protein test2")); interaction2.addParticipant(participant2); writer.start(); writer.write(Arrays.asList(interaction, interaction2)); writer.end(); writer.close(); Assert.assertEquals(this.interaction_multiple, stringWriter.toString()); } @Test public void test_several_interactions2() 
throws XMLStreamException { StringWriter stringWriter = new StringWriter(); LightExpandedXmlNamedBinaryWriter writer = new LightExpandedXmlNamedBinaryWriter(stringWriter); BinaryInteraction interaction = new DefaultNamedBinaryInteraction(); Participant participant = new DefaultNamedParticipant(new DefaultProtein("protein test")); interaction.addParticipant(participant); BinaryInteraction interaction2 = new DefaultNamedBinaryInteraction(); Participant participant2 = new DefaultNamedParticipant(new DefaultProtein("protein test2")); interaction2.addParticipant(participant2); writer.start(); writer.write(Arrays.asList(interaction, interaction2).iterator()); writer.end(); writer.close(); Assert.assertEquals(this.interaction_multiple, stringWriter.toString()); } @Test public void test_interactions_same_interactors1() throws XMLStreamException { StringWriter stringWriter = new StringWriter(); LightExpandedXmlNamedBinaryWriter writer = new LightExpandedXmlNamedBinaryWriter(stringWriter); BinaryInteraction interaction = new DefaultNamedBinaryInteraction(); Participant participant = new DefaultNamedParticipant(new DefaultProtein("protein test")); interaction.addParticipant(participant); BinaryInteraction interaction2 = new DefaultNamedBinaryInteraction(); Participant participant2 = new DefaultNamedParticipant(participant.getInteractor()); interaction2.addParticipant(participant2); writer.start(); writer.write(Arrays.asList(interaction, interaction2)); writer.end(); writer.close(); Assert.assertEquals(this.interaction_same_experiment_interactors, stringWriter.toString()); } @Test public void test_interactions_same_interactors2() throws XMLStreamException { StringWriter stringWriter = new StringWriter(); LightExpandedXmlNamedBinaryWriter writer = new LightExpandedXmlNamedBinaryWriter(stringWriter); BinaryInteraction interaction = new DefaultNamedBinaryInteraction(); Participant participant = new DefaultNamedParticipant(new DefaultProtein("protein test")); 
interaction.addParticipant(participant); BinaryInteraction interaction2 = new DefaultNamedBinaryInteraction(); Participant participant2 = new DefaultNamedParticipant(participant.getInteractor()); interaction2.addParticipant(participant2); writer.start(); writer.write(Arrays.asList(interaction, interaction2).iterator()); writer.end(); writer.close(); Assert.assertEquals(this.interaction_same_experiment_interactors, stringWriter.toString()); } @Test public void test_single_interaction_complexes() throws XMLStreamException { StringWriter stringWriter = new StringWriter(); Complex complex = new DefaultComplex("test complex"); complex.getParticipants().add(new DefaultNamedModelledParticipant(new DefaultProtein("test protein"))); LightExpandedXmlNamedBinaryWriter writer = new LightExpandedXmlNamedBinaryWriter(stringWriter); BinaryInteraction interaction = new DefaultNamedBinaryInteraction(); Participant participant = new DefaultNamedParticipant(complex); interaction.addParticipant(participant); writer.start(); writer.write(interaction); writer.end(); writer.close(); Assert.assertEquals(this.interaction_complexes, stringWriter.toString()); } @Test public void test_single_interaction_complexes_as_Interactor() throws XMLStreamException { StringWriter stringWriter = new StringWriter(); Complex complex = new DefaultComplex("test complex"); complex.getParticipants().add(new DefaultModelledParticipant(new DefaultProtein("test protein"))); LightExpandedXmlNamedBinaryWriter writer = new LightExpandedXmlNamedBinaryWriter(stringWriter); writer.setWriteComplexesAsInteractors(true); BinaryInteraction interaction = new DefaultNamedBinaryInteraction(); Participant participant = new DefaultNamedParticipant(complex); interaction.addParticipant(participant); writer.start(); writer.write(interaction); writer.end(); writer.close(); Assert.assertEquals(this.interaction_complexes_as_interactor, stringWriter.toString()); } @Test public void test_interactions_different_entries1() throws 
XMLStreamException { StringWriter stringWriter = new StringWriter(); LightExpandedXmlNamedBinaryWriter writer = new LightExpandedXmlNamedBinaryWriter(stringWriter); BinaryInteraction interaction = new DefaultNamedBinaryInteraction(); Participant participant = new DefaultNamedParticipant(new DefaultProtein("protein test")); interaction.addParticipant(participant); writer.start(); writer.write(interaction); writer.write(interaction); writer.end(); writer.close(); Assert.assertEquals(this.interaction_different_entries1, stringWriter.toString()); } @Test public void test_interactions_different_entries2() throws XMLStreamException { StringWriter stringWriter = new StringWriter(); LightExpandedXmlNamedBinaryWriter writer = new LightExpandedXmlNamedBinaryWriter(stringWriter); BinaryInteraction interaction = new DefaultNamedBinaryInteraction(); Participant participant = new DefaultNamedParticipant(new DefaultProtein("protein test")); interaction.addParticipant(participant); BinaryInteraction interaction2 = new DefaultNamedBinaryInteraction(); Participant participant2 = new DefaultNamedParticipant(new DefaultProtein("protein test2")); interaction2.addParticipant(participant2); writer.start(); writer.write(Arrays.asList(interaction, interaction2)); writer.write(Arrays.asList(interaction, interaction2)); writer.end(); writer.close(); Assert.assertEquals(this.interaction_different_entries2, stringWriter.toString()); } @Test public void test_interactions_different_entries3() throws XMLStreamException { StringWriter stringWriter = new StringWriter(); LightExpandedXmlNamedBinaryWriter writer = new LightExpandedXmlNamedBinaryWriter(stringWriter); BinaryInteraction interaction = new DefaultNamedBinaryInteraction(); Participant participant = new DefaultNamedParticipant(new DefaultProtein("protein test")); interaction.addParticipant(participant); BinaryInteraction interaction2 = new DefaultNamedBinaryInteraction(); Participant participant2 = new DefaultNamedParticipant(new 
DefaultProtein("protein test2")); interaction2.addParticipant(participant2); writer.start(); writer.write(Arrays.asList(interaction, interaction2).iterator()); writer.write(Arrays.asList(interaction, interaction2).iterator()); writer.end(); writer.close(); Assert.assertEquals(this.interaction_different_entries2, stringWriter.toString()); } }
/*
 * Copyright (c) 2002 JSON.org
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * The Software shall be used for Good, not Evil.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */

package com.amazonaws.util.json;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;

/**
 * A JSONTokener takes a source string and extracts characters and tokens from
 * it. It is used by the JSONObject and JSONArray constructors to parse
 * JSON source strings.
 *
 * <p>Not thread-safe: each instance holds mutable cursor state over a single
 * {@link Reader}.
 *
 * @version 2010-02-02
 */
public class JSONTokener {

    /** 1-based column of the current position on the current line. */
    private int character;
    /** True once the underlying reader has reported end-of-stream. */
    private boolean eof;
    /** Number of characters consumed so far. */
    private int index;
    /** 1-based line number of the current position. */
    private int line;
    /** The most recently read character, kept for one-step lookback. */
    private char previous;
    /** Source of characters; guaranteed to support mark/reset. */
    private Reader reader;
    /** True when {@link #back()} has pushed {@link #previous} back for re-reading. */
    private boolean usePrevious;

    /**
     * Construct a JSONTokener from a reader.
     *
     * @param reader A reader.
     */
    public JSONTokener(Reader reader) {
        // skipTo() relies on mark/reset, so wrap readers that lack support.
        this.reader = reader.markSupported() ? reader : new BufferedReader(reader);
        this.eof = false;
        this.usePrevious = false;
        this.previous = 0;
        this.index = 0;
        this.character = 1;
        this.line = 1;
    }

    /**
     * Construct a JSONTokener from a string.
     *
     * @param s A source string.
     */
    public JSONTokener(String s) {
        this(new StringReader(s));
    }

    /**
     * Back up one character. This provides a sort of lookahead capability,
     * so that you can test for a digit or letter before attempting to parse
     * the next number or identifier.
     *
     * @throws JSONException if called twice in a row, or before any character
     *         has been read.
     */
    public void back() throws JSONException {
        if (usePrevious || index <= 0) {
            throw new JSONException("Stepping back two steps is not supported");
        }
        this.index -= 1;
        this.character -= 1;
        this.usePrevious = true;
        this.eof = false;
    }

    /**
     * Get the hex value of a character (base16).
     * @param c A character between '0' and '9' or between 'A' and 'F' or
     * between 'a' and 'f'.
     * @return An int between 0 and 15, or -1 if c was not a hex digit.
     */
    public static int dehexchar(char c) {
        if (c >= '0' && c <= '9') {
            return c - '0';
        }
        if (c >= 'A' && c <= 'F') {
            return c - ('A' - 10);
        }
        if (c >= 'a' && c <= 'f') {
            return c - ('a' - 10);
        }
        return -1;
    }

    /**
     * @return true if the end of the source has been reached and no character
     *         is pending from {@link #back()}.
     */
    public boolean end() {
        return eof && !usePrevious;
    }

    /**
     * Determine if the source string still contains characters that next()
     * can consume.
     * @return true if not yet at the end of the source.
     */
    public boolean more() throws JSONException {
        next();
        if (end()) {
            return false;
        }
        back();
        return true;
    }

    /**
     * Get the next character in the source string.
     *
     * @return The next character, or 0 if past the end of the source string.
     */
    public char next() throws JSONException {
        int c;
        if (this.usePrevious) {
            this.usePrevious = false;
            c = this.previous;
        } else {
            try {
                c = this.reader.read();
            } catch (IOException exception) {
                throw new JSONException(exception);
            }
            if (c <= 0) { // End of stream
                this.eof = true;
                c = 0;
            }
        }
        this.index += 1;
        // Track line/column; a '\r' followed by '\n' counts as a single newline.
        if (this.previous == '\r') {
            this.line += 1;
            this.character = c == '\n' ? 0 : 1;
        } else if (c == '\n') {
            this.line += 1;
            this.character = 0;
        } else {
            this.character += 1;
        }
        this.previous = (char) c;
        return this.previous;
    }

    /**
     * Consume the next character, and check that it matches a specified
     * character.
     * @param c The character to match.
     * @return The character.
     * @throws JSONException if the character does not match.
     */
    public char next(char c) throws JSONException {
        char n = next();
        if (n != c) {
            throw syntaxError("Expected '" + c + "' and instead saw '" + n + "'");
        }
        return n;
    }

    /**
     * Get the next n characters.
     *
     * @param n The number of characters to take.
     * @return A string of n characters.
     * @throws JSONException
     *   Substring bounds error if there are not
     *   n characters remaining in the source string.
     */
    public String next(int n) throws JSONException {
        if (n == 0) {
            return "";
        }
        char[] buffer = new char[n];
        int pos = 0;
        while (pos < n) {
            buffer[pos] = next();
            if (end()) {
                throw syntaxError("Substring bounds error");
            }
            pos += 1;
        }
        return new String(buffer);
    }

    /**
     * Get the next char in the string, skipping whitespace.
     * @throws JSONException
     * @return A character, or 0 if there are no more characters.
     */
    public char nextClean() throws JSONException {
        for (;;) {
            char c = next();
            // Anything at or below ' ' (space) is treated as whitespace.
            if (c == 0 || c > ' ') {
                return c;
            }
        }
    }

    /**
     * Return the characters up to the next close quote character.
     * Backslash processing is done. The formal JSON format does not
     * allow strings in single quotes, but an implementation is allowed to
     * accept them.
     * @param quote The quoting character, either
     *      <code>"</code>&nbsp;<small>(double quote)</small> or
     *      <code>'</code>&nbsp;<small>(single quote)</small>.
     * @return A String.
     * @throws JSONException Unterminated string.
     */
    public String nextString(char quote) throws JSONException {
        char c;
        // StringBuilder: purely local accumulation, no synchronization needed.
        StringBuilder sb = new StringBuilder();
        for (;;) {
            c = next();
            switch (c) {
            case 0:
            case '\n':
            case '\r':
                throw syntaxError("Unterminated string");
            case '\\':
                c = next();
                switch (c) {
                case 'b':
                    sb.append('\b');
                    break;
                case 't':
                    sb.append('\t');
                    break;
                case 'n':
                    sb.append('\n');
                    break;
                case 'f':
                    sb.append('\f');
                    break;
                case 'r':
                    sb.append('\r');
                    break;
                case 'u':
                    // \ uXXXX: four hex digits forming a UTF-16 code unit.
                    sb.append((char)Integer.parseInt(next(4), 16));
                    break;
                case '"':
                case '\'':
                case '\\':
                case '/':
                    sb.append(c);
                    break;
                default:
                    throw syntaxError("Illegal escape.");
                }
                break;
            default:
                if (c == quote) {
                    return sb.toString();
                }
                sb.append(c);
            }
        }
    }

    /**
     * Get the text up but not including the specified character or the
     * end of line, whichever comes first.
     * @param d A delimiter character.
     * @return A string.
     */
    public String nextTo(char d) throws JSONException {
        StringBuilder sb = new StringBuilder();
        for (;;) {
            char c = next();
            if (c == d || c == 0 || c == '\n' || c == '\r') {
                if (c != 0) {
                    back(); // leave the delimiter for the caller
                }
                return sb.toString().trim();
            }
            sb.append(c);
        }
    }

    /**
     * Get the text up but not including one of the specified delimiter
     * characters or the end of line, whichever comes first.
     * @param delimiters A set of delimiter characters.
     * @return A string, trimmed.
     */
    public String nextTo(String delimiters) throws JSONException {
        char c;
        StringBuilder sb = new StringBuilder();
        for (;;) {
            c = next();
            if (delimiters.indexOf(c) >= 0 || c == 0 ||
                    c == '\n' || c == '\r') {
                if (c != 0) {
                    back(); // leave the delimiter for the caller
                }
                return sb.toString().trim();
            }
            sb.append(c);
        }
    }

    /**
     * Get the next value. The value can be a Boolean, Double, Integer,
     * JSONArray, JSONObject, Long, or String, or the JSONObject.NULL object.
     * @throws JSONException If syntax error.
     *
     * @return An object.
     */
    public Object nextValue() throws JSONException {
        char c = nextClean();
        String s;

        switch (c) {
        case '"':
        case '\'':
            return nextString(c);
        case '{':
            back();
            return new JSONObject(this);
        case '[':
        case '(':
            back();
            return new JSONArray(this);
        }

        /*
         * Handle unquoted text. This could be the values true, false, or
         * null, or it can be a number. An implementation (such as this one)
         * is allowed to also accept non-standard forms.
         *
         * Accumulate characters until we reach the end of the text or a
         * formatting character.
         */
        StringBuilder sb = new StringBuilder();
        while (c >= ' ' && ",:]}/\\\"[{;=#".indexOf(c) < 0) {
            sb.append(c);
            c = next();
        }
        back();

        s = sb.toString().trim();
        if (s.equals("")) {
            throw syntaxError("Missing value");
        }
        return JSONObject.stringToValue(s);
    }

    /**
     * Skip characters until the next character is the requested character.
     * If the requested character is not found, no characters are skipped.
     * @param to A character to skip to.
     * @return The requested character, or zero if the requested character
     * is not found.
     */
    public char skipTo(char to) throws JSONException {
        char c;
        try {
            // Remember the position so we can rewind if 'to' is never found.
            int startIndex = this.index;
            int startCharacter = this.character;
            int startLine = this.line;
            reader.mark(Integer.MAX_VALUE);
            do {
                c = next();
                if (c == 0) {
                    reader.reset();
                    this.index = startIndex;
                    this.character = startCharacter;
                    this.line = startLine;
                    return c;
                }
            } while (c != to);
        } catch (IOException exc) {
            throw new JSONException(exc);
        }

        back();
        return c;
    }

    /**
     * Make a JSONException to signal a syntax error.
     *
     * @param message The error message.
     * @return A JSONException object, suitable for throwing
     */
    public JSONException syntaxError(String message) {
        return new JSONException(message + toString());
    }

    /**
     * Make a printable string of this JSONTokener.
     *
     * @return " at {index} [character {character} line {line}]"
     */
    @Override
    public String toString() {
        return " at " + index + " [character " + this.character +
            " line " + this.line + "]";
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package freemarker.ext.beans; import java.beans.BeanInfo; import java.beans.IndexedPropertyDescriptor; import java.beans.IntrospectionException; import java.beans.Introspector; import java.beans.MethodDescriptor; import java.beans.PropertyDescriptor; import java.lang.ref.Reference; import java.lang.ref.ReferenceQueue; import java.lang.ref.WeakReference; import java.lang.reflect.Constructor; import java.lang.reflect.Field; import java.lang.reflect.Method; import java.lang.reflect.Modifier; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.IdentityHashMap; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import freemarker.core.BugException; import freemarker.core._JavaVersions; import freemarker.ext.beans.BeansWrapper.MethodAppearanceDecision; import freemarker.ext.beans.BeansWrapper.MethodAppearanceDecisionInput; import freemarker.ext.util.ModelCache; import freemarker.log.Logger; import 
freemarker.template.utility.NullArgumentException;
import freemarker.template.utility.SecurityUtilities;

/**
 * Returns information about a {@link Class} that's useful for FreeMarker. Encapsulates a cache for this. Thread-safe,
 * doesn't even require "proper publishing" starting from 2.3.24 or Java 5. Immutable, with the exception of the
 * internal caches.
 *
 * <p>
 * Note that instances of this are cached on the level of FreeMarker's defining class loader. Hence, it must not do
 * operations that depend on the Thread Context Class Loader, such as resolving class names.
 */
class ClassIntrospector {
    // Attention: This class must be thread-safe (not just after proper publishing). This is important as some of
    // these are shared by many object wrappers, and concurrency related glitches due to user errors must remain
    // local to the object wrappers, not corrupting the shared ClassIntrospector.

    private static final Logger LOG = Logger.getLogger("freemarker.beans");

    private static final String JREBEL_SDK_CLASS_NAME = "org.zeroturnaround.javarebel.ClassEventListener";
    private static final String JREBEL_INTEGRATION_ERROR_MSG
            = "Error initializing JRebel integration. JRebel integration disabled.";

    /**
     * When this property is true, some things are stricter. This is mostly to catch suspicious things in development
     * that can otherwise be valid situations.
     */
    static final boolean DEVELOPMENT_MODE
            = "true".equals(SecurityUtilities.getSystemProperty("freemarker.development", "false"));

    /** Non-null exactly when the JRebel SDK is present on the class path and its integration loaded cleanly. */
    private static final ClassChangeNotifier CLASS_CHANGE_NOTIFIER;
    static {
        boolean jRebelAvailable;
        try {
            Class.forName(JREBEL_SDK_CLASS_NAME);
            jRebelAvailable = true;
        } catch (Throwable e) {
            jRebelAvailable = false;
            try {
                // A plain ClassNotFoundException just means JRebel isn't installed; anything else is a real problem.
                if (!(e instanceof ClassNotFoundException)) {
                    LOG.error(JREBEL_INTEGRATION_ERROR_MSG, e);
                }
            } catch (Throwable loggingE) {
                // ignore
            }
        }

        ClassChangeNotifier classChangeNotifier;
        if (jRebelAvailable) {
            try {
                classChangeNotifier = (ClassChangeNotifier)
                        Class.forName("freemarker.ext.beans.JRebelClassChangeNotifier").newInstance();
            } catch (Throwable e) {
                classChangeNotifier = null;
                try {
                    LOG.error(JREBEL_INTEGRATION_ERROR_MSG, e);
                } catch (Throwable loggingE) {
                    // ignore
                }
            }
        } else {
            classChangeNotifier = null;
        }

        CLASS_CHANGE_NOTIFIER = classChangeNotifier;
    }

    // -----------------------------------------------------------------------------------------------------------------
    // Introspection info Map keys:

    /** Key in the class info Map to the Map that maps method to argument type arrays */
    private static final Object ARG_TYPES_BY_METHOD_KEY = new Object();
    /** Key in the class info Map to the object that represents the constructors (one or multiple due to overloading) */
    static final Object CONSTRUCTORS_KEY = new Object();
    /** Key in the class info Map to the get(String|Object) Method */
    static final Object GENERIC_GET_KEY = new Object();

    // -----------------------------------------------------------------------------------------------------------------
    // Introspection configuration properties:

    // Note: These all must be *declared* final (or else synchronization is needed everywhere where they are accessed).

    final int exposureLevel;
    final boolean exposeFields;
    final MethodAppearanceFineTuner methodAppearanceFineTuner;
    final MethodSorter methodSorter;
    final boolean treatDefaultMethodsAsBeanMembers;
    final boolean bugfixed;

    /** See {@link #getHasSharedInstanceRestrictons()} */
    final private boolean hasSharedInstanceRestrictons;

    /** See {@link #isShared()} */
    final private boolean shared;

    // -----------------------------------------------------------------------------------------------------------------
    // State fields:

    private final Object sharedLock;
    private final Map<Class<?>, Map<Object, Object>> cache
            = new ConcurrentHashMap<Class<?>, Map<Object, Object>>(0, 0.75f, 16);
    private final Set<String> cacheClassNames = new HashSet<String>(0);
    private final Set<Class<?>> classIntrospectionsInProgress = new HashSet<Class<?>>(0);

    private final List<WeakReference<Object/*ClassBasedModelFactory|ModelCache>*/>> modelFactories
            = new LinkedList<WeakReference<Object>>();
    private final ReferenceQueue<Object> modelFactoriesRefQueue = new ReferenceQueue<Object>();

    private int clearingCounter;

    // -----------------------------------------------------------------------------------------------------------------
    // Instantiation:

    /**
     * Creates a new instance, that is hence surely not shared (singleton) instance.
     *
     * @param pa
     *            Stores what the values of the JavaBean properties of the returned instance will be. Not {@code null}.
     */
    ClassIntrospector(ClassIntrospectorBuilder pa, Object sharedLock) {
        this(pa, sharedLock, false, false);
    }

    /**
     * @param hasSharedInstanceRestrictons
     *            {@code true} exactly if we are creating a new instance with {@link ClassIntrospectorBuilder}. Then
     *            it's {@code true} even if it won't put the instance into the cache.
     */
    ClassIntrospector(ClassIntrospectorBuilder builder, Object sharedLock,
            boolean hasSharedInstanceRestrictons, boolean shared) {
        NullArgumentException.check("sharedLock", sharedLock);

        this.exposureLevel = builder.getExposureLevel();
        this.exposeFields = builder.getExposeFields();
        this.methodAppearanceFineTuner = builder.getMethodAppearanceFineTuner();
        this.methodSorter = builder.getMethodSorter();
        this.treatDefaultMethodsAsBeanMembers = builder.getTreatDefaultMethodsAsBeanMembers();
        this.bugfixed = builder.isBugfixed();

        this.sharedLock = sharedLock;

        this.hasSharedInstanceRestrictons = hasSharedInstanceRestrictons;
        this.shared = shared;

        if (CLASS_CHANGE_NOTIFIER != null) {
            CLASS_CHANGE_NOTIFIER.subscribe(this);
        }
    }

    /**
     * Returns a {@link ClassIntrospectorBuilder} that could be used to create an identical {@link ClassIntrospector}.
     * The returned {@link ClassIntrospectorBuilder} can be modified without interfering with anything.
     */
    ClassIntrospectorBuilder createBuilder() {
        return new ClassIntrospectorBuilder(this);
    }

    // ------------------------------------------------------------------------------------------------------------------
    // Introspection:

    /**
     * Gets the class introspection data from {@link #cache}, automatically creating the cache entry if it's missing.
     *
     * @return A {@link Map} where each key is a property/method/field name (or a special {@link Object} key like
     *         {@link #CONSTRUCTORS_KEY}), each value is a {@link FastPropertyDescriptor} or {@link Method} or
     *         {@link OverloadedMethods} or {@link Field} (but better check the source code...).
     */
    Map<Object, Object> get(Class<?> clazz) {
        // Lock-free fast path; ConcurrentHashMap makes this safe without synchronization.
        {
            Map<Object, Object> introspData = cache.get(clazz);
            if (introspData != null) return introspData;
        }

        String className;
        synchronized (sharedLock) {
            Map<Object, Object> introspData = cache.get(clazz);
            if (introspData != null) return introspData;

            className = clazz.getName();
            if (cacheClassNames.contains(className)) {
                onSameNameClassesDetected(className);
            }

            while (introspData == null && classIntrospectionsInProgress.contains(clazz)) {
                // Another thread is already introspecting this class;
                // waiting for its result.
                try {
                    sharedLock.wait();
                    introspData = cache.get(clazz);
                } catch (InterruptedException e) {
                    // Restore the interrupt status so callers up the stack can still observe it,
                    // and keep the InterruptedException as the cause instead of flattening it to a String.
                    Thread.currentThread().interrupt();
                    throw new RuntimeException("Class introspection data lookup aborted", e);
                }
            }
            if (introspData != null) return introspData;

            // This will be the thread that introspects this class.
            classIntrospectionsInProgress.add(clazz);
        }
        try {
            // Deliberately done outside the lock: introspection can be slow and may trigger user code
            // (MethodAppearanceFineTuner); waiters are parked on sharedLock.wait() meanwhile.
            Map<Object, Object> introspData = createClassIntrospectionData(clazz);
            synchronized (sharedLock) {
                cache.put(clazz, introspData);
                cacheClassNames.add(className);
            }
            return introspData;
        } finally {
            synchronized (sharedLock) {
                classIntrospectionsInProgress.remove(clazz);
                sharedLock.notifyAll();
            }
        }
    }

    /**
     * Creates a {@link Map} with the content as described for the return value of {@link #get(Class)}.
     */
    private Map<Object, Object> createClassIntrospectionData(Class<?> clazz) {
        final Map<Object, Object> introspData = new HashMap<Object, Object>();

        if (exposeFields) {
            addFieldsToClassIntrospectionData(introspData, clazz);
        }

        final Map<MethodSignature, List<Method>> accessibleMethods = discoverAccessibleMethods(clazz);

        addGenericGetToClassIntrospectionData(introspData, accessibleMethods);

        if (exposureLevel != BeansWrapper.EXPOSE_NOTHING) {
            try {
                addBeanInfoToClassIntrospectionData(introspData, clazz, accessibleMethods);
            } catch (IntrospectionException e) {
                LOG.warn("Couldn't properly perform introspection for class " + clazz, e);
                introspData.clear(); // FIXME NBC: Don't drop everything here.
            }
        }

        addConstructorsToClassIntrospectionData(introspData, clazz);

        // Shrink trivially-sized results to cheaper immutable maps:
        if (introspData.size() > 1) {
            return introspData;
        } else if (introspData.size() == 0) {
            return Collections.emptyMap();
        } else { // map.size() == 1
            Entry<Object, Object> e = introspData.entrySet().iterator().next();
            return Collections.singletonMap(e.getKey(), e.getValue());
        }
    }

    /** Adds all public non-static fields of {@code clazz} to the introspection data, keyed by field name. */
    private void addFieldsToClassIntrospectionData(Map<Object, Object> introspData, Class<?> clazz)
            throws SecurityException {
        Field[] fields = clazz.getFields();
        for (int i = 0; i < fields.length; i++) {
            Field field = fields[i];
            if ((field.getModifiers() & Modifier.STATIC) == 0) {
                introspData.put(field.getName(), field);
            }
        }
    }

    /**
     * Adds the JavaBeans properties and (depending on {@link #exposureLevel}) the methods of {@code clazz} to the
     * introspection data, applying the {@link MethodAppearanceFineTuner} if one is configured.
     */
    private void addBeanInfoToClassIntrospectionData(
            Map<Object, Object> introspData, Class<?> clazz,
            Map<MethodSignature, List<Method>> accessibleMethods) throws IntrospectionException {
        BeanInfo beanInfo = Introspector.getBeanInfo(clazz);

        List<PropertyDescriptor> pdas = getPropertyDescriptors(beanInfo, clazz);
        int pdasLength = pdas.size();
        // Reverse order shouldn't matter, but we keep it to not risk backward incompatibility.
        for (int i = pdasLength - 1; i >= 0; --i) {
            addPropertyDescriptorToClassIntrospectionData(
                    introspData, pdas.get(i), clazz,
                    accessibleMethods);
        }

        if (exposureLevel < BeansWrapper.EXPOSE_PROPERTIES_ONLY) {
            final MethodAppearanceDecision decision = new MethodAppearanceDecision();
            MethodAppearanceDecisionInput decisionInput = null;
            List<MethodDescriptor> mds = getMethodDescriptors(beanInfo, clazz);
            sortMethodDescriptors(mds);
            int mdsSize = mds.size();
            IdentityHashMap<Method, Void> argTypesUsedByIndexerPropReaders = null;
            for (int i = mdsSize - 1; i >= 0; --i) {
                final Method method = getMatchingAccessibleMethod(mds.get(i).getMethod(), accessibleMethods);
                if (method != null && isAllowedToExpose(method)) {
                    decision.setDefaults(method);
                    if (methodAppearanceFineTuner != null) {
                        if (decisionInput == null) {
                            decisionInput = new MethodAppearanceDecisionInput();
                        }
                        decisionInput.setContainingClass(clazz);
                        decisionInput.setMethod(method);

                        methodAppearanceFineTuner.process(decisionInput, decision);
                    }

                    PropertyDescriptor propDesc = decision.getExposeAsProperty();
                    if (propDesc != null
                            && (decision.getReplaceExistingProperty()
                                    || !(introspData.get(propDesc.getName()) instanceof FastPropertyDescriptor))) {
                        addPropertyDescriptorToClassIntrospectionData(
                                introspData, propDesc, clazz, accessibleMethods);
                    }

                    String methodKey = decision.getExposeMethodAs();
                    if (methodKey != null) {
                        Object previous = introspData.get(methodKey);
                        if (previous instanceof Method) {
                            // Overloaded method - replace Method with a OverloadedMethods
                            OverloadedMethods overloadedMethods = new OverloadedMethods(bugfixed);
                            overloadedMethods.addMethod((Method) previous);
                            overloadedMethods.addMethod(method);
                            introspData.put(methodKey, overloadedMethods);
                            // Remove parameter type information (unless an indexed property reader needs it):
                            if (argTypesUsedByIndexerPropReaders == null
                                    || !argTypesUsedByIndexerPropReaders.containsKey(previous)) {
                                getArgTypesByMethod(introspData).remove(previous);
                            }
                        } else if (previous instanceof OverloadedMethods) {
                            // Already overloaded method - add new overload
                            ((OverloadedMethods) previous).addMethod(method);
                        } else if (decision.getMethodShadowsProperty()
                                || !(previous instanceof FastPropertyDescriptor)) {
                            // Simple method (this far)
                            introspData.put(methodKey, method);
                            Class<?>[] replaced = getArgTypesByMethod(introspData).put(
                                    method, method.getParameterTypes());
                            if (replaced != null) {
                                if (argTypesUsedByIndexerPropReaders == null) {
                                    argTypesUsedByIndexerPropReaders = new IdentityHashMap<Method, Void>();
                                }
                                argTypesUsedByIndexerPropReaders.put(method, null);
                            }
                        }
                    }
                }
            } // for each in mds
        } // end if (exposureLevel < EXPOSE_PROPERTIES_ONLY)
    }

    /**
     * Very similar to {@link BeanInfo#getPropertyDescriptors()}, but can deal with Java 8 default methods too.
     */
    private List<PropertyDescriptor> getPropertyDescriptors(BeanInfo beanInfo, Class<?> clazz) {
        PropertyDescriptor[] introspectorPDsArray = beanInfo.getPropertyDescriptors();
        List<PropertyDescriptor> introspectorPDs = introspectorPDsArray != null ? Arrays.asList(introspectorPDsArray)
                : Collections.<PropertyDescriptor>emptyList();

        if (!treatDefaultMethodsAsBeanMembers || _JavaVersions.JAVA_8 == null) {
            // java.beans.Introspector was good enough then.
            return introspectorPDs;
        }

        // introspectorPDs contains each property exactly once. But as now we will search them manually too, it can
        // happen that we find the same property for multiple times. Worse, because of indexed properties, it's possible
        // that we have to merge entries (like one has the normal reader method, the other has the indexed reader
        // method), instead of just replacing them in a Map. That's why we have introduced PropertyReaderMethodPair,
        // which holds the methods belonging to the same property name. IndexedPropertyDescriptor is not good for that,
        // as it can't store two methods whose types are incompatible, and we have to wait until all the merging was
        // done to see if the incompatibility goes away.

        // This could be Map<String, PropertyReaderMethodPair>, but since we rarely need to do merging, we try to avoid
        // creating those and use the source objects as much as possible. Also note that we initialize this lazily.
        LinkedHashMap<String, Object /*PropertyReaderMethodPair|Method|PropertyDescriptor*/> mergedPRMPs = null;

        // Collect Java 8 default methods that look like property readers into mergedPRMPs:
        // (Note that java.beans.Introspector discovers non-accessible public methods, and to emulate that behavior
        // here, we don't utilize the accessibleMethods Map, which we might already have at this point.)
        for (Method method : clazz.getMethods()) {
            if (_JavaVersions.JAVA_8.isDefaultMethod(method) && method.getReturnType() != void.class
                    && !method.isBridge()) {
                Class<?>[] paramTypes = method.getParameterTypes();
                if (paramTypes.length == 0
                        || paramTypes.length == 1 && paramTypes[0] == int.class /* indexed property reader */) {
                    String propName = _MethodUtil.getBeanPropertyNameFromReaderMethodName(
                            method.getName(), method.getReturnType());
                    if (propName != null) {
                        if (mergedPRMPs == null) {
                            // Lazy initialization
                            mergedPRMPs = new LinkedHashMap<String, Object>();
                        }
                        if (paramTypes.length == 0) {
                            mergeInPropertyReaderMethod(mergedPRMPs, propName, method);
                        } else {
                            // It's an indexed property reader method
                            mergeInPropertyReaderMethodPair(mergedPRMPs, propName,
                                    new PropertyReaderMethodPair(null, method));
                        }
                    }
                }
            }
        } // for clazz.getMethods()
        if (mergedPRMPs == null) {
            // We had no interfering Java 8 default methods, so we can choose the fast route.
            return introspectorPDs;
        }

        for (PropertyDescriptor introspectorPD : introspectorPDs) {
            mergeInPropertyDescriptor(mergedPRMPs, introspectorPD);
        }

        // Now we convert the PRMPs to PDs, handling case where the normal and the indexed read methods contradict.
        List<PropertyDescriptor> mergedPDs = new ArrayList<PropertyDescriptor>(mergedPRMPs.size());
        for (Entry<String, Object> entry : mergedPRMPs.entrySet()) {
            String propName = entry.getKey();
            Object propDescObj = entry.getValue();
            if (propDescObj instanceof PropertyDescriptor) {
                mergedPDs.add((PropertyDescriptor) propDescObj);
            } else {
                Method readMethod;
                Method indexedReadMethod;
                if (propDescObj instanceof Method) {
                    readMethod = (Method) propDescObj;
                    indexedReadMethod = null;
                } else if (propDescObj instanceof PropertyReaderMethodPair) {
                    PropertyReaderMethodPair prmp = (PropertyReaderMethodPair) propDescObj;
                    readMethod = prmp.readMethod;
                    indexedReadMethod = prmp.indexedReadMethod;
                    if (readMethod != null && indexedReadMethod != null
                            && indexedReadMethod.getReturnType() != readMethod.getReturnType().getComponentType()) {
                        // Here we copy the java.beans.Introspector behavior: If the array item class is not exactly
                        // the same as the indexed read method return type, we say that the property is not indexed.
                        indexedReadMethod = null;
                    }
                } else {
                    throw new BugException();
                }
                try {
                    mergedPDs.add(
                            indexedReadMethod != null
                                    ? new IndexedPropertyDescriptor(
                                            propName, readMethod, null, indexedReadMethod, null)
                                    : new PropertyDescriptor(propName, readMethod, null));
                } catch (IntrospectionException e) {
                    if (LOG.isWarnEnabled()) {
                        LOG.warn("Failed creating property descriptor for " + clazz.getName()
                                + " property " + propName, e);
                    }
                }
            }
        }
        return mergedPDs;
    }

    /**
     * Holds the plain and the indexed reader method discovered for the same property name, so that they can be merged
     * later (possibly discarding the indexed one if the return types turn out to be incompatible).
     */
    private static class PropertyReaderMethodPair {
        private final Method readMethod;
        private final Method indexedReadMethod;

        PropertyReaderMethodPair(Method readerMethod, Method indexedReaderMethod) {
            this.readMethod = readerMethod;
            this.indexedReadMethod = indexedReaderMethod;
        }

        PropertyReaderMethodPair(PropertyDescriptor pd) {
            this(
                    pd.getReadMethod(),
                    pd instanceof IndexedPropertyDescriptor
                            ? ((IndexedPropertyDescriptor) pd).getIndexedReadMethod() : null);
        }

        /** Wraps any of the three value representations used in the merged-PRMP map into a pair. */
        static PropertyReaderMethodPair from(Object obj) {
            if (obj instanceof PropertyReaderMethodPair) {
                return (PropertyReaderMethodPair) obj;
            } else if (obj instanceof PropertyDescriptor) {
                return new PropertyReaderMethodPair((PropertyDescriptor) obj);
            } else if (obj instanceof Method) {
                return new PropertyReaderMethodPair((Method) obj, null);
            } else {
                throw new BugException("Unexpected obj type: " + obj.getClass().getName());
            }
        }

        /** New methods win; old methods only fill the slots the new pair leaves {@code null}. */
        static PropertyReaderMethodPair merge(PropertyReaderMethodPair oldMethods,
                PropertyReaderMethodPair newMethods) {
            return new PropertyReaderMethodPair(
                    newMethods.readMethod != null ? newMethods.readMethod : oldMethods.readMethod,
                    newMethods.indexedReadMethod != null ? newMethods.indexedReadMethod
                            : oldMethods.indexedReadMethod);
        }

        @Override
        public int hashCode() {
            final int prime = 31;
            int result = 1;
            result = prime * result + ((indexedReadMethod == null) ? 0 : indexedReadMethod.hashCode());
            result = prime * result + ((readMethod == null) ? 0 : readMethod.hashCode());
            return result;
        }

        @Override
        public boolean equals(Object obj) {
            if (this == obj) return true;
            if (obj == null) return false;
            if (getClass() != obj.getClass()) return false;
            PropertyReaderMethodPair other = (PropertyReaderMethodPair) obj;
            // Identity comparison is intentional: Method instances here come from the same reflective lookups.
            return other.readMethod == readMethod && other.indexedReadMethod == indexedReadMethod;
        }
    }

    /** Merges a {@link PropertyDescriptor} discovered by {@link Introspector} into the merged-PRMP map. */
    private void mergeInPropertyDescriptor(LinkedHashMap<String, Object> mergedPRMPs, PropertyDescriptor pd) {
        String propName = pd.getName();
        Object replaced = mergedPRMPs.put(propName, pd);
        if (replaced != null) {
            PropertyReaderMethodPair newPRMP = new PropertyReaderMethodPair(pd);
            putIfMergedPropertyReaderMethodPairDiffers(mergedPRMPs, propName, replaced, newPRMP);
        }
    }

    /** Merges an indexed-reader-only pair (from a Java 8 default method) into the merged-PRMP map. */
    private void mergeInPropertyReaderMethodPair(LinkedHashMap<String, Object> mergedPRMPs,
            String propName, PropertyReaderMethodPair newPRM) {
        Object replaced = mergedPRMPs.put(propName, newPRM);
        if (replaced != null) {
            putIfMergedPropertyReaderMethodPairDiffers(mergedPRMPs, propName, replaced, newPRM);
        }
    }

    /** Merges a plain reader method (from a Java 8 default method) into the merged-PRMP map. */
    private void mergeInPropertyReaderMethod(LinkedHashMap<String, Object> mergedPRMPs, String propName,
            Method readerMethod) {
        Object replaced = mergedPRMPs.put(propName, readerMethod);
        if (replaced != null) {
            putIfMergedPropertyReaderMethodPairDiffers(mergedPRMPs, propName, replaced,
                    new PropertyReaderMethodPair(readerMethod, null));
        }
    }

    /**
     * If merging the replaced entry with the new pair yields something different from the new pair (i.e., the old
     * entry contributed a method slot the new one lacked), stores the merged pair instead.
     */
    private void putIfMergedPropertyReaderMethodPairDiffers(LinkedHashMap<String, Object> mergedPRMPs,
            String propName, Object replaced, PropertyReaderMethodPair newPRMP) {
        PropertyReaderMethodPair replacedPRMP = PropertyReaderMethodPair.from(replaced);
        PropertyReaderMethodPair mergedPRMP = PropertyReaderMethodPair.merge(replacedPRMP, newPRMP);
        if (!mergedPRMP.equals(newPRMP)) {
            mergedPRMPs.put(propName, mergedPRMP);
        }
    }

    /**
     * Very similar to {@link BeanInfo#getMethodDescriptors()}, but can deal with Java 8 default methods too.
     */
    private List<MethodDescriptor> getMethodDescriptors(BeanInfo beanInfo, Class<?> clazz) {
        MethodDescriptor[] introspectorMDArray = beanInfo.getMethodDescriptors();
        List<MethodDescriptor> introspectionMDs = introspectorMDArray != null && introspectorMDArray.length != 0
                ? Arrays.asList(introspectorMDArray)
                : Collections.<MethodDescriptor>emptyList();

        if (!treatDefaultMethodsAsBeanMembers || _JavaVersions.JAVA_8 == null) {
            // java.beans.Introspector was good enough then.
            return introspectionMDs;
        }

        Map<String, List<Method>> defaultMethodsToAddByName = null;
        for (Method method : clazz.getMethods()) {
            if (_JavaVersions.JAVA_8.isDefaultMethod(method) && !method.isBridge()) {
                if (defaultMethodsToAddByName == null) {
                    defaultMethodsToAddByName = new HashMap<String, List<Method>>();
                }
                List<Method> overloads = defaultMethodsToAddByName.get(method.getName());
                if (overloads == null) {
                    overloads = new ArrayList<Method>(0);
                    defaultMethodsToAddByName.put(method.getName(), overloads);
                }
                overloads.add(method);
            }
        }

        if (defaultMethodsToAddByName == null) {
            // We had no interfering default methods:
            return introspectionMDs;
        }

        // Recreate introspectionMDs so that its size can grow:
        ArrayList<MethodDescriptor> newIntrospectionMDs
                = new ArrayList<MethodDescriptor>(introspectionMDs.size() + 16);
        for (MethodDescriptor introspectorMD : introspectionMDs) {
            Method introspectorM = introspectorMD.getMethod();
            // Prevent cases where the same method is added with different return types both from the list of default
            // methods and from the list of Introspector-discovered methods, as that would lead to overloaded method
            // selection ambiguity later. This is known to happen when the default method in an interface has reified
            // return type, and then the interface is implemented by a class where the compiler generates an override
            // for the bridge method only. (Other tricky cases might exist.)
            if (!containsMethodWithSameParameterTypes(
                    defaultMethodsToAddByName.get(introspectorM.getName()), introspectorM)) {
                newIntrospectionMDs.add(introspectorMD);
            }
        }
        introspectionMDs = newIntrospectionMDs;

        // Add default methods:
        for (Entry<String, List<Method>> entry : defaultMethodsToAddByName.entrySet()) {
            for (Method method : entry.getValue()) {
                introspectionMDs.add(new MethodDescriptor(method));
            }
        }

        return introspectionMDs;
    }

    /** Tells if {@code overloads} contains a method whose parameter types equal those of {@code m}. */
    private boolean containsMethodWithSameParameterTypes(List<Method> overloads, Method m) {
        if (overloads == null) {
            return false;
        }

        Class<?>[] paramTypes = m.getParameterTypes();
        for (Method overload : overloads) {
            if (Arrays.equals(overload.getParameterTypes(), paramTypes)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Stores a {@link FastPropertyDescriptor} for {@code pd} into the introspection data, but only with the reader
     * methods that are accessible and allowed to be exposed.
     */
    private void addPropertyDescriptorToClassIntrospectionData(Map<Object, Object> introspData,
            PropertyDescriptor pd, Class<?> clazz, Map<MethodSignature, List<Method>> accessibleMethods) {
        Method readMethod = getMatchingAccessibleMethod(pd.getReadMethod(), accessibleMethods);
        if (readMethod != null && !isAllowedToExpose(readMethod)) {
            readMethod = null;
        }

        Method indexedReadMethod;
        if (pd instanceof IndexedPropertyDescriptor) {
            indexedReadMethod = getMatchingAccessibleMethod(
                    ((IndexedPropertyDescriptor) pd).getIndexedReadMethod(), accessibleMethods);
            if (indexedReadMethod != null && !isAllowedToExpose(indexedReadMethod)) {
                indexedReadMethod = null;
            }
            if (indexedReadMethod != null) {
                getArgTypesByMethod(introspData).put(
                        indexedReadMethod, indexedReadMethod.getParameterTypes());
            }
        } else {
            indexedReadMethod = null;
        }

        if (readMethod != null || indexedReadMethod != null) {
            introspData.put(pd.getName(), new FastPropertyDescriptor(readMethod, indexedReadMethod));
        }
    }

    /** Stores the generic {@code get(String)} (or, failing that, {@code get(Object)}) method, if there is one. */
    private void addGenericGetToClassIntrospectionData(Map<Object, Object> introspData,
            Map<MethodSignature, List<Method>> accessibleMethods) {
        Method genericGet = getFirstAccessibleMethod(
                MethodSignature.GET_STRING_SIGNATURE, accessibleMethods);
        if (genericGet == null) {
            genericGet = getFirstAccessibleMethod(
                    MethodSignature.GET_OBJECT_SIGNATURE, accessibleMethods);
        }
        if (genericGet != null) {
            introspData.put(GENERIC_GET_KEY, genericGet);
        }
    }

    /** Stores the public constructor(s) of {@code clazz} under {@link #CONSTRUCTORS_KEY}, if there are any. */
    private void addConstructorsToClassIntrospectionData(final Map<Object, Object> introspData,
            Class<?> clazz) {
        try {
            Constructor<?>[] ctors = clazz.getConstructors();
            if (ctors.length == 1) {
                Constructor<?> ctor = ctors[0];
                introspData.put(CONSTRUCTORS_KEY, new SimpleMethod(ctor, ctor.getParameterTypes()));
            } else if (ctors.length > 1) {
                OverloadedMethods overloadedCtors = new OverloadedMethods(bugfixed);
                for (int i = 0; i < ctors.length; i++) {
                    overloadedCtors.addConstructor(ctors[i]);
                }
                introspData.put(CONSTRUCTORS_KEY, overloadedCtors);
            }
        } catch (SecurityException e) {
            LOG.warn("Can't discover constructors for class " + clazz.getName(), e);
        }
    }

    /**
     * Retrieves mapping of {@link MethodSignature}-s to a {@link List} of accessible methods for a class. In case the
     * class is not public, retrieves methods with same signature as its public methods from public superclasses and
     * interfaces. Basically upcasts every method to the nearest accessible method.
     */
    private static Map<MethodSignature, List<Method>> discoverAccessibleMethods(Class<?> clazz) {
        Map<MethodSignature, List<Method>> accessibles = new HashMap<MethodSignature, List<Method>>();
        discoverAccessibleMethods(clazz, accessibles);
        return accessibles;
    }

    /** Recursive part of {@link #discoverAccessibleMethods(Class)}; accumulates into {@code accessibles}. */
    private static void discoverAccessibleMethods(Class<?> clazz,
            Map<MethodSignature, List<Method>> accessibles) {
        if (Modifier.isPublic(clazz.getModifiers())) {
            try {
                Method[] methods = clazz.getMethods();
                for (int i = 0; i < methods.length; i++) {
                    Method method = methods[i];
                    MethodSignature sig = new MethodSignature(method);
                    // Contrary to intuition, a class can actually have several
                    // different methods with same signature *but* different
                    // return types. These can't be constructed using Java the
                    // language, as this is illegal on source code level, but
                    // the compiler can emit synthetic methods as part of
                    // generic type reification that will have same signature
                    // yet different return type than an existing explicitly
                    // declared method. Consider:
                    // public interface I<T> { T m(); }
                    // public class C implements I<Integer> { Integer m() { return 42; } }
                    // C.class will have both "Object m()" and "Integer m()" methods.
                    List<Method> methodList = accessibles.get(sig);
                    if (methodList == null) {
                        // TODO Collection.singletonList is more efficient, though read only.
                        methodList = new LinkedList<Method>();
                        accessibles.put(sig, methodList);
                    }
                    methodList.add(method);
                }
                return;
            } catch (SecurityException e) {
                LOG.warn("Could not discover accessible methods of class " + clazz.getName()
                        + ", attempting superclasses/interfaces.", e);
                // Fall through and attempt to discover superclass/interface methods
            }
        }

        Class<?>[] interfaces = clazz.getInterfaces();
        for (int i = 0; i < interfaces.length; i++) {
            discoverAccessibleMethods(interfaces[i], accessibles);
        }
        Class<?> superclass = clazz.getSuperclass();
        if (superclass != null) {
            discoverAccessibleMethods(superclass, accessibles);
        }
    }

    /** Returns the accessible method with the same signature and return type as {@code m}, or {@code null}. */
    private static Method getMatchingAccessibleMethod(Method m,
            Map<MethodSignature, List<Method>> accessibles) {
        if (m == null) {
            return null;
        }
        MethodSignature sig = new MethodSignature(m);
        List<Method> ams = accessibles.get(sig);
        if (ams == null) {
            return null;
        }
        for (Method am : ams) {
            if (am.getReturnType() == m.getReturnType()) {
                return am;
            }
        }
        return null;
    }

    /** Returns the first accessible method registered for {@code sig}, or {@code null} if there is none. */
    private static Method getFirstAccessibleMethod(MethodSignature sig,
            Map<MethodSignature, List<Method>> accessibles) {
        List<Method> ams = accessibles.get(sig);
        if (ams == null || ams.isEmpty()) {
            return null;
        }
        return ams.get(0);
    }

    /**
     * As of this writing, this is only used for testing if method order really doesn't matter.
     */
    private void sortMethodDescriptors(List<MethodDescriptor> methodDescriptors) {
        if (methodSorter != null) {
            methodSorter.sortMethodDescriptors(methodDescriptors);
        }
    }

    boolean isAllowedToExpose(Method method) {
        return exposureLevel < BeansWrapper.EXPOSE_SAFE || !UnsafeMethods.isUnsafeMethod(method);
    }

    /** Returns (lazily creating) the per-class method-to-argument-types map from the introspection data. */
    private static Map<Method, Class<?>[]> getArgTypesByMethod(Map<Object, Object> classInfo) {
        @SuppressWarnings("unchecked")
        Map<Method, Class<?>[]> argTypes = (Map<Method, Class<?>[]>) classInfo.get(ARG_TYPES_BY_METHOD_KEY);
        if (argTypes == null) {
            argTypes = new HashMap<Method, Class<?>[]>();
            classInfo.put(ARG_TYPES_BY_METHOD_KEY, argTypes);
        }
        return argTypes;
    }

    /** A (method name, parameter types) pair used as the key when grouping accessible methods. */
    private static final class MethodSignature {
        private static final MethodSignature GET_STRING_SIGNATURE
                = new MethodSignature("get", new Class[] { String.class });
        private static final MethodSignature GET_OBJECT_SIGNATURE
                = new MethodSignature("get", new Class[] { Object.class });

        private final String name;
        private final Class<?>[] args;

        private MethodSignature(String name, Class<?>[] args) {
            this.name = name;
            this.args = args;
        }

        MethodSignature(Method method) {
            this(method.getName(), method.getParameterTypes());
        }

        @Override
        public boolean equals(Object o) {
            if (o instanceof MethodSignature) {
                MethodSignature ms = (MethodSignature) o;
                return ms.name.equals(name) && Arrays.equals(args, ms.args);
            }
            return false;
        }

        @Override
        public int hashCode() {
            return name.hashCode() ^ args.length; // TODO That's a poor quality hash... isn't this a problem?
        }
    }

    // -----------------------------------------------------------------------------------------------------------------
    // Cache management:

    /**
     * Corresponds to {@link BeansWrapper#clearClassIntrospecitonCache()}.
     *
     * @since 2.3.20
     */
    void clearCache() {
        if (getHasSharedInstanceRestrictons()) {
            throw new IllegalStateException(
                    "It's not allowed to clear the whole cache in a read-only " + this.getClass().getName()
                    + " instance. Use removeFromClassIntrospectionCache(String prefix) instead.");
        }
        forcedClearCache();
    }

    /** Unconditionally empties the cache and notifies every registered model factory to do the same. */
    private void forcedClearCache() {
        synchronized (sharedLock) {
            cache.clear();
            cacheClassNames.clear();
            clearingCounter++;

            for (WeakReference<Object> regedMfRef : modelFactories) {
                Object regedMf = regedMfRef.get();
                if (regedMf != null) {
                    if (regedMf instanceof ClassBasedModelFactory) {
                        ((ClassBasedModelFactory) regedMf).clearCache();
                    } else if (regedMf instanceof ModelCache) {
                        ((ModelCache) regedMf).clearCache();
                    } else {
                        throw new BugException();
                    }
                }
            }

            removeClearedModelFactoryReferences();
        }
    }

    /**
     * Corresponds to {@link BeansWrapper#removeFromClassIntrospectionCache(Class)}.
     *
     * @since 2.3.20
     */
    void remove(Class<?> clazz) {
        synchronized (sharedLock) {
            cache.remove(clazz);
            cacheClassNames.remove(clazz.getName());
            clearingCounter++;

            for (WeakReference<Object> regedMfRef : modelFactories) {
                Object regedMf = regedMfRef.get();
                if (regedMf != null) {
                    if (regedMf instanceof ClassBasedModelFactory) {
                        ((ClassBasedModelFactory) regedMf).removeFromCache(clazz);
                    } else if (regedMf instanceof ModelCache) {
                        ((ModelCache) regedMf).clearCache(); // doesn't support selective clearing ATM
                    } else {
                        throw new BugException();
                    }
                }
            }

            removeClearedModelFactoryReferences();
        }
    }

    /**
     * Returns the number of events so far that could make class introspection data returned earlier outdated.
     */
    int getClearingCounter() {
        synchronized (sharedLock) {
            return clearingCounter;
        }
    }

    private void onSameNameClassesDetected(String className) {
        // TODO: This behavior should be pluggable, as in environments where
        // some classes are often reloaded or multiple versions of the
        // same class is normal (OSGi), this will drop the cache contents
        // too often.
        if (LOG.isInfoEnabled()) {
            LOG.info(
                    "Detected multiple classes with the same name, \"" + className
                    + "\". Assuming it was a class-reloading. Clearing class introspection "
                    + "caches to release old data.");
        }
        forcedClearCache();
    }

    // -----------------------------------------------------------------------------------------------------------------
    // Managing dependent objects:

    void registerModelFactory(ClassBasedModelFactory mf) {
        registerModelFactory((Object) mf);
    }

    void registerModelFactory(ModelCache mf) {
        registerModelFactory((Object) mf);
    }

    private void registerModelFactory(Object mf) {
        // Note that this `synchronized (sharedLock)` is also needed for the BeansWrapper constructor to work safely.
        synchronized (sharedLock) {
            modelFactories.add(new WeakReference<Object>(mf, modelFactoriesRefQueue));
            removeClearedModelFactoryReferences();
        }
    }

    void unregisterModelFactory(ClassBasedModelFactory mf) {
        unregisterModelFactory((Object) mf);
    }

    void unregisterModelFactory(ModelCache mf) {
        unregisterModelFactory((Object) mf);
    }

    void unregisterModelFactory(Object mf) {
        synchronized (sharedLock) {
            for (Iterator<WeakReference<Object>> it = modelFactories.iterator(); it.hasNext(); ) {
                Object regedMf = it.next().get();
                if (regedMf == mf) {
                    it.remove();
                }
            }
        }
    }

    /** Drains {@link #modelFactoriesRefQueue}, removing the corresponding entries from {@link #modelFactories}. */
    private void removeClearedModelFactoryReferences() {
        Reference<?> clearedRef;
        while ((clearedRef = modelFactoriesRefQueue.poll()) != null) {
            synchronized (sharedLock) {
                findClearedRef: for (Iterator<WeakReference<Object>> it = modelFactories.iterator();
                        it.hasNext(); ) {
                    if (it.next() == clearedRef) {
                        it.remove();
                        break findClearedRef;
                    }
                }
            }
        }
    }

    // -----------------------------------------------------------------------------------------------------------------
    // Extracting from introspection info:

    static Class<?>[] getArgTypes(Map<Object, Object> classInfo, Method method) {
        @SuppressWarnings("unchecked")
        Map<Method, Class<?>[]> argTypesByMethod
                = (Map<Method, Class<?>[]>) classInfo.get(ARG_TYPES_BY_METHOD_KEY);
        return argTypesByMethod.get(method);
    }

    /**
     * Returns the number of introspected methods/properties that should be available via the TemplateHashModel
     * interface.
     */
    int keyCount(Class<?> clazz) {
        Map<Object, Object> map = get(clazz);
        int count = map.size();
        // Subtract the special bookkeeping keys; only real member names count.
        if (map.containsKey(CONSTRUCTORS_KEY)) count--;
        if (map.containsKey(GENERIC_GET_KEY)) count--;
        if (map.containsKey(ARG_TYPES_BY_METHOD_KEY)) count--;
        return count;
    }

    /**
     * Returns the Set of names of introspected methods/properties that should be available via the TemplateHashModel
     * interface.
     */
    Set<Object> keySet(Class<?> clazz) {
        Set<Object> set = new HashSet<Object>(get(clazz).keySet());
        set.remove(CONSTRUCTORS_KEY);
        set.remove(GENERIC_GET_KEY);
        set.remove(ARG_TYPES_BY_METHOD_KEY);
        return set;
    }

    // -----------------------------------------------------------------------------------------------------------------
    // Properties

    int getExposureLevel() {
        return exposureLevel;
    }

    boolean getExposeFields() {
        return exposeFields;
    }

    boolean getTreatDefaultMethodsAsBeanMembers() {
        return treatDefaultMethodsAsBeanMembers;
    }

    MethodAppearanceFineTuner getMethodAppearanceFineTuner() {
        return methodAppearanceFineTuner;
    }

    MethodSorter getMethodSorter() {
        return methodSorter;
    }

    /**
     * Returns {@code true} if this instance was created with {@link ClassIntrospectorBuilder}, even if it wasn't
     * actually put into the cache (as we reserve the right to do so in later versions).
     */
    boolean getHasSharedInstanceRestrictons() {
        return hasSharedInstanceRestrictons;
    }

    /**
     * Tells if this instance is (potentially) shared among {@link BeansWrapper} instances.
     *
     * @see #getHasSharedInstanceRestrictons()
     */
    boolean isShared() {
        return shared;
    }

    /**
     * Almost always, you want to use {@link BeansWrapper#getSharedIntrospectionLock()}, not this! The only exception is
     * when you get this to set the field returned by {@link BeansWrapper#getSharedIntrospectionLock()}.
     */
    Object getSharedLock() {
        return sharedLock;
    }

    // -----------------------------------------------------------------------------------------------------------------
    // Monitoring:

    /** For unit testing only */
    Object[] getRegisteredModelFactoriesSnapshot() {
        synchronized (sharedLock) {
            return modelFactories.toArray();
        }
    }

}
/*
 * Copyright 2000-2016 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.ui.treeStructure.treetable;

import com.intellij.ui.TableUtil;
import com.intellij.ui.table.JBTable;
import com.intellij.util.ObjectUtils;
import com.intellij.util.ui.JBUI;
import com.intellij.util.ui.accessibility.ScreenReader;

import javax.swing.*;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
import javax.swing.tree.DefaultTreeSelectionModel;
import javax.swing.tree.TreeCellRenderer;
import javax.swing.tree.TreePath;
import java.awt.*;
import java.awt.event.KeyEvent;
import java.awt.event.MouseEvent;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.util.*;
import java.util.List;

/**
 * This example shows how to create a simple JTreeTable component,
 * by using a JTree as a renderer (and editor) for the cells in a
 * particular column in the JTable.
 * <p>
 * The tree is NOT in the Swing component hierarchy; events and UI
 * updates are forwarded to it explicitly (see {@link #updateUI()},
 * {@link #processKeyEvent(KeyEvent)} and {@link #editCellAt(int, int, EventObject)}).
 *
 * @version 1.2 10/27/98
 *
 * @author Philip Milne
 * @author Scott Violet
 */
public class TreeTable extends JBTable {
  /** A subclass of JTree used as the renderer/editor for the tree column. */
  private TreeTableTree myTree;
  // The tree-table model most recently installed via setTableModel().
  private TreeTableModel myTableModel;
  // Keeps the table row height in sync with the tree row height; removed and
  // re-added each time setModel() replaces the tree.
  private PropertyChangeListener myTreeRowHeightPropertyListener;
  // If a screen reader is present, it is better to let the left/right cursor keys
  // be routed to the JTable, as opposed to expand/collapse tree nodes.
  private boolean myProcessCursorKeys = !ScreenReader.isActive();

  public TreeTable(TreeTableModel treeTableModel) {
    super();
    setModel(treeTableModel);
  }

  /**
   * Installs a new tree-table model: rebuilds the internal tree, wires the
   * shared selection model, and installs the tree column renderer/editor.
   * Overloads (does not override) {@link JTable#setModel}.
   */
  @SuppressWarnings({"MethodOverloadsMethodOfSuperclass"})
  public void setModel(TreeTableModel treeTableModel) {
    // Create the tree. It will be used as a renderer and editor.
    // Detach the row-height listener from the previous tree, if any, so the
    // old tree cannot keep mutating this table's row height.
    if (myTree != null) {
      myTree.removePropertyChangeListener(JTree.ROW_HEIGHT_PROPERTY, myTreeRowHeightPropertyListener);
    }
    myTree = new TreeTableTree(treeTableModel, this);
    setRowHeight(myTree.getRowHeight());
    // Mirror future tree row-height changes into the table.
    myTreeRowHeightPropertyListener = new PropertyChangeListener() {
      public void propertyChange(PropertyChangeEvent evt) {
        int treeRowHeight = myTree.getRowHeight();
        if (treeRowHeight == getRowHeight()) return;
        setRowHeight(treeRowHeight);
      }
    };
    myTree.addPropertyChangeListener(JTree.ROW_HEIGHT_PROPERTY, myTreeRowHeightPropertyListener);

    // Install a tableModel representing the visible rows in the tree.
    setTableModel(treeTableModel);

    // Force the JTable and JTree to share their row selection models.
    ListToTreeSelectionModelWrapper selectionWrapper = new ListToTreeSelectionModelWrapper();
    myTree.setSelectionModel(selectionWrapper);
    setSelectionModel(selectionWrapper.getListSelectionModel());

    // Install the tree editor renderer and editor.
    TreeTableCellRenderer treeTableCellRenderer = createTableRenderer(treeTableModel);
    setDefaultRenderer(TreeTableModel.class, treeTableCellRenderer);
    setDefaultEditor(TreeTableModel.class, new TreeTableCellEditor(treeTableCellRenderer));

    // No grid.
    setShowGrid(false);

    // No intercell spacing
    setIntercellSpacing(new Dimension(0, 0));

    // And update the height of the trees row to match that of the table.
    if (myTree.getRowHeight() < 1) {
      setRowHeight(JBUI.scale(18)); // Metal looks better like this.
    }
    else {
      setRowHeight(getRowHeight());
    }
  }

  public TreeTableModel getTableModel() {
    return myTableModel;
  }

  /** Wraps the tree-table model in a {@link TreeTableModelAdapter} and installs it as the JTable model. */
  public void setTableModel(TreeTableModel treeTableModel) {
    myTableModel = treeTableModel;
    super.setModel(adapt(treeTableModel));
  }

  // Factory hook: subclasses may supply a custom adapter.
  protected TreeTableModelAdapter adapt(TreeTableModel treeTableModel) {
    return new TreeTableModelAdapter(treeTableModel, myTree, this);
  }

  public void setRootVisible(boolean visible) {
    myTree.setRootVisible(visible);
  }

  public void putTreeClientProperty(Object key, Object value) {
    myTree.putClientProperty(key, value);
  }

  public void setTreeCellRenderer(TreeCellRenderer renderer) {
    myTree.setCellRenderer(renderer);
  }

  /**
   * Overridden to message super and forward the method to the tree.
   * Since the tree is not actually in the component hierarchy it will
   * never receive this unless we forward it in this manner.
   */
  public void updateUI() {
    super.updateUI();
    if (myTree != null) {
      myTree.updateUI();
    }
    // Use the tree's default foreground and background colors in the
    // table.
    //noinspection HardCodedStringLiteral
    LookAndFeel.installColorsAndFont(this, "Tree.background", "Tree.foreground", "Tree.font");
  }

  /* Workaround for BasicTableUI anomaly. Make sure the UI never tries to
   * paint the editor. The UI currently uses different techniques to
   * paint the renderers and editors and overriding setBounds() below
   * is not the right thing to do for an editor. Returning -1 for the
   * editing row in this case, ensures the editor is never painted.
   */
  public int getEditingRow() {
    return editingColumn == -1 || isTreeColumn(editingColumn) ? -1 : editingRow;
  }

  /**
   * Overridden to pass the new rowHeight to the tree.
   */
  public void setRowHeight(int rowHeight) {
    super.setRowHeight(rowHeight);
    // Only grow the tree's row height; never shrink it from here
    // (the property listener handles the reverse direction).
    if (myTree != null && myTree.getRowHeight() < rowHeight) {
      myTree.setRowHeight(getRowHeight());
    }
  }

  /**
   * @return the tree that is being shared between the model.
   */
  public TreeTableTree getTree() {
    return myTree;
  }

  /**
   * Routes left/right cursor keys to the tree (expand/collapse) when the tree
   * column effectively has focus and exactly one row is selected; everything
   * else goes to the table as usual. Disabled entirely when
   * {@link #setProcessCursorKeys(boolean)} turned the behavior off
   * (e.g. for screen readers).
   */
  protected void processKeyEvent(KeyEvent e) {
    if (!myProcessCursorKeys) {
      super.processKeyEvent(e);
      return;
    }

    int keyCode = e.getKeyCode();
    final int selColumn = columnModel.getSelectionModel().getAnchorSelectionIndex();
    boolean treeHasFocus = selColumn == -1 || selColumn >= 0 && isTreeColumn(selColumn);
    boolean oneRowSelected = getSelectedRowCount() == 1;
    if (treeHasFocus && oneRowSelected && ((keyCode == KeyEvent.VK_LEFT) || (keyCode == KeyEvent.VK_RIGHT))) {
      myTree._processKeyEvent(e);
      // After the tree handled the key, re-select the tree's current row in the table.
      int rowToSelect = ObjectUtils.notNull(myTree.getSelectionRows())[0];
      getSelectionModel().setSelectionInterval(rowToSelect, rowToSelect);
      TableUtil.scrollSelectionToVisible(this);
    }
    else {
      super.processKeyEvent(e);
    }
  }

  /**
   * Enable or disable processing of left/right cursor keys to expand/collapse
   * nodes in the tree column. Disabling these keys can be useful to improve
   * accessibility support when the left/right cursor keys are better suited to
   * navigate to the previous/next cell of a given row.
   */
  public void setProcessCursorKeys(boolean processCursorKeys) {
    myProcessCursorKeys = processCursorKeys;
  }

  /**
   * ListToTreeSelectionModelWrapper extends DefaultTreeSelectionModel
   * to listen for changes in the ListSelectionModel it maintains. Once
   * a change in the ListSelectionModel happens, the paths are updated
   * in the DefaultTreeSelectionModel.
   */
  private class ListToTreeSelectionModelWrapper extends DefaultTreeSelectionModel {
    /** Set to true when we are updating the ListSelectionModel. Guards against re-entrant updates. */
    protected boolean updatingListSelectionModel;

    public ListToTreeSelectionModelWrapper() {
      super();
      getListSelectionModel().addListSelectionListener(createListSelectionListener());
    }

    /**
     * @return the list selection model. ListToTreeSelectionModelWrapper
     * listens for changes to this model and updates the selected paths
     * accordingly.
     */
    ListSelectionModel getListSelectionModel() {
      return listSelectionModel;
    }

    /**
     * This is overridden to set <code>updatingListSelectionModel</code>
     * and message super. This is the only place DefaultTreeSelectionModel
     * alters the ListSelectionModel.
     */
    public void resetRowSelection() {
      if (!updatingListSelectionModel) {
        updatingListSelectionModel = true;
        try {
          // Snapshot the currently selected rows before super mutates state.
          Set<Integer> selectedRows = new HashSet<>();
          int min = listSelectionModel.getMinSelectionIndex();
          int max = listSelectionModel.getMaxSelectionIndex();

          if (min != -1 && max != -1) {
            for (int counter = min; counter <= max; counter++) {
              if (listSelectionModel.isSelectedIndex(counter)) {
                selectedRows.add(new Integer(counter));
              }
            }
          }

          super.resetRowSelection();

          // Restore the snapshot into the (now cleared) list selection model.
          listSelectionModel.clearSelection();
          for (final Object selectedRow : selectedRows) {
            Integer row = (Integer)selectedRow;
            listSelectionModel.addSelectionInterval(row.intValue(), row.intValue());
          }
        }
        finally {
          updatingListSelectionModel = false;
        }
      }
      // Notice how we don't message super if
      // updatingListSelectionModel is true. If
      // updatingListSelectionModel is true, it implies the
      // ListSelectionModel has already been updated and the
      // paths are the only thing that needs to be updated.
    }

    /**
     * @return a newly created instance of ListSelectionHandler.
     */
    protected ListSelectionListener createListSelectionListener() {
      return new ListSelectionHandler();
    }

    /**
     * If <code>updatingListSelectionModel</code> is false, this will
     * reset the selected paths from the selected rows in the list
     * selection model.
     */
    protected void updateSelectedPathsFromSelectedRows() {
      if (!updatingListSelectionModel) {
        updatingListSelectionModel = true;
        try {
          // This is way expensive, ListSelectionModel needs an
          // enumerator for iterating.
          int min = listSelectionModel.getMinSelectionIndex();
          int max = listSelectionModel.getMaxSelectionIndex();

          clearSelection();
          if (min != -1 && max != -1) {
            List<TreePath> selectionPaths = new ArrayList<>();
            for (int counter = min; counter <= max; counter++) {
              if (listSelectionModel.isSelectedIndex(counter)) {
                TreePath selPath = myTree.getPathForRow(counter);

                if (selPath != null) {
                  selectionPaths.add(selPath);
                }
              }
            }
            if (!selectionPaths.isEmpty()) {
              addSelectionPaths(selectionPaths.toArray(new TreePath[selectionPaths.size()]));
            }
          }
        }
        finally {
          updatingListSelectionModel = false;
        }
      }
    }

    /**
     * Class responsible for calling updateSelectedPathsFromSelectedRows
     * when the selection of the list changes.
     */
    class ListSelectionHandler implements ListSelectionListener {
      public void valueChanged(ListSelectionEvent e) {
        updateSelectedPathsFromSelectedRows();
      }
    }
  }

  /**
   * Overridden to forward mouse events on the tree column into the off-hierarchy
   * JTree so clicks can expand/collapse nodes. The y coordinate is re-mapped when
   * table and tree row heights differ.
   */
  public boolean editCellAt(int row, int column, EventObject e) {
    boolean editResult = super.editCellAt(row, column, e);
    if (e instanceof MouseEvent && isTreeColumn(column)) {
      MouseEvent me = (MouseEvent)e;
      int y = me.getY();

      if (getRowHeight() != myTree.getRowHeight()) {
        // fix y if row heights are not equal
        // [todo]: review setRowHeight to synchronize heights correctly!
        final Rectangle tableCellRect = getCellRect(row, column, true);
        y = Math.min(y - tableCellRect.y, myTree.getRowHeight() - 1) + row * myTree.getRowHeight();
      }

      MouseEvent newEvent = new MouseEvent(myTree, me.getID(),
                                           me.getWhen(), me.getModifiers(),
                                           me.getX() - getCellRect(0, column, true).x, y,
                                           me.getClickCount(),
                                           me.isPopupTrigger()
      );
      myTree.dispatchEvent(newEvent);

      // Some LAFs, for example, Aqua under MAC OS X
      // expand tree node by MOUSE_RELEASED event. Unfortunately,
      // it's not possible to find easy way to wedge in table's
      // event sequence. Therefore we send "synthetic" release event.
      if (newEvent.getID() == MouseEvent.MOUSE_PRESSED) {
        MouseEvent newME2 = new MouseEvent(
          myTree,
          MouseEvent.MOUSE_RELEASED,
          me.getWhen(), me.getModifiers(),
          me.getX() - getCellRect(0, column, true).x,
          y - getCellRect(0, column, true).y,
          me.getClickCount(),
          me.isPopupTrigger()
        );
        myTree.dispatchEvent(newME2);
      }
    }
    return editResult;
  }

  // A column renders the tree iff its column class is (a subtype of) TreeTableModel.
  protected boolean isTreeColumn(int column) {
    return TreeTableModel.class.isAssignableFrom(getColumnClass(column));
  }

  /** Adds the path to both the tree selection and the table selection. */
  public void addSelectedPath(TreePath path) {
    int row = getTree().getRowForPath(path);
    getTree().addSelectionPath(path);
    getSelectionModel().addSelectionInterval(row, row);
  }

  /** Removes the path from both the tree selection and the table selection. */
  public void removeSelectedPath(TreePath path) {
    int row = getTree().getRowForPath(path);
    getTree().removeSelectionPath(path);
    getSelectionModel().removeSelectionInterval(row, row);
  }

  // Factory hook: subclasses may supply a custom tree-column renderer.
  public TreeTableCellRenderer createTableRenderer(TreeTableModel treeTableModel) {
    return new TreeTableCellRenderer(this, myTree);
  }

  /** Raises the row height to at least {@code i}; never shrinks it. */
  public void setMinRowHeight(int i) {
    setRowHeight(Math.max(getRowHeight(), i));
  }
}
package au.com.mineauz.minigames.minigame.modules;

import au.com.mineauz.minigames.MinigameMessageType;
import au.com.mineauz.minigames.objects.MinigamePlayer;
import au.com.mineauz.minigames.MinigameUtils;
import au.com.mineauz.minigames.PlayerLoadout;
import au.com.mineauz.minigames.config.Flag;
import au.com.mineauz.minigames.config.LoadoutSetFlag;
import au.com.mineauz.minigames.menu.Menu;
import au.com.mineauz.minigames.menu.MenuItemCustom;
import au.com.mineauz.minigames.minigame.Minigame;
import com.google.common.collect.Maps;
import org.bukkit.Material;
import org.bukkit.configuration.ConfigurationSection;
import org.bukkit.configuration.file.FileConfiguration;
import org.bukkit.inventory.ItemStack;
import org.bukkit.plugin.Plugin;
import org.bukkit.potion.PotionEffect;
import org.bukkit.potion.PotionEffectType;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;

/**
 * Minigame module that manages the set of player loadouts for a game.
 * Every game gets a non-deletable "default" loadout; additional named
 * loadouts are stored in {@link #extraLoadouts} and persisted through
 * {@link #loadoutsFlag}. Global (cross-game) loadout addons are kept in
 * the static {@link #addons} registry.
 */
public class LoadoutModule extends MinigameModule {
    // Global addon registry shared by ALL games; keyed by the addon's concrete class.
    // NOTE(review): plain HashMap mutated via static register/unregister — assumes
    // single-threaded (main server thread) access; confirm before calling off-thread.
    private static Map<Class<? extends LoadoutAddon>, LoadoutAddon<?>> addons = Maps.newHashMap();
    // Per-game loadouts by name; always contains the "default" entry created in the constructor.
    private Map<String, PlayerLoadout> extraLoadouts = new HashMap<>();
    // Config flag that serializes extraLoadouts under the "loadouts" key.
    private LoadoutSetFlag loadoutsFlag = new LoadoutSetFlag(extraLoadouts, "loadouts");

    public LoadoutModule(Minigame mgm) {
        super(mgm);
        // Seed the mandatory, non-deletable default loadout.
        PlayerLoadout def = new PlayerLoadout("default");
        def.setDeleteable(false);
        extraLoadouts.put("default", def);
    }

    /** Convenience accessor for this module on a given game (registered under the name "Loadouts"). */
    public static LoadoutModule getMinigameModule(Minigame minigame) {
        return (LoadoutModule) minigame.getModule("Loadouts");
    }

    /**
     * Registers a loadout addon. This addon will be available for all loadouts on all games.
     *
     * @param plugin The plugin registering the addon
     * @param addon  The addon to register
     */
    public static void registerAddon(Plugin plugin, LoadoutAddon<?> addon) {
        // NOTE(review): the plugin parameter is currently unused — the addon is keyed
        // only by its class, so re-registering the same class replaces the old addon.
        addons.put(addon.getClass(), addon);
    }

    /**
     * Unregisters a previously registered addon
     *
     * @param addon The addon to unregister
     */
    public static void unregisterAddon(Class<? extends LoadoutAddon<?>> addon) {
        addons.remove(addon);
    }

    /**
     * Retrieves a registered addon
     *
     * @param addonClass The addon class to get the addon for
     * @return The addon or null
     */
    @SuppressWarnings("unchecked")
    public static <T extends LoadoutAddon<?>> T getAddon(Class<T> addonClass) {
        return (T) addons.get(addonClass);
    }

    /** Lets every registered addon contribute its menu options to a loadout's edit menu. */
    public static void addAddonMenuItems(Menu menu, PlayerLoadout loadout) {
        for (LoadoutAddon<?> addon : addons.values()) {
            addon.addMenuOptions(menu, loadout);
        }
    }

    @Override
    public String getName() {
        return "Loadouts";
    }

    @Override
    public Map<String, Flag<?>> getFlags() {
        Map<String, Flag<?>> flags = new HashMap<>();
        flags.put(loadoutsFlag.getName(), loadoutsFlag);
        return flags;
    }

    @Override
    public boolean useSeparateConfig() {
        return false;
    }

    @Override
    public void save(FileConfiguration config) {
        //Do Nothing -- persistence is handled by the flags returned from getFlags().
    }

    /**
     * Loads loadouts from the legacy (pre-flag) config layout:
     * {@code <game>.loadout.*} for the default loadout and
     * {@code <game>.extraloadouts.<name>.*} for named ones.
     * Item slots are numeric keys; potions, permissions, fall damage and
     * hunger are nested sub-keys.
     */
    @Override
    public void load(FileConfiguration config) {
        //TODO: Remove entire load after 1.7
        if (config.contains(getMinigame() + ".loadout")) {
            // Numeric keys under ".loadout" are inventory slots for the default loadout.
            Set<String> keys = config.getConfigurationSection(getMinigame() + ".loadout").getKeys(false);
            for (String key : keys) {
                if (key.matches("[-]?[0-9]+"))
                    getLoadout("default").addItem(config.getItemStack(getMinigame() + ".loadout." + key), Integer.parseInt(key));
            }
            if (config.contains(getMinigame() + ".loadout.potions")) {
                keys = config.getConfigurationSection(getMinigame() + ".loadout.potions").getKeys(false);
                for (String eff : keys) {
                    // Skip effect names Bukkit doesn't recognize.
                    if (PotionEffectType.getByName(eff) != null) {
                        PotionEffect effect = new PotionEffect(PotionEffectType.getByName(eff),
                                config.getInt(getMinigame() + ".loadout.potions." + eff + ".dur"),
                                config.getInt(getMinigame() + ".loadout.potions." + eff + ".amp"), true);
                        getLoadout("default").addPotionEffect(effect);
                    }
                }
            }
            if (config.contains(getMinigame() + ".loadout.usepermissions")) {
                getLoadout("default").setUsePermissions(config.getBoolean(getMinigame() + ".loadout.usepermissions"));
            }
            if (config.contains(getMinigame() + ".loadout.falldamage")) {
                getLoadout("default").setHasFallDamage(config.getBoolean(getMinigame() + ".loadout.falldamage"));
            }
            if (config.contains(getMinigame() + ".loadout.hunger")) {
                getLoadout("default").setHasHunger(config.getBoolean(getMinigame() + ".loadout.hunger"));
            }
        }
        if (config.contains(getMinigame() + ".extraloadouts")) {
            Set<String> keys = config.getConfigurationSection(getMinigame() + ".extraloadouts").getKeys(false);
            for (String loadout : keys) {
                addLoadout(loadout);
                // Numeric keys are inventory slots for this named loadout.
                Set<String> items = config.getConfigurationSection(getMinigame() + ".extraloadouts." + loadout).getKeys(false);
                for (String key : items) {
                    if (key.matches("[-]?[0-9]+"))
                        getLoadout(loadout).addItem(config.getItemStack(getMinigame() + ".extraloadouts." + loadout + "." + key), Integer.parseInt(key));
                }
                if (config.contains(getMinigame() + ".extraloadouts." + loadout + ".potions")) {
                    Set<String> pots = config.getConfigurationSection(getMinigame() + ".extraloadouts." + loadout + ".potions").getKeys(false);
                    for (String eff : pots) {
                        if (PotionEffectType.getByName(eff) != null) {
                            // NOTE(review): unlike the default-loadout branch above, this
                            // constructor omits the trailing "ambient" flag — confirm whether
                            // the asymmetry is intentional.
                            PotionEffect effect = new PotionEffect(PotionEffectType.getByName(eff),
                                    config.getInt(getMinigame() + ".extraloadouts." + loadout + ".potions." + eff + ".dur"),
                                    config.getInt(getMinigame() + ".extraloadouts." + loadout + ".potions." + eff + ".amp"));
                            getLoadout(loadout).addPotionEffect(effect);
                        }
                    }
                }
                if (config.contains(getMinigame() + ".extraloadouts." + loadout + ".usepermissions")) {
                    getLoadout(loadout).setUsePermissions(config.getBoolean(getMinigame() + ".extraloadouts." + loadout + ".usepermissions"));
                }
                if (config.contains(getMinigame() + ".extraloadouts." + loadout + ".falldamage"))
                    getLoadout(loadout).setHasFallDamage(config.getBoolean(getMinigame() + ".extraloadouts." + loadout + ".falldamage"));
                if (config.contains(getMinigame() + ".extraloadouts." + loadout + ".hunger"))
                    getLoadout(loadout).setHasHunger(config.getBoolean(getMinigame() + ".extraloadouts." + loadout + ".hunger"));
            }
        }
    }

    /** Creates and stores an empty loadout under the given (case-sensitive) name. */
    public void addLoadout(String name) {
        extraLoadouts.put(name, new PlayerLoadout(name));
    }

    public void deleteLoadout(String name) {
        extraLoadouts.remove(name);
    }

    /** @return the set of loadout names (a live view of the backing map's keys). */
    public Set<String> getLoadouts() {
        return extraLoadouts.keySet();
    }

    /** @return the live name-to-loadout map (mutations affect this module). */
    public Map<String, PlayerLoadout> getLoadoutMap() {
        return extraLoadouts;
    }

    /**
     * Looks up a loadout by name: exact match first, then a linear
     * case-insensitive scan.
     *
     * @return the loadout, or null if no name matches
     */
    public PlayerLoadout getLoadout(String name) {
        PlayerLoadout pl = null;
        if (extraLoadouts.containsKey(name)) {
            pl = extraLoadouts.get(name);
        } else {
            for (String loadout : extraLoadouts.keySet()) {
                if (loadout.equalsIgnoreCase(name)) {
                    pl = extraLoadouts.get(loadout);
                    break;
                }
            }
        }
        return pl;
    }

    public boolean hasLoadouts() {
        return !extraLoadouts.isEmpty();
    }

    /**
     * @return true if a loadout with this name exists; "default" always
     * exists (case-insensitively), other names match exactly or case-insensitively.
     */
    public boolean hasLoadout(String name) {
        if (!name.equalsIgnoreCase("default")) {
            if (extraLoadouts.containsKey(name))
                return extraLoadouts.containsKey(name);
            else {
                // Fall back to a case-insensitive scan of the names.
                for (String loadout : extraLoadouts.keySet()) {
                    if (loadout.equalsIgnoreCase(name))
                        return true;
                }
                return false;
            }
        } else {
            return true;
        }
    }

    /**
     * Opens the loadout-selection menu for a player. Only loadouts that are
     * flagged for display, permitted (when permission-gated), and matching the
     * player's team color (in team games) are shown. Clicking an entry selects
     * the loadout and, when {@code equip} is true, equips it immediately.
     */
    public void displaySelectionMenu(MinigamePlayer player, final boolean equip) {
        Menu m = new Menu(6, "Select Loadout", player);
        final MinigamePlayer fply = player;

        for (PlayerLoadout loadout : extraLoadouts.values()) {
            if (loadout.isDisplayedInMenu()) {
                if (!loadout.getUsePermissions() || player.getPlayer().hasPermission("minigame.loadout." + loadout.getName(false).toLowerCase())) {
                    if (!player.getMinigame().isTeamGame() || loadout.getTeamColor() == null ||
                            player.getTeam().getColor() == loadout.getTeamColor()) {
                        MenuItemCustom c = new MenuItemCustom(loadout.getName(true), Material.GLASS);
                        // Use the loadout's first item as the menu icon, if it has one.
                        if (!loadout.getItems().isEmpty()) {
                            ItemStack item = loadout.getItem(new ArrayList<>(loadout.getItems()).get(0));
                            c.setItem(item);
                        }
                        final PlayerLoadout floadout2 = loadout;
                        c.setClick(object -> {
                            fply.setLoadout(floadout2);
                            fply.getPlayer().closeInventory();
                            if (!equip)
                                fply.sendMessage(MinigameUtils.getLang("player.loadout.nextSpawn"), MinigameMessageType.INFO);
                            else {
                                fply.sendMessage(MinigameUtils.formStr("player.loadout.equipped", floadout2.getName(true)), MinigameMessageType.INFO);
                                floadout2.equiptLoadout(fply);
                            }
                            return null;
                        });
                        m.addItem(c);
                    }
                }
            }
        }
        m.displayMenu(player);
    }

    @Override
    public void addEditMenuOptions(Menu menu) {
        // TODO Move loadout menu stuff here
    }

    @Override
    public boolean displayMechanicSettings(Menu previous) {
        return false;
    }

    /**
     * Represents a custom loadout element.
     * This can be used to add things like disguises
     * or commands.
     *
     * @param <T> The value type for this loadout addon
     */
    public interface LoadoutAddon<T> {
        /** @return the addon's unique display name. */
        String getName();

        /** Adds this addon's configuration options to a loadout's edit menu. */
        void addMenuOptions(Menu menu, PlayerLoadout loadout);

        /** Persists the addon value into the given config section. */
        void save(ConfigurationSection section, T value);

        /** Reads the addon value back from the given config section. */
        T load(ConfigurationSection section);

        /** Applies the addon value to a player when the loadout is equipped. */
        void applyLoadout(MinigamePlayer player, T value);

        /** Reverts the addon's effect when the loadout is removed. */
        void clearLoadout(MinigamePlayer player, T value);
    }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.index.shard; import org.apache.lucene.document.Document; import org.apache.lucene.index.IndexNotFoundException; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.store.Directory; import org.apache.lucene.util.Bits; import org.apache.lucene.util.IOUtils; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.flush.FlushRequest; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.RecoverySource; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingHelper; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.TestShardRouting; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import 
org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.MapperTestUtils; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.cache.IndexCache; import org.elasticsearch.index.cache.query.DisabledQueryCache; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.EngineFactory; import org.elasticsearch.index.fielddata.IndexFieldDataCache; import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.Mapping; import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.index.seqno.SequenceNumbersService; import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.index.store.DirectoryService; import org.elasticsearch.index.store.Store; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache; import org.elasticsearch.indices.recovery.PeerRecoveryTargetService; import org.elasticsearch.indices.recovery.RecoveryFailedException; import org.elasticsearch.indices.recovery.RecoverySourceHandler; import org.elasticsearch.indices.recovery.RecoveryState; import org.elasticsearch.indices.recovery.RecoveryTarget; import org.elasticsearch.indices.recovery.StartRecoveryRequest; import org.elasticsearch.node.Node; import org.elasticsearch.test.DummyShardLock; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import java.io.IOException; import java.util.Arrays; import java.util.Collections; import java.util.EnumSet; import java.util.HashSet; import java.util.Set; import java.util.concurrent.TimeUnit; import 
java.util.function.BiFunction; import java.util.function.Consumer; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.hasSize; /** * A base class for unit tests that need to create and shutdown {@link IndexShard} instances easily, * containing utilities for shard creation and recoveries. See {{@link #newShard(boolean)}} and * {@link #newStartedShard()} for a good starting points */ public abstract class IndexShardTestCase extends ESTestCase { protected static final PeerRecoveryTargetService.RecoveryListener recoveryListener = new PeerRecoveryTargetService.RecoveryListener() { @Override public void onRecoveryDone(RecoveryState state) { } @Override public void onRecoveryFailure(RecoveryState state, RecoveryFailedException e, boolean sendShardFailure) { throw new AssertionError(e); } }; protected ThreadPool threadPool; @Override public void setUp() throws Exception { super.setUp(); threadPool = new TestThreadPool(getClass().getName()); } @Override public void tearDown() throws Exception { try { ThreadPool.terminate(threadPool, 30, TimeUnit.SECONDS); } finally { super.tearDown(); } } private Store createStore(IndexSettings indexSettings, ShardPath shardPath) throws IOException { final ShardId shardId = shardPath.getShardId(); final DirectoryService directoryService = new DirectoryService(shardId, indexSettings) { @Override public Directory newDirectory() throws IOException { return newFSDirectory(shardPath.resolveIndex()); } }; return new Store(shardId, indexSettings, directoryService, new DummyShardLock(shardId)); } /** * creates a new initializing shard. The shard will have its own unique data path. 
* * @param primary indicates whether to a primary shard (ready to recover from an empty store) or a replica * (ready to recover from another shard) */ protected IndexShard newShard(boolean primary) throws IOException { ShardRouting shardRouting = TestShardRouting.newShardRouting(new ShardId("index", "_na_", 0), "n1", primary, ShardRoutingState.INITIALIZING, primary ? RecoverySource.StoreRecoverySource.EMPTY_STORE_INSTANCE : RecoverySource.PeerRecoverySource.INSTANCE); return newShard(shardRouting); } /** * creates a new initializing shard. The shard will have its own unique data path. * * @param shardRouting the {@link ShardRouting} to use for this shard * @param listeners an optional set of listeners to add to the shard */ protected IndexShard newShard(ShardRouting shardRouting, IndexingOperationListener... listeners) throws IOException { assert shardRouting.initializing() : shardRouting; Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .build(); IndexMetaData.Builder metaData = IndexMetaData.builder(shardRouting.getIndexName()) .settings(settings) .primaryTerm(0, randomIntBetween(1, 100)); return newShard(shardRouting, metaData.build(), listeners); } /** * creates a new initializing shard. The shard will have its own unique data path. * * @param shardId the shard id to use * @param primary indicates whether to a primary shard (ready to recover from an empty store) or a replica * (ready to recover from another shard) * @param listeners an optional set of listeners to add to the shard */ protected IndexShard newShard(ShardId shardId, boolean primary, IndexingOperationListener... listeners) throws IOException { ShardRouting shardRouting = TestShardRouting.newShardRouting(shardId, randomAlphaOfLength(5), primary, ShardRoutingState.INITIALIZING, primary ? 
RecoverySource.StoreRecoverySource.EMPTY_STORE_INSTANCE : RecoverySource.PeerRecoverySource.INSTANCE); return newShard(shardRouting, listeners); } /** * creates a new initializing shard. The shard will will be put in its proper path under the * supplied node id. * * @param shardId the shard id to use * @param primary indicates whether to a primary shard (ready to recover from an empty store) or a replica * (ready to recover from another shard) */ protected IndexShard newShard(ShardId shardId, boolean primary, String nodeId, IndexMetaData indexMetaData, @Nullable IndexSearcherWrapper searcherWrapper) throws IOException { ShardRouting shardRouting = TestShardRouting.newShardRouting(shardId, nodeId, primary, ShardRoutingState.INITIALIZING, primary ? RecoverySource.StoreRecoverySource.EMPTY_STORE_INSTANCE : RecoverySource.PeerRecoverySource.INSTANCE); return newShard(shardRouting, indexMetaData, searcherWrapper, null); } /** * creates a new initializing shard. The shard will will be put in its proper path under the * supplied node id. * * @param shardId the shard id to use * @param primary indicates whether to a primary shard (ready to recover from an empty store) or a replica * (ready to recover from another shard) */ protected IndexShard newShard(ShardId shardId, boolean primary, String nodeId, IndexMetaData indexMetaData, Runnable globalCheckpointSyncer, @Nullable IndexSearcherWrapper searcherWrapper) throws IOException { ShardRouting shardRouting = TestShardRouting.newShardRouting(shardId, nodeId, primary, ShardRoutingState.INITIALIZING, primary ? RecoverySource.StoreRecoverySource.EMPTY_STORE_INSTANCE : RecoverySource.PeerRecoverySource.INSTANCE); return newShard(shardRouting, indexMetaData, searcherWrapper, null); } /** * creates a new initializing shard. The shard will will be put in its proper path under the * current node id the shard is assigned to. 
* @param routing       shard routing to use
 * @param indexMetaData indexMetaData for the shard, including any mapping
 * @param listeners     an optional set of listeners to add to the shard
 */
protected IndexShard newShard(ShardRouting routing, IndexMetaData indexMetaData,
                              IndexingOperationListener... listeners) throws IOException {
    // Delegate with no searcher wrapper and no custom engine factory.
    return newShard(routing, indexMetaData, null, null, listeners);
}

/**
 * Creates a new initializing shard. The shard will be put in its proper path under the
 * current node id the shard is assigned to.
 *
 * @param routing              shard routing to use
 * @param indexMetaData        indexMetaData for the shard, including any mapping
 * @param indexSearcherWrapper an optional wrapper to be used during searches
 * @param engineFactory        an optional factory for the shard's engine
 * @param listeners            an optional set of listeners to add to the shard
 */
protected IndexShard newShard(ShardRouting routing, IndexMetaData indexMetaData,
                              @Nullable IndexSearcherWrapper indexSearcherWrapper,
                              @Nullable EngineFactory engineFactory,
                              IndexingOperationListener... listeners) throws IOException {
    // add node id as name to settings for proper logging
    final ShardId shardId = routing.shardId();
    final NodeEnvironment.NodePath nodePath = new NodeEnvironment.NodePath(createTempDir());
    // Data path and state path share the same temp root; that is fine for tests.
    ShardPath shardPath = new ShardPath(false, nodePath.resolve(shardId), nodePath.resolve(shardId), shardId);
    return newShard(routing, shardPath, indexMetaData, indexSearcherWrapper, engineFactory, listeners);
}

/**
 * Creates a new initializing shard.
 *
 * @param routing              shard routing to use
 * @param shardPath            path to use for shard data
 * @param indexMetaData        indexMetaData for the shard, including any mapping
 * @param indexSearcherWrapper an optional wrapper to be used during searches
 * @param engineFactory        an optional factory for the shard's engine
 * @param listeners            an optional set of listeners to add to the shard
 */
protected IndexShard newShard(ShardRouting routing, ShardPath shardPath, IndexMetaData indexMetaData,
                              @Nullable IndexSearcherWrapper indexSearcherWrapper,
                              @Nullable EngineFactory engineFactory,
                              IndexingOperationListener... listeners) throws IOException {
    // Give the node a name so log lines produced by the shard are attributable.
    final Settings nodeSettings = Settings.builder().put("node.name", routing.currentNodeId()).build();
    final IndexSettings indexSettings = new IndexSettings(indexMetaData, nodeSettings);
    final IndexShard indexShard;
    final Store store = createStore(indexSettings, shardPath);
    boolean success = false;
    try {
        IndexCache indexCache = new IndexCache(indexSettings, new DisabledQueryCache(indexSettings), null);
        MapperService mapperService = MapperTestUtils.newMapperService(xContentRegistry(), createTempDir(),
            indexSettings.getSettings(), "index");
        mapperService.merge(indexMetaData, MapperService.MergeReason.MAPPING_RECOVERY, true);
        SimilarityService similarityService = new SimilarityService(indexSettings, Collections.emptyMap());
        // No-op event listener and warmer: tests do not need either hook.
        final IndexEventListener indexEventListener = new IndexEventListener() {
        };
        final Engine.Warmer warmer = searcher -> {
        };
        IndicesFieldDataCache indicesFieldDataCache = new IndicesFieldDataCache(nodeSettings,
            new IndexFieldDataCache.Listener() {
            });
        IndexFieldDataService indexFieldDataService = new IndexFieldDataService(indexSettings, indicesFieldDataCache,
            new NoneCircuitBreakerService(), mapperService);
        indexShard = new IndexShard(routing, indexSettings, shardPath, store, () -> null, indexCache, mapperService,
            similarityService, indexFieldDataService, engineFactory, indexEventListener, indexSearcherWrapper,
            threadPool, BigArrays.NON_RECYCLING_INSTANCE, warmer, Collections.emptyList(), Arrays.asList(listeners));
        success = true;
    } finally {
        if (success == false) {
            // Shard construction failed: release the store we created above.
            IOUtils.close(store);
        }
    }
    return indexShard;
}

/**
 * Takes an existing shard, closes it and starts a new initializing shard at the same location.
 *
 * @param listeners new listeners to use for the newly created shard
 */
protected IndexShard reinitShard(IndexShard current, IndexingOperationListener... listeners) throws IOException {
    final ShardRouting shardRouting = current.routingEntry();
    // Primaries recover from their existing store; replicas recover from a peer.
    return reinitShard(current, ShardRoutingHelper.initWithSameId(shardRouting,
        shardRouting.primary() ? RecoverySource.StoreRecoverySource.EXISTING_STORE_INSTANCE :
            RecoverySource.PeerRecoverySource.INSTANCE
        ), listeners);
}

/**
 * Takes an existing shard, closes it and starts a new initializing shard at the same location.
 *
 * @param routing   the shard routing to use for the newly created shard.
 * @param listeners new listeners to use for the newly created shard
 */
protected IndexShard reinitShard(IndexShard current, ShardRouting routing,
                                 IndexingOperationListener... listeners) throws IOException {
    closeShards(current);
    return newShard(routing, current.shardPath(), current.indexSettings().getIndexMetaData(), null,
        current.engineFactory, listeners);
}

/**
 * Creates a new empty shard and starts it. The shard will randomly be a replica or a primary.
 */
protected IndexShard newStartedShard() throws IOException {
    return newStartedShard(randomBoolean());
}

/**
 * Creates a new empty shard and starts it.
 *
 * @param primary controls whether the shard will be a primary or a replica.
 */
protected IndexShard newStartedShard(boolean primary) throws IOException {
    IndexShard shard = newShard(primary);
    if (primary) {
        recoveryShardFromStore(shard);
    } else {
        recoveryEmptyReplica(shard);
    }
    return shard;
}

protected void closeShards(IndexShard... shards) throws IOException {
    closeShards(Arrays.asList(shards));
}

/** Closes every non-null shard and its store; null entries are skipped. */
protected void closeShards(Iterable<IndexShard> shards) throws IOException {
    for (IndexShard shard : shards) {
        if (shard != null) {
            try {
                shard.close("test", false);
            } finally {
                // Release the store even if closing the shard threw.
                IOUtils.close(shard.store());
            }
        }
    }
}

/** Recovers the given primary from its local store and moves its routing entry to STARTED. */
protected void recoveryShardFromStore(IndexShard primary) throws IOException {
    primary.markAsRecovering("store", new RecoveryState(primary.routingEntry(),
        getFakeDiscoNode(primary.routingEntry().currentNodeId()),
        null));
    primary.recoverFromStore();
    updateRoutingEntry(primary, ShardRoutingHelper.moveToStarted(primary.routingEntry()));
}

/** Applies the given routing entry to the shard, keeping its current primary term. */
public static void updateRoutingEntry(IndexShard shard, ShardRouting shardRouting) throws IOException {
    shard.updateShardState(shardRouting, shard.getPrimaryTerm(), null, 0L,
        Collections.emptySet(), Collections.emptySet());
}

/** Recovers an empty replica from a freshly started temporary primary, which is closed afterwards. */
protected void recoveryEmptyReplica(IndexShard replica) throws IOException {
    IndexShard primary = null;
    try {
        primary = newStartedShard(true);
        recoverReplica(replica, primary);
    } finally {
        closeShards(primary);
    }
}

/** Builds a fake discovery node with the given id holding all node roles. */
protected DiscoveryNode getFakeDiscoNode(String id) {
    return new DiscoveryNode(id, id, buildNewFakeTransportAddress(), Collections.emptyMap(),
        EnumSet.allOf(DiscoveryNode.Role.class), Version.CURRENT);
}

/** recovers a replica from the given primary **/
protected void recoverReplica(IndexShard replica, IndexShard primary) throws IOException {
    recoverReplica(replica, primary,
        (r, sourceNode) -> new RecoveryTarget(r, sourceNode, recoveryListener, version -> {
        }),
        true);
}

/**
 * Recovers a replica from the given primary, allowing the user to supply a custom recovery
 * target. A typical usage of a custom recovery target is to assert things in the various stages
 * of recovery.
 *
 * @param replica          the recovery target shard
 * @param primary          the recovery source shard
 * @param targetSupplier   supplies an instance of {@link RecoveryTarget}
 * @param markAsRecovering set to {@code false} if the replica is already marked as recovering
 */
protected final void recoverReplica(final IndexShard replica,
                                    final IndexShard primary,
                                    final BiFunction<IndexShard, DiscoveryNode, RecoveryTarget> targetSupplier,
                                    final boolean markAsRecovering) throws IOException {
    final DiscoveryNode pNode = getFakeDiscoNode(primary.routingEntry().currentNodeId());
    final DiscoveryNode rNode = getFakeDiscoNode(replica.routingEntry().currentNodeId());
    if (markAsRecovering) {
        replica.markAsRecovering("remote", new RecoveryState(replica.routingEntry(), pNode, rNode));
    } else {
        // Caller claims the shard is already recovering; verify that.
        assertEquals(replica.state(), IndexShardState.RECOVERING);
    }
    replica.prepareForIndexRecovery();
    final RecoveryTarget recoveryTarget = targetSupplier.apply(replica, pNode);
    final String targetAllocationId = recoveryTarget.indexShard().routingEntry().allocationId().getId();
    final Store.MetadataSnapshot snapshot = getMetadataSnapshotOrEmpty(replica);
    final long startingSeqNo;
    if (snapshot.size() > 0) {
        // Existing files on disk: ask the target service where sequence-based recovery may start.
        startingSeqNo = PeerRecoveryTargetService.getStartingSeqNo(recoveryTarget);
    } else {
        startingSeqNo = SequenceNumbersService.UNASSIGNED_SEQ_NO;
    }
    final StartRecoveryRequest request = new StartRecoveryRequest(replica.shardId(), targetAllocationId,
        pNode, rNode, snapshot, false, 0, startingSeqNo);
    final RecoverySourceHandler recovery = new RecoverySourceHandler(
        primary,
        recoveryTarget,
        request,
        () -> 0L,
        e -> () -> {},
        (int) ByteSizeUnit.MB.toBytes(1),
        Settings.builder().put(Node.NODE_NAME_SETTING.getKey(), pNode.getName()).build());
    recovery.recoverToTarget();
    recoveryTarget.markAsDone();
    updateRoutingEntry(replica, ShardRoutingHelper.moveToStarted(replica.routingEntry()));
}

/**
 * Snapshots the replica's store metadata, treating a missing or unreadable index as empty
 * rather than failing the recovery.
 */
private Store.MetadataSnapshot getMetadataSnapshotOrEmpty(IndexShard replica) throws IOException {
    Store.MetadataSnapshot result;
    try {
        result = replica.snapshotStoreMetadata();
    } catch (IndexNotFoundException e) {
        // OK!
        result = Store.MetadataSnapshot.EMPTY;
    } catch (IOException e) {
        logger.warn("failed read store, treating as empty", e);
        result = Store.MetadataSnapshot.EMPTY;
    }
    return result;
}

/** Returns the ids of all live documents in the shard after a refresh. */
protected Set<String> getShardDocUIDs(final IndexShard shard) throws IOException {
    shard.refresh("get_uids");
    try (Engine.Searcher searcher = shard.acquireSearcher("test")) {
        Set<String> ids = new HashSet<>();
        for (LeafReaderContext leafContext : searcher.reader().leaves()) {
            LeafReader reader = leafContext.reader();
            Bits liveDocs = reader.getLiveDocs();
            for (int i = 0; i < reader.maxDoc(); i++) {
                // liveDocs == null means every doc in the segment is live.
                if (liveDocs == null || liveDocs.get(i)) {
                    Document uuid = reader.document(i, Collections.singleton(IdFieldMapper.NAME));
                    ids.add(uuid.get(IdFieldMapper.NAME));
                }
            }
        }
        return ids;
    }
}

/** Asserts that the shard holds exactly the given number of live documents. */
protected void assertDocCount(IndexShard shard, int docDount) throws IOException {
    assertThat(getShardDocUIDs(shard), hasSize(docDount));
}

/** Asserts that the shard holds exactly the given document ids, in order. */
protected void assertDocs(IndexShard shard, String... ids) throws IOException {
    final Set<String> shardDocUIDs = getShardDocUIDs(shard);
    assertThat(shardDocUIDs, contains(ids));
    assertThat(shardDocUIDs, hasSize(ids.length));
}

/** Indexes a document with an empty JSON source. */
protected Engine.IndexResult indexDoc(IndexShard shard, String type, String id) throws IOException {
    return indexDoc(shard, type, id, "{}");
}

protected Engine.IndexResult indexDoc(IndexShard shard, String type, String id, String source) throws IOException {
    return indexDoc(shard, type, id, source, XContentType.JSON);
}

/**
 * Indexes a document on the shard, routing through the primary or replica code path
 * depending on the shard's routing entry.
 */
protected Engine.IndexResult indexDoc(IndexShard shard, String type, String id, String source,
                                      XContentType xContentType) throws IOException {
    SourceToParse sourceToParse = SourceToParse.source(shard.shardId().getIndexName(), type, id,
        new BytesArray(source), xContentType);
    if (shard.routingEntry().primary()) {
        return shard.applyIndexOperationOnPrimary(Versions.MATCH_ANY, VersionType.INTERNAL, sourceToParse,
            IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, getMappingUpdater(shard, type));
    } else {
        // Replica path: fabricate the next sequence number from the current max.
        return shard.applyIndexOperationOnReplica(shard.seqNoStats().getMaxSeqNo() + 1, shard.getPrimaryTerm(), 0,
            VersionType.EXTERNAL, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, sourceToParse,
            getMappingUpdater(shard, type));
    }
}

/** Returns a mapping-update consumer that merges dynamic mapping updates into the shard's metadata. */
protected Consumer<Mapping> getMappingUpdater(IndexShard shard, String type) {
    return update -> {
        try {
            updateMappings(shard, IndexMetaData.builder(shard.indexSettings().getIndexMetaData())
                .putMapping(type, update.toString()).build());
        } catch (IOException e) {
            ExceptionsHelper.reThrowIfNotNull(e);
        }
    };
}

/** Applies the given index metadata to the shard's settings and mapper service. */
protected void updateMappings(IndexShard shard, IndexMetaData indexMetadata) {
    shard.indexSettings().updateIndexMetaData(indexMetadata);
    shard.mapperService().merge(indexMetadata, MapperService.MergeReason.MAPPING_UPDATE, true);
}

/**
 * Deletes a document on the shard, routing through the primary or replica code path
 * depending on the shard's routing entry.
 */
protected Engine.DeleteResult deleteDoc(IndexShard shard, String type, String id) throws IOException {
    if (shard.routingEntry().primary()) {
        return shard.applyDeleteOperationOnPrimary(Versions.MATCH_ANY, type, id, VersionType.INTERNAL,
            update -> {});
    } else {
        return shard.applyDeleteOperationOnReplica(shard.seqNoStats().getMaxSeqNo() + 1, shard.getPrimaryTerm(),
            0L, type, id, VersionType.EXTERNAL, update -> {});
    }
}

protected void flushShard(IndexShard shard) {
    flushShard(shard, false);
}

protected void flushShard(IndexShard shard, boolean force) {
    shard.flush(new FlushRequest(shard.shardId().getIndexName()).force(force));
}
}
/* * * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package flex2.compiler.extensions; import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; import java.net.URL; import java.net.URLClassLoader; import java.util.Collections; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.jar.JarFile; import java.util.jar.Manifest; import flex2.compiler.util.ThreadLocalToolkit; import flex2.compiler.util.CompilerMessage.CompilerError; /** * This class manages external extensions, which can be used to add * additional functionality to the compiler. 
*/
public class ExtensionManager
{
    /**
     * The kinds of extension points a jar manifest may declare, keyed by
     * the manifest attribute that lists the implementing classes.
     */
    public static enum ExtensionType
    {
        PRELINK( "extensions-prelink" ), MXMLC( "extensions-mxmlc" ), COMPC( "extensions-compc" ),
        APPLICATION( "extensions-application" ), LIBRARY( "extensions-library" ),
        PRE_COMPILE( "extensions-pre-compile" );

        private String extensionTag;

        private ExtensionType( String extensionTag )
        {
            this.extensionTag = extensionTag;
        }

        /** The manifest main-attribute name that lists this extension kind. */
        String getExtensionTag()
        {
            return extensionTag;
        }
    }

    public static Set<IPreLinkExtension> getPreLinkExtensions( Map<String, List<String>> extensions )
    {
        return getExtension( ExtensionType.PRELINK, extensions, IPreLinkExtension.class );
    }

    public static Set<IMxmlcExtension> getMxmlcExtensions( Map<String, List<String>> extensions )
    {
        return getExtension( ExtensionType.MXMLC, extensions, IMxmlcExtension.class );
    }

    public static Set<ICompcExtension> getCompcExtensions( Map<String, List<String>> extensions )
    {
        return getExtension( ExtensionType.COMPC, extensions, ICompcExtension.class );
    }

    public static Set<ILibraryExtension> getLibraryExtensions( Map<String, List<String>> extensions )
    {
        return getExtension( ExtensionType.LIBRARY, extensions, ILibraryExtension.class );
    }

    public static Set<IApplicationExtension> getApplicationExtensions( Map<String, List<String>> extensions )
    {
        return getExtension( ExtensionType.APPLICATION, extensions, IApplicationExtension.class );
    }

    public static Set<IPreCompileExtension> getPreCompileExtensions( Map<String, List<String>> extensions )
    {
        return getExtension( ExtensionType.PRE_COMPILE, extensions, IPreCompileExtension.class );
    }

    /**
     * Loads every extension of the given kind from the jar files named in
     * <code>availableExtensions</code> (jar path -> configuration parameters).
     * Jars that are missing or unreadable are logged and skipped; so are jars
     * whose declared classes fail to load or instantiate.
     *
     * @param extensionType       which manifest attribute to read
     * @param availableExtensions jar path to parameter-list map; may be null
     * @param clazz               the extension interface the classes must implement
     * @return the instantiated extensions, in iteration order of the map's keys
     */
    private static <E> Set<E> getExtension( ExtensionType extensionType,
                                            Map<String, List<String>> availableExtensions, Class<E> clazz )
    {
        if ( availableExtensions == null )
        {
            return Collections.emptySet();
        }
        Set<String> files = availableExtensions.keySet();
        Set<E> extensions = new LinkedHashSet<E>();
        for ( String extensionPath : files )
        {
            List<String> parameters = availableExtensions.get( extensionPath );
            File extensionFile = new File( extensionPath );
            if ( !extensionFile.exists() )
            {
                // Include the offending path in the message; a no-arg
                // FileNotFoundException would carry a null message.
                ThreadLocalToolkit.getLogger().log( new InvalidExtensionFileError(
                    new FileNotFoundException( extensionPath ).getLocalizedMessage() ) );
                continue;
            }
            try
            {
                URLClassLoader loader;
                Manifest mf;
                try
                {
                    // toURI().toURL() is the non-deprecated replacement for File.toURL().
                    loader = new URLClassLoader( new URL[] { extensionFile.toURI().toURL() },
                                                 Thread.currentThread().getContextClassLoader() );
                    JarFile jar = new JarFile( extensionFile );
                    try
                    {
                        mf = jar.getManifest();
                    }
                    finally
                    {
                        // Close the jar once the manifest has been read; the
                        // URLClassLoader keeps its own handle for class loading.
                        jar.close();
                    }
                }
                catch ( IOException e )
                {
                    ThreadLocalToolkit.getLogger().log( new InvalidExtensionFileError( e.getLocalizedMessage() ) );
                    continue;
                }
                extensions.addAll( getClasses( mf, extensionType, loader, parameters, clazz ) );
            }
            catch ( CompilerError e )
            {
                ThreadLocalToolkit.getLogger().log( e );
                continue;
            }
        }
        return extensions;
    }

    /**
     * Instantiates the classes listed (colon-separated) under the extension
     * type's manifest attribute, configures those that are
     * {@link IConfigurableExtension}, and returns them.
     *
     * @throws CompilerError if a class is missing, cannot be instantiated, or
     *                       does not implement <code>clazz</code>
     */
    @SuppressWarnings( "unchecked" )
    private static <E> Set<E> getClasses( Manifest mf, ExtensionType extensionType, URLClassLoader loader,
                                          List<String> parameters, Class<E> clazz )
        throws CompilerError
    {
        String extensionsStr = mf.getMainAttributes().getValue( extensionType.getExtensionTag() );
        if ( extensionsStr == null )
        {
            return Collections.emptySet();
        }
        String[] extNames = extensionsStr.split( ":" );
        Set<E> extensions = new LinkedHashSet<E>();
        for ( int j = 0; j < extNames.length; j++ )
        {
            String extName = extNames[j];
            Class<?> extClass;
            try
            {
                extClass = loader.loadClass( extName );
            }
            catch ( ClassNotFoundException e )
            {
                throw new UnexistentExtensionError( extName );
            }
            if ( clazz.isAssignableFrom( extClass ) )
            {
                E extInstance;
                try
                {
                    extInstance = (E) extClass.newInstance();
                }
                catch ( Exception e )
                {
                    throw new FailToInstanciateError( e.getMessage() );
                }
                if ( extInstance instanceof IConfigurableExtension )
                {
                    IConfigurableExtension configExtension = (IConfigurableExtension) extInstance;
                    configExtension.configure( parameters );
                }
                extensions.add( extInstance );
            }
            else
            {
                throw new InvalidExtensionKindError( extClass, clazz );
            }
        }
        return extensions;
    }

    /** Reported when an extension jar is missing or cannot be read. */
    public static class InvalidExtensionFileError
        extends CompilerError
    {
        private static final long serialVersionUID = -1466423208365841681L;

        public String errorMessage;

        public InvalidExtensionFileError( String errorMessage )
        {
            this.errorMessage = errorMessage;
        }
    }

    /** Reported when an extension class cannot be instantiated. */
    public static class FailToInstanciateError
        extends CompilerError
    {
        private static final long serialVersionUID = -4329041275278609962L;

        public String errorMessage;

        public FailToInstanciateError( String errorMessage )
        {
            this.errorMessage = errorMessage;
        }
    }

    /** Reported when a class named in the manifest is not on the jar's classpath. */
    public static class UnexistentExtensionError
        extends CompilerError
    {
        private static final long serialVersionUID = 7778107370187386124L;

        public String extensionClassName;

        public UnexistentExtensionError( String extName )
        {
            this.extensionClassName = extName;
        }
    }

    /** Reported when a declared extension class does not implement the expected interface. */
    public static class InvalidExtensionKindError
        extends CompilerError
    {
        private static final long serialVersionUID = -3190757647243331631L;

        public Class<?> extensionClass;

        public Class<?> parentClass;

        public InvalidExtensionKindError( Class<?> extClass, Class<?> clazz )
        {
            this.extensionClass = extClass;
            this.parentClass = clazz;
        }
    }
}
/** * This class is generated by jOOQ */ package io.cattle.platform.core.model.tables.records; /** * This class is generated by jOOQ. */ @javax.annotation.Generated(value = { "http://www.jooq.org", "3.3.0" }, comments = "This class is generated by jOOQ") @java.lang.SuppressWarnings({ "all", "unchecked", "rawtypes" }) @javax.persistence.Entity @javax.persistence.Table(name = "environment", schema = "cattle") public class StackRecord extends org.jooq.impl.UpdatableRecordImpl<io.cattle.platform.core.model.tables.records.StackRecord> implements io.cattle.platform.db.jooq.utils.TableRecordJaxb, org.jooq.Record15<java.lang.Long, java.lang.String, java.lang.Long, java.lang.String, java.lang.String, java.lang.String, java.lang.String, java.util.Date, java.util.Date, java.util.Date, java.util.Map<String,Object>, java.lang.String, java.lang.String, java.lang.String, java.lang.Boolean>, io.cattle.platform.core.model.Stack { private static final long serialVersionUID = -2097717329; /** * Setter for <code>cattle.environment.id</code>. */ @Override public void setId(java.lang.Long value) { setValue(0, value); } /** * Getter for <code>cattle.environment.id</code>. */ @javax.persistence.Id @javax.persistence.Column(name = "id", unique = true, nullable = false, precision = 19) @Override public java.lang.Long getId() { return (java.lang.Long) getValue(0); } /** * Setter for <code>cattle.environment.name</code>. */ @Override public void setName(java.lang.String value) { setValue(1, value); } /** * Getter for <code>cattle.environment.name</code>. */ @javax.persistence.Column(name = "name", length = 255) @Override public java.lang.String getName() { return (java.lang.String) getValue(1); } /** * Setter for <code>cattle.environment.account_id</code>. */ @Override public void setAccountId(java.lang.Long value) { setValue(2, value); } /** * Getter for <code>cattle.environment.account_id</code>. 
*/ @javax.persistence.Column(name = "account_id", precision = 19) @Override public java.lang.Long getAccountId() { return (java.lang.Long) getValue(2); } /** * Setter for <code>cattle.environment.kind</code>. */ @Override public void setKind(java.lang.String value) { setValue(3, value); } /** * Getter for <code>cattle.environment.kind</code>. */ @javax.persistence.Column(name = "kind", nullable = false, length = 255) @Override public java.lang.String getKind() { return (java.lang.String) getValue(3); } /** * Setter for <code>cattle.environment.uuid</code>. */ @Override public void setUuid(java.lang.String value) { setValue(4, value); } /** * Getter for <code>cattle.environment.uuid</code>. */ @javax.persistence.Column(name = "uuid", unique = true, nullable = false, length = 128) @Override public java.lang.String getUuid() { return (java.lang.String) getValue(4); } /** * Setter for <code>cattle.environment.description</code>. */ @Override public void setDescription(java.lang.String value) { setValue(5, value); } /** * Getter for <code>cattle.environment.description</code>. */ @javax.persistence.Column(name = "description", length = 1024) @Override public java.lang.String getDescription() { return (java.lang.String) getValue(5); } /** * Setter for <code>cattle.environment.state</code>. */ @Override public void setState(java.lang.String value) { setValue(6, value); } /** * Getter for <code>cattle.environment.state</code>. */ @javax.persistence.Column(name = "state", nullable = false, length = 128) @Override public java.lang.String getState() { return (java.lang.String) getValue(6); } /** * Setter for <code>cattle.environment.created</code>. */ @Override public void setCreated(java.util.Date value) { setValue(7, value); } /** * Getter for <code>cattle.environment.created</code>. */ @javax.persistence.Column(name = "created") @Override public java.util.Date getCreated() { return (java.util.Date) getValue(7); } /** * Setter for <code>cattle.environment.removed</code>. 
*/ @Override public void setRemoved(java.util.Date value) { setValue(8, value); } /** * Getter for <code>cattle.environment.removed</code>. */ @javax.persistence.Column(name = "removed") @Override public java.util.Date getRemoved() { return (java.util.Date) getValue(8); } /** * Setter for <code>cattle.environment.remove_time</code>. */ @Override public void setRemoveTime(java.util.Date value) { setValue(9, value); } /** * Getter for <code>cattle.environment.remove_time</code>. */ @javax.persistence.Column(name = "remove_time") @Override public java.util.Date getRemoveTime() { return (java.util.Date) getValue(9); } /** * Setter for <code>cattle.environment.data</code>. */ @Override public void setData(java.util.Map<String,Object> value) { setValue(10, value); } /** * Getter for <code>cattle.environment.data</code>. */ @javax.persistence.Column(name = "data", length = 16777215) @Override public java.util.Map<String,Object> getData() { return (java.util.Map<String,Object>) getValue(10); } /** * Setter for <code>cattle.environment.external_id</code>. */ @Override public void setExternalId(java.lang.String value) { setValue(11, value); } /** * Getter for <code>cattle.environment.external_id</code>. */ @javax.persistence.Column(name = "external_id", length = 128) @Override public java.lang.String getExternalId() { return (java.lang.String) getValue(11); } /** * Setter for <code>cattle.environment.health_state</code>. */ @Override public void setHealthState(java.lang.String value) { setValue(12, value); } /** * Getter for <code>cattle.environment.health_state</code>. */ @javax.persistence.Column(name = "health_state", length = 128) @Override public java.lang.String getHealthState() { return (java.lang.String) getValue(12); } /** * Setter for <code>cattle.environment.folder</code>. */ @Override public void setGroup(java.lang.String value) { setValue(13, value); } /** * Getter for <code>cattle.environment.folder</code>. 
*/ @javax.persistence.Column(name = "folder", length = 255) @Override public java.lang.String getGroup() { return (java.lang.String) getValue(13); } /** * Setter for <code>cattle.environment.system</code>. */ @Override public void setSystem(java.lang.Boolean value) { setValue(14, value); } /** * Getter for <code>cattle.environment.system</code>. */ @javax.persistence.Column(name = "system", nullable = false, precision = 1) @Override public java.lang.Boolean getSystem() { return (java.lang.Boolean) getValue(14); } // ------------------------------------------------------------------------- // Primary key information // ------------------------------------------------------------------------- /** * {@inheritDoc} */ @Override public org.jooq.Record1<java.lang.Long> key() { return (org.jooq.Record1) super.key(); } // ------------------------------------------------------------------------- // Record15 type implementation // ------------------------------------------------------------------------- /** * {@inheritDoc} */ @Override public org.jooq.Row15<java.lang.Long, java.lang.String, java.lang.Long, java.lang.String, java.lang.String, java.lang.String, java.lang.String, java.util.Date, java.util.Date, java.util.Date, java.util.Map<String,Object>, java.lang.String, java.lang.String, java.lang.String, java.lang.Boolean> fieldsRow() { return (org.jooq.Row15) super.fieldsRow(); } /** * {@inheritDoc} */ @Override public org.jooq.Row15<java.lang.Long, java.lang.String, java.lang.Long, java.lang.String, java.lang.String, java.lang.String, java.lang.String, java.util.Date, java.util.Date, java.util.Date, java.util.Map<String,Object>, java.lang.String, java.lang.String, java.lang.String, java.lang.Boolean> valuesRow() { return (org.jooq.Row15) super.valuesRow(); } /** * {@inheritDoc} */ @Override public org.jooq.Field<java.lang.Long> field1() { return io.cattle.platform.core.model.tables.StackTable.STACK.ID; } /** * {@inheritDoc} */ @Override public 
org.jooq.Field<java.lang.String> field2() { return io.cattle.platform.core.model.tables.StackTable.STACK.NAME; } /** * {@inheritDoc} */ @Override public org.jooq.Field<java.lang.Long> field3() { return io.cattle.platform.core.model.tables.StackTable.STACK.ACCOUNT_ID; } /** * {@inheritDoc} */ @Override public org.jooq.Field<java.lang.String> field4() { return io.cattle.platform.core.model.tables.StackTable.STACK.KIND; } /** * {@inheritDoc} */ @Override public org.jooq.Field<java.lang.String> field5() { return io.cattle.platform.core.model.tables.StackTable.STACK.UUID; } /** * {@inheritDoc} */ @Override public org.jooq.Field<java.lang.String> field6() { return io.cattle.platform.core.model.tables.StackTable.STACK.DESCRIPTION; } /** * {@inheritDoc} */ @Override public org.jooq.Field<java.lang.String> field7() { return io.cattle.platform.core.model.tables.StackTable.STACK.STATE; } /** * {@inheritDoc} */ @Override public org.jooq.Field<java.util.Date> field8() { return io.cattle.platform.core.model.tables.StackTable.STACK.CREATED; } /** * {@inheritDoc} */ @Override public org.jooq.Field<java.util.Date> field9() { return io.cattle.platform.core.model.tables.StackTable.STACK.REMOVED; } /** * {@inheritDoc} */ @Override public org.jooq.Field<java.util.Date> field10() { return io.cattle.platform.core.model.tables.StackTable.STACK.REMOVE_TIME; } /** * {@inheritDoc} */ @Override public org.jooq.Field<java.util.Map<String,Object>> field11() { return io.cattle.platform.core.model.tables.StackTable.STACK.DATA; } /** * {@inheritDoc} */ @Override public org.jooq.Field<java.lang.String> field12() { return io.cattle.platform.core.model.tables.StackTable.STACK.EXTERNAL_ID; } /** * {@inheritDoc} */ @Override public org.jooq.Field<java.lang.String> field13() { return io.cattle.platform.core.model.tables.StackTable.STACK.HEALTH_STATE; } /** * {@inheritDoc} */ @Override public org.jooq.Field<java.lang.String> field14() { return io.cattle.platform.core.model.tables.StackTable.STACK.GROUP; 
} /** * {@inheritDoc} */ @Override public org.jooq.Field<java.lang.Boolean> field15() { return io.cattle.platform.core.model.tables.StackTable.STACK.SYSTEM; } /** * {@inheritDoc} */ @Override public java.lang.Long value1() { return getId(); } /** * {@inheritDoc} */ @Override public java.lang.String value2() { return getName(); } /** * {@inheritDoc} */ @Override public java.lang.Long value3() { return getAccountId(); } /** * {@inheritDoc} */ @Override public java.lang.String value4() { return getKind(); } /** * {@inheritDoc} */ @Override public java.lang.String value5() { return getUuid(); } /** * {@inheritDoc} */ @Override public java.lang.String value6() { return getDescription(); } /** * {@inheritDoc} */ @Override public java.lang.String value7() { return getState(); } /** * {@inheritDoc} */ @Override public java.util.Date value8() { return getCreated(); } /** * {@inheritDoc} */ @Override public java.util.Date value9() { return getRemoved(); } /** * {@inheritDoc} */ @Override public java.util.Date value10() { return getRemoveTime(); } /** * {@inheritDoc} */ @Override public java.util.Map<String,Object> value11() { return getData(); } /** * {@inheritDoc} */ @Override public java.lang.String value12() { return getExternalId(); } /** * {@inheritDoc} */ @Override public java.lang.String value13() { return getHealthState(); } /** * {@inheritDoc} */ @Override public java.lang.String value14() { return getGroup(); } /** * {@inheritDoc} */ @Override public java.lang.Boolean value15() { return getSystem(); } /** * {@inheritDoc} */ @Override public StackRecord value1(java.lang.Long value) { setId(value); return this; } /** * {@inheritDoc} */ @Override public StackRecord value2(java.lang.String value) { setName(value); return this; } /** * {@inheritDoc} */ @Override public StackRecord value3(java.lang.Long value) { setAccountId(value); return this; } /** * {@inheritDoc} */ @Override public StackRecord value4(java.lang.String value) { setKind(value); return this; } /** * 
{@inheritDoc} */ @Override public StackRecord value5(java.lang.String value) { setUuid(value); return this; } /** * {@inheritDoc} */ @Override public StackRecord value6(java.lang.String value) { setDescription(value); return this; } /** * {@inheritDoc} */ @Override public StackRecord value7(java.lang.String value) { setState(value); return this; } /** * {@inheritDoc} */ @Override public StackRecord value8(java.util.Date value) { setCreated(value); return this; } /** * {@inheritDoc} */ @Override public StackRecord value9(java.util.Date value) { setRemoved(value); return this; } /** * {@inheritDoc} */ @Override public StackRecord value10(java.util.Date value) { setRemoveTime(value); return this; } /** * {@inheritDoc} */ @Override public StackRecord value11(java.util.Map<String,Object> value) { setData(value); return this; } /** * {@inheritDoc} */ @Override public StackRecord value12(java.lang.String value) { setExternalId(value); return this; } /** * {@inheritDoc} */ @Override public StackRecord value13(java.lang.String value) { setHealthState(value); return this; } /** * {@inheritDoc} */ @Override public StackRecord value14(java.lang.String value) { setGroup(value); return this; } /** * {@inheritDoc} */ @Override public StackRecord value15(java.lang.Boolean value) { setSystem(value); return this; } /** * {@inheritDoc} */ @Override public StackRecord values(java.lang.Long value1, java.lang.String value2, java.lang.Long value3, java.lang.String value4, java.lang.String value5, java.lang.String value6, java.lang.String value7, java.util.Date value8, java.util.Date value9, java.util.Date value10, java.util.Map<String,Object> value11, java.lang.String value12, java.lang.String value13, java.lang.String value14, java.lang.Boolean value15) { return this; } // ------------------------------------------------------------------------- // FROM and INTO // ------------------------------------------------------------------------- /** * {@inheritDoc} */ @Override public void 
from(io.cattle.platform.core.model.Stack from) { setId(from.getId()); setName(from.getName()); setAccountId(from.getAccountId()); setKind(from.getKind()); setUuid(from.getUuid()); setDescription(from.getDescription()); setState(from.getState()); setCreated(from.getCreated()); setRemoved(from.getRemoved()); setRemoveTime(from.getRemoveTime()); setData(from.getData()); setExternalId(from.getExternalId()); setHealthState(from.getHealthState()); setGroup(from.getGroup()); setSystem(from.getSystem()); } /** * {@inheritDoc} */ @Override public <E extends io.cattle.platform.core.model.Stack> E into(E into) { into.from(this); return into; } // ------------------------------------------------------------------------- // Constructors // ------------------------------------------------------------------------- /** * Create a detached StackRecord */ public StackRecord() { super(io.cattle.platform.core.model.tables.StackTable.STACK); } /** * Create a detached, initialised StackRecord */ public StackRecord(java.lang.Long id, java.lang.String name, java.lang.Long accountId, java.lang.String kind, java.lang.String uuid, java.lang.String description, java.lang.String state, java.util.Date created, java.util.Date removed, java.util.Date removeTime, java.util.Map<String,Object> data, java.lang.String externalId, java.lang.String healthState, java.lang.String folder, java.lang.Boolean system) { super(io.cattle.platform.core.model.tables.StackTable.STACK); setValue(0, id); setValue(1, name); setValue(2, accountId); setValue(3, kind); setValue(4, uuid); setValue(5, description); setValue(6, state); setValue(7, created); setValue(8, removed); setValue(9, removeTime); setValue(10, data); setValue(11, externalId); setValue(12, healthState); setValue(13, folder); setValue(14, system); } }
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with this
 * work for additional information regarding copyright ownership. The ASF
 * licenses this file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package io.hops.transaction.lock;

import io.hops.metadata.common.entity.Variable;
import io.hops.metadata.hdfs.entity.INodeIdentifier;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.server.namenode.NameNode;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

/**
 * Singleton factory for every transaction {@code Lock} flavor used by the
 * HopsFS lock manager. Callers obtain the instance via {@link #getInstance()}
 * and build lock lists from the factory methods below.
 */
public class LockFactory {

  private final static LockFactory instance = new LockFactory();

  /**
   * Shorthand selectors for the block-related entity locks accepted by
   * {@link #getBlockRelated(BLK...)} and
   * {@link #getSqlBatchedBlocksRelated(BLK...)}.
   */
  public static enum BLK {
    /**
     * Replica
     */
    RE,
    /**
     * CorruptReplica
     */
    CR,
    /**
     * ExcessReplica
     */
    ER,
    /**
     * UnderReplicated
     */
    UR,
    /**
     * ReplicaUnderConstruction
     */
    UC,
    /**
     * InvalidatedBlock
     */
    IV,
    /**
     * PendingBlock
     */
    PE
  }

  // Singleton: instantiate only through getInstance().
  private LockFactory() {
  }

  /** @return the process-wide factory instance */
  public static LockFactory getInstance() {
    return instance;
  }

  /** Lock on the checksum of one block of {@code target}. */
  public Lock getBlockChecksumLock(String target, int blockIndex) {
    return new BlockChecksumLock(target, blockIndex);
  }

  /** Lock covering all blocks of the already-locked inodes. */
  public Lock getBlockLock() {
    return new BlockLock();
  }

  /** Lock covering the blocks of {@code inode} plus the single block {@code blockId}. */
  public Lock getBlockLock(long blockId, INodeIdentifier inode) {
    return new BlockLock(blockId, inode);
  }

  public Lock getReplicaLock() {
    return new BlockRelatedLock(Lock.Type.Replica);
  }

  public Lock getCorruptReplicaLock() {
    return new BlockRelatedLock(Lock.Type.CorruptReplica);
  }

  public Lock getExcessReplicaLock() {
    return new BlockRelatedLock(Lock.Type.ExcessReplica);
  }

  // NOTE: method name misspelled ("Replicat") but kept for caller compatibility.
  public Lock getReplicatUnderConstructionLock() {
    return new BlockRelatedLock(Lock.Type.ReplicaUnderConstruction);
  }

  public Lock getInvalidatedBlockLock() {
    return new BlockRelatedLock(Lock.Type.InvalidatedBlock);
  }

  public Lock getUnderReplicatedBlockLock() {
    return new BlockRelatedLock(Lock.Type.UnderReplicatedBlock);
  }

  public Lock getPendingBlockLock() {
    return new BlockRelatedLock(Lock.Type.PendingBlock);
  }

  /** SQL-batched variant of the block lock (single batched query instead of per-block reads). */
  public Lock getSqlBatchedBlocksLock() {
    return new SqlBatchedBlocksLock();
  }

  public Lock getSqlBatchedReplicasLock() {
    return new SqlBatchedBlocksRelatedLock(Lock.Type.Replica);
  }

  public Lock getSqlBatchedCorruptReplicasLock() {
    return new SqlBatchedBlocksRelatedLock(Lock.Type.CorruptReplica);
  }

  public Lock getSqlBatchedExcessReplicasLock() {
    return new SqlBatchedBlocksRelatedLock(Lock.Type.ExcessReplica);
  }

  public Lock getSqlBatchedReplicasUnderConstructionLock() {
    return new SqlBatchedBlocksRelatedLock(Lock.Type.ReplicaUnderConstruction);
  }

  public Lock getSqlBatchedInvalidatedBlocksLock() {
    return new SqlBatchedBlocksRelatedLock(Lock.Type.InvalidatedBlock);
  }

  public Lock getSqlBatchedUnderReplicatedBlocksLock() {
    return new SqlBatchedBlocksRelatedLock(Lock.Type.UnderReplicatedBlock);
  }

  public Lock getSqlBatchedPendingBlocksLock() {
    return new SqlBatchedBlocksRelatedLock(Lock.Type.PendingBlock);
  }

  public Lock getIndividualBlockLock(long blockId, INodeIdentifier inode) {
    return new IndividualBlockLock(blockId, inode);
  }

  public Lock getBatchedINodesLock(List<INodeIdentifier> inodeIdentifiers) {
    return new BatchedINodeLock(inodeIdentifiers);
  }

  public Lock getIndividualINodeLock(
      TransactionLockTypes.INodeLockType lockType,
      INodeIdentifier inodeIdentifier, boolean readUpPathInodes) {
    return new IndividualINodeLock(lockType, inodeIdentifier, readUpPathInodes);
  }

  public Lock getIndividualINodeLock(
      TransactionLockTypes.INodeLockType lockType,
      INodeIdentifier inodeIdentifier) {
    return new IndividualINodeLock(lockType, inodeIdentifier);
  }

  /**
   * Full-control inode path lock.
   *
   * @param skipReadingQuotaAttr skip loading quota attributes with the inodes
   * @param ignoreLocalSubtreeLocks resolve even under local subtree locks
   */
  public Lock getINodeLock(boolean skipReadingQuotaAttr, NameNode nameNode,
      TransactionLockTypes.INodeLockType lockType,
      TransactionLockTypes.INodeResolveType resolveType, boolean resolveLink,
      boolean ignoreLocalSubtreeLocks, String... paths) {
    return new INodeLock(lockType, resolveType, resolveLink,
        ignoreLocalSubtreeLocks, skipReadingQuotaAttr, nameNode.getId(),
        nameNode.getActiveNameNodes().getActiveNodes(), paths);
  }

  /** As above but always reading quota attributes. */
  public Lock getINodeLock(NameNode nameNode,
      TransactionLockTypes.INodeLockType lockType,
      TransactionLockTypes.INodeResolveType resolveType, boolean resolveLink,
      boolean ignoreLocalSubtreeLocks, String... paths) {
    return new INodeLock(lockType, resolveType, resolveLink,
        ignoreLocalSubtreeLocks, false, nameNode.getId(),
        nameNode.getActiveNameNodes().getActiveNodes(), paths);
  }

  public Lock getINodeLock(NameNode nameNode,
      TransactionLockTypes.INodeLockType lockType,
      TransactionLockTypes.INodeResolveType resolveType, boolean resolveLink,
      String... paths) {
    return new INodeLock(false, lockType, resolveType, resolveLink,
        nameNode.getActiveNameNodes().getActiveNodes(), paths);
  }

  public Lock getINodeLock(NameNode nameNode,
      TransactionLockTypes.INodeLockType lockType,
      TransactionLockTypes.INodeResolveType resolveType, String... paths) {
    return new INodeLock(lockType, resolveType,
        nameNode.getActiveNameNodes().getActiveNodes(), paths);
  }

  public Lock getINodeLock(boolean skipReadingQuotaAttr, NameNode nameNode,
      TransactionLockTypes.INodeLockType lockType,
      TransactionLockTypes.INodeResolveType resolveType, String... paths) {
    // resolveLink=true, ignoreLocalSubtreeLocks=false for this overload.
    return new INodeLock(lockType, resolveType, true, false,
        skipReadingQuotaAttr, nameNode.getId(),
        nameNode.getActiveNameNodes().getActiveNodes(), paths);
  }

  public Lock getINodeLock(boolean skipReadingQuotaAttr, NameNode nameNode,
      TransactionLockTypes.INodeLockType lockType,
      TransactionLockTypes.INodeResolveType resolveType, boolean resolveLink,
      String... paths) {
    return new INodeLock(lockType, resolveType, resolveLink, false,
        skipReadingQuotaAttr, nameNode.getId(),
        nameNode.getActiveNameNodes().getActiveNodes(), paths);
  }

  /** Lock both endpoints of a rename ({@code src} and {@code dst}). */
  public Lock getRenameINodeLock(NameNode nameNode,
      TransactionLockTypes.INodeLockType lockType,
      TransactionLockTypes.INodeResolveType resolveType,
      boolean ignoreLocalSubtreeLocks, String src, String dst) {
    return new RenameINodeLock(lockType, resolveType, ignoreLocalSubtreeLocks,
        nameNode.getId(), nameNode.getActiveNameNodes().getActiveNodes(),
        src, dst);
  }

  public Lock getRenameINodeLock(NameNode nameNode,
      TransactionLockTypes.INodeLockType lockType,
      TransactionLockTypes.INodeResolveType resolveType, String src,
      String dst) {
    return new RenameINodeLock(lockType, resolveType,
        nameNode.getActiveNameNodes().getActiveNodes(), src, dst);
  }

  /** Rename lock using legacy (deprecated rename) semantics — trailing {@code true} flag. */
  public Lock getLegacyRenameINodeLock(NameNode nameNode,
      TransactionLockTypes.INodeLockType lockType,
      TransactionLockTypes.INodeResolveType resolveType,
      boolean ignoreLocalSubtreeLocks, String src, String dst) {
    return new RenameINodeLock(lockType, resolveType, ignoreLocalSubtreeLocks,
        nameNode.getId(), nameNode.getActiveNameNodes().getActiveNodes(),
        src, dst, true);
  }

  public Lock getLegacyRenameINodeLock(boolean skipReadingQuotaAttr,
      NameNode nameNode, TransactionLockTypes.INodeLockType lockType,
      TransactionLockTypes.INodeResolveType resolveType,
      boolean ignoreLocalSubtreeLocks, String src, String dst) {
    return new RenameINodeLock(skipReadingQuotaAttr, lockType, resolveType,
        ignoreLocalSubtreeLocks, nameNode.getId(),
        nameNode.getActiveNameNodes().getActiveNodes(), src, dst, true);
  }

  public Lock getLegacyRenameINodeLock(boolean skipReadingQuotaAttr,
      NameNode nameNode, TransactionLockTypes.INodeLockType lockType,
      TransactionLockTypes.INodeResolveType resolveType, String src,
      String dst) {
    return new RenameINodeLock(skipReadingQuotaAttr, lockType, resolveType,
        nameNode.getActiveNameNodes().getActiveNodes(), src, dst, true);
  }

  public Lock getLegacyRenameINodeLock(NameNode nameNode,
      TransactionLockTypes.INodeLockType lockType,
      TransactionLockTypes.INodeResolveType resolveType, String src,
      String dst) {
    return new RenameINodeLock(lockType, resolveType,
        nameNode.getActiveNameNodes().getActiveNodes(), src, dst, true);
  }

  public Lock getLeaseLock(TransactionLockTypes.LockType lockType,
      String leaseHolder) {
    return new LeaseLock(lockType, leaseHolder);
  }

  public Lock getLeaseLock(TransactionLockTypes.LockType lockType) {
    return new LeaseLock(lockType);
  }

  public Lock getLeasePathLock(TransactionLockTypes.LockType lockType,
      int expectedCount) {
    return new LeasePathLock(lockType, expectedCount);
  }

  public Lock getLeasePathLock(TransactionLockTypes.LockType lockType) {
    return new LeasePathLock(lockType);
  }

  public Lock getLeasePathLock(TransactionLockTypes.LockType lockType,
      String src) {
    return new LeasePathLock(lockType, src);
  }

  public Lock getNameNodeLeaseLock(TransactionLockTypes.LockType lockType) {
    return new NameNodeLeaseLock(lockType);
  }

  public Lock getQuotaUpdateLock(boolean includeChildren, String... targets) {
    return new QuotaUpdateLock(includeChildren, targets);
  }

  public Lock getQuotaUpdateLock(String... targets) {
    return new QuotaUpdateLock(targets);
  }

  /**
   * Lock several variables at once; {@code finders[i]} is locked with
   * {@code lockTypes[i]}, so both arrays must have the same length.
   */
  public Lock getVariableLock(Variable.Finder[] finders,
      TransactionLockTypes.LockType[] lockTypes) {
    assert finders.length == lockTypes.length;
    VariablesLock lock = new VariablesLock();
    for (int i = 0; i < finders.length; i++) {
      lock.addVariable(finders[i], lockTypes[i]);
    }
    return lock;
  }

  public Lock getVariableLock(Variable.Finder finder,
      TransactionLockTypes.LockType lockType) {
    VariablesLock lock = new VariablesLock();
    lock.addVariable(finder, lockType);
    return lock;
  }

  /**
   * Locks needed while processing a block report.
   *
   * @param storageId currently unused (the per-storage invalidated-blocks lock
   *                  below is disabled); kept for interface compatibility
   */
  public List<Lock> getBlockReportingLocks(long[] blockIds, int[] inodeIds,
      long[] unresolvedBlks, int storageId) {
    ArrayList<Lock> list = new ArrayList<>(3);
    list.add(new BatchedBlockLock(blockIds, inodeIds, unresolvedBlks));
    //list.add(new BatchedBlocksRelatedLock.BatchedInvalidatedBlocksLock(storageId));
    return list;
  }

  public Lock getEncodingStatusLock(TransactionLockTypes.LockType lockType,
      String... targets) {
    return new BaseEncodingStatusLock.EncodingStatusLock(lockType, targets);
  }

  public Lock getEncodingStatusLock(boolean includeChildren,
      TransactionLockTypes.LockType lockType, String... targets) {
    return new BaseEncodingStatusLock.EncodingStatusLock(includeChildren,
        lockType, targets);
  }

  // NOTE: method name misspelled ("Indivdual") but kept for caller compatibility.
  public Lock getIndivdualEncodingStatusLock(
      TransactionLockTypes.LockType lockType, int inodeId) {
    return new BaseEncodingStatusLock.IndividualEncodingStatusLock(lockType,
        inodeId);
  }

  public Lock getSubTreeOpsLock(TransactionLockTypes.LockType lockType,
      String pathPrefix) {
    return new SubTreeOpLock(lockType, pathPrefix);
  }

  public Lock getIndividualHashBucketLock(int storageId, int bucketId) {
    return new IndividualHashBucketLock(storageId, bucketId);
  }

  public Lock getLastBlockHashBucketsLock() {
    return new LastBlockReplicasHashBucketLock();
  }

  /** Per-block (row-by-row) locks for the requested block-related entities. */
  public Collection<Lock> getBlockRelated(BLK... relatedBlks) {
    ArrayList<Lock> list = new ArrayList<>(relatedBlks.length);
    for (BLK b : relatedBlks) {
      switch (b) {
        case RE:
          list.add(getReplicaLock());
          break;
        case CR:
          list.add(getCorruptReplicaLock());
          break;
        case IV:
          list.add(getInvalidatedBlockLock());
          break;
        case PE:
          list.add(getPendingBlockLock());
          break;
        case UC:
          list.add(getReplicatUnderConstructionLock());
          break;
        case UR:
          list.add(getUnderReplicatedBlockLock());
          break;
        case ER:
          list.add(getExcessReplicaLock());
          break;
      }
    }
    return list;
  }

  /** SQL-batched locks for the requested block-related entities. */
  public Collection<Lock> getSqlBatchedBlocksRelated(BLK... relatedBlks) {
    ArrayList<Lock> list = new ArrayList<>(relatedBlks.length);
    for (BLK b : relatedBlks) {
      switch (b) {
        case RE:
          list.add(getSqlBatchedReplicasLock());
          break;
        case CR:
          list.add(getSqlBatchedCorruptReplicasLock());
          break;
        case IV:
          list.add(getSqlBatchedInvalidatedBlocksLock());
          break;
        case PE:
          // FIX: previously added the invalidated-blocks lock here
          // (copy-paste from the IV case), so PE requests silently locked
          // the wrong table.
          list.add(getSqlBatchedPendingBlocksLock());
          break;
        case UC:
          list.add(getSqlBatchedReplicasUnderConstructionLock());
          break;
        case UR:
          // FIX: previously added the non-batched lock
          // (getUnderReplicatedBlockLock) in this batched context.
          list.add(getSqlBatchedUnderReplicatedBlocksLock());
          break;
        case ER:
          list.add(getSqlBatchedExcessReplicasLock());
          break;
      }
    }
    return list;
  }

  public Lock getLastTwoBlocksLock(String src) {
    return new LastTwoBlocksLock(src);
  }

  /** Push lock-related settings from {@code conf} into the base lock classes. */
  public void setConfiguration(Configuration conf) {
    BaseINodeLock.enableSetPartitionKey(
        conf.getBoolean(DFSConfigKeys.DFS_SET_PARTITION_KEY_ENABLED,
            DFSConfigKeys.DFS_SET_PARTITION_KEY_ENABLED_DEFAULT));
    BaseINodeLock.enableSetRandomPartitionKey(conf.getBoolean(DFSConfigKeys
        .DFS_SET_RANDOM_PARTITION_KEY_ENABLED, DFSConfigKeys
        .DFS_SET_RANDOM_PARTITION_KEY_ENABLED_DEFAULT));
    BaseINodeLock.setDefaultLockType(getPrecedingPathLockType(conf));
  }

  /**
   * Resolve the configured lock type for ancestor path components.
   *
   * @throws IllegalStateException if the configured value is neither
   *         "READ" nor "READ_COMMITTED"
   */
  private TransactionLockTypes.INodeLockType getPrecedingPathLockType(
      Configuration conf) {
    String val = conf.get(DFSConfigKeys.DFS_STORAGE_ANCESTOR_LOCK_TYPE,
        DFSConfigKeys.DFS_STORAGE_ANCESTOR_LOCK_TYPE_DEFAULT);
    if (val.equalsIgnoreCase("READ")) {
      return TransactionLockTypes.INodeLockType.READ;
    } else if (val.equalsIgnoreCase("READ_COMMITTED")) {
      return TransactionLockTypes.INodeLockType.READ_COMMITTED;
    } else {
      throw new IllegalStateException(
          "Critical Parameter is not defined. Set " +
              DFSConfigKeys.DFS_STORAGE_ANCESTOR_LOCK_TYPE);
    }
  }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.search.aggregations; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorStreams; import org.elasticsearch.search.aggregations.support.AggregationPath; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; /** * An internal implementation of {@link Aggregation}. Serves as a base class for all aggregation implementations. 
 */
public abstract class InternalAggregation implements Aggregation, ToXContent, Streamable, NamedWriteable { // NORELEASE remove Streamable

    /**
     * The aggregation type that holds all the string types that are associated with an aggregation:
     * <ul>
     * <li>name - used as the parser type</li>
     * <li>stream - used as the stream type</li>
     * </ul>
     */
    public static class Type {

        private String name;
        private BytesReference stream;

        // Single-arg form: the stream key defaults to the type name itself.
        public Type(String name) {
            this(name, new BytesArray(name));
        }

        public Type(String name, String stream) {
            this(name, new BytesArray(stream));
        }

        public Type(String name, BytesReference stream) {
            this.name = name;
            this.stream = stream;
        }

        /**
         * @return The name of the type of aggregation. This is the key for parsing the aggregation from XContent and is the name of the
         * aggregation's builder when serialized.
         */
        public String name() {
            return name;
        }

        /**
         * @return The name of the stream type (used for registering the aggregation stream
         * (see {@link AggregationStreams#registerStream(AggregationStreams.Stream, BytesReference...)}).
         */
        public BytesReference stream() {
            return stream;
        }

        @Override
        public String toString() {
            return name;
        }
    }

    /**
     * Per-reduce-phase services handed to {@link InternalAggregation#doReduce}:
     * big-array allocator, script service, and the current cluster state.
     */
    public static class ReduceContext {

        private final BigArrays bigArrays;
        private final ScriptService scriptService;
        private final ClusterState clusterState;

        public ReduceContext(BigArrays bigArrays, ScriptService scriptService, ClusterState clusterState) {
            this.bigArrays = bigArrays;
            this.scriptService = scriptService;
            this.clusterState = clusterState;
        }

        public BigArrays bigArrays() {
            return bigArrays;
        }

        public ScriptService scriptService() {
            return scriptService;
        }

        public ClusterState clusterState() {
            return clusterState;
        }
    }

    // Aggregation name; also used as the XContent object key in toXContent.
    protected String name;

    // Optional user-supplied metadata, echoed back under CommonFields.META.
    protected Map<String, Object> metaData;

    // Pipeline aggregators applied on top of doReduce's result, in order.
    private List<PipelineAggregator> pipelineAggregators;

    /**
     * Constructs an uninitialized aggregation (used for serialization).
     **/
    protected InternalAggregation() {} // NORELEASE remove when removing Streamable

    /**
     * Constructs an aggregation with a given name.
     *
     * @param name The name of the aggregation.
     */
    protected InternalAggregation(String name, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
        this.name = name;
        this.pipelineAggregators = pipelineAggregators;
        this.metaData = metaData;
    }

    /**
     * Read from a stream.
     * Wire order: name, metadata map, vInt count of pipeline aggregators, then each
     * aggregator prefixed by a boolean (true = NamedWriteable, false = legacy stream type).
     * Must mirror {@link #writeTo(StreamOutput)} exactly.
     */
    protected InternalAggregation(StreamInput in) throws IOException {
        name = in.readString();
        metaData = in.readMap();
        int size = in.readVInt();
        if (size == 0) {
            pipelineAggregators = Collections.emptyList();
        } else {
            pipelineAggregators = new ArrayList<>(size);
            for (int i = 0; i < size; i++) {
                if (in.readBoolean()) {
                    pipelineAggregators.add(in.readNamedWriteable(PipelineAggregator.class));
                } else {
                    // Legacy path: resolve the reader from the serialized stream type.
                    BytesReference type = in.readBytesReference();
                    PipelineAggregator pipelineAggregator = PipelineAggregatorStreams.stream(type).readResult(in);
                    pipelineAggregators.add(pipelineAggregator);
                }
            }
        }
    }

    // Legacy Streamable entry point; duplicates the reading constructor's wire
    // format and then delegates subclass payload to doReadFrom.
    @Override
    public final void readFrom(StreamInput in) throws IOException {
        try {
            getWriteableName(); // Throws UnsupportedOperationException if this aggregation should be read using old style Streams
            assert false : "Used reading constructor instead";
        } catch (UnsupportedOperationException e) {
            // OK
        }
        name = in.readString();
        metaData = in.readMap();
        int size = in.readVInt();
        if (size == 0) {
            pipelineAggregators = Collections.emptyList();
        } else {
            pipelineAggregators = new ArrayList<>(size);
            for (int i = 0; i < size; i++) {
                if (in.readBoolean()) {
                    pipelineAggregators.add(in.readNamedWriteable(PipelineAggregator.class));
                } else {
                    BytesReference type = in.readBytesReference();
                    PipelineAggregator pipelineAggregator = PipelineAggregatorStreams.stream(type).readResult(in);
                    pipelineAggregators.add(pipelineAggregator);
                }
            }
        }
        doReadFrom(in);
    }

    protected void doReadFrom(StreamInput in) throws IOException {
        throw new UnsupportedOperationException("Use reading constructor instead"); // NORELEASE remove when we remove Streamable
    }

    // Writes the shared prefix (name, metadata, pipeline aggregators) and then
    // the subclass payload via doWriteTo. Must mirror the reading constructor.
    @Override
    public final void writeTo(StreamOutput out) throws IOException {
        out.writeString(name);
        out.writeGenericValue(metaData);
        out.writeVInt(pipelineAggregators.size());
        for (PipelineAggregator pipelineAggregator : pipelineAggregators) {
            // NORELEASE temporary hack to support old style streams and new style NamedWriteable
            try {
                pipelineAggregator.getWriteableName(); // Throws UnsupportedOperationException if we should use old style streams.
                out.writeBoolean(true);
                out.writeNamedWriteable(pipelineAggregator);
            } catch (UnsupportedOperationException e) {
                out.writeBoolean(false);
                out.writeBytesReference(pipelineAggregator.type().stream());
                pipelineAggregator.writeTo(out);
            }
        }
        doWriteTo(out);
    }

    protected abstract void doWriteTo(StreamOutput out) throws IOException;

    @Override
    public String getWriteableName() {
        // NORELEASE remove me when all InternalAggregations override it
        throw new UnsupportedOperationException("Override on every class");
    }

    @Override
    public String getName() {
        return name;
    }

    /**
     * @return The {@link Type} of this aggregation
     */
    public Type type() {
        // NORELEASE remove this method
        throw new UnsupportedOperationException(getClass().getName() + " used type but should Use getWriteableName instead");
    }

    /**
     * Reduces the given aggregations to a single one and returns it. In <b>most</b> cases, the assumption will be the all given
     * aggregations are of the same type (the same type as this aggregation). For best efficiency, when implementing, try reusing an
     * existing instance (typically the first in the given list) to save on redundant object construction.
     */
    public final InternalAggregation reduce(List<InternalAggregation> aggregations, ReduceContext reduceContext) {
        InternalAggregation aggResult = doReduce(aggregations, reduceContext);
        // Pipeline aggregators post-process the reduced result, in declaration order.
        for (PipelineAggregator pipelineAggregator : pipelineAggregators) {
            aggResult = pipelineAggregator.reduce(aggResult, reduceContext);
        }
        return aggResult;
    }

    public abstract InternalAggregation doReduce(List<InternalAggregation> aggregations, ReduceContext reduceContext);

    @Override
    public Object getProperty(String path) {
        AggregationPath aggPath = AggregationPath.parse(path);
        return getProperty(aggPath.getPathElementsAsStringList());
    }

    public abstract Object getProperty(List<String> path);

    /**
     * Read a size under the assumption that a value of 0 means unlimited.
     */
    protected static int readSize(StreamInput in) throws IOException {
        final int size = in.readVInt();
        return size == 0 ? Integer.MAX_VALUE : size;
    }

    /**
     * Write a size under the assumption that a value of 0 means unlimited.
     */
    protected static void writeSize(int size, StreamOutput out) throws IOException {
        if (size == Integer.MAX_VALUE) {
            size = 0;
        }
        out.writeVInt(size);
    }

    @Override
    public Map<String, Object> getMetaData() {
        return metaData;
    }

    public List<PipelineAggregator> pipelineAggregators() {
        return pipelineAggregators;
    }

    // Renders {name: {meta?: ..., <subclass body>}}; subclasses fill in the body.
    @Override
    public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject(name);
        if (this.metaData != null) {
            builder.field(CommonFields.META);
            builder.map(this.metaData);
        }
        doXContentBody(builder, params);
        builder.endObject();
        return builder;
    }

    public abstract XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException;

    /**
     * Common xcontent field names that are shared among aggregations.
     */
    public static final class CommonFields extends ParseField.CommonFields {
        // todo convert these to ParseField
        public static final String META = "meta";
        public static final String BUCKETS = "buckets";
        public static final String VALUE = "value";
        public static final String VALUES = "values";
        public static final String VALUE_AS_STRING = "value_as_string";
        public static final String DOC_COUNT = "doc_count";
        public static final String KEY = "key";
        public static final String KEY_AS_STRING = "key_as_string";
        public static final String FROM = "from";
        public static final String FROM_AS_STRING = "from_as_string";
        public static final String TO = "to";
        public static final String TO_AS_STRING = "to_as_string";
    }
}
/* * Copyright 2000-2016 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.psi.impl.source.resolve.reference.impl.providers; import com.intellij.codeInsight.daemon.EmptyResolveMessageProvider; import com.intellij.codeInspection.LocalQuickFix; import com.intellij.codeInspection.LocalQuickFixProvider; import com.intellij.lang.LangBundle; import com.intellij.lang.injection.InjectedLanguageManager; import com.intellij.openapi.fileTypes.FileType; import com.intellij.openapi.fileTypes.FileTypeRegistry; import com.intellij.openapi.fileTypes.UnknownFileType; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.SystemInfo; import com.intellij.openapi.util.TextRange; import com.intellij.openapi.util.io.FileUtil; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vfs.VfsUtilCore; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.openapi.vfs.VirtualFileSystem; import com.intellij.psi.*; import com.intellij.psi.impl.source.resolve.ResolveCache; import com.intellij.psi.impl.source.resolve.reference.impl.CachingReference; import com.intellij.psi.impl.source.resolve.reference.impl.PsiMultiReference; import com.intellij.psi.search.PsiFileSystemItemProcessor; import com.intellij.refactoring.rename.BindablePsiReference; import com.intellij.util.ArrayUtil; import com.intellij.util.IncorrectOperationException; import consulo.logging.Logger; import javax.annotation.Nonnull; import java.net.URI; 
import java.text.Normalizer; import java.util.ArrayList; import java.util.Collection; import java.util.HashSet; import java.util.List; /** * @author cdr */ public class FileReference implements PsiFileReference, FileReferenceOwner, PsiPolyVariantReference, LocalQuickFixProvider, EmptyResolveMessageProvider, BindablePsiReference { private static final Logger LOG = Logger.getInstance(FileReference.class); public static final FileReference[] EMPTY = new FileReference[0]; private final int myIndex; private TextRange myRange; private final String myText; @Nonnull private final FileReferenceSet myFileReferenceSet; public FileReference(@Nonnull final FileReferenceSet fileReferenceSet, TextRange range, int index, String text) { myFileReferenceSet = fileReferenceSet; myIndex = index; myRange = range; myText = text; } public FileReference(final FileReference original) { this(original.myFileReferenceSet, original.myRange, original.myIndex, original.myText); } @javax.annotation.Nullable public static FileReference findFileReference(@Nonnull final PsiReference original) { if (original instanceof PsiMultiReference) { final PsiMultiReference multiReference = (PsiMultiReference)original; for (PsiReference reference : multiReference.getReferences()) { if (reference instanceof FileReference) { return (FileReference)reference; } } } else if (original instanceof FileReferenceOwner) { final PsiFileReference fileReference = ((FileReferenceOwner)original).getLastFileReference(); if (fileReference instanceof FileReference) { return (FileReference)fileReference; } } return null; } @Nonnull protected Collection<PsiFileSystemItem> getContexts() { final FileReference contextRef = getContextReference(); ArrayList<PsiFileSystemItem> result = new ArrayList<>(); if (contextRef == null) { Collection<PsiFileSystemItem> defaultContexts = myFileReferenceSet.getDefaultContexts(); for (PsiFileSystemItem context : defaultContexts) { if (context == null) { LOG.error(myFileReferenceSet.getClass() + " 
provided a null context"); } } result.addAll(defaultContexts); } else { ResolveResult[] resolveResults = contextRef.multiResolve(false); for (ResolveResult resolveResult : resolveResults) { if (resolveResult.getElement() != null) { result.add((PsiFileSystemItem)resolveResult.getElement()); } } } result.addAll(myFileReferenceSet.getExtraContexts()); return result; } @Override @Nonnull public ResolveResult[] multiResolve(final boolean incompleteCode) { PsiFile file = getElement().getContainingFile(); return ResolveCache.getInstance(file.getProject()).resolveWithCaching(this, MyResolver.INSTANCE, false, false, file); } @Nonnull protected ResolveResult[] innerResolve(boolean caseSensitive, @Nonnull PsiFile containingFile) { final String referenceText = getText(); if (referenceText.isEmpty() && myIndex == 0) { return new ResolveResult[]{new PsiElementResolveResult(containingFile)}; } final Collection<PsiFileSystemItem> contexts = getContexts(); final Collection<ResolveResult> result = new HashSet<>(); for (final PsiFileSystemItem context : contexts) { innerResolveInContext(referenceText, context, result, caseSensitive); } if (contexts.isEmpty() && isAllowedEmptyPath(referenceText)) { result.add(new PsiElementResolveResult(containingFile)); } final int resultCount = result.size(); return resultCount > 0 ? 
result.toArray(new ResolveResult[resultCount]) : ResolveResult.EMPTY_ARRAY; } protected void innerResolveInContext(@Nonnull final String text, @Nonnull PsiFileSystemItem context, final Collection<ResolveResult> result, final boolean caseSensitive) { if (isAllowedEmptyPath(text) || "".equals(text) || "/".equals(text)) { result.add(new PsiElementResolveResult(context)); } else if ("".equals(text)) { final PsiFileSystemItem resolved = context.getParent(); if (resolved != null) { result.add(new PsiElementResolveResult(resolved)); } } else { final int separatorIndex = text.indexOf('/'); if (separatorIndex >= 0) { final List<ResolveResult> resolvedContexts = new ArrayList<>(); if (separatorIndex == 0 /*starts with slash*/ && "/".equals(context.getName())) { resolvedContexts.add(new PsiElementResolveResult(context)); } else { innerResolveInContext(text.substring(0, separatorIndex), context, resolvedContexts, caseSensitive); } final String restOfText = text.substring(separatorIndex + 1); for (ResolveResult contextVariant : resolvedContexts) { final PsiFileSystemItem item = (PsiFileSystemItem)contextVariant.getElement(); if (item != null) { innerResolveInContext(restOfText, item, result, caseSensitive); } } } else { final String decoded = decode(text); if (context instanceof PackagePrefixFileSystemItem) { context = ((PackagePrefixFileSystemItem)context).getDirectory(); } else if (context instanceof FileReferenceResolver) { PsiFileSystemItem child = ((FileReferenceResolver)context).resolveFileReference(this, decoded); if (child != null) { result.add(new PsiElementResolveResult(getOriginalFile(child))); return; } } if (context.getParent() == null && FileUtil.namesEqual(decoded, context.getName())) { // match filesystem roots result.add(new PsiElementResolveResult(getOriginalFile(context))); } else if (context instanceof PsiDirectory && caseSensitivityApplies((PsiDirectory)context, caseSensitive)) { // optimization: do not load all children into VFS PsiDirectory directory = 
(PsiDirectory)context;
// Directory context: try to resolve the decoded name as a file first, then as a subdirectory.
PsiFileSystemItem child = directory.findFile(decoded);
if (child == null) child = directory.findSubdirectory(decoded);
if (child != null) {
  result.add(new PsiElementResolveResult(getOriginalFile(child)));
}
}
else {
  // Non-directory context: enumerate children and collect every item whose name
  // matches the decoded text, honoring the requested case sensitivity.
  processVariants(context, new PsiFileSystemItemProcessor() {
    @Override
    public boolean acceptItem(String name, boolean isDirectory) {
      return caseSensitive ? decoded.equals(name) : decoded.compareToIgnoreCase(name) == 0;
    }

    @Override
    public boolean execute(@Nonnull PsiFileSystemItem element) {
      result.add(new PsiElementResolveResult(getOriginalFile(element)));
      return true;
    }
  });
}
}
}
}

/** Returns the decoded reference text to use as the name of a file to be created. */
@Nonnull
public String getFileNameToCreate() {
  return decode(getCanonicalText());
}

/**
 * Returns the name of the file template to use when creating this file, derived from
 * the file type registered for the reference text, or null when the type is unknown.
 */
@javax.annotation.Nullable
public String getNewFileTemplateName() {
  FileType fileType = FileTypeRegistry.getInstance().getFileTypeByFileName(myText);
  if (fileType != UnknownFileType.INSTANCE) {
    return fileType.getName() + " File." + fileType.getDefaultExtension();
  }
  return null;
}

// True when the requested case sensitivity matches the context's file system,
// i.e. a name comparison with that sensitivity is meaningful there.
private static boolean caseSensitivityApplies(PsiDirectory context, boolean caseSensitive) {
  VirtualFileSystem fs = context.getVirtualFile().getFileSystem();
  return fs.isCaseSensitive() == caseSensitive;
}

// An empty path component is allowed only for the last reference, and only when
// the owning set permits an empty path or a trailing slash.
private boolean isAllowedEmptyPath(String text) {
  return text.isEmpty() && isLast() &&
         (StringUtil.isEmpty(myFileReferenceSet.getPathString()) && myFileReferenceSet.isEmptyPathAllowed() ||
          !myFileReferenceSet.isEndingSlashNotAllowed() && myIndex > 0);
}

/**
 * Decodes the raw reference text into a file name: normalizes Mac NFD names to NFC,
 * strips HTTP GET parameters ("?..."), and URL-decodes when the set is URL-encoded.
 * Falls back to the original text when decoding fails.
 */
@Nonnull
public String decode(@Nonnull String text) {
  if (SystemInfo.isMac) {
    text = Normalizer.normalize(text, Normalizer.Form.NFC);
  }

  // strip http get parameters
  String _text = text;
  int paramIndex = text.lastIndexOf('?');
  if (paramIndex >= 0) {
    _text = text.substring(0, paramIndex);
  }

  if (myFileReferenceSet.isUrlEncoded()) {
    try {
      return StringUtil.notNullize(new URI(_text).getPath(), text);
    }
    catch (Exception ignored) {
      // malformed URI — return the raw text unchanged
      return text;
    }
  }

  return _text;
}

// Completion variants are delegated to the pluggable FileReferenceCompletion service.
@Override
@Nonnull
public Object[] getVariants() {
  FileReferenceCompletion completion = FileReferenceCompletion.getInstance();
  if (completion != null) {
    return completion.getFileReferenceCompletionVariants(this);
  }
  return ArrayUtil.EMPTY_OBJECT_ARRAY;
}

/**
 * Generates a lookup item for the specified completion variant candidate.
 *
 * @param candidate the element to show in the completion list.
 * @return the lookup item representation (PsiElement, LookupElement or String). If returns null,
 * {@code FileInfoManager.getFileLookupItem(candidate)} will be used to create the lookup item.
 */
protected Object createLookupItem(PsiElement candidate) {
  return null;
}

/**
 * Converts a wrapper like WebDirectoryElement into plain PsiFile
 */
protected static PsiFileSystemItem getOriginalFile(PsiFileSystemItem fileSystemItem) {
  final VirtualFile file = fileSystemItem.getVirtualFile();
  if (file != null && !file.isDirectory()) {
    final PsiManager psiManager = fileSystemItem.getManager();
    if (psiManager != null) {
      final PsiFile psiFile = psiManager.findFile(file);
      if (psiFile != null) {
        fileSystemItem = psiFile;
      }
    }
  }
  return fileSystemItem;
}

// URI-encodes a file name; falls back to the raw name on any encoding failure.
@javax.annotation.Nullable
protected String encode(final String name, PsiElement psiElement) {
  try {
    return new URI(null, null, name, null).toString();
  }
  catch (Exception ignored) {
    // name contains characters URI cannot represent — keep it as-is
    return name;
  }
}

protected static void processVariants(final PsiFileSystemItem context, final PsiFileSystemItemProcessor processor) {
  context.processChildren(processor);
}

// The previous reference in the chain (e.g. "a" for the "b" of "a/b"), or null for the head.
@javax.annotation.Nullable
private FileReference getContextReference() {
  return myIndex > 0 ? myFileReferenceSet.getReference(myIndex - 1) : null;
}

@Override
public PsiElement getElement() {
  return myFileReferenceSet.getElement();
}

// Resolves to a single file-system item; null when unresolved or ambiguous.
@Override
public PsiFileSystemItem resolve() {
  ResolveResult[] resolveResults = multiResolve(false);
  return resolveResults.length == 1 ? (PsiFileSystemItem)resolveResults[0].getElement() : null;
}

// Like resolve(), but with explicit case sensitivity and containing file (cache-bypassing path).
@javax.annotation.Nullable
public PsiFileSystemItem innerSingleResolve(final boolean caseSensitive, @Nonnull PsiFile containingFile) {
  final ResolveResult[] resolveResults = innerResolve(caseSensitive, containingFile);
  return resolveResults.length == 1 ? (PsiFileSystemItem)resolveResults[0].getElement() : null;
}

@Override
public boolean isReferenceTo(PsiElement element) {
  if (!(element instanceof PsiFileSystemItem)) return false;

  final PsiFileSystemItem item = resolve();
  return item != null && FileReferenceHelperRegistrar.areElementsEquivalent(item, (PsiFileSystemItem)element);
}

@Override
public TextRange getRangeInElement() {
  return myRange;
}

@Override
@Nonnull
public String getCanonicalText() {
  return myText;
}

public String getText() {
  return myText;
}

@Override
public boolean isSoft() {
  return myFileReferenceSet.isSoft();
}

// Renames just this path component in the underlying element and shifts the text
// ranges of all subsequent references in the chain by the length delta.
@Override
public PsiElement handleElementRename(String newElementName) throws IncorrectOperationException {
  final ElementManipulator<PsiElement> manipulator = CachingReference.getManipulator(getElement());
  myFileReferenceSet.setElement(manipulator.handleContentChange(getElement(), getRangeInElement(), newElementName));

  //Correct ranges
  int delta = newElementName.length() - myRange.getLength();
  myRange = new TextRange(getRangeInElement().getStartOffset(), getRangeInElement().getStartOffset() + newElementName.length());
  FileReference[] references = myFileReferenceSet.getAllReferences();
  for (int idx = myIndex + 1; idx < references.length; idx++) {
    references[idx].myRange = references[idx].myRange.shiftRight(delta);
  }
  return myFileReferenceSet.getElement();
}

/**
 * Rebinds this reference to the given file-system item, rewriting the reference text
 * either as an absolute path (relative to a helper-provided root) or as a path
 * relative to the containing file's context.
 */
public PsiElement bindToElement(@Nonnull final PsiElement element, final boolean absolute) throws IncorrectOperationException {
  if (!(element instanceof PsiFileSystemItem)) {
    throw new IncorrectOperationException("Cannot bind to element, should be instanceof PsiFileSystemItem: " + element);
  }

  // handle empty reference that resolves to current file
  if (getCanonicalText().isEmpty() && element == getElement().getContainingFile()) return getElement();

  final PsiFileSystemItem fileSystemItem = (PsiFileSystemItem)element;
  VirtualFile dstVFile = fileSystemItem.getVirtualFile();
  if (dstVFile == null) throw new IncorrectOperationException("Cannot bind to non-physical element:" + element);

  PsiFile file = getElement().getContainingFile();
  PsiElement contextPsiFile = InjectedLanguageManager.getInstance(file.getProject()).getInjectionHost(file);
  if (contextPsiFile != null) file = contextPsiFile.getContainingFile(); // use host file!
  final VirtualFile curVFile = file.getVirtualFile();
  if (curVFile == null) throw new IncorrectOperationException("Cannot bind from non-physical element:" + file);

  final Project project = element.getProject();

  String newName;

  if (absolute) {
    // Find a helper that owns the destination file and can supply a root to make
    // the path absolute against; fall back to NullFileReferenceHelper.
    PsiFileSystemItem root = null;
    PsiFileSystemItem dstItem = null;
    for (final FileReferenceHelper helper : FileReferenceHelperRegistrar.getHelpers()) {
      if (!helper.isMine(project, dstVFile)) continue;
      PsiFileSystemItem _dstItem = helper.getPsiFileSystemItem(project, dstVFile);
      if (_dstItem != null) {
        PsiFileSystemItem _root = helper.findRoot(project, dstVFile);
        if (_root != null) {
          root = _root;
          dstItem = _dstItem;
          break;
        }
      }
    }
    if (root == null) {
      PsiFileSystemItem _dstItem = NullFileReferenceHelper.INSTANCE.getPsiFileSystemItem(project, dstVFile);
      if (_dstItem != null) {
        PsiFileSystemItem _root = NullFileReferenceHelper.INSTANCE.findRoot(project, dstVFile);
        if (_root != null) {
          root = _root;
          dstItem = _dstItem;
        }
      }

      if (root == null) {
        // No root found — leave the reference text untouched.
        return getElement();
      }
    }

    final String relativePath = PsiFileSystemItemUtil.getRelativePath(root, dstItem);
    if (relativePath == null) {
      return getElement();
    }
    newName = myFileReferenceSet.getNewAbsolutePath(root, relativePath);
  }
  else { // relative path
    final FileReferenceHelper helper = FileReferenceHelperRegistrar.getNotNullHelper(file);

    // First preference: a context directory that is an ancestor of the destination.
    final Collection<PsiFileSystemItem> contexts = getContextsForBindToElement(curVFile, project, helper);
    for (PsiFileSystemItem context : contexts) {
      final VirtualFile contextFile = context.getVirtualFile();
      assert contextFile != null;
      if (VfsUtilCore.isAncestor(contextFile, dstVFile, true)) {
        final String path = VfsUtilCore.getRelativePath(dstVFile, contextFile, '/');
        if (path != null) {
          return rename(path);
        }
      }
    }

    PsiFileSystemItem dstItem = helper.getPsiFileSystemItem(project, dstVFile);
    PsiFileSystemItem curItem = helper.getPsiFileSystemItem(project, curVFile);
    if (curItem == null) {
      throw new IncorrectOperationException("Cannot find path between files; " + "src = " + curVFile.getPresentableUrl() + "; " + "dst = " + dstVFile.getPresentableUrl() + "; " + "Contexts: " + contexts);
    }
    if (curItem.equals(dstItem)) {
      // Self-reference: keep it if the text already matches, otherwise fix it.
      if (getCanonicalText().equals(dstItem.getName())) {
        return getElement();
      }
      return fixRefText(file.getName());
    }
    newName = PsiFileSystemItemUtil.getRelativePath(curItem, dstItem);
    if (newName == null) {
      return getElement();
    }
  }

  if (myFileReferenceSet.isUrlEncoded()) {
    newName = encode(newName, element);
  }

  return rename(newName);
}

/**
 * TODO: This should be fixed: bindToElement takes contexts from FileReferenceHelper.getContexts() while for resolve they are taken from
 * FileReference.getContexts(). Note that in this case it should rename only the text range of the reference
 */
protected Collection<PsiFileSystemItem> getContextsForBindToElement(VirtualFile curVFile, Project project, FileReferenceHelper helper) {
  return helper.getContexts(project, curVFile);
}

// Rewrites only this reference's own text range (unlike rename(), which rewrites from the set start).
protected PsiElement fixRefText(String name) {
  return ElementManipulators.getManipulator(getElement()).handleContentChange(getElement(), getRangeInElement(), name);
}

/* Happens when it's been moved to another folder */
@Override
public PsiElement bindToElement(@Nonnull final PsiElement element) throws IncorrectOperationException {
  return bindToElement(element, myFileReferenceSet.isAbsolutePathReference());
}

// Replaces the whole reference text from the set's start offset with the new name.
protected PsiElement rename(final String newName) throws IncorrectOperationException {
  final TextRange range = new TextRange(myFileReferenceSet.getStartInElement(), getRangeInElement().getEndOffset());
  PsiElement element = getElement();
  try {
    return CachingReference.getManipulator(element).handleContentChange(element, range, newName);
  }
  catch (IncorrectOperationException e) {
    LOG.error("Cannot rename " + getClass() + " from " + myFileReferenceSet.getClass() + " to " + newName, e);
    throw e;
  }
}

@Nonnull
protected static List<FileReferenceHelper> getHelpers() {
  return FileReferenceHelperRegistrar.getHelpers();
}

public int getIndex() {
  return myIndex;
}

// Error message shown when the reference cannot be resolved ("file" vs "directory"
// depends on whether this is the last component of the path).
@Nonnull
@Override
public String getUnresolvedMessagePattern() {
  return LangBundle.message("error.cannot.resolve") + " " + (LangBundle.message(isLast() ? "terms.file" : "terms.directory")) + " '" + StringUtil.escapePattern(decode(getCanonicalText())) + "'";
}

// True when this is the final component of the reference chain.
public final boolean isLast() {
  return myIndex == myFileReferenceSet.getAllReferences().length - 1;
}

@Nonnull
public FileReferenceSet getFileReferenceSet() {
  return myFileReferenceSet;
}

// Collects quick fixes from every registered helper for this unresolved reference.
@Override
public LocalQuickFix[] getQuickFixes() {
  final List<LocalQuickFix> result = new ArrayList<>();
  for (final FileReferenceHelper helper : getHelpers()) {
    result.addAll(helper.registerFixes(this));
  }
  return result.toArray(new LocalQuickFix[result.size()]);
}

@Override
public FileReference getLastFileReference() {
  return myFileReferenceSet.getLastReference();
}

// Stateless shared resolver used with ResolveCache for poly-variant resolution.
static class MyResolver implements ResolveCache.PolyVariantContextResolver<FileReference> {
  static final MyResolver INSTANCE = new MyResolver();

  @Nonnull
  @Override
  public ResolveResult[] resolve(@Nonnull FileReference ref, @Nonnull PsiFile containingFile, boolean incompleteCode) {
    return ref.innerResolve(ref.getFileReferenceSet().isCaseSensitive(), containingFile);
  }
}
}
/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License
 */

package com.rnd.snapsplit.view;

import android.support.v4.app.DialogFragment;
import android.content.Context;
import android.content.SharedPreferences;
import android.hardware.fingerprint.FingerprintManager;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.view.KeyEvent;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.inputmethod.EditorInfo;
import android.view.inputmethod.InputMethodManager;
import android.widget.Button;
import android.widget.CheckBox;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.TextView;

import com.rnd.snapsplit.DialogClickListener;
import com.rnd.snapsplit.FingerprintUiHelper;
import com.rnd.snapsplit.PaymentRequest;
import com.rnd.snapsplit.R;

/**
 * A dialog which uses fingerprint APIs to authenticate the user, and falls back to password
 * authentication if fingerprint is not available.
 */
public class DialogFragmentFingerprintAuthentication extends DialogFragment
        implements TextView.OnEditorActionListener, FingerprintUiHelper.Callback {

    // Target fragment receiving the purchase result; set in onCreate from getTargetFragment().
    private DialogClickListener callback;

    private Button mCancelButton;
    private Button mSecondDialogButton;       // toggles between "use password" and "ok" per stage
    private View mFingerprintContent;
    private View mBackupContent;
    private EditText mPassword;
    private CheckBox mUseFingerprintFutureCheckBox;
    private TextView mPasswordDescriptionTextView;
    private TextView mNewFingerprintEnrolledTextView;
    private PaymentRequest pr;                // payment details passed in via fragment arguments

    // Which authentication UI is currently shown; starts on the fingerprint screen.
    private Stage mStage = Stage.FINGERPRINT;

    private FingerprintManager.CryptoObject mCryptoObject;
    private FingerprintUiHelper mFingerprintUiHelper;
    private Context mContext;                 // captured in onAttach (called before onCreateView)

    private InputMethodManager mInputMethodManager;
    private SharedPreferences mSharedPreferences;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // NOTE(review): assumes the target fragment implements DialogClickListener —
        // a ClassCastException here means the caller forgot setTargetFragment().
        callback = (DialogClickListener) getTargetFragment();
        // Do not create a new Fragment when the Activity is re-created such as orientation changes.
        setRetainInstance(true);
        setStyle(DialogFragment.STYLE_NORMAL, android.R.style.Theme_Material_Light_Dialog);
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        getDialog().setTitle(getString(R.string.sign_in));
        View v = inflater.inflate(R.layout.fingerprint_dialog_container, container, false);
        // Payment request is serialized into the arguments bundle under key "pr".
        Bundle bundle = this.getArguments();
        pr = (PaymentRequest) bundle.getSerializable("pr");
        final TextView amount = (TextView) v.findViewById(R.id.amount_value);
        // NOTE(review): String.format without an explicit Locale is locale-sensitive
        // (decimal separator may differ) — confirm this is intended for display.
        String amountValue = "HKD"+ String.format("%.2f", pr.getShareAmount());
        amount.setText(amountValue);
        final TextView receipient = (TextView) v.findViewById(R.id.receipient_value);
        receipient.setText((String) pr.getRequestorName() + " - " + pr.getRequestorPhoneNumber());
        mCancelButton = (Button) v.findViewById(R.id.cancel_button);
        mCancelButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                dismiss();
            }
        });

        // Second button: switch to password entry on the fingerprint screen,
        // submit the password on the backup screen.
        mSecondDialogButton = (Button) v.findViewById(R.id.second_dialog_button);
        mSecondDialogButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                if (mStage == Stage.FINGERPRINT) {
                    goToBackup();
                } else {
                    verifyPassword();
                }
            }
        });
        mFingerprintContent = v.findViewById(R.id.fingerprint_container);
        mBackupContent = v.findViewById(R.id.backup_container);
        mPassword = (EditText) v.findViewById(R.id.password);
        mPassword.setOnEditorActionListener(this);
        mPasswordDescriptionTextView = (TextView) v.findViewById(R.id.password_description);
        //mFingerprintContent
        mUseFingerprintFutureCheckBox = (CheckBox) v.findViewById(R.id.use_fingerprint_in_future_check);
        mNewFingerprintEnrolledTextView = (TextView) v.findViewById(R.id.new_fingerprint_enrolled_description);
        // mContext is valid here: onAttach runs before onCreateView in the fragment lifecycle.
        mFingerprintUiHelper = new FingerprintUiHelper(
                mContext.getSystemService(FingerprintManager.class),
                (ImageView) v.findViewById(R.id.fingerprint_icon),
                (TextView) v.findViewById(R.id.fingerprint_status), this);
        updateStage();

        // If fingerprint authentication is not available, switch immediately to the backup
        // (password) screen.
        if (!mFingerprintUiHelper.isFingerprintAuthAvailable()) {
            goToBackup();
        }
        return v;
    }

    @Override
    public void onResume() {
        super.onResume();
        // Only listen for fingerprints while the fingerprint screen is showing.
        if (mStage == Stage.FINGERPRINT) {
            mFingerprintUiHelper.startListening(mCryptoObject);
        }
    }

    public void setStage(Stage stage) {
        mStage = stage;
    }

    @Override
    public void onPause() {
        super.onPause();
        mFingerprintUiHelper.stopListening();
    }

    @Override
    public void onAttach(Context context) {
        super.onAttach(context);
        mContext = context;
        mInputMethodManager = context.getSystemService(InputMethodManager.class);
        mSharedPreferences = PreferenceManager.getDefaultSharedPreferences(context);
    }

    /**
     * Sets the crypto object to be passed in when authenticating with fingerprint.
     */
    public void setCryptoObject(FingerprintManager.CryptoObject cryptoObject) {
        mCryptoObject = cryptoObject;
    }

    /**
     * Switches to backup (password) screen. This either can happen when fingerprint is not
     * available or the user chooses to use the password authentication method by pressing the
     * button. This can also happen when the user had too many fingerprint attempts.
     */
    private void goToBackup() {
        mStage = Stage.PASSWORD;
        updateStage();
        mPassword.requestFocus();

        // Show the keyboard.
        mPassword.postDelayed(mShowKeyboardRunnable, 500);

        // Fingerprint is not used anymore. Stop listening for it.
        mFingerprintUiHelper.stopListening();
    }

    /**
     * Checks whether the current entered password is correct, and dismisses the dialog and
     * lets the activity know about the result.
     */
    private void verifyPassword() {
        if (!checkPassword(mPassword.getText().toString())) {
            return;
        }
        if (mStage == Stage.NEW_FINGERPRINT_ENROLLED) {
            // Persist the user's choice about using fingerprint for future authentications.
            SharedPreferences.Editor editor = mSharedPreferences.edit();
            editor.putBoolean(getString(R.string.use_fingerprint_to_authenticate_key),
                    mUseFingerprintFutureCheckBox.isChecked());
            editor.apply();

            if (mUseFingerprintFutureCheckBox.isChecked()) {
                // Re-create the key so that fingerprints including new ones are validated.
                //mActivity.createKey(mActivity.DEFAULT_KEY_NAME, true);
                mStage = Stage.FINGERPRINT;
            }
        }
        mPassword.setText("");
        callback.onPurchased(false /* without Fingerprint */, null, pr);
        dismiss();
    }

    /**
     * @return true if {@code password} is correct, false otherwise
     */
    private boolean checkPassword(String password) {
        // Assume the password is always correct.
        // In the real world situation, the password needs to be verified in the server side.
        return password.length() > 0;
    }

    // Posted with a delay from goToBackup() so the keyboard appears after the layout settles.
    private final Runnable mShowKeyboardRunnable = new Runnable() {
        @Override
        public void run() {
            mInputMethodManager.showSoftInput(mPassword, 0);
        }
    };

    // Swaps button labels and content visibility to match the current Stage.
    private void updateStage() {
        switch (mStage) {
            case FINGERPRINT:
                mCancelButton.setText(R.string.cancel);
                mSecondDialogButton.setText(R.string.use_password);
                mFingerprintContent.setVisibility(View.VISIBLE);
                mBackupContent.setVisibility(View.GONE);
                break;
            case NEW_FINGERPRINT_ENROLLED:
                // Intentional fall through
            case PASSWORD:
                mCancelButton.setText(R.string.cancel);
                mSecondDialogButton.setText(R.string.ok);
                mFingerprintContent.setVisibility(View.GONE);
                mBackupContent.setVisibility(View.VISIBLE);
                if (mStage == Stage.NEW_FINGERPRINT_ENROLLED) {
                    mPasswordDescriptionTextView.setVisibility(View.GONE);
                    mNewFingerprintEnrolledTextView.setVisibility(View.VISIBLE);
                    mUseFingerprintFutureCheckBox.setVisibility(View.VISIBLE);
                }
                break;
        }
    }

    // IME "Go" action on the password field submits the password.
    @Override
    public boolean onEditorAction(TextView v, int actionId, KeyEvent event) {
        if (actionId == EditorInfo.IME_ACTION_GO) {
            verifyPassword();
            return true;
        }
        return false;
    }

    @Override
    public void onAuthenticated() {
        // Callback from FingerprintUiHelper. Let the activity know that authentication was
        // successful.
        callback.onPurchased(true /* withFingerprint */, mCryptoObject, pr);
        dismiss();
    }

    @Override
    public void onError() {
        // Too many failed fingerprint attempts — fall back to password entry.
        goToBackup();
    }

    /**
     * Enumeration to indicate which authentication method the user is trying to authenticate with.
     */
    public enum Stage {
        FINGERPRINT,
        NEW_FINGERPRINT_ENROLLED,
        PASSWORD
    }
}
/*
       Licensed to the Apache Software Foundation (ASF) under one
       or more contributor license agreements.  See the NOTICE file
       distributed with this work for additional information
       regarding copyright ownership.  The ASF licenses this file
       to you under the Apache License, Version 2.0 (the
       "License"); you may not use this file except in compliance
       with the License.  You may obtain a copy of the License at

         http://www.apache.org/licenses/LICENSE-2.0

       Unless required by applicable law or agreed to in writing,
       software distributed under the License is distributed on an
       "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
       KIND, either express or implied.  See the License for the
       specific language governing permissions and limitations
       under the License.
*/
package org.apache.cordova.inappbrowser;

import android.annotation.SuppressLint;
import org.apache.cordova.inappbrowser.InAppBrowserDialog;
import android.content.Context;
import android.content.Intent;
import android.provider.Browser;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.drawable.Drawable;
import android.net.Uri;
import android.os.Build;
import android.os.Bundle;
import android.text.InputType;
import android.util.Log;
import android.util.TypedValue;
import android.view.Gravity;
import android.view.KeyEvent;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;
import android.view.WindowManager.LayoutParams;
import android.view.inputmethod.EditorInfo;
import android.view.inputmethod.InputMethodManager;
import android.webkit.CookieManager;
import android.webkit.HttpAuthHandler;
import android.webkit.WebSettings;
import android.webkit.WebView;
import android.webkit.WebViewClient;
import android.widget.Button;
import android.widget.EditText;
import android.widget.LinearLayout;
import android.widget.RelativeLayout;

import org.apache.cordova.CallbackContext;
import org.apache.cordova.Config;
import org.apache.cordova.CordovaArgs;
import org.apache.cordova.CordovaHttpAuthHandler;
import org.apache.cordova.CordovaPlugin;
import org.apache.cordova.CordovaWebView;
import org.apache.cordova.LOG;
import org.apache.cordova.PluginManager;
import org.apache.cordova.PluginResult;
import org.json.JSONException;
import org.json.JSONObject;

import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.StringTokenizer;

import android.graphics.Color;

@SuppressLint("SetJavaScriptEnabled")
public class InAppBrowser extends CordovaPlugin {

    // Feature-string keys and event names exchanged with the JS side of the plugin.
    private static final String NULL = "null";
    protected static final String LOG_TAG = "InAppBrowser";
    private static final String SELF = "_self";
    private static final String SYSTEM = "_system";
    private static final String EXIT_EVENT = "exit";
    private static final String LOCATION = "location";
    private static final String ZOOM = "zoom";
    private static final String TOOLBAR = "toolbar";
    private static final String HIDDEN = "hidden";
    private static final String LOAD_START_EVENT = "loadstart";
    private static final String LOAD_STOP_EVENT = "loadstop";
    private static final String LOAD_ERROR_EVENT = "loaderror";
    private static final String CLEAR_ALL_CACHE = "clearcache";
    private static final String CLEAR_SESSION_CACHE = "clearsessioncache";
    private static final String HARDWARE_BACK_BUTTON = "hardwareback";

    private InAppBrowserDialog dialog;
    private WebView inAppWebView;
    private EditText edittext;        // the URL/location bar inside the dialog
    private CallbackContext callbackContext;
    private boolean showLocationBar = true;
    private boolean showZoomControls = true;
    private boolean showToolbar = true;
    private boolean openWindowHidden = false;
    private boolean clearAllCache= false;
    private boolean clearSessionCache=false;
    // NOTE(review): identifier is misspelled ("hadware") but is internal state only.
    private boolean hadwareBackButton=false;

    /**
     * Executes the request and returns PluginResult.
     *
     * @param action the action to execute.
     * @param args JSONArry of arguments for the plugin.
     * @param callbackContext the callbackContext used when calling back into JavaScript.
     * @return A PluginResult object with a status and message.
     */
    public boolean execute(String action, CordovaArgs args, final CallbackContext callbackContext) throws JSONException {
        if (action.equals("open")) {
            this.callbackContext = callbackContext;
            final String url = args.getString(0);
            String t = args.optString(1);
            if (t == null || t.equals("") || t.equals(NULL)) {
                t = SELF;
            }
            final String target = t;
            final HashMap<String, Boolean> features = parseFeature(args.optString(2));

            Log.d(LOG_TAG, "target = " + target);

            // All WebView/dialog work must happen on the UI thread.
            this.cordova.getActivity().runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    String result = "";
                    // SELF
                    if (SELF.equals(target)) {
                        Log.d(LOG_TAG, "in self");
                        /* This code exists for compatibility between 3.x and 4.x versions of Cordova.
                         * Previously the Config class had a static method, isUrlWhitelisted(). That
                         * responsibility has been moved to the plugins, with an aggregating method in
                         * PluginManager.
                         */
                        Boolean shouldAllowNavigation = null;
                        if (url.startsWith("javascript:")) {
                            shouldAllowNavigation = true;
                        }
                        // Cordova 3.x path: legacy static whitelist check, looked up reflectively.
                        if (shouldAllowNavigation == null) {
                            try {
                                Method iuw = Config.class.getMethod("isUrlWhiteListed", String.class);
                                shouldAllowNavigation = (Boolean)iuw.invoke(null, url);
                            } catch (NoSuchMethodException e) {
                            } catch (IllegalAccessException e) {
                            } catch (InvocationTargetException e) {
                            }
                        }
                        // Cordova 4.x path: ask the PluginManager, also via reflection.
                        if (shouldAllowNavigation == null) {
                            try {
                                Method gpm = webView.getClass().getMethod("getPluginManager");
                                PluginManager pm = (PluginManager)gpm.invoke(webView);
                                Method san = pm.getClass().getMethod("shouldAllowNavigation", String.class);
                                shouldAllowNavigation = (Boolean)san.invoke(pm, url);
                            } catch (NoSuchMethodException e) {
                            } catch (IllegalAccessException e) {
                            } catch (InvocationTargetException e) {
                            }
                        }
                        // load in webview
                        if (Boolean.TRUE.equals(shouldAllowNavigation)) {
                            Log.d(LOG_TAG, "loading in webview");
                            webView.loadUrl(url);
                        }
                        //Load the dialer
                        else if (url.startsWith(WebView.SCHEME_TEL)) {
                            try {
                                Log.d(LOG_TAG, "loading in dialer");
                                Intent intent = new Intent(Intent.ACTION_DIAL);
                                intent.setData(Uri.parse(url));
                                cordova.getActivity().startActivity(intent);
                            } catch (android.content.ActivityNotFoundException e) {
                                sendException(url, e);
                            }
                        }
                        // load in InAppBrowser
                        else {
                            Log.d(LOG_TAG, "loading in InAppBrowser");
                            result = showWebPage(url, features);
                        }
                    }
                    // SYSTEM
                    else if (SYSTEM.equals(target)) {
                        Log.d(LOG_TAG, "in system");
                        result = openExternal(url);
                    }
                    // BLANK - or anything else
                    else {
                        Log.d(LOG_TAG, "in blank");
                        result = showWebPage(url, features);
                    }

                    PluginResult pluginResult = new PluginResult(PluginResult.Status.OK, result);
                    pluginResult.setKeepCallback(true);
                    callbackContext.sendPluginResult(pluginResult);
                }
            });
        }
        else if (action.equals("close")) {
            closeDialog();
        }
        else if (action.equals("injectScriptCode")) {
            String jsWrapper = null;
            if (args.getBoolean(1)) {
                jsWrapper = String.format("prompt(JSON.stringify([eval(%%s)]), 'gap-iab://%s')", callbackContext.getCallbackId());
            }
            injectDeferredObject(args.getString(0), jsWrapper);
        }
        else if (action.equals("injectScriptFile")) {
            String jsWrapper;
            if (args.getBoolean(1)) {
                jsWrapper = String.format("(function(d) { var c = d.createElement('script'); c.src = %%s; c.onload = function() { prompt('', 'gap-iab://%s'); }; d.body.appendChild(c); })(document)", callbackContext.getCallbackId());
            } else {
                jsWrapper = "(function(d) { var c = d.createElement('script'); c.src = %s; d.body.appendChild(c); })(document)";
            }
            injectDeferredObject(args.getString(0), jsWrapper);
        }
        else if (action.equals("injectStyleCode")) {
            String jsWrapper;
            if (args.getBoolean(1)) {
                jsWrapper = String.format("(function(d) { var c = d.createElement('style'); c.innerHTML = %%s; d.body.appendChild(c); prompt('', 'gap-iab://%s');})(document)", callbackContext.getCallbackId());
            } else {
                jsWrapper = "(function(d) { var c = d.createElement('style'); c.innerHTML = %s; d.body.appendChild(c); })(document)";
            }
            injectDeferredObject(args.getString(0), jsWrapper);
        }
        else if (action.equals("injectStyleFile")) {
            String jsWrapper;
            if (args.getBoolean(1)) {
                jsWrapper = String.format("(function(d) { var c = d.createElement('link'); c.rel='stylesheet'; c.type='text/css'; c.href = %%s; d.head.appendChild(c); prompt('', 'gap-iab://%s');})(document)", callbackContext.getCallbackId());
            } else {
                jsWrapper = "(function(d) { var c = d.createElement('link'); c.rel='stylesheet'; c.type='text/css'; c.href = %s; d.head.appendChild(c); })(document)";
            }
            injectDeferredObject(args.getString(0), jsWrapper);
        }
        else if (action.equals("show")) {
            this.cordova.getActivity().runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    dialog.show();
                }
            });
            PluginResult pluginResult = new PluginResult(PluginResult.Status.OK);
            pluginResult.setKeepCallback(true);
            this.callbackContext.sendPluginResult(pluginResult);
        }
        else {
            return false;
        }
        return true;
    }

    /**
     * Called when the view navigates.
     */
    @Override
    public void onReset() {
        closeDialog();
    }

    /**
     * Called by AccelBroker when listener is to be shut down.
     * Stop listener.
     */
    public void onDestroy() {
        closeDialog();
    }

    /**
     * Inject an object (script or style) into the InAppBrowser WebView.
     *
     * This is a helper method for the inject{Script|Style}{Code|File} API calls, which
     * provides a consistent method for injecting JavaScript code into the document.
     *
     * If a wrapper string is supplied, then the source string will be JSON-encoded (adding
     * quotes) and wrapped using string formatting. (The wrapper string should have a single
     * '%s' marker)
     *
     * @param source    The source object (filename or script/style text) to inject into
     *                  the document.
     * @param jsWrapper A JavaScript string to wrap the source string in, so that the object
     *                  is properly injected, or null if the source string is JavaScript text
     *                  which should be executed directly.
     */
    private void injectDeferredObject(String source, String jsWrapper) {
        String scriptToInject;
        if (jsWrapper != null) {
            // JSON-escape the source by round-tripping it through a one-element JSON array.
            org.json.JSONArray jsonEsc = new org.json.JSONArray();
            jsonEsc.put(source);
            String jsonRepr = jsonEsc.toString();
            String jsonSourceString = jsonRepr.substring(1, jsonRepr.length()-1);
            scriptToInject = String.format(jsWrapper, jsonSourceString);
        } else {
            scriptToInject = source;
        }
        final String finalScriptToInject = scriptToInject;
        this.cordova.getActivity().runOnUiThread(new Runnable() {
            @SuppressLint("NewApi")
            @Override
            public void run() {
                if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
                    // This action will have the side-effect of blurring the currently focused element
                    inAppWebView.loadUrl("javascript:" + finalScriptToInject);
                } else {
                    inAppWebView.evaluateJavascript(finalScriptToInject, null);
                }
            }
        });
    }

    /**
     * Put the list of features into a hash map
     *
     * @param optString comma-separated "key=value" pairs; any value other than "no" maps to TRUE.
     * @return the parsed feature map, or null when the input is the literal "null".
     */
    private HashMap<String, Boolean> parseFeature(String optString) {
        if (optString.equals(NULL)) {
            return null;
        } else {
            HashMap<String, Boolean> map = new HashMap<String, Boolean>();
            StringTokenizer features = new StringTokenizer(optString, ",");
            StringTokenizer option;
            while(features.hasMoreElements()) {
                option = new StringTokenizer(features.nextToken(), "=");
                if (option.hasMoreElements()) {
                    String key = option.nextToken();
                    Boolean value = option.nextToken().equals("no") ? Boolean.FALSE : Boolean.TRUE;
                    map.put(key, value);
                }
            }
            return map;
        }
    }

    /**
     * Display a new browser with the specified URL.
     *
     * @param url the url to load.
     * @return "" if ok, or error message.
     */
    public String openExternal(String url) {
        try {
            Intent intent = null;
            intent = new Intent(Intent.ACTION_VIEW);
            // Omitting the MIME type for file: URLs causes "No Activity found to handle Intent".
            // Adding the MIME type to http: URLs causes them to not be handled by the downloader.
            Uri uri = Uri.parse(url);
            if ("file".equals(uri.getScheme())) {
                intent.setDataAndType(uri, webView.getResourceApi().getMimeType(uri));
            } else {
                intent.setData(uri);
            }
            intent.putExtra(Browser.EXTRA_APPLICATION_ID, cordova.getActivity().getPackageName());
            this.cordova.getActivity().startActivity(intent);
            return "";
        } catch (android.content.ActivityNotFoundException e) {
            sendException(url, e);
            return e.toString();
        }
    }

    /**
     * Closes the dialog
     */
    public void closeDialog() {
        final WebView childView = this.inAppWebView;
        // The JS protects against multiple calls, so this should happen only when
        // closeDialog() is called by other native code.
        if (childView == null) {
            return;
        }
        this.cordova.getActivity().runOnUiThread(new Runnable() {
            @Override
            public void run() {
                childView.setWebViewClient(new WebViewClient() {
                    // NB: wait for about:blank before dismissing
                    public void onPageFinished(WebView view, String url) {
                        if (dialog != null) {
                            dialog.dismiss();
                        }
                    }
                });
                // NB: From SDK 19: "If you call methods on WebView from any thread
                // other than your app's UI thread, it can cause unexpected results."
                // http://developer.android.com/guide/webapps/migrating.html#Threads
                childView.loadUrl("about:blank");
            }
        });

        try {
            JSONObject obj = new JSONObject();
            obj.put("type", EXIT_EVENT);
            sendUpdate(obj, false);
        } catch (JSONException ex) {
            Log.d(LOG_TAG, "Should never happen");
        }
    }

    /**
     * Checks to see if it is possible to go back one page in history, then does so.
     */
    public void goBack() {
        if (this.inAppWebView.canGoBack()) {
            this.inAppWebView.goBack();
        }
    }

    /**
     * Can the web browser go back?
     * @return boolean
     */
    public boolean canGoBack() {
        return this.inAppWebView.canGoBack();
    }

    /**
     * Has the user set the hardware back button to go back
     * @return boolean
     */
    public boolean hardwareBack() {
        return hadwareBackButton;
    }

    /**
     * Checks to see if it is possible to go forward one page in history, then does so.
     */
    private void goForward() {
        if (this.inAppWebView.canGoForward()) {
            this.inAppWebView.goForward();
        }
    }

    /**
     * Navigate to the new page
     *
     * @param url to load
     */
    private void navigate(String url) {
        InputMethodManager imm = (InputMethodManager)this.cordova.getActivity().getSystemService(Context.INPUT_METHOD_SERVICE);
        imm.hideSoftInputFromWindow(edittext.getWindowToken(), 0);

        // Default to http:// when the user typed a bare host name into the location bar.
        if (!url.startsWith("http") && !url.startsWith("file:")) {
            this.inAppWebView.loadUrl("http://" + url);
        } else {
            this.inAppWebView.loadUrl(url);
        }
        this.inAppWebView.requestFocus();
    }

    /**
     * Should we show the location bar?
     *
     * @return boolean
     */
    private boolean getShowLocationBar() {
        return this.showLocationBar;
    }

    /**
     * Should we show the URL In Location bar?
     *
     * @return boolean
     */
    private boolean getShowToolbar() {
        return this.showToolbar;
    }

    /**
     * Should we show the zoom controls?
     *
     * @return boolean
     */
    private boolean getShowZoomControls() {
        return this.showZoomControls;
    }

    private InAppBrowser getInAppBrowser(){
        return this;
    }

    /**
     * Display a new browser with the specified URL.
     *
     * @param url the url to load.
     * @param features jsonObject
     */
    public String showWebPage(final String url, HashMap<String, Boolean> features) {
        // Determine if we should hide the location bar.
        showLocationBar = true;
        showZoomControls = true;
        showToolbar = true;
        openWindowHidden = false;
        if (features != null) {
            Boolean show = features.get(LOCATION);
            if (show != null) {
                showLocationBar = show.booleanValue();
            }
            show = features.get(TOOLBAR);
            if (show != null) {
                showToolbar = show.booleanValue();
            }
            Boolean zoom = features.get(ZOOM);
            if (zoom != null) {
                showZoomControls = zoom.booleanValue();
            }
            Boolean hidden = features.get(HIDDEN);
            if (hidden != null) {
                openWindowHidden = hidden.booleanValue();
            }
            Boolean hardwareBack = features.get(HARDWARE_BACK_BUTTON);
            if (hardwareBack != null) {
                hadwareBackButton = hardwareBack.booleanValue();
            }
            Boolean cache = features.get(CLEAR_ALL_CACHE);
            if (cache != null) {
                clearAllCache = cache.booleanValue();
            } else {
                cache = features.get(CLEAR_SESSION_CACHE);
                if (cache != null) {
                    clearSessionCache = cache.booleanValue();
                }
            }
        }

        final CordovaWebView thatWebView = this.webView;

        // Create dialog in new thread
        Runnable runnable = new Runnable() {
            /**
             * Convert our DIP units to Pixels
             *
             * @return int
             */
            private int dpToPixels(int dipValue) {
                int value = (int) TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP,
                                                            (float) dipValue,
                                                            cordova.getActivity().getResources().getDisplayMetrics()
                );

                return value;
            }

            @SuppressLint("NewApi")
            public void run() {
                // Let's create the main dialog
                dialog = new InAppBrowserDialog(cordova.getActivity(), android.R.style.Theme_NoTitleBar);
                dialog.getWindow().getAttributes().windowAnimations = android.R.style.Animation_Dialog;
                dialog.requestWindowFeature(Window.FEATURE_NO_TITLE);
                dialog.setCancelable(true);
                dialog.setInAppBroswer(getInAppBrowser());

                // Main container layout
                LinearLayout main = new LinearLayout(cordova.getActivity());
                main.setOrientation(LinearLayout.VERTICAL);

                // Toolbar layout
                RelativeLayout toolbar = new
RelativeLayout(cordova.getActivity()); //Please, no more black! toolbar.setBackgroundColor(Color.parseColor("#2E3A69")); // 2E3A69 toolbar.setLayoutParams(new RelativeLayout.LayoutParams(LayoutParams.MATCH_PARENT, this.dpToPixels(44))); toolbar.setPadding(this.dpToPixels(2), this.dpToPixels(2), this.dpToPixels(2), this.dpToPixels(2)); toolbar.setHorizontalGravity(Gravity.LEFT); toolbar.setVerticalGravity(Gravity.TOP); // Action Button Container layout RelativeLayout actionButtonContainer = new RelativeLayout(cordova.getActivity()); actionButtonContainer.setLayoutParams(new RelativeLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT)); actionButtonContainer.setHorizontalGravity(Gravity.LEFT); actionButtonContainer.setVerticalGravity(Gravity.CENTER_VERTICAL); actionButtonContainer.setId(1); // Back button Button back = new Button(cordova.getActivity()); RelativeLayout.LayoutParams backLayoutParams = new RelativeLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.MATCH_PARENT); backLayoutParams.addRule(RelativeLayout.ALIGN_LEFT); back.setLayoutParams(backLayoutParams); back.setContentDescription("Back Button"); back.setId(2); Resources activityRes = cordova.getActivity().getResources(); int backResId = activityRes.getIdentifier("ic_action_previous_item", "drawable", cordova.getActivity().getPackageName()); Drawable backIcon = activityRes.getDrawable(backResId); if(android.os.Build.VERSION.SDK_INT < android.os.Build.VERSION_CODES.JELLY_BEAN) { back.setBackgroundDrawable(backIcon); } else { back.setBackground(backIcon); } back.setOnClickListener(new View.OnClickListener() { public void onClick(View v) { goBack(); } }); // Forward button Button forward = new Button(cordova.getActivity()); RelativeLayout.LayoutParams forwardLayoutParams = new RelativeLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.MATCH_PARENT); forwardLayoutParams.addRule(RelativeLayout.RIGHT_OF, 2); forward.setLayoutParams(forwardLayoutParams); 
forward.setContentDescription("Forward Button"); forward.setId(3); int fwdResId = activityRes.getIdentifier("ic_action_next_item", "drawable", cordova.getActivity().getPackageName()); Drawable fwdIcon = activityRes.getDrawable(fwdResId); if(android.os.Build.VERSION.SDK_INT < android.os.Build.VERSION_CODES.JELLY_BEAN) { forward.setBackgroundDrawable(fwdIcon); } else { forward.setBackground(fwdIcon); } forward.setOnClickListener(new View.OnClickListener() { public void onClick(View v) { goForward(); } }); // Edit Text Box edittext = new EditText(cordova.getActivity()); RelativeLayout.LayoutParams textLayoutParams = new RelativeLayout.LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT); textLayoutParams.addRule(RelativeLayout.RIGHT_OF, 1); textLayoutParams.addRule(RelativeLayout.LEFT_OF, 5); edittext.setLayoutParams(textLayoutParams); edittext.setId(4); edittext.setSingleLine(true); edittext.setText(url); edittext.setInputType(InputType.TYPE_TEXT_VARIATION_URI); edittext.setImeOptions(EditorInfo.IME_ACTION_GO); edittext.setInputType(InputType.TYPE_NULL); // Will not except input... 
Makes the text NON-EDITABLE edittext.setOnKeyListener(new View.OnKeyListener() { public boolean onKey(View v, int keyCode, KeyEvent event) { // If the event is a key-down event on the "enter" button if ((event.getAction() == KeyEvent.ACTION_DOWN) && (keyCode == KeyEvent.KEYCODE_ENTER)) { navigate(edittext.getText().toString()); return true; } return false; } }); // Close/Done button Button close = new Button(cordova.getActivity()); RelativeLayout.LayoutParams closeLayoutParams = new RelativeLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.MATCH_PARENT); closeLayoutParams.addRule(RelativeLayout.ALIGN_PARENT_RIGHT); close.setLayoutParams(closeLayoutParams); close.setContentDescription("Close Button"); close.setId(5); int closeResId = activityRes.getIdentifier("ic_action_remove", "drawable", cordova.getActivity().getPackageName()); Drawable closeIcon = activityRes.getDrawable(closeResId); if(android.os.Build.VERSION.SDK_INT < android.os.Build.VERSION_CODES.JELLY_BEAN) { close.setBackgroundDrawable(closeIcon); } else { close.setBackground(closeIcon); } close.setOnClickListener(new View.OnClickListener() { public void onClick(View v) { closeDialog(); } }); // WebView inAppWebView = new WebView(cordova.getActivity()); inAppWebView.setLayoutParams(new LinearLayout.LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT)); inAppWebView.setWebChromeClient(new InAppChromeClient(thatWebView)); WebViewClient client = new InAppBrowserClient(thatWebView, edittext); inAppWebView.setWebViewClient(client); WebSettings settings = inAppWebView.getSettings(); settings.setJavaScriptEnabled(true); settings.setJavaScriptCanOpenWindowsAutomatically(true); settings.setBuiltInZoomControls(getShowZoomControls()); settings.setPluginState(android.webkit.WebSettings.PluginState.ON); //Toggle whether this is enabled or not! Bundle appSettings = cordova.getActivity().getIntent().getExtras(); boolean enableDatabase = appSettings == null ? 
true : appSettings.getBoolean("InAppBrowserStorageEnabled", true); if (enableDatabase) { String databasePath = cordova.getActivity().getApplicationContext().getDir("inAppBrowserDB", Context.MODE_PRIVATE).getPath(); settings.setDatabasePath(databasePath); settings.setDatabaseEnabled(true); } settings.setDomStorageEnabled(true); if (clearAllCache) { CookieManager.getInstance().removeAllCookie(); } else if (clearSessionCache) { CookieManager.getInstance().removeSessionCookie(); } inAppWebView.loadUrl(url); inAppWebView.setId(6); inAppWebView.getSettings().setLoadWithOverviewMode(true); inAppWebView.getSettings().setUseWideViewPort(true); inAppWebView.requestFocus(); inAppWebView.requestFocusFromTouch(); // Add the back and forward buttons to our action button container layout actionButtonContainer.addView(back); actionButtonContainer.addView(forward); // Add the views to our toolbar toolbar.addView(actionButtonContainer); if(getShowLocationBar()) { toolbar.addView(edittext); } toolbar.addView(close); // Don't add the toolbar if its been disabled if (getShowToolbar()) { // Add our toolbar to our main view/layout main.addView(toolbar); } // Add our webview to our main view/layout main.addView(inAppWebView); WindowManager.LayoutParams lp = new WindowManager.LayoutParams(); lp.copyFrom(dialog.getWindow().getAttributes()); lp.width = WindowManager.LayoutParams.MATCH_PARENT; lp.height = WindowManager.LayoutParams.MATCH_PARENT; dialog.setContentView(main); dialog.show(); dialog.getWindow().setAttributes(lp); // the goal of openhidden is to load the url and not display it // Show() needs to be called to cause the URL to be loaded if(openWindowHidden) { dialog.hide(); } } }; this.cordova.getActivity().runOnUiThread(runnable); return ""; } /** * Create a new plugin success result and send it back to JavaScript * * @param obj a JSONObject contain event payload information */ private void sendUpdate(JSONObject obj, boolean keepCallback) { sendUpdate(obj, keepCallback, 
PluginResult.Status.OK); } /** * Create a new plugin result and send it back to JavaScript * * @param obj a JSONObject contain event payload information * @param status the status code to return to the JavaScript environment */ private void sendUpdate(JSONObject obj, boolean keepCallback, PluginResult.Status status) { if (callbackContext != null) { PluginResult result = new PluginResult(status, obj); result.setKeepCallback(keepCallback); callbackContext.sendPluginResult(result); if (!keepCallback) { callbackContext = null; } } } /** * Create a new error plugin result with the exception and send it back to Javascript * * @param url Url that cause the exception * @param e The exception */ private void sendException(String url, Exception e) { LOG.e(LOG_TAG, "Error dialing " + url + ": " + e.toString()); try { JSONObject obj = new JSONObject(); obj.put("type", LOAD_ERROR_EVENT); obj.put("url", url); obj.put("code", 500); obj.put("message", e.getMessage()); sendUpdate(obj, true, PluginResult.Status.ERROR); } catch(JSONException ex) { LOG.e(LOG_TAG, ex.toString()); } } /** * The webview client receives notifications about appView */ public class InAppBrowserClient extends WebViewClient { EditText edittext; CordovaWebView webView; /** * Constructor. * * @param webView * @param mEditText */ public InAppBrowserClient(CordovaWebView webView, EditText mEditText) { this.webView = webView; this.edittext = mEditText; } /** * Notify the host application that a page has started loading. * * @param view The webview initiating the callback. * @param url The url of the page. 
*/ @Override public void onPageStarted(WebView view, String url, Bitmap favicon) { super.onPageStarted(view, url, favicon); String newloc = ""; if (url.startsWith("http:") || url.startsWith("https:") || url.startsWith("file:")) { newloc = url; } // If dialing phone (tel:5551212) else if (url.startsWith(WebView.SCHEME_TEL)) { try { Intent intent = new Intent(Intent.ACTION_DIAL); intent.setData(Uri.parse(url)); cordova.getActivity().startActivity(intent); } catch (android.content.ActivityNotFoundException e) { sendException(url, e); } } else if (url.startsWith("geo:") || url.startsWith(WebView.SCHEME_MAILTO) || url.startsWith("market:")) { try { Intent intent = new Intent(Intent.ACTION_VIEW); intent.setData(Uri.parse(url)); cordova.getActivity().startActivity(intent); } catch (android.content.ActivityNotFoundException e) { sendException(url, e); } } // If sms:5551212?body=This is the message else if (url.startsWith("sms:")) { try { Intent intent = new Intent(Intent.ACTION_VIEW); // Get address String address = null; int parmIndex = url.indexOf('?'); if (parmIndex == -1) { address = url.substring(4); } else { address = url.substring(4, parmIndex); // If body, then set sms body Uri uri = Uri.parse(url); String query = uri.getQuery(); if (query != null) { if (query.startsWith("body=")) { intent.putExtra("sms_body", query.substring(5)); } } } intent.setData(Uri.parse("sms:" + address)); intent.putExtra("address", address); intent.setType("vnd.android-dir/mms-sms"); cordova.getActivity().startActivity(intent); } catch (android.content.ActivityNotFoundException e) { sendException(url, e); } } else { newloc = "http://" + url; } if (!newloc.equals(edittext.getText().toString())) { edittext.setText(newloc); } try { JSONObject obj = new JSONObject(); obj.put("type", LOAD_START_EVENT); obj.put("url", newloc); sendUpdate(obj, true); } catch (JSONException ex) { Log.d(LOG_TAG, "Should never happen"); } } public void onPageFinished(WebView view, String url) { 
super.onPageFinished(view, url); try { JSONObject obj = new JSONObject(); obj.put("type", LOAD_STOP_EVENT); obj.put("url", url); sendUpdate(obj, true); } catch (JSONException ex) { Log.d(LOG_TAG, "Should never happen"); } } public void onReceivedError(WebView view, int errorCode, String description, String failingUrl) { super.onReceivedError(view, errorCode, description, failingUrl); try { JSONObject obj = new JSONObject(); obj.put("type", LOAD_ERROR_EVENT); obj.put("url", failingUrl); obj.put("code", errorCode); obj.put("message", description); sendUpdate(obj, true, PluginResult.Status.ERROR); } catch (JSONException ex) { Log.d(LOG_TAG, "Should never happen"); } } /** * On received http auth request. */ @Override public void onReceivedHttpAuthRequest(WebView view, HttpAuthHandler handler, String host, String realm) { // Check if there is some plugin which can resolve this auth challenge PluginManager pluginManager = null; try { Method gpm = webView.getClass().getMethod("getPluginManager"); pluginManager = (PluginManager)gpm.invoke(webView); } catch (NoSuchMethodException e) { } catch (IllegalAccessException e) { } catch (InvocationTargetException e) { } if (pluginManager == null) { try { Field pmf = webView.getClass().getField("pluginManager"); pluginManager = (PluginManager)pmf.get(webView); } catch (NoSuchFieldException e) { } catch (IllegalAccessException e) { } } if (pluginManager != null && pluginManager.onReceivedHttpAuthRequest(webView, new CordovaHttpAuthHandler(handler), host, realm)) { return; } // By default handle 401 like we'd normally do! super.onReceivedHttpAuthRequest(view, handler, host, realm); } } }
/*
 * Copyright (c) 2008-2014 Maxifier Ltd. All Rights Reserved.
 */
package com.maxifier.mxcache.provider;

import com.maxifier.mxcache.Strategy;
import com.maxifier.mxcache.caches.*;
import com.maxifier.mxcache.config.MxCacheConfigProviderImpl;
import com.maxifier.mxcache.config.RuleUTest;
import org.testng.annotations.Test;

import java.util.Arrays;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;

/**
 * Unit tests for {@code CacheDescriptor}: cache/calculatable interface selection,
 * strategy-property resolution from rules, method annotations, defaults, rule
 * overriding and "important" rules.
 *
 * @author Alexander Kochurov (alexander.kochurov@maxifier.com)
 */
@Test
public class CacheDescriptorUTest {
    @SuppressWarnings({"UnusedDeclaration"})
    @Retention(RetentionPolicy.RUNTIME)
    public @interface TestAnnotation {
        String p1();

        int p2();

        String[] p3();
    }

    // Properties p1-p3 are also readable from @TestAnnotation on the cached method.
    private static final StrategyProperty<String> P_1 = StrategyProperty.create("p1", String.class, "default", TestAnnotation.class, "p1");
    private static final StrategyProperty<Integer> P_2 = StrategyProperty.create("p2", Integer.class, -1, TestAnnotation.class, "p2");
    private static final StrategyProperty<String[]> P_3 = StrategyProperty.create("p3", String[].class, new String[]{"default"}, TestAnnotation.class, "p3");
    // Properties p4-p10 exist only in rules; the second argument is the default value.
    private static final StrategyProperty<Double> P_4 = StrategyProperty.create("p4", Double.class, 2.0);
    private static final StrategyProperty<Boolean> P_5 = StrategyProperty.create("p5", Boolean.class, false);
    private static final StrategyProperty<Class> P_6 = StrategyProperty.create("p6", Class.class, Object.class);
    private static final StrategyProperty<Integer> P_7 = StrategyProperty.create("p7", Integer.class, 3);
    private static final StrategyProperty<RetentionPolicy> P_8 = StrategyProperty.create("p8", RetentionPolicy.class, RetentionPolicy.RUNTIME);
    private static final StrategyProperty<Long> P_9 = StrategyProperty.create("p9", Long.class, -1L);
    private static final StrategyProperty<Float> P_10 = StrategyProperty.create("p10", Float.class, 0.3f);

    private static final String EMPTY_RULE = "<rule></rule>";

    private static final String RULE_IMPORTANT =
            "<rule important=\"true\">" +
            "   <property name=\"p1\" value=\"v1\" />" +
            "   <property name=\"p3\" value=\"scalar\" />" +
            "   <strategy>com.maxifier.mxcache.config.RuleUTest$ImportantStrategy</strategy>" +
            "</rule>";

    private static final String RULE_1 =
            "<rule>" +
            "   <property name=\"p1\" value=\"v1\" />" +
            "   <property name=\"p2\" value=\"7\" />" +
            "   <property name=\"p3\" value=\"scalar\" />" +
            "   <property name=\"p5\" value=\"true\" />" +
            "   <property name=\"p6\" value=\"java.lang.String\" />" +
            "   <property name=\"p8\" value=\"CLASS\" />" +
            "   <property name=\"p9\" value=\"4\" />" +
            "   <strategy>com.maxifier.mxcache.config.RuleUTest$SomeStrategy</strategy>" +
            "</rule>";

    // RULE_2 deliberately contains values that cannot be converted (bad class name,
    // non-numeric int, vector where a scalar enum is expected).
    private static final String RULE_2 =
            "<rule>" +
            "   <property name=\"p2\" value=\"8\" />" +
            "   <property name=\"p3\">" +
            "      <value>e1</value>" +
            "      <value>e2</value>" +
            "      <value>e3</value>" +
            "   </property>" +
            "   <property name=\"p4\" value=\"3\" />" +
            "   <property name=\"p6\" value=\"no.such.class\" />" +
            "   <property name=\"p7\" value=\"this is not number\" />" +
            "   <property name=\"p8\">" +
            "      <value>81</value>" +
            "      <value>82</value>" +
            "   </property>" +
            "   <strategy>com.maxifier.mxcache.config.RuleUTest$SomeStrategy</strategy>" +
            "</rule>";

    private static final String RULE_3 =
            "<rule>" +
            "   <property name=\"p8\" value=\"this is not enum constant\" />" +
            "</rule>";

    /**
     * Builds the descriptor used by every test; only the key type, the annotated
     * method name and the rule sources vary between tests.
     *
     * NOTE(review): assumes {@code loadRule} is varargs — the original code calls it
     * with both one and two rule strings; verify against MxCacheConfigProviderImpl.
     */
    private static CacheDescriptor newDescriptor(Class<?> keyType, String methodName, String... rules) {
        return new CacheDescriptor(
                CacheDescriptorUTest.class, 0, keyType, Void.class, new Calculable() {},
                methodName, "()V", null, null, null,
                MxCacheConfigProviderImpl.loadRule(rules), null);
    }

    public void testCacheInterface() throws Exception {
        // No key type -> plain ObjectCache
        CacheDescriptor descriptor = newDescriptor(null, "testProperties", RULE_3);
        assert descriptor.getCacheInterface() == ObjectCache.class;
        assert descriptor.getCalculatableInterface() == ObjectCalculatable.class;

        // Object key type -> key/value cache interfaces
        CacheDescriptor descriptor2 = newDescriptor(String.class, "testProperties", RULE_3);
        assert descriptor2.getCacheInterface() == ObjectObjectCache.class;
        assert descriptor2.getCalculatableInterface() == ObjectObjectCalculatable.class;
    }

    public void testProperties() throws Exception {
        CacheDescriptor descriptor = newDescriptor(null, "testProperties", RULE_1);
        assert descriptor.getProperty(P_1).equals("v1");
        assert descriptor.getProperty(P_2).equals(7);
        assert descriptor.getProperty(P_4).equals(2.0);
        assert descriptor.getProperty(P_5);
        assert descriptor.getProperty(P_6) == String.class;
        assert descriptor.getProperty(P_8) == RetentionPolicy.CLASS;
        assert descriptor.getProperty(P_9).equals(4L);
        assert descriptor.getProperty(P_10).equals(0.3f);
        assert Arrays.equals(descriptor.getProperty(P_3), new String[]{"scalar"});
    }

    @Test(expectedExceptions = PropertyConvertationException.class)
    public void testInvalidClassProperty() throws Exception {
        CacheDescriptor descriptor = newDescriptor(null, "testProperties", RULE_2);
        descriptor.getProperty(P_6);
    }

    @Test(expectedExceptions = PropertyConvertationException.class)
    public void testInvalidIntProperty() throws Exception {
        CacheDescriptor descriptor = newDescriptor(null, "testProperties", RULE_2);
        descriptor.getProperty(P_7);
    }

    @Test(expectedExceptions = PropertyConvertationException.class)
    public void testInvalidPropertyVectorForScalar() throws Exception {
        CacheDescriptor descriptor = newDescriptor(null, "testProperties", RULE_2);
        descriptor.getProperty(P_8);
    }

    @Test(expectedExceptions = PropertyConvertationException.class)
    public void testInvalidEnumProperty() throws Exception {
        CacheDescriptor descriptor = newDescriptor(null, "testProperties", RULE_3);
        descriptor.getProperty(P_8);
    }

    public void testOverride() throws Exception {
        // RULE_2 values win over RULE_1 where both define a property.
        CacheDescriptor descriptor = newDescriptor(null, "testOverride", RULE_1, RULE_2);
        assert descriptor.getProperty(P_1).equals("v1");
        assert descriptor.getProperty(P_2).equals(8);
        assert descriptor.getProperty(P_4).equals(3.0);
        assert Arrays.equals(descriptor.getProperty(P_3), new String[]{"e1", "e2", "e3"});
    }

    @TestAnnotation(p1 = "a", p2 = 3, p3 = {"e1", "e5"})
    public void testAnnotation() throws Exception {
        // Annotation values on the method take precedence over rule values.
        CacheDescriptor descriptor = newDescriptor(null, "testAnnotation", RULE_1, RULE_2);
        assert descriptor.getProperty(P_1).equals("a");
        assert descriptor.getProperty(P_2).equals(3);
        assert Arrays.equals(descriptor.getProperty(P_3), new String[]{"e1", "e5"});
    }

    @TestAnnotation(p1 = "a", p2 = 3, p3 = {"e1", "e5"})
    @Strategy(RuleUTest.SomeStrategy.class)
    public void testImportantAnnotation() throws Exception {
        // An "important" rule overrides even annotation-provided values.
        CacheDescriptor descriptor = newDescriptor(null, "testImportantAnnotation", RULE_IMPORTANT, RULE_2);
        assert descriptor.getStrategyClass() == RuleUTest.ImportantStrategy.class;
        assert descriptor.getProperty(P_1).equals("v1");
        assert descriptor.getProperty(P_2).equals(3);
        assert Arrays.equals(descriptor.getProperty(P_3), new String[]{"scalar"});
    }

    public void testDefault() throws Exception {
        CacheDescriptor descriptor = newDescriptor(null, "testDefault", EMPTY_RULE);
        assert descriptor.getProperty(P_1).equals("default");
        assert descriptor.getProperty(P_2).equals(-1);
        assert descriptor.getProperty(P_4).equals(2.0);
        assert !descriptor.getProperty(P_5);
        assert descriptor.getProperty(P_6) == Object.class;
        assert descriptor.getProperty(P_7).equals(3);
        assert descriptor.getProperty(P_8) == RetentionPolicy.RUNTIME;
        assert Arrays.equals(descriptor.getProperty(P_3), new String[]{"default"});
    }
}
package org.stagemonitor.web.monitor.resteasy;

import static com.codahale.metrics.MetricRegistry.name;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.*;
import static org.mockito.Mockito.when;
import static org.stagemonitor.core.util.GraphiteSanitizer.sanitizeGraphiteMetricSegment;
import static org.stagemonitor.requestmonitor.BusinessTransactionNamingStrategy.METHOD_NAME_SPLIT_CAMEL_CASE;
import static org.stagemonitor.requestmonitor.BusinessTransactionNamingStrategy.CLASS_NAME_HASH_METHOD_NAME;
import static org.stagemonitor.requestmonitor.BusinessTransactionNamingStrategy.CLASS_NAME_DOT_METHOD_NAME;

import com.codahale.metrics.Metric;
import com.codahale.metrics.MetricFilter;
import com.codahale.metrics.MetricRegistry;
import org.jboss.resteasy.core.ResourceInvoker;
import org.jboss.resteasy.core.ResourceMethodInvoker;
import org.jboss.resteasy.mock.MockHttpRequest;
import org.jboss.resteasy.spi.HttpRequest;
import org.jboss.resteasy.spi.Registry;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.mockito.ArgumentMatcher;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.springframework.mock.web.MockFilterChain;
import org.springframework.mock.web.MockHttpServletRequest;
import org.springframework.mock.web.MockHttpServletResponse;
import org.stagemonitor.core.CorePlugin;
import org.stagemonitor.core.configuration.Configuration;
import org.stagemonitor.requestmonitor.RequestMonitor;
import org.stagemonitor.requestmonitor.RequestMonitorPlugin;
import org.stagemonitor.web.WebPlugin;
import org.stagemonitor.web.monitor.HttpRequestTrace;
import org.stagemonitor.web.monitor.filter.StatusExposingByteCountingServletResponse;
import javax.servlet.http.HttpServletRequest;
import java.io.IOException;
import java.lang.reflect.Method;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.regex.Pattern;

/**
 * Tests monitoring of Resteasy-dispatched HTTP requests: request naming per
 * BusinessTransactionNamingStrategy, timer registration, and the
 * monitor-only-Resteasy-requests switch. Parameterized over whether the
 * request name is determined by the instrumenter aspect or by the monitored
 * request itself.
 */
@RunWith(value = Parameterized.class)
public class ResteasyRequestMonitorTest {

    // A request that resolves to a Resteasy resource method...
    private MockHttpServletRequest resteasyServletRequest = new MockHttpServletRequest("GET", "/test/requestName");
    // ...and a static-resource request that no Resteasy resource handles.
    private MockHttpServletRequest nonResteasyServletRequest = new MockHttpServletRequest("GET", "/META-INF/resources/stagemonitor/static/jquery.js");
    private MockHttpRequest resteasyRequest;
    private Configuration configuration = mock(Configuration.class);
    private RequestMonitorPlugin requestMonitorPlugin = mock(RequestMonitorPlugin.class);
    private WebPlugin webPlugin = mock(WebPlugin.class);
    private CorePlugin corePlugin = mock(CorePlugin.class);
    private RequestMonitor requestMonitor;
    private MetricRegistry registry = new MetricRegistry();
    private final boolean useNameDeterminerAspect;
    private Registry getRequestNameRegistry;

    public ResteasyRequestMonitorTest(boolean useNameDeterminerAspect) {
        this.useNameDeterminerAspect = useNameDeterminerAspect;
    }

    // the purpose of this class is to obtain a instance to a Method,
    // because Method objects can't be mocked as they are final
    private static class TestResource {
        public void testGetRequestName() {
        }
    }

    // Run every test twice: with and without the name-determiner aspect.
    @Parameterized.Parameters
    public static Collection<Object[]> data() {
        Object[][] data = new Object[][]{{true}, {false}};
        return Arrays.asList(data);
    }

    @Before
    public void before() throws Exception {
        resteasyRequest = MockHttpRequest.create(resteasyServletRequest.getMethod(), resteasyServletRequest.getRequestURI());
        getRequestNameRegistry = createRegistry(resteasyRequest, TestResource.class.getMethod("testGetRequestName"));
        resteasyServletRequest.getServletContext().setAttribute(Registry.class.getName(), getRequestNameRegistry);
        nonResteasyServletRequest.getServletContext().setAttribute(Registry.class.getName(), getRequestNameRegistry);
        // Start each test with an empty metric registry.
        registry.removeMatching(new MetricFilter() {
            @Override
            public boolean matches(String name, Metric metric) {
                return true;
            }
        });
        when(configuration.getConfig(RequestMonitorPlugin.class)).thenReturn(requestMonitorPlugin);
        when(configuration.getConfig(WebPlugin.class)).thenReturn(webPlugin);
        when(configuration.getConfig(CorePlugin.class)).thenReturn(corePlugin);
        when(corePlugin.isStagemonitorActive()).thenReturn(true);
        when(requestMonitorPlugin.isCollectRequestStats()).thenReturn(true);
        when(requestMonitorPlugin.getBusinessTransactionNamingStrategy()).thenReturn(METHOD_NAME_SPLIT_CAMEL_CASE);
        when(webPlugin.getGroupUrls()).thenReturn(Collections.singletonMap(Pattern.compile("(.*).js$"), "*.js"));
        requestMonitor = new RequestMonitor(corePlugin, registry, requestMonitorPlugin);
        ResteasyRequestNameDeterminerInstrumenter.setWebPlugin(webPlugin);
        ResteasyRequestNameDeterminerInstrumenter.setRequestMonitorPlugin(requestMonitorPlugin);
    }

    /**
     * Builds a mock Resteasy Registry that resolves the given request (matched by
     * path + HTTP method) to an invoker for requestMappingMethod.
     */
    private Registry createRegistry(final MockHttpRequest request, Method requestMappingMethod) {
        ResourceInvoker invoker = mock(ResourceMethodInvoker.class);
        when(invoker.getMethod()).thenReturn(requestMappingMethod);
        ArgumentMatcher<HttpRequest> httpRequestMatcher = new ArgumentMatcher<HttpRequest>() {
            @Override
            public boolean matches(Object argument) {
                if (argument == null) {
                    return false;
                }
                if (!HttpRequest.class.isAssignableFrom(argument.getClass())) {
                    return false;
                }
                HttpRequest other = (HttpRequest) argument;
                return request.getUri().getPath().equals(other.getUri().getPath())
                        && request.getHttpMethod().equals(other.getHttpMethod());
            }
        };
        Registry registry = mock(Registry.class);
        when(registry.getResourceInvoker(argThat(httpRequestMatcher))).thenReturn(invoker);
        return registry;
    }

    @Test
    public void testRequestMonitorResteasyRequest() throws Exception {
        System.out.println("useNameDeterminerAspect=" + useNameDeterminerAspect);
        when(webPlugin.isMonitorOnlyResteasyRequests()).thenReturn(false);
        ResteasyMonitoredHttpRequest monitoredRequest = createResteasyMonitoredHttpRequest(resteasyServletRequest);
        registerAspect(monitoredRequest, getRequestNameRegistry.getResourceInvoker(resteasyRequest));

        final RequestMonitor.RequestInformation<HttpRequestTrace> requestInformation = requestMonitor.monitor(monitoredRequest);

        assertEquals(1, requestInformation.getRequestTimer().getCount());
        // METHOD_NAME_SPLIT_CAMEL_CASE: testGetRequestName -> "Test Get Request Name"
        assertEquals("Test-Get-Request-Name", requestInformation.getTimerName());
        assertEquals("Test Get Request Name", requestInformation.getRequestTrace().getName());
        assertEquals("/test/requestName", requestInformation.getRequestTrace().getUrl());
        assertEquals(Integer.valueOf(200), requestInformation.getRequestTrace().getStatusCode());
        assertEquals("GET", requestInformation.getRequestTrace().getMethod());
        Assert.assertNull(requestInformation.getExecutionResult());
        assertNotNull(registry.getTimers().get(name("request", "Test-Get-Request-Name", "server", "time", "total")));
        verify(monitoredRequest, times(1)).onPostExecute(anyRequestInformation());
        // When the aspect determines the name, getRequestName() is never consulted.
        verify(monitoredRequest, times(useNameDeterminerAspect ? 0 : 1)).getRequestName();
    }

    @Test
    public void testRequestMonitorResteasyRequestWithClassHashMethodNaming() throws Exception {
        System.out.println("useNameDeterminerAspect=" + useNameDeterminerAspect);
        when(webPlugin.isMonitorOnlyResteasyRequests()).thenReturn(false);
        when(requestMonitorPlugin.getBusinessTransactionNamingStrategy()).thenReturn(CLASS_NAME_HASH_METHOD_NAME);
        ResteasyMonitoredHttpRequest monitoredRequest = createResteasyMonitoredHttpRequest(resteasyServletRequest);
        registerAspect(monitoredRequest, getRequestNameRegistry.getResourceInvoker(resteasyRequest));

        final RequestMonitor.RequestInformation<HttpRequestTrace> requestInformation = requestMonitor.monitor(monitoredRequest);

        assertEquals(1, requestInformation.getRequestTimer().getCount());
        assertEquals("TestResource#testGetRequestName", requestInformation.getTimerName());
        assertEquals("TestResource#testGetRequestName", requestInformation.getRequestTrace().getName());
        assertEquals("/test/requestName", requestInformation.getRequestTrace().getUrl());
        assertEquals(Integer.valueOf(200), requestInformation.getRequestTrace().getStatusCode());
        assertEquals("GET", requestInformation.getRequestTrace().getMethod());
        Assert.assertNull(requestInformation.getExecutionResult());
        assertNotNull(registry.getTimers().get(name("request", "TestResource#testGetRequestName", "server", "time", "total")));
        verify(monitoredRequest, times(1)).onPostExecute(anyRequestInformation());
        verify(monitoredRequest, times(useNameDeterminerAspect ? 0 : 1)).getRequestName();
    }

    @Test
    public void testRequestMonitorResteasyRequestWithClassDotMethodNaming() throws Exception {
        System.out.println("useNameDeterminerAspect=" + useNameDeterminerAspect);
        when(webPlugin.isMonitorOnlyResteasyRequests()).thenReturn(false);
        when(requestMonitorPlugin.getBusinessTransactionNamingStrategy()).thenReturn(CLASS_NAME_DOT_METHOD_NAME);
        ResteasyMonitoredHttpRequest monitoredRequest = createResteasyMonitoredHttpRequest(resteasyServletRequest);
        registerAspect(monitoredRequest, getRequestNameRegistry.getResourceInvoker(resteasyRequest));

        final RequestMonitor.RequestInformation<HttpRequestTrace> requestInformation = requestMonitor.monitor(monitoredRequest);

        assertEquals(1, requestInformation.getRequestTimer().getCount());
        // Timer names replace '.' with ':' for graphite
        assertEquals("TestResource:testGetRequestName", requestInformation.getTimerName());
        assertEquals("TestResource.testGetRequestName", requestInformation.getRequestTrace().getName());
        assertEquals("/test/requestName", requestInformation.getRequestTrace().getUrl());
        assertEquals(Integer.valueOf(200), requestInformation.getRequestTrace().getStatusCode());
        assertEquals("GET", requestInformation.getRequestTrace().getMethod());
        Assert.assertNull(requestInformation.getExecutionResult());
        assertNotNull(registry.getTimers().get(name("request", "TestResource:testGetRequestName", "server", "time", "total")));
        verify(monitoredRequest, times(1)).onPostExecute(anyRequestInformation());
        verify(monitoredRequest, times(useNameDeterminerAspect ? 0 : 1)).getRequestName();
    }

    @Test
    public void testRequestMonitorNonResteasyRequestDoMonitor() throws Exception {
        when(webPlugin.isMonitorOnlyResteasyRequests()).thenReturn(false);
        ResteasyMonitoredHttpRequest monitoredRequest = createResteasyMonitoredHttpRequest(nonResteasyServletRequest);
        registerAspect(monitoredRequest, getRequestNameRegistry.getResourceInvoker(resteasyRequest));
        registerAspect(monitoredRequest, null);

        RequestMonitor.RequestInformation<HttpRequestTrace> requestInformation = requestMonitor.monitor(monitoredRequest);

        assertEquals(1, requestInformation.getRequestTimer().getCount());
        // Falls back to the grouped-URL name ("*.js" per webPlugin.getGroupUrls()).
        assertEquals("GET-*:js", requestInformation.getTimerName());
        assertEquals("GET *.js", requestInformation.getRequestTrace().getName());
        assertNotNull(registry.getTimers().get(name("request", "GET-*:js", "server", "time", "total")));
        verify(monitoredRequest, times(1)).onPostExecute(anyRequestInformation());
        verify(monitoredRequest, times(1)).getRequestName();
    }

    @Test
    public void testRequestMonitorNonResteasyRequestDontMonitor() throws Exception {
        when(webPlugin.isMonitorOnlyResteasyRequests()).thenReturn(true);
        ResteasyMonitoredHttpRequest monitoredRequest = createResteasyMonitoredHttpRequest(nonResteasyServletRequest);
        registerAspect(monitoredRequest, getRequestNameRegistry.getResourceInvoker(resteasyRequest));
        registerAspect(monitoredRequest, null);

        RequestMonitor.RequestInformation<HttpRequestTrace> requestInformation = requestMonitor.monitor(monitoredRequest);

        // Request is not named or timed when only Resteasy requests are monitored.
        assertEquals("", requestInformation.getRequestTrace().getName());
        assertNull(registry.getTimers().get(name("request", sanitizeGraphiteMetricSegment("GET *.js"), "server", "time", "total")));
        verify(monitoredRequest, never()).onPostExecute(anyRequestInformation());
        verify(monitoredRequest, times(useNameDeterminerAspect ?
0 : 1)).getRequestName(); } private void registerAspect(ResteasyMonitoredHttpRequest monitoredRequest, final ResourceInvoker invoker) throws Exception { if (useNameDeterminerAspect) { when(monitoredRequest.execute()).thenAnswer(new Answer<Object>() { @Override public Object answer(InvocationOnMock invocation) throws Throwable { ResteasyRequestNameDeterminerInstrumenter.setRequestNameByInvoker(invoker); return null; } }); } } private RequestMonitor.RequestInformation<HttpRequestTrace> anyRequestInformation() { return any(); } private ResteasyMonitoredHttpRequest createResteasyMonitoredHttpRequest(HttpServletRequest request) throws IOException { final StatusExposingByteCountingServletResponse response = new StatusExposingByteCountingServletResponse(new MockHttpServletResponse()); return Mockito.spy(new ResteasyMonitoredHttpRequest(request, response, new MockFilterChain(), configuration)); } }
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.index.mapper.core; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.Terms; import org.apache.lucene.search.LegacyNumericRangeQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.LegacyNumericUtils; import org.elasticsearch.Version; import org.elasticsearch.action.fieldstats.FieldStats; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType; import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import 
org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.ParseContext;

import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeShortValue;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseNumberField;

/**
 * Field mapper for the legacy (pre-5.0) {@code short} numeric type. Values are
 * indexed with Lucene's legacy numeric encoding widened to ints (hence the
 * int-based prefix coding and range queries below) but exposed as shorts.
 */
public class LegacyShortFieldMapper extends LegacyNumberFieldMapper {

    public static final String CONTENT_TYPE = "short";
    public static final int DEFAULT_PRECISION_STEP = 8;

    public static class Defaults extends LegacyNumberFieldMapper.Defaults {
        public static final MappedFieldType FIELD_TYPE = new ShortFieldType();

        static {
            FIELD_TYPE.freeze();
        }
    }

    public static class Builder extends LegacyNumberFieldMapper.Builder<Builder, LegacyShortFieldMapper> {

        public Builder(String name) {
            super(name, Defaults.FIELD_TYPE, DEFAULT_PRECISION_STEP);
            builder = this;
        }

        @Override
        public LegacyShortFieldMapper build(BuilderContext context) {
            // Legacy numeric mappings are only valid on indices created before 5.0.0-alpha2.
            if (context.indexCreatedVersion().onOrAfter(Version.V_5_0_0_alpha2)) {
                throw new IllegalStateException("Cannot use legacy numeric types after 5.0");
            }
            setupFieldType(context);
            LegacyShortFieldMapper fieldMapper = new LegacyShortFieldMapper(name, fieldType, defaultFieldType,
                    ignoreMalformed(context), coerce(context),
                    context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
            return (LegacyShortFieldMapper) fieldMapper.includeInAll(includeInAll);
        }

        @Override
        protected int maxPrecisionStep() {
            // Shorts are encoded as 32-bit ints, so the int precision-step cap applies.
            return 32;
        }
    }

    public static class TypeParser implements Mapper.TypeParser {
        @Override
        public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
            LegacyShortFieldMapper.Builder builder = new LegacyShortFieldMapper.Builder(name);
            parseNumberField(builder, name, node, parserContext);
            // Properties consumed here are removed from the node map via iterator.remove().
            for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
                Map.Entry<String, Object> entry = iterator.next();
                String propName = entry.getKey();
                Object propNode = entry.getValue();
                if (propName.equals("null_value")) {
                    if (propNode == null) {
                        throw new MapperParsingException("Property [null_value] cannot be null.");
                    }
                    builder.nullValue(nodeShortValue(propNode));
                    iterator.remove();
                }
            }
            return builder;
        }
    }

    static final class ShortFieldType extends NumberFieldType {

        public ShortFieldType() {
            // Shorts are indexed using the legacy INT numeric type.
            super(LegacyNumericType.INT);
        }

        protected ShortFieldType(ShortFieldType ref) {
            super(ref);
        }

        @Override
        public NumberFieldType clone() {
            return new ShortFieldType(this);
        }

        @Override
        public String typeName() {
            return CONTENT_TYPE;
        }

        @Override
        public Short nullValue() {
            return (Short)super.nullValue();
        }

        @Override
        public Short valueForSearch(Object value) {
            if (value == null) {
                return null;
            }
            return ((Number) value).shortValue();
        }

        @Override
        public BytesRef indexedValueForSearch(Object value) {
            BytesRefBuilder bytesRef = new BytesRefBuilder();
            LegacyNumericUtils.intToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match
            return bytesRef.get();
        }

        @Override
        public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
            // null bounds mean an open-ended range; values are widened to int for the legacy query.
            return LegacyNumericRangeQuery.newIntRange(name(), numericPrecisionStep(),
                lowerTerm == null ? null : (int)parseValue(lowerTerm),
                upperTerm == null ? null : (int)parseValue(upperTerm),
                includeLower, includeUpper);
        }

        @Override
        public FieldStats.Long stats(IndexReader reader) throws IOException {
            int maxDoc = reader.maxDoc();
            Terms terms = org.apache.lucene.index.MultiFields.getTerms(reader, name());
            if (terms == null) {
                // Field has no indexed terms in this reader: no stats available.
                return null;
            }
            long minValue = LegacyNumericUtils.getMinInt(terms);
            long maxValue = LegacyNumericUtils.getMaxInt(terms);
            return new FieldStats.Long(
                maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(),
                isSearchable(), isAggregatable(), minValue, maxValue);
        }

        @Override
        public IndexFieldData.Builder fielddataBuilder() {
            failIfNoDocValues();
            return new DocValuesIndexFieldData.Builder().numericType(NumericType.SHORT);
        }
    }

    protected LegacyShortFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType,
                                     Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce,
                                     Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
        super(simpleName, fieldType, defaultFieldType, ignoreMalformed, coerce, indexSettings, multiFields, copyTo);
    }

    @Override
    public ShortFieldType fieldType() {
        return (ShortFieldType) super.fieldType();
    }

    // Accepts Numbers, BytesRefs holding a UTF-8 numeric string, or any object
    // whose toString() parses as a short.
    private static short parseValue(Object value) {
        if (value instanceof Number) {
            return ((Number) value).shortValue();
        }
        if (value instanceof BytesRef) {
            return Short.parseShort(((BytesRef) value).utf8ToString());
        }
        return Short.parseShort(value.toString());
    }

    @Override
    protected boolean customBoost() {
        return true;
    }

    /**
     * Parses a single short value from either an externally-set value or the
     * current XContent token, applying null_value fallbacks, include_in_all,
     * and the legacy pre-5.0 {@code {"value": ..., "boost": ...}} object form.
     */
    @Override
    protected void innerParseCreateField(ParseContext context, List<Field> fields) throws IOException {
        short value;
        float boost = fieldType().boost();
        if (context.externalValueSet()) {
            Object externalValue = context.externalValue();
            if (externalValue == null) {
                // No value and no configured null_value: nothing to index.
                if (fieldType().nullValue() == null) {
                    return;
                }
                value = fieldType().nullValue();
            } else if (externalValue instanceof String) {
                String sExternalValue = (String) externalValue;
                if (sExternalValue.length() == 0) {
                    // Empty string is treated like null.
                    if (fieldType().nullValue() == null) {
                        return;
                    }
                    value = fieldType().nullValue();
                } else {
                    value = Short.parseShort(sExternalValue);
                }
            } else {
                value = ((Number) externalValue).shortValue();
            }
            if (context.includeInAll(includeInAll, this)) {
                context.allEntries().addText(fieldType().name(), Short.toString(value), boost);
            }
        } else {
            XContentParser parser = context.parser();
            if (parser.currentToken() == XContentParser.Token.VALUE_NULL
                    || (parser.currentToken() == XContentParser.Token.VALUE_STRING && parser.textLength() == 0)) {
                // Explicit null (or empty string) in the document: fall back to null_value.
                if (fieldType().nullValue() == null) {
                    return;
                }
                value = fieldType().nullValue();
                if (fieldType().nullValueAsString() != null && (context.includeInAll(includeInAll, this))) {
                    context.allEntries().addText(fieldType().name(), fieldType().nullValueAsString(), boost);
                }
            } else if (parser.currentToken() == XContentParser.Token.START_OBJECT
                    && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) {
                // Legacy object form, only accepted for pre-5.0 indices.
                XContentParser.Token token;
                String currentFieldName = null;
                Short objValue = fieldType().nullValue();
                while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                    if (token == XContentParser.Token.FIELD_NAME) {
                        currentFieldName = parser.currentName();
                    } else {
                        if ("value".equals(currentFieldName) || "_value".equals(currentFieldName)) {
                            if (parser.currentToken() != XContentParser.Token.VALUE_NULL) {
                                objValue = parser.shortValue(coerce.value());
                            }
                        } else if ("boost".equals(currentFieldName) || "_boost".equals(currentFieldName)) {
                            boost = parser.floatValue();
                        } else {
                            throw new IllegalArgumentException("unknown property [" + currentFieldName + "]");
                        }
                    }
                }
                if (objValue == null) {
                    // no value
                    return;
                }
                value = objValue;
            } else {
                value = parser.shortValue(coerce.value());
                if (context.includeInAll(includeInAll, this)) {
                    context.allEntries().addText(fieldType().name(), parser.text(), boost);
                }
            }
        }
        if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
            CustomShortNumericField field = new CustomShortNumericField(value, fieldType());
            // Index-time boosts were removed in 5.0; only honor them on older indices.
            if (boost != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) {
                field.setBoost(boost);
            }
            fields.add(field);
        }
        if (fieldType().hasDocValues()) {
            addDocValue(context, fields, value);
        }
    }

    @Override
    protected String contentType() {
        return CONTENT_TYPE;
    }

    /**
     * Serializes mapper-specific settings (precision_step, null_value,
     * include_in_all) on top of the common numeric-field output.
     */
    @Override
    protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
        super.doXContentBody(builder, includeDefaults, params);
        if (includeDefaults || fieldType().numericPrecisionStep() != DEFAULT_PRECISION_STEP) {
            builder.field("precision_step", fieldType().numericPrecisionStep());
        }
        if (includeDefaults || fieldType().nullValue() != null) {
            builder.field("null_value", fieldType().nullValue());
        }
        if (includeInAll != null) {
            builder.field("include_in_all", includeInAll);
        } else if (includeDefaults) {
            builder.field("include_in_all", false);
        }
    }

    /**
     * Lucene field that emits the short value through the legacy numeric
     * token stream (as an int), matching the INT encoding used at query time.
     */
    public static class CustomShortNumericField extends CustomNumericField {

        private final short number;

        public CustomShortNumericField(short number, NumberFieldType fieldType) {
            super(number, fieldType);
            this.number = number;
        }

        @Override
        public TokenStream tokenStream(Analyzer analyzer, TokenStream previous) {
            if (fieldType().indexOptions() != IndexOptions.NONE) {
                return getCachedStream().setIntValue(number);
            }
            return null;
        }

        @Override
        public String numericAsString() {
            return Short.toString(number);
        }
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.mapreduce.v2.app.rm; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.concurrent.BlockingQueue; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.atomic.AtomicBoolean; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapreduce.JobCounter; import org.apache.hadoop.mapreduce.MRJobConfig; import org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent; import org.apache.hadoop.mapreduce.jobhistory.NormalizedResourceEvent; import org.apache.hadoop.mapreduce.v2.api.records.JobId; import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId; import org.apache.hadoop.mapreduce.v2.api.records.TaskType; import 
org.apache.hadoop.mapreduce.v2.app.AppContext; import org.apache.hadoop.mapreduce.v2.app.client.ClientService; import org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent; import org.apache.hadoop.mapreduce.v2.app.job.event.JobDiagnosticsUpdateEvent; import org.apache.hadoop.mapreduce.v2.app.job.event.JobEvent; import org.apache.hadoop.mapreduce.v2.app.job.event.JobEventType; import org.apache.hadoop.mapreduce.v2.app.job.event.JobUpdatedNodesEvent; import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptContainerAssignedEvent; import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptDiagnosticsUpdateEvent; import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent; import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEventType; import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptKillEvent; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.util.StringInterner; import org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse; import org.apache.hadoop.yarn.api.records.Container; import org.apache.hadoop.yarn.api.records.ContainerExitStatus; import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.ContainerStatus; import org.apache.hadoop.yarn.api.records.NMToken; import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.api.records.NodeReport; import org.apache.hadoop.yarn.api.records.NodeState; import org.apache.hadoop.yarn.api.records.Priority; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.api.records.Token; import org.apache.hadoop.yarn.client.ClientRMProxy; import org.apache.hadoop.yarn.client.api.NMTokenCache; import org.apache.hadoop.yarn.exceptions.ApplicationAttemptNotFoundException; import org.apache.hadoop.yarn.exceptions.ApplicationMasterNotRegisteredException; import org.apache.hadoop.yarn.exceptions.YarnRuntimeException; import 
org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.security.AMRMTokenIdentifier; import org.apache.hadoop.yarn.util.Clock; import org.apache.hadoop.yarn.util.RackResolver; import org.apache.hadoop.yarn.util.resource.Resources; import com.google.common.annotations.VisibleForTesting; /** * Allocates the container from the ResourceManager scheduler. */ public class RMContainerAllocator extends RMContainerRequestor implements ContainerAllocator { static final Log LOG = LogFactory.getLog(RMContainerAllocator.class); public static final float DEFAULT_COMPLETED_MAPS_PERCENT_FOR_REDUCE_SLOWSTART = 0.05f; static final Priority PRIORITY_FAST_FAIL_MAP; static final Priority PRIORITY_REDUCE; static final Priority PRIORITY_MAP; @VisibleForTesting public static final String RAMPDOWN_DIAGNOSTIC = "Reducer preempted " + "to make room for pending map attempts"; private Thread eventHandlingThread; private final AtomicBoolean stopped; static { PRIORITY_FAST_FAIL_MAP = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(Priority.class); PRIORITY_FAST_FAIL_MAP.setPriority(5); PRIORITY_REDUCE = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(Priority.class); PRIORITY_REDUCE.setPriority(10); PRIORITY_MAP = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(Priority.class); PRIORITY_MAP.setPriority(20); } /* Vocabulary Used: pending -> requests which are NOT yet sent to RM scheduled -> requests which are sent to RM but not yet assigned assigned -> requests which are assigned to a container completed -> request corresponding to which container has completed Lifecycle of map scheduled->assigned->completed Lifecycle of reduce pending->scheduled->assigned->completed Maps are scheduled as soon as their requests are received. Reduces are added to the pending and are ramped up (added to scheduled) based on completed maps and current availability in the cluster. 
*/ //reduces which are not yet scheduled private final LinkedList<ContainerRequest> pendingReduces = new LinkedList<ContainerRequest>(); //holds information about the assigned containers to task attempts private final AssignedRequests assignedRequests = new AssignedRequests(); //holds scheduled requests to be fulfilled by RM private final ScheduledRequests scheduledRequests = new ScheduledRequests(); private int containersAllocated = 0; private int containersReleased = 0; private int hostLocalAssigned = 0; private int rackLocalAssigned = 0; private int lastCompletedTasks = 0; private boolean recalculateReduceSchedule = false; private Resource mapResourceRequest = Resources.none(); private Resource reduceResourceRequest = Resources.none(); private boolean reduceStarted = false; private float maxReduceRampupLimit = 0; private float maxReducePreemptionLimit = 0; /** * after this threshold, if the container request is not allocated, it is * considered delayed. */ private long allocationDelayThresholdMs = 0; private float reduceSlowStart = 0; private int maxRunningMaps = 0; private int maxRunningReduces = 0; private long retryInterval; private long retrystartTime; private Clock clock; // private ReduceScheduler reduceScheduler = new DefaultReduceScheduler("ReduceScheduelr"); @VisibleForTesting protected BlockingQueue<ContainerAllocatorEvent> eventQueue = new LinkedBlockingQueue<ContainerAllocatorEvent>(); private ScheduleStats scheduleStats = new ScheduleStats(); public RMContainerAllocator(ClientService clientService, AppContext context) { super(clientService, context); this.stopped = new AtomicBoolean(false); this.clock = context.getClock(); } @Override protected void serviceInit(Configuration conf) throws Exception { super.serviceInit(conf); //this.reduceScheduler.serviceInit(conf); reduceSlowStart = conf.getFloat( MRJobConfig.COMPLETED_MAPS_FOR_REDUCE_SLOWSTART, DEFAULT_COMPLETED_MAPS_PERCENT_FOR_REDUCE_SLOWSTART); maxReduceRampupLimit = conf.getFloat( 
MRJobConfig.MR_AM_JOB_REDUCE_RAMPUP_UP_LIMIT, MRJobConfig.DEFAULT_MR_AM_JOB_REDUCE_RAMP_UP_LIMIT); maxReducePreemptionLimit = conf.getFloat( MRJobConfig.MR_AM_JOB_REDUCE_PREEMPTION_LIMIT, MRJobConfig.DEFAULT_MR_AM_JOB_REDUCE_PREEMPTION_LIMIT); allocationDelayThresholdMs = conf.getInt( MRJobConfig.MR_JOB_REDUCER_PREEMPT_DELAY_SEC, MRJobConfig.DEFAULT_MR_JOB_REDUCER_PREEMPT_DELAY_SEC) * 1000;//sec -> ms maxRunningMaps = conf.getInt(MRJobConfig.JOB_RUNNING_MAP_LIMIT, MRJobConfig.DEFAULT_JOB_RUNNING_MAP_LIMIT); maxRunningReduces = conf.getInt(MRJobConfig.JOB_RUNNING_REDUCE_LIMIT, MRJobConfig.DEFAULT_JOB_RUNNING_REDUCE_LIMIT); RackResolver.init(conf); retryInterval = getConfig().getLong(MRJobConfig.MR_AM_TO_RM_WAIT_INTERVAL_MS, MRJobConfig.DEFAULT_MR_AM_TO_RM_WAIT_INTERVAL_MS); // Init startTime to current time. If all goes well, it will be reset after // first attempt to contact RM. retrystartTime = System.currentTimeMillis(); } @Override protected void serviceStart() throws Exception { //this.reduceScheduler.serviceStart(); this.eventHandlingThread = new Thread() { @SuppressWarnings("unchecked") @Override public void run() { ContainerAllocatorEvent event; while (!stopped.get() && !Thread.currentThread().isInterrupted()) { try { event = RMContainerAllocator.this.eventQueue.take(); } catch (InterruptedException e) { if (!stopped.get()) { LOG.error("Returning, interrupted : " + e); } return; } try { handleEvent(event); } catch (Throwable t) { LOG.error("Error in handling event type " + event.getType() + " to the ContainreAllocator", t); // Kill the AM eventHandler.handle(new JobEvent(getJob().getID(), JobEventType.INTERNAL_ERROR)); return; } } } }; this.eventHandlingThread.start(); super.serviceStart(); } @Override protected synchronized void heartbeat() throws Exception { scheduleStats.updateAndLogIfChanged("Before Scheduling: "); List<Container> allocatedContainers = getResources(); if (allocatedContainers != null && allocatedContainers.size() > 0) { 
scheduledRequests.assign(allocatedContainers); } int completedMaps = getJob().getCompletedMaps(); int completedTasks = completedMaps + getJob().getCompletedReduces(); if ((lastCompletedTasks != completedTasks) || (scheduledRequests.maps.size() > 0)) { lastCompletedTasks = completedTasks; recalculateReduceSchedule = true; } if (recalculateReduceSchedule) { preemptReducesIfNeeded(); scheduleReduces( getJob().getTotalMaps(), completedMaps, scheduledRequests.maps.size(), scheduledRequests.reduces.size(), assignedRequests.maps.size(), assignedRequests.reduces.size(), mapResourceRequest, reduceResourceRequest, pendingReduces.size(), maxReduceRampupLimit, reduceSlowStart); recalculateReduceSchedule = false; } scheduleStats.updateAndLogIfChanged("After Scheduling: "); } @Override protected void serviceStop() throws Exception { if (stopped.getAndSet(true)) { // return if already stopped return; } // this.reduceScheduler.serviceStop(); if (eventHandlingThread != null) { eventHandlingThread.interrupt(); } super.serviceStop(); scheduleStats.log("Final Stats: "); } @Private @VisibleForTesting AssignedRequests getAssignedRequests() { return assignedRequests; } @Private @VisibleForTesting ScheduledRequests getScheduledRequests() { return scheduledRequests; } public boolean getIsReduceStarted() { return reduceStarted; } public void setIsReduceStarted(boolean reduceStarted) { this.reduceStarted = reduceStarted; } @Override public void handle(ContainerAllocatorEvent event) { int qSize = eventQueue.size(); if (qSize != 0 && qSize % 1000 == 0) { LOG.info("Size of event-queue in RMContainerAllocator is " + qSize); } int remCapacity = eventQueue.remainingCapacity(); if (remCapacity < 1000) { LOG.warn("Very low remaining capacity in the event-queue " + "of RMContainerAllocator: " + remCapacity); } try { eventQueue.put(event); } catch (InterruptedException e) { throw new YarnRuntimeException(e); } } @SuppressWarnings({ "unchecked" }) protected synchronized void 
handleEvent(ContainerAllocatorEvent event) { recalculateReduceSchedule = true; if (event.getType() == ContainerAllocator.EventType.CONTAINER_REQ) { ContainerRequestEvent reqEvent = (ContainerRequestEvent) event; JobId jobId = getJob().getID(); Resource supportedMaxContainerCapability = getMaxContainerCapability(); if (reqEvent.getAttemptID().getTaskId().getTaskType().equals(TaskType.MAP)) { if (mapResourceRequest.equals(Resources.none())) { mapResourceRequest = reqEvent.getCapability(); eventHandler.handle(new JobHistoryEvent(jobId, new NormalizedResourceEvent( org.apache.hadoop.mapreduce.TaskType.MAP, mapResourceRequest .getMemory()))); LOG.info("mapResourceRequest:" + mapResourceRequest); if (mapResourceRequest.getMemory() > supportedMaxContainerCapability .getMemory() || mapResourceRequest.getVirtualCores() > supportedMaxContainerCapability .getVirtualCores()) { String diagMsg = "MAP capability required is more than the supported " + "max container capability in the cluster. Killing the Job. 
mapResourceRequest: " + mapResourceRequest + " maxContainerCapability:" + supportedMaxContainerCapability; LOG.info(diagMsg); eventHandler.handle(new JobDiagnosticsUpdateEvent(jobId, diagMsg)); eventHandler.handle(new JobEvent(jobId, JobEventType.JOB_KILL)); } } // set the resources reqEvent.getCapability().setMemory(mapResourceRequest.getMemory()); reqEvent.getCapability().setVirtualCores( mapResourceRequest.getVirtualCores()); scheduledRequests.addMap(reqEvent);//maps are immediately scheduled } else { if (reduceResourceRequest.equals(Resources.none())) { reduceResourceRequest = reqEvent.getCapability(); eventHandler.handle(new JobHistoryEvent(jobId, new NormalizedResourceEvent( org.apache.hadoop.mapreduce.TaskType.REDUCE, reduceResourceRequest.getMemory()))); LOG.info("reduceResourceRequest:" + reduceResourceRequest); if (reduceResourceRequest.getMemory() > supportedMaxContainerCapability .getMemory() || reduceResourceRequest.getVirtualCores() > supportedMaxContainerCapability .getVirtualCores()) { String diagMsg = "REDUCE capability required is more than the " + "supported max container capability in the cluster. Killing the " + "Job. 
reduceResourceRequest: " + reduceResourceRequest + " maxContainerCapability:" + supportedMaxContainerCapability; LOG.info(diagMsg); eventHandler.handle(new JobDiagnosticsUpdateEvent(jobId, diagMsg)); eventHandler.handle(new JobEvent(jobId, JobEventType.JOB_KILL)); } } // set the resources reqEvent.getCapability().setMemory(reduceResourceRequest.getMemory()); reqEvent.getCapability().setVirtualCores( reduceResourceRequest.getVirtualCores()); if (reqEvent.getEarlierAttemptFailed()) { //add to the front of queue for fail fast pendingReduces.addFirst(new ContainerRequest(reqEvent, PRIORITY_REDUCE)); } else { pendingReduces.add(new ContainerRequest(reqEvent, PRIORITY_REDUCE)); //reduces are added to pending and are slowly ramped up } } } else if ( event.getType() == ContainerAllocator.EventType.CONTAINER_DEALLOCATE) { LOG.info("Processing the event " + event.toString()); TaskAttemptId aId = event.getAttemptID(); boolean removed = scheduledRequests.remove(aId); if (!removed) { ContainerId containerId = assignedRequests.get(aId); if (containerId != null) { removed = true; assignedRequests.remove(aId); containersReleased++; pendingRelease.add(containerId); release(containerId); } } if (!removed) { LOG.error("Could not deallocate container for task attemptId " + aId); } } else if ( event.getType() == ContainerAllocator.EventType.CONTAINER_FAILED) { ContainerFailedEvent fEv = (ContainerFailedEvent) event; String host = getHost(fEv.getContMgrAddress()); containerFailedOnHost(host); } } private static String getHost(String contMgrAddress) { String host = contMgrAddress; String[] hostport = host.split(":"); if (hostport.length == 2) { host = hostport[0]; } return host; } @Private @VisibleForTesting synchronized void setReduceResourceRequest(Resource res) { this.reduceResourceRequest = res; } @Private @VisibleForTesting synchronized void setMapResourceRequest(Resource res) { this.mapResourceRequest = res; } @Private @VisibleForTesting void preemptReducesIfNeeded() { if 
(reduceResourceRequest.equals(Resources.none())) {
  return; // no reduces
}
// Check if reduces have taken over the whole cluster and there are
// unassigned maps.
if (scheduledRequests.maps.size() > 0) {
  Resource resourceLimit = getResourceLimit();
  // Resources still free for maps = total limit minus what the currently
  // assigned (non-preempting) reduces are holding.
  Resource availableResourceForMap =
      Resources.subtract(
          resourceLimit,
          Resources.multiply(reduceResourceRequest,
              assignedRequests.reduces.size()
                  - assignedRequests.preemptionWaitingReduces.size()));
  // availableMemForMap must be sufficient to run at least 1 map
  if (ResourceCalculatorUtils.computeAvailableContainers(availableResourceForMap,
      mapResourceRequest, getSchedulerResourceTypes()) <= 0) {
    // to make sure new containers are given to maps and not reduces
    // ramp down all scheduled reduces if any
    // (since reduces are scheduled at higher priority than maps)
    LOG.info("Ramping down all scheduled reduces:"
        + scheduledRequests.reduces.size());
    for (ContainerRequest req : scheduledRequests.reduces.values()) {
      pendingReduces.add(req);
    }
    scheduledRequests.reduces.clear();

    // do further checking to find the number of map requests that were
    // hanging around for a while
    int hangingMapRequests = getNumOfHangingRequests(scheduledRequests.maps);
    if (hangingMapRequests > 0) {
      // preempt for making space for at least one map
      int preemptionReduceNumForOneMap =
          ResourceCalculatorUtils.divideAndCeilContainers(mapResourceRequest,
              reduceResourceRequest, getSchedulerResourceTypes());
      // Cap dictated by the configured preemption limit fraction.
      int preemptionReduceNumForPreemptionLimit =
          ResourceCalculatorUtils.divideAndCeilContainers(
              Resources.multiply(resourceLimit, maxReducePreemptionLimit),
              reduceResourceRequest, getSchedulerResourceTypes());
      // Never preempt more than needed to satisfy every hanging map.
      int preemptionReduceNumForAllMaps =
          ResourceCalculatorUtils.divideAndCeilContainers(
              Resources.multiply(mapResourceRequest, hangingMapRequests),
              reduceResourceRequest, getSchedulerResourceTypes());
      int toPreempt = Math.min(
          Math.max(preemptionReduceNumForOneMap,
              preemptionReduceNumForPreemptionLimit),
          preemptionReduceNumForAllMaps);

      LOG.info("Going to preempt " + toPreempt
          + " due to lack of space for maps");
      assignedRequests.preemptReduce(toPreempt);
    }
  }
}
}

/**
 * Counts requests in {@code requestMap} that have been outstanding longer
 * than {@code allocationDelayThresholdMs}. If the threshold is disabled
 * (zero or negative) every request counts as hanging.
 */
private int getNumOfHangingRequests(Map<TaskAttemptId, ContainerRequest> requestMap) {
  if (allocationDelayThresholdMs <= 0)
    return requestMap.size();
  int hangingRequests = 0;
  long currTime = clock.getTime();
  for (ContainerRequest request : requestMap.values()) {
    long delay = currTime - request.requestTimeMs;
    if (delay > allocationDelayThresholdMs)
      hangingRequests++;
  }
  return hangingRequests;
}

/**
 * Recomputes how many reduce requests should be scheduled versus kept
 * pending, honoring the slow-start threshold, the ramp-up limit, and the
 * resource headroom split between maps and reduces.
 */
@Private
public void scheduleReduces(
    int totalMaps, int completedMaps,
    int scheduledMaps, int scheduledReduces,
    int assignedMaps, int assignedReduces,
    Resource mapResourceReqt, Resource reduceResourceReqt,
    int numPendingReduces,
    float maxReduceRampupLimit, float reduceSlowStart) {

  if (numPendingReduces == 0) {
    return;
  }

  // get available resources for this job
  Resource headRoom = getAvailableResources();
  if (headRoom == null) {
    headRoom = Resources.none();
  }

  LOG.info("Recalculating schedule, headroom=" + headRoom);

  // check for slow start
  if (!getIsReduceStarted()) { // not set yet
    int completedMapsForReduceSlowstart =
        (int) Math.ceil(reduceSlowStart * totalMaps);
    if (completedMaps < completedMapsForReduceSlowstart) {
      LOG.info("Reduce slow start threshold not met. "
          + "completedMapsForReduceSlowstart "
          + completedMapsForReduceSlowstart);
      return;
    } else {
      LOG.info("Reduce slow start threshold reached. Scheduling reduces.");
      setIsReduceStarted(true);
    }
  }

  // if all maps are assigned, then ramp up all reduces irrespective of the
  // headroom
  if (scheduledMaps == 0 && numPendingReduces > 0) {
    LOG.info("All maps assigned. "
        + "Ramping up all remaining reduces:" + numPendingReduces);
    scheduleAllReduces();
    return;
  }

  float completedMapPercent = 0f;
  if (totalMaps != 0) { // support for 0 maps
    completedMapPercent = (float) completedMaps / totalMaps;
  } else {
    completedMapPercent = 1;
  }

  Resource netScheduledMapResource =
      Resources.multiply(mapResourceReqt, (scheduledMaps + assignedMaps));

  Resource netScheduledReduceResource =
      Resources.multiply(reduceResourceReqt,
          (scheduledReduces + assignedReduces));

  Resource finalMapResourceLimit;
  Resource finalReduceResourceLimit;

  // ramp up the reduces based on completed map percentage
  Resource totalResourceLimit = getResourceLimit();
  Resource idealReduceResourceLimit =
      Resources.multiply(totalResourceLimit,
          Math.min(completedMapPercent, maxReduceRampupLimit));
  Resource ideaMapResourceLimit =
      Resources.subtract(totalResourceLimit, idealReduceResourceLimit);

  // check if there aren't enough maps scheduled, give the free map capacity
  // to reduce.
  // Even when container number equals, there may be unused resources in one
  // dimension
  if (ResourceCalculatorUtils.computeAvailableContainers(ideaMapResourceLimit,
      mapResourceReqt, getSchedulerResourceTypes())
      >= (scheduledMaps + assignedMaps)) {
    // enough resource given to maps, given the remaining to reduces
    Resource unusedMapResourceLimit =
        Resources.subtract(ideaMapResourceLimit, netScheduledMapResource);
    finalReduceResourceLimit =
        Resources.add(idealReduceResourceLimit, unusedMapResourceLimit);
    finalMapResourceLimit =
        Resources.subtract(totalResourceLimit, finalReduceResourceLimit);
  } else {
    finalMapResourceLimit = ideaMapResourceLimit;
    finalReduceResourceLimit = idealReduceResourceLimit;
  }

  LOG.info("completedMapPercent " + completedMapPercent
      + " totalResourceLimit:" + totalResourceLimit
      + " finalMapResourceLimit:" + finalMapResourceLimit
      + " finalReduceResourceLimit:" + finalReduceResourceLimit
      + " netScheduledMapResource:" + netScheduledMapResource
      + " netScheduledReduceResource:" + netScheduledReduceResource);

  // Positive -> schedule more reduces; negative -> pull some back to pending.
  int rampUp =
      ResourceCalculatorUtils.computeAvailableContainers(Resources.subtract(
          finalReduceResourceLimit, netScheduledReduceResource),
          reduceResourceReqt, getSchedulerResourceTypes());

  if (rampUp > 0) {
    rampUp = Math.min(rampUp, numPendingReduces);
    LOG.info("Ramping up " + rampUp);
    rampUpReduces(rampUp);
  } else if (rampUp < 0) {
    int rampDown = -1 * rampUp;
    rampDown = Math.min(rampDown, scheduledReduces);
    LOG.info("Ramping down " + rampDown);
    rampDownReduces(rampDown);
  }
}

/** Moves every pending reduce request into the scheduled set. */
@Private
public void scheduleAllReduces() {
  for (ContainerRequest req : pendingReduces) {
    //reduceScheduler.selectHostForReduceRequest(req);
    scheduledRequests.addReduce(req);
  }
  pendingReduces.clear();
}

/** Schedules up to {@code rampUp} reduces from the head of the pending list. */
@Private
public void rampUpReduces(int rampUp) {
  // more reduce to be scheduled
  for (int i = 0; i < rampUp; i++) {
    ContainerRequest request = pendingReduces.removeFirst();
    //reduceScheduler.selectHostForReduceRequest(request);
    scheduledRequests.addReduce(request);
  }
}

/** Returns {@code rampDown} scheduled reduces to the pending list. */
@Private
public void rampDownReduces(int rampDown) {
  // remove from the scheduled and move back to pending
  for (int i = 0; i < rampDown; i++) {
    ContainerRequest request = scheduledRequests.removeReduce();
    pendingReduces.add(request);
  }
}

/**
 * Heartbeats the ResourceManager and returns the newly allocated containers,
 * handling RM restart/resync and connection-loss retry along the way.
 */
@SuppressWarnings("unchecked")
private List<Container> getResources() throws Exception {
  applyConcurrentTaskLimits();

  // will be null the first time
  Resource headRoom = getAvailableResources() == null
      ? Resources.none()
      : Resources.clone(getAvailableResources());
  AllocateResponse response;
  /*
   * If contact with RM is lost, the AM will wait MR_AM_TO_RM_WAIT_INTERVAL_MS
   * milliseconds before aborting. During this interval, AM will still try
   * to contact the RM.
   */
  try {
    response = makeRemoteRequest();
    // Reset retry count if no exception occurred.
    retrystartTime = System.currentTimeMillis();
  } catch (ApplicationAttemptNotFoundException e) {
    // This can happen if the RM has been restarted. If it is in that state,
    // this application must clean itself up.
    eventHandler.handle(new JobEvent(this.getJob().getID(),
        JobEventType.JOB_AM_REBOOT));
    throw new YarnRuntimeException(
        "Resource Manager doesn't recognize AttemptId: "
            + this.getContext().getApplicationAttemptId(), e);
  } catch (ApplicationMasterNotRegisteredException e) {
    LOG.info("ApplicationMaster is out of sync with ResourceManager,"
        + " hence resync and send outstanding requests.");
    // RM may have restarted, re-register with RM.
    lastResponseID = 0;
    register();
    addOutstandingRequestOnResync();
    return null;
  } catch (Exception e) {
    // This can happen when the connection to the RM has gone down. Keep
    // re-trying until the retryInterval has expired.
    if (System.currentTimeMillis() - retrystartTime >= retryInterval) {
      LOG.error("Could not contact RM after " + retryInterval
          + " milliseconds.");
      eventHandler.handle(new JobEvent(this.getJob().getID(),
          JobEventType.JOB_AM_REBOOT));
      throw new YarnRuntimeException("Could not contact RM after "
          + retryInterval + " milliseconds.");
    }
    // Throw this up to the caller, which may decide to ignore it and
    // continue to attempt to contact the RM.
    throw e;
  }
  Resource newHeadRoom = getAvailableResources() == null
      ? Resources.none() : getAvailableResources();
  List<Container> newContainers = response.getAllocatedContainers();
  // Setting NMTokens
  if (response.getNMTokens() != null) {
    for (NMToken nmToken : response.getNMTokens()) {
      NMTokenCache.setNMToken(nmToken.getNodeId().toString(),
          nmToken.getToken());
    }
  }

  // Setting AMRMToken
  if (response.getAMRMToken() != null) {
    updateAMRMToken(response.getAMRMToken());
  }

  List<ContainerStatus> finishedContainers =
      response.getCompletedContainersStatuses();
  // Any new container, finished container, or headroom change means the
  // reduce schedule may need recomputation.
  if (newContainers.size() + finishedContainers.size() > 0
      || !headRoom.equals(newHeadRoom)) {
    // something changed
    recalculateReduceSchedule = true;
    if (LOG.isDebugEnabled() && !headRoom.equals(newHeadRoom)) {
      LOG.debug("headroom=" + newHeadRoom);
    }
  }

  if (LOG.isDebugEnabled()) {
    for (Container cont : newContainers) {
      LOG.debug("Received new Container :" + cont);
    }
  }

  // Called on each allocation. Will know about newly blacklisted/added hosts.
  computeIgnoreBlacklisting();

  handleUpdatedNodes(response);

  for (ContainerStatus cont : finishedContainers) {
    LOG.info("Received completed container " + cont.getContainerId());
    TaskAttemptId attemptID = assignedRequests.get(cont.getContainerId());
    if (attemptID == null) {
      LOG.error("Container complete event for unknown container id "
          + cont.getContainerId());
    } else {
      pendingRelease.remove(cont.getContainerId());
      assignedRequests.remove(attemptID);

      // send the container completed event to Task attempt
      eventHandler.handle(createContainerFinishedEvent(cont, attemptID));

      // Send the diagnostics
      String diagnostics = StringInterner.weakIntern(cont.getDiagnostics());
      eventHandler.handle(new TaskAttemptDiagnosticsUpdateEvent(attemptID,
          diagnostics));
    }
  }
  return newContainers;
}

/**
 * Caps the number of outstanding map/reduce container requests so that
 * scheduled + assigned tasks never exceed the configured concurrency limits
 * (maxRunningMaps / maxRunningReduces; non-positive means unlimited).
 * Fast-fail map requests are given priority within the map limit.
 */
private void applyConcurrentTaskLimits() {
  int numScheduledMaps = scheduledRequests.maps.size();
  if (maxRunningMaps > 0 && numScheduledMaps > 0) {
    int maxRequestedMaps = Math.max(0,
        maxRunningMaps - assignedRequests.maps.size());
    int numScheduledFailMaps =
        scheduledRequests.earlierFailedMaps.size();
    int failedMapRequestLimit = Math.min(maxRequestedMaps,
        numScheduledFailMaps);
    int normalMapRequestLimit = Math.min(
        maxRequestedMaps - failedMapRequestLimit,
        numScheduledMaps - numScheduledFailMaps);
    setRequestLimit(PRIORITY_FAST_FAIL_MAP, mapResourceRequest,
        failedMapRequestLimit);
    setRequestLimit(PRIORITY_MAP, mapResourceRequest, normalMapRequestLimit);
  }

  int numScheduledReduces = scheduledRequests.reduces.size();
  if (maxRunningReduces > 0 && numScheduledReduces > 0) {
    int maxRequestedReduces = Math.max(0,
        maxRunningReduces - assignedRequests.reduces.size());
    int reduceRequestLimit = Math.min(maxRequestedReduces,
        numScheduledReduces);
    setRequestLimit(PRIORITY_REDUCE, reduceResourceRequest,
        reduceRequestLimit);
  }
}

/** True when another map may be assigned under the concurrency limit. */
private boolean canAssignMaps() {
  return (maxRunningMaps <= 0
      || assignedRequests.maps.size() < maxRunningMaps);
}

/** True when another reduce may be assigned under the concurrency limit. */
private boolean canAssignReduces() {
  return (maxRunningReduces <= 0
      || assignedRequests.reduces.size() < maxRunningReduces);
}

/**
 * Installs a refreshed AMRM token (received from the RM) into the current
 * user's credentials.
 */
private void updateAMRMToken(Token token) throws IOException {
  org.apache.hadoop.security.token.Token<AMRMTokenIdentifier> amrmToken =
      new org.apache.hadoop.security.token.Token<AMRMTokenIdentifier>(token
          .getIdentifier().array(), token.getPassword().array(), new Text(
          token.getKind()), new Text(token.getService()));
  UserGroupInformation currentUGI = UserGroupInformation.getCurrentUser();
  // NOTE(review): the token is added to the UGI before its service is set;
  // order preserved as-is — verify this is intentional.
  currentUGI.addToken(amrmToken);
  amrmToken.setService(ClientRMProxy.getAMRMTokenService(getConfig()));
}

/**
 * Maps a completed container status to a task-attempt event: framework
 * kills (ABORTED/PREEMPTED exit status) become TA_KILL, everything else
 * TA_CONTAINER_COMPLETED.
 */
@VisibleForTesting
public TaskAttemptEvent createContainerFinishedEvent(ContainerStatus cont,
    TaskAttemptId attemptID) {
  if (cont.getExitStatus() == ContainerExitStatus.ABORTED
      || cont.getExitStatus() == ContainerExitStatus.PREEMPTED) {
    // killed by framework
    return new TaskAttemptEvent(attemptID,
        TaskAttemptEventType.TA_KILL);
  } else {
    return new TaskAttemptEvent(attemptID,
        TaskAttemptEventType.TA_CONTAINER_COMPLETED);
  }
}

@SuppressWarnings("unchecked")
private void
handleUpdatedNodes(AllocateResponse response) {
  // send event to the job about on updated nodes
  List<NodeReport> updatedNodes = response.getUpdatedNodes();
  if (!updatedNodes.isEmpty()) {
    // send event to the job to act upon completed tasks
    eventHandler.handle(new JobUpdatedNodesEvent(getJob().getID(),
        updatedNodes));

    // act upon running tasks
    HashSet<NodeId> unusableNodes = new HashSet<NodeId>();
    for (NodeReport nr : updatedNodes) {
      NodeState nodeState = nr.getNodeState();
      if (nodeState.isUnusable()) {
        unusableNodes.add(nr.getNodeId());
      }
    }
    // Two passes: i == 0 walks the assigned maps, i == 1 the assigned reduces.
    for (int i = 0; i < 2; ++i) {
      HashMap<TaskAttemptId, Container> taskSet = i == 0
          ? assignedRequests.maps : assignedRequests.reduces;
      // kill running containers
      for (Map.Entry<TaskAttemptId, Container> entry : taskSet.entrySet()) {
        TaskAttemptId tid = entry.getKey();
        NodeId taskAttemptNodeId = entry.getValue().getNodeId();
        if (unusableNodes.contains(taskAttemptNodeId)) {
          LOG.info("Killing taskAttempt:" + tid
              + " because it is running on unusable node:"
              + taskAttemptNodeId);
          eventHandler.handle(new TaskAttemptKillEvent(tid,
              "TaskAttempt killed because it ran on unusable node"
                  + taskAttemptNodeId));
        }
      }
    }
  }
}

/**
 * Total resource limit for this job: current headroom plus the resources
 * already held by assigned maps and reduces.
 */
@Private
public Resource getResourceLimit() {
  Resource headRoom = getAvailableResources();
  if (headRoom == null) {
    headRoom = Resources.none();
  }
  Resource assignedMapResource =
      Resources.multiply(mapResourceRequest, assignedRequests.maps.size());
  Resource assignedReduceResource =
      Resources.multiply(reduceResourceRequest,
          assignedRequests.reduces.size());
  return Resources.add(headRoom,
      Resources.add(assignedMapResource, assignedReduceResource));
}

/**
 * Bookkeeping for container requests that have been scheduled (sent to the
 * RM) but not yet matched with an allocated container, including host/rack
 * locality indexes for map requests.
 */
@Private
@VisibleForTesting
class ScheduledRequests {

  private final LinkedList<TaskAttemptId> earlierFailedMaps =
      new LinkedList<TaskAttemptId>();

  /** Maps from a host to a list of Map tasks with data on the host */
  private final Map<String, LinkedList<TaskAttemptId>> mapsHostMapping =
      new HashMap<String, LinkedList<TaskAttemptId>>();
  // Same as mapsHostMapping, but keyed by rack.
  private final Map<String, LinkedList<TaskAttemptId>> mapsRackMapping =
      new HashMap<String, LinkedList<TaskAttemptId>>();
  @VisibleForTesting
  final Map<TaskAttemptId, ContainerRequest> maps =
      new LinkedHashMap<TaskAttemptId, ContainerRequest>();

  private final LinkedHashMap<TaskAttemptId, ContainerRequest> reduces =
      new LinkedHashMap<TaskAttemptId, ContainerRequest>();

  /**
   * Removes the scheduled request for {@code tId} (map or reduce) and
   * decrements the outstanding container-request count; returns whether a
   * request was actually present.
   */
  boolean remove(TaskAttemptId tId) {
    ContainerRequest req = null;
    if (tId.getTaskId().getTaskType().equals(TaskType.MAP)) {
      req = maps.remove(tId);
    } else {
      req = reduces.remove(tId);
    }

    if (req == null) {
      return false;
    } else {
      decContainerReq(req);
      return true;
    }
  }

  /** Removes and returns the oldest scheduled reduce, or null if none. */
  ContainerRequest removeReduce() {
    Iterator<Entry<TaskAttemptId, ContainerRequest>> it =
        reduces.entrySet().iterator();
    if (it.hasNext()) {
      Entry<TaskAttemptId, ContainerRequest> entry = it.next();
      it.remove();
      decContainerReq(entry.getValue());
      return entry.getValue();
    }
    return null;
  }

  /**
   * Registers a map container request. Earlier-failed attempts go to the
   * fast-fail priority; fresh attempts are indexed by host and rack for
   * locality-aware assignment.
   */
  void addMap(ContainerRequestEvent event) {
    ContainerRequest request = null;

    if (event.getEarlierAttemptFailed()) {
      earlierFailedMaps.add(event.getAttemptID());
      request = new ContainerRequest(event, PRIORITY_FAST_FAIL_MAP);
      LOG.info("Added "+event.getAttemptID()+" to list of failed maps");
    } else {
      for (String host : event.getHosts()) {
        LinkedList<TaskAttemptId> list = mapsHostMapping.get(host);
        if (list == null) {
          list = new LinkedList<TaskAttemptId>();
          mapsHostMapping.put(host, list);
        }
        list.add(event.getAttemptID());
        if (LOG.isDebugEnabled()) {
          LOG.debug("Added attempt req to host " + host);
        }
      }
      for (String rack : event.getRacks()) {
        LinkedList<TaskAttemptId> list = mapsRackMapping.get(rack);
        if (list == null) {
          list = new LinkedList<TaskAttemptId>();
          mapsRackMapping.put(rack, list);
        }
        list.add(event.getAttemptID());
        if (LOG.isDebugEnabled()) {
          LOG.debug("Added attempt req to rack " + rack);
        }
      }
      request = new ContainerRequest(event, PRIORITY_MAP);
    }
    maps.put(event.getAttemptID(), request);
    addContainerReq(request);
  }

  /** Registers a reduce container request. */
  void addReduce(ContainerRequest req) {
    reduces.put(req.attemptID, req);
    addContainerReq(req);
  }

  // this method will change the list of allocatedContainers.
  private void assign(List<Container> allocatedContainers) {
    Iterator<Container> it = allocatedContainers.iterator();
    LOG.info("Got allocated containers " + allocatedContainers.size());
    containersAllocated += allocatedContainers.size();
    while (it.hasNext()) {
      Container allocated = it.next();
      if (LOG.isDebugEnabled()) {
        LOG.debug("Assigning container " + allocated.getId()
            + " with priority " + allocated.getPriority() + " to NM "
            + allocated.getNodeId());
      }

      // check if allocated container meets memory requirements
      // and whether we have any scheduled tasks that need
      // a container to be assigned
      boolean isAssignable = true;
      Priority priority = allocated.getPriority();
      Resource allocatedResource = allocated.getResource();
      if (PRIORITY_FAST_FAIL_MAP.equals(priority)
          || PRIORITY_MAP.equals(priority)) {
        if (ResourceCalculatorUtils.computeAvailableContainers(allocatedResource,
            mapResourceRequest, getSchedulerResourceTypes()) <= 0
            || maps.isEmpty()) {
          LOG.info("Cannot assign container " + allocated
              + " for a map as either "
              + " container memory less than required " + mapResourceRequest
              + " or no pending map tasks - maps.isEmpty="
              + maps.isEmpty());
          isAssignable = false;
        }
      } else if (PRIORITY_REDUCE.equals(priority)) {
        if (ResourceCalculatorUtils.computeAvailableContainers(allocatedResource,
            reduceResourceRequest, getSchedulerResourceTypes()) <= 0
            || reduces.isEmpty()) {
          LOG.info("Cannot assign container " + allocated
              + " for a reduce as either "
              + " container memory less than required " + reduceResourceRequest
              + " or no pending reduce tasks - reduces.isEmpty="
              + reduces.isEmpty());
          isAssignable = false;
        }
      } else {
        LOG.warn("Container allocated at unwanted priority: " + priority
            + ". Returning to RM...");
        isAssignable = false;
      }

      if (!isAssignable) {
        // release container if we could not assign it
        containerNotAssigned(allocated);
        it.remove();
        continue;
      }

      // do not assign if allocated container is on a
      // blacklisted host
      String allocatedHost = allocated.getNodeId().getHost();
      if (isNodeBlacklisted(allocatedHost)) {
        // we need to request for a new container
        // and release the current one
        LOG.info("Got allocated container on a blacklisted "
            + " host " + allocatedHost + ". Releasing container "
            + allocated);

        // find the request matching this allocated container
        // and replace it with a new one
        ContainerRequest toBeReplacedReq =
            getContainerReqToReplace(allocated);
        if (toBeReplacedReq != null) {
          LOG.info("Placing a new container request for task attempt "
              + toBeReplacedReq.attemptID);
          ContainerRequest newReq =
              getFilteredContainerRequest(toBeReplacedReq);
          decContainerReq(toBeReplacedReq);
          if (toBeReplacedReq.attemptID.getTaskId().getTaskType()
              == TaskType.MAP) {
            maps.put(newReq.attemptID, newReq);
          } else {
            reduces.put(newReq.attemptID, newReq);
          }
          addContainerReq(newReq);
        } else {
          LOG.info("Could not map allocated container to a valid request."
              + " Releasing allocated container " + allocated);
        }

        // release container if we could not assign it
        containerNotAssigned(allocated);
        it.remove();
        continue;
      }
    }

    assignContainers(allocatedContainers);

    // release container if we could not assign it
    it = allocatedContainers.iterator();
    while (it.hasNext()) {
      Container allocated = it.next();
      LOG.info("Releasing unassigned container " + allocated);
      containerNotAssigned(allocated);
    }
  }

  /**
   * Marks {@code allocated} as matched to {@code assigned}: decrements the
   * outstanding request, notifies the task attempt, and records the
   * assignment.
   */
  @SuppressWarnings("unchecked")
  private void containerAssigned(Container allocated,
      ContainerRequest assigned) {
    // Update resource requests
    decContainerReq(assigned);

    // send the container-assigned event to task attempt
    eventHandler.handle(new TaskAttemptContainerAssignedEvent(
        assigned.attemptID, allocated, applicationACLs));

    assignedRequests.add(allocated, assigned.attemptID);

    if (LOG.isDebugEnabled()) {
      LOG.info("Assigned container (" + allocated + ") "
          + " to task " + assigned.attemptID + " on node "
          + allocated.getNodeId().toString());
    }
  }

  /** Returns an unassignable container to the RM. */
  private void containerNotAssigned(Container allocated) {
    containersReleased++;
    pendingRelease.add(allocated.getId());
    release(allocated.getId());
  }

  /**
   * Assigns a container that needs no locality matching: fast-fail maps and
   * reduces. Normal-priority maps are left for assignMapsWithLocality.
   */
  private ContainerRequest assignWithoutLocality(Container allocated) {
    ContainerRequest assigned = null;

    Priority priority = allocated.getPriority();
    if (PRIORITY_FAST_FAIL_MAP.equals(priority)) {
      LOG.info("Assigning container " + allocated + " to fast fail map");
      assigned = assignToFailedMap(allocated);
    } else if (PRIORITY_REDUCE.equals(priority)) {
      if (LOG.isDebugEnabled()) {
        LOG.debug("Assigning container " + allocated + " to reduce");
      }
      assigned = assignToReduce(allocated);
    }

    return assigned;
  }

  private void assignContainers(List<Container> allocatedContainers) {
    Iterator<Container> it = allocatedContainers.iterator();
    while (it.hasNext()) {
      Container allocated = it.next();
      ContainerRequest assigned = assignWithoutLocality(allocated);
      if (assigned != null) {
        containerAssigned(allocated, assigned);
        it.remove();
      }
    }
    assignMapsWithLocality(allocatedContainers);
  }

  /**
   * Finds a scheduled request (matching the container's priority, and host
   * locality for maps) that should be re-requested because the container
   * landed on a blacklisted host.
   */
  private ContainerRequest getContainerReqToReplace(Container allocated) {
    LOG.info("Finding containerReq for allocated container: " + allocated);
    Priority priority = allocated.getPriority();
    ContainerRequest toBeReplaced = null;
    if (PRIORITY_FAST_FAIL_MAP.equals(priority)) {
      LOG.info("Replacing FAST_FAIL_MAP container " + allocated.getId());
      Iterator<TaskAttemptId> iter = earlierFailedMaps.iterator();
      while (toBeReplaced == null && iter.hasNext()) {
        toBeReplaced = maps.get(iter.next());
      }
      LOG.info("Found replacement: " + toBeReplaced);
      return toBeReplaced;
    } else if (PRIORITY_MAP.equals(priority)) {
      LOG.info("Replacing MAP container " + allocated.getId());
      // allocated container was for a map
      String host = allocated.getNodeId().getHost();
      LinkedList<TaskAttemptId> list = mapsHostMapping.get(host);
      if (list != null && list.size() > 0) {
        TaskAttemptId tId = list.removeLast();
        if (maps.containsKey(tId)) {
          toBeReplaced = maps.remove(tId);
        }
      } else {
        TaskAttemptId tId = maps.keySet().iterator().next();
        toBeReplaced = maps.remove(tId);
      }
    } else if (PRIORITY_REDUCE.equals(priority)) {
      TaskAttemptId tId = reduces.keySet().iterator().next();
      toBeReplaced = reduces.remove(tId);
    }
    LOG.info("Found replacement: " + toBeReplaced);
    return toBeReplaced;
  }

  @SuppressWarnings("unchecked")
  private ContainerRequest assignToFailedMap(Container allocated) {
    // try to assign to earlierFailedMaps if present
    ContainerRequest assigned = null;
    while (assigned == null && earlierFailedMaps.size() > 0
        && canAssignMaps()) {
      TaskAttemptId tId = earlierFailedMaps.removeFirst();
      if (maps.containsKey(tId)) {
        assigned = maps.remove(tId);
        JobCounterUpdateEvent jce =
            new JobCounterUpdateEvent(assigned.attemptID.getTaskId().getJobId());
        jce.addCounterUpdate(JobCounter.OTHER_LOCAL_MAPS, 1);
        eventHandler.handle(jce);
        LOG.info("Assigned from earlierFailedMaps");
        break;
      }
    }
    return assigned;
  }

  private ContainerRequest assignToReduce(Container allocated) {
    ContainerRequest assigned = null;
    // try to assign to reduces if present
    if (assigned == null && reduces.size() > 0 && canAssignReduces()) {
      TaskAttemptId tId = reduces.keySet().iterator().next();
      assigned = reduces.remove(tId);
      LOG.info("Assigned to reduce");
    }
    return assigned;
  }

  /**
   * Assigns map-priority containers in three passes: node-local first, then
   * rack-local, then any remaining map request.
   */
  @SuppressWarnings("unchecked")
  private void assignMapsWithLocality(List<Container> allocatedContainers) {
    // try to assign to all nodes first to match node local
    Iterator<Container> it = allocatedContainers.iterator();
    while (it.hasNext() && maps.size() > 0 && canAssignMaps()) {
      Container allocated = it.next();
      Priority priority = allocated.getPriority();
      assert PRIORITY_MAP.equals(priority);
      // "if (maps.containsKey(tId))" below should be almost always true.
      // hence this while loop would almost always have O(1) complexity
      String host = allocated.getNodeId().getHost();
      LinkedList<TaskAttemptId> list = mapsHostMapping.get(host);
      while (list != null && list.size() > 0) {
        if (LOG.isDebugEnabled()) {
          LOG.debug("Host matched to the request list " + host);
        }
        TaskAttemptId tId = list.removeFirst();
        if (maps.containsKey(tId)) {
          ContainerRequest assigned = maps.remove(tId);
          containerAssigned(allocated, assigned);
          it.remove();
          JobCounterUpdateEvent jce =
              new JobCounterUpdateEvent(assigned.attemptID.getTaskId().getJobId());
          jce.addCounterUpdate(JobCounter.DATA_LOCAL_MAPS, 1);
          eventHandler.handle(jce);
          hostLocalAssigned++;
          if (LOG.isDebugEnabled()) {
            LOG.debug("Assigned based on host match " + host);
          }
          break;
        }
      }
    }

    // try to match all rack local
    it = allocatedContainers.iterator();
    while (it.hasNext() && maps.size() > 0 && canAssignMaps()) {
      Container allocated = it.next();
      Priority priority = allocated.getPriority();
      assert PRIORITY_MAP.equals(priority);
      // "if (maps.containsKey(tId))" below should be almost always true.
      // hence this while loop would almost always have O(1) complexity
      String host = allocated.getNodeId().getHost();
      String rack = RackResolver.resolve(host).getNetworkLocation();
      LinkedList<TaskAttemptId> list = mapsRackMapping.get(rack);
      while (list != null && list.size() > 0) {
        TaskAttemptId tId = list.removeFirst();
        if (maps.containsKey(tId)) {
          ContainerRequest assigned = maps.remove(tId);
          containerAssigned(allocated, assigned);
          it.remove();
          JobCounterUpdateEvent jce =
              new JobCounterUpdateEvent(assigned.attemptID.getTaskId().getJobId());
          jce.addCounterUpdate(JobCounter.RACK_LOCAL_MAPS, 1);
          eventHandler.handle(jce);
          rackLocalAssigned++;
          if (LOG.isDebugEnabled()) {
            LOG.debug("Assigned based on rack match " + rack);
          }
          break;
        }
      }
    }

    // assign remaining
    it = allocatedContainers.iterator();
    while (it.hasNext() && maps.size() > 0 && canAssignMaps()) {
      Container allocated = it.next();
      Priority priority = allocated.getPriority();
      assert PRIORITY_MAP.equals(priority);
      TaskAttemptId tId = maps.keySet().iterator().next();
      ContainerRequest assigned = maps.remove(tId);
      containerAssigned(allocated, assigned);
      it.remove();
      JobCounterUpdateEvent jce =
          new JobCounterUpdateEvent(assigned.attemptID.getTaskId().getJobId());
      jce.addCounterUpdate(JobCounter.OTHER_LOCAL_MAPS, 1);
      eventHandler.handle(jce);
      if (LOG.isDebugEnabled()) {
        LOG.debug("Assigned based on * match");
      }
    }
  }
}

/**
 * Bookkeeping for container requests that have been matched with an
 * allocated container, plus the set of reduces waiting to be preempted.
 */
@Private
@VisibleForTesting
class AssignedRequests {
  private final Map<ContainerId, TaskAttemptId> containerToAttemptMap =
      new HashMap<ContainerId, TaskAttemptId>();
  private final LinkedHashMap<TaskAttemptId, Container> maps =
      new LinkedHashMap<TaskAttemptId, Container>();
  @VisibleForTesting
  final LinkedHashMap<TaskAttemptId, Container> reduces =
      new LinkedHashMap<TaskAttemptId, Container>();
  @VisibleForTesting
  final Set<TaskAttemptId> preemptionWaitingReduces =
      new HashSet<TaskAttemptId>();

  void add(Container container, TaskAttemptId tId) {
    LOG.info("Assigned container " + container.getId().toString() + " to " +
        tId);
    containerToAttemptMap.put(container.getId(), tId);
    if (tId.getTaskId().getTaskType().equals(TaskType.MAP)) {
      maps.put(tId, container);
    } else {
      reduces.put(tId, container);
    }
  }

  /**
   * Kills up to {@code toPreempt} assigned reduces, choosing those with the
   * least progress first, and records them as awaiting preemption.
   */
  @SuppressWarnings("unchecked")
  void preemptReduce(int toPreempt) {
    List<TaskAttemptId> reduceList =
        new ArrayList<TaskAttemptId>(reduces.keySet());
    // sort reduces on progress
    Collections.sort(reduceList, new Comparator<TaskAttemptId>() {
      @Override
      public int compare(TaskAttemptId o1, TaskAttemptId o2) {
        return Float.compare(
            getJob().getTask(o1.getTaskId()).getAttempt(o1).getProgress(),
            getJob().getTask(o2.getTaskId()).getAttempt(o2).getProgress());
      }
    });

    for (int i = 0; i < toPreempt && reduceList.size() > 0; i++) {
      TaskAttemptId id = reduceList.remove(0); // remove the one on top
      LOG.info("Preempting " + id);
      preemptionWaitingReduces.add(id);
      eventHandler.handle(new TaskAttemptKillEvent(id, RAMPDOWN_DIAGNOSTIC));
    }
  }

  /**
   * Forgets the assignment for {@code tId}; returns whether an assignment
   * existed. For reduces, also clears any pending-preemption marker.
   */
  boolean remove(TaskAttemptId tId) {
    ContainerId containerId = null;
    if (tId.getTaskId().getTaskType().equals(TaskType.MAP)) {
      containerId = maps.remove(tId).getId();
    } else {
      containerId = reduces.remove(tId).getId();
      if (containerId != null) {
        boolean preempted = preemptionWaitingReduces.remove(tId);
        if (preempted) {
          LOG.info("Reduce preemption successful " + tId);
        }
      }
    }

    if (containerId != null) {
      containerToAttemptMap.remove(containerId);
      return true;
    }
    return false;
  }

  /** Looks up the task attempt running in the given container. */
  TaskAttemptId get(ContainerId cId) {
    return containerToAttemptMap.get(cId);
  }

  /** Looks up the container assigned to the given task attempt, or null. */
  ContainerId get(TaskAttemptId tId) {
    Container taskContainer;
    if (tId.getTaskId().getTaskType().equals(TaskType.MAP)) {
      taskContainer = maps.get(tId);
    } else {
      taskContainer = reduces.get(tId);
    }

    if (taskContainer == null) {
      return null;
    } else {
      return taskContainer.getId();
    }
  }
}

/**
 * Snapshot of scheduling counters, used to log a one-line summary whenever
 * any of the tracked numbers changes.
 */
private class ScheduleStats {
  int numPendingReduces;
  int numScheduledMaps;
  int numScheduledReduces;
  int numAssignedMaps;
  int numAssignedReduces;
  int numCompletedMaps;
  int numCompletedReduces;
  int numContainersAllocated;
  int numContainersReleased;

  public void updateAndLogIfChanged(String msgPrefix) {
    boolean changed = false;

    // synchronized to fix findbug warnings
    synchronized (RMContainerAllocator.this) {
      changed |= (numPendingReduces != pendingReduces.size());
      numPendingReduces = pendingReduces.size();
      changed |= (numScheduledMaps != scheduledRequests.maps.size());
      numScheduledMaps = scheduledRequests.maps.size();
      changed |= (numScheduledReduces != scheduledRequests.reduces.size());
      numScheduledReduces = scheduledRequests.reduces.size();
      changed |= (numAssignedMaps != assignedRequests.maps.size());
      numAssignedMaps = assignedRequests.maps.size();
      changed |= (numAssignedReduces != assignedRequests.reduces.size());
      numAssignedReduces = assignedRequests.reduces.size();
      changed |= (numCompletedMaps != getJob().getCompletedMaps());
      numCompletedMaps = getJob().getCompletedMaps();
      changed |= (numCompletedReduces != getJob().getCompletedReduces());
      numCompletedReduces = getJob().getCompletedReduces();
      changed |= (numContainersAllocated != containersAllocated);
      numContainersAllocated = containersAllocated;
      changed |= (numContainersReleased != containersReleased);
      numContainersReleased = containersReleased;
    }

    if (changed) {
      log(msgPrefix);
    }
  }

  public void log(String msgPrefix) {
    LOG.info(msgPrefix + "PendingReds:" + numPendingReduces +
        " ScheduledMaps:" + numScheduledMaps +
        " ScheduledReds:" + numScheduledReduces +
        " AssignedMaps:" + numAssignedMaps +
        " AssignedReds:" + numAssignedReduces +
        " CompletedMaps:" + numCompletedMaps +
        " CompletedReds:" + numCompletedReduces +
        " ContAlloc:" + numContainersAllocated +
        " ContRel:" + numContainersReleased +
        " HostLocal:" + hostLocalAssigned +
        " RackLocal:" + rackLocalAssigned);
  }
}
}
/* * LOCKSS Repository Service REST API * REST API of the LOCKSS Repository Service * * The version of the OpenAPI document: 2.0.0 * Contact: lockss-support@lockss.org * * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). * https://openapi-generator.tech * Do not edit the class manually. */ package org.lockss.laaws.model.rs; import com.google.gson.annotations.SerializedName; import java.util.Objects; /** * Artifact */ public class Artifact { private static final long serialVersionUID = 1L; public static final String SERIALIZED_NAME_ID = "id"; @SerializedName(SERIALIZED_NAME_ID) private String id; public static final String SERIALIZED_NAME_COLLECTION = "collection"; @SerializedName(SERIALIZED_NAME_COLLECTION) private String collection; public static final String SERIALIZED_NAME_AUID = "auid"; @SerializedName(SERIALIZED_NAME_AUID) private String auid; public static final String SERIALIZED_NAME_URI = "uri"; @SerializedName(SERIALIZED_NAME_URI) private String uri; public static final String SERIALIZED_NAME_VERSION = "version"; @SerializedName(SERIALIZED_NAME_VERSION) private Integer version; public static final String SERIALIZED_NAME_COMMITTED = "committed"; @SerializedName(SERIALIZED_NAME_COMMITTED) private Boolean committed; public static final String SERIALIZED_NAME_STORAGE_URL = "storageUrl"; @SerializedName(SERIALIZED_NAME_STORAGE_URL) private String storageUrl; public static final String SERIALIZED_NAME_ORIGIN_DATE = "originDate"; @SerializedName(SERIALIZED_NAME_ORIGIN_DATE) private Long originDate; public static final String SERIALIZED_NAME_COLLECTION_DATE = "collectionDate"; @SerializedName(SERIALIZED_NAME_COLLECTION_DATE) private Long collectionDate; public static final String SERIALIZED_NAME_CONTENT_DIGEST = "contentDigest"; @SerializedName(SERIALIZED_NAME_CONTENT_DIGEST) private String contentDigest; public Artifact id(String id) { this.id = id; return this; } /** * Get id * * @return id **/ public String getId() { 
return id; } public void setId(String id) { this.id = id; } public Artifact collection(String collection) { this.collection = collection; return this; } /** * Get collection * * @return collection **/ public String getCollection() { return collection; } public void setCollection(String collection) { this.collection = collection; } public Artifact auid(String auid) { this.auid = auid; return this; } /** * Get auid * * @return auid **/ public String getAuid() { return auid; } public void setAuid(String auid) { this.auid = auid; } public Artifact uri(String uri) { this.uri = uri; return this; } /** * Get uri * * @return uri **/ public String getUri() { return uri; } public void setUri(String uri) { this.uri = uri; } public Artifact version(Integer version) { this.version = version; return this; } /** * Get version * * @return version **/ public Integer getVersion() { return version; } public void setVersion(Integer version) { this.version = version; } public Artifact committed(Boolean committed) { this.committed = committed; return this; } /** * Get committed * * @return committed **/ public Boolean getCommitted() { return committed; } public void setCommitted(Boolean committed) { this.committed = committed; } public Artifact storageUrl(String storageUrl) { this.storageUrl = storageUrl; return this; } /** * Get storageUrl * * @return storageUrl **/ public String getStorageUrl() { return storageUrl; } public void setStorageUrl(String storageUrl) { this.storageUrl = storageUrl; } public Artifact originDate(Long originDate) { this.originDate = originDate; return this; } /** * Get originDate * * @return originDate **/ public Long getOriginDate() { return originDate; } public void setOriginDate(Long originDate) { this.originDate = originDate; } public Artifact collectionDate(Long collectionDate) { this.collectionDate = collectionDate; return this; } /** * Get collectionDate * * @return collectionDate **/ public Long getCollectionDate() { return collectionDate; } public 
void setCollectionDate(Long collectionDate) { this.collectionDate = collectionDate; } public Artifact contentDigest(String contentDigest) { this.contentDigest = contentDigest; return this; } /** * Get collectionDate * * @return collectionDate **/ public String getContentDigest() { return contentDigest; } public void setContentDigest(String contentDigest) { this.contentDigest = contentDigest; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } Artifact artifact = (Artifact) o; return Objects.equals(this.id, artifact.id) && Objects.equals(this.collection, artifact.collection) && Objects.equals(this.auid, artifact.auid) && Objects.equals(this.uri, artifact.uri) && Objects.equals(this.version, artifact.version) && Objects.equals(this.committed, artifact.committed) && Objects.equals(this.storageUrl, artifact.storageUrl) && Objects.equals(this.originDate, artifact.originDate) && Objects.equals(this.collectionDate, artifact.collectionDate) && Objects.equals(this.contentDigest, artifact.contentDigest); } @Override public int hashCode() { return Objects.hash(id, collection, auid, uri, version, committed, storageUrl, originDate, contentDigest, collectionDate); } @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class Artifact {\n"); sb.append(" id: ").append(toIndentedString(id)).append("\n"); sb.append(" collection: ").append(toIndentedString(collection)).append("\n"); sb.append(" auid: ").append(toIndentedString(auid)).append("\n"); sb.append(" uri: ").append(toIndentedString(uri)).append("\n"); sb.append(" version: ").append(toIndentedString(version)).append("\n"); sb.append(" committed: ").append(toIndentedString(committed)).append("\n"); sb.append(" storageUrl: ").append(toIndentedString(storageUrl)).append("\n"); sb.append(" originDate: ").append(toIndentedString(originDate)).append("\n"); sb.append(" contentDigest: 
").append(toIndentedString(contentDigest)).append("\n"); sb.append(" collectionDate: ").append(toIndentedString(collectionDate)).append("\n"); sb.append("}"); return sb.toString(); } /** * Convert the given object to string with each line indented by 4 spaces * (except the first line). */ private String toIndentedString(Object o) { if (o == null) { return "null"; } return o.toString().replace("\n", "\n "); } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.processor; import java.util.ArrayList; import java.util.LinkedList; import java.util.List; import java.util.Objects; import java.util.concurrent.RejectedExecutionException; import org.apache.camel.AsyncCallback; import org.apache.camel.CamelContext; import org.apache.camel.Exchange; import org.apache.camel.ExtendedCamelContext; import org.apache.camel.ExtendedExchange; import org.apache.camel.MessageHistory; import org.apache.camel.NamedNode; import org.apache.camel.NamedRoute; import org.apache.camel.Ordered; import org.apache.camel.Processor; import org.apache.camel.Route; import org.apache.camel.StatefulService; import org.apache.camel.StreamCache; import org.apache.camel.processor.interceptor.BacklogDebugger; import org.apache.camel.processor.interceptor.BacklogTracer; import org.apache.camel.processor.interceptor.DefaultBacklogTracerEventMessage; import org.apache.camel.spi.CamelInternalProcessorAdvice; import org.apache.camel.spi.Debugger; import org.apache.camel.spi.InflightRepository; import org.apache.camel.spi.ManagementInterceptStrategy.InstrumentationProcessor; import org.apache.camel.spi.MessageHistoryFactory; import org.apache.camel.spi.RouteContext; import 
org.apache.camel.spi.RoutePolicy; import org.apache.camel.spi.StreamCachingStrategy; import org.apache.camel.spi.Synchronization; import org.apache.camel.spi.Tracer; import org.apache.camel.spi.Transformer; import org.apache.camel.spi.UnitOfWork; import org.apache.camel.spi.UnitOfWorkFactory; import org.apache.camel.support.CamelContextHelper; import org.apache.camel.support.DefaultConsumer; import org.apache.camel.support.MessageHelper; import org.apache.camel.support.OrderedComparator; import org.apache.camel.support.SynchronizationAdapter; import org.apache.camel.support.UnitOfWorkHelper; import org.apache.camel.support.processor.DelegateAsyncProcessor; import org.apache.camel.util.StopWatch; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Internal {@link Processor} that Camel routing engine used during routing for cross cutting functionality such as: * <ul> * <li>Execute {@link UnitOfWork}</li> * <li>Keeping track which route currently is being routed</li> * <li>Execute {@link RoutePolicy}</li> * <li>Gather JMX performance statics</li> * <li>Tracing</li> * <li>Debugging</li> * <li>Message History</li> * <li>Stream Caching</li> * <li>{@link Transformer}</li> * </ul> * ... and more. * <p/> * This implementation executes this cross cutting functionality as a {@link CamelInternalProcessorAdvice} advice (before and after advice) * by executing the {@link CamelInternalProcessorAdvice#before(org.apache.camel.Exchange)} and * {@link CamelInternalProcessorAdvice#after(org.apache.camel.Exchange, Object)} callbacks in correct order during routing. * This reduces number of stack frames needed during routing, and reduce the number of lines in stacktraces, as well * makes debugging the routing engine easier for end users. 
* <p/> * <b>Debugging tips:</b> Camel end users whom want to debug their Camel applications with the Camel source code, then make sure to * read the source code of this class about the debugging tips, which you can find in the * {@link #process(org.apache.camel.Exchange, org.apache.camel.AsyncCallback)} method. * <p/> * The added advices can implement {@link Ordered} to control in which order the advices are executed. */ public class CamelInternalProcessor extends DelegateAsyncProcessor { private static final Logger LOG = LoggerFactory.getLogger(CamelInternalProcessor.class); private static final Object[] EMPTY_STATES = new Object[0]; private final List<CamelInternalProcessorAdvice<?>> advices = new ArrayList<>(); private byte statefulAdvices; public CamelInternalProcessor() { } public CamelInternalProcessor(Processor processor) { super(processor); } /** * Adds an {@link CamelInternalProcessorAdvice} advice to the list of advices to execute by this internal processor. * * @param advice the advice to add */ public void addAdvice(CamelInternalProcessorAdvice<?> advice) { advices.add(advice); // ensure advices are sorted so they are in the order we want advices.sort(OrderedComparator.get()); if (advice.hasState()) { statefulAdvices++; } } /** * Gets the advice with the given type. * * @param type the type of the advice * @return the advice if exists, or <tt>null</tt> if no advices has been added with the given type. 
*/ public <T> T getAdvice(Class<T> type) { for (CamelInternalProcessorAdvice task : advices) { Object advice = unwrap(task); if (type.isInstance(advice)) { return type.cast(advice); } } return null; } @Override @SuppressWarnings("unchecked") public boolean process(Exchange exchange, AsyncCallback originalCallback) { // ---------------------------------------------------------- // CAMEL END USER - READ ME FOR DEBUGGING TIPS // ---------------------------------------------------------- // If you want to debug the Camel routing engine, then there is a lot of internal functionality // the routing engine executes during routing messages. You can skip debugging this internal // functionality and instead debug where the routing engine continues routing to the next node // in the routes. The CamelInternalProcessor is a vital part of the routing engine, as its // being used in between the nodes. As an end user you can just debug the code in this class // in between the: // CAMEL END USER - DEBUG ME HERE +++ START +++ // CAMEL END USER - DEBUG ME HERE +++ END +++ // you can see in the code below. // ---------------------------------------------------------- if (processor == null || !continueProcessing(exchange)) { // no processor or we should not continue then we are done originalCallback.done(true); return true; } // optimise to use object array for states, and only for the number of advices that keep state final Object[] states = statefulAdvices > 0 ? 
new Object[statefulAdvices] : EMPTY_STATES; // optimise for loop using index access to avoid creating iterator object for (int i = 0, j = 0; i < advices.size(); i++) { CamelInternalProcessorAdvice task = advices.get(i); try { Object state = task.before(exchange); if (task.hasState()) { states[j++] = state; } } catch (Throwable e) { exchange.setException(e); originalCallback.done(true); return true; } } // create internal callback which will execute the advices in reverse order when done AsyncCallback callback = doneSync -> { try { for (int i = advices.size() - 1, j = states.length - 1; i >= 0; i--) { CamelInternalProcessorAdvice task = advices.get(i); Object state = null; if (task.hasState()) { state = states[j--]; } try { task.after(exchange, state); } catch (Throwable e) { exchange.setException(e); // allow all advices to complete even if there was an exception } } } finally { // ---------------------------------------------------------- // CAMEL END USER - DEBUG ME HERE +++ START +++ // ---------------------------------------------------------- // callback must be called if (originalCallback != null) { exchange.getContext().getReactiveExecutor().schedule(originalCallback); } // ---------------------------------------------------------- // CAMEL END USER - DEBUG ME HERE +++ END +++ // ---------------------------------------------------------- } }; if (exchange.isTransacted()) { // must be synchronized for transacted exchanges if (LOG.isTraceEnabled()) { if (exchange.isTransacted()) { LOG.trace("Transacted Exchange must be routed synchronously for exchangeId: {} -> {}", exchange.getExchangeId(), exchange); } else { LOG.trace("Synchronous UnitOfWork Exchange must be routed synchronously for exchangeId: {} -> {}", exchange.getExchangeId(), exchange); } } // ---------------------------------------------------------- // CAMEL END USER - DEBUG ME HERE +++ START +++ // ---------------------------------------------------------- try { processor.process(exchange); } catch 
(Throwable e) { exchange.setException(e); } // ---------------------------------------------------------- // CAMEL END USER - DEBUG ME HERE +++ END +++ // ---------------------------------------------------------- callback.done(true); return true; } else { final UnitOfWork uow = exchange.getUnitOfWork(); // do uow before processing and if a value is returned the the uow wants to be processed after // was well in the same thread AsyncCallback async = callback; boolean beforeAndAfter = uow != null && uow.isBeforeAfterProcess(); if (beforeAndAfter) { async = uow.beforeProcess(processor, exchange, async); } // ---------------------------------------------------------- // CAMEL END USER - DEBUG ME HERE +++ START +++ // ---------------------------------------------------------- if (LOG.isTraceEnabled()) { LOG.trace("Processing exchange for exchangeId: {} -> {}", exchange.getExchangeId(), exchange); } processor.process(exchange, async); // ---------------------------------------------------------- // CAMEL END USER - DEBUG ME HERE +++ END +++ // ---------------------------------------------------------- // optimize to only do after uow processing if really needed if (beforeAndAfter) { exchange.getContext().getReactiveExecutor().schedule(() -> { // execute any after processor work (in current thread, not in the callback) uow.afterProcess(processor, exchange, callback, false); }); } if (LOG.isTraceEnabled()) { LOG.trace("Exchange processed and is continued routed asynchronously for exchangeId: {} -> {}", exchange.getExchangeId(), exchange); } // must return false return false; } } @Override public String toString() { return processor != null ? processor.toString() : super.toString(); } /** * Strategy to determine if we should continue processing the {@link Exchange}. 
*/ private boolean continueProcessing(Exchange exchange) { Object stop = exchange.getProperty(Exchange.ROUTE_STOP); if (stop != null) { boolean doStop = exchange.getContext().getTypeConverter().convertTo(Boolean.class, stop); if (doStop) { LOG.debug("Exchange is marked to stop routing: {}", exchange); return false; } } // determine if we can still run, or the camel context is forcing a shutdown boolean forceShutdown = exchange.getContext().getShutdownStrategy().forceShutdown(this); if (forceShutdown) { String msg = "Run not allowed as ShutdownStrategy is forcing shutting down, will reject executing exchange: " + exchange; LOG.debug(msg); if (exchange.getException() == null) { exchange.setException(new RejectedExecutionException(msg)); } return false; } // yes we can continue return true; } /** * Advice to invoke callbacks for before and after routing. */ public static class RouteLifecycleAdvice implements CamelInternalProcessorAdvice<Object> { private Route route; public void setRoute(Route route) { this.route = route; } @Override public Object before(Exchange exchange) throws Exception { UnitOfWork uow = exchange.getUnitOfWork(); if (uow != null) { uow.beforeRoute(exchange, route); } return null; } @Override public void after(Exchange exchange, Object object) throws Exception { UnitOfWork uow = exchange.getUnitOfWork(); if (uow != null) { uow.afterRoute(exchange, route); } } @Override public boolean hasState() { return false; } } /** * Advice to keep the {@link InflightRepository} up to date. 
*/ public static class RouteInflightRepositoryAdvice implements CamelInternalProcessorAdvice { private final InflightRepository inflightRepository; private final String id; public RouteInflightRepositoryAdvice(InflightRepository inflightRepository, String id) { this.inflightRepository = inflightRepository; this.id = id; } @Override public Object before(Exchange exchange) throws Exception { inflightRepository.add(exchange, id); return null; } @Override public void after(Exchange exchange, Object state) throws Exception { inflightRepository.remove(exchange, id); } @Override public boolean hasState() { return false; } } /** * Advice to execute any {@link RoutePolicy} a route may have been configured with. */ public static class RoutePolicyAdvice implements CamelInternalProcessorAdvice { private final Logger log = LoggerFactory.getLogger(getClass()); private final List<RoutePolicy> routePolicies; private Route route; public RoutePolicyAdvice(List<RoutePolicy> routePolicies) { this.routePolicies = routePolicies; } public void setRoute(Route route) { this.route = route; } /** * Strategy to determine if this policy is allowed to run * * @param policy the policy * @return <tt>true</tt> to run */ boolean isRoutePolicyRunAllowed(RoutePolicy policy) { if (policy instanceof StatefulService) { StatefulService ss = (StatefulService) policy; return ss.isRunAllowed(); } return true; } @Override public Object before(Exchange exchange) throws Exception { // invoke begin for (RoutePolicy policy : routePolicies) { try { if (isRoutePolicyRunAllowed(policy)) { policy.onExchangeBegin(route, exchange); } } catch (Exception e) { log.warn("Error occurred during onExchangeBegin on RoutePolicy: " + policy + ". 
This exception will be ignored", e); } } return null; } @Override public void after(Exchange exchange, Object data) throws Exception { // do not invoke it if Camel is stopping as we don't want // the policy to start a consumer during Camel is stopping if (isCamelStopping(exchange.getContext())) { return; } for (RoutePolicy policy : routePolicies) { try { if (isRoutePolicyRunAllowed(policy)) { policy.onExchangeDone(route, exchange); } } catch (Exception e) { log.warn("Error occurred during onExchangeDone on RoutePolicy: " + policy + ". This exception will be ignored", e); } } } private static boolean isCamelStopping(CamelContext context) { if (context != null) { return context.isStopping() || context.isStopped(); } return false; } @Override public boolean hasState() { return false; } } /** * Advice to execute the {@link BacklogTracer} if enabled. */ public static final class BacklogTracerAdvice implements CamelInternalProcessorAdvice, Ordered { private final BacklogTracer backlogTracer; private final NamedNode processorDefinition; private final NamedRoute routeDefinition; private final boolean first; public BacklogTracerAdvice(BacklogTracer backlogTracer, NamedNode processorDefinition, NamedRoute routeDefinition, boolean first) { this.backlogTracer = backlogTracer; this.processorDefinition = processorDefinition; this.routeDefinition = routeDefinition; this.first = first; } @Override public Object before(Exchange exchange) throws Exception { if (backlogTracer.shouldTrace(processorDefinition, exchange)) { long timestamp = System.currentTimeMillis(); String toNode = processorDefinition.getId(); String exchangeId = exchange.getExchangeId(); String messageAsXml = MessageHelper.dumpAsXml(exchange.getIn(), true, 4, backlogTracer.isBodyIncludeStreams(), backlogTracer.isBodyIncludeFiles(), backlogTracer.getBodyMaxChars()); // if first we should add a pseudo trace message as well, so we have a starting message (eg from the route) String routeId = routeDefinition != null ? 
routeDefinition.getRouteId() : null; if (first) { long created = exchange.getCreated(); DefaultBacklogTracerEventMessage pseudo = new DefaultBacklogTracerEventMessage(backlogTracer.incrementTraceCounter(), created, routeId, null, exchangeId, messageAsXml); backlogTracer.traceEvent(pseudo); } DefaultBacklogTracerEventMessage event = new DefaultBacklogTracerEventMessage(backlogTracer.incrementTraceCounter(), timestamp, routeId, toNode, exchangeId, messageAsXml); backlogTracer.traceEvent(event); } return null; } @Override public void after(Exchange exchange, Object data) throws Exception { // noop } @Override public boolean hasState() { return false; } @Override public int getOrder() { // we want tracer just before calling the processor return Ordered.LOWEST - 1; } } /** * Advice to execute the {@link org.apache.camel.processor.interceptor.BacklogDebugger} if enabled. */ public static final class BacklogDebuggerAdvice implements CamelInternalProcessorAdvice<StopWatch>, Ordered { private final BacklogDebugger backlogDebugger; private final Processor target; private final NamedNode definition; private final String nodeId; public BacklogDebuggerAdvice(BacklogDebugger backlogDebugger, Processor target, NamedNode definition) { this.backlogDebugger = backlogDebugger; this.target = target; this.definition = definition; this.nodeId = definition.getId(); } @Override public StopWatch before(Exchange exchange) throws Exception { if (backlogDebugger.isEnabled() && (backlogDebugger.hasBreakpoint(nodeId) || backlogDebugger.isSingleStepMode())) { StopWatch watch = new StopWatch(); backlogDebugger.beforeProcess(exchange, target, definition); return watch; } else { return null; } } @Override public void after(Exchange exchange, StopWatch stopWatch) throws Exception { if (stopWatch != null) { backlogDebugger.afterProcess(exchange, target, definition, stopWatch.taken()); } } @Override public int getOrder() { // we want debugger just before calling the processor return Ordered.LOWEST; } 
} /** * Advice to execute when using custom debugger. */ public static final class DebuggerAdvice implements CamelInternalProcessorAdvice<StopWatch>, Ordered { private final Debugger debugger; private final Processor target; private final NamedNode definition; public DebuggerAdvice(Debugger debugger, Processor target, NamedNode definition) { this.debugger = debugger; this.target = target; this.definition = definition; } @Override public StopWatch before(Exchange exchange) throws Exception { debugger.beforeProcess(exchange, target, definition); return new StopWatch(); } @Override public void after(Exchange exchange, StopWatch stopWatch) throws Exception { debugger.afterProcess(exchange, target, definition, stopWatch.taken()); } @Override public int getOrder() { // we want debugger just before calling the processor return Ordered.LOWEST; } } /** * Advice to inject new {@link UnitOfWork} to the {@link Exchange} if needed, and as well to ensure * the {@link UnitOfWork} is done and stopped. */ public static class UnitOfWorkProcessorAdvice implements CamelInternalProcessorAdvice<UnitOfWork> { private final RouteContext routeContext; private String routeId; private UnitOfWorkFactory uowFactory; public UnitOfWorkProcessorAdvice(RouteContext routeContext) { this.routeContext = routeContext; if (routeContext != null) { this.routeId = routeContext.getRouteId(); this.uowFactory = routeContext.getCamelContext().adapt(ExtendedCamelContext.class).getUnitOfWorkFactory(); } } @Override public UnitOfWork before(Exchange exchange) throws Exception { // if the exchange doesn't have from route id set, then set it if it originated // from this unit of work if (routeContext != null && exchange.getFromRouteId() == null) { if (routeId == null) { this.routeId = routeContext.getRouteId(); } exchange.adapt(ExtendedExchange.class).setFromRouteId(routeId); } // only return UnitOfWork if we created a new as then its us that handle the lifecycle to done the created UoW UnitOfWork created = null; 
if (exchange.getUnitOfWork() == null) { // If there is no existing UoW, then we should start one and // terminate it once processing is completed for the exchange. created = createUnitOfWork(exchange); exchange.adapt(ExtendedExchange.class).setUnitOfWork(created); created.start(); } // for any exchange we should push/pop route context so we can keep track of which route we are routing if (routeContext != null) { UnitOfWork existing = exchange.getUnitOfWork(); if (existing != null) { existing.pushRouteContext(routeContext); } } return created; } @Override public void after(Exchange exchange, UnitOfWork uow) throws Exception { UnitOfWork existing = exchange.getUnitOfWork(); // execute done on uow if we created it, and the consumer is not doing it if (uow != null) { UnitOfWorkHelper.doneUow(uow, exchange); } // after UoW is done lets pop the route context which must be done on every existing UoW if (routeContext != null && existing != null) { existing.popRouteContext(); } } protected UnitOfWork createUnitOfWork(Exchange exchange) { if (uowFactory != null) { return uowFactory.createUnitOfWork(exchange); } else { return exchange.getContext().adapt(ExtendedCamelContext.class).getUnitOfWorkFactory().createUnitOfWork(exchange); } } } /** * Advice when an EIP uses the <tt>shareUnitOfWork</tt> functionality. */ public static class ChildUnitOfWorkProcessorAdvice extends UnitOfWorkProcessorAdvice { private final UnitOfWork parent; public ChildUnitOfWorkProcessorAdvice(RouteContext routeContext, UnitOfWork parent) { super(routeContext); this.parent = parent; } @Override protected UnitOfWork createUnitOfWork(Exchange exchange) { // let the parent create a child unit of work to be used return parent.createChildUnitOfWork(exchange); } } /** * Advice when Message History has been enabled. 
*/ @SuppressWarnings("unchecked") public static class MessageHistoryAdvice implements CamelInternalProcessorAdvice<MessageHistory> { private final MessageHistoryFactory factory; private final NamedNode definition; private final String routeId; public MessageHistoryAdvice(MessageHistoryFactory factory, NamedNode definition) { this.factory = factory; this.definition = definition; this.routeId = CamelContextHelper.getRouteId(definition); } @Override public MessageHistory before(Exchange exchange) throws Exception { // we may be routing outside a route in an onException or interceptor and if so then grab // route id from the exchange UoW state String targetRouteId = this.routeId; if (targetRouteId == null) { UnitOfWork uow = exchange.getUnitOfWork(); RouteContext rc = uow != null ? uow.getRouteContext() : null; if (rc != null) { targetRouteId = rc.getRouteId(); } } MessageHistory history = factory.newMessageHistory(targetRouteId, definition, System.currentTimeMillis(), exchange); if (history != null) { List<MessageHistory> list = exchange.getProperty(Exchange.MESSAGE_HISTORY, List.class); if (list == null) { list = new LinkedList<>(); exchange.setProperty(Exchange.MESSAGE_HISTORY, list); } list.add(history); } return history; } @Override public void after(Exchange exchange, MessageHistory history) throws Exception { if (history != null) { history.nodeProcessingDone(); } } } /** * Advice that stores the node id and label of the processor that is processing the exchange. 
*/ public static class NodeHistoryAdvice implements CamelInternalProcessorAdvice { private final String id; private final String label; public NodeHistoryAdvice(NamedNode definition) { this.id = definition.getId(); this.label = definition.getLabel(); } @Override public String before(Exchange exchange) throws Exception { ExtendedExchange ee = exchange.adapt(ExtendedExchange.class); ee.setHistoryNodeId(id); ee.setHistoryNodeLabel(label); return null; } @Override public void after(Exchange exchange, Object data) throws Exception { ExtendedExchange ee = exchange.adapt(ExtendedExchange.class); ee.setHistoryNodeId(null); ee.setHistoryNodeLabel(null); } @Override public boolean hasState() { return false; } } /** * Advice for {@link org.apache.camel.spi.StreamCachingStrategy} */ public static class StreamCachingAdvice implements CamelInternalProcessorAdvice<StreamCache>, Ordered { private final StreamCachingStrategy strategy; public StreamCachingAdvice(StreamCachingStrategy strategy) { this.strategy = strategy; } @Override public StreamCache before(Exchange exchange) throws Exception { // check if body is already cached Object body = exchange.getIn().getBody(); if (body == null) { return null; } else if (body instanceof StreamCache) { StreamCache sc = (StreamCache) body; // reset so the cache is ready to be used before processing sc.reset(); return sc; } // cache the body and if we could do that replace it as the new body StreamCache sc = strategy.cache(exchange); if (sc != null) { exchange.getIn().setBody(sc); } return sc; } @Override public void after(Exchange exchange, StreamCache sc) throws Exception { Object body = exchange.getMessage().getBody(); if (body instanceof StreamCache) { // reset so the cache is ready to be reused after processing ((StreamCache) body).reset(); } } @Override public int getOrder() { // we want stream caching first return Ordered.HIGHEST; } } /** * Advice for delaying */ public static class DelayerAdvice implements CamelInternalProcessorAdvice 
{ private final Logger log = LoggerFactory.getLogger(getClass()); private final long delay; public DelayerAdvice(long delay) { this.delay = delay; } @Override public Object before(Exchange exchange) throws Exception { try { log.trace("Sleeping for: {} millis", delay); Thread.sleep(delay); } catch (InterruptedException e) { log.debug("Sleep interrupted"); Thread.currentThread().interrupt(); throw e; } return null; } @Override public void after(Exchange exchange, Object data) throws Exception { // noop } @Override public boolean hasState() { return false; } } /** * Advice for tracing */ public static class TracingAdvice implements CamelInternalProcessorAdvice { private final Tracer tracer; private final NamedNode processorDefinition; private final NamedRoute routeDefinition; private final Synchronization tracingAfterRoute; private boolean added; public TracingAdvice(Tracer tracer, NamedNode processorDefinition, NamedRoute routeDefinition, boolean first) { this.tracer = tracer; this.processorDefinition = processorDefinition; this.routeDefinition = routeDefinition; this.tracingAfterRoute = routeDefinition != null ? 
new TracingAfterRoute(tracer, routeDefinition.getRouteId()) : null; } @Override public Object before(Exchange exchange) throws Exception { if (!added && tracingAfterRoute != null) { // add before route and after route tracing but only once per route, so check if there is already an existing boolean contains = exchange.getUnitOfWork().containsSynchronization(tracingAfterRoute); if (!contains) { added = true; tracer.traceBeforeRoute(routeDefinition, exchange); exchange.adapt(ExtendedExchange.class).addOnCompletion(tracingAfterRoute); } } tracer.traceBeforeNode(processorDefinition, exchange); return null; } @Override public void after(Exchange exchange, Object data) throws Exception { tracer.traceAfterNode(processorDefinition, exchange); } @Override public boolean hasState() { return false; } private static final class TracingAfterRoute extends SynchronizationAdapter { private final Tracer tracer; private final String routeId; private TracingAfterRoute(Tracer tracer, String routeId) { this.tracer = tracer; this.routeId = routeId; } @Override public void onAfterRoute(Route route, Exchange exchange) { if (routeId.equals(route.getId())) { tracer.traceAfterRoute(route, exchange); } } @Override public boolean equals(Object o) { // only match equals on route id so we can check this from containsSynchronization // to avoid adding multiple times for the same route id if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } TracingAfterRoute that = (TracingAfterRoute) o; return routeId.equals(that.routeId); } @Override public int hashCode() { return Objects.hash(routeId); } } } /** * Wrap an InstrumentationProcessor into a CamelInternalProcessorAdvice */ public static <T> CamelInternalProcessorAdvice<T> wrap(InstrumentationProcessor<T> instrumentationProcessor) { if (instrumentationProcessor instanceof CamelInternalProcessor) { return (CamelInternalProcessorAdvice<T>) instrumentationProcessor; } else { return new 
CamelInternalProcessorAdviceWrapper<>(instrumentationProcessor); } } public static Object unwrap(CamelInternalProcessorAdvice<?> advice) { if (advice instanceof CamelInternalProcessorAdviceWrapper) { return ((CamelInternalProcessorAdviceWrapper) advice).unwrap(); } else { return advice; } } static class CamelInternalProcessorAdviceWrapper<T> implements CamelInternalProcessorAdvice<T>, Ordered { final InstrumentationProcessor<T> instrumentationProcessor; public CamelInternalProcessorAdviceWrapper(InstrumentationProcessor<T> instrumentationProcessor) { this.instrumentationProcessor = instrumentationProcessor; } InstrumentationProcessor<T> unwrap() { return instrumentationProcessor; } @Override public int getOrder() { return instrumentationProcessor.getOrder(); } @Override public T before(Exchange exchange) throws Exception { return instrumentationProcessor.before(exchange); } @Override public void after(Exchange exchange, T data) throws Exception { instrumentationProcessor.after(exchange, data); } } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.catalina.session;

import static org.jboss.web.CatalinaMessages.MESSAGES;

import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.io.IOException;
import java.security.AccessController;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;
import java.util.HashMap;
import java.util.Map;

import org.apache.catalina.Container;
import org.apache.catalina.Context;
import org.apache.catalina.Lifecycle;
import org.apache.catalina.LifecycleException;
import org.apache.catalina.LifecycleListener;
import org.apache.catalina.Session;
import org.apache.catalina.Store;
import org.apache.catalina.security.SecurityUtil;
import org.apache.catalina.util.LifecycleSupport;
import org.jboss.web.CatalinaLogger;

/**
 * Extends the <b>ManagerBase</b> class to implement most of the
 * functionality required by a Manager which supports any kind of
 * persistence, even if only for restarts.
 * <p>
 * <b>IMPLEMENTATION NOTE</b>: Correct behavior of session storing and
 * reloading depends upon external calls to the <code>start()</code> and
 * <code>stop()</code> methods of this class at the correct times.
 *
 * @author Craig R.
McClanahan * @author Jean-Francois Arcand * @version $Revision: 1857 $ $Date: 2011-10-27 18:52:51 +0200 (Thu, 27 Oct 2011) $ */ public abstract class PersistentManagerBase extends ManagerBase implements Lifecycle, PropertyChangeListener { // ---------------------------------------------------- Security Classes private class PrivilegedStoreClear implements PrivilegedExceptionAction { PrivilegedStoreClear() { } public Object run() throws Exception{ store.clear(); return null; } } private class PrivilegedStoreRemove implements PrivilegedExceptionAction { private String id; PrivilegedStoreRemove(String id) { this.id = id; } public Object run() throws Exception{ store.remove(id); return null; } } private class PrivilegedStoreLoad implements PrivilegedExceptionAction { private String id; PrivilegedStoreLoad(String id) { this.id = id; } public Object run() throws Exception{ return store.load(id); } } private class PrivilegedStoreSave implements PrivilegedExceptionAction { private Session session; PrivilegedStoreSave(Session session) { this.session = session; } public Object run() throws Exception{ store.save(session); return null; } } private class PrivilegedStoreKeys implements PrivilegedExceptionAction { PrivilegedStoreKeys() { } public Object run() throws Exception{ return store.keys(); } } // ----------------------------------------------------- Instance Variables /** * The descriptive information about this implementation. */ private static final String info = "PersistentManagerBase/1.1"; /** * The lifecycle event support for this component. */ protected LifecycleSupport lifecycle = new LifecycleSupport(this); /** * The maximum number of active Sessions allowed, or -1 for no limit. */ protected int maxActiveSessions = -1; /** * The descriptive name of this Manager implementation (for logging). */ private static String name = "PersistentManagerBase"; /** * Has this component been started yet? 
*/ protected boolean started = false; /** * Store object which will manage the Session store. */ protected Store store = null; /** * Whether to save and reload sessions when the Manager <code>unload</code> * and <code>load</code> methods are called. */ protected boolean saveOnRestart = true; /** * How long a session must be idle before it should be backed up. * -1 means sessions won't be backed up. */ protected int maxIdleBackup = -1; /** * Minimum time a session must be idle before it is swapped to disk. * This overrides maxActiveSessions, to prevent thrashing if there are lots * of active sessions. Setting to -1 means it's ignored. */ protected int minIdleSwap = -1; /** * The maximum time a session may be idle before it should be swapped * to file just on general principle. Setting this to -1 means sessions * should not be forced out. */ protected int maxIdleSwap = -1; /** * Number of session creations that failed due to maxActiveSessions. */ protected int rejectedSessions = 0; /** * Processing time during session expiration and passivation. */ protected long processingTime = 0; /** * Sessions currently being swapped in and the associated locks */ private final Map<String,Object> sessionSwapInLocks = new HashMap<String,Object>(); // ------------------------------------------------------------- Properties /** * Indicates how many seconds old a session can get, after its last use in a * request, before it should be backed up to the store. -1 means sessions * are not backed up. */ public int getMaxIdleBackup() { return maxIdleBackup; } /** * Sets the option to back sessions up to the Store after they * are used in a request. Sessions remain available in memory * after being backed up, so they are not passivated as they are * when swapped out. The value set indicates how old a session * may get (since its last use) before it must be backed up: -1 * means sessions are not backed up. 
* <p> * Note that this is not a hard limit: sessions are checked * against this age limit periodically according to <b>processExpiresFrequency</b>. * This value should be considered to indicate when a session is * ripe for backing up. * <p> * So it is possible that a session may be idle for maxIdleBackup + * processExpiresFrequency * engine.backgroundProcessorDelay seconds, plus the time it takes to handle other * session expiration, swapping, etc. tasks. * * @param backup The number of seconds after their last accessed * time when they should be written to the Store. */ public void setMaxIdleBackup (int backup) { if (backup == this.maxIdleBackup) return; int oldBackup = this.maxIdleBackup; this.maxIdleBackup = backup; support.firePropertyChange("maxIdleBackup", new Integer(oldBackup), new Integer(this.maxIdleBackup)); } /** * The time in seconds after which a session should be swapped out of * memory to disk. */ public int getMaxIdleSwap() { return maxIdleSwap; } /** * Sets the time in seconds after which a session should be swapped out of * memory to disk. */ public void setMaxIdleSwap(int max) { if (max == this.maxIdleSwap) return; int oldMaxIdleSwap = this.maxIdleSwap; this.maxIdleSwap = max; support.firePropertyChange("maxIdleSwap", new Integer(oldMaxIdleSwap), new Integer(this.maxIdleSwap)); } /** * The minimum time in seconds that a session must be idle before * it can be swapped out of memory, or -1 if it can be swapped out * at any time. */ public int getMinIdleSwap() { return minIdleSwap; } /** * Sets the minimum time in seconds that a session must be idle before * it can be swapped out of memory due to maxActiveSession. Set it to -1 * if it can be swapped out at any time. 
*/ public void setMinIdleSwap(int min) { if (this.minIdleSwap == min) return; int oldMinIdleSwap = this.minIdleSwap; this.minIdleSwap = min; support.firePropertyChange("minIdleSwap", new Integer(oldMinIdleSwap), new Integer(this.minIdleSwap)); } /** * Set the Container with which this Manager has been associated. If it is a * Context (the usual case), listen for changes to the session timeout * property. * * @param container * The associated Container */ public void setContainer(Container container) { // De-register from the old Container (if any) if ((this.container != null) && (this.container instanceof Context)) ((Context) this.container).removePropertyChangeListener(this); // Default processing provided by our superclass super.setContainer(container); // Register with the new Container (if any) if ((this.container != null) && (this.container instanceof Context)) { ((Context) this.container).addPropertyChangeListener(this); } } /** * Return descriptive information about this Manager implementation and * the corresponding version number, in the format * <code>&lt;description&gt;/&lt;version&gt;</code>. */ public String getInfo() { return (info); } /** * Return true, if the session id is loaded in memory * otherwise false is returned * * @param id The session id for the session to be searched for */ public boolean isLoaded( String id ){ try { if ( super.findSession(id) != null ) return true; } catch (IOException e) { CatalinaLogger.SESSION_LOGGER.persistentManagerIsLoadedException(id, e); } return false; } /** * Return the maximum number of active Sessions allowed, or -1 for * no limit. */ public int getMaxActiveSessions() { return (this.maxActiveSessions); } /** * Set the maximum number of active Sessions allowed, or -1 for * no limit. 
* * @param max The new maximum number of sessions */ public void setMaxActiveSessions(int max) { int oldMaxActiveSessions = this.maxActiveSessions; this.maxActiveSessions = max; support.firePropertyChange("maxActiveSessions", new Integer(oldMaxActiveSessions), new Integer(this.maxActiveSessions)); } /** * Number of session creations that failed due to maxActiveSessions. * * @return The count */ public int getRejectedSessions() { return rejectedSessions; } public void setRejectedSessions(int rejectedSessions) { this.rejectedSessions = rejectedSessions; } /** * Return the descriptive short name of this Manager implementation. */ public String getName() { return (name); } /** * Get the started status. */ protected boolean isStarted() { return started; } /** * Set the started flag */ protected void setStarted(boolean started) { this.started = started; } /** * Set the Store object which will manage persistent Session * storage for this Manager. * * @param store the associated Store */ public void setStore(Store store) { this.store = store; store.setManager(this); } /** * Return the Store object which manages persistent Session * storage for this Manager. */ public Store getStore() { return (this.store); } /** * Indicates whether sessions are saved when the Manager is shut down * properly. This requires the unload() method to be called. */ public boolean getSaveOnRestart() { return saveOnRestart; } /** * Set the option to save sessions to the Store when the Manager is * shut down, then loaded when the Manager starts again. If set to * false, any sessions found in the Store may still be picked up when * the Manager is started again. * * @param saveOnRestart true if sessions should be saved on restart, false if * they should be ignored. 
*/ public void setSaveOnRestart(boolean saveOnRestart) { if (saveOnRestart == this.saveOnRestart) return; boolean oldSaveOnRestart = this.saveOnRestart; this.saveOnRestart = saveOnRestart; support.firePropertyChange("saveOnRestart", new Boolean(oldSaveOnRestart), new Boolean(this.saveOnRestart)); } // --------------------------------------------------------- Public Methods /** * Clear all sessions from the Store. */ public void clearStore() { if (store == null) return; try { if (SecurityUtil.isPackageProtectionEnabled()){ try{ AccessController.doPrivileged(new PrivilegedStoreClear()); }catch(PrivilegedActionException ex){ Exception exception = ex.getException(); CatalinaLogger.SESSION_LOGGER.persistentManagerStoreClearException(exception); } } else { store.clear(); } } catch (IOException e) { CatalinaLogger.SESSION_LOGGER.persistentManagerStoreClearException(e); } } /** * Implements the Manager interface, direct call to processExpires and processPersistenceChecks */ public void processExpires() { long timeNow = System.currentTimeMillis(); Session sessions[] = findSessions(); int expireHere = 0 ; if(CatalinaLogger.SESSION_LOGGER.isDebugEnabled()) CatalinaLogger.SESSION_LOGGER.debug("Start expire sessions " + getName() + " at " + timeNow + " sessioncount " + sessions.length); for (int i = 0; i < sessions.length; i++) { if (!sessions[i].isValid()) { expiredSessions++; expireHere++; } } processPersistenceChecks(); if ((getStore() != null) && (getStore() instanceof StoreBase)) { ((StoreBase) getStore()).processExpires(); } long timeEnd = System.currentTimeMillis(); if(CatalinaLogger.SESSION_LOGGER.isDebugEnabled()) CatalinaLogger.SESSION_LOGGER.debug("End expire sessions " + getName() + " processingTime " + (timeEnd - timeNow) + " expired sessions: " + expireHere); processingTime += (timeEnd - timeNow); } /** * Called by the background thread after active sessions have been checked * for expiration, to allow sessions to be swapped out, backed up, etc. 
*/ public void processPersistenceChecks() { processMaxIdleSwaps(); processMaxActiveSwaps(); processMaxIdleBackups(); } /** * Return the active Session, associated with this Manager, with the * specified session id (if any); otherwise return <code>null</code>. * This method checks the persistence store if persistence is enabled, * otherwise just uses the functionality from ManagerBase. * * @param id The session id for the session to be returned * * @exception IllegalStateException if a new session cannot be * instantiated for any reason * @exception IOException if an input/output error occurs while * processing this request */ public Session findSession(String id) throws IOException { Session session = super.findSession(id); // OK, at this point, we're not sure if another thread is trying to // remove the session or not so the only way around this is to lock it // (or attempt to) and then try to get it by this session id again. If // the other code ran swapOut, then we should get a null back during // this run, and if not, we lock it out so we can access the session // safely. if(session != null) { synchronized(session){ session = super.findSession(session.getIdInternal()); if(session != null){ // To keep any external calling code from messing up the // concurrency. session.access(); session.endAccess(); } } } if (session != null) return (session); // See if the Session is in the Store session = swapIn(id); return (session); } /** * Remove this Session from the active Sessions for this Manager, * but not from the Store. (Used by the PersistentValve) * * @param session Session to be removed */ public void removeSuper(Session session) { super.remove (session); } /** * Load all sessions found in the persistence mechanism, assuming * they are marked as valid and have not passed their expiration * limit. If persistence is not supported, this method returns * without doing anything. * <p> * Note that by default, this method is not called by the MiddleManager * class. 
In order to use it, a subclass must specifically call it, * for example in the start() and/or processPersistenceChecks() methods. */ public void load() { // Initialize our internal data structures sessions.clear(); if (store == null) return; String[] ids = null; try { if (SecurityUtil.isPackageProtectionEnabled()){ try{ ids = (String[]) AccessController.doPrivileged(new PrivilegedStoreKeys()); }catch(PrivilegedActionException ex){ Exception exception = ex.getException(); CatalinaLogger.SESSION_LOGGER.persistentManagerLoadFailed(exception); } } else { ids = store.keys(); } } catch (IOException e) { CatalinaLogger.SESSION_LOGGER.persistentManagerLoadFailed(e); return; } int n = ids.length; if (n == 0) return; for (int i = 0; i < n; i++) try { swapIn(ids[i]); } catch (IOException e) { CatalinaLogger.SESSION_LOGGER.persistentManagerLoadFailed(e); } } /** * Remove this Session from the active Sessions for this Manager, * and from the Store. * * @param session Session to be removed */ public void remove(Session session) { super.remove (session); if (store != null){ removeSession(session.getIdInternal()); } } /** * Remove this Session from the active Sessions for this Manager, * and from the Store. * * @param id Session's id to be removed */ protected void removeSession(String id){ try { if (SecurityUtil.isPackageProtectionEnabled()){ try{ AccessController.doPrivileged(new PrivilegedStoreRemove(id)); }catch(PrivilegedActionException ex){ Exception exception = ex.getException(); CatalinaLogger.SESSION_LOGGER.persistentManagerSessionRemoveFailed(id, exception); } } else { store.remove(id); } } catch (IOException e) { CatalinaLogger.SESSION_LOGGER.persistentManagerSessionRemoveFailed(id, e); } } /** * Save all currently active sessions in the appropriate persistence * mechanism, if any. If persistence is not supported, this method * returns without doing anything. * <p> * Note that by default, this method is not called by the MiddleManager * class. 
In order to use it, a subclass must specifically call it, * for example in the stop() and/or processPersistenceChecks() methods. */ public void unload() { if (store == null) return; Session sessions[] = findSessions(); int n = sessions.length; if (n == 0) return; CatalinaLogger.SESSION_LOGGER.persistentManagerSessionUnloadCount(n); for (int i = 0; i < n; i++) try { swapOut(sessions[i]); } catch (IOException e) { ; // This is logged in writeSession() } } // ------------------------------------------------------ Protected Methods /** * Look for a session in the Store and, if found, restore * it in the Manager's list of active sessions if appropriate. * The session will be removed from the Store after swapping * in, but will not be added to the active session list if it * is invalid or past its expiration. */ protected Session swapIn(String id) throws IOException { if (store == null) return null; Object swapInLock = null; /* * The purpose of this sync and these locks is to make sure that a * session is only loaded once. It doesn't matter if the lock is removed * and then another thread enters this method and tries to load the same * session. That thread will re-create a swapIn lock for that session, * quickly find that the session is already in sessions, use it and * carry on. 
*/ synchronized (this) { swapInLock = sessionSwapInLocks.get(id); if (swapInLock == null) { swapInLock = new Object(); sessionSwapInLocks.put(id, swapInLock); } } Session session = null; synchronized (swapInLock) { // First check to see if another thread has loaded the session into // the manager session = sessions.get(id); if (session == null) { try { if (SecurityUtil.isPackageProtectionEnabled()){ try { session = (Session) AccessController.doPrivileged( new PrivilegedStoreLoad(id)); } catch (PrivilegedActionException ex) { Exception e = ex.getException(); CatalinaLogger.SESSION_LOGGER.persistentManagerSwapInFailed(id, e); if (e instanceof IOException){ throw (IOException)e; } else if (e instanceof ClassNotFoundException) { throw (ClassNotFoundException)e; } } } else { session = store.load(id); } } catch (ClassNotFoundException e) { throw MESSAGES.persistentManagerDeserializeError(id, e); } if (session != null && !session.isValid()) { CatalinaLogger.SESSION_LOGGER.persistentManagerSwapInInvalid(id); session.expire(); removeSession(id); session = null; } if (session != null) { CatalinaLogger.SESSION_LOGGER.sessionSwapIn(id); session.setManager(this); // make sure the listeners know about it. ((StandardSession)session).tellNew(); add(session); ((StandardSession)session).activate(); // endAccess() to ensure timeouts happen correctly. // access() to keep access count correct or it will end up // negative session.access(); session.endAccess(); } } } // Make sure the lock is removed synchronized (this) { sessionSwapInLocks.remove(id); } return (session); } /** * Remove the session from the Manager's list of active * sessions and write it out to the Store. If the session * is past its expiration or invalid, this method does * nothing. * * @param session The Session to write out. 
     */
    protected void swapOut(Session session) throws IOException {

        if (store == null || !session.isValid()) {
            return;
        }

        // Passivate before persisting so the session is written in its
        // passivated state, then drop it from the active-session map.
        ((StandardSession) session).passivate();
        writeSession(session);
        super.remove(session);
        session.recycle();

    }

    /**
     * Write the provided session to the Store without modifying
     * the copy in memory or triggering passivation events. Does
     * nothing if the session is invalid or past its expiration.
     */
    protected void writeSession(Session session) throws IOException {

        if (store == null || !session.isValid()) {
            return;
        }

        try {
            if (SecurityUtil.isPackageProtectionEnabled()) {
                try {
                    // run the store save with this package's privileges
                    AccessController.doPrivileged(new PrivilegedStoreSave(session));
                } catch (PrivilegedActionException ex) {
                    Exception exception = ex.getException();
                    CatalinaLogger.SESSION_LOGGER.persistentManagerStoreSaveError(session.getIdInternal(), exception);
                }
            } else {
                store.save(session);
            }
        } catch (IOException e) {
            // log and rethrow so callers (e.g. swapOut) know the save failed
            CatalinaLogger.SESSION_LOGGER.persistentManagerStoreSaveError(session.getIdInternal(), e);
            throw e;
        }

    }

    // ------------------------------------------------------ Lifecycle Methods

    /**
     * Add a lifecycle event listener to this component.
     *
     * @param listener The listener to add
     */
    public void addLifecycleListener(LifecycleListener listener) {

        lifecycle.addLifecycleListener(listener);

    }

    /**
     * Get the lifecycle listeners associated with this lifecycle. If this
     * Lifecycle has no listeners registered, a zero-length array is returned.
     */
    public LifecycleListener[] findLifecycleListeners() {

        return lifecycle.findLifecycleListeners();

    }

    /**
     * Remove a lifecycle event listener from this component.
     *
     * @param listener The listener to remove
     */
    public void removeLifecycleListener(LifecycleListener listener) {

        lifecycle.removeLifecycleListener(listener);

    }

    /**
     * Prepare for the beginning of active use of the public methods of this
     * component. This method should be called after <code>configure()</code>,
     * and before any of the public methods of the component are utilized.
* * @exception LifecycleException if this component detects a fatal error * that prevents this component from being used */ public void start() throws LifecycleException { // Validate and update our current component state if (started) { return; } if( ! initialized ) init(); lifecycle.fireLifecycleEvent(START_EVENT, null); started = true; if (store == null) CatalinaLogger.SESSION_LOGGER.noStoreConfigured(); else if (store instanceof Lifecycle) ((Lifecycle)store).start(); } /** * Gracefully terminate the active use of the public methods of this * component. This method should be the last one called on a given * instance of this component. * * @exception LifecycleException if this component detects a fatal error * that needs to be reported */ public void stop() throws LifecycleException { // Validate and update our current component state if (!isStarted()) { return; } lifecycle.fireLifecycleEvent(STOP_EVENT, null); setStarted(false); if (getStore() != null && saveOnRestart) { unload(); } else { // Expire all active sessions Session sessions[] = findSessions(); for (int i = 0; i < sessions.length; i++) { StandardSession session = (StandardSession) sessions[i]; if (!session.isValid()) continue; session.expire(); } } if (getStore() != null && getStore() instanceof Lifecycle) ((Lifecycle)getStore()).stop(); if( initialized ) destroy(); } // ----------------------------------------- PropertyChangeListener Methods /** * Process property change events from our associated Context. 
* * @param event The property change event that has occurred */ public void propertyChange(PropertyChangeEvent event) { // Validate the source of this event if (!(event.getSource() instanceof Context)) return; Context context = (Context) event.getSource(); // Process a relevant property change if (event.getPropertyName().equals("sessionTimeout")) { try { setMaxInactiveInterval ( ((Integer) event.getNewValue()).intValue()*60 ); } catch (NumberFormatException e) { CatalinaLogger.SESSION_LOGGER.managerInvalidSessionTimeout(event.getNewValue().toString()); } } } // ------------------------------------------------------ Protected Methods /** * Swap idle sessions out to Store if they are idle too long. */ protected void processMaxIdleSwaps() { if (!isStarted() || maxIdleSwap < 0) return; Session sessions[] = findSessions(); long timeNow = System.currentTimeMillis(); // Swap out all sessions idle longer than maxIdleSwap if (maxIdleSwap >= 0) { for (int i = 0; i < sessions.length; i++) { StandardSession session = (StandardSession) sessions[i]; synchronized (session) { if (!session.isValid()) continue; int timeIdle = // Truncate, do not round up (int) ((timeNow - session.getLastAccessedTime()) / 1000L); if (timeIdle > maxIdleSwap && timeIdle > minIdleSwap) { if (session.accessCount != null && session.accessCount.get() > 0) { // Session is currently being accessed - skip it continue; } CatalinaLogger.SESSION_LOGGER.sessionSwapOut(session.getIdInternal(), timeIdle); try { swapOut(session); } catch (IOException e) { ; // This is logged in writeSession() } } } } } } /** * Swap idle sessions out to Store if too many are active */ protected void processMaxActiveSwaps() { if (!isStarted() || getMaxActiveSessions() < 0) return; Session sessions[] = findSessions(); // FIXME: Smarter algorithm (LRU) if (getMaxActiveSessions() >= sessions.length) return; CatalinaLogger.SESSION_LOGGER.persistentManagerCheckIdle(sessions.length); int toswap = sessions.length - getMaxActiveSessions(); 
long timeNow = System.currentTimeMillis(); for (int i = 0; i < sessions.length && toswap > 0; i++) { StandardSession session = (StandardSession) sessions[i]; synchronized (session) { int timeIdle = // Truncate, do not round up (int) ((timeNow - session.getThisAccessedTimeInternal()) / 1000L); if (timeIdle > minIdleSwap) { if (session.accessCount != null && session.accessCount.get() > 0) { // Session is currently being accessed - skip it continue; } CatalinaLogger.SESSION_LOGGER.persistentManagerSwapIdleSession(session.getIdInternal(), timeIdle); try { swapOut(session); } catch (IOException e) { // This is logged in writeSession() } toswap--; } } } } /** * Back up idle sessions. */ protected void processMaxIdleBackups() { if (!isStarted() || maxIdleBackup < 0) return; Session sessions[] = findSessions(); long timeNow = System.currentTimeMillis(); // Back up all sessions idle longer than maxIdleBackup if (maxIdleBackup >= 0) { for (int i = 0; i < sessions.length; i++) { StandardSession session = (StandardSession) sessions[i]; synchronized (session) { if (!session.isValid()) continue; int timeIdle = // Truncate, do not round up (int) ((timeNow - session.getLastAccessedTime()) / 1000L); if (timeIdle > maxIdleBackup) { CatalinaLogger.SESSION_LOGGER.persistentManagerBackupSession(session.getIdInternal(), timeIdle); try { writeSession(session); } catch (IOException e) { ; // This is logged in writeSession() } } } } } } }
package de.hpi.jbpm;

import java.io.StringWriter;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;

import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.w3c.dom.NamedNodeMap;

/**
 * A jPDL transition (edge) between two nodes, convertible in both directions:
 * from the Oryx JSON model to jPDL XML ({@link #toJpdl()}) and from a jPDL DOM
 * node back to the Oryx JSON model ({@link #toJson()}).
 */
public class Transition {

    private String uuid;           // Oryx resource id ("oryx_" + random UUID)
    private String name;
    private String target;         // name of the node this transition points to
    private String condition;      // optional condition expression
    private Node targetNode;       // resolved target node (JSON direction only)
    private Docker start;          // docker on the source shape
    private Docker end;            // docker on the target shape
    private List<Docker> dockers;  // intermediate path dockers (start/end excluded)

    /**
     * Builds a transition from its Oryx JSON representation.
     * Missing optional properties are silently skipped (best-effort parsing).
     *
     * @param transition the JSON object describing the edge
     */
    public Transition(JSONObject transition) {
        this.dockers = new ArrayList<Docker>();
        try {
            this.name = transition.getJSONObject("properties")
                    .getString("name");
        } catch (JSONException e) {
            // name is optional
        }
        try {
            this.condition = transition.getJSONObject("properties").getString(
                    "conditionexpression");
        } catch (JSONException e) {
            // condition is optional
        }
        try {
            this.target = JsonToJpdl.getInstance().getTargetName(
                    transition.getJSONObject("target").getString("resourceId"));
        } catch (JSONException e) {
            // target may be absent here; toJpdl() rejects edges without one
        }
        try {
            JSONArray dockerArray = transition.getJSONArray("dockers");
            // Create path dockers. Start and end will be ignored.
            if (dockerArray.length() > 2) {
                for (int i = 1; i < dockerArray.length() - 1; i++) {
                    try {
                        JSONObject docker = dockerArray.getJSONObject(i);
                        int x = Math.round(Float.parseFloat(docker
                                .getString("x")));
                        int y = Math.round(Float.parseFloat(docker
                                .getString("y")));
                        dockers.add(new Docker(x, y));
                    } catch (JSONException e) {
                        // skip malformed docker entries
                    }
                }
            }
        } catch (JSONException f) {
            // dockers array is optional
        }
    }

    /**
     * Builds a transition from a jPDL DOM node, resolving the target node via
     * the current process and parsing the "g" attribute into path dockers.
     *
     * @param transition the DOM node of the &lt;transition&gt; element
     */
    public Transition(org.w3c.dom.Node transition) {
        this.uuid = "oryx_" + UUID.randomUUID().toString();
        NamedNodeMap attributes = transition.getAttributes();
        this.name = JpdlToJson.getAttribute(attributes, "name");
        this.condition = JpdlToJson.getAttribute(attributes, "condition");
        this.target = JpdlToJson.getAttribute(attributes, "to");
        this.targetNode = JpdlToJson.getProcess().getTarget(target);
        this.dockers = new ArrayList<Docker>();
        String g = JpdlToJson.getAttribute(attributes, "g");
        if (g != null) {
            // Create path dockers. Start and end are missing.
            // g is e.g. "120,42;120,45:0,0" — path dockers before ':'.
            String[] pathDockers = g.split(":")[0].split(";");
            for (int i = 0; i < pathDockers.length; i++) {
                if (pathDockers[i].length() > 1) {
                    String[] dockerPosition = pathDockers[i].split(",");
                    if (dockerPosition.length == 2) {
                        Docker d = new Docker(
                                Integer.parseInt(dockerPosition[0]),
                                Integer.parseInt(dockerPosition[1]));
                        dockers.add(d);
                    }
                }
            }
        }
    }

    public Node getTargetNode() {
        return targetNode;
    }

    public void setTargetNode(Node targetNode) {
        this.targetNode = targetNode;
    }

    public String getCondition() {
        return condition;
    }

    public void setCondition(String condition) {
        this.condition = condition;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getTarget() {
        return target;
    }

    public void setTarget(String target) {
        this.target = target;
    }

    public String getUuid() {
        return uuid;
    }

    public void setUuid(String uuid) {
        this.uuid = uuid;
    }

    public Docker getStart() {
        return start;
    }

    public void setStart(Docker start) {
        this.start = start;
    }

    public Docker getEnd() {
        return end;
    }

    public void setEnd(Docker end) {
        this.end = end;
    }

    /**
     * Serializes this transition to a jPDL &lt;transition&gt; element.
     *
     * @return the jPDL XML fragment for this edge
     * @throws InvalidModelException if the edge has no target
     */
    public String toJpdl() throws InvalidModelException {
        StringWriter jpdl = new StringWriter();
        jpdl.write("<transition");

        if (name != null) {
            jpdl.write(JsonToJpdl.transformAttribute("name", name));
        }

        if (target != null) {
            jpdl.write(JsonToJpdl.transformAttribute("to", target));
        } else {
            throw new InvalidModelException("Invalid edge. Target is missing.");
        }

        if (dockers.size() > 0) {
            // g="120,42;120,45:0,0"
            // Index-based loop instead of indexOf(d) inside the loop: indexOf
            // was O(n) per element (O(n^2) total) and emitted the ':'
            // terminator too early when two equal dockers occurred.
            StringBuilder dockerString = new StringBuilder();
            for (int i = 0; i < dockers.size(); i++) {
                dockerString.append(dockers.get(i).toJpdl());
                // last path docker is terminated with ':', all others with ';'
                dockerString.append(i == dockers.size() - 1 ? ":" : ";");
            }
            jpdl.write(JsonToJpdl.transformAttribute("g", dockerString.toString()));
        }

        if (condition != null && !condition.equals("")) {
            jpdl.write(">\n");
            jpdl.write("<condition expr=\"");
            jpdl.write(condition);
            jpdl.write("\" />\n");
            jpdl.write("</transition>\n");
        } else {
            jpdl.write("/>\n");
        }
        return jpdl.toString();
    }

    /**
     * Serializes this transition to its Oryx JSON shape (a SequenceFlow).
     *
     * @return the JSON object for this edge
     * @throws JSONException on JSON construction failure
     */
    public JSONObject toJson() throws JSONException {
        JSONObject stencil = new JSONObject();
        stencil.put("id", "SequenceFlow");

        JSONObject targetAsJson = new JSONObject();
        targetAsJson.put("resourceId", targetNode.getUuid());
        JSONArray outgoing = new JSONArray();
        outgoing.put(targetAsJson);

        JSONObject properties = new JSONObject();
        if (name != null)
            properties.put("name", name);
        if (condition != null) {
            properties.put("conditionexpression", condition);
            properties.put("conditiontype", "Expression");
            properties.put("showdiamondmarker", "true");
        } else {
            properties.put("conditiontype", "None");
            properties.put("showdiamondmarker", "false");
        }

        JSONArray childShapes = new JSONArray();

        // end docker points at the center of the target shape
        end = new Docker(targetNode.getBounds().getWidth() / 2, targetNode
                .getBounds().getHeight() / 2);
        Bounds bounds = new Bounds();

        JSONArray allDockers = new JSONArray();
        // NOTE(review): start is only set via setStart(..); if it was never
        // set this throws a NullPointerException — confirm callers always
        // set it before serializing.
        allDockers.put(start.toJson());
        for (Docker d : dockers) {
            allDockers.put(d.toJson());
        }
        allDockers.put(end.toJson());

        JSONObject node = new JSONObject();
        node.put("resourceId", uuid);
        node.put("stencil", stencil);
        node.put("outgoing", outgoing);
        node.put("properties", properties);
        node.put("childShapes", childShapes);
        node.put("dockers", allDockers);
        node.put("bounds", bounds.toJson());
        return node;
    }
}
package net.floodlightcontroller.forwarding;

import net.floodlightcontroller.core.IOFSwitch;
import net.floodlightcontroller.core.internal.IOFSwitchService;
import net.floodlightcontroller.core.module.FloodlightModuleContext;
import net.floodlightcontroller.core.types.NodePortTuple;
import net.floodlightcontroller.packet.*;
import net.floodlightcontroller.routing.VirtualGatewayInstance;
import net.floodlightcontroller.routing.VirtualGatewayInterface;
import net.floodlightcontroller.test.FloodlightTestCase;
import org.easymock.EasyMock;
import org.junit.Before;
import org.junit.Test;
import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.match.MatchField;
import org.projectfloodlight.openflow.types.*;

import java.util.HashMap;
import java.util.Map;

import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.reset;
import static org.junit.Assert.*;

/**
 * Unit tests for L3 routing support: virtual gateway instance construction,
 * gateway-interface selection, and switch/node-port-tuple/subnet membership
 * bookkeeping.
 *
 * @author Qing Wang (qw@g.clemson.edu) at 4/6/18
 */
public class L3RoutingTest extends FloodlightTestCase {
    private FloodlightModuleContext fmc;
    private IOFSwitchService switchService;
    private IOFSwitch sw;
    private String swDPIDStr = "00:00:00:00:00:00:00:01";
    private DatapathId swDPID = DatapathId.of(swDPIDStr);
    private OFFactory factory = OFFactories.getFactory(OFVersion.OF_13);
    private VirtualGatewayInstance gateway;
    private OFPacketIn packetIn;
    // NOTE(review): removed unused fields "testPacket" and "packetInIpv6" --
    // they were never assigned or read anywhere in this class.

    @Before
    @Override
    public void setUp() throws Exception {
        super.setUp();
        fmc = new FloodlightModuleContext();
        switchService = getMockSwitchService();
        fmc.addService(IOFSwitchService.class, switchService);

        // Minimal mock switch: only the DPID and OF factory are consulted by these tests.
        sw = EasyMock.createMock(IOFSwitch.class);
        reset(sw);
        expect(sw.getId()).andReturn(swDPID).anyTimes();
        expect(sw.getOFFactory()).andReturn(factory).anyTimes();
        replay(sw);

        // Load mock switches into the switch map.
        Map<DatapathId, IOFSwitch> switches = new HashMap<>();
        switches.put(swDPID, sw);
        mockSwitchManager.setSwitches(switches);

        // L3 initialization.
        packetIn = buildIPv4PacketIn();
        gateway = initGateway();
    }

    /** Builds a simple Ethernet/IPv4/UDP test frame with a 1-byte payload. */
    private IPacket buildTestPacket() {
        return new Ethernet()
                .setDestinationMACAddress("00:11:22:33:44:55")
                .setSourceMACAddress("00:44:33:22:11:00")
                .setEtherType(EthType.IPv4)
                .setPayload(
                        new IPv4()
                                .setTtl((byte) 128)
                                .setSourceAddress("192.168.1.1")
                                .setDestinationAddress("192.168.1.2")
                                .setPayload(new UDP()
                                        .setSourcePort((short) 5000)
                                        .setDestinationPort((short) 5001)
                                        .setPayload(new Data(new byte[] {0x01}))));
    }

    /** Builds an OF 1.3 packet-in carrying {@link #buildTestPacket()} with a matching match. */
    private OFPacketIn buildIPv4PacketIn() {
        return factory.buildPacketIn()
                .setMatch(factory.buildMatch()
                        .setExact(MatchField.IN_PORT, OFPort.of(1))
                        .setExact(MatchField.ETH_SRC, MacAddress.of("00:44:33:22:11:00"))
                        .setExact(MatchField.ETH_DST, MacAddress.of("00:11:22:33:44:55"))
                        .setExact(MatchField.ETH_TYPE, EthType.IPv4)
                        .setExact(MatchField.IPV4_SRC, IPv4Address.of("192.168.1.1"))
                        .setExact(MatchField.IPV4_DST, IPv4Address.of("192.168.1.2"))
                        .setExact(MatchField.IP_PROTO, IpProtocol.UDP)
                        .setExact(MatchField.UDP_SRC, TransportPort.of(5000))
                        .setExact(MatchField.UDP_DST, TransportPort.of(5001))
                        .build())
                .setBufferId(OFBufferId.NO_BUFFER)
                .setData(buildTestPacket().serialize())
                .setReason(OFPacketInReason.NO_MATCH)
                .build();
    }

    /**
     * Creates a fully-populated gateway instance: two interfaces (10.0.0.1,
     * 20.0.0.1), one switch member, one node-port-tuple member, one subnet.
     */
    private VirtualGatewayInstance initGateway() {
        VirtualGatewayInstance instance;
        VirtualGatewayInstance.VirtualGatewayInstanceBuilder builder =
                VirtualGatewayInstance.createInstance("gateway-1");

        Map<String, VirtualGatewayInterface> interfaces = new HashMap<>();
        VirtualGatewayInterface interface1 =
                new VirtualGatewayInterface("interface-1", "10.0.0.1", "255.255.255.0");
        VirtualGatewayInterface interface2 =
                new VirtualGatewayInterface("interface-2", "20.0.0.1", "255.255.255.0");
        interfaces.put(interface1.getInterfaceName(), interface1);
        interfaces.put(interface2.getInterfaceName(), interface2);

        builder.setGatewayMac(MacAddress.of("aa:bb:cc:dd:ee:ff"));
        builder.setInterfaces(interfaces);
        instance = builder.build();

        instance.addSwitchMember(DatapathId.of(1L));
        instance.addNptMember(new NodePortTuple(DatapathId.of(2L), OFPort.of(1)));
        instance.addSubnetMember(IPv4AddressWithMask.of("192.168.1.0/24"));
        return instance;
    }

    @Test
    public void testGatewayInterfaceIPSelection() throws Exception {
        // "30.0.0.1" is not a configured gateway interface IP address
        assertFalse(gateway.isAGatewayIntf(IPv4Address.of("30.0.0.1")));

        // "10.0.0.1" is a configured gateway interface IP address
        assertTrue(gateway.isAGatewayIntf(IPv4Address.of("10.0.0.1")));

        // If destination IP is "10.0.0.25", the packet should select gateway interface "10.0.0.1" to go
        IPv4Address dstIP = IPv4Address.of("10.0.0.25");
        assertEquals(IPv4Address.of("10.0.0.1"), gateway.findGatewayInft(dstIP).get().getIp());

        // If destination IP is "20.0.0.10", the packet should select gateway interface "20.0.0.1" to go
        IPv4Address dstIP1 = IPv4Address.of("20.0.0.10");
        assertEquals(IPv4Address.of("20.0.0.1"), gateway.findGatewayInft(dstIP1).get().getIp());
    }

    @Test
    public void testBuildGatewayInstance() throws Exception {
        // Create gateway instance
        VirtualGatewayInstance instance = initGateway();

        assertNotNull(instance);
        assertNotNull(instance.getName());
        assertNotNull(instance.getGatewayMac());
        assertEquals("gateway-1", instance.getName());
        assertEquals(MacAddress.of("aa:bb:cc:dd:ee:ff"), instance.getGatewayMac());

        assertTrue(instance.getSwitchMembers().contains(DatapathId.of(1L)));
        assertEquals(1, instance.getSwitchMembers().size());
        assertTrue(instance.getNptMembers().contains(new NodePortTuple(DatapathId.of(2L), OFPort.of(1))));
        assertEquals(1, instance.getNptMembers().size());
        assertTrue(instance.getSubsetMembers().contains(IPv4AddressWithMask.of("192.168.1.0/24")));
        assertEquals(1, instance.getSubsetMembers().size());

        assertEquals(2, instance.getInterfaces().size());
        assertNotNull(instance.getInterface("interface-1").get());
        assertFalse(instance.getInterface("interface-10").isPresent());

        // Create gateway instance with only the necessary fields
        VirtualGatewayInstance instance1 = VirtualGatewayInstance.createInstance("gateway-2")
                .setGatewayMac(MacAddress.of("aa:bb:cc:dd:ee:ff")).build();

        assertNotNull(instance1);
        assertEquals("gateway-2", instance1.getName());
        assertEquals(MacAddress.of("aa:bb:cc:dd:ee:ff"), instance1.getGatewayMac());
        assertEquals(0, instance1.getInterfaces().size());
        assertEquals(0, instance1.getSwitchMembers().size());
        assertEquals(0, instance1.getNptMembers().size());
        assertEquals(0, instance1.getSubsetMembers().size());
    }

    @Test(expected = IllegalArgumentException.class)
    public void testBuildGatewayInstanceWithMissingFields() throws Exception {
        // Create virtual gateway instance with invalid name
        VirtualGatewayInstance instance1 = VirtualGatewayInstance.createInstance("").build();
        VirtualGatewayInstance instance2 = VirtualGatewayInstance.createInstance(null).build();

        // Create virtual gateway instance with invalid MAC address
        VirtualGatewayInstance instance3 = VirtualGatewayInstance.createInstance("gateway")
                .setGatewayMac(MacAddress.NONE).build();

        // Create virtual gateway instance without configured switches, node-port-tuples or subnets
        VirtualGatewayInstance instance4 = VirtualGatewayInstance.createInstance("gateway")
                .setGatewayMac(MacAddress.of("aa:bb:cc:dd:ee:ff")).build();
        assertEquals(0, instance4.getSwitchMembers().size());
        assertEquals(0, instance4.getNptMembers().size());
        assertEquals(0, instance4.getSubsetMembers().size());
    }

    @Test
    public void testAddInterface() throws Exception {
        VirtualGatewayInstance instance = VirtualGatewayInstance.createInstance("gateway-1")
                .setGatewayMac(MacAddress.of("aa:bb:cc:dd:ee:ff")).build();

        // interface-1 should be correctly added
        VirtualGatewayInterface interface1 =
                new VirtualGatewayInterface("interface-1", "10.0.0.1", "255.255.255.0");
        instance.addInterface(interface1);

        assertEquals(1, instance.getInterfaces().size());
        assertEquals(IPv4Address.of("10.0.0.1"), instance.getInterface("interface-1").get().getIp());
    }

    // Renamed from "removeInferface" (typo) -- not part of any caller-facing API.
    @Test
    public void testRemoveInterface() throws Exception {
        VirtualGatewayInstance gatewayInstance = initGateway();

        // interface-3 will not be removed because it hasn't been added to the gateway instance yet
        assertFalse(gatewayInstance.removeInterface("interface-3"));
        assertEquals(2, gatewayInstance.getInterfaces().size());

        // interface-1 will be removed
        assertTrue(gatewayInstance.removeInterface("interface-1"));
        assertEquals(1, gatewayInstance.getInterfaces().size());
    }

    @Test
    public void testClearInterface() throws Exception {
        VirtualGatewayInstance instance = initGateway();

        // All interfaces should be removed
        instance.clearInterfaces();
        assertEquals(0, instance.getInterfaces().size());
    }

    @Test
    public void testUpdateInterface() throws Exception {
        VirtualGatewayInstance instance = initGateway();

        // Gateway MAC address should be updated
        instance = instance.getBuilder().setGatewayMac(MacAddress.of("ff:ee:dd:cc:bb:aa")).build();
        assertEquals(MacAddress.of("ff:ee:dd:cc:bb:aa"), instance.getGatewayMac());

        // interface-1 IP should be updated correctly to "30.0.0.1"
        VirtualGatewayInterface newInterface =
                new VirtualGatewayInterface("interface-1", "30.0.0.1", "255.255.255.0");
        instance.addInterface(newInterface);
        assertEquals(IPv4Address.of("30.0.0.1"), instance.getInterface("interface-1").get().getIp());

        // interface-3 IP shouldn't be updated as there is no "interface-3" added to the gateway yet
        assertFalse(instance.getInterface("interface-3").isPresent());
        assertEquals(IPv4Address.of("30.0.0.1"), instance.getInterface("interface-1").get().getIp());
        assertEquals(2, instance.getInterfaces().size());
    }

    @Test
    public void testUpdateInterfaceUsingBuilder() throws Exception {
        VirtualGatewayInstance instance = initGateway();

        // New interface list should replace old interface list when the builder rebuilds the instance
        Map<String, VirtualGatewayInterface> interfaces = new HashMap<>();
        VirtualGatewayInterface newInterface3 =
                new VirtualGatewayInterface("interface-3", "30.0.0.1", "255.255.255.0");
        VirtualGatewayInterface newInterface4 =
                new VirtualGatewayInterface("interface-4", "40.0.0.1", "255.255.255.0");
        VirtualGatewayInterface newInterface5 =
                new VirtualGatewayInterface("interface-5", "50.0.0.1", "255.255.255.0");
        interfaces.put(newInterface3.getInterfaceName(), newInterface3);
        interfaces.put(newInterface4.getInterfaceName(), newInterface4);
        interfaces.put(newInterface5.getInterfaceName(), newInterface5);

        instance = instance.getBuilder().setInterfaces(interfaces).build();

        assertEquals(3, instance.getInterfaces().size());
        assertTrue(instance.getInterface("interface-3").isPresent());
        assertTrue(instance.getInterface("interface-4").isPresent());
        assertTrue(instance.getInterface("interface-5").isPresent());
        assertEquals(IPv4Address.of("30.0.0.1"), instance.getInterface("interface-3").get().getIp());
        assertEquals(IPv4Address.of("40.0.0.1"), instance.getInterface("interface-4").get().getIp());
        assertEquals(IPv4Address.of("50.0.0.1"), instance.getInterface("interface-5").get().getIp());
        assertFalse(instance.getInterface("interface-1").isPresent());
        assertFalse(instance.getInterface("interface-2").isPresent());
    }

    @Test
    public void testAddOrRemoveSwitchMemberFromInstance() throws Exception {
        VirtualGatewayInstance instance = VirtualGatewayInstance.createInstance("gateway-1")
                .setGatewayMac(MacAddress.of("aa:bb:cc:dd:ee:ff")).build();

        // Should have two switch members added (duplicate add is a no-op)
        instance.addSwitchMember(DatapathId.of(1L));
        instance.addSwitchMember(DatapathId.of(1L));
        instance.addSwitchMember(DatapathId.of(2L));
        assertEquals(2, instance.getSwitchMembers().size());

        // Remove one switch member, should only have 1 switch member left
        instance.removeSwitchMember(DatapathId.of(1L));
        assertEquals(1, instance.getSwitchMembers().size());

        // Remove all switch members, now zero members left
        instance.addSwitchMember(DatapathId.of(1L));
        instance.clearSwitchMembers();
        assertEquals(0, instance.getSwitchMembers().size());
    }

    @Test
    public void testAddOrRemoveNptMemberFromInstance() throws Exception {
        VirtualGatewayInstance instance = VirtualGatewayInstance.createInstance("gateway-1")
                .setGatewayMac(MacAddress.of("aa:bb:cc:dd:ee:ff")).build();

        // Should have four npt members added
        instance.addNptMember(new NodePortTuple(DatapathId.of(1L), OFPort.of(1)));
        instance.addNptMember(new NodePortTuple(DatapathId.of(1L), OFPort.of(2)));
        instance.addNptMember(new NodePortTuple(DatapathId.of(2L), OFPort.of(1)));
        instance.addNptMember(new NodePortTuple(DatapathId.of(2L), OFPort.of(2)));
        assertEquals(4, instance.getNptMembers().size());

        // Remove one node-port-tuple, should have 3 members left
        instance.removeNptMember(new NodePortTuple(DatapathId.of(1L), OFPort.of(1)));
        assertEquals(3, instance.getNptMembers().size());

        // Remove all node-port-tuples, now zero members left
        instance.clearNptMembers();
        assertEquals(0, instance.getNptMembers().size());
    }

    @Test
    public void testAddOrRemoveSubnetMemberFromInstance() throws Exception {
        VirtualGatewayInstance instance = VirtualGatewayInstance.createInstance("gateway-1")
                .setGatewayMac(MacAddress.of("aa:bb:cc:dd:ee:ff")).build();

        // Should have two subnets added
        instance.addSubnetMember(IPv4AddressWithMask.of("10.0.0.0/24"));
        instance.addSubnetMember(IPv4AddressWithMask.of("20.0.0.0/24"));
        assertEquals(2, instance.getSubsetMembers().size());

        // Remove one subnet, should have one left
        instance.removeSubnetMember(IPv4AddressWithMask.of("10.0.0.0/24"));
        assertEquals(1, instance.getSubsetMembers().size());

        // Remove all subnets, should have zero left
        instance.clearSubnetMembers();
        assertEquals(0, instance.getSubsetMembers().size());
    }

    @Test
    public void testRemoveSwitchFromInstance() throws Exception {
        VirtualGatewayInstance instance = VirtualGatewayInstance.createInstance("gateway-1")
                .setGatewayMac(MacAddress.of("aa:bb:cc:dd:ee:ff")).build();

        instance.addSwitchMember(DatapathId.of(1L));
        instance.addSwitchMember(DatapathId.of(2L));
        instance.addNptMember(new NodePortTuple(DatapathId.of(1L), OFPort.of(1)));
        instance.addNptMember(new NodePortTuple(DatapathId.of(1L), OFPort.of(2)));
        instance.addNptMember(new NodePortTuple(DatapathId.of(2L), OFPort.of(1)));
        instance.addNptMember(new NodePortTuple(DatapathId.of(2L), OFPort.of(2)));

        // Now remove switch DPID 1L; both the switch member list and the npt member list should update
        instance.removeSwitchFromInstance(DatapathId.of(1L));
        assertEquals(1, instance.getSwitchMembers().size());
        assertEquals(2, instance.getNptMembers().size());
    }
}
/*
 * Copyright 2015 herd contributors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.finra.herd.dao;

import java.sql.Timestamp;
import java.util.List;
import java.util.Map;

import org.apache.commons.collections4.MultiValuedMap;

import org.finra.herd.model.api.xml.BusinessObjectDataKey;
import org.finra.herd.model.api.xml.BusinessObjectDefinitionKey;
import org.finra.herd.model.api.xml.BusinessObjectFormatKey;
import org.finra.herd.model.api.xml.CustomDdlKey;
import org.finra.herd.model.api.xml.DataProviderKey;
import org.finra.herd.model.api.xml.EmrClusterDefinitionKey;
import org.finra.herd.model.api.xml.ExpectedPartitionValueKey;
import org.finra.herd.model.api.xml.FileTypeKey;
import org.finra.herd.model.api.xml.NamespaceKey;
import org.finra.herd.model.api.xml.PartitionKeyGroupKey;
import org.finra.herd.model.api.xml.PartitionValueRange;
import org.finra.herd.model.api.xml.StorageBusinessObjectDefinitionDailyUploadStats;
import org.finra.herd.model.api.xml.StorageDailyUploadStats;
import org.finra.herd.model.api.xml.StorageKey;
import org.finra.herd.model.api.xml.StoragePolicyKey;
import org.finra.herd.model.dto.DateRangeDto;
import org.finra.herd.model.dto.StorageAlternateKeyDto;
import org.finra.herd.model.dto.StoragePolicyPriorityLevel;
import org.finra.herd.model.jpa.BusinessObjectDataEntity;
import org.finra.herd.model.jpa.BusinessObjectDataStatusEntity;
import org.finra.herd.model.jpa.BusinessObjectDefinitionEntity;
import org.finra.herd.model.jpa.BusinessObjectFormatEntity;
import org.finra.herd.model.jpa.ConfigurationEntity;
import org.finra.herd.model.jpa.CustomDdlEntity;
import org.finra.herd.model.jpa.DataProviderEntity;
import org.finra.herd.model.jpa.EmrClusterDefinitionEntity;
import org.finra.herd.model.jpa.ExpectedPartitionValueEntity;
import org.finra.herd.model.jpa.FileTypeEntity;
import org.finra.herd.model.jpa.JmsMessageEntity;
import org.finra.herd.model.jpa.JobDefinitionEntity;
import org.finra.herd.model.jpa.NamespaceEntity;
import org.finra.herd.model.jpa.NotificationEventTypeEntity;
import org.finra.herd.model.jpa.OnDemandPriceEntity;
import org.finra.herd.model.jpa.PartitionKeyGroupEntity;
import org.finra.herd.model.jpa.StorageEntity;
import org.finra.herd.model.jpa.StorageFileEntity;
import org.finra.herd.model.jpa.StoragePlatformEntity;
import org.finra.herd.model.jpa.StoragePolicyEntity;
import org.finra.herd.model.jpa.StoragePolicyRuleTypeEntity;
import org.finra.herd.model.jpa.StorageUnitEntity;
import org.finra.herd.model.jpa.StorageUnitStatusEntity;

/**
 * The herd DAO.
 */
// TODO: This class is too big and should be split up into smaller classes (e.g. NamespaceDao, StorageDao, etc.). When this is fixed,
// we can remove the PMD suppress warning below.
@SuppressWarnings("PMD.ExcessivePublicCount")
public interface HerdDao extends BaseJpaDao
{
    /**
     * A default date mask for a single day formatted as yyyy-MM-dd.
     */
    public static final String DEFAULT_SINGLE_DAY_DATE_MASK = "yyyy-MM-dd";

    // System

    /**
     * Returns current timestamp.
     *
     * @return the current timestamp
     */
    public Timestamp getCurrentTimestamp();

    // Configuration

    /**
     * Gets a configuration by its key.
     *
     * @param key the configuration key (case-insensitive)
     *
     * @return the configuration value for the specified key
     */
    public ConfigurationEntity getConfigurationByKey(String key);

    // Namespace

    /**
     * Gets a namespace by its key.
     *
     * @param namespaceKey the namespace key (case-insensitive)
     *
     * @return the namespace entity for the specified key
     */
    public NamespaceEntity getNamespaceByKey(NamespaceKey namespaceKey);

    /**
     * Gets a namespace by its code.
     *
     * @param namespaceCode the namespace code (case-insensitive)
     *
     * @return the namespace entity for the specified code
     */
    public NamespaceEntity getNamespaceByCd(String namespaceCode);

    /**
     * Gets a list of namespace keys for all namespaces defined in the system.
     *
     * @return the list of namespace keys
     */
    public List<NamespaceKey> getNamespaces();

    // DataProvider

    /**
     * Gets a data provider by its key.
     *
     * @param dataProviderKey the data provider key (case-insensitive)
     *
     * @return the data provider for the specified key
     */
    public DataProviderEntity getDataProviderByKey(DataProviderKey dataProviderKey);

    /**
     * Gets a data provider by its name.
     *
     * @param dataProviderName the data provider name (case-insensitive)
     *
     * @return the data provider for the specified name
     */
    public DataProviderEntity getDataProviderByName(String dataProviderName);

    /**
     * Gets a list of data provider keys for all data providers defined in the system.
     *
     * @return the list of data provider keys
     */
    public List<DataProviderKey> getDataProviders();

    // BusinessObjectDefinition

    /**
     * Gets a business object definition by key.
     *
     * @param businessObjectDefinitionKey the business object definition key (case-insensitive)
     *
     * @return the business object definition for the specified key
     */
    public BusinessObjectDefinitionEntity getBusinessObjectDefinitionByKey(BusinessObjectDefinitionKey businessObjectDefinitionKey);

    /**
     * A shortcut for {@link #getBusinessObjectDefinitionByKey(BusinessObjectDefinitionKey)}
     *
     * @param namespace The business object definition namespace
     * @param name The business object definition name
     *
     * @return Business object definition entity or null
     */
    public BusinessObjectDefinitionEntity getBusinessObjectDefinitionByKey(String namespace, String name);

    /**
     * Gets all business object definition keys.
     *
     * @return the list of all business object definition keys
     */
    public List<BusinessObjectDefinitionKey> getBusinessObjectDefinitions();

    /**
     * Gets a list of all business object definition keys for a specified namespace, or, if namespace is not specified, for all namespaces in the system.
     *
     * @param namespaceCode the optional namespace code (case-insensitive)
     *
     * @return the list of all business object definition keys
     */
    public List<BusinessObjectDefinitionKey> getBusinessObjectDefinitions(String namespaceCode);

    // FileType

    /**
     * Gets a file type by its code.
     *
     * @param code the file type code (case-insensitive)
     *
     * @return the file type for the specified code
     */
    public FileTypeEntity getFileTypeByCode(String code);

    /**
     * Gets a list of file type keys for all file types defined in the system.
     *
     * @return the list of file type keys
     */
    public List<FileTypeKey> getFileTypes();

    // BusinessObjectFormat

    /**
     * Gets a business object format based on its key. If a format version isn't specified, the latest available format version will be used.
     *
     * @param businessObjectFormatKey the business object format key (case-insensitive)
     *
     * @return the business object format
     */
    public BusinessObjectFormatEntity getBusinessObjectFormatByAltKey(BusinessObjectFormatKey businessObjectFormatKey);

    /**
     * Gets the maximum available version of the specified business object format.
     *
     * @param businessObjectFormatKey the business object format key (case-insensitive)
     *
     * @return the maximum available version of the specified business object format
     */
    public Integer getBusinessObjectFormatMaxVersion(BusinessObjectFormatKey businessObjectFormatKey);

    /**
     * Returns a number of business object format instances that reference a specified partition key group.
     *
     * @param partitionKeyGroupEntity the partition key group entity
     *
     * @return the number of business object format instances that reference this partition key group
     */
    public Long getBusinessObjectFormatCount(PartitionKeyGroupEntity partitionKeyGroupEntity);

    /**
     * Gets a list of business object format keys for the specified business object definition key.
     *
     * @param businessObjectDefinitionKey the business object definition key
     * @param latestBusinessObjectFormatVersion specifies if only the latest (maximum) versions of the relative business object formats are returned
     *
     * @return the list of business object format keys
     */
    public List<BusinessObjectFormatKey> getBusinessObjectFormats(BusinessObjectDefinitionKey businessObjectDefinitionKey,
        boolean latestBusinessObjectFormatVersion);

    // PartitionKeyGroup

    /**
     * Gets a partition key group entity.
     *
     * @param partitionKeyGroupKey the partition key group key (case-insensitive)
     *
     * @return the partition key group entity
     */
    public PartitionKeyGroupEntity getPartitionKeyGroupByKey(PartitionKeyGroupKey partitionKeyGroupKey);

    /**
     * Gets a partition key group entity.
     *
     * @param partitionKeyGroupName the name of the partition key group (case-insensitive)
     *
     * @return the partition key group entity
     */
    public PartitionKeyGroupEntity getPartitionKeyGroupByName(String partitionKeyGroupName);

    /**
     * Gets a list of all existing partition key groups.
     *
     * @return the list of partition key group keys
     */
    public List<PartitionKeyGroupKey> getPartitionKeyGroups();

    // ExpectedPartitionValue

    /**
     * Gets an expected partition value entity by partition key group name, expected partition value, and an optional offset.
     *
     * @param expectedPartitionValueKey the expected partition value key (case-insensitive)
     * @param offset the optional offset
     *
     * @return the expected partition value
     */
    public ExpectedPartitionValueEntity getExpectedPartitionValue(ExpectedPartitionValueKey expectedPartitionValueKey, int offset);

    /**
     * Gets a list of expected partition values by group.
     *
     * @param partitionKeyGroupName the partition key group name (case-insensitive)
     * @param partitionValueRange the optional partition value range
     *
     * @return the list of expected partition values
     */
    public List<ExpectedPartitionValueEntity> getExpectedPartitionValuesByGroupAndRange(String partitionKeyGroupName, PartitionValueRange partitionValueRange);

    // CustomDdl

    /**
     * Gets a custom DDL based on the key.
     *
     * @param customDdlKey the custom DDL key
     *
     * @return the custom DDL
     */
    public CustomDdlEntity getCustomDdlByKey(CustomDdlKey customDdlKey);

    /**
     * Gets the custom DDLs defined for the specified business object format.
     *
     * @param businessObjectFormatKey the business object format key
     *
     * @return the list of custom DDL keys
     */
    public List<CustomDdlKey> getCustomDdls(BusinessObjectFormatKey businessObjectFormatKey);

    // BusinessObjectDataStatus

    /**
     * Gets a business object data status by its code.
     *
     * @param code the business object data status code (case-insensitive)
     *
     * @return the business object data status for the specified code
     */
    public BusinessObjectDataStatusEntity getBusinessObjectDataStatusByCode(String code);

    // BusinessObjectData

    /**
     * Retrieves business object data by its key. If a format version isn't specified, the latest available format version (for this partition value) will be
     * used. If a business object data version isn't specified, the latest data version is returned regardless of the business object data status.
     *
     * @param businessObjectDataKey the business object data key
     *
     * @return the business object data
     */
    public BusinessObjectDataEntity getBusinessObjectDataByAltKey(BusinessObjectDataKey businessObjectDataKey);

    /**
     * Retrieves business object data by its key. If a format version isn't specified, the latest available format version (for this partition value) will be
     * used. If a business object data version isn't specified, the latest data version based on the specified business object data status is returned. When
     * both business object data version and business object data status both are not specified, the latest data version for each set of partition values will
     * be used regardless of the status.
     *
     * @param businessObjectDataKey the business object data key
     * @param businessObjectDataStatus the business object data status. This parameter is ignored when the business object data version is specified.
     *
     * @return the business object data
     */
    public BusinessObjectDataEntity getBusinessObjectDataByAltKeyAndStatus(BusinessObjectDataKey businessObjectDataKey, String businessObjectDataStatus);

    /**
     * Gets a maximum available version of the specified business object data.
     *
     * @param businessObjectDataKey the business object data key
     *
     * @return the maximum available version of the specified business object data
     */
    public Integer getBusinessObjectDataMaxVersion(BusinessObjectDataKey businessObjectDataKey);

    /**
     * Retrieves a maximum available partition value per specified parameters.
     *
     * @param partitionColumnPosition the partition column position (1-based numbering)
     * @param businessObjectFormatKey the business object format key (case-insensitive). If a business object format version isn't specified, the latest
     * available format version for each partition value will be used.
     * @param businessObjectDataVersion the business object data version. If a business object data version isn't specified, the latest data version based on
     * the specified business object data status will be used for each partition value.
     * @param businessObjectDataStatus the business object data status. This parameter is ignored when the business object data version is specified.
     * @param storageNames the list of storage names (case-insensitive)
     * @param upperBoundPartitionValue the optional inclusive upper bound for the maximum available partition value
     * @param lowerBoundPartitionValue the optional inclusive lower bound for the maximum available partition value
     *
     * @return the maximum available partition value
     */
    public String getBusinessObjectDataMaxPartitionValue(int partitionColumnPosition, BusinessObjectFormatKey businessObjectFormatKey,
        Integer businessObjectDataVersion, String businessObjectDataStatus, List<String> storageNames, String upperBoundPartitionValue,
        String lowerBoundPartitionValue);

    /**
     * Retrieves a minimum available partition value per specified parameters.
     *
     * @param partitionColumnPosition the partition column position (1-based numbering)
     * @param businessObjectFormatKey the business object format key (case-insensitive). If a business object format version isn't specified, the latest
     * available format version for each partition value will be used.
     * @param businessObjectDataVersion the business object data version. If a business object data version isn't specified, the latest data version based on
     * the specified business object data status will be used for each partition value.
     * @param businessObjectDataStatus the business object data status. This parameter is ignored when the business object data version is specified.
     * @param storageNames the list of storage names (case-insensitive)
     *
     * @return the minimum available partition value
     */
    public String getBusinessObjectDataMinPartitionValue(int partitionColumnPosition, BusinessObjectFormatKey businessObjectFormatKey,
        Integer businessObjectDataVersion, String businessObjectDataStatus, List<String> storageNames);

    /**
     * Returns a number of business object data instances registered with this business object format.
     *
     * @param businessObjectFormatKey the business object format key
     *
     * @return the number of business object data instances registered with this business object format
     */
    public Long getBusinessObjectDataCount(BusinessObjectFormatKey businessObjectFormatKey);

    /**
     * Retrieves business object data versions that match the specified business object data key with potentially missing business object format and/or data
     * version values.
     *
     * @param businessObjectDataKey the business object data key with potentially missing business object format and/or data version values
     *
     * @return the business object data
     */
    public List<BusinessObjectDataEntity> getBusinessObjectDataEntities(BusinessObjectDataKey businessObjectDataKey);

    /**
     * Retrieves a list of business object data entities per specified parameters.
     *
     * @param businessObjectFormatKey the business object format key (case-insensitive). If a business object format version isn't specified, the latest
     * available format version for each partition value will be used.
     * @param partitionFilters the list of partition filter to be used to select business object data instances. Each partition filter contains a list of
     * primary and sub-partition values in the right order up to the maximum partition levels allowed by business object data registration - with partition
     * values for the relative partitions not to be used for selection passed as nulls.
     * @param businessObjectDataVersion the business object data version. If a business object data version isn't specified, the latest data version based on
     * the specified business object data status is returned.
     * @param businessObjectDataStatus the business object data status. This parameter is ignored when the business object data version is specified. When
     * business object data version and business object data status both are not specified, the latest data version for each set of partition values will be
     * used regardless of the status.
     * @param storageName the name of the storage where the business object data storage unit is located (case-insensitive)
     *
     * @return the list of business object data entities sorted by partition values
     */
    public List<BusinessObjectDataEntity> getBusinessObjectDataEntities(BusinessObjectFormatKey businessObjectFormatKey, List<List<String>> partitionFilters,
        Integer businessObjectDataVersion, String businessObjectDataStatus, String storageName);

    /**
     * Selects business object data having storage files associated with the specified storage and with status not listed as ignored. Only the business object
     * data records that are older than threshold minutes will be selected.
     *
     * @param storageName the storage name
     * @param thresholdMinutes the expiration time in minutes
     * @param businessObjectDataStatusesToIgnore the list of business object data statuses to ignore
     *
     * @return the list of business object data entities sorted by created on
     */
    public List<BusinessObjectDataEntity> getBusinessObjectDataFromStorageOlderThan(String storageName, int thresholdMinutes,
        List<String> businessObjectDataStatusesToIgnore);

    /**
     * Retrieves a map of business object data entities to their corresponding storage policy entities, where the business object data status is supported by
     * the storage policy feature and the business object data alternate key values match storage policy's filter and transition (not taking into account
     * storage policy rules). The storage policy priority level identifies a particular storage policy priority that will be selected by the query. The returned
     * map is ordered by the business object data "created on" timestamp, starting with the oldest business object data entity.
     *
     * @param storagePolicyPriorityLevel the storage policy priority level
     * @param supportedBusinessObjectDataStatuses the list of business object data statuses that storage policies apply to
     * @param startPosition the position of the first result, numbered from 0
     * @param maxResult the maximum number of results to retrieve
     *
     * @return the map of business object data entities to their corresponding storage policy entities
     */
    public Map<BusinessObjectDataEntity, StoragePolicyEntity> getBusinessObjectDataEntitiesMatchingStoragePolicies(
        StoragePolicyPriorityLevel storagePolicyPriorityLevel, List<String> supportedBusinessObjectDataStatuses, int startPosition, int maxResult);

    // StoragePlatform

    /**
     * Gets a storage platform by its name.
     *
     * @param name the storage platform name (case-insensitive)
     *
     * @return the storage platform for the specified name
     */
    public StoragePlatformEntity getStoragePlatformByName(String name);

    // Storage

    /**
     * Gets a storage by its name.
     *
     * @param storageName the storage name (case-insensitive)
     *
     * @return the storage for the specified name
     */
    public StorageEntity getStorageByName(String storageName);

    /**
     * Gets a list of storage keys for all storages defined in the system.
     *
     * @return the list of storage keys
     */
    public List<StorageKey> getStorages();

    // StorageUnitStatus

    /**
     * Gets a storage unit status by its code.
     *
     * @param code the storage unit status code (case-insensitive)
     *
     * @return the storage unit status for the specified code
     */
    public StorageUnitStatusEntity getStorageUnitStatusByCode(String code);

    // StorageUnit

    /**
     * Gets a storage unit identified by the given business object data entity and storage name. Returns {@code null} if storage does not exist.
     *
     * @param businessObjectDataEntity the business object data entity
     * @param storageName the storage name (case-insensitive)
     *
     * @return {@link StorageUnitEntity} or {@code null}
     */
    public StorageUnitEntity getStorageUnitByBusinessObjectDataAndStorageName(BusinessObjectDataEntity businessObjectDataEntity, String storageName);

    /**
     * Returns a first discovered storage unit in the specified storage that overlaps with the directory path.
     *
     * @param storageName the storage name (case-insensitive)
     * @param directoryPath the directory path
     *
     * @return the first found storage unit in the specified storage that overlaps with the directory path or null if none were found
     */
    public StorageUnitEntity getStorageUnitByStorageNameAndDirectoryPath(String storageName, String directoryPath);

    /**
     * Retrieves a list of storage units that belong to the specified storage for the specified business object data.
* * @param storageEntity the storage entity * @param businessObjectDataEntities the list of business object data entities * * @return the list of storage unit entities */ public List<StorageUnitEntity> getStorageUnitsByStorageAndBusinessObjectData(StorageEntity storageEntity, List<BusinessObjectDataEntity> businessObjectDataEntities); /** * Retrieves a list of storage unit entities per specified parameters. * * @param businessObjectFormatKey the business object format key (case-insensitive). If a business object format version isn't specified, the latest * available format version for each partition value will be used. * @param partitionFilters the list of partition filter to be used to select business object data instances. Each partition filter contains a list of * primary and sub-partition values in the right order up to the maximum partition levels allowed by business object data registration - with partition * values for the relative partitions not to be used for selection passed as nulls. * @param businessObjectDataVersion the business object data version. If a business object data version isn't specified, the latest data version based on * the specified business object data status is returned. * @param businessObjectDataStatus the business object data status. This parameter is ignored when the business object data version is specified. When * business object data version and business object data status both are not specified, the latest data version for each set of partition values will be * used regardless of the status. * @param storageNames the optional list of storage names where the business object data storage units should be looked for (case-insensitive) * @param storagePlatformType the optional storage platform type, e.g. S3 for Hive DDL. It is ignored when the list of storages is not empty * @param excludedStoragePlatformType the optional storage platform type to be excluded from search. 
It is ignored when the list of storages is not empty or * the storage platform type is specified * @param selectOnlyAvailableStorageUnits specifies if only available storage units will be selected or any storage units regardless of their status * * @return the list of storage unit entities sorted by partition values and storage names */ public List<StorageUnitEntity> getStorageUnitsByPartitionFiltersAndStorages(BusinessObjectFormatKey businessObjectFormatKey, List<List<String>> partitionFilters, Integer businessObjectDataVersion, String businessObjectDataStatus, List<String> storageNames, String storagePlatformType, String excludedStoragePlatformType, boolean selectOnlyAvailableStorageUnits); // StorageFile /** * Retrieves storage file by storage name and file path. * * @param storageName the storage name (case-insensitive) * @param filePath the file path * * @return the storage file */ public StorageFileEntity getStorageFileByStorageNameAndFilePath(String storageName, String filePath); /** * Counts all storage files matching the file path prefix in the specified storage. * * @param storageName the storage name (case-insensitive) * @param filePathPrefix the file path prefix that file paths should match * * @return the storage file count */ public Long getStorageFileCount(String storageName, String filePathPrefix); /** * Retrieves a sorted list of storage files matching S3 key prefix in the specified storage. * * @param storageName the storage name (case-insensitive) * @param filePathPrefix the file path prefix that file paths should match * * @return the list of storage file entities sorted by file path */ public List<StorageFileEntity> getStorageFilesByStorageAndFilePathPrefix(String storageName, String filePathPrefix); /** * Retrieves a map of storage unit ids to their corresponding storage file paths. * * @param storageUnitEntities the list of storage unit entities * * @return the map of storage unit ids to their corresponding storage file paths. 
*/ public MultiValuedMap<Integer, String> getStorageFilePathsByStorageUnits(List<StorageUnitEntity> storageUnitEntities); // StoragePolicyRuleType /** * Gets a storage policy rule type by it's code. * * @param code the storage policy rule type code (case-insensitive) * * @return the storage policy rule type for the specified code */ public StoragePolicyRuleTypeEntity getStoragePolicyRuleTypeByCode(String code); // StoragePolicy /** * Retrieves a storage policy entity by alternate key. * * @param key the storage policy key (case-insensitive) * * @return the storage policy entity */ public StoragePolicyEntity getStoragePolicyByAltKey(StoragePolicyKey key); // StorageUploadStatistics /** * Retrieves cumulative daily upload statistics for the storage for the specified upload date range. * * @param storageAlternateKey the storage alternate key (case-insensitive) * @param dateRange the upload date range * * @return the upload statistics */ public StorageDailyUploadStats getStorageUploadStats(StorageAlternateKeyDto storageAlternateKey, DateRangeDto dateRange); /** * Retrieves daily upload statistics for the storage by business object definition for the specified upload date range. * * @param storageAlternateKey the storage alternate key (case-insensitive) * @param dateRange the upload date range * * @return the upload statistics */ public StorageBusinessObjectDefinitionDailyUploadStats getStorageUploadStatsByBusinessObjectDefinition(StorageAlternateKeyDto storageAlternateKey, DateRangeDto dateRange); // JobDefinition /** * Retrieves job definition entity by alternate key. * * @param namespace the namespace (case-insensitive) * @param jobName the job name (case-insensitive) * * @return the job definition entity */ public JobDefinitionEntity getJobDefinitionByAltKey(String namespace, String jobName); /** * Gets a list of job definitions by optional filter criteria. * * @param namespace an optional namespace filter. * @param jobName an optional jobName filter. 
* * @return the list of job definitions. */ public List<JobDefinitionEntity> getJobDefinitionsByFilter(String namespace, String jobName); // EmrClusterDefinition /** * Retrieves EMR cluster definition entity by alternate key. * * @param emrClusterDefinitionKey the EMR cluster definition key * * @return the EMR cluster definition entity */ public EmrClusterDefinitionEntity getEmrClusterDefinitionByAltKey(EmrClusterDefinitionKey emrClusterDefinitionKey); /** * Retrieves EMR cluster definition entity by alternate key. * * @param namespaceCd the namespace (case-insensitive) * @param definitionName the EMR cluster definition name (case-insensitive) * * @return the EMR cluster definition entity */ public EmrClusterDefinitionEntity getEmrClusterDefinitionByAltKey(String namespaceCd, String definitionName); // NotificationEvent /** * Gets a notification event type by it's code. * * @param code the notification event type code (case-insensitive) * * @return the notification event type for the specified code */ public NotificationEventTypeEntity getNotificationEventTypeByCode(String code); // SecurityFunction /** * Gets a list of functions for the role. * * @param roleCd the role code * * @return the list of functions */ public List<String> getSecurityFunctionsForRole(String roleCd); /** * Gets a list of security functions. * * @return the list of functions */ public List<String> getSecurityFunctions(); // JmsMessage /** * Selects the oldest JMS message (a message with the lowest sequence generated id) from the queue. * * @return the JMS message */ public JmsMessageEntity getOldestJmsMessage(); // OnDemandPricing /** * Returns the on-demand price with the specified region and instance type. Returns {@code null} if no on-demand price is found. Throws an exception when * more than 1 on-demand price is found. * * @param region The on-demand price's region. * @param instanceType The on-demand price's instance type. * * @return The on-demand price. 
*/ public OnDemandPriceEntity getOnDemandPrice(String region, String instanceType); }
/* * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * */ package org.apache.cassandra.cql.jdbc; import java.io.ByteArrayOutputStream; import java.net.URI; import java.net.URISyntaxException; import java.nio.ByteBuffer; import java.sql.SQLException; import java.sql.SQLNonTransientConnectionException; import java.sql.SQLSyntaxErrorException; import java.util.Properties; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.zip.Deflater; import org.apache.cassandra.thrift.Compression; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.base.Charsets; /** * A set of static utility methods used by the JDBC Suite, and various default values and error message strings * that can be shared across classes. 
*/ class Utils { private static final Pattern KEYSPACE_PATTERN = Pattern.compile("USE (\\w+);?", Pattern.CASE_INSENSITIVE | Pattern.MULTILINE); private static final Pattern SELECT_PATTERN = Pattern.compile("(?:SELECT|DELETE)\\s+.+\\s+FROM\\s+(\\w+).*", Pattern.CASE_INSENSITIVE | Pattern.MULTILINE); private static final Pattern UPDATE_PATTERN = Pattern.compile("UPDATE\\s+(\\w+)\\s+.*", Pattern.CASE_INSENSITIVE); public static final String PROTOCOL = "jdbc:cassandra:"; public static final String DEFAULT_HOST = "localhost"; public static final int DEFAULT_PORT = 9160; public static final String TAG_DESCRIPTION = "description"; public static final String TAG_USER = "user"; public static final String TAG_PASSWORD = "password"; public static final String TAG_DATABASE_NAME = "databaseName"; public static final String TAG_SERVER_NAME = "serverName"; public static final String TAG_PORT_NUMBER = "portNumber"; protected static final String WAS_CLOSED_CON = "method was called on a closed Connection"; protected static final String WAS_CLOSED_STMT = "method was called on a closed Statement"; protected static final String WAS_CLOSED_RSLT = "method was called on a closed ResultSet"; protected static final String NO_INTERFACE = "no object was found that matched the provided interface: %s"; protected static final String NO_TRANSACTIONS = "the Cassandra implementation does not support transactions"; protected static final String NO_SERVER = "no Cassandra server is available"; protected static final String ALWAYS_AUTOCOMMIT = "the Cassandra implementation is always in auto-commit mode"; protected static final String BAD_TIMEOUT = "the timeout value was less than zero"; protected static final String SCHEMA_MISMATCH = "schema does not match across nodes, (try again later)"; protected static final String NOT_SUPPORTED = "the Cassandra implementation does not support this method"; protected static final String NO_GEN_KEYS = "the Cassandra implementation does not currently support 
returning generated keys"; protected static final String NO_BATCH = "the Cassandra implementation does not currently support this batch in Statement"; protected static final String NO_MULTIPLE = "the Cassandra implementation does not currently support multiple open Result Sets"; protected static final String NO_VALIDATOR = "Could not find key validator for: %s.%s"; protected static final String NO_COMPARATOR = "Could not find key comparator for: %s.%s"; protected static final String NO_RESULTSET = "No ResultSet returned from the CQL statement passed in an 'executeQuery()' method"; protected static final String NO_UPDATE_COUNT = "No Update Count was returned from the CQL statement passed in an 'executeUpdate()' method"; protected static final String NO_CF = "no column family reference could be extracted from the provided CQL statement"; protected static final String BAD_KEEP_RSET = "the argument for keeping the current result set : %s is not a valid value"; protected static final String BAD_TYPE_RSET = "the argument for result set type : %s is not a valid value"; protected static final String BAD_CONCUR_RSET = "the argument for result set concurrency : %s is not a valid value"; protected static final String BAD_HOLD_RSET = "the argument for result set holdability : %s is not a valid value"; protected static final String BAD_FETCH_DIR = "fetch direction value of : %s is illegal"; protected static final String BAD_AUTO_GEN = "auto key generation value of : %s is illegal"; protected static final String BAD_FETCH_SIZE = "fetch size of : %s rows may not be negative"; protected static final String MUST_BE_POSITIVE = "index must be a positive number less or equal the count of returned columns: %s"; protected static final String VALID_LABELS = "name provided was not in the list of valid column labels: %s"; protected static final String NOT_TRANSLATABLE = "column was stored in %s format which is not translatable to %s"; protected static final String NOT_BOOLEAN = "string 
value was neither 'true' nor 'false' : %s"; protected static final String HOST_IN_URL = "Connection url must specify a host, e.g., jdbc:cassandra://localhost:9170/Keyspace1"; protected static final String HOST_REQUIRED = "a 'host' name is required to build a Connection"; protected static final String BAD_KEYSPACE = "Keyspace names must be composed of alphanumerics and underscores (parsed: '%s')"; protected static final String URI_IS_SIMPLE = "Connection url may only include host, port, and keyspace, e.g., jdbc:cassandra://localhost:9170/Keyspace1"; protected static final Logger logger = LoggerFactory.getLogger(Utils.class); /** * Use the Compression object method to deflate the query string * * @param queryStr An un-compressed CQL query string * @param compression The compression object * @return A compressed string */ public static ByteBuffer compressQuery(String queryStr, Compression compression) { byte[] data = queryStr.getBytes(Charsets.UTF_8); Deflater compressor = new Deflater(); compressor.setInput(data); compressor.finish(); ByteArrayOutputStream byteArray = new ByteArrayOutputStream(); byte[] buffer = new byte[1024]; while (!compressor.finished()) { int size = compressor.deflate(buffer); byteArray.write(buffer, 0, size); } logger.trace("Compressed query statement {} bytes in length to {} bytes", data.length, byteArray.size()); return ByteBuffer.wrap(byteArray.toByteArray()); } /** * Parse a URL for the Cassandra JDBC Driver * <p/> * The URL must start with the Protocol: "jdbc:cassandra:" * The URI part(the "Subname") must contain a host and an optional port and optional keyspace name * ie. 
"//localhost:9160/Test1" * * @param url The full JDBC URL to be parsed * @return A list of properties that were parsed from the Subname * @throws SQLException */ public static final Properties parseURL(String url) throws SQLException { Properties props = new Properties(); if (!(url == null)) { props.setProperty(TAG_PORT_NUMBER, "" + DEFAULT_PORT); String rawUri = url.substring(PROTOCOL.length()); URI uri = null; try { uri = new URI(rawUri); } catch (URISyntaxException e) { throw new SQLSyntaxErrorException(e); } String host = uri.getHost(); if (host == null) throw new SQLNonTransientConnectionException(HOST_IN_URL); props.setProperty(TAG_SERVER_NAME, host); int port = uri.getPort() >= 0 ? uri.getPort() : DEFAULT_PORT; props.setProperty(TAG_PORT_NUMBER, "" + port); String keyspace = uri.getPath(); if ((keyspace != null) && (!keyspace.isEmpty())) { if (keyspace.startsWith("/")) keyspace = keyspace.substring(1); if (!keyspace.matches("[a-zA-Z]\\w+")) throw new SQLNonTransientConnectionException(String.format(BAD_KEYSPACE, keyspace)); props.setProperty(TAG_DATABASE_NAME, keyspace); } if (uri.getUserInfo() != null) throw new SQLNonTransientConnectionException(URI_IS_SIMPLE); } if (logger.isTraceEnabled()) logger.trace("URL : '{}' parses to: {}", url, props); return props; } /** * Create a "Subname" portion of a JDBC URL from properties. * * @param props A Properties file containing all the properties to be considered. * @return A constructed "Subname" portion of a JDBC URL in the form of a CLI (ie: //myhost:9160/Test1 ) * @throws SQLException */ public static final String createSubName(Properties props)throws SQLException { // make keyspace always start with a "/" for URI String keyspace = props.getProperty(TAG_DATABASE_NAME); // if keyspace is null then do not bother ... 
if (keyspace != null) if (!keyspace.startsWith("/")) keyspace = "/" + keyspace; String host = props.getProperty(TAG_SERVER_NAME); if (host==null)throw new SQLNonTransientConnectionException(HOST_REQUIRED); // construct a valid URI from parts... URI uri; try { uri = new URI( null, null, host, props.getProperty(TAG_PORT_NUMBER)==null ? DEFAULT_PORT : Integer.parseInt(props.getProperty(TAG_PORT_NUMBER)), keyspace, null, null); } catch (Exception e) { throw new SQLNonTransientConnectionException(e); } if (logger.isTraceEnabled()) logger.trace("Subname : '{}' created from : {}",uri.toString(), props); return uri.toString(); } /** * Determine the current keyspace by inspecting the CQL string to see if a USE statement is provided; which would change the keyspace. * * @param cql A CQL query string * @param current The current keyspace stored as state in the connection * @return the provided keyspace name or the keyspace from the contents of the CQL string */ public static String determineCurrentKeyspace(String cql, String current) { String ks = current; Matcher isKeyspace = KEYSPACE_PATTERN.matcher(cql); if (isKeyspace.matches()) ks = isKeyspace.group(1); return ks; } /** * Determine the current column family by inspecting the CQL to find a CF reference. * * @param cql A CQL query string * @return The column family name from the contents of the CQL string or null in none was found */ public static String determineCurrentColumnFamily(String cql) { String cf = null; Matcher isSelect = SELECT_PATTERN.matcher(cql); if (isSelect.matches()) cf = isSelect.group(1); Matcher isUpdate = UPDATE_PATTERN.matcher(cql); if (isUpdate.matches()) cf = isUpdate.group(1); return cf; } }
// Copyright 2017 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.benchmark.codegenerator; import static com.google.common.truth.Truth.assertThat; import com.google.common.base.Joiner; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; import java.util.Scanner; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** Test for {@link JavaCodeGeneratorHelper}. 
*/ @RunWith(JUnit4.class) public class JavaCodeGeneratorHelperTest { private static final String RANDOM_CLASS_CONTENT = joinLines( "package com.package.name;", "", "import java.lang.System;", "import java.util.Random;", "", "public final class ClassName {", " public static void PrintSth() {", " Random rand = new Random();", " int n = rand.nextInt(100);", " System.out.format(\"This is method(%s) with random number(%d)\\n\", \"PrintSth\", n);", " }", "}"); private static final String RANDOM_CLASS_EXTRA_CONTENT = joinLines( "package com.package.name;", "", "import java.lang.System;", "import java.util.Random;", "", "public final class ClassNameExtra {", " public static void PrintSth() {", " Random rand = new Random();", " int n = rand.nextInt(100);", " System.out.format(\"This is method(%s) with random number(%d)\\n\", \"PrintSth\", n);", " }", "", " public static void PrintSthElse() {", " Random rand = new Random();", " int n = rand.nextInt(100);", " System.out.format(\"This is method(%s) with random number(%d)\\n\"," + " \"PrintSthElse\", n);", " }", "}"); private static final String MAIN_CLASS_CONTENT = joinLines( "package com.package.name;", "", "import java.lang.String;", "", "public class Main {", " public static void main(String[] args) {", " }", "}"); private static final String DEPS_BUILD_FILE_CONTENT = joinLines( "java_library(", " name = 'Deps42',", " srcs = glob([ 'com/example/deps42/*.java' ]),", "<this is deps>", " visibility = [ '//visibility:public' ],", ")"); private static final String TARGET_BUILD_FILE_CONTENT = joinLines( "java_binary(", " name = 'Target',", " srcs = glob([ 'com/example/generated/*.java' ]),", " main_class = 'com.example.generated.Main',", "<this is deps>", ")"); private static final String DEPS_CLASS_CONTENT = joinLines( "package com.example.deps42;", "", "import com.example.deps43.Deps43;", "import java.lang.System;", "import java.util.Random;", "", "public final class Deps42 {", " public static void PrintSth() {", " Random rand 
= new Random();", " int n = rand.nextInt(100);", " System.out.format(\"This is method(%s) with random number(%d)\\n\", \"PrintSth\", n);", " }", "", " public static void CallNext() {", " Deps43.PrintSth();", " }", "}"); private static final String DEPS_CLASS_EXTRA_CONTENT = joinLines( "package com.example.deps42;", "", "import java.lang.System;", "import java.util.Random;", "", "public final class Deps42 {", " public static void PrintSth() {", " Random rand = new Random();", " int n = rand.nextInt(100);", " System.out.format(\"This is method(%s) with random number(%d)\\n\", \"PrintSth\", n);", " }", "", " public static void PrintSthElse() {", " Random rand = new Random();", " int n = rand.nextInt(100);", " System.out.format(\"This is method(%s) with random number(%d)\\n\"," + " \"PrintSthElse\", n);", " }", "}"); private static final String MAIN_CLASS_WITH_DEPS_CONTENT = joinLines( "package com.example.generated;", "", "import com.example.deps1.Deps1;", "import com.example.deps2.Deps2;", "import com.example.deps3.Deps3;", "import java.lang.String;", "", "public final class Main {", " public static void main(String[] args) {", " Deps1.PrintSth();", " Deps2.PrintSth();", " Deps3.PrintSth();", " }", "}"); @Rule public TemporaryFolder folder = new TemporaryFolder(); @Test public void testWriteRandomClassToDir() throws IOException { Path dir = folder.newFolder("WriteRandomClassToDir").toPath(); JavaCodeGeneratorHelper.writeRandomClassToDir(false, "ClassName", "com.package.name", dir); Path javaFile = dir.resolve("com/package/name/ClassName.java"); assertThat(javaFile.toFile().exists()).isTrue(); String content = new Scanner(javaFile).useDelimiter("\\Z").next(); assertThat(content).isEqualTo(RANDOM_CLASS_CONTENT); } @Test public void testWriteRandomClassToDirExtraMethod() throws IOException { Path dir = folder.newFolder("WriteRandomClassToDirExtraMethod").toPath(); JavaCodeGeneratorHelper.writeRandomClassToDir(true, "ClassNameExtra", "com.package.name", dir); Path 
javaFile = dir.resolve("com/package/name/ClassNameExtra.java"); assertThat(javaFile.toFile().exists()).isTrue(); String content = new Scanner(javaFile).useDelimiter("\\Z").next(); assertThat(content).isEqualTo(RANDOM_CLASS_EXTRA_CONTENT); } @Test public void testWriteMainClassToDir() throws IOException { Path dir = folder.newFolder("WriteMainClassToDir").toPath(); JavaCodeGeneratorHelper.writeMainClassToDir("com.package.name", dir); Path javaFile = dir.resolve("com/package/name/Main.java"); assertThat(javaFile.toFile().exists()).isTrue(); String content = new Scanner(javaFile).useDelimiter("\\Z").next(); assertThat(content).isEqualTo(MAIN_CLASS_CONTENT); } @Test public void testBuildFileWithNextDeps() throws IOException { Path dir = folder.newFolder("BuildFileWithNextDeps").toPath(); Files.createDirectories(dir); JavaCodeGeneratorHelper.buildFileWithNextDeps(42, "<this is deps>", dir); Path buildFile = dir.resolve("BUILD"); assertThat(buildFile.toFile().exists()).isTrue(); String content = new Scanner(buildFile).useDelimiter("\\Z").next(); assertThat(content).isEqualTo(DEPS_BUILD_FILE_CONTENT); } @Test public void testBuildFileWithMainClass() throws IOException { Path dir = folder.newFolder("BuildFileWithMainClass").toPath(); Files.createDirectories(dir); JavaCodeGeneratorHelper.buildFileWithMainClass("Target", "<this is deps>", dir); Path buildFile = dir.resolve("BUILD"); assertThat(buildFile.toFile().exists()).isTrue(); String content = new Scanner(buildFile).useDelimiter("\\Z").next(); assertThat(content).isEqualTo(TARGET_BUILD_FILE_CONTENT); } @Test public void testTargetWithNextHelper() throws IOException { Path dir = folder.newFolder("TargetWithNextHelper").toPath(); JavaCodeGeneratorHelper.targetWithNextHelper(42, true, dir); Path javaFile = dir.resolve("com/example/deps42/Deps42.java"); assertThat(javaFile.toFile().exists()).isTrue(); String content = new Scanner(javaFile).useDelimiter("\\Z").next(); assertThat(content).isEqualTo(DEPS_CLASS_CONTENT); } 
@Test public void testTargetWithNextExtraHelper() throws IOException { Path dir = folder.newFolder("TargetWithNextHelperExtra").toPath(); JavaCodeGeneratorHelper.targetWithNextExtraHelper(42, false, dir); Path javaFile = dir.resolve("com/example/deps42/Deps42.java"); assertThat(javaFile.toFile().exists()).isTrue(); String content = new Scanner(javaFile).useDelimiter("\\Z").next(); assertThat(content).isEqualTo(DEPS_CLASS_EXTRA_CONTENT); } @Test public void testParallelDepsMainClassHelper() throws IOException { Path dir = folder.newFolder("ParallelDepsMainClassHelper").toPath(); JavaCodeGeneratorHelper.parallelDepsMainClassHelper(4, dir); Path javaFile = dir.resolve("com/example/generated/Main.java"); assertThat(javaFile.toFile().exists()).isTrue(); String content = new Scanner(javaFile).useDelimiter("\\Z").next(); assertThat(content).isEqualTo(MAIN_CLASS_WITH_DEPS_CONTENT); } private static String joinLines(String... lines) { return Joiner.on("\n").join(lines); } }
/* * Copyright 2016-2020 chronicle.software * * https://chronicle.software * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.openhft.chronicle.bytes.ref; import net.openhft.chronicle.bytes.Bytes; import net.openhft.chronicle.bytes.BytesStore; import net.openhft.chronicle.bytes.util.DecoratedBufferOverflowException; import net.openhft.chronicle.core.OS; import net.openhft.chronicle.core.values.LongValue; import org.jetbrains.annotations.NotNull; import java.nio.BufferOverflowException; import java.nio.BufferUnderflowException; import static java.nio.charset.StandardCharsets.ISO_8859_1; /* The format for a long array in text is { capacity: 12345678901234567890, values: [ 12345678901234567890, ... 
] } */ @SuppressWarnings("rawtypes") public class TextLongArrayReference extends AbstractReference implements ByteableLongArrayValues { private static final byte[] SECTION1 = "{ locked: false, capacity: ".getBytes(ISO_8859_1); private static final byte[] SECTION2 = ", used: ".getBytes(ISO_8859_1); private static final byte[] SECTION3 = ", values: [ ".getBytes(ISO_8859_1); private static final byte[] SECTION4 = " ] }\n".getBytes(ISO_8859_1); private static final byte[] ZERO = "00000000000000000000".getBytes(ISO_8859_1); private static final byte[] SEP = ", ".getBytes(ISO_8859_1); private static final int DIGITS = ZERO.length; private static final int CAPACITY = SECTION1.length; private static final int USED = CAPACITY + DIGITS + SECTION2.length; private static final int VALUES = USED + DIGITS + SECTION3.length; private static final int VALUE_SIZE = DIGITS + SEP.length; private static final int LOCK_OFFSET = 10; private static final int FALS = 'f' | ('a' << 8) | ('l' << 16) | ('s' << 24); private static final int TRU = ' ' | ('t' << 8) | ('r' << 16) | ('u' << 24); private long length = VALUES; public static void write(@NotNull Bytes bytes, long capacity) throws IllegalArgumentException, IllegalStateException, BufferOverflowException, ArithmeticException, BufferUnderflowException { long start = bytes.writePosition(); bytes.write(SECTION1); bytes.append(capacity); while (bytes.writePosition() - start < CAPACITY + DIGITS) { bytes.writeUnsignedByte(' '); } bytes.write(SECTION2); bytes.write(ZERO); bytes.write(SECTION3); for (long i = 0; i < capacity; i++) { if (i > 0) bytes.appendUtf8(", "); bytes.write(ZERO); } bytes.write(SECTION4); } public static long peakLength(@NotNull BytesStore bytes, long offset) throws IllegalStateException, BufferUnderflowException { //todo check this, I think there could be a bug here return (bytes.parseLong(offset + CAPACITY) * VALUE_SIZE) - SEP.length + VALUES + SECTION4.length; } @Override public long getUsed() throws IllegalStateException 
{ try { return bytes.parseLong(USED + offset); } catch (NullPointerException e) { throwExceptionIfClosed(); throw e; } catch (BufferUnderflowException e) { throw new AssertionError(e); } } public void setUsed(long used) throws IllegalStateException { try { bytes.append(VALUES + offset, used, DIGITS); } catch (NullPointerException e) { throwExceptionIfClosed(); throw e; } catch (IllegalArgumentException | BufferOverflowException e) { throw new AssertionError(e); } } @Override public void setMaxUsed(long usedAtLeast) throws IllegalStateException { try { while (true) { if (!bytes.compareAndSwapInt(LOCK_OFFSET + offset, FALS, TRU)) continue; try { if (getUsed() < usedAtLeast) { setUsed(usedAtLeast); } return; } finally { bytes.writeInt(LOCK_OFFSET + offset, FALS); } } } catch (NullPointerException e) { throwExceptionIfClosed(); throw e; } catch (IllegalStateException | BufferOverflowException e) { throw new AssertionError(e); } } @Override public long getCapacity() { return (length - VALUES) / VALUE_SIZE; } @Override public ByteableLongArrayValues capacity(long arrayLength) { BytesStore bytesStore = bytesStore(); long len = sizeInBytes(arrayLength); if (bytesStore == null) { this.length = len; } else { assert this.length == len; } return this; } @Override public long getValueAt(long index) throws IllegalStateException { try { return bytes.parseLong(VALUES + offset + index * VALUE_SIZE); } catch (NullPointerException e) { throwExceptionIfClosed(); throw e; } catch (BufferUnderflowException e) { throw new AssertionError(e); } } @Override public void setValueAt(long index, long value) throws IllegalStateException { try { bytes.append(VALUES + offset + index * VALUE_SIZE, value, DIGITS); } catch (NullPointerException e) { throwExceptionIfClosed(); throw e; } catch (IllegalArgumentException | BufferOverflowException e) { throw new AssertionError(e); } } @Override public void bindValueAt(long index, LongValue value) { throw new UnsupportedOperationException("todo"); } 
@Override public long getVolatileValueAt(long index) throws IllegalStateException { OS.memory().loadFence(); return getValueAt(index); } @Override public void setOrderedValueAt(long index, long value) throws IllegalStateException { setValueAt(index, value); OS.memory().storeFence(); } @Override public boolean compareAndSet(long index, long expected, long value) throws IllegalStateException { try { if (!bytes.compareAndSwapInt(LOCK_OFFSET + offset, FALS, TRU)) return false; boolean ret = false; try { if (getVolatileValueAt(index) == expected) { setOrderedValueAt(index, value); ret = true; } return ret; } finally { bytes.writeInt(LOCK_OFFSET + offset, FALS); } } catch (NullPointerException e) { throwExceptionIfClosed(); throw e; } catch (BufferOverflowException e) { throw new AssertionError(e); } } @Override public void bytesStore(@NotNull final BytesStore bytes, long offset, long length) throws IllegalStateException, BufferOverflowException, IllegalArgumentException { throwExceptionIfClosedInSetter(); long peakLength = 0; try { peakLength = peakLength(bytes, offset); } catch (BufferUnderflowException e) { throw new DecoratedBufferOverflowException(e.toString()); } if (length != peakLength) throw new IllegalArgumentException(length + " != " + peakLength); super.bytesStore(bytes, offset, length); this.length = length; } @Override public boolean isNull() { return bytes == null; } @Override public void reset() throws IllegalStateException { throwExceptionIfClosedInSetter(); bytes = null; offset = 0; length = 0; } @Override public long maxSize() { return length; } @NotNull @Override public String toString() { if (bytes == null) { return "LongArrayTextReference{" + "bytes=null" + ", offset=" + offset + ", length=" + length + '}'; } try { return "value: " + getValueAt(0) + " ..."; } catch (Exception e) { return e.toString(); } } @Override public long sizeInBytes(long capacity) { return (capacity * VALUE_SIZE) + VALUES + SECTION3.length - SEP.length; } }
/** * SAHARA Scheduling Server * * Schedules and assigns local laboratory rigs. * * @license See LICENSE in the top level directory for complete license terms. * * Copyright (c) 2011, University of Technology, Sydney * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the University of Technology, Sydney nor the names * of its contributors may be used to endorse or promote products derived from * this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
 *
 * @author Michael Diponio (mdiponio)
 * @date 11th November 2011
 */
package au.edu.uts.eng.remotelabs.schedserver.bookings.impl.slotsengine;

import java.util.Calendar;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;

import au.edu.uts.eng.remotelabs.schedserver.bookings.BookingsActivator;
import au.edu.uts.eng.remotelabs.schedserver.bookings.impl.BookingManagementTask;
import au.edu.uts.eng.remotelabs.schedserver.bookings.impl.BookingNotification;
import au.edu.uts.eng.remotelabs.schedserver.dataaccess.DataAccessActivator;
import au.edu.uts.eng.remotelabs.schedserver.dataaccess.dao.RigDao;
import au.edu.uts.eng.remotelabs.schedserver.dataaccess.entities.Bookings;
import au.edu.uts.eng.remotelabs.schedserver.dataaccess.entities.ResourcePermission;
import au.edu.uts.eng.remotelabs.schedserver.dataaccess.entities.Rig;
import au.edu.uts.eng.remotelabs.schedserver.dataaccess.entities.Session;
import au.edu.uts.eng.remotelabs.schedserver.dataaccess.listener.BookingsEventListener.BookingsEvent;
import au.edu.uts.eng.remotelabs.schedserver.dataaccess.listener.RigEventListener;
import au.edu.uts.eng.remotelabs.schedserver.dataaccess.listener.SessionEventListener.SessionEvent;
import au.edu.uts.eng.remotelabs.schedserver.logger.Logger;
import au.edu.uts.eng.remotelabs.schedserver.logger.LoggerActivator;
import au.edu.uts.eng.remotelabs.schedserver.rigprovider.requests.RigAllocator;

/**
 * Tasks that converts bookings to sessions (i.e. redeems the booking).
 * <p>
 * Runs periodically (every {@link #REDEEM_INTERVAL} seconds): rolls the in-memory
 * day/slot pointers forward, cancels bookings that could not be redeemed in their
 * slot, assigns free rigs to bookings starting in the current slot, and sends
 * reminder notifications {@link #NOTIF_SLOTS} slots ahead. Also listens for rig
 * state changes to redeem pending bookings as soon as a rig becomes free.
 */
public class Redeemer implements BookingManagementTask, RigEventListener
{
    /** The number of seconds between redeem runs. */
    public static final int REDEEM_INTERVAL = 30;

    /** The number of slots before a booking starts to send a reminder
     *  notification out. */
    public static final int NOTIF_SLOTS = 4;

    /** Slots booking engine. */
    private SlotBookingEngine engine;

    /** The current day bookings. */
    private DayBookings currentDay;

    /** The current time slot. */
    private int currentSlot;

    /** The time at which the slot was rolled. */
    private long rollTime;

    /** List of bookings that are currently being redeemed. */
    private Map<String, MBooking> redeemingBookings;

    /** List of bookings that are currently in session.
     *  Keyed by rig name; synchronized because it is touched from both the
     *  periodic task and rig event callbacks. */
    private Map<String, MBooking> runningBookings;

    /** Logger. */
    private Logger logger;

    /** Flag to specify if this is a test run. When true (the default, i.e. NOT
     *  a test), redeemed sessions are actually allocated to rigs. */
    private boolean notTest = true;

    /**
     * Creates the redeemer.
     *
     * @param engine slots booking engine this redeemer serves
     * @param startDay day bookings to start with as the current day
     */
    public Redeemer(SlotBookingEngine engine, DayBookings startDay)
    {
        this.logger = LoggerActivator.getLogger();
        this.engine = engine;

        this.redeemingBookings = new HashMap<String, MBooking>();
        this.runningBookings = Collections.synchronizedMap(new HashMap<String, MBooking>());
        this.currentDay = startDay;
    }

    /**
     * Periodic redeem pass: rolls the day and slot forward when the wall clock
     * has moved on, cancels unredeemed bookings from the previous slot, redeems
     * this slot's bookings onto free rigs and, as a fallback, tries to
     * load-balance still-unredeemed bookings onto other viable rigs.
     */
    @Override
    public void run()
    {
        org.hibernate.Session db = null;
        try
        {
            db = DataAccessActivator.getNewSession();

            synchronized (this)
            {
                Calendar now = Calendar.getInstance();
                String nowDay = TimeUtil.getDayKey(now);
                int nowSlot = TimeUtil.getDaySlotIndex(now, nowDay);

                /* The later check is to stop time going backwards, which can
                 * occur during daylight saving time changes. */
                if (!this.currentDay.getDay().equals(nowDay) && System.currentTimeMillis() > this.rollTime)
                {
                    /* The day has rolled to the next day. */
                    this.logger.debug("Rolling day from " + this.currentDay.getDay() + " to " + nowDay + ".");

                    /* Clean up the old day bookings. We don't need that resident
                     * any more as it is only historical. */
                    this.engine.removeDay(this.currentDay.getDay());

                    /* Set the new day. Assignment inside the synchronized
                     * expression so the new day is locked while it is loaded. */
                    synchronized (this.currentDay = this.engine.getDayBookings(nowDay))
                    {
                        this.currentDay.fullLoad(db);
                    }
                }

                /* Time must always go forwards... */
                if (nowSlot != this.currentSlot && System.currentTimeMillis() > this.rollTime)
                {
                    /* The slot is being rolled over. */
                    this.rollTime = now.getTimeInMillis();
                    this.currentSlot = nowSlot;

                    /* Cancel the remaining previous slot bookings. */
                    for (MBooking mb : this.redeemingBookings.values())
                    {
                        Bookings b = (Bookings) db.merge(mb.getBooking());
                        b.setActive(false);
                        /* NOTE(review): "too" looks like a typo for "to" in this
                         * persisted cancel reason — confirm before changing, as
                         * it is user/DB visible text. */
                        b.setCancelReason("No resources free to redeem booking too.");
                        this.logger.warn("Unable to redeem booking (" + b.getId() + ") for " + b.getUser().qName() +
                                " because no free resources were found in the slot period.");
                        /* NOTE(review): removeBooking is invoked here without
                         * synchronizing on currentDay, unlike the other call
                         * sites in this class — confirm this is safe under the
                         * enclosing synchronized (this). */
                        this.currentDay.removeBooking(mb);
                        BookingsActivator.notifyBookingsEvent(BookingsEvent.SYSTEM_CANCELLED, b, db);
                    }

                    if (this.redeemingBookings.size() > 0)
                    {
                        this.logger.warn("Cancelling " + this.redeemingBookings.size() + " bookings that were on " +
                                this.currentDay.getDay() + ", slot " + (this.currentSlot - 1) + '.');
                        /* Flush the merged cancellations to the database in one
                         * transaction. */
                        db.beginTransaction();
                        db.flush();
                        db.getTransaction().commit();
                    }

                    /* Get the new list of bookings. */
                    synchronized (this.currentDay)
                    {
                        this.redeemingBookings = this.currentDay.getSlotStartingBookings(this.currentSlot);
                    }

                    if (this.redeemingBookings.size() == 0)
                    {
                        this.logger.debug("No bookings are starting on " + this.currentDay.getDay() + ", slot " +
                                this.currentSlot + '.');
                        this.startBookingNotification(db);
                        return;
                    }

                    /* Redeem the bookings for rigs that are free. */
                    RigDao rigDao = new RigDao(db);
                    Iterator<Entry<String, MBooking>> it = this.redeemingBookings.entrySet().iterator();
                    while (it.hasNext())
                    {
                        Entry<String, MBooking> e = it.next();
                        Rig rig = rigDao.findByName(e.getKey());
                        if (rig != null && rig.isActive() && rig.isOnline() && !rig.isInSession())
                        {
                            /* Rig is free so assign it. */
                            this.logger.info("Rig " + rig.getName() + " is free so is having booking redeemed to it.");
                            this.redeemBooking(e.getValue(), rig, db);
                            it.remove();
                        }
                        else if (rig == null)
                        {
                            /* Rig is not found, serious issue. */
                            this.logger.warn("Booking on " + this.currentDay.getDay() + ", slot " + this.currentSlot +
                                    " for rig " + e.getKey() + " that doesn't exist.");
                        }
                        else if (!(rig.isActive() && rig.isOnline()))
                        {
                            this.logger.debug("Booking on " + this.currentDay.getDay() + ", slot " + this.currentSlot +
                                    " for rig " + e.getKey() + " cannot be redeemed because the rig is currently " +
                                    "offline.");
                        }
                        else if (rig.isInSession())
                        {
                            this.logger.debug("Booking on " + this.currentDay.getDay() + ", slot " + this.currentSlot +
                                    " for rig " + e.getKey() + " cannot be redeemed because the rig is currently " +
                                    "in session.");
                        }
                    }

                    this.startBookingNotification(db);
                }

                /* Try to load balance existing booking to a new rig. Only after
                 * the booking has been waiting over a minute in its slot. */
                if (this.redeemingBookings.size() > 0 && System.currentTimeMillis() - this.rollTime > 60000)
                {
                    Iterator<Entry<String, MBooking>> it = this.redeemingBookings.entrySet().iterator();
                    while (it.hasNext())
                    {
                        synchronized (this.currentDay)
                        {
                            Entry<String, MBooking> e = it.next();
                            Rig rig = this.currentDay.findViableRig(e.getKey(), e.getValue(), db);
                            if (rig != null)
                            {
                                this.redeemBooking(e.getValue(), rig, db);
                                it.remove();
                            }
                        }
                    }
                }
            }
        }
        catch (Throwable thr)
        {
            /* Last-ditch guard so an unexpected error does not kill the
             * periodic task. */
            this.logger.error("Unchecked exception caught in redeemer task. Exception type: " +
                    thr.getClass().getName() + ", message: " + thr.getMessage() + '.');
        }
        finally
        {
            if (db != null) db.close();
        }
    }

    /**
     * Provides notification that a booking is going to start in the recent future.
     * Looks NOTIF_SLOTS ahead of the current slot, rolling into the next day's
     * bookings when the look-ahead passes the end of the current day.
     *
     * @param db database connection
     */
    private void startBookingNotification(org.hibernate.Session db)
    {
        /* Notify the users of whose bookings are going to start soon. */
        int notifSlot = this.currentSlot + NOTIF_SLOTS;

        Collection<MBooking> starting;
        if (notifSlot < SlotBookingEngine.NUM_SLOTS)
        {
            synchronized (this.currentDay)
            {
                starting = this.currentDay.getSlotStartingBookings(notifSlot).values();
            }
        }
        else
        {
            /* Look-ahead wraps into tomorrow; load that day's bookings. */
            Calendar next = Calendar.getInstance();
            next.add(Calendar.DAY_OF_MONTH, 1);
            DayBookings nextDay = this.engine.getDayBookings(TimeUtil.getDayKey(next));
            synchronized (nextDay)
            {
                nextDay.fullLoad(db);
                starting = nextDay.getSlotStartingBookings(notifSlot - SlotBookingEngine.NUM_SLOTS).values();
            }
        }

        for (MBooking mb : starting)
        {
            new BookingNotification((Bookings)db.merge(mb.getBooking())).notifyStarting();
        }
    }

    /**
     * Rig state change callback. Reaps the finished session's in-memory booking
     * (including multi-day continuation bookings), re-fires a FREE event when a
     * rig becomes free with no booking in the next slot (to re-trigger queuing),
     * and redeems any pending booking for a rig that came ONLINE or FREE.
     *
     * @param event the state change that occurred
     * @param rig rig the event refers to
     * @param db database session
     */
    @Override
    public void eventOccurred(RigStateChangeEvent event, Rig rig, org.hibernate.Session db)
    {
        /* Clean the previous session. */
        if (this.runningBookings.containsKey(rig.getName()))
        {
            MBooking old = this.runningBookings.remove(rig.getName());
            if (old.isMultiDay() || !this.currentDay.getDay().equals(old.getDay()))
            {
                DayBookings dayb;
                MBooking oldNext;
                for (String day : TimeUtil.getDayKeys(old.getStart().getTime(), old.getEnd().getTime()))
                {
                    /* If a day isn't loaded we aren't going to load it. */
                    if ((dayb = this.engine.getDayBookings(day, false)) != null)
                    {
                        synchronized (dayb)
                        {
                            if (dayb.getDay().equals(old.getDay())) dayb.removeBooking(old);
                            else if ((oldNext = dayb.getBookingOnSlot(rig, 0)) != null && // Session must be continuous
                                    oldNext.getSession() != null &&                       // Must be assigned a session
                                    /* Must be the same session. */
                                    oldNext.getSession().getId().equals(old.getSession().getId()))
                            {
                                /* If the next day starting booking is an extension
                                 * of the current booking, then remove it. */
                                dayb.removeBooking(oldNext);
                            }
                        }
                    }
                }
            }
            else
            {
                /* The booking was only on today so it can be safely reaped. */
                synchronized (this.currentDay)
                {
                    this.currentDay.removeBooking(old);
                }
            }

            /* If the rig event was free, and the rig isn't booked (i.e. next
             * slot is free), we need to fire another free broadcast to trigger
             * another queue run. This is because if the initial notification
             * fired before this in queuer, then the queue attempt would have
             * falsely been blocked by the memory representation of the terminated
             * session. Removing the finished booking then notifying again makes
             * sure the queue attempt will run. */
            if (!this.redeemingBookings.containsKey(rig.getName()) && event == RigStateChangeEvent.FREE)
            {
                if (this.currentSlot + 1 < SlotBookingEngine.NUM_SLOTS)
                {
                    /* Next slot on current day and no booking in next slot. */
                    if (this.currentDay.getBookingOnSlot(rig, this.currentSlot + 1) == null)
                    {
                        this.fireFreeEvent(rig, db);
                    }
                }
                else
                {
                    /* Next booking is on next day. */
                    Calendar cal = Calendar.getInstance();
                    cal.add(Calendar.DAY_OF_MONTH, 1);
                    DayBookings nextBookings = this.engine.getDayBookings(TimeUtil.getDayKey(cal));
                    synchronized (nextBookings)
                    {
                        nextBookings.fullLoad(db);
                    }
                    if (nextBookings.getBookingOnSlot(rig, 0) == null)
                    {
                        this.fireFreeEvent(rig, db);
                    }
                }
            }
        }

        switch (event)
        {
            case ONLINE: /* Falls through. */
            case FREE:
                /* Remove the finished session. */
                synchronized (this)
                {
                    if (this.redeemingBookings.containsKey(rig.getName()))
                    {
                        this.redeemBooking(this.redeemingBookings.remove(rig.getName()), rig, db);
                    }
                }
                break;

            default:
                /* Don't care about the other states. */
        }
    }

    /**
     * Fires an online event to us.
     *
     * @param rig rig event refers to
     * @param db database session
     */
    private void fireFreeEvent(Rig rig, org.hibernate.Session db)
    {
        /* Fire event the rig is online. */
        for (RigEventListener evt : BookingsActivator.getRigEventListeners())
        {
            /* Check so we don't fire event to us. */
            if (evt == this) continue;

            evt.eventOccurred(RigStateChangeEvent.FREE, rig, db);
        }
    }

    /**
     * Redeems a booking by creating a session and allocating a rig to it.
     * Persists the new session and booking/rig state in one transaction, records
     * the booking as running, notifies the session-assigned event and (outside
     * test runs) performs the actual rig allocation.
     *
     * @param membooking membooking to convert to session
     * @param rig rig to allocate
     * @param db database connection
     */
    private void redeemBooking(MBooking membooking, Rig rig, org.hibernate.Session db)
    {
        Date now = new Date();

        Bookings booking = (Bookings)db.merge(membooking.getBooking());
        this.logger.info("Redeeming a booking (" + booking.getId() + ") for " + booking.getUser().qName() +
                " using rig " + rig.getName() + " at " + now + ".");

        Session session = new Session();
        session.setActive(true);
        session.setInGrace(false);
        session.setActivityLastUpdated(now);
        session.setReady(false);
        session.setPriority((short) 0);
        session.setRequestTime(now);
        session.setAssignmentTime(now);
        session.setUser(booking.getUser());
        session.setUserName(booking.getUserName());
        session.setUserNamespace(booking.getUserNamespace());
        session.setResourcePermission(booking.getResourcePermission());

        /* We need to remove the lag in redeeming the booking so we don't
         * propagate the lag to other bookings. */
        int duration = booking.getDuration() - (int)(now.getTime() - this.rollTime) / 1000;
        if (membooking.getEndSlot() == SlotBookingEngine.END_SLOT)
        {
            /* If the booking is in the last slot of the day it is pulled back a
             * second so it stays within the day boundary. Here we restore the
             * second so the booking has a nominal duration. */
            duration += 1;
        }
        session.setDuration(duration);
        session.setExtensions(booking.getResourcePermission().getAllowedExtensions());

        session.setResourceType(booking.getResourceType());
        if (ResourcePermission.RIG_PERMISSION.equals(booking.getResourceType()))
        {
            session.setRequestedResourceId(booking.getRig().getId());
            session.setRequestedResourceName(booking.getRig().getName());
        }
        else if (ResourcePermission.TYPE_PERMISSION.equals(booking.getResourceType()))
        {
            session.setRequestedResourceId(booking.getRigType().getId());
            session.setRequestedResourceName(booking.getRigType().getName());
        }
        else if (ResourcePermission.CAPS_PERMISSION.equals(booking.getResourceType()))
        {
            session.setRequestedResourceId(booking.getRequestCapabilities().getId());
            session.setRequestedResourceName(booking.getRequestCapabilities().getCapabilities());
        }

        session.setAssignedRigName(rig.getName());
        session.setRig(rig);
        session.setCodeReference(booking.getCodeReference());

        /* Persist session creation, booking consumption and rig assignment
         * atomically. */
        db.beginTransaction();
        db.save(session);
        booking.setActive(false);
        booking.setSession(session);
        rig.setInSession(true);
        rig.setSession(session);
        db.getTransaction().commit();

        membooking.setBooking(booking);
        membooking.setSession(session);
        this.runningBookings.put(rig.getName(), membooking);

        this.logger.info("Assigned " + session.getUser().qName() + " to rig " + rig.getName() + " (session=" +
                session.getId() + ").");

        /* Notify a session has started. */
        BookingsActivator.notifySessionEvent(SessionEvent.ASSIGNED, session, db);

        /* Allocate the rig to the user. */
        if (this.notTest) new RigAllocator().allocate(session, db);
    }

    /**
     * Gets the session running on the rig.
     *
     * @param rig rig that is running
     * @return running session, or null if none exists
     */
    public MBooking getRunningSession(Rig rig)
    {
        return this.runningBookings.get(rig.getName());
    }

    /**
     * Puts a running booking to session.
     *
     * @param rig rig that is allocated
     * @param mb booking or puesdo booking of session
     */
    public void putRunningSession(Rig rig, MBooking mb)
    {
        this.runningBookings.put(rig.getName(), mb);
    }

    /** @return the scheduling period of this task in seconds. */
    @Override
    public int getPeriod()
    {
        return Redeemer.REDEEM_INTERVAL;
    }

    @Override
    public void cleanUp()
    {
        /* Does nothing. */
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.ipc; import com.google.common.base.Supplier; import com.google.protobuf.ServiceException; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.HadoopIllegalArgumentException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.io.Writable; import org.apache.hadoop.io.retry.RetryPolicies; import org.apache.hadoop.io.retry.RetryPolicy; import org.apache.hadoop.io.retry.RetryProxy; import org.apache.hadoop.ipc.Client.ConnectionId; import org.apache.hadoop.ipc.Server.Call; import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto; import org.apache.hadoop.ipc.protobuf.TestProtos; import org.apache.hadoop.metrics2.MetricsRecordBuilder; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.security.AccessControlException; import org.apache.hadoop.security.SecurityUtil; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod; import org.apache.hadoop.security.authorize.AuthorizationException; import 
org.apache.hadoop.security.authorize.PolicyProvider; import org.apache.hadoop.security.authorize.Service; import org.apache.hadoop.security.token.SecretManager; import org.apache.hadoop.security.token.TokenIdentifier; import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.test.MetricsAsserts; import org.apache.hadoop.test.MockitoUtil; import org.apache.log4j.Level; import org.junit.Before; import org.junit.Test; import org.mockito.Mockito; import org.mockito.internal.util.reflection.Whitebox; import javax.net.SocketFactory; import java.io.Closeable; import java.io.IOException; import java.io.InterruptedIOException; import java.lang.reflect.InvocationHandler; import java.lang.reflect.Method; import java.lang.reflect.Proxy; import java.net.ConnectException; import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.SocketTimeoutException; import java.security.PrivilegedAction; import java.security.PrivilegedExceptionAction; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.concurrent.Callable; import java.util.concurrent.CountDownLatch; import java.util.concurrent.CyclicBarrier; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; import static org.apache.hadoop.test.MetricsAsserts.assertCounter; import static org.apache.hadoop.test.MetricsAsserts.assertCounterGt; import static org.apache.hadoop.test.MetricsAsserts.getLongCounter; import static org.apache.hadoop.test.MetricsAsserts.getMetrics; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.timeout; import static org.mockito.Mockito.verify; /** 
Unit tests for RPC. */ @SuppressWarnings("deprecation") public class TestRPC extends TestRpcBase { public static final Log LOG = LogFactory.getLog(TestRPC.class); @Before public void setup() { setupConf(); } int datasize = 1024*100; int numThreads = 50; public interface TestProtocol extends VersionedProtocol { long versionID = 1L; void ping() throws IOException; void sleep(long delay) throws IOException, InterruptedException; String echo(String value) throws IOException; String[] echo(String[] value) throws IOException; Writable echo(Writable value) throws IOException; int add(int v1, int v2) throws IOException; int add(int[] values) throws IOException; int error() throws IOException; } public static class TestImpl implements TestProtocol { int fastPingCounter = 0; @Override public long getProtocolVersion(String protocol, long clientVersion) { return TestProtocol.versionID; } @Override public ProtocolSignature getProtocolSignature(String protocol, long clientVersion, int hashcode) { return new ProtocolSignature(TestProtocol.versionID, null); } @Override public void ping() {} @Override public void sleep(long delay) throws InterruptedException { Thread.sleep(delay); } @Override public String echo(String value) throws IOException { return value; } @Override public String[] echo(String[] values) throws IOException { return values; } @Override public Writable echo(Writable writable) { return writable; } @Override public int add(int v1, int v2) { return v1 + v2; } @Override public int add(int[] values) { int sum = 0; for (int i = 0; i < values.length; i++) { sum += values[i]; } return sum; } @Override public int error() throws IOException { throw new IOException("bobo"); } } // // an object that does a bunch of transactions // static class Transactions implements Runnable { int datasize; TestRpcService proxy; Transactions(TestRpcService proxy, int datasize) { this.proxy = proxy; this.datasize = datasize; } // do two RPC that transfers data. 
@Override public void run() { Integer[] indata = new Integer[datasize]; Arrays.fill(indata, 123); TestProtos.ExchangeRequestProto exchangeRequest = TestProtos.ExchangeRequestProto.newBuilder().addAllValues( Arrays.asList(indata)).build(); Integer[] outdata = null; TestProtos.ExchangeResponseProto exchangeResponse; TestProtos.AddRequestProto addRequest = TestProtos.AddRequestProto.newBuilder().setParam1(1) .setParam2(2).build(); TestProtos.AddResponseProto addResponse; int val = 0; try { exchangeResponse = proxy.exchange(null, exchangeRequest); outdata = new Integer[exchangeResponse.getValuesCount()]; outdata = exchangeResponse.getValuesList().toArray(outdata); addResponse = proxy.add(null, addRequest); val = addResponse.getResult(); } catch (ServiceException e) { assertTrue("Exception from RPC exchange() " + e, false); } assertEquals(indata.length, outdata.length); assertEquals(3, val); for (int i = 0; i < outdata.length; i++) { assertEquals(outdata[i].intValue(), i); } } } // // A class that does an RPC but does not read its response. // static class SlowRPC implements Runnable { private TestRpcService proxy; private volatile boolean done; SlowRPC(TestRpcService proxy) { this.proxy = proxy; done = false; } boolean isDone() { return done; } @Override public void run() { try { // this would hang until two fast pings happened ping(true); done = true; } catch (ServiceException e) { assertTrue("SlowRPC ping exception " + e, false); } } void ping(boolean shouldSlow) throws ServiceException { // this would hang until two fast pings happened proxy.slowPing(null, newSlowPingRequest(shouldSlow)); } } /** * A basic interface for testing client-side RPC resource cleanup. */ private interface StoppedProtocol { long versionID = 0; void stop(); } /** * A class used for testing cleanup of client side RPC resources. 
*/ private static class StoppedRpcEngine implements RpcEngine { @Override public <T> ProtocolProxy<T> getProxy( Class<T> protocol, long clientVersion, InetSocketAddress addr, UserGroupInformation ticket, Configuration conf, SocketFactory factory, int rpcTimeout, RetryPolicy connectionRetryPolicy) throws IOException { return getProxy(protocol, clientVersion, addr, ticket, conf, factory, rpcTimeout, connectionRetryPolicy, null); } @SuppressWarnings("unchecked") @Override public <T> ProtocolProxy<T> getProxy( Class<T> protocol, long clientVersion, InetSocketAddress addr, UserGroupInformation ticket, Configuration conf, SocketFactory factory, int rpcTimeout, RetryPolicy connectionRetryPolicy, AtomicBoolean fallbackToSimpleAuth) throws IOException { T proxy = (T) Proxy.newProxyInstance(protocol.getClassLoader(), new Class[] { protocol }, new StoppedInvocationHandler()); return new ProtocolProxy<T>(protocol, proxy, false); } @Override public org.apache.hadoop.ipc.RPC.Server getServer( Class<?> protocol, Object instance, String bindAddress, int port, int numHandlers, int numReaders, int queueSizePerHandler, boolean verbose, Configuration conf, SecretManager<? extends TokenIdentifier> secretManager, String portRangeConfig) throws IOException { return null; } @Override public ProtocolProxy<ProtocolMetaInfoPB> getProtocolMetaInfoProxy( ConnectionId connId, Configuration conf, SocketFactory factory) throws IOException { throw new UnsupportedOperationException("This proxy is not supported"); } } /** * An invocation handler which does nothing when invoking methods, and just * counts the number of times close() is called. 
*/ private static class StoppedInvocationHandler implements InvocationHandler, Closeable { private int closeCalled = 0; @Override public Object invoke(Object proxy, Method method, Object[] args) throws Throwable { return null; } @Override public void close() throws IOException { closeCalled++; } public int getCloseCalled() { return closeCalled; } } @Test public void testConfRpc() throws IOException { Server server = newServerBuilder(conf) .setNumHandlers(1).setVerbose(false).build(); // Just one handler int confQ = conf.getInt( CommonConfigurationKeys.IPC_SERVER_HANDLER_QUEUE_SIZE_KEY, CommonConfigurationKeys.IPC_SERVER_HANDLER_QUEUE_SIZE_DEFAULT); assertEquals(confQ, server.getMaxQueueSize()); int confReaders = conf.getInt( CommonConfigurationKeys.IPC_SERVER_RPC_READ_THREADS_KEY, CommonConfigurationKeys.IPC_SERVER_RPC_READ_THREADS_DEFAULT); assertEquals(confReaders, server.getNumReaders()); server = newServerBuilder(conf) .setNumHandlers(1).setnumReaders(3).setQueueSizePerHandler(200) .setVerbose(false).build(); assertEquals(3, server.getNumReaders()); assertEquals(200, server.getMaxQueueSize()); server = newServerBuilder(conf).setQueueSizePerHandler(10) .setNumHandlers(2).setVerbose(false).build(); assertEquals(2 * 10, server.getMaxQueueSize()); } @Test public void testProxyAddress() throws Exception { Server server = null; TestRpcService proxy = null; try { server = setupTestServer(conf, -1); // create a client proxy = getClient(addr, conf); assertEquals(addr, RPC.getServerAddress(proxy)); } finally { stop(server, proxy); } } @Test public void testSlowRpc() throws IOException, ServiceException { Server server; TestRpcService proxy = null; System.out.println("Testing Slow RPC"); // create a server with two handlers server = setupTestServer(conf, 2); try { // create a client proxy = getClient(addr, conf); SlowRPC slowrpc = new SlowRPC(proxy); Thread thread = new Thread(slowrpc, "SlowRPC"); thread.start(); // send a slow RPC, which won't return until two fast pings 
assertTrue("Slow RPC should not have finished1.", !slowrpc.isDone());
slowrpc.ping(false); // first fast ping
// verify that the first RPC is still stuck
assertTrue("Slow RPC should not have finished2.", !slowrpc.isDone());
slowrpc.ping(false); // second fast ping
// Now the slow ping should be able to be executed
while (!slowrpc.isDone()) {
  System.out.println("Waiting for slow RPC to get done.");
  try {
    Thread.sleep(1000);
  } catch (InterruptedException e) {} // best-effort poll; ignore and re-check isDone()
}
} finally {
  System.out.println("Down slow rpc testing");
  stop(server, proxy);
}
}

// Thin wrapper running the shared call battery against the class-level conf.
@Test
public void testCalls() throws Exception {
  testCallsInternal(conf);
}

// Exercises the full protobuf RPC surface (ping/echo/echo2/add/add2/error) against a
// live server built from myConf, verifies the rpc and rpcDetailed metrics counters,
// then hammers the server with numThreads concurrent Transactions threads.
private void testCallsInternal(Configuration myConf) throws Exception {
  Server server;
  TestRpcService proxy = null;
  server = setupTestServer(myConf, -1);
  try {
    proxy = getClient(addr, myConf);

    proxy.ping(null, newEmptyRequest());
    TestProtos.EchoResponseProto echoResp = proxy.echo(null, newEchoRequest("foo"));
    assertEquals(echoResp.getMessage(), "foo");
    echoResp = proxy.echo(null, newEchoRequest(""));
    assertEquals(echoResp.getMessage(), "");

    // Check rpcMetrics: 3 calls so far (1 ping + 2 echo)
    MetricsRecordBuilder rb = getMetrics(server.rpcMetrics.name());
    assertCounter("RpcProcessingTimeNumOps", 3L, rb);
    assertCounterGt("SentBytes", 0L, rb);
    assertCounterGt("ReceivedBytes", 0L, rb);

    // Number of calls to echo method should be 2
    rb = getMetrics(server.rpcDetailedMetrics.name());
    assertCounter("EchoNumOps", 2L, rb);
    // Number of calls to ping method should be 1
    assertCounter("PingNumOps", 1L, rb);

    // echo2: repeated-string round trip, non-empty then empty list
    String[] strings = new String[] {"foo","bar"};
    TestProtos.EchoRequestProto2 echoRequest2 =
        TestProtos.EchoRequestProto2.newBuilder().addAllMessage(
            Arrays.asList(strings)).build();
    TestProtos.EchoResponseProto2 echoResponse2 = proxy.echo2(null, echoRequest2);
    assertTrue(Arrays.equals(echoResponse2.getMessageList().toArray(), strings));

    echoRequest2 = TestProtos.EchoRequestProto2.newBuilder()
        .addAllMessage(Collections.<String>emptyList()).build();
    echoResponse2 = proxy.echo2(null, echoRequest2);
    assertTrue(Arrays.equals(echoResponse2.getMessageList().toArray(), new String[]{}));

    // add / add2: scalar pair and repeated-params forms both sum to 3
    TestProtos.AddRequestProto addRequest =
        TestProtos.AddRequestProto.newBuilder().setParam1(1)
            .setParam2(2).build();
    TestProtos.AddResponseProto addResponse = proxy.add(null, addRequest);
    assertEquals(addResponse.getResult(), 3);

    Integer[] integers = new Integer[] {1, 2};
    TestProtos.AddRequestProto2 addRequest2 =
        TestProtos.AddRequestProto2.newBuilder().addAllParams(
            Arrays.asList(integers)).build();
    addResponse = proxy.add2(null, addRequest2);
    assertEquals(addResponse.getResult(), 3);

    // error(): server-side exception must surface as a ServiceException
    boolean caught = false;
    try {
      proxy.error(null, newEmptyRequest());
    } catch (ServiceException e) {
      if(LOG.isDebugEnabled()) {
        LOG.debug("Caught " + e);
      }
      caught = true;
    }
    assertTrue(caught);
    rb = getMetrics(server.rpcDetailedMetrics.name());
    assertCounter("RpcServerExceptionNumOps", 1L, rb);

    //proxy.testServerGet();

    // create multiple threads and make them do large data transfers
    System.out.println("Starting multi-threaded RPC test...");
    server.setSocketSendBufSize(1024);
    Thread threadId[] = new Thread[numThreads];
    for (int i = 0; i < numThreads; i++) {
      Transactions trans = new Transactions(proxy, datasize);
      threadId[i] = new Thread(trans, "TransactionThread-" + i);
      threadId[i].start();
    }

    // wait for all transactions to get over
    System.out.println("Waiting for all threads to finish RPCs...");
    for (int i = 0; i < numThreads; i++) {
      try {
        threadId[i].join();
      } catch (InterruptedException e) {
        i--; // retry the same join if interrupted
      }
    }
  } finally {
    stop(server, proxy);
  }
}

// Calling a port with no server behind it must fail fast with a ConnectException
// wrapped in a ServiceException.
@Test
public void testClientWithoutServer() throws Exception {
  TestRpcService proxy;

  short invalidPort = 20;
  InetSocketAddress invalidAddress = new InetSocketAddress(ADDRESS, invalidPort);
  long invalidClientVersion = 1L;
  try {
    proxy = RPC.getProxy(TestRpcService.class, invalidClientVersion, invalidAddress, conf);
    // Test echo method
    proxy.echo(null, newEchoRequest("hello"));
    fail("We should not have reached here");
  } catch (ServiceException ioe) {
    // this is what we expected
    if (!(ioe.getCause() instanceof ConnectException)) {
      fail("We should not have reached here");
    }
  }
}

// Config key naming the service-level ACL used by the authorization tests.
private static final String ACL_CONFIG = "test.protocol.acl";

// Minimal PolicyProvider exposing a single ACL-governed service for TestRpcService.
private static class TestPolicyProvider extends PolicyProvider {
  @Override
  public Service[] getServices() {
    return new Service[] { new Service(ACL_CONFIG, TestRpcService.class) };
  }
}

// Runs one ping against a server whose service ACL comes from myConf, asserting
// either an AuthorizationException (expectFailure) or success, and checks the
// RpcAuthorization* / RpcAuthentication* metrics either way.
private void doRPCs(Configuration myConf, boolean expectFailure) throws Exception {
  Server server;
  TestRpcService proxy = null;

  server = setupTestServer(myConf, 5);
  server.refreshServiceAcl(myConf, new TestPolicyProvider());
  TestProtos.EmptyRequestProto emptyRequestProto =
      TestProtos.EmptyRequestProto.newBuilder().build();

  try {
    // NOTE(review): client is built with the shared 'conf', not the 'myConf'
    // parameter — the ACL under test is server-side, so this looks intentional,
    // but confirm.
    proxy = getClient(addr, conf);
    proxy.ping(null, emptyRequestProto);
    if (expectFailure) {
      fail("Expect RPC.getProxy to fail with AuthorizationException!");
    }
  } catch (ServiceException e) {
    if (expectFailure) {
      RemoteException re = (RemoteException) e.getCause();
      assertTrue(re.unwrapRemoteException() instanceof AuthorizationException);
      assertEquals("RPC error code should be UNAUTHORIZED",
          RpcErrorCodeProto.FATAL_UNAUTHORIZED, re.getErrorCode());
    } else {
      throw e;
    }
  } finally {
    MetricsRecordBuilder rb = getMetrics(server.rpcMetrics.name());
    if (expectFailure) {
      assertCounter("RpcAuthorizationFailures", 1L, rb);
    } else {
      assertCounter("RpcAuthorizationSuccesses", 1L, rb);
    }
    //since we don't have authentication turned ON, we should see
    // 0 for the authentication successes and 0 for failure
    assertCounter("RpcAuthenticationFailures", 0L, rb);
    assertCounter("RpcAuthenticationSuccesses", 0L, rb);
    stop(server, proxy);
  }
}

// The server must report a bind address resolving to the local host.
@Test
public void testServerAddress() throws IOException {
  Server server;
  server = setupTestServer(conf, 5);
  try {
    InetSocketAddress bindAddr = NetUtils.getConnectAddress(server);
    assertEquals(InetAddress.getLocalHost(), bindAddr.getAddress());
  } finally {
    stop(server, null);
  }
}

// Exercises the service ACL twice (single reader, then 2 readers): "*" must
// authorize, "invalid invalid" must reject.
@Test
public void testAuthorization() throws Exception {
  Configuration myConf = new Configuration();
  myConf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION, true);

  // Expect to succeed
  myConf.set(ACL_CONFIG, "*");
  doRPCs(myConf, false);

  // Reset authorization to expect failure
  myConf.set(ACL_CONFIG, "invalid invalid");
  doRPCs(myConf, true);

  myConf.setInt(CommonConfigurationKeys.IPC_SERVER_RPC_READ_THREADS_KEY, 2);

  // Expect to succeed
  myConf.set(ACL_CONFIG, "*");
  doRPCs(myConf, false);

  // Reset authorization to expect failure
  myConf.set(ACL_CONFIG, "invalid invalid");
  doRPCs(myConf, true);
}

/**
 * Switch off setting socketTimeout values on RPC sockets.
 * Verify that RPC calls still work ok.
 */
// NOTE(review): no @Test annotation here — unlike the surrounding tests this
// method only runs when invoked explicitly (e.g. from main). Confirm whether
// that is deliberate.
public void testNoPings() throws Exception {
  Configuration conf = new Configuration();

  conf.setBoolean("ipc.client.ping", false);
  new TestRPC().testCallsInternal(conf);

  conf.setInt(CommonConfigurationKeys.IPC_SERVER_RPC_READ_THREADS_KEY, 2);
  new TestRPC().testCallsInternal(conf);
}

/**
 * Test stopping a non-registered proxy
 * @throws IOException
 */
@Test(expected=HadoopIllegalArgumentException.class)
public void testStopNonRegisteredProxy() throws IOException {
  RPC.stopProxy(null);
}

/**
 * Test that the mockProtocol helper returns mock proxies that can
 * be stopped without error.
 */
@Test
public void testStopMockObject() throws IOException {
  RPC.stopProxy(MockitoUtil.mockProtocol(TestProtocol.class));
}

// stopProxy must invoke close() exactly once on a StoppedRpcEngine proxy.
@Test
public void testStopProxy() throws IOException {
  RPC.setProtocolEngine(conf, StoppedProtocol.class, StoppedRpcEngine.class);

  StoppedProtocol proxy = RPC.getProxy(StoppedProtocol.class,
      StoppedProtocol.versionID, null, conf);
  StoppedInvocationHandler invocationHandler = (StoppedInvocationHandler)
      Proxy.getInvocationHandler(proxy);
  assertEquals(0, invocationHandler.getCloseCalled());
  RPC.stopProxy(proxy);
  assertEquals(1, invocationHandler.getCloseCalled());
}

// stopProxy must still reach the underlying handler when the proxy is wrapped
// in a RetryProxy.
@Test
public void testWrappedStopProxy() throws IOException {
  StoppedProtocol wrappedProxy = RPC.getProxy(StoppedProtocol.class,
      StoppedProtocol.versionID, null, conf);
  StoppedInvocationHandler invocationHandler = (StoppedInvocationHandler)
      Proxy.getInvocationHandler(wrappedProxy);

  StoppedProtocol proxy = (StoppedProtocol) RetryProxy.create(
      StoppedProtocol.class, wrappedProxy, RetryPolicies.RETRY_FOREVER);

  assertEquals(0, invocationHandler.getCloseCalled());
  RPC.stopProxy(proxy);
  assertEquals(1, invocationHandler.getCloseCalled());
}

// A simple-auth client hitting a Kerberos-only server must be rejected with
// FATAL_UNAUTHORIZED / AccessControlException; verified for 1 and 2 readers.
@Test
public void testErrorMsgForInsecureClient() throws IOException {
  Server server;
  TestRpcService proxy = null;

  Configuration serverConf = new Configuration(conf);
  SecurityUtil.setAuthenticationMethod(AuthenticationMethod.KERBEROS, serverConf);
  UserGroupInformation.setConfiguration(serverConf);

  server = setupTestServer(serverConf, 5);

  boolean succeeded = false;
  try {
    // client side stays on the insecure (simple auth) conf
    UserGroupInformation.setConfiguration(conf);
    proxy = getClient(addr, conf);
    proxy.echo(null, newEchoRequest(""));
  } catch (ServiceException e) {
    assertTrue(e.getCause() instanceof RemoteException);
    RemoteException re = (RemoteException) e.getCause();
    LOG.info("LOGGING MESSAGE: " + re.getLocalizedMessage());
    assertEquals("RPC error code should be UNAUTHORIZED",
        RpcErrorCodeProto.FATAL_UNAUTHORIZED, re.getErrorCode());
    assertTrue(re.unwrapRemoteException() instanceof AccessControlException);
    succeeded = true;
  } finally {
    stop(server, proxy);
  }
  assertTrue(succeeded);

  // repeat with two reader threads
  conf.setInt(CommonConfigurationKeys.IPC_SERVER_RPC_READ_THREADS_KEY, 2);

  UserGroupInformation.setConfiguration(serverConf);
  server = setupTestServer(serverConf, 5);

  succeeded = false;
  proxy = null;
  try {
    UserGroupInformation.setConfiguration(conf);
    proxy = getClient(addr, conf);
    proxy.echo(null, newEchoRequest(""));
  } catch (ServiceException e) {
    RemoteException re = (RemoteException) e.getCause();
    LOG.info("LOGGING MESSAGE: " + re.getLocalizedMessage());
    assertEquals("RPC error code should be UNAUTHORIZED",
        RpcErrorCodeProto.FATAL_UNAUTHORIZED, re.getErrorCode());
    assertTrue(re.unwrapRemoteException() instanceof AccessControlException);
    succeeded = true;
  } finally {
    stop(server, proxy);
  }
  assertTrue(succeeded);
}

/**
 * Test that server.stop() properly stops all threads
 */
@Test
public void testStopsAllThreads() throws IOException, InterruptedException {
  Server server;

  int threadsBefore = countThreads("Server$Listener$Reader");
  assertEquals("Expect no Reader threads running before test",
      0, threadsBefore);

  server = setupTestServer(conf, 5);

  try {
    // Wait for at least one reader thread to start (poll up to ~5s)
    int threadsRunning = 0;
    long totalSleepTime = 0;
    do {
      totalSleepTime += 10;
      Thread.sleep(10);
      threadsRunning = countThreads("Server$Listener$Reader");
    } while (threadsRunning == 0 && totalSleepTime < 5000);

    // Validate that at least one thread started (we didn't timeout)
    threadsRunning = countThreads("Server$Listener$Reader");
    assertTrue(threadsRunning > 0);
  } finally {
    server.stop();
  }

  int threadsAfter = countThreads("Server$Listener$Reader");
  assertEquals("Expect no Reader threads left running after test",
      0, threadsAfter);
}

// Builder must reject a missing conf, protocol, or instance with
// HadoopIllegalArgumentException.
@Test
public void testRPCBuilder() throws IOException {
  // Test mandatory field conf
  try {
    new RPC.Builder(null).setProtocol(TestProtocol.class)
        .setInstance(new TestImpl()).setBindAddress(ADDRESS).setPort(0)
        .setNumHandlers(5).setVerbose(true).build();
    fail("Didn't throw HadoopIllegalArgumentException");
  } catch (Exception e) {
    if (!(e instanceof HadoopIllegalArgumentException)) {
      fail("Expecting HadoopIllegalArgumentException but caught " + e);
    }
  }

  // Test mandatory field protocol
  try {
    new RPC.Builder(conf).setInstance(new TestImpl()).setBindAddress(ADDRESS)
        .setPort(0).setNumHandlers(5).setVerbose(true).build();
    fail("Didn't throw HadoopIllegalArgumentException");
  } catch (Exception e) {
    if (!(e instanceof HadoopIllegalArgumentException)) {
      fail("Expecting HadoopIllegalArgumentException but caught " + e);
    }
  }

  // Test mandatory field instance
  try {
    new RPC.Builder(conf).setProtocol(TestProtocol.class)
        .setBindAddress(ADDRESS).setPort(0).setNumHandlers(5)
        .setVerbose(true).build();
    fail("Didn't throw HadoopIllegalArgumentException");
  } catch (Exception e) {
    if (!(e instanceof HadoopIllegalArgumentException)) {
      fail("Expecting HadoopIllegalArgumentException but caught " + e);
    }
  }
}

// Interrupting the calling thread must make an in-flight RPC fail with an
// InterruptedException / InterruptedIOException.
@Test(timeout=90000)
public void testRPCInterruptedSimple() throws Exception {
  Server server;
  TestRpcService proxy = null;

  RPC.Builder builder = newServerBuilder(conf)
      .setNumHandlers(5).setVerbose(true)
      .setSecretManager(null);

  server = setupTestServer(builder);

  try {
    proxy = getClient(addr, conf);
    // Connect to the server
    proxy.ping(null, newEmptyRequest());
    // Interrupt self, try another call
    Thread.currentThread().interrupt();
    try {
      proxy.ping(null, newEmptyRequest());
      fail("Interruption did not cause IPC to fail");
    } catch (ServiceException se) {
      if (se.toString().contains("InterruptedException") ||
          se.getCause() instanceof InterruptedIOException) {
        // clear interrupt status for future tests
        Thread.interrupted();
        return;
      }
      throw se;
    }
  } finally {
    stop(server, proxy);
  }
}

// Interrupting one of many concurrent RPC threads must not propagate errors
// to the others.
@Test(timeout=30000)
public void testRPCInterrupted() throws Exception {
  Server server;

  RPC.Builder builder = newServerBuilder(conf)
      .setNumHandlers(5).setVerbose(true)
      .setSecretManager(null);
  server = setupTestServer(builder);

  int numConcurrentRPC = 200;
  final
CyclicBarrier barrier = new CyclicBarrier(numConcurrentRPC);
final CountDownLatch latch = new CountDownLatch(numConcurrentRPC);
final AtomicBoolean leaderRunning = new AtomicBoolean(true);
final AtomicReference<Throwable> error = new AtomicReference<>();
Thread leaderThread = null;

try {
  // thread 0 is the "leader": it loops until interrupted; all others loop
  // while the leader is still running
  for (int i = 0; i < numConcurrentRPC; i++) {
    final int num = i;
    final TestRpcService proxy = getClient(addr, conf);
    Thread rpcThread = new Thread(new Runnable() {
      @Override
      public void run() {
        try {
          barrier.await();
          while (num == 0 || leaderRunning.get()) {
            proxy.slowPing(null, newSlowPingRequest(false));
          }
          proxy.slowPing(null, newSlowPingRequest(false));
        } catch (Exception e) {
          if (num == 0) {
            leaderRunning.set(false);
          } else {
            // any non-leader failure is a test failure
            error.set(e);
          }
          LOG.error("thread " + num, e);
        } finally {
          latch.countDown();
        }
      }
    });
    rpcThread.start();

    if (leaderThread == null) {
      leaderThread = rpcThread;
    }
  }
  // let threads get past the barrier
  Thread.sleep(1000);
  // stop a single thread
  while (leaderRunning.get()) {
    leaderThread.interrupt();
  }

  latch.await();

  // should not cause any other thread to get an error
  assertTrue("rpc got exception " + error.get(), error.get() == null);
} finally {
  server.stop();
}

// NOTE(review): the block below duplicates the interrupt/await/assert sequence
// already executed inside the try above, and stops the server a second time.
// Redundant but harmless at runtime (leaderRunning is already false and the
// latch already open); looks like a merge artifact — confirm before removing.
// let threads get past the barrier
Thread.sleep(1000);
// stop a single thread
while (leaderRunning.get()) {
  leaderThread.interrupt();
}

latch.await();

// should not cause any other thread to get an error
assertTrue("rpc got exception " + error.get(), error.get() == null);
server.stop();
}

// A call that sleeps longer than the ping interval must survive thanks to
// client-side connection pings.
@Test
public void testConnectionPing() throws Exception {
  Server server;
  TestRpcService proxy = null;

  int pingInterval = 50;
  conf.setBoolean(CommonConfigurationKeys.IPC_CLIENT_PING_KEY, true);
  conf.setInt(CommonConfigurationKeys.IPC_PING_INTERVAL_KEY, pingInterval);
  server = setupTestServer(conf, 5);

  try {
    proxy = getClient(addr, conf);

    proxy.sleep(null, newSleepRequest(pingInterval * 4));
  } finally {
    stop(server, proxy);
  }
}

// Queues ExternalCalls directly on the server (bypassing the client) and
// verifies queueing, ugi propagation, and exception propagation through get().
@Test(timeout=30000)
public void testExternalCall() throws Exception {
  final UserGroupInformation ugi = UserGroupInformation
      .createUserForTesting("user123", new String[0]);
  final IOException expectedIOE = new IOException("boom");

  // use 1 handler so the callq can be plugged
  final Server server = setupTestServer(conf, 1);
  try {
    final AtomicBoolean result = new AtomicBoolean();

    ExternalCall<String> remoteUserCall = newExtCall(ugi,
        new PrivilegedExceptionAction<String>() {
          @Override
          public String run() throws Exception {
            return UserGroupInformation.getCurrentUser().getUserName();
          }
        });

    ExternalCall<String> exceptionCall = newExtCall(ugi,
        new PrivilegedExceptionAction<String>() {
          @Override
          public String run() throws Exception {
            throw expectedIOE;
          }
        });

    final CountDownLatch latch = new CountDownLatch(1);
    final CyclicBarrier barrier = new CyclicBarrier(2);

    ExternalCall<Void> barrierCall = newExtCall(ugi,
        new PrivilegedExceptionAction<Void>() {
          @Override
          public Void run() throws Exception {
            // notify we are in a handler and then wait to keep the callq
            // plugged up
            latch.countDown();
            barrier.await();
            return null;
          }
        });

    server.queueCall(barrierCall);
    server.queueCall(exceptionCall);
    server.queueCall(remoteUserCall);

    // wait for barrier call to enter the handler, check that the other 2
    // calls are actually queued
    latch.await();
    assertEquals(2, server.getCallQueueLen());

    // unplug the callq
    barrier.await();
    barrierCall.get();

    // verify correct ugi is used
    String answer = remoteUserCall.get();
    assertEquals(ugi.getUserName(), answer);

    try {
      exceptionCall.get();
      fail("didn't throw");
    } catch (ExecutionException ee) {
      assertTrue((ee.getCause()) instanceof IOException);
      assertEquals(expectedIOE.getMessage(), ee.getCause().getMessage());
    }
  } finally {
    server.stop();
  }
}

// Builds an ExternalCall whose protocol name is "test" and whose remote user
// is the supplied ugi.
private <T> ExternalCall<T> newExtCall(final UserGroupInformation ugi,
    PrivilegedExceptionAction<T> callable) {
  return new ExternalCall<T>(callable) {
    @Override
    public String getProtocol() {
      return "test";
    }

    @Override
    public UserGroupInformation getRemoteUser() {
      return ugi;
    }
  };
}

@Test
// Drives 1000 ping/echo rounds from two users and verifies queue/processing
// time counters, quantile gauges, and the per-user open-connection metric.
public void testRpcMetrics() throws Exception {
  final Server server;
  TestRpcService proxy = null;

  final int interval = 1;
  conf.setBoolean(CommonConfigurationKeys.RPC_METRICS_QUANTILE_ENABLE, true);
  conf.set(CommonConfigurationKeys.RPC_METRICS_PERCENTILES_INTERVALS_KEY,
      "" + interval);

  server = setupTestServer(conf, 5);
  String testUser = "testUser";
  UserGroupInformation anotherUser =
      UserGroupInformation.createRemoteUser(testUser);
  // second proxy opened as a different remote user so NumOpenConnectionsPerUser
  // has two entries
  TestRpcService proxy2 =
      anotherUser.doAs(new PrivilegedAction<TestRpcService>() {
        public TestRpcService run() {
          try {
            return RPC.getProxy(TestRpcService.class, 0,
                server.getListenerAddress(), conf);
          } catch (IOException e) {
            e.printStackTrace();
          }
          return null;
        }
      });

  try {
    proxy = getClient(addr, conf);

    for (int i = 0; i < 1000; i++) {
      proxy.ping(null, newEmptyRequest());
      proxy.echo(null, newEchoRequest("" + i));
      proxy2.echo(null, newEchoRequest("" + i));
    }

    MetricsRecordBuilder rpcMetrics =
        getMetrics(server.getRpcMetrics().name());
    assertTrue("Expected non-zero rpc queue time",
        getLongCounter("RpcQueueTimeNumOps", rpcMetrics) > 0);
    assertTrue("Expected non-zero rpc processing time",
        getLongCounter("RpcProcessingTimeNumOps", rpcMetrics) > 0);
    MetricsAsserts.assertQuantileGauges("RpcQueueTime" + interval + "s",
        rpcMetrics);
    MetricsAsserts.assertQuantileGauges("RpcProcessingTime" + interval + "s",
        rpcMetrics);

    // one open connection per user, serialized as "user":count pairs
    String actualUserVsCon = MetricsAsserts
        .getStringMetric("NumOpenConnectionsPerUser", rpcMetrics);
    String proxyUser =
        UserGroupInformation.getCurrentUser().getShortUserName();
    assertTrue(actualUserVsCon.contains("\"" + proxyUser + "\":1"));
    assertTrue(actualUserVsCon.contains("\"" + testUser + "\":1"));
  } finally {
    if (proxy2 != null) {
      RPC.stopProxy(proxy2);
    }
    stop(server, proxy);
  }
}

/**
 * Test RPC backoff by queue full.
 */
@Test (timeout=30000)
public void testClientBackOff() throws Exception {
  Server server;
  final TestRpcService proxy;

  boolean succeeded = false;
  final int numClients = 2;
  final List<Future<Void>> res = new ArrayList<Future<Void>>();
  final ExecutorService executorService =
      Executors.newFixedThreadPool(numClients);
  conf.setInt(CommonConfigurationKeys.IPC_CLIENT_CONNECT_MAX_RETRIES_KEY, 0);
  conf.setBoolean(CommonConfigurationKeys.IPC_NAMESPACE +
      ".0." + CommonConfigurationKeys.IPC_BACKOFF_ENABLE, true);
  RPC.Builder builder = newServerBuilder(conf)
      .setQueueSizePerHandler(1).setNumHandlers(1).setVerbose(true);
  server = setupTestServer(builder);

  @SuppressWarnings("unchecked")
  CallQueueManager<Call> spy = spy((CallQueueManager<Call>) Whitebox
      .getInternalState(server, "callQueue"));
  Whitebox.setInternalState(server, "callQueue", spy);

  // NOTE(review): lastException is assigned below but never read in this
  // test — confirm whether it was meant to be logged like in
  // testClientBackOffByResponseTime.
  Exception lastException = null;
  proxy = getClient(addr, conf);
  try {
    // start a sleep RPC call to consume the only handler thread.
    // Start another sleep RPC call to make callQueue full.
    // Start another sleep RPC call to make reader thread block on CallQueue.
    for (int i = 0; i < numClients; i++) {
      res.add(executorService.submit(
          new Callable<Void>() {
            @Override
            public Void call() throws ServiceException, InterruptedException {
              proxy.sleep(null, newSleepRequest(100000));
              return null;
            }
          }));
    }
    while (server.getCallQueueLen() != 1
        && countThreads(CallQueueManager.class.getName()) != 1) {
      Thread.sleep(100);
    }
    // with handler busy and queue full, this call must be backed off
    try {
      proxy.sleep(null, newSleepRequest(100));
    } catch (ServiceException e) {
      RemoteException re = (RemoteException) e.getCause();
      IOException unwrapExeption = re.unwrapRemoteException();
      if (unwrapExeption instanceof RetriableException) {
        succeeded = true;
      } else {
        lastException = unwrapExeption;
      }
    }
  } finally {
    executorService.shutdown();
    stop(server, proxy);
  }
  assertTrue("RetriableException not received", succeeded);
}

/**
 * Test RPC backoff by response time of each priority level.
 */
@Test (timeout=30000)
public void testClientBackOffByResponseTime() throws Exception {
  final TestRpcService proxy;
  boolean succeeded = false;
  final int numClients = 1;

  GenericTestUtils.setLogLevel(DecayRpcScheduler.LOG, Level.DEBUG);
  GenericTestUtils.setLogLevel(RPC.LOG, Level.DEBUG);

  final List<Future<Void>> res = new ArrayList<Future<Void>>();
  final ExecutorService executorService =
      Executors.newFixedThreadPool(numClients);
  conf.setInt(CommonConfigurationKeys.IPC_CLIENT_CONNECT_MAX_RETRIES_KEY, 0);
  final String ns = CommonConfigurationKeys.IPC_NAMESPACE + ".0";
  Server server = setupDecayRpcSchedulerandTestServer(ns + ".");

  @SuppressWarnings("unchecked")
  CallQueueManager<Call> spy = spy((CallQueueManager<Call>) Whitebox
      .getInternalState(server, "callQueue"));
  Whitebox.setInternalState(server, "callQueue", spy);

  Exception lastException = null;
  proxy = getClient(addr, conf);

  // snapshot scheduler metrics so the waitFor below can assert growth
  MetricsRecordBuilder rb1 =
      getMetrics("DecayRpcSchedulerMetrics2." + ns);
  final long beginDecayedCallVolume = MetricsAsserts.getLongCounter(
      "DecayedCallVolume", rb1);
  final long beginRawCallVolume = MetricsAsserts.getLongCounter(
      "CallVolume", rb1);
  final int beginUniqueCaller = MetricsAsserts.getIntCounter("UniqueCallers",
      rb1);

  try {
    // start a sleep RPC call that sleeps 3s.
    for (int i = 0; i < numClients; i++) {
      res.add(executorService.submit(
          new Callable<Void>() {
            @Override
            public Void call() throws ServiceException, InterruptedException {
              proxy.sleep(null, newSleepRequest(3000));
              return null;
            }
          }));
      verify(spy, timeout(500).times(i + 1)).offer(Mockito.<Call>anyObject());
    }
    // Start another sleep RPC call and verify the call is backed off due to
    // avg response time(3s) exceeds threshold (2s).
    try {
      // wait for the 1st response time update
      Thread.sleep(5500);
      proxy.sleep(null, newSleepRequest(100));
    } catch (ServiceException e) {
      RemoteException re = (RemoteException) e.getCause();
      IOException unwrapExeption = re.unwrapRemoteException();
      if (unwrapExeption instanceof RetriableException) {
        succeeded = true;
      } else {
        lastException = unwrapExeption;
      }

      // Lets Metric system update latest metrics
      GenericTestUtils.waitFor(new Supplier<Boolean>() {
        @Override
        public Boolean get() {
          MetricsRecordBuilder rb2 =
              getMetrics("DecayRpcSchedulerMetrics2." + ns);
          long decayedCallVolume1 = MetricsAsserts.getLongCounter(
              "DecayedCallVolume", rb2);
          long rawCallVolume1 = MetricsAsserts.getLongCounter(
              "CallVolume", rb2);
          int uniqueCaller1 = MetricsAsserts.getIntCounter(
              "UniqueCallers", rb2);
          long callVolumePriority0 = MetricsAsserts.getLongGauge(
              "Priority.0.CompletedCallVolume", rb2);
          long callVolumePriority1 = MetricsAsserts.getLongGauge(
              "Priority.1.CompletedCallVolume", rb2);
          double avgRespTimePriority0 = MetricsAsserts.getDoubleGauge(
              "Priority.0.AvgResponseTime", rb2);
          double avgRespTimePriority1 = MetricsAsserts.getDoubleGauge(
              "Priority.1.AvgResponseTime", rb2);

          LOG.info("DecayedCallVolume: " + decayedCallVolume1);
          LOG.info("CallVolume: " + rawCallVolume1);
          LOG.info("UniqueCaller: " + uniqueCaller1);
          LOG.info("Priority.0.CompletedCallVolume: " + callVolumePriority0);
          LOG.info("Priority.1.CompletedCallVolume: " + callVolumePriority1);
          LOG.info("Priority.0.AvgResponseTime: " + avgRespTimePriority0);
          LOG.info("Priority.1.AvgResponseTime: " + avgRespTimePriority1);

          return decayedCallVolume1 > beginDecayedCallVolume
              && rawCallVolume1 > beginRawCallVolume
              && uniqueCaller1 > beginUniqueCaller;
        }
      }, 30, 60000);
    }
  } finally {
    executorService.shutdown();
    stop(server, proxy);
  }
  if (lastException != null) {
    LOG.error("Last received non-RetriableException:", lastException);
  }
  assertTrue("RetriableException not received", succeeded);
}

// Configures a 2-level FairCallQueue + DecayRpcScheduler server with
// response-time back-off enabled (thresholds 2s/4s) under the given namespace.
private Server setupDecayRpcSchedulerandTestServer(String ns)
    throws Exception {
  final int queueSizePerHandler = 3;

  conf.setInt(CommonConfigurationKeys.IPC_CLIENT_CONNECT_MAX_RETRIES_KEY, 0);
  conf.setBoolean(ns + CommonConfigurationKeys.IPC_BACKOFF_ENABLE, true);
  conf.setStrings(ns + CommonConfigurationKeys.IPC_CALLQUEUE_IMPL_KEY,
      "org.apache.hadoop.ipc.FairCallQueue");
  conf.setStrings(ns + CommonConfigurationKeys.IPC_SCHEDULER_IMPL_KEY,
      "org.apache.hadoop.ipc.DecayRpcScheduler");
  conf.setInt(ns + CommonConfigurationKeys.IPC_SCHEDULER_PRIORITY_LEVELS_KEY,
      2);
  conf.setBoolean(ns +
      DecayRpcScheduler.IPC_DECAYSCHEDULER_BACKOFF_RESPONSETIME_ENABLE_KEY,
      true);
  // set a small thresholds 2s and 4s for level 0 and level 1 for testing
  conf.set(ns +
      DecayRpcScheduler.IPC_DECAYSCHEDULER_BACKOFF_RESPONSETIME_THRESHOLDS_KEY,
      "2s, 4s");

  // Set max queue size to 3 so that 2 calls from the test won't trigger
  // back off because the queue is full.
  RPC.Builder builder = newServerBuilder(conf)
      .setQueueSizePerHandler(queueSizePerHandler).setNumHandlers(1)
      .setVerbose(true);
  return setupTestServer(builder);
}

/**
 * Test RPC timeout.
 */
@Test(timeout=30000)
public void testClientRpcTimeout() throws Exception {
  Server server;
  TestRpcService proxy = null;
  RPC.Builder builder = newServerBuilder(conf)
      .setQueueSizePerHandler(1).setNumHandlers(1).setVerbose(true);
  server = setupTestServer(builder);

  try {
    // Test RPC timeout with default ipc.client.ping.
    try {
      Configuration c = new Configuration(conf);
      c.setInt(CommonConfigurationKeys.IPC_CLIENT_RPC_TIMEOUT_KEY, 1000);
      proxy = getClient(addr, c);
      proxy.sleep(null, newSleepRequest(3000));
      fail("RPC should time out.");
    } catch (ServiceException e) {
      assertTrue(e.getCause() instanceof SocketTimeoutException);
      LOG.info("got expected timeout.", e);
    }

    // Test RPC timeout when ipc.client.ping is false.
    try {
      Configuration c = new Configuration(conf);
      c.setBoolean(CommonConfigurationKeys.IPC_CLIENT_PING_KEY, false);
      c.setInt(CommonConfigurationKeys.IPC_CLIENT_RPC_TIMEOUT_KEY, 1000);
      proxy = getClient(addr, c);
      proxy.sleep(null, newSleepRequest(3000));
      fail("RPC should time out.");
    } catch (ServiceException e) {
      assertTrue(e.getCause() instanceof SocketTimeoutException);
      LOG.info("got expected timeout.", e);
    }

    // Test negative timeout value (disables the rpc timeout entirely).
    try {
      Configuration c = new Configuration(conf);
      c.setInt(CommonConfigurationKeys.IPC_CLIENT_RPC_TIMEOUT_KEY, -1);
      proxy = getClient(addr, c);
      proxy.sleep(null, newSleepRequest(2000));
    } catch (ServiceException e) {
      LOG.info("got unexpected exception.", e);
      fail("RPC should not time out.");
    }

    // Test RPC timeout greater than ipc.ping.interval.
    try {
      Configuration c = new Configuration(conf);
      c.setBoolean(CommonConfigurationKeys.IPC_CLIENT_PING_KEY, true);
      c.setInt(CommonConfigurationKeys.IPC_PING_INTERVAL_KEY, 800);
      c.setInt(CommonConfigurationKeys.IPC_CLIENT_RPC_TIMEOUT_KEY, 1000);
      proxy = getClient(addr, c);

      try {
        // should not time out because effective rpc-timeout is
        // multiple of ping interval: 1600 (= 800 * (1000 / 800 + 1))
        proxy.sleep(null, newSleepRequest(1300));
      } catch (ServiceException e) {
        LOG.info("got unexpected exception.", e);
        fail("RPC should not time out.");
      }

      proxy.sleep(null, newSleepRequest(2000));
      fail("RPC should time out.");
    } catch (ServiceException e) {
      assertTrue(e.getCause() instanceof SocketTimeoutException);
      LOG.info("got expected timeout.", e);
    }
  } finally {
    stop(server, proxy);
  }
}

// Standalone entry point: runs the main call battery outside of JUnit.
public static void main(String[] args) throws Exception {
  new TestRPC().testCallsInternal(conf);
}
}
/* * Copyright 2017-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.facebook.buck.ide.intellij; import com.facebook.buck.log.Logger; import com.google.common.base.Preconditions; import java.io.File; import java.io.IOException; import java.nio.file.Path; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.OutputKeys; import javax.xml.transform.Transformer; import javax.xml.transform.TransformerException; import javax.xml.transform.TransformerFactory; import javax.xml.transform.dom.DOMSource; import javax.xml.transform.stream.StreamResult; import javax.xml.xpath.XPath; import javax.xml.xpath.XPathConstants; import javax.xml.xpath.XPathExpressionException; import javax.xml.xpath.XPathFactory; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.NamedNodeMap; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import org.xml.sax.SAXException; /** * Updates .idea/workspace.xml to avoid doing some operations by IntelliJ. * * <p>It updates a list of ignored files based on the list of all excluded folders from all modules * in a project. IntelliJ takes quadratic time to construct this list (see * https://youtrack.jetbrains.com/issue/IDEA-174335). 
*/ public class WorkspaceUpdater { private static final Logger LOG = Logger.get(WorkspaceUpdater.class); private final File workspaceFile; public WorkspaceUpdater(Path projectIdeaConfigPath) { workspaceFile = projectIdeaConfigPath.resolve("workspace.xml").toFile(); } public File getWorkspaceFile() { return workspaceFile; } public void updateOrCreateWorkspace() throws IOException { boolean workspaceUpdated = false; Document workspaceDocument = null; if (workspaceFile.exists()) { try { LOG.debug("Trying to update existing workspace."); workspaceDocument = updateExistingWorkspace(workspaceFile); workspaceUpdated = true; } catch (ParserConfigurationException | SAXException | XPathExpressionException e) { LOG.error("Cannot update workspace.xml file, trying re-create it", e); } if (!workspaceUpdated && !workspaceFile.delete()) { LOG.warn("Cannot remove file: %s", workspaceFile.getAbsolutePath()); return; } } if (!workspaceUpdated) { try { workspaceDocument = createNewWorkspace(); } catch (ParserConfigurationException e) { LOG.error("Cannot create workspace.xml file", e); return; } } try { writeDocument(Preconditions.checkNotNull(workspaceDocument), workspaceFile); } catch (TransformerException e) { LOG.error(e, "Cannot create workspace in %s", workspaceFile); } } private static Document updateExistingWorkspace(File workspaceFile) throws ParserConfigurationException, IOException, SAXException, XPathExpressionException { Document workspaceDocument = parseWorkspaceFile(workspaceFile); removeIgnoredFoldersAndSetConvertedFlag(workspaceDocument); return workspaceDocument; } private static DocumentBuilder createDocumentBuilder() throws ParserConfigurationException { DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance(); documentBuilderFactory.setNamespaceAware(true); return documentBuilderFactory.newDocumentBuilder(); } private static Document parseWorkspaceFile(File workspaceFile) throws ParserConfigurationException, IOException, SAXException { 
Document workspaceDocument = createDocumentBuilder().parse(workspaceFile); workspaceDocument.setXmlStandalone(true); return workspaceDocument; } private static void removeIgnoredFoldersAndSetConvertedFlag(Document workspaceDocument) throws XPathExpressionException { XPath xpath = XPathFactory.newInstance().newXPath(); NodeList changeListManagerNodeList = findIgnoreNodesInChangeListManager(xpath, workspaceDocument); Node parentNode; if (changeListManagerNodeList.getLength() == 0) { parentNode = findChangeListManagerNode(xpath, workspaceDocument); } else { Node firstNode = changeListManagerNodeList.item(0); Node lastNode = changeListManagerNodeList.item(changeListManagerNodeList.getLength() - 1); parentNode = firstNode.getParentNode(); removeNodeRange(parentNode, firstNode, lastNode); } if (parentNode == null) { Node projectNode = findProjectNode(xpath, workspaceDocument); if (projectNode == null) { projectNode = createNewProjectNode(workspaceDocument); workspaceDocument.appendChild(projectNode); } parentNode = createNewChangeListManagerNode(workspaceDocument); projectNode.appendChild(parentNode); } ensureExcludedConvertedToIgnoredOptionSetToTrue(workspaceDocument, xpath, parentNode); } private static NodeList findIgnoreNodesInChangeListManager( XPath xpath, Document workspaceDocument) throws XPathExpressionException { return (NodeList) xpath .compile("/project/component[@name = 'ChangeListManager']/ignored") .evaluate(workspaceDocument, XPathConstants.NODESET); } private static Node findChangeListManagerNode(XPath xpath, Document workspaceDocument) throws XPathExpressionException { return (Node) xpath .compile("/project/component[@name = 'ChangeListManager']") .evaluate(workspaceDocument, XPathConstants.NODE); } private static Node findProjectNode(XPath xpath, Document workspaceDocument) throws XPathExpressionException { return (Node) xpath.compile("/project").evaluate(workspaceDocument, XPathConstants.NODE); } private static void removeNodeRange(Node parentNode, 
Node firstNode, Node lastNode) { Node currentNode = firstNode; while (currentNode != lastNode) { Node nextNode = currentNode.getNextSibling(); parentNode.removeChild(currentNode); currentNode = nextNode; } parentNode.removeChild(lastNode); } private static void ensureExcludedConvertedToIgnoredOptionSetToTrue( Document workspaceDocument, XPath xpath, Node parentNode) throws XPathExpressionException { String excludedConvertedToIgnoredLocation = "/project/component[@name = 'ChangeListManager']/" + "option[@name = 'EXCLUDED_CONVERTED_TO_IGNORED']"; Node excludedConvertedToIgnoredOption = (Node) xpath .compile(excludedConvertedToIgnoredLocation) .evaluate(workspaceDocument, XPathConstants.NODE); if (excludedConvertedToIgnoredOption == null) { parentNode.appendChild(createNewOptionExcludedConvertedToIgnoredNode(workspaceDocument)); } else { NamedNodeMap attributes = excludedConvertedToIgnoredOption.getAttributes(); Node valueNode = attributes.getNamedItem("value"); valueNode.setTextContent("true"); } } private static Document createNewWorkspace() throws ParserConfigurationException { Document workspaceDocument = createNewWorkspaceDocument(); Element project = addNewProjectNode(workspaceDocument); setExcludedFlag(workspaceDocument, project); return workspaceDocument; } private static Document createNewWorkspaceDocument() throws ParserConfigurationException { Document workspaceDocument = createDocumentBuilder().newDocument(); workspaceDocument.setXmlStandalone(true); return workspaceDocument; } private static Element addNewProjectNode(Document workspaceDocument) { Element project = workspaceDocument.createElement("project"); project.setAttribute("version", "4"); workspaceDocument.appendChild(project); return project; } private static void setExcludedFlag(Document workspaceDocument, Element project) { Element changeListManager = workspaceDocument.createElement("component"); changeListManager.setAttribute("name", "ChangeListManager"); project.appendChild(changeListManager); 
changeListManager.appendChild(createNewOptionExcludedConvertedToIgnoredNode(workspaceDocument)); } private static Element createNewProjectNode(Document workspaceDocument) { Element project = workspaceDocument.createElement("project"); project.setAttribute("version", "4"); return project; } private static Element createNewChangeListManagerNode(Document workspaceDocument) { Element component = workspaceDocument.createElement("component"); component.setAttribute("name", "ChangeListManager"); return component; } private static Element createNewOptionExcludedConvertedToIgnoredNode(Document workspaceDocument) { Element optionExcludedConvertedToIgnored = workspaceDocument.createElement("option"); optionExcludedConvertedToIgnored.setAttribute("name", "EXCLUDED_CONVERTED_TO_IGNORED"); optionExcludedConvertedToIgnored.setAttribute("value", "true"); return optionExcludedConvertedToIgnored; } private static void writeDocument(Document workspaceDocument, File destination) throws TransformerException { Transformer transformer = TransformerFactory.newInstance().newTransformer(); transformer.setOutputProperty(OutputKeys.INDENT, "yes"); transformer.setOutputProperty(OutputKeys.METHOD, "xml"); transformer.setOutputProperty("{http://xml.apache.org/xslt}indent-amount", "2"); transformer.transform(new DOMSource(workspaceDocument), new StreamResult(destination)); } }
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ package org.elasticsearch.xpack.monitoring.exporter.http; import com.unboundid.util.Base64; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.admin.indices.recovery.RecoveryResponse; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.client.Requests; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexTemplateMetadata; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.env.Environment; import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.rest.RestUtils; import 
org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.Scope; import org.elasticsearch.test.http.MockRequest; import org.elasticsearch.test.http.MockResponse; import org.elasticsearch.test.http.MockWebServer; import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringDoc; import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringTemplateUtils; import org.elasticsearch.xpack.core.ssl.SSLService; import org.elasticsearch.xpack.monitoring.LocalStateMonitoring; import org.elasticsearch.xpack.monitoring.MonitoringService; import org.elasticsearch.xpack.monitoring.MonitoringTestUtils; import org.elasticsearch.xpack.monitoring.collector.indices.IndexRecoveryMonitoringDoc; import org.elasticsearch.xpack.monitoring.exporter.ClusterAlertsUtil; import org.elasticsearch.xpack.monitoring.exporter.ExportBulk; import org.elasticsearch.xpack.monitoring.exporter.Exporter; import org.elasticsearch.xpack.monitoring.test.MonitoringIntegTestCase; import org.junit.After; import org.junit.Before; import java.io.IOException; import java.nio.charset.StandardCharsets; import java.time.Instant; import java.time.ZoneOffset; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.xpack.core.monitoring.exporter.MonitoringTemplateUtils.LAST_UPDATED_VERSION; import static org.elasticsearch.xpack.core.monitoring.exporter.MonitoringTemplateUtils.TEMPLATE_VERSION; import static org.elasticsearch.xpack.core.monitoring.exporter.MonitoringTemplateUtils.indexName; import static org.elasticsearch.xpack.monitoring.exporter.http.ClusterAlertHttpResource.CLUSTER_ALERT_VERSION_PARAMETERS; import static 
org.elasticsearch.xpack.monitoring.exporter.http.PublishableHttpResource.FILTER_PATH_RESOURCE_VERSION;
import static org.elasticsearch.xpack.monitoring.exporter.http.WatcherExistsHttpResource.WATCHER_CHECK_PARAMETERS;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.notNullValue;

/**
 * Integration tests for the monitoring HTTP exporter, driven against a
 * {@link MockWebServer} that plays the role of the remote monitoring cluster.
 */
@ESIntegTestCase.ClusterScope(scope = Scope.TEST, numDataNodes = 1, numClientNodes = 0, supportsDedicatedMasters = false)
public class HttpExporterIT extends MonitoringIntegTestCase {

    // Randomized fixtures: which resources already exist on the "remote" cluster.
    private final List<String> clusterAlertBlacklist =
            rarely() ? randomSubsetOf(Arrays.asList(ClusterAlertsUtil.WATCH_IDS)) : Collections.emptyList();
    private final boolean templatesExistsAlready = randomBoolean();
    private final boolean includeOldTemplates = randomBoolean();
    private final boolean pipelineExistsAlready = randomBoolean();
    private final boolean remoteClusterAllowsWatcher = randomBoolean();
    private final boolean currentLicenseAllowsWatcher = true;
    private final boolean watcherAlreadyExists = randomBoolean();
    private final Environment environment =
            TestEnvironment.newEnvironment(Settings.builder().put("path.home", createTempDir()).build());
    private final String userName = "elasticuser";

    private MockWebServer webServer;
    private MockSecureSettings mockSecureSettings = new MockSecureSettings();

    @Override
    protected Settings nodeSettings(int nodeOrdinal) {
        Settings.Builder builder = Settings.builder()
                .put(super.nodeSettings(nodeOrdinal))
                .put(MonitoringService.INTERVAL.getKey(), MonitoringService.MIN_INTERVAL)
                // we do this by default in core, but for monitoring this isn't needed and only adds noise.
                .put("indices.lifecycle.history_index_enabled", false)
                .put("index.store.mock.check_index_on_close", false);
        return builder.build();
    }

    @Before
    public void startWebServer() throws IOException {
        webServer = createMockWebServer();
    }

    @After
    public void stopWebServer() {
        if (webServer != null) {
            webServer.close();
        }
    }

    @Override
    protected boolean ignoreExternalCluster() {
        return true;
    }

    // Registers the secure password and returns the base settings with it attached.
    private Settings.Builder secureSettings(String password) {
        mockSecureSettings.setString("xpack.monitoring.exporters._http.auth.secure_password", password);
        return baseSettings().setSecureSettings(mockSecureSettings);
    }

    // Common exporter settings pointing at the mock web server.
    private Settings.Builder baseSettings() {
        return Settings.builder()
                .put("xpack.monitoring.exporters._http.enabled", false)
                .put("xpack.monitoring.exporters._http.type", "http")
                .put("xpack.monitoring.exporters._http.ssl.truststore.password", "foobar") // ensure that ssl can be used by settings
                .put("xpack.monitoring.exporters._http.headers.ignored", "value") // ensure that headers can be used by settings
                .put("xpack.monitoring.exporters._http.host", getFormattedAddress(webServer))
                .putList("xpack.monitoring.exporters._http.cluster_alerts.management.blacklist", clusterAlertBlacklist)
                .put("xpack.monitoring.exporters._http.index.template.create_legacy_templates", includeOldTemplates)
                .put("xpack.monitoring.exporters._http.auth.username", userName);
    }

    // Happy path: set up resources, export random docs, verify the bulk request.
    public void testExport() throws Exception {
        final Settings settings = baseSettings().build();

        enqueueGetClusterVersionResponse(Version.CURRENT);
        enqueueSetupResponses(webServer, templatesExistsAlready, includeOldTemplates, pipelineExistsAlready,
                remoteClusterAllowsWatcher, currentLicenseAllowsWatcher, watcherAlreadyExists);
        enqueueResponse(200, "{\"errors\": false, \"msg\": \"successful bulk request\"}");

        final int nbDocs = randomIntBetween(1, 25);
        export(settings, newRandomMonitoringDocs(nbDocs));

        assertMonitorResources(webServer, templatesExistsAlready, includeOldTemplates, pipelineExistsAlready,
                remoteClusterAllowsWatcher, currentLicenseAllowsWatcher, watcherAlreadyExists);
        assertBulk(webServer, nbDocs);
    }

    // Verifies that reloading the exporter picks up a changed secure password
    // by inspecting the Basic Authorization header of the recorded requests.
    public void testSecureSetting() throws Exception {
        final String securePassword1 = "elasticpass";
        final String securePassword2 = "anotherpassword";
        final String authHeaderValue = Base64.encode(userName + ":" + securePassword1);
        final String authHeaderValue2 = Base64.encode(userName + ":" + securePassword2);

        Settings settings = secureSettings(securePassword1).build();
        PluginsService pluginsService = internalCluster().getInstances(PluginsService.class).iterator().next();
        LocalStateMonitoring localStateMonitoring = pluginsService.filterPlugins(LocalStateMonitoring.class).iterator().next();
        localStateMonitoring.getMonitoring().reload(settings);

        enqueueGetClusterVersionResponse(Version.CURRENT);
        enqueueSetupResponses(webServer, templatesExistsAlready, includeOldTemplates, pipelineExistsAlready,
                remoteClusterAllowsWatcher, currentLicenseAllowsWatcher, watcherAlreadyExists);
        enqueueResponse(200, "{\"errors\": false, \"msg\": \"successful bulk request\"}");

        final int nbDocs = randomIntBetween(1, 25);
        export(settings, newRandomMonitoringDocs(nbDocs));

        assertEquals(webServer.takeRequest().getHeader("Authorization").replace("Basic", "").replace(" ", ""), authHeaderValue);
        webServer.clearRequests();

        settings = secureSettings(securePassword2).build();
        localStateMonitoring.getMonitoring().reload(settings);

        enqueueGetClusterVersionResponse(Version.CURRENT);
        enqueueSetupResponses(webServer, templatesExistsAlready, includeOldTemplates, pipelineExistsAlready,
                remoteClusterAllowsWatcher, currentLicenseAllowsWatcher, watcherAlreadyExists);
        enqueueResponse(200, "{\"errors\": false, \"msg\": \"successful bulk request\"}");

        export(settings, newRandomMonitoringDocs(nbDocs));

        assertEquals(webServer.takeRequest().getHeader("Authorization").replace("Basic", "").replace(" ", ""), authHeaderValue2);
    }

    // Custom HTTP headers configured on the exporter must be sent with every request.
    public void testExportWithHeaders() throws Exception {
        final String headerValue =
randomAlphaOfLengthBetween(3, 9);
        final String[] array = generateRandomStringArray(2, 4, false, false);

        final Map<String, String[]> headers = new HashMap<>();
        headers.put("X-Cloud-Cluster", new String[] { headerValue });
        headers.put("X-Found-Cluster", new String[] { headerValue });
        headers.put("Array-Check", array);

        final Settings settings = baseSettings()
                .put("xpack.monitoring.exporters._http.headers.X-Cloud-Cluster", headerValue)
                .put("xpack.monitoring.exporters._http.headers.X-Found-Cluster", headerValue)
                .putList("xpack.monitoring.exporters._http.headers.Array-Check", array)
                .build();

        enqueueGetClusterVersionResponse(Version.CURRENT);
        enqueueSetupResponses(webServer, templatesExistsAlready, includeOldTemplates, pipelineExistsAlready,
                remoteClusterAllowsWatcher, currentLicenseAllowsWatcher, watcherAlreadyExists);
        enqueueResponse(200, "{\"errors\": false, \"msg\": \"successful bulk request\"}");

        final int nbDocs = randomIntBetween(1, 25);
        export(settings, newRandomMonitoringDocs(nbDocs));

        assertMonitorResources(webServer, templatesExistsAlready, includeOldTemplates, pipelineExistsAlready,
                remoteClusterAllowsWatcher, currentLicenseAllowsWatcher, watcherAlreadyExists, headers, null);
        assertBulk(webServer, nbDocs, headers, null);
    }

    // A configured proxy base path must be prepended to every request path
    // (optionally combined with custom headers).
    public void testExportWithBasePath() throws Exception {
        final boolean useHeaders = randomBoolean();
        final String headerValue = randomAlphaOfLengthBetween(3, 9);
        final String[] array = generateRandomStringArray(2, 4, false, false);

        final Map<String, String[]> headers = new HashMap<>();

        if (useHeaders) {
            headers.put("X-Cloud-Cluster", new String[] { headerValue });
            headers.put("X-Found-Cluster", new String[] { headerValue });
            headers.put("Array-Check", array);
        }

        String basePath = "path/to";

        if (randomBoolean()) {
            basePath += "/something";

            if (rarely()) {
                basePath += "/proxied";
            }
        }

        if (randomBoolean()) {
            basePath = "/" + basePath;
        }

        final Settings.Builder builder = baseSettings()
                .put("xpack.monitoring.exporters._http.proxy.base_path", basePath + (randomBoolean() ? "/" : ""));

        if (useHeaders) {
            builder.put("xpack.monitoring.exporters._http.headers.X-Cloud-Cluster", headerValue)
                    .put("xpack.monitoring.exporters._http.headers.X-Found-Cluster", headerValue)
                    .putList("xpack.monitoring.exporters._http.headers.Array-Check", array);
        }

        enqueueGetClusterVersionResponse(Version.CURRENT);
        enqueueSetupResponses(webServer, templatesExistsAlready, includeOldTemplates, pipelineExistsAlready,
                remoteClusterAllowsWatcher, currentLicenseAllowsWatcher, watcherAlreadyExists);
        enqueueResponse(200, "{\"errors\": false}");

        final int nbDocs = randomIntBetween(1, 25);
        export(builder.build(), newRandomMonitoringDocs(nbDocs));

        assertMonitorResources(webServer, templatesExistsAlready, includeOldTemplates, pipelineExistsAlready,
                remoteClusterAllowsWatcher, currentLicenseAllowsWatcher, watcherAlreadyExists, headers, basePath);
        assertBulk(webServer, nbDocs, headers, basePath);
    }

    // Switching the exporter to a new host must re-run the resource checks
    // (templates / pipelines / watches) against that new host.
    public void testHostChangeReChecksTemplate() throws Exception {
        final Settings settings = baseSettings().build();

        enqueueGetClusterVersionResponse(Version.CURRENT);
        enqueueSetupResponses(webServer, templatesExistsAlready, includeOldTemplates, pipelineExistsAlready,
                remoteClusterAllowsWatcher, currentLicenseAllowsWatcher, watcherAlreadyExists);
        enqueueResponse(200, "{\"errors\": false}");

        export(settings, Collections.singletonList(newRandomMonitoringDoc()));

        assertMonitorResources(webServer, templatesExistsAlready, includeOldTemplates, pipelineExistsAlready,
                remoteClusterAllowsWatcher, currentLicenseAllowsWatcher, watcherAlreadyExists);
        assertBulk(webServer);

        try (MockWebServer secondWebServer = createMockWebServer()) {
            String missingTemplate = null;

            final Settings newSettings = Settings.builder()
                    .put(settings)
                    .putList("xpack.monitoring.exporters._http.host", getFormattedAddress(secondWebServer))
                    .build();

            enqueueGetClusterVersionResponse(secondWebServer, Version.CURRENT);
            // pretend that one of the templates is missing
            for (Tuple<String, String> template : monitoringTemplates(includeOldTemplates)) {
                if (missingTemplate != null) {
                    enqueueResponse(secondWebServer, 200, "{\"" + template.v1() + "\":{\"version\":" + LAST_UPDATED_VERSION + "}}");
                } else {
                    missingTemplate = template.v1();

                    enqueueResponse(secondWebServer, 404, "template [" + template.v1() + "] does not exist");
                    enqueueResponse(secondWebServer, 201, "template [" + template.v1() + "] created");
                }
            }
            // opposite of if it existed before
            enqueuePipelineResponses(secondWebServer, !pipelineExistsAlready);
            enqueueWatcherResponses(secondWebServer, remoteClusterAllowsWatcher, currentLicenseAllowsWatcher, watcherAlreadyExists);
            enqueueResponse(secondWebServer, 200, "{\"errors\": false}");

            // second event
            export(newSettings, Collections.singletonList(newRandomMonitoringDoc()));

            assertMonitorVersion(secondWebServer);

            String resourcePrefix = "/_template/";
            for (Tuple<String, String> template : monitoringTemplates(includeOldTemplates)) {
                MockRequest recordedRequest = secondWebServer.takeRequest();
                assertThat(recordedRequest.getMethod(), equalTo("GET"));
                assertThat(recordedRequest.getUri().getPath(), equalTo(resourcePrefix + template.v1()));
                assertMonitorVersionQueryString(recordedRequest.getUri().getQuery(), Collections.emptyMap());

                if (missingTemplate.equals(template.v1())) {
                    recordedRequest = secondWebServer.takeRequest();

                    assertThat(recordedRequest.getMethod(), equalTo("PUT"));
                    assertThat(recordedRequest.getUri().getPath(), equalTo(resourcePrefix + template.v1()));
                    assertMonitorVersionQueryString(recordedRequest.getUri().getQuery(), Collections.emptyMap());
                    assertThat(recordedRequest.getBody(), equalTo(getExternalTemplateRepresentation(template.v2())));
                }
            }
            assertMonitorPipelines(secondWebServer, !pipelineExistsAlready, null, null);
            assertMonitorWatches(secondWebServer, remoteClusterAllowsWatcher, currentLicenseAllowsWatcher,
                    watcherAlreadyExists, null, null);
            assertBulk(secondWebServer);
        }
    }

    // An unsupported remote cluster version must prevent the exporter from being used.
    public void testUnsupportedClusterVersion() throws Exception {
        final
Settings settings = Settings.builder()
                .put("xpack.monitoring.exporters._http.type", "http")
                .put("xpack.monitoring.exporters._http.host", getFormattedAddress(webServer))
                .build();

        // returning an unsupported cluster version
        enqueueGetClusterVersionResponse(
                randomFrom(Version.fromString("0.18.0"), Version.fromString("1.0.0"), Version.fromString("1.4.0"),
                        Version.fromString("2.4.0"), Version.fromString("5.0.0"), Version.fromString("5.4.0")));

        // ensure that the exporter is not able to be used
        try (HttpExporter exporter = createHttpExporter(settings)) {
            final CountDownLatch awaitResponseAndClose = new CountDownLatch(1);

            final ActionListener<ExportBulk> listener = ActionListener.wrap(
                    bulk -> {
                        assertNull(bulk);

                        awaitResponseAndClose.countDown();
                    },
                    e -> fail(e.getMessage())
            );

            exporter.openBulk(listener);

            // wait for it to actually respond
            assertTrue(awaitResponseAndClose.await(15, TimeUnit.SECONDS));
        }

        assertThat(webServer.requests(), hasSize(1));
        assertMonitorVersion(webServer);
    }

    // Changing the index.name.time_format setting must change the generated index name.
    public void testDynamicIndexFormatChange() throws Exception {
        final Settings settings = baseSettings().build();

        enqueueGetClusterVersionResponse(Version.CURRENT);
        enqueueSetupResponses(webServer, templatesExistsAlready, includeOldTemplates, pipelineExistsAlready,
                remoteClusterAllowsWatcher, currentLicenseAllowsWatcher, watcherAlreadyExists);
        enqueueResponse(200, "{\"errors\": false, \"msg\": \"successful bulk request\"}");

        MonitoringDoc doc = newRandomMonitoringDoc();
        export(settings, Collections.singletonList(doc));

        assertMonitorResources(webServer, templatesExistsAlready, includeOldTemplates, pipelineExistsAlready,
                remoteClusterAllowsWatcher, currentLicenseAllowsWatcher, watcherAlreadyExists);
        MockRequest recordedRequest = assertBulk(webServer);

        DateFormatter formatter = DateFormatter.forPattern("yyyy.MM.dd").withZone(ZoneOffset.UTC);
        String indexName = indexName(formatter, doc.getSystem(), doc.getTimestamp());

        byte[] bytes = recordedRequest.getBody().getBytes(StandardCharsets.UTF_8);
        Map<String, Object> data = XContentHelper.convertToMap(new BytesArray(bytes), false, XContentType.JSON).v2();
        @SuppressWarnings("unchecked")
        Map<String, Object> index = (Map<String, Object>) data.get("index");
        assertThat(index.get("_index"), equalTo(indexName));

        String newTimeFormat = randomFrom("yy", "yyyy", "yyyy.MM", "yyyy-MM", "MM.yyyy", "MM");
        final Settings newSettings = Settings.builder()
                .put(settings)
                .put("xpack.monitoring.exporters._http.index.name.time_format", newTimeFormat)
                .build();

        enqueueGetClusterVersionResponse(Version.CURRENT);
        enqueueSetupResponses(webServer, true, includeOldTemplates, true, true, true, true);
        enqueueResponse(200, "{\"errors\": false, \"msg\": \"successful bulk request\"}");

        doc = newRandomMonitoringDoc();
        export(newSettings, Collections.singletonList(doc));

        DateFormatter newTimeFormatter = DateFormatter.forPattern(newTimeFormat).withZone(ZoneOffset.UTC);
        String expectedMonitoringIndex = ".monitoring-es-" + TEMPLATE_VERSION + "-"
                + newTimeFormatter.format(Instant.ofEpochMilli(doc.getTimestamp()));

        assertMonitorResources(webServer, true, includeOldTemplates, true, true, true, true);
        recordedRequest = assertBulk(webServer);

        bytes = recordedRequest.getBody().getBytes(StandardCharsets.UTF_8);
        data = XContentHelper.convertToMap(new BytesArray(bytes), false, XContentType.JSON).v2();
        @SuppressWarnings("unchecked")
        final Map<String, Object> newIndex = (Map<String, Object>) data.get("index");
        assertThat(newIndex.get("_index"), equalTo(expectedMonitoringIndex));
    }

    private void assertMonitorVersion(final MockWebServer webServer) throws Exception {
        assertMonitorVersion(webServer, null, null);
    }

    // Asserts the initial GET version probe (path prefix + filter_path query + headers).
    private void assertMonitorVersion(final MockWebServer webServer,
                                      @Nullable final Map<String, String[]> customHeaders,
                                      @Nullable final String basePath) throws Exception {
        final MockRequest request = webServer.takeRequest();

        assertThat(request.getMethod(), equalTo("GET"));
        final String pathPrefix = basePathToAssertablePrefix(basePath);
        if (Strings.isEmpty(pathPrefix) == false) {
            assertThat(request.getUri().getPath(), equalTo(pathPrefix + "/"));
        }
        assertThat(request.getUri().getQuery(), equalTo("filter_path=version.number"));
        assertHeaders(request, customHeaders);
    }

    private void assertMonitorResources(final MockWebServer webServer,
                                        final boolean templateAlreadyExists, final boolean includeOldTemplates,
                                        final boolean pipelineAlreadyExists,
                                        final boolean remoteClusterAllowsWatcher,
                                        final boolean currentLicenseAllowsWatcher,
                                        final boolean watcherAlreadyExists) throws Exception {
        assertMonitorResources(webServer, templateAlreadyExists, includeOldTemplates, pipelineAlreadyExists,
                remoteClusterAllowsWatcher, currentLicenseAllowsWatcher, watcherAlreadyExists, null, null);
    }

    // Asserts the full setup sequence in order: version, templates, pipelines, watches.
    private void assertMonitorResources(final MockWebServer webServer,
                                        final boolean templateAlreadyExists, final boolean includeOldTemplates,
                                        final boolean pipelineAlreadyExists,
                                        final boolean remoteClusterAllowsWatcher,
                                        final boolean currentLicenseAllowsWatcher,
                                        final boolean watcherAlreadyExists,
                                        @Nullable final Map<String, String[]> customHeaders,
                                        @Nullable final String basePath) throws Exception {
        assertMonitorVersion(webServer, customHeaders, basePath);
        assertMonitorTemplates(webServer, templateAlreadyExists, includeOldTemplates, customHeaders, basePath);
        assertMonitorPipelines(webServer, pipelineAlreadyExists, customHeaders, basePath);
        assertMonitorWatches(webServer, remoteClusterAllowsWatcher, currentLicenseAllowsWatcher, watcherAlreadyExists,
                customHeaders, basePath);
    }

    private void assertMonitorTemplates(final MockWebServer webServer,
                                        final boolean alreadyExists, final boolean includeOldTemplates,
                                        @Nullable final Map<String, String[]> customHeaders,
                                        @Nullable final String basePath) throws Exception {
        final List<Tuple<String, String>> templates = monitoringTemplates(includeOldTemplates);
        assertMonitorVersionResource(webServer, alreadyExists, "/_template/", templates, customHeaders, basePath);
    }

    private void
assertMonitorPipelines(final MockWebServer webServer,
                           final boolean alreadyExists,
                           @Nullable final Map<String, String[]> customHeaders,
                           @Nullable final String basePath) throws Exception {
        assertMonitorVersionResource(webServer, alreadyExists, "/_ingest/pipeline/", monitoringPipelines(),
                customHeaders, basePath);
    }

    // Generic GET-then-PUT assertion for versioned resources (templates / pipelines):
    // every resource is probed with GET; a PUT follows only when it did not exist.
    private void assertMonitorVersionResource(final MockWebServer webServer, final boolean alreadyExists,
                                              final String resourcePrefix, final List<Tuple<String, String>> resources,
                                              @Nullable final Map<String, String[]> customHeaders,
                                              @Nullable final String basePath) throws Exception {
        final String pathPrefix = basePathToAssertablePrefix(basePath);

        for (Tuple<String, String> resource : resources) {
            final MockRequest getRequest = webServer.takeRequest();

            assertThat(getRequest.getMethod(), equalTo("GET"));
            assertThat(getRequest.getUri().getPath(), equalTo(pathPrefix + resourcePrefix + resource.v1()));
            assertMonitorVersionQueryString(getRequest.getUri().getQuery(), Collections.emptyMap());
            assertHeaders(getRequest, customHeaders);

            if (alreadyExists == false) {
                final MockRequest putRequest = webServer.takeRequest();

                assertThat(putRequest.getMethod(), equalTo("PUT"));
                assertThat(putRequest.getUri().getPath(), equalTo(pathPrefix + resourcePrefix + resource.v1()));
                Map<String, String> parameters = Collections.emptyMap();
                assertMonitorVersionQueryString(putRequest.getUri().getQuery(), parameters);
                if (resourcePrefix.startsWith("/_template")) {
                    assertThat(putRequest.getBody(), equalTo(getExternalTemplateRepresentation(resource.v2())));
                } else {
                    assertThat(putRequest.getBody(), equalTo(resource.v2()));
                }
                assertHeaders(putRequest, customHeaders);
            }
        }
    }

    // Decodes and compares the actual query string with the expected version
    // filter plus any extra parameters.
    private void assertMonitorVersionQueryString(String query, final Map<String, String> parameters) {
        Map<String, String> expectedQueryStringMap = new HashMap<>();
        RestUtils.decodeQueryString(query, 0, expectedQueryStringMap);

        Map<String, String> resourceVersionQueryStringMap = new HashMap<>();
        RestUtils.decodeQueryString(resourceVersionQueryString(), 0, resourceVersionQueryStringMap);

        Map<String, String> actualQueryStringMap = new HashMap<>();
        actualQueryStringMap.putAll(resourceVersionQueryStringMap);
        actualQueryStringMap.putAll(parameters);

        assertEquals(expectedQueryStringMap, actualQueryStringMap);
    }

    // Asserts the watcher availability check and per-watch GET/PUT (or DELETE) requests.
    private void assertMonitorWatches(final MockWebServer webServer,
                                      final boolean remoteClusterAllowsWatcher,
                                      final boolean currentLicenseAllowsWatcher,
                                      final boolean alreadyExists,
                                      @Nullable final Map<String, String[]> customHeaders,
                                      @Nullable final String basePath) {
        final String pathPrefix = basePathToAssertablePrefix(basePath);
        MockRequest request;

        request = webServer.takeRequest();

        // GET /_xpack
        assertThat(request.getMethod(), equalTo("GET"));
        assertThat(request.getUri().getPath(), equalTo(pathPrefix + "/_xpack"));
        assertThat(request.getUri().getQuery(), equalTo(watcherCheckQueryString()));
        assertHeaders(request, customHeaders);

        if (remoteClusterAllowsWatcher) {
            for (final Tuple<String, String> watch : monitoringWatches()) {
                final String uniqueWatchId = ClusterAlertsUtil.createUniqueWatchId(clusterService(), watch.v1());

                request = webServer.takeRequest();

                // GET / PUT if we are allowed to use it
                if (currentLicenseAllowsWatcher && clusterAlertBlacklist.contains(watch.v1()) == false) {
                    assertThat(request.getMethod(), equalTo("GET"));
                    assertThat(request.getUri().getPath(), equalTo(pathPrefix + "/_watcher/watch/" + uniqueWatchId));
                    assertThat(request.getUri().getQuery(), equalTo(resourceClusterAlertQueryString()));
                    assertHeaders(request, customHeaders);

                    if (alreadyExists == false) {
                        request = webServer.takeRequest();

                        assertThat(request.getMethod(), equalTo("PUT"));
                        assertThat(request.getUri().getPath(), equalTo(pathPrefix + "/_watcher/watch/" + uniqueWatchId));
                        assertThat(request.getUri().getQuery(), equalTo(resourceClusterAlertQueryString()));
                        assertThat(request.getBody(), equalTo(watch.v2()));
                        assertHeaders(request, customHeaders);
                    }
                // DELETE if we're not allowed to use it
                } else {
                    assertThat(request.getMethod(), equalTo("DELETE"));
                    assertThat(request.getUri().getPath(), equalTo(pathPrefix + "/_watcher/watch/" + uniqueWatchId));
                    assertThat(request.getUri().getQuery(), equalTo(resourceClusterAlertQueryString()));
                    assertHeaders(request, customHeaders);
                }
            }
        }
    }

    private MockRequest assertBulk(final MockWebServer webServer) throws Exception {
        return assertBulk(webServer, -1);
    }

    private MockRequest assertBulk(final MockWebServer webServer, final int docs) throws Exception {
        return assertBulk(webServer, docs, null, null);
    }

    // Asserts the POST /_bulk request; docs == -1 skips the document-count check.
    private MockRequest assertBulk(final MockWebServer webServer, final int docs,
                                   @Nullable final Map<String, String[]> customHeaders,
                                   @Nullable final String basePath) throws Exception {
        final String pathPrefix = basePathToAssertablePrefix(basePath);
        final MockRequest request = webServer.takeRequest();

        assertThat(request.getMethod(), equalTo("POST"));
        assertThat(request.getUri().getPath(), equalTo(pathPrefix + "/_bulk"));
        assertThat(request.getUri().getQuery(), equalTo(bulkQueryString()));
        assertHeaders(request, customHeaders);

        if (docs != -1) {
            assertBulkRequest(request.getBody(), docs);
        }

        return request;
    }

    // Verifies each configured custom header appears on the request with exactly
    // the expected set of values (order-insensitive).
    private void assertHeaders(final MockRequest request, final Map<String, String[]> customHeaders) {
        if (customHeaders != null) {
            for (final Map.Entry<String, String[]> entry : customHeaders.entrySet()) {
                final String header = entry.getKey();
                final String[] values = entry.getValue();
                final List<String> headerValues = request.getHeaders().get(header);

                if (values.length > 0) {
                    assertThat(headerValues, hasSize(values.length));
                    assertThat(headerValues, containsInAnyOrder(values));
                }
            }
        }
    }

    private HttpExporter createHttpExporter(final Settings settings) {
        final Exporter.Config config =
                new Exporter.Config("_http", "http", settings, clusterService(), new XPackLicenseState(Settings.EMPTY));

        final Environment env = TestEnvironment.newEnvironment(buildEnvSettings(settings));
        return new HttpExporter(config, new
SSLService(env), new ThreadContext(settings));
    }

    // Opens a bulk through the exporter, adds the docs and flushes, blocking on
    // the async response via a latch.
    private void export(final Settings settings, final Collection<MonitoringDoc> docs) throws Exception {
        // wait until the cluster is ready (this is done at the "Exporters" level)
        assertBusy(() -> assertThat(clusterService().state().version(), not(ClusterState.UNKNOWN_VERSION)));

        try (HttpExporter exporter = createHttpExporter(settings)) {
            final CountDownLatch awaitResponseAndClose = new CountDownLatch(1);

            exporter.openBulk(ActionListener.wrap(exportBulk -> {
                final HttpExportBulk bulk = (HttpExportBulk)exportBulk;

                assertThat("Bulk should never be null after the exporter is ready", bulk, notNullValue());

                final ActionListener<Void> listener = ActionListener.wrap(
                        ignored -> awaitResponseAndClose.countDown(),
                        e -> fail(e.getMessage())
                );

                bulk.add(docs);
                bulk.flush(listener);
            }, e -> fail("Failed to create HttpExportBulk")));

            // block until the bulk responds
            assertTrue(awaitResponseAndClose.await(15, TimeUnit.SECONDS));
        }
    }

    // Builds a random index-recovery monitoring document for the current cluster.
    private MonitoringDoc newRandomMonitoringDoc() {
        String clusterUUID = internalCluster().getClusterName();
        long timestamp = System.currentTimeMillis();
        long intervalMillis = randomNonNegativeLong();
        MonitoringDoc.Node sourceNode = MonitoringTestUtils.randomMonitoringNode(random());

        return new IndexRecoveryMonitoringDoc(clusterUUID, timestamp, intervalMillis, sourceNode,
                new RecoveryResponse(0, 0, 0, null, null));
    }

    private List<MonitoringDoc> newRandomMonitoringDocs(int nb) {
        List<MonitoringDoc> docs = new ArrayList<>(nb);
        for (int i = 0; i < nb; i++) {
            docs.add(newRandomMonitoringDoc());
        }
        return docs;
    }

    // Normalizes a base path to a leading-slash prefix usable in path assertions;
    // null maps to the empty prefix.
    private String basePathToAssertablePrefix(@Nullable String basePath) {
        if (basePath == null) {
            return "";
        }
        basePath = basePath.startsWith("/") ? basePath : "/" + basePath;
        return basePath;
    }

    private String resourceClusterAlertQueryString() {
        return "filter_path=" + CLUSTER_ALERT_VERSION_PARAMETERS.get("filter_path");
    }

    private String resourceVersionQueryString() {
        return "filter_path=" + FILTER_PATH_RESOURCE_VERSION;
    }

    private String watcherCheckQueryString() {
        return "filter_path=" + WATCHER_CHECK_PARAMETERS.get("filter_path");
    }

    private String bulkQueryString() {
        final String pipelineName = MonitoringTemplateUtils.pipelineName(TEMPLATE_VERSION);

        return "pipeline=" + pipelineName + "&filter_path=" + "errors,items.*.error";
    }

    private void enqueueGetClusterVersionResponse(Version v) throws IOException {
        enqueueGetClusterVersionResponse(webServer, v);
    }

    // Queues a 200 response carrying {"version":{"number":"<v>"}}.
    private void enqueueGetClusterVersionResponse(MockWebServer mockWebServer, Version v) throws IOException {
        mockWebServer.enqueue(new MockResponse().setResponseCode(200).setBody(
                BytesReference.bytes(jsonBuilder().startObject().startObject("version")
                        .field("number", v.toString()).endObject().endObject()).utf8ToString()));
    }

    // Queues mock responses for the full resource-setup handshake.
    private void enqueueSetupResponses(final MockWebServer webServer,
                                       final boolean templatesAlreadyExists, final boolean includeOldTemplates,
                                       final boolean pipelineAlreadyExists,
                                       final boolean remoteClusterAllowsWatcher,
                                       final boolean currentLicenseAllowsWatcher,
                                       final boolean watcherAlreadyExists) throws IOException {
        enqueueTemplateResponses(webServer, templatesAlreadyExists, includeOldTemplates);
        enqueuePipelineResponses(webServer, pipelineAlreadyExists);
        enqueueWatcherResponses(webServer, remoteClusterAllowsWatcher, currentLicenseAllowsWatcher, watcherAlreadyExists);
    }

    private void enqueueTemplateResponses(final MockWebServer webServer, final boolean alreadyExists,
                                          final boolean includeOldTemplates) throws IOException {
        if (alreadyExists) {
            enqueueTemplateResponsesExistsAlready(webServer, includeOldTemplates);
        } else {
            enqueueTemplateResponsesDoesNotExistYet(webServer, includeOldTemplates);
        }
    }

    private void enqueueTemplateResponsesDoesNotExistYet(final MockWebServer webServer,
                                                         final boolean includeOldTemplates) throws IOException {
        enqueueVersionedResourceResponsesDoesNotExistYet(monitoringTemplateNames(includeOldTemplates), webServer);
    }

    private void enqueueTemplateResponsesExistsAlready(final MockWebServer webServer,
                                                       final boolean includeOldTemplates) throws IOException {
        enqueueVersionedResourceResponsesExistsAlready(monitoringTemplateNames(includeOldTemplates), webServer);
    }

    private void enqueuePipelineResponses(final MockWebServer webServer, final boolean alreadyExists) throws IOException {
        if (alreadyExists) {
            enqueuePipelineResponsesExistsAlready(webServer);
        } else {
            enqueuePipelineResponsesDoesNotExistYet(webServer);
        }
    }

    private void enqueuePipelineResponsesDoesNotExistYet(final MockWebServer webServer) throws IOException {
        enqueueVersionedResourceResponsesDoesNotExistYet(monitoringPipelineNames(), webServer);
    }

    private void enqueuePipelineResponsesExistsAlready(final MockWebServer webServer) throws IOException {
        enqueueVersionedResourceResponsesExistsAlready(monitoringPipelineNames(), webServer);
    }

    // Each resource gets a "missing / outdated / versionless" GET response plus a 201 PUT response.
    private void enqueueVersionedResourceResponsesDoesNotExistYet(final List<String> names,
                                                                  final MockWebServer webServer) throws IOException {
        for (String resource : names) {
            if (randomBoolean()) {
                enqueueResponse(webServer, 404, "[" + resource + "] does not exist");
            } else if (randomBoolean()) {
                final int version = LAST_UPDATED_VERSION - randomIntBetween(1, 1000000);

                // it DOES exist, but it's an older version
                enqueueResponse(webServer, 200, "{\"" + resource + "\":{\"version\":" + version + "}}");
            } else {
                // no version specified
                enqueueResponse(webServer, 200, "{\"" + resource + "\":{}}");
            }
            enqueueResponse(webServer, 201, "[" + resource + "] created");
        }
    }

    private void enqueueVersionedResourceResponsesExistsAlready(final List<String> names,
                                                                final MockWebServer webServer) throws IOException {
        for (String resource : names) {
            if (randomBoolean()) {
                final int newerVersion = randomFrom(Version.CURRENT.id, LAST_UPDATED_VERSION) + randomIntBetween(1, 1000000);

                // it's a NEWER resource (template / pipeline)
                enqueueResponse(webServer, 200, "{\"" + resource + "\":{\"version\":" + newerVersion + "}}");
            } else {
                // we already put it
                enqueueResponse(webServer, 200, "{\"" + resource + "\":{\"version\":" + LAST_UPDATED_VERSION + "}}");
            }
        }
    }

    private void enqueueWatcherResponses(final MockWebServer webServer,
                                         final boolean remoteClusterAllowsWatcher,
                                         final boolean currentLicenseAllowsWatcher,
                                         final boolean alreadyExists) throws IOException {
        // if the remote cluster doesn't allow watcher, then we only check for it and we're done
        if (remoteClusterAllowsWatcher) {
            // X-Pack exists and Watcher can be used
            enqueueResponse(webServer, 200, "{\"features\":{\"watcher\":{\"available\":true,\"enabled\":true}}}");

            // if we have an active license that's not Basic, then we should add watches
            if (currentLicenseAllowsWatcher) {
                if (alreadyExists) {
                    enqueueClusterAlertResponsesExistsAlready(webServer);
                } else {
                    enqueueClusterAlertResponsesDoesNotExistYet(webServer);
                }
            } else {
                // otherwise we need to delete them from the remote cluster
                enqueueDeleteClusterAlertResponses(webServer);
            }
        } else {
            // X-Pack exists but Watcher just cannot be used
            if (randomBoolean()) {
                final String responseBody = randomFrom(
                        "{\"features\":{\"watcher\":{\"available\":false,\"enabled\":true}}}",
                        "{\"features\":{\"watcher\":{\"available\":true,\"enabled\":false}}}",
                        "{}"
                );

                enqueueResponse(webServer, 200, responseBody);
            } else {
                // X-Pack is not installed
                enqueueResponse(webServer, 404, "{}");
            }
        }
    }

    // NOTE(review): this method continues past the chunk boundary below.
    private void enqueueClusterAlertResponsesDoesNotExistYet(final MockWebServer webServer) throws IOException {
        for (final String watchId : ClusterAlertsUtil.WATCH_IDS) {
            if (clusterAlertBlacklist.contains(watchId)) {
                enqueueDeleteClusterAlertResponse(webServer, watchId);
            } else {
                if (randomBoolean()) {
                    enqueueResponse(webServer, 404, "watch [" + watchId + "] does not exist");
                }
else if (randomBoolean()) { final int version = ClusterAlertsUtil.LAST_UPDATED_VERSION - randomIntBetween(1, 1000000); // it DOES exist, but it's an older version enqueueResponse(webServer, 200, "{\"metadata\":{\"xpack\":{\"version_created\":" + version + "}}}"); } else { // no version specified enqueueResponse(webServer, 200, "{\"metadata\":{\"xpack\":{}}}"); } enqueueResponse(webServer, 201, "[" + watchId + "] created"); } } } private void enqueueClusterAlertResponsesExistsAlready(final MockWebServer webServer) throws IOException { for (final String watchId : ClusterAlertsUtil.WATCH_IDS) { if (clusterAlertBlacklist.contains(watchId)) { enqueueDeleteClusterAlertResponse(webServer, watchId); } else { final int existsVersion; if (randomBoolean()) { // it's a NEWER cluster alert existsVersion = randomFrom(Version.CURRENT.id, ClusterAlertsUtil.LAST_UPDATED_VERSION) + randomIntBetween(1, 1000000); } else { // we already put it existsVersion = ClusterAlertsUtil.LAST_UPDATED_VERSION; } enqueueResponse(webServer, 200, "{\"metadata\":{\"xpack\":{\"version_created\":" + existsVersion + "}}}"); } } } private void enqueueDeleteClusterAlertResponses(final MockWebServer webServer) throws IOException { for (final String watchId : ClusterAlertsUtil.WATCH_IDS) { enqueueDeleteClusterAlertResponse(webServer, watchId); } } private void enqueueDeleteClusterAlertResponse(final MockWebServer webServer, final String watchId) throws IOException { if (randomBoolean()) { enqueueResponse(webServer, 404, "watch [" + watchId + "] did not exist"); } else { enqueueResponse(webServer, 200, "watch [" + watchId + "] deleted"); } } private void enqueueResponse(int responseCode, String body) throws IOException { enqueueResponse(webServer, responseCode, body); } private void enqueueResponse(MockWebServer mockWebServer, int responseCode, String body) throws IOException { mockWebServer.enqueue(new MockResponse().setResponseCode(responseCode).setBody(body)); } private void assertBulkRequest(String 
requestBody, int numberOfActions) throws Exception { BulkRequest bulkRequest = Requests.bulkRequest() .add(new BytesArray(requestBody.getBytes(StandardCharsets.UTF_8)), null, XContentType.JSON); assertThat(bulkRequest.numberOfActions(), equalTo(numberOfActions)); for (DocWriteRequest<?> actionRequest : bulkRequest.requests()) { assertThat(actionRequest, instanceOf(IndexRequest.class)); } } private String getFormattedAddress(MockWebServer server) { return server.getHostName() + ":" + server.getPort(); } private MockWebServer createMockWebServer() throws IOException { MockWebServer server = new MockWebServer(); server.start(); return server; } private List<Tuple<String, String>> monitoringTemplates(final boolean includeOldTemplates) { return includeOldTemplates ? monitoringTemplatesWithOldTemplates() : monitoringTemplates(); } // this can be removed in 7.0 private List<Tuple<String, String>> monitoringTemplatesWithOldTemplates() { final List<Tuple<String, String>> expectedTemplates = monitoringTemplates(); expectedTemplates.addAll( Arrays.stream(MonitoringTemplateUtils.OLD_TEMPLATE_IDS) .map(id -> new Tuple<>(MonitoringTemplateUtils.oldTemplateName(id), MonitoringTemplateUtils.createEmptyTemplate(id))) .collect(Collectors.toList())); return expectedTemplates; } private List<String> monitoringTemplateNames(final boolean includeOldTemplates) { return includeOldTemplates ? 
monitoringTemplateNamesWithOldTemplates() : monitoringTemplateNames(); } // this can be removed in 7.0 protected List<String> monitoringTemplateNamesWithOldTemplates() { final List<String> expectedTemplateNames = monitoringTemplateNames(); expectedTemplateNames.addAll( Arrays.stream(MonitoringTemplateUtils.OLD_TEMPLATE_IDS) .map(MonitoringTemplateUtils::oldTemplateName) .collect(Collectors.toList())); return expectedTemplateNames; } private String getExternalTemplateRepresentation(String internalRepresentation) throws IOException { try (XContentParser parser = XContentFactory.xContent(XContentType.JSON) .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, internalRepresentation)) { XContentBuilder builder = JsonXContent.contentBuilder(); IndexTemplateMetadata.Builder.removeType(IndexTemplateMetadata.Builder.fromXContent(parser, ""), builder); return BytesReference.bytes(builder).utf8ToString(); } } }
package hamster.app.gui; import gnu.io.PortInUseException; import hamster.app.data.HamsterCategoryDataset; import hamster.app.data.HamsterDataManager; import hamster.app.data.HamsterDataManager.ListEntryChangedEvent; import hamster.app.data.HamsterDataset; import hamster.app.data.HamsterTableModel; import hamster.app.data.HamsterXYDataset; import hamster.app.network.NoValidPortSelectedException; import org.jfree.chart.ChartFactory; import org.jfree.chart.ChartPanel; import org.jfree.chart.JFreeChart; import org.jfree.chart.axis.CategoryAxis; import org.jfree.chart.axis.DateAxis; import org.jfree.chart.axis.ValueAxis; import org.jfree.chart.plot.CategoryPlot; import org.jfree.chart.plot.XYPlot; import javax.swing.JFileChooser; import javax.swing.JOptionPane; import javax.swing.JPanel; import javax.swing.JScrollPane; import javax.swing.JTable; import javax.swing.SwingConstants; import javax.swing.table.DefaultTableCellRenderer; import javax.swing.table.JTableHeader; import java.awt.BorderLayout; import java.io.BufferedOutputStream; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.ObjectOutputStream; import java.util.Observable; import java.util.Observer; import java.util.concurrent.TimeoutException; /** * Created by sealc on 12/12/2015. 
*/ public class HamsterDataDisplayPanel extends JPanel implements Observer { private static final long serialVersionUID = 4215967449728196732L; private HamsterDataManager dataMgr; private HamsterTableModel jtableDataModel; private HamsterXYDataset jfreechartDataset; private ChartPanel chartPanel; private JTable dataTable; private String dataSourcePath; private JFileChooser fileChooser; private boolean hasUnsavedData; public HamsterDataDisplayPanel() { super(new BorderLayout()); dataSourcePath = null; final JFreeChart chart = ChartFactory.createXYLineChart( "Hamster Wheel Usage History", "Date/Time of Record", "Distance Travelled (Rounds)", jfreechartDataset); final XYPlot plot = chart.getXYPlot(); final DateAxis domainAxis = new DateAxis("Date/Time of Record"); domainAxis.setVerticalTickLabels(true); plot.setDomainAxis(domainAxis); GUIUtilities.resizeFontsByDPI(plot.getDomainAxis()); GUIUtilities.resizeFontsByDPI(plot.getRangeAxis()); chartPanel = new ChartPanel(chart); dataTable = new JTable(); fileChooser = new JFileChooser(); hasUnsavedData = false; dataTable.setAutoResizeMode(JTable.AUTO_RESIZE_ALL_COLUMNS); dataTable.setRowHeight(dataTable.getRowHeight()*2); DefaultTableCellRenderer renderer = (DefaultTableCellRenderer)dataTable .getDefaultRenderer(Object.class); renderer.setHorizontalAlignment(SwingConstants.CENTER); dataTable.setDefaultRenderer(Object.class, renderer); final JTableHeader header = dataTable.getTableHeader(); renderer = (DefaultTableCellRenderer)header.getDefaultRenderer(); renderer.setHorizontalAlignment(SwingConstants.CENTER); header.setDefaultRenderer(renderer); setVisible(false); setupTable(); final JScrollPane tableScrollPane = new JScrollPane(dataTable); add(chartPanel, BorderLayout.CENTER); add(tableScrollPane, BorderLayout.SOUTH); } public void getDataFromArduino() { new Thread(new Runnable() { @Override public void run() { try { jtableDataModel.getDataFromArduino(); } catch(final PortInUseException e) { 
JOptionPane.showMessageDialog(HamsterDataDisplayPanel.this, "Port is currently in use " + "by " + e.currentOwner, e.getClass().getSimpleName(), JOptionPane.ERROR_MESSAGE); } catch(final NoValidPortSelectedException e) { JOptionPane.showMessageDialog(HamsterDataDisplayPanel.this, "Please select a valid port.", e.getClass().getSimpleName(), JOptionPane.ERROR_MESSAGE); } catch(final TimeoutException e) { JOptionPane.showMessageDialog(HamsterDataDisplayPanel.this, "Arduino took too long to " + "respond.", e.getClass().getSimpleName(), JOptionPane.ERROR_MESSAGE); } } }).start(); } public boolean hasUnsavedData() { return hasUnsavedData; } public void setupTable() { setupTable(""); } public void setupTable(final File srcFile) { setupTable(srcFile.getAbsolutePath()); } public void setupTable(final String srcPath) { if(srcPath == null || srcPath.isEmpty()) { if(jtableDataModel != null && jtableDataModel.isEmpty()) { // We'll recreate the empty table anyway. Do nothing. return; } } try { final HamsterDataManager newDataMgr = HamsterDataManager.constructDataManager(srcPath); final HamsterTableModel newModel = new HamsterTableModel(newDataMgr); final HamsterXYDataset newDataset = new HamsterXYDataset(newDataMgr); if(closeTableAndContinue()) { dataMgr = newDataMgr; jtableDataModel = newModel; jfreechartDataset = newDataset; dataTable.setModel(newModel); chartPanel.getChart().getXYPlot().setDataset(newDataset); this.dataSourcePath = srcPath; dataMgr.addObserver(this); setVisible(true); } } catch(final InstantiationError e) { System.out.println("Failed to open file at: " + srcPath); } } public synchronized boolean saveTable() { switch(fileChooser.showSaveDialog(this)) { case JFileChooser.APPROVE_OPTION: saveToFile(fileChooser.getSelectedFile()); setVisible(true); return true; default: return false; } } public void closeTable() { closeTableAndContinue(); } public boolean closeTableAndContinue() { if(hasUnsavedData() && jtableDataModel != null) { if(dataSourcePath == null || 
dataSourcePath.isEmpty()) { switch(showAskToSaveDialog()) { case JOptionPane.YES_OPTION: if(saveTable()) { closeTableHelper(); return true; } else { return false; } case JOptionPane.CANCEL_OPTION: return false; default: break; } } } closeTableHelper(); return true; } private void closeTableHelper() { setVisible(false); jtableDataModel = null; hasUnsavedData = false; } private int showAskToSaveDialog() { return JOptionPane.showConfirmDialog(this, "Save your new record first?", "Save Record", JOptionPane.YES_NO_CANCEL_OPTION); } private void saveToFile() { saveToFile(null); } private void saveToFile(File file) { if(file == null) { assert dataSourcePath != null && !dataSourcePath.isEmpty(); file = new File(dataSourcePath); } try { final FileOutputStream fileOut = new FileOutputStream(file); final BufferedOutputStream buffer = new BufferedOutputStream(fileOut); final ObjectOutputStream out = new ObjectOutputStream(buffer); out.writeObject(dataMgr.getSerializableData()); out.close(); fileOut.close(); hasUnsavedData = false; } catch(IOException e) { e.printStackTrace(); } } @Override public void update(final Observable o, final Object arg) { if(o == dataMgr) { if(arg instanceof ListEntryChangedEvent) { hasUnsavedData = true; if(dataSourcePath != null && !dataSourcePath.isEmpty()) { saveToFile(new File(dataSourcePath)); } } } } }
package org.toobsframework.transformpipeline.domain; import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.Log; import org.apache.xml.utils.DefaultErrorHandler; import javax.xml.transform.OutputKeys; import javax.xml.transform.Source; import javax.xml.transform.TransformerFactory; import javax.xml.transform.Transformer; import javax.xml.transform.dom.DOMSource; import javax.xml.transform.stream.StreamResult; import javax.xml.transform.stream.StreamSource; import javax.xml.transform.TransformerConfigurationException; import javax.xml.transform.TransformerException; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStreamReader; import java.io.UnsupportedEncodingException; import java.util.Properties; import java.util.Vector; import java.util.Iterator; import java.util.Date; import java.util.HashMap; import java.util.Map; /** */ public class StaticXSLTransformer extends BaseXMLTransformer { /** * To get the logger instance */ private static Log log = LogFactory.getLog(StaticXSLTransformer.class); /** * Implementation of the transform() method. This method first checks some * input parameters. Then it creates a Source object and invoces the * {@link #makeTransformation makeTransformation()}method. 
* */ @SuppressWarnings("unchecked") public Vector transform( Vector inputXSLs, Vector inputXMLs, HashMap inputParams) throws XMLTransformerException { if (log.isDebugEnabled()) { log.debug("TRANSFORM XML STARTED"); log.debug("Get input XMLs"); Iterator iter = inputParams.entrySet().iterator(); while (iter.hasNext()) { Map.Entry entry = (Map.Entry)iter.next(); log.debug(" Transform Param - name: " + entry.getKey() + " value: " + entry.getValue()); } } Iterator XSLIterator = inputXSLs.iterator(); InputStreamReader reader = null; while (XSLIterator.hasNext()) { Iterator XMLIterator = inputXMLs.iterator(); String xslFile = (String) XSLIterator.next(); Source xslSource = null; try { xslSource = uriResolver.resolve(xslFile + ".xsl", ""); if (log.isDebugEnabled()) { log.debug("XSL Source: " + xslSource.getSystemId()); } } catch (TransformerException e) { throw new XMLTransformerException("xsl " + xslFile + " cannot be loaded"); } if (xslSource == null) { throw new XMLTransformerException("StreamSource is null"); } Vector resultingXMLs = new Vector(); //String xmlString = ""; ByteArrayInputStream xmlInputStream = null; ByteArrayOutputStream xmlOutputStream = null; ByteArrayOutputStream outXML = null; while (XMLIterator.hasNext()) { try { Object xmlObject = XMLIterator.next(); if (xmlObject instanceof org.w3c.dom.Node) { TransformerFactory tf=TransformerFactory.newInstance(); //identity Transformer t=tf.newTransformer(); t.setOutputProperty(OutputKeys.INDENT, "yes"); ByteArrayOutputStream os = new ByteArrayOutputStream(); t.transform(new DOMSource( (org.w3c.dom.Node)xmlObject ), new StreamResult( os )); xmlInputStream = new ByteArrayInputStream(os.toByteArray()); //xmlString = os.toString("UTF-8"); if (log.isDebugEnabled()) { log.debug("Input XML for " + xslSource.toString() + "( " + xslFile + ") : " + os.toString("UTF-8")); } } else { //xmlString = (String) xmlObject; xmlInputStream = new ByteArrayInputStream(((String) xmlObject).getBytes("UTF-8")); if 
(log.isDebugEnabled()) { log.debug("Input XML for " + xslSource.toString() + "( " + xslFile + ") : " + xmlObject); } } //StringReader xmlReader = new StringReader(xmlString); xmlOutputStream = new ByteArrayOutputStream(); //StringWriter xmlWriter = new StringWriter(); StreamSource xmlSource = new StreamSource(xmlInputStream); StreamResult xmlResult = new StreamResult(xmlOutputStream); doTransform( xslSource, xmlSource, inputParams, xmlResult, xslFile); outXML = (ByteArrayOutputStream) xmlResult.getOutputStream(); //log.debug("Output XML: " + outXML.toString("UTF-8")); resultingXMLs.add(outXML.toString("UTF-8")); } catch (UnsupportedEncodingException uee) { log.error("Error creating output string", uee); throw new XMLTransformerException(uee); } catch (TransformerException te) { log.error("Error creating input xml: " + te.getMessage(), te); throw new XMLTransformerException(te); } finally { try { if (reader != null) { try { reader.close(); reader = null; } catch (IOException ignore) { } } if (xmlInputStream != null) { xmlInputStream.close(); xmlInputStream = null; } if (xmlOutputStream != null) { xmlOutputStream.close(); xmlOutputStream = null; } if (outXML != null) { outXML.close(); outXML = null; } } catch (IOException ex) { } } } inputXMLs = resultingXMLs; } return inputXMLs; } /** * This method actually does all the XML Document transformation. * <p> * @param xslSource * holds the xslFile * @param xmlSource * holds the xmlFile * @param params * holds the params needed to do this transform * @param xmlResult * holds the streamResult of the transform. */ @SuppressWarnings("unchecked") protected void doTransform( Source xslSource, Source xmlSource, HashMap params, StreamResult xmlResult, String xslFile) throws XMLTransformerException { try { // 1. Instantiate a TransformerFactory. TransformerFactory tFactory = TransformerFactory.newInstance(); // set the URI Resolver for the transformer factory setFactoryResolver(tFactory); // 2. 
Use the TransformerFactory to process the stylesheet Source and // generate a Transformer. Transformer transformer = tFactory.newTransformer(xslSource); transformer.setErrorListener(new DefaultErrorHandler(true)); // 2.2 Set character encoding for all transforms to UTF-8. transformer.setOutputProperty("encoding", "UTF-8"); // 2.5 Set Parameters necessary for transformation. if(params != null) { Iterator paramIt = params.entrySet().iterator(); while (paramIt.hasNext()) { Map.Entry thisParam = (Map.Entry) paramIt.next(); transformer.setParameter( (String) thisParam.getKey(), (String) thisParam.getValue()); } } // 3. Use the Transformer to transform an XML Source and send the // output to a Result object. Date timer = new Date(); transformer.transform(xmlSource, xmlResult); Date timer2 = new Date(); if (log.isDebugEnabled()) { long diff = timer2.getTime() - timer.getTime(); log.debug("Time to transform: " + diff + " mS XSL: " + xslFile); } } catch(TransformerConfigurationException tce) { log.error(tce.toString(), tce); throw new XMLTransformerException(tce.toString()); } catch(TransformerException te) { throw new XMLTransformerException(te); } } public void setOutputProperties(Properties outputProperties) { } }
package org.vizzini.illyriad.map.swingui; import java.awt.Component; import java.awt.Container; import java.awt.Dimension; import java.awt.GridLayout; import java.awt.Insets; /** * Provides a layout manager which extends GridLayout to allow the cells to be different sizes. Taken from Java World * Tip #121: Flex Your Grid Layout. */ public final class GridLayout2 extends GridLayout { /** * Construct this object. */ public GridLayout2() { this(1, 0, 0, 0); } /** * Construct this object with the given parameters. * * @param rows Number of rows, with the value zero meaning any number of rows. * @param cols Number of columns, with the value zero meaning any number of columns. */ public GridLayout2(final int rows, final int cols) { this(rows, cols, 0, 0); } /** * Construct this object with the given parameters. * * @param rows Number of rows, with the value zero meaning any number of rows. * @param cols Number of columns, with the value zero meaning any number of columns. * @param hgap Horizontal gap. * @param vgap Vertical gap. 
*/ public GridLayout2(final int rows, final int cols, final int hgap, final int vgap) { super(rows, cols, hgap, vgap); } /** * @see java.awt.GridLayout#layoutContainer(java.awt.Container) */ @Override public void layoutContainer(final Container parent) { synchronized (parent.getTreeLock()) { final Insets insets = parent.getInsets(); final int ncomponents = parent.getComponentCount(); int nrows = getRows(); int ncols = getColumns(); if (ncomponents == 0) { return; } if (nrows > 0) { ncols = ((ncomponents + nrows) - 1) / nrows; } else { nrows = ((ncomponents + ncols) - 1) / ncols; } final int hgap = getHgap(); final int vgap = getVgap(); // scaling factors final Dimension pd = preferredLayoutSize(parent); final double sw = (1.0 * parent.getWidth()) / pd.width; final double sh = (1.0 * parent.getHeight()) / pd.height; // scale final int[] w = new int[ncols]; final int[] h = new int[nrows]; for (int i = 0; i < ncomponents; i++) { final int r = i / ncols; final int c = i % ncols; final Component comp = parent.getComponent(i); final Dimension d = comp.getPreferredSize(); d.width = (int)(sw * d.width); d.height = (int)(sh * d.height); if (w[c] < d.width) { w[c] = d.width; } if (h[r] < d.height) { h[r] = d.height; } } for (int c = 0, x = insets.left; c < ncols; c++) { for (int r = 0, y = insets.top; r < nrows; r++) { final int i = (r * ncols) + c; if (i < ncomponents) { parent.getComponent(i).setBounds(x, y, w[c], h[r]); } y += (h[r] + vgap); } x += (w[c] + hgap); } } } /** * @see java.awt.GridLayout#minimumLayoutSize(java.awt.Container) */ @Override public Dimension minimumLayoutSize(final Container parent) { synchronized (parent.getTreeLock()) { final Insets insets = parent.getInsets(); final int ncomponents = parent.getComponentCount(); int nrows = getRows(); int ncols = getColumns(); if (nrows > 0) { ncols = ((ncomponents + nrows) - 1) / nrows; } else { nrows = ((ncomponents + ncols) - 1) / ncols; } final int[] w = new int[ncols]; final int[] h = new int[nrows]; for 
(int i = 0; i < ncomponents; i++) { final int r = i / ncols; final int c = i % ncols; final Component comp = parent.getComponent(i); final Dimension d = comp.getMinimumSize(); if (w[c] < d.width) { w[c] = d.width; } if (h[r] < d.height) { h[r] = d.height; } } int nw = 0; for (int j = 0; j < ncols; j++) { nw += w[j]; } int nh = 0; for (int i = 0; i < nrows; i++) { nh += h[i]; } return new Dimension(insets.left + insets.right + nw + ((ncols - 1) * getHgap()), insets.top + insets.bottom + nh + ((nrows - 1) * getVgap())); } } /** * @see java.awt.GridLayout#preferredLayoutSize(java.awt.Container) */ @Override public Dimension preferredLayoutSize(final Container parent) { synchronized (parent.getTreeLock()) { final Insets insets = parent.getInsets(); final int ncomponents = parent.getComponentCount(); int nrows = getRows(); int ncols = getColumns(); if (nrows > 0) { ncols = ((ncomponents + nrows) - 1) / nrows; } else { nrows = ((ncomponents + ncols) - 1) / ncols; } final int[] w = new int[ncols]; final int[] h = new int[nrows]; for (int i = 0; i < ncomponents; i++) { final int r = i / ncols; final int c = i % ncols; final Component comp = parent.getComponent(i); final Dimension d = comp.getPreferredSize(); if (w[c] < d.width) { w[c] = d.width; } if (h[r] < d.height) { h[r] = d.height; } } int nw = 0; for (int j = 0; j < ncols; j++) { nw += w[j]; } int nh = 0; for (int i = 0; i < nrows; i++) { nh += h[i]; } return new Dimension(insets.left + insets.right + nw + ((ncols - 1) * getHgap()), insets.top + insets.bottom + nh + ((nrows - 1) * getVgap())); } } }
package com.eclipsesource.rap.punchy.ece13; import static com.eclipsesource.rap.punchy.ece13.ResourceLoaderUtil.readFile; import org.eclipse.jface.fieldassist.ControlDecoration; import org.eclipse.jface.fieldassist.FieldDecoration; import org.eclipse.jface.fieldassist.FieldDecorationRegistry; import org.eclipse.jface.viewers.ArrayContentProvider; import org.eclipse.jface.viewers.ColumnLabelProvider; import org.eclipse.jface.viewers.ColumnViewerToolTipSupport; import org.eclipse.jface.viewers.TableViewer; import org.eclipse.jface.viewers.TableViewerColumn; import org.eclipse.rap.rwt.RWT; import org.eclipse.rap.rwt.lifecycle.WidgetUtil; import org.eclipse.rap.rwt.scripting.ClientListener; import org.eclipse.swt.SWT; import org.eclipse.swt.graphics.Color; import org.eclipse.swt.graphics.Image; import org.eclipse.swt.layout.GridData; import org.eclipse.swt.layout.GridLayout; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Canvas; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Control; import org.eclipse.swt.widgets.Display; import org.eclipse.swt.widgets.Event; import org.eclipse.swt.widgets.Listener; import org.eclipse.swt.widgets.ProgressBar; import org.eclipse.swt.widgets.Scale; import org.eclipse.swt.widgets.TabFolder; import org.eclipse.swt.widgets.TabItem; import org.eclipse.swt.widgets.Table; import org.eclipse.swt.widgets.Text; import org.eclipse.swt.widgets.ToolBar; import org.eclipse.swt.widgets.ToolItem; import com.eclipsesource.rap.punchy.Presentation; public class RAP_2_2_Slides { public static class DigitsOnlyListener implements Listener { @Override public void handleEvent( Event event ) { Text text = ( Text )event.widget; if( text.getText().matches( "^[0-9]*$" ) == false ) { text.setBackground( new Color( Display.getCurrent(), 255, 255, 128 ) ); } else { text.setBackground( null ); } } } public static void createSlides( Presentation presentation ) { new EceSlide( presentation ) { @Override public String 
getTitle() { return "RAP 2.2"; } @Override protected void createContent( Composite slideComposite ) { super.createContent( slideComposite ); spacer( 25 ); list( "Scheduled for December 2013", "Themes", new String[] { "RWT Scripting", "New ToolTips", "Row Templates" } ); } }; new EceSlide( presentation ) { @Override public String getTitle() { return "RWT Scripting"; } @Override protected void createContent( Composite slideComposite ) { super.createContent( slideComposite ); spacer( 20 ); list( "Formerly \"ClientScripting\" (RAP Incubator project)", "SWT-Style event handler written in JavaScript", "Executed in the Browser (no network latency)" ); } }; new EceSlide( presentation ) { @Override public String getTitle() { return "RWT Scripting Example"; } @Override protected void createContent( Composite slideComposite ) { super.createContent( slideComposite ); spacer( 10 ); list( "Input Validation (digits only) in Java:" ); setPaddingLeft( 60 ); Text text1 = new Text( slideComposite, SWT.BORDER ); text1.addListener( SWT.Modify, new DigitsOnlyListener() ); flow( text1 ); setPaddingLeft( 30 ); spacer( 10 ); list( "Input Validation with Scripting:" ); setPaddingLeft( 60 ); Text text2 = new Text( slideComposite, SWT.BORDER ); text2.addListener( SWT.Modify, new ClientListener( readFile( "DigitsOnly.js" ) ) ); flow( text2 ); setPaddingLeft( 30 ); spacer( 10 ); list( "Event Registration:" ); setPaddingLeft( 60 ); snippet( "java", SWT.DEFAULT, 120, " Text text1 = new Text( slideComposite, SWT.BORDER );\n" + " text1.addListener( SWT.Modify, new DigitsOnlyListener() );\n\n" + " Text text2 = new Text( slideComposite, SWT.BORDER );\n" + " text2.addListener( SWT.Modify, new ClientListener( readFile( \"DigitsOnly.js\" ) ) );\n" ); } }; new EceSlide( presentation ) { @Override public String getTitle() { return "RWT Scripting Example Source"; } @Override protected void createContent( Composite slideComposite ) { super.createContent( slideComposite ); spacer( 10 ); text( "Listener in 
Java:" ); setPaddingLeft( 60 ); snippet( "java", SWT.DEFAULT, 160, " public void handleEvent( Event event ) {\n" + " Text text = ( Text )event.widget;\n" + " if( text.getText().matches( \"^[0-9]*$\" ) == false ) {\n" + " text.setBackground( new Color( Display.getCurrent(), 255, 255, 128 ) );\n" + " } else {\n" + " text.setBackground( null );\n" + " }\n" + " }" ); setPaddingLeft( 30 ); spacer( 20 ); text( "Listener in JavaScript:" ); setPaddingLeft( 60 ); snippet( "javascript", SWT.DEFAULT, 160, readFile( "DigitsOnly.js" ) ); } }; new EceSlide( presentation ) { @Override public String getTitle() { return "RWT Scripting"; } @Override protected void createContent( Composite slideComposite ) { super.createContent( slideComposite ); spacer( 20 ); list( "Currently 9 widgets and 19 event types", "Scripting can not create/dispose widgets", "Some more Examples:" ); spacer( 20 ); Composite parent = new Composite( slideComposite, SWT.NONE ); parent.setLayout( new GridLayout( 2, false ) ); addDateFieldExample( parent ); addNumpadExample( parent ); addFocusSwitchExample( parent ); addCanvasExample( parent ); flow( "transparent", parent ); } }; new EceSlide( presentation ) { @Override public String getTitle() { return "New ToolTips"; } @Override protected void createContent( Composite slideComposite ) { super.createContent( slideComposite ); spacer( 20 ); list( "New (default) look with pointer", "Revised, widget-specific behavior", "HTML/markup support enabled" ); spacer( 20 ); Composite parent = new Composite( slideComposite, SWT.NONE ); GridLayout layout = new GridLayout( 3, false ); layout.verticalSpacing = 10; layout.horizontalSpacing = 10; parent.setLayout( layout ); ToolBar toolBarLeft = new ToolBar( parent, SWT.VERTICAL | SWT.BORDER ); GridData layoutData = new GridData( SWT.DEFAULT, SWT.FILL, false, true ); layoutData.verticalSpan = 6; toolBarLeft.setLayoutData( layoutData ); fillToolBar( toolBarLeft ); ToolBar toolBarTop = new ToolBar( parent, SWT.HORIZONTAL | 
SWT.BORDER ); GridData layoutData1 = new GridData( SWT.LEFT, SWT.DEFAULT, false, false ); toolBarTop.setLayoutData( layoutData1 ); fillToolBar( toolBarTop ); createTabFolder( parent ); createButton( parent, "Button" ); styleAs( "transparent", createCheck( parent ) ); createText( parent ); ProgressBar bar = new ProgressBar( parent, SWT.HORIZONTAL ); bar.setSelection( 40 ); bar.setToolTipText( "hor" ); addBarListener( bar ); Scale scale = new Scale( parent, SWT.HORIZONTAL ); scale.setToolTipText( "Scale" ); styleAs( "transparent", scale ); addScaleListener( scale ); flow( "transparent", parent ); } }; new EceSlide( presentation ) { @Override public String getTitle() { return "RowTemplates"; } @Override protected void createContent( Composite slideComposite ) { super.createContent( slideComposite ); spacer( 20 ); list( "Customize how tree/table items are rendered", "Make text/images clickable", "Pure Java API, no HTML/CSS/JS", "Possible future support for other widgets", "Tabris support" ); } }; new EceSlide( presentation ) { @Override public String getTitle() { return "RowTemplates Example"; } @Override protected void createContent( Composite slideComposite ) { super.createContent( slideComposite ); Composite example = new Composite( slideComposite, SWT.NONE ); new RowTemplateDemo().createContents( example ); flow( "transparent", example, SWT.DEFAULT, 550 ); } }; } ////////////// // Helper private static void fillToolBar( ToolBar toolbarTop ) { createToolItem( toolbarTop, "ICON_ERROR" ); createToolItem( toolbarTop, "ICON_INFORMATION" ); createToolItem( toolbarTop, "ICON_QUESTION" ); createToolItem( toolbarTop, "ICON_WARNING" ); createToolItem( toolbarTop, "ICON_WORKING" ); } private static void createToolItem( ToolBar toolbar, String icon ) { try { int id = SWT.class.getField( icon ).getInt( null ); ToolItem item = new ToolItem( toolbar, SWT.CHECK ); item.setImage( Display.getCurrent().getSystemImage( id ) ); item.setToolTipText( icon ); } catch( 
IllegalArgumentException e ) { e.printStackTrace(); } catch( SecurityException e ) { e.printStackTrace(); } catch( IllegalAccessException e ) { e.printStackTrace(); } catch( NoSuchFieldException e ) { e.printStackTrace(); } } private static void addFocusSwitchExample( Composite parent ) { Composite area = new Composite( parent, SWT.NONE ); area.setLayoutData( new GridData( SWT.FILL, SWT.CENTER, true, false ) ); GridLayout layout = new GridLayout( 2, false ); layout.marginHeight = 0; layout.marginWidth = 0; area.setLayout( layout ); Text text = new Text( area, SWT.BORDER ); text.setLayoutData( new GridData( SWT.FILL, SWT.CENTER, true, false ) ); Text next = new Text( area, SWT.BORDER ); next.setLayoutData( new GridData( SWT.FILL, SWT.CENTER, true, false ) ); CustomBehaviors.addFocusNextBehavior( text, next ); CustomBehaviors.addFocusPreviousBehavior( next, text ); } private static void addDateFieldExample( Composite parent ) { final Text text = new Text( parent, SWT.BORDER ); CustomBehaviors.addDateFieldBehavior( text ); //addDateValidator( text ); text.setLayoutData( new GridData( SWT.FILL, SWT.CENTER, true, false ) ); } private static void addCanvasExample( Composite parent ) { Canvas canvas = new Canvas( parent, SWT.BORDER ); canvas.setLayoutData( new GridData( SWT.FILL, SWT.FILL, true, true ) ); CustomBehaviors.addPaintingBehavior( canvas ); } private static void addNumpadExample( Composite parent ) { Composite pad = new Composite( parent, SWT.BORDER ); pad.setLayout( new GridLayout( 3, true ) ); GridData padLayoutData = new GridData( SWT.LEFT, SWT.TOP, false, false ); padLayoutData.verticalSpan = 3; pad.setLayoutData( padLayoutData ); Text text = new Text( pad, SWT.BORDER ); GridData texLayoutData = new GridData( SWT.FILL, SWT.TOP, true, false ); texLayoutData.horizontalSpan = 3; text.setLayoutData( texLayoutData ); text.setEditable( false ); createNumKeys( pad, text ); } private static void createNumKeys( Composite parent, Text text ) { 
WidgetUtil.registerDataKeys( "textWidget" ); WidgetUtil.registerDataKeys( "numValue" ); int[] numbers = new int[]{ 7, 8, 9, 4, 5, 6, 1, 2, 3 }; ClientListener listener = new ClientListener( ResourceLoaderUtil.readFile( "NumKey.js" ) ); for( int i = 0; i < numbers.length; i++ ) { createNumButton( parent, text, numbers[ i ], listener ); } createNumButton( parent, text, -1, listener ).setText( "C" ); createNumButton( parent, text, 0, listener ); createNumButton( parent, text, -2, listener ).setText( "." ); } private static Button createNumButton( Composite parent, Text text, int number, Listener listener ) { Button button = new Button( parent, SWT.PUSH ); button.setText( String.valueOf( number ) ); button.setLayoutData( new GridData( 40, 35 ) ); button.setData( "textWidget", WidgetUtil.getId( text ) ); button.setData( "numValue", Integer.valueOf( number ) ); button.addListener( SWT.MouseDown, listener ); return button; } private static void createTabFolder( Composite composite ) { TabFolder folder = new TabFolder( composite, SWT.NONE ); GridData layoutData = new GridData( SWT.FILL, SWT.FILL, true, true ); layoutData.verticalSpan = 6; folder.setLayoutData( layoutData ); TabItem tabItem1 = new TabItem( folder, SWT.NONE ); tabItem1.setText( "Create" ); tabItem1.setToolTipText( "ToolTips" ); TabItem tabItem2 = new TabItem( folder, SWT.NONE ); tabItem2.setText( "Edit" ); tabItem2.setToolTipText( "appear" ); TabItem tabItem3 = new TabItem( folder, SWT.NONE ); tabItem3.setText( "Publish" ); tabItem3.setToolTipText( "quickliy..." 
); tabItem1.setControl( createTableViewer( folder ) ); folder.setSelection( 0 ); } private static Control createTableViewer( Composite composite ) { TableViewer table = new TableViewer( composite ); table.getControl().setToolTipText( "Table" ); GridData layoutData = new GridData( SWT.FILL, SWT.FILL, true, true ); //layoutData.horizontalSpan = 3; table.getControl().setLayoutData( layoutData ); table.setContentProvider( new ArrayContentProvider() ); ColumnViewerToolTipSupport.enableFor( table ); ( ( Table )table.getControl() ).setHeaderVisible( true ); TableViewerColumn columnViewer = new TableViewerColumn( table, SWT.NONE ); columnViewer.getColumn().setWidth( 100 ); columnViewer.getColumn().setToolTipText( "a column" ); columnViewer.getColumn().setText( "Col 1" ); columnViewer.setLabelProvider( new ColumnLabelProvider(){ @Override public String getToolTipText(Object element) { return "More about: " + element.toString(); }; @Override public String getText(Object element) { return element.toString(); }; } ); TableViewerColumn columnViewerTwo = new TableViewerColumn( table, SWT.NONE ); columnViewerTwo.getColumn().setWidth( 170 ); columnViewerTwo.getColumn().setText( "Col 2" ); columnViewerTwo.setLabelProvider( new ColumnLabelProvider(){ @Override public String getToolTipText(Object element) { return "Even more about " + element.toString(); }; @Override public String getText(Object element) { return "Another " + element.toString(); }; } ); TableViewerColumn columnViewer3= new TableViewerColumn( table, SWT.NONE ); columnViewer3.getColumn().setWidth( 100 ); columnViewer3.getColumn().setText( "Col 3" ); columnViewer3.setLabelProvider( new ColumnLabelProvider() ); table.setInput( new String[]{ "Data One ", "Data Two", "Data Three" } ); return table.getControl(); } private static void addScaleListener( Scale scale ) { String script = "function handleEvent( event ) { " + " event.widget.setToolTipText( event.widget.getSelection() + \"%\" ); " + "}"; scale.addListener( 
SWT.Selection, new ClientListener( script ) ); } private static void addBarListener( final ProgressBar bar ) { bar.addListener( SWT.MouseDown, new Listener() { @Override public void handleEvent( Event event ) { int sel = bar.getSelection(); sel += 10; if( sel > bar.getMaximum() ) { sel = 0; } bar.setSelection( sel ); bar.setToolTipText( sel + "%" ); } } ); } private static Button createButton( Composite composite, String text ) { Button ok = new Button( composite, SWT.PUSH ); ok.setText( text ); ok.setData( RWT.TOOLTIP_MARKUP_ENABLED, Boolean.TRUE ); ok.setToolTipText( "This is an OK tooltip<br/><br/><i>with</i> <b>some</b> <big>markup</big>" ); return ok; } private static Button createCheck( Composite composite ) { Button check = new Button( composite, SWT.CHECK ); check.setText( "check this out" ); check.setToolTipText( "yohoo!" ); return check; } private static Text createText( Composite composite ) { Text text = new Text( composite, SWT.BORDER ); text.setText( "Text with text in the text" ); text.setToolTipText( "I have nothing to say" ); ControlDecoration decoration = new ControlDecoration( text, SWT.RIGHT | SWT.TOP ); decoration.setDescriptionText( "Deco" ); decoration.setMarginWidth( 2 ); Image warningImage = getDecorationImage( FieldDecorationRegistry.DEC_WARNING ); decoration.setImage( warningImage ); return text; } private static Image getDecorationImage( String id ) { FieldDecorationRegistry registry = FieldDecorationRegistry.getDefault(); FieldDecoration decoration = registry.getFieldDecoration( id ); return decoration.getImage(); } }
/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */
package com.kanishka.virustotalv2;

import com.google.gson.Gson;
import com.kanishka.net.commons.BasicHTTPRequestImpl;
import com.kanishka.net.commons.HTTPRequest;
import com.kanishka.net.exception.RequestNotComplete;
import com.kanishka.net.model.MultiPartEntity;
import com.kanishka.net.model.RequestMethod;
import com.kanishka.net.model.Response;
import com.kanishka.virustotal.dto.DomainReport;
import com.kanishka.virustotal.dto.FileScanReport;
import com.kanishka.virustotal.dto.GeneralResponse;
import com.kanishka.virustotal.dto.IPAddressReport;
import com.kanishka.virustotal.dto.ScanInfo;
import com.kanishka.virustotal.exception.APIKeyNotFoundException;
import com.kanishka.virustotal.exception.InvalidArguentsException;
import com.kanishka.virustotal.exception.QuotaExceededException;
import com.kanishka.virustotal.exception.UnauthorizedAccessException;
import org.apache.http.entity.mime.content.FileBody;
import org.apache.http.entity.mime.content.StringBody;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

/**
 * Default implementation of the VirusTotal public API v2 client.
 *
 * <p>Each public method builds the request payload, delegates the HTTP
 * exchange and status handling to {@link #fetchResponseBody}, and maps the
 * returned JSON onto the matching DTO with Gson. The API key is read once
 * from {@link VirusTotalConfig} at construction time.</p>
 *
 * @author kdkanishka@gmail.com
 */
public class VirustotalPublicV2Impl implements VirustotalPublicV2 {

    private Gson gsonProcessor;
    private String apiKey;
    private static final String API_KEY_FIELD = "apikey";
    private static final String RESOURCE_FIELD = "resource";
    private static final String ERR_MSG_EXCEED_MAX_REQ_PM = "Exceeded maximum"
            + " number of requests per minute, Please try again later.";
    private static final String ERR_MSG_INVALID_API_KEY = "Invalid api key";
    private static final String ERR_MSG_API_KEY_NOT_FOUND =
            "API Key is not set. Please set api key.\nSample :"
            + " VirusTotalConfig.getConfigInstance()."
            + "setVirusTotalAPIKey(\"APIKEY\")";
    private static final String ERR_MSG_FILE_NOT_FOUND =
            "Could not access file, either the file may not exists or not"
            + " accessible!";
    private static final String ERR_MSG_INCORRECT_PARAM =
            "Incorrect parameter \'%s\', resource should be an array"
            + " with at least one element";
    private static final String ERR_MSG2_INCORRECT_PARAM =
            "Incorrect parameter \'%s\' , "
            + "maximum number(%d) of %s per request has been exceeded.";
    private static final String ERR_MSG3_INCORRECT_PARAM =
            "Incorrect parameter '%s', it should be a valid %s";
    private static final String ERR_COMMENTING = "Could not publish the "
            + "comment," + " API error occured!";
    public static final String URLS_LITERAL = "urls";

    private HTTPRequest httpRequestObject;

    /**
     * Creates a client backed by the default HTTP transport.
     *
     * @throws APIKeyNotFoundException if no API key has been configured
     */
    public VirustotalPublicV2Impl() throws APIKeyNotFoundException {
        initialize();
        httpRequestObject = new BasicHTTPRequestImpl();
    }

    /**
     * Creates a client backed by the given HTTP transport (used for testing).
     *
     * @param httpRequestObject transport used to perform HTTP exchanges
     * @throws APIKeyNotFoundException if no API key has been configured
     */
    public VirustotalPublicV2Impl(HTTPRequest httpRequestObject)
            throws APIKeyNotFoundException {
        initialize();
        this.httpRequestObject = httpRequestObject;
    }

    /** Reads the API key from the global config and sets up the JSON mapper. */
    private void initialize() throws APIKeyNotFoundException {
        gsonProcessor = new Gson();
        apiKey = VirusTotalConfig.getConfigInstance().getVirusTotalAPIKey();
        if (apiKey == null || apiKey.length() == 0) {
            throw new APIKeyNotFoundException(ERR_MSG_API_KEY_NOT_FOUND);
        }
    }

    /**
     * Uploads a file to VirusTotal for scanning.
     *
     * @param fileToScan readable file to submit
     * @return scan ticket information; an empty {@link ScanInfo} on an
     *         unexpected (non-success, non-quota) status
     * @throws FileNotFoundException     if the file cannot be read
     * @throws UnauthorizedAccessException if the API key is rejected
     * @throws QuotaExceededException    if the per-minute quota is exhausted
     */
    @Override
    public ScanInfo scanFile(File fileToScan) throws IOException,
            UnauthorizedAccessException, QuotaExceededException {
        if (!fileToScan.canRead()) {
            throw new FileNotFoundException(ERR_MSG_FILE_NOT_FOUND);
        }
        List<MultiPartEntity> multiParts = new ArrayList<MultiPartEntity>();
        multiParts.add(new MultiPartEntity("file", new FileBody(fileToScan)));
        multiParts.add(apiKeyPart());
        // BUGFIX: the file/scan endpoint requires POST; the previous code sent
        // the multipart upload with GET, which the service rejects.
        String body = fetchResponseBody(URI_VT2_FILE_SCAN, RequestMethod.POST,
                multiParts);
        if (body == null) {
            return new ScanInfo();
        }
        return gsonProcessor.fromJson(body, ScanInfo.class);
    }

    /**
     * Asks VirusTotal to re-scan previously submitted resources.
     *
     * @param resources non-empty array of resource identifiers (hashes)
     * @return one {@link ScanInfo} per resource, or {@code null} on an
     *         unexpected status
     * @throws InvalidArguentsException if {@code resources} is null or empty
     */
    @Override
    public ScanInfo[] reScanFiles(String[] resources) throws IOException,
            UnauthorizedAccessException, InvalidArguentsException,
            QuotaExceededException {
        if (resources == null || resources.length == 0) {
            throw new InvalidArguentsException(
                    String.format(ERR_MSG_INCORRECT_PARAM, "resources"));
        }
        List<MultiPartEntity> multiParts = new ArrayList<MultiPartEntity>();
        multiParts.add(new MultiPartEntity(RESOURCE_FIELD,
                new StringBody(join(resources, ", "))));
        multiParts.add(apiKeyPart());
        String body = fetchResponseBody(URI_VT2_RESCAN, RequestMethod.POST,
                multiParts);
        if (body == null) {
            return null;
        }
        // A single-resource request returns a bare object, not an array.
        if (resources.length > 1) {
            return gsonProcessor.fromJson(body, ScanInfo[].class);
        }
        return new ScanInfo[]{gsonProcessor.fromJson(body, ScanInfo.class)};
    }

    /**
     * Retrieves the scan report for a single resource.
     *
     * @param resource hash or scan id of the resource
     * @return the report; an empty {@link FileScanReport} on an unexpected
     *         status
     */
    @Override
    public FileScanReport getScanReport(String resource) throws IOException,
            UnauthorizedAccessException, QuotaExceededException {
        List<MultiPartEntity> multiParts = new ArrayList<MultiPartEntity>();
        multiParts.add(apiKeyPart());
        multiParts.add(new MultiPartEntity(RESOURCE_FIELD,
                new StringBody(resource)));
        String body = fetchResponseBody(URI_VT2_FILE_SCAN_REPORT,
                RequestMethod.POST, multiParts);
        if (body == null) {
            return new FileScanReport();
        }
        return gsonProcessor.fromJson(body, FileScanReport.class);
    }

    /**
     * Retrieves scan reports for several resources in one request.
     *
     * @param resources non-empty array of resource identifiers
     * @return one report per resource, or {@code null} on an unexpected status
     * @throws InvalidArguentsException if {@code resources} is null or empty
     */
    @Override
    public FileScanReport[] getScanReports(String[] resources)
            throws IOException, UnauthorizedAccessException,
            QuotaExceededException, InvalidArguentsException {
        if (resources == null || resources.length == 0) {
            throw new InvalidArguentsException(
                    String.format(ERR_MSG_INCORRECT_PARAM, "resources"));
        }
        List<MultiPartEntity> multiParts = new ArrayList<MultiPartEntity>();
        multiParts.add(apiKeyPart());
        multiParts.add(new MultiPartEntity(RESOURCE_FIELD,
                new StringBody(join(resources, ", "))));
        String body = fetchResponseBody(URI_VT2_FILE_SCAN_REPORT,
                RequestMethod.POST, multiParts);
        if (body == null) {
            return null;
        }
        if (resources.length > 1) {
            return gsonProcessor.fromJson(body, FileScanReport[].class);
        }
        return new FileScanReport[]{
            gsonProcessor.fromJson(body, FileScanReport.class)};
    }

    /**
     * Submits one or more URLs for scanning.
     *
     * @param urls non-empty array, at most
     *             {@code VT2_MAX_ALLOWED_URLS_PER_REQUEST} entries
     * @return one {@link ScanInfo} per URL, or {@code null} on an unexpected
     *         status
     * @throws InvalidArguentsException if {@code urls} is null, empty, or too
     *                                  large
     */
    @Override
    public ScanInfo[] scanUrls(String[] urls) throws IOException,
            UnauthorizedAccessException, QuotaExceededException,
            InvalidArguentsException {
        if (urls == null || urls.length == 0) {
            throw new InvalidArguentsException(
                    String.format(ERR_MSG_INCORRECT_PARAM, URLS_LITERAL));
        }
        if (urls.length > VT2_MAX_ALLOWED_URLS_PER_REQUEST) {
            throw new InvalidArguentsException(
                    String.format(ERR_MSG2_INCORRECT_PARAM, URLS_LITERAL,
                            VT2_MAX_ALLOWED_URLS_PER_REQUEST, URLS_LITERAL));
        }
        List<MultiPartEntity> multiParts = new ArrayList<MultiPartEntity>();
        multiParts.add(apiKeyPart());
        multiParts.add(new MultiPartEntity("url",
                new StringBody(join(urls, VT2_URLSEPERATOR))));
        String body = fetchResponseBody(URI_VT2_URL_SCAN, RequestMethod.POST,
                multiParts);
        if (body == null) {
            return null;
        }
        if (urls.length > 1) {
            return gsonProcessor.fromJson(body, ScanInfo[].class);
        }
        return new ScanInfo[]{gsonProcessor.fromJson(body, ScanInfo.class)};
    }

    /**
     * Retrieves scan reports for URLs, optionally scheduling a scan for URLs
     * not yet known to VirusTotal.
     *
     * @param urls non-empty array, at most
     *             {@code VT2_MAX_ALLOWED_URLS_PER_REQUEST} entries
     * @param scan when {@code true}, adds {@code scan=1} so unknown URLs are
     *             queued for analysis
     * @return one report per URL, or {@code null} on an unexpected status
     * @throws InvalidArguentsException if {@code urls} is null, empty, or too
     *                                  large
     */
    @Override
    public FileScanReport[] getUrlScanReport(String[] urls, boolean scan)
            throws IOException, UnauthorizedAccessException,
            QuotaExceededException, InvalidArguentsException {
        if (urls == null || urls.length == 0) {
            throw new InvalidArguentsException(
                    String.format(ERR_MSG_INCORRECT_PARAM, URLS_LITERAL));
        }
        if (urls.length > VT2_MAX_ALLOWED_URLS_PER_REQUEST) {
            throw new InvalidArguentsException(
                    String.format(ERR_MSG2_INCORRECT_PARAM, URLS_LITERAL,
                            VT2_MAX_ALLOWED_URLS_PER_REQUEST, URLS_LITERAL));
        }
        List<MultiPartEntity> multiParts = new ArrayList<MultiPartEntity>();
        multiParts.add(apiKeyPart());
        // This endpoint expects newline-separated URLs in the resource field.
        multiParts.add(new MultiPartEntity(RESOURCE_FIELD,
                new StringBody(join(urls, "\n"))));
        if (scan) {
            multiParts.add(new MultiPartEntity("scan", new StringBody("1")));
        }
        String body = fetchResponseBody(URI_VT2_URL_SCAN_REPORT,
                RequestMethod.POST, multiParts);
        if (body == null) {
            return null;
        }
        if (urls.length > 1) {
            return gsonProcessor.fromJson(body, FileScanReport[].class);
        }
        return new FileScanReport[]{
            gsonProcessor.fromJson(body, FileScanReport.class)};
    }

    /**
     * Retrieves the report for an IP address.
     *
     * @param ipAddress dotted-quad IP address
     * @return the report; an empty {@link IPAddressReport} on an unexpected
     *         status
     * @throws InvalidArguentsException if {@code ipAddress} is null
     */
    @Override
    public IPAddressReport getIPAddresReport(String ipAddress)
            throws InvalidArguentsException, QuotaExceededException,
            UnauthorizedAccessException, IOException {
        if (ipAddress == null) {
            throw new InvalidArguentsException(String.format(
                    ERR_MSG3_INCORRECT_PARAM, "ipAddress", "IP address"));
        }
        String uriWithParams = URI_VT2_IP_REPORT + "?apikey=" + apiKey
                + "&ip=" + ipAddress;
        String body = fetchResponseBody(uriWithParams, RequestMethod.GET, null);
        if (body == null) {
            return new IPAddressReport();
        }
        return gsonProcessor.fromJson(body, IPAddressReport.class);
    }

    /**
     * Retrieves the report for a domain name.
     *
     * @param domain domain name to look up
     * @return the report; an empty {@link DomainReport} on an unexpected
     *         status
     * @throws InvalidArguentsException if {@code domain} is null
     */
    @Override
    public DomainReport getDomainReport(String domain)
            throws InvalidArguentsException, UnauthorizedAccessException,
            QuotaExceededException, IOException {
        if (domain == null) {
            throw new InvalidArguentsException(String.format(
                    ERR_MSG3_INCORRECT_PARAM, "domain", "domain name"));
        }
        String uriWithParams = URI_VT2_DOMAIN_REPORT + "?apikey=" + apiKey
                + "&domain=" + domain;
        String body = fetchResponseBody(uriWithParams, RequestMethod.GET, null);
        if (body == null) {
            return new DomainReport();
        }
        return gsonProcessor.fromJson(body, DomainReport.class);
    }

    /**
     * Publishes a comment on a resource.
     *
     * @param resource hash (md5/sha1/sha256) of the resource to comment on
     * @param comment  comment text
     * @return the service acknowledgement; on an unexpected status, a
     *         placeholder response with code -1 and a generic error message
     * @throws InvalidArguentsException if {@code resource} is null or empty
     */
    @Override
    public GeneralResponse makeAComment(String resource, String comment)
            throws IOException, UnauthorizedAccessException,
            InvalidArguentsException, QuotaExceededException {
        if (resource == null || resource.length() == 0) {
            throw new InvalidArguentsException(String.format(
                    ERR_MSG3_INCORRECT_PARAM, "resource",
                    "string representing a hash value (md2,sha1,"
                    + "sha256)"));
        }
        List<MultiPartEntity> multiParts = new ArrayList<MultiPartEntity>();
        multiParts.add(apiKeyPart());
        multiParts.add(new MultiPartEntity(RESOURCE_FIELD,
                new StringBody(resource)));
        multiParts.add(new MultiPartEntity("comment", new StringBody(comment)));
        String body = fetchResponseBody(URI_VT2_PUT_COMMENT,
                RequestMethod.POST, multiParts);
        if (body == null) {
            // Preserve historical fallback: a sentinel response object rather
            // than null.
            GeneralResponse generalResponse = new GeneralResponse();
            generalResponse.setResponseCode(-1);
            generalResponse.setVerboseMessage(ERR_COMMENTING);
            return generalResponse;
        }
        return gsonProcessor.fromJson(body, GeneralResponse.class);
    }

    /** Builds the multipart form field carrying the configured API key. */
    private MultiPartEntity apiKeyPart() throws IOException {
        return new MultiPartEntity(API_KEY_FIELD, new StringBody(apiKey));
    }

    /**
     * Joins {@code items} with {@code separator}, producing no trailing
     * separator. Replaces the old append-then-deleteCharAt pattern, which
     * removed only one character of a multi-character separator and left a
     * stray trailing space.
     */
    private static String join(String[] items, String separator) {
        StringBuilder joined = new StringBuilder();
        for (int i = 0; i < items.length; i++) {
            if (i > 0) {
                joined.append(separator);
            }
            joined.append(items[i]);
        }
        return joined.toString();
    }

    /**
     * Performs the HTTP exchange and maps VirusTotal status codes to the
     * client's exception model.
     *
     * @param uri        request URI (query parameters already appended for GET)
     * @param method     HTTP method to use
     * @param multiParts multipart form fields, or {@code null} for plain GETs
     * @return the raw JSON response body on success, or {@code null} for any
     *         other non-error status (callers then return their default value)
     * @throws UnauthorizedAccessException on HTTP 403 (invalid API key)
     * @throws QuotaExceededException      when the request quota is exhausted
     */
    private String fetchResponseBody(String uri, RequestMethod method,
            List<MultiPartEntity> multiParts) throws IOException,
            UnauthorizedAccessException, QuotaExceededException {
        Response responseWrapper = null;
        int statusCode = -1;
        try {
            responseWrapper = httpRequestObject.request(uri, null, null,
                    method, multiParts);
            statusCode = responseWrapper.getStatus();
        } catch (RequestNotComplete e) {
            // The transport signals non-2xx statuses via this exception;
            // recover the status code and keep dispatching on it below.
            statusCode = e.getHttpStatus().getStatusCode();
            if (statusCode == VirustotalStatus.FORBIDDEN) {
                throw new UnauthorizedAccessException(
                        ERR_MSG_INVALID_API_KEY, e);
            }
        }
        if (statusCode == VirustotalStatus.SUCCESSFUL) {
            return responseWrapper.getResponse();
        }
        if (statusCode == VirustotalStatus.API_LIMIT_EXCEEDED) {
            throw new QuotaExceededException(ERR_MSG_EXCEED_MAX_REQ_PM);
        }
        return null;
    }
}
/*
 * Copyright 2020 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/cloud/domains/v1alpha2/domains.proto
// NOTE(review): generated code — never hand-edit; change domains.proto and regenerate instead.

package com.google.cloud.domains.v1alpha2;

/**
 *
 *
 * <pre>
 * Response for the `RetrieveRegisterParameters` method.
 * </pre>
 *
 * Protobuf type {@code google.cloud.domains.v1alpha2.RetrieveRegisterParametersResponse}
 */
public final class RetrieveRegisterParametersResponse
    extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.domains.v1alpha2.RetrieveRegisterParametersResponse)
    RetrieveRegisterParametersResponseOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use RetrieveRegisterParametersResponse.newBuilder() to construct.
  private RetrieveRegisterParametersResponse(
      com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private RetrieveRegisterParametersResponse() {}

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new RetrieveRegisterParametersResponse();
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }

  // Wire-format parsing constructor. Tag 10 = field 1 (register_parameters, length-delimited);
  // unknown fields are preserved rather than dropped.
  private RetrieveRegisterParametersResponse(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10:
            {
              com.google.cloud.domains.v1alpha2.RegisterParameters.Builder subBuilder = null;
              if (registerParameters_ != null) {
                subBuilder = registerParameters_.toBuilder();
              }
              registerParameters_ =
                  input.readMessage(
                      com.google.cloud.domains.v1alpha2.RegisterParameters.parser(),
                      extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(registerParameters_);
                registerParameters_ = subBuilder.buildPartial();
              }
              break;
            }
          default:
            {
              if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.domains.v1alpha2.DomainsProto
        .internal_static_google_cloud_domains_v1alpha2_RetrieveRegisterParametersResponse_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.domains.v1alpha2.DomainsProto
        .internal_static_google_cloud_domains_v1alpha2_RetrieveRegisterParametersResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.domains.v1alpha2.RetrieveRegisterParametersResponse.class,
            com.google.cloud.domains.v1alpha2.RetrieveRegisterParametersResponse.Builder.class);
  }

  public static final int REGISTER_PARAMETERS_FIELD_NUMBER = 1;
  private com.google.cloud.domains.v1alpha2.RegisterParameters registerParameters_;
  /**
   *
   *
   * <pre>
   * Parameters to use when calling the `RegisterDomain` method.
   * </pre>
   *
   * <code>.google.cloud.domains.v1alpha2.RegisterParameters register_parameters = 1;</code>
   *
   * @return Whether the registerParameters field is set.
   */
  @java.lang.Override
  public boolean hasRegisterParameters() {
    return registerParameters_ != null;
  }
  /**
   *
   *
   * <pre>
   * Parameters to use when calling the `RegisterDomain` method.
   * </pre>
   *
   * <code>.google.cloud.domains.v1alpha2.RegisterParameters register_parameters = 1;</code>
   *
   * @return The registerParameters.
   */
  @java.lang.Override
  public com.google.cloud.domains.v1alpha2.RegisterParameters getRegisterParameters() {
    return registerParameters_ == null
        ? com.google.cloud.domains.v1alpha2.RegisterParameters.getDefaultInstance()
        : registerParameters_;
  }
  /**
   *
   *
   * <pre>
   * Parameters to use when calling the `RegisterDomain` method.
   * </pre>
   *
   * <code>.google.cloud.domains.v1alpha2.RegisterParameters register_parameters = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.domains.v1alpha2.RegisterParametersOrBuilder
      getRegisterParametersOrBuilder() {
    return getRegisterParameters();
  }

  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (registerParameters_ != null) {
      output.writeMessage(1, getRegisterParameters());
    }
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (registerParameters_ != null) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getRegisterParameters());
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.domains.v1alpha2.RetrieveRegisterParametersResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.domains.v1alpha2.RetrieveRegisterParametersResponse other =
        (com.google.cloud.domains.v1alpha2.RetrieveRegisterParametersResponse) obj;

    if (hasRegisterParameters() != other.hasRegisterParameters()) return false;
    if (hasRegisterParameters()) {
      if (!getRegisterParameters().equals(other.getRegisterParameters())) return false;
    }
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasRegisterParameters()) {
      hash = (37 * hash) + REGISTER_PARAMETERS_FIELD_NUMBER;
      hash = (53 * hash) + getRegisterParameters().hashCode();
    }
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.cloud.domains.v1alpha2.RetrieveRegisterParametersResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.domains.v1alpha2.RetrieveRegisterParametersResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.domains.v1alpha2.RetrieveRegisterParametersResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.domains.v1alpha2.RetrieveRegisterParametersResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.domains.v1alpha2.RetrieveRegisterParametersResponse parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.domains.v1alpha2.RetrieveRegisterParametersResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.domains.v1alpha2.RetrieveRegisterParametersResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.domains.v1alpha2.RetrieveRegisterParametersResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.domains.v1alpha2.RetrieveRegisterParametersResponse
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.domains.v1alpha2.RetrieveRegisterParametersResponse
      parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.domains.v1alpha2.RetrieveRegisterParametersResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.domains.v1alpha2.RetrieveRegisterParametersResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.domains.v1alpha2.RetrieveRegisterParametersResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Response for the `RetrieveRegisterParameters` method.
   * </pre>
   *
   * Protobuf type {@code google.cloud.domains.v1alpha2.RetrieveRegisterParametersResponse}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.domains.v1alpha2.RetrieveRegisterParametersResponse)
      com.google.cloud.domains.v1alpha2.RetrieveRegisterParametersResponseOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.domains.v1alpha2.DomainsProto
          .internal_static_google_cloud_domains_v1alpha2_RetrieveRegisterParametersResponse_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.domains.v1alpha2.DomainsProto
          .internal_static_google_cloud_domains_v1alpha2_RetrieveRegisterParametersResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.domains.v1alpha2.RetrieveRegisterParametersResponse.class,
              com.google.cloud.domains.v1alpha2.RetrieveRegisterParametersResponse.Builder.class);
    }

    // Construct using
    // com.google.cloud.domains.v1alpha2.RetrieveRegisterParametersResponse.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      if (registerParametersBuilder_ == null) {
        registerParameters_ = null;
      } else {
        registerParameters_ = null;
        registerParametersBuilder_ = null;
      }
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.domains.v1alpha2.DomainsProto
          .internal_static_google_cloud_domains_v1alpha2_RetrieveRegisterParametersResponse_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.domains.v1alpha2.RetrieveRegisterParametersResponse
        getDefaultInstanceForType() {
      return com.google.cloud.domains.v1alpha2.RetrieveRegisterParametersResponse
          .getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.domains.v1alpha2.RetrieveRegisterParametersResponse build() {
      com.google.cloud.domains.v1alpha2.RetrieveRegisterParametersResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.domains.v1alpha2.RetrieveRegisterParametersResponse buildPartial() {
      com.google.cloud.domains.v1alpha2.RetrieveRegisterParametersResponse result =
          new com.google.cloud.domains.v1alpha2.RetrieveRegisterParametersResponse(this);
      if (registerParametersBuilder_ == null) {
        result.registerParameters_ = registerParameters_;
      } else {
        result.registerParameters_ = registerParametersBuilder_.build();
      }
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.domains.v1alpha2.RetrieveRegisterParametersResponse) {
        return mergeFrom(
            (com.google.cloud.domains.v1alpha2.RetrieveRegisterParametersResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(
        com.google.cloud.domains.v1alpha2.RetrieveRegisterParametersResponse other) {
      if (other
          == com.google.cloud.domains.v1alpha2.RetrieveRegisterParametersResponse
              .getDefaultInstance()) return this;
      if (other.hasRegisterParameters()) {
        mergeRegisterParameters(other.getRegisterParameters());
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.cloud.domains.v1alpha2.RetrieveRegisterParametersResponse parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage =
            (com.google.cloud.domains.v1alpha2.RetrieveRegisterParametersResponse)
                e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    private com.google.cloud.domains.v1alpha2.RegisterParameters registerParameters_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.domains.v1alpha2.RegisterParameters,
            com.google.cloud.domains.v1alpha2.RegisterParameters.Builder,
            com.google.cloud.domains.v1alpha2.RegisterParametersOrBuilder>
        registerParametersBuilder_;
    /**
     *
     *
     * <pre>
     * Parameters to use when calling the `RegisterDomain` method.
     * </pre>
     *
     * <code>.google.cloud.domains.v1alpha2.RegisterParameters register_parameters = 1;</code>
     *
     * @return Whether the registerParameters field is set.
     */
    public boolean hasRegisterParameters() {
      return registerParametersBuilder_ != null || registerParameters_ != null;
    }
    /**
     *
     *
     * <pre>
     * Parameters to use when calling the `RegisterDomain` method.
     * </pre>
     *
     * <code>.google.cloud.domains.v1alpha2.RegisterParameters register_parameters = 1;</code>
     *
     * @return The registerParameters.
     */
    public com.google.cloud.domains.v1alpha2.RegisterParameters getRegisterParameters() {
      if (registerParametersBuilder_ == null) {
        return registerParameters_ == null
            ? com.google.cloud.domains.v1alpha2.RegisterParameters.getDefaultInstance()
            : registerParameters_;
      } else {
        return registerParametersBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Parameters to use when calling the `RegisterDomain` method.
     * </pre>
     *
     * <code>.google.cloud.domains.v1alpha2.RegisterParameters register_parameters = 1;</code>
     */
    public Builder setRegisterParameters(
        com.google.cloud.domains.v1alpha2.RegisterParameters value) {
      if (registerParametersBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        registerParameters_ = value;
        onChanged();
      } else {
        registerParametersBuilder_.setMessage(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Parameters to use when calling the `RegisterDomain` method.
     * </pre>
     *
     * <code>.google.cloud.domains.v1alpha2.RegisterParameters register_parameters = 1;</code>
     */
    public Builder setRegisterParameters(
        com.google.cloud.domains.v1alpha2.RegisterParameters.Builder builderForValue) {
      if (registerParametersBuilder_ == null) {
        registerParameters_ = builderForValue.build();
        onChanged();
      } else {
        registerParametersBuilder_.setMessage(builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Parameters to use when calling the `RegisterDomain` method.
     * </pre>
     *
     * <code>.google.cloud.domains.v1alpha2.RegisterParameters register_parameters = 1;</code>
     */
    public Builder mergeRegisterParameters(
        com.google.cloud.domains.v1alpha2.RegisterParameters value) {
      if (registerParametersBuilder_ == null) {
        if (registerParameters_ != null) {
          registerParameters_ =
              com.google.cloud.domains.v1alpha2.RegisterParameters.newBuilder(registerParameters_)
                  .mergeFrom(value)
                  .buildPartial();
        } else {
          registerParameters_ = value;
        }
        onChanged();
      } else {
        registerParametersBuilder_.mergeFrom(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Parameters to use when calling the `RegisterDomain` method.
     * </pre>
     *
     * <code>.google.cloud.domains.v1alpha2.RegisterParameters register_parameters = 1;</code>
     */
    public Builder clearRegisterParameters() {
      if (registerParametersBuilder_ == null) {
        registerParameters_ = null;
        onChanged();
      } else {
        registerParameters_ = null;
        registerParametersBuilder_ = null;
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Parameters to use when calling the `RegisterDomain` method.
     * </pre>
     *
     * <code>.google.cloud.domains.v1alpha2.RegisterParameters register_parameters = 1;</code>
     */
    public com.google.cloud.domains.v1alpha2.RegisterParameters.Builder
        getRegisterParametersBuilder() {
      onChanged();
      return getRegisterParametersFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Parameters to use when calling the `RegisterDomain` method.
     * </pre>
     *
     * <code>.google.cloud.domains.v1alpha2.RegisterParameters register_parameters = 1;</code>
     */
    public com.google.cloud.domains.v1alpha2.RegisterParametersOrBuilder
        getRegisterParametersOrBuilder() {
      if (registerParametersBuilder_ != null) {
        return registerParametersBuilder_.getMessageOrBuilder();
      } else {
        return registerParameters_ == null
            ? com.google.cloud.domains.v1alpha2.RegisterParameters.getDefaultInstance()
            : registerParameters_;
      }
    }
    /**
     *
     *
     * <pre>
     * Parameters to use when calling the `RegisterDomain` method.
     * </pre>
     *
     * <code>.google.cloud.domains.v1alpha2.RegisterParameters register_parameters = 1;</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.domains.v1alpha2.RegisterParameters,
            com.google.cloud.domains.v1alpha2.RegisterParameters.Builder,
            com.google.cloud.domains.v1alpha2.RegisterParametersOrBuilder>
        getRegisterParametersFieldBuilder() {
      if (registerParametersBuilder_ == null) {
        registerParametersBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.domains.v1alpha2.RegisterParameters,
                com.google.cloud.domains.v1alpha2.RegisterParameters.Builder,
                com.google.cloud.domains.v1alpha2.RegisterParametersOrBuilder>(
                getRegisterParameters(), getParentForChildren(), isClean());
        registerParameters_ = null;
      }
      return registerParametersBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.domains.v1alpha2.RetrieveRegisterParametersResponse)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.domains.v1alpha2.RetrieveRegisterParametersResponse)
  private static final com.google.cloud.domains.v1alpha2.RetrieveRegisterParametersResponse
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.domains.v1alpha2.RetrieveRegisterParametersResponse();
  }

  public static com.google.cloud.domains.v1alpha2.RetrieveRegisterParametersResponse
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<RetrieveRegisterParametersResponse> PARSER =
      new com.google.protobuf.AbstractParser<RetrieveRegisterParametersResponse>() {
        @java.lang.Override
        public RetrieveRegisterParametersResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new RetrieveRegisterParametersResponse(input, extensionRegistry);
        }
      };

  public static com.google.protobuf.Parser<RetrieveRegisterParametersResponse> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<RetrieveRegisterParametersResponse> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.domains.v1alpha2.RetrieveRegisterParametersResponse
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// Copyright 2000-2022 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license.
package com.intellij.notification.impl.widget;

import com.intellij.icons.AllIcons;
import com.intellij.ide.ui.UISettings;
import com.intellij.notification.*;
import com.intellij.notification.impl.NotificationsToolWindowFactory;
import com.intellij.notification.impl.ui.NotificationsUtil;
import com.intellij.openapi.application.Application;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.wm.CustomStatusBarWidget;
import com.intellij.openapi.wm.IconLikeCustomStatusBarWidget;
import com.intellij.openapi.wm.StatusBar;
import com.intellij.ui.*;
import com.intellij.ui.scale.JBUIScale;
import com.intellij.util.ui.JBUI;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
// NOTE(review): sun.swing.* is JDK-internal API and may break on newer JDKs — worth migrating
// to a supported FontMetrics lookup if one is available in the platform.
import sun.swing.SwingUtilities2;

import javax.swing.*;
import java.awt.*;
import java.awt.event.MouseEvent;
import java.util.List;

/**
 * Status-bar widget that shows the notification (event log / action center) icon, including a
 * small overlaid counter of pending notifications. Clicking the widget toggles the event log
 * for the status bar's project.
 */
public class IdeNotificationArea extends JLabel implements CustomStatusBarWidget, IconLikeCustomStatusBarWidget {
  public static final String WIDGET_ID = "Notifications";
  private static final BadgeIconSupplier NOTIFICATION_ICON = new BadgeIconSupplier(AllIcons.Toolwindows.Notifications);

  private @Nullable StatusBar myStatusBar;

  public IdeNotificationArea() {
    setBorder(JBUI.CurrentTheme.StatusBar.Widget.iconBorder());
  }

  @Override
  public WidgetPresentation getPresentation() {
    // This widget paints itself; no separate presentation object is needed.
    return null;
  }

  @Override
  public void dispose() {
    myStatusBar = null;
    UIUtil.dispose(this);
  }

  @Override
  public void install(@NotNull StatusBar statusBar) {
    myStatusBar = statusBar;
    Project project = myStatusBar.getProject();
    if (project != null && !project.isDisposed()) {
      // Toggle the event log on click (guarding against the project being disposed meanwhile).
      new ClickListener() {
        @Override
        public boolean onClick(@NotNull MouseEvent e, int clickCount) {
          if (!project.isDisposed()) {
            EventLog.toggleLog(project, null);
          }
          return true;
        }
      }.installOn(this, true);
      // Refresh the icon whenever the log model changes; the connection is tied to this widget's
      // lifetime via connect(this).
      Application app = ApplicationManager.getApplication();
      app.getMessageBus().connect(this).subscribe(LogModel.LOG_MODEL_CHANGED, () -> app.invokeLater(() -> updateStatus(project)));
      updateStatus(project);
    }
  }

  @Override
  public @NotNull String ID() {
    return WIDGET_ID;
  }

  // Recomputes the icon and tooltip from the current set of notifications for the project.
  private void updateStatus(@Nullable Project project) {
    if (project == null || project.isDisposed()) {
      return;
    }
    List<Notification> notifications;
    if (ActionCenter.isEnabled()) {
      notifications = NotificationsToolWindowFactory.Companion.getStateNotifications(project);
    } else {
      notifications = EventLog.getLogModel(project).getNotifications();
    }
    updateIconOnStatusBar(notifications);
    int count = notifications.size();
    setToolTipText(count > 0
                   ? UIBundle.message("status.bar.notifications.widget.tooltip", count)
                   : UIBundle.message("status.bar.notifications.widget.no.notification.tooltip"));
  }

  private void updateIconOnStatusBar(List<? extends Notification> notifications) {
    if (ActionCenter.isEnabled()) {
      setIcon(getActionCenterNotificationIcon(notifications));
    } else {
      setIcon(createIconWithNotificationCount(this, NotificationType.getDominatingType(notifications), notifications.size(), false));
    }
  }

  /**
   * Picks the action-center icon: "important" styling when any notification is an important
   * suggestion or an error, otherwise a plain/info icon depending on whether any notifications
   * are pending and whether the new UI is active.
   */
  public static @NotNull Icon getActionCenterNotificationIcon(List<? extends Notification> notifications) {
    for (Notification notification : notifications) {
      if (notification.isSuggestionType() && notification.isImportantSuggestion() || notification.getType() == NotificationType.ERROR) {
        if (ExperimentalUI.isNewUI()) return NOTIFICATION_ICON.getErrorIcon();
        return AllIcons.Toolwindows.NotificationsNewImportant;
      }
    }
    if (ExperimentalUI.isNewUI()) return NOTIFICATION_ICON.getInfoIcon(!notifications.isEmpty());
    return notifications.isEmpty() ? AllIcons.Toolwindows.Notifications : AllIcons.Toolwindows.NotificationsNew;
  }

  /**
   * Builds a two-layer icon: the base "pending notifications" icon plus, when {@code size > 0},
   * a text layer showing the count (capped at "9+").
   */
  public static @NotNull LayeredIcon createIconWithNotificationCount(JComponent component, NotificationType type, int size, boolean forToolWindow) {
    LayeredIcon icon = new LayeredIcon(2);
    Icon baseIcon = getPendingNotificationsIcon(type, forToolWindow);
    icon.setIcon(baseIcon, 0);
    if (size > 0) {
      //noinspection UseJBColor
      Color textColor = type == NotificationType.ERROR || type == NotificationType.INFORMATION
                        ? new JBColor(Color.white, new Color(0xF2F2F2))
                        : new Color(0x333333);
      icon.setIcon(new TextIcon(component, size < 10 ? String.valueOf(size) : "9+", textColor, baseIcon), 1);
    }
    return icon;
  }

  @Override
  public JComponent getComponent() {
    return this;
  }

  // Maps the dominating notification type to the matching status-bar or tool-window icon.
  private static Icon getPendingNotificationsIcon(NotificationType maximumType, boolean forToolWindow) {
    if (maximumType != null) {
      switch (maximumType) {
        case WARNING:
          return forToolWindow ? AllIcons.Toolwindows.WarningEvents : AllIcons.Ide.Notification.WarningEvents;
        case ERROR:
          return forToolWindow ? AllIcons.Toolwindows.ErrorEvents : AllIcons.Ide.Notification.ErrorEvents;
        case INFORMATION:
        case IDE_UPDATE:
          return forToolWindow ? AllIcons.Toolwindows.InfoEvents : AllIcons.Ide.Notification.InfoEvents;
      }
    }
    return forToolWindow ? AllIcons.Toolwindows.NoEvents : AllIcons.Ide.Notification.NoEvents;
  }

  /**
   * Paints a short count string ("1".."9" or "9+") centered over a base icon.
   * The icon's reported size is that of the base icon.
   */
  private static class TextIcon implements Icon {
    private final String myStr;
    private final JComponent myComponent;
    private final Color myTextColor;
    private final Icon myBaseIcon;
    private final int myWidth;   // pre-measured width of myStr in myFont
    private final Font myFont;

    TextIcon(JComponent component, @NotNull String str, @NotNull Color textColor, @NotNull Icon baseIcon) {
      myStr = str;
      myComponent = component;
      myTextColor = textColor;
      myBaseIcon = baseIcon;
      myFont = new Font(NotificationsUtil.getFontName(), Font.BOLD, JBUIScale.scale(9));
      myWidth = myComponent.getFontMetrics(myFont).stringWidth(myStr);
    }

    // NOTE(review): equals/hashCode intentionally(?) ignore myTextColor and myBaseIcon — two
    // TextIcons with the same text/component but different colors compare equal. Confirm this is
    // the desired identity before relying on it.
    @Override
    public boolean equals(Object o) {
      if (this == o) return true;
      if (!(o instanceof TextIcon)) return false;
      TextIcon icon = (TextIcon)o;
      if (myWidth != icon.myWidth) return false;
      if (!myComponent.equals(icon.myComponent)) return false;
      if (!myStr.equals(icon.myStr)) return false;
      return true;
    }

    @Override
    public int hashCode() {
      int result = myStr.hashCode();
      result = 31 * result + myComponent.hashCode();
      result = 31 * result + myWidth;
      return result;
    }

    @Override
    public void paintIcon(Component c, Graphics g, int x, int y) {
      Graphics2D g2 = (Graphics2D)g;
      UISettings.setupAntialiasing(g);
      // Save and later restore the graphics font/color so the shared Graphics is not polluted.
      Font originalFont = g.getFont();
      Color originalColor = g.getColor();
      g.setFont(myFont);
      FontMetrics fm = SwingUtilities2.getFontMetrics((JComponent)c, g);
      boolean isTwoChar = myStr.length() == 2;
      float center = getIconWidth() - fm.stringWidth(myStr) + (isTwoChar ? JBUIScale.scale(1) : 0);
      float textX = x + center / 2;
      float textY = y + SimpleColoredComponent.getTextBaseLine(fm, getIconHeight());
      if (!JreHiDpiUtil.isJreHiDPI(g2)) {
        // Snap to whole pixels on non-HiDPI screens to avoid blurry text.
        textX = (float)Math.floor(textX);
      }
      g.setColor(myTextColor);
      g2.drawString(myStr.substring(0, 1), textX, textY);
      if (isTwoChar) {
        // Draw the second character with a small negative kerning tweak ("9+").
        textX += fm.charWidth(myStr.charAt(0)) - JBUIScale.scale(1);
        g2.drawString(myStr.substring(1), textX, textY);
      }
      g.setFont(originalFont);
      g.setColor(originalColor);
    }

    @Override
    public int getIconWidth() {
      return myBaseIcon.getIconWidth();
    }

    @Override
    public int getIconHeight() {
      return myBaseIcon.getIconHeight();
    }
  }
}
package org.drools.integrationtests; import java.io.InputStreamReader; import java.io.StringReader; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Date; import java.util.List; import java.util.Locale; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import org.drools.Alarm; import org.drools.Cheese; import org.drools.CommonTestMethodBase; import org.drools.FactA; import org.drools.FactHandle; import org.drools.Foo; import org.drools.Pet; import org.drools.RuleBase; import org.drools.RuleBaseFactory; import org.drools.StatefulSession; import org.drools.WorkingMemory; import org.drools.compiler.PackageBuilder; import org.drools.io.impl.ByteArrayResource; import org.drools.rule.Package; import org.drools.time.impl.PseudoClockScheduler; import org.junit.Ignore; import org.junit.Test; import org.kie.KnowledgeBase; import org.kie.KnowledgeBaseFactory; import org.kie.builder.KnowledgeBuilder; import org.kie.builder.KnowledgeBuilderFactory; import org.kie.definition.KnowledgePackage; import org.kie.io.ResourceFactory; import org.kie.io.ResourceType; import org.kie.runtime.KieSessionConfiguration; import org.kie.runtime.StatefulKnowledgeSession; import org.kie.runtime.conf.ClockTypeOption; import org.kie.time.Calendar; import org.kie.time.SessionClock; public class TimerAndCalendarTest extends CommonTestMethodBase { @Test public void testDuration() throws Exception { final PackageBuilder builder = new PackageBuilder(); builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_Duration.drl" ) ) ); final Package pkg = builder.getPackage(); RuleBase ruleBase = getRuleBase(); ruleBase.addPackage( pkg ); ruleBase = SerializationHelper.serializeObject( ruleBase ); final WorkingMemory workingMemory = ruleBase.newStatefulSession(); final List list = new ArrayList(); workingMemory.setGlobal( "list", 
list ); final Cheese brie = new Cheese( "brie", 12 ); final FactHandle brieHandle = workingMemory.insert( brie ); workingMemory.fireAllRules(); // now check for update assertEquals( 0, list.size() ); // sleep for 300ms Thread.sleep( 300 ); // now check for update assertEquals( 1, list.size() ); } @Test public void testDurationWithNoLoop() throws Exception { final PackageBuilder builder = new PackageBuilder(); builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_Duration_with_NoLoop.drl" ) ) ); final Package pkg = builder.getPackage(); RuleBase ruleBase = getRuleBase(); ruleBase.addPackage( pkg ); ruleBase = SerializationHelper.serializeObject( ruleBase ); final WorkingMemory workingMemory = ruleBase.newStatefulSession(); final List list = new ArrayList(); workingMemory.setGlobal( "list", list ); final Cheese brie = new Cheese( "brie", 12 ); final FactHandle brieHandle = workingMemory.insert( brie ); workingMemory.fireAllRules(); // now check for update assertEquals( 0, list.size() ); // sleep for 300ms Thread.sleep( 300 ); // now check for update assertEquals( 1, list.size() ); } @Test public void testDurationMemoryLeakonRepeatedUpdate() throws Exception { String str = ""; str += "package org.drools.test\n"; str += "import org.drools.Alarm\n"; str += "global java.util.List list;"; str += "rule \"COMPTEUR\"\n"; str += " timer 50\n"; str += " when\n"; str += " $alarm : Alarm( number < 5 )\n"; str += " then\n"; str += " $alarm.incrementNumber();\n"; str += " list.add( $alarm );\n"; str += " update($alarm);\n"; str += "end\n"; PackageBuilder builder = new PackageBuilder(); builder.addPackageFromDrl( new StringReader( str ) ); RuleBase ruleBase = RuleBaseFactory.newRuleBase(); ruleBase.addPackage( builder.getPackage() ); StatefulSession session = ruleBase.newStatefulSession(); List list = new ArrayList(); session.setGlobal( "list", list ); session.insert( new Alarm() ); session.fireAllRules(); Thread.sleep( 1000 ); assertEquals( 5, 
list.size() ); assertEquals( 0, session.getAgenda().getScheduledActivations().length ); } @Test public void testFireRuleAfterDuration() throws Exception { final PackageBuilder builder = new PackageBuilder(); builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_FireRuleAfterDuration.drl" ) ) ); final Package pkg = builder.getPackage(); RuleBase ruleBase = getRuleBase(); ruleBase.addPackage( pkg ); ruleBase = SerializationHelper.serializeObject( ruleBase ); final WorkingMemory workingMemory = ruleBase.newStatefulSession(); final List list = new ArrayList(); workingMemory.setGlobal( "list", list ); final Cheese brie = new Cheese( "brie", 12 ); final FactHandle brieHandle = workingMemory.insert( brie ); workingMemory.fireAllRules(); // now check for update assertEquals( 0, list.size() ); // sleep for 300ms Thread.sleep( 300 ); workingMemory.fireAllRules(); // now check for update assertEquals( 2, list.size() ); } @Test public void testNoProtocolIntervalTimer() throws Exception { String str = ""; str += "package org.simple \n"; str += "global java.util.List list \n"; str += "rule xxx \n"; str += " duration (30s 10s) "; str += "when \n"; str += "then \n"; str += " list.add(\"fired\"); \n"; str += "end \n"; KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder(); kbuilder.add( ResourceFactory.newByteArrayResource( str.getBytes() ), ResourceType.DRL ); if ( kbuilder.hasErrors() ) { System.out.println( kbuilder.getErrors() ); assertTrue( kbuilder.hasErrors() ); } KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase(); kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() ); KieSessionConfiguration conf = KnowledgeBaseFactory.newKnowledgeSessionConfiguration(); conf.setOption( ClockTypeOption.get( "pseudo" ) ); List list = new ArrayList(); StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession( conf, null ); PseudoClockScheduler timeService = ( PseudoClockScheduler ) 
ksession.<SessionClock>getSessionClock(); timeService.advanceTime( new Date().getTime(), TimeUnit.MILLISECONDS ); ksession.setGlobal( "list", list ); ksession.fireAllRules(); assertEquals( 0, list.size() ); timeService.advanceTime( 20, TimeUnit.SECONDS ); ksession.fireAllRules(); assertEquals( 0, list.size() ); timeService.advanceTime( 15, TimeUnit.SECONDS ); ksession.fireAllRules(); assertEquals( 1, list.size() ); timeService.advanceTime( 3, TimeUnit.SECONDS ); ksession.fireAllRules(); assertEquals( 1, list.size() ); timeService.advanceTime( 2, TimeUnit.SECONDS ); ksession.fireAllRules(); assertEquals( 2, list.size() ); timeService.advanceTime( 10, TimeUnit.SECONDS ); ksession.fireAllRules(); assertEquals( 3, list.size() ); } @Test public void testIntervalTimer() throws Exception { String str = ""; str += "package org.simple \n"; str += "global java.util.List list \n"; str += "rule xxx \n"; str += " timer (int:30s 10s) "; str += "when \n"; str += "then \n"; str += " list.add(\"fired\"); \n"; str += "end \n"; KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder(); kbuilder.add( ResourceFactory.newByteArrayResource( str.getBytes() ), ResourceType.DRL ); if ( kbuilder.hasErrors() ) { fail( kbuilder.getErrors().toString() ); } KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase(); kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() ); KieSessionConfiguration conf = KnowledgeBaseFactory.newKnowledgeSessionConfiguration(); conf.setOption( ClockTypeOption.get( "pseudo" ) ); StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession( conf, null ); List list = new ArrayList(); PseudoClockScheduler timeService = ( PseudoClockScheduler ) ksession.<SessionClock>getSessionClock(); timeService.advanceTime( new Date().getTime(), TimeUnit.MILLISECONDS ); ksession.setGlobal( "list", list ); ksession.fireAllRules(); assertEquals( 0, list.size() ); timeService.advanceTime( 20, TimeUnit.SECONDS ); ksession.fireAllRules(); assertEquals( 0, 
list.size() );

        // t=35s: first firing.
        timeService.advanceTime( 15, TimeUnit.SECONDS );
        ksession.fireAllRules();
        assertEquals( 1, list.size() );

        // t=38s: inside the 10s repeat interval.
        timeService.advanceTime( 3, TimeUnit.SECONDS );
        ksession.fireAllRules();
        assertEquals( 1, list.size() );

        // t=40s: second firing.
        timeService.advanceTime( 2, TimeUnit.SECONDS );
        ksession.fireAllRules();
        assertEquals( 2, list.size() );

        // t=50s: third firing.
        timeService.advanceTime( 10, TimeUnit.SECONDS );
        ksession.fireAllRules();
        assertEquals( 3, list.size() );
    }

    // --- negative parsing tests: each malformed timer expression must be a build error ---

    /** Unknown timer protocol prefix must be rejected. */
    @Test
    public void testUnknownProtocol() throws Exception {
        wrongTimerExpression("xyz:30");
    }

    /** Protocol given without the separating colon must be rejected. */
    @Test
    public void testMissingColon() throws Exception {
        wrongTimerExpression("int 30");
    }

    /** Garbled interval token ("s30") must be rejected. */
    @Test
    public void testMalformedExpression() throws Exception {
        wrongTimerExpression("30s s30");
    }

    /** "int" protocol without colon must be rejected. */
    @Test
    public void testMalformedIntExpression() throws Exception {
        wrongTimerExpression("int 30s");
    }

    /** A 6-field cron with "*" in the seconds position is invalid here and must be rejected. */
    @Test
    public void testMalformedCronExpression() throws Exception {
        wrongTimerExpression("cron: 0/30 * * * * *");
    }

    /**
     * Builds a one-rule DRL with the given timer expression and asserts the
     * KnowledgeBuilder reports at least one error.
     */
    private void wrongTimerExpression(String timer) {
        String str = "";
        str += "package org.simple \n";
        str += "rule xxx \n";
        str += " timer (" + timer + ") ";
        str += "when \n";
        str += "then \n";
        str += "end \n";

        KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
        kbuilder.add( ResourceFactory.newByteArrayResource( str.getBytes() ), ResourceType.DRL );
        assertTrue( kbuilder.hasErrors() );
    }

    /**
     * Cron timer "15 * * * * ?" fires at second 15 of every minute. The pseudo clock is
     * anchored at 2009-01-01T00:00:00 UTC, so firings land at :15 of each minute.
     */
    @Test
    public void testCronTimer() throws Exception {
        String str = "";
        str += "package org.simple \n";
        str += "global java.util.List list \n";
        str += "rule xxx \n";
        str += " timer (cron:15 * * * * ?) ";
        str += "when \n";
        str += "then \n";
        str += " list.add(\"fired\"); \n";
        str += "end \n";

        KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
        kbuilder.add( ResourceFactory.newByteArrayResource( str.getBytes() ), ResourceType.DRL );
        if ( kbuilder.hasErrors() ) {
            System.out.println( kbuilder.getErrors() );
            assertTrue( kbuilder.hasErrors() );
        }

        KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
        kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );

        KieSessionConfiguration conf = KnowledgeBaseFactory.newKnowledgeSessionConfiguration();
        conf.setOption( ClockTypeOption.get( "pseudo" ) );

        List list = new ArrayList();
        StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession( conf, null );
        PseudoClockScheduler timeService = ( PseudoClockScheduler ) ksession.<SessionClock>getSessionClock();
        DateFormat df = new SimpleDateFormat( "yyyy-MM-dd'T'HH:mm:ss.SSSZ" );
        Date date = df.parse( "2009-01-01T00:00:00.000-0000" );
        // Fixed epoch so the cron schedule is deterministic.
        timeService.advanceTime( date.getTime(), TimeUnit.MILLISECONDS );
        ksession.setGlobal( "list", list );

        ksession.fireAllRules();
        assertEquals( 0, list.size() );

        // t=:10 — before the :15 trigger.
        timeService.advanceTime( 10, TimeUnit.SECONDS );
        ksession.fireAllRules();
        assertEquals( 0, list.size() );

        // t=:20 — first :15 trigger has passed.
        timeService.advanceTime( 10, TimeUnit.SECONDS );
        ksession.fireAllRules();
        assertEquals( 1, list.size() );

        // t=:50 — still before the next minute's :15.
        timeService.advanceTime( 30, TimeUnit.SECONDS );
        ksession.fireAllRules();
        assertEquals( 1, list.size() );

        // t=1:20 — second trigger has passed.
        timeService.advanceTime( 30, TimeUnit.SECONDS );
        ksession.fireAllRules();
        assertEquals( 2, list.size() );
    }

    /**
     * A normal (non-timer) rule gated by a single calendar: activations fire only while
     * the calendar includes the session-clock time.
     */
    @Test
    public void testCalendarNormalRuleSingleCalendar() throws Exception {
        String str = "";
        str += "package org.simple \n";
        str += "global java.util.List list \n";
        str += "rule xxx \n";
        str += " calendars \"cal1\"\n";
        str += "when \n";
        str += " String()\n";
        str += "then \n";
        str += " list.add(\"fired\"); \n";
        str += "end \n";

        KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
        kbuilder.add(
ResourceFactory.newByteArrayResource( str.getBytes() ), ResourceType.DRL ); if ( kbuilder.hasErrors() ) { System.out.println( kbuilder.getErrors() ); assertTrue( kbuilder.hasErrors() ); } Calendar calFalse = new Calendar() { public boolean isTimeIncluded(long timestamp) { return false; } }; Calendar calTrue = new Calendar() { public boolean isTimeIncluded(long timestamp) { return true; } }; KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase(); kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() ); KieSessionConfiguration conf = KnowledgeBaseFactory.newKnowledgeSessionConfiguration(); conf.setOption( ClockTypeOption.get( "pseudo" ) ); List list = new ArrayList(); StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession( conf, null ); PseudoClockScheduler timeService = ( PseudoClockScheduler ) ksession.<SessionClock>getSessionClock(); DateFormat df = new SimpleDateFormat( "yyyy-MM-dd'T'HH:mm:ss.SSSZ" ); Date date = df.parse( "2009-01-01T00:00:00.000-0000" ); ksession.getCalendars().set( "cal1", calTrue ); timeService.advanceTime( date.getTime(), TimeUnit.MILLISECONDS ); ksession.setGlobal( "list", list ); ksession.insert( "o1" ); ksession.fireAllRules(); assertEquals( 1, list.size() ); timeService.advanceTime( 10, TimeUnit.SECONDS ); ksession.insert( "o2" ); ksession.fireAllRules(); assertEquals( 2, list.size() ); ksession.getCalendars().set( "cal1", calFalse ); timeService.advanceTime( 10, TimeUnit.SECONDS ); ksession.insert( "o3" ); ksession.fireAllRules(); assertEquals( 2, list.size() ); ksession.getCalendars().set( "cal1", calTrue ); timeService.advanceTime( 30, TimeUnit.SECONDS ); ksession.insert( "o4" ); ksession.fireAllRules(); assertEquals( 3, list.size() ); } @Test public void testCalendarNormalRuleMultipleCalendars() throws Exception { String str = ""; str += "package org.simple \n"; str += "global java.util.List list \n"; str += "rule xxx \n"; str += " calendars \"cal1\", \"cal2\"\n"; str += "when \n"; str += " String()\n"; 
str += "then \n"; str += " list.add(\"fired\"); \n"; str += "end \n"; KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder(); kbuilder.add( ResourceFactory.newByteArrayResource( str.getBytes() ), ResourceType.DRL ); if ( kbuilder.hasErrors() ) { System.out.println( kbuilder.getErrors() ); assertTrue( kbuilder.hasErrors() ); } Calendar calFalse = new Calendar() { public boolean isTimeIncluded(long timestamp) { return false; } }; Calendar calTrue = new Calendar() { public boolean isTimeIncluded(long timestamp) { return true; } }; KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase(); kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() ); KieSessionConfiguration conf = KnowledgeBaseFactory.newKnowledgeSessionConfiguration(); conf.setOption( ClockTypeOption.get( "pseudo" ) ); List list = new ArrayList(); StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession( conf, null ); PseudoClockScheduler timeService = ( PseudoClockScheduler ) ksession.<SessionClock>getSessionClock(); DateFormat df = new SimpleDateFormat( "yyyy-MM-dd'T'HH:mm:ss.SSSZ" ); Date date = df.parse( "2009-01-01T00:00:00.000-0000" ); ksession.getCalendars().set( "cal1", calTrue ); ksession.getCalendars().set( "cal2", calTrue ); timeService.advanceTime( date.getTime(), TimeUnit.MILLISECONDS ); ksession.setGlobal( "list", list ); ksession.insert( "o1" ); ksession.fireAllRules(); assertEquals( 1, list.size() ); ksession.getCalendars().set( "cal2", calFalse ); timeService.advanceTime( 10, TimeUnit.SECONDS ); ksession.insert( "o2" ); ksession.fireAllRules(); assertEquals( 1, list.size() ); ksession.getCalendars().set( "cal1", calFalse ); timeService.advanceTime( 10, TimeUnit.SECONDS ); ksession.insert( "o3" ); ksession.fireAllRules(); assertEquals( 1, list.size() ); ksession.getCalendars().set( "cal1", calTrue ); ksession.getCalendars().set( "cal2", calTrue ); timeService.advanceTime( 30, TimeUnit.SECONDS ); ksession.insert( "o4" ); ksession.fireAllRules(); 
assertEquals( 2, list.size() ); } @Test public void testCalendarsWithCron() throws Exception { String str = ""; str += "package org.simple \n"; str += "global java.util.List list \n"; str += "rule xxx \n"; str += " calendars \"cal1\", \"cal2\"\n"; str += " timer (cron:15 * * * * ?) "; str += "when \n"; str += "then \n"; str += " list.add(\"fired\"); \n"; str += "end \n"; KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder(); kbuilder.add( ResourceFactory.newByteArrayResource( str.getBytes() ), ResourceType.DRL ); if ( kbuilder.hasErrors() ) { System.out.println( kbuilder.getErrors() ); assertTrue( kbuilder.hasErrors() ); } KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase(); kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() ); KieSessionConfiguration conf = KnowledgeBaseFactory.newKnowledgeSessionConfiguration(); conf.setOption( ClockTypeOption.get( "pseudo" ) ); List list = new ArrayList(); StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession( conf, null ); PseudoClockScheduler timeService = ( PseudoClockScheduler ) ksession.<SessionClock>getSessionClock(); DateFormat df = new SimpleDateFormat( "yyyy-MM-dd'T'HH:mm:ss.SSSZ" ); Date date = df.parse( "2009-01-01T00:00:00.000-0000" ); timeService.advanceTime( date.getTime(), TimeUnit.MILLISECONDS ); final Date date1 = new Date( date.getTime() + (15 * 1000) ); final Date date2 = new Date( date1.getTime() + (60 * 1000) ); final Date date3 = new Date( date2.getTime() + (60 * 1000) ); final Date date4 = new Date( date3.getTime() + (60 * 1000) ); Calendar cal1 = new Calendar() { public boolean isTimeIncluded(long timestamp) { if ( timestamp == date1.getTime() ) { return true; } else if ( timestamp == date4.getTime() ) { return false; } else { return true; } } }; Calendar cal2 = new Calendar() { public boolean isTimeIncluded(long timestamp) { if ( timestamp == date2.getTime() ) { return false; } else if ( timestamp == date3.getTime() ) { return true; } else { return 
true; } } }; ksession.getCalendars().set( "cal1", cal1 ); ksession.getCalendars().set( "cal2", cal2 ); ksession.setGlobal( "list", list ); ksession.fireAllRules(); timeService.advanceTime( 20, TimeUnit.SECONDS ); assertEquals( 1, list.size() ); timeService.advanceTime( 60, TimeUnit.SECONDS ); assertEquals( 1, list.size() ); timeService.advanceTime( 60, TimeUnit.SECONDS ); assertEquals( 2, list.size() ); timeService.advanceTime( 60, TimeUnit.SECONDS ); assertEquals( 2, list.size() ); timeService.advanceTime( 60, TimeUnit.SECONDS ); assertEquals( 3, list.size() ); timeService.advanceTime( 60, TimeUnit.SECONDS ); assertEquals( 4, list.size() ); } @Test public void testCalendarsWithIntervals() throws Exception { String str = ""; str += "package org.simple \n"; str += "global java.util.List list \n"; str += "rule xxx \n"; str += " calendars \"cal1\", \"cal2\"\n"; str += " timer (15s 60s) "; //int: protocol is assumed str += "when \n"; str += "then \n"; str += " list.add(\"fired\"); \n"; str += "end \n"; KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder(); kbuilder.add( ResourceFactory.newByteArrayResource( str.getBytes() ), ResourceType.DRL ); if ( kbuilder.hasErrors() ) { System.out.println( kbuilder.getErrors() ); assertTrue( kbuilder.hasErrors() ); } KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase(); kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() ); KieSessionConfiguration conf = KnowledgeBaseFactory.newKnowledgeSessionConfiguration(); conf.setOption( ClockTypeOption.get( "pseudo" ) ); List list = new ArrayList(); StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession( conf, null ); PseudoClockScheduler timeService = ( PseudoClockScheduler ) ksession.<SessionClock>getSessionClock(); DateFormat df = new SimpleDateFormat( "yyyy-MM-dd'T'HH:mm:ss.SSSZ" ); Date date = df.parse( "2009-01-01T00:00:00.000-0000" ); timeService.advanceTime( date.getTime(), TimeUnit.MILLISECONDS ); final Date date1 = new Date( 
date.getTime() + (15 * 1000) ); final Date date2 = new Date( date1.getTime() + (60 * 1000) ); final Date date3 = new Date( date2.getTime() + (60 * 1000) ); final Date date4 = new Date( date3.getTime() + (60 * 1000) ); Calendar cal1 = new Calendar() { public boolean isTimeIncluded(long timestamp) { if ( timestamp == date1.getTime() ) { return true; } else if ( timestamp == date4.getTime() ) { return false; } else { return true; } } }; Calendar cal2 = new Calendar() { public boolean isTimeIncluded(long timestamp) { if ( timestamp == date2.getTime() ) { return false; } else if ( timestamp == date3.getTime() ) { return true; } else { return true; } } }; ksession.getCalendars().set( "cal1", cal1 ); ksession.getCalendars().set( "cal2", cal2 ); ksession.setGlobal( "list", list ); ksession.fireAllRules(); timeService.advanceTime( 20, TimeUnit.SECONDS ); assertEquals( 1, list.size() ); timeService.advanceTime( 60, TimeUnit.SECONDS ); assertEquals( 1, list.size() ); timeService.advanceTime( 60, TimeUnit.SECONDS ); assertEquals( 2, list.size() ); timeService.advanceTime( 60, TimeUnit.SECONDS ); assertEquals( 2, list.size() ); timeService.advanceTime( 60, TimeUnit.SECONDS ); assertEquals( 3, list.size() ); timeService.advanceTime( 60, TimeUnit.SECONDS ); assertEquals( 4, list.size() ); } @Test public void testCalendarsWithIntervalsAndStartAndEnd() throws Exception { String str = ""; str += "package org.simple \n"; str += "global java.util.List list \n"; str += "rule xxx \n"; str += " calendars \"cal1\"\n"; str += " timer (0d 1d start=3-JAN-2010 end=5-JAN-2010) "; //int: protocol is assumed str += "when \n"; str += "then \n"; str += " list.add(\"fired\"); \n"; str += "end \n"; KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder(); kbuilder.add( ResourceFactory.newByteArrayResource( str.getBytes() ), ResourceType.DRL ); if ( kbuilder.hasErrors() ) { System.out.println( kbuilder.getErrors() ); assertTrue( kbuilder.hasErrors() ); } KnowledgeBase kbase = 
KnowledgeBaseFactory.newKnowledgeBase(); kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() ); KieSessionConfiguration conf = KnowledgeBaseFactory.newKnowledgeSessionConfiguration(); conf.setOption( ClockTypeOption.get( "pseudo" ) ); List list = new ArrayList(); StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession( conf, null ); PseudoClockScheduler timeService = ( PseudoClockScheduler ) ksession.<SessionClock>getSessionClock(); DateFormat df = new SimpleDateFormat( "dd-MMM-yyyy", Locale.UK ); Date date = df.parse( "1-JAN-2010" ); Calendar cal1 = new Calendar() { public boolean isTimeIncluded(long timestamp) { return true; } }; long oneDay = 60 * 60 * 24; ksession.getCalendars().set( "cal1", cal1 ); ksession.setGlobal( "list", list ); timeService.advanceTime( date.getTime(), TimeUnit.MILLISECONDS ); ksession.fireAllRules(); assertEquals( 0, list.size() ); timeService.advanceTime( oneDay, TimeUnit.SECONDS ); assertEquals( 0, list.size() ); timeService.advanceTime( oneDay, TimeUnit.SECONDS ); // day 3 assertEquals( 1, list.size() ); timeService.advanceTime( oneDay, TimeUnit.SECONDS ); assertEquals( 2, list.size() ); timeService.advanceTime( oneDay, TimeUnit.SECONDS ); // day 5 assertEquals( 3, list.size() ); timeService.advanceTime( oneDay, TimeUnit.SECONDS ); assertEquals( 3, list.size() ); } @Test public void testCalendarsWithIntervalsAndStartAndLimit() throws Exception { String str = ""; str += "package org.simple \n"; str += "global java.util.List list \n"; str += "rule xxx \n"; str += " calendars \"cal1\"\n"; str += " timer (0d 1d start=3-JAN-2010 repeat-limit=4) "; //int: protocol is assumed str += "when \n"; str += "then \n"; str += " list.add(\"fired\"); \n"; str += "end \n"; KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder(); kbuilder.add( ResourceFactory.newByteArrayResource( str.getBytes() ), ResourceType.DRL ); if ( kbuilder.hasErrors() ) { System.out.println( kbuilder.getErrors() ); assertTrue( 
kbuilder.hasErrors() );
        }

        KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
        kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );

        KieSessionConfiguration conf = KnowledgeBaseFactory.newKnowledgeSessionConfiguration();
        conf.setOption( ClockTypeOption.get( "pseudo" ) );

        List list = new ArrayList();
        StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession( conf, null );
        PseudoClockScheduler timeService = ( PseudoClockScheduler ) ksession.<SessionClock>getSessionClock();
        // UK locale so "JAN" parses regardless of the platform default.
        DateFormat df = new SimpleDateFormat( "dd-MMM-yyyy", Locale.UK );
        Date date = df.parse( "1-JAN-2010" );

        Calendar cal1 = new Calendar() {
            public boolean isTimeIncluded(long timestamp) {
                return true;   // permissive calendar; only start/repeat-limit bound the timer
            }
        };

        long oneDay = 60 * 60 * 24;   // one day in SECONDS (advanced with TimeUnit.SECONDS)
        ksession.getCalendars().set( "cal1", cal1 );
        ksession.setGlobal( "list", list );
        timeService.advanceTime( date.getTime(), TimeUnit.MILLISECONDS );

        ksession.fireAllRules();
        assertEquals( 0, list.size() );      // Jan 1: before start

        timeService.advanceTime( oneDay, TimeUnit.SECONDS );
        assertEquals( 0, list.size() );      // Jan 2

        timeService.advanceTime( oneDay, TimeUnit.SECONDS ); // day 3
        assertEquals( 1, list.size() );      // Jan 3: first firing

        timeService.advanceTime( oneDay, TimeUnit.SECONDS );
        assertEquals( 2, list.size() );      // Jan 4

        timeService.advanceTime( oneDay, TimeUnit.SECONDS ); // day 5
        assertEquals( 3, list.size() );      // Jan 5

        timeService.advanceTime( oneDay, TimeUnit.SECONDS );
        assertEquals( 3, list.size() );      // limit reached — no further firings
    }

    /**
     * Daily cron timer ("0 0 0 * * ?", i.e. midnight) bounded by {@code start}/{@code end}
     * dates. The default locale is forced to UK for the duration of the test because the
     * month abbreviations inside the DRL are parsed with the default locale.
     */
    @Test
    public void testCalendarsWithCronAndStartAndEnd() throws Exception {
        Locale defaultLoc = Locale.getDefault();
        try {
            Locale.setDefault( Locale.UK ); // Because of the date strings in the DRL, fixable with JBRULES-3444
            String str = "";
            str += "package org.simple \n";
            str += "global java.util.List list \n";
            str += "rule xxx \n";
            str += " calendars \"cal1\"\n";
            str += " timer (cron: 0 0 0 * * ? start=3-JAN-2010 end=5-JAN-2010) ";
            str += "when \n";
            str += "then \n";
            str += " list.add(\"fired\"); \n";
            str += "end \n";

            KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
            kbuilder.add( ResourceFactory.newByteArrayResource( str.getBytes() ), ResourceType.DRL );
            if ( kbuilder.hasErrors() ) {
                System.out.println( kbuilder.getErrors() );
                assertTrue( kbuilder.hasErrors() );
            }

            KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
            kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );

            KieSessionConfiguration conf = KnowledgeBaseFactory.newKnowledgeSessionConfiguration();
            conf.setOption( ClockTypeOption.get( "pseudo" ) );

            List list = new ArrayList();
            StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession( conf, null );
            PseudoClockScheduler timeService = ( PseudoClockScheduler ) ksession.<SessionClock>getSessionClock();
            DateFormat df = new SimpleDateFormat( "dd-MMM-yyyy", Locale.UK );
            Date date = df.parse( "1-JAN-2010" );

            Calendar cal1 = new Calendar() {
                public boolean isTimeIncluded(long timestamp) {
                    return true;   // permissive; only start/end bound the cron schedule
                }
            };

            long oneDay = 60 * 60 * 24;   // one day in SECONDS
            ksession.getCalendars().set( "cal1", cal1 );
            ksession.setGlobal( "list", list );
            timeService.advanceTime( date.getTime(), TimeUnit.MILLISECONDS );

            ksession.fireAllRules();
            assertEquals( 0, list.size() );      // Jan 1: before start

            timeService.advanceTime( oneDay, TimeUnit.SECONDS );
            assertEquals( 0, list.size() );      // Jan 2

            timeService.advanceTime( oneDay, TimeUnit.SECONDS ); // day 3
            assertEquals( 1, list.size() );      // Jan 3: first midnight firing

            timeService.advanceTime( oneDay, TimeUnit.SECONDS );
            assertEquals( 2, list.size() );      // Jan 4

            timeService.advanceTime( oneDay, TimeUnit.SECONDS ); // day 5
            assertEquals( 3, list.size() );      // Jan 5: last firing at end

            timeService.advanceTime( oneDay, TimeUnit.SECONDS );
            assertEquals( 3, list.size() );      // past end
        } finally {
            Locale.setDefault( defaultLoc );     // always restore the JVM default locale
        }
    }

    /**
     * Daily cron timer bounded by {@code start} and {@code repeat-limit=4}; assertions
     * mirror {@link #testCalendarsWithCronAndStartAndEnd()}.
     */
    @Test
    public void testCalendarsWithCronAndStartAndLimit() throws Exception {
        Locale defaultLoc = Locale.getDefault();
        try {
            Locale.setDefault( Locale.UK ); // Because of the date strings in the DRL, fixable with JBRULES-3444
            String str = "";
            str += "package org.simple \n";
            str += "global java.util.List list \n";
            str += "rule xxx \n";
            str += " calendars \"cal1\"\n";
            str += " timer (cron: 0 0 0 * * ? start=3-JAN-2010 repeat-limit=4) ";
            str += "when \n";
            str += "then \n";
            str += " list.add(\"fired\"); \n";
            str += "end \n";

            KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
            kbuilder.add( ResourceFactory.newByteArrayResource( str.getBytes() ), ResourceType.DRL );
            if ( kbuilder.hasErrors() ) {
                System.out.println( kbuilder.getErrors() );
                assertTrue( kbuilder.hasErrors() );
            }

            KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
            kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );

            KieSessionConfiguration conf = KnowledgeBaseFactory.newKnowledgeSessionConfiguration();
            conf.setOption( ClockTypeOption.get( "pseudo" ) );

            List list = new ArrayList();
            StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession( conf, null );
            PseudoClockScheduler timeService = ( PseudoClockScheduler ) ksession.<SessionClock>getSessionClock();
            DateFormat df = new SimpleDateFormat( "dd-MMM-yyyy", Locale.UK );
            Date date = df.parse( "1-JAN-2010" );

            Calendar cal1 = new Calendar() {
                public boolean isTimeIncluded(long timestamp) {
                    return true;   // permissive; only start/repeat-limit bound the schedule
                }
            };

            long oneDay = 60 * 60 * 24;   // one day in SECONDS
            ksession.getCalendars().set( "cal1", cal1 );
            ksession.setGlobal( "list", list );
            timeService.advanceTime( date.getTime(), TimeUnit.MILLISECONDS );

            ksession.fireAllRules();
            assertEquals( 0, list.size() );      // Jan 1: before start

            timeService.advanceTime( oneDay, TimeUnit.SECONDS );
            assertEquals( 0, list.size() );      // Jan 2

            timeService.advanceTime( oneDay, TimeUnit.SECONDS ); // day 3
            assertEquals( 1, list.size() );      // Jan 3: first firing

            timeService.advanceTime( oneDay, TimeUnit.SECONDS );
            assertEquals( 2, list.size() );      // Jan 4

            timeService.advanceTime( oneDay, TimeUnit.SECONDS ); // day 5
            assertEquals( 3, list.size() );      // Jan 5

            timeService.advanceTime( oneDay, TimeUnit.SECONDS );
            assertEquals( 3, list.size() );      // limit reached
        } finally {
Locale.setDefault( defaultLoc );     // always restore the JVM default locale
        }
    }

    /**
     * Timer rule combined with a {@code not} CE: after 1.5s real time the "wrap A" rule
     * should have fired once, leaving exactly two facts in working memory.
     * NOTE(review): real-sleep test — timing-sensitive.
     */
    @Test
    public void testTimerWithNot() throws Exception {
        final PackageBuilder builder = new PackageBuilder();
        builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_Timer_With_Not.drl" ) ) );
        final Package pkg = builder.getPackage();

        RuleBase ruleBase = getRuleBase();
        ruleBase.addPackage( pkg );
        ruleBase = SerializationHelper.serializeObject( ruleBase );
        final WorkingMemory workingMemory = ruleBase.newStatefulSession();

        workingMemory.fireAllRules();
        Thread.sleep( 1500 );
        // now check that rule "wrap A" fired once, creating one B
        assertEquals( 2, workingMemory.getFactCount() );
    }

    /**
     * A session running {@code fireUntilHalt()} on a background thread must be haltable
     * by a rule triggered by inserting the "halt" fact.
     * NOTE(review): relies on 1s/2s sleeps for thread coordination — timing-sensitive,
     * and the spawned thread is never joined.
     */
    @Test
    public void testHaltWithTimer() throws Exception {
        final PackageBuilder builder = new PackageBuilder();
        builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_Halt_With_Timer.drl" ) ) );
        final Package pkg = builder.getPackage();

        RuleBase ruleBase = getRuleBase();
        ruleBase.addPackage( pkg );
        ruleBase = SerializationHelper.serializeObject( ruleBase );
        final StatefulSession workingMemory = ruleBase.newStatefulSession();

        new Thread( new Runnable(){
            public void run(){
                workingMemory.fireUntilHalt();
            }
        } ).start();
        Thread.sleep( 1000 );

        FactHandle handle = workingMemory.insert( "halt" );
        Thread.sleep( 2000 );

        // now check that rule "halt" fired once, creating one Integer
        assertEquals( 2, workingMemory.getFactCount() );
        workingMemory.retract( handle );
    }

    /**
     * Removing a timer rule from a live kbase must cancel its pending firings. A latch
     * holds the first activation open until the rule has been removed; after that the
     * list must stay empty.
     * NOTE(review): coordination via sleeps (200/100/500ms) — timing-sensitive.
     */
    @Test
    public void testTimerRemoval() {
        try {
            String str = "package org.drools.test\n" +
                    "import " + TimeUnit.class.getName() + "\n" +
                    "global java.util.List list \n" +
                    "global " + CountDownLatch.class.getName() + " latch\n" +
                    "rule TimerRule \n" +
                    " timer (int:0 50) \n" +
                    "when \n" +
                    "then \n" +
                    " //forces it to pause until main thread is ready\n" +
                    " latch.await(10, TimeUnit.MINUTES); \n" +
                    " list.add(list.size()); \n" +
                    " end";

            final KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();

            // this will parse and compile in one step
            kbuilder.add(ResourceFactory.newByteArrayResource( str.getBytes()), ResourceType.DRL);

            // Check the builder for errors
            if (kbuilder.hasErrors()) {
                System.out.println(kbuilder.getErrors().toString());
                throw new RuntimeException("Unable to compile \"TimerRule.drl\".");
            }

            // get the compiled packages (which are serializable)
            final Collection<KnowledgePackage> pkgs = kbuilder.getKnowledgePackages();

            // add the packages to a knowledgebase (deploy the knowledge packages).
            final KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
            kbase.addKnowledgePackages(pkgs);

            CountDownLatch latch = new CountDownLatch(1);
            final StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
            // synchronized wrapper: the timer thread and this thread both touch the list
            List list = Collections.synchronizedList( new ArrayList() );
            ksession.setGlobal( "list", list );
            ksession.setGlobal( "latch", latch );

            ksession.fireAllRules();
            Thread.sleep(200); // this makes sure it actually enters a rule

            kbase.removeRule("org.drools.test", "TimerRule");
            latch.countDown();

            Thread.sleep(100); // allow the last rule, if we were in the middle of one to actually fire, before clearing
            list.clear();
            Thread.sleep(500); // now wait to see if any more fire, they shouldn't
            assertEquals( 0, list.size() );
            ksession.dispose();
        } catch (InterruptedException e) {
            throw new RuntimeException( e );
        }
    }

    /**
     * Timer delay/period supplied as expressions ({@code expr:}) bound from long fields
     * of a declared fact (delay=30000ms, period=10000ms); schedule matches the 30s/10s
     * interval-timer tests above.
     */
    @Test
    public void testIntervalTimerWithLongExpressions() throws Exception {
        String str = "package org.simple;\n" +
                "global java.util.List list;\n" +
                "\n" +
                "declare Bean\n" +
                " delay : long = 30000\n" +
                " period : long = 10000\n" +
                "end\n" +
                "\n" +
                "rule init \n" +
                "when \n" +
                "then \n" +
                " insert( new Bean() );\n" +
                "end \n" +
                "\n" +
                "rule xxx\n" +
                " salience ($d) \n" +
                " timer( expr: $d, $p; start=3-JAN-2010 )\n" +
                "when\n" +
                " Bean( $d : delay, $p : period )\n" +
                "then\n" +
                " list.add( \"fired\" );\n" +
                "end";

        KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
        kbuilder.add( ResourceFactory.newByteArrayResource( str.getBytes() ), ResourceType.DRL
); if ( kbuilder.hasErrors() ) { fail( kbuilder.getErrors().toString() ); } KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase(); kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() ); KieSessionConfiguration conf = KnowledgeBaseFactory.newKnowledgeSessionConfiguration(); conf.setOption( ClockTypeOption.get( "pseudo" ) ); StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession( conf, null ); List list = new ArrayList(); PseudoClockScheduler timeService = ( PseudoClockScheduler ) ksession.<SessionClock>getSessionClock(); timeService.advanceTime( new Date().getTime(), TimeUnit.MILLISECONDS ); ksession.setGlobal( "list", list ); ksession.fireAllRules(); assertEquals( 0, list.size() ); timeService.advanceTime( 20, TimeUnit.SECONDS ); ksession.fireAllRules(); assertEquals( 0, list.size() ); timeService.advanceTime( 15, TimeUnit.SECONDS ); ksession.fireAllRules(); assertEquals( 1, list.size() ); timeService.advanceTime( 3, TimeUnit.SECONDS ); ksession.fireAllRules(); assertEquals( 1, list.size() ); timeService.advanceTime( 2, TimeUnit.SECONDS ); ksession.fireAllRules(); assertEquals( 2, list.size() ); timeService.advanceTime( 10, TimeUnit.SECONDS ); ksession.fireAllRules(); assertEquals( 3, list.size() ); } @Test public void testIntervalTimerWithStringExpressions() throws Exception { String str = "package org.simple;\n" + "global java.util.List list;\n" + "\n" + "declare Bean\n" + " delay : String = \"30s\"\n" + " period : long = 10000\n" + "end\n" + "\n" + "rule init \n" + "when \n" + "then \n" + " insert( new Bean() );\n" + "end \n" + "\n" + "rule xxx\n" + " salience ($d) \n" + " timer( expr: $d, $p; start=3-JAN-2010 )\n" + "when\n" + " Bean( $d : delay, $p : period )\n" + "then\n" + " list.add( \"fired\" );\n" + "end"; KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder(); kbuilder.add( ResourceFactory.newByteArrayResource( str.getBytes() ), ResourceType.DRL ); if ( kbuilder.hasErrors() ) { fail( 
kbuilder.getErrors().toString() ); } KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase(); kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() ); KieSessionConfiguration conf = KnowledgeBaseFactory.newKnowledgeSessionConfiguration(); conf.setOption( ClockTypeOption.get( "pseudo" ) ); StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession( conf, null ); List list = new ArrayList(); PseudoClockScheduler timeService = ( PseudoClockScheduler ) ksession.<SessionClock>getSessionClock(); timeService.advanceTime( new Date().getTime(), TimeUnit.MILLISECONDS ); ksession.setGlobal( "list", list ); ksession.fireAllRules(); assertEquals( 0, list.size() ); timeService.advanceTime( 20, TimeUnit.SECONDS ); ksession.fireAllRules(); assertEquals( 0, list.size() ); timeService.advanceTime( 15, TimeUnit.SECONDS ); ksession.fireAllRules(); assertEquals( 1, list.size() ); timeService.advanceTime( 3, TimeUnit.SECONDS ); ksession.fireAllRules(); assertEquals( 1, list.size() ); timeService.advanceTime( 2, TimeUnit.SECONDS ); ksession.fireAllRules(); assertEquals( 2, list.size() ); timeService.advanceTime( 10, TimeUnit.SECONDS ); ksession.fireAllRules(); assertEquals( 3, list.size() ); } @Test public void testIntervalTimerExpressionWithOr() throws Exception { final KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder(); String text = "package org.kie.test\n" + "global java.util.List list\n" + "import " + FactA.class.getCanonicalName() + "\n" + "import " + Foo.class.getCanonicalName() + "\n" + "import " + Pet.class.getCanonicalName() + "\n" + "rule r1 timer (expr: f1.field2, f1.field2; repeat-limit=3)\n" + "when\n" + " foo: Foo()\n" + " ( Pet() and f1 : FactA( field1 == 'f1') ) or \n" + " f1 : FactA(field1 == 'f2') \n" + "then\n" + " list.add( f1 );\n" + " foo.setId( 'xxx' );\n" + "end\n" + "\n"; kbuilder.add( ResourceFactory.newByteArrayResource( text.getBytes() ), ResourceType.DRL ); if ( kbuilder.hasErrors() ) { fail( 
kbuilder.getErrors().toString() ); } KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase(); kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() ); KieSessionConfiguration conf = KnowledgeBaseFactory.newKnowledgeSessionConfiguration(); conf.setOption( ClockTypeOption.get( "pseudo" ) ); StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession( conf, null ); PseudoClockScheduler timeService = ( PseudoClockScheduler ) ksession.<SessionClock>getSessionClock(); timeService.advanceTime( new Date().getTime(), TimeUnit.MILLISECONDS ); List list = new ArrayList(); ksession.setGlobal( "list", list ); ksession.insert ( new Foo(null, null) ); ksession.insert ( new Pet(null) ); FactA fact1 = new FactA(); fact1.setField1( "f1" ); fact1.setField2( 250 ); FactA fact3 = new FactA(); fact3.setField1( "f2" ); fact3.setField2( 1000 ); ksession.insert( fact1 ); ksession.insert( fact3 ); ksession.fireAllRules(); assertEquals( 0, list.size() ); timeService.advanceTime( 900, TimeUnit.MILLISECONDS ); ksession.fireAllRules(); assertEquals( 2, list.size() ); assertEquals( fact1, list.get( 0 ) ); assertEquals( fact1, list.get( 1 ) ); timeService.advanceTime( 5000, TimeUnit.MILLISECONDS ); ksession.fireAllRules(); assertEquals( 4, list.size() ); assertEquals( fact3, list.get( 2 ) ); assertEquals( fact3, list.get( 3 ) ); } @Test public void testExprTimeRescheduled() throws Exception { final KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder(); String text = "package org.kie.test\n" + "global java.util.List list\n" + "import " + FactA.class.getCanonicalName() + "\n" + "rule r1 timer (expr: f1.field2, f1.field4)\n" + "when\n" + " f1 : FactA() \n" + "then\n" + " list.add( f1 );\n" + "end\n" + "\n"; kbuilder.add( ResourceFactory.newByteArrayResource( text.getBytes() ), ResourceType.DRL ); if ( kbuilder.hasErrors() ) { fail( kbuilder.getErrors().toString() ); } KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase(); kbase.addKnowledgePackages( 
kbuilder.getKnowledgePackages() ); KieSessionConfiguration conf = KnowledgeBaseFactory.newKnowledgeSessionConfiguration(); conf.setOption( ClockTypeOption.get( "pseudo" ) ); StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession( conf, null ); PseudoClockScheduler timeService = ( PseudoClockScheduler ) ksession.<SessionClock>getSessionClock(); timeService.advanceTime( new Date().getTime(), TimeUnit.MILLISECONDS ); List list = new ArrayList(); ksession.setGlobal( "list", list ); FactA fact1 = new FactA(); fact1.setField1( "f1" ); fact1.setField2( 500 ); fact1.setField4( 1000 ); FactHandle fh = (FactHandle) ksession.insert (fact1 ); ksession.fireAllRules(); assertEquals( 0, list.size() ); timeService.advanceTime( 2600, TimeUnit.MILLISECONDS ); ksession.fireAllRules(); assertEquals( 3, list.size() ); assertEquals( fact1, list.get( 0 ) ); assertEquals( fact1, list.get( 1 ) ); assertEquals( fact1, list.get( 2 ) ); list.clear(); fact1.setField2( 300 ); fact1.setField4( 2000 ); ksession.update( fh, fact1 ); // 100 has passed of the 1000, from the previous schedule // so that should be deducted from the 300 delay above, meaning // we only need to increment another 250 timeService.advanceTime( 250, TimeUnit.MILLISECONDS ); ksession.fireAllRules(); assertEquals( 1, list.size() ); assertEquals( fact1, list.get( 0 ) ); list.clear(); timeService.advanceTime( 1000, TimeUnit.MILLISECONDS ); ksession.fireAllRules(); assertEquals( 0, list.size() ); timeService.advanceTime( 700, TimeUnit.MILLISECONDS ); ksession.fireAllRules(); assertEquals( 0, list.size() ); timeService.advanceTime( 300, TimeUnit.MILLISECONDS ); ksession.fireAllRules(); assertEquals( 1, list.size() ); } @Test @Ignore // TODO: fails randomly FIXME public void testHaltAfterSomeTimeThenRestart() throws Exception { String drl = "package org.kie.test;" + "global java.util.List list; \n" + "\n" + "\n" + "rule FireAtWill\n" + "timer(int:0 100)\n" + "when \n" + "then \n" + " list.add( 0 );\n" + "end\n" + 
"\n" + "rule ImDone\n" + "when\n" + " String( this == \"halt\" )\n" + "then\n" + " drools.halt();\n" + "end\n" + "\n" + "rule Hi \n" + "salience 10 \n" + "when \n" + " String( this == \"trigger\" ) \n" + "then \n " + " list.add( 5 ); \n" + "end \n" + "\n" + "rule Lo \n" + "salience -5 \n" + "when \n" + " String( this == \"trigger\" ) \n" + "then \n " + " list.add( -5 ); \n" + "end \n" ; final PackageBuilder builder = new PackageBuilder(); builder.addPackageFromDrl( new ByteArrayResource( drl.getBytes() ) ); final Package pkg = builder.getPackage(); RuleBase ruleBase = getRuleBase(); ruleBase.addPackage( pkg ); ruleBase = SerializationHelper.serializeObject( ruleBase ); final StatefulSession ksession = ruleBase.newStatefulSession(); List list = new ArrayList(); ksession.setGlobal( "list", list ); new Thread( new Runnable(){ public void run(){ ksession.fireUntilHalt(); } } ).start(); Thread.sleep( 250 ); ksession.insert( "halt" ); ksession.insert( "trigger" ); Thread.sleep( 300 ); new Thread( new Runnable(){ public void run(){ ksession.fireUntilHalt(); } } ).start(); Thread.sleep( 200 ); assertEquals( java.util.Arrays.asList( 0, 0, 0, 5, 0, 0, 0, -5, 0, 0 ), list ); } @Test @Ignore // TODO: fix random failures public void testHaltAfterSomeTimeThenRestartButNoLongerHolding() throws Exception { String drl = "package org.kie.test;" + "global java.util.List list; \n" + "\n" + "\n" + "rule FireAtWill\n" + "timer(int:0 100)\n" + "when \n" + " eval(true)" + " String( this == \"trigger\" )" + "then \n" + " list.add( 0 );\n" + "end\n" + "\n" + "rule ImDone\n" + "when\n" + " String( this == \"halt\" )\n" + "then\n" + " drools.halt();\n" + "end\n" + "\n" ; final PackageBuilder builder = new PackageBuilder(); builder.addPackageFromDrl( new ByteArrayResource( drl.getBytes() ) ); final Package pkg = builder.getPackage(); RuleBase ruleBase = getRuleBase(); ruleBase.addPackage( pkg ); ruleBase = SerializationHelper.serializeObject( ruleBase ); final StatefulSession ksession = 
ruleBase.newStatefulSession(); List list = new ArrayList(); ksession.setGlobal( "list", list ); FactHandle handle = ksession.insert( "trigger" ); new Thread( new Runnable(){ public void run(){ ksession.fireUntilHalt(); } } ).start(); Thread.sleep( 150 ); ksession.insert( "halt" ); Thread.sleep( 200 ); ksession.retract( handle ); new Thread( new Runnable(){ public void run(){ ksession.fireUntilHalt(); } } ).start(); Thread.sleep( 200 ); assertEquals( 2, list.size() ); assertEquals( java.util.Arrays.asList( 0, 0 ), list ); } }
package com.vies.viesmachines.client.gui.machines.customize;

import java.awt.Color;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.lwjgl.input.Keyboard;

import com.vies.viesmachines.api.ColorHelperVM;
import com.vies.viesmachines.api.CostsVM;
import com.vies.viesmachines.api.GuiVM;
import com.vies.viesmachines.api.References;
import com.vies.viesmachines.api.util.Keybinds;
import com.vies.viesmachines.client.gui.GuiContainerVM;
import com.vies.viesmachines.client.gui.buttons.GuiButtonGeneral1VM;
import com.vies.viesmachines.client.gui.buttons.GuiButtonGeneral2VM;
import com.vies.viesmachines.common.entity.machines.EntityMachineBase;
import com.vies.viesmachines.common.entity.machines.containers.ContainerMachineNoSlots;
import com.vies.viesmachines.network.NetworkHandler;
import com.vies.viesmachines.network.server.machine.gui.customize.primaryskin.color.MessageHelperGuiMachineMenuCustomizePrimarySkinColorApply;
import com.vies.viesmachines.network.server.machine.gui.customize.primaryskin.color.MessageHelperGuiMachineMenuCustomizePrimarySkinColorDefault;
import com.vies.viesmachines.network.server.machine.gui.navigation.MessageGuiMachineMenuCustomize;

import net.minecraft.client.gui.GuiButton;
import net.minecraft.client.gui.GuiTextField;
import net.minecraft.client.renderer.GlStateManager;
import net.minecraft.entity.Entity;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.inventory.IInventory;
import net.minecraft.util.ResourceLocation;
import net.minecraft.util.text.TextFormatting;

/**
 * Customization screen that lets the player recolor a machine's primary skin.
 *
 * <p>The screen offers three numeric text boxes (R/G/B, each clamped to 0-255),
 * twelve one-click preset color buttons, Apply/Remove/Back actions, and a live
 * entity preview that is temporarily repainted with the pending color while it
 * is rendered (see {@link #drawEntityOnScreen}).
 *
 * <p>State such as {@code textRedNumber}, {@code machineColorActive},
 * {@code modelRotationHorizontal} and {@code machine} is inherited from
 * {@link GuiContainerVM}; only the three text boxes and the background texture
 * are declared locally.
 */
public class GuiMachineMenuCustomizePrimarySkinColor extends GuiContainerVM
{
	/** Input boxes for the red, green and blue color components (0-255). */
	private GuiTextField textRed;
	private GuiTextField textGreen;
	private GuiTextField textBlue;

	/** Background texture shared by the customize-color screens. */
	private final ResourceLocation TEXTURE = new ResourceLocation(References.MOD_ID + ":" + "textures/gui/container_gui_machine_menu_customize_color.png");

	/**
	 * Opens the primary-skin-color screen for the given machine and seeds the
	 * pending color state from the machine's current visual frame color.
	 *
	 * @param playerInv the viewing player's inventory (forwarded to the container)
	 * @param airshipIn the machine being customized
	 */
	public GuiMachineMenuCustomizePrimarySkinColor(IInventory playerInv, EntityMachineBase airshipIn)
	{
		super(new ContainerMachineNoSlots(playerInv, airshipIn), playerInv, airshipIn);

		this.modelRotationHorizontal = 160;
		this.modelRidingEntity = false;

		// Start from whatever color the machine currently shows so the screen
		// reflects the live state until the player edits something.
		this.machineColorActive = this.machine.getVisualFrameColor();
		this.textRedNumber = this.machine.getVisualFrameColorRed();
		this.textGreenNumber = this.machine.getVisualFrameColorGreen();
		this.textBlueNumber = this.machine.getVisualFrameColorBlue();
	}

	@Override
	public void initGui()
	{
		super.initGui();
		buttonList.clear();
		Keyboard.enableRepeatEvents(true);

		// R/G/B text boxes, tinted in their own channel's color.
		this.textRed = new GuiTextField(11, this.fontRenderer, this.guiLeft + 8 + 2, this.guiTop + 56, 26, 18);
		textRed.setMaxStringLength(3);
		textRed.setText(String.valueOf(this.textRedNumber));
		this.textRed.setFocused(false);
		this.textRed.setTextColor(Color.RED.getRGB());

		this.textGreen = new GuiTextField(12, this.fontRenderer, this.guiLeft + 41 + 1, this.guiTop + 56, 26, 18);
		textGreen.setMaxStringLength(3);
		textGreen.setText(String.valueOf(this.textGreenNumber));
		this.textGreen.setFocused(false);
		this.textGreen.setTextColor(Color.GREEN.getRGB());

		this.textBlue = new GuiTextField(13, this.fontRenderer, this.guiLeft + 74, this.guiTop + 56, 26, 18);
		textBlue.setMaxStringLength(3);
		textBlue.setText(String.valueOf(this.textBlueNumber));
		this.textBlue.setFocused(false);
		this.textBlue.setTextColor(Color.BLUE.getRGB());

		//--------------------------------------------------
		// Preview controls.
		// NOTE(review): both rotate buttons are created with id 10 while the
		// undo button uses 11 — if any handler dispatches on id rather than on
		// the button reference, left/right cannot be told apart. TODO confirm
		// this is intentional before changing the ids.
		GuiVM.buttonRotateLeft = new GuiButtonGeneral2VM(10, this.guiLeft + 110, this.guiTop + 68, 6, 6, "", 3);
		GuiVM.buttonRotateRight = new GuiButtonGeneral2VM(10, this.guiLeft + 122, this.guiTop + 68, 6, 6, "", 3);
		GuiVM.buttonRidingPlayerTrue = new GuiButtonGeneral1VM(12, this.guiLeft + 130, this.guiTop + 66, 10, 10, "", 1);
		GuiVM.buttonRidingPlayerFalse = new GuiButtonGeneral1VM(13, this.guiLeft + 140, this.guiTop + 66, 10, 10, "", 2);
		GuiVM.buttonUndo = new GuiButtonGeneral2VM(11, this.guiLeft + 158, this.guiTop + 66, 10, 10, "", 1);

		// Main actions.
		GuiVM.button00 = new GuiButtonGeneral2VM(20, this.guiLeft + 7, this.guiTop + 81, 42, 14, References.localNameVC("viesmachines.button.remove"), 0);
		GuiVM.buttonApply = new GuiButtonGeneral1VM(21, this.guiLeft + 7, this.guiTop + 99, 42, 14, References.localNameVC("viesmachines.button.apply"), 1);
		GuiVM.buttonBack = new GuiButtonGeneral1VM(22, this.guiLeft + 61, this.guiTop + 99, 42, 14, References.localNameVC("viesmachines.button.back"), 2);

		//--------------------------------------------------
		// Preset color buttons, laid out in a 3-wide grid of 14px rows.
		// Red:
		GuiVM.button01 = new GuiButtonGeneral1VM(201, this.guiLeft + 25, this.guiTop + 119, 42, 14, References.localNameVC("viesmachines.item.color.114"), 0);
		// Green:
		GuiVM.button02 = new GuiButtonGeneral1VM(202, this.guiLeft + 67, this.guiTop + 119, 42, 14, References.localNameVC("viesmachines.item.color.77"), 0);
		// Blue:
		GuiVM.button03 = new GuiButtonGeneral1VM(203, this.guiLeft + 109, this.guiTop + 119, 42, 14, References.localNameVC("viesmachines.item.color.10"), 0);
		// Yellow:
		GuiVM.button04 = new GuiButtonGeneral1VM(204, this.guiLeft + 25, this.guiTop + 119 + (14 * 1), 42, 14, References.localNameVC("viesmachines.item.color.139"), 0);
		// Magenta:
		GuiVM.button05 = new GuiButtonGeneral1VM(205, this.guiLeft + 67, this.guiTop + 119 + (14 * 1), 42, 14, References.localNameVC("viesmachines.item.color.46"), 0);
		// Cyan:
		GuiVM.button06 = new GuiButtonGeneral1VM(206, this.guiLeft + 109, this.guiTop + 119 + (14 * 1), 42, 14, References.localNameVC("viesmachines.item.color.3"), 0);
		// Orange:
		GuiVM.button07 = new GuiButtonGeneral1VM(207, this.guiLeft + 25, this.guiTop + 119 + (14 * 2), 42, 14, References.localNameVC("viesmachines.item.color.100"), 0);
		// Purple:
		GuiVM.button08 = new GuiButtonGeneral1VM(208, this.guiLeft + 67, this.guiTop + 119 + (14 * 2), 42, 14, References.localNameVC("viesmachines.item.color.113"), 0);
		// Teal:
		GuiVM.button09 = new GuiButtonGeneral1VM(209, this.guiLeft + 109, this.guiTop + 119 + (14 * 2), 42, 14, References.localNameVC("viesmachines.item.color.131"), 0);
		// Crimson:
		GuiVM.button10 = new GuiButtonGeneral1VM(210, this.guiLeft + 25, this.guiTop + 119 + (14 * 3), 42, 14, References.localNameVC("viesmachines.item.color.20"), 0);
		// Black:
		GuiVM.button11 = new GuiButtonGeneral1VM(211, this.guiLeft + 67, this.guiTop + 119 + (14 * 3), 42, 14, References.localNameVC("viesmachines.item.color.8"), 0);
		// White:
		GuiVM.button12 = new GuiButtonGeneral1VM(212, this.guiLeft + 109, this.guiTop + 119 + (14 * 3), 42, 14, References.localNameVC("viesmachines.item.color.137"), 0);

		//--------------------------------------------------
		this.buttonList.add(GuiVM.button01);
		this.buttonList.add(GuiVM.button02);
		this.buttonList.add(GuiVM.button03);
		this.buttonList.add(GuiVM.button04);
		this.buttonList.add(GuiVM.button05);
		this.buttonList.add(GuiVM.button06);
		this.buttonList.add(GuiVM.button07);
		this.buttonList.add(GuiVM.button08);
		this.buttonList.add(GuiVM.button09);
		this.buttonList.add(GuiVM.button10);
		this.buttonList.add(GuiVM.button11);
		this.buttonList.add(GuiVM.button12);

		this.buttonList.add(GuiVM.button00);
		this.buttonList.add(GuiVM.buttonApply);
		this.buttonList.add(GuiVM.buttonBack);

		this.buttonList.add(GuiVM.buttonRotateLeft);
		this.buttonList.add(GuiVM.buttonRotateRight);
		this.buttonList.add(GuiVM.buttonRidingPlayerTrue);
		this.buttonList.add(GuiVM.buttonRidingPlayerFalse);
		this.buttonList.add(GuiVM.buttonUndo);

		this.buttonList.add(GuiVM.buttonMM1);
		this.buttonList.add(GuiVM.buttonMM2);
		this.buttonList.add(GuiVM.buttonMM3);

		// This IS the color screen, so its own menu tab is disabled.
		GuiVM.buttonMM3.enabled = false;
	}

	/**
	 * Sets the pending preview color to the given preset and marks a custom
	 * color as active. Does not touch the machine itself — the new values are
	 * only sent to the server when Apply is pressed.
	 */
	private void selectPreset(int red, int green, int blue)
	{
		this.machineColorActive = true;
		this.textRedNumber = red;
		this.textGreenNumber = green;
		this.textBlueNumber = blue;
	}

	@Override
	protected void actionPerformed(GuiButton parButton)
	{
		super.actionPerformed(parButton);

		switch (parButton.id)
		{
			// Remove: reset local state and tell the server to drop the color.
			case 20:
				this.machineColorActive = false;
				this.textRedNumber = 0;
				this.textGreenNumber = 0;
				this.textBlueNumber = 0;
				NetworkHandler.sendToServer(new MessageHelperGuiMachineMenuCustomizePrimarySkinColorDefault());
				break;
			// Apply:
			case 21:
				NetworkHandler.sendToServer(new MessageHelperGuiMachineMenuCustomizePrimarySkinColorApply());
				break;
			// Back:
			case 22:
				NetworkHandler.sendToServer(new MessageGuiMachineMenuCustomize());
				break;
			//--------------------------------------------------
			case 201: this.selectPreset(255, 0, 0); break;       // Red
			case 202: this.selectPreset(0, 255, 0); break;       // Green
			case 203: this.selectPreset(0, 0, 255); break;       // Blue
			case 204: this.selectPreset(255, 255, 0); break;     // Yellow
			case 205: this.selectPreset(255, 0, 255); break;     // Magenta
			case 206: this.selectPreset(0, 255, 255); break;     // Cyan
			case 207: this.selectPreset(255, 165, 0); break;     // Orange
			case 208: this.selectPreset(128, 0, 128); break;     // Purple
			case 209: this.selectPreset(0, 128, 128); break;     // Teal
			case 210: this.selectPreset(220, 20, 60); break;     // Crimson
			case 211: this.selectPreset(1, 1, 1); break;         // Black (1,1,1, not 0,0,0 — 0/0/0 means "no color")
			case 212: this.selectPreset(255, 255, 255); break;   // White
			default:
				break;
		}

		// Rebuild the screen so the text boxes pick up the new values.
		this.buttonList.clear();
		this.initGui();
		this.updateScreen();
	}

	@Override
	protected void drawGuiContainerBackgroundLayer(float partialTicks, int mouseX, int mouseY)
	{
		super.drawGuiContainerBackgroundLayer(partialTicks, mouseX, mouseY);

		// Draws a gray box behind the main background texture:
		this.drawRect(this.guiLeft + 108, this.guiTop + 6, this.guiLeft + 168, this.guiTop + 64, Color.GRAY.getRGB());
		// Draws a black box behind the r/g/b input boxes:
		this.drawRect(this.guiLeft + 8, this.guiTop + 6, this.guiLeft + 108, this.guiTop + 84, Color.BLACK.getRGB());
		// Draws a black box behind the color name:
		this.drawRect(this.guiLeft + 108, this.guiTop + 78, this.guiLeft + 168, this.guiTop + 100, Color.BLACK.getRGB());

		// Colors, binds, and draws the background texture:
		GlStateManager.color(1.0F, 1.0F, 1.0F, 1.0F);
		this.mc.getTextureManager().bindTexture(TEXTURE);
		this.drawTexturedModalRect(this.guiLeft, this.guiTop, 0, 0, this.xSize, this.ySize);

		float red = this.textRedNumber / 255.0F;
		float green = this.textGreenNumber / 255.0F;
		float blue = this.textBlueNumber / 255.0F;

		// Color swatch tinted with the pending r/g/b values:
		GlStateManager.color(red, green, blue, 1.0F);
		this.drawTexturedModalRect(this.guiLeft + 108, this.guiTop + 91, 176, 65, 64, 25);
		GlStateManager.color(1.0F, 1.0F, 1.0F, 1.0F);

		// Name of the pending color (or "none" when no color is active):
		GlStateManager.pushMatrix();
		{
			GlStateManager.translate(this.guiLeft + 138.25, this.guiTop + 82.5, 0);
			GlStateManager.scale(.65, .65, .65);

			if (this.machineColorActive)
			{
				this.centeredString(getFontRenderer(), ColorHelperVM.getColorNameFromRgb(this.textRedNumber, this.textGreenNumber, this.textBlueNumber), 0, 0, Color.WHITE.getRGB());
			}
			else
			{
				this.centeredString(getFontRenderer(), References.localNameVC("viesmachines.button.none"), 0, 0, Color.WHITE.getRGB());
			}
		}
		GlStateManager.popMatrix();

		// Channel labels and their "0-255" hints above the three text boxes:
		this.drawScaledLabel(this.guiLeft + 23.5, this.guiTop + 32.5, References.localNameVC("viesmachines.main.red"), Color.RED.getRGB());
		this.drawScaledLabel(this.guiLeft + 55.5, this.guiTop + 32.5, References.localNameVC("viesmachines.main.green"), Color.GREEN.getRGB());
		this.drawScaledLabel(this.guiLeft + 87.5, this.guiTop + 32.5, References.localNameVC("viesmachines.main.blue"), Color.BLUE.getRGB());
		this.drawScaledLabel(this.guiLeft + 23.5, this.guiTop + 46, "0-255", Color.WHITE.getRGB());
		this.drawScaledLabel(this.guiLeft + 55.5, this.guiTop + 46, "0-255", Color.WHITE.getRGB());
		this.drawScaledLabel(this.guiLeft + 87.5, this.guiTop + 46, "0-255", Color.WHITE.getRGB());

		// Draws the 3 text boxes:
		this.textRed.drawTextBox();
		this.textGreen.drawTextBox();
		this.textBlue.drawTextBox();

		// Renders the 'Preview Entity' for the current machine:
		this.drawEntityOnScreen(this.guiLeft + 139, this.guiTop + 58, this.modelRotationHorizontal, 11, this.machine, this.modelRidingEntity);
	}

	/** Draws {@code text} centered at (x, y) at 0.75 scale in the given color. */
	private void drawScaledLabel(double x, double y, String text, int color)
	{
		GlStateManager.pushMatrix();
		{
			GlStateManager.translate(x, y, 0);
			GlStateManager.scale(0.75F, 0.75F, 0.75F);
			this.drawCenteredString(fontRenderer, text, 0, 0, color);
		}
		GlStateManager.popMatrix();
	}

	/**
	 * Draws a half-scale hovering tooltip near the mouse cursor.
	 *
	 * <p>The horizontal shift is derived from the length of
	 * {@code text.toString()} — the same heuristic the original screens use to
	 * keep long tooltips from running off the GUI.
	 *
	 * @param yOffset vertical offset from the cursor (+6 below, -13 above)
	 */
	private void drawScaledTooltip(List<String> text, int mouseX, int mouseY, int yOffset)
	{
		GlStateManager.pushMatrix();
		{
			int textNumber = text.toString().length();

			GlStateManager.translate(mouseX - this.guiLeft + 3 - textNumber - (textNumber / 2), mouseY - this.guiTop + yOffset, 0);
			GlStateManager.scale(0.5, 0.5, 0.5);
			this.drawHoveringText(text, 0, 0);
		}
		GlStateManager.popMatrix();
	}

	@Override
	protected void drawGuiContainerForegroundLayer(int mouseX, int mouseY)
	{
		super.drawGuiContainerForegroundLayer(mouseX, mouseY);

		// Primary Color heading:
		GlStateManager.pushMatrix();
		{
			GlStateManager.scale(0.75F, 0.75F, 0.75F);
			this.centeredString(fontRenderer, References.localNameVC("viesmachines.gui.tt.customize.primarycolor.primarycolor.0"), 74, 16, Color.BLACK.getRGB());
		}
		GlStateManager.popMatrix();

		// 'Preview':
		GlStateManager.pushMatrix();
		{
			GlStateManager.translate(138.5, 10, 0);
			GlStateManager.scale(0.5F, 0.5F, 0.5F);
			this.centeredString(fontRenderer, References.localNameVC("viesmachines.main.preview"), 0, 0, Color.WHITE.getRGB());
		}
		GlStateManager.popMatrix();

		// Draws a black line under the machine preview options buttons:
		this.drawRect(130, 75, 168, 76, Color.BLACK.getRGB());

		// Colors and binds the background texture:
		GlStateManager.color(1.0F, 1.0F, 1.0F, 1.0F);
		this.mc.getTextureManager().bindTexture(TEXTURE);

		// Preview Left Arrow symbol:
		GlStateManager.pushMatrix();
		{
			GlStateManager.translate(110, 67.75, 0);
			GlStateManager.scale(0.5F, 0.5F, 0.5F);
			this.drawTexturedModalRect(0, 0, 176, 16, 12, 12);
		}
		GlStateManager.popMatrix();

		// Preview Right Arrow symbol:
		GlStateManager.pushMatrix();
		{
			GlStateManager.translate(122, 67.75, 0);
			GlStateManager.scale(0.5F, 0.5F, 0.5F);
			this.drawTexturedModalRect(0, 0, 176, 28, 12, 12);
		}
		GlStateManager.popMatrix();

		// Preview Steve head symbol:
		GlStateManager.pushMatrix();
		{
			GlStateManager.translate(132, 67.5, 0);
			GlStateManager.scale(0.5F, 0.5F, 0.5F);
			this.drawTexturedModalRect(0, 0, 176, 40, 12, 12);
		}
		GlStateManager.popMatrix();

		// Preview Steve head 'X' symbol:
		GlStateManager.pushMatrix();
		{
			GlStateManager.translate(142, 67.5, 0);
			GlStateManager.scale(0.5F, 0.5F, 0.5F);
			this.drawTexturedModalRect(0, 0, 176, 52, 12, 12);
		}
		GlStateManager.popMatrix();

		// Preview Undo symbol:
		GlStateManager.pushMatrix();
		{
			GlStateManager.translate(161, 69, 0);
			GlStateManager.scale(0.25F, 0.25F, 0.25F);
			this.drawTexturedModalRect(0, 0, 176, 0, 16, 16);
		}
		GlStateManager.popMatrix();

		//--------------------------------------------------
		// Logic for mouse-over tooltip - Turn Left:
		if (mouseX >= this.guiLeft + 110 && mouseX <= this.guiLeft + 115
				&& mouseY >= this.guiTop + 68 && mouseY <= this.guiTop + 73)
		{
			List<String> text = new ArrayList<String>();

			if (this.isShiftKeyDown())
			{
				text.add(TextFormatting.YELLOW + References.localNameVC("viesmachines.gui.tt.general.previewturnleft.0"));
			}
			else
			{
				text.add(TextFormatting.YELLOW + References.localNameVC("viesmachines.gui.tt.general.previewturn.0"));
			}

			this.drawScaledTooltip(text, mouseX, mouseY, 6);
		}

		// Logic for mouse-over tooltip - Turn Right:
		if (mouseX >= this.guiLeft + 122 && mouseX <= this.guiLeft + 127
				&& mouseY >= this.guiTop + 68 && mouseY <= this.guiTop + 73)
		{
			List<String> text = new ArrayList<String>();

			if (this.isShiftKeyDown())
			{
				text.add(TextFormatting.YELLOW + References.localNameVC("viesmachines.gui.tt.general.previewturnright.0"));
			}
			else
			{
				text.add(TextFormatting.YELLOW + References.localNameVC("viesmachines.gui.tt.general.previewturn.0"));
			}

			this.drawScaledTooltip(text, mouseX, mouseY, 6);
		}

		// Logic for mouse-over tooltip - Apply:
		if (mouseX >= this.guiLeft + 7 && mouseX <= this.guiLeft + 7 + 41
				&& mouseY >= this.guiTop + 36 + 63 && mouseY <= this.guiTop + 36 + 63 + 13)
		{
			List<String> text = new ArrayList<String>();

			if (this.machine.getControllingPassenger() instanceof EntityPlayer)
			{
				EntityPlayer player = (EntityPlayer) this.machine.getControllingPassenger();

				if (!GuiVM.buttonApply.enabled && this.machine.getEnergy() < CostsVM.COST_FRAME_COLOR && !player.isCreative())
				{
					// Not enough energy to pay the recolor cost.
					text.add(TextFormatting.DARK_RED + "" + CostsVM.COST_FRAME_COLOR + " " + References.localNameVC("viesmachines.gui.tt.customize.color.cost.4"));
				}
				else if (!GuiVM.buttonApply.enabled)
				{
					text.add(TextFormatting.RED + References.localNameVC("viesmachines.gui.tt.customize.color.cost.0"));
				}
				else if (player.isCreative())
				{
					text.add(TextFormatting.GREEN + References.localNameVC("viesmachines.gui.tt.customize.color.cost.5"));
				}
				else if (this.machineTexture == 0 && this.machineTexture != this.machine.getVisualFrameTexture())
				{
					// NOTE(review): this branch compares texture state inside the
					// color screen — looks copied from the texture screen; TODO
					// confirm it is intended here.
					text.add(TextFormatting.YELLOW + References.localNameVC("viesmachines.gui.tt.customize.color.cost.3"));
				}
				else
				{
					text.add(TextFormatting.YELLOW + References.localNameVC("viesmachines.gui.tt.customize.color.cost.1") + " " + CostsVM.COST_FRAME_COLOR + " " + References.localNameVC("viesmachines.gui.tt.customize.color.cost.2"));
				}
			}

			this.drawScaledTooltip(text, mouseX, mouseY, -13);
		}

		// Logic for mouse-over tooltip - Remove:
		if (mouseX >= this.guiLeft + 7 && mouseX <= this.guiLeft + 7 + 41
				&& mouseY >= this.guiTop - 18 + 36 + 63 && mouseY <= this.guiTop - 18 + 36 + 63 + 13)
		{
			List<String> text = new ArrayList<String>();

			// Only show the hint while the Remove button is usable.
			if (GuiVM.button00.enabled)
			{
				text.add(TextFormatting.YELLOW + References.localNameVC("viesmachines.gui.tt.customize.color.cost.3"));
			}

			this.drawScaledTooltip(text, mouseX, mouseY, -13);
		}
	}

	@Override
	protected void keyTyped(char typedChar, int keyCode) throws IOException
	{
		super.keyTyped(typedChar, keyCode);

		this.textRed.textboxKeyTyped(typedChar, keyCode);
		this.textGreen.textboxKeyTyped(typedChar, keyCode);
		this.textBlue.textboxKeyTyped(typedChar, keyCode);

		// ESC (keyCode 1), the mod's menu keybind, or the inventory key close the screen.
		if (keyCode == 1 || keyCode == Keybinds.openGuiMenu.getKeyCode()
				|| this.mc.gameSettings.keyBindInventory.isActiveAndMatches(keyCode))
		{
			this.mc.player.closeScreen();
		}
	}

	/**
	 * Parses one color text box and clamps the result to the valid 0-255
	 * range, rewriting the box's text when it had to be corrected.
	 *
	 * <p>Fix: the original only clamped the upper bound, so a typed negative
	 * value (e.g. "-5") passed through and produced a negative channel in
	 * {@link GlStateManager#color}.
	 *
	 * @return the sanitized channel value in [0, 255]
	 */
	private int sanitizeColorField(GuiTextField field)
	{
		int value;

		try
		{
			value = Integer.parseInt(field.getText());
		}
		catch (NumberFormatException ex)
		{
			// Empty or non-numeric input falls back to 0.
			field.setText("0");
			value = 0;
		}

		if (value > 255)
		{
			field.setText("255");
			value = 255;
		}

		if (value < 0)
		{
			field.setText("0");
			value = 0;
		}

		return value;
	}

	@Override
	public void updateScreen()
	{
		super.updateScreen();

		// Fix invalid input in the Red/Green/Blue boxes:
		if (textRed.getText() != null)
		{
			textRedNumber = this.sanitizeColorField(textRed);
		}

		if (textGreen.getText() != null)
		{
			textGreenNumber = this.sanitizeColorField(textGreen);
		}

		if (textBlue.getText() != null)
		{
			textBlueNumber = this.sanitizeColorField(textBlue);
		}

		// Updates the cursor position of each box:
		this.textRed.updateCursorCounter();
		this.textGreen.updateCursorCounter();
		this.textBlue.updateCursorCounter();

		// Deals with the Preview player buttons toggle:
		if (!this.modelRidingEntity)
		{
			GuiVM.buttonRidingPlayerTrue.enabled = true;
			GuiVM.buttonRidingPlayerFalse.enabled = false;
		}
		else
		{
			GuiVM.buttonRidingPlayerTrue.enabled = false;
			GuiVM.buttonRidingPlayerFalse.enabled = true;
		}

		// Turn machine preview left with shift down:
		if (GuiVM.buttonRotateLeft.isMouseOver() && this.isShiftKeyDown())
		{
			this.modelRotationHorizontal = this.modelRotationHorizontal - 2;
		}

		// Turn machine preview right with shift down:
		if (GuiVM.buttonRotateRight.isMouseOver() && this.isShiftKeyDown())
		{
			this.modelRotationHorizontal = this.modelRotationHorizontal + 2;
		}

		// Checks to see if the 'Apply' button is enabled: disabled when nothing
		// changed, always on in creative, otherwise gated on the energy cost.
		if (this.machine.getVisualFrameColorRed() == this.textRedNumber
				&& this.machine.getVisualFrameColorGreen() == this.textGreenNumber
				&& this.machine.getVisualFrameColorBlue() == this.textBlueNumber)
		{
			GuiVM.buttonApply.enabled = false;
		}
		else if (this.mc.player.isCreative())
		{
			GuiVM.buttonApply.enabled = true;
		}
		else if (CostsVM.COST_FRAME_COLOR > this.machine.getEnergy())
		{
			GuiVM.buttonApply.enabled = false;
		}
		else
		{
			GuiVM.buttonApply.enabled = true;
		}

		// 'Remove' is only enabled while the machine actually has a color:
		GuiVM.button00.enabled = this.machine.getVisualFrameColor();
	}

	@Override
	protected void mouseClicked(int x, int y, int btn) throws IOException
	{
		super.mouseClicked(x, y, btn);

		this.textRed.mouseClicked(x, y, btn);
		this.textGreen.mouseClicked(x, y, btn);
		this.textBlue.mouseClicked(x, y, btn);
	}

	/**
	 * Renders the preview entity tinted with the pending (unsaved) color.
	 *
	 * <p>The machine's real color fields are swapped out for the preview
	 * values around the super call and restored afterwards so only the
	 * on-screen preview — not the actual machine — shows the pending color.
	 */
	@Override
	protected void drawEntityOnScreen(int posX, int posY, int horizontalIn, int scale, Entity entityIn, boolean ridingEntityIn)
	{
		EntityMachineBase machineIn = (EntityMachineBase) entityIn;

		boolean currentColor = machineIn.getVisualFrameColor();
		int currentR = machineIn.getVisualFrameColorRed();
		int currentG = machineIn.getVisualFrameColorGreen();
		int currentB = machineIn.getVisualFrameColorBlue();

		machineIn.setVisualFrameColor(this.machineColorActive);
		machineIn.setVisualFrameColorRed(this.textRedNumber);
		machineIn.setVisualFrameColorGreen(this.textGreenNumber);
		machineIn.setVisualFrameColorBlue(this.textBlueNumber);

		super.drawEntityOnScreen(posX, posY, horizontalIn, scale, entityIn, ridingEntityIn);

		machineIn.setVisualFrameColor(currentColor);
		machineIn.setVisualFrameColorRed(currentR);
		machineIn.setVisualFrameColorGreen(currentG);
		machineIn.setVisualFrameColorBlue(currentB);
	}
}
/* * Copyright 2009 The Closure Compiler Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.javascript.jscomp; import com.google.common.base.Joiner; import com.google.common.base.Preconditions; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; import com.google.javascript.rhino.JSDocInfo; import com.google.javascript.rhino.Node; import com.google.javascript.rhino.Token; import com.google.javascript.rhino.jstype.FunctionType; import com.google.javascript.rhino.jstype.JSType; import java.util.Comparator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeSet; /** * Creates an externs file containing all exported symbols and properties * for later consumption. * */ final class ExternExportsPass extends NodeTraversal.AbstractPostOrderCallback implements CompilerPass { static final DiagnosticType EXPORTED_FUNCTION_UNKNOWN_PARAMETER_TYPE = DiagnosticType.warning( "JSC_EXPORTED_FUNCTION_UNKNOWN_PARAMETER_TYPE", "Unable to determine type of parameter {0} for exported function {1}"); static final DiagnosticType EXPORTED_FUNCTION_UNKNOWN_RETURN_TYPE = DiagnosticType.warning( "JSC_EXPORTED_FUNCTION_UNKNOWN_RETURN_TYPE", "Unable to determine return type for exported function {0}"); /** The exports found. */ private final List<Export> exports; /** A map of all assigns to their parent nodes. 
*/ private final Map<String, Node> definitionMap; /** The parent compiler. */ private final AbstractCompiler compiler; /** The AST root which holds the externs generated. */ private final Node externsRoot; /** A mapping of internal paths to exported paths. */ private final Map<String, String> mappedPaths; /** A list of exported paths. */ private final Set<String> alreadyExportedPaths; /** A list of function names used to export symbols. */ private List<String> exportSymbolFunctionNames; /** A list of function names used to export properties. */ private List<String> exportPropertyFunctionNames; private abstract class Export { protected final String symbolName; protected final Node value; Export(String symbolName, Node value) { this.symbolName = symbolName; this.value = value; } /** * Generates the externs representation of this export and appends * it to the externsRoot AST. */ void generateExterns() { appendExtern(getExportedPath(), getFunctionValue(value)); } /** * Returns the path exported by this export. */ abstract String getExportedPath(); /** * Appends the exported function and all paths necessary for the path to be * declared. For example, for a property "a.b.c", the initializers for * paths "a", "a.b" will be appended (if they have not already) and a.b.c * will be initialized with the exported version of the function: * <pre> * var a = {}; * a.b = {}; * a.b.c = function(x,y) { } * </pre> */ protected void appendExtern(String path, Node functionToExport) { List<String> pathPrefixes = computePathPrefixes(path); for (int i = 0; i < pathPrefixes.size(); ++i) { String pathPrefix = pathPrefixes.get(i); /* The complete path (the last path prefix) must be emitted and * it gets initialized to the externed version of the value. 
*/ boolean isCompletePathPrefix = (i == pathPrefixes.size() - 1); boolean skipPathPrefix = pathPrefix.endsWith(".prototype") || (alreadyExportedPaths.contains(pathPrefix) && !isCompletePathPrefix); if (!skipPathPrefix) { Node initializer; /* Namespaces get initialized to {}, functions to * externed versions of their value, and if we can't * figure out where the value came from we initialize * it to {}. * * Since externs are always exported in sorted order, * we know that if we export a.b = function() {} and later * a.b.c = function then a.b will always be in alreadyExportedPaths * when we emit a.b.c and thus we will never overwrite the function * exported for a.b with a namespace. */ if (isCompletePathPrefix && functionToExport != null) { initializer = createExternFunction(functionToExport); } else { initializer = new Node(Token.OBJECTLIT); } appendPathDefinition(pathPrefix, initializer); } } } /** * Computes a list of the path prefixes constructed from the components * of the path. * <pre> * E.g., if the path is: * "a.b.c" * then then path prefixes will be * ["a","a.b","a.b.c"]: * </pre> */ private List<String> computePathPrefixes(String path) { List<String> pieces = Lists.newArrayList(path.split("\\.")); List<String> pathPrefixes = Lists.newArrayList(); for (int i = 0; i < pieces.size(); i++) { pathPrefixes.add(Joiner.on(".").join(Iterables.limit(pieces, i + 1))); } return pathPrefixes; } private void appendPathDefinition(String path, Node initializer) { Node pathDefinition; if (!path.contains(".")) { pathDefinition = NodeUtil.newVarNode(path, initializer); } else { Node qualifiedPath = NodeUtil.newQualifiedNameNode( compiler.getCodingConvention(), path, -1, -1); pathDefinition = NodeUtil.newExpr(new Node(Token.ASSIGN, qualifiedPath, initializer)); } externsRoot.addChildToBack(pathDefinition); alreadyExportedPaths.add(path); } /** * Given a function to export, create the empty function that * will be put in the externs file. 
This extern function should have * the same type as the original function and the same parameter * name but no function body. * * We create a warning here if the the function to export is missing * parameter or return types. */ private Node createExternFunction(Node exportedFunction) { List<Node> externParameters = Lists.newLinkedList(); for (Node param : NodeUtil.getFunctionParameters(exportedFunction).children()) { externParameters.add(param.cloneNode()); } Node externFunction = NodeUtil.newFunctionNode("", externParameters, new Node(Token.BLOCK), -1, -1); checkForFunctionsWithUnknownTypes(exportedFunction); externFunction.setJSType(exportedFunction.getJSType()); return externFunction; } /** * Warn the user if there is an exported function for which a parameter * or return type is unknown. */ private void checkForFunctionsWithUnknownTypes(Node function) { Preconditions.checkArgument(NodeUtil.isFunction(function)); FunctionType functionType = (FunctionType) function.getJSType(); if (functionType == null) { // No type information is available (CheckTypes was probably not run) // so just bail. return; } /* We must get the JSDocInfo from the function's type since the function * itself does not have an associated JSDocInfo node. */ JSDocInfo functionJSDocInfo = functionType.getJSDocInfo(); JSType returnType = functionType.getReturnType(); /* It is OK if a constructor doesn't have a return type */ if (!functionType.isConstructor() && (returnType == null || returnType.isUnknownType())) { reportUnknownReturnType(function); } /* We can't just use the function's type's getParameters() to get the * parameter nodes because the nodes returned from that method * do not have names or locations. Similarly, the function's AST parameter * nodes do not have JSTypes(). So we walk both lists of parameter nodes * in lock step getting parameter names from the first and types from the * second. 
*/ Node astParameterIterator = NodeUtil.getFunctionParameters(function) .getFirstChild(); Node typeParameterIterator = functionType.getParametersNode() .getFirstChild(); while (astParameterIterator != null) { JSType parameterType = typeParameterIterator.getJSType(); if (parameterType == null || parameterType.isUnknownType()) { reportUnknownParameterType(function, astParameterIterator); } astParameterIterator = astParameterIterator.getNext(); typeParameterIterator = typeParameterIterator.getNext(); } } private void reportUnknownParameterType(Node function, Node parameter) { compiler.report(JSError.make(NodeUtil.getSourceName(function), parameter, CheckLevel.WARNING, EXPORTED_FUNCTION_UNKNOWN_PARAMETER_TYPE, NodeUtil.getFunctionName(function), parameter.getString())); } private void reportUnknownReturnType(Node function) { compiler.report(JSError.make(NodeUtil.getSourceName(function), function, CheckLevel.WARNING, EXPORTED_FUNCTION_UNKNOWN_RETURN_TYPE, NodeUtil.getFunctionName(function))); } /** * If the given value is a qualified name which refers * a function, the function's node is returned. Otherwise, * {@code null} is returned. */ protected Node getFunctionValue(Node qualifiedNameNode) { String qualifiedName = value.getQualifiedName(); if (qualifiedName == null) { return null; } Node definitionParent = definitionMap.get(qualifiedName); if (definitionParent == null) { return null; } Node definition; switch(definitionParent.getType()) { case Token.ASSIGN: definition = definitionParent.getLastChild(); break; case Token.VAR: definition = definitionParent.getLastChild().getLastChild(); break; default: return null; } if (definition.getType() != Token.FUNCTION) { return null; } return definition; } } /** * A symbol export. 
*/ private class SymbolExport extends Export { public SymbolExport(String symbolName, Node value) { super(symbolName, value); String qualifiedName = value.getQualifiedName(); if (qualifiedName != null) { mappedPaths.put(qualifiedName, symbolName); } } @Override String getExportedPath() { return symbolName; } } /** * A property export. */ private class PropertyExport extends Export { private final String exportPath; public PropertyExport(String exportPath, String symbolName, Node value) { super(symbolName, value); this.exportPath = exportPath; } @Override String getExportedPath() { // Find the longest path that has been mapped (if any). List<String> pieces = Lists.newArrayList(exportPath.split("\\.")); for (int i = pieces.size(); i > 0; i--) { // Find the path of the current length. String cPath = Joiner.on(".").join(Iterables.limit(pieces, i)); // If this path is mapped, return the mapped path plus any remaining // pieces. if (mappedPaths.containsKey(cPath)) { String newPath = mappedPaths.get(cPath); if (i < pieces.size()) { newPath += "." + Joiner.on(".").join(Iterables.skip(pieces, i)); } return newPath + "." + symbolName; } } return exportPath + "." + symbolName; } } /** * Creates an instance. 
*/ ExternExportsPass(AbstractCompiler compiler) { this.exports = Lists.newArrayList(); this.compiler = compiler; this.definitionMap = Maps.newHashMap(); this.externsRoot = new Node(Token.BLOCK); this.externsRoot.setIsSyntheticBlock(true); this.alreadyExportedPaths = Sets.newHashSet(); this.mappedPaths = Maps.newHashMap(); initExportMethods(); } private void initExportMethods() { exportSymbolFunctionNames = Lists.newArrayList(); exportPropertyFunctionNames = Lists.newArrayList(); // From Closure: // goog.exportSymbol = function(publicName, symbol) // goog.exportProperty = function(object, publicName, symbol) CodingConvention convention = compiler.getCodingConvention(); exportSymbolFunctionNames.add(convention.getExportSymbolFunction()); exportPropertyFunctionNames.add(convention.getExportPropertyFunction()); // Another common one used inside google: exportSymbolFunctionNames.add("google_exportSymbol"); exportPropertyFunctionNames.add("google_exportProperty"); } @Override public void process(Node externs, Node root) { new NodeTraversal(compiler, this).traverse(root); // Sort by path length to ensure that the longer // paths (which may depend on the shorter ones) // come later. Set<Export> sorted = new TreeSet<Export>(new Comparator<Export>() { @Override public int compare(Export e1, Export e2) { return e1.getExportedPath().compareTo(e2.getExportedPath()); } }); sorted.addAll(exports); for (Export export : sorted) { export.generateExterns(); } } /** * Returns the generated externs. 
*/ public String getGeneratedExterns() { CodePrinter.Builder builder = new CodePrinter.Builder(externsRoot) .setPrettyPrint(true).setOutputTypes(true); return builder.build(); } @Override public void visit(NodeTraversal t, Node n, Node parent) { switch (n.getType()) { case Token.NAME: case Token.GETPROP: String name = n.getQualifiedName(); if (name == null) { return; } if (parent.getType() == Token.ASSIGN || parent.getType() == Token.VAR) { definitionMap.put(n.getQualifiedName(), parent); } // Only handle function calls. This avoids assignments // that do not export items directly. if (parent.getType() != Token.CALL) { return; } if (exportPropertyFunctionNames.contains(n.getQualifiedName())) { handlePropertyExport(parent); } if (exportSymbolFunctionNames.contains(n.getQualifiedName())) { handleSymbolExport(parent); } } } private void handleSymbolExport(Node parent) { // Ensure that we only check valid calls with the 2 arguments // (plus the GETPROP node itself). if (parent.getChildCount() != 3) { return; } Node thisNode = parent.getFirstChild(); Node nameArg = thisNode.getNext(); Node valueArg = nameArg.getNext(); // Confirm the arguments are the expected types. If they are not, // then we have an export that we cannot statically identify. if (nameArg.getType() != Token.STRING) { return; } // Add the export to the list. this.exports.add(new SymbolExport(nameArg.getString(), valueArg)); } private void handlePropertyExport(Node parent) { // Ensure that we only check valid calls with the 3 arguments // (plus the GETPROP node itself). if (parent.getChildCount() != 4) { return; } Node thisNode = parent.getFirstChild(); Node objectArg = thisNode.getNext(); Node nameArg = objectArg.getNext(); Node valueArg = nameArg.getNext(); // Confirm the arguments are the expected types. If they are not, // then we have an export that we cannot statically identify. 
if (objectArg.getQualifiedName() == null) { return; } if (nameArg.getType() != Token.STRING) { return; } // Add the export to the list. this.exports.add( new PropertyExport(objectArg.getQualifiedName(), nameArg.getString(), valueArg)); } }
/** * The contents of this file are subject to the license and copyright * detailed in the LICENSE and NOTICE files at the root of the source * tree and available online at * * http://www.dspace.org/license/ */ package org.dspace.app.xmlui.wing.element; import java.util.ArrayList; import org.dspace.app.xmlui.wing.AttributeMap; import org.dspace.app.xmlui.wing.Message; import org.dspace.app.xmlui.wing.WingContext; import org.dspace.app.xmlui.wing.WingException; import org.xml.sax.ContentHandler; import org.xml.sax.SAXException; import org.xml.sax.ext.LexicalHandler; import org.xml.sax.helpers.NamespaceSupport; /** * Class representing a set of referenced metadata. * * @author Scott Phillips */ public class ReferenceSet extends AbstractWingElement implements StructuralElement { /** The name of the referenceSet element */ public static final String E_REFERENCE_SET = "referenceSet"; /** The name of the orderBy attribute */ public static final String A_ORDER_BY = "orderBy"; /** The name of the type attribute */ public static final String A_TYPE = "type"; /** The possible interactive division methods: get,post, or multipart. */ public static final String TYPE_SUMMARY_LIST = "summaryList"; public static final String TYPE_SUMMARY_VIEW = "summaryView"; public static final String TYPE_DETAIL_LIST = "detailList"; public static final String TYPE_DETAIL_VIEW = "detailView"; /** The possible interactive division methods names collected into one array */ public static final String[] TYPES = { TYPE_SUMMARY_LIST, TYPE_SUMMARY_VIEW, TYPE_DETAIL_LIST, TYPE_DETAIL_VIEW }; /** The name assigned to this metadata set */ private String name; /** The ordering mechanism to use. 
*/ private String orderBy; /** The reference type, see TYPES defined above */ private String type; /** Special rendering instructions */ private String rend; /** The head label for this referenceset */ private Head head; /** All content of this container, items & lists */ private java.util.List<AbstractWingElement> contents = new ArrayList<AbstractWingElement>(); /** * Construct a new referenceSet * * @param context * (Required) The context this element is contained in, such as * where to route SAX events and what i18n catalogue to use. * @param childreference * Whether this is a child reference (not requiring a name). * @param name * (May be null) a local identifier used to differentiate the * element from its siblings. * @param type * (Required) The type of reference set which determines the level * of detail for the metadata rendered. See TYPES for a list of * available types. * @param orderBy * (May be null) Determines the ordering of referenced metadata. * @param rend * (May be null) a rendering hint used to override the default * display of the element. */ protected ReferenceSet(WingContext context, boolean childreference, String name, String type, String orderBy, String rend) throws WingException { super(context); // Names are only required for parent reference sets. if (!childreference) { require(name, "The 'name' parameter is required for reference sets."); } restrict( type, TYPES, "The 'method' parameter must be one of these values: 'summaryList', 'summaryView', 'detailList', or 'detailView'."); this.name = name; this.type = type; this.orderBy = orderBy; this.rend = rend; } /** * Set the head element which is the label associated with this referenceset. */ public Head setHead() throws WingException { this.head = new Head(context, null); return head; } /** * Set the head element which is the label associated with this referenceset. 
* * @param characters * (May be null) Unprocessed characters to be referenced */ public void setHead(String characters) throws WingException { Head head = this.setHead(); head.addContent(characters); } /** * Set the head element which is the label associated with this referenceset. * * @param message * (Required) A key into the i18n catalogue for translation into * the user's preferred language. */ public void setHead(Message message) throws WingException { Head head = this.setHead(); head.addContent(message); } /** * Add an object reference. * * @param object * (Required) The referenced object. */ public Reference addReference(Object object) throws WingException { Reference reference = new Reference(context, object); contents.add(reference); return reference; } /** * Translate this metadata inclusion set to SAX * * @param contentHandler * (Required) The registered contentHandler where SAX events * should be routed too. * @param lexicalHandler * (Required) The registered lexicalHandler where lexical * events (such as CDATA, DTD, etc) should be routed too. * @param namespaces * (Required) SAX Helper class to keep track of namespaces able * to determine the correct prefix for a given namespace URI. 
*/ public void toSAX(ContentHandler contentHandler, LexicalHandler lexicalHandler, NamespaceSupport namespaces) throws SAXException { AttributeMap attributes = new AttributeMap(); if (name != null) { attributes.put(A_NAME, name); } if (name != null) { attributes.put(A_ID, context.generateID(E_REFERENCE_SET, name)); } attributes.put(A_TYPE, type); if (orderBy != null) { attributes.put(A_ORDER_BY, orderBy); } if (rend != null) { attributes.put(A_RENDER, rend); } startElement(contentHandler, namespaces, E_REFERENCE_SET, attributes); if (head != null) { head.toSAX(contentHandler, lexicalHandler, namespaces); } for (AbstractWingElement content : contents) { content.toSAX(contentHandler, lexicalHandler, namespaces); } endElement(contentHandler, namespaces, E_REFERENCE_SET); } /** * dispose */ public void dispose() { if (contents != null) { for (AbstractWingElement content : contents) { content.dispose(); } contents.clear(); } contents = null; super.dispose(); } }
package com.my.core.bean.order; import java.util.ArrayList; import java.util.Date; import java.util.List; public class OrderQuery { protected String orderByClause; protected boolean distinct; protected List<Criteria> oredCriteria; protected Integer pageNo = 1; protected Integer startRow; protected Integer pageSize = 10; protected String fields; public OrderQuery() { oredCriteria = new ArrayList<Criteria>(); } public void setOrderByClause(String orderByClause) { this.orderByClause = orderByClause; } public String getOrderByClause() { return orderByClause; } public void setDistinct(boolean distinct) { this.distinct = distinct; } public boolean isDistinct() { return distinct; } public List<Criteria> getOredCriteria() { return oredCriteria; } public void or(Criteria criteria) { oredCriteria.add(criteria); } public Criteria or() { Criteria criteria = createCriteriaInternal(); oredCriteria.add(criteria); return criteria; } public Criteria createCriteria() { Criteria criteria = createCriteriaInternal(); if (oredCriteria.size() == 0) { oredCriteria.add(criteria); } return criteria; } protected Criteria createCriteriaInternal() { Criteria criteria = new Criteria(); return criteria; } public void clear() { oredCriteria.clear(); orderByClause = null; distinct = false; } public void setPageNo(Integer pageNo) { this.pageNo=pageNo; this.startRow = (pageNo-1)*this.pageSize; } public Integer getPageNo() { return pageNo; } public void setStartRow(Integer startRow) { this.startRow=startRow; } public Integer getStartRow() { return startRow; } public void setPageSize(Integer pageSize) { this.pageSize=pageSize; this.startRow = (pageNo-1)*this.pageSize; } public Integer getPageSize() { return pageSize; } public void setFields(String fields) { this.fields=fields; } public String getFields() { return fields; } protected abstract static class GeneratedCriteria { protected List<Criterion> criteria; protected GeneratedCriteria() { super(); criteria = new ArrayList<Criterion>(); } public 
boolean isValid() { return criteria.size() > 0; } public List<Criterion> getAllCriteria() { return criteria; } public List<Criterion> getCriteria() { return criteria; } protected void addCriterion(String condition) { if (condition == null) { throw new RuntimeException("Value for condition cannot be null"); } criteria.add(new Criterion(condition)); } protected void addCriterion(String condition, Object value, String property) { if (value == null) { throw new RuntimeException("Value for " + property + " cannot be null"); } criteria.add(new Criterion(condition, value)); } protected void addCriterion(String condition, Object value1, Object value2, String property) { if (value1 == null || value2 == null) { throw new RuntimeException("Between values for " + property + " cannot be null"); } criteria.add(new Criterion(condition, value1, value2)); } public Criteria andIdIsNull() { addCriterion("id is null"); return (Criteria) this; } public Criteria andIdIsNotNull() { addCriterion("id is not null"); return (Criteria) this; } public Criteria andIdEqualTo(Long value) { addCriterion("id =", value, "id"); return (Criteria) this; } public Criteria andIdNotEqualTo(Long value) { addCriterion("id <>", value, "id"); return (Criteria) this; } public Criteria andIdGreaterThan(Long value) { addCriterion("id >", value, "id"); return (Criteria) this; } public Criteria andIdGreaterThanOrEqualTo(Long value) { addCriterion("id >=", value, "id"); return (Criteria) this; } public Criteria andIdLessThan(Long value) { addCriterion("id <", value, "id"); return (Criteria) this; } public Criteria andIdLessThanOrEqualTo(Long value) { addCriterion("id <=", value, "id"); return (Criteria) this; } public Criteria andIdIn(List<Long> values) { addCriterion("id in", values, "id"); return (Criteria) this; } public Criteria andIdNotIn(List<Long> values) { addCriterion("id not in", values, "id"); return (Criteria) this; } public Criteria andIdBetween(Long value1, Long value2) { addCriterion("id between", 
value1, value2, "id"); return (Criteria) this; } public Criteria andIdNotBetween(Long value1, Long value2) { addCriterion("id not between", value1, value2, "id"); return (Criteria) this; } public Criteria andDeliverFeeIsNull() { addCriterion("deliver_fee is null"); return (Criteria) this; } public Criteria andDeliverFeeIsNotNull() { addCriterion("deliver_fee is not null"); return (Criteria) this; } public Criteria andDeliverFeeEqualTo(Float value) { addCriterion("deliver_fee =", value, "deliverFee"); return (Criteria) this; } public Criteria andDeliverFeeNotEqualTo(Float value) { addCriterion("deliver_fee <>", value, "deliverFee"); return (Criteria) this; } public Criteria andDeliverFeeGreaterThan(Float value) { addCriterion("deliver_fee >", value, "deliverFee"); return (Criteria) this; } public Criteria andDeliverFeeGreaterThanOrEqualTo(Float value) { addCriterion("deliver_fee >=", value, "deliverFee"); return (Criteria) this; } public Criteria andDeliverFeeLessThan(Float value) { addCriterion("deliver_fee <", value, "deliverFee"); return (Criteria) this; } public Criteria andDeliverFeeLessThanOrEqualTo(Float value) { addCriterion("deliver_fee <=", value, "deliverFee"); return (Criteria) this; } public Criteria andDeliverFeeIn(List<Float> values) { addCriterion("deliver_fee in", values, "deliverFee"); return (Criteria) this; } public Criteria andDeliverFeeNotIn(List<Float> values) { addCriterion("deliver_fee not in", values, "deliverFee"); return (Criteria) this; } public Criteria andDeliverFeeBetween(Float value1, Float value2) { addCriterion("deliver_fee between", value1, value2, "deliverFee"); return (Criteria) this; } public Criteria andDeliverFeeNotBetween(Float value1, Float value2) { addCriterion("deliver_fee not between", value1, value2, "deliverFee"); return (Criteria) this; } public Criteria andTotalFeeIsNull() { addCriterion("total_fee is null"); return (Criteria) this; } public Criteria andTotalFeeIsNotNull() { addCriterion("total_fee is not null"); 
return (Criteria) this; } public Criteria andTotalFeeEqualTo(Float value) { addCriterion("total_fee =", value, "totalFee"); return (Criteria) this; } public Criteria andTotalFeeNotEqualTo(Float value) { addCriterion("total_fee <>", value, "totalFee"); return (Criteria) this; } public Criteria andTotalFeeGreaterThan(Float value) { addCriterion("total_fee >", value, "totalFee"); return (Criteria) this; } public Criteria andTotalFeeGreaterThanOrEqualTo(Float value) { addCriterion("total_fee >=", value, "totalFee"); return (Criteria) this; } public Criteria andTotalFeeLessThan(Float value) { addCriterion("total_fee <", value, "totalFee"); return (Criteria) this; } public Criteria andTotalFeeLessThanOrEqualTo(Float value) { addCriterion("total_fee <=", value, "totalFee"); return (Criteria) this; } public Criteria andTotalFeeIn(List<Float> values) { addCriterion("total_fee in", values, "totalFee"); return (Criteria) this; } public Criteria andTotalFeeNotIn(List<Float> values) { addCriterion("total_fee not in", values, "totalFee"); return (Criteria) this; } public Criteria andTotalFeeBetween(Float value1, Float value2) { addCriterion("total_fee between", value1, value2, "totalFee"); return (Criteria) this; } public Criteria andTotalFeeNotBetween(Float value1, Float value2) { addCriterion("total_fee not between", value1, value2, "totalFee"); return (Criteria) this; } public Criteria andOrderPriceIsNull() { addCriterion("order_price is null"); return (Criteria) this; } public Criteria andOrderPriceIsNotNull() { addCriterion("order_price is not null"); return (Criteria) this; } public Criteria andOrderPriceEqualTo(Float value) { addCriterion("order_price =", value, "orderPrice"); return (Criteria) this; } public Criteria andOrderPriceNotEqualTo(Float value) { addCriterion("order_price <>", value, "orderPrice"); return (Criteria) this; } public Criteria andOrderPriceGreaterThan(Float value) { addCriterion("order_price >", value, "orderPrice"); return (Criteria) this; } public 
Criteria andOrderPriceGreaterThanOrEqualTo(Float value) { addCriterion("order_price >=", value, "orderPrice"); return (Criteria) this; } public Criteria andOrderPriceLessThan(Float value) { addCriterion("order_price <", value, "orderPrice"); return (Criteria) this; } public Criteria andOrderPriceLessThanOrEqualTo(Float value) { addCriterion("order_price <=", value, "orderPrice"); return (Criteria) this; } public Criteria andOrderPriceIn(List<Float> values) { addCriterion("order_price in", values, "orderPrice"); return (Criteria) this; } public Criteria andOrderPriceNotIn(List<Float> values) { addCriterion("order_price not in", values, "orderPrice"); return (Criteria) this; } public Criteria andOrderPriceBetween(Float value1, Float value2) { addCriterion("order_price between", value1, value2, "orderPrice"); return (Criteria) this; } public Criteria andOrderPriceNotBetween(Float value1, Float value2) { addCriterion("order_price not between", value1, value2, "orderPrice"); return (Criteria) this; } public Criteria andPaymentWayIsNull() { addCriterion("payment_way is null"); return (Criteria) this; } public Criteria andPaymentWayIsNotNull() { addCriterion("payment_way is not null"); return (Criteria) this; } public Criteria andPaymentWayEqualTo(Integer value) { addCriterion("payment_way =", value, "paymentWay"); return (Criteria) this; } public Criteria andPaymentWayNotEqualTo(Integer value) { addCriterion("payment_way <>", value, "paymentWay"); return (Criteria) this; } public Criteria andPaymentWayGreaterThan(Integer value) { addCriterion("payment_way >", value, "paymentWay"); return (Criteria) this; } public Criteria andPaymentWayGreaterThanOrEqualTo(Integer value) { addCriterion("payment_way >=", value, "paymentWay"); return (Criteria) this; } public Criteria andPaymentWayLessThan(Integer value) { addCriterion("payment_way <", value, "paymentWay"); return (Criteria) this; } public Criteria andPaymentWayLessThanOrEqualTo(Integer value) { addCriterion("payment_way 
<=", value, "paymentWay"); return (Criteria) this; } public Criteria andPaymentWayIn(List<Integer> values) { addCriterion("payment_way in", values, "paymentWay"); return (Criteria) this; } public Criteria andPaymentWayNotIn(List<Integer> values) { addCriterion("payment_way not in", values, "paymentWay"); return (Criteria) this; } public Criteria andPaymentWayBetween(Integer value1, Integer value2) { addCriterion("payment_way between", value1, value2, "paymentWay"); return (Criteria) this; } public Criteria andPaymentWayNotBetween(Integer value1, Integer value2) { addCriterion("payment_way not between", value1, value2, "paymentWay"); return (Criteria) this; } public Criteria andPaymentCashIsNull() { addCriterion("payment_cash is null"); return (Criteria) this; } public Criteria andPaymentCashIsNotNull() { addCriterion("payment_cash is not null"); return (Criteria) this; } public Criteria andPaymentCashEqualTo(Integer value) { addCriterion("payment_cash =", value, "paymentCash"); return (Criteria) this; } public Criteria andPaymentCashNotEqualTo(Integer value) { addCriterion("payment_cash <>", value, "paymentCash"); return (Criteria) this; } public Criteria andPaymentCashGreaterThan(Integer value) { addCriterion("payment_cash >", value, "paymentCash"); return (Criteria) this; } public Criteria andPaymentCashGreaterThanOrEqualTo(Integer value) { addCriterion("payment_cash >=", value, "paymentCash"); return (Criteria) this; } public Criteria andPaymentCashLessThan(Integer value) { addCriterion("payment_cash <", value, "paymentCash"); return (Criteria) this; } public Criteria andPaymentCashLessThanOrEqualTo(Integer value) { addCriterion("payment_cash <=", value, "paymentCash"); return (Criteria) this; } public Criteria andPaymentCashIn(List<Integer> values) { addCriterion("payment_cash in", values, "paymentCash"); return (Criteria) this; } public Criteria andPaymentCashNotIn(List<Integer> values) { addCriterion("payment_cash not in", values, "paymentCash"); return 
(Criteria) this; } public Criteria andPaymentCashBetween(Integer value1, Integer value2) { addCriterion("payment_cash between", value1, value2, "paymentCash"); return (Criteria) this; } public Criteria andPaymentCashNotBetween(Integer value1, Integer value2) { addCriterion("payment_cash not between", value1, value2, "paymentCash"); return (Criteria) this; } public Criteria andDeliveryIsNull() { addCriterion("delivery is null"); return (Criteria) this; } public Criteria andDeliveryIsNotNull() { addCriterion("delivery is not null"); return (Criteria) this; } public Criteria andDeliveryEqualTo(Integer value) { addCriterion("delivery =", value, "delivery"); return (Criteria) this; } public Criteria andDeliveryNotEqualTo(Integer value) { addCriterion("delivery <>", value, "delivery"); return (Criteria) this; } public Criteria andDeliveryGreaterThan(Integer value) { addCriterion("delivery >", value, "delivery"); return (Criteria) this; } public Criteria andDeliveryGreaterThanOrEqualTo(Integer value) { addCriterion("delivery >=", value, "delivery"); return (Criteria) this; } public Criteria andDeliveryLessThan(Integer value) { addCriterion("delivery <", value, "delivery"); return (Criteria) this; } public Criteria andDeliveryLessThanOrEqualTo(Integer value) { addCriterion("delivery <=", value, "delivery"); return (Criteria) this; } public Criteria andDeliveryIn(List<Integer> values) { addCriterion("delivery in", values, "delivery"); return (Criteria) this; } public Criteria andDeliveryNotIn(List<Integer> values) { addCriterion("delivery not in", values, "delivery"); return (Criteria) this; } public Criteria andDeliveryBetween(Integer value1, Integer value2) { addCriterion("delivery between", value1, value2, "delivery"); return (Criteria) this; } public Criteria andDeliveryNotBetween(Integer value1, Integer value2) { addCriterion("delivery not between", value1, value2, "delivery"); return (Criteria) this; } public Criteria andIsConfirmIsNull() { addCriterion("is_confirm is 
null"); return (Criteria) this; } public Criteria andIsConfirmIsNotNull() { addCriterion("is_confirm is not null"); return (Criteria) this; } public Criteria andIsConfirmEqualTo(Boolean value) { addCriterion("is_confirm =", value, "isConfirm"); return (Criteria) this; } public Criteria andIsConfirmNotEqualTo(Boolean value) { addCriterion("is_confirm <>", value, "isConfirm"); return (Criteria) this; } public Criteria andIsConfirmGreaterThan(Boolean value) { addCriterion("is_confirm >", value, "isConfirm"); return (Criteria) this; } public Criteria andIsConfirmGreaterThanOrEqualTo(Boolean value) { addCriterion("is_confirm >=", value, "isConfirm"); return (Criteria) this; } public Criteria andIsConfirmLessThan(Boolean value) { addCriterion("is_confirm <", value, "isConfirm"); return (Criteria) this; } public Criteria andIsConfirmLessThanOrEqualTo(Boolean value) { addCriterion("is_confirm <=", value, "isConfirm"); return (Criteria) this; } public Criteria andIsConfirmIn(List<Boolean> values) { addCriterion("is_confirm in", values, "isConfirm"); return (Criteria) this; } public Criteria andIsConfirmNotIn(List<Boolean> values) { addCriterion("is_confirm not in", values, "isConfirm"); return (Criteria) this; } public Criteria andIsConfirmBetween(Boolean value1, Boolean value2) { addCriterion("is_confirm between", value1, value2, "isConfirm"); return (Criteria) this; } public Criteria andIsConfirmNotBetween(Boolean value1, Boolean value2) { addCriterion("is_confirm not between", value1, value2, "isConfirm"); return (Criteria) this; } public Criteria andIsPaiyIsNull() { addCriterion("is_paiy is null"); return (Criteria) this; } public Criteria andIsPaiyIsNotNull() { addCriterion("is_paiy is not null"); return (Criteria) this; } public Criteria andIsPaiyEqualTo(Integer value) { addCriterion("is_paiy =", value, "isPaiy"); return (Criteria) this; } public Criteria andIsPaiyNotEqualTo(Integer value) { addCriterion("is_paiy <>", value, "isPaiy"); return (Criteria) this; } public 
Criteria andIsPaiyGreaterThan(Integer value) { addCriterion("is_paiy >", value, "isPaiy"); return (Criteria) this; } public Criteria andIsPaiyGreaterThanOrEqualTo(Integer value) { addCriterion("is_paiy >=", value, "isPaiy"); return (Criteria) this; } public Criteria andIsPaiyLessThan(Integer value) { addCriterion("is_paiy <", value, "isPaiy"); return (Criteria) this; } public Criteria andIsPaiyLessThanOrEqualTo(Integer value) { addCriterion("is_paiy <=", value, "isPaiy"); return (Criteria) this; } public Criteria andIsPaiyIn(List<Integer> values) { addCriterion("is_paiy in", values, "isPaiy"); return (Criteria) this; } public Criteria andIsPaiyNotIn(List<Integer> values) { addCriterion("is_paiy not in", values, "isPaiy"); return (Criteria) this; } public Criteria andIsPaiyBetween(Integer value1, Integer value2) { addCriterion("is_paiy between", value1, value2, "isPaiy"); return (Criteria) this; } public Criteria andIsPaiyNotBetween(Integer value1, Integer value2) { addCriterion("is_paiy not between", value1, value2, "isPaiy"); return (Criteria) this; } public Criteria andOrderStateIsNull() { addCriterion("order_state is null"); return (Criteria) this; } public Criteria andOrderStateIsNotNull() { addCriterion("order_state is not null"); return (Criteria) this; } public Criteria andOrderStateEqualTo(Integer value) { addCriterion("order_state =", value, "orderState"); return (Criteria) this; } public Criteria andOrderStateNotEqualTo(Integer value) { addCriterion("order_state <>", value, "orderState"); return (Criteria) this; } public Criteria andOrderStateGreaterThan(Integer value) { addCriterion("order_state >", value, "orderState"); return (Criteria) this; } public Criteria andOrderStateGreaterThanOrEqualTo(Integer value) { addCriterion("order_state >=", value, "orderState"); return (Criteria) this; } public Criteria andOrderStateLessThan(Integer value) { addCriterion("order_state <", value, "orderState"); return (Criteria) this; } public Criteria 
andOrderStateLessThanOrEqualTo(Integer value) { addCriterion("order_state <=", value, "orderState"); return (Criteria) this; } public Criteria andOrderStateIn(List<Integer> values) { addCriterion("order_state in", values, "orderState"); return (Criteria) this; } public Criteria andOrderStateNotIn(List<Integer> values) { addCriterion("order_state not in", values, "orderState"); return (Criteria) this; } public Criteria andOrderStateBetween(Integer value1, Integer value2) { addCriterion("order_state between", value1, value2, "orderState"); return (Criteria) this; } public Criteria andOrderStateNotBetween(Integer value1, Integer value2) { addCriterion("order_state not between", value1, value2, "orderState"); return (Criteria) this; } public Criteria andCreateDateIsNull() { addCriterion("create_date is null"); return (Criteria) this; } public Criteria andCreateDateIsNotNull() { addCriterion("create_date is not null"); return (Criteria) this; } public Criteria andCreateDateEqualTo(Date value) { addCriterion("create_date =", value, "createDate"); return (Criteria) this; } public Criteria andCreateDateNotEqualTo(Date value) { addCriterion("create_date <>", value, "createDate"); return (Criteria) this; } public Criteria andCreateDateGreaterThan(Date value) { addCriterion("create_date >", value, "createDate"); return (Criteria) this; } public Criteria andCreateDateGreaterThanOrEqualTo(Date value) { addCriterion("create_date >=", value, "createDate"); return (Criteria) this; } public Criteria andCreateDateLessThan(Date value) { addCriterion("create_date <", value, "createDate"); return (Criteria) this; } public Criteria andCreateDateLessThanOrEqualTo(Date value) { addCriterion("create_date <=", value, "createDate"); return (Criteria) this; } public Criteria andCreateDateIn(List<Date> values) { addCriterion("create_date in", values, "createDate"); return (Criteria) this; } public Criteria andCreateDateNotIn(List<Date> values) { addCriterion("create_date not in", values, 
"createDate"); return (Criteria) this; } public Criteria andCreateDateBetween(Date value1, Date value2) { addCriterion("create_date between", value1, value2, "createDate"); return (Criteria) this; } public Criteria andCreateDateNotBetween(Date value1, Date value2) { addCriterion("create_date not between", value1, value2, "createDate"); return (Criteria) this; } public Criteria andNoteIsNull() { addCriterion("note is null"); return (Criteria) this; } public Criteria andNoteIsNotNull() { addCriterion("note is not null"); return (Criteria) this; } public Criteria andNoteEqualTo(String value) { addCriterion("note =", value, "note"); return (Criteria) this; } public Criteria andNoteNotEqualTo(String value) { addCriterion("note <>", value, "note"); return (Criteria) this; } public Criteria andNoteGreaterThan(String value) { addCriterion("note >", value, "note"); return (Criteria) this; } public Criteria andNoteGreaterThanOrEqualTo(String value) { addCriterion("note >=", value, "note"); return (Criteria) this; } public Criteria andNoteLessThan(String value) { addCriterion("note <", value, "note"); return (Criteria) this; } public Criteria andNoteLessThanOrEqualTo(String value) { addCriterion("note <=", value, "note"); return (Criteria) this; } public Criteria andNoteLike(String value) { addCriterion("note like", value, "note"); return (Criteria) this; } public Criteria andNoteNotLike(String value) { addCriterion("note not like", value, "note"); return (Criteria) this; } public Criteria andNoteIn(List<String> values) { addCriterion("note in", values, "note"); return (Criteria) this; } public Criteria andNoteNotIn(List<String> values) { addCriterion("note not in", values, "note"); return (Criteria) this; } public Criteria andNoteBetween(String value1, String value2) { addCriterion("note between", value1, value2, "note"); return (Criteria) this; } public Criteria andNoteNotBetween(String value1, String value2) { addCriterion("note not between", value1, value2, "note"); 
return (Criteria) this; } public Criteria andBuyerIdIsNull() { addCriterion("buyer_id is null"); return (Criteria) this; } public Criteria andBuyerIdIsNotNull() { addCriterion("buyer_id is not null"); return (Criteria) this; } public Criteria andBuyerIdEqualTo(String value) { addCriterion("buyer_id =", value, "buyerId"); return (Criteria) this; } public Criteria andBuyerIdNotEqualTo(String value) { addCriterion("buyer_id <>", value, "buyerId"); return (Criteria) this; } public Criteria andBuyerIdGreaterThan(String value) { addCriterion("buyer_id >", value, "buyerId"); return (Criteria) this; } public Criteria andBuyerIdGreaterThanOrEqualTo(String value) { addCriterion("buyer_id >=", value, "buyerId"); return (Criteria) this; } public Criteria andBuyerIdLessThan(String value) { addCriterion("buyer_id <", value, "buyerId"); return (Criteria) this; } public Criteria andBuyerIdLessThanOrEqualTo(String value) { addCriterion("buyer_id <=", value, "buyerId"); return (Criteria) this; } public Criteria andBuyerIdLike(String value) { addCriterion("buyer_id like", value, "buyerId"); return (Criteria) this; } public Criteria andBuyerIdNotLike(String value) { addCriterion("buyer_id not like", value, "buyerId"); return (Criteria) this; } public Criteria andBuyerIdIn(List<String> values) { addCriterion("buyer_id in", values, "buyerId"); return (Criteria) this; } public Criteria andBuyerIdNotIn(List<String> values) { addCriterion("buyer_id not in", values, "buyerId"); return (Criteria) this; } public Criteria andBuyerIdBetween(String value1, String value2) { addCriterion("buyer_id between", value1, value2, "buyerId"); return (Criteria) this; } public Criteria andBuyerIdNotBetween(String value1, String value2) { addCriterion("buyer_id not between", value1, value2, "buyerId"); return (Criteria) this; } } public static class Criteria extends GeneratedCriteria { protected Criteria() { super(); } } public static class Criterion { private String condition; private Object value; private 
Object secondValue; private boolean noValue; private boolean singleValue; private boolean betweenValue; private boolean listValue; private String typeHandler; public String getCondition() { return condition; } public Object getValue() { return value; } public Object getSecondValue() { return secondValue; } public boolean isNoValue() { return noValue; } public boolean isSingleValue() { return singleValue; } public boolean isBetweenValue() { return betweenValue; } public boolean isListValue() { return listValue; } public String getTypeHandler() { return typeHandler; } protected Criterion(String condition) { super(); this.condition = condition; this.typeHandler = null; this.noValue = true; } protected Criterion(String condition, Object value, String typeHandler) { super(); this.condition = condition; this.value = value; this.typeHandler = typeHandler; if (value instanceof List<?>) { this.listValue = true; } else { this.singleValue = true; } } protected Criterion(String condition, Object value) { this(condition, value, null); } protected Criterion(String condition, Object value, Object secondValue, String typeHandler) { super(); this.condition = condition; this.value = value; this.secondValue = secondValue; this.typeHandler = typeHandler; this.betweenValue = true; } protected Criterion(String condition, Object value, Object secondValue) { this(condition, value, secondValue, null); } } }
/* * Copyright 2012 GitHub Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.github.mobile.ui.commit; import static com.github.mobile.Intents.EXTRA_BASE; import static com.github.mobile.Intents.EXTRA_HEAD; import static com.github.mobile.Intents.EXTRA_REPOSITORY; import android.accounts.Account; import android.app.Activity; import android.os.Bundle; import android.text.TextUtils; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.AdapterView; import android.widget.AdapterView.OnItemClickListener; import android.widget.ListView; import android.widget.ProgressBar; import android.widget.TextView; import com.actionbarsherlock.view.Menu; import com.actionbarsherlock.view.MenuInflater; import com.actionbarsherlock.view.MenuItem; import com.github.kevinsawicki.wishlist.ViewUtils; import com.github.mobile.R; import com.github.mobile.core.commit.CommitCompareTask; import com.github.mobile.core.commit.CommitUtils; import com.github.mobile.ui.DialogFragment; import com.github.mobile.ui.HeaderFooterListAdapter; import com.github.mobile.util.AvatarLoader; import com.github.mobile.util.ToastUtils; import com.google.inject.Inject; import java.text.MessageFormat; import java.util.Collection; import java.util.Collections; import java.util.List; import org.eclipse.egit.github.core.CommitFile; import org.eclipse.egit.github.core.Repository; import org.eclipse.egit.github.core.RepositoryCommit; import 
org.eclipse.egit.github.core.RepositoryCommitCompare;

/**
 * Fragment to display a list of commits being compared.
 * <p>
 * Shows the commits between {@code base} and {@code head} as list headers,
 * followed by the changed files and their styled diff lines. Tapping a row
 * opens the corresponding commit, file, or (for a diff line) the enclosing
 * file.
 */
public class CommitCompareListFragment extends DialogFragment implements
        OnItemClickListener {

    /** Renders diff hunks with add/remove styling. */
    private DiffStyler diffStyler;

    private ListView list;

    private ProgressBar progress;

    /** Repository the comparison is made in; from {@link #EXTRA_REPOSITORY}. */
    private Repository repository;

    /** Base ref/SHA of the comparison. */
    private String base;

    /** Head ref/SHA of the comparison. */
    private String head;

    @Inject
    private AvatarLoader avatars;

    private HeaderFooterListAdapter<CommitFileListAdapter> adapter;

    /** Last successfully loaded comparison; null until the first load completes. */
    private RepositoryCommitCompare compare;

    @Override
    public void onAttach(Activity activity) {
        super.onAttach(activity);

        repository = getSerializableExtra(EXTRA_REPOSITORY);
        base = getStringExtra(EXTRA_BASE);
        head = getStringExtra(EXTRA_HEAD);
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        diffStyler = new DiffStyler(getResources());
        compareCommits();
    }

    @Override
    public void onCreateOptionsMenu(final Menu optionsMenu,
            final MenuInflater inflater) {
        inflater.inflate(R.menu.refresh, optionsMenu);
    }

    @Override
    public boolean onOptionsItemSelected(final MenuItem item) {
        if (!isUsable())
            return false;

        switch (item.getItemId()) {
        case R.id.m_refresh:
            compareCommits();
            return true;
        default:
            return super.onOptionsItemSelected(item);
        }
    }

    /**
     * Kick off an async comparison of {@link #base}..{@link #head} and update
     * the list on success.
     */
    private void compareCommits() {
        new CommitCompareTask(getActivity(), repository, base, head) {

            @Override
            protected RepositoryCommitCompare run(Account account)
                    throws Exception {
                RepositoryCommitCompare compare = super.run(account);

                // Pre-style the diffs off the UI thread before sorting
                List<CommitFile> files = compare.getFiles();
                diffStyler.setFiles(files);
                if (files != null)
                    Collections.sort(files, new CommitFileComparator());
                return compare;
            }

            @Override
            protected void onSuccess(RepositoryCommitCompare compare)
                    throws Exception {
                super.onSuccess(compare);

                updateList(compare);
            }

            @Override
            protected void onException(Exception e) throws RuntimeException {
                super.onException(e);

                ToastUtils.show(getActivity(), e, R.string.error_commits_load);
            }
        }.execute();
    }

    /**
     * Render the comparison: commit rows are added as list headers, followed
     * by a file-stat header and one adapter item per changed file.
     *
     * @param compare loaded comparison, never null
     */
    private void updateList(RepositoryCommitCompare compare) {
        if (!isUsable())
            return;

        this.compare = compare;

        ViewUtils.setGone(progress, true);
        ViewUtils.setGone(list, false);

        LayoutInflater inflater = getActivity().getLayoutInflater();
        adapter.clearHeaders();
        adapter.getWrappedAdapter().clear();

        List<RepositoryCommit> commits = compare.getCommits();
        if (commits != null && !commits.isEmpty()) {
            View commitHeader = inflater.inflate(R.layout.commit_details_header,
                    null);
            ((TextView) commitHeader.findViewById(R.id.tv_commit_summary))
                    .setText(MessageFormat.format(
                            getString(R.string.comparing_commits),
                            commits.size()));
            adapter.addHeader(commitHeader);
            adapter.addHeader(inflater.inflate(R.layout.list_divider, null));

            CommitListAdapter commitAdapter = new CommitListAdapter(
                    R.layout.commit_item, inflater, commits, avatars);
            for (int i = 0; i < commits.size(); i++) {
                RepositoryCommit commit = commits.get(i);
                View view = commitAdapter.getView(i, null, null);
                adapter.addHeader(view, commit, true);
                adapter.addHeader(inflater.inflate(R.layout.list_divider, null));
            }
        }

        CommitFileListAdapter rootAdapter = adapter.getWrappedAdapter();
        rootAdapter.clear();
        List<CommitFile> files = compare.getFiles();
        if (files != null && !files.isEmpty()) {
            addFileStatHeader(files, inflater);
            for (CommitFile file : files)
                rootAdapter.addItem(file);
        }
    }

    /** Add a header summarizing additions/deletions across all files. */
    private void addFileStatHeader(List<CommitFile> files,
            LayoutInflater inflater) {
        View fileHeader = inflater.inflate(
                R.layout.commit_compare_file_details_header, null);
        ((TextView) fileHeader.findViewById(R.id.tv_commit_file_summary))
                .setText(CommitUtils.formatStats(files));
        adapter.addHeader(fileHeader);
    }

    @Override
    public void onViewCreated(View view, Bundle savedInstanceState) {
        super.onViewCreated(view, savedInstanceState);

        list = finder.find(android.R.id.list);
        progress = finder.find(R.id.pb_loading);

        LayoutInflater inflater = getActivity().getLayoutInflater();

        list.setOnItemClickListener(this);
        adapter = new HeaderFooterListAdapter<CommitFileListAdapter>(list,
                new CommitFileListAdapter(inflater, diffStyler, null, null));
        adapter.addFooter(inflater.inflate(R.layout.footer_separator, null));
        list.setAdapter(adapter);
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
            Bundle savedInstanceState) {
        // FIX: the previous two-arg inflate(resource, container) attaches the
        // inflated hierarchy to the container and returns the container, which
        // makes the fragment framework re-add an already-parented view and
        // crash. Pass attachToRoot=false so the container is only used for
        // layout-params resolution.
        return inflater.inflate(R.layout.commit_diff_list, container, false);
    }

    /**
     * Open the given commit, preferring the pager over the whole comparison
     * when the commit is part of the loaded compare result.
     */
    private void openCommit(final RepositoryCommit commit) {
        if (compare != null) {
            int commitPosition = 0;
            Collection<RepositoryCommit> commits = compare.getCommits();
            // Identity comparison is intentional: the tapped item came from
            // this same list, so reference equality locates its position.
            for (RepositoryCommit candidate : commits)
                if (commit == candidate)
                    break;
                else
                    commitPosition++;
            if (commitPosition < commits.size())
                startActivity(CommitViewActivity.createIntent(repository,
                        commitPosition, commits));
        } else
            startActivity(CommitViewActivity.createIntent(repository,
                    commit.getSha()));
    }

    /** Open the file viewer for the given file, if it has a name and SHA. */
    private void openFile(final CommitFile file) {
        if (!TextUtils.isEmpty(file.getFilename())
                && !TextUtils.isEmpty(file.getSha()))
            startActivity(CommitFileViewActivity.createIntent(repository, head,
                    file));
    }

    /**
     * A diff line was tapped: walk backwards to the nearest {@link CommitFile}
     * row and open that file.
     */
    private void openLine(AdapterView<?> parent, int position) {
        Object item = null;
        while (--position >= 0) {
            item = parent.getItemAtPosition(position);
            if (item instanceof CommitFile) {
                openFile((CommitFile) item);
                return;
            }
        }
    }

    @Override
    public void onItemClick(AdapterView<?> parent, View view, int position,
            long id) {
        Object item = parent.getItemAtPosition(position);
        if (item instanceof RepositoryCommit)
            openCommit((RepositoryCommit) item);
        else if (item instanceof CommitFile)
            openFile((CommitFile) item);
        else if (item instanceof CharSequence)
            openLine(parent, position);
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.cache.persistence.db; import java.util.List; import java.util.Map; import java.util.TreeMap; import java.util.TreeSet; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import javax.cache.expiry.AccessedExpiryPolicy; import javax.cache.expiry.CreatedExpiryPolicy; import javax.cache.expiry.Duration; import javax.cache.expiry.ExpiryPolicy; import org.apache.ignite.Ignite; import org.apache.ignite.IgniteCache; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.IgniteSystemProperties; import org.apache.ignite.cache.CacheMode; import org.apache.ignite.cache.CachePeekMode; import org.apache.ignite.cache.CacheRebalanceMode; import org.apache.ignite.cache.CacheWriteSynchronizationMode; import org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunction; import org.apache.ignite.configuration.CacheConfiguration; import org.apache.ignite.configuration.DataRegionConfiguration; import org.apache.ignite.configuration.DataStorageConfiguration; import org.apache.ignite.configuration.IgniteConfiguration; import org.apache.ignite.configuration.NearCacheConfiguration; import 
org.apache.ignite.configuration.WALMode; import org.apache.ignite.failure.FailureContext; import org.apache.ignite.failure.FailureHandler; import org.apache.ignite.failure.NoOpFailureHandler; import org.apache.ignite.internal.IgniteEx; import org.apache.ignite.internal.IgniteFutureTimeoutCheckedException; import org.apache.ignite.internal.IgniteInternalFuture; import org.apache.ignite.internal.processors.cache.GridCacheContext; import org.apache.ignite.internal.processors.cache.GridCacheSharedContext; import org.apache.ignite.internal.processors.cache.IgniteCacheOffheapManager; import org.apache.ignite.internal.processors.cache.IgniteCacheProxy; import org.apache.ignite.internal.processors.cache.distributed.dht.topology.GridDhtLocalPartition; import org.apache.ignite.internal.processors.cache.persistence.checkpoint.CheckpointManager; import org.apache.ignite.internal.util.ReentrantReadWriteLockWithTracking; import org.apache.ignite.internal.util.lang.GridAbsPredicate; import org.apache.ignite.internal.util.lang.GridCursor; import org.apache.ignite.internal.util.typedef.F; import org.apache.ignite.internal.util.typedef.PA; import org.apache.ignite.internal.util.typedef.X; import org.apache.ignite.internal.util.typedef.internal.CU; import org.apache.ignite.internal.util.typedef.internal.U; import org.apache.ignite.testframework.GridTestUtils; import org.apache.ignite.testframework.MvccFeatureChecker; import org.apache.ignite.testframework.junits.WithSystemProperty; import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest; import org.junit.Test; import static org.apache.ignite.cache.CacheAtomicityMode.ATOMIC; import static org.apache.ignite.cache.CacheAtomicityMode.TRANSACTIONAL; import static org.apache.ignite.cluster.ClusterState.ACTIVE; import static org.apache.ignite.cluster.ClusterState.INACTIVE; import static org.apache.ignite.testframework.GridTestUtils.runAsync; import static 
org.apache.ignite.testframework.GridTestUtils.runMultiThreadedAsync; import static org.apache.ignite.testframework.GridTestUtils.waitForAllFutures; /** * Test TTL worker with persistence enabled */ @WithSystemProperty(key = IgniteSystemProperties.IGNITE_UNWIND_THROTTLING_TIMEOUT, value = "5") public class IgnitePdsWithTtlTest extends GridCommonAbstractTest { /** */ private static final String CACHE_NAME_ATOMIC = "expirable-cache-atomic"; /** */ private static final String CACHE_NAME_ATOMIC_NON_PERSISTENT = "expirable-non-persistent-cache-atomic"; /** */ private static final String CACHE_NAME_TX = "expirable-cache-tx"; /** */ private static final String CACHE_NAME_LOCAL_ATOMIC = "expirable-cache-local-atomic"; /** */ private static final String CACHE_NAME_LOCAL_TX = "expirable-cache-local-tx"; /** */ private static final String CACHE_NAME_NEAR_ATOMIC = "expirable-cache-near-atomic"; /** */ private static final String CACHE_NAME_NEAR_TX = "expirable-cache-near-tx"; /** */ private static final String NON_PERSISTENT_DATA_REGION = "non-persistent-region"; /** */ public static final int PART_SIZE = 2; /** */ private static final int EXPIRATION_TIMEOUT = 10; /** */ public static final int ENTRIES = 50_000; /** */ public static final int SMALL_ENTRIES = 10; /** */ private static final int WORKLOAD_THREADS_CNT = 16; /** Fail. */ private volatile boolean failureHndTriggered; /** {@inheritDoc} */ @Override protected void beforeTest() throws Exception { MvccFeatureChecker.skipIfNotSupported(MvccFeatureChecker.Feature.EXPIRATION); super.beforeTest(); cleanPersistenceDir(); } /** {@inheritDoc} */ @Override protected void afterTest() throws Exception { super.afterTest(); //protection if test failed to finish, e.g. 
by error stopAllGrids(); cleanPersistenceDir(); } /** {@inheritDoc} */ @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception { final IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName); DataRegionConfiguration dfltRegion = new DataRegionConfiguration() .setMaxSize(2L * 1024 * 1024 * 1024) .setPersistenceEnabled(true); DataRegionConfiguration nonPersistentRegion = new DataRegionConfiguration() .setName(NON_PERSISTENT_DATA_REGION) .setMaxSize(2L * 1024 * 1024 * 1024) .setPersistenceEnabled(false); cfg.setDataStorageConfiguration( new DataStorageConfiguration() .setDefaultDataRegionConfiguration(dfltRegion) .setDataRegionConfigurations(nonPersistentRegion) .setWalMode(WALMode.LOG_ONLY)); cfg.setCacheConfiguration( getCacheConfiguration(CACHE_NAME_ATOMIC).setAtomicityMode(ATOMIC), getCacheConfiguration(CACHE_NAME_TX).setAtomicityMode(TRANSACTIONAL), getCacheConfiguration(CACHE_NAME_LOCAL_ATOMIC).setAtomicityMode(ATOMIC).setCacheMode(CacheMode.LOCAL), getCacheConfiguration(CACHE_NAME_LOCAL_TX).setAtomicityMode(TRANSACTIONAL).setCacheMode(CacheMode.LOCAL), getCacheConfiguration(CACHE_NAME_NEAR_ATOMIC).setAtomicityMode(ATOMIC) .setNearConfiguration(new NearCacheConfiguration<>()), getCacheConfiguration(CACHE_NAME_NEAR_TX).setAtomicityMode(TRANSACTIONAL) .setNearConfiguration(new NearCacheConfiguration<>()) ); return cfg; } /** {@inheritDoc} */ @Override protected FailureHandler getFailureHandler(String igniteInstanceName) { return new NoOpFailureHandler() { @Override protected boolean handle(Ignite ignite, FailureContext failureCtx) { failureHndTriggered = true; return super.handle(ignite, failureCtx); } }; } /** * Returns a new cache configuration with the given name and {@code GROUP_NAME} group. * * @param name Cache name. * @return Cache configuration. 
*/ private CacheConfiguration<?, ?> getCacheConfiguration(String name) { CacheConfiguration<?, ?> ccfg = new CacheConfiguration<>(); ccfg.setName(name); ccfg.setAffinity(new RendezvousAffinityFunction(false, PART_SIZE)); ccfg.setExpiryPolicyFactory(AccessedExpiryPolicy.factoryOf(new Duration(TimeUnit.MILLISECONDS, EXPIRATION_TIMEOUT))); ccfg.setEagerTtl(true); ccfg.setWriteSynchronizationMode(CacheWriteSynchronizationMode.FULL_SYNC); ccfg.setRebalanceMode(CacheRebalanceMode.SYNC); return ccfg; } /** * @throws Exception if failed. */ @Test public void testTtlIsApplied() throws Exception { loadAndWaitForCleanup(false); } /** * @throws Exception if failed. */ @Test public void testTtlIsAppliedForMultipleCaches() throws Exception { IgniteEx srv = startGrid(0); srv.cluster().state(ACTIVE); int cacheCnt = 2; // Create a new caches in the same group. // It is important that initially created cache CACHE_NAME remains empty. for (int i = 0; i < cacheCnt; ++i) { String cacheName = CACHE_NAME_ATOMIC + "-" + i; srv.getOrCreateCache(getCacheConfiguration(cacheName)); fillCache(srv.cache(cacheName)); } waitAndCheckExpired(srv, srv.cache(CACHE_NAME_ATOMIC + "-" + (cacheCnt - 1))); srv.cluster().state(ACTIVE); stopAllGrids(); } /** * @throws Exception if failed. */ @Test public void testTtlIsAppliedAfterRestart() throws Exception { loadAndWaitForCleanup(true); } /** * @throws Exception if failed. 
*/ @Test public void testPutOpsIntoCacheWithExpirationConcurrentlyWithCheckpointCompleteSuccessfully() throws Exception { IgniteEx ig0 = startGrid(0); ig0.cluster().state(ACTIVE); IgniteCache<Object, Object> cache = ig0.getOrCreateCache(CACHE_NAME_ATOMIC); AtomicBoolean timeoutReached = new AtomicBoolean(false); CheckpointManager checkpointManager = U.field(ig0.context().cache().context().database(), "checkpointManager"); IgniteInternalFuture<?> ldrFut = runMultiThreadedAsync(() -> { while (!timeoutReached.get()) { Map<Object, Object> map = new TreeMap<>(); for (int i = 0; i < ENTRIES; i++) map.put(i, i); cache.putAll(map); } }, WORKLOAD_THREADS_CNT, "loader"); IgniteInternalFuture<?> updaterFut = runMultiThreadedAsync(() -> { while (!timeoutReached.get()) { for (int i = 0; i < SMALL_ENTRIES; i++) cache.put(i, i * 10); } }, WORKLOAD_THREADS_CNT, "updater"); IgniteInternalFuture<?> cpWriteLockUnlockFut = runAsync(() -> { Object checkpointReadWriteLock = U.field( checkpointManager.checkpointTimeoutLock(), "checkpointReadWriteLock" ); ReentrantReadWriteLockWithTracking lock = U.field(checkpointReadWriteLock, "checkpointLock"); while (!timeoutReached.get()) { try { lock.writeLock().lockInterruptibly(); doSleep(30); } catch (InterruptedException ignored) { break; } finally { lock.writeLock().unlock(); } doSleep(30); } }, "cp-write-lock-holder"); doSleep(10_000); timeoutReached.set(true); waitForAllFutures(cpWriteLockUnlockFut, ldrFut, updaterFut); } /** * @throws Exception if failed. */ @Test public void testConcurrentPutOpsToCacheWithExpirationCompleteSuccesfully() throws Exception { final AtomicBoolean end = new AtomicBoolean(); final IgniteEx srv = startGrids(3); srv.cluster().state(ACTIVE); // Start high workload. 
IgniteInternalFuture<?> loadFut = runMultiThreadedAsync(() -> { List<IgniteCache<Object, Object>> caches = F.asList( srv.cache(CACHE_NAME_ATOMIC), srv.cache(CACHE_NAME_TX), srv.cache(CACHE_NAME_LOCAL_ATOMIC), srv.cache(CACHE_NAME_LOCAL_TX), srv.cache(CACHE_NAME_NEAR_ATOMIC), srv.cache(CACHE_NAME_NEAR_TX) ); while (!end.get() && !failureHndTriggered) { for (IgniteCache<Object, Object> cache : caches) { for (int i = 0; i < SMALL_ENTRIES; i++) cache.put(i, new byte[1024]); cache.putAll(new TreeMap<>(F.asMap(0, new byte[1024], 1, new byte[1024]))); for (int i = 0; i < SMALL_ENTRIES; i++) cache.get(i); cache.getAll(new TreeSet<>(F.asList(0, 1))); } } }, WORKLOAD_THREADS_CNT, "high-workload"); try { // Let's wait some time. loadFut.get(10, TimeUnit.SECONDS); } catch (Exception e) { assertFalse("Failure handler was called. See log above.", failureHndTriggered); assertTrue(X.hasCause(e, IgniteFutureTimeoutCheckedException.class)); } finally { end.set(true); } assertFalse("Failure handler was called. See log above.", failureHndTriggered); } /** * @throws Exception if failed. */ private void loadAndWaitForCleanup(boolean restartGrid) throws Exception { IgniteEx srv = startGrid(0); srv.cluster().state(ACTIVE); fillCache(srv.cache(CACHE_NAME_ATOMIC)); if (restartGrid) { srv.context().cache().context().database().waitForCheckpoint("test-checkpoint"); stopGrid(0); srv = startGrid(0); srv.cluster().state(ACTIVE); } final IgniteCache<Integer, byte[]> cache = srv.cache(CACHE_NAME_ATOMIC); printStatistics((IgniteCacheProxy)cache, "After restart from LFS"); waitAndCheckExpired(srv, cache); srv.cluster().state(ACTIVE); stopAllGrids(); } /** * @throws Exception if failed. 
*/
    /**
     * Checks that entries still expire correctly while rebalancing to a newly joined node
     * is in progress.
     *
     * @throws Exception if failed.
     */
    @Test
    public void testRebalancingWithTtlExpirable() throws Exception {
        IgniteEx srv = startGrid(0);

        srv.cluster().baselineAutoAdjustEnabled(false);
        srv.cluster().state(ACTIVE);

        fillCache(srv.cache(CACHE_NAME_ATOMIC));

        srv = startGrid(1); //causes rebalancing start

        // Extending the baseline to the new topology version triggers data movement to grid 1.
        srv.cluster().setBaselineTopology(srv.cluster().topologyVersion());

        final IgniteCache<Integer, byte[]> cache = srv.cache(CACHE_NAME_ATOMIC);

        printStatistics((IgniteCacheProxy)cache, "After rebalancing start");

        waitAndCheckExpired(srv, cache);

        srv.cluster().state(INACTIVE);

        stopAllGrids();
    }

    /**
     * Verifies PDS stays consistent after node restarts when rebalanced data carries a
     * one-day expiry policy (entries are expirable but not yet expired).
     *
     * @throws Exception if failed.
     */
    @Test
    public void testStartStopAfterRebalanceWithTtlExpirable() throws Exception {
        try {
            IgniteEx srv = startGrid(0);

            srv.cluster().baselineAutoAdjustEnabled(false);
            startGrid(1);

            srv.cluster().active(true);

            // One-day TTL: entries are expirable but must survive the whole test.
            ExpiryPolicy plc = CreatedExpiryPolicy.factoryOf(Duration.ONE_DAY).create();

            IgniteCache<Integer, byte[]> cache0 = srv.cache(CACHE_NAME_ATOMIC);

            fillCache(cache0.withExpiryPolicy(plc));

            srv = startGrid(2);

            IgniteCache<Integer, byte[]> cache = srv.cache(CACHE_NAME_ATOMIC);

            //causes rebalancing start
            srv.cluster().setBaselineTopology(srv.cluster().topologyVersion());

            // Wait until the new node has fully rebalanced and holds local data.
            GridTestUtils.waitForCondition(new GridAbsPredicate() {
                @Override public boolean apply() {
                    return Boolean.TRUE.equals(cache.rebalance().get()) &&
                        cache.localSizeLong(CachePeekMode.ALL) > 0;
                }
            }, 20_000);

            //check if pds is consistent
            stopGrid(0);
            startGrid(0);

            stopGrid(1);
            startGrid(1);

            srv.cluster().state(INACTIVE);
        }
        finally {
            stopAllGrids();
        }
    }

    /**
     * Tests that cache entries of a cache backed by a non-persistent data region are
     * correctly expired.
     *
     * @throws Exception If failed.
*/
    /**
     * Verifies expiration works for a cache placed in a non-persistent data region and that
     * the failure handler is not triggered in the process.
     *
     * @throws Exception If failed.
     */
    @Test
    public void testExpirationNonPersistentRegion() throws Exception {
        IgniteEx srv = startGrid(0);

        srv.cluster().baselineAutoAdjustEnabled(false);
        srv.cluster().state(ACTIVE);

        CacheConfiguration<?, ?> cfg =
            getCacheConfiguration(CACHE_NAME_ATOMIC_NON_PERSISTENT)
                .setAtomicityMode(ATOMIC)
                .setDataRegionName(NON_PERSISTENT_DATA_REGION);

        srv.getOrCreateCache(cfg);

        IgniteCache<Integer, byte[]> nonPersistentCache = srv.cache(CACHE_NAME_ATOMIC_NON_PERSISTENT);

        fillCache(nonPersistentCache);

        waitAndCheckExpired(srv, nonPersistentCache);

        stopAllGrids();

        assertFalse("Failure handler should not be triggered.", failureHndTriggered);
    }

    /**
     * Populates the cache with {@code ENTRIES} keys mapped to 1 KB payloads, then reads every
     * key back so the accessed-based expiry policy is armed for each entry.
     *
     * @param cache Cache to fill.
     */
    protected void fillCache(IgniteCache<Integer, byte[]> cache) {
        // Plain TreeMap instead of the previous double-brace initialization: the anonymous
        // TreeMap subclass captured the enclosing test instance and is a known anti-pattern.
        TreeMap<Integer, byte[]> entries = new TreeMap<>();

        for (int i = 0; i < ENTRIES; i++)
            entries.put(i, new byte[1024]);

        cache.putAll(entries);

        // Touch entries so the access-based expiry timer starts ticking for each of them.
        for (int i = 0; i < ENTRIES; i++)
            cache.get(i);

        printStatistics((IgniteCacheProxy)cache, "After cache puts");
    }

    /**
     * Waits up to 1.5 * EXPIRATION_TIMEOUT for the cache to become empty, then verifies
     * through the internal partition API that every local partition holds no data.
     *
     * @param srv Node to inspect.
     * @param cache Cache expected to expire completely.
     * @throws IgniteCheckedException On wait failure.
     */
    protected void waitAndCheckExpired(
        IgniteEx srv,
        final IgniteCache<Integer, byte[]> cache
    ) throws IgniteCheckedException {
        boolean awaited = GridTestUtils.waitForCondition(new PA() {
            @Override public boolean apply() {
                return cache.size() == 0;
            }
        }, TimeUnit.SECONDS.toMillis(EXPIRATION_TIMEOUT + EXPIRATION_TIMEOUT / 2));

        assertTrue("Cache is not empty. size=" + cache.size(), awaited);

        printStatistics((IgniteCacheProxy)cache, "After timeout");

        GridCacheSharedContext ctx = srv.context().cache().context();
        GridCacheContext cctx = ctx.cacheContext(CU.cacheId(CACHE_NAME_ATOMIC));

        // Check partitions through internal API: public size() can lie about row stores.
        for (int partId = 0; partId < PART_SIZE; ++partId) {
            GridDhtLocalPartition locPart = cctx.dht().topology().localPartition(partId);

            if (locPart == null)
                continue;

            IgniteCacheOffheapManager.CacheDataStore dataStore =
                ctx.cache().cacheGroup(CU.cacheId(CACHE_NAME_ATOMIC)).offheap().dataStore(locPart);

            GridCursor cur = dataStore.cursor();

            assertFalse(cur.next());
            assertEquals(0, locPart.fullSize());
        }

        for (int i = 0; i < ENTRIES; i++)
            assertNull(cache.get(i));
    }

    /**
     * Dumps cache memory statistics to stdout, bracketed by {@code msg} markers.
     *
     * @param cache Cache proxy to dump.
     * @param msg Marker text identifying the dump site.
     */
    private void printStatistics(IgniteCacheProxy cache, String msg) {
        System.out.println(msg + " {{");

        cache.context().printMemoryStats();

        System.out.println("}} " + msg);
    }
}
package de.peeeq.wurstscript;

import de.peeeq.wurstscript.antlr.JassParser;
import de.peeeq.wurstscript.antlr.WurstLexer;
import de.peeeq.wurstscript.antlr.WurstParser.CompilationUnitContext;
import de.peeeq.wurstscript.ast.Ast;
import de.peeeq.wurstscript.ast.CompilationUnit;
import de.peeeq.wurstscript.attributes.CompilationUnitInfo;
import de.peeeq.wurstscript.attributes.CompileError;
import de.peeeq.wurstscript.attributes.ErrorHandler;
import de.peeeq.wurstscript.gui.WurstGui;
import de.peeeq.wurstscript.jass.AntlrJassParseTreeTransformer;
import de.peeeq.wurstscript.jass.ExtendedJassLexer;
import de.peeeq.wurstscript.jurst.AntlrJurstParseTreeTransformer;
import de.peeeq.wurstscript.jurst.ExtendedJurstLexer;
import de.peeeq.wurstscript.jurst.antlr.JurstParser;
import de.peeeq.wurstscript.parser.WPos;
import de.peeeq.wurstscript.parser.antlr.AntlrWurstParseTreeTransformer;
import de.peeeq.wurstscript.parser.antlr.ExtendedWurstLexer;
import de.peeeq.wurstscript.utils.LineOffsets;
import org.antlr.v4.runtime.*;
import org.antlr.v4.runtime.misc.Interval;

import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;

/**
 * Front-end entry point that turns Wurst, Jurst, or Jass source text into a
 * {@link CompilationUnit} AST. Syntax errors are reported to the {@link WurstGui}; parsing
 * aborts with an empty compilation unit after {@link #MAX_SYNTAX_ERRORS} errors.
 */
public class WurstParser {
    /** Parsing of a file is abandoned once this many syntax errors were reported. */
    private static final int MAX_SYNTAX_ERRORS = 15;
    private final ErrorHandler errorHandler;
    private final WurstGui gui;

    public WurstParser(ErrorHandler errorHandler, WurstGui gui) {
        this.errorHandler = errorHandler;
        this.gui = gui;
    }

    /**
     * Parses Wurst source code.
     *
     * @param reader source text (consumed fully before parsing)
     * @param source name of the source file, used in error positions
     * @param hasCommonJ whether common.j definitions are available (affects sugar removal)
     */
    public CompilationUnit parse(Reader reader, String source, boolean hasCommonJ) {
        // Slurp the whole reader into a string first ("\\A" = match the entire input).
        try (java.util.Scanner s = new java.util.Scanner(reader)) {
            s.useDelimiter("\\A");
            String input = s.hasNext() ? s.next() : "";
            return parseWithAntlr(new StringReader(input), source, hasCommonJ);
        }
    }

    /** Runs the ANTLR Wurst lexer/parser and transforms the parse tree into the AST. */
    private CompilationUnit parseWithAntlr(Reader reader, final String source, boolean hasCommonJ) {
        try {
            CharStream input = CharStreams.fromReader(reader);
            // create a lexer that feeds off of input CharStream
            final ExtendedWurstLexer lexer = new ExtendedWurstLexer(input);
            // create a buffer of tokens pulled from the lexer
            TokenStream tokens = new CommonTokenStream(lexer);
            // create a parser that feeds off the tokens buffer
            de.peeeq.wurstscript.antlr.WurstParser parser = new de.peeeq.wurstscript.antlr.WurstParser(tokens);
            ANTLRErrorListener l = new BaseErrorListener() {
                int errorCount = 0;

                @Override
                public void syntaxError(@SuppressWarnings("null") Recognizer<?, ?> recognizer,
                                        @SuppressWarnings("null") Object offendingSymbol, int line, int charPositionInLine,
                                        @SuppressWarnings("null") String msg,
                                        @SuppressWarnings("null") RecognitionException e) {
                    // try to improve error message
                    if (e instanceof NoViableAltException) {
                        NoViableAltException ne = (NoViableAltException) e;
                        if (ne.getStartToken().getType() == WurstLexer.HOTDOC_COMMENT) {
                            msg = "Hotdoc comment is in invalid position, it can " +
                                    "only appear before function definitions, classes, and " +
                                    "other elements that can be documented.";
                            offendingSymbol = ne.getStartToken();
                        }
                    }

                    LineOffsets offsets = lexer.getLineOffsets();
                    int pos;
                    int posStop;
                    if (offendingSymbol instanceof Token) {
                        Token token = (Token) offendingSymbol;
                        pos = token.getStartIndex();
                        posStop = token.getStopIndex() + 1;
                    } else {
                        pos = offsets.get(line) + charPositionInLine;
                        posStop = pos + 1;
                    }
                    // Clamp the stop index so input.getText() below cannot run past the stream.
                    if (posStop >= input.size()) {
                        posStop = input.size() - 1;
                    }
                    // Back up over leading whitespace so the error marker points at real code.
                    while (pos > 0 && input.getText(new Interval(pos, posStop)).matches("\\s*")) {
                        pos--;
                    }
                    CompileError err = new CompileError(new WPos(source, offsets, pos, posStop), msg);
                    gui.sendError(err);
                    errorCount++;
                    if (errorCount > MAX_SYNTAX_ERRORS) {
                        throw new TooManyErrorsException();
                    }
                }
            };
            // ExtendedWurstLexer exposes a single-listener setter instead of addErrorListener.
            lexer.setErrorListener(l);
            parser.removeErrorListeners();
            parser.addErrorListener(l);

            CompilationUnitContext cu = parser.compilationUnit(); // begin parsing at init rule
            if (lexer.getTabWarning() != null) {
                CompileError warning = lexer.getTabWarning();
                warning = new CompileError(warning.getSource().withFile(source), warning.getMessage(), CompileError.ErrorType.WARNING);
                gui.sendError(warning);
            }

            CompilationUnit root = new AntlrWurstParseTreeTransformer(source, errorHandler, lexer.getLineOffsets()).transform(cu);
            removeSyntacticSugar(root, hasCommonJ);
            root.getCuInfo().setIndentationMode(lexer.getIndentationMode());
            return root;
        } catch (IOException e) {
            WLogger.severe(e);
            throw new Error(e);
        } catch (TooManyErrorsException e) {
            WLogger.info("Stopped parsing file " + source + ", too many errors");
            return emptyCompilationUnit();
        }
    }

    /** Parses Jurst source code (the Jass-compatible dialect of Wurst). */
    public CompilationUnit parseJurst(Reader reader, String source, boolean hasCommonJ) {
        return parseJurstWithAntlr(reader, source, hasCommonJ);
    }

    private CompilationUnit parseJurstWithAntlr(Reader reader, final String source, boolean hasCommonJ) {
        try {
            CharStream input = CharStreams.fromReader(reader);
            // create a lexer that feeds off of input CharStream
            final ExtendedJurstLexer lexer = new ExtendedJurstLexer(input);
            // create a buffer of tokens pulled from the lexer
            TokenStream tokens = new CommonTokenStream(lexer);
            // create a parser that feeds off the tokens buffer
            JurstParser parser = new JurstParser(tokens);
            ANTLRErrorListener l = new BaseErrorListener() {
                int errorCount = 0;

                @Override
                public void syntaxError(@SuppressWarnings("null") Recognizer<?, ?> recognizer,
                                        @SuppressWarnings("null") Object offendingSymbol, int line, int charPositionInLine,
                                        @SuppressWarnings("null") String msg,
                                        @SuppressWarnings("null") RecognitionException e) {
                    LineOffsets offsets = lexer.getLineOffsets();
                    int pos;
                    int posStop;
                    if (offendingSymbol instanceof Token) {
                        Token token = (Token) offendingSymbol;
                        pos = token.getStartIndex();
                        posStop = token.getStopIndex() + 1;
                    } else {
                        pos = offsets.get(line) + charPositionInLine;
                        posStop = pos + 1;
                    }
                    msg = "line " + line + ": " + msg;
                    // Clamp like the Wurst handler does; previously missing here, so an
                    // error at EOF could query an interval past the end of the stream.
                    if (posStop >= input.size()) {
                        posStop = input.size() - 1;
                    }
                    while (pos > 0 && input.getText(new Interval(pos, posStop)).matches("\\s*")) {
                        pos--;
                    }
                    CompileError err = new CompileError(new WPos(source, offsets, pos, posStop), msg);
                    gui.sendError(err);
                    errorCount++;
                    if (errorCount > MAX_SYNTAX_ERRORS) {
                        throw new TooManyErrorsException();
                    }
                }
            };
            lexer.addErrorListener(l);
            parser.removeErrorListeners();
            parser.addErrorListener(l);

            de.peeeq.wurstscript.jurst.antlr.JurstParser.CompilationUnitContext cu = parser.compilationUnit(); // begin parsing at init rule

            CompilationUnit root = new AntlrJurstParseTreeTransformer(source, errorHandler, lexer.getLineOffsets()).transform(cu);
            removeSyntacticSugar(root, hasCommonJ);
            return root;
        } catch (IOException e) {
            WLogger.severe(e);
            throw new Error(e);
        } catch (TooManyErrorsException e) {
            WLogger.info("Stopped parsing file " + source + ", too many errors");
            return emptyCompilationUnit();
        }
    }

    /** Parses plain Jass source code. */
    public CompilationUnit parseJass(Reader reader, String source, boolean hasCommonJ) {
        return parseJassAntlr(reader, source, hasCommonJ);
    }

    private CompilationUnit parseJassAntlr(Reader reader, final String source, boolean hasCommonJ) {
        try {
            CharStream input = CharStreams.fromReader(reader);
            // create a lexer that feeds off of input CharStream
            final ExtendedJassLexer lexer = new ExtendedJassLexer(input);
            // create a buffer of tokens pulled from the lexer
            TokenStream tokens = new CommonTokenStream(lexer);
            // create a parser that feeds off the tokens buffer
            JassParser parser = new JassParser(tokens);
            ANTLRErrorListener l = new BaseErrorListener() {
                int errorCount = 0;

                @Override
                public void syntaxError(@SuppressWarnings("null") Recognizer<?, ?> recognizer,
                                        @SuppressWarnings("null") Object offendingSymbol, int line, int charPositionInLine,
                                        @SuppressWarnings("null") String msg,
                                        @SuppressWarnings("null") RecognitionException e) {
                    LineOffsets offsets = lexer.getLineOffsets();
                    int pos;
                    int posStop;
                    if (offendingSymbol instanceof Token) {
                        Token token = (Token) offendingSymbol;
                        pos = token.getStartIndex();
                        posStop = token.getStopIndex() + 1;
                    } else {
                        pos = offsets.get(line) + charPositionInLine;
                        posStop = pos + 1;
                    }
                    msg = "line " + line + ": " + msg;
                    // Clamp like the Wurst handler does; previously missing here, so an
                    // error at EOF could query an interval past the end of the stream.
                    if (posStop >= input.size()) {
                        posStop = input.size() - 1;
                    }
                    while (pos > 0 && input.getText(new Interval(pos, posStop)).matches("\\s*")) {
                        pos--;
                    }
                    CompileError err = new CompileError(new WPos(source, offsets, pos, posStop), msg);
                    gui.sendError(err);
                    errorCount++;
                    if (errorCount > MAX_SYNTAX_ERRORS) {
                        throw new TooManyErrorsException();
                    }
                }
            };
            lexer.addErrorListener(l);
            parser.removeErrorListeners();
            parser.addErrorListener(l);

            JassParser.CompilationUnitContext cu = parser.compilationUnit(); // begin parsing at init rule

            CompilationUnit root = new AntlrJassParseTreeTransformer(source, errorHandler, lexer.getLineOffsets()).transform(cu);
            removeSyntacticSugar(root, hasCommonJ);
            return root;
        } catch (IOException e) {
            WLogger.severe(e);
            throw new Error(e);
        } catch (TooManyErrorsException e) {
            WLogger.info("Stopped parsing file " + source + ", too many errors");
            return emptyCompilationUnit();
        }
    }

    /** Returns a compilation unit with no declarations, used after a parse is aborted. */
    public CompilationUnit emptyCompilationUnit() {
        return Ast.CompilationUnit(new CompilationUnitInfo(errorHandler), Ast.JassToplevelDeclarations(), Ast.WPackages());
    }

    private void removeSyntacticSugar(CompilationUnit root, boolean hasCommonJ) {
        new SyntacticSugar().removeSyntacticSugar(root, hasCommonJ);
    }

    /** Thrown internally to abort parsing once MAX_SYNTAX_ERRORS is exceeded. */
    static class TooManyErrorsException extends RuntimeException {
    }
}
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.cassandra; import com.datastax.driver.core.utils.Bytes; import com.facebook.presto.spi.ColumnHandle; import com.facebook.presto.spi.ColumnMetadata; import com.facebook.presto.spi.ConnectorSplit; import com.facebook.presto.spi.ConnectorSplitSource; import com.facebook.presto.spi.ConnectorTableHandle; import com.facebook.presto.spi.ConnectorTableLayoutHandle; import com.facebook.presto.spi.ConnectorTableLayoutResult; import com.facebook.presto.spi.ConnectorTableMetadata; import com.facebook.presto.spi.Constraint; import com.facebook.presto.spi.RecordCursor; import com.facebook.presto.spi.SchemaNotFoundException; import com.facebook.presto.spi.SchemaTableName; import com.facebook.presto.spi.SchemaTablePrefix; import com.facebook.presto.spi.connector.Connector; import com.facebook.presto.spi.connector.ConnectorMetadata; import com.facebook.presto.spi.connector.ConnectorRecordSetProvider; import com.facebook.presto.spi.connector.ConnectorSplitManager; import com.facebook.presto.spi.connector.ConnectorTransactionHandle; import com.facebook.presto.spi.type.Type; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import org.cassandraunit.utils.EmbeddedCassandraServerHelper; import org.testng.annotations.AfterMethod; import org.testng.annotations.BeforeClass; import org.testng.annotations.Test; import java.util.Date; import java.util.List; import java.util.Map; import 
java.util.Optional;

import static com.facebook.presto.cassandra.CassandraTestingUtils.HOSTNAME;
import static com.facebook.presto.cassandra.CassandraTestingUtils.KEYSPACE_NAME;
import static com.facebook.presto.cassandra.CassandraTestingUtils.PORT;
import static com.facebook.presto.cassandra.CassandraTestingUtils.TABLE_NAME;
import static com.facebook.presto.cassandra.CassandraTestingUtils.initializeTestData;
import static com.facebook.presto.cassandra.util.Types.checkType;
import static com.facebook.presto.spi.type.BigintType.BIGINT;
import static com.facebook.presto.spi.type.BooleanType.BOOLEAN;
import static com.facebook.presto.spi.type.DoubleType.DOUBLE;
import static com.facebook.presto.spi.type.TimestampType.TIMESTAMP;
import static com.facebook.presto.spi.type.VarbinaryType.VARBINARY;
import static com.facebook.presto.spi.type.VarcharType.VARCHAR;
import static com.facebook.presto.testing.TestingConnectorSession.SESSION;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.collect.Iterables.getOnlyElement;
import static io.airlift.concurrent.MoreFutures.getFutureValue;
import static io.airlift.testing.Assertions.assertInstanceOf;
import static java.util.Locale.ENGLISH;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
import static org.testng.Assert.fail;

/**
 * Integration test for the Cassandra connector against an embedded Cassandra instance:
 * verifies metadata listing, split generation, and record-cursor reads of the seeded table.
 */
@Test(singleThreaded = true)
public class TestCassandraConnector
{
    protected static final String INVALID_DATABASE = "totally_invalid_database";
    // Captured once so the timestamp written by initializeTestData matches assertions below.
    private static final Date DATE = new Date();
    protected String database;
    protected SchemaTableName table;
    protected SchemaTableName tableUnpartitioned;
    protected SchemaTableName invalidTable;
    private ConnectorMetadata metadata;
    private ConnectorSplitManager splitManager;
    private ConnectorRecordSetProvider recordSetProvider;

    @BeforeClass
    public void setup()
            throws Exception
    {
        EmbeddedCassandraServerHelper.startEmbeddedCassandra();
        initializeTestData(DATE);

        String connectorId = "cassandra-test";
        CassandraConnectorFactory connectorFactory = new CassandraConnectorFactory(
                connectorId,
                ImmutableMap.<String, String>of());

        Connector connector = connectorFactory.create(connectorId, ImmutableMap.of(
                "cassandra.contact-points", HOSTNAME,
                "cassandra.native-protocol-port", Integer.toString(PORT)));

        metadata = connector.getMetadata(CassandraTransactionHandle.INSTANCE);
        assertInstanceOf(metadata, CassandraMetadata.class);

        splitManager = connector.getSplitManager();
        assertInstanceOf(splitManager, CassandraSplitManager.class);

        recordSetProvider = connector.getRecordSetProvider();
        assertInstanceOf(recordSetProvider, CassandraRecordSetProvider.class);

        // Use an explicit locale: the bare toLowerCase() is locale-sensitive (e.g. Turkish
        // dotless i) and testGetDatabaseNames already compares with toLowerCase(ENGLISH).
        database = KEYSPACE_NAME.toLowerCase(ENGLISH);
        table = new SchemaTableName(database, TABLE_NAME.toLowerCase(ENGLISH));
        tableUnpartitioned = new SchemaTableName(database, "presto_test_unpartitioned");
        invalidTable = new SchemaTableName(database, "totally_invalid_table_name");
    }

    @AfterMethod
    public void tearDown()
            throws Exception
    {
    }

    @Test
    public void testGetClient()
    {
    }

    @Test
    public void testGetDatabaseNames()
            throws Exception
    {
        List<String> databases = metadata.listSchemaNames(SESSION);
        assertTrue(databases.contains(database.toLowerCase(ENGLISH)));
    }

    @Test
    public void testGetTableNames()
            throws Exception
    {
        List<SchemaTableName> tables = metadata.listTables(SESSION, database);
        assertTrue(tables.contains(table));
    }

    // disabled until metadata manager is updated to handle invalid catalogs and schemas
    @Test(enabled = false, expectedExceptions = SchemaNotFoundException.class)
    public void testGetTableNamesException()
            throws Exception
    {
        metadata.listTables(SESSION, INVALID_DATABASE);
    }

    @Test
    public void testListUnknownSchema()
    {
        assertNull(metadata.getTableHandle(SESSION, new SchemaTableName("totally_invalid_database_name", "dual")));
        assertEquals(metadata.listTables(SESSION, "totally_invalid_database_name"), ImmutableList.of());
        assertEquals(metadata.listTableColumns(SESSION, new SchemaTablePrefix("totally_invalid_database_name", "dual")), ImmutableMap.of());
    }

    @Test
    public void testGetRecords()
            throws Exception
    {
        ConnectorTableHandle tableHandle = getTableHandle(table);
        ConnectorTableMetadata tableMetadata = metadata.getTableMetadata(SESSION, tableHandle);
        List<ColumnHandle> columnHandles = ImmutableList.copyOf(metadata.getColumnHandles(SESSION, tableHandle).values());
        Map<String, Integer> columnIndex = indexColumns(columnHandles);

        ConnectorTransactionHandle transaction = CassandraTransactionHandle.INSTANCE;

        List<ConnectorTableLayoutResult> layouts = metadata.getTableLayouts(SESSION, tableHandle, Constraint.alwaysTrue(), Optional.empty());
        ConnectorTableLayoutHandle layout = getOnlyElement(layouts).getTableLayout().getHandle();

        List<ConnectorSplit> splits = getAllSplits(splitManager.getSplits(transaction, SESSION, layout));

        long rowNumber = 0;
        for (ConnectorSplit split : splits) {
            CassandraSplit cassandraSplit = (CassandraSplit) split;

            long completedBytes = 0;
            try (RecordCursor cursor = recordSetProvider.getRecordSet(transaction, SESSION, cassandraSplit, columnHandles).cursor()) {
                while (cursor.advanceNextPosition()) {
                    try {
                        assertReadFields(cursor, tableMetadata.getColumns());
                    }
                    catch (RuntimeException e) {
                        throw new RuntimeException("row " + rowNumber, e);
                    }

                    rowNumber++;

                    // Keys were seeded as "key <n>"; recover n and check every typed column
                    // encodes the same row id.
                    String keyValue = cursor.getSlice(columnIndex.get("key")).toStringUtf8();
                    assertTrue(keyValue.startsWith("key "));
                    int rowId = Integer.parseInt(keyValue.substring(4));

                    assertEquals(keyValue, String.format("key %d", rowId));

                    assertEquals(Bytes.toHexString(cursor.getSlice(columnIndex.get("typebytes")).getBytes()), String.format("0x%08X", rowId));

                    // VARINT is returned as a string
                    assertEquals(cursor.getSlice(columnIndex.get("typeinteger")).toStringUtf8(), String.valueOf(rowId));

                    assertEquals(cursor.getLong(columnIndex.get("typelong")), 1000 + rowId);

                    assertEquals(cursor.getSlice(columnIndex.get("typeuuid")).toStringUtf8(), String.format("00000000-0000-0000-0000-%012d", rowId));

                    assertEquals(cursor.getSlice(columnIndex.get("typetimestamp")).toStringUtf8(), Long.valueOf(DATE.getTime()).toString());

                    long newCompletedBytes = cursor.getCompletedBytes();
                    assertTrue(newCompletedBytes >= completedBytes);
                    completedBytes = newCompletedBytes;
                }
            }
        }
        assertEquals(rowNumber, 9);
    }

    /** Exercises the type-appropriate getter for every non-null column of the current row. */
    private static void assertReadFields(RecordCursor cursor, List<ColumnMetadata> schema)
    {
        for (int columnIndex = 0; columnIndex < schema.size(); columnIndex++) {
            ColumnMetadata column = schema.get(columnIndex);
            if (!cursor.isNull(columnIndex)) {
                Type type = column.getType();
                if (BOOLEAN.equals(type)) {
                    cursor.getBoolean(columnIndex);
                }
                else if (BIGINT.equals(type)) {
                    cursor.getLong(columnIndex);
                }
                else if (TIMESTAMP.equals(type)) {
                    cursor.getLong(columnIndex);
                }
                else if (DOUBLE.equals(type)) {
                    cursor.getDouble(columnIndex);
                }
                else if (VARCHAR.equals(type) || VARBINARY.equals(type)) {
                    try {
                        cursor.getSlice(columnIndex);
                    }
                    catch (RuntimeException e) {
                        throw new RuntimeException("column " + column, e);
                    }
                }
                else {
                    fail("Unknown primitive type " + columnIndex);
                }
            }
        }
    }

    private ConnectorTableHandle getTableHandle(SchemaTableName tableName)
    {
        ConnectorTableHandle handle = metadata.getTableHandle(SESSION, tableName);
        checkArgument(handle != null, "table not found: %s", tableName);
        return handle;
    }

    /** Drains the split source in batches of 1000 until it reports completion. */
    private static List<ConnectorSplit> getAllSplits(ConnectorSplitSource splitSource)
            throws InterruptedException
    {
        ImmutableList.Builder<ConnectorSplit> splits = ImmutableList.builder();
        while (!splitSource.isFinished()) {
            splits.addAll(getFutureValue(splitSource.getNextBatch(1000)));
        }
        return splits.build();
    }

    /** Maps each column handle's name to its position in the handle list. */
    private static ImmutableMap<String, Integer> indexColumns(List<ColumnHandle> columnHandles)
    {
        ImmutableMap.Builder<String, Integer> index = ImmutableMap.builder();
        int i = 0;
        for (ColumnHandle columnHandle : columnHandles) {
            String name = checkType(columnHandle, CassandraColumnHandle.class, "columnHandle").getName();
            index.put(name, i);
            i++;
        }
        return index.build();
    }
}
/* * Copyright 2000-2017 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. */ package com.intellij.lang.ant.config.explorer; import com.intellij.execution.ExecutionBundle; import com.intellij.execution.RunManager; import com.intellij.execution.RunManagerListener; import com.intellij.execution.RunnerAndConfigurationSettings; import com.intellij.execution.impl.RunDialog; import com.intellij.icons.AllIcons; import com.intellij.ide.CommonActionsManager; import com.intellij.ide.DataManager; import com.intellij.ide.TreeExpander; import com.intellij.ide.dnd.FileCopyPasteUtil; import com.intellij.lang.ant.AntBundle; import com.intellij.lang.ant.config.*; import com.intellij.lang.ant.config.actions.AntBuildFilePropertiesAction; import com.intellij.lang.ant.config.actions.RemoveBuildFileAction; import com.intellij.lang.ant.config.execution.AntRunConfiguration; import com.intellij.lang.ant.config.execution.AntRunConfigurationType; import com.intellij.lang.ant.config.execution.ExecutionHandler; import com.intellij.lang.ant.config.impl.*; import com.intellij.lang.ant.config.impl.configuration.BuildFilePropertiesPanel; import com.intellij.openapi.Disposable; import com.intellij.openapi.actionSystem.*; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.fileChooser.FileChooser; import com.intellij.openapi.fileChooser.FileChooserDescriptor; import com.intellij.openapi.fileEditor.OpenFileDescriptor; import com.intellij.openapi.fileTypes.StdFileTypes; import com.intellij.openapi.keymap.Keymap; import com.intellij.openapi.keymap.KeymapManagerListener; import com.intellij.openapi.keymap.ex.KeymapManagerEx; import com.intellij.openapi.keymap.impl.ui.EditKeymapsDialog; import com.intellij.openapi.project.Project; import com.intellij.openapi.ui.Messages; import com.intellij.openapi.ui.SimpleToolWindowPanel; import com.intellij.openapi.util.Disposer; import 
com.intellij.openapi.vfs.VfsUtil; import com.intellij.openapi.vfs.VfsUtilCore; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.psi.PsiElement; import com.intellij.ui.*; import com.intellij.ui.treeStructure.Tree; import com.intellij.util.*; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.ui.JBUI; import com.intellij.util.ui.tree.TreeUtil; import com.intellij.util.xml.DomEventListener; import com.intellij.util.xml.DomManager; import com.intellij.util.xml.events.DomEvent; import icons.AntIcons; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import javax.swing.tree.DefaultMutableTreeNode; import javax.swing.tree.DefaultTreeModel; import javax.swing.tree.TreePath; import java.awt.*; import java.awt.event.ActionEvent; import java.awt.event.KeyEvent; import java.awt.event.MouseEvent; import java.io.File; import java.util.*; import java.util.List; public class AntExplorer extends SimpleToolWindowPanel implements DataProvider, Disposable { private Project myProject; private AntExplorerTreeBuilder myBuilder; private Tree myTree; private KeymapListener myKeymapListener; private final AntBuildFilePropertiesAction myAntBuildFilePropertiesAction; private AntConfiguration myConfig; private final TreeExpander myTreeExpander = new TreeExpander() { @Override public void expandAll() { myBuilder.expandAll(); } @Override public boolean canExpand() { final AntConfiguration config = myConfig; return config != null && !config.getBuildFileList().isEmpty(); } @Override public void collapseAll() { myBuilder.collapseAll(); } @Override public boolean canCollapse() { return canExpand(); } }; public AntExplorer(@NotNull Project project) { super(true, true); setTransferHandler(new MyTransferHandler()); myProject = project; myConfig = AntConfiguration.getInstance(project); final DefaultTreeModel model = new DefaultTreeModel(new DefaultMutableTreeNode()); 
myTree = new Tree(model); myTree.setRootVisible(false); myTree.setShowsRootHandles(true); myTree.setCellRenderer(new NodeRenderer()); myBuilder = new AntExplorerTreeBuilder(project, myTree, model); myBuilder.setTargetsFiltered(AntConfigurationBase.getInstance(project).isFilterTargets()); TreeUtil.installActions(myTree); new TreeSpeedSearch(myTree); myTree.addMouseListener(new PopupHandler() { @Override public void invokePopup(final Component comp, final int x, final int y) { popupInvoked(comp, x, y); } }); new EditSourceOnDoubleClickHandler.TreeMouseListener(myTree, null) { @Override protected void processDoubleClick(@NotNull MouseEvent e, @NotNull DataContext dataContext, @NotNull TreePath treePath) { runSelection(DataManager.getInstance().getDataContext(myTree)); } }.installOn(myTree); myTree.registerKeyboardAction(new AbstractAction() { @Override public void actionPerformed(ActionEvent e) { runSelection(DataManager.getInstance().getDataContext(myTree)); } }, KeyStroke.getKeyStroke(KeyEvent.VK_ENTER, 0), WHEN_FOCUSED); myTree.setLineStyleAngled(); myAntBuildFilePropertiesAction = new AntBuildFilePropertiesAction(this); setToolbar(createToolbarPanel()); setContent(ScrollPaneFactory.createScrollPane(myTree)); ToolTipManager.sharedInstance().registerComponent(myTree); myKeymapListener = new KeymapListener(); DomManager.getDomManager(project).addDomEventListener(new DomEventListener() { @Override public void eventOccured(DomEvent event) { myBuilder.queueUpdate(); } }, this); project.getMessageBus().connect(this).subscribe(RunManagerListener.TOPIC, new RunManagerListener() { @Override public void beforeRunTasksChanged () { myBuilder.queueUpdate(); } }); } @Override public void dispose() { final KeymapListener listener = myKeymapListener; if (listener != null) { myKeymapListener = null; listener.stopListen(); } final AntExplorerTreeBuilder builder = myBuilder; if (builder != null) { Disposer.dispose(builder); myBuilder = null; } final Tree tree = myTree; if (tree != 
null) { ToolTipManager.sharedInstance().unregisterComponent(tree); for (KeyStroke keyStroke : tree.getRegisteredKeyStrokes()) { tree.unregisterKeyboardAction(keyStroke); } myTree = null; } myProject = null; myConfig = null; } private JPanel createToolbarPanel() { final DefaultActionGroup group = new DefaultActionGroup(); group.add(new AddAction()); group.add(new RemoveAction()); group.add(new RunAction()); group.add(new ShowAllTargetsAction()); AnAction action = CommonActionsManager.getInstance().createExpandAllAction(myTreeExpander, this); action.getTemplatePresentation().setDescription(AntBundle.message("ant.explorer.expand.all.nodes.action.description")); group.add(action); action = CommonActionsManager.getInstance().createCollapseAllAction(myTreeExpander, this); action.getTemplatePresentation().setDescription(AntBundle.message("ant.explorer.collapse.all.nodes.action.description")); group.add(action); group.add(myAntBuildFilePropertiesAction); final ActionToolbar actionToolBar = ActionManager.getInstance().createActionToolbar(ActionPlaces.ANT_EXPLORER_TOOLBAR, group, true); return JBUI.Panels.simplePanel(actionToolBar.getComponent()); } private void addBuildFile() { final FileChooserDescriptor descriptor = createXmlDescriptor(); descriptor.setTitle(AntBundle.message("select.ant.build.file.dialog.title")); descriptor.setDescription(AntBundle.message("select.ant.build.file.dialog.description")); final VirtualFile[] files = FileChooser.chooseFiles(descriptor, myProject, null); addBuildFile(files); } private void addBuildFile(final VirtualFile[] files) { if (files.length == 0) { return; } ApplicationManager.getApplication().invokeLater(() -> { final AntConfiguration antConfiguration = myConfig; if (antConfiguration == null) { return; } final List<VirtualFile> ignoredFiles = new ArrayList<>(); for (VirtualFile file : files) { try { antConfiguration.addBuildFile(file); } catch (AntNoFileException e) { ignoredFiles.add(e.getFile()); } } if (ignoredFiles.size() != 0) { 
String messageText; final StringBuilder message = StringBuilderSpinAllocator.alloc(); try { String separator = ""; for (final VirtualFile virtualFile : ignoredFiles) { message.append(separator); message.append(virtualFile.getPresentableUrl()); separator = "\n"; } messageText = message.toString(); } finally { StringBuilderSpinAllocator.dispose(message); } Messages.showWarningDialog(myProject, messageText, AntBundle.message("cannot.add.ant.files.dialog.title")); } }); } public void removeBuildFile() { final AntBuildFile buildFile = getCurrentBuildFile(); if (buildFile == null) { return; } final String fileName = buildFile.getPresentableUrl(); final int result = Messages.showYesNoDialog(myProject, AntBundle.message("remove.the.reference.to.file.confirmation.text", fileName), AntBundle.message("confirm.remove.dialog.title"), Messages.getQuestionIcon()); if (result != Messages.YES) { return; } myConfig.removeBuildFile(buildFile); } public void setBuildFileProperties() { final AntBuildFileBase buildFile = getCurrentBuildFile(); if (buildFile != null && BuildFilePropertiesPanel.editBuildFile(buildFile, myProject)) { myConfig.updateBuildFile(buildFile); myBuilder.queueUpdate(); myTree.repaint(); } } private void runSelection(final DataContext dataContext) { if (!canRunSelection()) { return; } final AntBuildFileBase buildFile = getCurrentBuildFile(); if (buildFile != null) { final TreePath[] paths = myTree.getSelectionPaths(); final String[] targets = getTargetNamesFromPaths(paths); ExecutionHandler.runBuild(buildFile, targets, null, dataContext, Collections.emptyList(), AntBuildListener.NULL); } } private boolean canRunSelection() { if (myTree == null) { return false; } final TreePath[] paths = myTree.getSelectionPaths(); if (paths == null) { return false; } final AntBuildFile buildFile = getCurrentBuildFile(); if (buildFile == null || !buildFile.exists()) { return false; } for (final TreePath path : paths) { final DefaultMutableTreeNode node = 
(DefaultMutableTreeNode)path.getLastPathComponent();
      final Object userObject = node.getUserObject();
      // Resolve the build-file descriptor owning this node: a target node's parent
      // is its build-file node; a build-file node is its own descriptor.
      final AntBuildFileNodeDescriptor buildFileNodeDescriptor;
      if (userObject instanceof AntTargetNodeDescriptor) {
        buildFileNodeDescriptor = (AntBuildFileNodeDescriptor)((DefaultMutableTreeNode)node.getParent()).getUserObject();
      }
      else if (userObject instanceof AntBuildFileNodeDescriptor){
        buildFileNodeDescriptor = (AntBuildFileNodeDescriptor)userObject;
      }
      else {
        buildFileNodeDescriptor = null;
      }
      // Every selected node must belong to the currently selected build file.
      if (buildFileNodeDescriptor == null || buildFileNodeDescriptor.getBuildFile() != buildFile) {
        return false;
      }
    }
    return true;
  }

  /**
   * Collects the target names referenced by the given selection paths.
   * Meta targets contribute all of their constituent target names; plain targets
   * contribute their own name; nodes that are not targets are skipped.
   */
  private static String[] getTargetNamesFromPaths(TreePath[] paths) {
    final List<String> targets = new ArrayList<>();
    for (final TreePath path : paths) {
      final Object userObject = ((DefaultMutableTreeNode)path.getLastPathComponent()).getUserObject();
      if (!(userObject instanceof AntTargetNodeDescriptor)) {
        continue;
      }
      final AntBuildTarget target = ((AntTargetNodeDescriptor)userObject).getTarget();
      if (target instanceof MetaTarget) {
        // A meta target expands to the names of all targets it aggregates.
        ContainerUtil.addAll(targets, ((MetaTarget)target).getTargetNames());
      }
      else {
        targets.add(target.getName());
      }
    }
    return ArrayUtil.toStringArray(targets);
  }

  /**
   * Extracts the {@code AntBuildTarget} objects for every target node in the
   * given selection paths; nodes that are not targets are filtered out.
   */
  private static AntBuildTarget[] getTargetObjectsFromPaths(TreePath[] paths) {
    return Arrays.stream(paths)
      .map(path -> ((DefaultMutableTreeNode)path.getLastPathComponent()).getUserObject())
      .filter(userObject -> userObject instanceof AntTargetNodeDescriptor)
      .map(userObject -> ((AntTargetNodeDescriptor)userObject).getTarget())
      .toArray(AntBuildTarget[]::new);
  }

  /** Returns true when a build file is currently selected and still exists on disk. */
  public boolean isBuildFileSelected() {
    if( myProject == null) return false;
    final AntBuildFileBase file = getCurrentBuildFile();
    return file != null && file.exists();
  }

  /** Returns the build file owning the current tree selection, or null when there is none. */
  @Nullable
  private AntBuildFileBase getCurrentBuildFile() {
    final AntBuildFileNodeDescriptor descriptor = getCurrentBuildFileNodeDescriptor();
    return (AntBuildFileBase)((descriptor == null) ?
null : descriptor.getBuildFile()); } @Nullable private AntBuildFileNodeDescriptor getCurrentBuildFileNodeDescriptor() { if (myTree == null) { return null; } final TreePath path = myTree.getSelectionPath(); if (path == null) { return null; } DefaultMutableTreeNode node = (DefaultMutableTreeNode)path.getLastPathComponent(); while (node != null) { final Object userObject = node.getUserObject(); if (userObject instanceof AntBuildFileNodeDescriptor) { return (AntBuildFileNodeDescriptor)userObject; } node = (DefaultMutableTreeNode)node.getParent(); } return null; } private void popupInvoked(final Component comp, final int x, final int y) { Object userObject = null; final TreePath path = myTree.getSelectionPath(); if (path != null) { final DefaultMutableTreeNode node = (DefaultMutableTreeNode)path.getLastPathComponent(); if (node != null) { userObject = node.getUserObject(); } } final DefaultActionGroup group = new DefaultActionGroup(); group.add(new RunAction()); group.add(new CreateMetaTargetAction()); group.add(new MakeAntRunConfigurationAction()); group.add(new RemoveMetaTargetsOrBuildFileAction()); group.add(ActionManager.getInstance().getAction(IdeActions.ACTION_EDIT_SOURCE)); if (userObject instanceof AntBuildFileNodeDescriptor) { group.add(new RemoveBuildFileAction(this)); } if (userObject instanceof AntTargetNodeDescriptor) { final AntBuildTargetBase target = ((AntTargetNodeDescriptor)userObject).getTarget(); final DefaultActionGroup executeOnGroup = new DefaultActionGroup(AntBundle.message("ant.explorer.execute.on.action.group.name"), true); executeOnGroup.add(new ExecuteOnEventAction(target, ExecuteBeforeCompilationEvent.getInstance())); executeOnGroup.add(new ExecuteOnEventAction(target, ExecuteAfterCompilationEvent.getInstance())); executeOnGroup.addSeparator(); executeOnGroup.add(new ExecuteBeforeRunAction(target)); group.add(executeOnGroup); group.add(new AssignShortcutAction(target.getActionId())); } group.add(myAntBuildFilePropertiesAction); final 
ActionPopupMenu popupMenu = ActionManager.getInstance().createActionPopupMenu(ActionPlaces.ANT_EXPLORER_POPUP, group); popupMenu.getComponent().show(comp, x, y); } @Override @Nullable public Object getData(@NonNls String dataId) { if (CommonDataKeys.NAVIGATABLE.is(dataId)) { final AntBuildFile buildFile = getCurrentBuildFile(); if (buildFile == null) { return null; } final VirtualFile file = buildFile.getVirtualFile(); if (file == null) { return null; } final TreePath treePath = myTree.getLeadSelectionPath(); if (treePath == null) { return null; } final DefaultMutableTreeNode node = (DefaultMutableTreeNode)treePath.getLastPathComponent(); if (node == null) { return null; } if (node.getUserObject() instanceof AntTargetNodeDescriptor) { final AntTargetNodeDescriptor targetNodeDescriptor = (AntTargetNodeDescriptor)node.getUserObject(); final AntBuildTargetBase buildTarget = targetNodeDescriptor.getTarget(); final OpenFileDescriptor descriptor = buildTarget.getOpenFileDescriptor(); if (descriptor != null) { final VirtualFile descriptorFile = descriptor.getFile(); if (descriptorFile.isValid()) { return descriptor; } } } if (file.isValid()) { return new OpenFileDescriptor(myProject, file); } } else if (PlatformDataKeys.HELP_ID.is(dataId)) { return HelpID.ANT; } else if (PlatformDataKeys.TREE_EXPANDER.is(dataId)) { return myProject != null? myTreeExpander : null; } else if (CommonDataKeys.VIRTUAL_FILE_ARRAY.is(dataId)) { final List<VirtualFile> virtualFiles = collectAntFiles(buildFile -> { final VirtualFile virtualFile = buildFile.getVirtualFile(); if (virtualFile != null && virtualFile.isValid()) { return virtualFile; } return null; }); return virtualFiles == null ? null : virtualFiles.toArray(VirtualFile.EMPTY_ARRAY); } else if (LangDataKeys.PSI_ELEMENT_ARRAY.is(dataId)) { final List<PsiElement> elements = collectAntFiles(AntBuildFile::getAntFile); return elements == null ? 
null : elements.toArray(PsiElement.EMPTY_ARRAY);
    }
    return super.getData(dataId);
  }

  /**
   * Maps every build file in the current tree selection through {@code function},
   * dropping null results. Each selected path is walked upwards to its nearest
   * build-file node so that target selections resolve to their owning file.
   * Returns null when there is no selection or no mapping survived.
   */
  private <T> List<T> collectAntFiles(final Function<AntBuildFile, T> function) {
    final TreePath[] paths = myTree.getSelectionPaths();
    if (paths == null) {
      return null;
    }
    // LinkedHashSet: deduplicates build files while preserving selection order.
    Set<AntBuildFile> antFiles = new LinkedHashSet<>();
    for (final TreePath path : paths) {
      for (DefaultMutableTreeNode node = (DefaultMutableTreeNode)path.getLastPathComponent();
           node != null;
           node = (DefaultMutableTreeNode)node.getParent()) {
        final Object userObject = node.getUserObject();
        if (!(userObject instanceof AntBuildFileNodeDescriptor)) {
          continue;
        }
        final AntBuildFile buildFile = ((AntBuildFileNodeDescriptor)userObject).getBuildFile();
        if (buildFile != null) {
          antFiles.add(buildFile);
        }
        break; // stop at the first (nearest) build-file ancestor
      }
    }
    final List<T> result = new ArrayList<>();
    ContainerUtil.addAllNotNull(result, ContainerUtil.map(antFiles, function));
    return result.isEmpty() ? null : result;
  }

  /** File-chooser descriptor whose visibility filter hides non-XML files (directories stay navigable). */
  public static FileChooserDescriptor createXmlDescriptor() {
    return new FileChooserDescriptor(true, false, false, false, false, true){
      @Override
      public boolean isFileVisible(VirtualFile file, boolean showHiddenFiles) {
        boolean b = super.isFileVisible(file, showHiddenFiles);
        if (!file.isDirectory()) {
          b &= StdFileTypes.XML.equals(file.getFileType());
        }
        return b;
      }
    };
  }

  /** Renders tree cells: Ant node descriptors paint themselves; anything else is rendered as plain text. */
  private static final class NodeRenderer extends ColoredTreeCellRenderer {
    @Override
    public void customizeCellRenderer(@NotNull JTree tree, Object value, boolean selected, boolean expanded, boolean leaf, int row, boolean hasFocus) {
      final Object userObject = ((DefaultMutableTreeNode)value).getUserObject();
      if (userObject instanceof AntNodeDescriptor) {
        final AntNodeDescriptor descriptor = (AntNodeDescriptor)userObject;
        descriptor.customize(this);
      }
      else {
        append(tree.convertValueToText(value, selected, expanded, leaf, row, hasFocus), SimpleTextAttributes.REGULAR_ATTRIBUTES);
      }
    }
  }

  /** Toolbar action: prompts the user to add an Ant build file. */
  private final class AddAction extends AnAction {
    public AddAction() {
super(AntBundle.message("add.ant.file.action.name"), AntBundle.message("add.ant.file.action.description"), IconUtil.getAddIcon());
    }

    @Override
    public void actionPerformed(AnActionEvent e) {
      addBuildFile();
    }
  }

  /** Toolbar action: removes the selected build file; disabled when none is selected. */
  private final class RemoveAction extends AnAction {
    public RemoveAction() {
      super(AntBundle.message("remove.ant.file.action.name"), AntBundle.message("remove.ant.file.action.description"), IconUtil.getRemoveIcon());
    }

    @Override
    public void actionPerformed(AnActionEvent e) {
      removeBuildFile();
    }

    @Override
    public void update(AnActionEvent event) {
      event.getPresentation().setEnabled(getCurrentBuildFile() != null);
    }
  }

  /** Runs the selected build file or target(s); the action's label adapts to the selection. */
  private final class RunAction extends AnAction {
    public RunAction() {
      super(AntBundle.message("run.ant.file.or.target.action.name"), AntBundle.message("run.ant.file.or.target.action.description"), AllIcons.Actions.Execute);
    }

    @Override
    public void actionPerformed(AnActionEvent e) {
      runSelection(e.getDataContext());
    }

    @Override
    public void update(AnActionEvent event) {
      final Presentation presentation = event.getPresentation();
      final String place = event.getPlace();
      if (ActionPlaces.ANT_EXPLORER_TOOLBAR.equals(place)) {
        // The toolbar always shows the generic label.
        presentation.setText(AntBundle.message("run.ant.file.or.target.action.name"));
      }
      else {
        final TreePath[] paths = myTree.getSelectionPaths();
        if (paths != null && paths.length == 1 &&
            ((DefaultMutableTreeNode)paths[0].getLastPathComponent()).getUserObject() instanceof AntBuildFileNodeDescriptor) {
          // A single build-file node is selected: offer to run the whole build.
          presentation.setText(AntBundle.message("run.ant.build.action.name"));
        }
        else {
          if (paths == null || paths.length == 1) {
            presentation.setText(AntBundle.message("run.ant.target.action.name"));
          }
          else {
            presentation.setText(AntBundle.message("run.ant.targets.action.name"));
          }
        }
      }
      presentation.setEnabled(canRunSelection());
    }
  }

  /** Creates an Ant run configuration from the selected target (or the build file's default target). */
  private final class MakeAntRunConfigurationAction extends AnAction {
    public MakeAntRunConfigurationAction() {
      super(AntBundle.message("make.ant.runconfiguration.name"), null, AntIcons.Build);
    }

    @Override
    public void
update(AnActionEvent e) { super.update(e); final Presentation presentation = e.getPresentation(); presentation.setEnabled(myTree.getSelectionCount() == 1 && canRunSelection()); } @Override public void actionPerformed(AnActionEvent e) { final AntBuildFile buildFile = getCurrentBuildFile(); if (buildFile == null || !buildFile.exists()) { return; } TreePath selectionPath = myTree.getSelectionPath(); if (selectionPath == null) return; final DefaultMutableTreeNode node = (DefaultMutableTreeNode) selectionPath.getLastPathComponent(); final Object userObject = node.getUserObject(); AntBuildTarget target = null; if (userObject instanceof AntTargetNodeDescriptor) { AntTargetNodeDescriptor targetNodeDescriptor = (AntTargetNodeDescriptor)userObject; target = targetNodeDescriptor.getTarget(); } else if (userObject instanceof AntBuildFileNodeDescriptor){ AntBuildModel model = ((AntBuildFileNodeDescriptor)userObject).getBuildFile().getModel(); target = model.findTarget(model.getDefaultTargetName()); } String name = target != null ? 
target.getDisplayName() : null; if (target == null || name == null) { return; } RunManager runManager = RunManager.getInstance(myProject); RunnerAndConfigurationSettings settings = runManager.createRunConfiguration(name, AntRunConfigurationType.getInstance().getFactory()); AntRunConfiguration configuration = (AntRunConfiguration)settings.getConfiguration(); configuration.acceptSettings(target); if (RunDialog.editConfiguration(e.getProject(), settings, ExecutionBundle .message("create.run.configuration.for.item.dialog.title", configuration.getName()))) { runManager.addConfiguration(settings); runManager.setSelectedConfiguration(settings); } } } private final class ShowAllTargetsAction extends ToggleAction { public ShowAllTargetsAction() { super(AntBundle.message("filter.ant.targets.action.name"), AntBundle.message("filter.ant.targets.action.description"), AllIcons.General.Filter); } @Override public boolean isSelected(AnActionEvent event) { final Project project = myProject; return project != null && AntConfigurationBase.getInstance(project).isFilterTargets(); } @Override public void setSelected(AnActionEvent event, boolean flag) { setTargetsFiltered(flag); } } private void setTargetsFiltered(boolean value) { myBuilder.setTargetsFiltered(value); AntConfigurationBase.getInstance(myProject).setFilterTargets(value); } private final class ExecuteOnEventAction extends ToggleAction { private final AntBuildTargetBase myTarget; private final ExecutionEvent myExecutionEvent; public ExecuteOnEventAction(final AntBuildTargetBase target, final ExecutionEvent executionEvent) { super(executionEvent.getPresentableName()); myTarget = target; myExecutionEvent = executionEvent; } @Override public boolean isSelected(AnActionEvent e) { return myTarget.equals(AntConfigurationBase.getInstance(myProject).getTargetForEvent(myExecutionEvent)); } @Override public void setSelected(AnActionEvent event, boolean state) { final AntConfigurationBase antConfiguration = 
AntConfigurationBase.getInstance(myProject); if (state) { final AntBuildFileBase buildFile = (AntBuildFileBase)((myTarget instanceof MetaTarget) ? ((MetaTarget)myTarget).getBuildFile() : myTarget.getModel().getBuildFile()); antConfiguration.setTargetForEvent(buildFile, myTarget.getName(), myExecutionEvent); } else { antConfiguration.clearTargetForEvent(myExecutionEvent); } myBuilder.queueUpdate(); } @Override public void update(@NotNull AnActionEvent e) { super.update(e); final AntBuildFile buildFile = myTarget.getModel().getBuildFile(); e.getPresentation().setEnabled(buildFile != null && buildFile.exists()); } } private final class ExecuteBeforeRunAction extends AnAction { private final AntBuildTarget myTarget; public ExecuteBeforeRunAction(final AntBuildTarget target) { super(AntBundle.message("executes.before.run.debug.acton.name")); myTarget = target; } @Override public void actionPerformed(AnActionEvent e) { final AntExecuteBeforeRunDialog dialog = new AntExecuteBeforeRunDialog(myProject, myTarget); dialog.show(); } @Override public void update(AnActionEvent e) { e.getPresentation().setEnabled(myTarget.getModel().getBuildFile().exists()); } } private final class CreateMetaTargetAction extends AnAction { public CreateMetaTargetAction() { super(AntBundle.message("ant.create.meta.target.action.name"), AntBundle.message("ant.create.meta.target.action.description"), null /*IconLoader.getIcon("/actions/execute.png")*/); } @Override public void actionPerformed(AnActionEvent e) { final AntBuildFile buildFile = getCurrentBuildFile(); final String[] targets = getTargetNamesFromPaths(myTree.getSelectionPaths()); final ExecuteCompositeTargetEvent event = new ExecuteCompositeTargetEvent(targets); final SaveMetaTargetDialog dialog = new SaveMetaTargetDialog(myTree, event, AntConfigurationBase.getInstance(myProject), buildFile); dialog.setTitle(e.getPresentation().getText()); if (dialog.showAndGet()) { myBuilder.queueUpdate(); myTree.repaint(); } } @Override public void 
update(AnActionEvent e) { final TreePath[] paths = myTree.getSelectionPaths(); e.getPresentation().setEnabled(paths != null && paths.length > 1 && canRunSelection()); } } private final class RemoveMetaTargetsOrBuildFileAction extends AnAction { public RemoveMetaTargetsOrBuildFileAction() { super(AntBundle.message("remove.meta.targets.action.name"), AntBundle.message("remove.meta.targets.action.description"), null); registerCustomShortcutSet(CommonShortcuts.getDelete(), myTree); Disposer.register(AntExplorer.this, new Disposable() { @Override public void dispose() { RemoveMetaTargetsOrBuildFileAction.this.unregisterCustomShortcutSet(myTree); } }); myTree.registerKeyboardAction(new AbstractAction() { @Override public void actionPerformed(ActionEvent e) { doAction(); } }, KeyStroke.getKeyStroke(KeyEvent.VK_DELETE, 0), WHEN_ANCESTOR_OF_FOCUSED_COMPONENT); } @Override public void actionPerformed(AnActionEvent e) { doAction(); } private void doAction() { final TreePath[] paths = myTree.getSelectionPaths(); if (paths == null) { return; } try { // try to remove build file if (paths.length == 1) { final DefaultMutableTreeNode node = (DefaultMutableTreeNode)paths[0].getLastPathComponent(); if (node.getUserObject() instanceof AntBuildFileNodeDescriptor) { final AntBuildFileNodeDescriptor descriptor = (AntBuildFileNodeDescriptor)node.getUserObject(); if (descriptor.getBuildFile().equals(getCurrentBuildFile())) { removeBuildFile(); return; } } } // try to remove meta targets final AntBuildTarget[] targets = getTargetObjectsFromPaths(paths); final AntConfigurationBase antConfiguration = AntConfigurationBase.getInstance(myProject); for (final AntBuildTarget buildTarget : targets) { if (buildTarget instanceof MetaTarget) { for (final ExecutionEvent event : antConfiguration.getEventsForTarget(buildTarget)) { if (event instanceof ExecuteCompositeTargetEvent) { antConfiguration.clearTargetForEvent(event); } } } } } finally { myBuilder.queueUpdate(); myTree.repaint(); } } @Override 
public void update(AnActionEvent e) {
      final Presentation presentation = e.getPresentation();
      final TreePath[] paths = myTree.getSelectionPaths();
      if (paths == null) {
        presentation.setEnabled(false);
        return;
      }
      if (paths.length == 1) {
        // Single selection: either the current build file or a single meta target may be removed.
        String text = AntBundle.message("remove.meta.target.action.name");
        boolean enabled = false;
        final DefaultMutableTreeNode node = (DefaultMutableTreeNode)paths[0].getLastPathComponent();
        if (node.getUserObject() instanceof AntBuildFileNodeDescriptor) {
          final AntBuildFileNodeDescriptor descriptor = (AntBuildFileNodeDescriptor)node.getUserObject();
          if (descriptor.getBuildFile().equals(getCurrentBuildFile())) {
            text = AntBundle.message("remove.selected.build.file.action.name");
            enabled = true;
          }
        }
        else {
          if (node.getUserObject() instanceof AntTargetNodeDescriptor) {
            final AntTargetNodeDescriptor descr = (AntTargetNodeDescriptor)node.getUserObject();
            final AntBuildTargetBase target = descr.getTarget();
            if (target instanceof MetaTarget) {
              enabled = true;
            }
          }
        }
        presentation.setText(text);
        presentation.setEnabled(enabled);
      }
      else {
        // Multi-selection: enabled only when every selected target is a meta target.
        presentation.setText(AntBundle.message("remove.selected.meta.targets.action.name"));
        final AntBuildTarget[] targets = getTargetObjectsFromPaths(paths);
        boolean enabled = targets.length > 0;
        for (final AntBuildTarget buildTarget : targets) {
          if (!(buildTarget instanceof MetaTarget)) {
            enabled = false;
            break;
          }
        }
        presentation.setEnabled(enabled);
      }
    }
  }

  /** Opens the keymap editor so the user can assign a shortcut to the target's action. */
  private final class AssignShortcutAction extends AnAction {
    private final String myActionId;

    public AssignShortcutAction(String actionId) {
      super(AntBundle.message("ant.explorer.assign.shortcut.action.name"));
      myActionId = actionId;
    }

    @Override
    public void actionPerformed(AnActionEvent e) {
      new EditKeymapsDialog(myProject, myActionId).show();
    }

    @Override
    public void update(AnActionEvent e) {
      // Enabled only when the target's action is actually registered.
      e.getPresentation().setEnabled(myActionId != null && ActionManager.getInstance().getAction(myActionId) != null);
    }
  }

  /** Refreshes the tree whenever the active keymap or one of its shortcuts changes. */
  private class KeymapListener implements KeymapManagerListener,
Keymap.Listener {
    private Keymap myCurrentKeymap = null; // keymap currently subscribed to for shortcut changes, if any

    public KeymapListener() {
      final KeymapManagerEx keymapManager = KeymapManagerEx.getInstanceEx();
      final Keymap activeKeymap = keymapManager.getActiveKeymap();
      listenTo(activeKeymap);
      // Registered against AntExplorer so the listener is removed on dispose.
      keymapManager.addKeymapManagerListener(this, AntExplorer.this);
    }

    @Override
    public void activeKeymapChanged(Keymap keymap) {
      listenTo(keymap);
      updateTree();
    }

    /** Moves the shortcut-change subscription from the previous keymap to {@code keymap} (may be null). */
    private void listenTo(Keymap keymap) {
      if (myCurrentKeymap != null) {
        myCurrentKeymap.removeShortcutChangeListener(this);
      }
      myCurrentKeymap = keymap;
      if (myCurrentKeymap != null) {
        myCurrentKeymap.addShortcutChangeListener(this);
      }
    }

    private void updateTree() {
      //noinspection deprecation
      myBuilder.updateFromRoot();
    }

    @Override
    public void onShortcutChanged(String actionId) {
      updateTree();
    }

    /** Detaches from the current keymap; used when the explorer shuts down. */
    public void stopListen() {
      listenTo(null);
    }
  }

  /** Accepts files dropped onto the tree and registers them as Ant build files. */
  private final class MyTransferHandler extends TransferHandler {
    @Override
    public boolean importData(final TransferSupport support) {
      if (canImport(support)) {
        addBuildFile(getAntFiles(support));
        return true;
      }
      return false;
    }

    @Override
    public boolean canImport(final TransferSupport support) {
      return FileCopyPasteUtil.isFileListFlavorAvailable(support.getDataFlavors());
    }

    /** Converts the dropped {@code java.io.File} list into VirtualFiles, skipping unresolvable entries. */
    private VirtualFile[] getAntFiles(final TransferSupport support) {
      List<VirtualFile> virtualFileList = new ArrayList<>();
      final List<File> fileList = FileCopyPasteUtil.getFileList(support.getTransferable());
      if (fileList != null) {
        for (File file : fileList ) {
          ContainerUtil.addIfNotNull(virtualFileList, VfsUtil.findFileByIoFile(file, true));
        }
      }
      return VfsUtilCore.toVirtualFileArray(virtualFileList);
    }
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.tinkerpop.gremlin.server.handler; import com.codahale.metrics.Timer; import org.apache.tinkerpop.gremlin.driver.ser.SerializationException; import org.javatuples.Pair; import org.javatuples.Quartet; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.commons.lang3.exception.ExceptionUtils; import org.apache.tinkerpop.gremlin.driver.MessageSerializer; import org.apache.tinkerpop.gremlin.driver.message.ResponseMessage; import org.apache.tinkerpop.gremlin.driver.message.ResponseStatusCode; import org.apache.tinkerpop.gremlin.driver.ser.MessageTextSerializer; import org.apache.tinkerpop.gremlin.groovy.engine.GremlinExecutor; import org.apache.tinkerpop.gremlin.process.traversal.TraversalSource; import org.apache.tinkerpop.gremlin.server.GraphManager; import org.apache.tinkerpop.gremlin.server.GremlinServer; import org.apache.tinkerpop.gremlin.server.Settings; import org.apache.tinkerpop.gremlin.server.auth.AuthenticatedUser; import org.apache.tinkerpop.gremlin.server.util.MetricManager; import org.apache.tinkerpop.gremlin.structure.Graph; import org.apache.tinkerpop.gremlin.util.function.FunctionUtils; import 
org.apache.tinkerpop.gremlin.util.iterator.IteratorUtils; import io.netty.buffer.ByteBuf; import io.netty.buffer.Unpooled; import io.netty.channel.ChannelHandler; import io.netty.channel.ChannelHandlerContext; import io.netty.channel.ChannelInboundHandlerAdapter; import io.netty.channel.ChannelPromise; import io.netty.handler.codec.TooLongFrameException; import io.netty.handler.codec.http.DefaultFullHttpResponse; import io.netty.handler.codec.http.FullHttpRequest; import io.netty.handler.codec.http.FullHttpResponse; import io.netty.handler.codec.http.HttpResponseStatus; import io.netty.handler.codec.http.HttpUtil; import io.netty.util.ReferenceCountUtil; import javax.script.Bindings; import javax.script.SimpleBindings; import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.UUID; import java.util.concurrent.CompletableFuture; import java.util.concurrent.atomic.AtomicReference; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Collectors; import java.util.stream.Stream; import static com.codahale.metrics.MetricRegistry.name; import static io.netty.handler.codec.http.HttpHeaderNames.*; import static io.netty.handler.codec.http.HttpMethod.GET; import static io.netty.handler.codec.http.HttpMethod.POST; import static io.netty.handler.codec.http.HttpResponseStatus.METHOD_NOT_ALLOWED; import static io.netty.handler.codec.http.HttpResponseStatus.NOT_FOUND; import static io.netty.handler.codec.http.HttpResponseStatus.OK; import static io.netty.handler.codec.http.HttpResponseStatus.CONTINUE; import static io.netty.handler.codec.http.HttpResponseStatus.BAD_REQUEST; import static io.netty.handler.codec.http.HttpResponseStatus.INTERNAL_SERVER_ERROR; import static io.netty.handler.codec.http.HttpVersion.HTTP_1_1; /** * Handler that processes HTTP requests to the HTTP Gremlin endpoint. 
* * @author Stephen Mallette (http://stephen.genoprime.com) */ @ChannelHandler.Sharable public class HttpGremlinEndpointHandler extends ChannelInboundHandlerAdapter { private static final Logger logger = LoggerFactory.getLogger(HttpGremlinEndpointHandler.class); private static final Logger auditLogger = LoggerFactory.getLogger(GremlinServer.AUDIT_LOGGER_NAME); private static final Charset UTF8 = StandardCharsets.UTF_8; private static final Timer evalOpTimer = MetricManager.INSTANCE.getTimer(name(GremlinServer.class, "op", "eval")); /** * Serializers for the response. */ private final Map<String, MessageSerializer<?>> serializers; private final GremlinExecutor gremlinExecutor; private final GraphManager graphManager; private final Settings settings; private static final Pattern pattern = Pattern.compile("(.*);q=(.*)"); public HttpGremlinEndpointHandler(final Map<String, MessageSerializer<?>> serializers, final GremlinExecutor gremlinExecutor, final GraphManager graphManager, final Settings settings) { this.serializers = serializers; this.gremlinExecutor = gremlinExecutor; this.graphManager = graphManager; this.settings = settings; } @Override public void channelRead(final ChannelHandlerContext ctx, final Object msg) { if (msg instanceof FullHttpRequest) { final FullHttpRequest req = (FullHttpRequest) msg; final boolean keepAlive = HttpUtil.isKeepAlive(req); if ("/favicon.ico".equals(req.uri())) { HttpHandlerUtil.sendError(ctx, NOT_FOUND, "Gremlin Server doesn't have a favicon.ico", keepAlive); ReferenceCountUtil.release(msg); return; } if (HttpUtil.is100ContinueExpected(req)) { ctx.write(new DefaultFullHttpResponse(HTTP_1_1, CONTINUE)); } if (req.method() != GET && req.method() != POST) { HttpHandlerUtil.sendError(ctx, METHOD_NOT_ALLOWED, METHOD_NOT_ALLOWED.toString(), keepAlive); ReferenceCountUtil.release(msg); return; } final Quartet<String, Map<String, Object>, String, Map<String, String>> requestArguments; try { requestArguments = 
HttpHandlerUtil.getRequestArguments(req); } catch (IllegalArgumentException iae) { HttpHandlerUtil.sendError(ctx, BAD_REQUEST, iae.getMessage(), keepAlive); ReferenceCountUtil.release(msg); return; } final String acceptString = Optional.ofNullable(req.headers().get("Accept")).orElse("application/json"); final Pair<String, MessageTextSerializer<?>> serializer = chooseSerializer(acceptString); if (null == serializer) { HttpHandlerUtil.sendError(ctx, BAD_REQUEST, String.format("no serializer for requested Accept header: %s", acceptString), keepAlive); ReferenceCountUtil.release(msg); return; } final String origin = req.headers().get(ORIGIN); // not using the req any where below here - assume it is safe to release at this point. ReferenceCountUtil.release(msg); try { logger.debug("Processing request containing script [{}] and bindings of [{}] on {}", requestArguments.getValue0(), requestArguments.getValue1(), Thread.currentThread().getName()); if (settings.enableAuditLog) { AuthenticatedUser user = ctx.channel().attr(StateKey.AUTHENTICATED_USER).get(); if (null == user) { // This is expected when using the AllowAllAuthenticator user = AuthenticatedUser.ANONYMOUS_USER; } String address = ctx.channel().remoteAddress().toString(); if (address.startsWith("/") && address.length() > 1) address = address.substring(1); auditLogger.info("User {} with address {} requested: {}", user.getName(), address, requestArguments.getValue0()); } final ChannelPromise promise = ctx.channel().newPromise(); final AtomicReference<Object> resultHolder = new AtomicReference<>(); promise.addListener(future -> { // if failed then the error was already written back to the client as part of the eval future // processing of the exception if (future.isSuccess()) { logger.debug("Preparing HTTP response for request with script [{}] and bindings of [{}] with result of [{}] on [{}]", requestArguments.getValue0(), requestArguments.getValue1(), resultHolder.get(), Thread.currentThread().getName()); final 
FullHttpResponse response = new DefaultFullHttpResponse(HTTP_1_1, OK, (ByteBuf) resultHolder.get()); response.headers().set(CONTENT_TYPE, serializer.getValue0()); // handle cors business if (origin != null) response.headers().set(ACCESS_CONTROL_ALLOW_ORIGIN, origin); HttpHandlerUtil.sendAndCleanupConnection(ctx, keepAlive, response); } }); final Timer.Context timerContext = evalOpTimer.time(); final Bindings bindings; try { bindings = createBindings(requestArguments.getValue1(), requestArguments.getValue3()); } catch (IllegalStateException iae) { HttpHandlerUtil.sendError(ctx, BAD_REQUEST, iae.getMessage(), keepAlive); ReferenceCountUtil.release(msg); return; } // provide a transform function to serialize to message - this will force serialization to occur // in the same thread as the eval. after the CompletableFuture is returned from the eval the result // is ready to be written as a ByteBuf directly to the response. nothing should be blocking here. final CompletableFuture<Object> evalFuture = gremlinExecutor.eval(requestArguments.getValue0(), requestArguments.getValue2(), bindings, FunctionUtils.wrapFunction(o -> { // stopping the timer here is roughly equivalent to where the timer would have been stopped for // this metric in other contexts. we just want to measure eval time not serialization time. timerContext.stop(); logger.debug("Transforming result of request with script [{}] and bindings of [{}] with result of [{}] on [{}]", requestArguments.getValue0(), requestArguments.getValue1(), o, Thread.currentThread().getName()); final ResponseMessage responseMessage = ResponseMessage.build(UUID.randomUUID()) .code(ResponseStatusCode.SUCCESS) .result(IteratorUtils.asList(o)).create(); // http server is sessionless and must handle commit on transactions. the commit occurs // before serialization to be consistent with how things work for websocket based // communication. 
this means that failed serialization does not mean that you won't get // a commit to the database attemptCommit(requestArguments.getValue3(), graphManager, settings.strictTransactionManagement); try { return Unpooled.wrappedBuffer(serializer.getValue1().serializeResponseAsString(responseMessage).getBytes(UTF8)); } catch (Exception ex) { logger.warn(String.format("Error during serialization for %s", responseMessage), ex); // creating a new SerializationException will clear the cause which will allow the // future to report a better error message. if the cause is present, then // GremlinExecutor will prefer the cause and we'll get a low level Jackson sort of // error in the response. if (ex instanceof SerializationException) { throw new SerializationException(String.format( "Could not serialize the result with %s - %s", serializer.getValue0(), ex.getMessage())); } throw ex; } })); evalFuture.exceptionally(t -> { if (t.getMessage() != null) HttpHandlerUtil.sendError(ctx, INTERNAL_SERVER_ERROR, t.getMessage(), Optional.of(t), keepAlive); else HttpHandlerUtil.sendError(ctx, INTERNAL_SERVER_ERROR, String.format("Error encountered evaluating script: %s", requestArguments.getValue0()) , Optional.of(t), keepAlive); promise.setFailure(t); return null; }); evalFuture.thenAcceptAsync(r -> { // now that the eval/serialization is done in the same thread - complete the promise so we can // write back the HTTP response on the same thread as the original request resultHolder.set(r); promise.setSuccess(); }, gremlinExecutor.getExecutorService()); } catch (Exception ex) { // send the error response here and don't rely on exception caught because it might not have the // context on whether to close the connection or not, based on keepalive. 
            // Unwrap to the root cause so the client sees the most specific failure message
            // rather than the text of a wrapper exception.
            final Throwable t = ExceptionUtils.getRootCause(ex);
            if (t instanceof TooLongFrameException) {
                // the request body exceeded the configured Netty frame limit - tell the caller how to fix it
                HttpHandlerUtil.sendError(ctx, HttpResponseStatus.REQUEST_ENTITY_TOO_LARGE, t.getMessage() + " - increase the maxContentLength", keepAlive);
            } else if (t != null){
                HttpHandlerUtil.sendError(ctx, INTERNAL_SERVER_ERROR, t.getMessage(), keepAlive);
            } else {
                // no distinct root cause available - fall back to the originally caught exception's message
                HttpHandlerUtil.sendError(ctx, INTERNAL_SERVER_ERROR, ex.getMessage(), keepAlive);
            }
        }
    }

    /**
     * Last-resort handler for exceptions that escaped the normal request-processing path.
     * Logs the failure and, if the channel is still usable, reports a 500 to the client with
     * keep-alive disabled (the connection is closed after the error response is written).
     */
    @Override
    public void exceptionCaught(final ChannelHandlerContext ctx, final Throwable cause) {
        logger.error("Error processing HTTP Request", cause);
        if (ctx.channel().isActive()) {
            HttpHandlerUtil.sendError(ctx, INTERNAL_SERVER_ERROR, cause.getMessage(), false);
        }
    }

    /**
     * Builds the script-engine {@code Bindings} for a single request.
     * <p>
     * Each entry of {@code rebindingMap} aliases a global {@code Graph} or
     * {@code TraversalSource} under a request-chosen variable name; the request's own
     * bindings from {@code bindingMap} are then layered on top (and therefore win on a
     * name collision).
     *
     * @param bindingMap   variables supplied directly by the request
     * @param rebindingMap alias name to global Graph/TraversalSource name
     * @return the populated bindings for script evaluation
     * @throws IllegalStateException if an alias target is neither a known Graph nor a
     *         known TraversalSource
     */
    private Bindings createBindings(final Map<String, Object> bindingMap, final Map<String, String> rebindingMap) {
        final Bindings bindings = new SimpleBindings();

        // rebind any global bindings to a different variable.
        if (!rebindingMap.isEmpty()) {
            for (Map.Entry<String, String> kv : rebindingMap.entrySet()) {
                boolean found = false;

                // a Graph with the requested name takes precedence over a TraversalSource
                final Graph g = this.graphManager.getGraph(kv.getValue());
                if (null != g) {
                    bindings.put(kv.getKey(), g);
                    found = true;
                }

                if (!found) {
                    final TraversalSource ts = this.graphManager.getTraversalSource(kv.getValue());
                    if (null != ts) {
                        bindings.put(kv.getKey(), ts);
                        found = true;
                    }
                }

                if (!found) {
                    final String error = String.format("Could not rebind [%s] to [%s] as [%s] not in the Graph or TraversalSource global bindings",
                            kv.getKey(), kv.getValue(), kv.getValue());
                    throw new IllegalStateException(error);
                }
            }
        }

        bindings.putAll(bindingMap);

        return bindings;
    }

    /**
     * Selects a response serializer from the request's {@code Accept} header.
     * <p>
     * The header is split on commas, each media type is parsed for an optional quality
     * value (defaulting to 1.0 when absent), and candidates are tried in order;
     * {@code *}/{@code *} is treated as "application/json". Note the sort below compares
     * the media-type strings (value0), not the parsed quality values.
     *
     * @param acceptString the raw Accept header value
     * @return a pair of the chosen media type and its serializer, or {@code null} when
     *         no registered serializer matches
     */
    private Pair<String, MessageTextSerializer<?>> chooseSerializer(final String acceptString) {
        final List<Pair<String, Double>> ordered = Stream.of(acceptString.split(",")).map(mediaType -> {
            // parse out each mediaType with its params - keeping it simple and just looking for "quality". if
            // that value isn't there, default it to 1.0. not really validating here so users better get their
            // accept headers straight
            final Matcher matcher = pattern.matcher(mediaType);
            return (matcher.matches()) ? Pair.with(matcher.group(1), Double.parseDouble(matcher.group(2))) : Pair.with(mediaType, 1.0);
        }).sorted((o1, o2) -> o2.getValue0().compareTo(o1.getValue0())).collect(Collectors.toList());

        for (Pair<String, Double> p : ordered) {
            // this isn't perfect as it doesn't really account for wildcards. that level of complexity doesn't seem
            // super useful for gremlin server really.
            final String accept = p.getValue0().equals("*/*") ? "application/json" : p.getValue0();
            if (serializers.containsKey(accept)) return Pair.with(accept, (MessageTextSerializer<?>) serializers.get(accept));
        }

        return null;
    }

    /**
     * Commits open transactions after evaluation. In strict transaction management only
     * the graphs named in the request's aliases are committed; otherwise every managed
     * graph is committed.
     *
     * @param aliases      the request's alias-to-graph-name map
     * @param graphManager manager holding the configured graphs
     * @param strict       whether strict transaction management is enabled
     */
    private static void attemptCommit(final Map<String, String> aliases, final GraphManager graphManager, final boolean strict) {
        if (strict)
            graphManager.commit(new HashSet<>(aliases.values()));
        else
            graphManager.commitAll();
    }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
/*
 * This code was generated by https://github.com/googleapis/google-api-java-client-services/
 * Modify at your own risk.
 */

package com.google.api.services.displayvideo.v1.model;

/**
 * A single invoice.
 *
 * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
 * transmitted over HTTP when working with the Display &amp; Video 360 API. For a detailed explanation
 * see:
 * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
 * </p>
 *
 * <p>NOTE(review): generated code - serialization is driven entirely by the
 * {@code @com.google.api.client.util.Key} annotations below; field names double as JSON keys.
 * {@code Date} and {@code DateRange} resolve to the sibling model classes in this package
 * (no imports are declared), not to {@code java.util.Date}.</p>
 *
 * @author Google, Inc.
 */
@SuppressWarnings("javadoc")
public final class Invoice extends com.google.api.client.json.GenericJson {

  /**
   * The budget grouping ID for this invoice. This field will only be set if the invoice level of
   * the corresponding billing profile was set to "Budget invoice grouping ID".
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String budgetInvoiceGroupingId;

  /**
   * The list of summarized information for each budget associated with this invoice. This field
   * will only be set if the invoice detail level of the corresponding billing profile was set to
   * "Budget level PO".
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<BudgetSummary> budgetSummaries;

  static {
    // hack to force ProGuard to consider BudgetSummary used, since otherwise it would be stripped out
    // see https://github.com/google/google-api-java-client/issues/543
    com.google.api.client.util.Data.nullOf(BudgetSummary.class);
  }

  /**
   * The ID of the original invoice being adjusted by this invoice, if applicable. May appear on the
   * invoice PDF as `Reference invoice number`. If replaced_invoice_ids is set, this field will be
   * empty.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String correctedInvoiceId;

  /**
   * The currency used in the invoice in ISO 4217 format.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String currencyCode;

  /**
   * The display name of the invoice.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String displayName;

  /**
   * The date when the invoice is due.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private Date dueDate;

  /**
   * The unique ID of the invoice.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String invoiceId;

  /**
   * The type of invoice document.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String invoiceType;

  /**
   * The date when the invoice was issued.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private Date issueDate;

  /**
   * The resource name of the invoice.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String name;

  /**
   * The total amount of costs or adjustments not tied to a particular budget, in micros of the
   * invoice's currency. For example, if currency_code is `USD`, then 1000000 represents one US
   * dollar.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  @com.google.api.client.json.JsonString
  private java.lang.Long nonBudgetMicros;

  /**
   * The ID of the payments account the invoice belongs to. Appears on the invoice PDF as `Billing
   * Account Number`.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String paymentsAccountId;

  /**
   * The ID of the payments profile the invoice belongs to. Appears on the invoice PDF as `Billing
   * ID`.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String paymentsProfileId;

  /**
   * The URL to download a PDF copy of the invoice. This URL is user specific and requires a valid
   * OAuth 2.0 access token to access. The access token must be provided in an `Authorization:
   * Bearer` HTTP header and be authorized for one of the following scopes: *
   * `https://www.googleapis.com/auth/display-video-mediaplanning` *
   * `https://www.googleapis.com/auth/display-video` The URL will be valid for 7 days after
   * retrieval of this invoice object or until this invoice is retrieved again.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String pdfUrl;

  /**
   * Purchase order number associated with the invoice.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String purchaseOrderNumber;

  /**
   * The ID(s) of any originally issued invoice that is being cancelled by this invoice, if
   * applicable. Multiple invoices may be listed if those invoices are being consolidated into a
   * single invoice. May appear on invoice PDF as `Replaced invoice numbers`. If
   * corrected_invoice_id is set, this field will be empty.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<java.lang.String> replacedInvoiceIds;

  /**
   * The service start and end dates which are covered by this invoice.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private DateRange serviceDateRange;

  /**
   * The pre-tax subtotal amount, in micros of the invoice's currency. For example, if currency_code
   * is `USD`, then 1000000 represents one US dollar.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  @com.google.api.client.json.JsonString
  private java.lang.Long subtotalAmountMicros;

  /**
   * The invoice total amount, in micros of the invoice's currency. For example, if currency_code is
   * `USD`, then 1000000 represents one US dollar.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  @com.google.api.client.json.JsonString
  private java.lang.Long totalAmountMicros;

  /**
   * The sum of all taxes in invoice, in micros of the invoice's currency. For example, if
   * currency_code is `USD`, then 1000000 represents one US dollar.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  @com.google.api.client.json.JsonString
  private java.lang.Long totalTaxAmountMicros;

  /**
   * The budget grouping ID for this invoice. This field will only be set if the invoice level of
   * the corresponding billing profile was set to "Budget invoice grouping ID".
   * @return value or {@code null} for none
   */
  public java.lang.String getBudgetInvoiceGroupingId() {
    return budgetInvoiceGroupingId;
  }

  /**
   * The budget grouping ID for this invoice. This field will only be set if the invoice level of
   * the corresponding billing profile was set to "Budget invoice grouping ID".
   * @param budgetInvoiceGroupingId budgetInvoiceGroupingId or {@code null} for none
   */
  public Invoice setBudgetInvoiceGroupingId(java.lang.String budgetInvoiceGroupingId) {
    this.budgetInvoiceGroupingId = budgetInvoiceGroupingId;
    return this;
  }

  /**
   * The list of summarized information for each budget associated with this invoice. This field
   * will only be set if the invoice detail level of the corresponding billing profile was set to
   * "Budget level PO".
   * @return value or {@code null} for none
   */
  public java.util.List<BudgetSummary> getBudgetSummaries() {
    return budgetSummaries;
  }

  /**
   * The list of summarized information for each budget associated with this invoice. This field
   * will only be set if the invoice detail level of the corresponding billing profile was set to
   * "Budget level PO".
   * @param budgetSummaries budgetSummaries or {@code null} for none
   */
  public Invoice setBudgetSummaries(java.util.List<BudgetSummary> budgetSummaries) {
    this.budgetSummaries = budgetSummaries;
    return this;
  }

  /**
   * The ID of the original invoice being adjusted by this invoice, if applicable. May appear on the
   * invoice PDF as `Reference invoice number`. If replaced_invoice_ids is set, this field will be
   * empty.
   * @return value or {@code null} for none
   */
  public java.lang.String getCorrectedInvoiceId() {
    return correctedInvoiceId;
  }

  /**
   * The ID of the original invoice being adjusted by this invoice, if applicable. May appear on the
   * invoice PDF as `Reference invoice number`. If replaced_invoice_ids is set, this field will be
   * empty.
   * @param correctedInvoiceId correctedInvoiceId or {@code null} for none
   */
  public Invoice setCorrectedInvoiceId(java.lang.String correctedInvoiceId) {
    this.correctedInvoiceId = correctedInvoiceId;
    return this;
  }

  /**
   * The currency used in the invoice in ISO 4217 format.
   * @return value or {@code null} for none
   */
  public java.lang.String getCurrencyCode() {
    return currencyCode;
  }

  /**
   * The currency used in the invoice in ISO 4217 format.
   * @param currencyCode currencyCode or {@code null} for none
   */
  public Invoice setCurrencyCode(java.lang.String currencyCode) {
    this.currencyCode = currencyCode;
    return this;
  }

  /**
   * The display name of the invoice.
   * @return value or {@code null} for none
   */
  public java.lang.String getDisplayName() {
    return displayName;
  }

  /**
   * The display name of the invoice.
   * @param displayName displayName or {@code null} for none
   */
  public Invoice setDisplayName(java.lang.String displayName) {
    this.displayName = displayName;
    return this;
  }

  /**
   * The date when the invoice is due.
   * @return value or {@code null} for none
   */
  public Date getDueDate() {
    return dueDate;
  }

  /**
   * The date when the invoice is due.
   * @param dueDate dueDate or {@code null} for none
   */
  public Invoice setDueDate(Date dueDate) {
    this.dueDate = dueDate;
    return this;
  }

  /**
   * The unique ID of the invoice.
   * @return value or {@code null} for none
   */
  public java.lang.String getInvoiceId() {
    return invoiceId;
  }

  /**
   * The unique ID of the invoice.
   * @param invoiceId invoiceId or {@code null} for none
   */
  public Invoice setInvoiceId(java.lang.String invoiceId) {
    this.invoiceId = invoiceId;
    return this;
  }

  /**
   * The type of invoice document.
   * @return value or {@code null} for none
   */
  public java.lang.String getInvoiceType() {
    return invoiceType;
  }

  /**
   * The type of invoice document.
   * @param invoiceType invoiceType or {@code null} for none
   */
  public Invoice setInvoiceType(java.lang.String invoiceType) {
    this.invoiceType = invoiceType;
    return this;
  }

  /**
   * The date when the invoice was issued.
   * @return value or {@code null} for none
   */
  public Date getIssueDate() {
    return issueDate;
  }

  /**
   * The date when the invoice was issued.
   * @param issueDate issueDate or {@code null} for none
   */
  public Invoice setIssueDate(Date issueDate) {
    this.issueDate = issueDate;
    return this;
  }

  /**
   * The resource name of the invoice.
   * @return value or {@code null} for none
   */
  public java.lang.String getName() {
    return name;
  }

  /**
   * The resource name of the invoice.
   * @param name name or {@code null} for none
   */
  public Invoice setName(java.lang.String name) {
    this.name = name;
    return this;
  }

  /**
   * The total amount of costs or adjustments not tied to a particular budget, in micros of the
   * invoice's currency. For example, if currency_code is `USD`, then 1000000 represents one US
   * dollar.
   * @return value or {@code null} for none
   */
  public java.lang.Long getNonBudgetMicros() {
    return nonBudgetMicros;
  }

  /**
   * The total amount of costs or adjustments not tied to a particular budget, in micros of the
   * invoice's currency. For example, if currency_code is `USD`, then 1000000 represents one US
   * dollar.
   * @param nonBudgetMicros nonBudgetMicros or {@code null} for none
   */
  public Invoice setNonBudgetMicros(java.lang.Long nonBudgetMicros) {
    this.nonBudgetMicros = nonBudgetMicros;
    return this;
  }

  /**
   * The ID of the payments account the invoice belongs to. Appears on the invoice PDF as `Billing
   * Account Number`.
   * @return value or {@code null} for none
   */
  public java.lang.String getPaymentsAccountId() {
    return paymentsAccountId;
  }

  /**
   * The ID of the payments account the invoice belongs to. Appears on the invoice PDF as `Billing
   * Account Number`.
   * @param paymentsAccountId paymentsAccountId or {@code null} for none
   */
  public Invoice setPaymentsAccountId(java.lang.String paymentsAccountId) {
    this.paymentsAccountId = paymentsAccountId;
    return this;
  }

  /**
   * The ID of the payments profile the invoice belongs to. Appears on the invoice PDF as `Billing
   * ID`.
   * @return value or {@code null} for none
   */
  public java.lang.String getPaymentsProfileId() {
    return paymentsProfileId;
  }

  /**
   * The ID of the payments profile the invoice belongs to. Appears on the invoice PDF as `Billing
   * ID`.
   * @param paymentsProfileId paymentsProfileId or {@code null} for none
   */
  public Invoice setPaymentsProfileId(java.lang.String paymentsProfileId) {
    this.paymentsProfileId = paymentsProfileId;
    return this;
  }

  /**
   * The URL to download a PDF copy of the invoice. This URL is user specific and requires a valid
   * OAuth 2.0 access token to access. The access token must be provided in an `Authorization:
   * Bearer` HTTP header and be authorized for one of the following scopes: *
   * `https://www.googleapis.com/auth/display-video-mediaplanning` *
   * `https://www.googleapis.com/auth/display-video` The URL will be valid for 7 days after
   * retrieval of this invoice object or until this invoice is retrieved again.
   * @return value or {@code null} for none
   */
  public java.lang.String getPdfUrl() {
    return pdfUrl;
  }

  /**
   * The URL to download a PDF copy of the invoice. This URL is user specific and requires a valid
   * OAuth 2.0 access token to access. The access token must be provided in an `Authorization:
   * Bearer` HTTP header and be authorized for one of the following scopes: *
   * `https://www.googleapis.com/auth/display-video-mediaplanning` *
   * `https://www.googleapis.com/auth/display-video` The URL will be valid for 7 days after
   * retrieval of this invoice object or until this invoice is retrieved again.
   * @param pdfUrl pdfUrl or {@code null} for none
   */
  public Invoice setPdfUrl(java.lang.String pdfUrl) {
    this.pdfUrl = pdfUrl;
    return this;
  }

  /**
   * Purchase order number associated with the invoice.
   * @return value or {@code null} for none
   */
  public java.lang.String getPurchaseOrderNumber() {
    return purchaseOrderNumber;
  }

  /**
   * Purchase order number associated with the invoice.
   * @param purchaseOrderNumber purchaseOrderNumber or {@code null} for none
   */
  public Invoice setPurchaseOrderNumber(java.lang.String purchaseOrderNumber) {
    this.purchaseOrderNumber = purchaseOrderNumber;
    return this;
  }

  /**
   * The ID(s) of any originally issued invoice that is being cancelled by this invoice, if
   * applicable. Multiple invoices may be listed if those invoices are being consolidated into a
   * single invoice. May appear on invoice PDF as `Replaced invoice numbers`. If
   * corrected_invoice_id is set, this field will be empty.
   * @return value or {@code null} for none
   */
  public java.util.List<java.lang.String> getReplacedInvoiceIds() {
    return replacedInvoiceIds;
  }

  /**
   * The ID(s) of any originally issued invoice that is being cancelled by this invoice, if
   * applicable. Multiple invoices may be listed if those invoices are being consolidated into a
   * single invoice. May appear on invoice PDF as `Replaced invoice numbers`. If
   * corrected_invoice_id is set, this field will be empty.
   * @param replacedInvoiceIds replacedInvoiceIds or {@code null} for none
   */
  public Invoice setReplacedInvoiceIds(java.util.List<java.lang.String> replacedInvoiceIds) {
    this.replacedInvoiceIds = replacedInvoiceIds;
    return this;
  }

  /**
   * The service start and end dates which are covered by this invoice.
   * @return value or {@code null} for none
   */
  public DateRange getServiceDateRange() {
    return serviceDateRange;
  }

  /**
   * The service start and end dates which are covered by this invoice.
   * @param serviceDateRange serviceDateRange or {@code null} for none
   */
  public Invoice setServiceDateRange(DateRange serviceDateRange) {
    this.serviceDateRange = serviceDateRange;
    return this;
  }

  /**
   * The pre-tax subtotal amount, in micros of the invoice's currency. For example, if currency_code
   * is `USD`, then 1000000 represents one US dollar.
   * @return value or {@code null} for none
   */
  public java.lang.Long getSubtotalAmountMicros() {
    return subtotalAmountMicros;
  }

  /**
   * The pre-tax subtotal amount, in micros of the invoice's currency. For example, if currency_code
   * is `USD`, then 1000000 represents one US dollar.
   * @param subtotalAmountMicros subtotalAmountMicros or {@code null} for none
   */
  public Invoice setSubtotalAmountMicros(java.lang.Long subtotalAmountMicros) {
    this.subtotalAmountMicros = subtotalAmountMicros;
    return this;
  }

  /**
   * The invoice total amount, in micros of the invoice's currency. For example, if currency_code is
   * `USD`, then 1000000 represents one US dollar.
   * @return value or {@code null} for none
   */
  public java.lang.Long getTotalAmountMicros() {
    return totalAmountMicros;
  }

  /**
   * The invoice total amount, in micros of the invoice's currency. For example, if currency_code is
   * `USD`, then 1000000 represents one US dollar.
   * @param totalAmountMicros totalAmountMicros or {@code null} for none
   */
  public Invoice setTotalAmountMicros(java.lang.Long totalAmountMicros) {
    this.totalAmountMicros = totalAmountMicros;
    return this;
  }

  /**
   * The sum of all taxes in invoice, in micros of the invoice's currency. For example, if
   * currency_code is `USD`, then 1000000 represents one US dollar.
   * @return value or {@code null} for none
   */
  public java.lang.Long getTotalTaxAmountMicros() {
    return totalTaxAmountMicros;
  }

  /**
   * The sum of all taxes in invoice, in micros of the invoice's currency. For example, if
   * currency_code is `USD`, then 1000000 represents one US dollar.
   * @param totalTaxAmountMicros totalTaxAmountMicros or {@code null} for none
   */
  public Invoice setTotalTaxAmountMicros(java.lang.Long totalTaxAmountMicros) {
    this.totalTaxAmountMicros = totalTaxAmountMicros;
    return this;
  }

  // Covariant overrides so fluent use of the GenericJson API keeps the Invoice type.
  @Override
  public Invoice set(String fieldName, Object value) {
    return (Invoice) super.set(fieldName, value);
  }

  @Override
  public Invoice clone() {
    return (Invoice) super.clone();
  }

}
/**
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for
 * license information.
 *
 * Code generated by Microsoft (R) AutoRest Code Generator.
 */

package com.microsoft.azure.management.appservice.v2019_08_01.implementation;

import java.util.List;
import com.microsoft.azure.management.appservice.v2019_08_01.DatabaseBackupSetting;
import com.microsoft.azure.management.appservice.v2019_08_01.BackupRestoreOperationType;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.microsoft.rest.serializer.JsonFlatten;
import com.microsoft.azure.management.appservice.v2019_08_01.ProxyOnlyResource;

/**
 * Description of a restore request.
 *
 * <p>NOTE(review): generated AutoRest model. {@code @JsonFlatten} maps the
 * {@code properties.*} annotation values onto a nested "properties" JSON object during
 * (de)serialization, so these flat fields round-trip as {@code {"properties": {...}}}.
 * Mutators follow the fluent {@code withX} convention and return {@code this}.</p>
 */
@JsonFlatten
public class RestoreRequestInner extends ProxyOnlyResource {
    /**
     * SAS URL to the container.
     */
    // required = true: serializer treats this as a mandatory property
    @JsonProperty(value = "properties.storageAccountUrl", required = true)
    private String storageAccountUrl;

    /**
     * Name of a blob which contains the backup.
     */
    @JsonProperty(value = "properties.blobName")
    private String blobName;

    /**
     * &lt;code&gt;true&lt;/code&gt; if the restore operation can overwrite
     * target app; otherwise, &lt;code&gt;false&lt;/code&gt;.
     * &lt;code&gt;true&lt;/code&gt; is needed if trying to restore over an
     * existing app.
     */
    // primitive boolean (not Boolean): always serialized, defaults to false
    @JsonProperty(value = "properties.overwrite", required = true)
    private boolean overwrite;

    /**
     * Name of an app.
     */
    @JsonProperty(value = "properties.siteName")
    private String siteName;

    /**
     * Collection of databases which should be restored. This list has to match
     * the list of databases included in the backup.
     */
    @JsonProperty(value = "properties.databases")
    private List<DatabaseBackupSetting> databases;

    /**
     * Changes a logic when restoring an app with custom domains.
     * &lt;code&gt;true&lt;/code&gt; to remove custom domains automatically. If
     * &lt;code&gt;false&lt;/code&gt;, custom domains are added to
     * the app's object when it is being restored, but that might fail due to
     * conflicts during the operation.
     */
    @JsonProperty(value = "properties.ignoreConflictingHostNames")
    private Boolean ignoreConflictingHostNames;

    /**
     * Ignore the databases and only restore the site content.
     */
    @JsonProperty(value = "properties.ignoreDatabases")
    private Boolean ignoreDatabases;

    /**
     * Specify app service plan that will own restored site.
     */
    @JsonProperty(value = "properties.appServicePlan")
    private String appServicePlan;

    /**
     * Operation type. Possible values include: 'Default', 'Clone',
     * 'Relocation', 'Snapshot', 'CloudFS'.
     */
    @JsonProperty(value = "properties.operationType")
    private BackupRestoreOperationType operationType;

    /**
     * &lt;code&gt;true&lt;/code&gt; if SiteConfig.ConnectionStrings should be
     * set in new app; otherwise, &lt;code&gt;false&lt;/code&gt;.
     */
    @JsonProperty(value = "properties.adjustConnectionStrings")
    private Boolean adjustConnectionStrings;

    /**
     * App Service Environment name, if needed (only when restoring an app to
     * an App Service Environment).
     */
    @JsonProperty(value = "properties.hostingEnvironment")
    private String hostingEnvironment;

    /**
     * Get sAS URL to the container.
     *
     * @return the storageAccountUrl value
     */
    public String storageAccountUrl() {
        return this.storageAccountUrl;
    }

    /**
     * Set sAS URL to the container.
     *
     * @param storageAccountUrl the storageAccountUrl value to set
     * @return the RestoreRequestInner object itself.
     */
    public RestoreRequestInner withStorageAccountUrl(String storageAccountUrl) {
        this.storageAccountUrl = storageAccountUrl;
        return this;
    }

    /**
     * Get name of a blob which contains the backup.
     *
     * @return the blobName value
     */
    public String blobName() {
        return this.blobName;
    }

    /**
     * Set name of a blob which contains the backup.
     *
     * @param blobName the blobName value to set
     * @return the RestoreRequestInner object itself.
     */
    public RestoreRequestInner withBlobName(String blobName) {
        this.blobName = blobName;
        return this;
    }

    /**
     * Get &lt;code&gt;true&lt;/code&gt; if the restore operation can overwrite target app; otherwise, &lt;code&gt;false&lt;/code&gt;. &lt;code&gt;true&lt;/code&gt; is needed if trying to restore over an existing app.
     *
     * @return the overwrite value
     */
    public boolean overwrite() {
        return this.overwrite;
    }

    /**
     * Set &lt;code&gt;true&lt;/code&gt; if the restore operation can overwrite target app; otherwise, &lt;code&gt;false&lt;/code&gt;. &lt;code&gt;true&lt;/code&gt; is needed if trying to restore over an existing app.
     *
     * @param overwrite the overwrite value to set
     * @return the RestoreRequestInner object itself.
     */
    public RestoreRequestInner withOverwrite(boolean overwrite) {
        this.overwrite = overwrite;
        return this;
    }

    /**
     * Get name of an app.
     *
     * @return the siteName value
     */
    public String siteName() {
        return this.siteName;
    }

    /**
     * Set name of an app.
     *
     * @param siteName the siteName value to set
     * @return the RestoreRequestInner object itself.
     */
    public RestoreRequestInner withSiteName(String siteName) {
        this.siteName = siteName;
        return this;
    }

    /**
     * Get collection of databases which should be restored. This list has to match the list of databases included in the backup.
     *
     * @return the databases value
     */
    public List<DatabaseBackupSetting> databases() {
        return this.databases;
    }

    /**
     * Set collection of databases which should be restored. This list has to match the list of databases included in the backup.
     *
     * @param databases the databases value to set
     * @return the RestoreRequestInner object itself.
     */
    public RestoreRequestInner withDatabases(List<DatabaseBackupSetting> databases) {
        this.databases = databases;
        return this;
    }

    /**
     * Get changes a logic when restoring an app with custom domains. &lt;code&gt;true&lt;/code&gt; to remove custom domains automatically. If &lt;code&gt;false&lt;/code&gt;, custom domains are added to the app's object when it is being restored, but that might fail due to conflicts during the operation.
     *
     * @return the ignoreConflictingHostNames value
     */
    public Boolean ignoreConflictingHostNames() {
        return this.ignoreConflictingHostNames;
    }

    /**
     * Set changes a logic when restoring an app with custom domains. &lt;code&gt;true&lt;/code&gt; to remove custom domains automatically. If &lt;code&gt;false&lt;/code&gt;, custom domains are added to the app's object when it is being restored, but that might fail due to conflicts during the operation.
     *
     * @param ignoreConflictingHostNames the ignoreConflictingHostNames value to set
     * @return the RestoreRequestInner object itself.
     */
    public RestoreRequestInner withIgnoreConflictingHostNames(Boolean ignoreConflictingHostNames) {
        this.ignoreConflictingHostNames = ignoreConflictingHostNames;
        return this;
    }

    /**
     * Get ignore the databases and only restore the site content.
     *
     * @return the ignoreDatabases value
     */
    public Boolean ignoreDatabases() {
        return this.ignoreDatabases;
    }

    /**
     * Set ignore the databases and only restore the site content.
     *
     * @param ignoreDatabases the ignoreDatabases value to set
     * @return the RestoreRequestInner object itself.
     */
    public RestoreRequestInner withIgnoreDatabases(Boolean ignoreDatabases) {
        this.ignoreDatabases = ignoreDatabases;
        return this;
    }

    /**
     * Get specify app service plan that will own restored site.
     *
     * @return the appServicePlan value
     */
    public String appServicePlan() {
        return this.appServicePlan;
    }

    /**
     * Set specify app service plan that will own restored site.
     *
     * @param appServicePlan the appServicePlan value to set
     * @return the RestoreRequestInner object itself.
     */
    public RestoreRequestInner withAppServicePlan(String appServicePlan) {
        this.appServicePlan = appServicePlan;
        return this;
    }

    /**
     * Get operation type. Possible values include: 'Default', 'Clone', 'Relocation', 'Snapshot', 'CloudFS'.
     *
     * @return the operationType value
     */
    public BackupRestoreOperationType operationType() {
        return this.operationType;
    }

    /**
     * Set operation type. Possible values include: 'Default', 'Clone', 'Relocation', 'Snapshot', 'CloudFS'.
     *
     * @param operationType the operationType value to set
     * @return the RestoreRequestInner object itself.
     */
    public RestoreRequestInner withOperationType(BackupRestoreOperationType operationType) {
        this.operationType = operationType;
        return this;
    }

    /**
     * Get &lt;code&gt;true&lt;/code&gt; if SiteConfig.ConnectionStrings should be set in new app; otherwise, &lt;code&gt;false&lt;/code&gt;.
     *
     * @return the adjustConnectionStrings value
     */
    public Boolean adjustConnectionStrings() {
        return this.adjustConnectionStrings;
    }

    /**
     * Set &lt;code&gt;true&lt;/code&gt; if SiteConfig.ConnectionStrings should be set in new app; otherwise, &lt;code&gt;false&lt;/code&gt;.
     *
     * @param adjustConnectionStrings the adjustConnectionStrings value to set
     * @return the RestoreRequestInner object itself.
     */
    public RestoreRequestInner withAdjustConnectionStrings(Boolean adjustConnectionStrings) {
        this.adjustConnectionStrings = adjustConnectionStrings;
        return this;
    }

    /**
     * Get app Service Environment name, if needed (only when restoring an app to an App Service Environment).
     *
     * @return the hostingEnvironment value
     */
    public String hostingEnvironment() {
        return this.hostingEnvironment;
    }

    /**
     * Set app Service Environment name, if needed (only when restoring an app to an App Service Environment).
     *
     * @param hostingEnvironment the hostingEnvironment value to set
     * @return the RestoreRequestInner object itself.
     */
    public RestoreRequestInner withHostingEnvironment(String hostingEnvironment) {
        this.hostingEnvironment = hostingEnvironment;
        return this;
    }

}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.sling.commons.logservice.internal;

import java.util.Arrays;
import java.util.Collections;
import java.util.Enumeration;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;

import org.osgi.framework.Bundle;
import org.osgi.framework.BundleEvent;
import org.osgi.framework.BundleException;
import org.osgi.framework.Constants;
import org.osgi.framework.FrameworkEvent;
import org.osgi.framework.FrameworkListener;
import org.osgi.framework.ServiceEvent;
import org.osgi.framework.ServiceListener;
import org.osgi.framework.ServiceReference;
import org.osgi.framework.SynchronousBundleListener;
import org.osgi.service.component.ComponentConstants;
import org.osgi.service.log.LogEntry;
import org.osgi.service.log.LogListener;
import org.osgi.service.log.LogService;
import org.osgi.service.startlevel.StartLevel;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * The <code>LogSupport</code> class is the heart of the Log Service
 * implementation: it listens for framework, bundle and service events,
 * converts them into {@link LogEntry} instances, writes each entry to an
 * SLF4J logger named after the originating bundle, and dispatches entries
 * asynchronously to registered {@link LogListener}s through a dedicated
 * worker thread.
 */
public class LogSupport implements SynchronousBundleListener, ServiceListener, FrameworkListener {

    /**
     * The service property name of the component name (value is
     * "component.name"). Note: We use a private constant here to not create a
     * unneeded dependency on the org.osgi.service.component package.
     */
    private static final String COMPONENT_NAME = ComponentConstants.COMPONENT_NAME; // "component.name";

    /**
     * The empty enumeration currently returned on the {@link #getLog()} call
     * because we do not currently record the log events.
     */
    private final Enumeration<?> EMPTY = Collections.enumeration(Collections.emptyList());

    // The registered LogListeners (copy-on-write array; replaced, never
    // mutated in place, so readers may use a snapshot without holding the lock)
    private LogListenerProxy[] listeners;

    // The lock used to guard concurrent access to the listeners array
    private final Object listenersLock = new Object();

    // The loggers by bundle id used for logging messages originated from
    // specific bundles. Access-ordered LRU cache bounded to MAX_SIZE entries.
    @SuppressWarnings("serial")
    private final Map<Long, Logger> loggers = new LinkedHashMap<Long, Logger>(16, 0.75f, true) {

        private static final int MAX_SIZE = 50;

        @Override
        protected boolean removeEldestEntry(Map.Entry<Long, Logger> eldest) {
            return size() > MAX_SIZE;
        }
    };

    // the worker thread actually sending LogEvents to LogListeners
    private final LogEntryDispatcher logEntryDispatcher;

    // used to report the current start level on STARTLEVEL_CHANGED events
    private final StartLevel startLevelService;

    /* package */LogSupport(final StartLevel startLevelService) {
        logEntryDispatcher = new LogEntryDispatcher(this);
        logEntryDispatcher.start();
        this.startLevelService = startLevelService;
    }

    /**
     * Terminates the dispatcher thread and drops all registered listeners.
     */
    /* package */void shutdown() {

        // terminate the dispatcher and wait for its termination here
        logEntryDispatcher.terminate();
        try {
            logEntryDispatcher.join(1000L);
        } catch (InterruptedException ie) {
            // don't care
        }

        // drop all listeners
        synchronized (listenersLock) {
            listeners = null;
        }
    }

    // ---------- LogReaderService interface -----------------------------------

    /**
     * Registers the given listener on behalf of the given bundle, unless the
     * listener is already registered. The bundle is recorded so the listener
     * can be removed when the bundle stops.
     */
    /* package */void addLogListener(Bundle bundle, LogListener listener) {
        synchronized (listenersLock) {
            LogListenerProxy llp = new LogListenerProxy(bundle, listener);
            if (listeners == null) {
                listeners = new LogListenerProxy[] { llp };
            } else if (getListener(listener) < 0) {
                LogListenerProxy[] newListeners = new LogListenerProxy[listeners.length + 1];
                System.arraycopy(listeners, 0, newListeners, 0, listeners.length);
                newListeners[listeners.length] = llp;
                listeners = newListeners;
            }
        }
    }

    /**
     * Unregisters the given listener; a no-op if the listener is not
     * currently registered.
     */
    /* package */void removeLogListener(LogListener listener) {
        synchronized (listenersLock) {
            // no listeners registered, nothing to do
            if (listeners == null) {
                return;
            }

            // listener is not registered, nothing to do
            int idx = getListener(listener);
            if (idx < 0) {
                return;
            }

            LogListenerProxy[] newListeners = new LogListenerProxy[listeners.length - 1];
            if (idx > 0) {
                System.arraycopy(listeners, 0, newListeners, 0, idx);
            }
            if (idx < listeners.length - 1) {
                // copy the tail behind the removed element; the destination
                // offset must be idx (not 0) or the head segment copied above
                // would be overwritten
                System.arraycopy(listeners, idx + 1, newListeners, idx,
                    listeners.length - idx - 1);
            }
            listeners = newListeners;
        }
    }

    /**
     * Removes all registered LogListeners belonging to the given bundle. This
     * is the task required by the specification from a Log Service
     * implemenation:
     * <p>
     * <blockquote> When a bundle which registers a LogListener object is
     * stopped or otherwise releases the Log Reader Service, the Log Reader
     * Service must remove all of the bundle's listeners.</blockquote>
     * <p>
     *
     * @param bundle The bundle whose listeners are to be removed.
     */
    /* package */void removeLogListeners(Bundle bundle) {
        // grab an immediate copy of the array
        LogListenerProxy[] current = getListeners();
        if (current == null) {
            return;
        }

        // check for listeners by bundle
        for (int i = 0; i < current.length; i++) {
            if (current[i].hasBundle(bundle)) {
                // passing the proxy itself works because
                // LogListenerProxy.isSame also matches on identity
                removeLogListener(current[i]);
            }
        }
    }

    /**
     * Returns the index of the given listener in the listeners array or -1 if
     * the listener is not registered. Must be called with the listenersLock
     * held.
     */
    private int getListener(LogListener listener) {
        if (listeners != null) {
            for (int i = 0; i < listeners.length; i++) {
                if (listeners[i].isSame(listener)) {
                    return i;
                }
            }
        }

        // fall back to not found
        return -1;
    }

    /**
     * Returns the currently registered LogListeners
     */
    private LogListenerProxy[] getListeners() {
        synchronized (listenersLock) {
            return listeners;
        }
    }

    /**
     * Returns an empty enumeration for now because we do not implement log
     * entry recording for the moment.
     */
    Enumeration<?> getLog() {
        return EMPTY;
    }

    // ---------- Firing a log event -------------------------------------------

    /**
     * Logs the given log entry to the log file and enqueues for the dispatching
     * to the registered LogListeners in a separate worker thread.
     */
    /* package */void fireLogEvent(LogEntry logEntry) {
        // actually log it to SLF4J
        logOut(logEntry);

        // enqueue for asynchronous delivery
        logEntryDispatcher.enqueLogEntry(logEntry);
    }

    // ---------- BundleListener -----------------------------------------------

    /**
     * Listens for Bundle events and logs the respective events according to the
     * Log Service specification. In addition, all LogListener instances
     * registered for stopped bundles are removed by this method.
     */
    public void bundleChanged(BundleEvent event) {
        String message;
        switch (event.getType()) {
            case BundleEvent.INSTALLED:
                message = "BundleEvent INSTALLED";
                break;
            case BundleEvent.RESOLVED:
                message = "BundleEvent RESOLVED";
                break;
            case BundleEvent.STARTING:
                message = "BundleEvent STARTING";
                break;
            case BundleEvent.STARTED:
                message = "BundleEvent STARTED";
                break;
            case BundleEvent.STOPPING:
                message = "BundleEvent STOPPING";
                break;
            case BundleEvent.STOPPED:
                // this is special, as we have to fix the listener list for
                // stopped bundles
                removeLogListeners(event.getBundle());
                message = "BundleEvent STOPPED";
                break;
            case BundleEvent.UNRESOLVED:
                message = "BundleEvent UNRESOLVED";
                break;
            case BundleEvent.UPDATED:
                message = "BundleEvent UPDATED";
                break;
            case BundleEvent.UNINSTALLED:
                // remove any cached logger for the uninstalled bundle
                ungetLogger(event.getBundle());
                message = "BundleEvent UNINSTALLED";
                break;
            default:
                message = "BundleEvent " + event.getType();
        }

        LogEntry entry = new LogEntryImpl(event.getBundle(), null,
            LogService.LOG_INFO, message, null);
        fireLogEvent(entry);
    }

    // ---------- ServiceListener ----------------------------------------------

    /**
     * Listens for Service events and logs the respective events according to
     * the Log Service specification.
     */
    public void serviceChanged(ServiceEvent event) {
        int level = LogService.LOG_INFO;
        String message;
        switch (event.getType()) {
            case ServiceEvent.REGISTERED:
                message = "ServiceEvent REGISTERED";
                break;
            case ServiceEvent.MODIFIED:
                message = "ServiceEvent MODIFIED";
                level = LogService.LOG_DEBUG;
                break;
            case ServiceEvent.UNREGISTERING:
                message = "ServiceEvent UNREGISTERING";
                break;
            default:
                message = "ServiceEvent " + event.getType();
        }

        LogEntry entry = new LogEntryImpl(
            event.getServiceReference().getBundle(),
            event.getServiceReference(), level, message, null);
        fireLogEvent(entry);
    }

    // ---------- FrameworkListener --------------------------------------------

    /**
     * Listens for Framework events and logs the respective events according to
     * the Log Service specification.
     * <p>
     * In the case of a Framework ERROR which is a ClassNotFoundException for an
     * unresolved bundle, the message is logged at INFO level instead of ERROR
     * level as prescribed by the spec. This is because such a situation should
     * not really result in a Framework ERROR but the Apache Felix framework has
     * no means of controlling this at the moment (framework 1.0.4 release).
     */
    public void frameworkEvent(FrameworkEvent event) {
        int level = LogService.LOG_INFO;
        String message;
        Throwable exception = event.getThrowable();
        switch (event.getType()) {
            case FrameworkEvent.STARTED:
                message = "FrameworkEvent STARTED";
                break;
            case FrameworkEvent.ERROR:
                message = "FrameworkEvent ERROR";

                // special precaution for Felix.loadBundleClass event overkill
                // FIXME: actually, the error is ok, if the bundle failed to
                // resolve
                if (exception instanceof BundleException) {
                    StackTraceElement[] ste = exception.getStackTrace();
                    if (ste != null && ste.length > 0
                        && "loadBundleClass".equals(ste[0].getMethodName())) {
                        message += ": Class " + exception.getMessage()
                            + " not found";
                        if (event.getBundle() != null) {
                            message += " in bundle "
                                + event.getBundle().getSymbolicName() + " ("
                                + event.getBundle().getBundleId() + ")";
                        }
                        level = LogService.LOG_INFO;
                        exception = null; // don't care for a stack trace here
                        break;
                    }
                }

                level = LogService.LOG_ERROR;
                break;
            case FrameworkEvent.PACKAGES_REFRESHED:
                message = "FrameworkEvent PACKAGES REFRESHED";
                break;
            case FrameworkEvent.STARTLEVEL_CHANGED:
                message = "FrameworkEvent STARTLEVEL CHANGED to "
                    + this.startLevelService.getStartLevel();
                break;
            case FrameworkEvent.WARNING:
                message = "FrameworkEvent WARNING";
                break;
            case FrameworkEvent.INFO:
                message = "FrameworkEvent INFO";
                break;
            default:
                message = "FrameworkEvent " + event.getType();
        }

        final LogEntry entry = new LogEntryImpl(event.getBundle(), null, level,
            message, exception);
        fireLogEvent(entry);
    }

    // ---------- Effective logging --------------------------------------------

    /**
     * Get a logger for messages orginating from the given bundle. If no bundle
     * is specified, we use the system bundle logger.
     *
     * @param bundle The bundle for which a logger is to be returned.
     * @return The Logger for the bundle.
     */
    private Logger getLogger(Bundle bundle) {
        // Long.valueOf instead of the deprecated Long(long) constructor
        Long bundleId = Long.valueOf((bundle == null) ? 0 : bundle.getBundleId());

        Logger log;
        synchronized (loggers) {
            log = loggers.get(bundleId);
        }

        // cache miss: compute the logger name outside of the lock; a benign
        // race may create the same logger twice, the cache just keeps the last
        if (log == null) {
            String name;
            if (bundle == null) {

                // if we have no bundle, use the system bundle's name
                name = Constants.SYSTEM_BUNDLE_SYMBOLICNAME;

            } else {

                // otherwise use the bundle symbolic name
                name = bundle.getSymbolicName();

                // if the bundle has no symbolic name, use the location
                if (name == null) {
                    name = bundle.getLocation();
                }

                // if the bundle also has no location, use the bundle Id
                if (name == null) {
                    name = String.valueOf(bundle.getBundleId());
                }
            }

            log = LoggerFactory.getLogger(name);
            synchronized (loggers) {
                loggers.put(bundleId, log);
            }
        }
        return log;
    }

    /**
     * Removes the cached logger for the given bundle, for example if the
     * bundle is uninstalled and thus there will be no more logs from this
     * bundle.
     */
    private void ungetLogger(Bundle bundle) {
        synchronized (loggers) {
            loggers.remove(bundle.getBundleId());
        }
    }

    /**
     * Actually logs the given log entry to the logger for the bundle recorded
     * in the log entry.
     */
    private void logOut(LogEntry logEntry) {
        // get the logger for the bundle
        Logger log = getLogger(logEntry.getBundle());

        if (logEntry.getLevel() > getLevel(log))
            // early Exit, this message will not be logged, don't do any work...
            return;

        final StringBuilder msg = new StringBuilder();

        ServiceReference sr = logEntry.getServiceReference();
        if (sr != null) {
            msg.append("Service [");
            if (sr.getProperty(Constants.SERVICE_PID) != null) {
                msg.append(sr.getProperty(Constants.SERVICE_PID)).append(',');
            } else if (sr.getProperty(COMPONENT_NAME) != null) {
                msg.append(sr.getProperty(COMPONENT_NAME)).append(',');
            } else if (sr.getProperty(Constants.SERVICE_DESCRIPTION) != null) {
                msg.append(sr.getProperty(Constants.SERVICE_DESCRIPTION)).append(
                    ',');
            }
            msg.append(sr.getProperty(Constants.SERVICE_ID))
                .append(", ")
                .append(Arrays.toString((String[]) sr.getProperty(Constants.OBJECTCLASS)))
                .append("] ");
        }

        if (logEntry.getMessage() != null) {
            msg.append(logEntry.getMessage());
        }

        Throwable exception = logEntry.getException();
        if (exception != null) {
            msg.append(" (").append(exception).append(')');
        }

        String message = msg.toString();
        switch (logEntry.getLevel()) {
            case LogService.LOG_DEBUG:
                log.debug(message, exception);
                break;
            case LogService.LOG_INFO:
                log.info(message, exception);
                break;
            case LogService.LOG_WARNING:
                log.warn(message, exception);
                break;
            case LogService.LOG_ERROR:
                log.error(message, exception);
                break;
            default:
                // non-standard levels: more verbose than DEBUG maps to trace,
                // more severe than ERROR maps to error
                if (logEntry.getLevel() > LogService.LOG_DEBUG) {
                    log.trace(message, exception);
                } else if (logEntry.getLevel() < LogService.LOG_ERROR) {
                    log.error(message, exception);
                }
                break;
        }
    }

    /**
     * Returns the effective LogService level of the given SLF4J logger.
     */
    static int getLevel(Logger log) {
        if (log.isTraceEnabled())
            return LogService.LOG_DEBUG + 1; // No constant for trace in LogService
        else if (log.isDebugEnabled())
            return LogService.LOG_DEBUG;
        else if (log.isInfoEnabled())
            return LogService.LOG_INFO;
        else if (log.isWarnEnabled())
            return LogService.LOG_WARNING;
        return LogService.LOG_ERROR;
    }

    // ---------- internal class -----------------------------------------------

    /**
     * The <code>LogListenerProxy</code> class is a proxy to the actually
     * registered <code>LogListener</code> which also records the bundle
     * registering the listener. This allows for the removal of the log
     * listeners registered by bundles which have not been removed before the
     * bundle has been stopped.
     */
    private static class LogListenerProxy implements LogListener {

        // bitmask of bundle states in which events are still delivered
        private static final int RUNNING_BUNDLE = Bundle.STARTING
            | Bundle.ACTIVE | Bundle.STOPPING;

        private final Bundle bundle;

        private final LogListener delegatee;

        public LogListenerProxy(Bundle bundle, LogListener delegatee) {
            this.bundle = bundle;
            this.delegatee = delegatee;
        }

        public void logged(LogEntry entry) {
            if ((bundle.getState() & RUNNING_BUNDLE) != 0) {
                delegatee.logged(entry);
            }
        }

        /* package */boolean isSame(LogListener listener) {
            return listener == delegatee || listener == this;
        }

        /* package */boolean hasBundle(Bundle bundle) {
            return this.bundle == bundle;
        }
    }

    /**
     * The <code>LogEntryDispatcher</code> implements the worker thread
     * responsible for delivering log events to the log listeners.
     */
    private static class LogEntryDispatcher extends Thread {

        // provides the actual log listeners on demand
        private final LogSupport logSupport;

        // the queue of log events to be dispatched
        private final BlockingQueue<LogEntry> dispatchQueue;

        // true as long as the thread is active
        private boolean active;

        LogEntryDispatcher(LogSupport logSupport) {
            super("LogEntry Dispatcher");

            this.logSupport = logSupport;
            this.dispatchQueue = new LinkedBlockingQueue<LogEntry>();
            this.active = true;
        }

        /**
         * Add a log entry for dispatching.
         */
        void enqueLogEntry(LogEntry logEntry) {
            dispatchQueue.offer(logEntry);
        }

        /**
         * Get the next log entry for dispatching. This method blocks until an
         * event is available or the thread is interrupted.
         *
         * @return The next event to dispatch
         * @throws InterruptedException If the thread has been interrupted while
         *             waiting for a log event to dispatch.
         */
        LogEntry dequeueLogEntry() throws InterruptedException {
            return dispatchQueue.take();
        }

        /**
         * Terminates this work thread by resetting the active flag and
         * interrupting itself such that the {@link #dequeueLogEntry()} is
         * aborted for the thread to terminate.
         */
        void terminate() {
            active = false;
            interrupt();
        }

        /**
         * Runs the actual log event dispatching. This method continues to get
         * log events from the {@link #dequeueLogEntry()} method until the
         * active flag is reset.
         */
        @Override
        public void run() {
            while (active) {

                LogEntry logEntry = null;
                try {
                    logEntry = dequeueLogEntry();
                } catch (InterruptedException ie) {
                    // don't care, this is expected: terminate() interrupts us
                    // to break out of the blocking take()
                }

                // dispatch the log entry
                if (logEntry != null) {

                    // grab an immediate copy of the array
                    LogListener[] logListeners = logSupport.getListeners();

                    // fire the events outside of the listenersLock
                    if (logListeners != null) {
                        for (LogListener logListener : logListeners) {
                            try {
                                logListener.logged(logEntry);
                            } catch (Throwable t) {
                                // should we really care ??
                            }
                        }
                    }
                }
            }
        }
    }
}